From 7c9e70701051685711090cfcac0bcd55495a2a2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Piotr=20Anio=C5=82a?= Date: Sun, 2 Nov 2025 11:11:18 +0100 Subject: [PATCH 01/25] Added Ollama-based AI capability MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Piotr Anioła --- requirements.txt | 1 + src/__init__.py | 0 src/javacore_analyser/ai/__init__.py | 0 src/javacore_analyser/ai/ai.py | 42 +++++++++++++++++++ .../ai/ai_overview_prompter.py | 27 ++++++++++++ src/javacore_analyser/ai/prompter.py | 13 ++++++ src/javacore_analyser/ai/tips_prompter.py | 18 ++++++++ src/javacore_analyser/constants.py | 2 + src/javacore_analyser/data/xml/report.xsl | 25 +++++++---- .../javacore_analyser_batch.py | 2 + src/javacore_analyser/javacore_set.py | 17 ++++++++ 11 files changed, 140 insertions(+), 7 deletions(-) create mode 100644 src/__init__.py create mode 100644 src/javacore_analyser/ai/__init__.py create mode 100644 src/javacore_analyser/ai/ai.py create mode 100644 src/javacore_analyser/ai/ai_overview_prompter.py create mode 100644 src/javacore_analyser/ai/prompter.py create mode 100644 src/javacore_analyser/ai/tips_prompter.py diff --git a/requirements.txt b/requirements.txt index 715bdf1..73a2e52 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,3 +6,4 @@ waitress # Production WSGI server flask # WSGI server for development the code tqdm haralyzer +ollama diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/javacore_analyser/ai/__init__.py b/src/javacore_analyser/ai/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/javacore_analyser/ai/ai.py b/src/javacore_analyser/ai/ai.py new file mode 100644 index 0000000..09d9b77 --- /dev/null +++ b/src/javacore_analyser/ai/ai.py @@ -0,0 +1,42 @@ +# +# Copyright IBM Corp. 
2025 - 2025 +# SPDX-License-Identifier: Apache-2.0 +# + +from ollama import chat +from ollama import ChatResponse + +from javacore_analyser.constants import DEFAULT_MODEL + + +# prerequisites: +# install Ollama from https://ollama.com/download +# > ollama pull granite3.3:8b +# > ollama pull granite-code:3b +# > pip install ollama + +class Ai: + + def __init__(self, javacore_set): + self.prompt = "" + self.javacore_set = javacore_set + self.model = DEFAULT_MODEL + + + def set_model(self, model): + self.model = model + + + def infuse(self, prompter): + content = "" + self.prompt = prompter.construct_prompt() + if self.prompt and len(self.prompt) > 0: + response: ChatResponse = chat(model=self.model, messages=[ + { + 'role': 'user', + 'content': self.prompt, + }, + ]) + content = response.message.content + content = content.replace('\n', '
') return content \ No newline at end of file diff --git a/src/javacore_analyser/ai/ai_overview_prompter.py b/src/javacore_analyser/ai/ai_overview_prompter.py new file mode 100644 index 0000000..d25e10b --- /dev/null +++ b/src/javacore_analyser/ai/ai_overview_prompter.py @@ -0,0 +1,27 @@ +# +# Copyright IBM Corp. 2025 - 2025 +# SPDX-License-Identifier: Apache-2.0 +# + +from javacore_analyser.ai.prompter import Prompter + + +class AiOverviewPrompter(Prompter): + + def construct_prompt(self): + prompt = 'Given the information below, explain how to improve the performance of the java application\n' + prompt += 'Java configuration:\n' + prompt += 'Number of CPUs: ' + self.javacore_set.number_of_cpus + '\n' + prompt += 'Xmx=' + self.javacore_set.xmx + '\n' + prompt += 'Xms=' + self.javacore_set.xms + '\n' + prompt += 'Xmn=' + self.javacore_set.xmn + '\n' + prompt += 'GC policy: ' + self.javacore_set.gc_policy + '\n' + prompt += 'Compressed references: ' + str(self.javacore_set.compressed_refs) + '\n' + prompt += 'Verbose GC: ' + str(self.javacore_set.verbose_gc) + '\n' + prompt += 'OS level: ' + self.javacore_set.os_level + '\n' + prompt += 'System architecture: ' + self.javacore_set.architecture + '\n' + prompt += 'Java version: ' + self.javacore_set.java_version + '\n' + # jvm_start_time = "" + prompt += 'Command line: ' + self.javacore_set.cmd_line + '\n' + # prompt += self.javacore_set.user_args = [] + return prompt diff --git a/src/javacore_analyser/ai/prompter.py b/src/javacore_analyser/ai/prompter.py new file mode 100644 index 0000000..a05007f --- /dev/null +++ b/src/javacore_analyser/ai/prompter.py @@ -0,0 +1,13 @@ +# +# Copyright IBM Corp. 
2025 - 2025 +# SPDX-License-Identifier: Apache-2.0 +# + +class Prompter: + + def __init__(self, javacore_set): + self.javacore_set = javacore_set + + + def construct_prompt(self): + return "" \ No newline at end of file diff --git a/src/javacore_analyser/ai/tips_prompter.py b/src/javacore_analyser/ai/tips_prompter.py new file mode 100644 index 0000000..02dae7c --- /dev/null +++ b/src/javacore_analyser/ai/tips_prompter.py @@ -0,0 +1,18 @@ +# +# Copyright IBM Corp. 2025 - 2025 +# SPDX-License-Identifier: Apache-2.0 +# + +from src.javacore_analyser.ai.prompter import Prompter + + +class TipsPrompter(Prompter) : + + def construct_prompt(self): + prompt = "" + if len(self.javacore_set.tips) > 0: + prompt = "Analyse the tips to help identify performance bottlenecks in a Java application: \n" + for tip in self.javacore_set.tips: + for message in tip: + prompt += message + '\n' + return prompt \ No newline at end of file diff --git a/src/javacore_analyser/constants.py b/src/javacore_analyser/constants.py index 4ba4e63..1656845 100644 --- a/src/javacore_analyser/constants.py +++ b/src/javacore_analyser/constants.py @@ -40,3 +40,5 @@ DEFAULT_REPORTS_DIR = "reports" DEFAULT_PORT = 5000 TEMP_DIR = "temp_data" # Folder to store temporary data for creating reports + +DEFAULT_MODEL = 'granite3.3:8b' diff --git a/src/javacore_analyser/data/xml/report.xsl b/src/javacore_analyser/data/xml/report.xsl index f30aa02..d784eb4 100644 --- a/src/javacore_analyser/data/xml/report.xsl +++ b/src/javacore_analyser/data/xml/report.xsl @@ -334,6 +334,10 @@ href="javascript:expand_it(system_properties, toggle_system_properties)" class="expandit">System Information