
Commit c832135

Merge pull request #6 from P4o1o/local_llm_suggestion
Support for different LLM hosts (remote or local, compatible with OpenAI API interface)
2 parents 626f5fc + eb0ad17 commit c832135

File tree: 4 files changed, +31 / -21 lines

README.md

Lines changed: 9 additions & 1 deletion
@@ -93,7 +93,15 @@ vibesafe scan -r
 vibesafe scan --report
 ```

-**Generate AI Report (Requires API Key):**
+*Using a local llm host for report (the llm host must support OpenAI API)
+```bash
+# example with ollama at local host with default ollama port
+vibesafe scan --url http://127.0.0.1:11434 --model gemma3:27b-it-q8_0
+```
+
+if --url flag is not specified the report will be done by OpenAI (you will need an OpenAI API Key, see below)
+
+**Generate AI Report from OpenAI (Requires API Key):**

 To generate fix suggestions in the Markdown report, you need an OpenAI API key.

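The README snippet above assumes the target host speaks the OpenAI chat completions API. As a quick sanity check (not part of this commit, and assuming a local Ollama instance on its default port with the model already pulled), something like the following confirms the `/v1/chat/completions` route answers before pointing `vibesafe scan --url` at it:

```typescript
// Hypothetical smoke test, not included in the commit: verify that a local
// Ollama server exposes the OpenAI-compatible chat completions endpoint.
const base = 'http://127.0.0.1:11434'; // default Ollama port, as in the README example

async function checkOpenAICompatibility(): Promise<void> {
  const res = await fetch(`${base}/v1/chat/completions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: 'gemma3:27b-it-q8_0', // any model already pulled locally
      messages: [{ role: 'user', content: 'Reply with OK.' }],
    }),
  });
  console.log(res.ok ? 'OpenAI-compatible endpoint reachable' : `Unexpected status ${res.status}`);
}

checkOpenAICompatibility().catch(console.error);
```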
src/index.ts

Lines changed: 3 additions & 1 deletion
@@ -53,6 +53,8 @@ program.command('scan')
   .option('-o, --output <file>', 'Specify JSON output file path (e.g., report.json)')
   .option('-r, --report [file]', 'Specify Markdown report file path (defaults to VIBESAFE-REPORT.md)')
   .option('--high-only', 'Only report high severity issues')
+  .option('-m, --model <model>', 'Specify OpenAI model to use for suggestions. If not specified the program will use gpt-4.1-nano', 'gpt-4.1-nano')
+  .option('-u, --url <url>', 'Use the specified url (e.g. http://localhost:11434 for ollama or https://api.openai.com for ChatGPT) for ai suggestions. If not specified the program will call OpenAI API', 'https://api.openai.com')
   .action(async (directory, options) => {
     const rootDir = path.resolve(directory);
     console.log(`Scanning directory: ${rootDir}`);
@@ -310,7 +312,7 @@ program.command('scan')
       infoSecretFindings: infoSecretFindings
     };
     try {
-      const markdownContent = await generateMarkdownReport(reportData);
+      const markdownContent = await generateMarkdownReport(reportData, options.url, options.model);
       fs.writeFileSync(reportPath, markdownContent);
       console.log(chalk.green(`\nMarkdown report generated successfully at ${reportPath}`));
     } catch (error: any) {
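For readers unfamiliar with commander's option handling, the third argument to `.option()` is the default value, which is how `options.url` and `options.model` stay defined when the flags are omitted. A minimal standalone sketch (hypothetical, mirroring the flags above rather than the project's full `scan` command):

```typescript
import { Command } from 'commander';

// Minimal sketch, not the project's actual CLI: shows how the new flags resolve.
const program = new Command();

program
  .command('scan')
  .argument('[directory]', 'directory to scan', '.')
  .option('-m, --model <model>', 'model used for AI suggestions', 'gpt-4.1-nano')
  .option('-u, --url <url>', 'OpenAI-compatible API host', 'https://api.openai.com')
  .action((directory, options) => {
    // `scan` alone                      -> url: https://api.openai.com, model: gpt-4.1-nano
    // `scan -u http://127.0.0.1:11434`  -> only the host changes, the model default remains
    console.log(directory, options.url, options.model);
  });

program.parse(process.argv);
```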

src/reporting/aiSuggestions.ts

Lines changed: 5 additions & 4 deletions
@@ -51,11 +51,12 @@ const MAX_FINDINGS_PER_TYPE = 10;
 /**
  * Generates AI-powered suggestions for fixing findings.
  * @param reportData The aggregated findings.
- * @param apiKey OpenAI API key.
+ * @param openaiConf OpenAI API key and url.
+ * @param model model name.
  * @returns A promise resolving to a Markdown string with suggestions.
  */
-export async function generateAISuggestions(reportData: ReportData, apiKey: string): Promise<string> {
-  const openai = new OpenAI({ apiKey });
+export async function generateAISuggestions(reportData: ReportData, openaiConf: {baseURL:string, apiKey: string}, model: string): Promise<string> {
+  const openai = new OpenAI(openaiConf);

   // Prepare a simplified list of findings for the prompt
   const simplifiedFindings: SimplifiedFinding[] = [
@@ -92,7 +93,7 @@ export async function generateAISuggestions(reportData: ReportData, apiKey: stri

   try {
     const completion = await openai.chat.completions.create({
-      model: "gpt-4.1-nano",
+      model: model,
       messages: [
         { role: "system", content: "You are a helpful security assistant providing fix suggestions for code vulnerabilities." },
         { role: "user", content: prompt }
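The core of the change is that the official `openai` client accepts a `baseURL` alongside the API key, so a single code path can target either api.openai.com or a local OpenAI-compatible server. A self-contained sketch of that pattern (illustrative, assuming the v4-style `openai` package; not the project's exact call site):

```typescript
import OpenAI from 'openai';

// Sketch of the pattern the new signature enables (illustrative, not the project code).
async function suggestFix(conf: { baseURL: string; apiKey: string }, model: string): Promise<string> {
  // e.g. conf = { baseURL: 'http://127.0.0.1:11434/v1', apiKey: 'unused-placeholder' }
  const openai = new OpenAI(conf);
  const completion = await openai.chat.completions.create({
    model, // 'gemma3:27b-it-q8_0' against Ollama, 'gpt-4.1-nano' against OpenAI
    messages: [
      { role: 'system', content: 'You are a helpful security assistant.' },
      { role: 'user', content: 'Suggest a fix for a hard-coded credential in a config file.' },
    ],
  });
  return completion.choices[0]?.message?.content ?? '';
}
```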

src/reporting/markdown.ts

Lines changed: 14 additions & 15 deletions
@@ -53,7 +53,7 @@ function getSeverityInfo(severity: FindingSeverity | SecretFinding['severity'] |
  * @param reportData The aggregated findings.
  * @returns A Markdown formatted string.
  */
-export async function generateMarkdownReport(reportData: ReportData): Promise<string> {
+export async function generateMarkdownReport(reportData: ReportData, url: string, model: string = 'gpt-4.1-nano'): Promise<string> {
   let markdown = `# VibeSafe Security Scan Report ✨🛡️\n\n`;
   markdown += `Generated: ${new Date().toISOString()}\n\n`;

@@ -198,20 +198,19 @@ export async function generateMarkdownReport(reportData: ReportData): Promise<st
     }
   }

-  // --- AI Suggestions ---
-  const apiKey = process.env.OPENAI_API_KEY;
-  if (apiKey && apiKey !== 'YOUR_API_KEY_PLACEHOLDER') {
-    const spinner = ora('Generating AI suggestions (using OpenAI GPT-4o-mini)... ').start();
-    try {
-      const aiSuggestions = await generateAISuggestions(reportData, apiKey);
-      spinner.succeed('AI suggestions generated.');
-      markdown += aiSuggestions; // Append the suggestions section
-    } catch (error: any) {
-      spinner.fail('AI suggestion generation failed.');
-      markdown += `\n## AI Suggestions\n\n*Error generating suggestions: ${error.message}*\n`; // Append error message
-    }
-  } else {
-    markdown += `\n*AI suggestions skipped. Set the OPENAI_API_KEY environment variable to enable.*\n`;
+  // --- AI Suggestions ---
+  let apiKey = process.env.OPENAI_API_KEY;
+  if (! apiKey){ // ollama dont need an API key but this field can't be none
+    apiKey = 'YOUR_API_KEY_PLACEHOLDER';
+  }
+  const spinner = ora(`Generating AI suggestions (using API from ${url}/v1 with model: ${model})... `).start();
+  try {
+    const aiSuggestions = await generateAISuggestions(reportData, {baseURL: url + '/v1', apiKey: apiKey}, model);
+    spinner.succeed('AI suggestions generated.');
+    markdown += aiSuggestions; // Append the suggestions section
+  } catch (error: any) {
+    spinner.fail('AI suggestion generation failed.');
+    markdown += `\n## AI Suggestions\n\n*Error generating suggestions: ${error.message}*\n`; // Append error message
   }

   return markdown;
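Two details of this hunk are easy to miss: the `--url` value gets `/v1` appended before it reaches the client, and when `OPENAI_API_KEY` is unset a placeholder key is substituted, because the client requires a non-empty key even though hosts like Ollama ignore it. A hypothetical helper capturing that behaviour (the commit inlines this logic; the trailing-slash trim here is an extra precaution not present in the diff):

```typescript
// Hypothetical helper, not part of the commit: the diff performs this inline.
function resolveOpenAIConf(url: string): { baseURL: string; apiKey: string } {
  // Local OpenAI-compatible hosts (e.g. Ollama) ignore the key, but the client
  // still needs a non-empty string, hence the placeholder fallback.
  const apiKey = process.env.OPENAI_API_KEY ?? 'YOUR_API_KEY_PLACEHOLDER';
  return { baseURL: `${url.replace(/\/+$/, '')}/v1`, apiKey };
}

// resolveOpenAIConf('http://127.0.0.1:11434')
//   -> { baseURL: 'http://127.0.0.1:11434/v1', apiKey: process.env.OPENAI_API_KEY ?? placeholder }
```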
