Commit 4cfb2f7

Add support for OpenRouter GPT-5 (#35)
This PR adds support for GPT-5 and newer OpenAI models (O3, O4) in the AI chat panel when using OpenRouter! These models don't accept the temperature parameter, so the provider now omits it for them.
1 parent 6711d26 commit 4cfb2f7
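
For context, here is a minimal standalone sketch of the behavior this commit introduces (names like buildPayload and NO_TEMPERATURE_MODELS are simplified stand-ins, not the actual provider code): the temperature parameter is only attached to the request payload when the model accepts it.

// Sketch: conditional temperature handling, mirroring the commit's logic.
// NO_TEMPERATURE_MODELS and buildPayload are illustrative names only.
const NO_TEMPERATURE_MODELS = ['openai/gpt-5', 'openai/o3', 'openai/o4'];

function shouldExcludeTemperature(modelName: string): boolean {
  return NO_TEMPERATURE_MODELS.some(pattern => modelName.includes(pattern));
}

function buildPayload(modelName: string, temperature?: number): Record<string, unknown> {
  const payload: Record<string, unknown> = { model: modelName };
  // Same guard as in OpenRouterProvider: skip temperature for models that reject it.
  if (temperature !== undefined && !shouldExcludeTemperature(modelName)) {
    payload.temperature = temperature;
  }
  return payload;
}

console.log(buildPayload('openai/gpt-4o', 0.1)); // { model: 'openai/gpt-4o', temperature: 0.1 }
console.log(buildPayload('openai/gpt-5', 0.1));  // { model: 'openai/gpt-5' } (temperature omitted)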

File tree

1 file changed: +25 -5 lines


front_end/panels/ai_chat/LLM/OpenRouterProvider.ts

Lines changed: 25 additions & 5 deletions
@@ -63,6 +63,22 @@ export class OpenRouterProvider extends LLMBaseProvider {
     super();
   }
 
+  /**
+   * Check if a model doesn't support temperature parameter
+   * OpenAI's GPT-5, O3, and O4 models accessed through OpenRouter don't support temperature
+   */
+  private shouldExcludeTemperature(modelName: string): boolean {
+    // OpenAI models that don't support temperature parameter
+    // These are accessed through OpenRouter as 'openai/model-name'
+    const noTemperatureModels = [
+      'openai/gpt-5',
+      'openai/o3',
+      'openai/o4'
+    ];
+
+    return noTemperatureModels.some(pattern => modelName.includes(pattern));
+  }
+
   /**
    * Get the chat completions endpoint URL
    */
@@ -231,8 +247,8 @@ export class OpenRouterProvider extends LLMBaseProvider {
       messages: this.convertMessagesToOpenRouter(messages),
     };
 
-    // Add temperature if provided
-    if (options?.temperature !== undefined) {
+    // Add temperature if provided and model supports it
+    if (options?.temperature !== undefined && !this.shouldExcludeTemperature(modelName)) {
       payloadBody.temperature = options.temperature;
     }
 
@@ -571,9 +587,13 @@ export class OpenRouterProvider extends LLMBaseProvider {
     try {
       const testPrompt = 'Please respond with "Connection successful!" to confirm the connection is working.';
 
-      const response = await this.call(modelName, testPrompt, '', {
-        temperature: 0.1,
-      });
+      // Only add temperature if the model supports it
+      const callOptions: LLMCallOptions = {};
+      if (!this.shouldExcludeTemperature(modelName)) {
+        callOptions.temperature = 0.1;
+      }
+
+      const response = await this.call(modelName, testPrompt, '', callOptions);
 
       if (response.text?.toLowerCase().includes('connection')) {
         return {
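
Note that the check uses String.prototype.includes, so it also matches versioned or suffixed model IDs. A quick standalone sketch (not part of the commit) of what the guard matches:

// Standalone illustration of the substring matching used by shouldExcludeTemperature.
const noTemperatureModels = ['openai/gpt-5', 'openai/o3', 'openai/o4'];
const excluded = (model: string): boolean =>
    noTemperatureModels.some(pattern => model.includes(pattern));

console.log(excluded('openai/gpt-5'));      // true
console.log(excluded('openai/gpt-5-mini')); // true  (suffixed variants match too)
console.log(excluded('openai/o3-mini'));    // true
console.log(excluded('openai/gpt-4o'));     // false (temperature still sent)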
