
Commit 8849caf
DEV: Transition "Select model" settings to only use LlmModels (#675)
We no longer support the "provider:model" format in the "ai_helper_model" and "ai_embeddings_semantic_search_hyde_model" settings. We'll migrate existing values and work with our new data-driven LLM configs from now on.
1 parent ed3d552 commit 8849caf

21 files changed: +111 -88 lines
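In short: these settings used to store a "provider:model" pair and now store a reference to an LlmModel record. A hedged sketch of the two value shapes (the model name and the id 42 are made up for illustration):

  # Before this commit: provider and model name joined by a colon.
  SiteSetting.ai_helper_model = "open_ai:gpt-4"

  # After: a reference to the llm_models row with that id.
  SiteSetting.ai_helper_model = "custom:42"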

app/controllers/discourse_ai/admin/ai_llms_controller.rb

Lines changed: 2 additions & 11 deletions

@@ -53,12 +53,7 @@ def update
       def destroy
         llm_model = LlmModel.find(params[:id])
 
-        dependant_settings = %i[ai_helper_model ai_embeddings_semantic_search_hyde_model]
-
-        in_use_by = []
-        dependant_settings.each do |s_name|
-          in_use_by << s_name if SiteSetting.public_send(s_name) == "custom:#{llm_model.id}"
-        end
+        in_use_by = DiscourseAi::Configuration::LlmValidator.new.modules_using(llm_model)
 
         if !in_use_by.empty?
           return(
@@ -85,11 +80,7 @@ def test
 
         llm_model = LlmModel.new(ai_llm_params)
 
-        DiscourseAi::Completions::Llm.proxy_from_obj(llm_model).generate(
-          "How much is 1 + 1?",
-          user: current_user,
-          feature_name: "llm_validator",
-        )
+        DiscourseAi::Configuration::LlmValidator.new.run_test(llm_model)
 
        render json: { success: true }
      rescue DiscourseAi::Completions::Endpoints::Base::CompletionFailed => e
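Both actions now delegate to DiscourseAi::Configuration::LlmValidator. A hedged sketch of how the new modules_using call guards deletion (the id and setting value are made up):

  llm_model = LlmModel.find(42)              # hypothetical record
  SiteSetting.ai_helper_model = "custom:42"  # the helper still points at it
  DiscourseAi::Configuration::LlmValidator.new.modules_using(llm_model)
  # => [:ai_helper_model], so destroy returns an error instead of deleting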
Lines changed: 40 additions & 0 deletions

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class ChooseLlmModelSettingMigration < ActiveRecord::Migration[7.0]
+  def up
+    transition_to_llm_model("ai_helper_model")
+    transition_to_llm_model("ai_embeddings_semantic_search_hyde_model")
+  end
+
+  def transition_to_llm_model(llm_setting_name)
+    setting_value =
+      DB
+        .query_single(
+          "SELECT value FROM site_settings WHERE name = :llm_setting",
+          llm_setting: llm_setting_name,
+        )
+        .first
+        .to_s
+
+    return if setting_value.empty?
+
+    provider_and_model = setting_value.split(":")
+    provider = provider_and_model.first
+    model = provider_and_model.second
+    return if provider == "custom"
+
+    llm_model_id = DB.query_single(<<~SQL, provider: provider, model: model).first.to_s
+      SELECT id FROM llm_models WHERE provider = :provider AND name = :model
+    SQL
+
+    return if llm_model_id.empty?
+
+    DB.exec(<<~SQL, llm_setting: llm_setting_name, new_value: "custom:#{llm_model_id}")
+      UPDATE site_settings SET value=:new_value WHERE name=:llm_setting
+    SQL
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end
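A worked example of what transition_to_llm_model does, with made-up values:

  # Suppose ai_helper_model was stored as "open_ai:gpt-4" and llm_models
  # contains a row (id: 7, provider: "open_ai", name: "gpt-4").
  "open_ai:gpt-4".split(":")  # => ["open_ai", "gpt-4"]
  # The id lookup finds 7, so the setting row is rewritten in place:
  # "open_ai:gpt-4" becomes "custom:7".
  # Values already prefixed "custom:" are skipped, as are settings with
  # no matching llm_models row.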

lib/completions/llm.rb

Lines changed: 5 additions & 1 deletion

@@ -20,7 +20,11 @@ class Llm
     class << self
       def provider_names
         providers = %w[aws_bedrock anthropic vllm hugging_face cohere open_ai google azure]
-        providers << "ollama" if Rails.env.development?
+        if !Rails.env.production?
+          providers << "fake"
+          providers << "ollama"
+        end
+
         providers
       end
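Enumerating the fake provider outside production is what lets the spec changes below swap the legacy "fake:fake" string for a real LlmModel record. Inferred from the diff:

  DiscourseAi::Completions::Llm.provider_names.include?("fake")
  # => true in development and test, false in production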

lib/configuration/llm_dependency_validator.rb

Lines changed: 8 additions & 10 deletions

@@ -10,19 +10,17 @@ def initialize(opts = {})
       def valid_value?(val)
         return true if val == "f"
 
-        SiteSetting.public_send(llm_dependency_setting_name).present?
-      end
+        @llm_dependency_setting_name =
+          DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name])
 
-      def error_message
-        I18n.t("discourse_ai.llm.configuration.set_llm_first", setting: llm_dependency_setting_name)
+        SiteSetting.public_send(@llm_dependency_setting_name).present?
       end
 
-      def llm_dependency_setting_name
-        if @opts[:name] == :ai_embeddings_semantic_search_enabled
-          :ai_embeddings_semantic_search_hyde_model
-        else
-          :ai_helper_model
-        end
+      def error_message
+        I18n.t(
+          "discourse_ai.llm.configuration.set_llm_first",
+          setting: @llm_dependency_setting_name,
+        )
       end
     end
   end
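The hard-coded if/else moves into LlmValidator's new mapping; a sketch of the lookup this validator now performs, following the diff:

  DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(
    :ai_embeddings_semantic_search_enabled,
  )
  # => :ai_embeddings_semantic_search_hyde_model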

lib/configuration/llm_enumerator.rb

Lines changed: 6 additions & 14 deletions

@@ -10,22 +10,14 @@ def self.valid_value?(val)
       end
 
       def self.values
-        begin
-          llm_models =
-            DiscourseAi::Completions::Llm.models_by_provider.flat_map do |provider, models|
-              endpoint = DiscourseAi::Completions::Endpoints::Base.endpoint_for(provider.to_s)
+        values = DB.query_hash(<<~SQL)
+          SELECT display_name AS name, id AS value
+          FROM llm_models
+        SQL
 
-              models.map do |model_name|
-                { name: endpoint.display_name(model_name), value: "#{provider}:#{model_name}" }
-              end
-            end
+        values.each { |value_h| value_h["value"] = "custom:#{value_h["value"]}" }
 
-          LlmModel.all.each do |model|
-            llm_models << { name: model.display_name, value: "custom:#{model.id}" }
-          end
-
-          llm_models
-        end
+        values
       end
 
       def self.available_ai_bots
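The enumerator now reads straight from the llm_models table. A hedged example of the returned shape, assuming a single row (id: 1, display_name: "GPT-4"):

  DiscourseAi::Configuration::LlmEnumerator.values
  # => [{ "name" => "GPT-4", "value" => "custom:1" }]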

lib/configuration/llm_validator.rb

Lines changed: 28 additions & 36 deletions

@@ -9,33 +9,33 @@ def initialize(opts = {})
 
       def valid_value?(val)
        if val == ""
+          parent_module_name = modules_and_choose_llm_settings.invert[@opts[:name]]
+
           @parent_enabled = SiteSetting.public_send(parent_module_name)
           return !@parent_enabled
         end
 
-        provider_and_model_name = val.split(":")
-        provider_name = provider_and_model_name.first
-        model_name_without_prov = provider_and_model_name[1..].join
-        is_custom_model = provider_name == "custom"
+        llm_model_id = val.split(":")&.last
+        llm_model = LlmModel.find_by(id: llm_model_id)
+        return false if llm_model.nil?
 
-        # Bypass setting validations for custom models. They don't rely on site settings.
-        if !is_custom_model
-          endpoint = DiscourseAi::Completions::Endpoints::Base.endpoint_for(provider_name)
+        run_test(llm_model).tap { |result| @unreachable = result }
+      rescue StandardError
+        @unreachable = true
+        false
+      end
 
-          return false if endpoint.nil?
+      def run_test(llm_model)
+        DiscourseAi::Completions::Llm
+          .proxy_from_obj(llm_model)
+          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
+          .present?
+      end
 
-          if !endpoint.correctly_configured?(model_name_without_prov)
-            @endpoint = endpoint
-            return false
-          end
+      def modules_using(llm_model)
+        choose_llm_settings = modules_and_choose_llm_settings.values
 
-          if !can_talk_to_model?(val)
-            @unreachable = true
-            return false
-          end
-        end
-
-        true
+        choose_llm_settings.select { |s| SiteSetting.public_send(s) == "custom:#{llm_model.id}" }
       end
 
       def error_message
@@ -48,28 +48,20 @@ def error_message
           )
         end
 
-        return(I18n.t("discourse_ai.llm.configuration.model_unreachable")) if @unreachable
+        return unless @unreachable
 
-        @endpoint&.configuration_hint
+        I18n.t("discourse_ai.llm.configuration.model_unreachable")
       end
 
-      def parent_module_name
-        if @opts[:name] == :ai_embeddings_semantic_search_hyde_model
-          :ai_embeddings_semantic_search_enabled
-        else
-          :composer_ai_helper_enabled
-        end
+      def choose_llm_setting_for(module_enabler_setting)
+        modules_and_choose_llm_settings[module_enabler_setting]
       end
 
-      private
-
-      def can_talk_to_model?(model_name)
-        DiscourseAi::Completions::Llm
-          .proxy(model_name)
-          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
-          .present?
-      rescue StandardError
-        false
+      def modules_and_choose_llm_settings
+        {
+          ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model,
+          composer_ai_helper_enabled: :ai_helper_model,
+        }
       end
     end
   end
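The inverted mapping drives the empty-value branch of valid_value?: a "Select model" setting can only be blanked once the module it powers is disabled. A sketch based on the diff:

  mapping = {
    ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model,
    composer_ai_helper_enabled: :ai_helper_model,
  }
  mapping.invert[:ai_helper_model]
  # => :composer_ai_helper_enabled; while that setting is enabled,
  # clearing ai_helper_model fails validation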

spec/jobs/regular/stream_post_helper_spec.rb

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 RSpec.describe Jobs::StreamPostHelper do
   subject(:job) { described_class.new }
 
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   describe "#execute" do
     fab!(:topic)

spec/lib/modules/ai_bot/tools/search_spec.rb

Lines changed: 1 addition & 1 deletion

@@ -108,7 +108,7 @@
     after { DiscourseAi::Embeddings::SemanticSearch.clear_cache_for(query) }
 
     it "supports semantic search when enabled" do
-      SiteSetting.ai_embeddings_semantic_search_hyde_model = "fake:fake"
+      assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model)
       SiteSetting.ai_embeddings_semantic_search_enabled = true
      SiteSetting.ai_embeddings_discourse_service_api_endpoint = "http://test.com"

spec/lib/modules/ai_helper/assistant_spec.rb

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
   fab!(:empty_locale_user) { Fabricate(:user, locale: "") }
   let(:prompt) { CompletionPrompt.find_by(id: mode) }
 
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   let(:english_text) { <<~STRING }
     To perfect his horror, Caesar, surrounded at the base of the statue by the impatient daggers of his friends,

spec/lib/modules/ai_helper/chat_thread_titler_spec.rb

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do
   subject(:titler) { described_class.new(thread) }
 
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   fab!(:thread) { Fabricate(:chat_thread) }
   fab!(:chat_message) { Fabricate(:chat_message, thread: thread) }
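All four specs now call assign_fake_provider_to instead of assigning the legacy "fake:fake" value. The helper's definition is not part of this excerpt; a plausible sketch consistent with the new setting format (the fabricator name is a guess):

  def assign_fake_provider_to(setting_name)
    # Create an LlmModel backed by the now-enumerable "fake" provider and
    # point the given site setting at it via the new "custom:<id>" form.
    fake_llm = Fabricate(:llm_model, provider: "fake", name: "fake")
    SiteSetting.public_send("#{setting_name}=", "custom:#{fake_llm.id}")
  end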
