Skip to content

Commit 89f9256

Browse files
authored
feat: Add ability to specify vertex-ai model location (#4955)
* Make constant a global
* Add ability to specify vertex location
* Add period
* Add a hardcoding path to the frontend
* Add docs
* Add default value to `CustomConfigKey`
* Consume default value from custom-config-key on frontend
* Use markdown renderer instead
* Update description
1 parent b64c6d5 commit 89f9256

File tree

4 files changed

+40
-25
lines changed

4 files changed

+40
-25
lines changed

backend/onyx/llm/chat_llm.py

Lines changed: 6 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -36,7 +36,6 @@
3636
from onyx.llm.interfaces import LLM
3737
from onyx.llm.interfaces import LLMConfig
3838
from onyx.llm.interfaces import ToolChoiceOptions
39-
from onyx.llm.llm_provider_options import CREDENTIALS_FILE_CUSTOM_CONFIG_KEY
4039
from onyx.llm.utils import model_is_reasoning_model
4140
from onyx.server.utils import mask_string
4241
from onyx.utils.logger import setup_logger
@@ -51,7 +50,8 @@
5150
litellm.telemetry = False
5251

5352
_LLM_PROMPT_LONG_TERM_LOG_CATEGORY = "llm_prompt"
54-
VERTEX_CREDENTIALS_KWARG = "vertex_credentials"
53+
VERTEX_CREDENTIALS_FILE_KWARG = "vertex_credentials"
54+
VERTEX_LOCATION_KWARG = "vertex_location"
5555

5656

5757
class LLMTimeoutError(Exception):
@@ -294,13 +294,12 @@ def __init__(
294294
# Specifically pass in "vertex_credentials" / "vertex_location" as a
295295
# model_kwarg to the completion call for vertex AI. More details here:
296296
# https://docs.litellm.ai/docs/providers/vertex
297-
vertex_location_key = "vertex_location"
298297
for k, v in custom_config.items():
299298
if model_provider == "vertex_ai":
300-
if k == VERTEX_CREDENTIALS_KWARG:
299+
if k == VERTEX_CREDENTIALS_FILE_KWARG:
301300
model_kwargs[k] = v
302301
continue
303-
elif k == vertex_location_key:
302+
elif k == VERTEX_LOCATION_KWARG:
304303
model_kwargs[k] = v
305304
continue
306305

@@ -378,13 +377,6 @@ def _completion(
378377
processed_prompt = _prompt_to_dict(prompt)
379378
self._record_call(processed_prompt)
380379

381-
final_model_kwargs = {**self._model_kwargs}
382-
if (
383-
VERTEX_CREDENTIALS_KWARG not in final_model_kwargs
384-
and self.config.credentials_file
385-
):
386-
final_model_kwargs[VERTEX_CREDENTIALS_KWARG] = self.config.credentials_file
387-
388380
try:
389381
return litellm.completion(
390382
mock_response=MOCK_LLM_RESPONSE,
@@ -430,7 +422,7 @@ def _completion(
430422
if structured_response_format
431423
else {}
432424
),
433-
**final_model_kwargs,
425+
**self._model_kwargs,
434426
)
435427
except Exception as e:
436428
self._record_error(processed_prompt, e)
@@ -446,7 +438,7 @@ def _completion(
446438
@property
447439
def config(self) -> LLMConfig:
448440
credentials_file: str | None = (
449-
self._custom_config.get(CREDENTIALS_FILE_CUSTOM_CONFIG_KEY, None)
441+
self._custom_config.get(VERTEX_CREDENTIALS_FILE_KWARG, None)
450442
if self._custom_config
451443
else None
452444
)

backend/onyx/llm/llm_provider_options.py

Lines changed: 14 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -3,6 +3,8 @@
33
import litellm # type: ignore
44
from pydantic import BaseModel
55

6+
from onyx.llm.chat_llm import VERTEX_CREDENTIALS_FILE_KWARG
7+
from onyx.llm.chat_llm import VERTEX_LOCATION_KWARG
68
from onyx.llm.utils import model_supports_image_input
79
from onyx.server.manage.llm.models import ModelConfigurationView
810

@@ -24,6 +26,7 @@ class CustomConfigKey(BaseModel):
2426
is_required: bool = True
2527
is_secret: bool = False
2628
key_type: CustomConfigKeyType = CustomConfigKeyType.TEXT_INPUT
29+
default_value: str | None = None
2730

2831

2932
class WellKnownLLMProviderDescriptor(BaseModel):
@@ -154,9 +157,6 @@ class WellKnownLLMProviderDescriptor(BaseModel):
154157
}
155158

156159

157-
CREDENTIALS_FILE_CUSTOM_CONFIG_KEY = "CREDENTIALS_FILE"
158-
159-
160160
def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]:
161161
return [
162162
WellKnownLLMProviderDescriptor(
@@ -240,13 +240,23 @@ def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]:
240240
),
241241
custom_config_keys=[
242242
CustomConfigKey(
243-
name=CREDENTIALS_FILE_CUSTOM_CONFIG_KEY,
243+
name=VERTEX_CREDENTIALS_FILE_KWARG,
244244
display_name="Credentials File",
245245
description="This should be a JSON file containing some private credentials.",
246246
is_required=True,
247247
is_secret=False,
248248
key_type=CustomConfigKeyType.FILE_INPUT,
249249
),
250+
CustomConfigKey(
251+
name=VERTEX_LOCATION_KWARG,
252+
display_name="Location",
253+
description="The location of the Vertex AI model. Please refer to the "
254+
"[Vertex AI configuration docs](https://docs.onyx.app/gen_ai_configs/vertex_ai) for all possible values.",
255+
is_required=False,
256+
is_secret=False,
257+
key_type=CustomConfigKeyType.TEXT_INPUT,
258+
default_value="us-east1",
259+
),
250260
],
251261
default_model=VERTEXAI_DEFAULT_MODEL,
252262
default_fast_model=VERTEXAI_DEFAULT_MODEL,

web/src/app/admin/configuration/llm/LLMProviderUpdateForm.tsx

Lines changed: 16 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,4 @@
1+
import ReactMarkdown from "react-markdown";
12
import { LoadingAnimation } from "@/components/Loading";
23
import { AdvancedOptionsToggle } from "@/components/AdvancedOptionsToggle";
34
import Text from "@/components/ui/text";
@@ -16,7 +17,6 @@ import { useState } from "react";
1617
import { useSWRConfig } from "swr";
1718
import {
1819
LLMProviderView,
19-
ModelConfiguration,
2020
ModelConfigurationUpsertRequest,
2121
WellKnownLLMProviderDescriptor,
2222
} from "./interfaces";
@@ -134,6 +134,14 @@ export function LLMProviderUpdateForm({
134134
selected_model_names: Yup.array().of(Yup.string()),
135135
});
136136

137+
const customLinkRenderer = ({ href, children }: any) => {
138+
return (
139+
<a href={href} className="text-link hover:text-link-hover">
140+
{children}
141+
</a>
142+
);
143+
};
144+
137145
return (
138146
<Formik
139147
initialValues={initialValues}
@@ -307,12 +315,14 @@ export function LLMProviderUpdateForm({
307315
<TextFormField
308316
small={firstTimeConfiguration}
309317
name={`custom_config.${customConfigKey.name}`}
310-
label={
311-
customConfigKey.is_required
312-
? customConfigKey.display_name
313-
: `[Optional] ${customConfigKey.display_name}`
318+
optional={!customConfigKey.is_required}
319+
label={customConfigKey.display_name}
320+
subtext={
321+
<ReactMarkdown components={{ a: customLinkRenderer }}>
322+
{customConfigKey.description}
323+
</ReactMarkdown>
314324
}
315-
subtext={customConfigKey.description || undefined}
325+
placeholder={customConfigKey.default_value || undefined}
316326
/>
317327
</div>
318328
);

web/src/app/admin/configuration/llm/interfaces.ts

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,9 +4,12 @@ export interface CustomConfigKey {
44
description: string | null;
55
is_required: boolean;
66
is_secret: boolean;
7-
key_type: "text_input" | "file_input";
7+
key_type: CustomConfigKeyType;
8+
default_value?: string;
89
}
910

11+
export type CustomConfigKeyType = "text_input" | "file_input";
12+
1013
export interface ModelConfigurationUpsertRequest {
1114
name: string;
1215
is_visible: boolean;

0 commit comments

Comments (0)