Skip to content

Commit 71834c2

Browse files
committed
cleanup ollama support
1 parent 8f56b5b commit 71834c2

File tree

9 files changed

+36
-39
lines changed

9 files changed

+36
-39
lines changed

backend/onyx/db/models.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2351,7 +2351,6 @@ class ModelConfiguration(Base):
23512351
# - The end-user is configuring a model and chooses not to set a max-input-tokens limit.
23522352
max_input_tokens: Mapped[int | None] = mapped_column(Integer, nullable=True)
23532353

2354-
# Whether this model supports image input
23552354
supports_image_input: Mapped[bool | None] = mapped_column(Boolean, nullable=True)
23562355

23572356
llm_provider: Mapped["LLMProvider"] = relationship(

backend/onyx/llm/factory.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -285,7 +285,7 @@ def get_llm(
285285

286286
# NOTE: this is needed since Ollama API key is optional
287287
# User may access Ollama cloud via locally hosted instance (logged in)
288-
# or just via the cloud API (not logged in)
288+
# or just via the cloud API (not logged in, using API key)
289289
provider_extra_headers = _build_provider_extra_headers(provider, custom_config)
290290
if provider_extra_headers:
291291
extra_headers.update(provider_extra_headers)

backend/onyx/llm/llm_provider_options.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -200,7 +200,7 @@ def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]:
200200
CustomConfigKey(
201201
name=OLLAMA_API_KEY_CONFIG_KEY,
202202
display_name="Ollama API Key",
203-
description="Optional API key used when connecting to Ollama Cloud (API base must be https://ollama.com).",
203+
description="Optional API key used when connecting to Ollama Cloud (i.e. API base is https://ollama.com).",
204204
is_required=False,
205205
is_secret=True,
206206
)

backend/onyx/llm/utils.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,8 +40,6 @@
4040
from onyx.utils.logger import setup_logger
4141
from shared_configs.configs import LOG_LEVEL
4242

43-
# Database imports for OLLAMA provider check
44-
4543

4644
if TYPE_CHECKING:
4745
from onyx.server.manage.llm.models import LLMProviderView

backend/onyx/server/manage/llm/api.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,8 @@
4343
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
4444
from onyx.server.manage.llm.models import LLMProviderView
4545
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
46+
from onyx.server.manage.llm.models import OllamaFinalModelResponse
4647
from onyx.server.manage.llm.models import OllamaModelDetails
47-
from onyx.server.manage.llm.models import OllamaModelResponse
4848
from onyx.server.manage.llm.models import OllamaModelsRequest
4949
from onyx.server.manage.llm.models import TestLLMRequest
5050
from onyx.server.manage.llm.models import VisionProviderResponse
@@ -503,7 +503,7 @@ def _get_ollama_available_model_names(api_base: str) -> set[str]:
503503
def get_ollama_available_models(
504504
request: OllamaModelsRequest,
505505
_: User | None = Depends(current_admin_user),
506-
) -> list[OllamaModelResponse]:
506+
) -> list[OllamaFinalModelResponse]:
507507
"""Fetch the list of available models from an Ollama server."""
508508

509509
cleaned_api_base = request.api_base.strip().rstrip("/")
@@ -519,7 +519,7 @@ def get_ollama_available_models(
519519
detail="No models found from your Ollama server",
520520
)
521521

522-
models_with_context_size: list[OllamaModelResponse] = []
522+
all_models_with_context_size_and_vision: list[OllamaFinalModelResponse] = []
523523
show_url = f"{cleaned_api_base}/api/show"
524524

525525
for model_name in model_names:
@@ -567,12 +567,12 @@ def get_ollama_available_models(
567567
if not supports_image_input:
568568
supports_image_input = False
569569

570-
models_with_context_size.append(
571-
OllamaModelResponse(
570+
all_models_with_context_size_and_vision.append(
571+
OllamaFinalModelResponse(
572572
name=model_name,
573573
max_input_tokens=context_limit,
574574
supports_image_input=supports_image_input,
575575
)
576576
)
577577

578-
return models_with_context_size
578+
return all_models_with_context_size_and_vision

backend/onyx/server/manage/llm/models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,7 @@ class OllamaModelsRequest(BaseModel):
205205
api_base: str
206206

207207

208-
class OllamaModelResponse(BaseModel):
208+
class OllamaFinalModelResponse(BaseModel):
209209
name: str
210210
max_input_tokens: int
211211
supports_image_input: bool

web/src/app/admin/configuration/llm/FetchModelsButton.tsx

Lines changed: 5 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -2,29 +2,12 @@ import { Button } from "@/components/ui/button";
22
import { LoadingAnimation } from "@/components/Loading";
33
import Text from "@/components/ui/text";
44
import { fetchModels } from "./utils";
5-
import { WellKnownLLMProviderDescriptor, LLMProviderView } from "./interfaces";
6-
import { PopupSpec } from "@/components/admin/connectors/Popup";
5+
import {
6+
ProviderFetchModelsConfig,
7+
FetchModelsButtonProps,
8+
} from "./interfaces";
79

8-
interface FetchModelsButtonProps {
9-
llmProviderDescriptor: WellKnownLLMProviderDescriptor;
10-
existingLlmProvider?: LLMProviderView;
11-
values: any;
12-
setFieldValue: any;
13-
isFetchingModels: boolean;
14-
setIsFetchingModels: (loading: boolean) => void;
15-
fetchModelsError: string;
16-
setFetchModelsError: (error: string) => void;
17-
setPopup?: (popup: PopupSpec) => void;
18-
}
19-
20-
interface ProviderConfig {
21-
buttonText: string;
22-
loadingText: string;
23-
helperText: string | React.ReactNode;
24-
isDisabled: (values: any) => boolean;
25-
}
26-
27-
const providerConfigs: Record<string, ProviderConfig> = {
10+
const providerConfigs: Record<string, ProviderFetchModelsConfig> = {
2811
bedrock: {
2912
buttonText: "Fetch Available Models for Region",
3013
loadingText: "Fetching Models...",

web/src/app/admin/configuration/llm/interfaces.ts

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import { PopupSpec } from "@/components/admin/connectors/Popup";
2+
13
export interface CustomConfigKey {
24
name: string;
35
display_name: string;
@@ -78,3 +80,22 @@ export interface LLMProviderDescriptor {
7880
groups: number[];
7981
model_configurations: ModelConfiguration[];
8082
}
83+
84+
export interface ProviderFetchModelsConfig {
85+
buttonText: string;
86+
loadingText: string;
87+
helperText: string | React.ReactNode;
88+
isDisabled: (values: any) => boolean;
89+
}
90+
91+
export interface FetchModelsButtonProps {
92+
llmProviderDescriptor: WellKnownLLMProviderDescriptor;
93+
existingLlmProvider?: LLMProviderView;
94+
values: any;
95+
setFieldValue: any;
96+
isFetchingModels: boolean;
97+
setIsFetchingModels: (loading: boolean) => void;
98+
fetchModelsError: string;
99+
setFetchModelsError: (error: string) => void;
100+
setPopup?: (popup: PopupSpec) => void;
101+
}

web/src/app/admin/configuration/llm/utils.ts

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,7 @@ import {
1111
OpenAISVG,
1212
QwenIcon,
1313
} from "@/components/icons/icons";
14-
import {
15-
ModelConfiguration,
16-
WellKnownLLMProviderDescriptor,
17-
LLMProviderView,
18-
} from "./interfaces";
14+
import { WellKnownLLMProviderDescriptor, LLMProviderView } from "./interfaces";
1915
import { PopupSpec } from "@/components/admin/connectors/Popup";
2016

2117
export const getProviderIcon = (

0 commit comments

Comments (0)