feat: ollama official support #5509
Merged · +897 −439 · 14 commits
Commits (all by wenxi-onyx):

9a44a2a  ollama support
b45a5c9  update ollama model names
44fa7bc  handle max tokens and vision capabilities for ollama
63fd86f  .
8a053c9  cleanup ollama support
6a8d57e  cleanup ollama support
6990821  cleanup ollama support
12eb3b4  show ollama card on provider update
3b2eafd  fix: non-image models shouldn't be hidden
7b2ed30  mypy
35385ce  update integration tests
ffcc0dc  fix more integration tests
e6b71ba  update migration
a9184d8  cleanup ollama support
backend/alembic/versions/64bd5677aeb6_add_image_input_support_to_model_config.py (37 additions, 0 deletions):
"""Add image input support to model config | ||
|
||
Revision ID: 64bd5677aeb6 | ||
Revises: b30353be4eec | ||
Create Date: 2025-09-28 15:48:12.003612 | ||
|
||
""" | ||
|
||
from alembic import op | ||
import sqlalchemy as sa | ||
|
||
|
||
# revision identifiers, used by Alembic. | ||
revision = "64bd5677aeb6" | ||
down_revision = "b30353be4eec" | ||
branch_labels = None | ||
depends_on = None | ||
|
||
|
||
def upgrade() -> None: | ||
op.add_column( | ||
"model_configuration", | ||
sa.Column("supports_image_input", sa.Boolean(), nullable=True), | ||
) | ||
|
||
# Seems to be left over from when model visibility was introduced and a nullable field. | ||
# Set any null is_visible values to False | ||
connection = op.get_bind() | ||
connection.execute( | ||
sa.text( | ||
"UPDATE model_configuration SET is_visible = false WHERE is_visible IS NULL" | ||
) | ||
) | ||
|
||
|
||
def downgrade() -> None: | ||
op.drop_column("model_configuration", "supports_image_input") |
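Since `supports_image_input` is added as nullable, downstream code presumably has to treat `NULL` as "capability unknown" rather than "no image support" (the commit `fix: non-image models shouldn't be hidden` points at that distinction). A minimal sketch of such tri-state handling; the helper below is illustrative and is not the PR's actual `model_supports_image_input`:

```python
def resolve_image_support(db_value: bool | None, capability_fallback: bool) -> bool:
    """Resolve the nullable per-model flag to a concrete capability."""
    if db_value is not None:
        # an explicit per-model configuration always wins
        return db_value
    # NULL means "unknown": defer to a provider/model capability lookup
    return capability_fallback


# example: an explicit flag wins, NULL falls through to the fallback
assert resolve_image_support(False, capability_fallback=True) is False
assert resolve_image_support(None, capability_fallback=True) is True
```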
The remaining hunks modify the LLM factory module (file path not shown in the capture):
```diff
@@ -1,8 +1,5 @@
-from typing import Any
-
 from onyx.chat.models import PersonaOverrideConfig
 from onyx.configs.app_configs import DISABLE_GENERATIVE_AI
-from onyx.configs.model_configs import GEN_AI_MODEL_FALLBACK_MAX_TOKENS
 from onyx.configs.model_configs import GEN_AI_TEMPERATURE
 from onyx.db.engine.sql_engine import get_session_with_current_tenant
 from onyx.db.llm import fetch_default_provider
```
```diff
@@ -13,6 +10,8 @@
 from onyx.llm.chat_llm import DefaultMultiLLM
 from onyx.llm.exceptions import GenAIDisabledException
 from onyx.llm.interfaces import LLM
+from onyx.llm.llm_provider_options import OLLAMA_API_KEY_CONFIG_KEY
+from onyx.llm.llm_provider_options import OLLAMA_PROVIDER_NAME
 from onyx.llm.override_models import LLMOverride
 from onyx.llm.utils import get_max_input_tokens_from_llm_provider
 from onyx.llm.utils import model_supports_image_input
```
```diff
@@ -24,13 +23,22 @@
 logger = setup_logger()


-def _build_extra_model_kwargs(provider: str) -> dict[str, Any]:
-    """Ollama requires us to specify the max context window.
-
-    For now, just using the GEN_AI_MODEL_FALLBACK_MAX_TOKENS value.
-    TODO: allow model-specific values to be configured via the UI.
-    """
-    return {"num_ctx": GEN_AI_MODEL_FALLBACK_MAX_TOKENS} if provider == "ollama" else {}
+def _build_provider_extra_headers(
+    provider: str, custom_config: dict[str, str] | None
+) -> dict[str, str]:
+    if provider != OLLAMA_PROVIDER_NAME or not custom_config:
+        return {}
+
+    raw_api_key = custom_config.get(OLLAMA_API_KEY_CONFIG_KEY)
+
+    api_key = raw_api_key.strip() if raw_api_key else None
+    if not api_key:
+        return {}
+
+    if not api_key.lower().startswith("bearer "):
+        api_key = f"Bearer {api_key}"
+
+    return {"Authorization": api_key}


 def get_main_llm_from_tuple(
```
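To make the header behavior concrete, here is a standalone reproduction of the normalization logic above with a few example inputs (the real function also gates on `OLLAMA_PROVIDER_NAME` and reads the key from `custom_config` via `OLLAMA_API_KEY_CONFIG_KEY`; this sketch only mirrors the Bearer-prefix handling):

```python
def build_ollama_auth_header(raw_api_key: str | None) -> dict[str, str]:
    """Mirror of the Bearer-prefix normalization in _build_provider_extra_headers."""
    api_key = raw_api_key.strip() if raw_api_key else None
    if not api_key:
        # no key configured: a locally hosted Ollama needs no auth header
        return {}
    if not api_key.lower().startswith("bearer "):
        # bare keys get the scheme prefixed for the cloud API
        api_key = f"Bearer {api_key}"
    return {"Authorization": api_key}


assert build_ollama_auth_header(None) == {}
assert build_ollama_auth_header("   ") == {}
assert build_ollama_auth_header("abc123") == {"Authorization": "Bearer abc123"}
# already-prefixed keys pass through unchanged (the check is case-insensitive)
assert build_ollama_auth_header("bearer abc123") == {"Authorization": "bearer abc123"}
```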
```diff
@@ -272,6 +280,16 @@ def get_llm(
 ) -> LLM:
     if temperature is None:
         temperature = GEN_AI_TEMPERATURE
+
+    extra_headers = build_llm_extra_headers(additional_headers)
+
+    # NOTE: this is needed since Ollama API key is optional
+    # User may access Ollama cloud via locally hosted instance (logged in)
+    # or just via the cloud API (not logged in, using API key)
+    provider_extra_headers = _build_provider_extra_headers(provider, custom_config)
+    if provider_extra_headers:
+        extra_headers.update(provider_extra_headers)
+
     return DefaultMultiLLM(
         model_provider=provider,
         model_name=model,
```
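One consequence of updating `extra_headers` in place is that the provider-derived `Authorization` header overrides any same-named pass-through header; a quick illustration with invented values:

```python
# headers from build_llm_extra_headers (values invented for illustration)
extra_headers = {"Authorization": "Bearer pass-through", "X-Request-Id": "42"}
# provider-specific header from _build_provider_extra_headers
provider_extra_headers = {"Authorization": "Bearer ollama-api-key"}

extra_headers.update(provider_extra_headers)
# dict.update keeps unrelated keys and lets the provider value win
assert extra_headers == {
    "Authorization": "Bearer ollama-api-key",
    "X-Request-Id": "42",
}
```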
```diff
@@ -282,8 +300,8 @@ def get_llm(
         timeout=timeout,
         temperature=temperature,
         custom_config=custom_config,
-        extra_headers=build_llm_extra_headers(additional_headers),
-        model_kwargs=_build_extra_model_kwargs(provider),
+        extra_headers=extra_headers,
+        model_kwargs={},
         long_term_logger=long_term_logger,
         max_input_tokens=max_input_tokens,
     )
```

Review comment on the `model_kwargs=_build_extra_model_kwargs(provider)` removal: why is this gone?