Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions backend/danswer/llm/llm_provider_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ class WellKnownLLMProviderDescriptor(BaseModel):
IGNORABLE_ANTHROPIC_MODELS = [
"claude-2",
"claude-instant-1",
"anthropic/claude-3-5-sonnet-20241022",
]
ANTHROPIC_PROVIDER_NAME = "anthropic"
ANTHROPIC_MODEL_NAMES = [
Expand Down Expand Up @@ -100,8 +101,8 @@ def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]:
api_version_required=False,
custom_config_keys=[],
llm_names=fetch_models_for_provider(ANTHROPIC_PROVIDER_NAME),
default_model="claude-3-5-sonnet-20240620",
default_fast_model="claude-3-5-sonnet-20240620",
default_model="claude-3-5-sonnet-20241022",
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Watch out for context-length issues — unless we bump our LiteLLM version (once they add support for this model), it will fall back to the default max tokens, which is super low (something like 4096).

default_fast_model="claude-3-5-sonnet-20241022",
),
WellKnownLLMProviderDescriptor(
name=AZURE_PROVIDER_NAME,
Expand Down Expand Up @@ -135,8 +136,8 @@ def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]:
),
],
llm_names=fetch_models_for_provider(BEDROCK_PROVIDER_NAME),
default_model="anthropic.claude-3-5-sonnet-20240620-v1:0",
default_fast_model="anthropic.claude-3-5-sonnet-20240620-v1:0",
default_model="anthropic.claude-3-5-sonnet-20241022-v2:0",
default_fast_model="anthropic.claude-3-5-sonnet-20241022-v2:0",
),
]

Expand Down
4 changes: 2 additions & 2 deletions backend/requirements/default.txt
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ trafilatura==1.12.2
langchain==0.1.17
langchain-core==0.1.50
langchain-text-splitters==0.0.1
litellm==1.49.5
litellm==1.50.2
lxml==5.3.0
lxml_html_clean==0.2.2
llama-index==0.9.45
Expand All @@ -38,7 +38,7 @@ msal==1.28.0
nltk==3.8.1
Office365-REST-Python-Client==2.5.9
oauthlib==3.2.2
openai==1.51.2
openai==1.52.2
openpyxl==3.1.2
playwright==1.41.2
psutil==5.9.5
Expand Down
4 changes: 2 additions & 2 deletions backend/requirements/model_server.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ cohere==5.6.1
fastapi==0.109.2
google-cloud-aiplatform==1.58.0
numpy==1.26.4
openai==1.51.2
openai==1.52.2
pydantic==2.8.2
retry==0.9.2
safetensors==0.4.2
Expand All @@ -12,5 +12,5 @@ torch==2.2.0
transformers==4.39.2
uvicorn==0.21.1
voyageai==0.2.3
litellm==1.49.5
litellm==1.50.2
sentry-sdk[fastapi,celery,starlette]==2.14.0
6 changes: 4 additions & 2 deletions web/src/lib/hooks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -278,6 +278,7 @@ const MODEL_DISPLAY_NAMES: { [key: string]: string } = {
"claude-2.0": "Claude 2.0",
"claude-instant-1.2": "Claude Instant 1.2",
"claude-3-5-sonnet-20240620": "Claude 3.5 Sonnet",
"claude-3-5-sonnet-20241022": "Claude 3.5 Sonnet (New)",
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It seems possible we might need to rename "(New)" later, but I suppose that's easily fixable if it happens.


// Bedrock models
"meta.llama3-1-70b-instruct-v1:0": "Llama 3.1 70B",
Expand All @@ -301,6 +302,7 @@ const MODEL_DISPLAY_NAMES: { [key: string]: string } = {
"anthropic.claude-3-opus-20240229-v1:0": "Claude 3 Opus",
"anthropic.claude-3-haiku-20240307-v1:0": "Claude 3 Haiku",
"anthropic.claude-3-5-sonnet-20240620-v1:0": "Claude 3.5 Sonnet",
"anthropic.claude-3-5-sonnet-20241022-v2:0": "Claude 3.5 Sonnet (New)",
"anthropic.claude-3-sonnet-20240229-v1:0": "Claude 3 Sonnet",
"mistral.mistral-large-2402-v1:0": "Mistral Large",
"mistral.mixtral-8x7b-instruct-v0:1": "Mixtral 8x7B Instruct",
Expand All @@ -323,7 +325,7 @@ export const defaultModelsByProvider: { [name: string]: string[] } = {
"meta.llama3-1-8b-instruct-v1:0",
"anthropic.claude-3-opus-20240229-v1:0",
"mistral.mistral-large-2402-v1:0",
"anthropic.claude-3-5-sonnet-20240620-v1:0",
"anthropic.claude-3-5-sonnet-20241022-v2:0",
],
anthropic: ["claude-3-opus-20240229", "claude-3-5-sonnet-20240620"],
anthropic: ["claude-3-opus-20240229", "claude-3-5-sonnet-20241022"],
};
3 changes: 3 additions & 0 deletions web/src/lib/llm/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ const MODEL_NAMES_SUPPORTING_IMAGE_INPUT = [
"gpt-4-1106-vision-preview",
// standard claude names
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
Expand All @@ -78,11 +79,13 @@ const MODEL_NAMES_SUPPORTING_IMAGE_INPUT = [
"claude-3-sonnet-20240229-v1:0",
"claude-3-haiku-20240307-v1:0",
"claude-3-5-sonnet-20240620-v1:0",
"claude-3-5-sonnet-20241022-v2:0",
// claude names with full AWS Bedrock names
"anthropic.claude-3-opus-20240229-v1:0",
"anthropic.claude-3-sonnet-20240229-v1:0",
"anthropic.claude-3-haiku-20240307-v1:0",
"anthropic.claude-3-5-sonnet-20240620-v1:0",
"anthropic.claude-3-5-sonnet-20241022-v2:0",
];

export function checkLLMSupportsImageInput(model: string) {
Expand Down
Loading