@@ -955,32 +955,38 @@ const MODEL_DISPLAY_NAMES: { [key: string]: string } = {
   "ai21.j2-ultra-v1": "J2 Ultra",
   "ai21.j2-mid-v1": "J2 Mid",
 
-  // Ollama default models
-  // These can be directly served from the Ollama CLI
-  // User may also add arbitrary open source models
-  "llama3.2": "Llama 3.2",
-  "llama3.2:1b": "Llama 3.2 1B",
-  "llama3.2:3b": "Llama 3.2 3B",
-  "llama3.2:11b": "Llama 3.2 11B",
-  "llama3.2:90b": "Llama 3.2 90B",
-  "llama3.1": "Llama 3.1",
-  "llama3.1:8b": "Llama 3.1 8B",
-  "llama3.1:70b": "Llama 3.1 70B",
+  // Ollama cloud models
+  "gpt-oss:20b-cloud": "gpt-oss 20B Cloud",
+  "gpt-oss:120b-cloud": "gpt-oss 120B Cloud",
+  "deepseek-v3.1:671b-cloud": "DeepSeek-v3.1 671B Cloud",
+  "qwen3-coder:480b-cloud": "Qwen3-Coder 480B Cloud",
+
+  // Ollama models in litellm map (disjoint from ollama's supported model list)
+  // https://models.litellm.ai --> provider ollama
+  codegeex4: "CodeGeeX 4",
+  codegemma: "CodeGemma",
+  codellama: "CodeLLama",
+  "deepseek-coder-v2-base": "DeepSeek-Coder-v2 Base",
+  "deepseek-coder-v2-instruct": "DeepSeek-Coder-v2 Instruct",
+  "deepseek-coder-v2-lite-base": "DeepSeek-Coder-v2 Lite Base",
+  "deepseek-coder-v2-lite-instruct": "DeepSeek-Coder-v2 Lite Instruct",
+  "internlm2_5-20b-chat": "InternLM 2.5 20B Chat",
+  llama2: "Llama 2",
+  "llama2-uncensored": "Llama 2 Uncensored",
+  "llama2:13b": "Llama 2 13B",
+  "llama2:70b": "Llama 2 70B",
+  "llama2:7b": "Llama 2 7B",
   llama3: "Llama 3",
-  "llama3:8b": "Llama 3 8B",
   "llama3:70b": "Llama 3 70B",
-  phi3: "Phi-3",
-  "phi3:mini": "Phi-3 Mini",
-  "phi3:medium": "Phi-3 Medium",
-  "phi3:medium-4k": "Phi-3 Medium 4K",
-  "phi3:medium-128k": "Phi-3 Medium 128K",
-  "mistral-small": "Mistral Small",
-  "mistral-large": "Mistral Large",
-  "mistral-nemo": "Mistral Nemo",
-  "smollm2:135m": "SmolLM2 135M",
-  "smollm2:360m": "SmolLM2 360M",
-  "qwen2.5:3b": "Qwen 2.5 3B",
-  "qwen2.5:7b": "Qwen 2.5 7B",
+  "llama3:8b": "Llama 3 8B",
+  mistral: "Mistral", // Mistral 7b
+  "mistral-7B-Instruct-v0.1": "Mistral 7B Instruct v0.1",
+  "mistral-7B-Instruct-v0.2": "Mistral 7B Instruct v0.2",
+  "mistral-large-instruct-2407": "Mistral Large Instruct 24.07",
+  "mixtral-8x22B-Instruct-v0.1": "Mixtral 8x22B Instruct v0.1",
+  "mixtral8x7B-Instruct-v0.1": "Mixtral 8x7B Instruct v0.1",
+  "orca-mini": "Orca Mini",
+  vicuna: "Vicuna",
 };
 
 export function getDisplayNameForModel(modelName: string): string {
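The body of getDisplayNameForModel is not part of this hunk. As context only, here is a minimal TypeScript sketch of how the lookup presumably behaves, assuming (not confirmed by the diff) that unknown model ids fall back to being displayed verbatim; the map is abbreviated to a few entries from above.

// Hypothetical, simplified illustration -- the real map and function live in the file above.
const DISPLAY_NAMES: { [key: string]: string } = {
  "gpt-oss:20b-cloud": "gpt-oss 20B Cloud",
  "qwen3-coder:480b-cloud": "Qwen3-Coder 480B Cloud",
  llama3: "Llama 3",
};

export function getDisplayNameForModel(modelName: string): string {
  // Prefer the curated display name; otherwise show the raw model id
  // (assumed fallback, which would cover user-added Ollama models not in the map).
  return DISPLAY_NAMES[modelName] ?? modelName;
}

// getDisplayNameForModel("gpt-oss:20b-cloud") -> "gpt-oss 20B Cloud"
// getDisplayNameForModel("my-finetune:7b")    -> "my-finetune:7b"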