import { OpenAIChatInput, ChatOpenAI } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

class ChatLitellm_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatLitellm'
        this.name = 'chatLitellm'
        this.version = 1.0
        this.type = 'ChatLitellm'
        this.icon = 'litellm.jpg'
        this.category = 'Chat Models'
        this.description = 'Connect to a LiteLLM server using an OpenAI-compatible API'
        this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(ChatOpenAI)]
        // The credential is optional: a LiteLLM proxy may run without authentication
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['litellmApi'],
            optional: true
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Base URL',
                name: 'basePath',
                type: 'string',
                placeholder: 'http://localhost:8000'
            },
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'string',
                placeholder: 'model_name'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.9,
                optional: true
            },
            {
                label: 'Streaming',
                name: 'streaming',
                type: 'boolean',
                default: true,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Max Tokens',
                name: 'maxTokens',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top P',
                name: 'topP',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Timeout',
                name: 'timeout',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            }
        ]
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const cache = nodeData.inputs?.cache as BaseCache
        const basePath = nodeData.inputs?.basePath as string
        const modelName = nodeData.inputs?.modelName as string
        const temperature = nodeData.inputs?.temperature as string
        const streaming = nodeData.inputs?.streaming as boolean
        const maxTokens = nodeData.inputs?.maxTokens as string
        const topP = nodeData.inputs?.topP as string
        const timeout = nodeData.inputs?.timeout as string

        // Resolve the optional API key from the stored credential, if one is connected
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const apiKey = getCredentialParam('litellmApiKey', credentialData, nodeData)

        const obj: Partial<OpenAIChatInput> & BaseLLMParams & { openAIApiKey?: string } = {
            temperature: parseFloat(temperature),
            modelName,
            streaming: streaming ?? true
        }

        // Optional parameters are only set when the user supplied a value
        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
        if (topP) obj.topP = parseFloat(topP)
        if (timeout) obj.timeout = parseInt(timeout, 10)
        if (cache) obj.cache = cache
        if (apiKey) obj.openAIApiKey = apiKey

        // basePath redirects the stock OpenAI client to the LiteLLM server
        const model = new ChatOpenAI(obj, { basePath })

        return model
    }
}

module.exports = { nodeClass: ChatLitellm_ChatModels }
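For quick verification outside Flowise, here is a minimal sketch of the `ChatOpenAI` client that `init()` assembles above. Everything in it is a placeholder assumption rather than part of this change: the URL, the model alias, and the API key are invented values, and the `{ basePath }` second argument simply mirrors how the node constructs the model.

```ts
// Standalone sketch (hypothetical values throughout) of the model object
// that chatLitellm's init() returns, pointed at a local LiteLLM proxy.
import { ChatOpenAI } from '@langchain/openai'

const model = new ChatOpenAI(
    {
        modelName: 'my-litellm-model', // assumed to match a model_name in the proxy config
        temperature: 0.9,
        streaming: true,
        openAIApiKey: 'sk-anything' // placeholder; omit if the proxy has no auth
    },
    { basePath: 'http://localhost:8000' } // same second-argument shape the node uses
)

const res = await model.invoke('Hello from LiteLLM')
console.log(res.content)
```

Since LiteLLM exposes an OpenAI-compatible endpoint, redirecting the stock client via `basePath` is all the node appears to need; no LiteLLM-specific SDK is involved.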