File tree: 1 file changed, +10 −1 lines changed
6
from langchain_core .messages import BaseMessage , SystemMessage , trim_messages
7
7
from langchain_core .prompts import ChatPromptTemplate
8
8
from langgraph .graph import END , START , MessagesState , StateGraph
9
+ from loguru import logger
9
10
10
11
from chatbot .safety import create_hazard_classifier , hazard_categories
11
12
@@ -29,7 +30,15 @@ def create_agent(
29
30
if hasattr (chat_model , "get_num_tokens_from_messages" ):
30
31
token_counter = chat_model .get_num_tokens_from_messages
31
32
else :
33
+ logger .warning (
34
+ "Could not get token counter function from chat model, will truncate messages by message count. This may lead to context overflow."
35
+ )
32
36
token_counter = len
37
+ if max_tokens is None :
38
+ raise ValueError ("`None` passed as `max_tokens` which is not allowed" )
39
+
40
+ # leave 0.2 for new tokens
41
+ _max_tokens = int (max_tokens * 0.8 )
33
42
34
43
hazard_classifier = None
35
44
if safety_model is not None :
@@ -94,7 +103,7 @@ async def chatbot(state: MessagesState) -> MessagesState:
94
103
windowed_messages : list [BaseMessage ] = trim_messages (
95
104
all_messages ,
96
105
token_counter = token_counter ,
97
- max_tokens = max_tokens ,
106
+ max_tokens = _max_tokens ,
98
107
start_on = "human" , # This means that the first message should be from the user after trimming.
99
108
)
100
109
You can’t perform that action at this time.
0 commit comments