From 6dcf1b3f39213587d703f36eba73da33c1871838 Mon Sep 17 00:00:00 2001
From: Wenxi Onyx
Date: Wed, 6 Aug 2025 19:00:55 -0700
Subject: [PATCH] mask llm api key from logs

---
 backend/onyx/llm/chat_llm.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/backend/onyx/llm/chat_llm.py b/backend/onyx/llm/chat_llm.py
index f64417c1c6c..1ab6592629f 100644
--- a/backend/onyx/llm/chat_llm.py
+++ b/backend/onyx/llm/chat_llm.py
@@ -313,14 +313,14 @@ def __init__(
 
         self._model_kwargs = model_kwargs
 
-    def log_model_configs(self) -> None:
-        logger.debug(f"Config: {self.config}")
-
     def _safe_model_config(self) -> dict:
         dump = self.config.model_dump()
         dump["api_key"] = mask_string(dump.get("api_key", ""))
         return dump
 
+    def log_model_configs(self) -> None:
+        logger.debug(f"Config: {self._safe_model_config()}")
+
     def _record_call(self, prompt: LanguageModelInput) -> None:
         if self._long_term_logger:
             self._long_term_logger.record(
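
For context, below is a minimal, self-contained sketch of the pattern this patch applies: dump the pydantic config, overwrite `api_key` with a masked copy, and log the sanitized dict instead of the raw config. The `ExampleLLMConfig` and `ExampleLLM` classes and the `mask_string` implementation here are illustrative assumptions, not the actual definitions from `backend/onyx/llm/chat_llm.py` or the onyx utility modules.

```python
import logging

from pydantic import BaseModel

logger = logging.getLogger(__name__)


class ExampleLLMConfig(BaseModel):
    # Hypothetical stand-in for the real LLM config model.
    model_provider: str
    model_name: str
    api_key: str


def mask_string(sensitive: str) -> str:
    # Assumed behavior: hide everything except the last four characters.
    # The real onyx helper may differ; this is only for illustration.
    if not sensitive:
        return ""
    return "****" + sensitive[-4:]


class ExampleLLM:
    def __init__(self, config: ExampleLLMConfig) -> None:
        self.config = config

    def _safe_model_config(self) -> dict:
        # Same pattern as the patch: dump the pydantic model, then
        # replace the api_key with a masked copy before it is logged.
        dump = self.config.model_dump()
        dump["api_key"] = mask_string(dump.get("api_key", ""))
        return dump

    def log_model_configs(self) -> None:
        # Log the sanitized dict rather than the raw config object,
        # so the plaintext key never reaches the log stream.
        logger.debug(f"Config: {self._safe_model_config()}")


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    llm = ExampleLLM(
        ExampleLLMConfig(
            model_provider="openai",
            model_name="gpt-4o",
            api_key="sk-test-1234567890abcd",
        )
    )
    # The debug line should contain "'api_key': '****abcd'" rather than the full key.
    llm.log_model_configs()
```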