-- Handler fragment for lua/codecompanion/adapters/http/mistral.lua —
-- final state of the two-patch series, with the iteration bug fixed.
--
-- Mistral's chat API rejects a `user` message that immediately follows a
-- `tool` message: after tool results, the next turn must come from the
-- assistant. Rather than dropping such user messages outright (patch 1's
-- behavior), defer them in `self.pending_messages` and re-insert them at
-- the next safe point — once a kept non-tool message breaks the
-- tool-result run (patch 2's intent).
--
-- NOTE(review): patch 2 mutated `messages` while iterating it with ipairs
-- (`messages[k] = nil` plus `table.insert(messages, m)` mid-loop). Nil-ing
-- an element creates a hole, after which `#t` / `table.insert` are
-- unspecified in Lua, and entries appended during the traversal are
-- re-visited by the same loop and end up out of order. This version builds
-- a fresh output table instead of mutating the input.
form_messages = function(self, messages)
  -- Deferred user messages carried over from a previous round.
  local pending = self.pending_messages or {}
  local out = {}
  local prev_was_tool = false

  for _, msg in ipairs(messages) do
    local is_tool = msg.role == "tool"

    if prev_was_tool and msg.role == "user" then
      -- A user message directly after a tool result: defer it. For the
      -- next iteration the effective previous message is still the tool
      -- result, so keep treating it as such.
      log:debug("Deferring user-after-tool message: %s", msg.content)
      table.insert(pending, msg)
      is_tool = true
    else
      if not prev_was_tool then
        -- Safe point: the previous kept message was not a tool result,
        -- so flush any deferred user messages ahead of the current one,
        -- keeping them as close to their original position as possible.
        for _, m in ipairs(pending) do
          table.insert(out, m)
        end
        pending = {}
      end
      table.insert(out, msg)
    end

    prev_was_tool = is_tool
  end

  -- Anything still deferred waits for the next round, i.e. until the
  -- assistant has answered the tool results.
  self.pending_messages = pending

  return openai.handlers.form_messages(self, out)
end,