Skip to content
39 changes: 39 additions & 0 deletions doc/configuration/adapters.md
Original file line number Diff line number Diff line change
Expand Up @@ -380,3 +380,42 @@ require("codecompanion").setup({
}),
```

## Setup: OpenRouter with Reasoning Output

```lua
require("codecompanion").setup({
adapters = {
http = {
openrouter = function()
return require("codecompanion.adapters").extend("openai_compatible", {
env = {
url = "https://openrouter.ai/api",
api_key = "OPENROUTER_API_KEY",
chat_url = "/v1/chat/completions",
},
handlers = {
parse_extra = function(self, data)
local extra = data.extra
if extra and extra.reasoning then
data.output.reasoning = { content = extra.reasoning }
if data.output.content == "" then
data.output.content = nil
end
end
return data
end,
},
})
end,
},
},
strategies = {
chat = {
adapter = "openrouter",
},
inline = {
adapter = "openrouter",
},
},
})
```
37 changes: 37 additions & 0 deletions doc/extending/adapters.md
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,7 @@ These handlers parse LLM responses:
- `response.parse_chat` - Format chat output for the chat buffer
- `response.parse_inline` - Format output for inline insertion
- `response.parse_tokens` - Extract token count from the response
- `response.parse_extra` - Process non-standard fields in the response (currently only supported by OpenAI-based adapters)

### Tool Handlers

Expand Down Expand Up @@ -376,6 +377,42 @@ handlers = {
}
```

### `response.parse_extra`

Some OpenAI-compatible API providers, such as DeepSeek, Gemini, and OpenRouter, implement a superset of the standard specification and provide reasoning tokens/summaries within their responses.
The non-standard fields in the [`message` (non-streaming)](https://platform.openai.com/docs/api-reference/chat/object#chat-object-choices-message) or [`delta` (streaming)](https://platform.openai.com/docs/api-reference/chat-streaming/streaming#chat_streaming-streaming-choices-delta) object are captured by the OpenAI adapter and can be used to extract the reasoning.

For example, the DeepSeek API provides the reasoning tokens in the `delta.reasoning_content` field.
We can therefore use the following `parse_extra` handler to extract the reasoning tokens and put them into the appropriate output fields:

```lua
handlers = {
response = {
---@param self CodeCompanion.HTTPAdapter
--- `data` is the output of the `parse_chat` handler
---@param data {status: string, output: {role: string?, content: string?}, extra: table}
---@return {status: string, output: {role: string?, content: string?, reasoning:{content: string?}?}}
parse_extra = function(self, data)
local extra = data.extra
if extra.reasoning_content then
      -- codecompanion expects the reasoning tokens in this format
data.output.reasoning = { content = extra.reasoning_content }
      -- so that codecompanion doesn't mistake this for a normal response with an empty string as the content
if data.output.content == "" then
data.output.content = nil
end
end
return data
end
}
}
```

Notes:

1. You don't always have to set `data.output.content` to `nil`. This is mostly needed in streaming mode; doing it unconditionally may cause issues in non-streaming mode.
2. It's expected that the processed `data` table is returned at the end.

### `request.build_parameters`

For the purposes of the OpenAI adapter, no additional parameters need to be created. So we just pass this through:
Expand Down
78 changes: 11 additions & 67 deletions lua/codecompanion/adapters/http/deepseek.lua
Original file line number Diff line number Diff line change
Expand Up @@ -91,76 +91,20 @@ return {
---Output the data from the API ready for insertion into the chat buffer
---@param self CodeCompanion.HTTPAdapter
---@param data table The streamed JSON data from the API, also formatted by the format_data handler
---@param tools? table The table to write any tool output to
---@return { status: string, output: { role: string, content: string, reasoning: string? } } | nil
---@param tools table The table to write any tool output to
---@return { status: string, output: { role: string, content: string?, reasoning: {content: string}? } } | nil
chat_output = function(self, data, tools)
local output = {}

if data and data ~= "" then
local data_mod = adapter_utils.clean_streamed_data(data)
local ok, json = pcall(vim.json.decode, data_mod, { luanil = { object = true } })

if ok and json.choices and #json.choices > 0 then
local choice = json.choices[1]
local delta = (self.opts and self.opts.stream) and choice.delta or choice.message

if delta then
output.role = delta.role
output.content = delta.content

if delta.reasoning_content then
output.reasoning = output.reasoning or {}
output.reasoning.content = delta.reasoning_content
end

-- Process tools
if self.opts.tools and delta.tool_calls and tools then
for _, tool in ipairs(delta.tool_calls) do
if self.opts.stream then
local index = tool.index
local found = false

for i, existing_tool in ipairs(tools) do
if existing_tool._index == index then
tools[i]["function"].arguments = (tools[i]["function"].arguments or "")
.. (tool["function"]["arguments"] or "")
found = true
break
end
end

if not found then
table.insert(tools, {
["function"] = {
name = tool["function"]["name"],
arguments = tool["function"]["arguments"] or "",
},
id = tool.id,
type = "function",
_index = index,
})
end
else
table.insert(tools, {
_index = tool.index,
["function"] = {
name = tool["function"]["name"],
arguments = tool["function"]["arguments"],
},
id = tool.id,
type = "function",
})
end
end
end

return {
status = "success",
output = output,
}
end
return openai.handlers.chat_output(self, data, tools)
end,
---Extract DeepSeek's non-standard reasoning tokens from the fields captured
---by the OpenAI adapter's `chat_output` handler.
---@param self CodeCompanion.HTTPAdapter
---@param data {status: string, output: {role: string?, content: string?}, extra: table?}
---@return {status: string, output: {role: string?, content: string?, reasoning: {content: string}?}}
parse_extra = function(self, data)
  local extra = data.extra
  -- Guard against a missing `extra` table so this handler is safe even when
  -- a caller invokes it unconditionally (the documented examples guard too)
  if extra and extra.reasoning_content then
    -- codecompanion expects reasoning tokens in this shape
    data.output.reasoning = { content = extra.reasoning_content }
    -- Clear an empty content string so a reasoning-only streamed chunk isn't
    -- mistaken for a normal response with empty content
    if data.output.content == "" then
      data.output.content = nil
    end
  end
  return data
end,
inline_output = function(self, data, context)
return openai.handlers.inline_output(self, data, context)
Expand Down
2 changes: 2 additions & 0 deletions lua/codecompanion/adapters/http/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ local function get_handler(adapter, name)
parse_chat = "chat_output",
parse_inline = "inline_output",
parse_tokens = "tokens",
parse_extra = "parse_extra",

-- tools
format_calls = "format_tool_calls",
Expand Down Expand Up @@ -84,6 +85,7 @@ end
---@field parse_chat? fun(self: CodeCompanion.HTTPAdapter, data: string|table, tools?: table): { status: string, output: table }|nil
---@field parse_inline? fun(self: CodeCompanion.HTTPAdapter, data: string|table, context?: table): { status: string, output: string }|nil
---@field parse_tokens? fun(self: CodeCompanion.HTTPAdapter, data: table): number|nil
---@field parse_extra? fun(self: CodeCompanion.HTTPAdapter, data: {status: string, output: {role: string?, content: string?}, extra: table}):{status: string, output: {role: string?, content: string?, reasoning:{content: string?}|table|nil}}

---@class CodeCompanion.HTTPAdapter.Handlers.Tools
---@field format_calls? fun(self: CodeCompanion.HTTPAdapter, tools: table): table
Expand Down
32 changes: 32 additions & 0 deletions lua/codecompanion/adapters/http/openai.lua
Original file line number Diff line number Diff line change
@@ -1,5 +1,36 @@
local adapter_utils = require("codecompanion.utils.adapters")
local log = require("codecompanion.utils.log")
local CONSTANTS = {
  -- Every field the standard OpenAI chat-completion API defines on a
  -- `message` (non-streaming) or `delta` (streaming) object; any other
  -- field on those objects is provider-specific ("extra")
  STANDARD_MESSAGE_FIELDS = {
    "annotations",
    "audio",
    "content",
    "function_call",
    "refusal",
    "role",
    "tool_calls",
  },
}

---Collect any fields on a `message`/`delta` object that fall outside the
---standard OpenAI chat-completion specification (e.g. provider-specific
---reasoning tokens). Returns `nil` when there is nothing non-standard.
---@param delta table? The `message` (non-streaming) or `delta` (streaming) object
---@return table?
local function find_extra_fields(delta)
  if delta == nil then
    return nil
  end
  local extras = nil
  for field, value in pairs(delta) do
    local is_standard = false
    for _, standard in ipairs(CONSTANTS.STANDARD_MESSAGE_FIELDS) do
      if standard == field then
        is_standard = true
        break
      end
    end
    if not is_standard then
      -- Lazily create the table so an all-standard delta yields nil
      extras = extras or {}
      extras[field] = value
    end
  end
  return extras
end

---@class CodeCompanion.HTTPAdapter.OpenAI: CodeCompanion.HTTPAdapter
return {
Expand Down Expand Up @@ -257,6 +288,7 @@ return {
role = delta.role,
content = delta.content,
},
extra = find_extra_fields(delta),
}
end,

Expand Down
6 changes: 6 additions & 0 deletions lua/codecompanion/strategies/chat/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -1009,6 +1009,12 @@ function Chat:_submit_http(payload)
end

local result = adapters.call_handler(adapter, "parse_chat", data, tools)

local parse_extra = adapters.get_handler(adapter, "parse_extra")
if result and result.extra and type(parse_extra) == "function" then
result = parse_extra(adapter, result)
end

if result and result.status then
self.status = result.status
if self.status == CONSTANTS.STATUS_SUCCESS then
Expand Down
44 changes: 44 additions & 0 deletions test_json.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
-- NOTE(review): this looks like an ad-hoc debugging script committed at the
-- repository root; it is not wired into the test suite and should probably
-- be removed or moved under tests/ before merging.

local jsonschema = require("jsonschema")

-- JSON Schema for the tool arguments: a required `command` (enum) plus an
-- optional `options` object that itself requires `query` when present.
local schema = {
  type = "object",
  properties = {
    command = {
      type = "string",
      enum = { "query", "ls" },
      description = "Action to perform: 'query' for semantic search or 'ls' to list projects",
    },
    options = {
      type = "object",
      properties = {
        query = {
          type = "array",
          items = { type = "string" },
          description = "Query messages used for the search.",
        },
        count = {
          type = "integer",
          description = "Number of documents to retrieve, must be positive",
        },
        project_root = {
          type = "string",
          description = "Project path to search within (must be from 'ls' results)",
        },
      },
      required = { "query" },
      additionalProperties = false,
    },
  },
  required = { "command" },
  additionalProperties = false,
}

-- Sample argument tables: two valid calls and one with an invalid `command`
-- ("fhdsaif" is not in the enum, so validation is expected to fail for it)
local args = {
  { command = "query", options = { query = { "hi" } } },
  { command = "ls" },
  { command = "fhdsaif" },
}

local validator = jsonschema.generate_validator(schema)
-- ipairs gives a deterministic order over the sequence (pairs does not
-- guarantee one); the generated validator takes only the data to validate
for _, arg in ipairs(args) do
  local res, err = validator(arg)
  vim.notify(vim.inspect({ args = arg, result = res, error = err }))
end
7 changes: 5 additions & 2 deletions tests/adapters/http/test_deepseek.lua
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,9 @@ T["DeepSeek adapter"]["Streaming"]["can handle reasoning content when streaming"
local lines = vim.fn.readfile("tests/adapters/http/stubs/deepseek_streaming.txt")
for _, line in ipairs(lines) do
local chat_output = adapter.handlers.chat_output(adapter, line)
if adapter.handlers.parse_extra and chat_output.extra then
chat_output = adapter.handlers.parse_extra(adapter, chat_output)
end
if chat_output then
if chat_output.output.reasoning and chat_output.output.reasoning.content then
output.reasoning.content = output.reasoning.content .. chat_output.output.reasoning.content
Expand Down Expand Up @@ -277,7 +280,7 @@ T["DeepSeek adapter"]["No Streaming"]["can process tools"] = function()

local tool_output = {
{
_index = 0,
_index = 1,
["function"] = {
arguments = '{"location": "London", "units": "celsius"}',
name = "weather",
Expand All @@ -286,7 +289,7 @@ T["DeepSeek adapter"]["No Streaming"]["can process tools"] = function()
type = "function",
},
{
_index = 1,
_index = 2,
["function"] = {
arguments = '{"location": "Paris", "units": "celsius"}',
name = "weather",
Expand Down