Skip to content
This repository was archived by the owner on Jul 29, 2025. It is now read-only.

Ollama integration #346

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .opencode.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,12 @@
"gopls": {
"command": "gopls"
}
},
"mcpServers": {
"spec-server": {
"command": "spec-server",
"args": ["stdio"],
"disabled": false
}
}
}
39 changes: 36 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -524,6 +524,34 @@ OpenCode includes several built-in commands:

OpenCode implements the Model Context Protocol (MCP) to extend its capabilities through external tools. MCP provides a standardized way for the AI assistant to interact with external services and tools.

### Spec Driven Development

OpenCode supports Spec Driven Development through the `spec-server`, an MCP server that guides the user through a three-phase workflow:

1. **Requirements:** Define user stories and acceptance criteria.
2. **Design:** Create a technical design document.
3. **Tasks:** Generate actionable implementation tasks.

To use the `spec-server`, you first need to install it:

```bash
pip install spec-server
```

Then, you need to add it to your `.opencode.json` configuration file:

```json
{
"mcpServers": {
"spec-server": {
"command": "spec-server",
"args": ["stdio"],
"disabled": false
}
}
}
```

### MCP Features

- **External Tool Integration**: Connect to external tools and services via a standardized protocol
Expand Down Expand Up @@ -626,11 +654,16 @@ This is useful for developers who want to experiment with custom models.

### Configuring a self-hosted provider

You can use a self-hosted model by setting the `LOCAL_ENDPOINT` environment variable.
You can use a self-hosted model by setting one of the following environment variables:

- `OLLAMA_ENDPOINT`: For Ollama models
- `LMSTUDIO_ENDPOINT`: For LMStudio models
- `LOCAL_ENDPOINT`: For other local models

This will cause OpenCode to load and use the models from the specified endpoint.

```bash
LOCAL_ENDPOINT=http://localhost:1235/v1
OLLAMA_ENDPOINT=http://localhost:11434
```

### Configuring a self-hosted model
Expand All @@ -641,7 +674,7 @@ You can also configure a self-hosted model in the configuration file under the `
{
"agents": {
"coder": {
"model": "local.granite-3.3-2b-instruct@q8_0",
"model": "local/Ollama/llama2",
"reasoningEffort": "high"
}
}
Expand Down
34 changes: 34 additions & 0 deletions cmd/config.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
package cmd

import (
"fmt"

"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/llm/models"
"github.com/spf13/cobra"
)

// configCmd is the "config" subcommand. It persists provider credentials
// supplied via the --key/--value flags (see configure for the handler).
var configCmd = &cobra.Command{
	Use:   "config",
	Short: "Configure opencode CLI",
	RunE: func(cmd *cobra.Command, args []string) error {
		return configure(cmd, args)
	},
}

// configure handles the "config" command: it reads the --key and --value
// flags and stores the value as the API key for the named provider in the
// persisted configuration. Both flags are required.
func configure(cmd *cobra.Command, args []string) error {
	// The original discarded these errors with `_`; GetString fails if the
	// flag is missing or has the wrong type, so surface that explicitly.
	key, err := cmd.Flags().GetString("key")
	if err != nil {
		return fmt.Errorf("reading --key flag: %w", err)
	}
	value, err := cmd.Flags().GetString("value")
	if err != nil {
		return fmt.Errorf("reading --value flag: %w", err)
	}

	if key == "" || value == "" {
		return fmt.Errorf("key and value are required")
	}

	return config.UpdateProviderAPIKey(models.ModelProvider(key), value)
}

// init declares the config command's flags and attaches it to the root
// command so it is available as `opencode config`.
func init() {
	configCmd.Flags().StringP("key", "k", "", "Configuration key")
	configCmd.Flags().StringP("value", "v", "", "Configuration value")
	rootCmd.AddCommand(configCmd)
}
21 changes: 21 additions & 0 deletions cmd/config_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
package cmd

import (
"testing"

"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/llm/models"
"github.com/stretchr/testify/require"
)

// TestConfigCmd exercises the config command end to end: it loads the
// configuration, runs `config --key openai --value test-key`, and checks
// that the key was stored for the OpenAI provider.
//
// NOTE(review): config.Load(".", false) plus UpdateProviderAPIKey appears
// to persist to a real config file, so this test may mutate on-disk state
// outside the test sandbox — confirm and consider isolating via a temp
// working directory.
func TestConfigCmd(t *testing.T) {
	_, err := config.Load(".", false)
	require.NoError(t, err)

	rootCmd.SetArgs([]string{"config", "--key", "openai", "--value", "test-key"})
	err = rootCmd.Execute()
	require.NoError(t, err)

	cfg := config.Get()
	require.Equal(t, "test-key", cfg.Providers[models.ProviderOpenAI].APIKey)
}
8 changes: 8 additions & 0 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (
"github.com/opencode-ai/opencode/internal/app"
"github.com/opencode-ai/opencode/internal/config"
"github.com/opencode-ai/opencode/internal/db"
customerrors "github.com/opencode-ai/opencode/internal/errors"
"github.com/opencode-ai/opencode/internal/format"
"github.com/opencode-ai/opencode/internal/llm/agent"
"github.com/opencode-ai/opencode/internal/logging"
Expand Down Expand Up @@ -284,6 +285,13 @@ func setupSubscriptions(app *app.App, parentCtx context.Context) (chan tea.Msg,
// Execute runs the root command and terminates the process with exit
// status 1 on any error. A configuration "not found" error additionally
// prints a hint pointing the user at `opencode config`.
func Execute() {
	err := rootCmd.Execute()
	if err == nil {
		return
	}
	// Diagnostics belong on stderr, not stdout (the original used Println).
	// NOTE(review): a bare type assertion will not match wrapped errors;
	// if callers ever wrap with %w, switch to errors.As.
	if e, ok := err.(*customerrors.Error); ok && e.Code == customerrors.ErrNotFound {
		fmt.Fprintln(os.Stderr, "No valid provider available. Please configure a provider using 'opencode config'")
	}
	os.Exit(1)
}
Expand Down
37 changes: 34 additions & 3 deletions internal/config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import (
"runtime"
"strings"

customerrors "github.com/opencode-ai/opencode/internal/errors"
"github.com/opencode-ai/opencode/internal/llm/models"
"github.com/opencode-ai/opencode/internal/logging"
"github.com/spf13/viper"
Expand Down Expand Up @@ -277,6 +278,7 @@ func setProviderDefaults() {
// api-key may be empty when using Entra ID credentials – that's okay
viper.SetDefault("providers.azure.apiKey", os.Getenv("AZURE_OPENAI_API_KEY"))
}
viper.SetDefault("providers.local.endpoint", "http://localhost:11434")
if apiKey, err := LoadGitHubToken(); err == nil && apiKey != "" {
viper.SetDefault("providers.copilot.apiKey", apiKey)
if viper.GetString("providers.copilot.apiKey") == "" {
Expand Down Expand Up @@ -487,7 +489,7 @@ func validateAgent(cfg *Config, name AgentName, agent Agent) error {
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
return customerrors.Newf(customerrors.ErrNotFound, "no valid provider available for agent %s", name)
}
return nil
}
Expand All @@ -509,7 +511,7 @@ func validateAgent(cfg *Config, name AgentName, agent Agent) error {
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
return customerrors.Newf(customerrors.ErrNotFound, "no valid provider available for agent %s", name)
}
} else {
// Add provider with API key from environment
Expand All @@ -529,7 +531,7 @@ func validateAgent(cfg *Config, name AgentName, agent Agent) error {
if setDefaultModelForAgent(name) {
logging.Info("set default model for agent", "agent", name, "model", cfg.Agents[name].Model)
} else {
return fmt.Errorf("no valid provider available for agent %s", name)
return customerrors.Newf(customerrors.ErrNotFound, "no valid provider available for agent %s", name)
}
}

Expand Down Expand Up @@ -929,6 +931,35 @@ func UpdateTheme(themeName string) error {
})
}

// UpdateProviderAPIKey sets the API key for the given provider in the
// in-memory configuration and persists the same change to the config
// file. It returns an error if the configuration has not been loaded or
// if persisting fails.
func UpdateProviderAPIKey(provider models.ModelProvider, apiKey string) error {
	if cfg == nil {
		return fmt.Errorf("config not loaded")
	}

	// One mutator shared by the in-memory update and the file update, so
	// the two code paths cannot drift (the original duplicated this logic).
	setKey := func(c *Config) {
		if c.Providers == nil {
			c.Providers = make(map[models.ModelProvider]Provider)
		}
		// Indexing a map yields the zero value for a missing key, so no
		// explicit existence check is needed before mutating.
		providerCfg := c.Providers[provider]
		providerCfg.APIKey = apiKey
		c.Providers[provider] = providerCfg
	}

	setKey(cfg)
	return updateCfgFile(setKey)
}

// Tries to load Github token from all possible locations
func LoadGitHubToken() (string, error) {
// First check environment variable
Expand Down
46 changes: 46 additions & 0 deletions internal/errors/errors.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
package errors

import "fmt"

// Error is a structured error carrying a machine-readable code alongside
// a human-readable message. Callers inspect Code (e.g. ErrNotFound) to
// react programmatically instead of parsing the message text.
type Error struct {
	Code    int
	Message string
}

// Compile-time check that *Error satisfies the standard error interface.
var _ error = (*Error)(nil)

// Error returns the human-readable message, satisfying the error interface.
func (e *Error) Error() string {
	return e.Message
}

// New creates an *Error with the given code and message.
func New(code int, message string) *Error {
	return &Error{
		Code:    code,
		Message: message,
	}
}

// Newf creates an *Error with the given code and a message built with
// fmt.Sprintf semantics.
func Newf(code int, format string, a ...any) *Error {
	return &Error{
		Code:    code,
		Message: fmt.Sprintf(format, a...),
	}
}

// Error codes. The zero value (ErrUnknown) is the default for an
// unclassified failure.
const (
	// ErrUnknown is an unknown error.
	ErrUnknown = iota
	// ErrNotFound is a not found error.
	ErrNotFound
	// ErrForbidden is a forbidden error.
	ErrForbidden
	// ErrBadRequest is a bad request error.
	ErrBadRequest
	// ErrUnauthorized is an unauthorized error.
	ErrUnauthorized
	// ErrInternal is an internal error.
	ErrInternal
)
17 changes: 17 additions & 0 deletions internal/errors/errors_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
package errors

import (
"testing"

"github.com/stretchr/testify/require"
)

// TestErrors verifies that New and Newf populate both the code and the
// message on the returned error.
func TestErrors(t *testing.T) {
	plain := New(ErrNotFound, "not found")
	require.Equal(t, ErrNotFound, plain.Code)
	require.Equal(t, "not found", plain.Error())

	formatted := Newf(ErrBadRequest, "bad request %d", 400)
	require.Equal(t, ErrBadRequest, formatted.Code)
	require.Equal(t, "bad request 400", formatted.Error())
}
26 changes: 26 additions & 0 deletions internal/llm/models/cohere.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package models

// ProviderCohere identifies the Cohere model provider.
const (
	ProviderCohere ModelProvider = "cohere"
)

// Model IDs exposed for the Cohere provider.
const (
	CohereCommandRPlus ModelID = "cohere-command-r-plus"
)

// CohereModels maps model IDs to their metadata for the Cohere provider.
var CohereModels = map[ModelID]Model{
	CohereCommandRPlus: {
		ID:       CohereCommandRPlus,
		Name:     "Cohere: Command R+",
		Provider: ProviderCohere,
		APIModel: "command-r-plus",
		// NOTE(review): all costs are zeroed, but Command R+ is a paid
		// hosted model, so cost accounting will under-report — confirm
		// pricing against Cohere's published rates.
		CostPer1MIn:        0,
		CostPer1MOut:       0,
		CostPer1MInCached:  0,
		CostPer1MOutCached: 0,
		ContextWindow:    128000,
		DefaultMaxTokens: 4096,
		// NOTE(review): CanReason/SupportsAttachments — confirm these
		// capability flags against the provider's documentation.
		CanReason:           true,
		SupportsAttachments: true,
	},
}
26 changes: 26 additions & 0 deletions internal/llm/models/huggingface.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package models

// ProviderHuggingFace identifies the Hugging Face inference provider.
const (
	ProviderHuggingFace ModelProvider = "huggingface"
)

// Model IDs exposed for the Hugging Face provider.
const (
	HuggingFaceMistral7BInstruct ModelID = "huggingface-mistral-7b-instruct"
)

// HuggingFaceModels maps model IDs to their metadata for the Hugging Face
// provider.
var HuggingFaceModels = map[ModelID]Model{
	HuggingFaceMistral7BInstruct: {
		ID:       HuggingFaceMistral7BInstruct,
		Name:     "Hugging Face: Mistral 7B Instruct",
		Provider: ProviderHuggingFace,
		APIModel: "mistralai/Mistral-7B-Instruct-v0.1",
		CostPer1MIn:        0,
		CostPer1MOut:       0,
		CostPer1MInCached:  0,
		CostPer1MOutCached: 0,
		// NOTE(review): context window / max tokens look conservative for
		// this model family — confirm against the model card.
		ContextWindow:    4096,
		DefaultMaxTokens: 2048,
		// NOTE(review): CanReason/SupportsAttachments — confirm these
		// capability flags; base Mistral 7B Instruct is text-only.
		CanReason:           true,
		SupportsAttachments: true,
	},
}
Loading