Skip to content
This repository was archived by the owner on Jul 29, 2025. It is now read-only.

feat: add DeepSeek API support #310

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ You can configure OpenCode using environment variables:
| `VERTEXAI_PROJECT` | For Google Cloud VertexAI (Gemini) |
| `VERTEXAI_LOCATION` | For Google Cloud VertexAI (Gemini) |
| `GROQ_API_KEY` | For Groq models |
| `DEEPSEEK_API_KEY` | For DeepSeek models |
| `AWS_ACCESS_KEY_ID` | For AWS Bedrock (Claude) |
| `AWS_SECRET_ACCESS_KEY` | For AWS Bedrock (Claude) |
| `AWS_REGION` | For AWS Bedrock (Claude) |
Expand Down Expand Up @@ -154,6 +155,10 @@ This is useful if you want to use a different shell than your default system she
"apiKey": "your-api-key",
"disabled": false
},
"deepseek": {
"apiKey": "your-api-key",
"disabled": false
},
"openrouter": {
"apiKey": "your-api-key",
"disabled": false
Expand Down Expand Up @@ -256,6 +261,12 @@ OpenCode supports a variety of AI models from different providers:
- Deepseek R1 distill Llama 70b
- Llama 3.3 70b Versatile

### DeepSeek

- DeepSeek Chat
- DeepSeek Coder
- DeepSeek Reasoner (R1)

### Azure OpenAI

- GPT-4.1 family (gpt-4.1, gpt-4.1-mini, gpt-4.1-nano)
Expand Down
1 change: 1 addition & 0 deletions cmd/schema/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,7 @@ func generateSchema() map[string]any {
string(models.ProviderOpenAI),
string(models.ProviderGemini),
string(models.ProviderGROQ),
string(models.ProviderDeepSeek),
string(models.ProviderOpenRouter),
string(models.ProviderBedrock),
string(models.ProviderAzure),
Expand Down
42 changes: 42 additions & 0 deletions internal/config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -267,6 +267,9 @@ func setProviderDefaults() {
if apiKey := os.Getenv("GROQ_API_KEY"); apiKey != "" {
viper.SetDefault("providers.groq.apiKey", apiKey)
}
if apiKey := os.Getenv("DEEPSEEK_API_KEY"); apiKey != "" {
viper.SetDefault("providers.deepseek.apiKey", apiKey)
}
if apiKey := os.Getenv("OPENROUTER_API_KEY"); apiKey != "" {
viper.SetDefault("providers.openrouter.apiKey", apiKey)
}
Expand Down Expand Up @@ -340,6 +343,15 @@ func setProviderDefaults() {
return
}

// DeepSeek configuration
if key := viper.GetString("providers.deepseek.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.DeepSeekCoder)
viper.SetDefault("agents.summarizer.model", models.DeepSeekChat)
viper.SetDefault("agents.task.model", models.DeepSeekChat)
viper.SetDefault("agents.title.model", models.DeepSeekChat)
return
}

// OpenRouter configuration
if key := viper.GetString("providers.openrouter.apiKey"); strings.TrimSpace(key) != "" {
viper.SetDefault("agents.coder.model", models.OpenRouterClaude37Sonnet)
Expand Down Expand Up @@ -651,6 +663,8 @@ func getProviderAPIKey(provider models.ModelProvider) string {
return os.Getenv("GEMINI_API_KEY")
case models.ProviderGROQ:
return os.Getenv("GROQ_API_KEY")
case models.ProviderDeepSeek:
return os.Getenv("DEEPSEEK_API_KEY")
case models.ProviderAzure:
return os.Getenv("AZURE_OPENAI_API_KEY")
case models.ProviderOpenRouter:
Expand Down Expand Up @@ -781,6 +795,34 @@ func setDefaultModelForAgent(agent AgentName) bool {
return true
}

if apiKey := os.Getenv("DEEPSEEK_API_KEY"); apiKey != "" {
var model models.ModelID
maxTokens := int64(5000)
reasoningEffort := ""

switch agent {
case AgentTitle:
model = models.DeepSeekChat
maxTokens = 80
case AgentTask:
model = models.DeepSeekChat
default:
model = models.DeepSeekCoder
}

// Check if model supports reasoning
if modelInfo, ok := models.SupportedModels[model]; ok && modelInfo.CanReason {
reasoningEffort = "medium"
}

cfg.Agents[agent] = Agent{
Model: model,
MaxTokens: maxTokens,
ReasoningEffort: reasoningEffort,
}
return true
}

if hasAWSCredentials() {
maxTokens := int64(5000)
if agent == AgentTitle {
Expand Down
56 changes: 56 additions & 0 deletions internal/llm/models/deepseek.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
package models

// DeepSeek provider and model identifiers.
const (
	// ProviderDeepSeek identifies the DeepSeek API provider.
	ProviderDeepSeek ModelProvider = "deepseek"

	// DeepSeek model IDs, as sent to the DeepSeek API.
	DeepSeekChat     ModelID = "deepseek-chat"
	DeepSeekCoder    ModelID = "deepseek-coder"
	DeepSeekReasoner ModelID = "deepseek-reasoner"
)

// DeepSeekModels lists the models served by the official DeepSeek API.
// https://platform.deepseek.com/api-docs/
// Pricing as of 2025-07-01.
var DeepSeekModels = map[ModelID]Model{
	DeepSeekChat: {
		ID:                  DeepSeekChat,
		Name:                "DeepSeek Chat",
		Provider:            ProviderDeepSeek,
		APIModel:            "deepseek-chat",
		CostPer1MIn:         0.14,
		CostPer1MInCached:   0.02,
		CostPer1MOutCached:  0.0,
		CostPer1MOut:        0.28,
		ContextWindow:       128_000, // documented limit is 128k; <= 100k recommended
		DefaultMaxTokens:    8000,    // DeepSeek recommends output <= 8k tokens
		SupportsAttachments: false,   // no file uploads (NOTE(review): original comment also claimed no function calling — verify against current API docs)
	},
	DeepSeekCoder: {
		ID:                  DeepSeekCoder,
		Name:                "DeepSeek Coder",
		Provider:            ProviderDeepSeek,
		APIModel:            "deepseek-coder", // NOTE(review): deepseek-coder has reportedly been merged into deepseek-chat — confirm the API still accepts this ID
		CostPer1MIn:         0.14,
		CostPer1MInCached:   0.02,
		CostPer1MOutCached:  0.0,
		CostPer1MOut:        0.28,
		ContextWindow:       128_000, // documented limit is 128k; <= 100k recommended
		DefaultMaxTokens:    8000,    // DeepSeek recommends output <= 8k tokens
		SupportsAttachments: false,   // no file uploads
	},
	DeepSeekReasoner: {
		ID:                  DeepSeekReasoner,
		Name:                "DeepSeek Reasoner (R1)",
		Provider:            ProviderDeepSeek,
		APIModel:            "deepseek-reasoner",
		CostPer1MIn:         0.55,
		CostPer1MInCached:   0.14,
		CostPer1MOutCached:  0.0,
		CostPer1MOut:        2.19,
		ContextWindow:       65_536, // R1 context window
		DefaultMaxTokens:    16000,  // R1 recommended output <= 16k tokens
		CanReason:           true,
		SupportsAttachments: false, // no file uploads
	},
}
10 changes: 6 additions & 4 deletions internal/llm/models/models.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,11 @@ var ProviderPopularity = map[ModelProvider]int{
ProviderOpenAI: 3,
ProviderGemini: 4,
ProviderGROQ: 5,
ProviderOpenRouter: 6,
ProviderBedrock: 7,
ProviderAzure: 8,
ProviderVertexAI: 9,
ProviderDeepSeek: 6,
ProviderOpenRouter: 7,
ProviderBedrock: 8,
ProviderAzure: 9,
ProviderVertexAI: 10,
}

var SupportedModels = map[ModelID]Model{
Expand Down Expand Up @@ -95,4 +96,5 @@ func init() {
maps.Copy(SupportedModels, XAIModels)
maps.Copy(SupportedModels, VertexAIGeminiModels)
maps.Copy(SupportedModels, CopilotModels)
maps.Copy(SupportedModels, DeepSeekModels)
}
25 changes: 25 additions & 0 deletions internal/llm/provider/deepseek.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package provider

// deepseekClient wraps openaiClient: DeepSeek exposes an OpenAI-compatible
// API, so every provider method is inherited from the embedded client and
// only the base URL differs.
type deepseekClient struct {
	*openaiClient
}

// DeepSeekClient is the ProviderClient implementation for DeepSeek.
type DeepSeekClient ProviderClient

// newDeepSeekClient builds the DeepSeek provider client. DeepSeek speaks the
// OpenAI wire protocol, so this is simply the OpenAI client retargeted at
// DeepSeek's endpoint; all ProviderClient methods come from the embedded
// openaiClient.
func newDeepSeekClient(opts providerClientOptions) DeepSeekClient {
	// Point the embedded OpenAI client at DeepSeek's API endpoint.
	const baseURL = "https://api.deepseek.com"
	opts.openaiOptions = append(opts.openaiOptions, WithOpenAIBaseURL(baseURL))

	// Delegate everything else to the wrapped OpenAI client.
	return &deepseekClient{
		openaiClient: newOpenAIClient(opts).(*openaiClient),
	}
}
5 changes: 5 additions & 0 deletions internal/llm/provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,11 @@ func NewProvider(providerName models.ModelProvider, opts ...ProviderClientOption
options: clientOptions,
client: newOpenAIClient(clientOptions),
}, nil
case models.ProviderDeepSeek:
return &baseProvider[DeepSeekClient]{
options: clientOptions,
client: newDeepSeekClient(clientOptions),
}, nil
case models.ProviderAzure:
return &baseProvider[AzureClient]{
options: clientOptions,
Expand Down
Loading