
Commit

feat: support deepseek thinking
Sh1n3zZ committed Feb 7, 2025
1 parent 403c2b8 commit fc81fbf
Showing 8 changed files with 254 additions and 7 deletions.
4 changes: 3 additions & 1 deletion adapter/adapter.go
@@ -5,8 +5,9 @@ import (
 	"chat/adapter/baichuan"
 	"chat/adapter/bing"
 	"chat/adapter/claude"
-	"chat/adapter/common"
+	adaptercommon "chat/adapter/common"
 	"chat/adapter/dashscope"
+	"chat/adapter/deepseek"
 	"chat/adapter/hunyuan"
 	"chat/adapter/midjourney"
 	"chat/adapter/openai"
@@ -35,6 +36,7 @@ var channelFactories = map[string]adaptercommon.FactoryCreator{
 	globals.SkylarkChannelType:    skylark.NewChatInstanceFromConfig,
 	globals.ZhinaoChannelType:     zhinao.NewChatInstanceFromConfig,
 	globals.MidjourneyChannelType: midjourney.NewChatInstanceFromConfig,
+	globals.DeepseekChannelType:   deepseek.NewChatInstanceFromConfig,
 
 	globals.MoonshotChannelType: openai.NewChatInstanceFromConfig, // openai format
 	globals.GroqChannelType:     openai.NewChatInstanceFromConfig, // openai format
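
For context, a minimal standalone sketch (not code from this repository) of what registering deepseek.NewChatInstanceFromConfig in channelFactories enables: channel creation becomes a map lookup keyed by the channel type string. The simplified factoryCreator signature and the newChannel helper below are illustrative assumptions only.

package main

import "fmt"

type factory interface{}

// factoryCreator is a simplified stand-in for adaptercommon.FactoryCreator.
type factoryCreator func(endpoint, secret string) factory

var channelFactories = map[string]factoryCreator{
    "deepseek": func(endpoint, secret string) factory {
        return fmt.Sprintf("deepseek instance for %s", endpoint) // stand-in for *deepseek.ChatInstance
    },
}

// newChannel looks up the creator registered for a channel type and instantiates it.
func newChannel(channelType, endpoint, secret string) (factory, error) {
    creator, ok := channelFactories[channelType]
    if !ok {
        return nil, fmt.Errorf("unsupported channel type: %s", channelType)
    }
    return creator(endpoint, secret), nil
}

func main() {
    instance, err := newChannel("deepseek", "https://api.deepseek.com", "sk-placeholder")
    fmt.Println(instance, err)
}
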
174 changes: 174 additions & 0 deletions adapter/deepseek/chat.go
@@ -0,0 +1,174 @@
package deepseek

import (
    adaptercommon "chat/adapter/common"
    "chat/globals"
    "chat/utils"
    "errors"
    "fmt"
    "strings"
)

type ChatInstance struct {
    Endpoint         string
    ApiKey           string
    isFirstReasoning bool
}

func (c *ChatInstance) GetEndpoint() string {
    return c.Endpoint
}

func (c *ChatInstance) GetApiKey() string {
    return c.ApiKey
}

func (c *ChatInstance) GetHeader() map[string]string {
    return map[string]string{
        "Content-Type":  "application/json",
        "Authorization": fmt.Sprintf("Bearer %s", c.GetApiKey()),
    }
}

func NewChatInstance(endpoint, apiKey string) *ChatInstance {
    return &ChatInstance{
        Endpoint:         endpoint,
        ApiKey:           apiKey,
        isFirstReasoning: true,
    }
}

func NewChatInstanceFromConfig(conf globals.ChannelConfig) adaptercommon.Factory {
    return NewChatInstance(
        conf.GetEndpoint(),
        conf.GetRandomSecret(),
    )
}

func (c *ChatInstance) GetChatEndpoint() string {
    return fmt.Sprintf("%s/chat/completions", c.GetEndpoint())
}

func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool) interface{} {
    return ChatRequest{
        Model:            props.Model,
        Messages:         props.Message,
        MaxTokens:        props.MaxTokens,
        Stream:           stream,
        Temperature:      props.Temperature,
        TopP:             props.TopP,
        PresencePenalty:  props.PresencePenalty,
        FrequencyPenalty: props.FrequencyPenalty,
    }
}
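
A small standalone sketch of the request body GetChatBody assembles for a streaming deepseek-reasoner call. The field names follow the ChatRequest struct tags added in struct.go; the trimmed-down message type, the temperature value, and the prompt text are illustrative assumptions only.

package main

import (
    "encoding/json"
    "fmt"
)

type message struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

type chatRequest struct {
    Model       string    `json:"model"`
    Messages    []message `json:"messages"`
    MaxTokens   *int      `json:"max_tokens,omitempty"`
    Stream      bool      `json:"stream"`
    Temperature *float32  `json:"temperature,omitempty"`
}

func main() {
    temperature := float32(0.7)
    body := chatRequest{
        Model:       "deepseek-reasoner",
        Messages:    []message{{Role: "user", Content: "Why is the sky blue?"}},
        Stream:      true,
        Temperature: &temperature,
    }
    raw, _ := json.MarshalIndent(body, "", "  ")
    fmt.Println(string(raw))
    // Pointer fields left unset (max_tokens here) are dropped by omitempty,
    // so the upstream API only receives parameters the caller actually set.
}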

func processChatResponse(data string) *ChatResponse {
    if form := utils.UnmarshalForm[ChatResponse](data); form != nil {
        return form
    }
    return nil
}

func processChatStreamResponse(data string) *ChatStreamResponse {
    if form := utils.UnmarshalForm[ChatStreamResponse](data); form != nil {
        return form
    }
    return nil
}

func processChatErrorResponse(data string) *ChatStreamErrorResponse {
    if form := utils.UnmarshalForm[ChatStreamErrorResponse](data); form != nil {
        return form
    }
    return nil
}

func (c *ChatInstance) ProcessLine(data string) (string, error) {
    if form := processChatStreamResponse(data); form != nil {
        if len(form.Choices) == 0 {
            return "", nil
        }

        delta := form.Choices[0].Delta
        if delta.ReasoningContent != nil {
            content := *delta.ReasoningContent
            // replace double newlines with single newlines for markdown
            if strings.Contains(content, "\n\n") {
                content = strings.ReplaceAll(content, "\n\n", "\n")
            }
            if c.isFirstReasoning {
                c.isFirstReasoning = false
                return fmt.Sprintf(">%s", content), nil
            }
            return content, nil
        }
        return delta.Content, nil
    }

    if form := processChatErrorResponse(data); form != nil {
        if form.Error.Message != "" {
            return "", errors.New(fmt.Sprintf("deepseek error: %s", form.Error.Message))
        }
    }

    return "", nil
}
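
A standalone sketch of the formatting rule ProcessLine applies to streamed reasoning tokens: the first reasoning chunk is prefixed with ">" so the chain of thought renders as a markdown blockquote, and internal blank lines are collapsed so the quote is not broken. The renderer type and the sample chunks below are invented for illustration.

package main

import (
    "fmt"
    "strings"
)

type renderer struct {
    isFirstReasoning bool
}

// renderReasoning mirrors the reasoning branch of ProcessLine: collapse blank
// lines so the quote is not interrupted, and prefix only the first chunk.
func (r *renderer) renderReasoning(chunk string) string {
    chunk = strings.ReplaceAll(chunk, "\n\n", "\n")
    if r.isFirstReasoning {
        r.isFirstReasoning = false
        return ">" + chunk
    }
    return chunk
}

func main() {
    r := &renderer{isFirstReasoning: true}
    var out strings.Builder
    for _, chunk := range []string{"Let me think.", "\n\nShorter wavelengths scatter more."} {
        out.WriteString(r.renderReasoning(chunk))
    }
    out.WriteString("\n\nThe sky looks blue because of Rayleigh scattering.") // answer content
    fmt.Println(out.String())
}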

func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
    res, err := utils.Post(
        c.GetChatEndpoint(),
        c.GetHeader(),
        c.GetChatBody(props, false),
        props.Proxy,
    )

    if err != nil || res == nil {
        return "", fmt.Errorf("deepseek error: %s", err.Error())
    }

    data := utils.MapToStruct[ChatResponse](res)
    if data == nil {
        return "", fmt.Errorf("deepseek error: cannot parse response")
    }

    if len(data.Choices) == 0 {
        return "", fmt.Errorf("deepseek error: no choices")
    }

    message := data.Choices[0].Message
    content := message.Content
    if message.ReasoningContent != nil {
        content = fmt.Sprintf(">%s\n\n%s", *message.ReasoningContent, content)
    }

    return content, nil
}
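
For the non-streaming path, a standalone sketch of the fold-in CreateChatRequest performs: when the reply message carries reasoning_content, it is rendered as a blockquote above the answer. The sample response body is invented but follows the ChatResponse shape defined in struct.go.

package main

import (
    "encoding/json"
    "fmt"
)

type chatResponse struct {
    Choices []struct {
        Message struct {
            Content          string  `json:"content"`
            ReasoningContent *string `json:"reasoning_content"`
        } `json:"message"`
    } `json:"choices"`
}

func main() {
    body := `{"choices":[{"message":{"content":"Because of Rayleigh scattering.","reasoning_content":"Shorter wavelengths scatter more strongly."}}]}`

    var data chatResponse
    if err := json.Unmarshal([]byte(body), &data); err != nil || len(data.Choices) == 0 {
        fmt.Println("deepseek error: cannot parse response")
        return
    }

    message := data.Choices[0].Message
    content := message.Content
    if message.ReasoningContent != nil {
        // Same fold-in as CreateChatRequest: blockquoted reasoning, blank line, answer.
        content = fmt.Sprintf(">%s\n\n%s", *message.ReasoningContent, content)
    }
    fmt.Println(content)
}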

func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
    c.isFirstReasoning = true
    err := utils.EventScanner(&utils.EventScannerProps{
        Method:  "POST",
        Uri:     c.GetChatEndpoint(),
        Headers: c.GetHeader(),
        Body:    c.GetChatBody(props, true),
        Callback: func(data string) error {
            partial, err := c.ProcessLine(data)
            if err != nil {
                return err
            }
            return callback(&globals.Chunk{Content: partial})
        },
    }, props.Proxy)

    if err != nil {
        if form := processChatErrorResponse(err.Body); form != nil {
            if form.Error.Type == "" && form.Error.Message == "" {
                return errors.New(utils.ToMarkdownCode("json", err.Body))
            }
            return errors.New(fmt.Sprintf("deepseek error: %s (type: %s)", form.Error.Message, form.Error.Type))
        }
        return err.Error
    }

    return nil
}
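
A rough standalone illustration of the streaming flow that CreateStreamChatRequest drives through utils.EventScanner: POST the body with stream set to true, read the SSE "data:" lines, and hand each decoded delta to a consumer. This is an assumption-laden sketch of an OpenAI-compatible stream, not the repository's utils code; the API key is a placeholder and error handling is reduced to the essentials.

package main

import (
    "bufio"
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
    "strings"
)

type streamChunk struct {
    Choices []struct {
        Delta struct {
            Content          string  `json:"content"`
            ReasoningContent *string `json:"reasoning_content"`
        } `json:"delta"`
    } `json:"choices"`
}

func main() {
    payload := []byte(`{"model":"deepseek-reasoner","stream":true,"messages":[{"role":"user","content":"Hello"}]}`)
    req, err := http.NewRequest("POST", "https://api.deepseek.com/chat/completions", bytes.NewReader(payload))
    if err != nil {
        fmt.Println("build request failed:", err)
        return
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "Bearer YOUR_API_KEY") // placeholder secret

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    defer resp.Body.Close()

    scanner := bufio.NewScanner(resp.Body)
    for scanner.Scan() {
        line := strings.TrimSpace(scanner.Text())
        if !strings.HasPrefix(line, "data:") {
            continue
        }
        data := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
        if data == "[DONE]" {
            break
        }
        var chunk streamChunk
        if err := json.Unmarshal([]byte(data), &chunk); err != nil || len(chunk.Choices) == 0 {
            continue
        }
        delta := chunk.Choices[0].Delta
        if delta.ReasoningContent != nil {
            fmt.Print(*delta.ReasoningContent) // thinking tokens arrive first
        } else {
            fmt.Print(delta.Content) // then the answer tokens
        }
    }
}
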
9 changes: 9 additions & 0 deletions adapter/deepseek/reflect.go
@@ -0,0 +1,9 @@
package deepseek

import "reflect"

var _ = reflect.TypeOf(ChatInstance{})
var _ = reflect.TypeOf(ChatRequest{})
var _ = reflect.TypeOf(ChatResponse{})
var _ = reflect.TypeOf(ChatStreamResponse{})
var _ = reflect.TypeOf(ChatStreamErrorResponse{})
56 changes: 56 additions & 0 deletions adapter/deepseek/struct.go
@@ -0,0 +1,56 @@
package deepseek

import (
    "chat/globals"
)

// DeepSeek API is similar to OpenAI API with additional reasoning content

type ChatRequest struct {
    Model            string            `json:"model"`
    Messages         []globals.Message `json:"messages"`
    MaxTokens        *int              `json:"max_tokens,omitempty"`
    Stream           bool              `json:"stream"`
    Temperature      *float32          `json:"temperature,omitempty"`
    TopP             *float32          `json:"top_p,omitempty"`
    PresencePenalty  *float32          `json:"presence_penalty,omitempty"`
    FrequencyPenalty *float32          `json:"frequency_penalty,omitempty"`
}

// ChatResponse is the native http response body for deepseek
type ChatResponse struct {
    ID      string `json:"id"`
    Object  string `json:"object"`
    Created int64  `json:"created"`
    Model   string `json:"model"`
    Choices []struct {
        Index        int             `json:"index"`
        Message      globals.Message `json:"message"`
        FinishReason string          `json:"finish_reason"`
    } `json:"choices"`
    Usage struct {
        PromptTokens     int `json:"prompt_tokens"`
        CompletionTokens int `json:"completion_tokens"`
        TotalTokens      int `json:"total_tokens"`
    } `json:"usage"`
}

// ChatStreamResponse is the stream response body for deepseek
type ChatStreamResponse struct {
    ID      string `json:"id"`
    Object  string `json:"object"`
    Created int64  `json:"created"`
    Model   string `json:"model"`
    Choices []struct {
        Delta        globals.Message `json:"delta"`
        Index        int             `json:"index"`
        FinishReason string          `json:"finish_reason"`
    } `json:"choices"`
}

type ChatStreamErrorResponse struct {
    Error struct {
        Message string `json:"message"`
        Type    string `json:"type"`
    } `json:"error"`
}
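
A short sketch of the error payload ChatStreamErrorResponse targets, which is what processChatErrorResponse relies on when a request fails. The sample body is invented but follows the error/message/type layout the struct expects.

package main

import (
    "encoding/json"
    "fmt"
)

type chatStreamErrorResponse struct {
    Error struct {
        Message string `json:"message"`
        Type    string `json:"type"`
    } `json:"error"`
}

func main() {
    body := `{"error":{"message":"Authentication Fails","type":"authentication_error"}}`
    var form chatStreamErrorResponse
    if err := json.Unmarshal([]byte(body), &form); err == nil && form.Error.Message != "" {
        // Mirrors the message CreateStreamChatRequest builds from a failed call.
        fmt.Printf("deepseek error: %s (type: %s)\n", form.Error.Message, form.Error.Type)
    }
}
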
2 changes: 2 additions & 0 deletions app/src/admin/channel.ts
@@ -66,6 +66,7 @@ export const ChannelTypes: Record<string, string> = {
  groq: "Groq Cloud",
  bing: "New Bing",
  slack: "Slack Claude",
  deepseek: "深度求索 DeepSeek",
};

export const ShortChannelTypes: Record<string, string> = {
@@ -85,6 +86,7 @@ export const ShortChannelTypes: Record<string, string> = {
  groq: "Groq",
  bing: "Bing",
  slack: "Slack",
  deepseek: "深度求索",
};

export const ChannelInfos: Record<string, ChannelInfo> = {
1 change: 1 addition & 0 deletions globals/constant.go
@@ -25,6 +25,7 @@
    MidjourneyChannelType = "midjourney"
    MoonshotChannelType   = "moonshot"
    GroqChannelType       = "groq"
    DeepseekChannelType   = "deepseek"
)

const (
13 changes: 7 additions & 6 deletions globals/types.go
@@ -3,12 +3,13 @@ package globals
 type Hook func(data *Chunk) error
 
 type Message struct {
-    Role         string        `json:"role"`
-    Content      string        `json:"content"`
-    Name         *string       `json:"name,omitempty"`
-    FunctionCall *FunctionCall `json:"function_call,omitempty"` // only `function` role
-    ToolCallId   *string       `json:"tool_call_id,omitempty"`  // only `tool` role
-    ToolCalls    *ToolCalls    `json:"tool_calls,omitempty"`    // only `assistant` role
+    Role             string        `json:"role"`
+    Content          string        `json:"content"`
+    Name             *string       `json:"name,omitempty"`
+    FunctionCall     *FunctionCall `json:"function_call,omitempty"`     // only `function` role
+    ToolCallId       *string       `json:"tool_call_id,omitempty"`      // only `tool` role
+    ToolCalls        *ToolCalls    `json:"tool_calls,omitempty"`        // only `assistant` role
+    ReasoningContent *string       `json:"reasoning_content,omitempty"` // only for deepseek reasoner models
 }
 
 type Chunk struct {
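
A minimal sketch of why ReasoningContent is declared as *string with omitempty: ordinary chat messages marshal exactly as before, and the field only appears when an upstream reply actually carries reasoning. The trimmed-down message type and the sample values are invented for illustration.

package main

import (
    "encoding/json"
    "fmt"
)

type message struct {
    Role             string  `json:"role"`
    Content          string  `json:"content"`
    ReasoningContent *string `json:"reasoning_content,omitempty"`
}

func main() {
    plain, _ := json.Marshal(message{Role: "user", Content: "hi"})
    fmt.Println(string(plain)) // {"role":"user","content":"hi"}

    thinking := "The user greeted me."
    reply, _ := json.Marshal(message{Role: "assistant", Content: "Hello!", ReasoningContent: &thinking})
    fmt.Println(string(reply)) // reasoning_content appears only when it is set
}
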
2 changes: 2 additions & 0 deletions globals/variables.go
@@ -133,6 +133,8 @@ const (
    SkylarkPlus = "skylark-plus-public"
    SkylarkPro  = "skylark-pro-public"
    SkylarkChat = "skylark-chat"
    DeepseekV3  = "deepseek-chat"
    DeepseekR1  = "deepseek-reasoner"
)
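
A tiny hypothetical helper showing how these constants might be used: only the reasoner model is expected to return reasoning_content, so callers can decide in advance whether to anticipate the blockquote-prefixed thinking output. That mapping is an assumption about the upstream API, not something asserted by this commit.

package main

import "fmt"

const (
    DeepseekV3 = "deepseek-chat"
    DeepseekR1 = "deepseek-reasoner"
)

// expectsReasoning is a hypothetical helper: reasoning output is anticipated
// only for the reasoner model.
func expectsReasoning(model string) bool {
    return model == DeepseekR1
}

func main() {
    fmt.Println(expectsReasoning(DeepseekV3), expectsReasoning(DeepseekR1)) // false true
}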

var OpenAIDalleModels = []string{
