Skip to content

Commit

Permalink
42: Fixing issues with chatmessage type
Browse files Browse the repository at this point in the history
  • Loading branch information
roma-glushko committed Dec 31, 2023
1 parent a73e058 commit 45c6b15
Show file tree
Hide file tree
Showing 4 changed files with 81 additions and 71 deletions.
15 changes: 6 additions & 9 deletions pkg/api/schemas/language.go
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
package schemas

// ChatRequest defines Glide's Chat Request Schema unified across all language models
type ChatRequest struct {
Message []struct { // TODO: could we reuse ChatMessage?
Role string `json:"role"`
Content string `json:"content"`
} `json:"message"`
MessageHistory []string `json:"messageHistory"`
// UnifiedChatRequest defines Glide's Chat Request Schema unified across all language models
type UnifiedChatRequest struct {
Message ChatMessage `json:"message"`
MessageHistory []ChatMessage `json:"messageHistory"`
}

// ChatResponse defines Glide's Chat Response Schema unified across all language models
type ChatResponse struct {
// UnifiedChatResponse defines Glide's Chat Response Schema unified across all language models
type UnifiedChatResponse struct {
ID string `json:"id,omitempty"`
Created float64 `json:"created,omitempty"`
Choices []*ChatChoice `json:"choices,omitempty"`
Expand Down
2 changes: 1 addition & 1 deletion pkg/providers/language.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@ import (

// ChatModel defines the interface a provider should fulfill to be able to serve language chat requests
type ChatModel interface {
Chat(ctx *context.Context, request *schemas.ChatRequest) (*schemas.ChatResponse, error)
Chat(ctx *context.Context, request *schemas.UnifiedChatRequest) (*schemas.UnifiedChatResponse, error)
}
117 changes: 64 additions & 53 deletions pkg/providers/openai/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,28 +12,67 @@ import (
"go.uber.org/zap"
)

// ChatRequestSchema is an OpenAI-specific request schema
type ChatRequestSchema struct {
Model string `json:"model"`
Messages []map[string]string `json:"messages"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
N int `json:"n,omitempty"`
StopWords []string `json:"stop,omitempty"`
Stream bool `json:"stream,omitempty"`
FrequencyPenalty int `json:"frequency_penalty,omitempty"`
PresencePenalty int `json:"presence_penalty,omitempty"`
LogitBias *map[int]float64 `json:"logit_bias,omitempty"`
User interface{} `json:"user,omitempty"`
Seed interface{} `json:"seed,omitempty"`
Tools []string `json:"tools,omitempty"`
ToolChoice interface{} `json:"tool_choice,omitempty"`
ResponseFormat interface{} `json:"response_format,omitempty"`
// ChatMessage is a single message in an OpenAI chat conversation,
// mirroring the OpenAI API "message" object with a role and text content.
type ChatMessage struct {
	// Role of the message author — presumably "system", "user" or "assistant"; TODO confirm against OpenAI API docs.
	Role string `json:"role"`
	// Content is the text of the message.
	Content string `json:"content"`
}

// ChatRequest is an OpenAI-specific request schema. Field names and JSON tags
// follow the OpenAI chat completions API; zero-valued optional fields are
// omitted from the serialized payload via `omitempty`.
type ChatRequest struct {
	Model       string        `json:"model"`       // target model identifier
	Messages    []ChatMessage `json:"messages"`    // ordered conversation: history first, current message last
	Temperature float64       `json:"temperature,omitempty"`
	TopP        float64       `json:"top_p,omitempty"`
	MaxTokens   int           `json:"max_tokens,omitempty"`
	N           int           `json:"n,omitempty"` // number of completions to generate
	StopWords   []string      `json:"stop,omitempty"`
	Stream      bool          `json:"stream,omitempty"` // always false for now (streaming unsupported — see NewChatRequestFromConfig)
	FrequencyPenalty int      `json:"frequency_penalty,omitempty"`
	PresencePenalty  int      `json:"presence_penalty,omitempty"`
	LogitBias        *map[int]float64 `json:"logit_bias,omitempty"`
	// The interface{} fields below pass provider-specific values through
	// untyped; their exact shapes are defined by the OpenAI API, not here.
	User           interface{} `json:"user,omitempty"`
	Seed           interface{} `json:"seed,omitempty"`
	Tools          []string    `json:"tools,omitempty"`
	ToolChoice     interface{} `json:"tool_choice,omitempty"`
	ResponseFormat interface{} `json:"response_format,omitempty"`
}

// NewChatRequestFromConfig builds a ChatRequest template from the provider
// config. Each default parameter is copied explicitly rather than via
// reflection, which would carry a performance penalty.
func NewChatRequestFromConfig(cfg *Config) *ChatRequest {
	defaults := cfg.DefaultParams

	return &ChatRequest{
		Model:            cfg.Model,
		Temperature:      defaults.Temperature,
		TopP:             defaults.TopP,
		MaxTokens:        defaults.MaxTokens,
		N:                defaults.N,
		StopWords:        defaults.StopWords,
		Stream:           false, // streaming is not supported right now
		FrequencyPenalty: defaults.FrequencyPenalty,
		PresencePenalty:  defaults.PresencePenalty,
		LogitBias:        defaults.LogitBias,
		User:             defaults.User,
		Seed:             defaults.Seed,
		Tools:            defaults.Tools,
		ToolChoice:       defaults.ToolChoice,
		ResponseFormat:   defaults.ResponseFormat,
	}
}

// NewChatMessagesFromUnifiedRequest converts a unified chat request into the
// provider-specific message list, placing the history first and the new chat
// message last.
func NewChatMessagesFromUnifiedRequest(request *schemas.UnifiedChatRequest) []ChatMessage {
	history := request.MessageHistory
	messages := make([]ChatMessage, len(history)+1)

	for idx, msg := range history {
		messages[idx] = ChatMessage{Role: msg.Role, Content: msg.Content}
	}

	// The current message always goes at the end, after the history.
	messages[len(history)] = ChatMessage{Role: request.Message.Role, Content: request.Message.Content}

	return messages
}

// Chat sends a chat request to the specified OpenAI model.
func (c *Client) Chat(ctx context.Context, request *schemas.ChatRequest) (*schemas.ChatResponse, error) {
func (c *Client) Chat(ctx context.Context, request *schemas.UnifiedChatRequest) (*schemas.UnifiedChatResponse, error) {
// Create a new chat request
chatRequest := c.createChatRequestSchema(request)

Expand All @@ -51,43 +90,15 @@ func (c *Client) Chat(ctx context.Context, request *schemas.ChatRequest) (*schem
return chatResponse, nil
}

func (c *Client) createChatRequestSchema(request *schemas.ChatRequest) *ChatRequestSchema {
var messages []map[string]string

// Add items from messageHistory first
messages = append(messages, request.MessageHistory...)

// Add msg variable last
messages = append(messages, request.Message)

// Iterate through unifiedData.Params and add them to the request, otherwise leave the default value
defaultParams := u.Params

chatRequest := &ChatRequestSchema{
Model: c.config.Model,
Messages: messages,
Temperature: 0.8,
TopP: 1,
MaxTokens: 100,
N: 1,
StopWords: []string{},
Stream: false,
FrequencyPenalty: 0,
PresencePenalty: 0,
LogitBias: nil,
User: nil,
Seed: nil,
Tools: []string{},
ToolChoice: nil,
ResponseFormat: nil,
}

// TODO: set params
// createChatRequestSchema builds a per-request ChatRequest by copying the
// client's pre-built template and filling in the request-specific messages.
//
// BUG FIX: c.chatRequestTemplate is a *ChatRequest (pointer), so the previous
// `chatRequest := c.chatRequestTemplate` copied only the pointer — the
// subsequent Messages assignment mutated the shared template, leaking one
// request's messages into the next (and racing under concurrent use).
// Dereferencing takes a shallow struct copy, so the template stays pristine.
func (c *Client) createChatRequestSchema(request *schemas.UnifiedChatRequest) *ChatRequest {
	// TODO: consider using objectpool to optimize memory allocation
	chatRequest := *c.chatRequestTemplate // shallow copy of the template
	chatRequest.Messages = NewChatMessagesFromUnifiedRequest(request)

	return &chatRequest
}

func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequestSchema) (*schemas.ChatResponse, error) {
func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*schemas.UnifiedChatResponse, error) {
// Build request payload
rawPayload, err := json.Marshal(payload)
if err != nil {
Expand Down Expand Up @@ -134,7 +145,7 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequestSchema)
}

// Parse response
var response schemas.ChatResponse
var response schemas.UnifiedChatResponse

return &response, json.NewDecoder(resp.Body).Decode(&response)
}
18 changes: 10 additions & 8 deletions pkg/providers/openai/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,20 +24,22 @@ var (

// Client is a client for accessing OpenAI API
type Client struct {
baseURL string
chatURL string
config *Config
httpClient *http.Client
telemetry *telemetry.Telemetry
baseURL string
chatURL string
chatRequestTemplate *ChatRequest
config *Config
httpClient *http.Client
telemetry *telemetry.Telemetry
}

// NewClient creates a new OpenAI client for the OpenAI API.
func NewClient(cfg *Config, tel *telemetry.Telemetry) (*Client, error) {
// Create a new client
c := &Client{
baseURL: cfg.BaseURL,
chatURL: fmt.Sprintf("%s%s", cfg.BaseURL, cfg.ChatEndpoint),
config: cfg,
baseURL: cfg.BaseURL,
chatURL: fmt.Sprintf("%s%s", cfg.BaseURL, cfg.ChatEndpoint),
config: cfg,
chatRequestTemplate: NewChatRequestFromConfig(cfg),
httpClient: &http.Client{
// TODO: use values from the config
Timeout: time.Second * 30,
Expand Down

0 comments on commit 45c6b15

Please sign in to comment.