feat: bootstrap goaichat CLI and config system
This commit adds one new file:

internal/chat/service.go — 111 lines (new file)
package chat
|
||||
|
||||
import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"strings"

	"github.com/stig/goaichat/internal/config"
	"github.com/stig/goaichat/internal/openai"
)
// CompletionClient defines the subset of the OpenAI client used by the chat
// service. It is declared at the consumer so tests can substitute a fake
// without depending on the concrete client in internal/openai.
type CompletionClient interface {
	// CreateChatCompletion sends a single chat-completion request and returns
	// the provider's response, or an error if the call fails.
	CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (*openai.ChatCompletionResponse, error)
}
// Service coordinates chat requests with the OpenAI client and maintains
// session history. It is not safe for concurrent use: history is mutated
// without synchronization, so callers must serialize access.
type Service struct {
	logger      *slog.Logger      // structured logger; validated non-nil in NewService
	client      CompletionClient  // transport used to execute completion requests
	model       string            // model name sent on every request
	temperature float64           // sampling temperature copied into each request
	stream      bool              // forwarded as the request's Stream flag
	history     []openai.ChatMessage // full conversation so far (user + assistant turns)
}
||||
// NewService constructs a Service from configuration and an OpenAI-compatible client.
|
||||
func NewService(logger *slog.Logger, modelCfg config.ModelConfig, client CompletionClient) (*Service, error) {
|
||||
if logger == nil {
|
||||
return nil, errors.New("logger cannot be nil")
|
||||
}
|
||||
if client == nil {
|
||||
return nil, errors.New("completion client cannot be nil")
|
||||
}
|
||||
if strings.TrimSpace(modelCfg.Name) == "" {
|
||||
return nil, errors.New("model name cannot be empty")
|
||||
}
|
||||
|
||||
return &Service{
|
||||
logger: logger,
|
||||
client: client,
|
||||
model: modelCfg.Name,
|
||||
temperature: modelCfg.Temperature,
|
||||
stream: modelCfg.Stream,
|
||||
history: make([]openai.ChatMessage, 0, 16),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Send submits a user message and returns the assistant reply.
|
||||
func (s *Service) Send(ctx context.Context, input string) (string, error) {
|
||||
if s == nil {
|
||||
return "", errors.New("service is nil")
|
||||
}
|
||||
if ctx == nil {
|
||||
return "", errors.New("context is nil")
|
||||
}
|
||||
|
||||
content := strings.TrimSpace(input)
|
||||
if content == "" {
|
||||
return "", errors.New("input cannot be empty")
|
||||
}
|
||||
|
||||
userMsg := openai.ChatMessage{Role: "user", Content: content}
|
||||
s.history = append(s.history, userMsg)
|
||||
|
||||
messages := append([]openai.ChatMessage(nil), s.history...)
|
||||
temperature := s.temperature
|
||||
|
||||
req := openai.ChatCompletionRequest{
|
||||
Model: s.model,
|
||||
Messages: messages,
|
||||
Stream: s.stream,
|
||||
Temperature: &temperature,
|
||||
}
|
||||
|
||||
s.logger.DebugContext(ctx, "sending chat completion", "model", s.model, "message_count", len(messages))
|
||||
|
||||
resp, err := s.client.CreateChatCompletion(ctx, req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(resp.Choices) == 0 {
|
||||
return "", errors.New("no choices returned from completion")
|
||||
}
|
||||
|
||||
reply := resp.Choices[0].Message
|
||||
s.history = append(s.history, reply)
|
||||
|
||||
return reply.Content, nil
|
||||
}
|
||||
|
||||
// History returns a copy of the current conversation history.
|
||||
func (s *Service) History() []openai.ChatMessage {
|
||||
if s == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
historyCopy := make([]openai.ChatMessage, len(s.history))
|
||||
copy(historyCopy, s.history)
|
||||
return historyCopy
|
||||
}
|
||||
|
||||
// Reset clears the in-memory conversation history.
|
||||
func (s *Service) Reset() {
|
||||
if s == nil {
|
||||
return
|
||||
}
|
||||
s.history = s.history[:0]
|
||||
}
|
Reference in New Issue
Block a user