feat: bootstrap goaichat CLI and config system

This commit is contained in:
2025-10-01 14:47:32 +02:00
parent 12a65231ef
commit 0fd24a5cfb
14 changed files with 1082 additions and 3 deletions

193
internal/app/app.go Normal file
View File

@@ -0,0 +1,193 @@
package app
import (
"bufio"
"context"
"errors"
"fmt"
"io"
"log/slog"
"os"
"strings"
"github.com/stig/goaichat/internal/chat"
"github.com/stig/goaichat/internal/config"
"github.com/stig/goaichat/internal/openai"
)
// App encapsulates the Goaichat application runtime wiring.
type App struct {
	logger *slog.Logger   // structured logger for lifecycle events
	config *config.Config // loaded runtime configuration
	openAI *openai.Client // lazily built in Run via initOpenAIClient unless pre-injected
	chat   *chat.Service  // lazily built in Run via initChatService unless pre-injected
	input  io.Reader      // CLI input stream; defaults to os.Stdin
	output io.Writer      // CLI output stream; defaults to os.Stdout
}
// New constructs a new App instance with os.Stdin/os.Stdout as the default
// I/O streams; options may override them (see WithIO).
func New(logger *slog.Logger, cfg *config.Config, opts ...Option) *App {
	a := &App{
		logger: logger,
		config: cfg,
		input:  os.Stdin,
		output: os.Stdout,
	}
	for _, o := range opts {
		o(a)
	}
	// Guard against options that explicitly set a stream to nil.
	if a.input == nil {
		a.input = os.Stdin
	}
	if a.output == nil {
		a.output = os.Stdout
	}
	return a
}
// Option configures an App instance.
type Option func(*App)

// WithIO overrides the input/output streams, primarily for testing.
// Passing nil for either stream causes New to fall back to os.Stdin/os.Stdout.
func WithIO(in io.Reader, out io.Writer) Option {
	return func(a *App) {
		a.input = in
		a.output = out
	}
}
// Run starts the application lifecycle: it wires up the OpenAI client and
// the chat service, then drives the interactive CLI loop until exit or error.
func (a *App) Run(ctx context.Context) error {
	switch {
	case a == nil:
		return errors.New("app is nil")
	case ctx == nil:
		return errors.New("context is nil")
	}
	if err := a.initOpenAIClient(); err != nil {
		return err
	}
	if err := a.initChatService(); err != nil {
		return err
	}
	a.logger.InfoContext(ctx, "starting goaichat", "model", a.config.Model.Name, "api_url", a.config.API.URL)
	if err := a.runCLILoop(ctx); err != nil {
		return err
	}
	a.logger.InfoContext(ctx, "goaichat shutdown complete")
	return nil
}
// initOpenAIClient builds the OpenAI client from configuration, unless one
// has already been injected (e.g. by tests).
func (a *App) initOpenAIClient() error {
	if a.openAI != nil {
		return nil
	}
	c, err := openai.NewClient(a.config.API.Key, openai.WithBaseURL(a.config.API.URL))
	if err != nil {
		return err
	}
	a.openAI = c
	return nil
}
// initChatService builds the chat service on top of the OpenAI client,
// unless one has already been injected (e.g. by tests).
func (a *App) initChatService() error {
	if a.chat != nil {
		return nil
	}
	svc, err := chat.NewService(a.logger.With("component", "chat"), a.config.Model, a.openAI)
	if err != nil {
		return err
	}
	a.chat = svc
	return nil
}
// runCLILoop reads user input line by line and dispatches slash commands or
// chat messages until EOF, a scan error, an output error, or /exit.
// Command and chat errors are printed and the loop continues; only I/O
// failures on the output stream terminate the loop with an error.
func (a *App) runCLILoop(ctx context.Context) error {
	sc := bufio.NewScanner(a.input)
	if _, err := fmt.Fprintln(a.output, "Type your message. Use /exit to quit, /reset to clear history."); err != nil {
		return err
	}
	for {
		if _, err := fmt.Fprint(a.output, "> "); err != nil {
			return err
		}
		if !sc.Scan() {
			// EOF yields a nil Err and a clean shutdown; a real scanner
			// error is propagated.
			return sc.Err()
		}
		line := sc.Text()
		handled, exit, err := a.handleCommand(ctx, line)
		switch {
		case err != nil:
			if _, werr := fmt.Fprintf(a.output, "Command error: %v\n", err); werr != nil {
				return werr
			}
			continue
		case handled && exit:
			return nil
		case handled:
			continue
		}
		reply, err := a.chat.Send(ctx, line)
		if err != nil {
			if _, werr := fmt.Fprintf(a.output, "Error: %v\n", err); werr != nil {
				return werr
			}
			continue
		}
		if _, err := fmt.Fprintf(a.output, "AI: %s\n", reply); err != nil {
			return err
		}
	}
}
// handleCommand inspects input for slash commands. It reports whether the
// input was consumed (handled), whether the loop should terminate (exit),
// and any error encountered while executing the command. Non-command input
// is reported as unhandled so the caller forwards it to the chat service.
func (a *App) handleCommand(ctx context.Context, input string) (handled bool, exit bool, err error) {
	_ = ctx // reserved for future context-aware commands
	cmd := strings.TrimSpace(input)
	switch {
	case cmd == "":
		return true, false, errors.New("no input provided")
	case !strings.HasPrefix(cmd, "/"):
		return false, false, nil
	}
	switch cmd {
	case "/exit":
		return true, true, nil
	case "/reset":
		a.chat.Reset()
		_, werr := fmt.Fprintln(a.output, "History cleared.")
		return true, false, werr
	case "/help":
		_, werr := fmt.Fprintln(a.output, "Commands: /exit, /reset, /help (more coming soon)")
		return true, false, werr
	default:
		_, werr := fmt.Fprintf(a.output, "Unknown command %q. Try /help.\n", cmd)
		return true, false, werr
	}
}

111
internal/chat/service.go Normal file
View File

@@ -0,0 +1,111 @@
package chat
import (
"context"
"errors"
"log/slog"
"strings"
"github.com/stig/goaichat/internal/config"
"github.com/stig/goaichat/internal/openai"
)
// CompletionClient defines the subset of the OpenAI client used by the chat service.
type CompletionClient interface {
	CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (*openai.ChatCompletionResponse, error)
}

// Service coordinates chat requests with the OpenAI client and maintains session history.
// history is mutated without any synchronization, so a Service must not be
// used from multiple goroutines concurrently.
type Service struct {
	logger      *slog.Logger         // component-scoped logger
	client      CompletionClient     // backend used to create completions
	model       string               // model name sent with every request
	temperature float64              // sampling temperature sent with every request
	stream      bool                 // forwarded verbatim as the request's Stream field
	history     []openai.ChatMessage // conversation so far, oldest first
}
// NewService constructs a Service from configuration and an OpenAI-compatible client.
// It rejects a nil logger, a nil client, and a blank model name.
func NewService(logger *slog.Logger, modelCfg config.ModelConfig, client CompletionClient) (*Service, error) {
	switch {
	case logger == nil:
		return nil, errors.New("logger cannot be nil")
	case client == nil:
		return nil, errors.New("completion client cannot be nil")
	case strings.TrimSpace(modelCfg.Name) == "":
		return nil, errors.New("model name cannot be empty")
	}
	svc := &Service{
		logger:      logger,
		client:      client,
		model:       modelCfg.Name,
		temperature: modelCfg.Temperature,
		stream:      modelCfg.Stream,
		history:     make([]openai.ChatMessage, 0, 16),
	}
	return svc, nil
}
// Send submits a user message and returns the assistant reply.
//
// The user message and the assistant reply are committed to the session
// history only after a successful completion. Previously the user message
// was appended before the request, so a failed call left an unanswered
// message in history and every retry silently resent it.
func (s *Service) Send(ctx context.Context, input string) (string, error) {
	if s == nil {
		return "", errors.New("service is nil")
	}
	if ctx == nil {
		return "", errors.New("context is nil")
	}
	content := strings.TrimSpace(input)
	if content == "" {
		return "", errors.New("input cannot be empty")
	}
	userMsg := openai.ChatMessage{Role: "user", Content: content}
	// Build the request from a copy of history plus the new message; the
	// stored history is updated only on success.
	messages := make([]openai.ChatMessage, 0, len(s.history)+1)
	messages = append(messages, s.history...)
	messages = append(messages, userMsg)
	temperature := s.temperature
	req := openai.ChatCompletionRequest{
		Model:    s.model,
		Messages: messages,
		// NOTE(review): the HTTP client decodes a single JSON body; confirm
		// the server ignores stream=true or that streaming is handled upstream.
		Stream:      s.stream,
		Temperature: &temperature,
	}
	s.logger.DebugContext(ctx, "sending chat completion", "model", s.model, "message_count", len(messages))
	resp, err := s.client.CreateChatCompletion(ctx, req)
	if err != nil {
		return "", err
	}
	if len(resp.Choices) == 0 {
		return "", errors.New("no choices returned from completion")
	}
	reply := resp.Choices[0].Message
	s.history = append(s.history, userMsg, reply)
	return reply.Content, nil
}
// History returns a copy of the current conversation history.
// The copy isolates callers from subsequent Send/Reset mutations; it is
// non-nil (but empty) even when no messages have been exchanged.
func (s *Service) History() []openai.ChatMessage {
	if s == nil {
		return nil
	}
	historyCopy := make([]openai.ChatMessage, len(s.history))
	copy(historyCopy, s.history)
	return historyCopy
}

// Reset clears the in-memory conversation history.
// The backing array is retained (length reset to zero) so capacity is
// reused by later Sends.
func (s *Service) Reset() {
	if s == nil {
		return
	}
	s.history = s.history[:0]
}

127
internal/config/config.go Normal file
View File

@@ -0,0 +1,127 @@
package config
import (
"errors"
"fmt"
"os"
"strings"
"gopkg.in/yaml.v3"
)
const (
	// Environment variables that override the file-based API settings.
	envAPIKey = "GOAICHAT_API_KEY"
	envAPIURL = "GOAICHAT_API_URL"
)

// Config captures runtime configuration for the Goaichat application.
type Config struct {
	API     APIConfig     `yaml:"api"`
	Model   ModelConfig   `yaml:"model"`
	Logging LoggingConfig `yaml:"logging"`
	UI      UIConfig      `yaml:"ui"`
}

// APIConfig holds settings for connecting to the OpenAI-compatible API.
type APIConfig struct {
	URL string `yaml:"url"` // base URL, e.g. https://api.openai.com/v1
	Key string `yaml:"key"` // bearer token; may instead come from GOAICHAT_API_KEY
}

// ModelConfig controls default model behaviour.
type ModelConfig struct {
	Name        string  `yaml:"name"`        // model identifier sent to the API
	Temperature float64 `yaml:"temperature"` // sampling temperature; validated to [0, 2]
	Stream      bool    `yaml:"stream"`      // request streamed responses
}

// LoggingConfig encapsulates logging preferences.
type LoggingConfig struct {
	Level string `yaml:"level"` // log level name, e.g. "info"
}

// UIConfig defines terminal rendering preferences.
type UIConfig struct {
	ShowTimestamps bool `yaml:"show_timestamps"`
}
// Load reads configuration from the provided path, falling back to defaults and
// environment overrides.
//
// When path is empty, ./config.yaml is loaded if present and its absence is
// not an error. When path is non-empty the file must exist; a missing file
// is now reported with the offending path (the wrap uses %w, so
// errors.Is(err, os.ErrNotExist) still works). The merged configuration is
// validated before being returned.
func Load(path string) (*Config, error) {
	cfg := defaultConfig()
	if path != "" {
		if err := loadFile(path, &cfg); err != nil {
			if errors.Is(err, os.ErrNotExist) {
				// Previously the bare os.ErrNotExist leaked out with no
				// indication of which file was requested.
				return nil, fmt.Errorf("config file %q: %w", path, err)
			}
			return nil, err
		}
	} else {
		if err := loadFile("config.yaml", &cfg); err != nil && !errors.Is(err, os.ErrNotExist) {
			return nil, err
		}
	}
	applyEnvOverrides(&cfg)
	if err := cfg.validate(); err != nil {
		return nil, err
	}
	return &cfg, nil
}
// loadFile reads and unmarshals a YAML config file into cfg. A missing file
// is returned as the bare os.ErrNotExist so callers can detect it with
// errors.Is; all other failures are wrapped with context.
func loadFile(path string, cfg *Config) error {
	data, err := os.ReadFile(path)
	switch {
	case errors.Is(err, os.ErrNotExist):
		return err
	case err != nil:
		return fmt.Errorf("read config: %w", err)
	}
	if err := yaml.Unmarshal(data, cfg); err != nil {
		return fmt.Errorf("parse config: %w", err)
	}
	return nil
}
// applyEnvOverrides replaces API settings with non-empty environment values.
func applyEnvOverrides(cfg *Config) {
	if v := strings.TrimSpace(os.Getenv(envAPIURL)); v != "" {
		cfg.API.URL = v
	}
	if v := strings.TrimSpace(os.Getenv(envAPIKey)); v != "" {
		cfg.API.Key = v
	}
}
// validate checks that the merged configuration is usable: the API URL and
// key must be set, and the temperature must lie within [0, 2].
func (c *Config) validate() error {
	switch {
	case strings.TrimSpace(c.API.URL) == "":
		return errors.New("api.url must be set")
	case strings.TrimSpace(c.API.Key) == "":
		return errors.New("api.key must be set or GOAICHAT_API_KEY provided")
	case c.Model.Temperature < 0 || c.Model.Temperature > 2:
		return fmt.Errorf("model.temperature must be between 0 and 2, got %f", c.Model.Temperature)
	}
	return nil
}
// defaultConfig returns the built-in baseline configuration that file and
// environment settings are layered on top of.
func defaultConfig() Config {
	var cfg Config
	cfg.API.URL = "https://api.openai.com/v1"
	cfg.Model = ModelConfig{Name: "gpt-4o-mini", Temperature: 0.7, Stream: true}
	cfg.Logging.Level = "info"
	cfg.UI.ShowTimestamps = true
	return cfg
}

View File

@@ -0,0 +1,97 @@
package config
import (
"os"
"path/filepath"
"testing"
)
// TestLoad_DefaultConfigWithEnvOverrides checks that environment variables
// override the built-in defaults when no explicit path is given.
// NOTE(review): Load("") also reads ./config.yaml when one exists, so this
// test assumes no config.yaml is present in the test working directory —
// confirm, or isolate the working directory.
func TestLoad_DefaultConfigWithEnvOverrides(t *testing.T) {
	t.Setenv(envAPIKey, "test-key")
	t.Setenv(envAPIURL, "https://example.com")
	cfg, err := Load("")
	if err != nil {
		t.Fatalf("Load returned error: %v", err)
	}
	if cfg.API.URL != "https://example.com" {
		t.Fatalf("expected API URL override, got %q", cfg.API.URL)
	}
	if cfg.API.Key != "test-key" {
		t.Fatalf("expected API key override, got %q", cfg.API.Key)
	}
	if cfg.Model.Name != "gpt-4o-mini" {
		t.Fatalf("expected default model name, got %q", cfg.Model.Name)
	}
}
// TestLoad_FromFile verifies that an explicit YAML file fully populates the
// configuration, with environment overrides neutralized.
func TestLoad_FromFile(t *testing.T) {
	t.Setenv(envAPIKey, "")
	t.Setenv(envAPIURL, "")
	path := filepath.Join(t.TempDir(), "config.yaml")
	raw := []byte("api:\n url: https://api.test/v1\n key: test-token\nmodel:\n name: gpt-test\n temperature: 0.5\n stream: false\n")
	if err := os.WriteFile(path, raw, 0o600); err != nil {
		t.Fatalf("failed to write config file: %v", err)
	}
	cfg, err := Load(path)
	if err != nil {
		t.Fatalf("Load returned error: %v", err)
	}
	if got, want := cfg.API.URL, "https://api.test/v1"; got != want {
		t.Errorf("expected API URL %q, got %q", want, got)
	}
	if got, want := cfg.API.Key, "test-token"; got != want {
		t.Errorf("expected API key %q, got %q", want, got)
	}
	if got, want := cfg.Model.Name, "gpt-test"; got != want {
		t.Errorf("expected model name %q, got %q", want, got)
	}
	if cfg.Model.Temperature != 0.5 {
		t.Errorf("expected temperature 0.5, got %f", cfg.Model.Temperature)
	}
	if cfg.Model.Stream != false {
		t.Errorf("expected stream false, got %t", cfg.Model.Stream)
	}
}
// TestLoad_InvalidTemperature verifies that a temperature outside [0, 2]
// fails validation.
func TestLoad_InvalidTemperature(t *testing.T) {
	path := filepath.Join(t.TempDir(), "config.yaml")
	raw := []byte("api:\n url: https://api.test/v1\n key: test-token\nmodel:\n name: gpt-test\n temperature: 5\n")
	if err := os.WriteFile(path, raw, 0o600); err != nil {
		t.Fatalf("failed to write config file: %v", err)
	}
	if _, err := Load(path); err == nil {
		t.Fatal("expected error for invalid temperature, got none")
	}
}
// TestLoad_MissingAPIKey verifies that a config without an API key (and with
// no environment fallback) is rejected by validation.
func TestLoad_MissingAPIKey(t *testing.T) {
	// Ensure no environment fallback is present.
	t.Setenv(envAPIKey, "")
	t.Setenv(envAPIURL, "")
	path := filepath.Join(t.TempDir(), "config.yaml")
	raw := []byte("api:\n url: https://api.test/v1\nmodel:\n name: gpt-test\n temperature: 0.5\n")
	if err := os.WriteFile(path, raw, 0o600); err != nil {
		t.Fatalf("failed to write config file: %v", err)
	}
	if _, err := Load(path); err == nil {
		t.Fatal("expected error for missing API key, got none")
	}
}

110
internal/openai/client.go Normal file
View File

@@ -0,0 +1,110 @@
package openai
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"strings"
"time"
)
// defaultTimeout bounds every HTTP request issued by the default client.
const defaultTimeout = 30 * time.Second

// Client wraps HTTP access to the OpenAI-compatible Chat Completions API.
type Client struct {
	apiKey     string       // bearer token sent in the Authorization header
	baseURL    string       // API root used as a prefix for request paths
	httpClient *http.Client // transport; defaults to a client with defaultTimeout
}

// ClientOption customizes client construction.
type ClientOption func(*Client)

// WithHTTPClient overrides the default HTTP client.
func WithHTTPClient(hc *http.Client) ClientOption {
	return func(c *Client) {
		c.httpClient = hc
	}
}

// WithBaseURL overrides the default base URL. The value is stored verbatim
// when this option runs.
func WithBaseURL(url string) ClientOption {
	return func(c *Client) {
		c.baseURL = url
	}
}
// NewClient creates a Client with the provided API key and options.
//
// After options are applied the base URL is normalized by stripping any
// trailing slash, so WithBaseURL("https://host/v1/") no longer produces
// requests to "https://host/v1//chat/completions". A nil *http.Client
// injected via WithHTTPClient falls back to the default client instead of
// panicking on the first request.
func NewClient(apiKey string, opts ...ClientOption) (*Client, error) {
	apiKey = strings.TrimSpace(apiKey)
	if apiKey == "" {
		return nil, errors.New("api key cannot be empty")
	}
	client := &Client{
		apiKey:  apiKey,
		baseURL: "https://api.openai.com/v1",
		httpClient: &http.Client{
			Timeout: defaultTimeout,
		},
	}
	for _, opt := range opts {
		opt(client)
	}
	// Normalize after the options so WithBaseURL values are covered too.
	client.baseURL = strings.TrimRight(strings.TrimSpace(client.baseURL), "/")
	if client.httpClient == nil {
		client.httpClient = &http.Client{Timeout: defaultTimeout}
	}
	return client, nil
}
// CreateChatCompletion issues a chat completion request, decoding the JSON
// body on 2xx responses and converting any other status into an error.
func (c *Client) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (*ChatCompletionResponse, error) {
	if c == nil {
		return nil, errors.New("client is nil")
	}
	body, err := json.Marshal(req)
	if err != nil {
		return nil, fmt.Errorf("encode request: %w", err)
	}
	endpoint := c.baseURL + "/chat/completions"
	httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("create request: %w", err)
	}
	httpReq.Header.Set("Authorization", "Bearer "+c.apiKey)
	httpReq.Header.Set("Content-Type", "application/json")
	resp, err := c.httpClient.Do(httpReq)
	if err != nil {
		return nil, fmt.Errorf("execute request: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, decodeError(resp.Body, resp.StatusCode)
	}
	return decodeSuccess(resp.Body)
}
// decodeSuccess parses a successful completion payload from r.
func decodeSuccess(r io.Reader) (*ChatCompletionResponse, error) {
	out := new(ChatCompletionResponse)
	if err := json.NewDecoder(r).Decode(out); err != nil {
		return nil, fmt.Errorf("decode response: %w", err)
	}
	return out, nil
}
// decodeError converts a non-2xx response body into an error.
//
// It first attempts the standard OpenAI error envelope; when the body is
// not valid JSON (e.g. an HTML gateway page) or carries an empty message,
// a trimmed snippet of the raw body is reported instead, so the caller
// still sees what the server actually returned. Previously non-JSON bodies
// produced only "failed to decode body" and the body text was lost.
func decodeError(r io.Reader, status int) error {
	const maxErrBody = 4 << 10 // cap how much of an error body is retained
	raw, readErr := io.ReadAll(io.LimitReader(r, maxErrBody))
	if readErr != nil {
		return fmt.Errorf("api error (status %d): failed to read body: %w", status, readErr)
	}
	var apiErr ErrorResponse
	if err := json.Unmarshal(raw, &apiErr); err == nil && apiErr.Error.Message != "" {
		return fmt.Errorf("api error (status %d): %s", status, apiErr.Error.Message)
	}
	return fmt.Errorf("api error (status %d): %s", status, strings.TrimSpace(string(raw)))
}

View File

@@ -0,0 +1,93 @@
package openai
import (
"context"
"encoding/json"
"net/http"
"net/http/httptest"
"testing"
"time"
)
// TestNewClient_EmptyKey verifies that a whitespace-only key is rejected.
func TestNewClient_EmptyKey(t *testing.T) {
	_, err := NewClient(" ")
	if err == nil {
		t.Fatal("expected error for empty API key")
	}
}
// TestCreateChatCompletion_Success verifies request headers, the request
// path, and decoding of a well-formed completion response.
func TestCreateChatCompletion_Success(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// The handler runs on the server's goroutine, where t.Fatalf is
		// invalid (FailNow must be called from the test goroutine per the
		// testing package docs); t.Errorf is safe from other goroutines.
		if got, want := r.Header.Get("Authorization"), "Bearer test-key"; got != want {
			t.Errorf("expected auth header %q, got %q", want, got)
		}
		if got, want := r.URL.Path, "/chat/completions"; got != want {
			t.Errorf("expected path %q, got %q", want, got)
		}
		response := ChatCompletionResponse{
			ID:     "chatcmpl-1",
			Object: "chat.completion",
			Choices: []ChatCompletionChoice{
				{
					Index: 0,
					Message: ChatMessage{
						Role:    "assistant",
						Content: "Hello!",
					},
					FinishReason: "stop",
				},
			},
			Usage: Usage{PromptTokens: 1, CompletionTokens: 1, TotalTokens: 2},
		}
		w.Header().Set("Content-Type", "application/json")
		if err := json.NewEncoder(w).Encode(response); err != nil {
			t.Errorf("failed to encode response: %v", err)
		}
	}))
	defer ts.Close()
	client, err := NewClient("test-key", WithBaseURL(ts.URL), WithHTTPClient(ts.Client()))
	if err != nil {
		t.Fatalf("NewClient returned error: %v", err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	resp, err := client.CreateChatCompletion(ctx, ChatCompletionRequest{
		Model: "gpt-test",
		Messages: []ChatMessage{
			{Role: "user", Content: "Hello?"},
		},
	})
	if err != nil {
		t.Fatalf("CreateChatCompletion returned error: %v", err)
	}
	if resp.Choices[0].Message.Content != "Hello!" {
		t.Fatalf("unexpected response content: %q", resp.Choices[0].Message.Content)
	}
}
// TestCreateChatCompletion_Error verifies that an unauthorized response
// surfaces as an error from the client.
func TestCreateChatCompletion_Error(t *testing.T) {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusUnauthorized)
		_ = json.NewEncoder(w).Encode(ErrorResponse{Error: APIError{Message: "invalid"}})
	}))
	defer srv.Close()
	c, err := NewClient("test-key", WithBaseURL(srv.URL), WithHTTPClient(srv.Client()))
	if err != nil {
		t.Fatalf("NewClient returned error: %v", err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	if _, err := c.CreateChatCompletion(ctx, ChatCompletionRequest{Model: "gpt-test"}); err == nil {
		t.Fatal("expected error for unauthorized response")
	}
}

51
internal/openai/types.go Normal file
View File

@@ -0,0 +1,51 @@
package openai
// ChatMessage represents a single message within a chat completion request or response.
type ChatMessage struct {
	Role    string `json:"role"`    // e.g. "user" or "assistant" as used elsewhere in this package
	Content string `json:"content"` // message text
}

// ChatCompletionRequest encapsulates the payload for the OpenAI Chat Completions API.
type ChatCompletionRequest struct {
	Model       string        `json:"model"`
	Messages    []ChatMessage `json:"messages"`
	MaxTokens   *int          `json:"max_tokens,omitempty"`  // nil omits the field from the payload
	Temperature *float64      `json:"temperature,omitempty"` // nil omits the field from the payload
	Stream      bool          `json:"stream,omitempty"`
}

// ChatCompletionChoice captures an individual response choice returned from the API.
type ChatCompletionChoice struct {
	Index        int         `json:"index"`
	Message      ChatMessage `json:"message"`
	FinishReason string      `json:"finish_reason"`
}

// Usage captures token accounting for a chat completion call.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

// ChatCompletionResponse represents the top-level response payload from the API.
type ChatCompletionResponse struct {
	ID      string                 `json:"id"`
	Object  string                 `json:"object"`
	Choices []ChatCompletionChoice `json:"choices"`
	Usage   Usage                  `json:"usage"`
}

// APIError captures structured error responses returned by the API.
type APIError struct {
	Message string `json:"message"`
	Type    string `json:"type"`
	Param   string `json:"param"`
	Code    any    `json:"code"` // the API may return a string or a number here
}

// ErrorResponse is returned on non-2xx responses.
type ErrorResponse struct {
	Error APIError `json:"error"`
}