From 7d4d56671fec81930380b791133a9825b6d87129 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Stig-=C3=98rjan=20Smelror?=
Date: Wed, 1 Oct 2025 22:21:44 +0200
Subject: [PATCH] Expose version/build metadata and improve provider error messaging

Thread a version string and build stamp from cmd/goaichat/main.go into the
app so they appear in the UI header; the package-level version and buildStamp
variables default to "dev" and "" and are meant to be overridden at link time
(e.g. via -ldflags "-X main.version=... -X main.buildStamp=...").
Translate provider HTTP errors into actionable guidance and, when a streaming
request fails with a 4xx status, fall back to a buffered completion and
surface a one-time notice in the status line.
---
 cmd/goaichat/main.go      |  12 ++++-
 internal/app/app.go       |  45 +++++++++++++---
 internal/chat/service.go  |  93 ++++++++++++++++++++++++++++++--
 internal/openai/client.go | 108 ++++++++++++++++++++++++++++++++------
 internal/openai/types.go  |  47 +++++++++++++++++
 5 files changed, 278 insertions(+), 27 deletions(-)

diff --git a/cmd/goaichat/main.go b/cmd/goaichat/main.go
index 6ae7b28..9caa8d6 100644
--- a/cmd/goaichat/main.go
+++ b/cmd/goaichat/main.go
@@ -10,6 +10,11 @@ import (
 	"github.com/stig/goaichat/internal/config"
 )
 
+var (
+	version    = "dev"
+	buildStamp = ""
+)
+
 func main() {
 	var configPath string
 	flag.StringVar(&configPath, "config", "", "Path to configuration file")
@@ -26,7 +31,12 @@ func main() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	application := app.New(logger, cfg)
+	application := app.New(
+		logger,
+		cfg,
+		app.WithVersion(version),
+		app.WithBuild(buildStamp),
+	)
 	if err := application.Run(ctx); err != nil {
 		logger.Error("application terminated with error", "error", err)
 		os.Exit(1)
diff --git a/internal/app/app.go b/internal/app/app.go
index c2113bf..8a3d856 100644
--- a/internal/app/app.go
+++ b/internal/app/app.go
@@ -27,16 +27,19 @@ type App struct {
 	input  io.Reader
 	output io.Writer
 	status string
+	version string
+	build   string
 
 	streamBuffer strings.Builder
 }
 
 // New constructs a new App instance.
 func New(logger *slog.Logger, cfg *config.Config, opts ...Option) *App {
 	app := &App{
-		logger: logger,
-		config: cfg,
-		input:  os.Stdin,
-		output: os.Stdout,
+		logger:  logger,
+		config:  cfg,
+		input:   os.Stdin,
+		output:  os.Stdout,
+		version: "dev",
 	}
 	for _, opt := range opts {
@@ -64,6 +67,20 @@ func WithIO(in io.Reader, out io.Writer) Option {
 	}
 }
 
+// WithVersion sets the application version string presented in the UI header.
+func WithVersion(version string) Option {
+	return func(a *App) {
+		a.version = strings.TrimSpace(version)
+	}
+}
+
+// WithBuild sets the application build identifier presented in the UI header.
+func WithBuild(build string) Option {
+	return func(a *App) {
+		a.build = strings.TrimSpace(build)
+	}
+}
+
 // Run starts the application lifecycle.
 func (a *App) Run(ctx context.Context) error {
 	if a == nil {
@@ -201,7 +218,15 @@ func (a *App) runCLILoop(ctx context.Context) error {
 	}
 
 	a.clearStreamingContent()
-	a.setStatus("")
+	notice := ""
+	if a.chat != nil {
+		notice = a.chat.ConsumeStreamingNotice()
+	}
+	if strings.TrimSpace(notice) != "" {
+		a.setStatus("%s", notice)
+	} else {
+		a.setStatus("")
+	}
 
 	if err := a.maybeSuggestSessionName(ctx); err != nil {
 		a.logger.WarnContext(ctx, "session name suggestion failed", "error", err)
@@ -386,7 +411,15 @@ func (a *App) renderUI() error {
 		sessionName = a.chat.SessionName()
 	}
 
-	title := fmt.Sprintf("goaichat - %s", sessionName)
+	name := "goaichat"
+	if v := strings.TrimSpace(a.version); v != "" {
+		name = fmt.Sprintf("%s v%s", name, v)
+	}
+	if b := strings.TrimSpace(a.build); b != "" {
+		name = fmt.Sprintf("%s (build %s)", name, b)
+	}
+
+	title := fmt.Sprintf("%s - %s", name, sessionName)
 	underline := strings.Repeat("=", len(title))
 	if _, err := fmt.Fprintf(a.output, "%s\n%s\n\n", title, underline); err != nil {
 		return err
diff --git a/internal/chat/service.go b/internal/chat/service.go
index dc7e11a..01da47c 100644
--- a/internal/chat/service.go
+++ b/internal/chat/service.go
@@ -37,6 +37,7 @@ type Service struct {
 	model       string
 	temperature float64
 	stream      bool
+	streamNotice string
 	history     []openai.ChatMessage
 	sessionID   int64
 	summarySet  bool
@@ -92,6 +93,8 @@ func (s *Service) Send(ctx context.Context, input string, streamHandler openai.C
 	messages := append([]openai.ChatMessage(nil), s.history...)
 	temperature := s.temperature
 
+	s.streamNotice = ""
+
 	req := openai.ChatCompletionRequest{
 		Model:       s.model,
 		Messages:    messages,
@@ -105,11 +108,17 @@ func (s *Service) Send(ctx context.Context, input string, streamHandler openai.C
 	var err error
 	if s.stream {
 		resp, err = s.client.StreamChatCompletion(ctx, req, streamHandler)
+		if err != nil {
+			resp, err = s.handleStreamingFailure(ctx, req, err)
+			if err != nil {
+				return "", s.translateProviderError(err)
+			}
+		}
 	} else {
 		resp, err = s.client.CreateChatCompletion(ctx, req)
-	}
-	if err != nil {
-		return "", err
+		if err != nil {
+			return "", s.translateProviderError(err)
+		}
 	}
 	if len(resp.Choices) == 0 {
 		return "", errors.New("no choices returned from completion")
@@ -135,6 +144,16 @@ func (s *Service) History() []openai.ChatMessage {
 	return historyCopy
 }
 
+// ConsumeStreamingNotice returns any pending streaming notice and clears it.
+func (s *Service) ConsumeStreamingNotice() string {
+	if s == nil {
+		return ""
+	}
+	notice := s.streamNotice
+	s.streamNotice = ""
+	return notice
+}
+
 // StreamingEnabled reports whether streaming completions are configured for this service.
 func (s *Service) StreamingEnabled() bool {
 	if s == nil {
@@ -143,6 +162,74 @@ func (s *Service) StreamingEnabled() bool {
 	return s.stream
 }
 
+func (s *Service) translateProviderError(err error) error {
+	var reqErr *openai.RequestError
+	if !errors.As(err, &reqErr) {
+		return err
+	}
+
+	if guidance, ok := providerStatusGuidance(reqErr.StatusCode()); ok {
+		return errors.New(guidance)
+	}
+
+	return err
+}
+
+func (s *Service) handleStreamingFailure(ctx context.Context, req openai.ChatCompletionRequest, streamErr error) (*openai.ChatCompletionResponse, error) {
+	if s == nil {
+		return nil, streamErr
+	}
+
+	var reqErr *openai.RequestError
+	if !errors.As(streamErr, &reqErr) {
+		return nil, streamErr
+	}
+
+	status := reqErr.StatusCode()
+	if status < 400 || status >= 500 {
+		return nil, streamErr
+	}
+
+	guidance, hasGuidance := providerStatusGuidance(status)
+	message := guidance
+	if !hasGuidance {
+		message = strings.TrimSpace(reqErr.Message())
+		if message == "" {
+			message = strings.TrimSpace(streamErr.Error())
+		}
+		if message == "" {
+			message = "Streaming is unavailable"
+		}
+	}
+	message = fmt.Sprintf("%s\nStreaming has been disabled; responses will be fully buffered.", message)
+
+	s.logger.WarnContext(ctx, "streaming disabled", "status", status, "error", strings.TrimSpace(reqErr.Message()))
+	s.stream = false
+	s.streamNotice = message
+	req.Stream = false
+
+	resp, err := s.client.CreateChatCompletion(ctx, req)
+	if err != nil {
+		return nil, s.translateProviderError(err)
+	}
+	return resp, nil
+}
+
+func providerStatusGuidance(status int) (string, bool) {
+	switch status {
+	case 401:
+		return "Incorrect API key provided.\nVerify API key, clear browser cache, or generate a new key.", true
+	case 429:
+		return "Rate limit reached.\nPace requests and implement exponential backoff.", true
+	case 500:
+		return "Server error.\nRetry after a brief wait; contact support if persistent.", true
+	case 503:
+		return "Engine overloaded.\nRetry request after a brief wait; contact support if persistent.", true
+	default:
+		return "", false
+	}
+}
+
 // Reset clears the in-memory conversation history.
 func (s *Service) Reset() {
 	if s == nil {
diff --git a/internal/openai/client.go b/internal/openai/client.go
index b6c8883..4d50f32 100644
--- a/internal/openai/client.go
+++ b/internal/openai/client.go
@@ -22,6 +22,55 @@ type Client struct {
 	httpClient *http.Client
 }
 
+type contentPart struct {
+	Type string `json:"type"`
+	Text string `json:"text"`
+}
+
+func extractRoleAndContent(raw json.RawMessage) (string, string) {
+	if len(raw) == 0 || string(raw) == "null" {
+		return "", ""
+	}
+
+	var envelope map[string]json.RawMessage
+	if err := json.Unmarshal(raw, &envelope); err != nil {
+		return "", ""
+	}
+
+	var role string
+	if value, ok := envelope["role"]; ok {
+		_ = json.Unmarshal(value, &role)
+	}
+
+	value, ok := envelope["content"]
+	if !ok {
+		return role, ""
+	}
+
+	var text string
+	if err := json.Unmarshal(value, &text); err == nil {
+		return role, text
+	}
+
+	var parts []contentPart
+	if err := json.Unmarshal(value, &parts); err == nil {
+		var builder strings.Builder
+		for _, part := range parts {
+			if part.Text != "" {
+				builder.WriteString(part.Text)
+			}
+		}
+		return role, builder.String()
+	}
+
+	var single contentPart
+	if err := json.Unmarshal(value, &single); err == nil {
+		return role, single.Text
+	}
+
+	return role, ""
+}
+
 // ClientOption customizes client construction.
 type ClientOption func(*Client)
 
@@ -131,10 +180,10 @@ func (c *Client) StreamChatCompletion(ctx context.Context, req ChatCompletionReq
 		ID      string `json:"id"`
 		Object  string `json:"object"`
 		Choices []struct {
-			Index        int         `json:"index"`
-			Message      ChatMessage `json:"message"`
-			Delta        ChatMessage `json:"delta"`
-			FinishReason string      `json:"finish_reason"`
+			Index        int             `json:"index"`
+			Message      json.RawMessage `json:"message"`
+			Delta        json.RawMessage `json:"delta"`
+			FinishReason string          `json:"finish_reason"`
 		} `json:"choices"`
 		Usage Usage `json:"usage"`
 	}
@@ -145,6 +194,7 @@ func (c *Client) StreamChatCompletion(ctx context.Context, req ChatCompletionReq
 	finish := ""
 	var usage Usage
 	usageReceived := false
+	var lastMessageText string
 
 	for scanner.Scan() {
 		line := scanner.Text()
@@ -190,19 +240,22 @@ func (c *Client) StreamChatCompletion(ctx context.Context, req ChatCompletionReq
 		finishReason := ""
 		if len(chunk.Choices) > 0 {
 			choice := chunk.Choices[0]
-			if choice.Message.Role != "" {
-				role = choice.Message.Role
+			choiceRole, choiceContent := extractRoleAndContent(choice.Message)
+			if choiceRole != "" {
+				role = choiceRole
 			}
-			if choice.Delta.Role != "" {
-				role = choice.Delta.Role
+			deltaRole, deltaContent := extractRoleAndContent(choice.Delta)
+			if deltaRole != "" {
+				role = deltaRole
 			}
-			if choice.Delta.Content != "" {
-				chunkText = choice.Delta.Content
-			} else if choice.Message.Content != "" && builder.Len() == 0 {
-				chunkText = choice.Message.Content
+			if deltaContent != "" {
+				chunkText = deltaContent
 			}
-			if choice.Message.Content != "" && builder.Len() == 0 && chunkText == "" {
-				chunkText = choice.Message.Content
+			if chunkText == "" && builder.Len() == 0 && choiceContent != "" {
+				chunkText = choiceContent
+			}
+			if choiceContent != "" {
+				lastMessageText = choiceContent
 			}
 			if choice.FinishReason != "" {
 				finishReason = choice.FinishReason
@@ -237,7 +290,23 @@ func (c *Client) StreamChatCompletion(ctx context.Context, req ChatCompletionReq
 
 	content := strings.TrimSpace(builder.String())
 	if content == "" {
-		return nil, errors.New("stream response contained no content")
+		if trimmed := strings.TrimSpace(lastMessageText); trimmed != "" {
+			content = trimmed
+		}
+	}
+	if content == "" {
+		aggregated.Choices = []ChatCompletionChoice{{
+			Index: 0,
+			Message: ChatMessage{
+				Role:    role,
+				Content: "",
+			},
+			FinishReason: finish,
+		}}
+		if usageReceived {
+			aggregated.Usage = usage
+		}
+		return &aggregated, nil
 	}
 
 	aggregated.Choices = []ChatCompletionChoice{{
@@ -280,8 +349,13 @@ func decodeSuccess(r io.Reader) (*ChatCompletionResponse, error) {
 func decodeError(r io.Reader, status int) error {
 	var apiErr ErrorResponse
 	if err := json.NewDecoder(r).Decode(&apiErr); err != nil {
-		return fmt.Errorf("api error (status %d): failed to decode body: %w", status, err)
+		return &RequestError{
+			Status: status,
+			Response: ErrorResponse{
+				Error: APIError{Message: fmt.Sprintf("failed to decode error body: %v", err)},
+			},
+		}
 	}
 
-	return fmt.Errorf("api error (status %d): %s", status, apiErr.Error.Message)
+	return &RequestError{Status: status, Response: apiErr}
 }
diff --git a/internal/openai/types.go b/internal/openai/types.go
index 9e4ccf2..7981e24 100644
--- a/internal/openai/types.go
+++ b/internal/openai/types.go
@@ -1,5 +1,10 @@
 package openai
 
+import (
+	"fmt"
+	"strings"
+)
+
 // ChatMessage represents a single message within a chat completion request or response.
 type ChatMessage struct {
 	Role    string `json:"role"`
@@ -50,6 +55,48 @@ type ChatCompletionStreamEvent struct {
 // ChatCompletionStreamHandler consumes streaming completion events.
 type ChatCompletionStreamHandler func(ChatCompletionStreamEvent) error
 
+// RequestError captures an error response returned by the API together with the HTTP status code.
+type RequestError struct {
+	Status   int
+	Response ErrorResponse
+}
+
+// Error implements the error interface.
+func (e *RequestError) Error() string {
+	if e == nil {
+		return ""
+	}
+	msg := strings.TrimSpace(e.Response.Error.Message)
+	if msg == "" {
+		return fmt.Sprintf("api error (status %d)", e.Status)
+	}
+	return fmt.Sprintf("api error (status %d): %s", e.Status, msg)
+}
+
+// StatusCode returns the originating HTTP status code.
+func (e *RequestError) StatusCode() int {
+	if e == nil {
+		return 0
+	}
+	return e.Status
+}
+
+// Message returns the raw message provided by the API, if any.
+func (e *RequestError) Message() string {
+	if e == nil {
+		return ""
+	}
+	return e.Response.Error.Message
+}
+
+// Type returns the OpenAI error type string, when present.
+func (e *RequestError) Type() string {
+	if e == nil {
+		return ""
+	}
+	return e.Response.Error.Type
+}
+
 // APIError captures structured error responses returned by the API.
 type APIError struct {
 	Message string `json:"message"`