Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 59 additions & 5 deletions internal/analysis/analyzer.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,38 @@ import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"strings"
	"time"
	"unicode"
	"unicode/utf8"

	"github.com/go-authgate/agent-scanner/internal/models"
	"github.com/go-authgate/agent-scanner/internal/tlsutil"
)

// clientError is a non-retryable HTTP error (4xx). It carries the
// response status code and a body snippet for diagnostics.
type clientError struct {
	StatusCode int
	Body       string
}

// Error implements the error interface.
func (ce *clientError) Error() string {
	return fmt.Sprintf("status %d: %s", ce.StatusCode, ce.Body)
}

// nonRetryableError wraps errors that should not be retried
// (e.g., request construction failures, JSON decode errors).
type nonRetryableError struct {
	err error
}

// Error reports the wrapped error's message.
func (ne *nonRetryableError) Error() string {
	return ne.err.Error()
}

// Unwrap exposes the wrapped error so errors.Is / errors.As can
// inspect the underlying cause.
func (ne *nonRetryableError) Unwrap() error {
	return ne.err
}

// Analyzer performs security analysis on scan results.
type Analyzer interface {
Analyze(ctx context.Context, results []models.ScanPathResult) ([]models.ScanPathResult, error)
Expand Down Expand Up @@ -113,14 +135,24 @@ func (a *remoteAnalyzer) analyzePathResult(
return fmt.Errorf("marshal request: %w", err)
}

// Retry with exponential backoff
// Retry with exponential backoff (only retry on 5xx / network errors)
var resp analysisResponse
maxRetries := 3
for attempt := range maxRetries {
err = a.doRequest(ctx, body, &resp)
if err == nil {
break
}
// Do not retry non-retryable errors (bad URL, JSON decode, etc.)
var nre *nonRetryableError
if errors.As(err, &nre) {
return fmt.Errorf("analysis API: %w", err)
}
// Do not retry client errors (4xx)
var ce *clientError
if errors.As(err, &ce) {
return fmt.Errorf("analysis API: %w", err)
}
if attempt < maxRetries-1 {
backoff := time.Duration(1<<uint(attempt)) * time.Second
slog.Debug("retrying analysis", "attempt", attempt+1, "backoff", backoff)
Expand Down Expand Up @@ -150,7 +182,7 @@ func (a *remoteAnalyzer) doRequest(ctx context.Context, body []byte, resp *analy
bytes.NewReader(body),
)
if err != nil {
return err
return &nonRetryableError{err: err}
}
req.Header.Set("Content-Type", "application/json")

Expand All @@ -161,9 +193,31 @@ func (a *remoteAnalyzer) doRequest(ctx context.Context, body []byte, resp *analy
defer httpResp.Body.Close()

if httpResp.StatusCode >= 400 {
respBody, _ := io.ReadAll(httpResp.Body)
return fmt.Errorf("status %d: %s", httpResp.StatusCode, string(respBody))
respBody, _ := io.ReadAll(io.LimitReader(httpResp.Body, 4096))
bodySnippet := sanitizeBodySnippet(string(respBody), 512)
if httpResp.StatusCode < 500 {
return &clientError{StatusCode: httpResp.StatusCode, Body: bodySnippet}
}
return fmt.Errorf("status %d: %s", httpResp.StatusCode, bodySnippet)
}

if err := json.NewDecoder(httpResp.Body).Decode(resp); err != nil {
return &nonRetryableError{err: fmt.Errorf("decode response: %w", err)}
}
return nil
}

return json.NewDecoder(httpResp.Body).Decode(resp)
// sanitizeBodySnippet prepares an HTTP response body for safe single-line
// logging: it truncates s to at most maxLen bytes, backing the cut up to a
// UTF-8 rune boundary so no multi-byte character is split, and replaces
// every Unicode control character with a space. When truncation occurs a
// " [truncated]" suffix is appended, so the returned string may be slightly
// longer than maxLen. A negative maxLen is treated as zero.
func sanitizeBodySnippet(s string, maxLen int) string {
	if maxLen < 0 {
		maxLen = 0 // guard: s[:maxLen] would panic on a negative index
	}
	if len(s) > maxLen {
		// Back up so the cut does not land inside a multi-byte rune,
		// which would leave invalid UTF-8 (rendered as U+FFFD) in logs.
		cut := maxLen
		for cut > 0 && !utf8.RuneStart(s[cut]) {
			cut--
		}
		s = s[:cut] + " [truncated]"
	}
	return strings.Map(func(r rune) rune {
		// Control characters (newlines, tabs, escapes, ...) would break
		// single-line log formatting; flatten them to spaces.
		if unicode.IsControl(r) {
			return ' '
		}
		return r
	}, s)
}
Loading
Loading