diff --git a/internal/proxy/transform/stream.go b/internal/proxy/transform/stream.go
index e71a17c..4052b75 100644
--- a/internal/proxy/transform/stream.go
+++ b/internal/proxy/transform/stream.go
@@ -1114,6 +1114,9 @@ func (st *StreamTransformer) transformResponsesAPIToOpenAIChat(r io.Reader, w io
 		headerSent bool
 	}
 	toolCalls := make(map[int]*toolMeta)
+	// Guard against duplicate role chunks when both response.created and
+	// response.in_progress arrive for the same stream.
+	roleSent := false
 
 	emitChunk := func(delta map[string]interface{}, finishReason interface{}) {
 		choice := map[string]interface{}{
@@ -1194,7 +1197,10 @@ func (st *StreamTransformer) transformResponsesAPIToOpenAIChat(r io.Reader, w io
 				model = m
 			}
 		}
-		emitChunk(map[string]interface{}{"role": "assistant", "content": ""}, nil)
+		if !roleSent {
+			roleSent = true
+			emitChunk(map[string]interface{}{"role": "assistant", "content": ""}, nil)
+		}
 
 	case "response.output_item.added":
 		// Capture function_call metadata so we can emit proper headers later.
diff --git a/internal/proxy/transform/stream_test.go b/internal/proxy/transform/stream_test.go
index c5613f2..dd17274 100644
--- a/internal/proxy/transform/stream_test.go
+++ b/internal/proxy/transform/stream_test.go
@@ -1279,3 +1279,38 @@ func TestTransformResponsesAPIToOpenAIChat_ToolCall(t *testing.T) {
 		t.Error("should emit [DONE] sentinel")
 	}
 }
+
+func TestTransformResponsesAPIToOpenAIChat_NoDuplicateRoleChunk(t *testing.T) {
+	// When both response.created and response.in_progress arrive, only one
+	// assistant role chunk should be emitted.
+	input := strings.Join([]string{
+		`event: response.created`,
+		`data: {"type":"response.created","response":{"id":"resp_dup1","status":"in_progress","model":"gpt-5","output":[]}}`,
+		``,
+		`event: response.in_progress`,
+		`data: {"type":"response.in_progress","response":{"id":"resp_dup1","status":"in_progress","model":"gpt-5","output":[]}}`,
+		``,
+		`event: response.output_text.delta`,
+		`data: {"type":"response.output_text.delta","output_index":0,"content_index":0,"delta":"Hi"}`,
+		``,
+		`event: response.completed`,
+		`data: {"type":"response.completed","response":{"id":"resp_dup1","status":"completed","model":"gpt-5","output":[]}}`,
+		``,
+	}, "\n")
+
+	st := &StreamTransformer{
+		ClientFormat:   FormatOpenAIChat,
+		ProviderFormat: FormatOpenAIResponses,
+	}
+	reader := st.TransformSSEStream(strings.NewReader(input))
+	output, err := io.ReadAll(reader)
+	if err != nil {
+		t.Fatalf("unexpected error: %v", err)
+	}
+
+	// Count how many times a role:assistant delta is emitted
+	roleCount := strings.Count(string(output), `"role":"assistant"`)
+	if roleCount != 1 {
+		t.Errorf("expected exactly 1 role:assistant chunk, got %d; output:\n%s", roleCount, string(output))
+	}
+}