From 918b6955e4f3d6c031bb739c67f742125d1be38c Mon Sep 17 00:00:00 2001
From: Muhammad Zahid Masruri
Date: Mon, 9 Feb 2026 23:49:15 +0700
Subject: [PATCH] fix(amp): rewrite model name in response.model for Responses
 API SSE events

The ResponseRewriter's modelFieldPaths was missing 'response.model',
causing the mapped model name to leak through SSE streaming events
(response.created, response.in_progress, response.completed) in the
OpenAI Responses API (/v1/responses).

This caused Amp CLI to report 'Unknown OpenAI model' errors when model
mapping was active (e.g., gpt-5.2-codex -> gpt-5.3-codex), because the
mapped name reached Amp's backend via telemetry.

Also sorted modelFieldPaths alphabetically per review feedback and
added regression tests for all rewrite paths.

Fixes #1463
---
 internal/api/modules/amp/response_rewriter.go  |   2 +-
 .../api/modules/amp/response_rewriter_test.go  | 110 ++++++++++++++++++
 2 files changed, 111 insertions(+), 1 deletion(-)
 create mode 100644 internal/api/modules/amp/response_rewriter_test.go

diff --git a/internal/api/modules/amp/response_rewriter.go b/internal/api/modules/amp/response_rewriter.go
index 57e4922a..715034f1 100644
--- a/internal/api/modules/amp/response_rewriter.go
+++ b/internal/api/modules/amp/response_rewriter.go
@@ -66,7 +66,7 @@ func (rw *ResponseRewriter) Flush() {
 }
 
 // modelFieldPaths lists all JSON paths where model name may appear
-var modelFieldPaths = []string{"model", "modelVersion", "response.modelVersion", "message.model"}
+var modelFieldPaths = []string{"message.model", "model", "modelVersion", "response.model", "response.modelVersion"}
 
 // rewriteModelInResponse replaces all occurrences of the mapped model with the original model in JSON
 // It also suppresses "thinking" blocks if "tool_use" is present to ensure Amp client compatibility
diff --git a/internal/api/modules/amp/response_rewriter_test.go b/internal/api/modules/amp/response_rewriter_test.go
new file mode 100644
index 00000000..114a9516
--- /dev/null
+++ b/internal/api/modules/amp/response_rewriter_test.go
@@ -0,0 +1,110 @@
+package amp
+
+import (
+	"testing"
+)
+
+func TestRewriteModelInResponse_TopLevel(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "gpt-5.2-codex"}
+
+	input := []byte(`{"id":"resp_1","model":"gpt-5.3-codex","output":[]}`)
+	result := rw.rewriteModelInResponse(input)
+
+	expected := `{"id":"resp_1","model":"gpt-5.2-codex","output":[]}`
+	if string(result) != expected {
+		t.Errorf("expected %s, got %s", expected, string(result))
+	}
+}
+
+func TestRewriteModelInResponse_ResponseModel(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "gpt-5.2-codex"}
+
+	input := []byte(`{"type":"response.completed","response":{"id":"resp_1","model":"gpt-5.3-codex","status":"completed"}}`)
+	result := rw.rewriteModelInResponse(input)
+
+	expected := `{"type":"response.completed","response":{"id":"resp_1","model":"gpt-5.2-codex","status":"completed"}}`
+	if string(result) != expected {
+		t.Errorf("expected %s, got %s", expected, string(result))
+	}
+}
+
+func TestRewriteModelInResponse_ResponseCreated(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "gpt-5.2-codex"}
+
+	input := []byte(`{"type":"response.created","response":{"id":"resp_1","model":"gpt-5.3-codex","status":"in_progress"}}`)
+	result := rw.rewriteModelInResponse(input)
+
+	expected := `{"type":"response.created","response":{"id":"resp_1","model":"gpt-5.2-codex","status":"in_progress"}}`
+	if string(result) != expected {
+		t.Errorf("expected %s, got %s", expected, string(result))
+	}
+}
+
+func TestRewriteModelInResponse_NoModelField(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "gpt-5.2-codex"}
+
+	input := []byte(`{"type":"response.output_item.added","item":{"id":"item_1","type":"message"}}`)
+	result := rw.rewriteModelInResponse(input)
+
+	if string(result) != string(input) {
+		t.Errorf("expected no modification, got %s", string(result))
+	}
+}
+
+func TestRewriteModelInResponse_EmptyOriginalModel(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: ""}
+
+	input := []byte(`{"model":"gpt-5.3-codex"}`)
+	result := rw.rewriteModelInResponse(input)
+
+	if string(result) != string(input) {
+		t.Errorf("expected no modification when originalModel is empty, got %s", string(result))
+	}
+}
+
+func TestRewriteStreamChunk_SSEWithResponseModel(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "gpt-5.2-codex"}
+
+	chunk := []byte("data: {\"type\":\"response.completed\",\"response\":{\"id\":\"resp_1\",\"model\":\"gpt-5.3-codex\",\"status\":\"completed\"}}\n\n")
+	result := rw.rewriteStreamChunk(chunk)
+
+	expected := "data: {\"type\":\"response.completed\",\"response\":{\"id\":\"resp_1\",\"model\":\"gpt-5.2-codex\",\"status\":\"completed\"}}\n\n"
+	if string(result) != expected {
+		t.Errorf("expected %s, got %s", expected, string(result))
+	}
+}
+
+func TestRewriteStreamChunk_MultipleEvents(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "gpt-5.2-codex"}
+
+	chunk := []byte("data: {\"type\":\"response.created\",\"response\":{\"model\":\"gpt-5.3-codex\"}}\n\ndata: {\"type\":\"response.output_item.added\",\"item\":{\"id\":\"item_1\"}}\n\n")
+	result := rw.rewriteStreamChunk(chunk)
+
+	if string(result) == string(chunk) {
+		t.Error("expected response.model to be rewritten in SSE stream")
+	}
+	if !contains(result, []byte(`"model":"gpt-5.2-codex"`)) {
+		t.Errorf("expected rewritten model in output, got %s", string(result))
+	}
+}
+
+func TestRewriteStreamChunk_MessageModel(t *testing.T) {
+	rw := &ResponseRewriter{originalModel: "claude-opus-4.5"}
+
+	chunk := []byte("data: {\"message\":{\"model\":\"claude-sonnet-4\",\"role\":\"assistant\"}}\n\n")
+	result := rw.rewriteStreamChunk(chunk)
+
+	expected := "data: {\"message\":{\"model\":\"claude-opus-4.5\",\"role\":\"assistant\"}}\n\n"
+	if string(result) != expected {
+		t.Errorf("expected %s, got %s", expected, string(result))
+	}
+}
+
+func contains(data, substr []byte) bool {
+	for i := 0; i <= len(data)-len(substr); i++ {
+		if string(data[i:i+len(substr)]) == string(substr) {
+			return true
+		}
+	}
+	return false
+}
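For reviewers who want to see the effect of the new "response.model" entry in isolation, below is a minimal, standalone sketch of the kind of dotted-path overwrite that the path list describes. It uses only encoding/json and a hypothetical setPath helper; it is an illustration under those assumptions, not the project's ResponseRewriter implementation.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// setPath overwrites the value at a dotted path such as "response.model",
// but only when every intermediate key exists and holds a JSON object.
// Hypothetical helper for illustration; not part of the patched code.
func setPath(m map[string]any, path, value string) {
	keys := strings.Split(path, ".")
	for i, k := range keys {
		if i == len(keys)-1 {
			if _, ok := m[k]; ok {
				m[k] = value
			}
			return
		}
		next, ok := m[k].(map[string]any)
		if !ok {
			return
		}
		m = next
	}
}

func main() {
	// The alphabetically sorted path list from the patch above.
	paths := []string{"message.model", "model", "modelVersion", "response.model", "response.modelVersion"}

	// A response.created SSE payload carrying the mapped (upstream) model name.
	event := []byte(`{"type":"response.created","response":{"id":"resp_1","model":"gpt-5.3-codex"}}`)

	var m map[string]any
	if err := json.Unmarshal(event, &m); err != nil {
		panic(err)
	}
	for _, p := range paths {
		setPath(m, p, "gpt-5.2-codex") // restore the model name the client originally requested
	}
	out, _ := json.Marshal(m)
	fmt.Println(string(out)) // "response":{"id":"resp_1","model":"gpt-5.2-codex"},"type":"response.created"
}

Without "response.model" in the list, the nested model name in response.created, response.in_progress, and response.completed payloads would pass through unchanged, which is the leak described in the commit message.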