diff --git a/internal/translator/codex/openai/responses/codex_openai-responses_request.go b/internal/translator/codex/openai/responses/codex_openai-responses_request.go
index 3c868682..d9b3b722 100644
--- a/internal/translator/codex/openai/responses/codex_openai-responses_request.go
+++ b/internal/translator/codex/openai/responses/codex_openai-responses_request.go
@@ -17,6 +17,9 @@ func ConvertOpenAIResponsesRequestToCodex(modelName string, inputRawJSON []byte,
 	rawJSON, _ = sjson.SetBytes(rawJSON, "store", false)
 	rawJSON, _ = sjson.SetBytes(rawJSON, "parallel_tool_calls", true)
 	rawJSON, _ = sjson.SetBytes(rawJSON, "include", []string{"reasoning.encrypted_content"})
+	// Codex Responses rejects token limit fields, so strip them out before forwarding.
+	rawJSON, _ = sjson.DeleteBytes(rawJSON, "max_output_tokens")
+	rawJSON, _ = sjson.DeleteBytes(rawJSON, "max_completion_tokens")
 	rawJSON, _ = sjson.DeleteBytes(rawJSON, "temperature")
 	rawJSON, _ = sjson.DeleteBytes(rawJSON, "top_p")
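
A minimal standalone sketch of what the added lines do, assuming an illustrative inbound payload (the field values and model name below are made up, not from the diff): sjson.DeleteBytes removes the token limit keys from the raw JSON before it is forwarded.

```go
package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	// Hypothetical OpenAI Responses request body; values are illustrative only.
	raw := []byte(`{"model":"gpt-5","max_output_tokens":1024,"max_completion_tokens":1024,"input":"hi"}`)

	// Mirror the added translator lines: drop the token limit fields that the
	// Codex Responses endpoint is said to reject.
	for _, key := range []string{"max_output_tokens", "max_completion_tokens"} {
		raw, _ = sjson.DeleteBytes(raw, key)
	}

	fmt.Println(string(raw)) // {"model":"gpt-5","input":"hi"}
}
```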