diff --git a/internal/translator/codex/openai/chat-completions/codex_openai_response.go b/internal/translator/codex/openai/chat-completions/codex_openai_response.go
index 6d86c247..cdea33ee 100644
--- a/internal/translator/codex/openai/chat-completions/codex_openai_response.go
+++ b/internal/translator/codex/openai/chat-completions/codex_openai_response.go
@@ -90,6 +90,9 @@ func ConvertCodexResponseToOpenAI(_ context.Context, modelName string, originalR
 	if inputTokensResult := usageResult.Get("input_tokens"); inputTokensResult.Exists() {
 		template, _ = sjson.Set(template, "usage.prompt_tokens", inputTokensResult.Int())
 	}
+	if cachedTokensResult := usageResult.Get("input_tokens_details.cached_tokens"); cachedTokensResult.Exists() {
+		template, _ = sjson.Set(template, "usage.prompt_tokens_details.cached_tokens", cachedTokensResult.Int())
+	}
 	if reasoningTokensResult := usageResult.Get("output_tokens_details.reasoning_tokens"); reasoningTokensResult.Exists() {
 		template, _ = sjson.Set(template, "usage.completion_tokens_details.reasoning_tokens", reasoningTokensResult.Int())
 	}
@@ -205,6 +208,9 @@ func ConvertCodexResponseToOpenAINonStream(_ context.Context, _ string, original
 	if inputTokensResult := usageResult.Get("input_tokens"); inputTokensResult.Exists() {
 		template, _ = sjson.Set(template, "usage.prompt_tokens", inputTokensResult.Int())
 	}
+	if cachedTokensResult := usageResult.Get("input_tokens_details.cached_tokens"); cachedTokensResult.Exists() {
+		template, _ = sjson.Set(template, "usage.prompt_tokens_details.cached_tokens", cachedTokensResult.Int())
+	}
 	if reasoningTokensResult := usageResult.Get("output_tokens_details.reasoning_tokens"); reasoningTokensResult.Exists() {
 		template, _ = sjson.Set(template, "usage.completion_tokens_details.reasoning_tokens", reasoningTokensResult.Int())
 	}
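
Note: for readers unfamiliar with the gjson/sjson pattern used above, below is a minimal standalone sketch of the mapping this diff adds. The JSON payload and template literals here are illustrative assumptions, not values taken from the repository.

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// Hypothetical Codex-style response body; field names mirror those read in the diff.
const codexBody = `{"usage":{"input_tokens":120,"input_tokens_details":{"cached_tokens":64}}}`

func main() {
	// Illustrative OpenAI-style usage template that the translator fills in.
	template := `{"usage":{"prompt_tokens":0,"completion_tokens":0}}`

	usageResult := gjson.Get(codexBody, "usage")
	if inputTokensResult := usageResult.Get("input_tokens"); inputTokensResult.Exists() {
		template, _ = sjson.Set(template, "usage.prompt_tokens", inputTokensResult.Int())
	}
	if cachedTokensResult := usageResult.Get("input_tokens_details.cached_tokens"); cachedTokensResult.Exists() {
		// The new mapping: surface Codex cached input tokens as
		// usage.prompt_tokens_details.cached_tokens in the OpenAI shape.
		template, _ = sjson.Set(template, "usage.prompt_tokens_details.cached_tokens", cachedTokensResult.Int())
	}

	fmt.Println(template)
	// Prints roughly:
	// {"usage":{"prompt_tokens":120,"completion_tokens":0,"prompt_tokens_details":{"cached_tokens":64}}}
}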