From c8f20a66a8874bd024bfee2831e7746a66de1832 Mon Sep 17 00:00:00 2001
From: Luis Pater
Date: Fri, 7 Nov 2025 22:40:45 +0800
Subject: [PATCH] fix(executor): add logging and prompt cache key handling for OpenAI responses

---
 internal/runtime/executor/codex_executor.go | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/internal/runtime/executor/codex_executor.go b/internal/runtime/executor/codex_executor.go
index 7527db67..443e41e7 100644
--- a/internal/runtime/executor/codex_executor.go
+++ b/internal/runtime/executor/codex_executor.go
@@ -508,6 +508,11 @@ func (e *CodexExecutor) cacheHelper(ctx context.Context, from sdktranslator.Form
 				codexCacheMap[key] = cache
 			}
 		}
+	} else if from == "openai-response" {
+		promptCacheKey := gjson.GetBytes(req.Payload, "prompt_cache_key")
+		if promptCacheKey.Exists() {
+			cache.ID = promptCacheKey.String()
+		}
 	}
 	rawJSON, _ = sjson.SetBytes(rawJSON, "prompt_cache_key", cache.ID)
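
For readers outside the repository, here is a minimal standalone sketch of what the added branch does: when an openai-response payload already carries a prompt_cache_key, that value is preferred over the executor's own cache ID before being written back into the outgoing body via sjson. The payload contents, the fallback ID, and the variable names below are illustrative stand-ins, not the executor's real types.

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

func main() {
	// Illustrative openai-response request body that already carries a prompt_cache_key.
	payload := []byte(`{"model":"example-model","prompt_cache_key":"session-abc123"}`)

	// Stand-in for the cache ID the executor would otherwise use.
	cacheID := "locally-generated-id"

	// Mirrors the added branch: prefer a caller-supplied prompt_cache_key when present.
	if key := gjson.GetBytes(payload, "prompt_cache_key"); key.Exists() {
		cacheID = key.String()
	}

	// Mirrors the existing line after the hunk: write the resolved ID back into the outgoing JSON.
	rawJSON, err := sjson.SetBytes(payload, "prompt_cache_key", cacheID)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(rawJSON))
}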