fix(gateway): 剥离 Cursor raw body 透传路径中 Codex 不支持的 Responses API 参数
在前一个 commit 的 isResponsesShape 短路路径基础上,补充对 Cursor 云端
带过来的、Codex 上游统一不支持的顶层 Responses API 参数的剥离:
- prompt_cache_retention
- safety_identifier
- metadata
- stream_options
根因补充:这条 raw-body 透传路径为了保留 Cursor 的 input 数组整体结构,
不再经过 ChatCompletionsRequest 的反序列化过滤,所以这些 Go 结构体里
没有对应字段的参数会被原样发到上游,上游返回:
Unsupported parameter: <field>
常规 Chat Completions 转换路径天然通过 ChatCompletionsRequest 丢弃未知字段,
不受影响;此处仅在 isResponsesShape 分支内用 sjson.DeleteBytes 显式过滤,
作用域最小。剥离列表与 openai_gateway_service.go:2034 的
unsupportedFields 语义对齐。
另外在 applyCodexOAuthTransform 的 OAuth 兜底 strip 列表里同步追加
prompt_cache_retention,作为对该函数所有其他 OAuth 调用点的 defense
in depth(当前只有 Cursor 路径的短路已在前面剥过,但保留这一层更稳)。
测试:
- TestCursorMixedShape_StripsUnsupportedFields — 验证所有 4 个字段都被剥
- TestApplyCodexOAuthTransform_StripsPromptCacheRetention — OAuth 兜底路径
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -124,6 +124,14 @@ func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact
|
||||
"top_p",
|
||||
"frequency_penalty",
|
||||
"presence_penalty",
|
||||
// prompt_cache_retention is a newer Responses API parameter (cache TTL).
|
||||
// The ChatGPT internal Codex endpoint rejects it with
|
||||
// "Unsupported parameter: prompt_cache_retention". Defense-in-depth
|
||||
// for any OAuth path that reaches this transform — the Cursor
|
||||
// Responses-shape short-circuit in ForwardAsChatCompletions strips
|
||||
// it earlier too, but we keep this line so other OAuth callers are
|
||||
// equally protected.
|
||||
"prompt_cache_retention",
|
||||
} {
|
||||
if _, ok := reqBody[key]; ok {
|
||||
delete(reqBody, key)
|
||||
|
||||
@@ -481,6 +481,26 @@ func TestExtractSystemMessagesFromInput(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
// TestApplyCodexOAuthTransform_StripsPromptCacheRetention is a regression
|
||||
// test: some clients (e.g. Cursor cloud via the Responses-shape compat path)
|
||||
// send prompt_cache_retention, but the ChatGPT internal Codex endpoint
|
||||
// rejects it with "Unsupported parameter: prompt_cache_retention".
|
||||
func TestApplyCodexOAuthTransform_StripsPromptCacheRetention(t *testing.T) {
|
||||
reqBody := map[string]any{
|
||||
"model": "gpt-5.1",
|
||||
"prompt_cache_retention": "24h",
|
||||
"input": []any{
|
||||
map[string]any{"role": "user", "content": "hi"},
|
||||
},
|
||||
}
|
||||
|
||||
applyCodexOAuthTransform(reqBody, false, false)
|
||||
|
||||
_, stillThere := reqBody["prompt_cache_retention"]
|
||||
require.False(t, stillThere,
|
||||
"prompt_cache_retention must be stripped before forwarding to Codex upstream")
|
||||
}
|
||||
|
||||
func TestApplyCodexOAuthTransform_ExtractsSystemMessages(t *testing.T) {
|
||||
reqBody := map[string]any{
|
||||
"model": "gpt-5.1",
|
||||
|
||||
@@ -153,3 +153,47 @@ func TestCursorMixedShape_JSONRoundtrip(t *testing.T) {
|
||||
require.True(t, ok, "input must decode to a Go []any after round-trip")
|
||||
require.Len(t, inputArr, 1)
|
||||
}
|
||||
|
||||
// TestCursorMixedShape_StripsUnsupportedFields mirrors the strip loop in
|
||||
// ForwardAsChatCompletions (isResponsesShape branch). Cursor cloud sends
|
||||
// prompt_cache_retention, safety_identifier, metadata and stream_options
|
||||
// as top-level Responses API parameters, which Codex upstreams reject with
|
||||
// "Unsupported parameter: ...". The fix must remove them from the raw body
|
||||
// before it is forwarded, for BOTH OAuth and API Key account types.
|
||||
func TestCursorMixedShape_StripsUnsupportedFields(t *testing.T) {
|
||||
cursorBody := []byte(`{
|
||||
"model": "gpt-5.4",
|
||||
"stream": true,
|
||||
"prompt_cache_retention": "24h",
|
||||
"safety_identifier": "cursor-user-xyz",
|
||||
"metadata": {"trace_id":"abc","caller":"cursor"},
|
||||
"stream_options": {"include_usage": true},
|
||||
"input": [{"role":"user","content":"hi"}]
|
||||
}`)
|
||||
|
||||
// Sanity: the test fixture contains every field the production code strips.
|
||||
for _, field := range cursorResponsesUnsupportedFields {
|
||||
require.True(t, gjson.GetBytes(cursorBody, field).Exists(),
|
||||
"test fixture must contain %s", field)
|
||||
}
|
||||
|
||||
// Run the exact same loop as the production code.
|
||||
result := cursorBody
|
||||
for _, field := range cursorResponsesUnsupportedFields {
|
||||
if stripped, err := sjson.DeleteBytes(result, field); err == nil {
|
||||
result = stripped
|
||||
}
|
||||
}
|
||||
|
||||
// All unsupported fields must be gone.
|
||||
for _, field := range cursorResponsesUnsupportedFields {
|
||||
assert.False(t, gjson.GetBytes(result, field).Exists(),
|
||||
"%s must be stripped", field)
|
||||
}
|
||||
|
||||
// Everything else must survive intact.
|
||||
assert.Equal(t, "gpt-5.4", gjson.GetBytes(result, "model").String())
|
||||
assert.Equal(t, true, gjson.GetBytes(result, "stream").Bool())
|
||||
assert.True(t, gjson.GetBytes(result, "input").IsArray())
|
||||
assert.Equal(t, "user", gjson.GetBytes(result, "input.0.role").String())
|
||||
}
|
||||
|
||||
@@ -21,6 +21,22 @@ import (
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// cursorResponsesUnsupportedFields lists the top-level Responses API
// parameters that Codex upstreams refuse with "Unsupported parameter: ...".
// ForwardAsChatCompletions must delete them from the raw client body on its
// Responses-shape short-circuit (the isResponsesShape branch), because that
// branch forwards the body as-is. The regular Chat Completions → Responses
// conversion path needs no such filter: json.Unmarshal into
// ChatCompletionsRequest silently drops fields the struct does not declare.
// Keep this list semantically in sync with the one in
// openai_gateway_service.go:2034 used by the /v1/responses passthrough path.
var cursorResponsesUnsupportedFields = []string{
	"prompt_cache_retention",
	"safety_identifier",
	"metadata",
	"stream_options",
}
|
||||
|
||||
// ForwardAsChatCompletions accepts a Chat Completions request body, converts it
|
||||
// to OpenAI Responses API format, forwards to the OpenAI upstream, and converts
|
||||
// the response back to Chat Completions format. All account types (OAuth and API
|
||||
@@ -81,6 +97,16 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("rewrite model in responses-shape body: %w", err)
|
||||
}
|
||||
// Strip Responses API parameters that no Codex upstream accepts.
|
||||
// Because this branch forwards the raw body (the normal path rebuilds
|
||||
// it from ChatCompletionsRequest and drops unknown fields naturally),
|
||||
// we must filter these fields explicitly here — otherwise the upstream
|
||||
// rejects the request with "Unsupported parameter: ...".
|
||||
for _, field := range cursorResponsesUnsupportedFields {
|
||||
if stripped, derr := sjson.DeleteBytes(responsesBody, field); derr == nil {
|
||||
responsesBody = stripped
|
||||
}
|
||||
}
|
||||
// Minimal stub populated from the raw body so downstream billing
|
||||
// propagation (ServiceTier, ReasoningEffort) keeps working.
|
||||
responsesReq = &apicompat.ResponsesRequest{
|
||||
|
||||
Reference in New Issue
Block a user