Merge pull request #1575 from shuanbao0/fix/cursor-responses-body-compat

fix(gateway): 兼容 Cursor /v1/chat/completions 的 Responses API body
This commit is contained in:
Wesley Liddick
2026-04-13 22:02:44 +08:00
committed by GitHub
4 changed files with 301 additions and 12 deletions

View File

@@ -124,6 +124,14 @@ func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact
"top_p",
"frequency_penalty",
"presence_penalty",
// prompt_cache_retention is a newer Responses API parameter (cache TTL).
// The ChatGPT internal Codex endpoint rejects it with
// "Unsupported parameter: prompt_cache_retention". Defense-in-depth
// for any OAuth path that reaches this transform — the Cursor
// Responses-shape short-circuit in ForwardAsChatCompletions strips
// it earlier too, but we keep this line so other OAuth callers are
// equally protected.
"prompt_cache_retention",
} {
if _, ok := reqBody[key]; ok {
delete(reqBody, key)

View File

@@ -481,6 +481,26 @@ func TestExtractSystemMessagesFromInput(t *testing.T) {
})
}
// TestApplyCodexOAuthTransform_StripsPromptCacheRetention is a regression
// test for the prompt_cache_retention parameter. Some clients (e.g. Cursor
// cloud via the Responses-shape compat path) include it, but the ChatGPT
// internal Codex endpoint rejects it with
// "Unsupported parameter: prompt_cache_retention", so the OAuth transform
// must delete it before the body is forwarded upstream.
func TestApplyCodexOAuthTransform_StripsPromptCacheRetention(t *testing.T) {
	body := map[string]any{
		"model":                  "gpt-5.1",
		"prompt_cache_retention": "24h",
		"input": []any{
			map[string]any{"role": "user", "content": "hi"},
		},
	}

	applyCodexOAuthTransform(body, false, false)

	// testify's Contains/NotContains check map keys when given a map.
	require.NotContains(t, body, "prompt_cache_retention",
		"prompt_cache_retention must be stripped before forwarding to Codex upstream")
}
func TestApplyCodexOAuthTransform_ExtractsSystemMessages(t *testing.T) {
reqBody := map[string]any{
"model": "gpt-5.1",

View File

@@ -0,0 +1,199 @@
package service
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
// TestCursorMixedShapeDetection covers the core invariant of the Cursor
// compatibility fix in ForwardAsChatCompletions: when a client POSTs a
// Responses-shaped body (has `input`, no `messages`) to /v1/chat/completions,
// the request must be forwarded as-is with only the `model` field rewritten.
// The raw `input` array (including Cursor's 80KB system prompt) must not be
// discarded or reshaped.
//
// Context:
//
// Before the fix, the handler unmarshaled the body into ChatCompletionsRequest,
// which has no Input field, silently dropping Cursor's input. The subsequent
// conversion produced `input: null`, which Codex upstreams reject with
// "Invalid type for 'input': expected a string, but got an object".
func TestCursorMixedShapeDetection(t *testing.T) {
	// Representative Cursor cloud body — shape is what matters, content is
	// abridged. Notice: `input` is a Responses-API array, there is no
	// `messages` field at all, and `user`/`stream` are at the top level.
	cursorBody := []byte(`{
		"user": "85df22e7463ab6c2",
		"model": "gpt-5.4",
		"stream": true,
		"input": [
			{"role":"system","content":"You are GPT-5.4 running as a coding agent."},
			{"role":"user","content":"hello"}
		],
		"service_tier": "auto",
		"reasoning": {"effort": "high"}
	}`)

	// --- Step 1: Shape detection (mirrors ForwardAsChatCompletions) ---
	hasMessages := gjson.GetBytes(cursorBody, "messages").Exists()
	hasInput := gjson.GetBytes(cursorBody, "input").Exists()
	isResponsesShape := !hasMessages && hasInput
	require.True(t, isResponsesShape,
		"Cursor body must be detected as Responses-shape (has input, no messages)")

	// --- Step 2: Model rewrite (mirrors the sjson.SetBytes branch) ---
	const upstreamModel = "gpt-5.1-codex"
	rewritten, err := sjson.SetBytes(cursorBody, "model", upstreamModel)
	require.NoError(t, err)

	// --- Step 3: Invariants of the rewritten body ---

	// 3a. model must be rewritten to the upstream target.
	assert.Equal(t, upstreamModel, gjson.GetBytes(rewritten, "model").String())

	// 3b. input array must be preserved verbatim — no reshaping, no nulling.
	inputResult := gjson.GetBytes(rewritten, "input")
	require.True(t, inputResult.Exists(), "input field must still exist after rewrite")
	require.True(t, inputResult.IsArray(), "input must still be an array (not null, not object)")
	items := inputResult.Array()
	require.Len(t, items, 2, "both input items must be preserved")
	assert.Equal(t, "system", items[0].Get("role").String())
	assert.Equal(t, "You are GPT-5.4 running as a coding agent.",
		items[0].Get("content").String())
	assert.Equal(t, "user", items[1].Get("role").String())
	assert.Equal(t, "hello", items[1].Get("content").String())

	// 3c. ALL other top-level fields must survive intact.
	assert.Equal(t, "85df22e7463ab6c2", gjson.GetBytes(rewritten, "user").String())
	// assert.True instead of Equal(t, true, ...): .Bool() already yields a
	// bool, and testify (testifylint bool-compare) prefers the direct form.
	assert.True(t, gjson.GetBytes(rewritten, "stream").Bool())
	assert.Equal(t, "auto", gjson.GetBytes(rewritten, "service_tier").String())
	assert.Equal(t, "high", gjson.GetBytes(rewritten, "reasoning.effort").String())

	// 3d. Final upstream body must NOT contain the old "input":null pattern.
	assert.NotContains(t, string(rewritten), `"input":null`,
		"rewritten body must not collapse input to null")
}
// TestCursorMixedShapeDetection_NormalChatCompletionsUnaffected guards that
// the Responses-shape detection does NOT fire on a standard Chat Completions
// request (one carrying a `messages` array). Such requests must fall through
// to the existing ChatCompletionsToResponses conversion path.
func TestCursorMixedShapeDetection_NormalChatCompletionsUnaffected(t *testing.T) {
	body := []byte(`{
		"model": "gpt-4o",
		"messages": [{"role":"user","content":"hi"}],
		"stream": true
	}`)

	// Single-expression form of the production shape check.
	shortcut := !gjson.GetBytes(body, "messages").Exists() &&
		gjson.GetBytes(body, "input").Exists()
	assert.False(t, shortcut,
		"standard Chat Completions body must NOT be detected as Responses-shape")
}
// TestCursorMixedShapeDetection_BothFieldsPrefersMessages guards the
// ambiguous case where a client sends both `messages` and `input`: the
// request falls through to the normal conversion path (messages wins),
// since mixing the two is almost certainly a client bug and messages is the
// documented Chat Completions contract.
func TestCursorMixedShapeDetection_BothFieldsPrefersMessages(t *testing.T) {
	body := []byte(`{
		"model": "gpt-4o",
		"messages": [{"role":"user","content":"hi"}],
		"input": [{"role":"user","content":"other"}]
	}`)

	// `messages` present ⇒ the !hasMessages term is false ⇒ no shortcut,
	// regardless of `input`.
	shortcut := !gjson.GetBytes(body, "messages").Exists() &&
		gjson.GetBytes(body, "input").Exists()
	assert.False(t, shortcut,
		"when both messages and input are present, must not take the Cursor shortcut")
}
// TestCursorMixedShapeDetection_EmptyBody ensures a body carrying neither
// `messages` nor `input` is NOT treated as Cursor-shape — it proceeds to the
// normal conversion, which fails on its own with a clearer error.
func TestCursorMixedShapeDetection_EmptyBody(t *testing.T) {
	body := []byte(`{"model":"gpt-5.4","stream":true}`)

	// With no `input` the second term is false, so the shortcut cannot fire.
	shortcut := !gjson.GetBytes(body, "messages").Exists() &&
		gjson.GetBytes(body, "input").Exists()
	assert.False(t, shortcut,
		"body with neither messages nor input must not be taken as Cursor shape")
}
// TestCursorMixedShape_JSONRoundtrip ensures the rewritten body is still
// valid JSON and decodes back into a map without surprises — catching any
// encoding drift introduced by sjson.
func TestCursorMixedShape_JSONRoundtrip(t *testing.T) {
	original := []byte(`{"model":"gpt-5.4","stream":true,"input":[{"role":"user","content":"hi"}]}`)

	updated, err := sjson.SetBytes(original, "model", "gpt-5.1-codex")
	require.NoError(t, err)

	var decoded map[string]any
	require.NoError(t, json.Unmarshal(updated, &decoded))

	assert.Equal(t, "gpt-5.1-codex", decoded["model"])
	// decoded["stream"] is `any`, so Equal against a bool literal is the
	// right comparison here (assert.True would need a type assertion).
	assert.Equal(t, true, decoded["stream"])

	items, ok := decoded["input"].([]any)
	require.True(t, ok, "input must decode to a Go []any after round-trip")
	require.Len(t, items, 1)
}
// TestCursorMixedShape_StripsUnsupportedFields mirrors the strip loop in
// ForwardAsChatCompletions (isResponsesShape branch). Cursor cloud sends
// prompt_cache_retention, safety_identifier, metadata and stream_options
// as top-level Responses API parameters, which Codex upstreams reject with
// "Unsupported parameter: ...". The fix must remove them from the raw body
// before it is forwarded, for BOTH OAuth and API Key account types.
func TestCursorMixedShape_StripsUnsupportedFields(t *testing.T) {
	cursorBody := []byte(`{
		"model": "gpt-5.4",
		"stream": true,
		"prompt_cache_retention": "24h",
		"safety_identifier": "cursor-user-xyz",
		"metadata": {"trace_id":"abc","caller":"cursor"},
		"stream_options": {"include_usage": true},
		"input": [{"role":"user","content":"hi"}]
	}`)

	// Sanity: the test fixture contains every field the production code strips.
	for _, field := range cursorResponsesUnsupportedFields {
		require.True(t, gjson.GetBytes(cursorBody, field).Exists(),
			"test fixture must contain %s", field)
	}

	// Run the exact same loop as the production code.
	result := cursorBody
	for _, field := range cursorResponsesUnsupportedFields {
		if stripped, err := sjson.DeleteBytes(result, field); err == nil {
			result = stripped
		}
	}

	// All unsupported fields must be gone.
	for _, field := range cursorResponsesUnsupportedFields {
		assert.False(t, gjson.GetBytes(result, field).Exists(),
			"%s must be stripped", field)
	}

	// Everything else must survive intact.
	assert.Equal(t, "gpt-5.4", gjson.GetBytes(result, "model").String())
	// assert.True instead of Equal(t, true, ...): .Bool() already yields a
	// bool, and testify (testifylint bool-compare) prefers the direct form.
	assert.True(t, gjson.GetBytes(result, "stream").Bool())
	assert.True(t, gjson.GetBytes(result, "input").IsArray())
	assert.Equal(t, "user", gjson.GetBytes(result, "input.0.role").String())
}

View File

@@ -16,9 +16,27 @@ import (
"github.com/Wei-Shaw/sub2api/internal/pkg/logger"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/gin-gonic/gin"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
"go.uber.org/zap"
)
// cursorResponsesUnsupportedFields are top-level Responses API parameters that
// Codex upstreams reject with "Unsupported parameter: ...". They must be
// stripped when forwarding a raw client body through the Responses-shape
// short-circuit in ForwardAsChatCompletions (see isResponsesShape branch).
// The normal Chat Completions → Responses conversion path is unaffected
// because ChatCompletionsRequest has no fields for these parameters — unknown
// fields are dropped naturally by json.Unmarshal. Kept semantically in sync
// with the list in openai_gateway_service.go:2034 used by the /v1/responses
// passthrough path (NOTE: the line number will drift as that file changes;
// search for the sibling field list rather than trusting the number).
var cursorResponsesUnsupportedFields = []string{
	"prompt_cache_retention", // Responses cache-TTL hint; rejected by the Codex endpoint
	"safety_identifier",      // caller-supplied abuse-tracking ID, not accepted upstream
	"metadata",               // arbitrary client key/value bag, not accepted upstream
	"stream_options",         // e.g. include_usage; not accepted upstream
}
// ForwardAsChatCompletions accepts a Chat Completions request body, converts it
// to OpenAI Responses API format, forwards to the OpenAI upstream, and converts
// the response back to Chat Completions format. All account types (OAuth and API
@@ -55,13 +73,62 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
compatPromptCacheInjected = promptCacheKey != ""
}
// 3. Convert to Responses and forward
// ChatCompletionsToResponses always sets Stream=true (upstream always streams).
responsesReq, err := apicompat.ChatCompletionsToResponses(&chatReq)
if err != nil {
return nil, fmt.Errorf("convert chat completions to responses: %w", err)
// 3. Build the upstream (Responses API) body.
//
// Cursor compatibility: some clients (notably Cursor cloud) send Responses
// API shaped bodies — `input: [...]` with no `messages` field — to the
// /v1/chat/completions URL. Running those through ChatCompletionsToResponses
// would silently drop Cursor's `input` array (the struct has no Input field)
// and produce `input: null`, which Codex upstreams reject with
// "Invalid type for 'input': expected a string, but got an object".
//
// Detect that shape and forward the raw body as-is, only rewriting `model`
// to the resolved upstream model. The downstream codex OAuth transform will
// still normalize store/stream/instructions/etc.
isResponsesShape := !gjson.GetBytes(body, "messages").Exists() && gjson.GetBytes(body, "input").Exists()
var (
responsesReq *apicompat.ResponsesRequest
responsesBody []byte
err error
)
if isResponsesShape {
responsesBody, err = sjson.SetBytes(body, "model", upstreamModel)
if err != nil {
return nil, fmt.Errorf("rewrite model in responses-shape body: %w", err)
}
// Strip Responses API parameters that no Codex upstream accepts.
// Because this branch forwards the raw body (the normal path rebuilds
// it from ChatCompletionsRequest and drops unknown fields naturally),
// we must filter these fields explicitly here — otherwise the upstream
// rejects the request with "Unsupported parameter: ...".
for _, field := range cursorResponsesUnsupportedFields {
if stripped, derr := sjson.DeleteBytes(responsesBody, field); derr == nil {
responsesBody = stripped
}
}
// Minimal stub populated from the raw body so downstream billing
// propagation (ServiceTier, ReasoningEffort) keeps working.
responsesReq = &apicompat.ResponsesRequest{
Model: upstreamModel,
ServiceTier: gjson.GetBytes(responsesBody, "service_tier").String(),
}
if effort := gjson.GetBytes(responsesBody, "reasoning.effort").String(); effort != "" {
responsesReq.Reasoning = &apicompat.ResponsesReasoning{Effort: effort}
}
} else {
// Normal path: convert Chat Completions → Responses.
// ChatCompletionsToResponses always sets Stream=true (upstream always streams).
responsesReq, err = apicompat.ChatCompletionsToResponses(&chatReq)
if err != nil {
return nil, fmt.Errorf("convert chat completions to responses: %w", err)
}
responsesReq.Model = upstreamModel
responsesBody, err = json.Marshal(responsesReq)
if err != nil {
return nil, fmt.Errorf("marshal responses request: %w", err)
}
}
responsesReq.Model = upstreamModel
logFields := []zap.Field{
zap.Int64("account_id", account.ID),
@@ -69,6 +136,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
zap.String("billing_model", billingModel),
zap.String("upstream_model", upstreamModel),
zap.Bool("stream", clientStream),
zap.Bool("responses_shape", isResponsesShape),
}
if compatPromptCacheInjected {
logFields = append(logFields,
@@ -78,12 +146,6 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
}
logger.L().Debug("openai chat_completions: model mapping applied", logFields...)
// 4. Marshal Responses request body, then apply OAuth codex transform
responsesBody, err := json.Marshal(responsesReq)
if err != nil {
return nil, fmt.Errorf("marshal responses request: %w", err)
}
if account.Type == AccountTypeOAuth {
var reqBody map[string]any
if err := json.Unmarshal(responsesBody, &reqBody); err != nil {