fix(openai): support remote compact task

This commit is contained in:
神乐
2026-03-06 18:50:28 +08:00
parent 005d0c5f53
commit 3403909354
10 changed files with 446 additions and 57 deletions

View File

@@ -118,6 +118,20 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
}
setOpsRequestContext(c, "", false, body)
sessionHashBody := body
if service.IsOpenAIResponsesCompactPathForTest(c) {
if compactSeed := strings.TrimSpace(gjson.GetBytes(body, "prompt_cache_key").String()); compactSeed != "" {
c.Set(service.OpenAICompactSessionSeedKeyForTest(), compactSeed)
}
normalizedCompactBody, normalizedCompact, compactErr := service.NormalizeOpenAICompactRequestBodyForTest(body)
if compactErr != nil {
h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "Failed to normalize compact request body")
return
}
if normalizedCompact {
body = normalizedCompactBody
}
}
// 校验请求体 JSON 合法性
if !gjson.ValidBytes(body) {
@@ -193,7 +207,7 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
}
// Generate session hash (header first; fallback to prompt_cache_key)
sessionHash := h.gatewayService.GenerateSessionHash(c, body)
sessionHash := h.gatewayService.GenerateSessionHash(c, sessionHashBody)
maxAccountSwitches := h.maxAccountSwitches
switchCount := 0

View File

@@ -49,6 +49,7 @@ func RegisterGatewayRoutes(
gateway.GET("/usage", h.Gateway.Usage)
// OpenAI Responses API
gateway.POST("/responses", h.OpenAIGateway.Responses)
gateway.POST("/responses/*subpath", h.OpenAIGateway.Responses)
gateway.GET("/responses", h.OpenAIGateway.ResponsesWebSocket)
// 明确阻止旧协议入口OpenAI 仅支持 Responses API避免客户端误解为会自动路由到其它平台。
gateway.POST("/chat/completions", func(c *gin.Context) {
@@ -77,6 +78,7 @@ func RegisterGatewayRoutes(
// OpenAI Responses API不带v1前缀的别名
r.POST("/responses", bodyLimit, clientRequestID, opsErrorLogger, gin.HandlerFunc(apiKeyAuth), requireGroupAnthropic, h.OpenAIGateway.Responses)
r.POST("/responses/*subpath", bodyLimit, clientRequestID, opsErrorLogger, gin.HandlerFunc(apiKeyAuth), requireGroupAnthropic, h.OpenAIGateway.Responses)
r.GET("/responses", bodyLimit, clientRequestID, opsErrorLogger, gin.HandlerFunc(apiKeyAuth), requireGroupAnthropic, h.OpenAIGateway.ResponsesWebSocket)
// Antigravity 模型列表

View File

@@ -0,0 +1,51 @@
package routes
import (
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/Wei-Shaw/sub2api/internal/config"
"github.com/Wei-Shaw/sub2api/internal/handler"
servermiddleware "github.com/Wei-Shaw/sub2api/internal/server/middleware"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
)
func newGatewayRoutesTestRouter() *gin.Engine {
gin.SetMode(gin.TestMode)
router := gin.New()
RegisterGatewayRoutes(
router,
&handler.Handlers{
Gateway: &handler.GatewayHandler{},
OpenAIGateway: &handler.OpenAIGatewayHandler{},
SoraGateway: &handler.SoraGatewayHandler{},
},
servermiddleware.APIKeyAuthMiddleware(func(c *gin.Context) {
c.Next()
}),
nil,
nil,
nil,
nil,
&config.Config{},
)
return router
}
// TestGatewayRoutesOpenAIResponsesCompactPathIsRegistered verifies that the
// compact sub-path of the OpenAI Responses API is routed both with and without
// the /v1 prefix, instead of falling through to a 404.
func TestGatewayRoutesOpenAIResponsesCompactPathIsRegistered(t *testing.T) {
	router := newGatewayRoutesTestRouter()

	paths := []string{"/v1/responses/compact", "/responses/compact"}
	for _, path := range paths {
		req := httptest.NewRequest(http.MethodPost, path, strings.NewReader(`{"model":"gpt-5"}`))
		req.Header.Set("Content-Type", "application/json")

		rec := httptest.NewRecorder()
		router.ServeHTTP(rec, req)

		require.NotEqual(t, http.StatusNotFound, rec.Code, "path=%s should hit OpenAI responses handler", path)
	}
}

View File

@@ -77,7 +77,7 @@ type codexTransformResult struct {
PromptCacheKey string
}
func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool) codexTransformResult {
func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact bool) codexTransformResult {
result := codexTransformResult{}
// 工具续链需求会影响存储策略与 input 过滤逻辑。
needsToolContinuation := NeedsToolContinuation(reqBody)
@@ -95,15 +95,26 @@ func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool) codexTran
result.NormalizedModel = normalizedModel
}
// OAuth 走 ChatGPT internal API 时store 必须为 false显式 true 也会强制覆盖。
// 避免上游返回 "Store must be set to false"。
if v, ok := reqBody["store"].(bool); !ok || v {
reqBody["store"] = false
result.Modified = true
}
if v, ok := reqBody["stream"].(bool); !ok || !v {
reqBody["stream"] = true
result.Modified = true
if isCompact {
if _, ok := reqBody["store"]; ok {
delete(reqBody, "store")
result.Modified = true
}
if _, ok := reqBody["stream"]; ok {
delete(reqBody, "stream")
result.Modified = true
}
} else {
// OAuth 走 ChatGPT internal API 时store 必须为 false显式 true 也会强制覆盖。
// 避免上游返回 "Store must be set to false"。
if v, ok := reqBody["store"].(bool); !ok || v {
reqBody["store"] = false
result.Modified = true
}
if v, ok := reqBody["stream"].(bool); !ok || !v {
reqBody["stream"] = true
result.Modified = true
}
}
// Strip parameters unsupported by codex models via the Responses API.

View File

@@ -18,7 +18,7 @@ func TestApplyCodexOAuthTransform_ToolContinuationPreservesInput(t *testing.T) {
"tool_choice": "auto",
}
applyCodexOAuthTransform(reqBody, false)
applyCodexOAuthTransform(reqBody, false, false)
// 未显式设置 store=true默认为 false。
store, ok := reqBody["store"].(bool)
@@ -53,7 +53,7 @@ func TestApplyCodexOAuthTransform_ExplicitStoreFalsePreserved(t *testing.T) {
"tool_choice": "auto",
}
applyCodexOAuthTransform(reqBody, false)
applyCodexOAuthTransform(reqBody, false, false)
store, ok := reqBody["store"].(bool)
require.True(t, ok)
@@ -72,13 +72,29 @@ func TestApplyCodexOAuthTransform_ExplicitStoreTrueForcedFalse(t *testing.T) {
"tool_choice": "auto",
}
applyCodexOAuthTransform(reqBody, false)
applyCodexOAuthTransform(reqBody, false, false)
store, ok := reqBody["store"].(bool)
require.True(t, ok)
require.False(t, store)
}
// TestApplyCodexOAuthTransform_CompactForcesNonStreaming verifies that compact
// requests have their store/stream fields stripped entirely (rather than being
// forced to fixed values) and that the transform reports a modification.
func TestApplyCodexOAuthTransform_CompactForcesNonStreaming(t *testing.T) {
	reqBody := map[string]any{
		"model":  "gpt-5.1-codex",
		"store":  true,
		"stream": true,
	}

	result := applyCodexOAuthTransform(reqBody, true, true)

	for _, key := range []string{"store", "stream"} {
		_, present := reqBody[key]
		require.False(t, present, "field %q should be removed for compact requests", key)
	}
	require.True(t, result.Modified)
}
func TestApplyCodexOAuthTransform_NonContinuationDefaultsStoreFalseAndStripsIDs(t *testing.T) {
// 非续链场景:未设置 store 时默认 false并移除 input 中的 id。
@@ -89,7 +105,7 @@ func TestApplyCodexOAuthTransform_NonContinuationDefaultsStoreFalseAndStripsIDs(
},
}
applyCodexOAuthTransform(reqBody, false)
applyCodexOAuthTransform(reqBody, false, false)
store, ok := reqBody["store"].(bool)
require.True(t, ok)
@@ -138,7 +154,7 @@ func TestApplyCodexOAuthTransform_NormalizeCodexTools_PreservesResponsesFunction
},
}
applyCodexOAuthTransform(reqBody, false)
applyCodexOAuthTransform(reqBody, false, false)
tools, ok := reqBody["tools"].([]any)
require.True(t, ok)
@@ -158,7 +174,7 @@ func TestApplyCodexOAuthTransform_EmptyInput(t *testing.T) {
"input": []any{},
}
applyCodexOAuthTransform(reqBody, false)
applyCodexOAuthTransform(reqBody, false, false)
input, ok := reqBody["input"].([]any)
require.True(t, ok)
@@ -193,7 +209,7 @@ func TestApplyCodexOAuthTransform_CodexCLI_PreservesExistingInstructions(t *test
"instructions": "existing instructions",
}
result := applyCodexOAuthTransform(reqBody, true) // isCodexCLI=true
result := applyCodexOAuthTransform(reqBody, true, false) // isCodexCLI=true
instructions, ok := reqBody["instructions"].(string)
require.True(t, ok)
@@ -210,7 +226,7 @@ func TestApplyCodexOAuthTransform_CodexCLI_SuppliesDefaultWhenEmpty(t *testing.T
// 没有 instructions 字段
}
result := applyCodexOAuthTransform(reqBody, true) // isCodexCLI=true
result := applyCodexOAuthTransform(reqBody, true, false) // isCodexCLI=true
instructions, ok := reqBody["instructions"].(string)
require.True(t, ok)
@@ -226,7 +242,7 @@ func TestApplyCodexOAuthTransform_NonCodexCLI_OverridesInstructions(t *testing.T
"instructions": "old instructions",
}
result := applyCodexOAuthTransform(reqBody, false) // isCodexCLI=false
result := applyCodexOAuthTransform(reqBody, false, false) // isCodexCLI=false
instructions, ok := reqBody["instructions"].(string)
require.True(t, ok)

View File

@@ -25,6 +25,7 @@ import (
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
"go.uber.org/zap"
@@ -49,6 +50,8 @@ const (
openAIWSRetryBackoffInitialDefault = 120 * time.Millisecond
openAIWSRetryBackoffMaxDefault = 2 * time.Second
openAIWSRetryJitterRatioDefault = 0.2
openAICompactSessionSeedKey = "openai_compact_session_seed"
codexCLIVersion = "0.104.0"
)
// OpenAI allowed headers whitelist (for non-passthrough).
@@ -1614,7 +1617,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
}
if account.Type == AccountTypeOAuth {
codexResult := applyCodexOAuthTransform(reqBody, isCodexCLI)
codexResult := applyCodexOAuthTransform(reqBody, isCodexCLI, isOpenAIResponsesCompactPath(c))
if codexResult.Modified {
bodyModified = true
disablePatch()
@@ -2046,14 +2049,14 @@ func (s *OpenAIGatewayService) forwardOpenAIPassthrough(
return nil, fmt.Errorf("openai passthrough rejected before upstream: %s", rejectReason)
}
normalizedBody, normalized, err := normalizeOpenAIPassthroughOAuthBody(body)
normalizedBody, normalized, err := normalizeOpenAIPassthroughOAuthBody(body, isOpenAIResponsesCompactPath(c))
if err != nil {
return nil, err
}
if normalized {
body = normalizedBody
reqStream = true
}
reqStream = gjson.GetBytes(body, "stream").Bool()
}
logger.LegacyPrintf("service.openai_gateway",
@@ -2218,6 +2221,7 @@ func (s *OpenAIGatewayService) buildUpstreamRequestOpenAIPassthrough(
targetURL = buildOpenAIResponsesURL(validatedURL)
}
}
targetURL = appendOpenAIResponsesRequestPathSuffix(targetURL, openAIResponsesRequestPathSuffix(c))
req, err := http.NewRequestWithContext(ctx, http.MethodPost, targetURL, bytes.NewReader(body))
if err != nil {
@@ -2251,7 +2255,15 @@ func (s *OpenAIGatewayService) buildUpstreamRequestOpenAIPassthrough(
if chatgptAccountID := account.GetChatGPTAccountID(); chatgptAccountID != "" {
req.Header.Set("chatgpt-account-id", chatgptAccountID)
}
if req.Header.Get("accept") == "" {
if isOpenAIResponsesCompactPath(c) {
req.Header.Set("accept", "application/json")
if req.Header.Get("version") == "" {
req.Header.Set("version", codexCLIVersion)
}
if req.Header.Get("session_id") == "" {
req.Header.Set("session_id", resolveOpenAICompactSessionID(c))
}
} else if req.Header.Get("accept") == "" {
req.Header.Set("accept", "text/event-stream")
}
if req.Header.Get("OpenAI-Beta") == "" {
@@ -2598,6 +2610,7 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
default:
targetURL = openaiPlatformAPIURL
}
targetURL = appendOpenAIResponsesRequestPathSuffix(targetURL, openAIResponsesRequestPathSuffix(c))
req, err := http.NewRequestWithContext(ctx, "POST", targetURL, bytes.NewReader(body))
if err != nil {
@@ -2634,7 +2647,17 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
} else {
req.Header.Set("originator", "opencode")
}
req.Header.Set("accept", "text/event-stream")
if isOpenAIResponsesCompactPath(c) {
req.Header.Set("accept", "application/json")
if req.Header.Get("version") == "" {
req.Header.Set("version", codexCLIVersion)
}
if req.Header.Get("session_id") == "" {
req.Header.Set("session_id", resolveOpenAICompactSessionID(c))
}
} else {
req.Header.Set("accept", "text/event-stream")
}
if promptCacheKey != "" {
req.Header.Set("conversation_id", promptCacheKey)
req.Header.Set("session_id", promptCacheKey)
@@ -3425,6 +3448,95 @@ func buildOpenAIResponsesURL(base string) string {
return normalized + "/v1/responses"
}
// IsOpenAIResponsesCompactPathForTest exposes isOpenAIResponsesCompactPath to
// external test packages.
func IsOpenAIResponsesCompactPathForTest(c *gin.Context) bool {
	return isOpenAIResponsesCompactPath(c)
}
// OpenAICompactSessionSeedKeyForTest exposes the gin context key used to carry
// the compact session seed, for external test packages.
func OpenAICompactSessionSeedKeyForTest() string {
	return openAICompactSessionSeedKey
}
// NormalizeOpenAICompactRequestBodyForTest exposes normalizeOpenAICompactRequestBody
// to external test packages.
func NormalizeOpenAICompactRequestBodyForTest(body []byte) ([]byte, bool, error) {
	return normalizeOpenAICompactRequestBody(body)
}
// isOpenAIResponsesCompactPath reports whether the current request targets the
// compact sub-path of the Responses API ("/responses/compact" or anything
// nested beneath it).
func isOpenAIResponsesCompactPath(c *gin.Context) bool {
	suffix := strings.TrimSpace(openAIResponsesRequestPathSuffix(c))
	if suffix == "/compact" {
		return true
	}
	return strings.HasPrefix(suffix, "/compact/")
}
// normalizeOpenAICompactRequestBody rebuilds a compact request body keeping
// only the fields the upstream compact endpoint accepts. It returns the
// (possibly rewritten) body plus whether a rewrite actually took place.
func normalizeOpenAICompactRequestBody(body []byte) ([]byte, bool, error) {
	if len(body) == 0 {
		return body, false, nil
	}

	// Whitelist projection: copy allowed fields verbatim into a fresh object.
	allowedFields := []string{"model", "input", "instructions", "previous_response_id"}
	rebuilt := []byte(`{}`)
	for _, field := range allowedFields {
		value := gjson.GetBytes(body, field)
		if !value.Exists() {
			continue
		}
		updated, err := sjson.SetRawBytes(rebuilt, field, []byte(value.Raw))
		if err != nil {
			return body, false, fmt.Errorf("normalize compact body %s: %w", field, err)
		}
		rebuilt = updated
	}

	// Treat as a no-op when the projection matches the original payload.
	if bytes.Equal(bytes.TrimSpace(body), bytes.TrimSpace(rebuilt)) {
		return body, false, nil
	}
	return rebuilt, true, nil
}
// resolveOpenAICompactSessionID picks a session identifier for a compact
// request: incoming session_id header first, then conversation_id, then the
// compact seed stashed on the gin context, finally a fresh UUID.
func resolveOpenAICompactSessionID(c *gin.Context) string {
	if c == nil {
		return uuid.NewString()
	}
	for _, header := range []string{"session_id", "conversation_id"} {
		if v := strings.TrimSpace(c.GetHeader(header)); v != "" {
			return v
		}
	}
	if seed, ok := c.Get(openAICompactSessionSeedKey); ok {
		if seedStr, ok := seed.(string); ok {
			if trimmed := strings.TrimSpace(seedStr); trimmed != "" {
				return trimmed
			}
		}
	}
	return uuid.NewString()
}
// openAIResponsesRequestPathSuffix extracts the sub-path following the last
// "/responses" segment of the request URL (e.g. "/compact"), or "" when there
// is no meaningful suffix.
func openAIResponsesRequestPathSuffix(c *gin.Context) string {
	if c == nil || c.Request == nil || c.Request.URL == nil {
		return ""
	}

	cleaned := strings.TrimRight(strings.TrimSpace(c.Request.URL.Path), "/")
	if cleaned == "" {
		return ""
	}

	const marker = "/responses"
	idx := strings.LastIndex(cleaned, marker)
	if idx < 0 {
		return ""
	}

	tail := cleaned[idx+len(marker):]
	// Only a proper "/..." remainder counts as a suffix; anything else means
	// the match landed inside a longer segment (e.g. "/responsesx").
	switch {
	case tail == "" || tail == "/":
		return ""
	case !strings.HasPrefix(tail, "/"):
		return ""
	}
	return tail
}
// appendOpenAIResponsesRequestPathSuffix glues suffix onto baseURL, trimming
// stray whitespace and trailing slashes so the join never doubles a "/".
// An empty base or empty suffix yields the trimmed base unchanged.
func appendOpenAIResponsesRequestPathSuffix(baseURL, suffix string) string {
	base := strings.TrimRight(strings.TrimSpace(baseURL), "/")
	tail := strings.TrimSpace(suffix)
	if base == "" || tail == "" {
		return base
	}
	return base + tail
}
func (s *OpenAIGatewayService) replaceModelInResponseBody(body []byte, fromModel, toModel string) []byte {
// 使用 gjson/sjson 精确替换 model 字段,避免全量 JSON 反序列化
if m := gjson.GetBytes(body, "model"); m.Exists() && m.Str == fromModel {
@@ -3805,8 +3917,8 @@ func extractOpenAIRequestMetaFromBody(body []byte) (model string, stream bool, p
}
// normalizeOpenAIPassthroughOAuthBody 将透传 OAuth 请求体收敛为旧链路关键行为:
// 1) store=false 2) stream=true
func normalizeOpenAIPassthroughOAuthBody(body []byte) ([]byte, bool, error) {
// 1) store=false 2) 非 compact 保持 stream=truecompact 强制 stream=false
func normalizeOpenAIPassthroughOAuthBody(body []byte, compact bool) ([]byte, bool, error) {
if len(body) == 0 {
return body, false, nil
}
@@ -3814,22 +3926,40 @@ func normalizeOpenAIPassthroughOAuthBody(body []byte) ([]byte, bool, error) {
normalized := body
changed := false
if store := gjson.GetBytes(normalized, "store"); !store.Exists() || store.Type != gjson.False {
next, err := sjson.SetBytes(normalized, "store", false)
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body store=false: %w", err)
if compact {
if store := gjson.GetBytes(normalized, "store"); store.Exists() {
next, err := sjson.DeleteBytes(normalized, "store")
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body delete store: %w", err)
}
normalized = next
changed = true
}
normalized = next
changed = true
}
if stream := gjson.GetBytes(normalized, "stream"); !stream.Exists() || stream.Type != gjson.True {
next, err := sjson.SetBytes(normalized, "stream", true)
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body stream=true: %w", err)
if stream := gjson.GetBytes(normalized, "stream"); stream.Exists() {
next, err := sjson.DeleteBytes(normalized, "stream")
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body delete stream: %w", err)
}
normalized = next
changed = true
}
} else {
if store := gjson.GetBytes(normalized, "store"); !store.Exists() || store.Type != gjson.False {
next, err := sjson.SetBytes(normalized, "store", false)
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body store=false: %w", err)
}
normalized = next
changed = true
}
if stream := gjson.GetBytes(normalized, "stream"); !stream.Exists() || stream.Type != gjson.True {
next, err := sjson.SetBytes(normalized, "stream", true)
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body stream=true: %w", err)
}
normalized = next
changed = true
}
normalized = next
changed = true
}
return normalized, changed, nil

View File

@@ -1248,6 +1248,90 @@ func TestOpenAIValidateUpstreamBaseURLEnabledEnforcesAllowlist(t *testing.T) {
}
}
// TestOpenAIResponsesRequestPathSuffix checks suffix extraction for exact,
// compact, trailing-slash, nested, and unrelated request paths.
func TestOpenAIResponsesRequestPathSuffix(t *testing.T) {
	gin.SetMode(gin.TestMode)
	c, _ := gin.CreateTestContext(httptest.NewRecorder())

	cases := []struct {
		name string
		path string
		want string
	}{
		{name: "exact v1 responses", path: "/v1/responses", want: ""},
		{name: "compact v1 responses", path: "/v1/responses/compact", want: "/compact"},
		{name: "compact alias responses", path: "/responses/compact/", want: "/compact"},
		{name: "nested suffix", path: "/openai/v1/responses/compact/detail", want: "/compact/detail"},
		{name: "unrelated path", path: "/v1/chat/completions", want: ""},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			c.Request = httptest.NewRequest(http.MethodPost, tc.path, nil)
			require.Equal(t, tc.want, openAIResponsesRequestPathSuffix(c))
		})
	}
}
// TestOpenAIBuildUpstreamRequestOpenAIPassthroughPreservesCompactPath verifies
// the passthrough builder keeps the /compact suffix on the upstream URL and
// applies the compact-specific headers (JSON accept, version, session id).
func TestOpenAIBuildUpstreamRequestOpenAIPassthroughPreservesCompactPath(t *testing.T) {
	gin.SetMode(gin.TestMode)
	c, _ := gin.CreateTestContext(httptest.NewRecorder())
	body := []byte(`{"model":"gpt-5"}`)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses/compact", bytes.NewReader(body))

	svc := &OpenAIGatewayService{}
	oauthAccount := &Account{Type: AccountTypeOAuth}

	req, err := svc.buildUpstreamRequestOpenAIPassthrough(c.Request.Context(), c, oauthAccount, body, "token")
	require.NoError(t, err)

	require.Equal(t, chatgptCodexURL+"/compact", req.URL.String())
	require.Equal(t, "application/json", req.Header.Get("Accept"))
	require.Equal(t, codexCLIVersion, req.Header.Get("Version"))
	require.NotEmpty(t, req.Header.Get("Session_Id"))
}
// TestOpenAIBuildUpstreamRequestCompactForcesJSONAcceptForOAuth verifies the
// generic OAuth builder also preserves the /compact path and swaps the Accept
// header to application/json with codex version and session headers set.
func TestOpenAIBuildUpstreamRequestCompactForcesJSONAcceptForOAuth(t *testing.T) {
	gin.SetMode(gin.TestMode)
	c, _ := gin.CreateTestContext(httptest.NewRecorder())
	body := []byte(`{"model":"gpt-5"}`)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses/compact", bytes.NewReader(body))

	svc := &OpenAIGatewayService{}
	oauthAccount := &Account{
		Type:        AccountTypeOAuth,
		Credentials: map[string]any{"chatgpt_account_id": "chatgpt-acc"},
	}

	req, err := svc.buildUpstreamRequest(c.Request.Context(), c, oauthAccount, body, "token", false, "", true)
	require.NoError(t, err)

	require.Equal(t, chatgptCodexURL+"/compact", req.URL.String())
	require.Equal(t, "application/json", req.Header.Get("Accept"))
	require.Equal(t, codexCLIVersion, req.Header.Get("Version"))
	require.NotEmpty(t, req.Header.Get("Session_Id"))
}
// TestOpenAIBuildUpstreamRequestPreservesCompactPathForAPIKeyBaseURL verifies
// that API-key accounts with a custom base_url keep the /responses/compact
// path when the URL allowlist is disabled.
func TestOpenAIBuildUpstreamRequestPreservesCompactPathForAPIKeyBaseURL(t *testing.T) {
	gin.SetMode(gin.TestMode)
	c, _ := gin.CreateTestContext(httptest.NewRecorder())
	body := []byte(`{"model":"gpt-5"}`)
	c.Request = httptest.NewRequest(http.MethodPost, "/responses/compact", bytes.NewReader(body))

	svc := &OpenAIGatewayService{cfg: &config.Config{
		Security: config.SecurityConfig{
			URLAllowlist: config.URLAllowlistConfig{Enabled: false},
		},
	}}
	apiKeyAccount := &Account{
		Type:        AccountTypeAPIKey,
		Platform:    PlatformOpenAI,
		Credentials: map[string]any{"base_url": "https://example.com/v1"},
	}

	req, err := svc.buildUpstreamRequest(c.Request.Context(), c, apiKeyAccount, body, "token", false, "", false)
	require.NoError(t, err)
	require.Equal(t, "https://example.com/v1/responses/compact", req.URL.String())
}
// ==================== P1-08 修复model 替换性能优化测试 ====================
func TestReplaceModelInSSELine(t *testing.T) {

View File

@@ -236,6 +236,60 @@ func TestOpenAIGatewayService_OAuthPassthrough_StreamKeepsToolNameAndBodyNormali
require.NotContains(t, body, "\"name\":\"edit\"")
}
// TestOpenAIGatewayService_OAuthPassthrough_CompactUsesJSONAndKeepsNonStreaming
// drives a full Forward() through the OAuth passthrough path for a compact
// request and checks: store/stream are stripped from the upstream body, the
// result stays non-streaming, compact headers (Accept/Version/Session_Id) are
// set, and the upstream JSON response is relayed to the client.
func TestOpenAIGatewayService_OAuthPassthrough_CompactUsesJSONAndKeepsNonStreaming(t *testing.T) {
	gin.SetMode(gin.TestMode)
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses/compact", bytes.NewReader(nil))
	c.Request.Header.Set("User-Agent", "codex_cli_rs/0.1.0")
	c.Request.Header.Set("Content-Type", "application/json")
	// Deliberately includes stream=true and store=true so the test proves the
	// passthrough normalization removes both for the compact endpoint.
	originalBody := []byte(`{"model":"gpt-5.1-codex","stream":true,"store":true,"instructions":"local-test-instructions","input":[{"type":"text","text":"compact me"}]}`)
	// Canned non-streaming upstream JSON response.
	resp := &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"application/json"}, "x-request-id": []string{"rid-compact"}},
		Body:       io.NopCloser(strings.NewReader(`{"id":"cmp_123","usage":{"input_tokens":11,"output_tokens":22}}`)),
	}
	upstream := &httpUpstreamRecorder{resp: resp}
	svc := &OpenAIGatewayService{
		cfg:          &config.Config{Gateway: config.GatewayConfig{ForceCodexCLI: false}},
		httpUpstream: upstream,
	}
	// OAuth account flagged for passthrough so Forward() takes the
	// forwardOpenAIPassthrough branch.
	account := &Account{
		ID:             123,
		Name:           "acc",
		Platform:       PlatformOpenAI,
		Type:           AccountTypeOAuth,
		Concurrency:    1,
		Credentials:    map[string]any{"access_token": "oauth-token", "chatgpt_account_id": "chatgpt-acc"},
		Extra:          map[string]any{"openai_passthrough": true},
		Status:         StatusActive,
		Schedulable:    true,
		RateMultiplier: f64p(1),
	}
	result, err := svc.Forward(context.Background(), c, account, originalBody)
	require.NoError(t, err)
	require.NotNil(t, result)
	require.False(t, result.Stream)
	// Compact normalization must drop store/stream but keep the payload fields.
	require.False(t, gjson.GetBytes(upstream.lastBody, "store").Exists())
	require.False(t, gjson.GetBytes(upstream.lastBody, "stream").Exists())
	require.Equal(t, "gpt-5.1-codex", gjson.GetBytes(upstream.lastBody, "model").String())
	require.Equal(t, "compact me", gjson.GetBytes(upstream.lastBody, "input.0.text").String())
	require.Equal(t, "local-test-instructions", strings.TrimSpace(gjson.GetBytes(upstream.lastBody, "instructions").String()))
	// Compact-specific upstream headers.
	require.Equal(t, "application/json", upstream.lastReq.Header.Get("Accept"))
	require.Equal(t, codexCLIVersion, upstream.lastReq.Header.Get("Version"))
	require.NotEmpty(t, upstream.lastReq.Header.Get("Session_Id"))
	require.Equal(t, "chatgpt.com", upstream.lastReq.Host)
	require.Equal(t, "chatgpt-acc", upstream.lastReq.Header.Get("chatgpt-account-id"))
	// Upstream body is relayed to the downstream client.
	require.Contains(t, rec.Body.String(), `"id":"cmp_123"`)
}
func TestOpenAIGatewayService_OAuthPassthrough_CodexMissingInstructionsRejectedBeforeUpstream(t *testing.T) {
gin.SetMode(gin.TestMode)
logSink, restore := captureStructuredLog(t)

View File

@@ -83,14 +83,7 @@ func (s *FrontendServer) Middleware() gin.HandlerFunc {
path := c.Request.URL.Path
// Skip API routes
if strings.HasPrefix(path, "/api/") ||
strings.HasPrefix(path, "/v1/") ||
strings.HasPrefix(path, "/v1beta/") ||
strings.HasPrefix(path, "/sora/") ||
strings.HasPrefix(path, "/antigravity/") ||
strings.HasPrefix(path, "/setup/") ||
path == "/health" ||
path == "/responses" {
if shouldBypassEmbeddedFrontend(path) {
c.Next()
return
}
@@ -207,14 +200,7 @@ func ServeEmbeddedFrontend() gin.HandlerFunc {
return func(c *gin.Context) {
path := c.Request.URL.Path
if strings.HasPrefix(path, "/api/") ||
strings.HasPrefix(path, "/v1/") ||
strings.HasPrefix(path, "/v1beta/") ||
strings.HasPrefix(path, "/sora/") ||
strings.HasPrefix(path, "/antigravity/") ||
strings.HasPrefix(path, "/setup/") ||
path == "/health" ||
path == "/responses" {
if shouldBypassEmbeddedFrontend(path) {
c.Next()
return
}
@@ -235,6 +221,19 @@ func ServeEmbeddedFrontend() gin.HandlerFunc {
}
}
// shouldBypassEmbeddedFrontend reports whether the request path belongs to an
// API surface that must skip the embedded SPA frontend and fall through to the
// API routes ("/health" and "/responses" match exactly; the rest by prefix).
func shouldBypassEmbeddedFrontend(path string) bool {
	p := strings.TrimSpace(path)
	if p == "/health" || p == "/responses" {
		return true
	}
	apiPrefixes := []string{
		"/api/",
		"/v1/",
		"/v1beta/",
		"/sora/",
		"/antigravity/",
		"/setup/",
		"/responses/",
	}
	for _, prefix := range apiPrefixes {
		if strings.HasPrefix(p, prefix) {
			return true
		}
	}
	return false
}
func serveIndexHTML(c *gin.Context, fsys fs.FS) {
file, err := fsys.Open("index.html")
if err != nil {

View File

@@ -367,6 +367,7 @@ func TestFrontendServer_Middleware(t *testing.T) {
"/setup/init",
"/health",
"/responses",
"/responses/compact",
}
for _, path := range apiPaths {
@@ -388,6 +389,32 @@ func TestFrontendServer_Middleware(t *testing.T) {
}
})
t.Run("skips_responses_compact_post_routes", func(t *testing.T) {
provider := &mockSettingsProvider{
settings: map[string]string{"test": "value"},
}
server, err := NewFrontendServer(provider)
require.NoError(t, err)
router := gin.New()
router.Use(server.Middleware())
nextCalled := false
router.POST("/responses/compact", func(c *gin.Context) {
nextCalled = true
c.String(http.StatusOK, `{"ok":true}`)
})
w := httptest.NewRecorder()
req := httptest.NewRequest(http.MethodPost, "/responses/compact", strings.NewReader(`{"model":"gpt-5"}`))
req.Header.Set("Content-Type", "application/json")
router.ServeHTTP(w, req)
assert.True(t, nextCalled, "next handler should be called for compact API route")
assert.Equal(t, http.StatusOK, w.Code)
assert.JSONEq(t, `{"ok":true}`, w.Body.String())
})
t.Run("serves_index_for_spa_routes", func(t *testing.T) {
provider := &mockSettingsProvider{
settings: map[string]string{"test": "value"},
@@ -543,6 +570,7 @@ func TestServeEmbeddedFrontend(t *testing.T) {
"/setup/init",
"/health",
"/responses",
"/responses/compact",
}
for _, path := range apiPaths {