diff --git a/backend/internal/handler/openai_gateway_handler.go b/backend/internal/handler/openai_gateway_handler.go
index 3011b97d..afb1ddc9 100644
--- a/backend/internal/handler/openai_gateway_handler.go
+++ b/backend/internal/handler/openai_gateway_handler.go
@@ -96,6 +96,8 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
 		return
 	}
 
+	seedOpenAISessionHeaders(c, reqBody)
+
 	userAgent := c.GetHeader("User-Agent")
 	if !openai.IsCodexCLIRequest(userAgent) {
 		existingInstructions, _ := reqBody["instructions"].(string)
@@ -299,6 +301,37 @@ func (h *OpenAIGatewayHandler) handleFailoverExhausted(c *gin.Context, statusCod
 	h.handleStreamingAwareError(c, status, errType, errMsg, streamStarted)
 }
 
+func seedOpenAISessionHeaders(c *gin.Context, reqBody map[string]any) {
+	if c.GetHeader("session_id") == "" {
+		if v := firstNonEmptyString(
+			reqBody["prompt_cache_key"],
+			reqBody["session_id"],
+			reqBody["conversation_id"],
+			reqBody["previous_response_id"],
+		); v != "" {
+			c.Request.Header.Set("session_id", v)
+		}
+	}
+	if c.GetHeader("conversation_id") == "" {
+		if v := firstNonEmptyString(reqBody["prompt_cache_key"], reqBody["conversation_id"]); v != "" {
+			c.Request.Header.Set("conversation_id", v)
+		}
+	}
+}
+
+func firstNonEmptyString(values ...any) string {
+	for _, value := range values {
+		s, ok := value.(string)
+		if ok {
+			s = strings.TrimSpace(s)
+			if s != "" {
+				return s
+			}
+		}
+	}
+	return ""
+}
+
 func (h *OpenAIGatewayHandler) mapUpstreamError(statusCode int) (int, string, string) {
 	switch statusCode {
 	case 401:
diff --git a/backend/internal/service/openai_codex_transform.go b/backend/internal/service/openai_codex_transform.go
index 473f9209..97b405b2 100644
--- a/backend/internal/service/openai_codex_transform.go
+++ b/backend/internal/service/openai_codex_transform.go
@@ -317,15 +317,36 @@ func filterCodexInput(input []any) []any {
 			filtered = append(filtered, item)
 			continue
 		}
-		if typ, ok := m["type"].(string); ok && typ == "item_reference" {
+		typ, _ := m["type"].(string)
+		if typ == "item_reference" {
+			filtered = append(filtered, m)
 			continue
 		}
+		// Strip per-item ids; keep call_id only for tool call items so outputs can match.
+		if isCodexToolCallItemType(typ) {
+			callID, _ := m["call_id"].(string)
+			if strings.TrimSpace(callID) == "" {
+				if id, ok := m["id"].(string); ok && strings.TrimSpace(id) != "" {
+					m["call_id"] = id
+				}
+			}
+		}
 		delete(m, "id")
+		if !isCodexToolCallItemType(typ) {
+			delete(m, "call_id")
+		}
 		filtered = append(filtered, m)
 	}
 	return filtered
 }
 
+func isCodexToolCallItemType(typ string) bool {
+	if typ == "" {
+		return false
+	}
+	return strings.HasSuffix(typ, "_call") || strings.HasSuffix(typ, "_call_output")
+}
+
 func normalizeCodexTools(reqBody map[string]any) bool {
 	rawTools, ok := reqBody["tools"]
 	if !ok || rawTools == nil {
diff --git a/backend/internal/service/openai_gateway_service.go b/backend/internal/service/openai_gateway_service.go
index e86aa2d3..ffd42d2f 100644
--- a/backend/internal/service/openai_gateway_service.go
+++ b/backend/internal/service/openai_gateway_service.go
@@ -42,6 +42,7 @@ var openaiSSEDataRe = regexp.MustCompile(`^data:\s*`)
 var openaiAllowedHeaders = map[string]bool{
 	"accept-language": true,
 	"content-type":    true,
+	"conversation_id": true,
 	"user-agent":      true,
 	"originator":      true,
 	"session_id":      true,
@@ -553,6 +554,27 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
 		bodyModified = true
 	}
 
+	// Apply Codex model normalization for all OpenAI accounts
+	if model, ok := reqBody["model"].(string); ok {
+		normalizedModel := normalizeCodexModel(model)
+		if normalizedModel != "" && normalizedModel != model {
+			log.Printf("[OpenAI] Codex model normalization: %s -> %s (account: %s, type: %s, isCodexCLI: %v)",
+				model, normalizedModel, account.Name, account.Type, isCodexCLI)
+			reqBody["model"] = normalizedModel
+			mappedModel = normalizedModel
+			bodyModified = true
+		}
+	}
+
+	// Normalize reasoning.effort parameter (minimal -> none)
+	if reasoning, ok := reqBody["reasoning"].(map[string]any); ok {
+		if effort, ok := reasoning["effort"].(string); ok && effort == "minimal" {
+			reasoning["effort"] = "none"
+			bodyModified = true
+			log.Printf("[OpenAI] Normalized reasoning.effort: minimal -> none (account: %s)", account.Name)
+		}
+	}
+
 	if account.Type == AccountTypeOAuth && !isCodexCLI {
 		codexResult := applyCodexOAuthTransform(reqBody)
 		if codexResult.Modified {
@@ -783,9 +805,6 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
 	if promptCacheKey != "" {
 		req.Header.Set("conversation_id", promptCacheKey)
 		req.Header.Set("session_id", promptCacheKey)
-	} else {
-		req.Header.Del("conversation_id")
-		req.Header.Del("session_id")
 	}
 }
 