diff --git a/backend/internal/handler/openai_gateway_handler.go b/backend/internal/handler/openai_gateway_handler.go
index 068e80ea..8c7d7d52 100644
--- a/backend/internal/handler/openai_gateway_handler.go
+++ b/backend/internal/handler/openai_gateway_handler.go
@@ -8,6 +8,7 @@ import (
"io"
"log"
"net/http"
+ "strings"
"time"
"github.com/Wei-Shaw/sub2api/internal/config"
@@ -93,19 +94,23 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
return
}
- // For non-Codex CLI requests, set default instructions
userAgent := c.GetHeader("User-Agent")
// 获取客户端 IP
clientIP := ip.GetClientIP(c)
if !openai.IsCodexCLIRequest(userAgent) {
- reqBody["instructions"] = openai.DefaultInstructions
- // Re-serialize body
- body, err = json.Marshal(reqBody)
- if err != nil {
- h.errorResponse(c, http.StatusInternalServerError, "api_error", "Failed to process request")
- return
+ existingInstructions, _ := reqBody["instructions"].(string)
+ if strings.TrimSpace(existingInstructions) == "" {
+ if instructions := strings.TrimSpace(service.GetOpenCodeInstructions()); instructions != "" {
+ reqBody["instructions"] = instructions
+ // Re-serialize body
+ body, err = json.Marshal(reqBody)
+ if err != nil {
+ h.errorResponse(c, http.StatusInternalServerError, "api_error", "Failed to process request")
+ return
+ }
+ }
}
}
diff --git a/backend/internal/service/openai_codex_transform.go b/backend/internal/service/openai_codex_transform.go
new file mode 100644
index 00000000..965fb770
--- /dev/null
+++ b/backend/internal/service/openai_codex_transform.go
@@ -0,0 +1,528 @@
+package service
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+const (
+ opencodeCodexHeaderURL = "https://raw.githubusercontent.com/anomalyco/opencode/dev/packages/opencode/src/session/prompt/codex_header.txt"
+ codexCacheTTL = 15 * time.Minute
+)
+
+var codexModelMap = map[string]string{
+ "gpt-5.1-codex": "gpt-5.1-codex",
+ "gpt-5.1-codex-low": "gpt-5.1-codex",
+ "gpt-5.1-codex-medium": "gpt-5.1-codex",
+ "gpt-5.1-codex-high": "gpt-5.1-codex",
+ "gpt-5.1-codex-max": "gpt-5.1-codex-max",
+ "gpt-5.1-codex-max-low": "gpt-5.1-codex-max",
+ "gpt-5.1-codex-max-medium": "gpt-5.1-codex-max",
+ "gpt-5.1-codex-max-high": "gpt-5.1-codex-max",
+ "gpt-5.1-codex-max-xhigh": "gpt-5.1-codex-max",
+ "gpt-5.2": "gpt-5.2",
+ "gpt-5.2-none": "gpt-5.2",
+ "gpt-5.2-low": "gpt-5.2",
+ "gpt-5.2-medium": "gpt-5.2",
+ "gpt-5.2-high": "gpt-5.2",
+ "gpt-5.2-xhigh": "gpt-5.2",
+ "gpt-5.2-codex": "gpt-5.2-codex",
+ "gpt-5.2-codex-low": "gpt-5.2-codex",
+ "gpt-5.2-codex-medium": "gpt-5.2-codex",
+ "gpt-5.2-codex-high": "gpt-5.2-codex",
+ "gpt-5.2-codex-xhigh": "gpt-5.2-codex",
+ "gpt-5.1-codex-mini": "gpt-5.1-codex-mini",
+ "gpt-5.1-codex-mini-medium": "gpt-5.1-codex-mini",
+ "gpt-5.1-codex-mini-high": "gpt-5.1-codex-mini",
+ "gpt-5.1": "gpt-5.1",
+ "gpt-5.1-none": "gpt-5.1",
+ "gpt-5.1-low": "gpt-5.1",
+ "gpt-5.1-medium": "gpt-5.1",
+ "gpt-5.1-high": "gpt-5.1",
+ "gpt-5.1-chat-latest": "gpt-5.1",
+ "gpt-5-codex": "gpt-5.1-codex",
+ "codex-mini-latest": "gpt-5.1-codex-mini",
+ "gpt-5-codex-mini": "gpt-5.1-codex-mini",
+ "gpt-5-codex-mini-medium": "gpt-5.1-codex-mini",
+ "gpt-5-codex-mini-high": "gpt-5.1-codex-mini",
+ "gpt-5": "gpt-5.1",
+ "gpt-5-mini": "gpt-5.1",
+ "gpt-5-nano": "gpt-5.1",
+}
+
+type codexTransformResult struct {
+ Modified bool
+ NormalizedModel string
+ PromptCacheKey string
+}
+
+type opencodeCacheMetadata struct {
+ ETag string `json:"etag"`
+ LastFetch string `json:"lastFetch,omitempty"`
+ LastChecked int64 `json:"lastChecked"`
+}
+
+func applyCodexOAuthTransform(reqBody map[string]any) codexTransformResult {
+ result := codexTransformResult{}
+
+ model := ""
+ if v, ok := reqBody["model"].(string); ok {
+ model = v
+ }
+ normalizedModel := normalizeCodexModel(model)
+ if normalizedModel != "" {
+ if model != normalizedModel {
+ reqBody["model"] = normalizedModel
+ result.Modified = true
+ }
+ result.NormalizedModel = normalizedModel
+ }
+
+ if v, ok := reqBody["store"].(bool); !ok || v {
+ reqBody["store"] = false
+ result.Modified = true
+ }
+ if v, ok := reqBody["stream"].(bool); !ok || !v {
+ reqBody["stream"] = true
+ result.Modified = true
+ }
+
+ if _, ok := reqBody["max_output_tokens"]; ok {
+ delete(reqBody, "max_output_tokens")
+ result.Modified = true
+ }
+ if _, ok := reqBody["max_completion_tokens"]; ok {
+ delete(reqBody, "max_completion_tokens")
+ result.Modified = true
+ }
+
+ if normalizeCodexTools(reqBody) {
+ result.Modified = true
+ }
+
+ if v, ok := reqBody["prompt_cache_key"].(string); ok {
+ result.PromptCacheKey = strings.TrimSpace(v)
+ }
+
+ instructions := strings.TrimSpace(getOpenCodeCodexHeader())
+ existingInstructions, _ := reqBody["instructions"].(string)
+ existingInstructions = strings.TrimSpace(existingInstructions)
+
+ if instructions != "" {
+ if existingInstructions != "" && existingInstructions != instructions {
+ if input, ok := reqBody["input"].([]any); ok {
+ reqBody["input"] = prependSystemInstruction(input, existingInstructions)
+ result.Modified = true
+ }
+ }
+ if existingInstructions != instructions {
+ reqBody["instructions"] = instructions
+ result.Modified = true
+ }
+ }
+
+ if input, ok := reqBody["input"].([]any); ok {
+ input = filterCodexInput(input)
+ input = normalizeOrphanedToolOutputs(input)
+ reqBody["input"] = input
+ result.Modified = true
+ }
+
+ return result
+}
+
+func normalizeCodexModel(model string) string {
+ if model == "" {
+ return "gpt-5.1"
+ }
+
+ modelID := model
+ if strings.Contains(modelID, "/") {
+ parts := strings.Split(modelID, "/")
+ modelID = parts[len(parts)-1]
+ }
+
+ if mapped := getNormalizedCodexModel(modelID); mapped != "" {
+ return mapped
+ }
+
+ normalized := strings.ToLower(modelID)
+
+ if strings.Contains(normalized, "gpt-5.2-codex") || strings.Contains(normalized, "gpt 5.2 codex") {
+ return "gpt-5.2-codex"
+ }
+ if strings.Contains(normalized, "gpt-5.2") || strings.Contains(normalized, "gpt 5.2") {
+ return "gpt-5.2"
+ }
+ if strings.Contains(normalized, "gpt-5.1-codex-max") || strings.Contains(normalized, "gpt 5.1 codex max") {
+ return "gpt-5.1-codex-max"
+ }
+ if strings.Contains(normalized, "gpt-5.1-codex-mini") || strings.Contains(normalized, "gpt 5.1 codex mini") {
+ return "gpt-5.1-codex-mini"
+ }
+ if strings.Contains(normalized, "codex-mini-latest") ||
+ strings.Contains(normalized, "gpt-5-codex-mini") ||
+ strings.Contains(normalized, "gpt 5 codex mini") {
+ return "gpt-5.1-codex-mini"
+ }
+ if strings.Contains(normalized, "gpt-5.1-codex") || strings.Contains(normalized, "gpt 5.1 codex") {
+ return "gpt-5.1-codex"
+ }
+ if strings.Contains(normalized, "gpt-5.1") || strings.Contains(normalized, "gpt 5.1") {
+ return "gpt-5.1"
+ }
+ if strings.Contains(normalized, "codex") {
+ return "gpt-5.1-codex"
+ }
+ if strings.Contains(normalized, "gpt-5") || strings.Contains(normalized, "gpt 5") {
+ return "gpt-5.1"
+ }
+
+ return "gpt-5.1"
+}
+
+func getNormalizedCodexModel(modelID string) string {
+ if modelID == "" {
+ return ""
+ }
+ if mapped, ok := codexModelMap[modelID]; ok {
+ return mapped
+ }
+ lower := strings.ToLower(modelID)
+ for key, value := range codexModelMap {
+ if strings.ToLower(key) == lower {
+ return value
+ }
+ }
+ return ""
+}
+
+func getOpenCodeCachedPrompt(url, cacheFileName, metaFileName string) string {
+ cacheDir := codexCachePath("")
+ if cacheDir == "" {
+ return ""
+ }
+ cacheFile := filepath.Join(cacheDir, cacheFileName)
+ metaFile := filepath.Join(cacheDir, metaFileName)
+
+ var cachedContent string
+ if content, ok := readFile(cacheFile); ok {
+ cachedContent = content
+ }
+
+ var meta opencodeCacheMetadata
+ if loadJSON(metaFile, &meta) && meta.LastChecked > 0 && cachedContent != "" {
+ if time.Since(time.UnixMilli(meta.LastChecked)) < codexCacheTTL {
+ return cachedContent
+ }
+ }
+
+ content, etag, status, err := fetchWithETag(url, meta.ETag)
+ if err == nil && status == http.StatusNotModified && cachedContent != "" {
+ return cachedContent
+ }
+ if err == nil && status >= 200 && status < 300 && content != "" {
+ _ = writeFile(cacheFile, content)
+ meta = opencodeCacheMetadata{
+ ETag: etag,
+ LastFetch: time.Now().UTC().Format(time.RFC3339),
+ LastChecked: time.Now().UnixMilli(),
+ }
+ _ = writeJSON(metaFile, meta)
+ return content
+ }
+
+ return cachedContent
+}
+
+func getOpenCodeCodexHeader() string {
+ return getOpenCodeCachedPrompt(opencodeCodexHeaderURL, "opencode-codex-header.txt", "opencode-codex-header-meta.json")
+}
+
+func GetOpenCodeInstructions() string {
+ return getOpenCodeCodexHeader()
+}
+
+func filterCodexInput(input []any) []any {
+ filtered := make([]any, 0, len(input))
+ for _, item := range input {
+ m, ok := item.(map[string]any)
+ if !ok {
+ filtered = append(filtered, item)
+ continue
+ }
+ if typ, ok := m["type"].(string); ok && typ == "item_reference" {
+ continue
+ }
+ delete(m, "id")
+ filtered = append(filtered, m)
+ }
+ return filtered
+}
+
+func prependSystemInstruction(input []any, instructions string) []any {
+ message := map[string]any{
+ "role": "system",
+ "content": []any{
+ map[string]any{
+ "type": "input_text",
+ "text": instructions,
+ },
+ },
+ }
+ return append([]any{message}, input...)
+}
+
+func normalizeCodexTools(reqBody map[string]any) bool {
+ rawTools, ok := reqBody["tools"]
+ if !ok || rawTools == nil {
+ return false
+ }
+ tools, ok := rawTools.([]any)
+ if !ok {
+ return false
+ }
+
+ modified := false
+ for idx, tool := range tools {
+ toolMap, ok := tool.(map[string]any)
+ if !ok {
+ continue
+ }
+
+ toolType, _ := toolMap["type"].(string)
+ if strings.TrimSpace(toolType) != "function" {
+ continue
+ }
+
+ function, ok := toolMap["function"].(map[string]any)
+ if !ok {
+ continue
+ }
+
+ if _, ok := toolMap["name"]; !ok {
+ if name, ok := function["name"].(string); ok && strings.TrimSpace(name) != "" {
+ toolMap["name"] = name
+ modified = true
+ }
+ }
+ if _, ok := toolMap["description"]; !ok {
+ if desc, ok := function["description"].(string); ok && strings.TrimSpace(desc) != "" {
+ toolMap["description"] = desc
+ modified = true
+ }
+ }
+ if _, ok := toolMap["parameters"]; !ok {
+ if params, ok := function["parameters"]; ok {
+ toolMap["parameters"] = params
+ modified = true
+ }
+ }
+ if _, ok := toolMap["strict"]; !ok {
+ if strict, ok := function["strict"]; ok {
+ toolMap["strict"] = strict
+ modified = true
+ }
+ }
+
+ tools[idx] = toolMap
+ }
+
+ if modified {
+ reqBody["tools"] = tools
+ }
+
+ return modified
+}
+
+func normalizeOrphanedToolOutputs(input []any) []any {
+ functionCallIDs := map[string]bool{}
+ localShellCallIDs := map[string]bool{}
+ customToolCallIDs := map[string]bool{}
+
+ for _, item := range input {
+ m, ok := item.(map[string]any)
+ if !ok {
+ continue
+ }
+ callID := getCallID(m)
+ if callID == "" {
+ continue
+ }
+ switch m["type"] {
+ case "function_call":
+ functionCallIDs[callID] = true
+ case "local_shell_call":
+ localShellCallIDs[callID] = true
+ case "custom_tool_call":
+ customToolCallIDs[callID] = true
+ }
+ }
+
+ output := make([]any, 0, len(input))
+ for _, item := range input {
+ m, ok := item.(map[string]any)
+ if !ok {
+ output = append(output, item)
+ continue
+ }
+ switch m["type"] {
+ case "function_call_output":
+ callID := getCallID(m)
+ if callID == "" || (!functionCallIDs[callID] && !localShellCallIDs[callID]) {
+ output = append(output, convertOrphanedOutputToMessage(m, callID))
+ continue
+ }
+ case "custom_tool_call_output":
+ callID := getCallID(m)
+ if callID == "" || !customToolCallIDs[callID] {
+ output = append(output, convertOrphanedOutputToMessage(m, callID))
+ continue
+ }
+ case "local_shell_call_output":
+ callID := getCallID(m)
+ if callID == "" || !localShellCallIDs[callID] {
+ output = append(output, convertOrphanedOutputToMessage(m, callID))
+ continue
+ }
+ }
+ output = append(output, m)
+ }
+ return output
+}
+
+func getCallID(item map[string]any) string {
+ raw, ok := item["call_id"]
+ if !ok {
+ return ""
+ }
+ callID, ok := raw.(string)
+ if !ok {
+ return ""
+ }
+ callID = strings.TrimSpace(callID)
+ if callID == "" {
+ return ""
+ }
+ return callID
+}
+
+func convertOrphanedOutputToMessage(item map[string]any, callID string) map[string]any {
+ toolName := "tool"
+ if name, ok := item["name"].(string); ok && name != "" {
+ toolName = name
+ }
+ labelID := callID
+ if labelID == "" {
+ labelID = "unknown"
+ }
+ text := stringifyOutput(item["output"])
+ if len(text) > 16000 {
+ text = text[:16000] + "\n...[truncated]"
+ }
+ return map[string]any{
+ "type": "message",
+ "role": "assistant",
+ "content": fmt.Sprintf("[Previous %s result; call_id=%s]: %s", toolName, labelID, text),
+ }
+}
+
+func stringifyOutput(output any) string {
+ switch v := output.(type) {
+ case string:
+ return v
+ default:
+ if data, err := json.Marshal(v); err == nil {
+ return string(data)
+ }
+ return fmt.Sprintf("%v", v)
+ }
+}
+
+func codexCachePath(filename string) string {
+ home, err := os.UserHomeDir()
+ if err != nil {
+ return ""
+ }
+ cacheDir := filepath.Join(home, ".opencode", "cache")
+ if filename == "" {
+ return cacheDir
+ }
+ return filepath.Join(cacheDir, filename)
+}
+
+func readFile(path string) (string, bool) {
+ if path == "" {
+ return "", false
+ }
+ data, err := os.ReadFile(path)
+ if err != nil {
+ return "", false
+ }
+ return string(data), true
+}
+
+func writeFile(path, content string) error {
+ if path == "" {
+ return fmt.Errorf("empty cache path")
+ }
+ if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
+ return err
+ }
+ return os.WriteFile(path, []byte(content), 0o644)
+}
+
+func loadJSON(path string, target any) bool {
+ data, err := os.ReadFile(path)
+ if err != nil {
+ return false
+ }
+ if err := json.Unmarshal(data, target); err != nil {
+ return false
+ }
+ return true
+}
+
+func writeJSON(path string, value any) error {
+ if path == "" {
+ return fmt.Errorf("empty json path")
+ }
+ if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
+ return err
+ }
+ data, err := json.Marshal(value)
+ if err != nil {
+ return err
+ }
+ return os.WriteFile(path, data, 0o644)
+}
+
+func fetchWithETag(url, etag string) (string, string, int, error) {
+ req, err := http.NewRequest(http.MethodGet, url, nil)
+ if err != nil {
+ return "", "", 0, err
+ }
+ req.Header.Set("User-Agent", "sub2api-codex")
+ if etag != "" {
+ req.Header.Set("If-None-Match", etag)
+ }
+ resp, err := (&http.Client{Timeout: 15 * time.Second}).Do(req)
+ if err != nil {
+ return "", "", 0, err
+ }
+ defer func() {
+ _ = resp.Body.Close()
+ }()
+
+ body, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return "", "", resp.StatusCode, err
+ }
+ return string(body), resp.Header.Get("etag"), resp.StatusCode, nil
+}
diff --git a/backend/internal/service/openai_gateway_service.go b/backend/internal/service/openai_gateway_service.go
index 9d365ad6..8b1f214b 100644
--- a/backend/internal/service/openai_gateway_service.go
+++ b/backend/internal/service/openai_gateway_service.go
@@ -12,6 +12,7 @@ import (
"io"
"log"
"net/http"
+ "os"
"regexp"
"sort"
"strconv"
@@ -20,6 +21,7 @@ import (
"time"
"github.com/Wei-Shaw/sub2api/internal/config"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/openai"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
"github.com/gin-gonic/gin"
@@ -528,33 +530,38 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
// Extract model and stream from parsed body
reqModel, _ := reqBody["model"].(string)
reqStream, _ := reqBody["stream"].(bool)
+ promptCacheKey := ""
+ if v, ok := reqBody["prompt_cache_key"].(string); ok {
+ promptCacheKey = strings.TrimSpace(v)
+ }
// Track if body needs re-serialization
bodyModified := false
originalModel := reqModel
- // Apply model mapping
- mappedModel := account.GetMappedModel(reqModel)
- if mappedModel != reqModel {
- reqBody["model"] = mappedModel
- bodyModified = true
+ isCodexCLI := openai.IsCodexCLIRequest(c.GetHeader("User-Agent"))
+
+ // Apply model mapping (skip for Codex CLI for transparent forwarding)
+ mappedModel := reqModel
+ if !isCodexCLI {
+ mappedModel = account.GetMappedModel(reqModel)
+ if mappedModel != reqModel {
+ reqBody["model"] = mappedModel
+ bodyModified = true
+ }
}
- // For OAuth accounts using ChatGPT internal API:
- // 1. Add store: false
- // 2. Normalize input format for Codex API compatibility
- if account.Type == AccountTypeOAuth {
- reqBody["store"] = false
- // Codex 上游不接受 max_output_tokens 参数,需要在转发前移除。
- delete(reqBody, "max_output_tokens")
- bodyModified = true
-
- // Normalize input format: convert AI SDK multi-part content format to simplified format
- // AI SDK sends: {"content": [{"type": "input_text", "text": "..."}]}
- // Codex API expects: {"content": "..."}
- if normalizeInputForCodexAPI(reqBody) {
+ if account.Type == AccountTypeOAuth && !isCodexCLI {
+ codexResult := applyCodexOAuthTransform(reqBody)
+ if codexResult.Modified {
bodyModified = true
}
+ if codexResult.NormalizedModel != "" {
+ mappedModel = codexResult.NormalizedModel
+ }
+ if codexResult.PromptCacheKey != "" {
+ promptCacheKey = codexResult.PromptCacheKey
+ }
}
// Re-serialize body only if modified
@@ -573,7 +580,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
}
// Build upstream request
- upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, reqStream)
+ upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, reqStream, promptCacheKey, isCodexCLI)
if err != nil {
return nil, err
}
@@ -634,7 +641,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
}, nil
}
-func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token string, isStream bool) (*http.Request, error) {
+func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token string, isStream bool, promptCacheKey string, isCodexCLI bool) (*http.Request, error) {
// Determine target URL based on account type
var targetURL string
switch account.Type {
@@ -674,12 +681,6 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
if chatgptAccountID != "" {
req.Header.Set("chatgpt-account-id", chatgptAccountID)
}
- // Set accept header based on stream mode
- if isStream {
- req.Header.Set("accept", "text/event-stream")
- } else {
- req.Header.Set("accept", "application/json")
- }
}
// Whitelist passthrough headers
@@ -691,6 +692,22 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
}
}
}
+ if account.Type == AccountTypeOAuth {
+ req.Header.Set("OpenAI-Beta", "responses=experimental")
+ if isCodexCLI {
+ req.Header.Set("originator", "codex_cli_rs")
+ } else {
+ req.Header.Set("originator", "opencode")
+ }
+ req.Header.Set("accept", "text/event-stream")
+ if promptCacheKey != "" {
+ req.Header.Set("conversation_id", promptCacheKey)
+ req.Header.Set("session_id", promptCacheKey)
+ } else {
+ req.Header.Del("conversation_id")
+ req.Header.Del("session_id")
+ }
+ }
// Apply custom User-Agent if configured
customUA := account.GetOpenAIUserAgent()
@@ -708,6 +725,7 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
func (s *OpenAIGatewayService) handleErrorResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account) (*OpenAIForwardResult, error) {
body, _ := io.ReadAll(resp.Body)
+ logUpstreamErrorBody(account.ID, resp.StatusCode, body)
// Check custom error codes
if !account.ShouldHandleErrorCode(resp.StatusCode) {
@@ -766,6 +784,24 @@ func (s *OpenAIGatewayService) handleErrorResponse(ctx context.Context, resp *ht
return nil, fmt.Errorf("upstream error: %d", resp.StatusCode)
}
+func logUpstreamErrorBody(accountID int64, statusCode int, body []byte) {
+ if strings.ToLower(strings.TrimSpace(os.Getenv("GATEWAY_LOG_UPSTREAM_ERROR_BODY"))) != "true" {
+ return
+ }
+
+ maxBytes := 2048
+ if rawMax := strings.TrimSpace(os.Getenv("GATEWAY_LOG_UPSTREAM_ERROR_BODY_MAX_BYTES")); rawMax != "" {
+ if parsed, err := strconv.Atoi(rawMax); err == nil && parsed > 0 {
+ maxBytes = parsed
+ }
+ }
+ if len(body) > maxBytes {
+ body = body[:maxBytes]
+ }
+
+ log.Printf("Upstream error body: account=%d status=%d body=%q", accountID, statusCode, string(body))
+}
+
// openaiStreamingResult streaming response result
type openaiStreamingResult struct {
usage *OpenAIUsage
@@ -1018,6 +1054,13 @@ func (s *OpenAIGatewayService) handleNonStreamingResponse(ctx context.Context, r
return nil, err
}
+ if account.Type == AccountTypeOAuth {
+ trimmedBody := bytes.TrimSpace(body)
+ if isEventStreamResponse(resp.Header) || bytes.HasPrefix(trimmedBody, []byte("data:")) || bytes.HasPrefix(trimmedBody, []byte("event:")) {
+ return s.handleOAuthSSEToJSON(resp, c, body, originalModel, mappedModel)
+ }
+ }
+
// Parse usage
var response struct {
Usage struct {
@@ -1057,6 +1100,110 @@ func (s *OpenAIGatewayService) handleNonStreamingResponse(ctx context.Context, r
return usage, nil
}
+func isEventStreamResponse(header http.Header) bool {
+ contentType := strings.ToLower(header.Get("Content-Type"))
+ return strings.Contains(contentType, "text/event-stream")
+}
+
+func (s *OpenAIGatewayService) handleOAuthSSEToJSON(resp *http.Response, c *gin.Context, body []byte, originalModel, mappedModel string) (*OpenAIUsage, error) {
+ bodyText := string(body)
+ finalResponse, ok := extractCodexFinalResponse(bodyText)
+
+ usage := &OpenAIUsage{}
+ if ok {
+ var response struct {
+ Usage struct {
+ InputTokens int `json:"input_tokens"`
+ OutputTokens int `json:"output_tokens"`
+ InputTokenDetails struct {
+ CachedTokens int `json:"cached_tokens"`
+ } `json:"input_tokens_details"`
+ } `json:"usage"`
+ }
+ if err := json.Unmarshal(finalResponse, &response); err == nil {
+ usage.InputTokens = response.Usage.InputTokens
+ usage.OutputTokens = response.Usage.OutputTokens
+ usage.CacheReadInputTokens = response.Usage.InputTokenDetails.CachedTokens
+ }
+ body = finalResponse
+ if originalModel != mappedModel {
+ body = s.replaceModelInResponseBody(body, mappedModel, originalModel)
+ }
+ } else {
+ usage = s.parseSSEUsageFromBody(bodyText)
+ if originalModel != mappedModel {
+ bodyText = s.replaceModelInSSEBody(bodyText, mappedModel, originalModel)
+ }
+ body = []byte(bodyText)
+ }
+
+ responseheaders.WriteFilteredHeaders(c.Writer.Header(), resp.Header, s.cfg.Security.ResponseHeaders)
+
+ contentType := "application/json; charset=utf-8"
+ if !ok {
+ contentType = resp.Header.Get("Content-Type")
+ if contentType == "" {
+ contentType = "text/event-stream"
+ }
+ }
+ c.Data(resp.StatusCode, contentType, body)
+
+ return usage, nil
+}
+
+func extractCodexFinalResponse(body string) ([]byte, bool) {
+ lines := strings.Split(body, "\n")
+ for _, line := range lines {
+ if !openaiSSEDataRe.MatchString(line) {
+ continue
+ }
+ data := openaiSSEDataRe.ReplaceAllString(line, "")
+ if data == "" || data == "[DONE]" {
+ continue
+ }
+ var event struct {
+ Type string `json:"type"`
+ Response json.RawMessage `json:"response"`
+ }
+ if json.Unmarshal([]byte(data), &event) != nil {
+ continue
+ }
+ if event.Type == "response.done" || event.Type == "response.completed" {
+ if len(event.Response) > 0 {
+ return event.Response, true
+ }
+ }
+ }
+ return nil, false
+}
+
+func (s *OpenAIGatewayService) parseSSEUsageFromBody(body string) *OpenAIUsage {
+ usage := &OpenAIUsage{}
+ lines := strings.Split(body, "\n")
+ for _, line := range lines {
+ if !openaiSSEDataRe.MatchString(line) {
+ continue
+ }
+ data := openaiSSEDataRe.ReplaceAllString(line, "")
+ if data == "" || data == "[DONE]" {
+ continue
+ }
+ s.parseSSEUsage(data, usage)
+ }
+ return usage
+}
+
+func (s *OpenAIGatewayService) replaceModelInSSEBody(body, fromModel, toModel string) string {
+ lines := strings.Split(body, "\n")
+ for i, line := range lines {
+ if !openaiSSEDataRe.MatchString(line) {
+ continue
+ }
+ lines[i] = s.replaceModelInSSELine(line, fromModel, toModel)
+ }
+ return strings.Join(lines, "\n")
+}
+
func (s *OpenAIGatewayService) validateUpstreamBaseURL(raw string) (string, error) {
if s.cfg != nil && !s.cfg.Security.URLAllowlist.Enabled {
normalized, err := urlvalidator.ValidateURLFormat(raw, s.cfg.Security.URLAllowlist.AllowInsecureHTTP)
@@ -1096,101 +1243,6 @@ func (s *OpenAIGatewayService) replaceModelInResponseBody(body []byte, fromModel
return newBody
}
-// normalizeInputForCodexAPI converts AI SDK multi-part content format to simplified format
-// that the ChatGPT internal Codex API expects.
-//
-// AI SDK sends content as an array of typed objects:
-//
-// {"content": [{"type": "input_text", "text": "hello"}]}
-//
-// ChatGPT Codex API expects content as a simple string:
-//
-// {"content": "hello"}
-//
-// This function modifies reqBody in-place and returns true if any modification was made.
-func normalizeInputForCodexAPI(reqBody map[string]any) bool {
- input, ok := reqBody["input"]
- if !ok {
- return false
- }
-
- // Handle case where input is a simple string (already compatible)
- if _, isString := input.(string); isString {
- return false
- }
-
- // Handle case where input is an array of messages
- inputArray, ok := input.([]any)
- if !ok {
- return false
- }
-
- modified := false
- for _, item := range inputArray {
- message, ok := item.(map[string]any)
- if !ok {
- continue
- }
-
- content, ok := message["content"]
- if !ok {
- continue
- }
-
- // If content is already a string, no conversion needed
- if _, isString := content.(string); isString {
- continue
- }
-
- // If content is an array (AI SDK format), convert to string
- contentArray, ok := content.([]any)
- if !ok {
- continue
- }
-
- // Extract text from content array
- var textParts []string
- for _, part := range contentArray {
- partMap, ok := part.(map[string]any)
- if !ok {
- continue
- }
-
- // Handle different content types
- partType, _ := partMap["type"].(string)
- switch partType {
- case "input_text", "text":
- // Extract text from input_text or text type
- if text, ok := partMap["text"].(string); ok {
- textParts = append(textParts, text)
- }
- case "input_image", "image":
- // For images, we need to preserve the original format
- // as ChatGPT Codex API may support images in a different way
- // For now, skip image parts (they will be lost in conversion)
- // TODO: Consider preserving image data or handling it separately
- continue
- case "input_file", "file":
- // Similar to images, file inputs may need special handling
- continue
- default:
- // For unknown types, try to extract text if available
- if text, ok := partMap["text"].(string); ok {
- textParts = append(textParts, text)
- }
- }
- }
-
- // Convert content array to string
- if len(textParts) > 0 {
- message["content"] = strings.Join(textParts, "\n")
- modified = true
- }
- }
-
- return modified
-}
-
// OpenAIRecordUsageInput input for recording usage
type OpenAIRecordUsageInput struct {
Result *OpenAIForwardResult
diff --git a/backend/internal/service/openai_gateway_service_test.go b/backend/internal/service/openai_gateway_service_test.go
index 8562d940..55e11b30 100644
--- a/backend/internal/service/openai_gateway_service_test.go
+++ b/backend/internal/service/openai_gateway_service_test.go
@@ -220,7 +220,7 @@ func TestOpenAIInvalidBaseURLWhenAllowlistDisabled(t *testing.T) {
Credentials: map[string]any{"base_url": "://invalid-url"},
}
- _, err := svc.buildUpstreamRequest(c.Request.Context(), c, account, []byte("{}"), "token", false)
+ _, err := svc.buildUpstreamRequest(c.Request.Context(), c, account, []byte("{}"), "token", false, "", false)
if err == nil {
t.Fatalf("expected error for invalid base_url when allowlist disabled")
}
diff --git a/backend/internal/service/prompts/codex_opencode_bridge.txt b/backend/internal/service/prompts/codex_opencode_bridge.txt
new file mode 100644
index 00000000..093aa0f2
--- /dev/null
+++ b/backend/internal/service/prompts/codex_opencode_bridge.txt
@@ -0,0 +1,122 @@
+# Codex Running in OpenCode
+
+You are running Codex through OpenCode, an open-source terminal coding assistant. OpenCode provides different tools but follows Codex operating principles.
+
+## CRITICAL: Tool Replacements
+
+
+❌ APPLY_PATCH DOES NOT EXIST → ✅ USE "edit" INSTEAD
+- NEVER use: apply_patch, applyPatch
+- ALWAYS use: edit tool for ALL file modifications
+- Before modifying files: Verify you're using "edit", NOT "apply_patch"
+
+
+
+❌ UPDATE_PLAN DOES NOT EXIST → ✅ USE "todowrite" INSTEAD
+- NEVER use: update_plan, updatePlan, read_plan, readPlan
+- ALWAYS use: todowrite for task/plan updates, todoread to read plans
+- Before plan operations: Verify you're using "todowrite", NOT "update_plan"
+
+
+## Available OpenCode Tools
+
+**File Operations:**
+- `write` - Create new files
+ - Overwriting existing files requires a prior Read in this session; default to ASCII unless the file already uses Unicode.
+- `edit` - Modify existing files (REPLACES apply_patch)
+ - Requires a prior Read in this session; preserve exact indentation; ensure `oldString` uniquely matches or use `replaceAll`; edit fails if ambiguous or missing.
+- `read` - Read file contents
+
+**Search/Discovery:**
+- `grep` - Search file contents (tool, not bash grep); use `include` to filter patterns; set `path` only when not searching workspace root; for cross-file match counts use bash with `rg`.
+- `glob` - Find files by pattern; defaults to workspace cwd unless `path` is set.
+- `list` - List directories (requires absolute paths)
+
+**Execution:**
+- `bash` - Run shell commands
+ - No workdir parameter; do not include it in tool calls.
+ - Always include a short description for the command.
+ - Do not use cd; use absolute paths in commands.
+ - Quote paths containing spaces with double quotes.
+ - Chain multiple commands with ';' or '&&'; avoid newlines.
+ - Use Grep/Glob tools for searches; only use bash with `rg` when you need counts or advanced features.
+ - Do not use `ls`/`cat` in bash; use `list`/`read` tools instead.
+ - For deletions (rm), verify by listing parent dir with `list`.
+
+**Network:**
+- `webfetch` - Fetch web content
+ - Use fully-formed URLs (http/https; http auto-upgrades to https).
+ - Always set `format` to one of: text | markdown | html; prefer markdown unless otherwise required.
+ - Read-only; short cache window.
+
+**Task Management:**
+- `todowrite` - Manage tasks/plans (REPLACES update_plan)
+- `todoread` - Read current plan
+
+## Substitution Rules
+
+Base instruction says: You MUST use instead:
+apply_patch → edit
+update_plan → todowrite
+read_plan → todoread
+
+**Path Usage:** Use per-tool conventions to avoid conflicts:
+- Tool calls: `read`, `edit`, `write`, `list` require absolute paths.
+- Searches: `grep`/`glob` default to the workspace cwd; prefer relative include patterns; set `path` only when a different root is needed.
+- Presentation: In assistant messages, show workspace-relative paths; use absolute paths only inside tool calls.
+- Tool schema overrides general path preferences—do not convert required absolute paths to relative.
+
+## Verification Checklist
+
+Before file/plan modifications:
+1. Am I using "edit" NOT "apply_patch"?
+2. Am I using "todowrite" NOT "update_plan"?
+3. Is this tool in the approved list above?
+4. Am I following each tool's path requirements?
+
+If ANY answer is NO → STOP and correct before proceeding.
+
+## OpenCode Working Style
+
+**Communication:**
+- Send brief preambles (8-12 words) before tool calls, building on prior context
+- Provide progress updates during longer tasks
+
+**Execution:**
+- Keep working autonomously until query is fully resolved before yielding
+- Don't return to user with partial solutions
+
+**Code Approach:**
+- New projects: Be ambitious and creative
+- Existing codebases: Surgical precision - modify only what's requested unless explicitly instructed to do otherwise
+
+**Testing:**
+- If tests exist: Start specific to your changes, then broader validation
+
+## Advanced Tools
+
+**Task Tool (Sub-Agents):**
+- Use the Task tool (functions.task) to launch sub-agents
+- Check the Task tool description for current agent types and their capabilities
+- Useful for complex analysis, specialized workflows, or tasks requiring isolated context
+- The agent list is dynamically generated - refer to tool schema for available agents
+
+**Parallelization:**
+- When multiple independent tool calls are needed, use multi_tool_use.parallel to run them concurrently.
+- Reserve sequential calls for ordered or data-dependent steps.
+
+**MCP Tools:**
+- Model Context Protocol servers provide additional capabilities
+- MCP tools are prefixed: `mcp__<server>__<tool>`
+- Check your available tools for MCP integrations
+- Use when the tool's functionality matches your task needs
+
+## What Remains from Codex
+
+Sandbox policies, approval mechanisms, final answer formatting, git commit protocols, and file reference formats all follow Codex instructions. In approval policy "never", never request escalations.
+
+## Approvals & Safety
+- Assume workspace-write filesystem, network enabled, approval on-failure unless explicitly stated otherwise.
+- When a command fails due to sandboxing or permissions, retry with escalated permissions if allowed by policy, including a one-line justification.
+- Treat destructive commands (e.g., `rm`, `git reset --hard`) as requiring explicit user request or approval.
+- When uncertain, prefer non-destructive verification first (e.g., confirm file existence with `list`, then delete with `bash`).
\ No newline at end of file
diff --git a/backend/internal/service/prompts/tool_remap_message.txt b/backend/internal/service/prompts/tool_remap_message.txt
new file mode 100644
index 00000000..4ff986e1
--- /dev/null
+++ b/backend/internal/service/prompts/tool_remap_message.txt
@@ -0,0 +1,63 @@
+
+
+YOU ARE IN A DIFFERENT ENVIRONMENT. These instructions override ALL previous tool references.
+
+
+
+
+❌ APPLY_PATCH DOES NOT EXIST → ✅ USE "edit" INSTEAD
+- NEVER use: apply_patch, applyPatch
+- ALWAYS use: edit tool for ALL file modifications
+- Before modifying files: Verify you're using "edit", NOT "apply_patch"
+
+
+
+❌ UPDATE_PLAN DOES NOT EXIST → ✅ USE "todowrite" INSTEAD
+- NEVER use: update_plan, updatePlan
+- ALWAYS use: todowrite for ALL task/plan operations
+- Use todoread to read current plan
+- Before plan operations: Verify you're using "todowrite", NOT "update_plan"
+
+
+
+
+File Operations:
+ • write - Create new files
+ • edit - Modify existing files (REPLACES apply_patch)
+ • patch - Apply diff patches
+ • read - Read file contents
+
+Search/Discovery:
+ • grep - Search file contents
+ • glob - Find files by pattern
+ • list - List directories (use relative paths)
+
+Execution:
+ • bash - Run shell commands
+
+Network:
+ • webfetch - Fetch web content
+
+Task Management:
+ • todowrite - Manage tasks/plans (REPLACES update_plan)
+ • todoread - Read current plan
+
+
+
+Base instruction says: You MUST use instead:
+apply_patch → edit
+update_plan → todowrite
+read_plan → todoread
+absolute paths → relative paths
+
+
+
+Before file/plan modifications:
+1. Am I using "edit" NOT "apply_patch"?
+2. Am I using "todowrite" NOT "update_plan"?
+3. Is this tool in the approved list above?
+4. Am I using relative paths?
+
+If ANY answer is NO → STOP and correct before proceeding.
+
+
\ No newline at end of file