fix(openai): improve OpenCode compatibility and model normalization

## Main Changes

1. **Model normalization extended to all accounts**
   - Apply Codex model normalization (e.g. gpt-5-nano → gpt-5.1) to all OpenAI account types
   - No longer limited to OAuth non-CLI requests
   - Fixes model compatibility issues when Codex CLI is used with a ChatGPT account (a mapping sketch follows this list)

2. **reasoning.effort parameter normalization**
   - Automatically convert `minimal` to `none`
   - Works around gpt-5.1 models not accepting the `minimal` value

3. **Session/Conversation ID fallback mechanism**
   - Extract session_id/conversation_id from several request body fields
   - Priority: prompt_cache_key → session_id → conversation_id → previous_response_id
   - Supports session persistence for Codex CLI

4. **Tool call ID fallback**
   - Fall back to the `id` field when `call_id` is empty
   - Ensures tool call outputs can be matched correctly
   - Keep items of type `item_reference`

5. **Header handling**
   - Add conversation_id to the allowed headers
   - Remove the logic that deleted session headers
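
The body of `normalizeCodexModel` is not part of this diff, so the following is only a minimal sketch of what such an alias lookup could look like. Only the gpt-5-nano → gpt-5.1 pair comes from this change list; the function shape and everything else is illustrative, not the repository's actual code.

```go
package main

import "fmt"

// Hypothetical stand-in for the repository's normalizeCodexModel; only the
// gpt-5-nano -> gpt-5.1 alias is taken from the change list above.
func normalizeCodexModel(model string) string {
    aliases := map[string]string{
        "gpt-5-nano": "gpt-5.1",
    }
    if normalized, ok := aliases[model]; ok {
        return normalized
    }
    // Empty string tells the caller in the Forward() hunk below that no mapping applies.
    return ""
}

func main() {
    fmt.Println(normalizeCodexModel("gpt-5-nano")) // gpt-5.1
    fmt.Println(normalizeCodexModel("gpt-4o"))     // "" (left unchanged by the caller)
}
```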

## Related Issue
- See the item_reference discussion in OpenCode issue #3118
Author: ianshaw
Date: 2026-01-12 20:18:53 -08:00
Parent: f9713e8733
Commit: 3d6e01a58f
3 changed files with 77 additions and 4 deletions

View File

@@ -96,6 +96,8 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
        return
    }

    seedOpenAISessionHeaders(c, reqBody)

    userAgent := c.GetHeader("User-Agent")
    if !openai.IsCodexCLIRequest(userAgent) {
        existingInstructions, _ := reqBody["instructions"].(string)
@@ -299,6 +301,37 @@ func (h *OpenAIGatewayHandler) handleFailoverExhausted(c *gin.Context, statusCod
    h.handleStreamingAwareError(c, status, errType, errMsg, streamStarted)
}

func seedOpenAISessionHeaders(c *gin.Context, reqBody map[string]any) {
    if c.GetHeader("session_id") == "" {
        if v := firstNonEmptyString(
            reqBody["prompt_cache_key"],
            reqBody["session_id"],
            reqBody["conversation_id"],
            reqBody["previous_response_id"],
        ); v != "" {
            c.Request.Header.Set("session_id", v)
        }
    }
    if c.GetHeader("conversation_id") == "" {
        if v := firstNonEmptyString(reqBody["prompt_cache_key"], reqBody["conversation_id"]); v != "" {
            c.Request.Header.Set("conversation_id", v)
        }
    }
}

func firstNonEmptyString(values ...any) string {
    for _, value := range values {
        s, ok := value.(string)
        if ok {
            s = strings.TrimSpace(s)
            if s != "" {
                return s
            }
        }
    }
    return ""
}

func (h *OpenAIGatewayHandler) mapUpstreamError(statusCode int) (int, string, string) {
    switch statusCode {
    case 401:
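
For illustration, the fallback order can be exercised with a standalone copy of `firstNonEmptyString`; the request body below mimics what a Codex CLI client might send (the field values are made up):

```go
package main

import (
    "fmt"
    "strings"
)

// Copy of firstNonEmptyString from the hunk above, for a standalone demo.
func firstNonEmptyString(values ...any) string {
    for _, value := range values {
        s, ok := value.(string)
        if ok {
            s = strings.TrimSpace(s)
            if s != "" {
                return s
            }
        }
    }
    return ""
}

func main() {
    // Body with no session_id/conversation_id but a prompt_cache_key; values are illustrative.
    reqBody := map[string]any{
        "prompt_cache_key":     "pck-1234",
        "previous_response_id": "resp-5678",
    }
    sessionID := firstNonEmptyString(
        reqBody["prompt_cache_key"],
        reqBody["session_id"],
        reqBody["conversation_id"],
        reqBody["previous_response_id"],
    )
    fmt.Println(sessionID) // pck-1234: prompt_cache_key wins over previous_response_id
}
```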

View File

@@ -317,15 +317,36 @@ func filterCodexInput(input []any) []any {
            filtered = append(filtered, item)
            continue
        }
        if typ, ok := m["type"].(string); ok && typ == "item_reference" {
        typ, _ := m["type"].(string)
        if typ == "item_reference" {
            filtered = append(filtered, m)
            continue
        }
        // Strip per-item ids; keep call_id only for tool call items so outputs can match.
        if isCodexToolCallItemType(typ) {
            callID, _ := m["call_id"].(string)
            if strings.TrimSpace(callID) == "" {
                if id, ok := m["id"].(string); ok && strings.TrimSpace(id) != "" {
                    m["call_id"] = id
                }
            }
        }
        delete(m, "id")
        if !isCodexToolCallItemType(typ) {
            delete(m, "call_id")
        }
        filtered = append(filtered, m)
    }
    return filtered
}

func isCodexToolCallItemType(typ string) bool {
    if typ == "" {
        return false
    }
    return strings.HasSuffix(typ, "_call") || strings.HasSuffix(typ, "_call_output")
}

func normalizeCodexTools(reqBody map[string]any) bool {
    rawTools, ok := reqBody["tools"]
    if !ok || rawTools == nil {
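
To make the suffix check concrete, here is a standalone copy of `isCodexToolCallItemType` run against a few Responses API item type strings; the exact set of types that reaches `filterCodexInput` is an assumption here.

```go
package main

import (
    "fmt"
    "strings"
)

// Copy of isCodexToolCallItemType from the hunk above, for a standalone demo.
func isCodexToolCallItemType(typ string) bool {
    if typ == "" {
        return false
    }
    return strings.HasSuffix(typ, "_call") || strings.HasSuffix(typ, "_call_output")
}

func main() {
    // Tool call items keep call_id (their id is still stripped); everything else loses both.
    for _, typ := range []string{"function_call", "function_call_output", "message", "reasoning", ""} {
        fmt.Printf("%-24q -> keep call_id: %v\n", typ, isCodexToolCallItemType(typ))
    }
}
```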

View File

@@ -42,6 +42,7 @@ var openaiSSEDataRe = regexp.MustCompile(`^data:\s*`)
var openaiAllowedHeaders = map[string]bool{
    "accept-language": true,
    "content-type": true,
    "conversation_id": true,
    "user-agent": true,
    "originator": true,
    "session_id": true,
@@ -553,6 +554,27 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
        bodyModified = true
    }

    // Apply Codex model normalization for all OpenAI accounts
    if model, ok := reqBody["model"].(string); ok {
        normalizedModel := normalizeCodexModel(model)
        if normalizedModel != "" && normalizedModel != model {
            log.Printf("[OpenAI] Codex model normalization: %s -> %s (account: %s, type: %s, isCodexCLI: %v)",
                model, normalizedModel, account.Name, account.Type, isCodexCLI)
            reqBody["model"] = normalizedModel
            mappedModel = normalizedModel
            bodyModified = true
        }
    }

    // Normalize reasoning.effort parameter (minimal -> none)
    if reasoning, ok := reqBody["reasoning"].(map[string]any); ok {
        if effort, ok := reasoning["effort"].(string); ok && effort == "minimal" {
            reasoning["effort"] = "none"
            bodyModified = true
            log.Printf("[OpenAI] Normalized reasoning.effort: minimal -> none (account: %s)", account.Name)
        }
    }

    if account.Type == AccountTypeOAuth && !isCodexCLI {
        codexResult := applyCodexOAuthTransform(reqBody)
        if codexResult.Modified {
@@ -783,9 +805,6 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
    if promptCacheKey != "" {
        req.Header.Set("conversation_id", promptCacheKey)
        req.Header.Set("session_id", promptCacheKey)
    } else {
        req.Header.Del("conversation_id")
        req.Header.Del("session_id")
    }
}
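
As a rough end-to-end illustration of the Forward() changes in this file, the snippet below applies the same reasoning.effort rewrite to a sample request body; the body and its values are illustrative, not taken from a real request.

```go
package main

import "fmt"

func main() {
    // A request body roughly like what a Codex CLI client might send; values are illustrative.
    reqBody := map[string]any{
        "model":     "gpt-5.1",
        "reasoning": map[string]any{"effort": "minimal"},
    }

    // Same rewrite as in the Forward() hunk above: gpt-5.1 rejects "minimal", so it becomes "none".
    if reasoning, ok := reqBody["reasoning"].(map[string]any); ok {
        if effort, ok := reasoning["effort"].(string); ok && effort == "minimal" {
            reasoning["effort"] = "none"
        }
    }

    fmt.Println(reqBody["reasoning"]) // map[effort:none]
}
```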