From 3a07e92b6072bee6068007a58fc7b93fd978be33 Mon Sep 17 00:00:00 2001
From: Alex
Date: Tue, 7 Apr 2026 11:40:41 +0300
Subject: [PATCH] fix(openai): do not normalize /completion API token based
 accounts

---
 backend/internal/service/openai_gateway_chat_completions.go | 2 +-
 backend/internal/service/openai_gateway_messages.go         | 2 +-
 backend/internal/service/openai_ws_forwarder.go             | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/backend/internal/service/openai_gateway_chat_completions.go b/backend/internal/service/openai_gateway_chat_completions.go
index be076cc0..d77af66f 100644
--- a/backend/internal/service/openai_gateway_chat_completions.go
+++ b/backend/internal/service/openai_gateway_chat_completions.go
@@ -46,7 +46,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
 	// 2. Resolve model mapping early so compat prompt_cache_key injection can
 	//    derive a stable seed from the final upstream model family.
 	billingModel := resolveOpenAIForwardModel(account, originalModel, defaultMappedModel)
-	upstreamModel := normalizeCodexModel(billingModel)
+	upstreamModel := normalizeOpenAIModelForUpstream(account, billingModel)
 
 	promptCacheKey = strings.TrimSpace(promptCacheKey)
 	compatPromptCacheInjected := false
diff --git a/backend/internal/service/openai_gateway_messages.go b/backend/internal/service/openai_gateway_messages.go
index dd416269..6f53928b 100644
--- a/backend/internal/service/openai_gateway_messages.go
+++ b/backend/internal/service/openai_gateway_messages.go
@@ -62,7 +62,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
 
 	// 3. Model mapping
 	billingModel := resolveOpenAIForwardModel(account, normalizedModel, defaultMappedModel)
-	upstreamModel := normalizeCodexModel(billingModel)
+	upstreamModel := normalizeOpenAIModelForUpstream(account, billingModel)
 	responsesReq.Model = upstreamModel
 
 	logger.L().Debug("openai messages: model mapping applied",
diff --git a/backend/internal/service/openai_ws_forwarder.go b/backend/internal/service/openai_ws_forwarder.go
index 6d45baab..83849bf3 100644
--- a/backend/internal/service/openai_ws_forwarder.go
+++ b/backend/internal/service/openai_ws_forwarder.go
@@ -2515,7 +2515,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
 			}
 			normalized = next
 		}
-		upstreamModel := normalizeCodexModel(account.GetMappedModel(originalModel))
+		upstreamModel := normalizeOpenAIModelForUpstream(account, account.GetMappedModel(originalModel))
 		if upstreamModel != originalModel {
 			next, setErr := applyPayloadMutation(normalized, "model", upstreamModel)
 			if setErr != nil {
@@ -2773,7 +2773,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
 		mappedModel := ""
 		var mappedModelBytes []byte
 		if originalModel != "" {
-			mappedModel = normalizeCodexModel(account.GetMappedModel(originalModel))
+			mappedModel = normalizeOpenAIModelForUpstream(account, account.GetMappedModel(originalModel))
 			needModelReplace = mappedModel != "" && mappedModel != originalModel
 			if needModelReplace {
 				mappedModelBytes = []byte(mappedModel)