fix(openai): do not normalize /completion API token-based accounts

This commit is contained in:
Alex
2026-04-07 11:40:41 +03:00
parent 7eecc49c3a
commit 3a07e92b60
3 changed files with 4 additions and 4 deletions

View File

@@ -46,7 +46,7 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
// 2. Resolve model mapping early so compat prompt_cache_key injection can
// derive a stable seed from the final upstream model family.
billingModel := resolveOpenAIForwardModel(account, originalModel, defaultMappedModel)
-	upstreamModel := normalizeCodexModel(billingModel)
+	upstreamModel := normalizeOpenAIModelForUpstream(account, billingModel)
promptCacheKey = strings.TrimSpace(promptCacheKey)
compatPromptCacheInjected := false

View File

@@ -62,7 +62,7 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
// 3. Model mapping
billingModel := resolveOpenAIForwardModel(account, normalizedModel, defaultMappedModel)
-	upstreamModel := normalizeCodexModel(billingModel)
+	upstreamModel := normalizeOpenAIModelForUpstream(account, billingModel)
responsesReq.Model = upstreamModel
logger.L().Debug("openai messages: model mapping applied",

View File

@@ -2515,7 +2515,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
}
normalized = next
}
-	upstreamModel := normalizeCodexModel(account.GetMappedModel(originalModel))
+	upstreamModel := normalizeOpenAIModelForUpstream(account, account.GetMappedModel(originalModel))
if upstreamModel != originalModel {
next, setErr := applyPayloadMutation(normalized, "model", upstreamModel)
if setErr != nil {
@@ -2773,7 +2773,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
mappedModel := ""
var mappedModelBytes []byte
if originalModel != "" {
-	mappedModel = normalizeCodexModel(account.GetMappedModel(originalModel))
+	mappedModel = normalizeOpenAIModelForUpstream(account, account.GetMappedModel(originalModel))
needModelReplace = mappedModel != "" && mappedModel != originalModel
if needModelReplace {
mappedModelBytes = []byte(mappedModel)