Merge pull request #1529 from IanShaw027/feat/group-messages-dispatch-redo
feat: 为openai分组增加messages调度模型映射并支持instructions模板注入
This commit is contained in:
@@ -347,6 +347,12 @@ type GatewayConfig struct {
|
||||
// ForceCodexCLI: 强制将 OpenAI `/v1/responses` 请求按 Codex CLI 处理。
|
||||
// 用于网关未透传/改写 User-Agent 时的兼容兜底(默认关闭,避免影响其他客户端)。
|
||||
ForceCodexCLI bool `mapstructure:"force_codex_cli"`
|
||||
// ForcedCodexInstructionsTemplateFile: 服务端强制附加到 Codex 顶层 instructions 的模板文件路径。
|
||||
// 模板渲染后会直接覆盖最终 instructions;若需要保留客户端 system 转换结果,请在模板中显式引用 {{ .ExistingInstructions }}。
|
||||
ForcedCodexInstructionsTemplateFile string `mapstructure:"forced_codex_instructions_template_file"`
|
||||
// ForcedCodexInstructionsTemplate: 启动时从模板文件读取并缓存的模板内容。
|
||||
// 该字段不直接参与配置反序列化,仅用于请求热路径避免重复读盘。
|
||||
ForcedCodexInstructionsTemplate string `mapstructure:"-"`
|
||||
// OpenAIPassthroughAllowTimeoutHeaders: OpenAI 透传模式是否放行客户端超时头
|
||||
// 关闭(默认)可避免 x-stainless-timeout 等头导致上游提前断流。
|
||||
OpenAIPassthroughAllowTimeoutHeaders bool `mapstructure:"openai_passthrough_allow_timeout_headers"`
|
||||
@@ -1029,6 +1035,14 @@ func load(allowMissingJWTSecret bool) (*Config, error) {
|
||||
cfg.Log.Environment = strings.TrimSpace(cfg.Log.Environment)
|
||||
cfg.Log.StacktraceLevel = strings.ToLower(strings.TrimSpace(cfg.Log.StacktraceLevel))
|
||||
cfg.Log.Output.FilePath = strings.TrimSpace(cfg.Log.Output.FilePath)
|
||||
cfg.Gateway.ForcedCodexInstructionsTemplateFile = strings.TrimSpace(cfg.Gateway.ForcedCodexInstructionsTemplateFile)
|
||||
if cfg.Gateway.ForcedCodexInstructionsTemplateFile != "" {
|
||||
content, err := os.ReadFile(cfg.Gateway.ForcedCodexInstructionsTemplateFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("read forced codex instructions template %q: %w", cfg.Gateway.ForcedCodexInstructionsTemplateFile, err)
|
||||
}
|
||||
cfg.Gateway.ForcedCodexInstructionsTemplate = string(content)
|
||||
}
|
||||
|
||||
// 兼容旧键 gateway.openai_ws.sticky_previous_response_ttl_seconds。
|
||||
// 新键未配置(<=0)时回退旧键;新键优先。
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -223,6 +225,23 @@ func TestLoadSchedulingConfigFromEnv(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// TestLoadForcedCodexInstructionsTemplate verifies that Load() trims the
// configured template file path and eagerly reads the file's content into
// Gateway.ForcedCodexInstructionsTemplate, so the request hot path never has
// to re-read the file from disk.
func TestLoadForcedCodexInstructionsTemplate(t *testing.T) {
	resetViperWithJWTSecret(t)

	tempDir := t.TempDir()
	templatePath := filepath.Join(tempDir, "codex-instructions.md.tmpl")
	configPath := filepath.Join(tempDir, "config.yaml")

	// Write a template containing an un-rendered placeholder; the loader must
	// cache it verbatim (rendering happens later, per request).
	require.NoError(t, os.WriteFile(templatePath, []byte("server-prefix\n\n{{ .ExistingInstructions }}"), 0o644))
	require.NoError(t, os.WriteFile(configPath, []byte("gateway:\n forced_codex_instructions_template_file: \""+templatePath+"\"\n"), 0o644))
	t.Setenv("DATA_DIR", tempDir)

	cfg, err := Load()
	require.NoError(t, err)
	// Path is preserved as configured; template text is cached byte-for-byte,
	// including the {{ .ExistingInstructions }} slot.
	require.Equal(t, templatePath, cfg.Gateway.ForcedCodexInstructionsTemplateFile)
	require.Equal(t, "server-prefix\n\n{{ .ExistingInstructions }}", cfg.Gateway.ForcedCodexInstructionsTemplate)
}
|
||||
|
||||
func TestLoadDefaultSecurityToggles(t *testing.T) {
|
||||
resetViperWithJWTSecret(t)
|
||||
|
||||
|
||||
10
backend/internal/domain/openai_messages_dispatch.go
Normal file
10
backend/internal/domain/openai_messages_dispatch.go
Normal file
@@ -0,0 +1,10 @@
|
||||
package domain

// OpenAIMessagesDispatchModelConfig controls how Anthropic /v1/messages
// requests are mapped onto OpenAI/Codex models.
//
// The three *MappedModel fields are per-family targets for Claude
// opus/sonnet/haiku requests; ExactModelMappings maps a specific requested
// model name to a target and is expected to take precedence over the family
// fields (see the "exact_claude_model_override_wins" handler test). All
// fields are optional; empty values are omitted from JSON.
type OpenAIMessagesDispatchModelConfig struct {
	// Target model for Claude Opus-family requests.
	OpusMappedModel string `json:"opus_mapped_model,omitempty"`
	// Target model for Claude Sonnet-family requests.
	SonnetMappedModel string `json:"sonnet_mapped_model,omitempty"`
	// Target model for Claude Haiku-family requests.
	HaikuMappedModel string `json:"haiku_mapped_model,omitempty"`
	// Exact requested-model overrides; keys are full requested model names.
	ExactModelMappings map[string]string `json:"exact_model_mappings,omitempty"`
}
|
||||
@@ -105,10 +105,11 @@ type CreateGroupRequest struct {
|
||||
// 支持的模型系列(仅 antigravity 平台使用)
|
||||
SupportedModelScopes []string `json:"supported_model_scopes"`
|
||||
// OpenAI Messages 调度配置(仅 openai 平台使用)
|
||||
AllowMessagesDispatch bool `json:"allow_messages_dispatch"`
|
||||
RequireOAuthOnly bool `json:"require_oauth_only"`
|
||||
RequirePrivacySet bool `json:"require_privacy_set"`
|
||||
DefaultMappedModel string `json:"default_mapped_model"`
|
||||
AllowMessagesDispatch bool `json:"allow_messages_dispatch"`
|
||||
RequireOAuthOnly bool `json:"require_oauth_only"`
|
||||
RequirePrivacySet bool `json:"require_privacy_set"`
|
||||
DefaultMappedModel string `json:"default_mapped_model"`
|
||||
MessagesDispatchModelConfig service.OpenAIMessagesDispatchModelConfig `json:"messages_dispatch_model_config"`
|
||||
// 从指定分组复制账号(创建后自动绑定)
|
||||
CopyAccountsFromGroupIDs []int64 `json:"copy_accounts_from_group_ids"`
|
||||
}
|
||||
@@ -139,10 +140,11 @@ type UpdateGroupRequest struct {
|
||||
// 支持的模型系列(仅 antigravity 平台使用)
|
||||
SupportedModelScopes *[]string `json:"supported_model_scopes"`
|
||||
// OpenAI Messages 调度配置(仅 openai 平台使用)
|
||||
AllowMessagesDispatch *bool `json:"allow_messages_dispatch"`
|
||||
RequireOAuthOnly *bool `json:"require_oauth_only"`
|
||||
RequirePrivacySet *bool `json:"require_privacy_set"`
|
||||
DefaultMappedModel *string `json:"default_mapped_model"`
|
||||
AllowMessagesDispatch *bool `json:"allow_messages_dispatch"`
|
||||
RequireOAuthOnly *bool `json:"require_oauth_only"`
|
||||
RequirePrivacySet *bool `json:"require_privacy_set"`
|
||||
DefaultMappedModel *string `json:"default_mapped_model"`
|
||||
MessagesDispatchModelConfig *service.OpenAIMessagesDispatchModelConfig `json:"messages_dispatch_model_config"`
|
||||
// 从指定分组复制账号(同步操作:先清空当前分组的账号绑定,再绑定源分组的账号)
|
||||
CopyAccountsFromGroupIDs []int64 `json:"copy_accounts_from_group_ids"`
|
||||
}
|
||||
@@ -257,6 +259,7 @@ func (h *GroupHandler) Create(c *gin.Context) {
|
||||
RequireOAuthOnly: req.RequireOAuthOnly,
|
||||
RequirePrivacySet: req.RequirePrivacySet,
|
||||
DefaultMappedModel: req.DefaultMappedModel,
|
||||
MessagesDispatchModelConfig: req.MessagesDispatchModelConfig,
|
||||
CopyAccountsFromGroupIDs: req.CopyAccountsFromGroupIDs,
|
||||
})
|
||||
if err != nil {
|
||||
@@ -307,6 +310,7 @@ func (h *GroupHandler) Update(c *gin.Context) {
|
||||
RequireOAuthOnly: req.RequireOAuthOnly,
|
||||
RequirePrivacySet: req.RequirePrivacySet,
|
||||
DefaultMappedModel: req.DefaultMappedModel,
|
||||
MessagesDispatchModelConfig: req.MessagesDispatchModelConfig,
|
||||
CopyAccountsFromGroupIDs: req.CopyAccountsFromGroupIDs,
|
||||
})
|
||||
if err != nil {
|
||||
|
||||
@@ -133,16 +133,17 @@ func GroupFromServiceAdmin(g *service.Group) *AdminGroup {
|
||||
return nil
|
||||
}
|
||||
out := &AdminGroup{
|
||||
Group: groupFromServiceBase(g),
|
||||
ModelRouting: g.ModelRouting,
|
||||
ModelRoutingEnabled: g.ModelRoutingEnabled,
|
||||
MCPXMLInject: g.MCPXMLInject,
|
||||
DefaultMappedModel: g.DefaultMappedModel,
|
||||
SupportedModelScopes: g.SupportedModelScopes,
|
||||
AccountCount: g.AccountCount,
|
||||
ActiveAccountCount: g.ActiveAccountCount,
|
||||
RateLimitedAccountCount: g.RateLimitedAccountCount,
|
||||
SortOrder: g.SortOrder,
|
||||
Group: groupFromServiceBase(g),
|
||||
ModelRouting: g.ModelRouting,
|
||||
ModelRoutingEnabled: g.ModelRoutingEnabled,
|
||||
MCPXMLInject: g.MCPXMLInject,
|
||||
DefaultMappedModel: g.DefaultMappedModel,
|
||||
MessagesDispatchModelConfig: g.MessagesDispatchModelConfig,
|
||||
SupportedModelScopes: g.SupportedModelScopes,
|
||||
AccountCount: g.AccountCount,
|
||||
ActiveAccountCount: g.ActiveAccountCount,
|
||||
RateLimitedAccountCount: g.RateLimitedAccountCount,
|
||||
SortOrder: g.SortOrder,
|
||||
}
|
||||
if len(g.AccountGroups) > 0 {
|
||||
out.AccountGroups = make([]AccountGroup, 0, len(g.AccountGroups))
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
package dto
|
||||
|
||||
import "time"
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/Wei-Shaw/sub2api/internal/domain"
|
||||
)
|
||||
|
||||
type User struct {
|
||||
ID int64 `json:"id"`
|
||||
@@ -112,7 +116,8 @@ type AdminGroup struct {
|
||||
MCPXMLInject bool `json:"mcp_xml_inject"`
|
||||
|
||||
// OpenAI Messages 调度配置(仅 openai 平台使用)
|
||||
DefaultMappedModel string `json:"default_mapped_model"`
|
||||
DefaultMappedModel string `json:"default_mapped_model"`
|
||||
MessagesDispatchModelConfig domain.OpenAIMessagesDispatchModelConfig `json:"messages_dispatch_model_config"`
|
||||
|
||||
// 支持的模型系列(仅 antigravity 平台使用)
|
||||
SupportedModelScopes []string `json:"supported_model_scopes"`
|
||||
|
||||
@@ -47,6 +47,13 @@ func resolveOpenAIForwardDefaultMappedModel(apiKey *service.APIKey, fallbackMode
|
||||
return strings.TrimSpace(apiKey.Group.DefaultMappedModel)
|
||||
}
|
||||
|
||||
// resolveOpenAIMessagesDispatchMappedModel returns the group-configured
// dispatch mapping for requestedModel, whitespace-trimmed, or "" when the API
// key has no group or the group resolves no mapping.
//
// NOTE(review): unlike resolveOpenAIForwardDefaultMappedModel, this helper
// intentionally does not fall back to Group.DefaultMappedModel — pinned by
// TestResolveOpenAIMessagesDispatchMappedModel.
func resolveOpenAIMessagesDispatchMappedModel(apiKey *service.APIKey, requestedModel string) string {
	if apiKey == nil || apiKey.Group == nil {
		return ""
	}
	return strings.TrimSpace(apiKey.Group.ResolveMessagesDispatchModel(requestedModel))
}
|
||||
|
||||
// NewOpenAIGatewayHandler creates a new OpenAIGatewayHandler
|
||||
func NewOpenAIGatewayHandler(
|
||||
gatewayService *service.OpenAIGatewayService,
|
||||
@@ -551,6 +558,7 @@ func (h *OpenAIGatewayHandler) Messages(c *gin.Context) {
|
||||
}
|
||||
reqModel := modelResult.String()
|
||||
routingModel := service.NormalizeOpenAICompatRequestedModel(reqModel)
|
||||
preferredMappedModel := resolveOpenAIMessagesDispatchMappedModel(apiKey, reqModel)
|
||||
reqStream := gjson.GetBytes(body, "stream").Bool()
|
||||
|
||||
reqLog = reqLog.With(zap.String("model", reqModel), zap.Bool("stream", reqStream))
|
||||
@@ -609,17 +617,20 @@ func (h *OpenAIGatewayHandler) Messages(c *gin.Context) {
|
||||
failedAccountIDs := make(map[int64]struct{})
|
||||
sameAccountRetryCount := make(map[int64]int)
|
||||
var lastFailoverErr *service.UpstreamFailoverError
|
||||
effectiveMappedModel := preferredMappedModel
|
||||
|
||||
for {
|
||||
// 清除上一次迭代的降级模型标记,避免残留影响本次迭代
|
||||
c.Set("openai_messages_fallback_model", "")
|
||||
currentRoutingModel := routingModel
|
||||
if effectiveMappedModel != "" {
|
||||
currentRoutingModel = effectiveMappedModel
|
||||
}
|
||||
reqLog.Debug("openai_messages.account_selecting", zap.Int("excluded_account_count", len(failedAccountIDs)))
|
||||
selection, scheduleDecision, err := h.gatewayService.SelectAccountWithScheduler(
|
||||
c.Request.Context(),
|
||||
apiKey.GroupID,
|
||||
"", // no previous_response_id
|
||||
sessionHash,
|
||||
routingModel,
|
||||
currentRoutingModel,
|
||||
failedAccountIDs,
|
||||
service.OpenAIUpstreamTransportAny,
|
||||
)
|
||||
@@ -628,29 +639,7 @@ func (h *OpenAIGatewayHandler) Messages(c *gin.Context) {
|
||||
zap.Error(err),
|
||||
zap.Int("excluded_account_count", len(failedAccountIDs)),
|
||||
)
|
||||
// 首次调度失败 + 有默认映射模型 → 用默认模型重试
|
||||
if len(failedAccountIDs) == 0 {
|
||||
defaultModel := ""
|
||||
if apiKey.Group != nil {
|
||||
defaultModel = apiKey.Group.DefaultMappedModel
|
||||
}
|
||||
if defaultModel != "" && defaultModel != routingModel {
|
||||
reqLog.Info("openai_messages.fallback_to_default_model",
|
||||
zap.String("default_mapped_model", defaultModel),
|
||||
)
|
||||
selection, scheduleDecision, err = h.gatewayService.SelectAccountWithScheduler(
|
||||
c.Request.Context(),
|
||||
apiKey.GroupID,
|
||||
"",
|
||||
sessionHash,
|
||||
defaultModel,
|
||||
failedAccountIDs,
|
||||
service.OpenAIUpstreamTransportAny,
|
||||
)
|
||||
if err == nil && selection != nil {
|
||||
c.Set("openai_messages_fallback_model", defaultModel)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
h.anthropicStreamingAwareError(c, http.StatusServiceUnavailable, "api_error", "Service temporarily unavailable", streamStarted)
|
||||
return
|
||||
@@ -682,9 +671,7 @@ func (h *OpenAIGatewayHandler) Messages(c *gin.Context) {
|
||||
service.SetOpsLatencyMs(c, service.OpsRoutingLatencyMsKey, time.Since(routingStart).Milliseconds())
|
||||
forwardStart := time.Now()
|
||||
|
||||
// Forward 层需要始终拿到 group 默认映射模型,这样未命中账号级映射的
|
||||
// Claude 兼容模型才不会在后续 Codex 规范化中意外退化到 gpt-5.1。
|
||||
defaultMappedModel := resolveOpenAIForwardDefaultMappedModel(apiKey, c.GetString("openai_messages_fallback_model"))
|
||||
defaultMappedModel := strings.TrimSpace(effectiveMappedModel)
|
||||
// 应用渠道模型映射到请求体
|
||||
forwardBody := body
|
||||
if channelMappingMsg.Mapped {
|
||||
|
||||
@@ -360,7 +360,7 @@ func TestResolveOpenAIForwardDefaultMappedModel(t *testing.T) {
|
||||
require.Equal(t, "gpt-5.2", resolveOpenAIForwardDefaultMappedModel(apiKey, " gpt-5.2 "))
|
||||
})
|
||||
|
||||
t.Run("uses_group_default_on_normal_path", func(t *testing.T) {
|
||||
t.Run("uses_group_default_when_explicit_fallback_absent", func(t *testing.T) {
|
||||
apiKey := &service.APIKey{
|
||||
Group: &service.Group{DefaultMappedModel: "gpt-5.4"},
|
||||
}
|
||||
@@ -376,6 +376,45 @@ func TestResolveOpenAIForwardDefaultMappedModel(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
// TestResolveOpenAIMessagesDispatchMappedModel pins the mapping precedence for
// the /v1/messages dispatch path: exact per-model overrides beat family
// defaults, Claude-family names resolve even with an empty config, non-Claude
// models resolve to "", and Group.DefaultMappedModel is never consulted here.
func TestResolveOpenAIMessagesDispatchMappedModel(t *testing.T) {
	t.Run("exact_claude_model_override_wins", func(t *testing.T) {
		apiKey := &service.APIKey{
			Group: &service.Group{
				MessagesDispatchModelConfig: service.OpenAIMessagesDispatchModelConfig{
					SonnetMappedModel: "gpt-5.2",
					ExactModelMappings: map[string]string{
						"claude-sonnet-4-5-20250929": "gpt-5.4-mini-high",
					},
				},
			},
		}
		// NOTE(review): the "-high" suffix is expected to be normalized away —
		// presumably an effort-suffix strip inside ResolveMessagesDispatchModel;
		// confirm there.
		require.Equal(t, "gpt-5.4-mini", resolveOpenAIMessagesDispatchMappedModel(apiKey, "claude-sonnet-4-5-20250929"))
	})

	t.Run("uses_family_default_when_no_override", func(t *testing.T) {
		// Empty dispatch config: Claude opus/sonnet/haiku still resolve, so
		// built-in family defaults must exist outside the group config.
		apiKey := &service.APIKey{Group: &service.Group{}}
		require.Equal(t, "gpt-5.4", resolveOpenAIMessagesDispatchMappedModel(apiKey, "claude-opus-4-6"))
		require.Equal(t, "gpt-5.3-codex", resolveOpenAIMessagesDispatchMappedModel(apiKey, "claude-sonnet-4-5-20250929"))
		require.Equal(t, "gpt-5.4-mini", resolveOpenAIMessagesDispatchMappedModel(apiKey, "claude-haiku-4-5-20251001"))
	})

	t.Run("returns_empty_for_non_claude_or_missing_group", func(t *testing.T) {
		require.Empty(t, resolveOpenAIMessagesDispatchMappedModel(nil, "claude-sonnet-4-5-20250929"))
		require.Empty(t, resolveOpenAIMessagesDispatchMappedModel(&service.APIKey{}, "claude-sonnet-4-5-20250929"))
		require.Empty(t, resolveOpenAIMessagesDispatchMappedModel(&service.APIKey{Group: &service.Group{}}, "gpt-5.4"))
	})

	t.Run("does_not_fall_back_to_group_default_mapped_model", func(t *testing.T) {
		apiKey := &service.APIKey{
			Group: &service.Group{
				DefaultMappedModel: "gpt-5.4",
			},
		}
		// Non-Claude request stays unmapped even though a group default exists.
		require.Empty(t, resolveOpenAIMessagesDispatchMappedModel(apiKey, "gpt-5.4"))
		require.Equal(t, "gpt-5.3-codex", resolveOpenAIMessagesDispatchMappedModel(apiKey, "claude-sonnet-4-5-20250929"))
	})
}
|
||||
|
||||
func TestOpenAIResponses_MissingDependencies_ReturnsServiceUnavailable(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ type AnthropicRequest struct {
|
||||
|
||||
// AnthropicOutputConfig controls output generation parameters.
|
||||
type AnthropicOutputConfig struct {
|
||||
Effort string `json:"effort,omitempty"` // "low" | "medium" | "high"
|
||||
Effort string `json:"effort,omitempty"` // "low" | "medium" | "high" | "max"
|
||||
}
|
||||
|
||||
// AnthropicThinking configures extended thinking in the Anthropic API.
|
||||
@@ -167,7 +167,7 @@ type ResponsesRequest struct {
|
||||
|
||||
// ResponsesReasoning configures reasoning effort in the Responses API.
|
||||
type ResponsesReasoning struct {
|
||||
Effort string `json:"effort"` // "low" | "medium" | "high"
|
||||
Effort string `json:"effort"` // "low" | "medium" | "high" | "xhigh"
|
||||
Summary string `json:"summary,omitempty"` // "auto" | "concise" | "detailed"
|
||||
}
|
||||
|
||||
@@ -345,7 +345,7 @@ type ChatCompletionsRequest struct {
|
||||
StreamOptions *ChatStreamOptions `json:"stream_options,omitempty"`
|
||||
Tools []ChatTool `json:"tools,omitempty"`
|
||||
ToolChoice json.RawMessage `json:"tool_choice,omitempty"`
|
||||
ReasoningEffort string `json:"reasoning_effort,omitempty"` // "low" | "medium" | "high"
|
||||
ReasoningEffort string `json:"reasoning_effort,omitempty"` // "low" | "medium" | "high" | "xhigh"
|
||||
ServiceTier string `json:"service_tier,omitempty"`
|
||||
Stop json.RawMessage `json:"stop,omitempty"` // string or []string
|
||||
|
||||
|
||||
@@ -58,7 +58,8 @@ func (r *groupRepository) Create(ctx context.Context, groupIn *service.Group) er
|
||||
SetAllowMessagesDispatch(groupIn.AllowMessagesDispatch).
|
||||
SetRequireOauthOnly(groupIn.RequireOAuthOnly).
|
||||
SetRequirePrivacySet(groupIn.RequirePrivacySet).
|
||||
SetDefaultMappedModel(groupIn.DefaultMappedModel)
|
||||
SetDefaultMappedModel(groupIn.DefaultMappedModel).
|
||||
SetMessagesDispatchModelConfig(groupIn.MessagesDispatchModelConfig)
|
||||
|
||||
// 设置模型路由配置
|
||||
if groupIn.ModelRouting != nil {
|
||||
@@ -124,7 +125,8 @@ func (r *groupRepository) Update(ctx context.Context, groupIn *service.Group) er
|
||||
SetAllowMessagesDispatch(groupIn.AllowMessagesDispatch).
|
||||
SetRequireOauthOnly(groupIn.RequireOAuthOnly).
|
||||
SetRequirePrivacySet(groupIn.RequirePrivacySet).
|
||||
SetDefaultMappedModel(groupIn.DefaultMappedModel)
|
||||
SetDefaultMappedModel(groupIn.DefaultMappedModel).
|
||||
SetMessagesDispatchModelConfig(groupIn.MessagesDispatchModelConfig)
|
||||
|
||||
// 显式处理可空字段:nil 需要 clear,非 nil 需要 set。
|
||||
if groupIn.DailyLimitUSD != nil {
|
||||
|
||||
@@ -152,10 +152,11 @@ type CreateGroupInput struct {
|
||||
// 支持的模型系列(仅 antigravity 平台使用)
|
||||
SupportedModelScopes []string
|
||||
// OpenAI Messages 调度配置(仅 openai 平台使用)
|
||||
AllowMessagesDispatch bool
|
||||
DefaultMappedModel string
|
||||
RequireOAuthOnly bool
|
||||
RequirePrivacySet bool
|
||||
AllowMessagesDispatch bool
|
||||
DefaultMappedModel string
|
||||
RequireOAuthOnly bool
|
||||
RequirePrivacySet bool
|
||||
MessagesDispatchModelConfig OpenAIMessagesDispatchModelConfig
|
||||
// 从指定分组复制账号(创建分组后在同一事务内绑定)
|
||||
CopyAccountsFromGroupIDs []int64
|
||||
}
|
||||
@@ -186,10 +187,11 @@ type UpdateGroupInput struct {
|
||||
// 支持的模型系列(仅 antigravity 平台使用)
|
||||
SupportedModelScopes *[]string
|
||||
// OpenAI Messages 调度配置(仅 openai 平台使用)
|
||||
AllowMessagesDispatch *bool
|
||||
DefaultMappedModel *string
|
||||
RequireOAuthOnly *bool
|
||||
RequirePrivacySet *bool
|
||||
AllowMessagesDispatch *bool
|
||||
DefaultMappedModel *string
|
||||
RequireOAuthOnly *bool
|
||||
RequirePrivacySet *bool
|
||||
MessagesDispatchModelConfig *OpenAIMessagesDispatchModelConfig
|
||||
// 从指定分组复制账号(同步操作:先清空当前分组的账号绑定,再绑定源分组的账号)
|
||||
CopyAccountsFromGroupIDs []int64
|
||||
}
|
||||
@@ -908,7 +910,9 @@ func (s *adminServiceImpl) CreateGroup(ctx context.Context, input *CreateGroupIn
|
||||
RequireOAuthOnly: input.RequireOAuthOnly,
|
||||
RequirePrivacySet: input.RequirePrivacySet,
|
||||
DefaultMappedModel: input.DefaultMappedModel,
|
||||
MessagesDispatchModelConfig: normalizeOpenAIMessagesDispatchModelConfig(input.MessagesDispatchModelConfig),
|
||||
}
|
||||
sanitizeGroupMessagesDispatchFields(group)
|
||||
if err := s.groupRepo.Create(ctx, group); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1135,6 +1139,10 @@ func (s *adminServiceImpl) UpdateGroup(ctx context.Context, id int64, input *Upd
|
||||
if input.DefaultMappedModel != nil {
|
||||
group.DefaultMappedModel = *input.DefaultMappedModel
|
||||
}
|
||||
if input.MessagesDispatchModelConfig != nil {
|
||||
group.MessagesDispatchModelConfig = normalizeOpenAIMessagesDispatchModelConfig(*input.MessagesDispatchModelConfig)
|
||||
}
|
||||
sanitizeGroupMessagesDispatchFields(group)
|
||||
|
||||
if err := s.groupRepo.Update(ctx, group); err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -10,6 +10,11 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// ptrString returns a *string holding the plain-string conversion of v. It is
// a small test helper for populating optional (*string) input fields from
// string-kinded values.
func ptrString[T ~string](v T) *string {
	converted := string(v)
	return &converted
}
||||
|
||||
// groupRepoStubForAdmin 用于测试 AdminService 的 GroupRepository Stub
|
||||
type groupRepoStubForAdmin struct {
|
||||
created *Group // 记录 Create 调用的参数
|
||||
@@ -245,6 +250,116 @@ func TestAdminService_UpdateGroup_PartialImagePricing(t *testing.T) {
|
||||
require.Nil(t, repo.updated.ImagePrice4K)
|
||||
}
|
||||
|
||||
// TestAdminService_CreateGroup_NormalizesMessagesDispatchModelConfig checks
// that CreateGroup trims and normalizes every dispatch-mapping field (family
// defaults plus exact-mapping keys and values) before handing the group to
// the repository.
func TestAdminService_CreateGroup_NormalizesMessagesDispatchModelConfig(t *testing.T) {
	repo := &groupRepoStubForAdmin{}
	svc := &adminServiceImpl{groupRepo: repo}

	group, err := svc.CreateGroup(context.Background(), &CreateGroupInput{
		Name:           "dispatch-group",
		Description:    "dispatch config",
		Platform:       PlatformOpenAI,
		RateMultiplier: 1.0,
		MessagesDispatchModelConfig: OpenAIMessagesDispatchModelConfig{
			OpusMappedModel:   " gpt-5.4-high ",
			SonnetMappedModel: " gpt-5.3-codex ",
			HaikuMappedModel:  " gpt-5.4-mini-medium ",
			ExactModelMappings: map[string]string{
				" claude-sonnet-4-5-20250929 ": " gpt-5.2-high ",
			},
		},
	})
	require.NoError(t, err)
	require.NotNil(t, group)
	require.NotNil(t, repo.created)
	// NOTE(review): besides whitespace, normalization strips effort suffixes
	// ("-high", "-medium") — confirm against
	// normalizeOpenAIMessagesDispatchModelConfig.
	require.Equal(t, OpenAIMessagesDispatchModelConfig{
		OpusMappedModel:   "gpt-5.4",
		SonnetMappedModel: "gpt-5.3-codex",
		HaikuMappedModel:  "gpt-5.4-mini",
		ExactModelMappings: map[string]string{
			"claude-sonnet-4-5-20250929": "gpt-5.2",
		},
	}, repo.created.MessagesDispatchModelConfig)
}
|
||||
|
||||
// TestAdminService_UpdateGroup_NormalizesMessagesDispatchModelConfig checks
// that UpdateGroup applies the same trimming/normalization when the dispatch
// config arrives via the optional pointer field on UpdateGroupInput.
func TestAdminService_UpdateGroup_NormalizesMessagesDispatchModelConfig(t *testing.T) {
	existingGroup := &Group{
		ID:       1,
		Name:     "existing-group",
		Platform: PlatformOpenAI,
		Status:   StatusActive,
	}
	repo := &groupRepoStubForAdmin{getByID: existingGroup}
	svc := &adminServiceImpl{groupRepo: repo}

	group, err := svc.UpdateGroup(context.Background(), 1, &UpdateGroupInput{
		MessagesDispatchModelConfig: &OpenAIMessagesDispatchModelConfig{
			SonnetMappedModel: " gpt-5.4-medium ",
			ExactModelMappings: map[string]string{
				" claude-haiku-4-5-20251001 ": " gpt-5.4-mini-high ",
			},
		},
	})
	require.NoError(t, err)
	require.NotNil(t, group)
	require.NotNil(t, repo.updated)
	require.Equal(t, OpenAIMessagesDispatchModelConfig{
		SonnetMappedModel: "gpt-5.4",
		ExactModelMappings: map[string]string{
			"claude-haiku-4-5-20251001": "gpt-5.4-mini",
		},
	}, repo.updated.MessagesDispatchModelConfig)
}
|
||||
|
||||
// TestAdminService_CreateGroup_ClearsMessagesDispatchFieldsForNonOpenAIPlatform
// ensures the dispatch-only fields (AllowMessagesDispatch, DefaultMappedModel,
// MessagesDispatchModelConfig) are zeroed when a group is created on a
// non-OpenAI platform, since they only apply to openai groups.
func TestAdminService_CreateGroup_ClearsMessagesDispatchFieldsForNonOpenAIPlatform(t *testing.T) {
	repo := &groupRepoStubForAdmin{}
	svc := &adminServiceImpl{groupRepo: repo}

	group, err := svc.CreateGroup(context.Background(), &CreateGroupInput{
		Name:                  "anthropic-group",
		Description:           "non-openai",
		Platform:              PlatformAnthropic,
		RateMultiplier:        1.0,
		AllowMessagesDispatch: true,
		DefaultMappedModel:    "gpt-5.4",
		MessagesDispatchModelConfig: OpenAIMessagesDispatchModelConfig{
			OpusMappedModel: "gpt-5.4",
		},
	})
	require.NoError(t, err)
	require.NotNil(t, group)
	require.NotNil(t, repo.created)
	require.False(t, repo.created.AllowMessagesDispatch)
	require.Empty(t, repo.created.DefaultMappedModel)
	require.Equal(t, OpenAIMessagesDispatchModelConfig{}, repo.created.MessagesDispatchModelConfig)
}
|
||||
|
||||
// TestAdminService_UpdateGroup_ClearsMessagesDispatchFieldsWhenPlatformChangesAwayFromOpenAI
// ensures previously-configured dispatch fields are wiped when an update moves
// an existing OpenAI group to another platform.
func TestAdminService_UpdateGroup_ClearsMessagesDispatchFieldsWhenPlatformChangesAwayFromOpenAI(t *testing.T) {
	existingGroup := &Group{
		ID:                    1,
		Name:                  "existing-openai-group",
		Platform:              PlatformOpenAI,
		Status:                StatusActive,
		AllowMessagesDispatch: true,
		DefaultMappedModel:    "gpt-5.4",
		MessagesDispatchModelConfig: OpenAIMessagesDispatchModelConfig{
			SonnetMappedModel: "gpt-5.3-codex",
		},
	}
	repo := &groupRepoStubForAdmin{getByID: existingGroup}
	svc := &adminServiceImpl{groupRepo: repo}

	group, err := svc.UpdateGroup(context.Background(), 1, &UpdateGroupInput{
		Platform: PlatformAnthropic,
	})
	require.NoError(t, err)
	require.NotNil(t, group)
	require.NotNil(t, repo.updated)
	require.Equal(t, PlatformAnthropic, repo.updated.Platform)
	require.False(t, repo.updated.AllowMessagesDispatch)
	require.Empty(t, repo.updated.DefaultMappedModel)
	require.Equal(t, OpenAIMessagesDispatchModelConfig{}, repo.updated.MessagesDispatchModelConfig)
}
|
||||
|
||||
func TestAdminService_ListGroups_WithSearch(t *testing.T) {
|
||||
// 测试:
|
||||
// 1. search 参数正常传递到 repository 层
|
||||
|
||||
@@ -3,8 +3,12 @@ package service
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Wei-Shaw/sub2api/internal/domain"
|
||||
)
|
||||
|
||||
type OpenAIMessagesDispatchModelConfig = domain.OpenAIMessagesDispatchModelConfig
|
||||
|
||||
type Group struct {
|
||||
ID int64
|
||||
Name string
|
||||
@@ -49,10 +53,11 @@ type Group struct {
|
||||
SortOrder int
|
||||
|
||||
// OpenAI Messages 调度配置(仅 openai 平台使用)
|
||||
AllowMessagesDispatch bool
|
||||
RequireOAuthOnly bool // 仅允许非 apikey 类型账号关联(OpenAI/Antigravity/Anthropic/Gemini)
|
||||
RequirePrivacySet bool // 调度时仅允许 privacy 已成功设置的账号(OpenAI/Antigravity/Anthropic/Gemini)
|
||||
DefaultMappedModel string
|
||||
AllowMessagesDispatch bool
|
||||
RequireOAuthOnly bool // 仅允许非 apikey 类型账号关联(OpenAI/Antigravity/Anthropic/Gemini)
|
||||
RequirePrivacySet bool // 调度时仅允许 privacy 已成功设置的账号(OpenAI/Antigravity/Anthropic/Gemini)
|
||||
DefaultMappedModel string
|
||||
MessagesDispatchModelConfig OpenAIMessagesDispatchModelConfig
|
||||
|
||||
CreatedAt time.Time
|
||||
UpdatedAt time.Time
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// forcedCodexInstructionsTemplateData is the value set exposed to the
// server-forced Codex instructions template. ExistingInstructions carries the
// instructions the request would otherwise use; the model fields let a
// template vary its output by model identity.
type forcedCodexInstructionsTemplateData struct {
	ExistingInstructions string
	OriginalModel        string
	NormalizedModel      string
	BillingModel         string
	UpstreamModel        string
}

// applyForcedCodexInstructionsTemplate renders templateText with data and, if
// the trimmed result is non-empty and differs (ignoring surrounding
// whitespace) from the request's current top-level "instructions" value,
// overwrites that value in reqBody. It reports whether reqBody was modified.
func applyForcedCodexInstructionsTemplate(
	reqBody map[string]any,
	templateText string,
	data forcedCodexInstructionsTemplateData,
) (bool, error) {
	final, renderErr := renderForcedCodexInstructionsTemplate(templateText, data)
	if renderErr != nil {
		return false, renderErr
	}
	if final == "" {
		// A template that renders to nothing leaves the request untouched.
		return false, nil
	}

	current, _ := reqBody["instructions"].(string)
	if strings.TrimSpace(current) == final {
		// Already up to date; skip the write so callers observe a no-op.
		return false, nil
	}

	reqBody["instructions"] = final
	return true, nil
}

// renderForcedCodexInstructionsTemplate parses and executes templateText with
// data, returning the whitespace-trimmed output. Parse and execution failures
// are wrapped with context for the caller.
func renderForcedCodexInstructionsTemplate(
	templateText string,
	data forcedCodexInstructionsTemplateData,
) (string, error) {
	parsed, err := template.New("forced_codex_instructions").Option("missingkey=zero").Parse(templateText)
	if err != nil {
		return "", fmt.Errorf("parse forced codex instructions template: %w", err)
	}

	var out bytes.Buffer
	if execErr := parsed.Execute(&out, data); execErr != nil {
		return "", fmt.Errorf("render forced codex instructions template: %w", execErr)
	}

	return strings.TrimSpace(out.String()), nil
}
|
||||
@@ -6,9 +6,12 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/Wei-Shaw/sub2api/internal/config"
|
||||
"github.com/Wei-Shaw/sub2api/internal/pkg/apicompat"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/require"
|
||||
@@ -127,3 +130,101 @@ func TestForwardAsAnthropic_NormalizesRoutingAndEffortForGpt54XHigh(t *testing.T
|
||||
t.Logf("upstream body: %s", string(upstream.lastBody))
|
||||
t.Logf("response body: %s", rec.Body.String())
|
||||
}
|
||||
|
||||
// TestForwardAsAnthropic_ForcedCodexInstructionsTemplatePrependsRenderedInstructions
// verifies the forced template is rendered with the client's system prompt
// substituted into {{ .ExistingInstructions }}, and that the rendered text is
// what actually reaches the upstream request's top-level "instructions".
func TestForwardAsAnthropic_ForcedCodexInstructionsTemplatePrependsRenderedInstructions(t *testing.T) {
	t.Parallel()
	gin.SetMode(gin.TestMode)

	templateDir := t.TempDir()
	templatePath := filepath.Join(templateDir, "codex-instructions.md.tmpl")
	require.NoError(t, os.WriteFile(templatePath, []byte("server-prefix\n\n{{ .ExistingInstructions }}"), 0o644))

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	body := []byte(`{"model":"gpt-5.4","max_tokens":16,"system":"client-system","messages":[{"role":"user","content":"hello"}],"stream":false}`)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/messages", bytes.NewReader(body))
	c.Request.Header.Set("Content-Type", "application/json")

	// Minimal SSE conversation: one completed response, then [DONE].
	upstreamBody := strings.Join([]string{
		`data: {"type":"response.completed","response":{"id":"resp_1","object":"response","model":"gpt-5.4","status":"completed","output":[{"type":"message","id":"msg_1","role":"assistant","status":"completed","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":5,"output_tokens":2,"total_tokens":7}}}`,
		"",
		"data: [DONE]",
		"",
	}, "\n")
	upstream := &httpUpstreamRecorder{resp: &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"text/event-stream"}, "x-request-id": []string{"rid_forced"}},
		Body:       io.NopCloser(strings.NewReader(upstreamBody)),
	}}

	// Both the file path and the pre-cached template content are set here;
	// the forward path is expected to use the cached content.
	svc := &OpenAIGatewayService{
		cfg: &config.Config{Gateway: config.GatewayConfig{
			ForcedCodexInstructionsTemplateFile: templatePath,
			ForcedCodexInstructionsTemplate:     "server-prefix\n\n{{ .ExistingInstructions }}",
		}},
		httpUpstream: upstream,
	}
	account := &Account{
		ID:          1,
		Name:        "openai-oauth",
		Platform:    PlatformOpenAI,
		Type:        AccountTypeOAuth,
		Concurrency: 1,
		Credentials: map[string]any{
			"access_token":       "oauth-token",
			"chatgpt_account_id": "chatgpt-acc",
		},
	}

	result, err := svc.ForwardAsAnthropic(context.Background(), c, account, body, "", "gpt-5.1")
	require.NoError(t, err)
	require.NotNil(t, result)
	// The client "system" text was converted to instructions and injected into
	// the template's {{ .ExistingInstructions }} slot before forwarding.
	require.Equal(t, "server-prefix\n\nclient-system", gjson.GetBytes(upstream.lastBody, "instructions").String())
}
|
||||
|
||||
func TestForwardAsAnthropic_ForcedCodexInstructionsTemplateUsesCachedTemplateContent(t *testing.T) {
|
||||
t.Parallel()
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
rec := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(rec)
|
||||
body := []byte(`{"model":"gpt-5.4","max_tokens":16,"system":"client-system","messages":[{"role":"user","content":"hello"}],"stream":false}`)
|
||||
c.Request = httptest.NewRequest(http.MethodPost, "/v1/messages", bytes.NewReader(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
upstreamBody := strings.Join([]string{
|
||||
`data: {"type":"response.completed","response":{"id":"resp_1","object":"response","model":"gpt-5.4","status":"completed","output":[{"type":"message","id":"msg_1","role":"assistant","status":"completed","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":5,"output_tokens":2,"total_tokens":7}}}`,
|
||||
"",
|
||||
"data: [DONE]",
|
||||
"",
|
||||
}, "\n")
|
||||
upstream := &httpUpstreamRecorder{resp: &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Header: http.Header{"Content-Type": []string{"text/event-stream"}, "x-request-id": []string{"rid_forced_cached"}},
|
||||
Body: io.NopCloser(strings.NewReader(upstreamBody)),
|
||||
}}
|
||||
|
||||
svc := &OpenAIGatewayService{
|
||||
cfg: &config.Config{Gateway: config.GatewayConfig{
|
||||
ForcedCodexInstructionsTemplateFile: "/path/that/should/not/be/read.tmpl",
|
||||
ForcedCodexInstructionsTemplate: "cached-prefix\n\n{{ .ExistingInstructions }}",
|
||||
}},
|
||||
httpUpstream: upstream,
|
||||
}
|
||||
account := &Account{
|
||||
ID: 1,
|
||||
Name: "openai-oauth",
|
||||
Platform: PlatformOpenAI,
|
||||
Type: AccountTypeOAuth,
|
||||
Concurrency: 1,
|
||||
Credentials: map[string]any{
|
||||
"access_token": "oauth-token",
|
||||
"chatgpt_account_id": "chatgpt-acc",
|
||||
},
|
||||
}
|
||||
|
||||
result, err := svc.ForwardAsAnthropic(context.Background(), c, account, body, "", "gpt-5.1")
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, result)
|
||||
require.Equal(t, "cached-prefix\n\nclient-system", gjson.GetBytes(upstream.lastBody, "instructions").String())
|
||||
}
|
||||
|
||||
@@ -86,6 +86,24 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
|
||||
return nil, fmt.Errorf("unmarshal for codex transform: %w", err)
|
||||
}
|
||||
codexResult := applyCodexOAuthTransform(reqBody, false, false)
|
||||
forcedTemplateText := ""
|
||||
if s.cfg != nil {
|
||||
forcedTemplateText = s.cfg.Gateway.ForcedCodexInstructionsTemplate
|
||||
}
|
||||
templateUpstreamModel := upstreamModel
|
||||
if codexResult.NormalizedModel != "" {
|
||||
templateUpstreamModel = codexResult.NormalizedModel
|
||||
}
|
||||
existingInstructions, _ := reqBody["instructions"].(string)
|
||||
if _, err := applyForcedCodexInstructionsTemplate(reqBody, forcedTemplateText, forcedCodexInstructionsTemplateData{
|
||||
ExistingInstructions: strings.TrimSpace(existingInstructions),
|
||||
OriginalModel: originalModel,
|
||||
NormalizedModel: normalizedModel,
|
||||
BillingModel: billingModel,
|
||||
UpstreamModel: templateUpstreamModel,
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if codexResult.NormalizedModel != "" {
|
||||
upstreamModel = codexResult.NormalizedModel
|
||||
}
|
||||
|
||||
100
backend/internal/service/openai_messages_dispatch.go
Normal file
100
backend/internal/service/openai_messages_dispatch.go
Normal file
@@ -0,0 +1,100 @@
|
||||
package service
|
||||
|
||||
import "strings"
|
||||
|
||||
// Built-in fallback models used by ResolveMessagesDispatchModel when a group
// recognizes a Claude model family but has no per-family override configured.
const (
	// Fallback for the "opus" family.
	defaultOpenAIMessagesDispatchOpusMappedModel = "gpt-5.4"
	// Fallback for the "sonnet" family.
	defaultOpenAIMessagesDispatchSonnetMappedModel = "gpt-5.3-codex"
	// Fallback for the "haiku" family.
	defaultOpenAIMessagesDispatchHaikuMappedModel = "gpt-5.4-mini"
)
|
||||
|
||||
func normalizeOpenAIMessagesDispatchMappedModel(model string) string {
|
||||
model = NormalizeOpenAICompatRequestedModel(strings.TrimSpace(model))
|
||||
return strings.TrimSpace(model)
|
||||
}
|
||||
|
||||
func normalizeOpenAIMessagesDispatchModelConfig(cfg OpenAIMessagesDispatchModelConfig) OpenAIMessagesDispatchModelConfig {
|
||||
out := OpenAIMessagesDispatchModelConfig{
|
||||
OpusMappedModel: normalizeOpenAIMessagesDispatchMappedModel(cfg.OpusMappedModel),
|
||||
SonnetMappedModel: normalizeOpenAIMessagesDispatchMappedModel(cfg.SonnetMappedModel),
|
||||
HaikuMappedModel: normalizeOpenAIMessagesDispatchMappedModel(cfg.HaikuMappedModel),
|
||||
}
|
||||
|
||||
if len(cfg.ExactModelMappings) > 0 {
|
||||
out.ExactModelMappings = make(map[string]string, len(cfg.ExactModelMappings))
|
||||
for requestedModel, mappedModel := range cfg.ExactModelMappings {
|
||||
requestedModel = strings.TrimSpace(requestedModel)
|
||||
mappedModel = normalizeOpenAIMessagesDispatchMappedModel(mappedModel)
|
||||
if requestedModel == "" || mappedModel == "" {
|
||||
continue
|
||||
}
|
||||
out.ExactModelMappings[requestedModel] = mappedModel
|
||||
}
|
||||
if len(out.ExactModelMappings) == 0 {
|
||||
out.ExactModelMappings = nil
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// claudeMessagesDispatchFamily classifies a requested model name into one of
// the Claude model families ("opus", "sonnet", or "haiku"). It returns "" for
// anything that does not start with "claude" or whose family substring cannot
// be found. Matching is case-insensitive and ignores surrounding whitespace.
func claudeMessagesDispatchFamily(model string) string {
	name := strings.ToLower(strings.TrimSpace(model))
	if !strings.HasPrefix(name, "claude") {
		return ""
	}
	// Check families in the original priority order: opus, sonnet, haiku.
	for _, family := range []string{"opus", "sonnet", "haiku"} {
		if strings.Contains(name, family) {
			return family
		}
	}
	return ""
}
|
||||
|
||||
func (g *Group) ResolveMessagesDispatchModel(requestedModel string) string {
|
||||
if g == nil {
|
||||
return ""
|
||||
}
|
||||
requestedModel = strings.TrimSpace(requestedModel)
|
||||
if requestedModel == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
cfg := normalizeOpenAIMessagesDispatchModelConfig(g.MessagesDispatchModelConfig)
|
||||
if mappedModel := strings.TrimSpace(cfg.ExactModelMappings[requestedModel]); mappedModel != "" {
|
||||
return mappedModel
|
||||
}
|
||||
|
||||
switch claudeMessagesDispatchFamily(requestedModel) {
|
||||
case "opus":
|
||||
if mappedModel := strings.TrimSpace(cfg.OpusMappedModel); mappedModel != "" {
|
||||
return mappedModel
|
||||
}
|
||||
return defaultOpenAIMessagesDispatchOpusMappedModel
|
||||
case "sonnet":
|
||||
if mappedModel := strings.TrimSpace(cfg.SonnetMappedModel); mappedModel != "" {
|
||||
return mappedModel
|
||||
}
|
||||
return defaultOpenAIMessagesDispatchSonnetMappedModel
|
||||
case "haiku":
|
||||
if mappedModel := strings.TrimSpace(cfg.HaikuMappedModel); mappedModel != "" {
|
||||
return mappedModel
|
||||
}
|
||||
return defaultOpenAIMessagesDispatchHaikuMappedModel
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
func sanitizeGroupMessagesDispatchFields(g *Group) {
|
||||
if g == nil || g.Platform == PlatformOpenAI {
|
||||
return
|
||||
}
|
||||
g.AllowMessagesDispatch = false
|
||||
g.DefaultMappedModel = ""
|
||||
g.MessagesDispatchModelConfig = OpenAIMessagesDispatchModelConfig{}
|
||||
}
|
||||
27
backend/internal/service/openai_messages_dispatch_test.go
Normal file
27
backend/internal/service/openai_messages_dispatch_test.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package service
|
||||
|
||||
import (
	"testing"

	"github.com/stretchr/testify/require"
)
|
||||
|
||||
func TestNormalizeOpenAIMessagesDispatchModelConfig(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
cfg := normalizeOpenAIMessagesDispatchModelConfig(OpenAIMessagesDispatchModelConfig{
|
||||
OpusMappedModel: " gpt-5.4-high ",
|
||||
SonnetMappedModel: "gpt-5.3-codex",
|
||||
HaikuMappedModel: " gpt-5.4-mini-medium ",
|
||||
ExactModelMappings: map[string]string{
|
||||
" claude-sonnet-4-5-20250929 ": " gpt-5.2-high ",
|
||||
"": "gpt-5.4",
|
||||
"claude-opus-4-6": " ",
|
||||
},
|
||||
})
|
||||
|
||||
require.Equal(t, "gpt-5.4", cfg.OpusMappedModel)
|
||||
require.Equal(t, "gpt-5.3-codex", cfg.SonnetMappedModel)
|
||||
require.Equal(t, "gpt-5.4-mini", cfg.HaikuMappedModel)
|
||||
require.Equal(t, map[string]string{
|
||||
"claude-sonnet-4-5-20250929": "gpt-5.2",
|
||||
}, cfg.ExactModelMappings)
|
||||
}
|
||||
Reference in New Issue
Block a user