Merge pull request #496 from mt21625457/main
feat(model): add gpt-5.3 Codex mapping and price configuration

.github/workflows/backend-ci.yml (4 changes)

@@ -19,7 +19,7 @@ jobs:
           cache: true
       - name: Verify Go version
         run: |
-          go version | grep -q 'go1.25.6'
+          go version | grep -q 'go1.25.7'
       - name: Unit tests
         working-directory: backend
         run: make test-unit
@@ -38,7 +38,7 @@ jobs:
           cache: true
       - name: Verify Go version
         run: |
-          go version | grep -q 'go1.25.6'
+          go version | grep -q 'go1.25.7'
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v9
         with:

.github/workflows/release.yml (2 changes)

@@ -115,7 +115,7 @@ jobs:
 
       - name: Verify Go version
         run: |
-          go version | grep -q 'go1.25.6'
+          go version | grep -q 'go1.25.7'
 
       # Docker setup for GoReleaser
       - name: Set up QEMU

.github/workflows/security-scan.yml (2 changes)

@@ -22,7 +22,7 @@ jobs:
           cache-dependency-path: backend/go.sum
       - name: Verify Go version
         run: |
-          go version | grep -q 'go1.25.6'
+          go version | grep -q 'go1.25.7'
       - name: Run govulncheck
         working-directory: backend
         run: |

@@ -7,7 +7,7 @@
 # =============================================================================
 
 ARG NODE_IMAGE=node:24-alpine
-ARG GOLANG_IMAGE=golang:1.25.6-alpine
+ARG GOLANG_IMAGE=golang:1.25.7-alpine
 ARG ALPINE_IMAGE=alpine:3.20
 ARG GOPROXY=https://goproxy.cn,direct
 ARG GOSUMDB=sum.golang.google.cn

@@ -2,7 +2,7 @@
 
 <div align="center">
 
-[](https://golang.org/)
+[](https://golang.org/)
 [](https://vuejs.org/)
 [](https://www.postgresql.org/)
 [](https://redis.io/)
@@ -44,7 +44,7 @@ Sub2API is an AI API gateway platform designed to distribute and manage API quot
 
 | Component | Technology |
 |-----------|------------|
-| Backend | Go 1.25.5, Gin, Ent |
+| Backend | Go 1.25.7, Gin, Ent |
 | Frontend | Vue 3.4+, Vite 5+, TailwindCSS |
 | Database | PostgreSQL 15+ |
 | Cache/Queue | Redis 7+ |

@@ -2,7 +2,7 @@
 
 <div align="center">
 
-[](https://golang.org/)
+[](https://golang.org/)
 [](https://vuejs.org/)
 [](https://www.postgresql.org/)
 [](https://redis.io/)
@@ -44,7 +44,7 @@ Sub2API 是一个 AI API 网关平台,用于分发和管理 AI 产品订阅(
 
 | 组件 | 技术 |
 |------|------|
-| 后端 | Go 1.25.5, Gin, Ent |
+| 后端 | Go 1.25.7, Gin, Ent |
 | 前端 | Vue 3.4+, Vite 5+, TailwindCSS |
 | 数据库 | PostgreSQL 15+ |
 | 缓存/队列 | Redis 7+ |

@@ -1,4 +1,4 @@
-FROM golang:1.25.6-alpine
+FROM golang:1.25.7-alpine
 
 WORKDIR /app
 

@@ -1 +1 @@
-0.1.61
+0.1.70

@@ -1,6 +1,6 @@
 module github.com/Wei-Shaw/sub2api
 
-go 1.25.6
+go 1.25.7
 
 require (
 	entgo.io/ent v0.14.5

@@ -170,8 +170,6 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk
 github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
 github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
 github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
-github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
 github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM=
 github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
 github.com/mdelapenya/tlscert v0.2.0 h1:7H81W6Z/4weDvZBNOfQte5GpIMo0lGYEeWbkGp5LJHI=
@@ -205,8 +203,6 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
 github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
 github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
 github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
-github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
-github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
 github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
 github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
 github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
@@ -234,8 +230,6 @@ github.com/refraction-networking/utls v1.8.1 h1:yNY1kapmQU8JeM1sSw2H2asfTIwWxIkr
 github.com/refraction-networking/utls v1.8.1/go.mod h1:jkSOEkLqn+S/jtpEHPOsVv/4V4EVnelwbMQl4vCWXAM=
 github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
 github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
-github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
-github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
 github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
 github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
 github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
@@ -258,8 +252,6 @@ github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
 github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
 github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
 github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
-github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
-github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
 github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ=

@@ -15,6 +15,8 @@ type Model struct {
 
 // DefaultModels OpenAI models list
 var DefaultModels = []Model{
+	{ID: "gpt-5.3", Object: "model", Created: 1735689600, OwnedBy: "openai", Type: "model", DisplayName: "GPT-5.3"},
+	{ID: "gpt-5.3-codex", Object: "model", Created: 1735689600, OwnedBy: "openai", Type: "model", DisplayName: "GPT-5.3 Codex"},
 	{ID: "gpt-5.2", Object: "model", Created: 1733875200, OwnedBy: "openai", Type: "model", DisplayName: "GPT-5.2"},
 	{ID: "gpt-5.2-codex", Object: "model", Created: 1733011200, OwnedBy: "openai", Type: "model", DisplayName: "GPT-5.2 Codex"},
 	{ID: "gpt-5.1-codex-max", Object: "model", Created: 1730419200, OwnedBy: "openai", Type: "model", DisplayName: "GPT-5.1 Codex Max"},

@@ -21,6 +21,17 @@ const (
 var codexCLIInstructions string
 
 var codexModelMap = map[string]string{
+	"gpt-5.3": "gpt-5.3",
+	"gpt-5.3-none": "gpt-5.3",
+	"gpt-5.3-low": "gpt-5.3",
+	"gpt-5.3-medium": "gpt-5.3",
+	"gpt-5.3-high": "gpt-5.3",
+	"gpt-5.3-xhigh": "gpt-5.3",
+	"gpt-5.3-codex": "gpt-5.3-codex",
+	"gpt-5.3-codex-low": "gpt-5.3-codex",
+	"gpt-5.3-codex-medium": "gpt-5.3-codex",
+	"gpt-5.3-codex-high": "gpt-5.3-codex",
+	"gpt-5.3-codex-xhigh": "gpt-5.3-codex",
 	"gpt-5.1-codex": "gpt-5.1-codex",
 	"gpt-5.1-codex-low": "gpt-5.1-codex",
 	"gpt-5.1-codex-medium": "gpt-5.1-codex",
@@ -156,6 +167,12 @@ func normalizeCodexModel(model string) string {
 	if strings.Contains(normalized, "gpt-5.2") || strings.Contains(normalized, "gpt 5.2") {
 		return "gpt-5.2"
 	}
+	if strings.Contains(normalized, "gpt-5.3-codex") || strings.Contains(normalized, "gpt 5.3 codex") {
+		return "gpt-5.3-codex"
+	}
+	if strings.Contains(normalized, "gpt-5.3") || strings.Contains(normalized, "gpt 5.3") {
+		return "gpt-5.3"
+	}
 	if strings.Contains(normalized, "gpt-5.1-codex-max") || strings.Contains(normalized, "gpt 5.1 codex max") {
 		return "gpt-5.1-codex-max"
 	}
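
The map covers the exact reasoning-effort aliases, while normalizeCodexModel catches free-form names. A minimal, self-contained sketch of the ordering concern in this hunk (the function and names below are illustrative, not the project's code): the gpt-5.3-codex check has to run before the plain gpt-5.3 check, otherwise every Codex alias would collapse to the base model.

```go
package main

import (
	"fmt"
	"strings"
)

// Simplified re-implementation of the substring fallback added in this PR,
// for illustration only: the more specific family name is matched first.
func normalize(model string) string {
	m := strings.ToLower(model)
	switch {
	case strings.Contains(m, "gpt-5.3-codex") || strings.Contains(m, "gpt 5.3 codex"):
		return "gpt-5.3-codex"
	case strings.Contains(m, "gpt-5.3") || strings.Contains(m, "gpt 5.3"):
		return "gpt-5.3"
	default:
		return model
	}
}

func main() {
	fmt.Println(normalize("gpt-5.3-codex-xhigh")) // gpt-5.3-codex
	fmt.Println(normalize("gpt 5.3 codex"))       // gpt-5.3-codex
	fmt.Println(normalize("gpt-5.3-high"))        // gpt-5.3
}
```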

@@ -176,6 +176,19 @@ func TestApplyCodexOAuthTransform_EmptyInput(t *testing.T) {
 	require.Len(t, input, 0)
 }
 
+func TestNormalizeCodexModel_Gpt53(t *testing.T) {
+	cases := map[string]string{
+		"gpt-5.3": "gpt-5.3",
+		"gpt-5.3-codex": "gpt-5.3-codex",
+		"gpt-5.3-codex-xhigh": "gpt-5.3-codex",
+		"gpt 5.3 codex": "gpt-5.3-codex",
+	}
+
+	for input, expected := range cases {
+		require.Equal(t, expected, normalizeCodexModel(input))
+	}
+}
+
 func setupCodexCache(t *testing.T) {
 	t.Helper()
 

@@ -651,7 +651,8 @@ func (s *PricingService) matchByModelFamily(model string) *LiteLLMModelPricing {
 // Fallback order:
 // 1. gpt-5.2-codex -> gpt-5.2 (strip suffixes such as -codex, -mini, -max)
 // 2. gpt-5.2-20251222 -> gpt-5.2 (strip the date version suffix)
-// 3. Finally fall back to DefaultTestModel (gpt-5.1-codex)
+// 3. gpt-5.3-codex -> gpt-5.2-codex
+// 4. Finally fall back to DefaultTestModel (gpt-5.1-codex)
 func (s *PricingService) matchOpenAIModel(model string) *LiteLLMModelPricing {
 	// Fallback variants to try
 	variants := s.generateOpenAIModelVariants(model, openAIModelDatePattern)
@@ -663,6 +664,13 @@ func (s *PricingService) matchOpenAIModel(model string) *LiteLLMModelPricing {
 		}
 	}
 
+	if strings.HasPrefix(model, "gpt-5.3-codex") {
+		if pricing, ok := s.pricingData["gpt-5.2-codex"]; ok {
+			log.Printf("[Pricing] OpenAI fallback matched %s -> %s", model, "gpt-5.2-codex")
+			return pricing
+		}
+	}
+
 	// Finally fall back to DefaultTestModel
 	defaultModel := strings.ToLower(openai.DefaultTestModel)
 	if pricing, ok := s.pricingData[defaultModel]; ok {
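
For context, a self-contained sketch of the price fallback the new block mirrors (the map contents and helper names here are illustrative, not the gateway's actual pricing data or API): when gpt-5.3-codex has no pricing entry of its own, the request is billed at gpt-5.2-codex rates.

```go
package main

import (
	"fmt"
	"strings"
)

// Illustrative pricing table (assumed data, not the real pricing file):
// gpt-5.3-codex deliberately has no entry here.
var inputPricePerToken = map[string]float64{
	"gpt-5.2-codex": 1.75e-06,
}

// lookupInputPrice sketches the fallback added above: exact match first,
// then reuse the gpt-5.2-codex price for any gpt-5.3-codex variant.
func lookupInputPrice(model string) (float64, string, bool) {
	if p, ok := inputPricePerToken[model]; ok {
		return p, model, true
	}
	if strings.HasPrefix(model, "gpt-5.3-codex") {
		if p, ok := inputPricePerToken["gpt-5.2-codex"]; ok {
			return p, "gpt-5.2-codex", true
		}
	}
	return 0, "", false
}

func main() {
	if p, matched, ok := lookupInputPrice("gpt-5.3-codex-xhigh"); ok {
		fmt.Printf("billed as %s at $%.2e per input token\n", matched, p)
	}
}
```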

@@ -1605,7 +1605,7 @@
         "cache_read_input_token_cost": 1.4e-07,
         "input_cost_per_token": 1.38e-06,
         "litellm_provider": "azure",
-        "max_input_tokens": 272000,
+        "max_input_tokens": 400000,
         "max_output_tokens": 128000,
         "max_tokens": 128000,
         "mode": "responses",
@@ -16951,6 +16951,209 @@
         "supports_tool_choice": false,
         "supports_vision": true
     },
+    "gpt-5.3": {
+        "cache_read_input_token_cost": 1.75e-07,
+        "cache_read_input_token_cost_priority": 3.5e-07,
+        "input_cost_per_token": 1.75e-06,
+        "input_cost_per_token_priority": 3.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 400000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 1.4e-05,
+        "output_cost_per_token_priority": 2.8e-05,
+        "supported_endpoints": [
+            "/v1/chat/completions",
+            "/v1/batch",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text",
+            "image"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_service_tier": true,
+        "supports_vision": true
+    },
+    "gpt-5.3-2025-12-11": {
+        "cache_read_input_token_cost": 1.75e-07,
+        "cache_read_input_token_cost_priority": 3.5e-07,
+        "input_cost_per_token": 1.75e-06,
+        "input_cost_per_token_priority": 3.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 400000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "chat",
+        "output_cost_per_token": 1.4e-05,
+        "output_cost_per_token_priority": 2.8e-05,
+        "supported_endpoints": [
+            "/v1/chat/completions",
+            "/v1/batch",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text",
+            "image"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_service_tier": true,
+        "supports_vision": true
+    },
+    "gpt-5.3-chat-latest": {
+        "cache_read_input_token_cost": 1.75e-07,
+        "cache_read_input_token_cost_priority": 3.5e-07,
+        "input_cost_per_token": 1.75e-06,
+        "input_cost_per_token_priority": 3.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 128000,
+        "max_output_tokens": 16384,
+        "max_tokens": 16384,
+        "mode": "chat",
+        "output_cost_per_token": 1.4e-05,
+        "output_cost_per_token_priority": 2.8e-05,
+        "supported_endpoints": [
+            "/v1/chat/completions",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
+    "gpt-5.3-pro": {
+        "input_cost_per_token": 2.1e-05,
+        "litellm_provider": "openai",
+        "max_input_tokens": 400000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "responses",
+        "output_cost_per_token": 1.68e-04,
+        "supported_endpoints": [
+            "/v1/batch",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_vision": true,
+        "supports_web_search": true
+    },
+    "gpt-5.3-pro-2025-12-11": {
+        "input_cost_per_token": 2.1e-05,
+        "litellm_provider": "openai",
+        "max_input_tokens": 400000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "responses",
+        "output_cost_per_token": 1.68e-04,
+        "supported_endpoints": [
+            "/v1/batch",
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true,
+        "supports_vision": true,
+        "supports_web_search": true
+    },
+    "gpt-5.3-codex": {
+        "cache_read_input_token_cost": 1.75e-07,
+        "cache_read_input_token_cost_priority": 3.5e-07,
+        "input_cost_per_token": 1.75e-06,
+        "input_cost_per_token_priority": 3.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 400000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "responses",
+        "output_cost_per_token": 1.4e-05,
+        "output_cost_per_token_priority": 2.8e-05,
+        "supported_endpoints": [
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": false,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
     "gpt-5.2": {
         "cache_read_input_token_cost": 1.75e-07,
         "cache_read_input_token_cost_priority": 3.5e-07,
@@ -16988,6 +17191,39 @@
         "supports_service_tier": true,
         "supports_vision": true
     },
+    "gpt-5.2-codex": {
+        "cache_read_input_token_cost": 1.75e-07,
+        "cache_read_input_token_cost_priority": 3.5e-07,
+        "input_cost_per_token": 1.75e-06,
+        "input_cost_per_token_priority": 3.5e-06,
+        "litellm_provider": "openai",
+        "max_input_tokens": 400000,
+        "max_output_tokens": 128000,
+        "max_tokens": 128000,
+        "mode": "responses",
+        "output_cost_per_token": 1.4e-05,
+        "output_cost_per_token_priority": 2.8e-05,
+        "supported_endpoints": [
+            "/v1/responses"
+        ],
+        "supported_modalities": [
+            "text",
+            "image"
+        ],
+        "supported_output_modalities": [
+            "text"
+        ],
+        "supports_function_calling": true,
+        "supports_native_streaming": true,
+        "supports_parallel_function_calling": true,
+        "supports_pdf_input": true,
+        "supports_prompt_caching": true,
+        "supports_reasoning": true,
+        "supports_response_schema": true,
+        "supports_system_messages": false,
+        "supports_tool_choice": true,
+        "supports_vision": true
+    },
     "gpt-5.2-2025-12-11": {
         "cache_read_input_token_cost": 1.75e-07,
         "cache_read_input_token_cost_priority": 3.5e-07,
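
As a quick sanity check on the new price points, a small sketch of how the per-token fields translate into a request cost. The token counts below are hypothetical, the rates are the gpt-5.3 values added above, and it assumes cached prompt tokens are billed at the cache_read_input_token_cost rate while the rest of the prompt is billed at input_cost_per_token.

```go
package main

import "fmt"

func main() {
	// gpt-5.3 rates from the pricing entry added above (USD per token).
	const (
		inputCost  = 1.75e-06 // input_cost_per_token
		cachedCost = 1.75e-07 // cache_read_input_token_cost
		outputCost = 1.4e-05  // output_cost_per_token
	)

	// Hypothetical request: 10,000 prompt tokens, 2,000 of them cache reads,
	// plus 1,500 completion tokens.
	prompt, cached, output := 10000.0, 2000.0, 1500.0

	cost := (prompt-cached)*inputCost + cached*cachedCost + output*outputCost
	fmt.Printf("$%.5f\n", cost) // $0.03535
}
```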