fix: 将 README 中 Codex WS 配置迁移至使用密钥弹窗

- 移除 README.md / README_CN.md 中的 Codex CLI WebSocket v2 配置示例
- UseKeyModal OpenAI 分组新增 "Codex CLI (WebSocket)" tab,区分普通模式与 WS 模式
- 普通模式 config.toml 不含 WebSocket 字段,WS 模式包含 supports_websockets 和 features 配置
- 所有配置统一使用 sub2api 作为 model_provider 名称
This commit is contained in:
shaw
2026-02-28 23:32:11 +08:00
parent fbfbb26fd2
commit f7fa71bc28
5 changed files with 48 additions and 57 deletions

View File

@@ -57,34 +57,6 @@ Sub2API is an AI API gateway platform designed to distribute and manage API quot
--- ---
## Codex CLI WebSocket v2 Example
To enable OpenAI WebSocket Mode v2 in Codex CLI with Sub2API, add the following to `~/.codex/config.toml`:
```toml
model_provider = "aicodx2api"
model = "gpt-5.3-codex"
review_model = "gpt-5.3-codex"
model_reasoning_effort = "xhigh"
disable_response_storage = true
network_access = "enabled"
windows_wsl_setup_acknowledged = true
[model_providers.aicodx2api]
name = "aicodx2api"
base_url = "https://api.sub2api.ai"
wire_api = "responses"
supports_websockets = true
requires_openai_auth = true
[features]
responses_websockets_v2 = true
```
After updating the config, restart Codex CLI.
---
## Deployment ## Deployment
### Method 1: Script Installation (Recommended) ### Method 1: Script Installation (Recommended)

View File

@@ -62,34 +62,6 @@ Sub2API 是一个 AI API 网关平台,用于分发和管理 AI 产品订阅(
- 当请求包含 `function_call_output` 时,需要携带 `previous_response_id`,或在 `input` 中包含带 `call_id``tool_call`/`function_call`,或带非空 `id` 且与 `function_call_output.call_id` 匹配的 `item_reference` - 当请求包含 `function_call_output` 时,需要携带 `previous_response_id`,或在 `input` 中包含带 `call_id``tool_call`/`function_call`,或带非空 `id` 且与 `function_call_output.call_id` 匹配的 `item_reference`
- 若依赖上游历史记录,网关会强制 `store=true` 并需要复用 `previous_response_id`,以避免出现 “No tool call found for function call output” 错误。 - 若依赖上游历史记录,网关会强制 `store=true` 并需要复用 `previous_response_id`,以避免出现 “No tool call found for function call output” 错误。
## Codex CLI 开启 OpenAI WebSocket Mode v2 示例配置
如需在 Codex CLI 中通过 Sub2API 启用 OpenAI WebSocket Mode v2,可将以下配置写入 `~/.codex/config.toml`:
```toml
model_provider = "aicodx2api"
model = "gpt-5.3-codex"
review_model = "gpt-5.3-codex"
model_reasoning_effort = "xhigh"
disable_response_storage = true
network_access = "enabled"
windows_wsl_setup_acknowledged = true
[model_providers.aicodx2api]
name = "aicodx2api"
base_url = "https://api.sub2api.ai"
wire_api = "responses"
supports_websockets = true
requires_openai_auth = true
[features]
responses_websockets_v2 = true
```
配置更新后,重启 Codex CLI 使其生效。
---
## 部署方式 ## 部署方式
### 方式一:脚本安装(推荐) ### 方式一:脚本安装(推荐)

View File

@@ -268,6 +268,7 @@ const clientTabs = computed((): TabConfig[] => {
case 'openai': case 'openai':
return [ return [
{ id: 'codex', label: t('keys.useKeyModal.cliTabs.codexCli'), icon: TerminalIcon }, { id: 'codex', label: t('keys.useKeyModal.cliTabs.codexCli'), icon: TerminalIcon },
{ id: 'codex-ws', label: t('keys.useKeyModal.cliTabs.codexCliWs'), icon: TerminalIcon },
{ id: 'opencode', label: t('keys.useKeyModal.cliTabs.opencode'), icon: TerminalIcon } { id: 'opencode', label: t('keys.useKeyModal.cliTabs.opencode'), icon: TerminalIcon }
] ]
case 'gemini': case 'gemini':
@@ -306,7 +307,7 @@ const showShellTabs = computed(() => activeClientTab.value !== 'opencode')
const currentTabs = computed(() => { const currentTabs = computed(() => {
if (!showShellTabs.value) return [] if (!showShellTabs.value) return []
if (props.platform === 'openai') { if (activeClientTab.value === 'codex' || activeClientTab.value === 'codex-ws') {
return openaiTabs return openaiTabs
} }
return shellTabs return shellTabs
@@ -401,6 +402,9 @@ const currentFiles = computed((): FileConfig[] => {
switch (props.platform) { switch (props.platform) {
case 'openai': case 'openai':
if (activeClientTab.value === 'codex-ws') {
return generateOpenAIWsFiles(baseUrl, apiKey)
}
return generateOpenAIFiles(baseUrl, apiKey) return generateOpenAIFiles(baseUrl, apiKey)
case 'gemini': case 'gemini':
return [generateGeminiCliContent(baseUrl, apiKey)] return [generateGeminiCliContent(baseUrl, apiKey)]
@@ -524,6 +528,47 @@ requires_openai_auth = true`
] ]
} }
/**
 * Build the config files shown in the "Codex CLI (WebSocket)" tab:
 * a config.toml with WebSocket Mode v2 enabled and an auth.json holding
 * the API key.
 *
 * @param baseUrl - Gateway base URL embedded as the provider `base_url`.
 * @param apiKey  - Key written into auth.json as `OPENAI_API_KEY`.
 * @returns File descriptors for `<configDir>/config.toml` and
 *          `<configDir>/auth.json`, where configDir follows the active
 *          shell tab (Windows: %userprofile%\.codex, otherwise ~/.codex).
 */
function generateOpenAIWsFiles(baseUrl: string, apiKey: string): FileConfig[] {
  const isWindows = activeTab.value === 'windows'
  const configDir = isWindows ? '%userprofile%\\.codex' : '~/.codex'

  // config.toml content with WebSocket v2: `supports_websockets` on the
  // provider plus the `responses_websockets_v2` feature flag.
  const configContent = `model_provider = "sub2api"
model = "gpt-5.3-codex"
model_reasoning_effort = "high"
network_access = "enabled"
disable_response_storage = true
windows_wsl_setup_acknowledged = true
model_verbosity = "high"
[model_providers.sub2api]
name = "sub2api"
base_url = "${baseUrl}"
wire_api = "responses"
supports_websockets = true
requires_openai_auth = true
[features]
responses_websockets_v2 = true`

  // Serialize via JSON.stringify instead of interpolating into a template
  // string, so keys containing quotes or backslashes are escaped and the
  // emitted auth.json is always valid JSON.
  const authContent = JSON.stringify({ OPENAI_API_KEY: apiKey }, null, 2)

  return [
    {
      path: `${configDir}/config.toml`,
      content: configContent,
      hint: t('keys.useKeyModal.openai.configTomlHint')
    },
    {
      path: `${configDir}/auth.json`,
      content: authContent
    }
  ]
}
function generateOpenCodeConfig(platform: string, baseUrl: string, apiKey: string, pathLabel?: string): FileConfig { function generateOpenCodeConfig(platform: string, baseUrl: string, apiKey: string, pathLabel?: string): FileConfig {
const provider: Record<string, any> = { const provider: Record<string, any> = {
[platform]: { [platform]: {

View File

@@ -501,6 +501,7 @@ export default {
claudeCode: 'Claude Code', claudeCode: 'Claude Code',
geminiCli: 'Gemini CLI', geminiCli: 'Gemini CLI',
codexCli: 'Codex CLI', codexCli: 'Codex CLI',
codexCliWs: 'Codex CLI (WebSocket)',
opencode: 'OpenCode', opencode: 'OpenCode',
}, },
antigravity: { antigravity: {

View File

@@ -503,6 +503,7 @@ export default {
claudeCode: 'Claude Code', claudeCode: 'Claude Code',
geminiCli: 'Gemini CLI', geminiCli: 'Gemini CLI',
codexCli: 'Codex CLI', codexCli: 'Codex CLI',
codexCliWs: 'Codex CLI (WebSocket)',
opencode: 'OpenCode' opencode: 'OpenCode'
}, },
antigravity: { antigravity: {