fix: 将 README 中 Codex WS 配置迁移至使用密钥弹窗
- 移除 README.md / README_CN.md 中的 Codex CLI WebSocket v2 配置示例
- UseKeyModal OpenAI 分组新增 "Codex CLI (WebSocket)" tab,区分普通模式与 WS 模式
- 普通模式 config.toml 不含 WebSocket 字段,WS 模式包含 supports_websockets 和 features 配置
- 所有配置统一使用 sub2api 作为 model_provider 名称
This commit is contained in:
28
README.md
28
README.md
@@ -57,34 +57,6 @@ Sub2API is an AI API gateway platform designed to distribute and manage API quot
|
||||
|
||||
---
|
||||
|
||||
## Codex CLI WebSocket v2 Example
|
||||
|
||||
To enable OpenAI WebSocket Mode v2 in Codex CLI with Sub2API, add the following to `~/.codex/config.toml`:
|
||||
|
||||
```toml
|
||||
model_provider = "aicodx2api"
|
||||
model = "gpt-5.3-codex"
|
||||
review_model = "gpt-5.3-codex"
|
||||
model_reasoning_effort = "xhigh"
|
||||
disable_response_storage = true
|
||||
network_access = "enabled"
|
||||
windows_wsl_setup_acknowledged = true
|
||||
|
||||
[model_providers.aicodx2api]
|
||||
name = "aicodx2api"
|
||||
base_url = "https://api.sub2api.ai"
|
||||
wire_api = "responses"
|
||||
supports_websockets = true
|
||||
requires_openai_auth = true
|
||||
|
||||
[features]
|
||||
responses_websockets_v2 = true
|
||||
```
|
||||
|
||||
After updating the config, restart Codex CLI.
|
||||
|
||||
---
|
||||
|
||||
## Deployment
|
||||
|
||||
### Method 1: Script Installation (Recommended)
|
||||
|
||||
28
README_CN.md
28
README_CN.md
@@ -62,34 +62,6 @@ Sub2API 是一个 AI API 网关平台,用于分发和管理 AI 产品订阅(
|
||||
- 当请求包含 `function_call_output` 时,需要携带 `previous_response_id`,或在 `input` 中包含带 `call_id` 的 `tool_call`/`function_call`,或带非空 `id` 且与 `function_call_output.call_id` 匹配的 `item_reference`。
|
||||
- 若依赖上游历史记录,网关会强制 `store=true` 并需要复用 `previous_response_id`,以避免出现 “No tool call found for function call output” 错误。
|
||||
|
||||
## Codex CLI 开启 OpenAI WebSocket Mode v2 示例配置
|
||||
|
||||
如需在 Codex CLI 中通过 Sub2API 启用 OpenAI WebSocket Mode v2,可将以下配置写入 `~/.codex/config.toml`:
|
||||
|
||||
```toml
|
||||
model_provider = "aicodx2api"
|
||||
model = "gpt-5.3-codex"
|
||||
review_model = "gpt-5.3-codex"
|
||||
model_reasoning_effort = "xhigh"
|
||||
disable_response_storage = true
|
||||
network_access = "enabled"
|
||||
windows_wsl_setup_acknowledged = true
|
||||
|
||||
[model_providers.aicodx2api]
|
||||
name = "aicodx2api"
|
||||
base_url = "https://api.sub2api.ai"
|
||||
wire_api = "responses"
|
||||
supports_websockets = true
|
||||
requires_openai_auth = true
|
||||
|
||||
[features]
|
||||
responses_websockets_v2 = true
|
||||
```
|
||||
|
||||
配置更新后,重启 Codex CLI 使其生效。
|
||||
|
||||
---
|
||||
|
||||
## 部署方式
|
||||
|
||||
### 方式一:脚本安装(推荐)
|
||||
|
||||
@@ -268,6 +268,7 @@ const clientTabs = computed((): TabConfig[] => {
|
||||
case 'openai':
|
||||
return [
|
||||
{ id: 'codex', label: t('keys.useKeyModal.cliTabs.codexCli'), icon: TerminalIcon },
|
||||
{ id: 'codex-ws', label: t('keys.useKeyModal.cliTabs.codexCliWs'), icon: TerminalIcon },
|
||||
{ id: 'opencode', label: t('keys.useKeyModal.cliTabs.opencode'), icon: TerminalIcon }
|
||||
]
|
||||
case 'gemini':
|
||||
@@ -306,7 +307,7 @@ const showShellTabs = computed(() => activeClientTab.value !== 'opencode')
|
||||
|
||||
const currentTabs = computed(() => {
|
||||
if (!showShellTabs.value) return []
|
||||
if (props.platform === 'openai') {
|
||||
if (activeClientTab.value === 'codex' || activeClientTab.value === 'codex-ws') {
|
||||
return openaiTabs
|
||||
}
|
||||
return shellTabs
|
||||
@@ -401,6 +402,9 @@ const currentFiles = computed((): FileConfig[] => {
|
||||
|
||||
switch (props.platform) {
|
||||
case 'openai':
|
||||
if (activeClientTab.value === 'codex-ws') {
|
||||
return generateOpenAIWsFiles(baseUrl, apiKey)
|
||||
}
|
||||
return generateOpenAIFiles(baseUrl, apiKey)
|
||||
case 'gemini':
|
||||
return [generateGeminiCliContent(baseUrl, apiKey)]
|
||||
@@ -524,6 +528,47 @@ requires_openai_auth = true`
|
||||
]
|
||||
}
|
||||
|
||||
function generateOpenAIWsFiles(baseUrl: string, apiKey: string): FileConfig[] {
|
||||
const isWindows = activeTab.value === 'windows'
|
||||
const configDir = isWindows ? '%userprofile%\\.codex' : '~/.codex'
|
||||
|
||||
// config.toml content with WebSocket v2
|
||||
const configContent = `model_provider = "sub2api"
|
||||
model = "gpt-5.3-codex"
|
||||
model_reasoning_effort = "high"
|
||||
network_access = "enabled"
|
||||
disable_response_storage = true
|
||||
windows_wsl_setup_acknowledged = true
|
||||
model_verbosity = "high"
|
||||
|
||||
[model_providers.sub2api]
|
||||
name = "sub2api"
|
||||
base_url = "${baseUrl}"
|
||||
wire_api = "responses"
|
||||
supports_websockets = true
|
||||
requires_openai_auth = true
|
||||
|
||||
[features]
|
||||
responses_websockets_v2 = true`
|
||||
|
||||
// auth.json content
|
||||
const authContent = `{
|
||||
"OPENAI_API_KEY": "${apiKey}"
|
||||
}`
|
||||
|
||||
return [
|
||||
{
|
||||
path: `${configDir}/config.toml`,
|
||||
content: configContent,
|
||||
hint: t('keys.useKeyModal.openai.configTomlHint')
|
||||
},
|
||||
{
|
||||
path: `${configDir}/auth.json`,
|
||||
content: authContent
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
function generateOpenCodeConfig(platform: string, baseUrl: string, apiKey: string, pathLabel?: string): FileConfig {
|
||||
const provider: Record<string, any> = {
|
||||
[platform]: {
|
||||
|
||||
@@ -501,6 +501,7 @@ export default {
|
||||
claudeCode: 'Claude Code',
|
||||
geminiCli: 'Gemini CLI',
|
||||
codexCli: 'Codex CLI',
|
||||
codexCliWs: 'Codex CLI (WebSocket)',
|
||||
opencode: 'OpenCode',
|
||||
},
|
||||
antigravity: {
|
||||
|
||||
@@ -503,6 +503,7 @@ export default {
|
||||
claudeCode: 'Claude Code',
|
||||
geminiCli: 'Gemini CLI',
|
||||
codexCli: 'Codex CLI',
|
||||
codexCliWs: 'Codex CLI (WebSocket)',
|
||||
opencode: 'OpenCode'
|
||||
},
|
||||
antigravity: {
|
||||
|
||||
Reference in New Issue
Block a user