diff --git a/frontend/src/components/keys/UseKeyModal.vue b/frontend/src/components/keys/UseKeyModal.vue
index a61ce6b4..5ae2fa3f 100644
--- a/frontend/src/components/keys/UseKeyModal.vue
+++ b/frontend/src/components/keys/UseKeyModal.vue
@@ -512,12 +512,14 @@ function generateOpenAIFiles(baseUrl: string, apiKey: string): FileConfig[] {
 
   // config.toml content
   const configContent = `model_provider = "OpenAI"
-model = "gpt-5.3-codex"
-review_model = "gpt-5.3-codex"
-model_reasoning_effort = "xhigh"
+model = "gpt-5.4"
+review_model = "gpt-5.4"
+model_reasoning_effort = "high"
 disable_response_storage = true
 network_access = "enabled"
 windows_wsl_setup_acknowledged = true
+model_context_window = 1000000
+model_auto_compact_token_limit = 900000
 
 [model_providers.OpenAI]
 name = "OpenAI"
@@ -549,12 +551,14 @@ function generateOpenAIWsFiles(baseUrl: string, apiKey: string): FileConfig[] {
 
   // config.toml content with WebSocket v2
   const configContent = `model_provider = "OpenAI"
-model = "gpt-5.3-codex"
-review_model = "gpt-5.3-codex"
-model_reasoning_effort = "xhigh"
+model = "gpt-5.4"
+review_model = "gpt-5.4"
+model_reasoning_effort = "high"
 disable_response_storage = true
 network_access = "enabled"
 windows_wsl_setup_acknowledged = true
+model_context_window = 1000000
+model_auto_compact_token_limit = 900000
 
 [model_providers.OpenAI]
 name = "OpenAI"