@@ -716,24 +716,29 @@ for (const workflowPath of workflowPaths) {
716716
717717// Matches the Codex config.toml heredoc opening followed (possibly with
718718// previously-injected lines in between) by [shell_environment_policy], so we
719- // can inject [model_providers.openai] config at the top of the config.toml
720- // before the shell environment policy section. The non-greedy (?:...)* skips
721- // any lines previously inserted by earlier versions of this script, making the
719+ // can inject a custom model provider at the top of the config.toml before the
720+ // shell environment policy section. The non-greedy (?:...)* skips any lines
721+ // previously inserted by earlier versions of this script, making the
722722// transformation idempotent and upgradable. The hash in the heredoc delimiter
723723// varies across compiler versions, so we match \w+ instead of a literal hash.
724724//
725725// Codex v0.121+ ignores OPENAI_BASE_URL env var when constructing WebSocket URLs
726726// for the responses API (wss://api.openai.com/v1/responses), connecting directly
727727// to OpenAI and sending the api-proxy placeholder key → 401 Unauthorized.
728- // Setting supports_websockets=false disables WebSocket transport, forcing Codex
729- // to use REST for all API calls. REST calls respect OPENAI_BASE_URL (set by AWF's
730- // docker-manager to http://172.30.0.30:10000), which routes them through the
731- // api-proxy sidecar that injects the real OpenAI API key.
728+ //
729+ // The built-in "openai" provider ID is reserved and cannot be overridden via
730+ // [model_providers.openai] (Codex will reject the config). Instead we define a
731+ // custom provider "openai-proxy" that:
732+ // - points to the AWF api-proxy sidecar at http://172.30.0.30:10000
733+ // - sets supports_websockets=false to force REST (which respects base_url)
734+ // - uses OPENAI_API_KEY (placeholder injected by AWF) for auth; the sidecar
735+ // replaces it with the real key before forwarding to OpenAI
736+ // We then set model_provider = "openai-proxy" to activate it.
732737//
733738// See: https://developers.openai.com/codex/config-reference
734739const codexConfigTomlHeredocRegex =
735740   /^(\s+)(cat > "\/tmp\/gh-aw\/mcp-config\/config\.toml" << GH_AW_CODEX_SHELL_POLICY_\w+_EOF\n)(?:\1[^\n]*\n)*?(\1\[shell_environment_policy\])/m;
736- const CODEX_OPENAI_BASE_URL_SENTINEL = 'supports_websockets = false';
741+ const CODEX_PROXY_PROVIDER_SENTINEL = 'model_providers.openai-proxy';
737742
738743// Apply Codex-specific transformations to OpenAI/Codex workflow files only.
739744// These transformations must not be applied to Claude, Copilot, or other
@@ -748,35 +753,36 @@ for (const workflowPath of codexWorkflowPaths) {
748753 }
749754   let modified = false;
750755
751- // Inject [model_providers.openai] with supports_websockets=false into the Codex
752- // config.toml heredoc to disable WebSocket transport for the OpenAI provider.
753- // Codex v0.121+ ignores OPENAI_BASE_URL for WebSocket URL construction and
754- // connects directly to wss://api.openai.com/v1/responses with the api-proxy
755- // placeholder key, causing 401 Unauthorized. With WebSocket disabled, Codex
756- // falls back to REST, which correctly routes through OPENAI_BASE_URL
757- // (http://172.30.0.30:10000) → api-proxy sidecar → real OpenAI API key.
758-   if (!content.includes(CODEX_OPENAI_BASE_URL_SENTINEL)) {
756+   // Inject a custom "openai-proxy" provider into the Codex config.toml heredoc.
757+   // This disables WebSocket transport and routes REST API calls through the AWF
758+   // api-proxy sidecar (at 172.30.0.30:10000), which injects the real OpenAI key.
759+   if (!content.includes(CODEX_PROXY_PROVIDER_SENTINEL)) {
759760    const heredocMatch = content.match(codexConfigTomlHeredocRegex);
760761    if (heredocMatch) {
761762      const indent = heredocMatch[1];
762763      const modelProvidersBlock =
763-         `${indent}[model_providers.openai]\n` +
764-         `${indent}${CODEX_OPENAI_BASE_URL_SENTINEL}\n` +
764+         `${indent}model_provider = "openai-proxy"\n` +
765+         `${indent}\n` +
766+         `${indent}[model_providers.openai-proxy]\n` +
767+         `${indent}name = "OpenAI AWF proxy"\n` +
768+         `${indent}base_url = "http://172.30.0.30:10000"\n` +
769+         `${indent}env_key = "OPENAI_API_KEY"\n` +
770+         `${indent}supports_websockets = false\n` +
765771        `${indent}\n`;
766772      content = content.replace(
767773        codexConfigTomlHeredocRegex,
768774        `$1$2${modelProvidersBlock}$3`
769775      );
770776      modified = true;
771-       console.log(`  Injected [model_providers.openai] supports_websockets=false into Codex config.toml heredoc`);
777+       console.log(`  Injected openai-proxy custom provider into Codex config.toml heredoc`);
772778 } else {
773779 console . warn (
774780 ` WARNING: Could not find Codex config.toml heredoc pattern to inject model_providers config. ` +
775781 `The compiled lock file may have changed structure. Manual review required.`
776782 ) ;
777783 }
778784 } else {
779-     console.log(`  [model_providers.openai] supports_websockets=false already present in Codex config.toml`);
785+     console.log(`  openai-proxy custom provider already present in Codex config.toml`);
780786 }
781787
782788 // Preserve empty lines as truly empty (no trailing whitespace) to keep the
0 commit comments