Files
codex/prs/bolinfest/PR-2019.md
2025-09-02 15:17:45 -07:00

9.5 KiB

PR #2019: Fix rust build on windows

Description

This pull request implements a fix from #2000, as well as fixing an additional problem with path lengths on Windows that prevented the login from displaying.

Full Diff

diff --git a/codex-rs/login/Cargo.toml b/codex-rs/login/Cargo.toml
index a290c01eb6..85c11505ec 100644
--- a/codex-rs/login/Cargo.toml
+++ b/codex-rs/login/Cargo.toml
@@ -12,6 +12,7 @@ chrono = { version = "0.4", features = ["serde"] }
 reqwest = { version = "0.12", features = ["json"] }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"
+tempfile = "3"
 thiserror = "2.0.12"
 tokio = { version = "1", features = [
     "io-std",
diff --git a/codex-rs/login/src/lib.rs b/codex-rs/login/src/lib.rs
index 2a8f6749b4..f25f885bdc 100644
--- a/codex-rs/login/src/lib.rs
+++ b/codex-rs/login/src/lib.rs
@@ -18,6 +18,7 @@ use std::process::Stdio;
 use std::sync::Arc;
 use std::sync::Mutex;
 use std::time::Duration;
+use tempfile::NamedTempFile;
 use tokio::process::Command;
 
 pub use crate::token_data::TokenData;
@@ -263,9 +264,9 @@ pub struct SpawnedLogin {
 
 /// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
 pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+    let script_path = write_login_script_to_disk()?;
     let mut cmd = std::process::Command::new("python3");
-    cmd.arg("-c")
-        .arg(SOURCE_FOR_PYTHON_SERVER)
+    cmd.arg(&script_path)
         .env("CODEX_HOME", codex_home)
         .env("CODEX_CLIENT_ID", CLIENT_ID)
         .stdin(Stdio::null())
@@ -315,9 +316,9 @@ pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLog
 /// recorded in memory. Otherwise, the subprocess's output will be sent to the
 /// current process's stdout/stderr.
 pub async fn login_with_chatgpt(codex_home: &Path, capture_output: bool) -> std::io::Result<()> {
+    let script_path = write_login_script_to_disk()?;
     let child = Command::new("python3")
-        .arg("-c")
-        .arg(SOURCE_FOR_PYTHON_SERVER)
+        .arg(&script_path)
         .env("CODEX_HOME", codex_home)
         .env("CODEX_CLIENT_ID", CLIENT_ID)
         .stdin(Stdio::null())
@@ -344,6 +345,17 @@ pub async fn login_with_chatgpt(codex_home: &Path, capture_output: bool) -> std:
     }
 }
 
+fn write_login_script_to_disk() -> std::io::Result<PathBuf> {
+    // Write the embedded Python script to a file to avoid very long
+    // command-line arguments (Windows error 206).
+    let mut tmp = NamedTempFile::new()?;
+    tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+    tmp.flush()?;
+
+    let (_file, path) = tmp.keep()?;
+    Ok(path)
+}
+
 pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<()> {
     let auth_dot_json = AuthDotJson {
         openai_api_key: Some(api_key.to_string()),
diff --git a/codex-rs/tui/src/tui.rs b/codex-rs/tui/src/tui.rs
index e0bf9bcc57..0447e32ae9 100644
--- a/codex-rs/tui/src/tui.rs
+++ b/codex-rs/tui/src/tui.rs
@@ -29,14 +29,17 @@ pub fn init(_config: &Config) -> Result<Tui> {
     // Enable keyboard enhancement flags so modifiers for keys like Enter are disambiguated.
     // chat_composer.rs is using a keyboard event listener to enter for any modified keys
     // to create a new line that require this.
-    execute!(
+    // Some terminals (notably legacy Windows consoles) do not support
+    // keyboard enhancement flags. Attempt to enable them, but continue
+    // gracefully if unsupported.
+    let _ = execute!(
         stdout(),
         PushKeyboardEnhancementFlags(
             KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
                 | KeyboardEnhancementFlags::REPORT_EVENT_TYPES
                 | KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS
         )
-    )?;
+    );
     set_panic_hook();
 
     // Clear screen and move cursor to top-left before drawing UI
@@ -57,7 +60,8 @@ fn set_panic_hook() {
 
 /// Restore the terminal to its original state
 pub fn restore() -> Result<()> {
-    execute!(stdout(), PopKeyboardEnhancementFlags)?;
+    // Pop may fail on platforms that didn't support the push; ignore errors.
+    let _ = execute!(stdout(), PopKeyboardEnhancementFlags);
     execute!(stdout(), DisableBracketedPaste)?;
     disable_raw_mode()?;
     Ok(())

Review Comments

codex-rs/login/src/lib.rs

@@ -261,11 +262,25 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {

Instead of writing to CODEX_HOME, can we use tempfile::NamedTempFile in the OS temp directory? The main thing I care about is that as part of Drop, it should be deleted.

I suppose that still works if the tempfile::NamedTempFile is created in CODEX_HOME, but it's possible it won't get cleaned up in the event of a panic, so I think we should stick with the temp folder unless there is a compelling reason not to.

@@ -261,11 +262,25 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {

No you need to return the NamedTempFile from this function so it stays alive.

@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
fn ensure_login_script(codex_home: &Path) -> std::io::Result<NamedTempFile> {
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+    // Write the embedded Python script to a file to avoid very long
+    // command-line arguments (Windows error 206).
+    let mut tmp = NamedTempFile::new()?;
+    tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+    tmp.flush()?;
+
+    let (_file, path) = tmp.keep()?;
+    Ok(path)
+}
+
 /// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
 pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+    let script_path = ensure_login_script(codex_home)?;

I don't love the names, but:

    let script_file = ensure_login_script(codex_home)?;
    let script_path = script_file.path();
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+    // Write the embedded Python script to a file to avoid very long
+    // command-line arguments (Windows error 206).
+    let mut tmp = NamedTempFile::new()?;
+    tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+    tmp.flush()?;
+
+    let (_file, path) = tmp.keep()?;
+    Ok(path)

By returning NamedTempFile, we don't have to invoke keep(), which is the goal.

@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+    // Write the embedded Python script to a file to avoid very long
+    // command-line arguments (Windows error 206).
+    let mut tmp = NamedTempFile::new()?;
+    tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+    tmp.flush()?;
+
+    let (_file, path) = tmp.keep()?;
+    Ok(path)
+}
+
 /// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
 pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+    let script_path = ensure_login_script(codex_home)?;

Ah, I suppose the NamedTempFile has to be a field of SpawnedLogin for the lifetime to work out?

@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
     pub stderr: Arc<Mutex<Vec<u8>>>,
 }
 
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+    // Write the embedded Python script to a file to avoid very long
+    // command-line arguments (Windows error 206).
+    let mut tmp = NamedTempFile::new()?;
+    tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+    tmp.flush()?;
+
+    let (_file, path) = tmp.keep()?;
+    Ok(path)
+}
+
 /// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
 pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+    let script_path = ensure_login_script(codex_home)?;

Hmm, tracing this through, we would have to get this all the way to spawn_completion_poller() in codex-rs/tui/src/onboarding/auth.rs, which I admit is a bit gross. I guess we should stick with keep()?