9.5 KiB
PR #2019: Fix rust build on windows
- URL: https://github.com/openai/codex/pull/2019
- Author: joshleblanc
- Created: 2025-08-08 15:18:17 UTC
- Updated: 2025-08-08 18:03:37 UTC
- Changes: +24/-7, Files changed: 3, Commits: 5
Description
This pull request implements a fix for #2000, and also fixes an additional problem with path lengths on Windows that prevented the login from displaying.
Full Diff
diff --git a/codex-rs/login/Cargo.toml b/codex-rs/login/Cargo.toml
index a290c01eb6..85c11505ec 100644
--- a/codex-rs/login/Cargo.toml
+++ b/codex-rs/login/Cargo.toml
@@ -12,6 +12,7 @@ chrono = { version = "0.4", features = ["serde"] }
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
+tempfile = "3"
thiserror = "2.0.12"
tokio = { version = "1", features = [
"io-std",
diff --git a/codex-rs/login/src/lib.rs b/codex-rs/login/src/lib.rs
index 2a8f6749b4..f25f885bdc 100644
--- a/codex-rs/login/src/lib.rs
+++ b/codex-rs/login/src/lib.rs
@@ -18,6 +18,7 @@ use std::process::Stdio;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
+use tempfile::NamedTempFile;
use tokio::process::Command;
pub use crate::token_data::TokenData;
@@ -263,9 +264,9 @@ pub struct SpawnedLogin {
/// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+ let script_path = write_login_script_to_disk()?;
let mut cmd = std::process::Command::new("python3");
- cmd.arg("-c")
- .arg(SOURCE_FOR_PYTHON_SERVER)
+ cmd.arg(&script_path)
.env("CODEX_HOME", codex_home)
.env("CODEX_CLIENT_ID", CLIENT_ID)
.stdin(Stdio::null())
@@ -315,9 +316,9 @@ pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLog
/// recorded in memory. Otherwise, the subprocess's output will be sent to the
/// current process's stdout/stderr.
pub async fn login_with_chatgpt(codex_home: &Path, capture_output: bool) -> std::io::Result<()> {
+ let script_path = write_login_script_to_disk()?;
let child = Command::new("python3")
- .arg("-c")
- .arg(SOURCE_FOR_PYTHON_SERVER)
+ .arg(&script_path)
.env("CODEX_HOME", codex_home)
.env("CODEX_CLIENT_ID", CLIENT_ID)
.stdin(Stdio::null())
@@ -344,6 +345,17 @@ pub async fn login_with_chatgpt(codex_home: &Path, capture_output: bool) -> std:
}
}
+fn write_login_script_to_disk() -> std::io::Result<PathBuf> {
+ // Write the embedded Python script to a file to avoid very long
+ // command-line arguments (Windows error 206).
+ let mut tmp = NamedTempFile::new()?;
+ tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+ tmp.flush()?;
+
+ let (_file, path) = tmp.keep()?;
+ Ok(path)
+}
+
pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<()> {
let auth_dot_json = AuthDotJson {
openai_api_key: Some(api_key.to_string()),
diff --git a/codex-rs/tui/src/tui.rs b/codex-rs/tui/src/tui.rs
index e0bf9bcc57..0447e32ae9 100644
--- a/codex-rs/tui/src/tui.rs
+++ b/codex-rs/tui/src/tui.rs
@@ -29,14 +29,17 @@ pub fn init(_config: &Config) -> Result<Tui> {
// Enable keyboard enhancement flags so modifiers for keys like Enter are disambiguated.
// chat_composer.rs is using a keyboard event listener to enter for any modified keys
// to create a new line that require this.
- execute!(
+ // Some terminals (notably legacy Windows consoles) do not support
+ // keyboard enhancement flags. Attempt to enable them, but continue
+ // gracefully if unsupported.
+ let _ = execute!(
stdout(),
PushKeyboardEnhancementFlags(
KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
| KeyboardEnhancementFlags::REPORT_EVENT_TYPES
| KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS
)
- )?;
+ );
set_panic_hook();
// Clear screen and move cursor to top-left before drawing UI
@@ -57,7 +60,8 @@ fn set_panic_hook() {
/// Restore the terminal to its original state
pub fn restore() -> Result<()> {
- execute!(stdout(), PopKeyboardEnhancementFlags)?;
+ // Pop may fail on platforms that didn't support the push; ignore errors.
+ let _ = execute!(stdout(), PopKeyboardEnhancementFlags);
execute!(stdout(), DisableBracketedPaste)?;
disable_raw_mode()?;
Ok(())
Review Comments
codex-rs/login/src/lib.rs
- Created: 2025-08-08 16:55:50 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263557978
@@ -261,11 +262,25 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
Instead of writing to `CODEX_HOME`, can we use `tempfile::NamedTempFile` in the OS temp directory? The main thing I care about is that, as part of `Drop`, it should be deleted. I suppose that still works if the `tempfile::NamedTempFile` is created in `CODEX_HOME`, but it's possible it won't get cleaned up in the event of a panic, so I think we should stick with the temp folder unless there is a compelling reason not to.
- Created: 2025-08-08 17:11:47 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263609355
@@ -261,11 +262,25 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
No, you need to return the `NamedTempFile` from this function so it stays alive.
- Created: 2025-08-08 17:12:07 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263610755
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
fn ensure_login_script(codex_home: &Path) -> std::io::Result<NamedTempFile> {
- Created: 2025-08-08 17:12:53 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263613520
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+ // Write the embedded Python script to a file to avoid very long
+ // command-line arguments (Windows error 206).
+ let mut tmp = NamedTempFile::new()?;
+ tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+ tmp.flush()?;
+
+ let (_file, path) = tmp.keep()?;
+ Ok(path)
+}
+
/// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+ let script_path = ensure_login_script(codex_home)?;
I don't love the names, but:
let script_file = ensure_login_script(codex_home)?; let script_path = script_file.path();
- Created: 2025-08-08 17:13:28 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263614509
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+ // Write the embedded Python script to a file to avoid very long
+ // command-line arguments (Windows error 206).
+ let mut tmp = NamedTempFile::new()?;
+ tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+ tmp.flush()?;
+
+ let (_file, path) = tmp.keep()?;
+ Ok(path)
By returning `NamedTempFile`, we don't have to invoke `keep()`, which is the goal.
- Created: 2025-08-08 17:17:03 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263620104
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+ // Write the embedded Python script to a file to avoid very long
+ // command-line arguments (Windows error 206).
+ let mut tmp = NamedTempFile::new()?;
+ tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+ tmp.flush()?;
+
+ let (_file, path) = tmp.keep()?;
+ Ok(path)
+}
+
/// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+ let script_path = ensure_login_script(codex_home)?;
Ah, I suppose the `NamedTempFile` has to be a field of `SpawnedLogin` for the lifetime to work out?
- Created: 2025-08-08 17:20:43 UTC | Link: https://github.com/openai/codex/pull/2019#discussion_r2263626322
@@ -261,11 +263,22 @@ pub struct SpawnedLogin {
pub stderr: Arc<Mutex<Vec<u8>>>,
}
+fn ensure_login_script(codex_home: &Path) -> std::io::Result<PathBuf> {
+ // Write the embedded Python script to a file to avoid very long
+ // command-line arguments (Windows error 206).
+ let mut tmp = NamedTempFile::new()?;
+ tmp.write_all(SOURCE_FOR_PYTHON_SERVER.as_bytes())?;
+ tmp.flush()?;
+
+ let (_file, path) = tmp.keep()?;
+ Ok(path)
+}
+
/// Spawn the ChatGPT login Python server as a child process and return a handle to its process.
pub fn spawn_login_with_chatgpt(codex_home: &Path) -> std::io::Result<SpawnedLogin> {
+ let script_path = ensure_login_script(codex_home)?;
Hmm, tracing this through, we would have to get this all the way to `spawn_completion_poller()` in `codex-rs/tui/src/onboarding/auth.rs`, which I admit is a bit gross. I guess we should stick with `keep()`?