Compare commits

...

5 Commits

Author SHA1 Message Date
David Wiesen
1bd7682b37 Skip WindowsApps helper read grants 2026-03-22 21:52:02 -07:00
jif-oai
b9fa08ec61 try to fix bazel (#15328)
Fix Bazel macOS CI failures caused by the llvm module's pinned macOS SDK
URL returning 403 Forbidden from Apple's CDN.

Bump llvm to 0.6.8, switch to the new osx.from_archive(...) /
osx.frameworks(...) API, and refresh MODULE.bazel.lock so Bazel uses the
updated SDK archive configuration.
2026-03-20 10:18:19 -07:00
Eric Traut
4f28b64abc Add temporary app-server originator fallback for codex-tui (#15218)
## Summary
- make app-server treat `clientInfo.name == "codex-tui"` as a legacy
compatibility case
- fall back to `DEFAULT_ORIGINATOR` instead of sending `codex-tui` as
the originator header
- add a TODO noting this is a temporary workaround that should be
removed later

## Testing
- Not run (not requested)
2026-03-20 10:51:21 -06:00
pakrym-oai
ba85a58039 Add remote env CI matrix and integration test (#14869)
`CODEX_TEST_REMOTE_ENV` will make `test_codex` start the executor
"remotely" (inside a Docker container), turning any integration test into
a remote test.
2026-03-20 08:02:50 -07:00
xl-openai
e5f4d1fef5 feat: prefer git for curated plugin sync (#15275)
Start with a git clone and fall back to HTTP.
2026-03-20 00:06:24 -07:00
20 changed files with 1546 additions and 644 deletions

View File

@@ -527,7 +527,7 @@ jobs:
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
tests:
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.remote_env == 'true' && ' (remote)' || '' }}
runs-on: ${{ matrix.runs_on || matrix.runner }}
timeout-minutes: 30
needs: changed
@@ -553,6 +553,7 @@ jobs:
- runner: ubuntu-24.04
target: x86_64-unknown-linux-gnu
profile: dev
remote_env: "true"
runs_on:
group: codex-runners
labels: codex-linux-x64
@@ -590,6 +591,7 @@ jobs:
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends pkg-config libcap-dev
fi
# Some integration tests rely on DotSlash being installed.
# See https://github.com/openai/codex/pull/7617.
- name: Install DotSlash
@@ -674,6 +676,15 @@ jobs:
sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
fi
- name: Set up remote test env (Docker)
if: ${{ runner.os == 'Linux' && matrix.remote_env == 'true' }}
shell: bash
run: |
set -euo pipefail
export CODEX_TEST_REMOTE_ENV_CONTAINER_NAME=codex-remote-test-env
source "${GITHUB_WORKSPACE}/scripts/test-remote-env.sh"
echo "CODEX_TEST_REMOTE_ENV=${CODEX_TEST_REMOTE_ENV}" >> "$GITHUB_ENV"
- name: tests
id: test
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test --timings
@@ -726,6 +737,16 @@ jobs:
echo '```';
} >> "$GITHUB_STEP_SUMMARY"
- name: Tear down remote test env
if: ${{ always() && runner.os == 'Linux' && matrix.remote_env == 'true' }}
shell: bash
run: |
set +e
if [[ "${{ steps.test.outcome }}" != "success" ]]; then
docker logs codex-remote-test-env || true
fi
docker rm -f codex-remote-test-env >/dev/null 2>&1 || true
- name: verify tests passed
if: steps.test.outcome == 'failure'
run: |

View File

@@ -2,31 +2,41 @@ module(name = "codex")
bazel_dep(name = "bazel_skylib", version = "1.8.2")
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "llvm", version = "0.6.7")
bazel_dep(name = "llvm", version = "0.6.8")
register_toolchains("@llvm//toolchain:all")
osx = use_extension("@llvm//extensions:osx.bzl", "osx")
osx.framework(name = "ApplicationServices")
osx.framework(name = "AppKit")
osx.framework(name = "ColorSync")
osx.framework(name = "CoreFoundation")
osx.framework(name = "CoreGraphics")
osx.framework(name = "CoreServices")
osx.framework(name = "CoreText")
osx.framework(name = "AudioToolbox")
osx.framework(name = "CFNetwork")
osx.framework(name = "FontServices")
osx.framework(name = "AudioUnit")
osx.framework(name = "CoreAudio")
osx.framework(name = "CoreAudioTypes")
osx.framework(name = "Foundation")
osx.framework(name = "ImageIO")
osx.framework(name = "IOKit")
osx.framework(name = "Kernel")
osx.framework(name = "OSLog")
osx.framework(name = "Security")
osx.framework(name = "SystemConfiguration")
osx.from_archive(
sha256 = "6a4922f89487a96d7054ec6ca5065bfddd9f1d017c74d82f1d79cecf7feb8228",
strip_prefix = "Payload/Library/Developer/CommandLineTools/SDKs/MacOSX26.2.sdk",
type = "pkg",
urls = [
"https://swcdn.apple.com/content/downloads/26/44/047-81934-A_28TPKM5SD1/ps6pk6dk4x02vgfa5qsctq6tgf23t5f0w2/CLTools_macOSNMOS_SDK.pkg",
],
)
osx.frameworks(names = [
"ApplicationServices",
"AppKit",
"ColorSync",
"CoreFoundation",
"CoreGraphics",
"CoreServices",
"CoreText",
"AudioToolbox",
"CFNetwork",
"FontServices",
"AudioUnit",
"CoreAudio",
"CoreAudioTypes",
"Foundation",
"ImageIO",
"IOKit",
"Kernel",
"OSLog",
"Security",
"SystemConfiguration",
])
use_repo(osx, "macos_sdk")
# Needed to disable xcode...

3
MODULE.bazel.lock generated
View File

@@ -86,7 +86,8 @@
"https://bcr.bazel.build/modules/libcap/2.27.bcr.1/source.json": "3b116cbdbd25a68ffb587b672205f6d353a4c19a35452e480d58fc89531e0a10",
"https://bcr.bazel.build/modules/libpfm/4.11.0/MODULE.bazel": "45061ff025b301940f1e30d2c16bea596c25b176c8b6b3087e92615adbd52902",
"https://bcr.bazel.build/modules/llvm/0.6.7/MODULE.bazel": "d37a2e10571864dc6a5bb53c29216d90b9400bbcadb422337f49107fd2eaf0d2",
"https://bcr.bazel.build/modules/llvm/0.6.7/source.json": "c40bcce08d2adbd658aae609976ce4ae4fdc44f3299fffa29c7fa9bf7e7d6d2b",
"https://bcr.bazel.build/modules/llvm/0.6.8/MODULE.bazel": "53468e4a4be409c2d34e5b7331d2e1fef982151b777655ca3c0047225b333629",
"https://bcr.bazel.build/modules/llvm/0.6.8/source.json": "b673af466f716e01d6243f59e47729e99f37dc5e17026d2bf18c98206f09b6c5",
"https://bcr.bazel.build/modules/nlohmann_json/3.6.1/MODULE.bazel": "6f7b417dcc794d9add9e556673ad25cb3ba835224290f4f848f8e2db1e1fca74",
"https://bcr.bazel.build/modules/nlohmann_json/3.6.1/source.json": "f448c6e8963fdfa7eb831457df83ad63d3d6355018f6574fb017e8169deb43a9",
"https://bcr.bazel.build/modules/openssl/3.5.4.bcr.0/MODULE.bazel": "0f6b8f20b192b9ff0781406256150bcd46f19e66d807dcb0c540548439d6fc35",

1
codex-rs/Cargo.lock generated
View File

@@ -3121,6 +3121,7 @@ dependencies = [
"base64 0.22.1",
"codex-arg0",
"codex-core",
"codex-exec-server",
"codex-features",
"codex-protocol",
"codex-utils-absolute-path",

View File

@@ -53,6 +53,7 @@ use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::DEFAULT_ORIGINATOR;
use codex_core::default_client::SetOriginatorError;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
@@ -78,6 +79,7 @@ use toml::Value as TomlValue;
use tracing::Instrument;
const EXTERNAL_AUTH_REFRESH_TIMEOUT: Duration = Duration::from_secs(10);
const TUI_APP_SERVER_CLIENT_NAME: &str = "codex-tui";
#[derive(Clone)]
struct ExternalAuthRefreshBridge {
@@ -551,7 +553,14 @@ impl MessageProcessor {
} = params.client_info;
session.app_server_client_name = Some(name.clone());
session.client_version = Some(version.clone());
if let Err(error) = set_default_originator(name.clone()) {
let originator = if name == TUI_APP_SERVER_CLIENT_NAME {
// TODO: Remove this temporary workaround once app-server clients no longer
// need to retain the legacy TUI `codex_cli_rs` originator behavior.
DEFAULT_ORIGINATOR.to_string()
} else {
name.clone()
};
if let Err(error) = set_default_originator(originator) {
match error {
SetOriginatorError::InvalidHeaderValue => {
let error = JSONRPCErrorError {

View File

@@ -1,356 +0,0 @@
use crate::default_client::build_reqwest_client;
use reqwest::Client;
use serde::Deserialize;
use std::fs;
use std::io::Cursor;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use zip::ZipArchive;
const GITHUB_API_BASE_URL: &str = "https://api.github.com";
const GITHUB_API_ACCEPT_HEADER: &str = "application/vnd.github+json";
const GITHUB_API_VERSION_HEADER: &str = "2022-11-28";
const OPENAI_PLUGINS_OWNER: &str = "openai";
const OPENAI_PLUGINS_REPO: &str = "plugins";
const CURATED_PLUGINS_RELATIVE_DIR: &str = ".tmp/plugins";
const CURATED_PLUGINS_SHA_FILE: &str = ".tmp/plugins.sha";
const CURATED_PLUGINS_HTTP_TIMEOUT: Duration = Duration::from_secs(30);
/// Minimal projection of the GitHub "get a repository" JSON response;
/// only the default branch name is needed to resolve the curated plugins HEAD.
#[derive(Debug, Deserialize)]
struct GitHubRepositorySummary {
    // Name of the repository's default branch, e.g. "main".
    default_branch: String,
}
/// Minimal projection of the GitHub "get a reference" JSON response.
#[derive(Debug, Deserialize)]
struct GitHubGitRefSummary {
    // The `object` payload wrapping the commit the ref points at.
    object: GitHubGitRefObject,
}
/// The `object` member of a git ref response; carries the commit sha.
#[derive(Debug, Deserialize)]
struct GitHubGitRefObject {
    // Full 40-character commit sha of the ref's target.
    sha: String,
}
/// Absolute location of the curated plugins checkout under `codex_home`.
pub(crate) fn curated_plugins_repo_path(codex_home: &Path) -> PathBuf {
    let mut repo_path = codex_home.to_path_buf();
    repo_path.push(CURATED_PLUGINS_RELATIVE_DIR);
    repo_path
}
/// Read the recorded sha of the last synced curated plugins checkout,
/// if the sha file exists and is non-empty.
pub(crate) fn read_curated_plugins_sha(codex_home: &Path) -> Option<String> {
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    read_sha_file(&sha_path)
}
/// Synchronize the curated plugins checkout against the public GitHub API.
/// Returns the remote HEAD sha that the local checkout now matches.
pub(crate) fn sync_openai_plugins_repo(codex_home: &Path) -> Result<String, String> {
    let api_base_url = GITHUB_API_BASE_URL;
    sync_openai_plugins_repo_with_api_base_url(codex_home, api_base_url)
}
/// Bring the curated plugins checkout under `codex_home` up to date with the
/// remote HEAD of the openai/plugins repository, downloading a zipball from
/// `api_base_url` when the recorded sha differs.
///
/// Returns the remote HEAD sha on success; every failure is reported as a
/// human-readable `String` error.
fn sync_openai_plugins_repo_with_api_base_url(
    codex_home: &Path,
    api_base_url: &str,
) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    // Caller is synchronous, so a current-thread runtime drives the async
    // HTTP calls via block_on.
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .map_err(|err| format!("failed to create curated plugins sync runtime: {err}"))?;
    let remote_sha = runtime.block_on(fetch_curated_repo_remote_sha(api_base_url))?;
    let local_sha = read_sha_file(&sha_path);
    // Fast path: recorded sha matches remote HEAD and the checkout exists.
    if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.is_dir() {
        return Ok(remote_sha);
    }
    let Some(parent) = repo_path.parent() else {
        return Err(format!(
            "failed to determine curated plugins parent directory for {}",
            repo_path.display()
        ));
    };
    fs::create_dir_all(parent).map_err(|err| {
        format!(
            "failed to create curated plugins parent directory {}: {err}",
            parent.display()
        )
    })?;
    // Stage the new checkout in a temp dir inside the same parent so the
    // final activation rename stays on one filesystem.
    let clone_dir = tempfile::Builder::new()
        .prefix("plugins-clone-")
        .tempdir_in(parent)
        .map_err(|err| {
            format!(
                "failed to create temporary curated plugins directory in {}: {err}",
                parent.display()
            )
        })?;
    let cloned_repo_path = clone_dir.path().join("repo");
    let zipball_bytes = runtime.block_on(fetch_curated_repo_zipball(api_base_url, &remote_sha))?;
    extract_zipball_to_dir(&zipball_bytes, &cloned_repo_path)?;
    // Sanity-check the staged checkout before touching the live one.
    if !cloned_repo_path
        .join(".agents/plugins/marketplace.json")
        .is_file()
    {
        return Err(format!(
            "curated plugins archive missing marketplace manifest at {}",
            cloned_repo_path
                .join(".agents/plugins/marketplace.json")
                .display()
        ));
    }
    if repo_path.exists() {
        // Move the existing checkout aside first so it can be restored if
        // activating the staged one fails.
        let backup_dir = tempfile::Builder::new()
            .prefix("plugins-backup-")
            .tempdir_in(parent)
            .map_err(|err| {
                format!(
                    "failed to create curated plugins backup directory in {}: {err}",
                    parent.display()
                )
            })?;
        let backup_repo_path = backup_dir.path().join("repo");
        fs::rename(&repo_path, &backup_repo_path).map_err(|err| {
            format!(
                "failed to move previous curated plugins repo out of the way at {}: {err}",
                repo_path.display()
            )
        })?;
        if let Err(err) = fs::rename(&cloned_repo_path, &repo_path) {
            // Activation failed: try to roll the backup back into place.
            let rollback_result = fs::rename(&backup_repo_path, &repo_path);
            return match rollback_result {
                Ok(()) => Err(format!(
                    "failed to activate new curated plugins repo at {}: {err}",
                    repo_path.display()
                )),
                Err(rollback_err) => {
                    // Rollback also failed; keep() persists the backup dir so
                    // the previous checkout is not deleted on drop.
                    let backup_path = backup_dir.keep().join("repo");
                    Err(format!(
                        "failed to activate new curated plugins repo at {}: {err}; failed to restore previous repo (left at {}): {rollback_err}",
                        repo_path.display(),
                        backup_path.display()
                    ))
                }
            };
        }
    } else {
        fs::rename(&cloned_repo_path, &repo_path).map_err(|err| {
            format!(
                "failed to activate curated plugins repo at {}: {err}",
                repo_path.display()
            )
        })?;
    }
    if let Some(parent) = sha_path.parent() {
        fs::create_dir_all(parent).map_err(|err| {
            format!(
                "failed to create curated plugins sha directory {}: {err}",
                parent.display()
            )
        })?;
    }
    // Record the sha only after activation succeeded, so a partial sync is
    // retried on the next run.
    fs::write(&sha_path, format!("{remote_sha}\n")).map_err(|err| {
        format!(
            "failed to write curated plugins sha file {}: {err}",
            sha_path.display()
        )
    })?;
    Ok(remote_sha)
}
/// Resolve the sha of the curated plugins repository's HEAD via the GitHub
/// API: first look up the default branch, then fetch the ref it points at.
async fn fetch_curated_repo_remote_sha(api_base_url: &str) -> Result<String, String> {
    // Normalize so joining path segments below can't produce a double slash.
    let api_base_url = api_base_url.trim_end_matches('/');
    let repo_url = format!("{api_base_url}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}");
    let client = build_reqwest_client();
    let repo_body = fetch_github_text(&client, &repo_url, "get curated plugins repository").await?;
    let repo_summary: GitHubRepositorySummary =
        serde_json::from_str(&repo_body).map_err(|err| {
            format!("failed to parse curated plugins repository response from {repo_url}: {err}")
        })?;
    if repo_summary.default_branch.is_empty() {
        return Err(format!(
            "curated plugins repository response from {repo_url} did not include a default branch"
        ));
    }
    let git_ref_url = format!("{repo_url}/git/ref/heads/{}", repo_summary.default_branch);
    let git_ref_body =
        fetch_github_text(&client, &git_ref_url, "get curated plugins HEAD ref").await?;
    let git_ref: GitHubGitRefSummary = serde_json::from_str(&git_ref_body).map_err(|err| {
        format!("failed to parse curated plugins ref response from {git_ref_url}: {err}")
    })?;
    if git_ref.object.sha.is_empty() {
        return Err(format!(
            "curated plugins ref response from {git_ref_url} did not include a HEAD sha"
        ));
    }
    Ok(git_ref.object.sha)
}
/// Download the zipball of the curated plugins repository at `remote_sha`
/// and return its raw bytes.
async fn fetch_curated_repo_zipball(
    api_base_url: &str,
    remote_sha: &str,
) -> Result<Vec<u8>, String> {
    // Normalize so joining path segments below can't produce a double slash.
    let trimmed_base = api_base_url.trim_end_matches('/');
    let repo_url = format!(
        "{api_base_url}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}",
        api_base_url = trimmed_base
    );
    let zipball_url = format!("{repo_url}/zipball/{remote_sha}");
    let client = build_reqwest_client();
    let context = "download curated plugins archive";
    fetch_github_bytes(&client, &zipball_url, context).await
}
/// GET `url` through the shared GitHub request builder and return the body as
/// text; a non-success status becomes an error string that includes the body.
async fn fetch_github_text(client: &Client, url: &str, context: &str) -> Result<String, String> {
    let send_result = github_request(client, url).send().await;
    let response = send_result.map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    // Body is read even on error so it can be included in the message.
    let body = response.text().await.unwrap_or_default();
    if status.is_success() {
        Ok(body)
    } else {
        Err(format!(
            "{context} from {url} failed with status {status}: {body}"
        ))
    }
}
/// GET `url` through the shared GitHub request builder and return the raw
/// response bytes; a non-success status becomes an error string that includes
/// a lossy-decoded body.
async fn fetch_github_bytes(client: &Client, url: &str, context: &str) -> Result<Vec<u8>, String> {
    let send_result = github_request(client, url).send().await;
    let response = send_result.map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    let bytes_result = response.bytes().await;
    let body =
        bytes_result.map_err(|err| format!("failed to read {context} response from {url}: {err}"))?;
    if status.is_success() {
        return Ok(body.to_vec());
    }
    let body_text = String::from_utf8_lossy(&body);
    Err(format!(
        "{context} from {url} failed with status {status}: {body_text}"
    ))
}
/// Build a GET request with the standard GitHub API headers and the curated
/// plugins HTTP timeout applied.
fn github_request(client: &Client, url: &str) -> reqwest::RequestBuilder {
    let builder = client.get(url).timeout(CURATED_PLUGINS_HTTP_TIMEOUT);
    builder
        .header("accept", GITHUB_API_ACCEPT_HEADER)
        .header("x-github-api-version", GITHUB_API_VERSION_HEADER)
}
/// Read a sha file, trimming surrounding whitespace. Returns `None` when the
/// file is missing, unreadable, or contains only whitespace.
fn read_sha_file(sha_path: &Path) -> Option<String> {
    let contents = fs::read_to_string(sha_path).ok()?;
    let trimmed = contents.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Extract an in-memory zip archive into `destination`, stripping the single
/// top-level directory that GitHub zipballs wrap their contents in.
///
/// Entries whose names would escape the extraction root are rejected
/// (zip-slip protection via `enclosed_name`). Unix permission bits are
/// re-applied per file where available.
fn extract_zipball_to_dir(bytes: &[u8], destination: &Path) -> Result<(), String> {
    fs::create_dir_all(destination).map_err(|err| {
        format!(
            "failed to create curated plugins extraction directory {}: {err}",
            destination.display()
        )
    })?;
    let cursor = Cursor::new(bytes);
    let mut archive = ZipArchive::new(cursor)
        .map_err(|err| format!("failed to open curated plugins zip archive: {err}"))?;
    for index in 0..archive.len() {
        let mut entry = archive
            .by_index(index)
            .map_err(|err| format!("failed to read curated plugins zip entry: {err}"))?;
        // enclosed_name() is None for names with `..`/absolute components.
        let Some(relative_path) = entry.enclosed_name() else {
            return Err(format!(
                "curated plugins zip entry `{}` escapes extraction root",
                entry.name()
            ));
        };
        // Drop the zipball's `<repo>-<sha>` root directory component; skip
        // entries that don't start with a normal component.
        let mut components = relative_path.components();
        let Some(Component::Normal(_)) = components.next() else {
            continue;
        };
        let output_relative = components.fold(PathBuf::new(), |mut path, component| {
            if let Component::Normal(segment) = component {
                path.push(segment);
            }
            path
        });
        // The root directory entry itself reduces to an empty path; skip it.
        if output_relative.as_os_str().is_empty() {
            continue;
        }
        let output_path = destination.join(&output_relative);
        if entry.is_dir() {
            fs::create_dir_all(&output_path).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    output_path.display()
                )
            })?;
            continue;
        }
        // Files may appear before their parent directory entry, so create
        // parents on demand.
        if let Some(parent) = output_path.parent() {
            fs::create_dir_all(parent).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    parent.display()
                )
            })?;
        }
        let mut output = fs::File::create(&output_path).map_err(|err| {
            format!(
                "failed to create curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        std::io::copy(&mut entry, &mut output).map_err(|err| {
            format!(
                "failed to write curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        apply_zip_permissions(&entry, &output_path)?;
    }
    Ok(())
}
/// Re-apply the Unix mode bits recorded in the zip entry, when present, to the
/// extracted file. Entries without mode bits are left untouched.
#[cfg(unix)]
fn apply_zip_permissions(entry: &zip::read::ZipFile<'_>, output_path: &Path) -> Result<(), String> {
    match entry.unix_mode() {
        None => Ok(()),
        Some(mode) => {
            let permissions = fs::Permissions::from_mode(mode);
            fs::set_permissions(output_path, permissions).map_err(|err| {
                format!(
                    "failed to set permissions on curated plugins file {}: {err}",
                    output_path.display()
                )
            })
        }
    }
}
/// No-op on non-Unix targets: zip entries record Unix mode bits, which have no
/// direct equivalent here, so extracted files keep their default permissions.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), String> {
    Ok(())
}
#[cfg(test)]
#[path = "curated_repo_tests.rs"]
mod tests;

View File

@@ -1,159 +0,0 @@
use super::*;
use pretty_assertions::assert_eq;
use std::io::Write;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
use zip::ZipWriter;
use zip::write::SimpleFileOptions;
// The curated plugins checkout must live under `.tmp/plugins` in CODEX_HOME.
#[test]
fn curated_plugins_repo_path_uses_codex_home_tmp_dir() {
    let codex_home = tempdir().expect("tempdir");
    let expected = codex_home.path().join(".tmp/plugins");
    assert_eq!(curated_plugins_repo_path(codex_home.path()), expected);
}
// The sha file's contents are returned with surrounding whitespace trimmed.
#[test]
fn read_curated_plugins_sha_reads_trimmed_sha_file() {
    let codex_home = tempdir().expect("tempdir");
    let tmp_dir = codex_home.path().join(".tmp");
    fs::create_dir_all(&tmp_dir).expect("create tmp");
    fs::write(tmp_dir.join("plugins.sha"), "abc123\n").expect("write sha");
    let sha = read_curated_plugins_sha(codex_home.path());
    assert_eq!(sha.as_deref(), Some("abc123"));
}
/// Full sync path: with no local checkout, the sync resolves the remote HEAD,
/// downloads the zipball, extracts it, and records the sha.
#[tokio::test]
async fn sync_openai_plugins_repo_downloads_zipball_and_records_sha() {
    let tmp = tempdir().expect("tempdir");
    let server = MockServer::start().await;
    let sha = "0123456789abcdef0123456789abcdef01234567";
    // Repository lookup -> default branch.
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    // Ref lookup -> HEAD sha.
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    // Zipball download at the resolved sha.
    Mock::given(method("GET"))
        .and(path(format!("/repos/openai/plugins/zipball/{sha}")))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "application/zip")
                .set_body_bytes(curated_repo_zipball_bytes(sha)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // The sync entry point is blocking (it builds its own runtime), so it is
    // run on a blocking thread rather than on this test's runtime.
    tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_api_base_url(tmp_path.as_path(), &server_uri)
    })
    .await
    .expect("sync task should join")
    .expect("sync should succeed");
    let repo_path = curated_plugins_repo_path(tmp.path());
    // Extracted checkout contains the manifest and plugin files, and the sha
    // file reflects the remote HEAD.
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
    assert!(
        repo_path
            .join("plugins/gmail/.codex-plugin/plugin.json")
            .is_file()
    );
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
/// Fast path: when the recorded sha already matches the remote HEAD and the
/// checkout exists, the sync must not download the zipball. No zipball mock is
/// mounted, so an attempted download would 404 and fail the sync.
#[tokio::test]
async fn sync_openai_plugins_repo_skips_archive_download_when_sha_matches() {
    let tmp = tempdir().expect("tempdir");
    // Pre-populate a local checkout with a manifest...
    let repo_path = curated_plugins_repo_path(tmp.path());
    fs::create_dir_all(repo_path.join(".agents/plugins")).expect("create repo");
    fs::write(
        repo_path.join(".agents/plugins/marketplace.json"),
        r#"{"name":"openai-curated","plugins":[]}"#,
    )
    .expect("write marketplace");
    // ...and a recorded sha that matches what the mock server will report.
    fs::create_dir_all(tmp.path().join(".tmp")).expect("create tmp");
    let sha = "fedcba9876543210fedcba9876543210fedcba98";
    fs::write(tmp.path().join(".tmp/plugins.sha"), format!("{sha}\n")).expect("write sha");
    let server = MockServer::start().await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // Blocking entry point runs off the test runtime.
    tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_api_base_url(tmp_path.as_path(), &server_uri)
    })
    .await
    .expect("sync task should join")
    .expect("sync should succeed");
    // Local state is untouched: same sha, same manifest.
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
}
/// Build an in-memory zip shaped like a GitHub zipball of the curated plugins
/// repo: everything nested under a single `openai-plugins-{sha}` root
/// directory (which the extractor strips), containing a marketplace manifest
/// and one plugin manifest.
fn curated_repo_zipball_bytes(sha: &str) -> Vec<u8> {
    let cursor = Cursor::new(Vec::new());
    let mut writer = ZipWriter::new(cursor);
    let options = SimpleFileOptions::default();
    // GitHub zipballs use a `<repo>-<sha>`-style root directory.
    let root = format!("openai-plugins-{sha}");
    writer
        .start_file(format!("{root}/.agents/plugins/marketplace.json"), options)
        .expect("start marketplace entry");
    writer
        .write_all(
            br#"{
"name": "openai-curated",
"plugins": [
{
"name": "gmail",
"source": {
"source": "local",
"path": "./plugins/gmail"
}
}
]
}"#,
        )
        .expect("write marketplace");
    writer
        .start_file(
            format!("{root}/plugins/gmail/.codex-plugin/plugin.json"),
            options,
        )
        .expect("start plugin manifest entry");
    writer
        .write_all(br#"{"name":"gmail"}"#)
        .expect("write plugin manifest");
    writer.finish().expect("finish zip writer").into_inner()
}

View File

@@ -60,6 +60,7 @@ use std::sync::RwLock;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Instant;
use tokio::sync::Mutex;
use toml_edit::value;
use tracing::info;
use tracing::warn;
@@ -463,6 +464,7 @@ pub struct PluginsManager {
store: PluginStore,
featured_plugin_ids_cache: RwLock<Option<CachedFeaturedPluginIds>>,
cached_enabled_outcome: RwLock<Option<PluginLoadOutcome>>,
remote_sync_lock: Mutex<()>,
restriction_product: Option<Product>,
analytics_events_client: RwLock<Option<AnalyticsEventsClient>>,
}
@@ -488,6 +490,7 @@ impl PluginsManager {
store: PluginStore::new(codex_home),
featured_plugin_ids_cache: RwLock::new(None),
cached_enabled_outcome: RwLock::new(None),
remote_sync_lock: Mutex::new(()),
restriction_product,
analytics_events_client: RwLock::new(None),
}
@@ -777,6 +780,8 @@ impl PluginsManager {
auth: Option<&CodexAuth>,
additive_only: bool,
) -> Result<RemotePluginSyncResult, PluginRemoteSyncError> {
let _remote_sync_guard = self.remote_sync_lock.lock().await;
if !config.features.enabled(Feature::Plugins) {
return Ok(RemotePluginSyncResult::default());
}

View File

@@ -1,4 +1,3 @@
mod curated_repo;
mod discoverable;
mod injection;
mod manager;
@@ -12,9 +11,6 @@ mod store;
pub(crate) mod test_support;
mod toggles;
pub(crate) use curated_repo::curated_plugins_repo_path;
pub(crate) use curated_repo::read_curated_plugins_sha;
pub(crate) use curated_repo::sync_openai_plugins_repo;
pub(crate) use discoverable::list_tool_suggest_discoverable_plugins;
pub(crate) use injection::build_plugin_injections;
pub use manager::AppConnectorId;
@@ -52,5 +48,8 @@ pub use remote::RemotePluginFetchError;
pub use remote::fetch_remote_featured_plugin_ids;
pub(crate) use render::render_explicit_plugin_instructions;
pub(crate) use render::render_plugins_section;
pub(crate) use startup_sync::curated_plugins_repo_path;
pub(crate) use startup_sync::read_curated_plugins_sha;
pub(crate) use startup_sync::sync_openai_plugins_repo;
pub use store::PluginId;
pub use toggles::collect_plugin_enabled_candidates;

View File

@@ -1,19 +1,143 @@
use crate::default_client::build_reqwest_client;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::process::Output;
use std::process::Stdio;
use std::sync::Arc;
use std::time::Duration;
use reqwest::Client;
use serde::Deserialize;
use tracing::info;
use tracing::warn;
use zip::ZipArchive;
use crate::AuthManager;
use crate::config::Config;
use super::PluginsManager;
const GITHUB_API_BASE_URL: &str = "https://api.github.com";
const GITHUB_API_ACCEPT_HEADER: &str = "application/vnd.github+json";
const GITHUB_API_VERSION_HEADER: &str = "2022-11-28";
const OPENAI_PLUGINS_OWNER: &str = "openai";
const OPENAI_PLUGINS_REPO: &str = "plugins";
const CURATED_PLUGINS_RELATIVE_DIR: &str = ".tmp/plugins";
const CURATED_PLUGINS_SHA_FILE: &str = ".tmp/plugins.sha";
const CURATED_PLUGINS_GIT_TIMEOUT: Duration = Duration::from_secs(30);
const CURATED_PLUGINS_HTTP_TIMEOUT: Duration = Duration::from_secs(30);
const STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE: &str = ".tmp/app-server-remote-plugin-sync-v1";
const STARTUP_REMOTE_PLUGIN_SYNC_PREREQUISITE_TIMEOUT: Duration = Duration::from_secs(5);
#[derive(Debug, Deserialize)]
struct GitHubRepositorySummary {
default_branch: String,
}
#[derive(Debug, Deserialize)]
struct GitHubGitRefSummary {
object: GitHubGitRefObject,
}
#[derive(Debug, Deserialize)]
struct GitHubGitRefObject {
sha: String,
}
pub(crate) fn curated_plugins_repo_path(codex_home: &Path) -> PathBuf {
codex_home.join(CURATED_PLUGINS_RELATIVE_DIR)
}
pub(crate) fn read_curated_plugins_sha(codex_home: &Path) -> Option<String> {
read_sha_file(codex_home.join(CURATED_PLUGINS_SHA_FILE).as_path())
}
pub(crate) fn sync_openai_plugins_repo(codex_home: &Path) -> Result<String, String> {
sync_openai_plugins_repo_with_transport_overrides(codex_home, "git", GITHUB_API_BASE_URL)
}
fn sync_openai_plugins_repo_with_transport_overrides(
codex_home: &Path,
git_binary: &str,
api_base_url: &str,
) -> Result<String, String> {
match sync_openai_plugins_repo_via_git(codex_home, git_binary) {
Ok(remote_sha) => Ok(remote_sha),
Err(err) => {
warn!(
error = %err,
git_binary,
"git sync failed for curated plugin sync; falling back to GitHub HTTP"
);
sync_openai_plugins_repo_via_http(codex_home, api_base_url)
}
}
}
fn sync_openai_plugins_repo_via_git(codex_home: &Path, git_binary: &str) -> Result<String, String> {
let repo_path = curated_plugins_repo_path(codex_home);
let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
let remote_sha = git_ls_remote_head_sha(git_binary)?;
let local_sha = read_local_git_or_sha_file(&repo_path, &sha_path, git_binary);
if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.join(".git").is_dir() {
return Ok(remote_sha);
}
let cloned_repo_path = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
let clone_output = run_git_command_with_timeout(
Command::new(git_binary)
.env("GIT_OPTIONAL_LOCKS", "0")
.arg("clone")
.arg("--depth")
.arg("1")
.arg("https://github.com/openai/plugins.git")
.arg(&cloned_repo_path),
"git clone curated plugins repo",
CURATED_PLUGINS_GIT_TIMEOUT,
)?;
ensure_git_success(&clone_output, "git clone curated plugins repo")?;
let cloned_sha = git_head_sha(&cloned_repo_path, git_binary)?;
if cloned_sha != remote_sha {
return Err(format!(
"curated plugins clone HEAD mismatch: expected {remote_sha}, got {cloned_sha}"
));
}
ensure_marketplace_manifest_exists(&cloned_repo_path)?;
activate_curated_repo(&repo_path, &cloned_repo_path)?;
write_curated_plugins_sha(&sha_path, &remote_sha)?;
Ok(remote_sha)
}
fn sync_openai_plugins_repo_via_http(
codex_home: &Path,
api_base_url: &str,
) -> Result<String, String> {
let repo_path = curated_plugins_repo_path(codex_home);
let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.map_err(|err| format!("failed to create curated plugins sync runtime: {err}"))?;
let remote_sha = runtime.block_on(fetch_curated_repo_remote_sha(api_base_url))?;
let local_sha = read_sha_file(&sha_path);
if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.is_dir() {
return Ok(remote_sha);
}
let cloned_repo_path = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
let zipball_bytes = runtime.block_on(fetch_curated_repo_zipball(api_base_url, &remote_sha))?;
extract_zipball_to_dir(&zipball_bytes, &cloned_repo_path)?;
ensure_marketplace_manifest_exists(&cloned_repo_path)?;
activate_curated_repo(&repo_path, &cloned_repo_path)?;
write_curated_plugins_sha(&sha_path, &remote_sha)?;
Ok(remote_sha)
}
pub(super) fn start_startup_remote_plugin_sync_once(
manager: Arc<PluginsManager>,
codex_home: PathBuf,
@@ -103,93 +227,438 @@ async fn write_startup_remote_plugin_sync_marker(codex_home: &Path) -> std::io::
tokio::fs::write(marker_path, b"ok\n").await
}
#[cfg(test)]
mod tests {
use super::*;
use crate::auth::CodexAuth;
use crate::config::CONFIG_TOML_FILE;
use crate::plugins::curated_plugins_repo_path;
use crate::plugins::test_support::TEST_CURATED_PLUGIN_SHA;
use crate::plugins::test_support::write_curated_plugin_sha;
use crate::plugins::test_support::write_file;
use crate::plugins::test_support::write_openai_curated_marketplace;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
fn prepare_curated_repo_parent_and_temp_dir(repo_path: &Path) -> Result<PathBuf, String> {
let Some(parent) = repo_path.parent() else {
return Err(format!(
"failed to determine curated plugins parent directory for {}",
repo_path.display()
));
};
std::fs::create_dir_all(parent).map_err(|err| {
format!(
"failed to create curated plugins parent directory {}: {err}",
parent.display()
)
})?;
#[tokio::test]
async fn startup_remote_plugin_sync_writes_marker_and_reconciles_state() {
let tmp = tempdir().expect("tempdir");
let curated_root = curated_plugins_repo_path(tmp.path());
write_openai_curated_marketplace(&curated_root, &["linear"]);
write_curated_plugin_sha(tmp.path());
write_file(
&tmp.path().join(CONFIG_TOML_FILE),
r#"[features]
plugins = true
let clone_dir = tempfile::Builder::new()
.prefix("plugins-clone-")
.tempdir_in(parent)
.map_err(|err| {
format!(
"failed to create temporary curated plugins directory in {}: {err}",
parent.display()
)
})?;
Ok(clone_dir.keep())
}
[plugins."linear@openai-curated"]
enabled = false
"#,
);
fn ensure_marketplace_manifest_exists(repo_path: &Path) -> Result<(), String> {
if repo_path.join(".agents/plugins/marketplace.json").is_file() {
return Ok(());
}
Err(format!(
"curated plugins archive missing marketplace manifest at {}",
repo_path.join(".agents/plugins/marketplace.json").display()
))
}
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/backend-api/plugins/list"))
.and(header("authorization", "Bearer Access Token"))
.and(header("chatgpt-account-id", "account_id"))
.respond_with(ResponseTemplate::new(200).set_body_string(
r#"[
{"id":"1","name":"linear","marketplace_name":"openai-curated","version":"1.0.0","enabled":true}
]"#,
))
.mount(&server)
.await;
fn activate_curated_repo(repo_path: &Path, staged_repo_path: &Path) -> Result<(), String> {
if repo_path.exists() {
let parent = repo_path.parent().ok_or_else(|| {
format!(
"failed to determine curated plugins parent directory for {}",
repo_path.display()
)
})?;
let backup_dir = tempfile::Builder::new()
.prefix("plugins-backup-")
.tempdir_in(parent)
.map_err(|err| {
format!(
"failed to create curated plugins backup directory in {}: {err}",
parent.display()
)
})?;
let backup_repo_path = backup_dir.path().join("repo");
let mut config = crate::plugins::test_support::load_plugins_config(tmp.path()).await;
config.chatgpt_base_url = format!("{}/backend-api/", server.uri());
let manager = Arc::new(PluginsManager::new(tmp.path().to_path_buf()));
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
std::fs::rename(repo_path, &backup_repo_path).map_err(|err| {
format!(
"failed to move previous curated plugins repo out of the way at {}: {err}",
repo_path.display()
)
})?;
start_startup_remote_plugin_sync_once(
Arc::clone(&manager),
tmp.path().to_path_buf(),
config,
auth_manager,
);
let marker_path = tmp.path().join(STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE);
tokio::time::timeout(Duration::from_secs(5), async {
loop {
if marker_path.is_file() {
break;
if let Err(err) = std::fs::rename(staged_repo_path, repo_path) {
let rollback_result = std::fs::rename(&backup_repo_path, repo_path);
return match rollback_result {
Ok(()) => Err(format!(
"failed to activate new curated plugins repo at {}: {err}",
repo_path.display()
)),
Err(rollback_err) => {
let backup_path = backup_dir.keep().join("repo");
Err(format!(
"failed to activate new curated plugins repo at {}: {err}; failed to restore previous repo (left at {}): {rollback_err}",
repo_path.display(),
backup_path.display()
))
}
tokio::time::sleep(Duration::from_millis(10)).await;
};
}
} else {
std::fs::rename(staged_repo_path, repo_path).map_err(|err| {
format!(
"failed to activate curated plugins repo at {}: {err}",
repo_path.display()
)
})?;
}
Ok(())
}
/// Persist the curated-plugins HEAD sha (newline-terminated) to `sha_path`,
/// creating any missing parent directories first.
fn write_curated_plugins_sha(sha_path: &Path, remote_sha: &str) -> Result<(), String> {
    if let Some(parent_dir) = sha_path.parent() {
        if let Err(err) = std::fs::create_dir_all(parent_dir) {
            return Err(format!(
                "failed to create curated plugins sha directory {}: {err}",
                parent_dir.display()
            ));
        }
    }
    let contents = format!("{remote_sha}\n");
    std::fs::write(sha_path, contents).map_err(|err| {
        format!(
            "failed to write curated plugins sha file {}: {err}",
            sha_path.display()
        )
    })
}
fn read_local_git_or_sha_file(
repo_path: &Path,
sha_path: &Path,
git_binary: &str,
) -> Option<String> {
if repo_path.join(".git").is_dir()
&& let Ok(sha) = git_head_sha(repo_path, git_binary)
{
return Some(sha);
}
read_sha_file(sha_path)
}
/// Query the remote curated plugins repository for its current HEAD sha via
/// `git ls-remote`, without requiring a local checkout. The command runs with
/// a timeout via `run_git_command_with_timeout`.
fn git_ls_remote_head_sha(git_binary: &str) -> Result<String, String> {
    let output = run_git_command_with_timeout(
        Command::new(git_binary)
            // Disable optional lock files so the read-only query never writes.
            .env("GIT_OPTIONAL_LOCKS", "0")
            .arg("ls-remote")
            .arg("https://github.com/openai/plugins.git")
            .arg("HEAD"),
        "git ls-remote curated plugins repo",
        CURATED_PLUGINS_GIT_TIMEOUT,
    )?;
    ensure_git_success(&output, "git ls-remote curated plugins repo")?;
    let stdout = String::from_utf8_lossy(&output.stdout);
    // ls-remote prints `<sha>\t<ref>` lines; only the first (HEAD) is needed.
    let Some(first_line) = stdout.lines().next() else {
        return Err("git ls-remote returned empty output for curated plugins repo".to_string());
    };
    let Some((sha, _)) = first_line.split_once('\t') else {
        return Err(format!(
            "unexpected git ls-remote output for curated plugins repo: {first_line}"
        ));
    };
    if sha.is_empty() {
        return Err("git ls-remote returned empty sha for curated plugins repo".to_string());
    }
    Ok(sha.to_string())
}
/// Resolve the commit sha that HEAD points at inside `repo_path` by running
/// `git rev-parse HEAD` with the provided git binary.
fn git_head_sha(repo_path: &Path, git_binary: &str) -> Result<String, String> {
    let mut command = Command::new(git_binary);
    command
        .env("GIT_OPTIONAL_LOCKS", "0")
        .arg("-C")
        .arg(repo_path)
        .arg("rev-parse")
        .arg("HEAD");
    let output = command.output().map_err(|err| {
        format!(
            "failed to run git rev-parse HEAD in {}: {err}",
            repo_path.display()
        )
    })?;
    ensure_git_success(&output, "git rev-parse HEAD")?;
    let head_sha = String::from_utf8_lossy(&output.stdout).trim().to_string();
    if head_sha.is_empty() {
        Err(format!(
            "git rev-parse HEAD returned empty output in {}",
            repo_path.display()
        ))
    } else {
        Ok(head_sha)
    }
}
fn run_git_command_with_timeout(
command: &mut Command,
context: &str,
timeout: Duration,
) -> Result<Output, String> {
let mut child = command
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.map_err(|err| format!("failed to run {context}: {err}"))?;
let start = std::time::Instant::now();
loop {
match child.try_wait() {
Ok(Some(_)) => {
return child
.wait_with_output()
.map_err(|err| format!("failed to wait for {context}: {err}"));
}
})
.await
.expect("marker should be written");
Ok(None) => {}
Err(err) => return Err(format!("failed to poll {context}: {err}")),
}
assert!(
tmp.path()
.join(format!(
"plugins/cache/openai-curated/linear/{TEST_CURATED_PLUGIN_SHA}"
if start.elapsed() >= timeout {
match child.try_wait() {
Ok(Some(_)) => {
return child
.wait_with_output()
.map_err(|err| format!("failed to wait for {context}: {err}"));
}
Ok(None) => {}
Err(err) => return Err(format!("failed to poll {context}: {err}")),
}
let _ = child.kill();
let output = child
.wait_with_output()
.map_err(|err| format!("failed to wait for {context} after timeout: {err}"))?;
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
return if stderr.is_empty() {
Err(format!("{context} timed out after {}s", timeout.as_secs()))
} else {
Err(format!(
"{context} timed out after {}s: {stderr}",
timeout.as_secs()
))
.is_dir()
);
let config = std::fs::read_to_string(tmp.path().join(CONFIG_TOML_FILE))
.expect("config should exist");
assert!(config.contains(r#"[plugins."linear@openai-curated"]"#));
assert!(config.contains("enabled = true"));
};
}
let marker_contents =
std::fs::read_to_string(marker_path).expect("marker should be readable");
assert_eq!(marker_contents, "ok\n");
std::thread::sleep(Duration::from_millis(100));
}
}
fn ensure_git_success(output: &Output, context: &str) -> Result<(), String> {
if output.status.success() {
return Ok(());
}
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
if stderr.is_empty() {
Err(format!("{context} failed with status {}", output.status))
} else {
Err(format!(
"{context} failed with status {}: {stderr}",
output.status
))
}
}
/// Resolve the HEAD commit sha of the curated plugins repository through the
/// GitHub REST API: first fetch the repository metadata to learn the default
/// branch, then fetch that branch's git ref to get its sha.
async fn fetch_curated_repo_remote_sha(api_base_url: &str) -> Result<String, String> {
    let api_base_url = api_base_url.trim_end_matches('/');
    let repo_url = format!("{api_base_url}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}");
    let client = build_reqwest_client();
    let repo_body = fetch_github_text(&client, &repo_url, "get curated plugins repository").await?;
    let repo_summary: GitHubRepositorySummary =
        serde_json::from_str(&repo_body).map_err(|err| {
            format!("failed to parse curated plugins repository response from {repo_url}: {err}")
        })?;
    if repo_summary.default_branch.is_empty() {
        return Err(format!(
            "curated plugins repository response from {repo_url} did not include a default branch"
        ));
    }
    // Look up the sha the default branch currently points at.
    let git_ref_url = format!("{repo_url}/git/ref/heads/{}", repo_summary.default_branch);
    let git_ref_body =
        fetch_github_text(&client, &git_ref_url, "get curated plugins HEAD ref").await?;
    let git_ref: GitHubGitRefSummary = serde_json::from_str(&git_ref_body).map_err(|err| {
        format!("failed to parse curated plugins ref response from {git_ref_url}: {err}")
    })?;
    if git_ref.object.sha.is_empty() {
        return Err(format!(
            "curated plugins ref response from {git_ref_url} did not include a HEAD sha"
        ));
    }
    Ok(git_ref.object.sha)
}
/// Download the curated plugins repository as a zipball pinned to
/// `remote_sha`, returning the raw archive bytes.
async fn fetch_curated_repo_zipball(
    api_base_url: &str,
    remote_sha: &str,
) -> Result<Vec<u8>, String> {
    let trimmed_base = api_base_url.trim_end_matches('/');
    let zipball_url = format!(
        "{trimmed_base}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}/zipball/{remote_sha}"
    );
    let client = build_reqwest_client();
    fetch_github_bytes(&client, &zipball_url, "download curated plugins archive").await
}
/// GET `url` and return its body as text; any non-success HTTP status becomes
/// an error that includes the response body for diagnosis.
async fn fetch_github_text(client: &Client, url: &str, context: &str) -> Result<String, String> {
    let response = match github_request(client, url).send().await {
        Ok(response) => response,
        Err(err) => return Err(format!("failed to {context} from {url}: {err}")),
    };
    let status = response.status();
    let body = response.text().await.unwrap_or_default();
    if status.is_success() {
        Ok(body)
    } else {
        Err(format!(
            "{context} from {url} failed with status {status}: {body}"
        ))
    }
}
/// GET `url` and return its raw bytes; any non-success HTTP status becomes an
/// error that includes the (lossily decoded) response body.
async fn fetch_github_bytes(client: &Client, url: &str, context: &str) -> Result<Vec<u8>, String> {
    let response = github_request(client, url)
        .send()
        .await
        .map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    let body = response
        .bytes()
        .await
        .map_err(|err| format!("failed to read {context} response from {url}: {err}"))?;
    if status.is_success() {
        return Ok(body.to_vec());
    }
    let body_text = String::from_utf8_lossy(&body);
    Err(format!(
        "{context} from {url} failed with status {status}: {body_text}"
    ))
}
/// Build a GET request carrying the GitHub REST API accept/version headers
/// and the curated-plugins HTTP timeout.
fn github_request(client: &Client, url: &str) -> reqwest::RequestBuilder {
    client
        .get(url)
        .timeout(CURATED_PLUGINS_HTTP_TIMEOUT)
        .header("accept", GITHUB_API_ACCEPT_HEADER)
        .header("x-github-api-version", GITHUB_API_VERSION_HEADER)
}
/// Read a previously cached sha file, returning `None` when the file is
/// missing, unreadable, or contains only whitespace.
fn read_sha_file(sha_path: &Path) -> Option<String> {
    let raw = std::fs::read_to_string(sha_path).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Extract a repository zipball into `destination`, stripping the single
/// top-level directory component that wraps every entry (GitHub zipballs put
/// all content under `<owner>-<repo>-<sha>/`). Entries whose paths would
/// escape the extraction root are rejected.
fn extract_zipball_to_dir(bytes: &[u8], destination: &Path) -> Result<(), String> {
    std::fs::create_dir_all(destination).map_err(|err| {
        format!(
            "failed to create curated plugins extraction directory {}: {err}",
            destination.display()
        )
    })?;
    let cursor = std::io::Cursor::new(bytes);
    let mut archive = ZipArchive::new(cursor)
        .map_err(|err| format!("failed to open curated plugins zip archive: {err}"))?;
    for index in 0..archive.len() {
        let mut entry = archive
            .by_index(index)
            .map_err(|err| format!("failed to read curated plugins zip entry: {err}"))?;
        // enclosed_name() yields None for absolute paths or `..` traversal.
        let Some(relative_path) = entry.enclosed_name() else {
            return Err(format!(
                "curated plugins zip entry `{}` escapes extraction root",
                entry.name()
            ));
        };
        // Skip the zipball's root directory component; entries without a
        // leading normal component are ignored entirely.
        let mut components = relative_path.components();
        let Some(std::path::Component::Normal(_)) = components.next() else {
            continue;
        };
        // Rebuild the remaining path from normal components only.
        let output_relative = components.fold(PathBuf::new(), |mut path, component| {
            if let std::path::Component::Normal(segment) = component {
                path.push(segment);
            }
            path
        });
        if output_relative.as_os_str().is_empty() {
            continue;
        }
        let output_path = destination.join(&output_relative);
        if entry.is_dir() {
            std::fs::create_dir_all(&output_path).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    output_path.display()
                )
            })?;
            continue;
        }
        // Parent directories may not have had their own zip entries.
        if let Some(parent) = output_path.parent() {
            std::fs::create_dir_all(parent).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    parent.display()
                )
            })?;
        }
        let mut output = std::fs::File::create(&output_path).map_err(|err| {
            format!(
                "failed to create curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        std::io::copy(&mut entry, &mut output).map_err(|err| {
            format!(
                "failed to write curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        // Restore any unix permission bits recorded in the archive.
        apply_zip_permissions(&entry, &output_path)?;
    }
    Ok(())
}
/// Mirror the unix permission bits recorded in the zip entry onto the
/// extracted file; entries without a recorded mode are left untouched.
#[cfg(unix)]
fn apply_zip_permissions(entry: &zip::read::ZipFile<'_>, output_path: &Path) -> Result<(), String> {
    use std::os::unix::fs::PermissionsExt;
    let Some(mode) = entry.unix_mode() else {
        return Ok(());
    };
    std::fs::set_permissions(output_path, std::fs::Permissions::from_mode(mode)).map_err(|err| {
        format!(
            "failed to set permissions on curated plugins file {}: {err}",
            output_path.display()
        )
    })
}
/// No-op on non-unix targets, where zip unix permission bits do not apply.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), String> {
    Ok(())
}
#[cfg(test)]
#[path = "startup_sync_tests.rs"]
mod tests;

View File

@@ -0,0 +1,383 @@
use super::*;
use crate::auth::CodexAuth;
use crate::config::CONFIG_TOML_FILE;
use crate::plugins::test_support::TEST_CURATED_PLUGIN_SHA;
use crate::plugins::test_support::write_curated_plugin_sha;
use crate::plugins::test_support::write_file;
use crate::plugins::test_support::write_openai_curated_marketplace;
use pretty_assertions::assert_eq;
use std::io::Write;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
use zip::ZipWriter;
use zip::write::SimpleFileOptions;
// The curated plugins repo is checked out under `<codex_home>/.tmp/plugins`.
#[test]
fn curated_plugins_repo_path_uses_codex_home_tmp_dir() {
    let tmp = tempdir().expect("tempdir");
    assert_eq!(
        curated_plugins_repo_path(tmp.path()),
        tmp.path().join(".tmp/plugins")
    );
}
// The cached sha file is read back with surrounding whitespace trimmed.
#[test]
fn read_curated_plugins_sha_reads_trimmed_sha_file() {
    let tmp = tempdir().expect("tempdir");
    std::fs::create_dir_all(tmp.path().join(".tmp")).expect("create tmp");
    std::fs::write(tmp.path().join(".tmp/plugins.sha"), "abc123\n").expect("write sha");
    assert_eq!(
        read_curated_plugins_sha(tmp.path()).as_deref(),
        Some("abc123")
    );
}
#[cfg(unix)]
#[test]
fn sync_openai_plugins_repo_prefers_git_when_available() {
use std::os::unix::fs::PermissionsExt;
let tmp = tempdir().expect("tempdir");
let bin_dir = tempfile::Builder::new()
.prefix("fake-git-")
.tempdir()
.expect("tempdir");
let git_path = bin_dir.path().join("git");
let sha = "0123456789abcdef0123456789abcdef01234567";
std::fs::write(
&git_path,
format!(
r#"#!/bin/sh
if [ "$1" = "ls-remote" ]; then
printf '%s\tHEAD\n' "{sha}"
exit 0
fi
if [ "$1" = "clone" ]; then
dest="$5"
mkdir -p "$dest/.git" "$dest/.agents/plugins" "$dest/plugins/gmail/.codex-plugin"
cat > "$dest/.agents/plugins/marketplace.json" <<'EOF'
{{"name":"openai-curated","plugins":[{{"name":"gmail","source":{{"source":"local","path":"./plugins/gmail"}}}}]}}
EOF
printf '%s\n' '{{"name":"gmail"}}' > "$dest/plugins/gmail/.codex-plugin/plugin.json"
exit 0
fi
if [ "$1" = "-C" ] && [ "$3" = "rev-parse" ] && [ "$4" = "HEAD" ]; then
printf '%s\n' "{sha}"
exit 0
fi
echo "unexpected git invocation: $@" >&2
exit 1
"#
),
)
.expect("write fake git");
let mut permissions = std::fs::metadata(&git_path)
.expect("metadata")
.permissions();
permissions.set_mode(0o755);
std::fs::set_permissions(&git_path, permissions).expect("chmod");
let synced_sha = sync_openai_plugins_repo_with_transport_overrides(
tmp.path(),
git_path.to_str().expect("utf8 path"),
"http://127.0.0.1:9",
)
.expect("git sync should succeed");
assert_eq!(synced_sha, sha);
assert!(curated_plugins_repo_path(tmp.path()).join(".git").is_dir());
assert!(
curated_plugins_repo_path(tmp.path())
.join(".agents/plugins/marketplace.json")
.is_file()
);
assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
#[tokio::test]
async fn sync_openai_plugins_repo_falls_back_to_http_when_git_is_unavailable() {
let tmp = tempdir().expect("tempdir");
let server = MockServer::start().await;
let sha = "0123456789abcdef0123456789abcdef01234567";
Mock::given(method("GET"))
.and(path("/repos/openai/plugins"))
.respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path("/repos/openai/plugins/git/ref/heads/main"))
.respond_with(
ResponseTemplate::new(200)
.set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
)
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path(format!("/repos/openai/plugins/zipball/{sha}")))
.respond_with(
ResponseTemplate::new(200)
.insert_header("content-type", "application/zip")
.set_body_bytes(curated_repo_zipball_bytes(sha)),
)
.mount(&server)
.await;
let server_uri = server.uri();
let tmp_path = tmp.path().to_path_buf();
let synced_sha = tokio::task::spawn_blocking(move || {
sync_openai_plugins_repo_with_transport_overrides(
tmp_path.as_path(),
"missing-git-for-test",
&server_uri,
)
})
.await
.expect("sync task should join")
.expect("fallback sync should succeed");
let repo_path = curated_plugins_repo_path(tmp.path());
assert_eq!(synced_sha, sha);
assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
assert!(
repo_path
.join("plugins/gmail/.codex-plugin/plugin.json")
.is_file()
);
assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
#[cfg(unix)]
#[tokio::test]
async fn sync_openai_plugins_repo_falls_back_to_http_when_git_sync_fails() {
use std::os::unix::fs::PermissionsExt;
let tmp = tempdir().expect("tempdir");
let bin_dir = tempfile::Builder::new()
.prefix("fake-git-fail-")
.tempdir()
.expect("tempdir");
let git_path = bin_dir.path().join("git");
let sha = "0123456789abcdef0123456789abcdef01234567";
std::fs::write(
&git_path,
r#"#!/bin/sh
echo "simulated git failure" >&2
exit 1
"#,
)
.expect("write fake git");
let mut permissions = std::fs::metadata(&git_path)
.expect("metadata")
.permissions();
permissions.set_mode(0o755);
std::fs::set_permissions(&git_path, permissions).expect("chmod");
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/repos/openai/plugins"))
.respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path("/repos/openai/plugins/git/ref/heads/main"))
.respond_with(
ResponseTemplate::new(200)
.set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
)
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path(format!("/repos/openai/plugins/zipball/{sha}")))
.respond_with(
ResponseTemplate::new(200)
.insert_header("content-type", "application/zip")
.set_body_bytes(curated_repo_zipball_bytes(sha)),
)
.mount(&server)
.await;
let server_uri = server.uri();
let tmp_path = tmp.path().to_path_buf();
let synced_sha = tokio::task::spawn_blocking(move || {
sync_openai_plugins_repo_with_transport_overrides(
tmp_path.as_path(),
git_path.to_str().expect("utf8 path"),
&server_uri,
)
})
.await
.expect("sync task should join")
.expect("fallback sync should succeed");
let repo_path = curated_plugins_repo_path(tmp.path());
assert_eq!(synced_sha, sha);
assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
assert!(
repo_path
.join("plugins/gmail/.codex-plugin/plugin.json")
.is_file()
);
assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
#[tokio::test]
async fn sync_openai_plugins_repo_skips_archive_download_when_sha_matches() {
let tmp = tempdir().expect("tempdir");
let repo_path = curated_plugins_repo_path(tmp.path());
std::fs::create_dir_all(repo_path.join(".agents/plugins")).expect("create repo");
std::fs::write(
repo_path.join(".agents/plugins/marketplace.json"),
r#"{"name":"openai-curated","plugins":[]}"#,
)
.expect("write marketplace");
std::fs::create_dir_all(tmp.path().join(".tmp")).expect("create tmp");
let sha = "fedcba9876543210fedcba9876543210fedcba98";
std::fs::write(tmp.path().join(".tmp/plugins.sha"), format!("{sha}\n")).expect("write sha");
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/repos/openai/plugins"))
.respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path("/repos/openai/plugins/git/ref/heads/main"))
.respond_with(
ResponseTemplate::new(200)
.set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
)
.mount(&server)
.await;
let server_uri = server.uri();
let tmp_path = tmp.path().to_path_buf();
tokio::task::spawn_blocking(move || {
sync_openai_plugins_repo_with_transport_overrides(
tmp_path.as_path(),
"missing-git-for-test",
&server_uri,
)
})
.await
.expect("sync task should join")
.expect("sync should succeed");
assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
}
#[tokio::test]
async fn startup_remote_plugin_sync_writes_marker_and_reconciles_state() {
let tmp = tempdir().expect("tempdir");
let curated_root = curated_plugins_repo_path(tmp.path());
write_openai_curated_marketplace(&curated_root, &["linear"]);
write_curated_plugin_sha(tmp.path());
write_file(
&tmp.path().join(CONFIG_TOML_FILE),
r#"[features]
plugins = true
[plugins."linear@openai-curated"]
enabled = false
"#,
);
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/backend-api/plugins/list"))
.and(header("authorization", "Bearer Access Token"))
.and(header("chatgpt-account-id", "account_id"))
.respond_with(ResponseTemplate::new(200).set_body_string(
r#"[
{"id":"1","name":"linear","marketplace_name":"openai-curated","version":"1.0.0","enabled":true}
]"#,
))
.mount(&server)
.await;
let mut config = crate::plugins::test_support::load_plugins_config(tmp.path()).await;
config.chatgpt_base_url = format!("{}/backend-api/", server.uri());
let manager = Arc::new(PluginsManager::new(tmp.path().to_path_buf()));
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
start_startup_remote_plugin_sync_once(
Arc::clone(&manager),
tmp.path().to_path_buf(),
config,
auth_manager,
);
let marker_path = tmp.path().join(STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE);
tokio::time::timeout(Duration::from_secs(5), async {
loop {
if marker_path.is_file() {
break;
}
tokio::time::sleep(Duration::from_millis(10)).await;
}
})
.await
.expect("marker should be written");
assert!(
tmp.path()
.join(format!(
"plugins/cache/openai-curated/linear/{TEST_CURATED_PLUGIN_SHA}"
))
.is_dir()
);
let config =
std::fs::read_to_string(tmp.path().join(CONFIG_TOML_FILE)).expect("config should exist");
assert!(config.contains(r#"[plugins."linear@openai-curated"]"#));
assert!(config.contains("enabled = true"));
let marker_contents = std::fs::read_to_string(marker_path).expect("marker should be readable");
assert_eq!(marker_contents, "ok\n");
}
fn curated_repo_zipball_bytes(sha: &str) -> Vec<u8> {
let cursor = std::io::Cursor::new(Vec::new());
let mut writer = ZipWriter::new(cursor);
let options = SimpleFileOptions::default();
let root = format!("openai-plugins-{sha}");
writer
.start_file(format!("{root}/.agents/plugins/marketplace.json"), options)
.expect("start marketplace entry");
writer
.write_all(
br#"{
"name": "openai-curated",
"plugins": [
{
"name": "gmail",
"source": {
"source": "local",
"path": "./plugins/gmail"
}
}
]
}"#,
)
.expect("write marketplace");
writer
.start_file(
format!("{root}/plugins/gmail/.codex-plugin/plugin.json"),
options,
)
.expect("start plugin manifest entry");
writer
.write_all(br#"{"name":"gmail"}"#)
.expect("write plugin manifest");
writer.finish().expect("finish zip writer").into_inner()
}

View File

@@ -13,6 +13,7 @@ assert_cmd = { workspace = true }
base64 = { workspace = true }
codex-arg0 = { workspace = true }
codex-core = { workspace = true }
codex-exec-server = { workspace = true }
codex-features = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }

View File

@@ -289,6 +289,29 @@ pub fn sandbox_network_env_var() -> &'static str {
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
}
/// Environment variable that opts integration tests into running against a
/// remote executor inside a docker container.
const REMOTE_ENV_ENV_VAR: &str = "CODEX_TEST_REMOTE_ENV";

/// Configuration for the remote test environment.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct RemoteEnvConfig {
    // Name of the docker container hosting the remote executor.
    pub container_name: String,
}
/// Read `CODEX_TEST_REMOTE_ENV` and, when it is set, return the remote test
/// environment configuration.
///
/// Returns `None` (after logging a skip notice) when the variable is unset so
/// callers can fall back to a local environment. Panics when the variable is
/// set but empty or not valid UTF-8, since that indicates a misconfigured CI
/// job rather than an intentional skip.
pub fn get_remote_test_env() -> Option<RemoteEnvConfig> {
    // Read the variable exactly once: the previous probe-then-read pattern
    // could panic with a misleading "must be set" message if the variable was
    // unset (or non-UTF-8) between the two lookups.
    let Some(raw_value) = std::env::var_os(REMOTE_ENV_ENV_VAR) else {
        eprintln!("Skipping test because {REMOTE_ENV_ENV_VAR} is not set.");
        return None;
    };
    let container_name = raw_value
        .into_string()
        .unwrap_or_else(|_| panic!("{REMOTE_ENV_ENV_VAR} must be valid UTF-8"));
    assert!(
        !container_name.trim().is_empty(),
        "{REMOTE_ENV_ENV_VAR} must not be empty"
    );
    Some(RemoteEnvConfig { container_name })
}
/// Wrap `command` in the current user's default shell invocation (as a login
/// shell), producing the argv needed to execute it.
pub fn format_with_current_shell(command: &str) -> Vec<String> {
    codex_core::shell::default_user_shell().derive_exec_args(command, /*use_login_shell*/ true)
}

View File

@@ -1,10 +1,16 @@
use std::mem::swap;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::Instant;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_core::CodexAuth;
use codex_core::CodexThread;
use codex_core::ModelProviderInfo;
@@ -14,6 +20,8 @@ use codex_core::config::Config;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::shell::Shell;
use codex_core::shell::get_shell_by_model_provided_path;
use codex_exec_server::CreateDirectoryOptions;
use codex_exec_server::ExecutorFileSystem;
use codex_features::Feature;
use codex_protocol::config_types::ServiceTier;
use codex_protocol::openai_models::ModelsResponse;
@@ -24,10 +32,13 @@ use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionConfiguredEvent;
use codex_protocol::protocol::SessionSource;
use codex_protocol::user_input::UserInput;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde_json::Value;
use tempfile::TempDir;
use wiremock::MockServer;
use crate::RemoteEnvConfig;
use crate::get_remote_test_env;
use crate::load_default_config_for_test;
use crate::responses::WebSocketTestServer;
use crate::responses::output_value_to_text;
@@ -41,6 +52,254 @@ use wiremock::matchers::path_regex;
type ConfigMutator = dyn FnOnce(&mut Config) + Send;
type PreBuildHook = dyn FnOnce(&Path) + Send + 'static;
const TEST_MODEL_WITH_EXPERIMENTAL_TOOLS: &str = "test-gpt-5.1-codex";
const REMOTE_EXEC_SERVER_START_TIMEOUT: Duration = Duration::from_secs(5);
const REMOTE_EXEC_SERVER_POLL_INTERVAL: Duration = Duration::from_millis(25);
static REMOTE_EXEC_SERVER_INSTANCE_COUNTER: AtomicU64 = AtomicU64::new(0);
/// Handle to a codex-exec-server process running inside the remote docker
/// container; kills the process and removes its files on drop.
#[derive(Debug)]
struct RemoteExecServerProcess {
    // Docker container the server runs in.
    container_name: String,
    // PID of the server process inside the container.
    pid: u32,
    // Path of the uploaded server binary inside the container.
    remote_exec_server_path: String,
    // File capturing the server's stdout (first line holds the listen URL).
    stdout_path: String,
    // Additional container paths to remove during teardown.
    cleanup_paths: Vec<String>,
}

impl Drop for RemoteExecServerProcess {
    fn drop(&mut self) {
        let cleanup_paths = self.cleanup_paths.join(" ");
        let cleanup_paths_script = if cleanup_paths.is_empty() {
            String::new()
        } else {
            format!("rm -rf {cleanup_paths}; ")
        };
        // Kill the server if it is still alive, then remove the registered
        // cleanup paths, the uploaded binary, and the stdout capture.
        let script = format!(
            "if kill -0 {pid} 2>/dev/null; then kill {pid}; fi; {cleanup_paths_script}rm -f {remote_exec_server_path} {stdout_path}",
            pid = self.pid,
            cleanup_paths_script = cleanup_paths_script,
            remote_exec_server_path = self.remote_exec_server_path,
            stdout_path = self.stdout_path
        );
        // Best effort: failures are ignored since the test is tearing down.
        let _ = docker_command_capture_stdout(["exec", &self.container_name, "sh", "-lc", &script]);
    }
}

impl RemoteExecServerProcess {
    /// Register an extra container path to delete when this handle drops.
    fn register_cleanup_path(&mut self, path: &Path) {
        self.cleanup_paths.push(path.display().to_string());
    }
}
/// Executor environment used by a test: either a local temp-dir backed
/// environment or one connected to a remote exec-server in a container.
#[derive(Debug)]
pub struct TestEnv {
    environment: codex_exec_server::Environment,
    // Working directory for the test (local temp dir or remote path).
    cwd: PathBuf,
    // Keeps the local cwd temp dir alive for local environments.
    _local_cwd_temp_dir: Option<TempDir>,
    // Keeps the remote server alive (and cleaned up) for remote environments.
    _remote_exec_server_process: Option<RemoteExecServerProcess>,
}

impl TestEnv {
    /// Create a local environment backed by a fresh temporary directory.
    pub async fn local() -> Result<Self> {
        let local_cwd_temp_dir = TempDir::new()?;
        let cwd = local_cwd_temp_dir.path().to_path_buf();
        let environment =
            codex_exec_server::Environment::create(/*experimental_exec_server_url*/ None).await?;
        Ok(Self {
            environment,
            cwd,
            _local_cwd_temp_dir: Some(local_cwd_temp_dir),
            _remote_exec_server_process: None,
        })
    }

    /// The underlying executor environment.
    pub fn environment(&self) -> &codex_exec_server::Environment {
        &self.environment
    }

    /// Websocket URL of the exec-server, when one is configured.
    pub fn experimental_exec_server_url(&self) -> Option<&str> {
        self.environment.experimental_exec_server_url()
    }
}
/// Build the executor environment for a test: remote (docker-backed) when
/// `CODEX_TEST_REMOTE_ENV` is set, otherwise a local temp-dir environment.
pub async fn test_env() -> Result<TestEnv> {
    match get_remote_test_env() {
        Some(remote_env) => {
            let mut remote_process = start_remote_exec_server(&remote_env)?;
            // Connect via the container's IP; the server binds 0.0.0.0.
            let remote_ip = remote_container_ip(&remote_env.container_name)?;
            let websocket_url = rewrite_websocket_host(&remote_process.listen_url, &remote_ip)?;
            let environment = codex_exec_server::Environment::create(Some(websocket_url)).await?;
            // Create the per-test working directory inside the container and
            // make sure it is deleted when the server handle is dropped.
            let cwd = remote_aware_cwd_path();
            environment
                .get_filesystem()
                .create_directory(
                    &absolute_path(&cwd)?,
                    CreateDirectoryOptions { recursive: true },
                )
                .await?;
            remote_process.process.register_cleanup_path(&cwd);
            Ok(TestEnv {
                environment,
                cwd,
                _local_cwd_temp_dir: None,
                _remote_exec_server_process: Some(remote_process.process),
            })
        }
        None => TestEnv::local().await,
    }
}
/// Result of launching the remote exec-server: the process handle plus the
/// URL it reported listening on.
struct RemoteExecServerStart {
    process: RemoteExecServerProcess,
    listen_url: String,
}
/// Copy the locally built codex-exec-server binary into the container and
/// launch it in the background, returning the process handle and the URL it
/// listens on.
fn start_remote_exec_server(remote_env: &RemoteEnvConfig) -> Result<RemoteExecServerStart> {
    let container_name = remote_env.container_name.as_str();
    // Unique paths so concurrent tests in the same container do not collide.
    let instance_id = remote_exec_server_instance_id();
    let remote_exec_server_path = format!("/tmp/codex-exec-server-{instance_id}");
    let stdout_path = format!("/tmp/codex-exec-server-{instance_id}.stdout");
    let local_binary = codex_utils_cargo_bin::cargo_bin("codex-exec-server")
        .context("resolve codex-exec-server binary")?;
    let local_binary = local_binary.to_string_lossy().to_string();
    let remote_binary = format!("{container_name}:{remote_exec_server_path}");
    docker_command_success(["cp", &local_binary, &remote_binary])?;
    docker_command_success([
        "exec",
        container_name,
        "chmod",
        "+x",
        &remote_exec_server_path,
    ])?;
    // Start on an OS-assigned port; stdout (captured in stdout_path) reports
    // the actual listen URL, and `echo $!` yields the server's PID.
    let start_script = format!(
        "rm -f {stdout_path}; \
         nohup {remote_exec_server_path} --listen ws://0.0.0.0:0 > {stdout_path} 2>&1 & \
         echo $!"
    );
    let pid_output =
        docker_command_capture_stdout(["exec", container_name, "sh", "-lc", &start_script])?;
    let pid = pid_output
        .trim()
        .parse::<u32>()
        .with_context(|| format!("parse remote exec-server PID from {pid_output:?}"))?;
    let listen_url = wait_for_remote_listen_url(container_name, &stdout_path)?;
    Ok(RemoteExecServerStart {
        process: RemoteExecServerProcess {
            container_name: container_name.to_string(),
            pid,
            remote_exec_server_path,
            stdout_path,
            cleanup_paths: Vec::new(),
        },
        listen_url,
    })
}
/// Per-instance working directory path used inside the remote container.
fn remote_aware_cwd_path() -> PathBuf {
    PathBuf::from(format!(
        "/tmp/codex-core-test-cwd-{}",
        remote_exec_server_instance_id()
    ))
}
/// Poll the server's captured stdout until its first line contains the
/// `ws://` listen URL, failing after REMOTE_EXEC_SERVER_START_TIMEOUT.
fn wait_for_remote_listen_url(container_name: &str, stdout_path: &str) -> Result<String> {
    let deadline = Instant::now() + REMOTE_EXEC_SERVER_START_TIMEOUT;
    loop {
        // `|| true` keeps the probe successful while the file does not exist yet.
        let line = docker_command_capture_stdout([
            "exec",
            container_name,
            "sh",
            "-lc",
            &format!("head -n 1 {stdout_path} 2>/dev/null || true"),
        ])?;
        let listen_url = line.trim();
        if listen_url.starts_with("ws://") {
            return Ok(listen_url.to_string());
        }
        if Instant::now() >= deadline {
            return Err(anyhow!(
                "timed out waiting for remote exec-server listen URL in container `{container_name}` after {REMOTE_EXEC_SERVER_START_TIMEOUT:?}"
            ));
        }
        std::thread::sleep(REMOTE_EXEC_SERVER_POLL_INTERVAL);
    }
}
/// Id unique per process and per call (pid plus a monotonically increasing
/// counter), used to namespace remote artifacts so tests do not collide.
fn remote_exec_server_instance_id() -> String {
    let instance = REMOTE_EXEC_SERVER_INSTANCE_COUNTER.fetch_add(1, Ordering::Relaxed);
    format!("{}-{instance}", std::process::id())
}
/// Look up the container's IP address via `docker inspect`; errors when the
/// container reports no address (e.g. no attached network).
fn remote_container_ip(container_name: &str) -> Result<String> {
    let ip = docker_command_capture_stdout([
        "inspect",
        "-f",
        "{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}",
        container_name,
    ])?;
    let ip = ip.trim();
    if ip.is_empty() {
        return Err(anyhow!(
            "container `{container_name}` has no IP address; cannot connect to remote exec-server"
        ));
    }
    Ok(ip.to_string())
}
/// Swap the host portion of a `ws://IP:PORT` listen URL for `host`, keeping
/// the original port. Errors on any URL not matching that shape.
fn rewrite_websocket_host(listen_url: &str, host: &str) -> Result<String> {
    let malformed =
        || anyhow!("unexpected websocket listen URL `{listen_url}`; expected ws://IP:PORT");
    let address = listen_url.strip_prefix("ws://").ok_or_else(malformed)?;
    let (_, port) = address.rsplit_once(':').ok_or_else(malformed)?;
    Ok(format!("ws://{host}:{port}"))
}
/// Run `docker <args>` and require a zero exit status, surfacing stdout and
/// stderr in the error message on failure.
fn docker_command_success<const N: usize>(args: [&str; N]) -> Result<()> {
    let output = Command::new("docker")
        .args(args)
        .output()
        .with_context(|| format!("run docker {:?}", args))?;
    if output.status.success() {
        Ok(())
    } else {
        Err(anyhow!(
            "docker {:?} failed: stdout={} stderr={}",
            args,
            String::from_utf8_lossy(&output.stdout).trim(),
            String::from_utf8_lossy(&output.stderr).trim()
        ))
    }
}
/// Run `docker <args>` and return its stdout as UTF-8, failing on a non-zero
/// exit status (with stdout/stderr included in the error) or non-UTF-8 output.
fn docker_command_capture_stdout<const N: usize>(args: [&str; N]) -> Result<String> {
    let output = Command::new("docker")
        .args(args)
        .output()
        .with_context(|| format!("run docker {:?}", args))?;
    if !output.status.success() {
        return Err(anyhow!(
            "docker {:?} failed: stdout={} stderr={}",
            args,
            String::from_utf8_lossy(&output.stdout).trim(),
            String::from_utf8_lossy(&output.stderr).trim()
        ));
    }
    String::from_utf8(output.stdout).context("docker stdout must be utf-8")
}
/// Convert `path` into an `AbsolutePathBuf`, mapping conversion failures to a
/// descriptive error.
fn absolute_path(path: &Path) -> Result<AbsolutePathBuf> {
    AbsolutePathBuf::try_from(path.to_path_buf())
        .map_err(|err| anyhow!("invalid absolute path {}: {err}", path.display()))
}
/// A collection of different ways the model can output an apply_patch call
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -124,6 +383,24 @@ impl TestCodexBuilder {
Box::pin(self.build_with_home(server, home, /*resume_from*/ None)).await
}
/// Build a TestCodex wired to the environment chosen by `test_env()` —
/// remote (docker-backed) when CODEX_TEST_REMOTE_ENV is set, local
/// otherwise — injecting the environment's exec-server URL and cwd into the
/// config before building.
pub async fn build_remote_aware(
    &mut self,
    server: &wiremock::MockServer,
) -> anyhow::Result<TestCodex> {
    let test_env = test_env().await?;
    let experimental_exec_server_url =
        test_env.experimental_exec_server_url().map(str::to_owned);
    let cwd = test_env.cwd.to_path_buf();
    self.config_mutators.push(Box::new(move |config| {
        config.experimental_exec_server_url = experimental_exec_server_url;
        config.cwd = cwd;
    }));
    let mut test = self.build(server).await?;
    // Keep the environment alive for the lifetime of the TestCodex.
    test._test_env = test_env;
    Ok(test)
}
pub async fn build_with_streaming_server(
&mut self,
server: &StreamingSseServer,
@@ -176,7 +453,8 @@ impl TestCodexBuilder {
) -> anyhow::Result<TestCodex> {
let base_url = format!("{}/v1", server.uri());
let (config, cwd) = self.prepare_config(base_url, &home).await?;
Box::pin(self.build_from_config(config, cwd, home, resume_from)).await
Box::pin(self.build_from_config(config, cwd, home, resume_from, TestEnv::local().await?))
.await
}
async fn build_with_home_and_base_url(
@@ -186,7 +464,8 @@ impl TestCodexBuilder {
resume_from: Option<PathBuf>,
) -> anyhow::Result<TestCodex> {
let (config, cwd) = self.prepare_config(base_url, &home).await?;
Box::pin(self.build_from_config(config, cwd, home, resume_from)).await
Box::pin(self.build_from_config(config, cwd, home, resume_from, TestEnv::local().await?))
.await
}
async fn build_from_config(
@@ -195,6 +474,7 @@ impl TestCodexBuilder {
cwd: Arc<TempDir>,
home: Arc<TempDir>,
resume_from: Option<PathBuf>,
test_env: TestEnv,
) -> anyhow::Result<TestCodex> {
let auth = self.auth.clone();
let thread_manager = if config.model_catalog.is_some() {
@@ -258,6 +538,7 @@ impl TestCodexBuilder {
codex: new_conversation.thread,
session_configured: new_conversation.session_configured,
thread_manager,
_test_env: test_env,
})
}
@@ -354,6 +635,7 @@ pub struct TestCodex {
pub session_configured: SessionConfiguredEvent,
pub config: Config,
pub thread_manager: Arc<ThreadManager>,
_test_env: TestEnv,
}
impl TestCodex {
@@ -369,6 +651,14 @@ impl TestCodex {
self.cwd_path().join(rel)
}
/// Borrow the executor environment this test was built with.
pub fn executor_environment(&self) -> &TestEnv {
    &self._test_env
}
/// Filesystem handle for the executor environment; presumably this operates
/// inside the remote container when the env is remote — verify against the
/// `TestEnv` implementation.
pub fn fs(&self) -> Arc<dyn ExecutorFileSystem> {
    self._test_env.environment().get_filesystem()
}
pub async fn submit_turn(&self, prompt: &str) -> Result<()> {
self.submit_turn_with_policies(
prompt,
@@ -431,7 +721,7 @@ impl TestCodex {
text_elements: Vec::new(),
}],
final_output_json_schema: None,
cwd: self.cwd.path().to_path_buf(),
cwd: self.config.cwd.clone(),
approval_policy,
sandbox_policy,
model: session_model,

View File

@@ -2277,14 +2277,9 @@ async fn code_mode_can_call_hidden_dynamic_tools() -> Result<()> {
false,
)
.await?;
let test = TestCodex {
home: base_test.home,
cwd: base_test.cwd,
codex: new_thread.thread,
session_configured: new_thread.session_configured,
config: base_test.config,
thread_manager: base_test.thread_manager,
};
let mut test = base_test;
test.codex = new_thread.thread;
test.session_configured = new_thread.session_configured;
let code = r#"
import { ALL_TOOLS, hidden_dynamic_tool } from "tools.js";

View File

@@ -103,6 +103,7 @@ mod prompt_caching;
mod quota_exceeded;
mod read_file;
mod realtime_conversation;
mod remote_env;
mod remote_models;
mod request_compression;
#[cfg(not(target_os = "windows"))]

View File

@@ -0,0 +1,57 @@
use anyhow::Result;
use codex_exec_server::RemoveOptions;
use codex_utils_absolute_path::AbsolutePathBuf;
use core_test_support::get_remote_test_env;
use core_test_support::test_codex::test_env;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
/// Smoke test for the remote executor filesystem: write a file, read it back,
/// and remove it through the `ExecutorFileSystem` handle. Returns Ok (skips)
/// when no remote environment is configured.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn remote_test_env_can_connect_and_use_filesystem() -> Result<()> {
    // Skip silently unless a remote env (docker container) is configured.
    let Some(_remote_env) = get_remote_test_env() else {
        return Ok(());
    };
    let test_env = test_env().await?;
    let file_system = test_env.environment().get_filesystem();
    // Unique per-process/per-run path to avoid collisions between test runs.
    let file_path = remote_test_file_path();
    let file_path_abs = absolute_path(file_path.clone())?;
    let payload = b"remote-test-env-ok".to_vec();
    file_system
        .write_file(&file_path_abs, payload.clone())
        .await?;
    // Round-trip: the read-back contents must match what we wrote.
    let actual = file_system.read_file(&file_path_abs).await?;
    assert_eq!(actual, payload);
    // Clean up; `force: true` tolerates the file already being gone.
    file_system
        .remove(
            &file_path_abs,
            RemoveOptions {
                recursive: false,
                force: true,
            },
        )
        .await?;
    Ok(())
}
/// Convert an owned `PathBuf` into an `AbsolutePathBuf`, surfacing failures
/// as an `anyhow` error that names the offending path.
fn absolute_path(path: PathBuf) -> Result<AbsolutePathBuf> {
    // Clone so the original `path` is still available for the error message.
    AbsolutePathBuf::try_from(path.clone())
        .map_err(|err| anyhow::anyhow!("invalid absolute path {}: {err}", path.display()))
}
/// Unique scratch path under `/tmp`, keyed by the current PID and a
/// nanosecond timestamp so concurrent runs do not collide. A clock before the
/// Unix epoch degrades to a `0` timestamp rather than failing.
fn remote_test_file_path() -> PathBuf {
    let stamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_or(0, |elapsed| elapsed.as_nanos());
    let pid = std::process::id();
    PathBuf::from(format!("/tmp/codex-remote-test-env-{pid}-{stamp}.txt"))
}

View File

@@ -3,6 +3,7 @@
use base64::Engine;
use base64::engine::general_purpose::STANDARD as BASE64_STANDARD;
use codex_core::CodexAuth;
use codex_exec_server::CreateDirectoryOptions;
use codex_features::Feature;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::openai_models::ConfigShellToolType;
@@ -32,12 +33,16 @@ use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use core_test_support::wait_for_event_with_timeout;
use image::DynamicImage;
use image::GenericImageView;
use image::ImageBuffer;
use image::Rgba;
use image::load_from_memory;
use pretty_assertions::assert_eq;
use serde_json::Value;
use std::io::Cursor;
use std::path::Path;
use std::path::PathBuf;
use tokio::time::Duration;
use wiremock::BodyPrintLimit;
use wiremock::MockServer;
@@ -73,6 +78,11 @@ fn find_image_message(body: &Value) -> Option<&Value> {
image_messages(body).into_iter().next()
}
/// Convert `path` into an `AbsolutePathBuf`, mapping conversion failures into
/// an `anyhow` error that names the offending path.
fn absolute_path(path: &Path) -> anyhow::Result<codex_utils_absolute_path::AbsolutePathBuf> {
    codex_utils_absolute_path::AbsolutePathBuf::try_from(path.to_path_buf())
        .map_err(|err| anyhow::anyhow!("invalid absolute path {}: {err}", path.display()))
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn user_turn_with_local_image_attaches_image() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
@@ -171,23 +181,37 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
let mut builder = test_codex();
let test = builder.build_remote_aware(&server).await?;
let TestCodex {
codex,
cwd,
session_configured,
config,
..
} = test_codex().build(&server).await?;
} = &test;
let cwd = config.cwd.clone();
let rel_path = PathBuf::from("assets/example.png");
let abs_path = cwd.join(&rel_path);
let abs_path_absolute = absolute_path(&abs_path)?;
let assets_dir = cwd.join("assets");
let file_system = test.fs();
let rel_path = "assets/example.png";
let abs_path = cwd.path().join(rel_path);
if let Some(parent) = abs_path.parent() {
std::fs::create_dir_all(parent)?;
}
let original_width = 2304;
let original_height = 864;
let image = ImageBuffer::from_pixel(original_width, original_height, Rgba([255u8, 0, 0, 255]));
image.save(&abs_path)?;
let mut cursor = Cursor::new(Vec::new());
DynamicImage::ImageRgba8(image).write_to(&mut cursor, image::ImageFormat::Png)?;
file_system
.create_directory(
&absolute_path(&assets_dir)?,
CreateDirectoryOptions { recursive: true },
)
.await?;
file_system
.write_file(&abs_path_absolute, cursor.into_inner())
.await?;
let call_id = "view-image-call";
let arguments = serde_json::json!({ "path": rel_path }).to_string();
@@ -214,7 +238,7 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
text_elements: Vec::new(),
}],
final_output_json_schema: None,
cwd: cwd.path().to_path_buf(),
cwd: cwd.clone(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: session_model,
@@ -228,7 +252,7 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
let mut tool_event = None;
wait_for_event_with_timeout(
&codex,
codex,
|event| match event {
EventMsg::ViewImageToolCall(_) => {
tool_event = Some(event.clone());

View File

@@ -287,11 +287,37 @@ fn profile_read_roots(user_profile: &Path) -> Vec<PathBuf> {
.collect()
}
/// Detect whether `path` lies inside a Microsoft Store package install, i.e.
/// has a `WindowsApps` ancestor directory whose parent is `Program Files` or
/// `Program Files (x86)`. All name comparisons are ASCII case-insensitive.
fn is_protected_windowsapps_package_path(path: &Path) -> bool {
    path.ancestors().any(|ancestor| {
        let named_windows_apps = ancestor
            .file_name()
            .is_some_and(|name| name.to_string_lossy().eq_ignore_ascii_case("WindowsApps"));
        if !named_windows_apps {
            return false;
        }
        // Only a WindowsApps directory directly under a Program Files root
        // counts as the protected Store location.
        ancestor
            .parent()
            .and_then(Path::file_name)
            .map(|parent| parent.to_string_lossy())
            .is_some_and(|parent| {
                parent.eq_ignore_ascii_case("Program Files")
                    || parent.eq_ignore_ascii_case("Program Files (x86)")
            })
    })
}
fn gather_helper_read_roots(codex_home: &Path) -> Vec<PathBuf> {
let mut roots = Vec::new();
if let Ok(exe) = std::env::current_exe() {
if let Some(dir) = exe.parent() {
roots.push(dir.to_path_buf());
if !is_protected_windowsapps_package_path(dir) {
roots.push(dir.to_path_buf());
}
}
}
let helper_dir = helper_bin_dir(codex_home);
@@ -723,6 +749,30 @@ mod tests {
assert_eq!(vec![missing_profile], roots);
}
#[test]
fn protected_windowsapps_package_paths_are_excluded_from_helper_roots() {
    // A path shaped like a Microsoft Store install root must be recognized.
    let tmp = TempDir::new().expect("tempdir");
    let mut store_path = tmp.path().to_path_buf();
    for part in [
        "Program Files",
        "WindowsApps",
        "OpenAI.Codex_26.313.5234.0_x64__2p2nqsd0c76g0",
        "app",
        "resources",
    ] {
        store_path.push(part);
    }
    fs::create_dir_all(&store_path).expect("create fake store path");
    assert!(is_protected_windowsapps_package_path(&store_path));
}
#[test]
fn non_store_paths_are_not_treated_as_windowsapps_packages() {
    // An ordinary directory layout must not trip the WindowsApps detection.
    let tmp = TempDir::new().expect("tempdir");
    let mut regular_path = tmp.path().to_path_buf();
    regular_path.push("app");
    regular_path.push("resources");
    fs::create_dir_all(&regular_path).expect("create regular path");
    assert!(!is_protected_windowsapps_package_path(&regular_path));
}
#[test]
fn gather_read_roots_includes_helper_bin_dir() {
let tmp = TempDir::new().expect("tempdir");

78
scripts/test-remote-env.sh Executable file
View File

@@ -0,0 +1,78 @@
#!/usr/bin/env bash
# Remote-env setup script for codex-rs integration tests.
#
# Usage (source-only):
# source scripts/test-remote-env.sh
# cd codex-rs
# cargo test -p codex-core --test all remote_env_connects_creates_temp_dir_and_runs_sample_script
# codex_remote_env_cleanup

# Absolute directory containing this script, resolved even when sourced from
# an unrelated working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Repository root: one level above scripts/.
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"

# True when the file is being sourced rather than executed directly.
is_sourced() {
  [[ "${BASH_SOURCE[0]}" != "$0" ]]
}
# Build codex-exec-server, start a disposable ubuntu container, and export
# CODEX_TEST_REMOTE_ENV with its name so integration tests run "remotely".
# Returns non-zero (without killing the sourcing shell) when a prerequisite
# is missing.
setup_remote_env() {
  local container_name
  local codex_exec_server_binary_path
  # Allow callers to pin the container name; default to a unique one keyed
  # by timestamp and $RANDOM.
  container_name="${CODEX_TEST_REMOTE_ENV_CONTAINER_NAME:-codex-remote-test-env-local-$(date +%s)-${RANDOM}}"
  codex_exec_server_binary_path="${REPO_ROOT}/codex-rs/target/debug/codex-exec-server"
  # Preflight: docker CLI, a reachable daemon, and cargo must all exist.
  if ! command -v docker >/dev/null 2>&1; then
    echo "docker is required (Colima or Docker Desktop)" >&2
    return 1
  fi
  if ! docker info >/dev/null 2>&1; then
    echo "docker daemon is not reachable; for Colima run: colima start" >&2
    return 1
  fi
  if ! command -v cargo >/dev/null 2>&1; then
    echo "cargo is required to build codex-exec-server" >&2
    return 1
  fi
  # Build in a subshell so the caller's cwd is left untouched.
  (
    cd "${REPO_ROOT}/codex-rs"
    cargo build -p codex-exec-server --bin codex-exec-server
  )
  if [[ ! -f "${codex_exec_server_binary_path}" ]]; then
    echo "codex-exec-server binary not found at ${codex_exec_server_binary_path}" >&2
    return 1
  fi
  # Replace any stale container with the same name, then keep one alive
  # indefinitely for the tests to exec into.
  docker rm -f "${container_name}" >/dev/null 2>&1 || true
  docker run -d --name "${container_name}" ubuntu:24.04 sleep infinity >/dev/null
  # Per the PR description, CODEX_TEST_REMOTE_ENV makes test_codex start the
  # executor inside this container.
  export CODEX_TEST_REMOTE_ENV="${container_name}"
}
# Tear down the container created by setup_remote_env (if any) and clear the
# CODEX_TEST_REMOTE_ENV marker so later runs fall back to local mode.
codex_remote_env_cleanup() {
  local container="${CODEX_TEST_REMOTE_ENV:-}"
  if [[ -z "${container}" ]]; then
    return 0
  fi
  docker rm -f "${container}" >/dev/null 2>&1 || true
  unset CODEX_TEST_REMOTE_ENV
}
# Refuse direct execution: the exported CODEX_TEST_REMOTE_ENV and the cleanup
# function are only useful when they land in the caller's shell.
if ! is_sourced; then
  echo "source this script instead of executing it: source scripts/test-remote-env.sh" >&2
  exit 1
fi

# Snapshot the caller's shell options so they can be restored before return.
old_shell_options="$(set +o)"
set -euo pipefail
if setup_remote_env; then
  status=0
  echo "CODEX_TEST_REMOTE_ENV=${CODEX_TEST_REMOTE_ENV}"
  echo "Remote env ready. Run your command, then call: codex_remote_env_cleanup"
else
  status=$?
fi
# Restore the caller's option state, then propagate setup's exit status to
# the sourcing shell.
eval "${old_shell_options}"
return "${status}"