Mirror of https://github.com/openai/codex.git (synced 2026-02-03 23:43:39 +00:00)

Compare commits: codex-work ... fix-bazel- (5 commits)

Commits in range: 7e194a169f, 6c1b2c448a, 0a04f4dd69, 06e61a6e59, b582daf2d9
@@ -1,3 +1,4 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git
codex-rs/target
.bazelrc (9 lines changed)
@@ -23,7 +23,14 @@ common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bo

common --incompatible_strict_action_env
# Not ideal, but we need to allow dotslash to be found
common --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin
common:linux --test_env=PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin
common:macos --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin

# Pass through some env vars Windows needs to use PowerShell.
common:windows --test_env=PATH
common:windows --test_env=SYSTEMROOT
common:windows --test_env=COMSPEC
common:windows --test_env=WINDIR

common --test_output=errors
common --bes_results_url=https://app.buildbuddy.io/invocation/
.github/workflows/bazel.yml (vendored, 14 lines changed)
@@ -10,11 +10,6 @@ on:
      - main
  workflow_dispatch:

concurrency:
  # Cancel previous actions from the same PR or branch except 'main' branch.
  # See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info.
  group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}}
  cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
  test:
    strategy:

@@ -36,9 +31,10 @@ jobs:
          target: aarch64-unknown-linux-musl
        - os: ubuntu-24.04
          target: x86_64-unknown-linux-musl
        # TODO: Enable Windows once we fix the toolchain issues there.
        #- os: windows-latest
        #  target: x86_64-pc-windows-gnullvm

        # Windows
        - os: windows-latest
          target: x86_64-pc-windows-gnullvm
    runs-on: ${{ matrix.os }}

    # Configure a human-readable name for each job

@@ -95,7 +91,7 @@
      shell: pwsh
      run: |
        # Use a very short path to reduce argv/path length issues.
        "BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
        "BAZEL_STARTUP_ARGS=--output_user_root=D:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

    - name: bazel test //...
      env:
.github/workflows/ci.bazelrc (vendored, 13 lines changed)
@@ -1,6 +1,13 @@
common --remote_download_minimal
common --nobuild_runfile_links
common --keep_going
common --verbose_failures

# Disable the disk cache since we have a remote one and aren't using persistent workers.
common --disk_cache=

# Rearrange caches on Windows so they're on the same volume as the checkout.
common:windows --repo_contents_cache=D:/a/.cache/bazel-repo-contents-cache
common:windows --repository_cache=D:/a/.cache/bazel-repo-cache

# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.

@@ -16,5 +23,5 @@ common:macos --config=remote
common:macos --strategy=remote
common:macos --strategy=TestRunner=darwin-sandbox,local

common:windows --strategy=TestRunner=local

# On Windows we cannot cross-build the tests but run them locally, due to what appears to be a Bazel bug
# (Windows vs. Unix path confusion).
@@ -37,6 +37,7 @@ single_version_override(
        "//patches:rules_rust.patch",
        "//patches:rules_rust_windows_gnu.patch",
        "//patches:rules_rust_musl.patch",
        "//patches:rules_rust_direct_deps.patch",
    ],
)

@@ -53,7 +54,8 @@ rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
    edition = "2024",
    extra_target_triples = RUST_TRIPLES,
    versions = ["1.93.0"],
    # TODO(zbarsky): bump to 1.93 after fixing mingw
    versions = ["1.92.0"],
)
use_repo(rust, "rust_toolchains")
codex-rs/Cargo.lock (generated, 4 lines changed)
@@ -339,7 +339,6 @@ dependencies = [
 "core_test_support",
 "serde",
 "serde_json",
 "shlex",
 "tokio",
 "uuid",
 "wiremock",

@@ -1105,7 +1104,6 @@ dependencies = [
 "serde",
 "serde_json",
 "serial_test",
 "shlex",
 "tempfile",
 "time",
 "tokio",

@@ -1900,6 +1898,7 @@ dependencies = [
 "codex-windows-sandbox",
 "color-eyre",
 "crossterm",
 "ctor 0.6.3",
 "derive_more 2.1.1",
 "diffy",
 "dirs",

@@ -4644,7 +4643,6 @@ dependencies = [
 "pretty_assertions",
 "serde",
 "serde_json",
 "shlex",
 "tokio",
 "wiremock",
]
@@ -67,4 +67,3 @@ rmcp = { workspace = true, default-features = false, features = [
] }
serial_test = { workspace = true }
wiremock = { workspace = true }
shlex = { workspace = true }
@@ -26,4 +26,3 @@ tokio = { workspace = true, features = [
uuid = { workspace = true }
wiremock = { workspace = true }
core_test_support = { path = "../../../core/tests/common" }
shlex = { workspace = true }
@@ -1,3 +1,4 @@
use core_test_support::join_shell_command_args;
use core_test_support::responses;
use serde_json::json;
use std::path::Path;

@@ -9,7 +10,7 @@ pub fn create_shell_command_sse_response(
    call_id: &str,
) -> anyhow::Result<String> {
    // The `arguments` for the `shell_command` tool is a serialized JSON object.
    let command_str = shlex::try_join(command.iter().map(String::as_str))?;
    let command_str = join_shell_command_args(&command);
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command_str,
        "workdir": workdir.map(|w| w.to_string_lossy()),
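For context, a minimal sketch of the payload this helper serializes, assuming a Unix host (where `join_shell_command_args` falls through to shlex-style joining) and assuming `serde_json` is available; the command string and workdir value are illustrative, not taken from the tests.

```rust
use serde_json::json;

fn main() {
    // Roughly what join_shell_command_args yields for
    // ["python3", "-c", "print(42)"] on a Unix host.
    let command_str = "python3 -c 'print(42)'";
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command_str,
        "workdir": "/tmp/workdir", // illustrative working directory
    }))
    .expect("serialize shell_command arguments");
    println!("{tool_call_arguments}");
}
```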
@@ -21,13 +21,13 @@ use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::SendUserTurnResponse;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_core::parse_command::parse_command;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_core::protocol_config_types::ReasoningSummary;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::openai_models::ReasoningEffort;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::Event;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -202,25 +202,32 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
let working_directory = tmp.path().join("workdir");
|
||||
std::fs::create_dir(&working_directory)?;
|
||||
|
||||
// Mock server will request a python shell call for the first and second turn, then finish.
|
||||
let responses = vec![
|
||||
create_shell_command_sse_response(
|
||||
let (shell_command, expected_shell_command) = if cfg!(windows) {
|
||||
let command = "Get-Process".to_string();
|
||||
(vec![command.clone()], format_with_current_shell(&command))
|
||||
} else {
|
||||
let command = "python3 -c 'print(42)'";
|
||||
(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
format_with_current_shell(command),
|
||||
)
|
||||
};
|
||||
|
||||
// Mock server will request an untrusted shell call for the first and second turn, then finish.
|
||||
let responses = vec![
|
||||
create_shell_command_sse_response(
|
||||
shell_command.clone(),
|
||||
Some(&working_directory),
|
||||
Some(5000),
|
||||
"call1",
|
||||
)?,
|
||||
create_final_assistant_message_sse_response("done 1")?,
|
||||
create_shell_command_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
shell_command.clone(),
|
||||
Some(&working_directory),
|
||||
Some(5000),
|
||||
"call2",
|
||||
@@ -293,16 +300,16 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
|
||||
panic!("expected ExecCommandApproval request, got: {request:?}");
|
||||
};
|
||||
|
||||
let parsed_cmd = parse_command(&expected_shell_command);
|
||||
|
||||
assert_eq!(
|
||||
ExecCommandApprovalParams {
|
||||
conversation_id,
|
||||
call_id: "call1".to_string(),
|
||||
command: format_with_current_shell("python3 -c 'print(42)'"),
|
||||
command: expected_shell_command,
|
||||
cwd: working_directory.clone(),
|
||||
reason: None,
|
||||
parsed_cmd: vec![ParsedCommand::Unknown {
|
||||
cmd: "python3 -c 'print(42)'".to_string()
|
||||
}],
|
||||
parsed_cmd,
|
||||
},
|
||||
params
|
||||
);
|
||||
|
||||
@@ -53,6 +53,18 @@ use tokio::time::timeout;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const TEST_ORIGINATOR: &str = "codex_vscode";

fn untrusted_shell_command() -> Vec<String> {
    if cfg!(windows) {
        vec!["Get-Process".to_string()]
    } else {
        vec![
            "python3".to_string(),
            "-c".to_string(),
            "print(42)".to_string(),
        ]
    }
}

#[tokio::test]
async fn turn_start_sends_originator_header() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
@@ -547,30 +559,12 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
|
||||
let tmp = TempDir::new()?;
|
||||
let codex_home = tmp.path().to_path_buf();
|
||||
|
||||
// Mock server: first turn requests a shell call (elicitation), then completes.
|
||||
// Mock server: first turn requests an untrusted shell call (elicitation), then completes.
|
||||
// Second turn same, but we'll set approval_policy=never to avoid elicitation.
|
||||
let responses = vec![
|
||||
create_shell_command_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call1",
|
||||
)?,
|
||||
create_shell_command_sse_response(untrusted_shell_command(), None, Some(5000), "call1")?,
|
||||
create_final_assistant_message_sse_response("done 1")?,
|
||||
create_shell_command_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
None,
|
||||
Some(5000),
|
||||
"call2",
|
||||
)?,
|
||||
create_shell_command_sse_response(untrusted_shell_command(), None, Some(5000), "call2")?,
|
||||
create_final_assistant_message_sse_response("done 2")?,
|
||||
];
|
||||
let server = create_mock_responses_server_sequence(responses).await;
|
||||
@@ -693,11 +687,7 @@ async fn turn_start_exec_approval_decline_v2() -> Result<()> {
|
||||
|
||||
let responses = vec![
|
||||
create_shell_command_sse_response(
|
||||
vec![
|
||||
"python3".to_string(),
|
||||
"-c".to_string(),
|
||||
"print(42)".to_string(),
|
||||
],
|
||||
untrusted_shell_command(),
|
||||
None,
|
||||
Some(5000),
|
||||
"call-decline",
|
||||
|
||||
@@ -1,4 +1,4 @@
use codex_utils_cargo_bin::find_resource;
use codex_utils_cargo_bin::repo_root;
use pretty_assertions::assert_eq;
use std::collections::BTreeMap;
use std::fs;

@@ -9,7 +9,12 @@ use tempfile::tempdir;

#[test]
fn test_apply_patch_scenarios() -> anyhow::Result<()> {
    let scenarios_dir = find_resource!("tests/fixtures/scenarios")?;
    let scenarios_dir = repo_root()?
        .join("codex-rs")
        .join("apply-patch")
        .join("tests")
        .join("fixtures")
        .join("scenarios");
    for scenario in fs::read_dir(scenarios_dir)? {
        let scenario = scenario?;
        let path = scenario.path();
@@ -462,10 +462,33 @@ fn stage_str(stage: codex_core::features::Stage) -> &'static str {
}

fn main() -> anyhow::Result<()> {
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        cli_main(codex_linux_sandbox_exe).await?;
        Ok(())
    })
    #[cfg(windows)]
    {
        run_cli_with_large_stack()
    }
    #[cfg(not(windows))]
    {
        arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
            cli_main(codex_linux_sandbox_exe).await?;
            Ok(())
        })
    }
}

#[cfg(windows)]
fn run_cli_with_large_stack() -> anyhow::Result<()> {
    let handle = std::thread::Builder::new()
        .stack_size(8 * 1024 * 1024)
        .spawn(|| {
            arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
                cli_main(codex_linux_sandbox_exe).await?;
                Ok(())
            })
        })?;
    match handle.join() {
        Ok(result) => result,
        Err(err) => std::panic::resume_unwind(err),
    }
}

async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
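A self-contained sketch of the same workaround `run_cli_with_large_stack` uses above: run the work on a thread with an explicit 8 MiB stack and re-raise any panic from it. The async body is a stand-in for `cli_main`, and building a Tokio runtime directly is an assumption here; the real binary goes through `arg0_dispatch_or_else`. Assumes `tokio` (multi-thread runtime) and `anyhow` as dependencies.

```rust
fn main() -> anyhow::Result<()> {
    // The default stack is too small on Windows for the CLI's deep call
    // chains, so run the real work on a thread with an explicit 8 MiB stack.
    let handle = std::thread::Builder::new()
        .stack_size(8 * 1024 * 1024)
        .spawn(|| -> anyhow::Result<()> {
            // Stand-in for arg0_dispatch_or_else(cli_main): build a runtime
            // and block on the async entry point.
            let runtime = tokio::runtime::Runtime::new()?;
            runtime.block_on(async {
                // ... the real CLI work would run here ...
                Ok(())
            })
        })?;
    match handle.join() {
        // Propagate the thread's result, or re-raise its panic.
        Ok(result) => result,
        Err(err) => std::panic::resume_unwind(err),
    }
}
```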
||||
@@ -40,5 +40,6 @@ codex_rust_crate(
        "//codex-rs/rmcp-client:test_stdio_server",
        "//codex-rs/rmcp-client:test_streamable_http_server",
        "//codex-rs/cli:codex",
        "//codex-rs/exec:codex-exec",
    ],
)
@@ -192,7 +192,21 @@ fn get_powershell_shell(path: Option<&PathBuf>) -> Option<Shell> {
        "pwsh",
        vec!["/usr/local/bin/pwsh"],
    )
    .or_else(|| get_shell_path(ShellType::PowerShell, path, "powershell", vec![]));
    .or_else(|| get_shell_path(ShellType::PowerShell, path, "powershell", vec![]))
    .or_else(|| {
        if cfg!(windows) {
            std::env::var_os("SystemRoot").and_then(|root| {
                let candidate = PathBuf::from(root)
                    .join("System32")
                    .join("WindowsPowerShell")
                    .join("v1.0")
                    .join("powershell.exe");
                file_exists(&candidate)
            })
        } else {
            None
        }
    });

    shell_path.map(|shell_path| Shell {
        shell_type: ShellType::PowerShell,
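A minimal sketch of the new fallback in isolation: derive the stock Windows PowerShell location from `SystemRoot` and keep it only if the file exists. The `existing` helper below is a stand-in for the crate's `file_exists`, which the diff suggests returns an `Option` for an existing path.

```rust
use std::path::PathBuf;

// Stand-in for the crate's file_exists helper: keep the path only if it exists.
fn existing(path: PathBuf) -> Option<PathBuf> {
    path.exists().then_some(path)
}

fn builtin_powershell() -> Option<PathBuf> {
    // Only meaningful on Windows, where SystemRoot is normally C:\Windows.
    std::env::var_os("SystemRoot").and_then(|root| {
        existing(
            PathBuf::from(root)
                .join("System32")
                .join("WindowsPowerShell")
                .join("v1.0")
                .join("powershell.exe"),
        )
    })
}

fn main() {
    match builtin_powershell() {
        Some(path) => println!("stock PowerShell at {}", path.display()),
        None => println!("no stock PowerShell found (expected on non-Windows hosts)"),
    }
}
```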
@@ -246,6 +246,40 @@ pub fn format_with_current_shell_display_non_login(command: &str) -> String {
        .expect("serialize current shell command without login")
}

pub fn join_shell_command_args(command: &[String]) -> String {
    if cfg!(windows) {
        if command.len() == 1 {
            return command[0].clone();
        }
        return join_cmd_args(command);
    }
    shlex::try_join(command.iter().map(String::as_str)).expect("serialize shell_command arguments")
}

fn join_cmd_args(command: &[String]) -> String {
    command
        .iter()
        .map(|arg| quote_cmd_arg(arg))
        .collect::<Vec<_>>()
        .join(" ")
}

fn quote_cmd_arg(arg: &str) -> String {
    if arg.is_empty() {
        return "\"\"".to_string();
    }

    let needs_quotes = arg
        .chars()
        .any(|c| c.is_whitespace() || matches!(c, '^' | '&' | '|' | '<' | '>' | '(' | ')'));
    if !needs_quotes {
        return arg.to_string();
    }

    let escaped = arg.replace('"', "\"\"");
    format!("\"{escaped}\"")
}

pub fn stdio_server_bin() -> Result<String, CargoBinError> {
    codex_utils_cargo_bin::cargo_bin("test_stdio_server").map(|p| p.to_string_lossy().to_string())
}
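To make the Windows branch concrete, a small self-contained check of the cmd.exe-style quoting above. `quote_cmd_arg` is restated verbatim so the snippet compiles on its own; the expected strings follow from its rules (quote only when whitespace or a metacharacter is present, double any embedded quotes) and are not taken from the crate's tests.

```rust
// Restated from the diff above so the snippet stands alone.
fn quote_cmd_arg(arg: &str) -> String {
    if arg.is_empty() {
        return "\"\"".to_string();
    }
    let needs_quotes = arg
        .chars()
        .any(|c| c.is_whitespace() || matches!(c, '^' | '&' | '|' | '<' | '>' | '(' | ')'));
    if !needs_quotes {
        return arg.to_string();
    }
    let escaped = arg.replace('"', "\"\"");
    format!("\"{escaped}\"")
}

fn main() {
    // Plain arguments pass through untouched.
    assert_eq!(quote_cmd_arg("New-Item"), "New-Item");
    // Whitespace forces quoting.
    assert_eq!(quote_cmd_arg("C:\\Program Files"), "\"C:\\Program Files\"");
    // Embedded quotes are doubled inside the quoted form.
    assert_eq!(quote_cmd_arg("say \"hi\""), "\"say \"\"hi\"\"\"");
    // Empty arguments become an explicit empty pair of quotes.
    assert_eq!(quote_cmd_arg(""), "\"\"");
    println!("quoting behaves as described");
}
```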
@@ -47,6 +47,9 @@ async fn apply_patch_harness_with(
) -> Result<TestCodexHarness> {
    let builder = configure(test_codex()).with_config(|config| {
        config.include_apply_patch_tool = true;
        if cfg!(windows) {
            config.features.enable(Feature::PowershellUtf8);
        }
    });
    TestCodexHarness::with_builder(builder).await
}
@@ -57,6 +57,7 @@ use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::body_string_contains;
|
||||
use wiremock::matchers::header;
|
||||
use wiremock::matchers::header_regex;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
@@ -67,6 +68,33 @@ fn sse_completed(id: &str) -> String {
|
||||
load_sse_fixture_with_id("../fixtures/completed_template.json", id)
|
||||
}
|
||||
|
||||
fn existing_env_var_with_value() -> (&'static str, String) {
|
||||
let candidates: &[&str] = if cfg!(windows) {
|
||||
&[
|
||||
"SystemRoot",
|
||||
"SYSTEMROOT",
|
||||
"WINDIR",
|
||||
"ComSpec",
|
||||
"TEMP",
|
||||
"TMP",
|
||||
"OS",
|
||||
"USERNAME",
|
||||
"COMPUTERNAME",
|
||||
"PATH",
|
||||
]
|
||||
} else {
|
||||
&["USER", "LOGNAME", "HOME", "SHELL", "PATH"]
|
||||
};
|
||||
for &key in candidates {
|
||||
if let Ok(val) = std::env::var(key)
|
||||
&& !val.is_empty()
|
||||
{
|
||||
return (key, val);
|
||||
}
|
||||
}
|
||||
panic!("expected one of {candidates:?} to be set");
|
||||
}
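To make the helper's intent concrete: the returned pair is used so the provider reads its bearer token from an environment variable that is already set, which lets the mock server assert on the exact `Authorization` header without any unsafe `set_var`. A reduced stand-in follows; the provider struct and the shortened candidate list are simplifications, not the crate's real types.

```rust
// Simplified stand-in for the provider configuration used in the tests below.
struct ProviderStandIn {
    env_key: Option<String>,
}

fn existing_env_var_with_value_standin() -> (&'static str, String) {
    // Same idea as the helper above, reduced to a few candidates.
    for key in ["PATH", "HOME", "USERPROFILE"] {
        if let Ok(val) = std::env::var(key) {
            if !val.is_empty() {
                return (key, val);
            }
        }
    }
    panic!("expected PATH, HOME, or USERPROFILE to be set");
}

fn main() {
    let (env_key, env_value) = existing_env_var_with_value_standin();
    let provider = ProviderStandIn {
        env_key: Some(env_key.to_string()),
    };
    // The mock server can now require this exact header value.
    let expected_authorization = format!("Bearer {env_value}");
    println!("env_key = {:?}", provider.env_key);
    println!("expected Authorization = {expected_authorization}");
}
```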
|
||||
|
||||
#[expect(clippy::unwrap_used)]
|
||||
fn assert_message_role(request_body: &serde_json::Value, role: &str) {
|
||||
assert_eq!(request_body["role"].as_str().unwrap(), role);
|
||||
@@ -1620,7 +1648,7 @@ async fn context_window_error_sets_total_tokens_to_model_window() -> anyhow::Res
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn azure_overrides_assign_properties_used_for_responses_url() {
|
||||
skip_if_no_network!();
|
||||
let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };
|
||||
let (env_key, env_value) = existing_env_var_with_value();
|
||||
|
||||
// Mock server
|
||||
let server = MockServer::start().await;
|
||||
@@ -1635,14 +1663,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
|
||||
.and(path("/openai/responses"))
|
||||
.and(query_param("api-version", "2025-04-01-preview"))
|
||||
.and(header_regex("Custom-Header", "Value"))
|
||||
.and(header_regex(
|
||||
"Authorization",
|
||||
format!(
|
||||
"Bearer {}",
|
||||
std::env::var(existing_env_var_with_random_value).unwrap()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
.and(header("Authorization", format!("Bearer {env_value}")))
|
||||
.respond_with(first)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
@@ -1652,7 +1673,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
|
||||
name: "custom".to_string(),
|
||||
base_url: Some(format!("{}/openai", server.uri())),
|
||||
// Reuse the existing environment variable to avoid using unsafe code
|
||||
env_key: Some(existing_env_var_with_random_value.to_string()),
|
||||
env_key: Some(env_key.to_string()),
|
||||
experimental_bearer_token: None,
|
||||
query_params: Some(std::collections::HashMap::from([(
|
||||
"api-version".to_string(),
|
||||
@@ -1700,7 +1721,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn env_var_overrides_loaded_auth() {
|
||||
skip_if_no_network!();
|
||||
let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };
|
||||
let (env_key, env_value) = existing_env_var_with_value();
|
||||
|
||||
// Mock server
|
||||
let server = MockServer::start().await;
|
||||
@@ -1715,14 +1736,7 @@ async fn env_var_overrides_loaded_auth() {
|
||||
.and(path("/openai/responses"))
|
||||
.and(query_param("api-version", "2025-04-01-preview"))
|
||||
.and(header_regex("Custom-Header", "Value"))
|
||||
.and(header_regex(
|
||||
"Authorization",
|
||||
format!(
|
||||
"Bearer {}",
|
||||
std::env::var(existing_env_var_with_random_value).unwrap()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
.and(header("Authorization", format!("Bearer {env_value}")))
|
||||
.respond_with(first)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
@@ -1732,7 +1746,7 @@ async fn env_var_overrides_loaded_auth() {
|
||||
name: "custom".to_string(),
|
||||
base_url: Some(format!("{}/openai", server.uri())),
|
||||
// Reuse the existing environment variable to avoid using unsafe code
|
||||
env_key: Some(existing_env_var_with_random_value.to_string()),
|
||||
env_key: Some(env_key.to_string()),
|
||||
query_params: Some(std::collections::HashMap::from([(
|
||||
"api-version".to_string(),
|
||||
"2025-04-01-preview".to_string(),
|
||||
|
||||
@@ -211,7 +211,7 @@ async fn shell_command_times_out_with_timeout_ms() -> anyhow::Result<()> {
    let harness = shell_command_harness_with(|builder| builder.with_model("gpt-5.1")).await?;
    let call_id = "shell-command-timeout";
    let command = if cfg!(windows) {
        "timeout /t 5"
        "Start-Sleep -Seconds 5"
    } else {
        "sleep 5"
    };

@@ -253,14 +253,13 @@ async fn unicode_output(login: bool) -> anyhow::Result<()> {
    .await?;

    let call_id = "unicode_output";
    mount_shell_responses_with_timeout(
        &harness,
        call_id,
        "git -c alias.say='!printf \"%s\" \"naïve_café\"' say",
        Some(login),
        MEDIUM_TIMEOUT,
    )
    .await;
    let command = if cfg!(windows) {
        "Write-Output 'naïve_café'"
    } else {
        "git -c alias.say='!printf \"%s\" \"naïve_café\"' say"
    };
    mount_shell_responses_with_timeout(&harness, call_id, command, Some(login), MEDIUM_TIMEOUT)
        .await;
    harness.submit("run the command without login").await?;

    let output = harness.function_call_stdout(call_id).await;
@@ -740,7 +740,7 @@ mod tests {
    }

    #[test]
    fn session_streams_updates_before_walk_complete() {
    fn session_emits_updates_and_complete() {
        let dir = create_temp_tree(600);
        let reporter = Arc::new(RecordingReporter::default());
        let session = create_session(dir.path(), SessionOptions::default(), reporter.clone())

@@ -751,7 +751,12 @@ mod tests {

        assert!(completed);
        let updates = reporter.updates();
        assert!(updates.iter().any(|snapshot| !snapshot.walk_complete));
        assert!(!updates.is_empty());
        assert!(
            updates
                .last()
                .is_some_and(|snapshot| snapshot.walk_complete)
        );
    }

    #[test]
@@ -25,4 +25,3 @@ tokio = { workspace = true, features = [
] }
wiremock = { workspace = true }
core_test_support = { path = "../../../core/tests/common" }
shlex = { workspace = true }
@@ -1,3 +1,4 @@
use core_test_support::join_shell_command_args;
use serde_json::json;
use std::path::Path;

@@ -8,7 +9,7 @@ pub fn create_shell_command_sse_response(
    call_id: &str,
) -> anyhow::Result<String> {
    // The `arguments` for the `shell_command` tool is a serialized JSON object.
    let command_str = shlex::try_join(command.iter().map(String::as_str))?;
    let command_str = join_shell_command_args(&command);
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command_str,
        "workdir": workdir.map(|w| w.to_string_lossy()),
@@ -58,23 +58,21 @@ async fn test_shell_command_approval_triggers_elicitation() {
async fn shell_command_approval_triggers_elicitation() -> anyhow::Result<()> {
    // Use a simple, untrusted command that creates a file so we can
    // observe a side-effect.
    //
    // Cross‑platform approach: run a tiny Python snippet to touch the file
    // using `python3 -c ...` on all platforms.
    let workdir_for_shell_function_call = TempDir::new()?;
    let created_filename = "created_by_shell_tool.txt";
    let created_file = workdir_for_shell_function_call
        .path()
        .join(created_filename);

    let shell_command = vec![
        "python3".to_string(),
        "-c".to_string(),
        format!("import pathlib; pathlib.Path('{created_filename}').touch()"),
    ];
    let expected_shell_command = format_with_current_shell(&format!(
        "python3 -c \"import pathlib; pathlib.Path('{created_filename}').touch()\""
    ));
    let (shell_command, expected_shell_command) = if cfg!(windows) {
        let command = format!("New-Item -ItemType File -Path {created_filename} -Force");
        (vec![command.clone()], format_with_current_shell(&command))
    } else {
        let command = format!("import pathlib; pathlib.Path('{created_filename}').touch()");
        (
            vec!["python3".to_string(), "-c".to_string(), command.clone()],
            format_with_current_shell(&format!("python3 -c \"{command}\"")),
        )
    };

    let McpHandle {
        process: mut mcp_process,
@@ -16,6 +16,12 @@ use std::ffi::OsString;
|
||||
#[cfg(windows)]
|
||||
use std::env;
|
||||
#[cfg(windows)]
|
||||
use std::ffi::OsStr;
|
||||
#[cfg(windows)]
|
||||
use std::path::Path;
|
||||
#[cfg(windows)]
|
||||
use std::path::PathBuf;
|
||||
#[cfg(windows)]
|
||||
use tracing::debug;
|
||||
|
||||
/// Resolves a program to its executable path on Unix systems.
|
||||
@@ -45,6 +51,15 @@ pub fn resolve(program: OsString, env: &HashMap<String, String>) -> std::io::Res
|
||||
|
||||
// Extract PATH from environment for search locations
|
||||
let search_path = env.get("PATH");
|
||||
let pathext = env.get("PATHEXT");
|
||||
|
||||
if let Some(resolved) = resolve_with_pathext(&program, search_path, pathext, &cwd) {
|
||||
debug!(
|
||||
"Resolved {:?} to {:?} using PATHEXT-aware search",
|
||||
program, resolved
|
||||
);
|
||||
return Ok(resolved.into_os_string());
|
||||
}
|
||||
|
||||
// Attempt resolution via which crate
|
||||
match which::which_in(&program, search_path, &cwd) {
|
||||
@@ -63,6 +78,85 @@ pub fn resolve(program: OsString, env: &HashMap<String, String>) -> std::io::Res
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn resolve_with_pathext(
|
||||
program: &OsStr,
|
||||
search_path: Option<&String>,
|
||||
pathext: Option<&String>,
|
||||
cwd: &Path,
|
||||
) -> Option<PathBuf> {
|
||||
let pathexts = parse_pathext(pathext);
|
||||
let program_path = Path::new(program);
|
||||
let has_extension = program_path.extension().is_some();
|
||||
let has_path_separator = program_path.components().nth(1).is_some();
|
||||
|
||||
if has_path_separator {
|
||||
let base = if program_path.is_absolute() {
|
||||
program_path.to_path_buf()
|
||||
} else {
|
||||
cwd.join(program_path)
|
||||
};
|
||||
return resolve_with_extensions(&base, has_extension, &pathexts);
|
||||
}
|
||||
|
||||
let search_dirs = search_path
|
||||
.map(String::as_str)
|
||||
.map(std::env::split_paths)
|
||||
.map(Iterator::collect)
|
||||
.or_else(|| std::env::var_os("PATH").map(|paths| std::env::split_paths(&paths).collect()))
|
||||
.unwrap_or_else(|| vec![cwd.to_path_buf()]);
|
||||
|
||||
for dir in search_dirs {
|
||||
let base = dir.join(program_path);
|
||||
if let Some(resolved) = resolve_with_extensions(&base, has_extension, &pathexts) {
|
||||
return Some(resolved);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn resolve_with_extensions(
|
||||
base: &Path,
|
||||
has_extension: bool,
|
||||
pathexts: &[OsString],
|
||||
) -> Option<PathBuf> {
|
||||
if base.exists() {
|
||||
return Some(base.to_path_buf());
|
||||
}
|
||||
|
||||
if has_extension {
|
||||
return None;
|
||||
}
|
||||
|
||||
for ext in pathexts {
|
||||
let candidate = base.with_extension(ext);
|
||||
if candidate.exists() {
|
||||
return Some(candidate);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn parse_pathext(pathext: Option<&String>) -> Vec<OsString> {
|
||||
let raw = pathext
|
||||
.map(String::as_str)
|
||||
.map(str::to_owned)
|
||||
.or_else(|| std::env::var("PATHEXT").ok())
|
||||
.unwrap_or_else(|| ".COM;.EXE;.BAT;.CMD".to_string());
|
||||
|
||||
raw.split(';')
|
||||
.map(str::trim)
|
||||
.filter(|ext| !ext.is_empty())
|
||||
.map(|ext| ext.trim_start_matches('.'))
|
||||
.filter(|ext| !ext.is_empty())
|
||||
.map(OsString::from)
|
||||
.collect()
|
||||
}
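A compact, self-contained illustration of the PATHEXT handling above: fall back to `.COM;.EXE;.BAT;.CMD` when the variable is absent, split on `;`, strip dots and empty entries, then probe `base.with_extension(ext)` for a bare program name. The parsing differs slightly from `parse_pathext` (it filters empties once, after trimming), and the probed path is illustrative.

```rust
use std::ffi::OsString;
use std::path::Path;

// Parse a PATHEXT-style list, defaulting when the variable is unset.
fn parse_pathext(pathext: Option<&str>) -> Vec<OsString> {
    let raw = pathext
        .map(str::to_owned)
        .unwrap_or_else(|| ".COM;.EXE;.BAT;.CMD".to_string());
    raw.split(';')
        .map(str::trim)
        .map(|ext| ext.trim_start_matches('.'))
        .filter(|ext| !ext.is_empty())
        .map(OsString::from)
        .collect()
}

fn main() {
    let exts = parse_pathext(Some(".COM;.EXE;.BAT;.CMD;.PS1"));
    // A bare name like `python` is probed as python.COM, python.EXE, ... in
    // each search directory; here we just print the candidates for one base.
    let base = Path::new(r"C:\tools\python");
    for ext in &exts {
        println!("candidate: {}", base.with_extension(ext).display());
    }
}
```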
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -128,15 +222,24 @@ mod tests {
|
||||
|
||||
// Apply platform-specific resolution
|
||||
let resolved = resolve(program, &env.mcp_env)?;
|
||||
let resolved_display = resolved.clone();
|
||||
|
||||
// Verify resolved path executes successfully
|
||||
let mut cmd = Command::new(resolved);
|
||||
cmd.envs(&env.mcp_env);
|
||||
let output = cmd.output().await;
|
||||
|
||||
if let Err(err) = &output {
|
||||
eprintln!(
|
||||
"resolved execution failed: {err:?}; resolved={resolved_display:?}; PATH={:?}; PATHEXT={:?}",
|
||||
env.mcp_env.get("PATH"),
|
||||
env.mcp_env.get("PATHEXT"),
|
||||
);
|
||||
}
|
||||
|
||||
assert!(
|
||||
output.is_ok(),
|
||||
"Resolved program should execute successfully"
|
||||
"Resolved program should execute successfully: {output:?}"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -120,6 +120,7 @@ codex-utils-cargo-bin = { workspace = true }
codex-utils-pty = { workspace = true }
assert_matches = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
ctor = { workspace = true }
insta = { workspace = true }
pretty_assertions = { workspace = true }
rand = { workspace = true }
codex-rs/tui/src/insta_runfiles.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
use std::fs;
use std::io::BufRead;
use std::io::BufReader;
use std::path::PathBuf;

// Before any test runs, materialize committed .snap files listed in the Bazel
// runfiles manifest into the runfiles tree so insta can find them at their
// expected relative paths.
#[ctor::ctor]
fn init_insta_runfiles() {
    sync_snapshots("_main/codex-rs/tui/");
}

fn sync_snapshots(prefix: &str) {
    let Some(manifest) = std::env::var_os("RUNFILES_MANIFEST_FILE") else {
        return;
    };
    let manifest = PathBuf::from(manifest);
    let Some(runfiles_root) = manifest.parent() else {
        return;
    };
    let Ok(file) = fs::File::open(&manifest) else {
        return;
    };
    let reader = BufReader::new(file);

    for line in reader.lines().map_while(Result::ok) {
        if line.is_empty() {
            continue;
        }
        let Some((key, value)) = line.split_once(' ') else {
            continue;
        };
        if !key.starts_with(prefix) || !key.ends_with(".snap") {
            continue;
        }

        let dest = runfiles_root.join(key);
        if dest.exists() {
            continue;
        }
        if let Some(parent) = dest.parent()
            && fs::create_dir_all(parent).is_err()
        {
            continue;
        }
        let _ = fs::copy(value, &dest);
    }
}
@@ -105,6 +105,8 @@ mod version;

mod wrapping;

#[cfg(test)]
mod insta_runfiles;
#[cfg(test)]
pub mod test_backend;
@@ -1,4 +1,7 @@
|
||||
use std::ffi::OsString;
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub use path_absolutize;
|
||||
@@ -25,6 +28,25 @@ pub enum CargoBinError {
|
||||
},
|
||||
}
|
||||
|
||||
pub fn repo_root() -> Result<PathBuf, std::io::Error> {
|
||||
if let Ok(runfiles_dir) = std::env::var("RUNFILES_DIR") {
|
||||
let root = PathBuf::from(runfiles_dir).join("_main");
|
||||
if root.exists() {
|
||||
return Ok(root);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(root) = runfiles_manifest_root() {
|
||||
return Ok(root);
|
||||
}
|
||||
|
||||
if let Some(root) = repo_root_from_current_dir() {
|
||||
return Ok(root);
|
||||
}
|
||||
|
||||
Ok(std::env::current_dir()?)
|
||||
}
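As a usage sketch, this is roughly how a test resolves a fixture directory through `repo_root` so the same code works under `cargo test` and under Bazel runfiles; it mirrors the apply-patch test shown earlier and assumes `codex-utils-cargo-bin` is available as a dependency.

```rust
use codex_utils_cargo_bin::repo_root;

fn main() -> std::io::Result<()> {
    // repo_root() tries RUNFILES_DIR/_main, then the runfiles manifest, then
    // a walk up from the current directory (see the function above).
    let scenarios_dir = repo_root()?
        .join("codex-rs")
        .join("apply-patch")
        .join("tests")
        .join("fixtures")
        .join("scenarios");
    println!("fixtures at {}", scenarios_dir.display());
    Ok(())
}
```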
|
||||
|
||||
/// Returns an absolute path to a binary target built for the current test run.
|
||||
///
|
||||
/// In `cargo test`, `CARGO_BIN_EXE_*` env vars are absolute, but Buck2 may set
|
||||
@@ -102,23 +124,76 @@ macro_rules! find_resource {
|
||||
.join("_main")
|
||||
.join(bazel_package)
|
||||
.join(resource);
|
||||
// Note we also have to normalize (but not canonicalize!)
|
||||
// the path for _Bazel_ because the original value ends with
|
||||
// `codex-rs/exec-server/tests/common/../suite/bash`, but
|
||||
// the `tests/common` folder will not exist at runtime under
|
||||
// Bazel. As such, we have to normalize it before passing it
|
||||
// to `dotslash fetch`.
|
||||
manifest_dir.absolutize().map(|p| p.to_path_buf())
|
||||
if manifest_dir.exists() {
|
||||
// Note we also have to normalize (but not canonicalize!)
|
||||
// the path for _Bazel_ because the original value ends with
|
||||
// `codex-rs/exec-server/tests/common/../suite/bash`, but
|
||||
// the `tests/common` folder will not exist at runtime under
|
||||
// Bazel. As such, we have to normalize it before passing it
|
||||
// to `dotslash fetch`.
|
||||
manifest_dir.absolutize().map(|p| p.to_path_buf())
|
||||
} else if std::env::var_os("RUNFILES_MANIFEST_FILE").is_none() {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"RUNFILES_DIR set but resource missing",
|
||||
))
|
||||
} else if resource == std::path::Path::new(".") {
|
||||
$crate::runfiles_package_root_from_manifest(bazel_package).ok_or_else(
|
||||
|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"runfiles manifest missing package root entry",
|
||||
)
|
||||
},
|
||||
)
|
||||
} else {
|
||||
let key =
|
||||
$crate::runfiles_manifest_key_for_resource(bazel_package, resource);
|
||||
$crate::runfiles_manifest_lookup(&key).ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
format!("runfiles manifest missing entry for {key}"),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
None => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"BAZEL_PACKAGE not set in Bazel build",
|
||||
)),
|
||||
},
|
||||
Err(_) => {
|
||||
let manifest_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
|
||||
Ok(manifest_dir.join(resource))
|
||||
}
|
||||
Err(_) => match std::env::var("RUNFILES_MANIFEST_FILE") {
|
||||
Ok(_) => match option_env!("BAZEL_PACKAGE") {
|
||||
Some(bazel_package) => {
|
||||
if resource == std::path::Path::new(".") {
|
||||
$crate::runfiles_package_root_from_manifest(bazel_package)
|
||||
.ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"runfiles manifest missing package root entry",
|
||||
)
|
||||
})
|
||||
} else {
|
||||
let key =
|
||||
$crate::runfiles_manifest_key_for_resource(bazel_package, resource);
|
||||
$crate::runfiles_manifest_lookup(&key).ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
format!("runfiles manifest missing entry for {key}"),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
None => Err(std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"BAZEL_PACKAGE not set in Bazel build",
|
||||
)),
|
||||
},
|
||||
Err(_) => {
|
||||
let manifest_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
|
||||
Ok(manifest_dir.join(resource))
|
||||
}
|
||||
},
|
||||
}
|
||||
}};
|
||||
}
|
||||
@@ -128,6 +203,8 @@ fn resolve_bin_from_env(key: &str, value: OsString) -> Result<PathBuf, CargoBinE
|
||||
|
||||
if abs.exists() {
|
||||
Ok(abs)
|
||||
} else if let Some(resolved) = resolve_from_runfiles_manifest(&abs) {
|
||||
Ok(resolved)
|
||||
} else {
|
||||
Err(CargoBinError::ResolvedPathDoesNotExist {
|
||||
key: key.to_owned(),
|
||||
@@ -152,6 +229,95 @@ fn absolutize_from_buck_or_cwd(path: PathBuf) -> Result<PathBuf, CargoBinError>
|
||||
.join(path))
|
||||
}
|
||||
|
||||
fn resolve_from_runfiles_manifest(path: &Path) -> Option<PathBuf> {
|
||||
let key = runfiles_manifest_key(path)?;
|
||||
runfiles_manifest_lookup(&key)
|
||||
}
|
||||
|
||||
fn runfiles_manifest_key(path: &Path) -> Option<String> {
|
||||
let raw = path.to_string_lossy();
|
||||
let (prefix, idx) = if let Some(idx) = raw.rfind(".runfiles\\") {
|
||||
(".runfiles\\", idx)
|
||||
} else {
|
||||
(".runfiles/", raw.rfind(".runfiles/")?)
|
||||
};
|
||||
let mut key = raw[idx + prefix.len()..].replace('\\', "/");
|
||||
while key.starts_with('/') {
|
||||
key.remove(0);
|
||||
}
|
||||
if key.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(key)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runfiles_manifest_lookup(key: &str) -> Option<PathBuf> {
|
||||
let manifest_path = std::env::var_os("RUNFILES_MANIFEST_FILE")?;
|
||||
let file = std::fs::File::open(manifest_path).ok()?;
|
||||
let reader = BufReader::new(file);
|
||||
|
||||
for line in reader.lines().map_while(Result::ok) {
|
||||
if line.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let (manifest_key, manifest_path) = line.split_once(' ')?;
|
||||
if manifest_key == key {
|
||||
return Some(PathBuf::from(manifest_path));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
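For reference, a Bazel runfiles manifest is a plain-text file of `<logical path> <absolute path>` pairs, one per line. Below is a minimal self-contained lookup over such text, equivalent in spirit to `runfiles_manifest_lookup` above (the real function reads the file named by `RUNFILES_MANIFEST_FILE` and skips malformed lines differently); the sample entries are illustrative, not from a real build.

```rust
use std::path::PathBuf;

// Looks up `key` in manifest text of the form "<logical path> <absolute path>".
fn lookup(manifest_text: &str, key: &str) -> Option<PathBuf> {
    manifest_text
        .lines()
        .filter(|line| !line.is_empty())
        .filter_map(|line| line.split_once(' '))
        .find(|(manifest_key, _)| *manifest_key == key)
        .map(|(_, path)| PathBuf::from(path))
}

fn main() {
    // Illustrative entries, not taken from a real build.
    let manifest = "_main/codex-rs/Cargo.toml /tmp/execroot/_main/codex-rs/Cargo.toml\n\
                    _main/codex-rs/tui/src/snapshots/example.snap /tmp/execroot/_main/codex-rs/tui/src/snapshots/example.snap";
    println!("{:?}", lookup(manifest, "_main/codex-rs/Cargo.toml"));
}
```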
|
||||
|
||||
pub fn runfiles_manifest_key_for_resource(bazel_package: &str, resource: &Path) -> String {
|
||||
let mut resource_path = resource.to_string_lossy().replace('\\', "/");
|
||||
while resource_path.starts_with("./") {
|
||||
resource_path = resource_path.trim_start_matches("./").to_string();
|
||||
}
|
||||
while resource_path.starts_with('/') {
|
||||
resource_path = resource_path.trim_start_matches('/').to_string();
|
||||
}
|
||||
|
||||
if resource_path.is_empty() {
|
||||
format!("_main/{bazel_package}")
|
||||
} else {
|
||||
format!("_main/{bazel_package}/{resource_path}")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runfiles_package_root_from_manifest(bazel_package: &str) -> Option<PathBuf> {
|
||||
let build_key = format!("_main/{bazel_package}/BUILD.bazel");
|
||||
if let Some(path) = runfiles_manifest_lookup(&build_key) {
|
||||
return path.parent().map(PathBuf::from);
|
||||
}
|
||||
|
||||
let cargo_key = format!("_main/{bazel_package}/Cargo.toml");
|
||||
runfiles_manifest_lookup(&cargo_key)
|
||||
.and_then(|path| path.parent().map(PathBuf::from))
|
||||
}
|
||||
|
||||
fn runfiles_manifest_root() -> Option<PathBuf> {
|
||||
let root = runfiles_manifest_lookup("_main/codex-rs/Cargo.toml")?;
|
||||
root.parent()?.parent().map(PathBuf::from)
|
||||
}
|
||||
|
||||
fn repo_root_from_current_dir() -> Option<PathBuf> {
|
||||
let mut dir = std::env::current_dir().ok()?;
|
||||
loop {
|
||||
if dir.join("codex-rs").is_dir() {
|
||||
return Some(dir);
|
||||
}
|
||||
if dir.file_name().is_some_and(|name| name == "codex-rs") {
|
||||
return dir.parent().map(PathBuf::from);
|
||||
}
|
||||
if !dir.pop() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Best-effort attempt to find the Buck project root for the currently running
|
||||
/// process.
|
||||
///
|
||||
|
||||
defs.bzl (1 line changed)
@@ -79,6 +79,7 @@ def codex_rust_crate(
    proc_macro_dev_deps = all_crate_deps(proc_macro_dev = True)

    test_env = {
        "INSTA_REQUIRE_FULL_MATCH": "0",
        "INSTA_WORKSPACE_ROOT": ".",
        "INSTA_SNAPSHOT_PATH": "src",
    }
patches/rules_rust_direct_deps.patch (new file, 515 lines)
@@ -0,0 +1,515 @@
|
||||
diff -ruN rules_rust_src/rust/platform/triple.bzl rules_rust_mod/rust/platform/triple.bzl
|
||||
--- rules_rust_src/rust/platform/triple.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/platform/triple.bzl 2026-01-13 20:19:36
|
||||
@@ -156,9 +156,10 @@
|
||||
if "win" in repository_ctx.os.name:
|
||||
_validate_cpu_architecture(arch, supported_architectures["windows"])
|
||||
prefix = "{}-pc-windows".format(arch)
|
||||
+ default_abi = "gnullvm"
|
||||
return triple("{}-{}".format(
|
||||
prefix,
|
||||
- abi.get(prefix, "msvc"),
|
||||
+ abi.get(prefix, default_abi),
|
||||
))
|
||||
|
||||
fail("Unhandled host os: {}", repository_ctx.os.name)
|
||||
diff -ruN rules_rust_src/rust/platform/triple_mappings.bzl rules_rust_mod/rust/platform/triple_mappings.bzl
|
||||
--- rules_rust_src/rust/platform/triple_mappings.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/platform/triple_mappings.bzl 2026-01-13 20:19:41
|
||||
@@ -29,6 +29,7 @@
|
||||
"i686-pc-windows-msvc": _support(std = True, host_tools = True),
|
||||
"i686-unknown-linux-gnu": _support(std = True, host_tools = True),
|
||||
"x86_64-apple-darwin": _support(std = True, host_tools = True),
|
||||
+ "x86_64-pc-windows-gnullvm": _support(std = True, host_tools = True),
|
||||
"x86_64-pc-windows-msvc": _support(std = True, host_tools = True),
|
||||
"x86_64-unknown-linux-gnu": _support(std = True, host_tools = True),
|
||||
"x86_64-unknown-nixos-gnu": _support(std = True, host_tools = True), # Same as `x86_64-unknown-linux-gnu` but with `@platforms//os:nixos`.
|
||||
@@ -45,6 +46,7 @@
|
||||
"aarch64-apple-ios": _support(std = True, host_tools = False),
|
||||
"aarch64-apple-ios-sim": _support(std = True, host_tools = False),
|
||||
"aarch64-linux-android": _support(std = True, host_tools = False),
|
||||
+ "aarch64-pc-windows-gnullvm": _support(std = True, host_tools = True),
|
||||
"aarch64-pc-windows-msvc": _support(std = True, host_tools = True),
|
||||
"aarch64-unknown-fuchsia": _support(std = True, host_tools = False),
|
||||
"aarch64-unknown-uefi": _support(std = True, host_tools = False),
|
||||
diff -ruN rules_rust_src/rust/private/clippy.bzl rules_rust_mod/rust/private/clippy.bzl
|
||||
--- rules_rust_src/rust/private/clippy.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/private/clippy.bzl 2026-01-13 09:56:14
|
||||
@@ -140,7 +140,11 @@
|
||||
ctx.rule.attr.lint_config[LintsInfo].clippy_lint_files + \
|
||||
ctx.rule.attr.lint_config[LintsInfo].rustc_lint_files
|
||||
|
||||
- compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs = collect_inputs(
|
||||
+ transitive_crate_dir = ctx.actions.declare_directory(ctx.label.name + "_transitive_crates")
|
||||
+ transitive_crate_dir_args = ctx.actions.args()
|
||||
+ transitive_crate_dir_args.add_all([transitive_crate_dir], expand_directories = False, format_each = "-Ldependency=%s")
|
||||
+
|
||||
+ compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs, _proc_macro_outputs = collect_inputs(
|
||||
ctx,
|
||||
ctx.rule.file,
|
||||
ctx.rule.files,
|
||||
@@ -151,8 +155,10 @@
|
||||
feature_configuration,
|
||||
crate_info,
|
||||
dep_info,
|
||||
+ crate_info.proc_macro_deps.to_list(),
|
||||
build_info,
|
||||
lint_files,
|
||||
+ transitive_crate_dir,
|
||||
)
|
||||
|
||||
if clippy_diagnostics_file:
|
||||
@@ -229,7 +235,7 @@
|
||||
outputs = outputs + [x for x in [clippy_diagnostics_file] if x],
|
||||
env = env,
|
||||
tools = [clippy_executable],
|
||||
- arguments = args.all,
|
||||
+ arguments = args.all + [transitive_crate_dir_args],
|
||||
mnemonic = "Clippy",
|
||||
progress_message = "Clippy %{label}",
|
||||
toolchain = "@rules_rust//rust:toolchain_type",
|
||||
diff -ruN rules_rust_src/rust/private/common.bzl rules_rust_mod/rust/private/common.bzl
|
||||
--- rules_rust_src/rust/private/common.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/private/common.bzl 2026-01-13 20:43:07
|
||||
@@ -31,9 +31,9 @@
|
||||
#
|
||||
# Note: Code in `.github/workflows/crate_universe.yaml` looks for this line, if
|
||||
# you remove it or change its format, you will also need to update that code.
|
||||
-DEFAULT_RUST_VERSION = "1.86.0"
|
||||
+DEFAULT_RUST_VERSION = "1.92.0"
|
||||
|
||||
-DEFAULT_NIGHTLY_ISO_DATE = "2025-04-03"
|
||||
+DEFAULT_NIGHTLY_ISO_DATE = "2025-10-03"
|
||||
|
||||
def _create_crate_info(**kwargs):
|
||||
"""A constructor for a `CrateInfo` provider
|
||||
diff -ruN rules_rust_src/rust/private/rust.bzl rules_rust_mod/rust/private/rust.bzl
|
||||
--- rules_rust_src/rust/private/rust.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/private/rust.bzl 2026-01-13 09:56:14
|
||||
@@ -935,6 +935,7 @@
|
||||
},
|
||||
fragments = ["cpp"],
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1033,6 +1034,7 @@
|
||||
fragments = ["cpp"],
|
||||
cfg = _rust_static_library_transition,
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1082,6 +1084,7 @@
|
||||
fragments = ["cpp"],
|
||||
cfg = _rust_shared_library_transition,
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1138,6 +1141,7 @@
|
||||
),
|
||||
fragments = ["cpp"],
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1222,6 +1226,7 @@
|
||||
fragments = ["cpp"],
|
||||
cfg = _rust_binary_transition,
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1365,6 +1370,7 @@
|
||||
executable = True,
|
||||
fragments = ["cpp"],
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1381,6 +1387,7 @@
|
||||
attrs = dict(_common_attrs_for_binary_without_process_wrapper(_common_attrs).items()),
|
||||
fragments = ["cpp"],
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1421,6 +1428,7 @@
|
||||
fragments = ["cpp"],
|
||||
test = True,
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
@@ -1458,6 +1466,7 @@
|
||||
cfg = _rust_test_transition,
|
||||
test = True,
|
||||
toolchains = [
|
||||
+ str(Label("@@bazel_lib+//lib:copy_to_directory_toolchain_type")),
|
||||
str(Label("//rust:toolchain_type")),
|
||||
config_common.toolchain_type("@bazel_tools//tools/cpp:toolchain_type", mandatory = False),
|
||||
],
|
||||
diff -ruN rules_rust_src/rust/private/rustc.bzl rules_rust_mod/rust/private/rustc.bzl
|
||||
--- rules_rust_src/rust/private/rustc.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/private/rustc.bzl 2026-01-13 20:55:51
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
"""Functionality for constructing actions that invoke the Rust compiler"""
|
||||
|
||||
+load("@@bazel_lib+//lib:copy_to_directory.bzl", "copy_to_directory_bin_action")
|
||||
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
|
||||
load(
|
||||
"@bazel_tools//tools/build_defs/cc:action_names.bzl",
|
||||
@@ -695,8 +696,10 @@
|
||||
feature_configuration,
|
||||
crate_info,
|
||||
dep_info,
|
||||
+ proc_macro_deps,
|
||||
build_info,
|
||||
lint_files,
|
||||
+ transitive_crate_dir,
|
||||
stamp = False,
|
||||
force_depend_on_objects = False,
|
||||
experimental_use_cc_common_link = False,
|
||||
@@ -714,8 +717,10 @@
|
||||
feature_configuration (FeatureConfiguration): Feature configuration to be queried.
|
||||
crate_info (CrateInfo): The Crate information of the crate to process build scripts for.
|
||||
dep_info (DepInfo): The target Crate's dependency information.
|
||||
+ proc_macro_deps (list[DepVariantInfo]): Proc-macro dependencies for the crate.
|
||||
build_info (BuildInfo): The target Crate's build settings.
|
||||
lint_files (list): List of files with rustc args for the Crate's lint settings.
|
||||
+ transitive_crate_dir (File): Directory containing consolidated crate outputs.
|
||||
stamp (bool, optional): Whether or not workspace status stamping is enabled. For more details see
|
||||
https://docs.bazel.build/versions/main/user-manual.html#flag--stamp
|
||||
force_depend_on_objects (bool, optional): Forces dependencies of this rule to be objects rather than
|
||||
@@ -773,6 +778,19 @@
|
||||
if _depend_on_metadata(crate_info, force_depend_on_objects):
|
||||
transitive_crate_outputs = dep_info.transitive_metadata_outputs
|
||||
|
||||
+ proc_macro_outputs = []
|
||||
+ for dep in proc_macro_deps:
|
||||
+ dep_crate_info = dep.crate_info
|
||||
+ if dep_crate_info and _is_proc_macro(dep_crate_info):
|
||||
+ proc_macro_outputs.append(dep_crate_info.output)
|
||||
+ for dep in dep_info.direct_crates.to_list():
|
||||
+ if hasattr(dep, "dep"):
|
||||
+ dep_crate_info = dep.dep
|
||||
+ else:
|
||||
+ dep_crate_info = dep
|
||||
+ if _is_proc_macro(dep_crate_info) and dep_crate_info.output not in proc_macro_outputs:
|
||||
+ proc_macro_outputs.append(dep_crate_info.output)
|
||||
+
|
||||
nolinkstamp_compile_direct_inputs = []
|
||||
if build_info:
|
||||
if build_info.rustc_env:
|
||||
@@ -800,6 +818,8 @@
|
||||
else:
|
||||
runtime_libs = cc_toolchain.static_runtime_lib(feature_configuration = feature_configuration)
|
||||
|
||||
+ proc_macro_outputs_depset = depset(proc_macro_outputs)
|
||||
+
|
||||
nolinkstamp_compile_inputs = depset(
|
||||
nolinkstamp_compile_direct_inputs +
|
||||
additional_transitive_inputs,
|
||||
@@ -808,6 +828,7 @@
|
||||
linker_depset,
|
||||
crate_info.srcs,
|
||||
transitive_crate_outputs,
|
||||
+ proc_macro_outputs_depset,
|
||||
crate_info.compile_data,
|
||||
dep_info.transitive_proc_macro_data,
|
||||
toolchain.all_files,
|
||||
@@ -846,8 +867,27 @@
|
||||
# If stamping is enabled include the volatile and stable status info file
|
||||
stamp_info = [ctx.version_file, ctx.info_file] if stamp else []
|
||||
|
||||
+ transitive_crate_files = depset(
|
||||
+ transitive = [
|
||||
+ dep_info.transitive_crate_outputs,
|
||||
+ dep_info.transitive_metadata_outputs,
|
||||
+ ],
|
||||
+ ).to_list() + proc_macro_outputs
|
||||
+
|
||||
+ copy_to_directory_bin_action(
|
||||
+ ctx,
|
||||
+ name = transitive_crate_dir.basename,
|
||||
+ dst = transitive_crate_dir,
|
||||
+ copy_to_directory_bin = ctx.toolchains["@@bazel_lib+//lib:copy_to_directory_toolchain_type"].copy_to_directory_info.bin,
|
||||
+ files = transitive_crate_files,
|
||||
+ root_paths = [],
|
||||
+ replace_prefixes = {"**/": ""},
|
||||
+ hardlink = "off" if toolchain.target_os == "windows" else "auto",
|
||||
+ include_external_repositories = ["**"],
|
||||
+ )
|
||||
+
|
||||
compile_inputs = depset(
|
||||
- linkstamp_outs + stamp_info,
|
||||
+ [transitive_crate_dir] + linkstamp_outs + stamp_info,
|
||||
transitive = [
|
||||
nolinkstamp_compile_inputs,
|
||||
],
|
||||
@@ -870,7 +910,7 @@
|
||||
build_env_files = list(build_env_files)
|
||||
build_env_files.append(build_env_file)
|
||||
compile_inputs = depset(build_env_files + lint_files, transitive = [build_script_compile_inputs, compile_inputs])
|
||||
- return compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs
|
||||
+ return compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs, proc_macro_outputs_depset
|
||||
|
||||
def _will_emit_object_file(emit):
|
||||
for e in emit:
|
||||
@@ -892,6 +932,7 @@
|
||||
feature_configuration,
|
||||
crate_info,
|
||||
dep_info,
|
||||
+ transitive_crate_dir,
|
||||
linkstamp_outs,
|
||||
ambiguous_libs,
|
||||
output_hash,
|
||||
@@ -923,6 +964,7 @@
|
||||
feature_configuration (FeatureConfiguration): Class used to construct command lines from CROSSTOOL features.
|
||||
crate_info (CrateInfo): The CrateInfo provider of the target crate
|
||||
dep_info (DepInfo): The DepInfo provider of the target crate
|
||||
+ transitive_crate_dir (File): Directory containing consolidated crate outputs.
|
||||
linkstamp_outs (list): Linkstamp outputs of native dependencies
|
||||
ambiguous_libs (dict): Ambiguous libs, see `_disambiguate_libs`
|
||||
output_hash (str): The hashed path of the crate root
|
||||
@@ -1155,7 +1197,13 @@
|
||||
use_metadata = _depend_on_metadata(crate_info, force_depend_on_objects)
|
||||
|
||||
# These always need to be added, even if not linking this crate.
|
||||
- add_crate_link_flags(rustc_flags, dep_info, force_all_deps_direct, use_metadata)
|
||||
+ add_crate_link_flags(
|
||||
+ rustc_flags,
|
||||
+ dep_info,
|
||||
+ force_all_deps_direct,
|
||||
+ use_metadata,
|
||||
+ transitive_crate_dir,
|
||||
+ )
|
||||
|
||||
needs_extern_proc_macro_flag = _is_proc_macro(crate_info) and crate_info.edition != "2015"
|
||||
if needs_extern_proc_macro_flag:
|
||||
@@ -1359,7 +1407,11 @@
|
||||
rust_flags = rust_flags + ctx.attr.lint_config[LintsInfo].rustc_lint_flags
|
||||
lint_files = lint_files + ctx.attr.lint_config[LintsInfo].rustc_lint_files
|
||||
|
||||
- compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs = collect_inputs(
|
||||
+ transitive_crate_dir = ctx.actions.declare_directory(attr.name + "_transitive_crates")
|
||||
+ transitive_crate_dir_args = ctx.actions.args()
|
||||
+ transitive_crate_dir_args.add_all([transitive_crate_dir], expand_directories = False, format_each = "-Ldependency=%s")
|
||||
+
|
||||
+ compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs, proc_macro_outputs = collect_inputs(
|
||||
ctx = ctx,
|
||||
file = ctx.file,
|
||||
files = ctx.files,
|
||||
@@ -1369,8 +1421,10 @@
|
||||
feature_configuration = feature_configuration,
|
||||
crate_info = crate_info,
|
||||
dep_info = dep_info,
|
||||
+ proc_macro_deps = proc_macro_deps,
|
||||
build_info = build_info,
|
||||
lint_files = lint_files,
|
||||
+ transitive_crate_dir = transitive_crate_dir,
|
||||
stamp = stamp,
|
||||
experimental_use_cc_common_link = experimental_use_cc_common_link,
|
||||
)
|
||||
@@ -1410,6 +1464,7 @@
|
||||
feature_configuration = feature_configuration,
|
||||
crate_info = crate_info,
|
||||
dep_info = dep_info,
|
||||
+ transitive_crate_dir = transitive_crate_dir,
|
||||
linkstamp_outs = linkstamp_outs,
|
||||
ambiguous_libs = ambiguous_libs,
|
||||
output_hash = output_hash,
|
||||
@@ -1437,6 +1492,7 @@
|
||||
feature_configuration = feature_configuration,
|
||||
crate_info = crate_info,
|
||||
dep_info = dep_info,
|
||||
+ transitive_crate_dir = transitive_crate_dir,
|
||||
linkstamp_outs = linkstamp_outs,
|
||||
ambiguous_libs = ambiguous_libs,
|
||||
output_hash = output_hash,
|
||||
@@ -1509,7 +1565,8 @@
|
||||
inputs = compile_inputs,
|
||||
outputs = action_outputs,
|
||||
env = env,
|
||||
- arguments = args.all,
|
||||
+ arguments = args.all + [transitive_crate_dir_args],
|
||||
+ tools = proc_macro_outputs.to_list(),
|
||||
mnemonic = "Rustc",
|
||||
progress_message = "Compiling Rust {} {}{} ({} file{})".format(
|
||||
crate_info.type,
|
||||
@@ -1527,7 +1584,8 @@
|
||||
inputs = compile_inputs,
|
||||
outputs = [build_metadata] + [x for x in [rustc_rmeta_output] if x],
|
||||
env = env,
|
||||
- arguments = args_metadata.all,
|
||||
+ arguments = args_metadata.all + [transitive_crate_dir_args],
|
||||
+ tools = proc_macro_outputs.to_list(),
|
||||
mnemonic = "RustcMetadata",
|
||||
progress_message = "Compiling Rust metadata {} {}{} ({} file{})".format(
|
||||
crate_info.type,
|
||||
@@ -1547,7 +1605,8 @@
|
||||
inputs = compile_inputs,
|
||||
outputs = action_outputs,
|
||||
env = env,
|
||||
- arguments = [args.rustc_path, args.rustc_flags],
|
||||
+ arguments = [args.rustc_path, args.rustc_flags, transitive_crate_dir_args],
|
||||
+ tools = proc_macro_outputs.to_list(),
|
||||
mnemonic = "Rustc",
|
||||
progress_message = "Compiling Rust (without process_wrapper) {} {}{} ({} file{})".format(
|
||||
crate_info.type,
|
||||
@@ -2119,7 +2178,7 @@
|
||||
dirs[f.dirname] = None
|
||||
return dirs.keys()
|
||||
|
||||
-def add_crate_link_flags(args, dep_info, force_all_deps_direct = False, use_metadata = False):
|
||||
+def add_crate_link_flags(args, dep_info, force_all_deps_direct = False, use_metadata = False, transitive_crate_dir = None):
|
||||
"""Adds link flags to an Args object reference
|
||||
|
||||
Args:
|
||||
@@ -2137,6 +2196,11 @@
|
||||
],
|
||||
) if force_all_deps_direct else dep_info.direct_crates
|
||||
|
||||
+ if transitive_crate_dir:
|
||||
+ for crate in direct_crates.to_list():
|
||||
+ _add_crate_link_flag_in_dir(args, crate, transitive_crate_dir, use_metadata)
|
||||
+ return
|
||||
+
|
||||
crate_to_link_flags = _crate_to_link_flag_metadata if use_metadata else _crate_to_link_flag
|
||||
args.add_all(direct_crates, uniquify = True, map_each = crate_to_link_flags)
|
||||
|
||||
@@ -2146,6 +2210,34 @@
|
||||
uniquify = True,
|
||||
format_each = "-Ldependency=%s",
|
||||
)
|
||||
+
|
||||
+def _add_crate_link_flag_in_dir(args, crate, transitive_crate_dir, use_metadata):
|
||||
+ """A helper macro used by `add_crate_link_flags` for adding crate link flags to a Arg object
|
||||
+
|
||||
+ Args:
|
||||
+ crate (CrateInfo|AliasableDepInfo): A CrateInfo or an AliasableDepInfo provider
|
||||
+ transitive_crate_dir (File): Directory containing consolidated crate outputs.
|
||||
+ use_metadata (bool): Whether to prefer metadata files when available.
|
||||
+
|
||||
+ Returns:
|
||||
+ list: Link flags for the given provider
|
||||
+ """
|
||||
+ if hasattr(crate, "dep"):
|
||||
+ name = crate.name
|
||||
+ crate_info = crate.dep
|
||||
+ else:
|
||||
+ name = crate.name
|
||||
+ crate_info = crate
|
||||
+
|
||||
+ if _is_proc_macro(crate_info):
|
||||
+ args.add("--extern={}={}/{}".format(name, transitive_crate_dir.path, crate_info.output.basename))
|
||||
+ return
|
||||
+
|
||||
+ lib_or_meta = crate_info.metadata if use_metadata else crate_info.output
|
||||
+ if use_metadata and (not crate_info.metadata or not crate_info.metadata_supports_pipelining):
|
||||
+ lib_or_meta = crate_info.output
|
||||
+
|
||||
+ args.add("--extern={}={}/{}".format(name, transitive_crate_dir.path, lib_or_meta.basename))
|
||||
|
||||
def _crate_to_link_flag_metadata(crate):
|
||||
"""A helper macro used by `add_crate_link_flags` for adding crate link flags to a Arg object
|
||||
diff -ruN rules_rust_src/rust/private/rustdoc.bzl rules_rust_mod/rust/private/rustdoc.bzl
|
||||
--- rules_rust_src/rust/private/rustdoc.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/private/rustdoc.bzl 2026-01-13 09:56:14
|
||||
@@ -103,7 +103,11 @@
|
||||
aliases = crate_info.aliases,
|
||||
)
|
||||
|
||||
- compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs = collect_inputs(
|
||||
+ transitive_crate_dir = ctx.actions.declare_directory(ctx.label.name + "_transitive_crates")
|
||||
+ transitive_crate_dir_args = ctx.actions.args()
|
||||
+ transitive_crate_dir_args.add_all([transitive_crate_dir], expand_directories = False, format_each = "-Ldependency=%s")
|
||||
+
|
||||
+ compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs, _proc_macro_outputs = collect_inputs(
|
||||
ctx = ctx,
|
||||
file = ctx.file,
|
||||
files = ctx.files,
|
||||
@@ -113,8 +117,10 @@
|
||||
feature_configuration = feature_configuration,
|
||||
crate_info = crate_info,
|
||||
dep_info = dep_info,
|
||||
+ proc_macro_deps = crate_info.proc_macro_deps.to_list(),
|
||||
build_info = build_info,
|
||||
lint_files = lint_files,
|
||||
+ transitive_crate_dir = transitive_crate_dir,
|
||||
# If this is a rustdoc test, we need to depend on rlibs rather than .rmeta.
|
||||
force_depend_on_objects = is_test,
|
||||
include_link_flags = False,
|
||||
@@ -167,7 +173,7 @@
|
||||
executable = ctx.executable._process_wrapper,
|
||||
inputs = all_inputs,
|
||||
env = env,
|
||||
- arguments = args.all,
|
||||
+ arguments = args.all + [transitive_crate_dir_args],
|
||||
tools = [toolchain.rust_doc],
|
||||
)
|
||||
|
||||
diff -ruN rules_rust_src/rust/private/unpretty.bzl rules_rust_mod/rust/private/unpretty.bzl
|
||||
--- rules_rust_src/rust/private/unpretty.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/private/unpretty.bzl 2026-01-13 09:56:14
|
||||
@@ -147,7 +147,11 @@
|
||||
)
|
||||
lint_files = []
|
||||
|
||||
- compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs = collect_inputs(
|
||||
+ transitive_crate_dir = ctx.actions.declare_directory(ctx.label.name + "_transitive_crates")
|
||||
+ transitive_crate_dir_args = ctx.actions.args()
|
||||
+ transitive_crate_dir_args.add_all([transitive_crate_dir], expand_directories = False, format_each = "-Ldependency=%s")
|
||||
+
|
||||
+ compile_inputs, out_dir, build_env_files, build_flags_files, linkstamp_outs, ambiguous_libs, _proc_macro_outputs = collect_inputs(
|
||||
ctx,
|
||||
ctx.rule.file,
|
||||
ctx.rule.files,
|
||||
@@ -158,8 +162,10 @@
|
||||
feature_configuration,
|
||||
crate_info,
|
||||
dep_info,
|
||||
+ crate_info.proc_macro_deps.to_list(),
|
||||
build_info,
|
||||
lint_files,
|
||||
+ transitive_crate_dir,
|
||||
)
|
||||
|
||||
output_groups = {}
|
||||
@@ -217,7 +223,7 @@
|
||||
inputs = compile_inputs,
|
||||
outputs = [unpretty_out],
|
||||
env = env,
|
||||
- arguments = args.all,
|
||||
+ arguments = args.all + [transitive_crate_dir_args],
|
||||
mnemonic = mnemonic,
|
||||
toolchain = "@rules_rust//rust:toolchain_type",
|
||||
)
|
||||
diff -ruN rules_rust_src/rust/repositories.bzl rules_rust_mod/rust/repositories.bzl
|
||||
--- rules_rust_src/rust/repositories.bzl 2025-12-09 17:08:33
|
||||
+++ rules_rust_mod/rust/repositories.bzl 2026-01-13 20:19:46
|
||||
@@ -43,12 +43,12 @@
|
||||
# Note: Code in `.github/workflows/crate_universe.yaml` looks for this line, if you remove it or change its format, you will also need to update that code.
|
||||
DEFAULT_TOOLCHAIN_TRIPLES = {
|
||||
"aarch64-apple-darwin": "rust_macos_aarch64",
|
||||
- "aarch64-pc-windows-msvc": "rust_windows_aarch64",
|
||||
+ "aarch64-pc-windows-gnullvm": "rust_windows_aarch64",
|
||||
"aarch64-unknown-linux-gnu": "rust_linux_aarch64",
|
||||
"powerpc64le-unknown-linux-gnu": "rust_linux_powerpc64le",
|
||||
"s390x-unknown-linux-gnu": "rust_linux_s390x",
|
||||
"x86_64-apple-darwin": "rust_macos_x86_64",
|
||||
- "x86_64-pc-windows-msvc": "rust_windows_x86_64",
|
||||
+ "x86_64-pc-windows-gnullvm": "rust_windows_x86_64",
|
||||
"x86_64-unknown-freebsd": "rust_freebsd_x86_64",
|
||||
"x86_64-unknown-linux-gnu": "rust_linux_x86_64",
|
||||
}
|
||||