Compare commits

...

13 Commits

Author SHA1 Message Date
Friel
679de088a3 Keep max-depth collab tool schemas stable 2026-04-04 01:12:31 +00:00
Friel
9d48bd4dc3 Cover subagent request_user_input rejection 2026-04-03 22:53:55 +00:00
Friel
df88dc103a Keep request_user_input visible in subagents 2026-04-03 20:39:29 +00:00
Friel
e09cc1bd9d Restore rules_rust patch context indentation 2026-04-02 07:58:13 +00:00
Friel
75bd6a1f29 Fix rules_rust bootstrap patch formatting 2026-04-02 07:55:17 +00:00
Friel
14dfcc1262 Merge upstream/main into subagent inbox injection 2026-04-02 07:49:01 +00:00
Friel
afaa5e887c test(core): annotate inbox control test literals 2026-03-28 16:22:01 -07:00
Friel
83e58d5036 test(app-server): annotate remaining shell workdir literals 2026-03-28 12:06:07 -07:00
Friel
73418e2882 test(app-server): annotate inbox resume literals 2026-03-28 11:56:40 -07:00
Friel
ebc45db194 test(features): annotate auth literals 2026-03-28 11:35:15 -07:00
Friel
3be3e8fc87 fix(core): apply refreshed clippy suggestions
Co-authored-by: Codex <noreply@openai.com>
2026-03-28 11:35:15 -07:00
Friel
4d0a1431f8 fix(core): adapt inbox delivery rebase to current main
Keep the rebased inbox-delivery branch on current core APIs by moving the inbox-specific coverage into current main's control tests and updating the turn-restart helper to use RegularTask.

Co-authored-by: Codex <noreply@openai.com>
2026-03-28 11:35:15 -07:00
Friel
0e91619094 feat(agents): enable subagent inbox delivery
Preserve subagent inbox delivery on the current origin/main base and collapse the branch back to a single commit for easier future restacks.
2026-03-28 11:35:15 -07:00
43 changed files with 2230 additions and 196 deletions

View File

@@ -8846,6 +8846,7 @@ mod tests {
sandbox_policy: codex_protocol::protocol::SandboxPolicy::DangerFullAccess,
cwd: PathBuf::from("/tmp"),
ephemeral: false,
agent_use_function_call_inbox: false,
reasoning_effort: None,
personality: None,
session_source: SessionSource::Cli,

View File

@@ -20,11 +20,17 @@ use core_test_support::fs_wait;
use pretty_assertions::assert_eq;
use serde_json::Value;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;
#[cfg(windows)]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(25);
#[cfg(not(windows))]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
#[cfg(windows)]
const DEFAULT_NOTIFY_FILE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(25);
#[cfg(not(windows))]
const DEFAULT_NOTIFY_FILE_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(5);
#[tokio::test]
async fn initialize_uses_client_info_name_as_originator() -> Result<()> {
@@ -270,9 +276,9 @@ async fn turn_start_notify_payload_includes_initialize_client_name() -> Result<(
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
fs_wait::wait_for_path_exists(&notify_file, Duration::from_secs(5)).await?;
let payload_raw = tokio::fs::read_to_string(&notify_file).await?;
let notify_file = Path::new(&notify_file);
fs_wait::wait_for_path_exists(notify_file, DEFAULT_NOTIFY_FILE_TIMEOUT).await?;
let payload_raw = tokio::fs::read_to_string(notify_file).await?;
let payload: Value = serde_json::from_str(&payload_raw)?;
assert_eq!(payload["client"], "xcode");

View File

@@ -1,3 +1,7 @@
use super::analytics::assert_basic_thread_initialized_event;
use super::analytics::enable_analytics_capture;
use super::analytics::thread_initialized_event;
use super::analytics::wait_for_analytics_payload;
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
@@ -62,52 +66,22 @@ use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use tempfile::TempDir;
use tokio::time::sleep;
use tokio::time::timeout;
use uuid::Uuid;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
use super::analytics::assert_basic_thread_initialized_event;
use super::analytics::enable_analytics_capture;
use super::analytics::thread_initialized_event;
use super::analytics::wait_for_analytics_payload;
#[cfg(windows)]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(25);
#[cfg(not(windows))]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT: &str = "You are Codex, a coding agent based on GPT-5. You and the user share the same workspace and collaborate to achieve the user's goals.";
async fn wait_for_responses_request_count(
server: &wiremock::MockServer,
expected_count: usize,
) -> Result<()> {
timeout(DEFAULT_READ_TIMEOUT, async {
loop {
let Some(requests) = server.received_requests().await else {
anyhow::bail!("wiremock did not record requests");
};
let responses_request_count = requests
.iter()
.filter(|request| {
request.method == "POST" && request.url.path().ends_with("/responses")
})
.count();
if responses_request_count == expected_count {
return Ok::<(), anyhow::Error>(());
}
if responses_request_count > expected_count {
anyhow::bail!(
"expected exactly {expected_count} /responses requests, got {responses_request_count}"
);
}
tokio::time::sleep(std::time::Duration::from_millis(10)).await;
}
})
.await??;
Ok(())
}
#[tokio::test]
async fn thread_resume_rejects_unmaterialized_thread() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -1069,13 +1043,9 @@ async fn thread_resume_replays_pending_command_execution_request_approval() -> R
let responses = vec![
create_final_assistant_message_sse_response("seeded")?,
create_shell_command_sse_response(
vec![
"python3".to_string(),
"-c".to_string(),
"print(42)".to_string(),
],
fast_shell_command(),
/*workdir*/ None,
Some(5000),
Some(1000),
"call-1",
)?,
create_final_assistant_message_sse_response("done")?,
@@ -1195,7 +1165,7 @@ async fn thread_resume_replays_pending_command_execution_request_approval() -> R
primary.read_stream_until_notification_message("turn/completed"),
)
.await??;
wait_for_responses_request_count(&server, /*expected_count*/ 3).await?;
wait_for_mock_request_count(&server, /*expected*/ 3).await?;
Ok(())
}
@@ -1361,11 +1331,50 @@ async fn thread_resume_replays_pending_file_change_request_approval() -> Result<
primary.read_stream_until_notification_message("turn/completed"),
)
.await??;
wait_for_responses_request_count(&server, /*expected_count*/ 3).await?;
wait_for_mock_request_count(&server, /*expected*/ 3).await?;
Ok(())
}
/// Cross-platform shell command that prints `42` almost instantly, keeping
/// the approval-replay tests fast on every OS.
fn fast_shell_command() -> Vec<String> {
    let argv: &[&str] = if cfg!(windows) {
        &["cmd", "/d", "/c", "echo 42"]
    } else {
        &["python3", "-c", "print(42)"]
    };
    argv.iter().map(|arg| (*arg).to_string()).collect()
}
/// Polls the mock server until it has recorded at least `expected` requests,
/// failing once `DEFAULT_READ_TIMEOUT` has elapsed without reaching it.
async fn wait_for_mock_request_count(server: &MockServer, expected: usize) -> Result<()> {
    let give_up_at = tokio::time::Instant::now() + DEFAULT_READ_TIMEOUT;
    loop {
        // `received_requests` yields None when request recording is disabled.
        let recorded = server
            .received_requests()
            .await
            .ok_or_else(|| anyhow::anyhow!("failed to fetch received requests"))?;
        let observed = recorded.len();
        if observed >= expected {
            return Ok(());
        }
        if tokio::time::Instant::now() >= give_up_at {
            anyhow::bail!("expected at least {expected} mock requests, observed {observed}");
        }
        sleep(std::time::Duration::from_millis(50)).await;
    }
}
#[tokio::test]
async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;

View File

@@ -273,9 +273,14 @@ async fn thread_start_accepts_flex_service_tier() -> Result<()> {
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path(), &server.uri())?;
let read_timeout = if cfg!(windows) {
std::time::Duration::from_secs(15)
} else {
DEFAULT_READ_TIMEOUT
};
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
timeout(read_timeout, mcp.initialize()).await??;
let req_id = mcp
.send_thread_start_request(ThreadStartParams {
@@ -285,7 +290,7 @@ async fn thread_start_accepts_flex_service_tier() -> Result<()> {
.await?;
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
read_timeout,
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
)
.await??;

View File

@@ -33,6 +33,9 @@ use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;
#[cfg(windows)]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(25);
#[cfg(not(windows))]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
async fn wait_for_responses_request_count_to_stabilize(

View File

@@ -1026,24 +1026,16 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
// Second turn same, but we'll set approval_policy=never to avoid elicitation.
let responses = vec![
create_shell_command_sse_response(
vec![
"python3".to_string(),
"-c".to_string(),
"print(42)".to_string(),
],
fast_shell_command(),
/*workdir*/ None,
Some(5000),
Some(1000),
"call1",
)?,
create_final_assistant_message_sse_response("done 1")?,
create_shell_command_sse_response(
vec![
"python3".to_string(),
"-c".to_string(),
"print(42)".to_string(),
],
fast_shell_command(),
/*workdir*/ None,
Some(5000),
Some(1000),
"call2",
)?,
create_final_assistant_message_sse_response("done 2")?,
@@ -1170,6 +1162,23 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
Ok(())
}
/// Builds the quickest shell invocation that emits `42` on the current
/// platform, so exec-approval turns complete without noticeable delay.
fn fast_shell_command() -> Vec<String> {
    let args = if cfg!(windows) {
        vec!["cmd", "/d", "/c", "echo 42"]
    } else {
        vec!["python3", "-c", "print(42)"]
    };
    args.into_iter().map(str::to_string).collect()
}
#[tokio::test]
async fn turn_start_exec_approval_decline_v2() -> Result<()> {
skip_if_no_network!(Ok(()));

View File

@@ -332,6 +332,9 @@
"default": null,
"description": "Optional feature toggles scoped to this profile.",
"properties": {
"agent_function_call_inbox": {
"type": "boolean"
},
"apply_patch_freeform": {
"type": "boolean"
},
@@ -2030,6 +2033,9 @@
"default": null,
"description": "Centralized feature flags (new). Prefer this over individual toggles.",
"properties": {
"agent_function_call_inbox": {
"type": "boolean"
},
"apply_patch_freeform": {
"type": "boolean"
},

View File

@@ -2,6 +2,8 @@ use crate::codex::Session;
use crate::codex::TurnContext;
use crate::function_tool::FunctionCallError;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use std::sync::Arc;
/// Resolves a single tool-facing agent target to a thread id.
@@ -14,6 +16,13 @@ pub(crate) async fn resolve_agent_target(
if let Ok(thread_id) = ThreadId::from_string(target) {
return Ok(thread_id);
}
if matches!(target, "parent" | "root")
&& let SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
parent_thread_id, ..
}) = &turn.session_source
{
return Ok(*parent_thread_id);
}
session
.services

File diff suppressed because it is too large Load Diff

View File

@@ -14,7 +14,10 @@ use codex_login::CodexAuth;
use codex_protocol::AgentPath;
use codex_protocol::config_types::ModeKind;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::AGENT_INBOX_KIND;
use codex_protocol::protocol::AgentInboxPayload;
use codex_protocol::protocol::ErrorEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::InterAgentCommunication;
@@ -414,6 +417,179 @@ async fn send_input_submits_user_message() {
assert_eq!(captured, Some(expected));
}
// The inbox envelope without a prepended turn-start message must be exactly
// a function_call followed by its matching function_call_output.
#[test]
fn build_agent_inbox_items_emits_function_call_and_output() {
let sender_thread_id = ThreadId::new();
let items = build_agent_inbox_items(
sender_thread_id,
"watchdog update".to_string(),
/*prepend_turn_start_user_message*/ false,
)
.expect("tool role should build inbox items");
assert_eq!(items.len(), 2);
// Pull the call_id out of the synthetic function call so the output's
// call_id can be checked against it below.
let call_id = match &items[0] {
ResponseInputItem::FunctionCall {
name,
arguments,
call_id,
} => {
assert_eq!(name, AGENT_INBOX_KIND);
assert_eq!(arguments, "{}");
call_id.clone()
}
other => panic!("expected function call item, got {other:?}"),
};
match &items[1] {
ResponseInputItem::FunctionCallOutput {
call_id: output_call_id,
output,
} => {
assert_eq!(output_call_id, &call_id);
// The output body carries a JSON-encoded AgentInboxPayload; round-trip
// it and confirm the sender metadata survived.
let output_text = output
.body
.to_text()
.expect("payload should convert to text");
let payload: AgentInboxPayload =
serde_json::from_str(&output_text).expect("payload should be valid json");
assert!(payload.injected);
assert_eq!(payload.kind, AGENT_INBOX_KIND);
assert_eq!(payload.sender_thread_id, sender_thread_id);
assert_eq!(payload.message, "watchdog update");
}
other => panic!("expected function call output item, got {other:?}"),
}
}
// Requesting prepend_turn_start_user_message adds a leading empty user
// message in front of the call/output pair.
#[test]
fn build_agent_inbox_items_prepends_empty_user_message_when_requested() {
let sender_thread_id = ThreadId::new();
let items = build_agent_inbox_items(
sender_thread_id,
"watchdog update".to_string(),
/*prepend_turn_start_user_message*/ true,
)
.expect("tool role should build inbox items");
assert_eq!(items.len(), 3);
// items[0] is the synthetic empty user message used to start a turn.
assert_eq!(
items[0],
ResponseInputItem::Message {
role: "user".to_string(),
content: vec![ContentItem::InputText {
text: String::new(),
}],
}
);
assert_matches!(&items[1], ResponseInputItem::FunctionCall { .. });
assert_matches!(&items[2], ResponseInputItem::FunctionCallOutput { .. });
}
// With the function-call inbox left at its default (disabled), an agent
// message to a root thread is delivered as plain user input.
#[tokio::test]
async fn send_agent_message_to_root_thread_defaults_to_user_input() {
let harness = AgentControlHarness::new().await;
let (receiver_thread_id, _thread) = harness.start_thread().await;
let sender_thread_id = ThreadId::new();
let submission_id = harness
.control
.send_agent_message(
receiver_thread_id,
sender_thread_id,
"watchdog update".to_string(),
)
.await
.expect("send_agent_message should succeed");
assert!(!submission_id.is_empty());
// Expect an Op::UserInput carrying the raw message text for the receiver.
let expected = (
receiver_thread_id,
Op::UserInput {
items: vec![UserInput::Text {
text: "watchdog update".to_string(),
text_elements: Vec::new(),
}],
final_output_json_schema: None,
},
);
let captured = harness
.manager
.captured_ops()
.into_iter()
.find(|entry| *entry == expected);
assert_eq!(captured, Some(expected));
}
// With agent_use_function_call_inbox enabled, the same delivery becomes an
// Op::InjectResponseItems carrying the three-part inbox envelope.
#[tokio::test]
async fn send_agent_message_to_root_thread_injects_response_items_when_enabled() {
let mut harness = AgentControlHarness::new().await;
harness.config.agent_use_function_call_inbox = true;
let (receiver_thread_id, _thread) = harness.start_thread().await;
let sender_thread_id = ThreadId::new();
let submission_id = harness
.control
.send_agent_message(
receiver_thread_id,
sender_thread_id,
"watchdog update".to_string(),
)
.await
.expect("send_agent_message should succeed");
assert!(!submission_id.is_empty());
let captured = harness
.manager
.captured_ops()
.into_iter()
.find(|(thread_id, op)| {
*thread_id == receiver_thread_id && matches!(op, Op::InjectResponseItems { .. })
})
.expect("expected injected agent inbox op");
let Op::InjectResponseItems { items } = captured.1 else {
unreachable!("matched above");
};
assert_eq!(items.len(), 3);
// items[0]: the prepended empty user message that can start a turn.
match &items[0] {
ResponseInputItem::Message { role, content } => {
assert_eq!(role, "user");
assert_eq!(
content,
&vec![ContentItem::InputText {
text: String::new(),
}]
);
}
other => panic!("expected prepended user message, got {other:?}"),
}
// items[1]: the synthetic inbox function call.
match &items[1] {
ResponseInputItem::FunctionCall {
name, arguments, ..
} => {
assert_eq!(name, AGENT_INBOX_KIND);
assert_eq!(arguments, "{}");
}
other => panic!("expected function call item, got {other:?}"),
}
// items[2]: the matching output holding the JSON payload.
match &items[2] {
ResponseInputItem::FunctionCallOutput { output, .. } => {
let output_text = output
.body
.to_text()
.expect("payload should convert to text");
let payload: AgentInboxPayload =
serde_json::from_str(&output_text).expect("payload should be valid json");
assert_eq!(payload.sender_thread_id, sender_thread_id);
assert_eq!(payload.message, "watchdog update");
}
other => panic!("expected function call output item, got {other:?}"),
}
}
#[tokio::test]
async fn send_inter_agent_communication_without_turn_queues_message_without_triggering_turn() {
let harness = AgentControlHarness::new().await;

View File

@@ -496,13 +496,6 @@ impl Codex {
);
}
if let SessionSource::SubAgent(SubAgentSource::ThreadSpawn { depth, .. }) = session_source
&& depth >= config.agent_max_depth
{
let _ = config.features.disable(Feature::SpawnCsv);
let _ = config.features.disable(Feature::Collab);
}
if config.features.enabled(Feature::JsRepl)
&& let Err(err) = resolve_compatible_node(config.js_repl_node_path.as_deref()).await
{
@@ -1136,6 +1129,9 @@ impl SessionConfiguration {
sandbox_policy: self.sandbox_policy.get().clone(),
cwd: self.cwd.to_path_buf(),
ephemeral: self.original_config_do_not_use.ephemeral,
agent_use_function_call_inbox: self
.original_config_do_not_use
.agent_use_function_call_inbox,
reasoning_effort: self.collaboration_mode.reasoning_effort(),
personality: self.personality,
session_source: self.session_source.clone(),
@@ -4453,6 +4449,10 @@ async fn submission_loop(sess: Arc<Session>, config: Arc<Config>, rx_sub: Receiv
handlers::inter_agent_communication(&sess, sub.id.clone(), communication).await;
false
}
Op::InjectResponseItems { items } => {
handlers::inject_response_items(&sess, sub.id.clone(), items).await;
false
}
Op::ExecApproval {
id: approval_id,
turn_id,
@@ -4653,7 +4653,10 @@ mod handlers {
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::dynamic_tools::DynamicToolResponse;
use codex_protocol::items::TurnItem;
use codex_protocol::mcp::RequestId as ProtocolRequestId;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::user_input::UserInput;
use codex_rmcp_client::ElicitationAction;
use codex_rmcp_client::ElicitationResponse;
@@ -4791,6 +4794,76 @@ mod handlers {
}
}
/// Handles `Op::InjectResponseItems`: delivers pre-built response items into
/// the session, restarting a turn when none is active so the items are not
/// silently lost.
///
/// `sess.inject_response_items` hands the items back via `Err` when there is
/// no active turn to receive them; in that case a fresh turn is spawned
/// (seeded from a leading user message when one is present) and delivery is
/// retried, up to `MAX_TURN_RESTART_ATTEMPTS` restarts before dropping.
pub async fn inject_response_items(
sess: &Arc<Session>,
sub_id: String,
items: Vec<ResponseInputItem>,
) {
const MAX_TURN_RESTART_ATTEMPTS: usize = 3;
let mut pending_items = items;
let mut attempts = 0usize;
loop {
match sess.inject_response_items(pending_items).await {
Ok(()) => return,
// No active turn accepted the items; keep them for the retry path.
Err(items_without_active_turn) => {
pending_items = items_without_active_turn;
}
}
if attempts >= MAX_TURN_RESTART_ATTEMPTS {
warn!(
attempts,
remaining_items = pending_items.len(),
"dropping response items after repeated turn restart failures"
);
return;
}
attempts += 1;
// Seed the restarted turn with the leading user message when present;
// otherwise fall back to an empty text input so a turn can still start.
let turn_input =
pop_leading_user_message_input(&mut pending_items).unwrap_or_else(|| {
vec![UserInput::Text {
text: String::new(),
text_elements: Vec::new(),
}]
});
// Reuse the original sub id on the first restart; later restarts get a
// suffixed id so submissions stay distinguishable.
let turn_sub_id = if attempts == 1 {
sub_id.clone()
} else {
format!("{sub_id}-retry-{attempts}")
};
let current_context = sess.new_default_turn_with_sub_id(turn_sub_id).await;
// Keep injected inbox wakeups visible to telemetry after the TurnContext field rename.
current_context.session_telemetry.user_prompt(&turn_input);
sess.refresh_mcp_servers_if_requested(&current_context)
.await;
sess.spawn_task(
Arc::clone(&current_context),
turn_input,
crate::tasks::RegularTask::new(),
)
.await;
// All items may have been consumed as the turn's input; if so, done.
if pending_items.is_empty() {
return;
}
}
}
/// Pops the leading item off `items` when it parses as a user message and
/// returns its content for use as turn-start input.
///
/// Returns `None` — leaving `items` untouched — when the list is empty or
/// the first item does not parse as a user message.
fn pop_leading_user_message_input(
items: &mut Vec<ResponseInputItem>,
) -> Option<Vec<UserInput>> {
// Clone first so `items` is only mutated once the item is known to parse.
let first_item = items.first().cloned()?;
let response_item = ResponseItem::from(first_item);
let TurnItem::UserMessage(user_message) = crate::parse_turn_item(&response_item)? else {
return None;
};
let _ = items.remove(0);
Some(user_message.content)
}
pub async fn run_user_shell_command(sess: &Arc<Session>, sub_id: String, command: String) {
if let Some((turn_context, cancellation_token)) =
sess.active_turn_context_and_cancellation_token().await

View File

@@ -38,6 +38,7 @@ pub struct ThreadConfigSnapshot {
pub sandbox_policy: SandboxPolicy,
pub cwd: PathBuf,
pub ephemeral: bool,
pub agent_use_function_call_inbox: bool,
pub reasoning_effort: Option<ReasoningEffort>,
pub personality: Option<Personality>,
pub session_source: SessionSource,

View File

@@ -1736,6 +1736,28 @@ fn feature_table_overrides_legacy_flags() -> std::io::Result<()> {
Ok(())
}
// Setting `agent_function_call_inbox = true` in the features table must
// surface both as the Feature flag and as the derived
// `agent_use_function_call_inbox` config field.
#[test]
fn feature_table_enables_agent_function_call_inbox() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
let mut entries = BTreeMap::new();
entries.insert("agent_function_call_inbox".to_string(), true);
let cfg = ConfigToml {
features: Some(codex_features::FeaturesToml { entries }),
..Default::default()
};
let config = Config::load_from_base_config_with_overrides(
cfg,
ConfigOverrides::default(),
codex_home.path().to_path_buf(),
)?;
assert!(config.features.enabled(Feature::AgentFunctionCallInbox));
assert!(config.agent_use_function_call_inbox);
Ok(())
}
#[test]
fn legacy_toggles_map_to_features() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
@@ -4460,6 +4482,7 @@ fn test_precedence_fixture_with_o3_profile() -> std::io::Result<()> {
agent_roles: BTreeMap::new(),
memories: MemoriesConfig::default(),
agent_job_max_runtime_seconds: DEFAULT_AGENT_JOB_MAX_RUNTIME_SECONDS,
agent_use_function_call_inbox: false,
codex_home: fixture.codex_home(),
sqlite_home: fixture.codex_home(),
log_dir: fixture.codex_home().join("log"),
@@ -4602,6 +4625,7 @@ fn test_precedence_fixture_with_gpt3_profile() -> std::io::Result<()> {
agent_roles: BTreeMap::new(),
memories: MemoriesConfig::default(),
agent_job_max_runtime_seconds: DEFAULT_AGENT_JOB_MAX_RUNTIME_SECONDS,
agent_use_function_call_inbox: false,
codex_home: fixture.codex_home(),
sqlite_home: fixture.codex_home(),
log_dir: fixture.codex_home().join("log"),
@@ -4742,6 +4766,7 @@ fn test_precedence_fixture_with_zdr_profile() -> std::io::Result<()> {
agent_roles: BTreeMap::new(),
memories: MemoriesConfig::default(),
agent_job_max_runtime_seconds: DEFAULT_AGENT_JOB_MAX_RUNTIME_SECONDS,
agent_use_function_call_inbox: false,
codex_home: fixture.codex_home(),
sqlite_home: fixture.codex_home(),
log_dir: fixture.codex_home().join("log"),
@@ -4868,6 +4893,7 @@ fn test_precedence_fixture_with_gpt5_profile() -> std::io::Result<()> {
agent_roles: BTreeMap::new(),
memories: MemoriesConfig::default(),
agent_job_max_runtime_seconds: DEFAULT_AGENT_JOB_MAX_RUNTIME_SECONDS,
agent_use_function_call_inbox: false,
codex_home: fixture.codex_home(),
sqlite_home: fixture.codex_home(),
log_dir: fixture.codex_home().join("log"),

View File

@@ -404,6 +404,10 @@ pub struct Config {
pub agent_max_threads: Option<usize>,
/// Maximum runtime in seconds for agent job workers before they are failed.
pub agent_job_max_runtime_seconds: Option<u64>,
/// When true, inbound agent messages to non-subagent threads are delivered
/// as a synthetic function_call/function_call_output pair instead of plain
/// user input.
pub agent_use_function_call_inbox: bool,
/// Maximum nesting depth allowed for spawned agent threads.
pub agent_max_depth: i32,
@@ -2356,6 +2360,7 @@ impl Config {
.as_ref()
.and_then(|agents| agents.job_max_runtime_seconds)
.or(DEFAULT_AGENT_JOB_MAX_RUNTIME_SECONDS);
let agent_use_function_call_inbox = features.enabled(Feature::AgentFunctionCallInbox);
if agent_job_max_runtime_seconds == Some(0) {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
@@ -2644,6 +2649,7 @@ impl Config {
agent_roles,
memories: cfg.memories.unwrap_or_default().into(),
agent_job_max_runtime_seconds,
agent_use_function_call_inbox,
codex_home,
sqlite_home,
log_dir,

View File

@@ -428,6 +428,17 @@ pub(crate) fn last_assistant_message_from_item(
pub(crate) fn response_input_to_response_item(input: &ResponseInputItem) -> Option<ResponseItem> {
match input {
ResponseInputItem::FunctionCall {
name,
arguments,
call_id,
} => Some(ResponseItem::FunctionCall {
id: None,
name: name.clone(),
namespace: None,
arguments: arguments.clone(),
call_id: call_id.clone(),
}),
ResponseInputItem::FunctionCallOutput { call_id, output } => {
Some(ResponseItem::FunctionCallOutput {
call_id: call_id.clone(),

View File

@@ -431,6 +431,16 @@ pub(crate) fn response_input_to_code_mode_result(response: ResponseInputItem) ->
raw_arguments: String::new(),
})
}
ResponseInputItem::FunctionCall {
name,
arguments,
call_id,
} => serde_json::json!({
"type": "function_call",
"name": name,
"arguments": arguments,
"call_id": call_id,
}),
}
}

View File

@@ -1,4 +1,5 @@
use super::*;
use crate::agent::agent_resolver::resolve_agent_target;
use crate::agent::control::render_input_preview;
pub(crate) struct Handler;
@@ -25,8 +26,8 @@ impl ToolHandler for Handler {
} = invocation;
let arguments = function_arguments(payload)?;
let args: SendInputArgs = parse_arguments(&arguments)?;
let receiver_thread_id = parse_agent_id_target(&args.target)?;
let input_items = parse_collab_input(args.message, args.items)?;
let receiver_thread_id = resolve_agent_target(&session, &turn, &args.target).await?;
let input_items = parse_collab_input(args.message.clone(), args.items.clone())?;
let prompt = render_input_preview(&input_items);
let receiver_agent = session
.services
@@ -53,9 +54,15 @@ impl ToolHandler for Handler {
.into(),
)
.await;
let agent_control = session.services.agent_control.clone();
let result = agent_control
.send_input(receiver_thread_id, input_items)
let result = session
.services
.agent_control
.send_agent_message_or_input(
receiver_thread_id,
session.conversation_id,
args.message,
args.items,
)
.await
.map_err(|err| collab_agent_error(receiver_thread_id, err));
let status = session

View File

@@ -8,7 +8,6 @@ use crate::models_manager::manager::RefreshStrategy;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
use codex_features::Feature;
use codex_protocol::AgentPath;
use codex_protocol::ThreadId;
use codex_protocol::models::BaseInstructions;
@@ -264,12 +263,7 @@ pub(crate) fn apply_spawn_agent_runtime_overrides(
Ok(())
}
pub(crate) fn apply_spawn_agent_overrides(config: &mut Config, child_depth: i32) {
if child_depth >= config.agent_max_depth && !config.features.enabled(Feature::MultiAgentV2) {
let _ = config.features.disable(Feature::SpawnCsv);
let _ = config.features.disable(Feature::Collab);
}
}
pub(crate) fn apply_spawn_agent_overrides(_config: &mut Config, _child_depth: i32) {}
pub(crate) async fn apply_requested_spawn_agent_model_overrides(
session: &Session,

View File

@@ -1762,6 +1762,10 @@ async fn spawn_agent_allows_depth_up_to_configured_max_depth() {
let mut config = (*turn.config).clone();
config.agent_max_depth = DEFAULT_AGENT_MAX_DEPTH + 1;
config
.features
.enable(Feature::SpawnCsv)
.expect("test config should allow spawn_csv");
turn.config = Arc::new(config);
turn.session_source = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
parent_thread_id: session.conversation_id,
@@ -1785,6 +1789,12 @@ async fn spawn_agent_allows_depth_up_to_configured_max_depth() {
let result: SpawnAgentResult =
serde_json::from_str(&content).expect("spawn_agent result should be json");
assert!(!result.agent_id.is_empty());
let child_thread = manager
.get_thread(parse_agent_id(&result.agent_id))
.await
.expect("spawned max-depth child thread should exist");
assert!(child_thread.enabled(Feature::Collab));
assert!(child_thread.enabled(Feature::SpawnCsv));
assert!(
result
.nickname
@@ -1851,7 +1861,10 @@ async fn send_input_rejects_invalid_id() {
let FunctionCallError::RespondToModel(msg) = err else {
panic!("expected respond-to-model error");
};
assert!(msg.starts_with("invalid agent id not-a-uuid:"));
assert_eq!(
msg,
"agent_name must use only lowercase letters, digits, and underscores"
);
}
#[tokio::test]
@@ -3204,6 +3217,7 @@ async fn build_agent_spawn_config_uses_turn_context_values() {
let config = build_agent_spawn_config(&base_instructions, &turn).expect("spawn config");
let mut expected = (*turn.config).clone();
expected.features = config.features.clone();
expected.base_instructions = Some(base_instructions.text);
expected.model = Some(turn.model_info.slug.clone());
expected.model_provider = turn.provider.clone();

View File

@@ -6,6 +6,7 @@ use crate::tools::handlers::parse_arguments;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use async_trait::async_trait;
use codex_protocol::protocol::SessionSource;
use codex_protocol::request_user_input::RequestUserInputArgs;
use codex_tools::REQUEST_USER_INPUT_TOOL_NAME;
use codex_tools::normalize_request_user_input_args;
@@ -41,6 +42,12 @@ impl ToolHandler for RequestUserInputHandler {
}
};
if matches!(turn.session_source, SessionSource::SubAgent(_)) {
return Err(FunctionCallError::RespondToModel(
"request_user_input can only be used by the root thread".to_string(),
));
}
let mode = session.collaboration_mode().await.mode;
if let Some(message) =
request_user_input_unavailable_message(mode, self.default_mode_request_user_input)
@@ -69,3 +76,72 @@ impl ToolHandler for RequestUserInputHandler {
Ok(FunctionToolOutput::from_text(content, Some(true)))
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::codex::make_session_and_context;
use crate::turn_diff_tracker::TurnDiffTracker;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SubAgentSource;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::sync::Arc;
use tokio::sync::Mutex;
// A turn whose session_source marks it as a spawned subagent must be
// refused by request_user_input with a respond-to-model error.
#[tokio::test]
async fn request_user_input_rejects_subagent_threads() {
let (session, mut turn_context) = make_session_and_context().await;
// Rewrite the default session source into a subagent thread-spawn.
turn_context.session_source = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
parent_thread_id: ThreadId::new(),
depth: 1,
agent_path: None,
agent_nickname: None,
agent_role: None,
});
// Invoke the handler with a syntactically valid questions payload so any
// rejection can only come from the subagent gate, not argument parsing.
let err = match (RequestUserInputHandler {
default_mode_request_user_input: true,
})
.handle(ToolInvocation {
session: Arc::new(session),
turn: Arc::new(turn_context),
tracker: Arc::new(Mutex::new(TurnDiffTracker::default())),
call_id: "call-1".to_string(),
tool_name: REQUEST_USER_INPUT_TOOL_NAME.to_string(),
tool_namespace: None,
payload: ToolPayload::Function {
arguments: json!({
"questions": [{
"header": "Hdr",
"question": "Pick one",
"id": "pick_one",
"options": [
{
"label": "A",
"description": "A"
},
{
"label": "B",
"description": "B"
}
]
}]
})
.to_string(),
},
})
.await
{
Ok(_) => panic!("subagents should not be allowed to request user input"),
Err(err) => err,
};
assert_eq!(
err,
FunctionCallError::RespondToModel(
"request_user_input can only be used by the root thread".to_string(),
)
);
}
}

View File

@@ -739,6 +739,11 @@ impl JsReplManager {
fn summarize_tool_call_response(response: &ResponseInputItem) -> JsReplToolCallResponseSummary {
match response {
ResponseInputItem::Message { content, .. } => Self::summarize_message_payload(content),
ResponseInputItem::FunctionCall { arguments, .. } => Self::summarize_text_payload(
Some("function_call"),
JsReplToolCallPayloadKind::FunctionText,
arguments,
),
ResponseInputItem::FunctionCallOutput { output, .. } => {
let payload_kind = if output.content_items().is_some() {
JsReplToolCallPayloadKind::FunctionContentItems

View File

@@ -142,6 +142,8 @@ pub enum Feature {
MultiAgentV2,
/// Enable CSV-backed agent job tools.
SpawnCsv,
/// Deliver inbound agent messages via a synthetic function-call inbox envelope.
AgentFunctionCallInbox,
/// Enable apps.
Apps,
/// Enable the tool_search tool for apps.
@@ -719,6 +721,12 @@ pub const FEATURES: &[FeatureSpec] = &[
stage: Stage::UnderDevelopment,
default_enabled: false,
},
FeatureSpec {
id: Feature::AgentFunctionCallInbox,
key: "agent_function_call_inbox",
stage: Stage::UnderDevelopment,
default_enabled: false,
},
FeatureSpec {
id: Feature::SpawnCsv,
key: "enable_fanout",
@@ -907,3 +915,125 @@ pub fn unstable_features_warning_event(
#[cfg(test)]
mod tests;
#[cfg(test)]
mod inbox_feature_tests {
use super::*;
use pretty_assertions::assert_eq;
// Under-development features must never ship enabled by default: users
// should only reach them through an explicit feature toggle.
#[test]
fn under_development_features_are_disabled_by_default() {
    for spec in FEATURES {
        if matches!(spec.stage, Stage::UnderDevelopment) {
            // assert!(!flag, ..) instead of assert_eq!(flag, false, ..):
            // same check, idiomatic form (clippy::bool_assert_comparison).
            assert!(
                !spec.default_enabled,
                "feature `{}` is under development and must be disabled by default",
                spec.key
            );
        }
    }
}
// Inverse guard: any feature that ships enabled by default must already be
// in the Stable or Removed stage.
#[test]
fn default_enabled_features_are_stable() {
for spec in FEATURES {
if spec.default_enabled {
assert!(
matches!(spec.stage, Stage::Stable | Stage::Removed),
"feature `{}` is enabled by default but is not stable/removed ({:?})",
spec.key,
spec.stage
);
}
}
}
#[test]
fn use_linux_sandbox_bwrap_is_removed() {
assert_eq!(Feature::UseLinuxSandboxBwrap.stage(), Stage::Removed);
assert_eq!(Feature::UseLinuxSandboxBwrap.default_enabled(), false);
}
#[test]
fn js_repl_is_experimental_and_user_toggleable() {
let spec = Feature::JsRepl.info();
let stage = spec.stage;
let expected_node_version = include_str!("../../node-version.txt").trim_end();
assert!(matches!(stage, Stage::Experimental { .. }));
assert_eq!(stage.experimental_menu_name(), Some("JavaScript REPL"));
assert_eq!(
stage.experimental_menu_description().map(str::to_owned),
Some(format!(
"Enable a persistent Node-backed JavaScript REPL for interactive website debugging and other inline JavaScript execution capabilities. Requires Node >= v{expected_node_version} installed."
))
);
assert_eq!(Feature::JsRepl.default_enabled(), false);
}
#[test]
fn guardian_approval_is_experimental_and_user_toggleable() {
let spec = Feature::GuardianApproval.info();
let stage = spec.stage;
assert!(matches!(stage, Stage::Experimental { .. }));
assert_eq!(stage.experimental_menu_name(), Some("Guardian Approvals"));
assert_eq!(
stage.experimental_menu_description().map(str::to_owned),
Some(
"When Codex needs approval for higher-risk actions (e.g. sandbox escapes or blocked network access), route eligible approval requests to a carefully-prompted security reviewer subagent rather than blocking the agent on your input. This can consume significantly more tokens because it runs a subagent on every approval request.".to_string()
)
);
assert_eq!(stage.experimental_announcement(), None);
assert_eq!(Feature::GuardianApproval.default_enabled(), false);
}
#[test]
fn request_permissions_tool_is_under_development() {
assert_eq!(
Feature::RequestPermissionsTool.stage(),
Stage::UnderDevelopment
);
assert_eq!(Feature::RequestPermissionsTool.default_enabled(), false);
}
#[test]
fn image_generation_is_under_development() {
assert_eq!(Feature::ImageGeneration.stage(), Stage::UnderDevelopment);
assert_eq!(Feature::ImageGeneration.default_enabled(), false);
}
#[test]
fn collab_is_legacy_alias_for_multi_agent() {
assert_eq!(feature_for_key("multi_agent"), Some(Feature::Collab));
assert_eq!(feature_for_key("collab"), Some(Feature::Collab));
}
#[test]
fn apps_require_feature_flag_and_chatgpt_auth() {
let mut features = Features::with_defaults();
assert!(!features.apps_enabled_for_auth(/*auth*/ None));
features.enable(Feature::Apps);
assert!(!features.apps_enabled_for_auth(/*auth*/ None));
let api_key_auth = CodexAuth::from_api_key("test-api-key");
assert!(!features.apps_enabled_for_auth(Some(&api_key_auth)));
let chatgpt_auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
assert!(features.apps_enabled_for_auth(Some(&chatgpt_auth)));
}
#[test]
fn agent_function_call_inbox_is_under_development() {
assert_eq!(
Feature::AgentFunctionCallInbox.stage(),
Stage::UnderDevelopment
);
assert_eq!(Feature::AgentFunctionCallInbox.default_enabled(), false);
assert_eq!(
feature_for_key("agent_function_call_inbox"),
Some(Feature::AgentFunctionCallInbox)
);
}
}

View File

@@ -122,6 +122,11 @@ pub enum ResponseInputItem {
role: String,
content: Vec<ContentItem>,
},
FunctionCall {
name: String,
arguments: String,
call_id: String,
},
FunctionCallOutput {
call_id: String,
#[ts(as = "FunctionCallOutputBody")]
@@ -915,6 +920,17 @@ impl From<ResponseInputItem> for ResponseItem {
end_turn: None,
phase: None,
},
ResponseInputItem::FunctionCall {
name,
arguments,
call_id,
} => Self::FunctionCall {
id: None,
name,
namespace: None,
arguments,
call_id,
},
ResponseInputItem::FunctionCallOutput { call_id, output } => {
Self::FunctionCallOutput { call_id, output }
}

View File

@@ -100,6 +100,26 @@ pub const COLLABORATION_MODE_CLOSE_TAG: &str = "</collaboration_mode>";
pub const REALTIME_CONVERSATION_OPEN_TAG: &str = "<realtime_conversation>";
pub const REALTIME_CONVERSATION_CLOSE_TAG: &str = "</realtime_conversation>";
pub const USER_MESSAGE_BEGIN: &str = "## My request for Codex:";
/// Discriminator value carried in [`AgentInboxPayload::kind`].
pub const AGENT_INBOX_KIND: &str = "agent_inbox";

/// Envelope for an inbound agent-to-agent message delivered via the inbox.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, JsonSchema)]
pub struct AgentInboxPayload {
    /// Marks the payload as injected rather than user-authored; set to `true`
    /// by [`AgentInboxPayload::new`].
    pub injected: bool,
    /// Payload discriminator; [`AGENT_INBOX_KIND`] when built via `new`.
    pub kind: String,
    /// Thread that sent the message.
    pub sender_thread_id: ThreadId,
    /// The message body.
    pub message: String,
}

impl AgentInboxPayload {
    /// Builds an injected inbox payload tagged with [`AGENT_INBOX_KIND`].
    pub fn new(sender_thread_id: ThreadId, message: String) -> Self {
        let kind = AGENT_INBOX_KIND.to_owned();
        Self {
            injected: true,
            kind,
            sender_thread_id,
            message,
        }
    }
}
/// Submission Queue Entry - requests from user
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)]
@@ -244,6 +264,9 @@ pub enum Op {
final_output_json_schema: Option<Value>,
},
/// Inject non-user response items into an existing turn, or start a turn if needed.
InjectResponseItems { items: Vec<ResponseInputItem> },
/// Similar to [`Op::UserInput`], but contains additional context required
/// for a turn of a [`crate::codex_thread::CodexThread`].
UserTurn {
@@ -588,6 +611,7 @@ impl Op {
Self::UserInputAnswer { .. } => "user_input_answer",
Self::RequestPermissionsResponse { .. } => "request_permissions_response",
Self::DynamicToolResponse { .. } => "dynamic_tool_response",
Self::InjectResponseItems { .. } => "inject_response_items",
Self::AddToHistory { .. } => "add_to_history",
Self::GetHistoryEntryRequest { .. } => "get_history_entry_request",
Self::ListMcpTools => "list_mcp_tools",

View File

@@ -141,7 +141,7 @@ impl ToolsConfig {
let include_collab_tools = features.enabled(Feature::Collab);
let include_multi_agent_v2 = features.enabled(Feature::MultiAgentV2);
let include_agent_jobs = features.enabled(Feature::SpawnCsv);
let include_request_user_input = !matches!(session_source, SessionSource::SubAgent(_));
let include_request_user_input = true;
let include_default_mode_request_user_input =
include_request_user_input && features.enabled(Feature::DefaultModeRequestUserInput);
let include_search_tool =

View File

@@ -131,7 +131,7 @@ fn shell_zsh_fork_prefers_shell_command_over_unified_exec() {
}
#[test]
fn subagents_disable_request_user_input_and_agent_jobs_workers_opt_in_by_label() {
fn subagents_keep_request_user_input_schema_and_agent_jobs_workers_opt_in_by_label() {
let model_info = model_info();
let mut features = Features::with_defaults();
features.enable(Feature::SpawnCsv);
@@ -149,7 +149,7 @@ fn subagents_disable_request_user_input_and_agent_jobs_workers_opt_in_by_label()
windows_sandbox_level: WindowsSandboxLevel::Disabled,
});
assert!(!tools_config.request_user_input);
assert!(tools_config.request_user_input);
assert!(!tools_config.default_mode_request_user_input);
assert!(tools_config.agent_jobs_tools);
assert!(tools_config.agent_jobs_worker_tools);

View File

@@ -36,6 +36,24 @@ Codex can run a notification hook when the agent finishes a turn. See the config
When Codex knows which client started the turn, the legacy notify JSON payload also includes a top-level `client` field. The TUI reports `codex-tui`, and the app server reports the `clientInfo.name` value from `initialize`.
## Agent Inbox Delivery
By default, inbound messages from other agents are delivered to non-subagent threads as normal
user input. If you want those handoffs to appear as explicit non-user transcript activity, you can
opt into a synthetic function-call/function-call-output envelope:
```toml
[features]
agent_function_call_inbox = true
```
When enabled, Codex injects inbound agent messages into non-subagent threads as an `agent_inbox`
function-call/function-call-output pair. This is primarily a model-behavior workaround for cases
where you want a subagent handoff to start a valid turn while still being clearly marked as
non-user activity in the transcript.
Messages sent to subagents continue to arrive as normal user input.
## JSON Schema
The generated JSON Schema for `config.toml` lives at `codex-rs/core/config.schema.json`.

View File

@@ -7,13 +7,13 @@ diff --git a/builder/cc_builder.rs b/builder/cc_builder.rs
use std::collections::HashMap;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
#[non_exhaustive]
#[derive(PartialEq, Eq)]
@@ -681,6 +681,16 @@
}
let mut memcmp_compile_args = Vec::from(memcmp_compiler.args());
+ // Keep the probe self-contained and avoid invoking external debug tools
+ // (for example `dsymutil`) that may be missing in hermetic sandboxes.
+ memcmp_compile_args.retain(|arg| {
@@ -30,7 +30,7 @@ diff --git a/builder/cc_builder.rs b/builder/cc_builder.rs
@@ -692,6 +702,15 @@
}
}
+ if let Some(execroot) = Self::bazel_execroot(self.manifest_dir.as_path()) {
+ // In Bazel build-script sandboxes, `cc` can pass `bazel-out/...` args
+ // relative to the execroot while the process runs from elsewhere.

View File

@@ -17,7 +17,7 @@ diff --git a/builder/cc_builder.rs b/builder/cc_builder.rs
+ emit_warning("Skipping memcmp probe for Bazel windows-msvc build scripts.");
+ return;
+ }
let basename = "memcmp_invalid_stripped_check";
let exec_path = out_dir().join(basename);
let memcmp_build = cc::Build::default();
@@ -30,7 +30,7 @@ diff --git a/builder/cc_builder.rs b/builder/cc_builder.rs
memcmp_compile_args.push(flag.into());
}
}
- if let Some(execroot) = Self::bazel_execroot(self.manifest_dir.as_path()) {
+ if let Some(execroot) = bazel_execroot {
// In Bazel build-script sandboxes, `cc` can pass `bazel-out/...` args

View File

@@ -18,7 +18,7 @@ diff --git a/builder/main.rs b/builder/main.rs
+ .components()
+ .any(|component| component.as_os_str() == "bazel-out")
}
fn use_prebuilt_nasm() -> bool {
+ let use_prebuilt_for_bazel_windows_msvc = is_bazel_windows_msvc_build_script();
target_os() == "windows"
@@ -31,7 +31,7 @@ diff --git a/builder/main.rs b/builder/main.rs
+ && (use_prebuilt_for_bazel_windows_msvc || !test_nasm_command())
&& (Some(true) == allow_prebuilt_nasm() || is_prebuilt_nasm())
}
fn allow_prebuilt_nasm() -> Option<bool> {
diff --git a/builder/nasm_builder.rs b/builder/nasm_builder.rs
--- a/builder/nasm_builder.rs
@@ -40,7 +40,7 @@ diff --git a/builder/nasm_builder.rs b/builder/nasm_builder.rs
if self.files.is_empty() {
return vec![];
}
- if test_nasm_command() {
+ if test_nasm_command() && !use_prebuilt_nasm() {
for src in &self.files {

View File

@@ -14,7 +14,7 @@ index 89dcf81..cf27c92 100644
+ "llvm/utils/mlgo-utils/combine_training_corpus.py",
+ "llvm/docs/_themes/llvm-theme",
]
test_docs_subprojects = [
diff --git a/runtimes/mingw/BUILD.bazel b/runtimes/mingw/BUILD.bazel
index ebd99db..9eb5d5b 100644
@@ -23,7 +23,7 @@ index ebd99db..9eb5d5b 100644
@@ -334,6 +334,30 @@ stub_library(
name = "stdc++",
)
+# Clang may inject -lssp and -lssp_nonshared for windows-gnu links.
+# Provide compatibility archives in the MinGW runtime search directory.
+stub_library(

View File

@@ -22,7 +22,7 @@ index b9a0ce1..67e491c 100644
@@ -10,6 +10,11 @@ def _channel(version):
return "beta"
return "stable"
+def _exec_triple_suffix(exec_triple):
+ if exec_triple.system == "windows":
+ return "{}_{}_{}".format(exec_triple.system, exec_triple.arch, exec_triple.abi)
@@ -32,16 +32,16 @@ index b9a0ce1..67e491c 100644
*,
version,
@@ -23,15 +28,14 @@ def declare_rustc_toolchains(
for triple in execs:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustc_repo_label = "@rustc_{}_{}//:".format(triple_suffix, version_key)
cargo_repo_label = "@cargo_{}_{}//:".format(triple_suffix, version_key)
clippy_repo_label = "@clippy_{}_{}//:".format(triple_suffix, version_key)
- rust_toolchain_name = "{}_{}_{}_rust_toolchain".format(
- exec_triple.system,
- exec_triple.arch,
@@ -49,10 +49,10 @@ index b9a0ce1..67e491c 100644
+ triple_suffix,
version_key,
)
@@ -90,11 +94,8 @@ def declare_rustc_toolchains(
target_key = sanitize_triple(target_triple)
native.toolchain(
- name = "{}_{}_to_{}_{}".format(exec_triple.system, exec_triple.arch, target_key, version_key),
- exec_compatible_with = [
@@ -74,7 +74,7 @@ index a219209..ecb6b05 100644
-load("//rs/experimental/platforms:triples.bzl", "SUPPORTED_EXEC_TRIPLES")
+load("//rs/experimental/platforms:triples.bzl", "SUPPORTED_EXEC_TRIPLES", "triple_to_constraint_set")
load("//rs/experimental/toolchains:toolchain_utils.bzl", "sanitize_version")
+def _exec_triple_suffix(exec_triple):
+ if exec_triple.system == "windows":
+ return "{}_{}_{}".format(exec_triple.system, exec_triple.arch, exec_triple.abi)
@@ -84,15 +84,15 @@ index a219209..ecb6b05 100644
if version.startswith("nightly"):
return "nightly"
@@ -22,14 +27,13 @@ def declare_rustfmt_toolchains(
for triple in execs:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustc_repo_label = "@rustc_{}_{}//:".format(triple_suffix, version_key)
rustfmt_repo_label = "@rustfmt_{}_{}//:".format(triple_suffix, rustfmt_version_key)
- rustfmt_toolchain_name = "{}_{}_{}_rustfmt_toolchain".format(
- exec_triple.system,
- exec_triple.arch,
@@ -100,10 +100,10 @@ index a219209..ecb6b05 100644
+ triple_suffix,
version_key,
)
@@ -43,11 +47,8 @@ def declare_rustfmt_toolchains(
)
native.toolchain(
- name = "{}_{}_rustfmt_{}".format(exec_triple.system, exec_triple.arch, version_key),
- exec_compatible_with = [
@@ -122,7 +122,7 @@ index 7bb0205..ace556b 100644
@@ -37,6 +37,11 @@ def _normalize_arch_name(arch):
return "aarch64"
return arch
+def _exec_triple_suffix(exec_triple):
+ if exec_triple.system == "windows":
+ return "{}_{}_{}".format(exec_triple.system, exec_triple.arch, exec_triple.abi)
@@ -130,22 +130,22 @@ index 7bb0205..ace556b 100644
+
def _sanitize_path_fragment(path):
return path.replace("/", "_").replace(":", "_")
@@ -181,7 +186,7 @@ def _toolchains_impl(mctx):
for triple in SUPPORTED_EXEC_TRIPLES:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustc_name = "rustc_{}_{}".format(triple_suffix, version_key)
rustc_repository(
@@ -230,7 +235,7 @@ def _toolchains_impl(mctx):
for triple in SUPPORTED_EXEC_TRIPLES:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustfmt_repository(
name = "rustfmt_{}_{}".format(triple_suffix, version_key),

View File

@@ -15,7 +15,7 @@ diff --git a/rust/extensions.bzl b/rust/extensions.bzl
"extra_target_triples": {repository_set.target_triple: [str(v) for v in repository_set.target_compatible_with]},
"name": repository_set.name,
@@ -166,6 +167,9 @@ _COMMON_TAG_KWARGS = {
_RUST_REPOSITORY_SET_TAG_ATTRS = {
+ "exec_compatible_with": attr.label_list(
+ doc = "Execution platform constraints for this repository_set.",

View File

@@ -13,7 +13,7 @@ diff --git a/rust/private/rustc.bzl b/rust/private/rustc.bzl
@@ -129,6 +129,20 @@
build_setting = config.bool(flag = True),
)
-def _get_rustc_env(attr, toolchain, crate_name):
+def _effective_target_arch(toolchain, use_exec_target):
+ return toolchain.exec_triple.arch if use_exec_target else toolchain.target_arch
@@ -31,9 +31,9 @@ diff --git a/rust/private/rustc.bzl b/rust/private/rustc.bzl
+
+def _get_rustc_env(attr, toolchain, crate_name, use_exec_target = False):
"""Gathers rustc environment variables
@@ -147,6 +161,6 @@
result = {
- "CARGO_CFG_TARGET_ARCH": "" if toolchain.target_arch == None else toolchain.target_arch,
- "CARGO_CFG_TARGET_OS": "" if toolchain.target_os == None else toolchain.target_os,
@@ -44,15 +44,15 @@ diff --git a/rust/private/rustc.bzl b/rust/private/rustc.bzl
@@ -997,9 +1011,11 @@
if build_metadata and not use_json_output:
fail("build_metadata requires parse_json_output")
+ use_exec_target = is_exec_configuration(ctx) and crate_info.type == "bin"
+
output_dir = getattr(crate_info.output, "dirname", None)
linker_script = getattr(file, "linker_script", None)
- env = _get_rustc_env(attr, toolchain, crate_info.name)
+ env = _get_rustc_env(attr, toolchain, crate_info.name, use_exec_target)
# Wrapper args first
@@ -1138,5 +1154,5 @@
if error_format != "json":
@@ -64,7 +64,7 @@ diff --git a/rust/private/rustc.bzl b/rust/private/rustc.bzl
@@ -1144,6 +1160,6 @@
if linker_script:
rustc_flags.add(linker_script, format = "--codegen=link-arg=-T%s")
# Tell Rustc where to find the standard library (or libcore)
- rustc_flags.add_all(toolchain.rust_std_paths, before_each = "-L", format_each = "%s")
+ rustc_flags.add_all(_effective_rust_std_paths(toolchain, use_exec_target), before_each = "-L", format_each = "%s")

View File

@@ -6,7 +6,7 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
"""Translate GNU-flavored cc args when exec-side build scripts target Windows MSVC."""
if toolchain.target_flag_value != toolchain.exec_triple.str or not toolchain.exec_triple.str.endswith("-pc-windows-msvc"):
return args
- rewritten = []
- skip_next = False
- for arg in args:
@@ -24,22 +24,22 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
+ if skip_next:
+ skip_next = False
+ continue
if arg == "-target":
- skip_next = True
+ skip_next = True
continue
if arg.startswith("-target=") or arg.startswith("--target="):
continue
if arg == "-nostdlibinc" or arg.startswith("--sysroot"):
continue
- if "mingw-w64-" in arg or "mingw_import_libraries_directory" in arg or "mingw_crt_library_search_directory" in arg:
+ if arg.startswith("-fstack-protector") or arg.startswith("-D_FORTIFY_SOURCE="):
continue
- if arg.startswith("-fstack-protector"):
- continue
-
@@ -50,9 +50,9 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
+ if "mingw-w64-" in path or "mingw_import_libraries_directory" in path or "mingw_crt_library_search_directory" in path:
+ skip_next = True
+ continue
rewritten.append(arg)
- return [
- "-target",
- toolchain.target_flag_value,
@@ -98,7 +98,7 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
+ rewritten.append(arg)
+
+ return rewritten
def get_cc_compile_args_and_env(cc_toolchain, feature_configuration):
"""Gather cc environment variables from the given `cc_toolchain`
@@ -509,6 +550,7 @@ def _construct_build_script_env(
@@ -107,5 +107,5 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
env["LD"] = linker
+ link_args = _rewrite_windows_exec_msvc_link_args(toolchain, link_args)
env["LDFLAGS"] = " ".join(_pwd_flags(link_args))
# Defaults for cxx flags.

View File

@@ -16,7 +16,7 @@ diff --git a/rust/toolchain.bzl b/rust/toolchain.bzl
rustfmt = None,
linker = None):
@@ -312,7 +313,15 @@ def _generate_sysroot(
# Made available to support $(location) expansion in stdlib_linkflags and extra_rustc_flags.
transitive_file_sets.append(depset(ctx.files.rust_std))
+
@@ -24,7 +24,7 @@ diff --git a/rust/toolchain.bzl b/rust/toolchain.bzl
+ if exec_rust_std:
+ sysroot_exec_rust_std = _symlink_sysroot_tree(ctx, name, exec_rust_std)
+ transitive_file_sets.extend([sysroot_exec_rust_std])
+ # Made available to support $(location) expansion in extra_exec_rustc_flags.
+ transitive_file_sets.append(depset(ctx.files.exec_rust_std))
+
@@ -49,10 +49,10 @@ diff --git a/rust/toolchain.bzl b/rust/toolchain.bzl
rustc_lib = sysroot_rustc_lib,
@@ -410,12 +421,14 @@ def _rust_toolchain_impl(ctx):
)
rust_std = ctx.attr.rust_std
+ exec_rust_std = ctx.attr.exec_rust_std if ctx.attr.exec_rust_std else rust_std
sysroot = _generate_sysroot(
ctx = ctx,
rustc = ctx.file.rustc,
@@ -63,12 +63,12 @@ diff --git a/rust/toolchain.bzl b/rust/toolchain.bzl
rustfmt = ctx.file.rustfmt,
clippy = ctx.file.clippy_driver,
@@ -452,7 +465,7 @@ def _rust_toolchain_impl(ctx):
expanded_stdlib_linkflags = _expand_flags(ctx, "stdlib_linkflags", rust_std[rust_common.stdlib_info].srcs, make_variables)
expanded_extra_rustc_flags = _expand_flags(ctx, "extra_rustc_flags", rust_std[rust_common.stdlib_info].srcs, make_variables)
- expanded_extra_exec_rustc_flags = _expand_flags(ctx, "extra_exec_rustc_flags", rust_std[rust_common.stdlib_info].srcs, make_variables)
+ expanded_extra_exec_rustc_flags = _expand_flags(ctx, "extra_exec_rustc_flags", exec_rust_std[rust_common.stdlib_info].srcs, make_variables)
linking_context = cc_common.create_linking_context(
linker_inputs = depset([
@@ -793,6 +806,10 @@ rust_toolchain = rule(
@@ -123,13 +123,13 @@ diff --git a/rust/private/rustc.bzl b/rust/private/rustc.bzl
@@ -1011,7 +1011,10 @@ def construct_arguments(
if build_metadata and not use_json_output:
fail("build_metadata requires parse_json_output")
- use_exec_target = is_exec_configuration(ctx) and crate_info.type == "bin"
+ # Exec-configuration crates (build scripts, proc-macros, and their
+ # dependencies) must all target the exec triple so they can link against
+ # each other and the exec-side standard library.
+ use_exec_target = is_exec_configuration(ctx)
output_dir = getattr(crate_info.output, "dirname", None)
linker_script = getattr(file, "linker_script", None)
diff --git a/rust/repositories.bzl b/rust/repositories.bzl
@@ -138,7 +138,7 @@ diff --git a/rust/repositories.bzl b/rust/repositories.bzl
@@ -536,6 +536,18 @@ def _rust_toolchain_tools_repository_impl(ctx):
build_components.append(rust_stdlib_content)
sha256s.update(rust_stdlib_sha256)
+ exec_rust_std_label = None
+ if exec_triple.str != target_triple.str:
+ exec_rust_stdlib_content, exec_rust_stdlib_sha256 = load_rust_stdlib(

View File

@@ -4,7 +4,7 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
@@ -120,6 +120,63 @@
executable = True,
)
+def _strip_stack_protector_for_windows_llvm_mingw(toolchain, args):
+ """Drop stack protector flags unsupported by llvm-mingw build-script probes."""
+ if "windows-gnullvm" not in toolchain.target_flag_value:
@@ -64,11 +64,11 @@ diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_sc
+
def get_cc_compile_args_and_env(cc_toolchain, feature_configuration):
"""Gather cc environment variables from the given `cc_toolchain`
@@ -503,6 +560,10 @@
if not env["AR"]:
env["AR"] = cc_toolchain.ar_executable
+ cc_c_args = _strip_stack_protector_for_windows_llvm_mingw(toolchain, cc_c_args)
+ cc_cxx_args = _strip_stack_protector_for_windows_llvm_mingw(toolchain, cc_cxx_args)
+ cc_c_args = _rewrite_windows_exec_msvc_cc_args(toolchain, cc_c_args)

View File

@@ -3,7 +3,7 @@
@@ -2305,7 +2305,7 @@
return crate.metadata.dirname
return crate.output.dirname
-def _portable_link_flags(lib, use_pic, ambiguous_libs, get_lib_name, for_windows = False, for_darwin = False, flavor_msvc = False):
+def _portable_link_flags(lib, use_pic, ambiguous_libs, get_lib_name, for_windows = False, for_darwin = False, flavor_msvc = False, use_direct_driver = False):
artifact = get_preferred_artifact(lib, use_pic)
@@ -18,7 +18,7 @@
+ return [
+ "-Clink-arg={}".format(artifact.path),
+ ]
if flavor_msvc:
return [
@@ -2381,7 +2386,7 @@
@@ -27,7 +27,7 @@
get_lib_name = get_lib_name_for_windows if flavor_msvc else get_lib_name_default
- ret.extend(_portable_link_flags(lib, use_pic, ambiguous_libs, get_lib_name, flavor_msvc = flavor_msvc))
+ ret.extend(_portable_link_flags(lib, use_pic, ambiguous_libs, get_lib_name, flavor_msvc = flavor_msvc, use_direct_driver = use_direct_driver))
# Windows toolchains can inherit POSIX defaults like -pthread from C deps,
# which fails to link with the MinGW/LLD toolchain. Drop them here.
@@ -2558,17 +2563,25 @@
@@ -59,6 +59,6 @@
+ map_each = get_lib_name,
+ format_each = "-lstatic=%s",
+ )
def _get_dirname(file):
"""A helper function for `_add_native_link_flags`.

View File

@@ -6,7 +6,7 @@
+++ b/build.rs
@@ -543,10 +543,15 @@
}
fn static_lib_name(suffix: &str) -> String {
- let target_os = env::var("CARGO_CFG_TARGET_OS").unwrap();
- if target_os == "windows" {
@@ -25,7 +25,7 @@
@@ -577,7 +577,23 @@
path
}
+fn out_dir_abs() -> PathBuf {
+ let cwd = env::current_dir().unwrap();
+
@@ -45,10 +45,10 @@
+
build_dir().join("gn_out").join("obj")
}
@@ -794,22 +810,23 @@
}
fn print_link_flags() {
+ let target = env::var("TARGET").unwrap();
println!("cargo:rustc-link-lib=static=rusty_v8");
@@ -60,7 +60,7 @@
.split_whitespace()
.any(|ba| ba == "use_custom_libcxx=false")
});
if should_dyn_link_libcxx {
// Based on https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2462
if let Ok(stdlib) = env::var("CXXSTDLIB") {

View File

@@ -8,7 +8,7 @@ index 9648e4a..88efd41 100644
--- a/orig/v8-14.6.202.11/bazel/defs.bzl
+++ b/mod/v8-14.6.202.11/bazel/defs.bzl
@@ -97,7 +97,7 @@ v8_config = rule(
def _default_args():
return struct(
- deps = [":define_flags", "@libcxx//:libc++"],
@@ -75,12 +75,12 @@ index 85f31b7..7314584 100644
- default = "none",
+ default = "False",
)
# Default setting for v8_enable_pointer_compression.
@@ -4077,28 +4077,14 @@ filegroup(
}),
)
-v8_library(
- name = "lib_dragonbox",
- srcs = ["third_party/dragonbox/src/include/dragonbox/dragonbox.h"],
@@ -94,7 +94,7 @@ index 85f31b7..7314584 100644
+ name = "lib_dragonbox",
+ actual = "@dragonbox//:dragonbox",
)
-v8_library(
- name = "lib_fp16",
- srcs = ["third_party/fp16/src/include/fp16.h"],
@@ -110,7 +110,7 @@ index 85f31b7..7314584 100644
+ name = "lib_fp16",
+ actual = "@fp16//:fp16",
)
filegroup(
@@ -4405,6 +4391,20 @@ genrule(
srcs = [
@@ -155,11 +155,11 @@ index 85f31b7..7314584 100644
+ requirement("markupsafe"),
],
)
@@ -4448,6 +4451,15 @@ filegroup(
],
)
+cc_library(
+ name = "rusty_v8_internal_headers",
+ hdrs = [
@@ -175,7 +175,7 @@ index 85f31b7..7314584 100644
@@ -4567,16 +4579,9 @@ cc_library(
],
)
-cc_library(
- name = "simdutf",
- srcs = ["third_party/simdutf/simdutf.cpp"],
@@ -190,7 +190,7 @@ index 85f31b7..7314584 100644
+ name = "simdutf",
+ actual = "@simdutf//:simdutf",
)
v8_library(
@@ -4593,7 +4598,7 @@ v8_library(
copts = ["-Wno-implicit-fallthrough"],
@@ -216,7 +216,7 @@ index 85f31b7..7314584 100644
actual = "icu/v8",
+ visibility = ["//visibility:public"],
)
v8_library(
@@ -4715,7 +4721,7 @@ v8_binary(
],
@@ -226,7 +226,7 @@ index 85f31b7..7314584 100644
+ "@icu//:icu",
],
)
diff --git a/orig/v8-14.6.202.11/bazel/BUILD.icu b/mod/v8-14.6.202.11/bazel/BUILD.icu
index 5fda2f4..381386c 100644
--- a/orig/v8-14.6.202.11/bazel/BUILD.icu

View File

@@ -62,13 +62,13 @@ diff --git a/orig/v8-14.6.202.11/MODULE.bazel b/mod/v8-14.6.202.11/MODULE.bazel
+ commit = "3d2de1816307bac63c16a297e8c4dc501b4076df",
+ remote = "https://chromium.googlesource.com/external/github.com/Maratyszcza/FP16.git",
+)
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
pip.parse(
@@ -22,171 +72,3 @@
)
use_repo(pip, "v8_python_deps")
-# Define the local LLVM toolchain repository
-llvm_toolchain_repository = use_repo_rule("//bazel/toolchain:llvm_repository.bzl", "llvm_toolchain_repository")
-
@@ -248,22 +248,22 @@ new file mode 100644
+@@ -2,7 +2,7 @@
+ load("@bazel_skylib//lib:selects.bzl", "selects")
+ load("@rules_license//rules:license.bzl", "license")
+
+
+-load("@rules_cc//cc:defs.bzl", "cc_test")
++load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
+ # Placeholder#2 for Guitar, do not remove
+
+
+ package(
+@@ -134,11 +134,7 @@ COPTS = select({
+ ],
+ })
+
+
+-DEFINES = select({
+- ":compiler_msvc": ["HWY_SHARED_DEFINE"],
+- ":compiler_clangcl": ["HWY_SHARED_DEFINE"],
+- "//conditions:default": [],
+-})
++DEFINES = []
+
+
+ # Unused on Bazel builds, where this is not defined/known; Copybara replaces
+ # usages with an empty list.

View File

@@ -8,7 +8,7 @@ index 179a10f..4791e96 100644
+++ b/mod/v8-14.6.202.11/src/base/bits.h
@@ -270,11 +270,17 @@ inline constexpr uint32_t RoundDownToPowerOfTwo32(uint32_t value) {
}
// Precondition: 0 <= shift < 32
+#ifdef RotateRight32
+#undef RotateRight32
@@ -16,7 +16,7 @@ index 179a10f..4791e96 100644
inline constexpr uint32_t RotateRight32(uint32_t value, uint32_t shift) {
return (value >> shift) | (value << ((32 - shift) & 31));
}
// Precondition: 0 <= shift < 32
+#ifdef RotateLeft32
+#undef RotateLeft32
@@ -31,7 +31,7 @@ index 6176ed4..a02043d 100644
@@ -64,6 +64,7 @@ namespace {
volatile sig_atomic_t in_signal_handler = 0;
bool dump_stack_in_signal_handler = true;
+#if HAVE_EXECINFO_H
// The prefix used for mangled symbols, per the Itanium C++ ABI:
// http://www.codesourcery.com/cxx-abi/abi.html#mangling
@@ -39,7 +39,7 @@ index 6176ed4..a02043d 100644
@@ -73,7 +74,6 @@ const char kMangledSymbolPrefix[] = "_Z";
const char kSymbolCharacters[] =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_";
-#if HAVE_EXECINFO_H
// Demangles C++ symbols in the given text. Example:
//
@@ -49,14 +49,14 @@ index 861cfe4..1e73954 100644
--- a/orig/v8-14.6.202.11/src/base/export-template.h
+++ b/mod/v8-14.6.202.11/src/base/export-template.h
@@ -153,8 +153,10 @@
EXPORT_TEMPLATE_TEST(DEFAULT, );
EXPORT_TEMPLATE_TEST(DEFAULT, __attribute__((visibility("default"))));
+#if defined(_MSC_VER)
EXPORT_TEMPLATE_TEST(MSVC_HACK, __declspec(dllexport));
EXPORT_TEMPLATE_TEST(DEFAULT, __declspec(dllimport));
+#endif
#undef EXPORT_TEMPLATE_TEST
#undef EXPORT_TEMPLATE_TEST_DEFAULT_DEFAULT
diff --git a/orig/v8-14.6.202.11/src/base/platform/platform-posix.cc b/mod/v8-14.6.202.11/src/base/platform/platform-posix.cc
@@ -65,12 +65,12 @@ index 420df0b..6f47969 100644
+++ b/mod/v8-14.6.202.11/src/base/platform/platform-posix.cc
@@ -95,7 +95,7 @@ extern int madvise(caddr_t, size_t, int);
#endif
#if defined(V8_LIBC_GLIBC)
-extern "C" void* __libc_stack_end;
+extern "C" void* __libc_stack_end V8_WEAK;
#endif
namespace v8 {
@@ -1476,7 +1476,8 @@ Stack::StackSlot Stack::ObtainCurrentThreadStackStart() {
// __libc_stack_end is process global and thus is only valid for
@@ -94,13 +94,13 @@ index f5d9ddc..542ea1a 100644
-
-#ifndef __MINGW64_VERSION_MAJOR
+#if !defined(__MINGW64_VERSION_MAJOR)
#define _TRUNCATE 0
#define STRUNCATE 80
@@ -81,9 +79,6 @@ inline void MemoryFence() {
__asm__ __volatile__("xchgl %%eax,%0 ":"=r" (barrier));
}
-#endif // __MINGW64_VERSION_MAJOR
-
-
@@ -110,25 +110,25 @@ index f5d9ddc..542ea1a 100644
@@ -134,6 +129,8 @@ int strncpy_s(char* dest, size_t dest_size, const char* source, size_t count) {
return 0;
}
+#endif // !defined(__MINGW64_VERSION_MAJOR)
+
#endif // __MINGW32__
namespace v8 {
@@ -743,8 +740,10 @@ void OS::StrNCpy(char* dest, int length, const char* src, size_t n) {
}
+#if defined(__MINGW32__) && !defined(__MINGW64_VERSION_MAJOR)
#undef _TRUNCATE
#undef STRUNCATE
+#endif
DEFINE_LAZY_LEAKY_OBJECT_GETTER(RandomNumberGenerator,
GetPlatformRandomNumberGenerator)
@@ -1894,3 +1893,4 @@ Stack::StackSlot Stack::GetCurrentStackPosition() {
} // namespace base
} // namespace v8
+
@@ -294,9 +294,9 @@ index bda0e43..b44f1d9 100644
--- a/orig/v8-14.6.202.11/src/libplatform/default-thread-isolated-allocator.cc
+++ b/mod/v8-14.6.202.11/src/libplatform/default-thread-isolated-allocator.cc
@@ -23,7 +23,7 @@ extern int pkey_free(int pkey) V8_WEAK;
namespace {
-bool KernelHasPkruFix() {
+[[maybe_unused]] bool KernelHasPkruFix() {
// PKU was broken on Linux kernels before 5.13 (see
@@ -319,9 +319,9 @@ index 27e748f..cbf886c 100644
+#endif // !V8_CC_MSVC || V8_OS_WIN
memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
}
@@ -3878,3 +3878,4 @@ void HeapSnapshotJSONSerializer::SerializeLocations() {
}
} // namespace v8::internal
+

View File

@@ -6,5 +6,5 @@ index 2d5a2a2..6e8c4cd 100644
-#![doc = include_str!("../readme.md")]
+#![doc = "windows-link"]
#![no_std]
/// Defines an external function to import.