Compare commits

...

6 Commits

Author SHA1 Message Date
Jiaming Zhang
ed6d07a7f1 feat(devicecheck): use app-server bundle id 2026-04-30 14:23:10 -07:00
Jiaming Zhang
59fddcf4bb feat(release): bundle DeviceCheckProbe with app-server 2026-04-30 08:33:36 -07:00
Jiaming Zhang
40b9b66974 chore(ci): clarify DeviceCheckProbe staging test name 2026-04-30 08:22:31 -07:00
Jiaming Zhang
96d1f28839 codex: fix CI failure on PR #19761 2026-04-30 07:22:19 -07:00
Jiaming Zhang
273e5d754e feat(core): send attestation for ChatGPT compaction and realtime setup 2026-04-30 07:10:52 -07:00
Jiaming Zhang
2fa3e50ef8 feat(devicecheck): forward macos attestation from cli 2026-04-30 07:10:26 -07:00
15 changed files with 708 additions and 12 deletions

View File

@@ -7,6 +7,10 @@ inputs:
binaries:
description: Space-delimited binary basenames to sign and notarize.
default: "codex codex-responses-api-proxy"
app-bundles:
description: Space-delimited macOS app bundle basenames to sign and notarize.
required: false
default: ""
sign-binaries:
description: Whether to sign and notarize the macOS binaries.
required: false
@@ -123,6 +127,7 @@ runs:
env:
TARGET: ${{ inputs.target }}
BINARIES: ${{ inputs.binaries }}
APP_BUNDLES: ${{ inputs.app-bundles }}
run: |
set -euo pipefail
@@ -143,12 +148,18 @@ runs:
codesign --force --options runtime --timestamp --entitlements "$entitlements_path" --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
done
for app_bundle in ${APP_BUNDLES}; do
path="codex-rs/target/${TARGET}/release/${app_bundle}"
codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
done
- name: Notarize macOS binaries
if: ${{ inputs.sign-binaries == 'true' }}
shell: bash
env:
TARGET: ${{ inputs.target }}
BINARIES: ${{ inputs.binaries }}
APP_BUNDLES: ${{ inputs.app-bundles }}
APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
@@ -191,6 +202,26 @@ runs:
notarize_binary "${binary}"
done
notarize_app_bundle() {
local app_bundle="$1"
local source_path="codex-rs/target/${TARGET}/release/${app_bundle}"
local archive_path="${RUNNER_TEMP}/${app_bundle}.zip"
if [[ ! -d "$source_path" ]]; then
echo "App bundle $source_path not found"
exit 1
fi
rm -f "$archive_path"
ditto -c -k --keepParent "$source_path" "$archive_path"
notarize_submission "$app_bundle" "$archive_path" "$notary_key_path"
}
for app_bundle in ${APP_BUNDLES}; do
notarize_app_bundle "${app_bundle}"
done
- name: Sign and notarize macOS dmg
if: ${{ inputs.sign-dmg == 'true' }}
shell: bash

View File

@@ -55,11 +55,15 @@ jobs:
python3 ./scripts/stage_npm_packages.py \
--release-version "$CODEX_VERSION" \
--workflow-url "$WORKFLOW_URL" \
--skip-native-component devicecheck-probe \
--package codex \
--output-dir "$OUTPUT_DIR"
PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz"
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
- name: Test Apple Silicon npm package includes DeviceCheckProbe
run: python3 codex-cli/scripts/test_build_npm_package.py
- name: Upload staged npm package artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:

View File

@@ -72,24 +72,28 @@ jobs:
bundle: primary
artifact_name: aarch64-apple-darwin
binaries: "codex codex-responses-api-proxy"
app_bundles: "DeviceCheckProbe.app"
build_dmg: "true"
- runner: macos-15-xlarge
target: aarch64-apple-darwin
bundle: app-server
artifact_name: aarch64-apple-darwin-app-server
binaries: "codex-app-server"
app_bundles: "DeviceCheckProbe.app"
build_dmg: "false"
- runner: macos-15-xlarge
target: x86_64-apple-darwin
bundle: primary
artifact_name: x86_64-apple-darwin
binaries: "codex codex-responses-api-proxy"
app_bundles: ""
build_dmg: "true"
- runner: macos-15-xlarge
target: x86_64-apple-darwin
bundle: app-server
artifact_name: x86_64-apple-darwin-app-server
binaries: "codex-app-server"
app_bundles: ""
build_dmg: "false"
# Release artifacts intentionally ship MUSL-linked Linux binaries.
- runner: ubuntu-24.04
@@ -265,6 +269,15 @@ jobs:
echo "CARGO_PROFILE_RELEASE_LTO: ${CARGO_PROFILE_RELEASE_LTO}"
cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}"
- if: ${{ runner.os == 'macOS' && matrix.target == 'aarch64-apple-darwin' && matrix.app_bundles != '' }}
name: Build macOS DeviceCheck probe
shell: bash
run: |
set -euo pipefail
./devicecheck-probe/build-probe.sh \
--target "${{ matrix.target }}" \
--out "target/${{ matrix.target }}/release"
- name: Upload Cargo timings
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
@@ -286,6 +299,7 @@ jobs:
with:
target: ${{ matrix.target }}
binaries: ${{ matrix.binaries }}
app-bundles: ${{ matrix.app_bundles }}
sign-binaries: "true"
sign-dmg: "false"
apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
@@ -321,6 +335,12 @@ jobs:
ditto "${binary_path}" "${dmg_root}/${binary}"
done
if [[ -d "${release_dir}/DeviceCheckProbe.app" ]]; then
mkdir -p "${dmg_root}/devicecheck-probe"
ditto "${release_dir}/DeviceCheckProbe.app" \
"${dmg_root}/devicecheck-probe/DeviceCheckProbe.app"
fi
rm -f "$dmg_path"
hdiutil create \
-volname "$volname" \
@@ -361,6 +381,14 @@ jobs:
fi
done
if [[ -d "target/${{ matrix.target }}/release/DeviceCheckProbe.app" ]]; then
mkdir -p "$dest/devicecheck-probe"
ditto "target/${{ matrix.target }}/release/DeviceCheckProbe.app" \
"$dest/devicecheck-probe/DeviceCheckProbe.app"
COPYFILE_DISABLE=1 tar -C "$dest" -czf "$dest/devicecheck-probe-${{ matrix.target }}.tar.gz" \
devicecheck-probe
fi
if [[ "${{ matrix.build_dmg }}" == "true" ]]; then
cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
fi
@@ -382,6 +410,9 @@ jobs:
# run `zstd --rm`, because that flag deletes the original files.
for f in "$dest"/*; do
base="$(basename "$f")"
if [[ -d "$f" ]]; then
continue
fi
# Skip files that are already archives (shouldn't happen, but be
# safe).
if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then
@@ -394,7 +425,12 @@ jobs:
fi
# Create per-binary tar.gz
tar -C "$dest" -czf "$dest/${base}.tar.gz" "$base"
if [[ -d "$dest/devicecheck-probe" ]] \
&& [[ "$base" == "codex-aarch64-apple-darwin" || "$base" == "codex-app-server-aarch64-apple-darwin" ]]; then
COPYFILE_DISABLE=1 tar -C "$dest" -czf "$dest/${base}.tar.gz" "$base" devicecheck-probe
else
COPYFILE_DISABLE=1 tar -C "$dest" -czf "$dest/${base}.tar.gz" "$base"
fi
# Also create .zst and remove the uncompressed binaries to keep
# non-Windows artifact directories small.

View File

@@ -72,7 +72,7 @@ PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
"codex-linux-x64": ["codex", "rg"],
"codex-linux-arm64": ["codex", "rg"],
"codex-darwin-x64": ["codex", "rg"],
"codex-darwin-arm64": ["codex", "rg"],
"codex-darwin-arm64": ["codex", "devicecheck-probe", "rg"],
"codex-win32-x64": ["codex", "rg", "codex-windows-sandbox-setup", "codex-command-runner"],
"codex-win32-arm64": ["codex", "rg", "codex-windows-sandbox-setup", "codex-command-runner"],
"codex-responses-api-proxy": ["codex-responses-api-proxy"],
@@ -91,6 +91,7 @@ COMPONENT_DEST_DIR: dict[str, str] = {
"codex-responses-api-proxy": "codex-responses-api-proxy",
"codex-windows-sandbox-setup": "codex",
"codex-command-runner": "codex",
"devicecheck-probe": "devicecheck-probe",
"rg": "path",
}
@@ -137,6 +138,17 @@ def parse_args() -> argparse.Namespace:
type=Path,
help="Directory containing pre-installed native binaries to bundle (vendor root).",
)
parser.add_argument(
"--skip-native-component",
dest="skip_native_components",
action="append",
default=[],
choices=tuple(COMPONENT_DEST_DIR),
help=(
"Skip one native component while staging. May be repeated. "
"Intended for historical-artifact smoke tests only."
),
)
return parser.parse_args()
@@ -160,7 +172,11 @@ def main() -> int:
stage_sources(staging_dir, version, package)
vendor_src = args.vendor_src.resolve() if args.vendor_src else None
native_components = PACKAGE_NATIVE_COMPONENTS.get(package, [])
native_components = [
component
for component in PACKAGE_NATIVE_COMPONENTS.get(package, [])
if component not in args.skip_native_components
]
target_filter = PACKAGE_TARGET_FILTERS.get(package)
if native_components:

View File

@@ -42,6 +42,15 @@ class BinaryComponent:
WINDOWS_TARGETS = tuple(target for target in BINARY_TARGETS if "windows" in target)
APPLE_SILICON_TARGETS = ("aarch64-apple-darwin",)
@dataclass(frozen=True)
class ArchiveComponent:
    """Describes a directory-tree artifact shipped as a per-target tar.gz."""

    # Archive file-name prefix, e.g. "devicecheck-probe" -> devicecheck-probe-<target>.tar.gz.
    artifact_prefix: str
    # Directory name under vendor/<target>/ that the tree is installed into.
    dest_dir: str
    # Top-level entry inside the tar.gz to extract.
    archive_member: str
    # Rust target triples this component ships for.
    targets: tuple[str, ...]
BINARY_COMPONENTS = {
"codex": BinaryComponent(
@@ -68,6 +77,15 @@ BINARY_COMPONENTS = {
),
}
ARCHIVE_COMPONENTS = {
"devicecheck-probe": ArchiveComponent(
artifact_prefix="devicecheck-probe",
dest_dir="devicecheck-probe",
archive_member="devicecheck-probe",
targets=APPLE_SILICON_TARGETS,
),
}
RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
("x86_64-unknown-linux-musl", "linux-x86_64"),
("aarch64-unknown-linux-musl", "linux-aarch64"),
@@ -132,7 +150,7 @@ def parse_args() -> argparse.Namespace:
"--component",
dest="components",
action="append",
choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
choices=tuple(list(BINARY_COMPONENTS) + list(ARCHIVE_COMPONENTS) + ["rg"]),
help=(
"Limit installation to the specified components."
" May be repeated. Defaults to codex, codex-windows-sandbox-setup,"
@@ -181,6 +199,11 @@ def main() -> int:
vendor_dir,
[BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
)
install_archive_components(
artifacts_dir,
vendor_dir,
[ARCHIVE_COMPONENTS[name] for name in components if name in ARCHIVE_COMPONENTS],
)
if "rg" in components:
with _gha_group("Fetch ripgrep binaries"):
@@ -337,6 +360,45 @@ def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
return f"{artifact_prefix}-{target}.zst"
def install_archive_components(
    artifacts_dir: Path,
    vendor_dir: Path,
    selected_components: Sequence[ArchiveComponent],
) -> None:
    """Install archive-based components into the vendor tree.

    For each component/target pair, expects
    ``<artifacts_dir>/<target>/<artifact_prefix>-<target>.tar.gz`` to exist and
    extracts its ``archive_member`` into ``<vendor_dir>/<target>/<dest_dir>``.

    Raises:
        FileNotFoundError: if an expected artifact archive is missing.
    """
    if not selected_components:
        return
    for component in selected_components:
        for target in component.targets:
            archive_path = artifacts_dir / target / f"{component.artifact_prefix}-{target}.tar.gz"
            if not archive_path.exists():
                raise FileNotFoundError(f"Expected artifact not found: {archive_path}")
            dest_dir = vendor_dir / target
            dest_dir.mkdir(parents=True, exist_ok=True)
            extract_archive_tree(archive_path, component.archive_member, dest_dir / component.dest_dir)
            print(f" installed {dest_dir / component.dest_dir}")
def extract_archive_tree(archive_path: Path, archive_member: str, dest: Path) -> None:
dest.parent.mkdir(parents=True, exist_ok=True)
with tempfile.TemporaryDirectory(prefix="codex-archive-tree-") as tmp_dir_str:
tmp_dir = Path(tmp_dir_str)
with tarfile.open(archive_path, "r:gz") as tar:
members = [
member
for member in tar.getmembers()
if member.name == archive_member or member.name.startswith(f"{archive_member}/")
]
if not members:
raise RuntimeError(f"Entry '{archive_member}' not found in archive {archive_path}.")
tar.extractall(path=tmp_dir, members=members, filter="data")
extracted = tmp_dir / archive_member
if dest.exists():
shutil.rmtree(dest)
shutil.move(str(extracted), dest)
def _fetch_single_rg(
vendor_dir: Path,
target: str,

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python3
"""Focused tests for Codex CLI npm package staging."""
from __future__ import annotations
import subprocess
import sys
import tempfile
import unittest
from pathlib import Path
SCRIPT_DIR = Path(__file__).resolve().parent
BUILD_SCRIPT = SCRIPT_DIR / "build_npm_package.py"
APPLE_SILICON_TARGET = "aarch64-apple-darwin"
class BuildNpmPackageTests(unittest.TestCase):
    """Smoke test for build_npm_package.py's darwin-arm64 staging output."""

    def test_darwin_arm64_package_includes_devicecheck_probe(self) -> None:
        # Arrange a fake vendor tree containing the three components the
        # darwin-arm64 package expects: the codex binary, the
        # DeviceCheckProbe.app bundle directory, and the rg binary.
        with tempfile.TemporaryDirectory(prefix="codex-npm-test-") as tmp_dir_str:
            tmp_dir = Path(tmp_dir_str)
            vendor_src = tmp_dir / "vendor"
            target_dir = vendor_src / APPLE_SILICON_TARGET
            (target_dir / "codex").mkdir(parents=True)
            (target_dir / "codex" / "codex").touch()
            (target_dir / "devicecheck-probe" / "DeviceCheckProbe.app").mkdir(parents=True)
            (target_dir / "path").mkdir(parents=True)
            (target_dir / "path" / "rg").touch()
            staging_dir = tmp_dir / "stage"
            # Run the real staging script as a subprocess against the fake
            # vendor tree; check=True fails the test on a nonzero exit.
            subprocess.run(
                [
                    sys.executable,
                    str(BUILD_SCRIPT),
                    "--package",
                    "codex-darwin-arm64",
                    "--version",
                    "0.0.0-test",
                    "--staging-dir",
                    str(staging_dir),
                    "--vendor-src",
                    str(vendor_src),
                ],
                check=True,
            )
            # The staged vendor tree must mirror all input components,
            # including the DeviceCheckProbe.app directory.
            staged_target_dir = staging_dir / "vendor" / APPLE_SILICON_TARGET
            self.assertTrue((staged_target_dir / "codex" / "codex").exists())
            self.assertTrue(
                (
                    staged_target_dir
                    / "devicecheck-probe"
                    / "DeviceCheckProbe.app"
                ).is_dir()
            )
            self.assertTrue((staged_target_dir / "path" / "rg").exists())


if __name__ == "__main__":
    unittest.main()

View File

@@ -107,6 +107,8 @@ use tracing::warn;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::devicecheck::X_OAI_ATTESTATION_HEADER;
use crate::devicecheck::macos_devicecheck_header;
use crate::flags::CODEX_RS_SSE_FIXTURE;
use crate::util::emit_feedback_auth_recovery_tags;
use codex_api::map_api_error;
@@ -116,6 +118,7 @@ use codex_login::auth_env_telemetry::AuthEnvTelemetry;
use codex_login::auth_env_telemetry::collect_auth_env_telemetry;
use codex_model_provider::SharedModelProvider;
use codex_model_provider::create_model_provider;
use codex_model_provider_info::CHATGPT_CODEX_BASE_URL;
#[cfg(test)]
use codex_model_provider_info::DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS;
use codex_model_provider_info::ModelProviderInfo;
@@ -422,6 +425,8 @@ impl ModelClient {
return Ok(Vec::new());
}
let client_setup = self.current_client_setup().await?;
let should_send_attestation =
should_send_attestation(&client_setup.api_provider, AttestationPurpose::Compaction);
let transport = ReqwestTransport::new(build_reqwest_client());
let request_telemetry = Self::build_request_telemetry(
session_telemetry,
@@ -475,6 +480,9 @@ impl ModelClient {
extra_headers.extend(build_conversation_headers(Some(
self.state.conversation_id.to_string(),
)));
if should_send_attestation && let Some(header_value) = macos_devicecheck_header() {
extra_headers.insert(X_OAI_ATTESTATION_HEADER, header_value);
}
let trace_attempt = compaction_trace.start_attempt(&payload);
let result = client
.compact_input(&payload, extra_headers)
@@ -493,10 +501,18 @@ impl ModelClient {
// Create the media call over HTTP first, then retain matching auth so realtime can attach
// the server-side control WebSocket to the call id from that HTTP response.
let client_setup = self.current_client_setup().await?;
let should_send_attestation = should_send_attestation(
&client_setup.api_provider,
AttestationPurpose::RealtimeWebrtcCallSetup,
);
let mut sideband_headers = extra_headers.clone();
sideband_headers.extend(sideband_websocket_auth_headers(
client_setup.api_auth.as_ref(),
));
let mut extra_headers = extra_headers;
if should_send_attestation && let Some(header_value) = macos_devicecheck_header() {
extra_headers.insert(X_OAI_ATTESTATION_HEADER, header_value);
}
let transport = ReqwestTransport::new(build_reqwest_client());
let response =
ApiRealtimeCallClient::new(transport, client_setup.api_provider, client_setup.api_auth)
@@ -703,7 +719,8 @@ impl ModelClient {
auth_context: AuthRequestTelemetryContext,
request_route_telemetry: RequestRouteTelemetry,
) -> std::result::Result<ApiWebSocketConnection, ApiError> {
let headers = self.build_websocket_headers(turn_state.as_ref(), turn_metadata_header);
let headers =
self.build_websocket_headers(&api_provider, turn_state.as_ref(), turn_metadata_header);
let websocket_telemetry = ModelClientSession::build_websocket_telemetry(
session_telemetry,
auth_context,
@@ -782,6 +799,7 @@ impl ModelClient {
/// replayed on reconnect within the same turn.
fn build_websocket_headers(
&self,
provider: &codex_api::Provider,
turn_state: Option<&Arc<OnceLock<String>>>,
turn_metadata_header: Option<&str>,
) -> ApiHeaderMap {
@@ -797,6 +815,7 @@ impl ModelClient {
}
headers.extend(build_conversation_headers(Some(conversation_id)));
headers.extend(self.build_responses_identity_headers());
extend_devicecheck_header_for_responses_endpoint(&mut headers, provider);
headers.insert(
OPENAI_BETA_HEADER,
HeaderValue::from_static(RESPONSES_WEBSOCKETS_V2_BETA_HEADER_VALUE),
@@ -911,6 +930,7 @@ impl ModelClientSession {
/// regardless of transport choice.
fn build_responses_options(
&self,
provider: &codex_api::Provider,
turn_metadata_header: Option<&str>,
compression: Compression,
) -> ApiResponsesOptions {
@@ -926,6 +946,7 @@ impl ModelClientSession {
turn_metadata_header.as_ref(),
);
headers.extend(self.client.build_responses_identity_headers());
extend_devicecheck_header_for_responses_endpoint(&mut headers, provider);
headers
},
compression,
@@ -1202,8 +1223,11 @@ impl ModelClientSession {
self.client.state.auth_env_telemetry.clone(),
);
let compression = self.responses_request_compression(client_setup.auth.as_ref());
let options = self.build_responses_options(turn_metadata_header, compression);
let options = self.build_responses_options(
&client_setup.api_provider,
turn_metadata_header,
compression,
);
let request = self.build_responses_request(
&client_setup.api_provider,
prompt,
@@ -1309,7 +1333,11 @@ impl ModelClientSession {
);
let compression = self.responses_request_compression(client_setup.auth.as_ref());
let options = self.build_responses_options(turn_metadata_header, compression);
let options = self.build_responses_options(
&client_setup.api_provider,
turn_metadata_header,
compression,
);
let request = self.build_responses_request(
&client_setup.api_provider,
prompt,
@@ -1613,6 +1641,38 @@ fn build_responses_headers(
headers
}
/// Endpoint categories that may carry a macOS DeviceCheck attestation header.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum AttestationPurpose {
    Response,
    Compaction,
    RealtimeWebrtcCallSetup,
}

/// Returns true when the attestation header should be attached for `purpose`.
///
/// Only the ChatGPT Codex backend qualifies: the provider base URL is
/// compared case-insensitively with any trailing '/' stripped. The `matches!`
/// currently lists every `AttestationPurpose` variant, so once the provider
/// check passes all purposes are allowed.
fn should_send_attestation(provider: &codex_api::Provider, purpose: AttestationPurpose) -> bool {
    let provider_is_chatgpt_codex = provider
        .base_url
        .trim_end_matches('/')
        .eq_ignore_ascii_case(CHATGPT_CODEX_BASE_URL);
    provider_is_chatgpt_codex
        && matches!(
            purpose,
            AttestationPurpose::Response
                | AttestationPurpose::Compaction
                | AttestationPurpose::RealtimeWebrtcCallSetup
        )
}

/// Inserts the DeviceCheck attestation header into `headers` for
/// responses-endpoint requests when the provider qualifies and a header value
/// is available (macOS only; `macos_devicecheck_header` is `None` elsewhere).
fn extend_devicecheck_header_for_responses_endpoint(
    headers: &mut ApiHeaderMap,
    provider: &codex_api::Provider,
) {
    if should_send_attestation(provider, AttestationPurpose::Response)
        && let Some(header_value) = macos_devicecheck_header()
    {
        headers.insert(X_OAI_ATTESTATION_HEADER, header_value);
    }
}
fn subagent_header_value(session_source: &SessionSource) -> Option<String> {
match session_source {
SessionSource::SubAgent(subagent_source) => match subagent_source {

View File

@@ -1,3 +1,4 @@
use super::AttestationPurpose;
use super::AuthRequestTelemetryContext;
use super::ModelClient;
use super::PendingUnauthorizedRetry;
@@ -8,7 +9,9 @@ use super::X_CODEX_TURN_METADATA_HEADER;
use super::X_CODEX_WINDOW_ID_HEADER;
use super::X_OPENAI_SUBAGENT_HEADER;
use codex_api::ApiError;
use codex_api::Provider as ApiProvider;
use codex_api::ResponseEvent;
use codex_api::RetryConfig as ApiRetryConfig;
use codex_app_server_protocol::AuthMode;
use codex_model_provider::BearerAuthProvider;
use codex_model_provider_info::WireApi;
@@ -383,3 +386,43 @@ fn auth_request_telemetry_context_tracks_attached_auth_and_retry_phase() {
assert_eq!(auth_context.recovery_mode, Some("managed"));
assert_eq!(auth_context.recovery_phase, Some("refresh_token"));
}
/// Test fixture: builds a minimal `ApiProvider` pointing at `base_url`
/// with a single-attempt retry policy and short timeouts.
fn api_provider(base_url: &str) -> ApiProvider {
    ApiProvider {
        name: "test".to_string(),
        base_url: base_url.to_string(),
        query_params: None,
        headers: http::HeaderMap::new(),
        retry: ApiRetryConfig {
            max_attempts: 1,
            base_delay: Duration::from_millis(1),
            retry_429: false,
            retry_5xx: true,
            retry_transport: true,
        },
        stream_idle_timeout: Duration::from_secs(1),
    }
}
#[test]
fn should_send_attestation_for_allowed_chatgpt_codex_purposes() {
let provider = api_provider("https://chatgpt.com/backend-api/codex/");
for purpose in [
AttestationPurpose::Response,
AttestationPurpose::Compaction,
AttestationPurpose::RealtimeWebrtcCallSetup,
] {
assert!(super::should_send_attestation(&provider, purpose));
}
}
#[test]
fn should_not_send_attestation_for_non_chatgpt_codex_provider() {
let provider = api_provider("https://api.openai.com/v1");
assert!(!super::should_send_attestation(
&provider,
AttestationPurpose::Response,
));
}

View File

@@ -0,0 +1,198 @@
//! macOS DeviceCheck attestation support.
//!
//! Runs the bundled DeviceCheckProbe.app and turns its JSON report into the
//! value for the `x-oai-attestation` header. Non-macOS builds never produce
//! a header value.

#[cfg(target_os = "macos")]
use std::path::PathBuf;
#[cfg(target_os = "macos")]
use std::process::Command;

use http::HeaderValue;
#[cfg(target_os = "macos")]
use serde::Deserialize;
#[cfg(any(target_os = "macos", test))]
use serde::Serialize;

/// Header name carrying the attestation payload.
pub(crate) const X_OAI_ATTESTATION_HEADER: &str = "x-oai-attestation";
/// Env var pointing at an Electron resources dir that may hold the probe app.
#[cfg(target_os = "macos")]
const CODEX_ELECTRON_RESOURCES_PATH_ENV_VAR: &str = "CODEX_ELECTRON_RESOURCES_PATH";
#[cfg(target_os = "macos")]
const PROBE_APP_NAME: &str = "DeviceCheckProbe.app";
#[cfg(target_os = "macos")]
const PROBE_EXECUTABLE_NAME: &str = "DeviceCheckProbe";
/// Directory next to (or one level above) the CLI binary that holds the probe.
#[cfg(target_os = "macos")]
const CLI_PROBE_DIR_NAME: &str = "devicecheck-probe";

/// JSON report read from the probe's stdout (camelCase keys).
#[cfg(target_os = "macos")]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct DeviceCheckProbeReport {
    supported: bool,
    token_base64: Option<String>,
    error: Option<String>,
    latency_ms: Option<f64>,
}

/// JSON payload serialized into the header value. `None` fields are omitted
/// and latency serializes under the short key `"t"`.
#[cfg(any(target_os = "macos", test))]
#[derive(Debug, Serialize)]
struct DeviceCheckHeaderPayload<'a> {
    #[serde(skip_serializing_if = "Option::is_none")]
    token: Option<&'a str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    failure_reason: Option<&'a str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    failure_detail: Option<&'a str>,
    #[serde(rename = "t", skip_serializing_if = "Option::is_none")]
    latency_ms: Option<f64>,
}

/// Returns the attestation header value, or `None` when unavailable.
///
/// Always `None` off macOS; on macOS the payload string is only dropped if it
/// is not a valid HTTP header value.
pub(crate) fn macos_devicecheck_header() -> Option<HeaderValue> {
    #[cfg(not(target_os = "macos"))]
    {
        None
    }
    #[cfg(target_os = "macos")]
    {
        HeaderValue::from_str(&macos_devicecheck_payload()).ok()
    }
}

/// Builds the attestation payload on macOS by running the probe app.
///
/// Always returns a payload string: failures are encoded as
/// `failure_reason`/`failure_detail` in the JSON rather than propagated.
#[cfg(target_os = "macos")]
fn macos_devicecheck_payload() -> String {
    // The probe is only attempted on Apple Silicon.
    if std::env::consts::ARCH == "x86_64" {
        return failure_payload(
            "unsupported_architecture",
            Some("DeviceCheck is not supported on Intel Macs"),
            /*latency_ms*/ None,
        );
    }
    let Some(probe_app_path) = probe_app_path() else {
        return failure_payload(
            "probe_app_unavailable",
            /*failure_detail*/ None,
            /*latency_ms*/ None,
        );
    };
    let probe_executable = probe_app_path
        .join("Contents")
        .join("MacOS")
        .join(PROBE_EXECUTABLE_NAME);
    let output = match Command::new(&probe_executable).output() {
        Ok(output) => output,
        Err(err) => {
            return failure_payload(
                "probe_launch_failed",
                Some(&err.to_string()),
                /*latency_ms*/ None,
            );
        }
    };
    // Nonzero exit: surface the probe's stderr as the failure detail.
    if !output.status.success() {
        return failure_payload(
            "probe_failed",
            Some(String::from_utf8_lossy(&output.stderr).trim()),
            /*latency_ms*/ None,
        );
    }
    let report: DeviceCheckProbeReport = match serde_json::from_slice(&output.stdout) {
        Ok(report) => report,
        Err(err) => {
            return failure_payload(
                "probe_output_invalid",
                Some(&err.to_string()),
                /*latency_ms*/ None,
            );
        }
    };
    if !report.supported {
        return failure_payload(
            "unsupported_device",
            /*failure_detail*/ None,
            report.latency_ms,
        );
    }
    if let Some(token) = report.token_base64.as_deref() {
        return token_payload(token, report.latency_ms);
    }
    // Supported device but no token: report the probe's own error if present.
    failure_payload(
        "token_generation_failed",
        report.error.as_deref().or(Some("probe returned no token")),
        report.latency_ms,
    )
}

/// Locates DeviceCheckProbe.app. Prefers the Electron resources dir from the
/// env var (used without an existence check), falling back to CLI-relative
/// locations.
#[cfg(target_os = "macos")]
fn probe_app_path() -> Option<PathBuf> {
    std::env::var_os(CODEX_ELECTRON_RESOURCES_PATH_ENV_VAR)
        .map(PathBuf::from)
        .map(|resources_path| resources_path.join(PROBE_APP_NAME))
        .or_else(cli_probe_app_path)
}

/// Checks `devicecheck-probe/DeviceCheckProbe.app` next to the current
/// executable, then one directory up; returns the first path that exists.
#[cfg(target_os = "macos")]
fn cli_probe_app_path() -> Option<PathBuf> {
    let executable_path = std::env::current_exe().ok()?;
    let executable_dir = executable_path.parent()?;
    let candidate_paths = [
        executable_dir.join(CLI_PROBE_DIR_NAME).join(PROBE_APP_NAME),
        executable_dir
            .parent()?
            .join(CLI_PROBE_DIR_NAME)
            .join(PROBE_APP_NAME),
    ];
    candidate_paths.into_iter().find(|path| path.exists())
}

/// Serializes a success payload containing the token (and latency if known).
#[cfg(any(target_os = "macos", test))]
fn token_payload(token: &str, latency_ms: Option<f64>) -> String {
    serde_json::to_string(&DeviceCheckHeaderPayload {
        token: Some(token),
        failure_reason: None,
        failure_detail: None,
        latency_ms,
    })
    .unwrap_or_else(|_| r#"{"failure_reason":"payload_serialization_failed"}"#.to_string())
}

/// Serializes a failure payload with a machine-readable reason plus optional
/// human-readable detail and latency.
#[cfg(any(target_os = "macos", test))]
fn failure_payload(
    failure_reason: &str,
    failure_detail: Option<&str>,
    latency_ms: Option<f64>,
) -> String {
    serde_json::to_string(&DeviceCheckHeaderPayload {
        token: None,
        failure_reason: Some(failure_reason),
        failure_detail,
        latency_ms,
    })
    .unwrap_or_else(|_| r#"{"failure_reason":"payload_serialization_failed"}"#.to_string())
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn token_payload_matches_macos_devicecheck_schema() {
        assert_eq!(
            token_payload("token", /*latency_ms*/ Some(12.5)),
            r#"{"token":"token","t":12.5}"#
        );
    }

    #[test]
    fn failure_payload_matches_macos_devicecheck_schema() {
        assert_eq!(
            failure_payload(
                "unsupported_architecture",
                Some("Intel Mac"),
                /*latency_ms*/ Some(12.5),
            ),
            r#"{"failure_reason":"unsupported_architecture","failure_detail":"Intel Mac","t":12.5}"#
        );
    }
}

View File

@@ -28,6 +28,7 @@ pub mod config;
pub mod connectors;
pub mod context;
mod context_manager;
mod devicecheck;
mod environment_selection;
pub mod exec;
pub mod exec_env;

View File

@@ -0,0 +1,73 @@
import DeviceCheck
import Foundation
struct DeviceCheckProbeReport: Encodable {
let supported: Bool
let tokenBase64: String?
let error: String?
let latencyMs: Double?
}
func writeReport(_ report: DeviceCheckProbeReport) throws {
let data = try JSONEncoder().encode(report)
FileHandle.standardOutput.write(data)
FileHandle.standardOutput.write(Data("\n".utf8))
}
let device = DCDevice.current
if !device.isSupported {
let report = DeviceCheckProbeReport(
supported: false,
tokenBase64: nil,
error: nil,
latencyMs: nil
)
try writeReport(report)
exit(0)
}
func requestToken() -> (result: DispatchTimeoutResult, token: Data?, error: Error?) {
let semaphore = DispatchSemaphore(value: 0)
var token: Data?
var tokenError: Error?
device.generateToken { data, error in
token = data
tokenError = error
semaphore.signal()
}
return (semaphore.wait(timeout: .now() + 1), token, tokenError)
}
func isUnknownSystemFailure(_ error: Error?) -> Bool {
(error as? DCError)?.code == .unknownSystemFailure
}
let tokenGenerationStart = DispatchTime.now()
var attempt = requestToken()
if attempt.result == .success, isUnknownSystemFailure(attempt.error) {
attempt = requestToken()
}
let latencyMs = Double(
DispatchTime.now().uptimeNanoseconds - tokenGenerationStart.uptimeNanoseconds
) / 1_000_000
if attempt.result == .timedOut {
let report = DeviceCheckProbeReport(
supported: true,
tokenBase64: nil,
error: "timed out waiting for DeviceCheck token",
latencyMs: latencyMs
)
try writeReport(report)
exit(1)
}
let report = DeviceCheckProbeReport(
supported: true,
tokenBase64: attempt.token?.base64EncodedString(),
error: attempt.error.map(String.init(describing:)),
latencyMs: latencyMs
)
try writeReport(report)

View File

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>DeviceCheckProbe</string>
<key>CFBundleIdentifier</key>
<string>com.openai.codex.app-server</string>
<key>CFBundleName</key>
<string>DeviceCheckProbe</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSBackgroundOnly</key>
<true/>
<key>NSPrincipalClass</key>
<string>NSApplication</string>
</dict>
</plist>

View File

@@ -0,0 +1,75 @@
#!/usr/bin/env bash
# Builds DeviceCheckProbe.app (Info.plist + swiftc-compiled executable) into
# the requested output directory. Code signing is left to the caller.
set -euo pipefail

usage() {
  cat >&2 <<'USAGE'
Usage: build-probe.sh --target TARGET --out DIR
Builds DeviceCheckProbe.app for the requested macOS Rust target triple.
The caller is responsible for code signing the resulting app bundle.
USAGE
}

# --- Argument parsing -------------------------------------------------------
target=""
out_dir=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    --target)
      target="${2:-}"
      shift 2
      ;;
    --out)
      out_dir="${2:-}"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "unknown argument: $1" >&2
      usage
      exit 2
      ;;
  esac
done

# Both flags are required.
if [[ -z "$target" || -z "$out_dir" ]]; then
  usage
  exit 2
fi

# Map the Rust target triple to a Swift target (deployment target macOS 13).
case "$target" in
  aarch64-apple-darwin)
    swift_target="arm64-apple-macosx13.0"
    ;;
  x86_64-apple-darwin)
    swift_target="x86_64-apple-macosx13.0"
    ;;
  *)
    echo "unsupported target: $target" >&2
    exit 2
    ;;
esac

script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
app_dir="$out_dir/DeviceCheckProbe.app"
contents_dir="$app_dir/Contents"
macos_dir="$contents_dir/MacOS"
module_cache_path="$out_dir/module-cache"

# Rebuild the bundle layout from scratch on every run.
rm -rf "$app_dir"
mkdir -p "$macos_dir" "$module_cache_path"
cp "$script_dir/Info.plist" "$contents_dir/Info.plist"

# Compile the probe, keeping the clang module cache under the out dir.
CLANG_MODULE_CACHE_PATH="$module_cache_path" \
MACOSX_DEPLOYMENT_TARGET=13.0 \
swiftc \
  -target "$swift_target" \
  -framework DeviceCheck \
  -framework Foundation \
  "$script_dir/DeviceCheckProbe.swift" \
  -o "$macos_dir/DeviceCheckProbe"

# Print the built bundle path for callers.
echo "$app_dir"

View File

@@ -34,6 +34,7 @@ const MAX_REQUEST_MAX_RETRIES: u64 = 100;
const OPENAI_PROVIDER_NAME: &str = "OpenAI";
pub const OPENAI_PROVIDER_ID: &str = "openai";
pub const CHATGPT_CODEX_BASE_URL: &str = "https://chatgpt.com/backend-api/codex";
const AMAZON_BEDROCK_PROVIDER_NAME: &str = "Amazon Bedrock";
pub const AMAZON_BEDROCK_PROVIDER_ID: &str = "amazon-bedrock";
pub const AMAZON_BEDROCK_DEFAULT_BASE_URL: &str =
@@ -234,7 +235,7 @@ impl ModelProviderInfo {
auth_mode,
Some(AuthMode::Chatgpt | AuthMode::ChatgptAuthTokens | AuthMode::AgentIdentity)
) {
"https://chatgpt.com/backend-api/codex"
CHATGPT_CODEX_BASE_URL
} else {
"https://api.openai.com/v1"
};

View File

@@ -58,14 +58,24 @@ def parse_args() -> argparse.Namespace:
action="store_true",
help="Retain temporary staging directories instead of deleting them.",
)
parser.add_argument(
"--skip-native-component",
dest="skip_native_components",
action="append",
default=[],
help=(
"Skip one native component while staging. May be repeated. "
"Intended for historical-artifact smoke tests only."
),
)
return parser.parse_args()
def collect_native_components(packages: list[str]) -> set[str]:
def collect_native_components(packages: list[str], skipped_components: set[str]) -> set[str]:
    """Union of native components required by ``packages``, minus skipped ones."""
    required = {
        component
        for package in packages
        for component in PACKAGE_NATIVE_COMPONENTS.get(package, [])
    }
    return required - skipped_components
def expand_packages(packages: list[str]) -> list[str]:
@@ -146,7 +156,8 @@ def main() -> int:
runner_temp = Path(os.environ.get("RUNNER_TEMP", tempfile.gettempdir()))
packages = expand_packages(list(args.packages))
native_components = collect_native_components(packages)
skipped_components = set(args.skip_native_components)
native_components = collect_native_components(packages, skipped_components)
vendor_temp_root: Path | None = None
vendor_src: Path | None = None
@@ -184,6 +195,8 @@ def main() -> int:
if vendor_src is not None:
cmd.extend(["--vendor-src", str(vendor_src)])
for component in sorted(skipped_components):
cmd.extend(["--skip-native-component", component])
try:
run_command(cmd)