refactor: use deep assertions instead of expect tests; remove Display impls

This commit is contained in:
kevin zhao
2025-11-12 17:20:59 -05:00
parent 8a98b7afd3
commit ad55ca629c
5 changed files with 145 additions and 172 deletions

18
codex-rs/Cargo.lock generated
View File

@@ -1194,9 +1194,9 @@ version = "0.0.0"
dependencies = [
"anyhow",
"clap",
"expect-test",
"multimap",
"parking_lot",
"pretty_assertions",
"serde",
"serde_json",
"shlex",
@@ -2150,12 +2150,6 @@ dependencies = [
"syn 2.0.104",
]
[[package]]
name = "dissimilar"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921"
[[package]]
name = "doc-comment"
version = "0.3.3"
@@ -2387,16 +2381,6 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "expect-test"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63af43ff4431e848fb47472a920f14fa71c24de13255a5692e93d4e90302acb0"
dependencies = [
"dissimilar",
"once_cell",
]
[[package]]
name = "eyre"
version = "0.6.12"

View File

@@ -27,4 +27,4 @@ parking_lot = { workspace = true }
shlex = { workspace = true }
[dev-dependencies]
expect-test = { workspace = true }
pretty_assertions = { workspace = true }

View File

@@ -55,23 +55,3 @@ impl Evaluation {
matches!(self, Self::Match { .. })
}
}
impl std::fmt::Display for Evaluation {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::NoMatch => f.write_str("noMatch"),
Self::Match {
decision,
matched_rules,
} => {
writeln!(f, "match {{")?;
writeln!(f, " decision: {decision},")?;
writeln!(f, " matchedRules: [")?;
for rule in matched_rules {
writeln!(f, " {rule},")?;
}
write!(f, " ]\n}}")
}
}
}
}

View File

@@ -29,12 +29,6 @@ impl PatternToken {
}
}
impl std::fmt::Display for PatternToken {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(render_pattern_token(self).as_str())
}
}
/// Prefix matcher for commands with support for alternative match tokens.
/// First token is fixed since we key by the first token in policy.
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -60,12 +54,6 @@ impl PrefixPattern {
}
}
impl std::fmt::Display for PrefixPattern {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "[{}]", render_pattern(self))
}
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum RuleMatch {
@@ -139,14 +127,14 @@ impl PrefixRule {
pub fn description(&self) -> String {
format!(
"prefix_rule(pattern = [{}], decision = {})",
render_pattern(&self.pattern),
render_decision(self.decision)
"prefix_rule(pattern = {pattern:?}, decision = {decision})",
pattern = self.pattern,
decision = self.decision
)
}
}
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Rule {
Prefix(PrefixRule),
}
@@ -175,40 +163,7 @@ impl Rule {
}
}
impl std::fmt::Display for Rule {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Prefix(rule) => write!(
f,
"prefix_rule(pattern = {}, decision = {})",
rule.pattern, rule.decision
),
}
}
}
fn join_command(command: &[String]) -> String {
try_join(command.iter().map(String::as_str))
.unwrap_or_else(|_| "unable to render example".to_string())
}
fn render_pattern(pattern: &PrefixPattern) -> String {
let mut tokens = vec![pattern.first.as_ref().to_string()];
tokens.extend(pattern.rest.iter().map(render_pattern_token));
tokens.join(", ")
}
fn render_pattern_token(token: &PatternToken) -> String {
match token {
PatternToken::Single(value) => value.clone(),
PatternToken::Alts(values) => format!("[{}]", values.join(", ")),
}
}
fn render_decision(decision: Decision) -> &'static str {
match decision {
Decision::Allow => "allow",
Decision::Prompt => "prompt",
Decision::Forbidden => "forbidden",
}
}

View File

@@ -1,22 +1,19 @@
use std::sync::Arc;
use codex_execpolicy2::Decision;
use codex_execpolicy2::Evaluation;
use codex_execpolicy2::PolicyParser;
use codex_execpolicy2::Rule;
use expect_test::expect;
use codex_execpolicy2::RuleMatch;
use codex_execpolicy2::rule::PatternToken;
use codex_execpolicy2::rule::PrefixPattern;
use codex_execpolicy2::rule::PrefixRule;
use pretty_assertions::assert_eq;
fn tokens(cmd: &[&str]) -> Vec<String> {
cmd.iter().map(std::string::ToString::to_string).collect()
}
fn rules_to_string(rules: &[Rule]) -> String {
format!(
"[{}]",
rules
.iter()
.map(std::string::ToString::to_string)
.collect::<Vec<_>>()
.join(", ")
)
}
#[test]
fn basic_match() {
let policy_src = r#"
@@ -29,14 +26,16 @@ prefix_rule(
.expect("parse policy");
let cmd = tokens(&["git", "status"]);
let evaluation = policy.check(&cmd);
expect![[r#"
match {
decision: allow,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["git", "status"], decision: allow },
]
}"#]]
.assert_eq(&evaluation.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Allow,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Allow,
}],
},
evaluation
);
}
#[test]
@@ -51,30 +50,50 @@ prefix_rule(
let bash_rules = policy.rules().get_vec("bash").expect("bash rules");
let sh_rules = policy.rules().get_vec("sh").expect("sh rules");
expect![[r#"[prefix_rule(pattern = [bash, [-c, -l]], decision = allow)]"#]]
.assert_eq(&rules_to_string(bash_rules));
expect![[r#"[prefix_rule(pattern = [sh, [-c, -l]], decision = allow)]"#]]
.assert_eq(&rules_to_string(sh_rules));
assert_eq!(
vec![Rule::Prefix(PrefixRule {
pattern: PrefixPattern {
first: Arc::from("bash"),
rest: vec![PatternToken::Alts(vec!["-c".to_string(), "-l".to_string()])].into(),
},
decision: Decision::Allow,
})],
bash_rules.clone()
);
assert_eq!(
vec![Rule::Prefix(PrefixRule {
pattern: PrefixPattern {
first: Arc::from("sh"),
rest: vec![PatternToken::Alts(vec!["-c".to_string(), "-l".to_string()])].into(),
},
decision: Decision::Allow,
})],
sh_rules.clone()
);
let bash_eval = policy.check(&tokens(&["bash", "-c", "echo", "hi"]));
expect![[r#"
match {
decision: allow,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["bash", "-c"], decision: allow },
]
}"#]]
.assert_eq(&bash_eval.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Allow,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["bash", "-c"]),
decision: Decision::Allow,
}],
},
bash_eval
);
let sh_eval = policy.check(&tokens(&["sh", "-l", "echo", "hi"]));
expect![[r#"
match {
decision: allow,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["sh", "-l"], decision: allow },
]
}"#]]
.assert_eq(&sh_eval.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Allow,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["sh", "-l"]),
decision: Decision::Allow,
}],
},
sh_eval
);
}
#[test]
@@ -88,28 +107,47 @@ prefix_rule(
let policy = parser.parse().expect("parse policy");
let rules = policy.rules().get_vec("npm").expect("npm rules");
expect![[r#"[prefix_rule(pattern = [npm, [i, install], [--legacy-peer-deps, --no-save]], decision = allow)]"#]]
.assert_eq(&rules_to_string(rules));
assert_eq!(
vec![Rule::Prefix(PrefixRule {
pattern: PrefixPattern {
first: Arc::from("npm"),
rest: vec![
PatternToken::Alts(vec!["i".to_string(), "install".to_string()]),
PatternToken::Alts(vec![
"--legacy-peer-deps".to_string(),
"--no-save".to_string()
]),
]
.into(),
},
decision: Decision::Allow,
})],
rules.clone()
);
let npm_i = policy.check(&tokens(&["npm", "i", "--legacy-peer-deps"]));
expect![[r#"
match {
decision: allow,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["npm", "i", "--legacy-peer-deps"], decision: allow },
]
}"#]]
.assert_eq(&npm_i.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Allow,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["npm", "i", "--legacy-peer-deps"]),
decision: Decision::Allow,
}],
},
npm_i
);
let npm_install = policy.check(&tokens(&["npm", "install", "--no-save", "leftpad"]));
expect![[r#"
match {
decision: allow,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["npm", "install", "--no-save"], decision: allow },
]
}"#]]
.assert_eq(&npm_install.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Allow,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["npm", "install", "--no-save"]),
decision: Decision::Allow,
}],
},
npm_install
);
}
#[test]
@@ -127,14 +165,16 @@ prefix_rule(
let parser = PolicyParser::new("test.codexpolicy", policy_src);
let policy = parser.parse().expect("parse policy");
let match_eval = policy.check(&tokens(&["git", "status"]));
expect![[r#"
match {
decision: allow,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["git", "status"], decision: allow },
]
}"#]]
.assert_eq(&match_eval.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Allow,
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Allow,
}],
},
match_eval
);
let no_match_eval = policy.check(&tokens(&[
"git",
@@ -142,7 +182,7 @@ prefix_rule(
"color.status=always",
"status",
]));
expect!["noMatch"].assert_eq(&no_match_eval.to_string());
assert_eq!(Evaluation::NoMatch, no_match_eval);
}
#[test]
@@ -165,24 +205,38 @@ prefix_rule(
let policy = parser.parse().expect("parse policy");
let status = policy.check(&tokens(&["git", "status"]));
expect![[r#"
match {
decision: prompt,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["git", "status"], decision: allow },
prefixRuleMatch { matchedPrefix: ["git"], decision: prompt },
]
}"#]]
.assert_eq(&status.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Prompt,
matched_rules: vec![
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Allow,
},
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
},
],
},
status
);
let commit = policy.check(&tokens(&["git", "commit", "-m", "hi"]));
expect![[r#"
match {
decision: forbidden,
matchedRules: [
prefixRuleMatch { matchedPrefix: ["git"], decision: prompt },
prefixRuleMatch { matchedPrefix: ["git", "commit"], decision: forbidden },
]
}"#]]
.assert_eq(&commit.to_string());
assert_eq!(
Evaluation::Match {
decision: Decision::Forbidden,
matched_rules: vec![
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
},
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "commit"]),
decision: Decision::Forbidden,
},
],
},
commit
);
}