remove tokenize_command use cases

kevin zhao
2025-11-10 14:14:28 -08:00
parent 71ea7edca4
commit eea9bff1fb
9 changed files with 61 additions and 79 deletions

View File

@@ -1,9 +0,0 @@
-use crate::error::Error;
-use crate::error::Result;
-
-pub fn tokenize_command(raw: &str) -> Result<Vec<String>> {
-    shlex::split(raw).ok_or_else(|| Error::TokenizationFailed {
-        example: raw.to_string(),
-        reason: "invalid shell tokens".to_string(),
-    })
-}
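With this helper deleted, the crate no longer shell-splits raw command strings; callers are expected to supply already-tokenized commands. For reference, a minimal caller-side sketch of the equivalent shlex call (the function name here is hypothetical, not part of the crate):

// Hypothetical caller-side replacement for the deleted helper: shell-style
// tokenization via shlex, which returns None on malformed input such as
// unbalanced quotes.
fn split_command(raw: &str) -> Option<Vec<String>> {
    // e.g. `git commit -m "fix bug"` -> ["git", "commit", "-m", "fix bug"]
    shlex::split(raw)
}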

View File

@@ -2,12 +2,12 @@ prefix_rule(
id = "git_status",
pattern = ["git", "status"],
match = [
"git status",
"git status -- path/to/file",
["git", "status"],
["git", "status", "--", "path/to/file"],
],
not_match = [
"git statusx",
"git reset --hard",
["git", "statusx"],
["git", "reset", "--hard"],
],
)
@@ -16,13 +16,13 @@ prefix_rule(
pattern = ["npm", ["i", "install"]],
decision = "prompt",
match = [
"npm i",
"npm install",
"npm install lodash",
["npm", "i"],
["npm", "install"],
["npm", "install", "lodash"],
],
not_match = [
"npmx install",
"npm outdated",
["npmx", "install"],
["npm", "outdated"],
],
)
@@ -31,6 +31,6 @@ prefix_rule(
pattern = ["git", "reset", "--hard"],
decision = "forbidden",
match = [
"git reset --hard",
["git", "reset", "--hard"],
],
)
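A minimal sketch of how the list-based examples above line up with evaluation at runtime, assuming the Some/None behavior of `Policy::evaluate` implied by the CLI change further down; the `demo` function and its assertions are illustrative, not part of this commit:

use codex_execpolicy2::Policy;

// Hypothetical check mirroring the git_status rule's examples above.
fn demo(policy: &Policy) {
    let hit = vec!["git".to_string(), "status".to_string()];
    let miss = vec!["git".to_string(), "statusx".to_string()];
    assert!(policy.evaluate(&hit).is_some()); // listed under match
    assert!(policy.evaluate(&miss).is_none()); // listed under not_match
}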

View File

@@ -8,8 +8,8 @@ pub enum Error {
     InvalidDecision(String),
     #[error("invalid pattern element: {0}")]
     InvalidPattern(String),
-    #[error("failed to tokenize example `{example}`: {reason}")]
-    TokenizationFailed { example: String, reason: String },
+    #[error("invalid example: {0}")]
+    InvalidExample(String),
     #[error("expected example to match rule `{rule_id}`: {example}")]
     ExampleDidNotMatch { rule_id: String, example: String },
     #[error("expected example to not match rule `{rule_id}`: {example}")]
View File

@@ -1,11 +1,9 @@
-pub mod command;
 pub mod decision;
 pub mod error;
 pub mod parser;
 pub mod policy;
 pub mod rule;
 
-pub use command::tokenize_command;
 pub use decision::Decision;
 pub use error::Error;
 pub use error::Result;

View File

@@ -6,7 +6,6 @@ use anyhow::Result;
 use anyhow::bail;
 use codex_execpolicy2::PolicyParser;
 use codex_execpolicy2::load_default_policy;
-use codex_execpolicy2::tokenize_command;
 
 fn main() -> Result<()> {
     let mut args = std::env::args().skip(1);
@@ -45,17 +44,11 @@ fn run_subcommand(
 fn cmd_check(policy_path: Option<String>, args: Vec<String>) -> Result<()> {
     if args.is_empty() {
-        bail!("usage: codex-execpolicy2 check <command tokens...|\"command string\">");
+        bail!("usage: codex-execpolicy2 check <command tokens...>");
     }
 
     let policy = load_policy(policy_path)?;
-    let tokens = if args.len() == 1 {
-        tokenize_command(&args[0])?
-    } else {
-        args
-    };
-
-    match policy.evaluate(&tokens) {
+    match policy.evaluate(&args) {
         Some(eval) => {
             let json = serde_json::to_string_pretty(&eval)?;
             println!("{json}");
@@ -80,6 +73,6 @@ fn load_policy(policy_path: Option<String>) -> Result<codex_execpolicy2::Policy>
 fn print_usage() {
     eprintln!(
         "usage:
-    codex-execpolicy2 [--policy path] check <command tokens...|\"command string\">"
+    codex-execpolicy2 [--policy path] check <command tokens...>"
     );
 }
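One behavioral consequence of dropping the single-argument tokenization path: a quoted command string is no longer split, so `check "git status"` now evaluates the single literal token `git status` rather than `["git", "status"]`. A small sketch of the new pass-through (the shell invocations in the comments are illustrative):

// Every process argument is now taken verbatim as one token; nothing is
// shell-split on the way in.
fn main() {
    // `codex-execpolicy2 check git status`   -> ["check", "git", "status"]
    // `codex-execpolicy2 check "git status"` -> ["check", "git status"]
    let tokens: Vec<String> = std::env::args().skip(1).collect();
    println!("{tokens:?}");
}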

View File

@@ -12,7 +12,6 @@ use starlark::values::list::ListRef;
 use starlark::values::list::UnpackList;
 use starlark::values::none::NoneType;
-use crate::command::tokenize_command;
 use crate::decision::Decision;
 use crate::error::Error;
 use crate::error::Result;
@@ -136,13 +135,36 @@ fn parse_pattern<'v>(pattern: UnpackList<Value<'v>>) -> Result<Vec<Vec<String>>>
     Ok(expand_pattern(&parts))
 }
 
+fn parse_examples<'v>(examples: UnpackList<Value<'v>>) -> Result<Vec<Vec<String>>> {
+    let mut parsed = Vec::new();
+    for example in examples.items {
+        let list = ListRef::from_value(example).ok_or_else(|| {
+            Error::InvalidExample("example must be a list of strings".to_string())
+        })?;
+        let mut tokens = Vec::new();
+        for value in list.content() {
+            let token = value.unpack_str().ok_or_else(|| {
+                Error::InvalidExample("example tokens must be strings".to_string())
+            })?;
+            tokens.push(token.to_string());
+        }
+        if tokens.is_empty() {
+            return Err(Error::InvalidExample(
+                "example cannot be an empty list".to_string(),
+            ));
+        }
+        parsed.push(tokens);
+    }
+    Ok(parsed)
+}
+
 #[starlark_module]
 fn policy_builtins(builder: &mut GlobalsBuilder) {
     fn prefix_rule<'v>(
         pattern: UnpackList<Value<'v>>,
         decision: Option<&'v str>,
-        r#match: Option<UnpackList<&'v str>>,
-        not_match: Option<UnpackList<&'v str>>,
+        r#match: Option<UnpackList<Value<'v>>>,
+        not_match: Option<UnpackList<Value<'v>>>,
         id: Option<&'v str>,
         eval: &mut Evaluator<'v, '_, '_>,
     ) -> anyhow::Result<NoneType> {
@@ -153,24 +175,10 @@ fn policy_builtins(builder: &mut GlobalsBuilder) {
         let prefixes = parse_pattern(pattern)?;
 
-        let positive_examples: Vec<Vec<String>> = r#match
-            .map(|examples| {
-                examples
-                    .items
-                    .into_iter()
-                    .map(tokenize_command)
-                    .collect::<Result<Vec<_>>>()
-            })
-            .transpose()?
-            .unwrap_or_default();
+        let positive_examples: Vec<Vec<String>> =
+            r#match.map(parse_examples).transpose()?.unwrap_or_default();
 
         let negative_examples: Vec<Vec<String>> = not_match
-            .map(|examples| {
-                examples
-                    .items
-                    .into_iter()
-                    .map(tokenize_command)
-                    .collect::<Result<Vec<_>>>()
-            })
+            .map(parse_examples)
             .transpose()?
             .unwrap_or_default();
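To summarize what `parse_examples` now rejects, a hedged sketch of the `Error::InvalidExample` messages produced for each bad shape; how these errors surface through the Starlark evaluator is outside this diff, and the `render` helper is illustrative only:

use codex_execpolicy2::Error;

// Each rejected shape maps to Error::InvalidExample with one of these reasons:
//   match = ["git status"]   -> "example must be a list of strings"
//   match = [["git", 1]]     -> "example tokens must be strings"
//   match = [[]]             -> "example cannot be an empty list"
fn render(err: &Error) -> String {
    // thiserror's Display adds the prefix from the #[error] attribute, e.g.
    // "invalid example: example must be a list of strings"
    err.to_string()
}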