diff --git a/src/rules/pattern_lexer.rs b/src/rules/pattern_lexer.rs
index 82947b0..4c70747 100644
--- a/src/rules/pattern_lexer.rs
+++ b/src/rules/pattern_lexer.rs
@@ -21,6 +21,9 @@ pub enum LexToken {
     CloseBracket,
     /// Angle-bracket placeholder (e.g. "", "")
     Placeholder(String),
+    /// A multi-word alternation where at least one alternative contains spaces.
+    /// e.g., `"npx prettier"|prettier` -> [["npx", "prettier"], ["prettier"]]
+    MultiWordAlternation(Vec<Vec<String>>),
 }
 
 /// Tokenize a pattern string into a sequence of `LexToken`s.
@@ -37,7 +40,8 @@ pub fn tokenize(pattern: &str) -> Result<Vec<LexToken>, PatternParseError> {
                 chars.next();
             }
 
-            // Quoted string -> Literal (content without quotes)
+            // Quoted string -> Literal (content without quotes), or
+            // multi-word alternation if followed by `|`
             '"' | '\'' => {
                 let quote = ch;
                 chars.next(); // consume opening quote
@@ -46,7 +50,15 @@ pub fn tokenize(pattern: &str) -> Result<Vec<LexToken>, PatternParseError> {
                         "unclosed quote starting at position {pos}"
                     ))
                 })?;
-                tokens.push(LexToken::Literal(value));
+                // Check if this starts a multi-word alternation: "quoted"|...
+                if let Some(&(_, '|')) = chars.peek() {
+                    let first_words: Vec<String> =
+                        value.split_whitespace().map(|s| s.to_string()).collect();
+                    let token = consume_alternation_continuation(&mut chars, first_words)?;
+                    tokens.push(token);
+                } else {
+                    tokens.push(LexToken::Literal(value));
+                }
             }
 
             // Angle bracket placeholder: ,
@@ -105,7 +117,7 @@ pub fn tokenize(pattern: &str) -> Result<Vec<LexToken>, PatternParseError> {
             // Negation: !value or !a|b|c
             '!' => {
                 chars.next(); // consume '!'
-                let word = consume_word(&mut chars, None);
+                let word = consume_word(&mut chars, None, None);
                 if word.is_empty() {
                     return Err(PatternParseError::InvalidSyntax(
                         "empty negation".to_string(),
@@ -114,10 +126,61 @@ pub fn tokenize(pattern: &str) -> Result<Vec<LexToken>, PatternParseError> {
                 tokens.push(classify_negation(&word)?);
             }
 
-            // Any other character: consume a word (until whitespace, bracket, or angle bracket)
+            // Any other character: consume a word (until whitespace, bracket, angle bracket, or quote)
             _ => {
-                let word = consume_word(&mut chars, Some(ch));
-                tokens.push(classify_word(&word)?);
+                let word = consume_word(&mut chars, Some(ch), None);
+                // Check if the word ends with `|` and next char is a quote,
+                // indicating a multi-word alternation like `prettier|"npx prettier"`
+                if word.ends_with('|') {
+                    if let Some(&(qpos, q @ ('"' | '\''))) = chars.peek() {
+                        let prefix = &word[..word.len() - 1];
+                        // Split existing pipe-separated parts into individual alternatives
+                        let mut alternatives: Vec<Vec<String>> = prefix
+                            .split('|')
+                            .map(|part| {
+                                if part.is_empty() {
+                                    Err(PatternParseError::EmptyAlternation)
+                                } else {
+                                    Ok(vec![part.to_string()])
+                                }
+                            })
+                            .collect::<Result<Vec<_>, _>>()?;
+                        // Now consume the quoted part and any further alternatives
+                        chars.next(); // consume opening quote
+                        let quoted = consume_until(&mut chars, q).ok_or_else(|| {
+                            PatternParseError::InvalidSyntax(format!(
+                                "unclosed quote starting at position {qpos}"
+                            ))
+                        })?;
+                        let quoted_words: Vec<String> =
+                            quoted.split_whitespace().map(|s| s.to_string()).collect();
+                        if quoted_words.is_empty() {
+                            return Err(PatternParseError::EmptyAlternation);
+                        }
+                        // Continue consuming further alternatives if more `|` follow
+                        let token = consume_alternation_continuation(&mut chars, quoted_words)?;
+                        // Merge: prepend the bare-word alternatives to the continuation result
+                        match token {
+                            LexToken::MultiWordAlternation(mut rest) => {
+                                alternatives.append(&mut rest);
+                                tokens.push(classify_multi_word_alternation(alternatives)?);
+                            }
+                            LexToken::Alternation(rest_alts) => {
+                                alternatives.extend(rest_alts.into_iter().map(|alt| vec![alt]));
+                                tokens.push(classify_multi_word_alternation(alternatives)?);
+                            }
+                            _ => unreachable!(
+                                "consume_alternation_continuation returned unexpected: {token:?}"
+                            ),
+                        }
+                    } else {
+                        // Trailing `|` without a following quote: delegate to classify_word
+                        // which will report EmptyAlternation
+                        tokens.push(classify_word(&word)?);
+                    }
+                } else {
+                    tokens.push(classify_word(&word)?);
+                }
             }
         }
     }
@@ -133,9 +196,11 @@ pub fn tokenize(pattern: &str) -> Result<Vec<LexToken>, PatternParseError> {
 
 /// Consume characters forming a "word" (non-whitespace, non-bracket, non-angle-bracket).
 /// If `prefix` is provided, it is prepended to the result.
+/// If `extra_stop` is provided, the function also stops at that character.
 fn consume_word(
     chars: &mut std::iter::Peekable<std::str::CharIndices<'_>>,
     prefix: Option<char>,
+    extra_stop: Option<char>,
 ) -> String {
     let mut word = match prefix {
         Some(c) => {
@@ -145,7 +210,7 @@ fn consume_word(
         None => String::new(),
     };
     while let Some(&(_, c)) = chars.peek() {
-        if is_word_boundary(c) {
+        if is_word_boundary(c) || extra_stop == Some(c) {
             break;
         }
         word.push(c);
@@ -170,7 +235,7 @@ fn consume_until(
 }
 
 fn is_word_boundary(c: char) -> bool {
-    matches!(c, ' ' | '\t' | '[' | ']' | '<')
+    matches!(c, ' ' | '\t' | '[' | ']' | '<' | '"' | '\'')
 }
 
 /// Classify a raw word into the appropriate LexToken.
@@ -195,6 +260,85 @@ fn classify_negation(word: &str) -> Result<LexToken, PatternParseError> {
     }
 }
 
+/// Consume remaining alternatives in a multi-word alternation.
+///
+/// Called after the first alternative has already been parsed (either quoted or bare word).
+/// `first_words` is the word list of the first alternative.
+/// Expects the iterator to be positioned at a `|` character (or past the first alternative).
+/// Returns a `MultiWordAlternation` token (which may be downgraded to `Alternation` if all
+/// alternatives are single-word).
+fn consume_alternation_continuation(
+    chars: &mut std::iter::Peekable<std::str::CharIndices<'_>>,
+    first_words: Vec<String>,
+) -> Result<LexToken, PatternParseError> {
+    let mut alternatives = vec![first_words];
+
+    while let Some(&(_, '|')) = chars.peek() {
+        chars.next(); // consume '|'
+
+        match chars.peek() {
+            Some(&(pos, q @ ('"' | '\''))) => {
+                chars.next(); // consume opening quote
+                let quoted = consume_until(chars, q).ok_or_else(|| {
+                    PatternParseError::InvalidSyntax(format!(
+                        "unclosed quote starting at position {pos}"
+                    ))
+                })?;
+                let words: Vec<String> = quoted.split_whitespace().map(|s| s.to_string()).collect();
+                if words.is_empty() {
+                    return Err(PatternParseError::EmptyAlternation);
+                }
+                alternatives.push(words);
+            }
+            Some(&(_, c)) if !is_word_boundary(c) => {
+                let word = consume_word(chars, Some(c), Some('|'));
+                if word.is_empty() {
+                    return Err(PatternParseError::EmptyAlternation);
+                }
+                alternatives.push(vec![word]);
+            }
+            _ => {
+                return Err(PatternParseError::EmptyAlternation);
+            }
+        }
+    }
+
+    classify_multi_word_alternation(alternatives)
+}
+
+/// Classify a list of word-list alternatives into the appropriate LexToken.
+///
+/// If all alternatives are single-word, returns `Alternation`.
+/// Otherwise returns `MultiWordAlternation`.
+fn classify_multi_word_alternation(
+    alternatives: Vec<Vec<String>>,
+) -> Result<LexToken, PatternParseError> {
+    if alternatives.iter().all(|alt| alt.len() == 1) {
+        // All single-word: use regular Alternation
+        let parts: Vec<String> = alternatives.into_iter().map(|mut v| v.remove(0)).collect();
+        validate_alternation_parts_vec(&parts)?;
+        Ok(LexToken::Alternation(parts))
+    } else {
+        // At least one multi-word alternative
+        for alt in &alternatives {
+            if alt.is_empty() {
+                return Err(PatternParseError::EmptyAlternation);
+            }
+        }
+        Ok(LexToken::MultiWordAlternation(alternatives))
+    }
+}
+
+/// Validate that no part in a pre-split alternation is empty.
+fn validate_alternation_parts_vec(parts: &[String]) -> Result<(), PatternParseError> {
+    for part in parts {
+        if part.is_empty() {
+            return Err(PatternParseError::EmptyAlternation);
+        }
+    }
+    Ok(())
+}
+
 /// Split on '|' and validate that no part is empty.
 fn validate_alternation_parts(word: &str) -> Result<Vec<String>, PatternParseError> {
     word.split('|')
@@ -556,6 +700,130 @@ mod tests {
         );
     }
 
+    // === Multi-word alternation ===
+
+    #[rstest]
+    #[case::quoted_then_bare(
+        r#""npx prettier"|prettier"#,
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["npx".into(), "prettier".into()],
+            vec!["prettier".into()],
+        ])]
+    )]
+    #[case::bare_then_quoted(
+        r#"prettier|"npx prettier""#,
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["prettier".into()],
+            vec!["npx".into(), "prettier".into()],
+        ])]
+    )]
+    #[case::three_alternatives(
+        r#""npx prettier"|"bunx prettier"|prettier"#,
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["npx".into(), "prettier".into()],
+            vec!["bunx".into(), "prettier".into()],
+            vec!["prettier".into()],
+        ])]
+    )]
+    #[case::multi_word_with_trailing_tokens(
+        r#""npx prettier"|prettier *"#,
+        vec![
+            LexToken::MultiWordAlternation(vec![
+                vec!["npx".into(), "prettier".into()],
+                vec!["prettier".into()],
+            ]),
+            LexToken::Wildcard,
+        ]
+    )]
+    #[case::all_single_word_quoted_becomes_alternation(
+        r#""ast-grep"|sg"#,
+        vec![LexToken::Alternation(vec!["ast-grep".into(), "sg".into()])]
+    )]
+    #[case::mixed_single_and_multi_word(
+        r#"prettier|"npx prettier"|"bunx prettier""#,
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["prettier".into()],
+            vec!["npx".into(), "prettier".into()],
+            vec!["bunx".into(), "prettier".into()],
+        ])]
+    )]
+    #[case::single_quoted_multi_word(
+        "prettier|'npx prettier'",
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["prettier".into()],
+            vec!["npx".into(), "prettier".into()],
+        ])]
+    )]
+    #[case::all_single_word_via_bare_and_quoted(
+        r#"foo|"bar""#,
+        vec![LexToken::Alternation(vec!["foo".into(), "bar".into()])]
+    )]
+    #[case::three_bare_and_quoted_single_word(
+        r#"foo|"bar"|baz"#,
+        vec![LexToken::Alternation(vec!["foo".into(), "bar".into(), "baz".into()])]
+    )]
+    #[case::quoted_multi_then_two_bare(
+        r#""npx prettier"|foo|bar"#,
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["npx".into(), "prettier".into()],
+            vec!["foo".into()],
+            vec!["bar".into()],
+        ])]
+    )]
+    #[case::bare_pipe_bare_pipe_quoted(
+        r#"foo|bar|"npx prettier""#,
+        vec![LexToken::MultiWordAlternation(vec![
+            vec!["foo".into()],
+            vec!["bar".into()],
+            vec!["npx".into(), "prettier".into()],
+        ])]
+    )]
+    fn tokenize_multi_word_alternation(#[case] input: &str, #[case] expected: Vec<LexToken>) {
+        assert_eq!(tokenize(input).unwrap(), expected);
+    }
+
+    // === Multi-word alternation error cases ===
+
+    #[rstest]
+    #[case::empty_quoted_alternative(r#"""|prettier"#)]
+    #[case::trailing_pipe_after_quoted(r#""npx prettier"|"#)]
+    fn tokenize_multi_word_alternation_errors(#[case] input: &str) {
+        let result = tokenize(input);
+        assert!(
+            matches!(result, Err(PatternParseError::EmptyAlternation)),
+            "expected EmptyAlternation for {input:?}, got {result:?}"
+        );
+    }
+
+    #[rstest]
+    #[case::bare_then_unclosed_quote(
+        r#"prettier|"npx prettier"#,
+        "invalid syntax: unclosed quote starting at position 9"
+    )]
+    #[case::quoted_then_unclosed_quote(
+        r#""npx prettier"|"bunx prettier"#,
+        "invalid syntax: unclosed quote starting at position 15"
+    )]
+    fn tokenize_multi_word_unclosed_quote_reports_quote_position(
+        #[case] input: &str,
+        #[case] expected_msg: &str,
+    ) {
+        let err = tokenize(input).expect_err(&format!("expected error for: {input:?}"));
+        assert_eq!(err.to_string(), expected_msg);
+    }
+
+    // === Backward compatibility: single-word alternation unchanged ===
+
+    #[test]
+    fn tokenize_single_word_alternation_unchanged() {
+        // Existing single-word alternation should still produce Alternation, not MultiWordAlternation
+        let result = tokenize("ast-grep|sg").unwrap();
+        assert_eq!(
+            result,
vec![LexToken::Alternation(vec!["ast-grep".into(), "sg".into()])] + ); + } + #[test] fn tokenize_single_literal() { let result = tokenize("ls").unwrap(); diff --git a/src/rules/pattern_matcher.rs b/src/rules/pattern_matcher.rs index ff65922..77b7a96 100644 --- a/src/rules/pattern_matcher.rs +++ b/src/rules/pattern_matcher.rs @@ -970,4 +970,77 @@ mod tests { expected ); } + + // ======================================== + // Multi-word alternation matching + // ======================================== + + /// Helper: parse pattern with parse_multi, then check if any expanded pattern matches. + fn check_multi_match(pattern_str: &str, command_str: &str, definitions: &Definitions) -> bool { + use crate::rules::pattern_parser::parse_multi; + + let patterns = parse_multi(pattern_str).unwrap(); + for pattern in &patterns { + let schema = build_schema_from_pattern(pattern); + let command = parse_command(command_str, &schema).unwrap(); + if matches(pattern, &command, definitions) { + return true; + } + } + false + } + + #[rstest] + #[case::npx_variant(r#""npx prettier"|prettier *"#, "npx prettier --write .", true)] + #[case::bare_variant(r#""npx prettier"|prettier *"#, "prettier --write .", true)] + #[case::no_match_different_runner( + r#""npx prettier"|prettier *"#, + "yarn prettier --write .", + false + )] + #[case::no_match_different_tool(r#""npx prettier"|prettier *"#, "npx eslint --fix .", false)] + #[case::three_alternatives_first( + r#""npx prettier"|"bunx prettier"|prettier *"#, + "npx prettier --write .", + true + )] + #[case::three_alternatives_second( + r#""npx prettier"|"bunx prettier"|prettier *"#, + "bunx prettier --write .", + true + )] + #[case::three_alternatives_third( + r#""npx prettier"|"bunx prettier"|prettier *"#, + "prettier --write .", + true + )] + #[case::python_pytest_module(r#""python -m pytest"|pytest *"#, "python -m pytest tests/", true)] + #[case::python_pytest_bare(r#""python -m pytest"|pytest *"#, "pytest tests/", true)] + 
#[case::python_pytest_no_match(r#""python -m pytest"|pytest *"#, "python -m mypy", false)]
+    fn multi_word_alternation_matching(
+        #[case] pattern_str: &str,
+        #[case] command_str: &str,
+        #[case] expected: bool,
+    ) {
+        assert_eq!(
+            check_multi_match(pattern_str, command_str, &empty_defs()),
+            expected,
+            "pattern {pattern_str:?} vs command {command_str:?}",
+        );
+    }
+
+    #[rstest]
+    #[case::backward_compat_first("ast-grep|sg scan *", "ast-grep scan foo", true)]
+    #[case::backward_compat_second("ast-grep|sg scan *", "sg scan foo", true)]
+    #[case::backward_compat_no_match("ast-grep|sg scan *", "rg scan foo", false)]
+    fn multi_word_alternation_backward_compat(
+        #[case] pattern_str: &str,
+        #[case] command_str: &str,
+        #[case] expected: bool,
+    ) {
+        assert_eq!(
+            check_multi_match(pattern_str, command_str, &empty_defs()),
+            expected,
+        );
+    }
 }
diff --git a/src/rules/pattern_parser.rs b/src/rules/pattern_parser.rs
index 0a0ab8b..04432a3 100644
--- a/src/rules/pattern_parser.rs
+++ b/src/rules/pattern_parser.rs
@@ -71,6 +71,54 @@ pub enum PatternToken {
 /// - `word|word` -> Alternation
 /// - everything else -> Literal
 pub fn parse(pattern: &str) -> Result<Pattern, super::PatternParseError> {
+    let lex_tokens = tokenize_pattern(pattern)?;
+    build_pattern_from_tokens(&lex_tokens)
+}
+
+/// Parse a pattern string that may contain multi-word alternation.
+///
+/// For patterns with multi-word alternation (e.g., `"npx prettier"|prettier *`),
+/// returns multiple `Pattern` instances — one for each alternative:
+/// - `Pattern { command: "npx", tokens: [Literal("prettier"), Wildcard] }`
+/// - `Pattern { command: "prettier", tokens: [Wildcard] }`
+///
+/// For regular patterns (no multi-word alternation), returns a single `Pattern`
+/// in the vector, equivalent to calling `parse`.
+pub fn parse_multi(pattern: &str) -> Result<Vec<Pattern>, super::PatternParseError> {
+    let lex_tokens = tokenize_pattern(pattern)?;
+
+    match &lex_tokens[0] {
+        LexToken::MultiWordAlternation(alternatives) => {
+            let rest = &lex_tokens[1..];
+            let rest_tokens = build_pattern_tokens(rest, false)?;
+            let mut patterns = Vec::with_capacity(alternatives.len());
+
+            for alt in alternatives {
+                // Each alternative is a list of words; the first word is the command name,
+                // the rest are prepended as Literal tokens before the shared remaining tokens.
+                let command = CommandPattern::Literal(alt[0].clone());
+                let prefix_tokens: Vec<PatternToken> = alt[1..]
+                    .iter()
+                    .map(|w| PatternToken::Literal(w.clone()))
+                    .collect();
+
+                let mut tokens = prefix_tokens;
+                tokens.extend(rest_tokens.clone());
+
+                patterns.push(Pattern { command, tokens });
+            }
+
+            Ok(patterns)
+        }
+        _ => {
+            let pattern = build_pattern_from_tokens(&lex_tokens)?;
+            Ok(vec![pattern])
+        }
+    }
+}
+
+/// Tokenize a pattern string, returning an error if empty.
+fn tokenize_pattern(pattern: &str) -> Result<Vec<LexToken>, super::PatternParseError> {
     use super::PatternParseError;
 
     let trimmed = pattern.trim();
@@ -83,6 +131,14 @@ pub fn parse(pattern: &str) -> Result<Pattern, super::PatternParseError> {
         return Err(PatternParseError::InvalidSyntax("empty pattern".into()));
     }
 
+    Ok(lex_tokens)
+}
+
+/// Build a single Pattern from already-tokenized lex tokens.
+/// The first token becomes the command name, the rest become pattern tokens.
+fn build_pattern_from_tokens(lex_tokens: &[LexToken]) -> Result<Pattern, super::PatternParseError> {
+    use super::PatternParseError;
+
     let command = match &lex_tokens[0] {
         LexToken::Literal(s) => CommandPattern::Literal(s.clone()),
         LexToken::Alternation(alts) => CommandPattern::Alternation(alts.clone()),
@@ -207,6 +263,12 @@ fn build_pattern_tokens(
                     "unexpected closing bracket".into(),
                 ));
             }
+
+            LexToken::MultiWordAlternation(_) => {
+                return Err(PatternParseError::InvalidSyntax(
+                    "multi-word alternation is only supported in command position".into(),
+                ));
+            }
         }
     }
 
@@ -229,6 +291,9 @@ fn lex_to_pattern_value(token: &LexToken) -> Result Err(
             super::PatternParseError::InvalidSyntax("bracket cannot be used as flag value".into()),
         ),
+        LexToken::MultiWordAlternation(_) => Err(super::PatternParseError::InvalidSyntax(
+            "multi-word alternation cannot be used as flag value".into(),
+        )),
     }
 }
 
@@ -640,4 +705,73 @@ mod tests {
             "wrong error variant for {input:?}: expected {expected_variant}, got {debug}"
         );
     }
+
+    // === Multi-word alternation (parse_multi) ===
+
+    #[rstest]
+    #[case::two_alternatives(
+        r#""npx prettier"|prettier *"#,
+        vec![
+            Pattern {
+                command: CommandPattern::Literal("npx".into()),
+                tokens: vec![PatternToken::Literal("prettier".into()), PatternToken::Wildcard],
+            },
+            Pattern {
+                command: CommandPattern::Literal("prettier".into()),
+                tokens: vec![PatternToken::Wildcard],
+            },
+        ]
+    )]
+    #[case::three_alternatives(
+        r#""npx prettier"|"bunx prettier"|prettier *"#,
+        vec![
+            Pattern {
+                command: CommandPattern::Literal("npx".into()),
+                tokens: vec![PatternToken::Literal("prettier".into()), PatternToken::Wildcard],
+            },
+            Pattern {
+                command: CommandPattern::Literal("bunx".into()),
+                tokens: vec![PatternToken::Literal("prettier".into()), PatternToken::Wildcard],
+            },
+            Pattern {
+                command: CommandPattern::Literal("prettier".into()),
+                tokens: vec![PatternToken::Wildcard],
+            },
+        ]
+    )]
+    #[case::multi_word_with_subcommand(
+        r#""python -m pytest"|pytest *"#,
+        vec![
+            Pattern {
+                command:
 CommandPattern::Literal("python".into()),
+                tokens: vec![
+                    PatternToken::Literal("-m".into()),
+                    PatternToken::Literal("pytest".into()),
+                    PatternToken::Wildcard,
+                ],
+            },
+            Pattern {
+                command: CommandPattern::Literal("pytest".into()),
+                tokens: vec![PatternToken::Wildcard],
+            },
+        ]
+    )]
+    fn parse_multi_expands_alternatives(#[case] input: &str, #[case] expected: Vec<Pattern>) {
+        let result = parse_multi(input).unwrap();
+        assert_eq!(result, expected);
+    }
+
+    #[rstest]
+    #[case::single_word_alternation("ast-grep|sg scan *", 1)]
+    #[case::simple_literal("git status", 1)]
+    #[case::wildcard_command("* --help", 1)]
+    fn parse_multi_no_expansion(#[case] input: &str, #[case] expected_count: usize) {
+        let result = parse_multi(input).unwrap();
+        assert_eq!(
+            result.len(),
+            expected_count,
+            "expected {expected_count} patterns for {input:?}, got {}",
+            result.len()
+        );
+    }
 }
diff --git a/src/rules/rule_engine.rs b/src/rules/rule_engine.rs
index 9e523ba..0973156 100644
--- a/src/rules/rule_engine.rs
+++ b/src/rules/rule_engine.rs
@@ -10,7 +10,7 @@ use crate::rules::command_parser::{
 };
 use crate::rules::expr_evaluator::{ExprContext, evaluate};
 use crate::rules::pattern_matcher::{extract_placeholder, matches_with_captures};
-use crate::rules::pattern_parser::{Pattern, PatternToken, parse as parse_pattern};
+use crate::rules::pattern_parser::{Pattern, PatternToken, parse_multi};
 
 /// Context for rule evaluation, providing environment variables and
 /// working directory for `when` clause evaluation.
@@ -252,36 +252,43 @@ fn evaluate_command_inner( None => continue, }; - let pattern = parse_pattern(pattern_str)?; - let schema = build_flag_schema(&pattern); - let parsed_command = parse_command(command, &schema)?; + let patterns = parse_multi(pattern_str)?; - let captures = matches_with_captures(&pattern, &parsed_command, definitions); - if captures.is_none() { - continue; - } + // Try each expanded pattern (multi-word alternation produces multiple patterns); + // use the first matching pattern for this rule. + for pattern in &patterns { + let schema = build_flag_schema(pattern); + let parsed_command = parse_command(command, &schema)?; - // Evaluate when clause if present - if let Some(when_expr) = &rule.when { - let expr_context = build_expr_context(&parsed_command, context, definitions); - match evaluate(when_expr, &expr_context) { - Ok(true) => {} - Ok(false) => continue, - Err(e) => return Err(e.into()), + let captures = matches_with_captures(pattern, &parsed_command, definitions); + if captures.is_none() { + continue; } - } - match_infos.push(RuleMatchInfo { - action_kind, - pattern: pattern_str.to_string(), - matched_tokens: captures.unwrap_or_default(), - }); + // Evaluate when clause if present + if let Some(when_expr) = &rule.when { + let expr_context = build_expr_context(&parsed_command, context, definitions); + match evaluate(when_expr, &expr_context) { + Ok(true) => {} + Ok(false) => continue, + Err(e) => return Err(e.into()), + } + } - matched.push(MatchedRule { - action_kind, - rule, - pattern_str: pattern_str.to_string(), - }); + match_infos.push(RuleMatchInfo { + action_kind, + pattern: pattern_str.to_string(), + matched_tokens: captures.unwrap_or_default(), + }); + + matched.push(MatchedRule { + action_kind, + rule, + pattern_str: pattern_str.to_string(), + }); + + break; + } } // Try wrapper pattern matching for recursive evaluation @@ -353,11 +360,21 @@ fn try_unwrap_wrapper( }; for wrapper_pattern_str in wrappers { - let pattern = 
parse_pattern(wrapper_pattern_str)?; - let schema = build_flag_schema(&pattern); - let parsed_command = parse_command(command, &schema)?; + let patterns = parse_multi(wrapper_pattern_str)?; + + // Try each expanded pattern for this wrapper definition + let mut extracted_tokens = None; + for pattern in &patterns { + let schema = build_flag_schema(pattern); + let parsed_command = parse_command(command, &schema)?; + + if let Some(tokens) = extract_placeholder(pattern, &parsed_command, definitions)? { + extracted_tokens = Some(tokens); + break; + } + } - if let Some(tokens) = extract_placeholder(&pattern, &parsed_command, definitions)? { + if let Some(tokens) = extracted_tokens { // Single token: a shell script string (e.g., from `bash -c `) // that should be passed as-is for tree-sitter to parse. // Multiple tokens: a structured command + args (e.g., from `sudo `) diff --git a/tests/integration/config_to_rule_evaluation.rs b/tests/integration/config_to_rule_evaluation.rs index 959e5c2..75b8f79 100644 --- a/tests/integration/config_to_rule_evaluation.rs +++ b/tests/integration/config_to_rule_evaluation.rs @@ -374,3 +374,60 @@ fn full_config_evaluates_correctly( let result = evaluate_command(&config, command, &empty_context).unwrap(); expected(&result.action); } + +// ======================================== +// Multi-word alternation (end-to-end via YAML config) +// ======================================== + +#[rstest] +#[case::npx_prettier_allowed( + "npx prettier --write .", + assert_allow as ActionAssertion, +)] +#[case::bunx_prettier_allowed( + "bunx prettier --write .", + assert_allow as ActionAssertion, +)] +#[case::bare_prettier_allowed( + "prettier --write .", + assert_allow as ActionAssertion, +)] +#[case::unrelated_command_default( + "yarn prettier --write .", + assert_default as ActionAssertion, +)] +fn multi_word_alternation_config( + #[case] command: &str, + #[case] expected: ActionAssertion, + empty_context: EvalContext, +) { + let config = 
parse_config(indoc! {r#" + rules: + - allow: '"npx prettier"|"bunx prettier"|prettier *' + "#}) + .unwrap(); + + let result = evaluate_command(&config, command, &empty_context).unwrap(); + expected(&result.action); +} + +#[rstest] +#[case::npx_denied("npx prettier --write /etc/passwd", assert_deny as ActionAssertion)] +#[case::bare_denied("prettier --write /etc/passwd", assert_deny as ActionAssertion)] +#[case::npx_allowed("npx prettier --write .", assert_allow as ActionAssertion)] +#[case::bare_allowed("prettier --write .", assert_allow as ActionAssertion)] +fn multi_word_alternation_allow_and_deny( + #[case] command: &str, + #[case] expected: ActionAssertion, + empty_context: EvalContext, +) { + let config = parse_config(indoc! {r#" + rules: + - allow: '"npx prettier"|prettier *' + - deny: '"npx prettier"|prettier --write /etc/passwd' + "#}) + .unwrap(); + + let result = evaluate_command(&config, command, &empty_context).unwrap(); + expected(&result.action); +}