From 02356b198d45d19cfc1052a9e155e0db1782cc90 Mon Sep 17 00:00:00 2001
From: Jason Lee
Date: Mon, 9 Jan 2023 22:24:10 +0800
Subject: [PATCH] Add support for `///` and `//!` doc comment syntax for
 rules

Resolves #748.

For example:

```rust
//! A parser for JSON files.

/// Matches object, e.g.: `{ "foo": "bar" }`
object = { "{" ~ pair ~ ("," ~ pair)* ~ "}" | "{" ~ "}" }
```

should generate:

```rust
/// A parser for JSON files.
enum Rule {
    /// Matches object, e.g.: `{ "foo": "bar" }`
    object,
}
```
---
 generator/src/generator.rs         | 36 ++++++++++--
 generator/src/lib.rs               | 24 +++++++-
 grammars/src/grammars/json.pest    |  3 +
 meta/src/ast.rs                    |  3 +
 meta/src/grammar.pest              | 18 ++++--
 meta/src/optimizer/concatenator.rs |  8 ++-
 meta/src/optimizer/factorizer.rs   |  8 ++-
 meta/src/optimizer/lister.rs       |  8 ++-
 meta/src/optimizer/mod.rs          | 27 +++++++++
 meta/src/optimizer/restorer.rs     | 19 ++++++-
 meta/src/optimizer/rotater.rs      |  8 ++-
 meta/src/optimizer/skipper.rs      |  8 ++-
 meta/src/optimizer/unroller.rs     |  8 ++-
 meta/src/parser.rs                 | 89 +++++++++++++++++++++++++++--
 14 files changed, 240 insertions(+), 27 deletions(-)

diff --git a/generator/src/generator.rs b/generator/src/generator.rs
index fc1263d8..2f4173e9 100644
--- a/generator/src/generator.rs
+++ b/generator/src/generator.rs
@@ -23,6 +23,7 @@ pub fn generate(
     path: Option<PathBuf>,
     rules: Vec<OptimizedRule>,
     defaults: Vec<&str>,
+    grammar_docs: Vec<&str>,
     include_grammar: bool,
 ) -> TokenStream {
     let uses_eoi = defaults.iter().any(|name| *name == "EOI");
@@ -36,7 +37,7 @@ pub fn generate(
     } else {
         quote!()
     };
-    let rule_enum = generate_enum(&rules, uses_eoi);
+    let rule_enum = generate_enum(&rules, grammar_docs, uses_eoi);
     let patterns = generate_patterns(&rules, uses_eoi);
     let skip = generate_skip(&rules);
 
@@ -181,10 +182,26 @@ fn generate_include(name: &Ident, path: &str) -> TokenStream {
     }
 }
 
-fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
-    let rules = rules.iter().map(|rule| format_ident!("r#{}", rule.name));
+fn generate_enum(rules: &[OptimizedRule], grammar_docs: Vec<&str>, uses_eoi: bool) -> TokenStream {
+    let rules = rules.iter().map(|rule| {
+        let rule_name = format_ident!("r#{}", rule.name);
+        if rule.comments.is_empty() {
+            quote! {
+                #rule_name
+            }
+        } else {
+            let comments = rule.comments.join("\n");
+            quote! {
+                #[doc = #comments]
+                #rule_name
+            }
+        }
+    });
+
+    let grammar_docs = grammar_docs.join("\n");
     if uses_eoi {
         quote! {
+            #[doc = #grammar_docs]
             #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
             #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
             pub enum Rule {
@@ -194,6 +211,7 @@ fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
         }
     } else {
         quote! {
+            #[doc = #grammar_docs]
             #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
             #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
             pub enum Rule {
@@ -208,6 +226,7 @@ fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
         .iter()
         .map(|rule| {
             let rule = format_ident!("r#{}", rule.name);
+
             quote! {
                 Rule::#rule => rules::#rule(state)
             }
@@ -667,14 +686,17 @@ mod tests {
             name: "f".to_owned(),
             ty: RuleType::Normal,
             expr: OptimizedExpr::Ident("g".to_owned()),
+            comments: vec!["This is rule comment".to_owned()],
         }];
 
         assert_eq!(
-            generate_enum(&rules, false).to_string(),
+            generate_enum(&rules, vec!["Rule doc", "hello"], false).to_string(),
            quote! {
+                #[doc = "Rule doc\nhello"]
                 #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
                 #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
                 pub enum Rule {
+                    #[doc = "This is rule comment"]
                     r#f
                 }
            }
@@ -966,11 +988,13 @@ mod tests {
                 name: "a".to_owned(),
                 ty: RuleType::Silent,
                 expr: OptimizedExpr::Str("b".to_owned()),
+                comments: vec![],
             },
             OptimizedRule {
                 name: "if".to_owned(),
                 ty: RuleType::Silent,
                 expr: OptimizedExpr::Ident("a".to_owned()),
+                comments: vec!["If statement".to_owned()],
             },
         ];
 
@@ -981,15 +1005,17 @@ mod tests {
         current_dir.push("test.pest");
         let test_path = current_dir.to_str().expect("path contains invalid unicode");
         assert_eq!(
-            generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, true).to_string(),
+            generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, vec!["This is Rule doc", "This is second line"], true).to_string(),
             quote! {
                 #[allow(non_upper_case_globals)]
                 const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
 
+                #[doc = "This is Rule doc\nThis is second line"]
                 #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
                 #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
                 pub enum Rule {
                     r#a,
+                    #[doc = "If statement"]
                     r#if
                 }
 
diff --git a/generator/src/lib.rs b/generator/src/lib.rs
index 938bd168..87818a6d 100644
--- a/generator/src/lib.rs
+++ b/generator/src/lib.rs
@@ -26,6 +26,7 @@ use std::fs::File;
 use std::io::{self, Read};
 use std::path::Path;
 
+use pest::iterators::Pairs;
 use proc_macro2::TokenStream;
 use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
 
@@ -90,11 +91,32 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
         Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
     };
 
+    let grammar_docs = consume_grammar_doc(pairs.clone());
+
     let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
     let ast = unwrap_or_report(parser::consume_rules(pairs));
     let optimized = optimizer::optimize(ast);
 
-    generator::generate(name, &generics, path, optimized, defaults, include_grammar)
+    generator::generate(
+        name,
+        &generics,
+        path,
+        optimized,
+        defaults,
+        grammar_docs,
+        include_grammar,
+    )
+}
+
+fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> Vec<&'_ str> {
+    let mut docs = vec![];
+    for pair in pairs {
+        if pair.as_rule() == Rule::grammar_doc {
+            docs.push(pair.as_str()[3..pair.as_str().len()].trim());
+        }
+    }
+
+    docs
 }
 
 fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
diff --git a/grammars/src/grammars/json.pest b/grammars/src/grammars/json.pest
index f8b423a5..2692e8ab 100644
--- a/grammars/src/grammars/json.pest
+++ b/grammars/src/grammars/json.pest
@@ -7,8 +7,11 @@
 // option. All files in the project carrying such notice may not be copied,
 // modified, or distributed except according to those terms.
 
+//! A parser for JSON files.
+//! And this is an example JSON parser.
 json = { SOI ~ (object | array) ~ EOI }
 
+/// Matches object, e.g.: `{ "foo": "bar" }`
 object = { "{" ~ pair ~ ("," ~ pair)* ~ "}" | "{" ~ "}" }
 
 pair = { string ~ ":" ~ value }
diff --git a/meta/src/ast.rs b/meta/src/ast.rs
index ffac8ea7..781855d1 100644
--- a/meta/src/ast.rs
+++ b/meta/src/ast.rs
@@ -10,6 +10,7 @@
 //! Types for the pest's abstract syntax tree.
 
 /// A grammar rule
+#[non_exhaustive]
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct Rule {
     /// The name of the rule
@@ -18,6 +19,8 @@ pub struct Rule {
     pub ty: RuleType,
     /// The rule's expression
     pub expr: Expr,
+    /// Doc comments of the rule
+    pub(crate) comments: Vec<String>,
 }
 
 /// All possible rule types
diff --git a/meta/src/grammar.pest b/meta/src/grammar.pest
index 282ca35b..817588bd 100644
--- a/meta/src/grammar.pest
+++ b/meta/src/grammar.pest
@@ -7,11 +7,12 @@
 // option. All files in the project carrying such notice may not be copied,
 // modified, or distributed except according to those terms.
 
-grammar_rules = _{ SOI ~ grammar_rule+ ~ EOI }
+grammar_rules = _{ SOI ~ grammar_doc* ~ (grammar_rule)+ ~ EOI }
 
 grammar_rule = {
     identifier ~ assignment_operator ~ modifier? ~
-    opening_brace ~ expression ~ closing_brace
+    opening_brace ~ expression ~ closing_brace |
+    line_doc
 }
 
 assignment_operator = { "=" }
@@ -92,7 +93,12 @@ quote = { "\"" }
 single_quote = { "'" }
 range_operator = { ".." }
 
-newline = _{ "\n" | "\r\n" }
-WHITESPACE = _{ " " | "\t" | newline }
-block_comment = _{ "/*" ~ (block_comment | !"*/" ~ ANY)* ~ "*/" }
-COMMENT = _{ block_comment | ("//" ~ (!newline ~ ANY)*) }
+newline       = _{ "\n" | "\r\n" }
+WHITESPACE    = _{ " " | "\t" | newline }
+line_comment  = _{ ("//" ~ !("/" | "!") ~ (!newline ~ ANY)*) }
+block_comment = _{ "/*" ~ (block_comment | !"*/" ~ ANY)* ~ "*/" }
+COMMENT       = _{ block_comment | line_comment }
+
+// ref: https://doc.rust-lang.org/reference/comments.html
+grammar_doc = ${ "//!" ~ (!newline ~ ANY)* }
+line_doc = ${ "///" ~ !"/" ~ (!newline ~ ANY)* }
diff --git a/meta/src/optimizer/concatenator.rs b/meta/src/optimizer/concatenator.rs
index 31d3aa53..3e991987 100644
--- a/meta/src/optimizer/concatenator.rs
+++ b/meta/src/optimizer/concatenator.rs
@@ -10,7 +10,12 @@ use crate::ast::*;
 
 pub fn concatenate(rule: Rule) -> Rule {
-    let Rule { name, ty, expr } = rule;
+    let Rule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     Rule {
         name,
         ty,
         expr: expr.map_bottom_up(|expr| {
@@ -29,5 +34,6 @@ pub fn concatenate(rule: Rule) -> Rule {
                 expr
             }
         }),
+        comments,
     }
 }
diff --git a/meta/src/optimizer/factorizer.rs b/meta/src/optimizer/factorizer.rs
index 5481870b..a653bb4f 100644
--- a/meta/src/optimizer/factorizer.rs
+++ b/meta/src/optimizer/factorizer.rs
@@ -10,7 +10,12 @@ use crate::ast::*;
 
 pub fn factor(rule: Rule) -> Rule {
-    let Rule { name, ty, expr } = rule;
+    let Rule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     Rule {
         name,
         ty,
         expr: expr.map_top_down(|expr| {
@@ -47,5 +52,6 @@ pub fn factor(rule: Rule) -> Rule {
                 expr => expr,
             }
         }),
+        comments,
     }
 }
diff --git a/meta/src/optimizer/lister.rs b/meta/src/optimizer/lister.rs
index e1988503..b9f7ae82 100644
--- a/meta/src/optimizer/lister.rs
+++ b/meta/src/optimizer/lister.rs
@@ -10,7 +10,12 @@ use crate::ast::*;
 
 pub fn list(rule: Rule) -> Rule {
-    let Rule { name, ty, expr } = rule;
+    let Rule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     Rule {
         name,
         ty,
         expr: expr.map_bottom_up(|expr| {
@@ -38,5 +43,6 @@ pub fn list(rule: Rule) -> Rule {
                 expr => expr,
             }
         }),
+        comments,
     }
 }
diff --git a/meta/src/optimizer/mod.rs b/meta/src/optimizer/mod.rs
index 2038753b..89ef7cab 100644
--- a/meta/src/optimizer/mod.rs
+++ b/meta/src/optimizer/mod.rs
@@ -80,6 +80,7 @@ fn rule_to_optimized_rule(rule: Rule) -> OptimizedRule {
         name: rule.name,
         ty: rule.ty,
         expr: to_optimized(rule.expr),
+        comments: rule.comments,
     }
 }
 
@@ -99,6 +100,8 @@ pub struct OptimizedRule {
     pub ty: RuleType,
     /// The optimized expression of the rule.
     pub expr: OptimizedExpr,
+    /// The doc comments of the rule.
+    pub comments: Vec<String>,
 }
 
 /// The optimized version of the pest AST's `Expr`.
@@ -319,6 +322,7 @@ mod tests {
                     ),
                     Str(String::from("d"))
                 )),
+                comments: vec![],
             }]
         };
         let rotated = {
@@ -333,6 +337,7 @@ mod tests {
                        Choice(Str(String::from("c")), Str(String::from("d")))
                    )
                )),
+                comments: vec![],
            }]
        };
 
@@ -350,12 +355,14 @@ mod tests {
                    NegPred(Choice(Str(String::from("a")), Str(String::from("b")))),
                    Ident(String::from("ANY"))
                ))),
+                comments: vec![],
            }]
        };
        let skipped = vec![OptimizedRule {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: OptimizedExpr::Skip(vec![String::from("a"), String::from("b")]),
+            comments: vec![],
        }];
 
        assert_eq!(optimize(rules), skipped);
@@ -372,12 +379,14 @@ mod tests {
                    Seq(Str(String::from("a")), Str(String::from("b"))),
                    Seq(Str(String::from("c")), Str(String::from("d")))
                )),
+                comments: vec![],
            }]
        };
        let concatenated = vec![OptimizedRule {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: OptimizedExpr::Str(String::from("abcd")),
+            comments: vec![],
        }];
 
        assert_eq!(optimize(rules), concatenated);
@@ -389,6 +398,7 @@ mod tests {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: Expr::RepExact(Box::new(Expr::Ident(String::from("a"))), 3),
+            comments: vec![],
        }];
        let unrolled = {
            use crate::optimizer::OptimizedExpr::*;
@@ -399,6 +409,7 @@ mod tests {
                    Ident(String::from("a")),
                    Seq(Ident(String::from("a")), Ident(String::from("a")))
                )),
+                comments: vec![],
            }]
        };
 
@@ -411,6 +422,7 @@ mod tests {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: Expr::RepMax(Box::new(Expr::Str("a".to_owned())), 3),
+            comments: vec![],
        }];
        let unrolled = {
            use crate::optimizer::OptimizedExpr::*;
@@ -421,6 +433,7 @@ mod tests {
                    Opt(Str(String::from("a"))),
                    Seq(Opt(Str(String::from("a"))), Opt(Str(String::from("a"))))
                )),
+                comments: vec![],
            }]
        };
 
@@ -433,6 +446,7 @@ mod tests {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: Expr::RepMin(Box::new(Expr::Str("a".to_owned())), 2),
+            comments: vec![],
        }];
        let unrolled = {
            use crate::optimizer::OptimizedExpr::*;
@@ -443,6 +457,7 @@ mod tests {
                    Str(String::from("a")),
                    Seq(Str(String::from("a")), Rep(Str(String::from("a"))))
                )),
+                comments: vec![],
            }]
        };
 
@@ -455,6 +470,7 @@ mod tests {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: Expr::RepMinMax(Box::new(Expr::Str("a".to_owned())), 2, 3),
+            comments: vec![],
        }];
        let unrolled = {
            use crate::optimizer::OptimizedExpr::*;
@@ -472,6 +488,7 @@ mod tests {
                    Str(String::from("a")),
                    Seq(Str(String::from("a")), Opt(Str(String::from("a"))))
                )),
+                comments: vec![],
            }]
        };
 
@@ -489,12 +506,14 @@ mod tests {
                    Seq(Insens(String::from("a")), Insens(String::from("b"))),
                    Seq(Insens(String::from("c")), Insens(String::from("d")))
                )),
+                comments: vec![],
            }]
        };
        let concatenated = vec![OptimizedRule {
            name: "rule".to_owned(),
            ty: RuleType::Atomic,
            expr: OptimizedExpr::Insens(String::from("abcd")),
+            comments: vec![],
        }];
 
        assert_eq!(optimize(rules), concatenated);
@@ -517,6 +536,7 @@ mod tests {
                        Ident(String::from("d"))
                    )
                )),
+                comments: vec![],
            }]
        };
        let optimized = {
@@ -531,6 +551,7 @@ mod tests {
                        Choice(Ident(String::from("c")), Ident(String::from("d")))
                    )
                )),
+                comments: vec![],
            }]
        };
 
@@ -548,6 +569,7 @@ mod tests {
                    Seq(Ident(String::from("a")), Ident(String::from("b"))),
                    Ident(String::from("a"))
                )),
+                comments: vec![],
            }]
        };
        let optimized = {
@@ -556,6 +578,7 @@ mod tests {
                name: "rule".to_owned(),
                ty: RuleType::Silent,
                expr: box_tree!(Seq(Ident(String::from("a")), Opt(Ident(String::from("b"))))),
+                comments: vec![],
            }]
        };
 
@@ -573,6 +596,7 @@ mod tests {
                    Ident(String::from("a")),
                    Seq(Ident(String::from("a")), Ident(String::from("b")))
                )),
+                comments: vec![],
            }]
        };
        let optimized = {
@@ -581,6 +605,7 @@ mod tests {
                name: "rule".to_owned(),
                ty: RuleType::Silent,
                expr: box_tree!(Ident(String::from("a"))),
+                comments: vec![],
            }]
        };
 
@@ -598,6 +623,7 @@ mod tests {
                    Rep(Seq(Ident(String::from("a")), Ident(String::from("b")))),
                    Ident(String::from("a"))
                )),
+                comments: vec![],
            }]
        };
        let optimized = {
@@ -609,6 +635,7 @@ mod tests {
                    Ident(String::from("a")),
                    Rep(Seq(Ident(String::from("b")), Ident(String::from("a"))))
                )),
+                comments: vec![],
            }]
        };
 
diff --git a/meta/src/optimizer/restorer.rs b/meta/src/optimizer/restorer.rs
index e128e03f..cccf9ebb 100644
--- a/meta/src/optimizer/restorer.rs
+++ b/meta/src/optimizer/restorer.rs
@@ -14,9 +14,19 @@ pub fn restore_on_err(
     rule: OptimizedRule,
     rules: &HashMap<String, OptimizedExpr>,
 ) -> OptimizedRule {
-    let OptimizedRule { name, ty, expr } = rule;
+    let OptimizedRule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     let expr = expr.map_bottom_up(|expr| wrap_branching_exprs(expr, rules));
-    OptimizedRule { name, ty, expr }
+    OptimizedRule {
+        name,
+        ty,
+        expr,
+        comments,
+    }
 }
 
 fn wrap_branching_exprs(
@@ -100,6 +110,7 @@ mod tests {
             name: "rule".to_owned(),
             ty: RuleType::Normal,
             expr: box_tree!(Opt(Str("a".to_string()))),
+            comments: vec![],
         }];
 
         assert_eq!(
@@ -114,12 +125,14 @@ mod tests {
             name: "rule".to_owned(),
             ty: RuleType::Normal,
             expr: box_tree!(Rep(Push(Str("a".to_string())))),
+            comments: vec![],
         }];
 
         let restored = OptimizedRule {
             name: "rule".to_owned(),
             ty: RuleType::Normal,
             expr: box_tree!(Rep(RestoreOnErr(Push(Str("a".to_string()))))),
+            comments: vec![],
         };
 
         assert_eq!(
@@ -134,6 +147,7 @@ mod tests {
             name: "rule".to_owned(),
             ty: RuleType::Normal,
             expr: box_tree!(Choice(Push(Str("a".to_string())), Str("a".to_string()))),
+            comments: vec![],
         }];
 
         let restored = OptimizedRule {
@@ -143,6 +157,7 @@ mod tests {
                 RestoreOnErr(Push(Str("a".to_string()))),
                 Str("a".to_string())
             )),
+            comments: vec![],
         };
 
         assert_eq!(
diff --git a/meta/src/optimizer/rotater.rs b/meta/src/optimizer/rotater.rs
index 7a7d8fba..23176d4e 100644
--- a/meta/src/optimizer/rotater.rs
+++ b/meta/src/optimizer/rotater.rs
@@ -35,10 +35,16 @@ pub fn rotate(rule: Rule) -> Rule {
         }
     }
 
-    let Rule { name, ty, expr } = rule;
+    let Rule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     Rule {
         name,
         ty,
         expr: expr.map_top_down(rotate_internal),
+        comments,
     }
 }
diff --git a/meta/src/optimizer/skipper.rs b/meta/src/optimizer/skipper.rs
index 40bc5a16..4458af04 100644
--- a/meta/src/optimizer/skipper.rs
+++ b/meta/src/optimizer/skipper.rs
@@ -28,7 +28,12 @@ pub fn skip(rule: Rule) -> Rule {
         }
     }
 
-    let Rule { name, ty, expr } = rule;
+    let Rule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     Rule {
         name,
         ty,
@@ -52,5 +57,6 @@ pub fn skip(rule: Rule) -> Rule {
         } else {
             expr
         },
+        comments,
     }
 }
diff --git a/meta/src/optimizer/unroller.rs b/meta/src/optimizer/unroller.rs
index e3c360d9..419a4469 100644
--- a/meta/src/optimizer/unroller.rs
+++ b/meta/src/optimizer/unroller.rs
@@ -10,7 +10,12 @@ use crate::ast::*;
 
 pub fn unroll(rule: Rule) -> Rule {
-    let Rule { name, ty, expr } = rule;
+    let Rule {
+        name,
+        ty,
+        expr,
+        comments,
+    } = rule;
     Rule {
         name,
         ty,
@@ -62,5 +67,6 @@ pub fn unroll(rule: Rule) -> Rule {
             .unwrap(),
             expr => expr,
         }),
+        comments,
     }
 }
diff --git a/meta/src/parser.rs b/meta/src/parser.rs
index fc0224b3..b71e171c 100644
--- a/meta/src/parser.rs
+++ b/meta/src/parser.rs
@@ -11,6 +11,7 @@
 use std::char;
 use std::iter::Peekable;
+use std::sync::Mutex;
 
 use pest::error::{Error, ErrorVariant};
 use pest::iterators::{Pair, Pairs};
 use pest::pratt_parser::{Assoc, Op, PrattParser};
@@ -50,6 +51,8 @@ pub struct ParserRule<'i> {
     pub ty: RuleType,
     /// The rule's parser node
     pub node: ParserNode<'i>,
+    /// Doc comments of the rule
+    pub(crate) comments: Vec<String>,
 }
 
 /// The pest grammar node
@@ -167,9 +170,20 @@ pub enum ParserExpr<'i> {
 }
 
 fn convert_rule(rule: ParserRule<'_>) -> AstRule {
-    let ParserRule { name, ty, node, .. } = rule;
+    let ParserRule {
+        name,
+        ty,
+        node,
+        comments,
+        ..
+    } = rule;
     let expr = convert_node(node);
-    AstRule { name, ty, expr }
+    AstRule {
+        name,
+        ty,
+        expr,
+        comments,
+    }
 }
 
 fn convert_node(node: ParserNode<'_>) -> Expr {
@@ -243,10 +257,23 @@ pub fn rename_meta_rule(rule: &Rule) -> String {
         Rule::insensitive_string => "`^`".to_owned(),
         Rule::range_operator => "`..`".to_owned(),
         Rule::single_quote => "`'`".to_owned(),
+        Rule::grammar_doc => "//!".to_owned(),
+        Rule::line_doc => "///".to_owned(),
         other_rule => format!("{:?}", other_rule),
     }
 }
 
+fn filter_line_docs(pair: &Pair<'_, Rule>, line_docs: &mut Vec<String>) -> bool {
+    let mut pairs = pair.clone().into_inner();
+    let pair = pairs.next().unwrap();
+    if pair.as_rule() == Rule::line_doc {
+        line_docs.push(pair.as_str()[3..pair.as_str().len()].trim().to_string());
+        false
+    } else {
+        true
+    }
+}
+
 fn consume_rules_with_spans(
     pairs: Pairs<'_, Rule>,
 ) -> Result<Vec<ParserRule<'_>>, Vec<Error<Rule>>> {
@@ -254,8 +281,11 @@ fn consume_rules_with_spans(
         .op(Op::infix(Rule::choice_operator, Assoc::Left))
         .op(Op::infix(Rule::sequence_operator, Assoc::Left));
 
+    let line_docs: Mutex<Vec<String>> = Mutex::new(vec![]);
+
     pairs
         .filter(|pair| pair.as_rule() == Rule::grammar_rule)
+        .filter(|pair| filter_line_docs(pair, &mut line_docs.lock().unwrap()))
         .map(|pair| {
             let mut pairs = pair.into_inner().peekable();
 
@@ -286,11 +316,17 @@ fn consume_rules_with_spans(
 
             let node = consume_expr(inner_nodes, &pratt)?;
 
+            // consume doc comments
+            let mut line_docs = line_docs.lock().unwrap();
+            let comments = line_docs.clone();
+            line_docs.clear();
+
             Ok(ParserRule {
                 name,
                 span,
                 ty,
                 node,
+                comments,
             })
         })
         .collect()
@@ -1093,13 +1129,44 @@ mod tests {
         };
     }
 
+    #[test]
+    fn grammar_doc_and_line_doc() {
+        let input = "//! hello\n/// world\na = { \"a\" }";
+        parses_to! {
+            parser: PestParser,
+            input: input,
+            rule: Rule::grammar_rules,
+            tokens: [
+                grammar_doc(0, 9),
+                grammar_rule(10, 19, [
+                    line_doc(10, 19),
+                ]),
+                grammar_rule(20, 31, [
+                    identifier(20, 21),
+                    assignment_operator(22, 23),
+                    opening_brace(24, 25),
+                    expression(26, 30, [
+                        term(26, 30, [
+                            string(26, 29, [
+                                quote(26, 27),
+                                inner_str(27, 28),
+                                quote(28, 29)
+                            ])
+                        ])
+                    ]),
+                    closing_brace(30, 31),
+                ])
+            ]
+        };
+    }
+
     #[test]
     fn wrong_identifier() {
         fails_with! {
             parser: PestParser,
             input: "0",
             rule: Rule::grammar_rules,
-            positives: vec![Rule::identifier],
+            positives: vec![Rule::grammar_rule, Rule::grammar_doc],
             negatives: vec![],
             pos: 0
         };
@@ -1315,8 +1382,11 @@ mod tests {
 
     #[test]
     fn ast() {
-        let input =
-            "rule = _{ a{1} ~ \"a\"{3,} ~ b{, 2} ~ \"b\"{1, 2} | !(^\"c\" | PUSH('d'..'e'))?* }";
+        let input = r##"
+        /// This is line comment
+        /// This is rule
+        rule = _{ a{1} ~ "a"{3,} ~ b{, 2} ~ "b"{1, 2} | !(^"c" | PUSH('d'..'e'))?* }
+        "##;
 
         let pairs = PestParser::parse(Rule::grammar_rules, input).unwrap();
         let ast = consume_rules_with_spans(pairs).unwrap();
@@ -1347,7 +1417,11 @@ mod tests {
                            ))))
                        ))
                    ))))))
-                )
+                ),
+                comments: vec![
+                    "This is line comment".to_string(),
+                    "This is rule".to_string(),
+                ],
            },]
        );
    }
@@ -1368,7 +1442,8 @@ mod tests {
             expr: Expr::Seq(
                 Box::new(Expr::PeekSlice(-4, None)),
                 Box::new(Expr::PeekSlice(0, Some(3))),
-            )
+            ),
+            comments: vec![],
         }],
     );
 }
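
For reviewers, a minimal usage sketch (not part of the patch) of how the generated docs surface downstream. It assumes a crate depending on `pest` and `pest_derive` with this change applied; the parser name and grammar path below are hypothetical.

```rust
// Hypothetical downstream crate. `json.pest` stands in for any grammar
// that carries `//!` and `///` doc comments, like the one edited above.
use pest_derive::Parser;

#[derive(Parser)]
#[grammar = "json.pest"] // hypothetical path, relative to src/
pub struct JsonParser;

// Running `cargo doc` should now render the grammar's `//!` lines as the
// doc comment of the generated `Rule` enum, and each rule's `///` lines
// on the matching variant (e.g. `Rule::object`).
```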