cfgrammar/yacc/
grammar.rs

1#![allow(clippy::derive_partial_eq_without_eq)]
2use std::{cell::RefCell, collections::HashMap, fmt::Write, str::FromStr};
3
4#[cfg(feature = "bincode")]
5use bincode::{Decode, Encode};
6use num_traits::{AsPrimitive, PrimInt, Unsigned};
7#[cfg(feature = "serde")]
8use serde::{Deserialize, Serialize};
9use vob::Vob;
10
11use super::{
12    YaccKind, ast,
13    firsts::YaccFirsts,
14    follows::YaccFollows,
15    parser::{YaccGrammarError, YaccGrammarResult},
16};
17use crate::{PIdx, RIdx, SIdx, Span, Symbol, TIdx};
18
19const START_RULE: &str = "^";
20const IMPLICIT_RULE: &str = "~";
21const IMPLICIT_START_RULE: &str = "^~";
22
/// The numeric strength of a precedence level.
pub type PrecedenceLevel = u64;
/// A precedence declaration: a level paired with an associativity kind.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
pub struct Precedence {
    /// The strength of this precedence: higher levels bind more tightly.
    pub level: PrecedenceLevel,
    /// The associativity of this precedence.
    pub kind: AssocKind,
}
31
/// The kind of associativity of a [`Precedence`].
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
pub enum AssocKind {
    /// Left associative.
    Left,
    /// Right associative.
    Right,
    /// Not associative.
    Nonassoc,
}
40
/// Representation of a `YaccGrammar`. See the [top-level documentation](../../index.html) for the
/// guarantees this struct makes about rules, tokens, productions, and symbols.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "bincode", derive(Encode))]
pub struct YaccGrammar<StorageT = u32> {
    /// How many rules does this grammar have?
    rules_len: RIdx<StorageT>,
    /// A mapping from `RIdx` -> `(String, Span)`.
    rule_names: Box<[(String, Span)]>,
    /// A mapping from `TIdx` -> `Option<(Span, String)>`. Every user-specified token will have a name,
    /// but tokens inserted by cfgrammar (e.g. the EOF token) won't.
    token_names: Box<[Option<(Span, String)>]>,
    /// A mapping from `TIdx` -> `Option<Precedence>`
    token_precs: Box<[Option<Precedence>]>,
    /// A mapping from `TIdx` -> `Option<String>` for the %epp declaration, giving pretty-printed
    /// versions of token names that can be presented to the user in case of an error. Every
    /// user-specified token will have a name that can be presented to the user (if a token doesn't
    /// have an %epp entry, the token name will be used in lieu), but tokens inserted by cfgrammar
    /// (e.g. the EOF token) won't.
    token_epp: Box<[Option<String>]>,
    /// How many tokens does this grammar have?
    tokens_len: TIdx<StorageT>,
    /// The offset of the EOF token.
    eof_token_idx: TIdx<StorageT>,
    /// How many productions does this grammar have?
    prods_len: PIdx<StorageT>,
    /// Which production is the sole production of the start rule?
    start_prod: PIdx<StorageT>,
    /// A list of all productions.
    prods: Box<[Box<[Symbol<StorageT>]>]>,
    /// A mapping from rules to their productions. Note that 1) the order of rules is identical to
    /// that of `rule_names` 2) every rule will have at least 1 production 3) productions
    /// are not necessarily stored sequentially.
    rules_prods: Box<[Box<[PIdx<StorageT>]>]>,
    /// A mapping from productions to their corresponding rule indexes.
    prods_rules: Box<[RIdx<StorageT>]>,
    /// The precedence of each production.
    prod_precs: Box<[Option<Precedence>]>,
    /// The index of the rule added for implicit tokens, if they were specified; otherwise
    /// `None`.
    implicit_rule: Option<RIdx<StorageT>>,
    /// User defined Rust programs which can be called within actions
    actions: Box<[Option<String>]>,
    /// A `(name, type)` pair defining an extra parameter to pass to action functions.
    parse_param: Option<(String, String)>,
    /// The programs part of the grammar, if any.
    programs: Option<String>,
    /// The actiontypes of rules (one per rule).
    actiontypes: Box<[Option<String>]>,
    /// Tokens marked as %avoid_insert (if any).
    avoid_insert: Option<Vob>,
    /// How many shift/reduce conflicts the grammar author expected (if any).
    expect: Option<usize>,
    /// How many reduce/reduce conflicts the grammar author expected (if any).
    expectrr: Option<usize>,
}
97
98// The implementations of `Decode` and `BorrowDecode`
99// Contain a modified copy of the output of `cargo expand` of the derivation:
100// #[cfg_attr(feature = "bincode", derive(Decode))]
101//
// The current version of bincode has bugs in its derive macro implementation
103// https://github.com/bincode-org/bincode/issues/763
104// https://github.com/bincode-org/bincode/issues/646
105//
106// Once those are fixed, we can replace this with derive.
107
#[cfg(feature = "bincode")]
impl<StorageT, __Context> Decode<__Context> for YaccGrammar<StorageT>
where
    StorageT: Decode<__Context> + 'static,
{
    fn decode<__D: bincode::de::Decoder<Context = __Context>>(
        decoder: &mut __D,
    ) -> Result<Self, bincode::error::DecodeError> {
        // Fields are decoded in struct-declaration order, matching the field order
        // produced by the derived `Encode` impl. Do not reorder these lines.
        Ok(Self {
            rules_len: Decode::decode(decoder)?,
            rule_names: Decode::decode(decoder)?,
            token_names: Decode::decode(decoder)?,
            token_precs: Decode::decode(decoder)?,
            token_epp: Decode::decode(decoder)?,
            tokens_len: Decode::decode(decoder)?,
            eof_token_idx: Decode::decode(decoder)?,
            prods_len: Decode::decode(decoder)?,
            start_prod: Decode::decode(decoder)?,
            prods: Decode::decode(decoder)?,
            rules_prods: Decode::decode(decoder)?,
            prods_rules: Decode::decode(decoder)?,
            prod_precs: Decode::decode(decoder)?,
            implicit_rule: Decode::decode(decoder)?,
            actions: Decode::decode(decoder)?,
            parse_param: Decode::decode(decoder)?,
            programs: Decode::decode(decoder)?,
            actiontypes: Decode::decode(decoder)?,
            avoid_insert: Decode::decode(decoder)?,
            expect: Decode::decode(decoder)?,
            expectrr: Decode::decode(decoder)?,
        })
    }
}
141
142// Eventually this should just be provided through:
143// #[cfg_attr(feature = "bincode", derive(Decode))]
144//
145// See comment at the corresponding bincode::Decode impl.
#[cfg(feature = "bincode")]
impl<'__de, StorageT, __Context> bincode::BorrowDecode<'__de, __Context> for YaccGrammar<StorageT>
where
    StorageT: bincode::de::BorrowDecode<'__de, __Context> + '__de,
{
    fn borrow_decode<__D: ::bincode::de::BorrowDecoder<'__de, Context = __Context>>(
        decoder: &mut __D,
    ) -> Result<Self, bincode::error::DecodeError> {
        // Fields are decoded in struct-declaration order, matching the field order
        // produced by the derived `Encode` impl. Do not reorder these lines.
        // (All paths are uniformly `::bincode::`-qualified; `rules_len` previously
        // used a bare `bincode::` path, inconsistent with its siblings.)
        Ok(Self {
            rules_len: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            rule_names: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            token_names: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            token_precs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            token_epp: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            tokens_len: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            eof_token_idx: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prods_len: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            start_prod: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prods: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            rules_prods: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prods_rules: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prod_precs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            implicit_rule: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            actions: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            parse_param: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            programs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            actiontypes: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            avoid_insert: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            expect: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            expectrr: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
        })
    }
}
179
180// Internally, we assume that a grammar's start rule has a single production. Since we manually
181// create the start rule ourselves (without relying on user input), this is a safe assumption.
182
183impl YaccGrammar<u32> {
184    pub fn new(yk: YaccKind, s: &str) -> YaccGrammarResult<Self> {
185        YaccGrammar::new_with_storaget(yk, s)
186    }
187}
188
189impl<StorageT: 'static + PrimInt + Unsigned> FromStr for YaccGrammar<StorageT>
190where
191    usize: AsPrimitive<StorageT>,
192{
193    type Err = Vec<YaccGrammarError>;
194    fn from_str(s: &str) -> YaccGrammarResult<Self> {
195        let ast_validation = ast::ASTWithValidityInfo::from_str(s)?;
196        Self::new_from_ast_with_validity_info(&ast_validation)
197    }
198}
199
200impl<StorageT: 'static + PrimInt + Unsigned> YaccGrammar<StorageT>
201where
202    usize: AsPrimitive<StorageT>,
203{
    /// Takes as input a Yacc grammar of [`YaccKind`](enum.YaccKind.html) as a `String` `s` and returns a
    /// [`YaccGrammar`](grammar/struct.YaccGrammar.html) (or
    /// ([`YaccGrammarError`](grammar/enum.YaccGrammarError.html) on error).
    ///
    /// As we're compiling the `YaccGrammar`, we add a new start rule (which we'll refer to as `^`,
    /// though the actual name is a fresh name that is guaranteed to be unique) that references the
    /// user defined start rule.
    pub fn new_with_storaget(yk: YaccKind, s: &str) -> YaccGrammarResult<Self> {
        // Validity checking happens while the AST is constructed; any errors are
        // surfaced by `new_from_ast_with_validity_info`.
        let ast_validation = ast::ASTWithValidityInfo::new(yk, s);
        Self::new_from_ast_with_validity_info(&ast_validation)
    }
215
216    pub fn new_from_ast_with_validity_info(
217        ast_validation: &ast::ASTWithValidityInfo,
218    ) -> YaccGrammarResult<Self> {
219        if !ast_validation.is_valid() {
220            return Err(ast_validation.errors().to_owned());
221        }
222        let ast = ast_validation.ast();
223        // Check that StorageT is big enough to hold RIdx/PIdx/SIdx/TIdx values; after these
224        // checks we can guarantee that things like RIdx(ast.rules.len().as_()) are safe.
225        if ast.rules.len() > num_traits::cast(StorageT::max_value()).unwrap() {
226            panic!("StorageT is not big enough to store this grammar's rules.");
227        }
228        if ast.tokens.len() > num_traits::cast(StorageT::max_value()).unwrap() {
229            panic!("StorageT is not big enough to store this grammar's tokens.");
230        }
231        if ast.prods.len() > num_traits::cast(StorageT::max_value()).unwrap() {
232            panic!("StorageT is not big enough to store this grammar's productions.");
233        }
234        for p in &ast.prods {
235            if p.symbols.len() > num_traits::cast(StorageT::max_value()).unwrap() {
236                panic!(
237                    "StorageT is not big enough to store the symbols of at least one of this grammar's productions."
238                );
239            }
240        }
241
242        let mut rule_names: Vec<(String, Span)> = Vec::with_capacity(ast.rules.len() + 1);
243
244        // Generate a guaranteed unique start rule name. We simply keep making the string longer
245        // until we've hit something unique (at the very worst, this will require looping for as
246        // many times as there are rules). We use the same technique later for unique end
247        // token and whitespace names.
248        let mut start_rule = START_RULE.to_string();
249        while ast.rules.get(&start_rule).is_some() {
250            start_rule += START_RULE;
251        }
252        rule_names.push((start_rule.clone(), Span::new(0, 0)));
253
254        let implicit_rule;
255        let implicit_start_rule;
256        match ast_validation.yacc_kind() {
257            YaccKind::Original(_) | YaccKind::Grmtools => {
258                implicit_rule = None;
259                implicit_start_rule = None;
260            }
261            YaccKind::Eco => {
262                if ast.implicit_tokens.is_some() {
263                    let mut n1 = IMPLICIT_RULE.to_string();
264                    while ast.rules.get(&n1).is_some() {
265                        n1 += IMPLICIT_RULE;
266                    }
267                    rule_names.push((n1.clone(), Span::new(0, 0)));
268                    implicit_rule = Some(n1);
269                    let mut n2 = IMPLICIT_START_RULE.to_string();
270                    while ast.rules.get(&n2).is_some() {
271                        n2 += IMPLICIT_START_RULE;
272                    }
273                    rule_names.push((n2.clone(), Span::new(0, 0)));
274                    implicit_start_rule = Some(n2);
275                } else {
276                    implicit_rule = None;
277                    implicit_start_rule = None;
278                }
279            }
280        };
281
282        for (
283            k,
284            ast::Rule {
285                name: (_, name_span),
286                ..
287            },
288        ) in &ast.rules
289        {
290            rule_names.push((k.clone(), *name_span));
291        }
292        let mut rules_prods: Vec<Vec<PIdx<StorageT>>> = Vec::with_capacity(rule_names.len());
293        let mut rule_map = HashMap::<String, RIdx<StorageT>>::new();
294        for (i, (v, _)) in rule_names.iter().enumerate() {
295            rules_prods.push(Vec::new());
296            rule_map.insert(v.clone(), RIdx(i.as_()));
297        }
298
299        let mut token_names: Vec<Option<(Span, String)>> = Vec::with_capacity(ast.tokens.len() + 1);
300        let mut token_precs: Vec<Option<Precedence>> = Vec::with_capacity(ast.tokens.len() + 1);
301        let mut token_epp: Vec<Option<String>> = Vec::with_capacity(ast.tokens.len() + 1);
302        for (i, k) in ast.tokens.iter().enumerate() {
303            token_names.push(Some((ast.spans[i], k.clone())));
304            token_precs.push(ast.precs.get(k).map(|(prec, _)| prec).cloned());
305            token_epp.push(Some(
306                ast.epp.get(k).map(|(_, (s, _))| s).unwrap_or(k).clone(),
307            ));
308        }
309        let eof_token_idx = TIdx(token_names.len().as_());
310        token_names.push(None);
311        token_precs.push(None);
312        token_epp.push(None);
313        let mut token_map = HashMap::<String, TIdx<StorageT>>::new();
314        for (i, v) in token_names.iter().enumerate() {
315            if let Some((_, n)) = v.as_ref() {
316                token_map.insert(n.clone(), TIdx(i.as_()));
317            }
318        }
319
320        // In order to avoid fiddling about with production indices from the AST, we simply map
321        // tem 1:1 to grammar indices. That means that any new productions are added to the *end*
322        // of the list of productions.
323        let mut prods = vec![None; ast.prods.len()];
324        let mut prod_precs: Vec<Option<Option<Precedence>>> = vec![None; ast.prods.len()];
325        let mut prods_rules = vec![None; ast.prods.len()];
326        let mut actions = vec![None; ast.prods.len()];
327        let mut actiontypes = vec![None; rule_names.len()];
328        let (start_name, _) = ast.start.as_ref().unwrap();
329        for (astrulename, _) in &rule_names {
330            let ridx = rule_map[astrulename];
331            if astrulename == &start_rule {
332                // Add the special start rule which has a single production which references a
333                // single rule.
334                rules_prods[usize::from(ridx)].push(PIdx(prods.len().as_()));
335                let start_prod = match implicit_start_rule {
336                    None => {
337                        // Add ^: S;
338                        vec![Symbol::Rule(rule_map[start_name])]
339                    }
340                    Some(ref s) => {
341                        // An implicit rule has been specified, so the special start rule
342                        // needs to reference the intermediate start rule required. Therefore add:
343                        //   ^: ^~;
344                        vec![Symbol::Rule(rule_map[s])]
345                    }
346                };
347                prods.push(Some(start_prod));
348                prod_precs.push(Some(None));
349                prods_rules.push(Some(ridx));
350                actions.push(None);
351                continue;
352            } else if implicit_start_rule.as_ref() == Some(astrulename) {
353                // Add the intermediate start rule (handling implicit tokens at the beginning of
354                // the file):
355                //   ^~: ~ S;
356                rules_prods[usize::from(rule_map[astrulename])].push(PIdx(prods.len().as_()));
357                prods.push(Some(vec![
358                    Symbol::Rule(rule_map[implicit_rule.as_ref().unwrap()]),
359                    Symbol::Rule(rule_map[start_name]),
360                ]));
361                prod_precs.push(Some(None));
362                prods_rules.push(Some(ridx));
363                continue;
364            } else if implicit_rule.as_ref() == Some(astrulename) {
365                // Add the implicit rule: ~: "IMPLICIT_TOKEN_1" ~ | ... | "IMPLICIT_TOKEN_N" ~ | ;
366                let implicit_prods = &mut rules_prods[usize::from(rule_map[astrulename])];
367                // Add a production for each implicit token
368                for (t, _) in ast.implicit_tokens.as_ref().unwrap().iter() {
369                    implicit_prods.push(PIdx(prods.len().as_()));
370                    prods.push(Some(vec![Symbol::Token(token_map[t]), Symbol::Rule(ridx)]));
371                    prod_precs.push(Some(None));
372                    prods_rules.push(Some(ridx));
373                }
374                // Add an empty production
375                implicit_prods.push(PIdx(prods.len().as_()));
376                prods.push(Some(vec![]));
377                prod_precs.push(Some(None));
378                prods_rules.push(Some(ridx));
379                continue;
380            } else {
381                actiontypes[usize::from(ridx)] = ast.rules[astrulename].actiont.clone();
382            }
383
384            let rule = &mut rules_prods[usize::from(ridx)];
385            for &pidx in &ast.rules[astrulename].pidxs {
386                let astprod = &ast.prods[pidx];
387                let mut prod = Vec::with_capacity(astprod.symbols.len());
388                for astsym in &astprod.symbols {
389                    match *astsym {
390                        ast::Symbol::Rule(ref n, _) => {
391                            prod.push(Symbol::Rule(rule_map[n]));
392                        }
393                        ast::Symbol::Token(ref n, _) => {
394                            prod.push(Symbol::Token(token_map[n]));
395                            if let Some(implicit_rule) = &implicit_rule {
396                                prod.push(Symbol::Rule(rule_map[implicit_rule]));
397                            }
398                        }
399                    };
400                }
401                let mut prec = None;
402                if let Some(ref n) = astprod.precedence {
403                    prec = Some(ast.precs[n]);
404                } else {
405                    for astsym in astprod.symbols.iter().rev() {
406                        if let ast::Symbol::Token(ref n, _) = *astsym {
407                            if let Some(p) = ast.precs.get(n) {
408                                prec = Some(*p);
409                            }
410                            break;
411                        }
412                    }
413                }
414                (*rule).push(PIdx(pidx.as_()));
415                prods[pidx] = Some(prod);
416                prod_precs[pidx] = Some(prec.map(|(prec, _)| prec));
417                prods_rules[pidx] = Some(ridx);
418                if let Some(ref s) = astprod.action {
419                    actions[pidx] = Some(s.clone());
420                }
421            }
422        }
423
424        let avoid_insert = if let Some(ai) = &ast.avoid_insert {
425            let mut aiv = Vob::from_elem(false, token_names.len());
426            for n in ai.keys() {
427                aiv.set(usize::from(token_map[n]), true);
428            }
429            Some(aiv)
430        } else {
431            None
432        };
433
434        assert!(!token_names.is_empty());
435        assert!(!rule_names.is_empty());
436        Ok(YaccGrammar {
437            rules_len: RIdx(rule_names.len().as_()),
438            rule_names: rule_names.into_boxed_slice(),
439            tokens_len: TIdx(token_names.len().as_()),
440            eof_token_idx,
441            token_names: token_names.into_boxed_slice(),
442            token_precs: token_precs.into_boxed_slice(),
443            token_epp: token_epp.into_boxed_slice(),
444            prods_len: PIdx(prods.len().as_()),
445            start_prod: rules_prods[usize::from(rule_map[&start_rule])][0],
446            rules_prods: rules_prods
447                .iter()
448                .map(|x| x.iter().copied().collect())
449                .collect(),
450            prods_rules: prods_rules.into_iter().map(Option::unwrap).collect(),
451            prods: prods
452                .into_iter()
453                .map(|x| x.unwrap().into_boxed_slice())
454                .collect(),
455            prod_precs: prod_precs.into_iter().map(Option::unwrap).collect(),
456            implicit_rule: implicit_rule.map(|x| rule_map[&x]),
457            actions: actions.into_boxed_slice(),
458            parse_param: ast.parse_param.clone(),
459            programs: ast.programs.clone(),
460            avoid_insert,
461            actiontypes: actiontypes.into_boxed_slice(),
462            expect: ast.expect.map(|(n, _)| n),
463            expectrr: ast.expectrr.map(|(n, _)| n),
464        })
465    }
466
    /// How many productions does this grammar have? (This includes productions that
    /// cfgrammar inserted itself, e.g. for the start rule.)
    pub fn prods_len(&self) -> PIdx<StorageT> {
        self.prods_len
    }
471
472    /// Return an iterator which produces (in order from `0..self.prods_len()`) all this
473    /// grammar's valid `PIdx`s.
474    pub fn iter_pidxs(&self) -> impl Iterator<Item = PIdx<StorageT>> {
475        // We can use as_ safely, because we know that we're only generating integers from
476        // 0..self.rules_len() and, since rules_len() returns an RIdx<StorageT>, then by
477        // definition the integers we're creating fit within StorageT.
478        Box::new((0..usize::from(self.prods_len())).map(|x| PIdx(x.as_())))
479    }
480
    /// Get the sequence of symbols for production `pidx`. Panics if `pidx` doesn't exist.
    pub fn prod(&self, pidx: PIdx<StorageT>) -> &[Symbol<StorageT>] {
        &self.prods[usize::from(pidx)]
    }
485
    /// How many symbols does production `pidx` have? Panics if `pidx` doesn't exist.
    pub fn prod_len(&self, pidx: PIdx<StorageT>) -> SIdx<StorageT> {
        // Since we've already checked that StorageT can store all the symbols for every production
        // in the grammar, the call to as_ is safe.
        SIdx(self.prods[usize::from(pidx)].len().as_())
    }
492
    /// Return the rule index of the production `pidx`. Panics if `pidx` doesn't exist.
    pub fn prod_to_rule(&self, pidx: PIdx<StorageT>) -> RIdx<StorageT> {
        self.prods_rules[usize::from(pidx)]
    }
497
    /// Return the precedence of production `pidx` (where `None` indicates "no precedence specified").
    /// Panics if `pidx` doesn't exist.
    pub fn prod_precedence(&self, pidx: PIdx<StorageT>) -> Option<Precedence> {
        self.prod_precs[usize::from(pidx)]
    }
503
    /// Return the production index of the start rule's sole production (for Yacc grammars the
    /// start rule is defined to have precisely one production).
    pub fn start_prod(&self) -> PIdx<StorageT> {
        self.start_prod
    }
509
    /// How many rules does this grammar have? (This includes rules that cfgrammar
    /// inserted itself, e.g. the fresh start rule.)
    pub fn rules_len(&self) -> RIdx<StorageT> {
        self.rules_len
    }
514
515    /// Return an iterator which produces (in order from `0..self.rules_len()`) all this
516    /// grammar's valid `RIdx`s.
517    pub fn iter_rules(&self) -> impl Iterator<Item = RIdx<StorageT>> {
518        // We can use as_ safely, because we know that we're only generating integers from
519        // 0..self.rules_len() and, since rules_len() returns an RIdx<StorageT>, then by
520        // definition the integers we're creating fit within StorageT.
521        Box::new((0..usize::from(self.rules_len())).map(|x| RIdx(x.as_())))
522    }
523
    /// Return the productions for rule `ridx`. Panics if `ridx` doesn't exist.
    pub fn rule_to_prods(&self, ridx: RIdx<StorageT>) -> &[PIdx<StorageT>] {
        &self.rules_prods[usize::from(ridx)]
    }
528
    /// Return the name of rule `ridx`. Panics if `ridx` doesn't exist.
    /// Deprecated alias of [`Self::rule_name_str`].
    #[deprecated(since = "0.13.0", note = "Please use rule_name_str instead")]
    pub fn rule_name(&self, ridx: RIdx<StorageT>) -> &str {
        self.rule_name_str(ridx)
    }
534
535    /// Return the name of rule `ridx`. Panics if `ridx` doesn't exist.
536    pub fn rule_name_str(&self, ridx: RIdx<StorageT>) -> &str {
537        let (name, _) = &self.rule_names[usize::from(ridx)];
538        name.as_str()
539    }
540
541    /// Return the span of rule `ridx`. Panics if `ridx` doesn't exist.
542    pub fn rule_name_span(&self, ridx: RIdx<StorageT>) -> Span {
543        let (_, span) = self.rule_names[usize::from(ridx)];
544        span
545    }
546
    /// Return the `RIdx` of the implicit rule if it exists, or `None` otherwise.
    pub fn implicit_rule(&self) -> Option<RIdx<StorageT>> {
        self.implicit_rule
    }
551
552    /// Return the index of the rule named `n` or `None` if it doesn't exist.
553    pub fn rule_idx(&self, n: &str) -> Option<RIdx<StorageT>> {
554        self.rule_names
555            .iter()
556            .position(|(x, _)| x == n)
557            // The call to as_() is safe because rule_names is guaranteed to be
558            // small enough to fit into StorageT
559            .map(|x| RIdx(x.as_()))
560    }
561
    /// What is the index of the start rule? Note that cfgrammar will have inserted at least one
    /// rule "above" the user's start rule.
    pub fn start_rule_idx(&self) -> RIdx<StorageT> {
        // The start rule is, by construction, the rule of the sole start production.
        self.prod_to_rule(self.start_prod)
    }
567
    /// How many tokens does this grammar have? (This includes tokens that cfgrammar
    /// inserted itself, e.g. the EOF token.)
    pub fn tokens_len(&self) -> TIdx<StorageT> {
        self.tokens_len
    }
572
573    /// Return an iterator which produces (in order from `0..self.tokens_len()`) all this
574    /// grammar's valid `TIdx`s.
575    pub fn iter_tidxs(&self) -> impl Iterator<Item = TIdx<StorageT>> {
576        // We can use as_ safely, because we know that we're only generating integers from
577        // 0..self.rules_len() and, since rules_len() returns an TIdx<StorageT>, then by
578        // definition the integers we're creating fit within StorageT.
579        Box::new((0..usize::from(self.tokens_len())).map(|x| TIdx(x.as_())))
580    }
581
    /// Return the index of the end token (inserted by cfgrammar; it has no name).
    pub fn eof_token_idx(&self) -> TIdx<StorageT> {
        self.eof_token_idx
    }
586
587    /// Return the name of token `tidx` (where `None` indicates "the rule has no name"). Panics if
588    /// `tidx` doesn't exist.
589    pub fn token_name(&self, tidx: TIdx<StorageT>) -> Option<&str> {
590        self.token_names[usize::from(tidx)]
591            .as_ref()
592            .map(|(_, x)| x.as_str())
593    }
594
    /// Return the precedence of token `tidx` (where `None` indicates "no precedence specified").
    /// Panics if `tidx` doesn't exist.
    pub fn token_precedence(&self, tidx: TIdx<StorageT>) -> Option<Precedence> {
        self.token_precs[usize::from(tidx)]
    }
600
    /// Return the %epp entry for token `tidx` (where `None` indicates "the token has no
    /// pretty-printed value"). Panics if `tidx` doesn't exist.
    pub fn token_epp(&self, tidx: TIdx<StorageT>) -> Option<&str> {
        self.token_epp[usize::from(tidx)].as_deref()
    }
606
607    /// Return the span for token given by `tidx` if one exists.
608    /// If `None`, the token is either implicit and not derived from a token
609    /// in the source, otherwise the `YaccGrammar` itself may not derived from a
610    /// textual source in which case the token may be explicit but still lack spans
611    /// from its construction.
612    pub fn token_span(&self, tidx: TIdx<StorageT>) -> Option<Span> {
613        self.token_names[usize::from(tidx)]
614            .as_ref()
615            .map(|(span, _)| *span)
616    }
617
    /// Get the action for production `pidx` (the user-supplied code to run when the
    /// production is reduced), if any. Panics if `pidx` doesn't exist.
    pub fn action(&self, pidx: PIdx<StorageT>) -> &Option<String> {
        &self.actions[usize::from(pidx)]
    }
622
    /// Get the action type of rule `ridx` (one per rule), if any. Panics if `ridx`
    /// doesn't exist.
    pub fn actiontype(&self, ridx: RIdx<StorageT>) -> &Option<String> {
        &self.actiontypes[usize::from(ridx)]
    }
626
    /// Get the `(name, type)` pair (if any) defining an extra parameter to pass to
    /// action functions.
    pub fn parse_param(&self) -> &Option<(String, String)> {
        &self.parse_param
    }
630
    /// Get the programs part of the grammar, if any.
    pub fn programs(&self) -> &Option<String> {
        &self.programs
    }
635
636    /// Returns a map from names to `TIdx`s of all tokens that a lexer will need to generate valid
637    /// inputs from this grammar.
638    pub fn tokens_map(&self) -> HashMap<&str, TIdx<StorageT>> {
639        let mut m = HashMap::with_capacity(usize::from(self.tokens_len) - 1);
640        for tidx in self.iter_tidxs() {
641            if let Some((_, n)) = self.token_names[usize::from(tidx)].as_ref() {
642                m.insert(&**n, tidx);
643            }
644        }
645        m
646    }
647
648    /// Return the index of the token named `n` or `None` if it doesn't exist.
649    pub fn token_idx(&self, n: &str) -> Option<TIdx<StorageT>> {
650        self.token_names
651            .iter()
652            .position(|x| x.as_ref().is_some_and(|(_, x)| x == n))
653            // The call to as_() is safe because token_names is guaranteed to be small
654            // enough to fit into StorageT
655            .map(|x| TIdx(x.as_()))
656    }
657
658    /// Is the token `tidx` marked as `%avoid_insert`?
659    pub fn avoid_insert(&self, tidx: TIdx<StorageT>) -> bool {
660        if let Some(ai) = &self.avoid_insert {
661            ai.get(usize::from(tidx)).unwrap()
662        } else {
663            false
664        }
665    }
666
    /// How many shift/reduce conflicts were expected (i.e. the `%expect` value), if any?
    pub fn expect(&self) -> Option<usize> {
        self.expect
    }
671
    /// How many reduce/reduce conflicts were expected (i.e. the `%expect-rr` value), if any?
    pub fn expectrr(&self) -> Option<usize> {
        self.expectrr
    }
676
677    /// Is there a path from the `from` rule to the `to` rule? Note that recursive rules
678    /// return `true` for a path from themselves to themselves.
679    pub fn has_path(&self, from: RIdx<StorageT>, to: RIdx<StorageT>) -> bool {
680        let mut seen = vec![];
681        seen.resize(usize::from(self.rules_len()), false);
682        let mut todo = vec![];
683        todo.resize(usize::from(self.rules_len()), false);
684        todo[usize::from(from)] = true;
685        loop {
686            let mut empty = true;
687            for ridx in self.iter_rules() {
688                if !todo[usize::from(ridx)] {
689                    continue;
690                }
691                seen[usize::from(ridx)] = true;
692                todo[usize::from(ridx)] = false;
693                empty = false;
694                for pidx in self.rule_to_prods(ridx).iter() {
695                    for sym in self.prod(*pidx) {
696                        if let Symbol::Rule(p_ridx) = *sym {
697                            if p_ridx == to {
698                                return true;
699                            }
700                            if !seen[usize::from(p_ridx)] {
701                                todo[usize::from(p_ridx)] = true;
702                            }
703                        }
704                    }
705                }
706            }
707            if empty {
708                return false;
709            }
710        }
711    }
712
713    /// Returns the string representation of a given production `pidx`.
714    pub fn pp_prod(&self, pidx: PIdx<StorageT>) -> String {
715        let mut sprod = String::new();
716        let ridx = self.prod_to_rule(pidx);
717        sprod.push_str(self.rule_name_str(ridx));
718        sprod.push(':');
719        for sym in self.prod(pidx) {
720            let s = match sym {
721                Symbol::Token(tidx) => self.token_name(*tidx).unwrap(),
722                Symbol::Rule(ridx) => self.rule_name_str(*ridx),
723            };
724            write!(sprod, " \"{}\"", s).ok();
725        }
726        sprod
727    }
728
    /// Return a `SentenceGenerator` which can then generate minimal sentences for any rule
    /// based on the user-defined `token_cost` function which gives the associated cost for
    /// generating each token (where the cost must be greater than 0). Note that multiple
    /// tokens can have the same score. The simplest cost function is thus `|_| 1`.
    pub fn sentence_generator<F>(&self, token_cost: F) -> SentenceGenerator<StorageT>
    where
        F: Fn(TIdx<StorageT>) -> u8,
    {
        // `token_cost` is evaluated once per token up-front by `SentenceGenerator::new`.
        SentenceGenerator::new(self, token_cost)
    }
739
    /// Return a freshly computed `YaccFirsts` struct for this grammar.
    pub fn firsts(&self) -> YaccFirsts<StorageT> {
        YaccFirsts::new(self)
    }
744
    /// Return a freshly computed `YaccFollows` struct for this grammar.
    pub fn follows(&self) -> YaccFollows<StorageT> {
        YaccFollows::new(self)
    }
}
750
/// A `SentenceGenerator` can generate minimal sentences for any given rule. e.g. for the
/// grammar:
///
/// ```text
/// %start A
/// %%
/// A: A B | ;
/// B: C | D;
/// C: 'x' B | 'x';
/// D: 'y' B | 'y' 'z';
/// ```
///
/// the following are valid minimal sentences:
///
/// ```text
/// A: []
/// B: [x]
/// C: [x]
/// D: [y, x] or [y, z]
/// ```
pub struct SentenceGenerator<'a, StorageT> {
    grm: &'a YaccGrammar<StorageT>,
    // Per-rule minimal/maximal sentence costs, computed lazily on first use
    // (see `min_sentence_cost`/`max_sentence_cost`). In `rule_max_costs`,
    // `u16::MAX` is the sentinel for "unbounded".
    rule_min_costs: RefCell<Option<Vec<u16>>>,
    rule_max_costs: RefCell<Option<Vec<u16>>>,
    // The user-supplied cost of each token, indexed by `TIdx`.
    token_costs: Vec<u8>,
}
777
778impl<'a, StorageT: 'static + PrimInt + Unsigned> SentenceGenerator<'a, StorageT>
779where
780    usize: AsPrimitive<StorageT>,
781{
782    fn new<F>(grm: &'a YaccGrammar<StorageT>, token_cost: F) -> Self
783    where
784        F: Fn(TIdx<StorageT>) -> u8,
785    {
786        let mut token_costs = Vec::with_capacity(usize::from(grm.tokens_len()));
787        for tidx in grm.iter_tidxs() {
788            token_costs.push(token_cost(tidx));
789        }
790        SentenceGenerator {
791            grm,
792            token_costs,
793            rule_min_costs: RefCell::new(None),
794            rule_max_costs: RefCell::new(None),
795        }
796    }
797
798    /// What is the cost of a minimal sentence for the rule `ridx`? Note that, unlike
799    /// `min_sentence`, this function does not actually *build* a sentence and it is thus much
800    /// faster.
801    pub fn min_sentence_cost(&self, ridx: RIdx<StorageT>) -> u16 {
802        self.rule_min_costs
803            .borrow_mut()
804            .get_or_insert_with(|| rule_min_costs(self.grm, &self.token_costs))[usize::from(ridx)]
805    }
806
807    /// What is the cost of a maximal sentence for the rule `ridx`? Rules which can generate
808    /// sentences of unbounded length return None; rules which can only generate maximal strings of
809    /// a finite length return a `Some(u16)`.
810    pub fn max_sentence_cost(&self, ridx: RIdx<StorageT>) -> Option<u16> {
811        let v = self
812            .rule_max_costs
813            .borrow_mut()
814            .get_or_insert_with(|| rule_max_costs(self.grm, &self.token_costs))[usize::from(ridx)];
815        if v == u16::MAX { None } else { Some(v) }
816    }
817
818    /// Non-deterministically return a minimal sentence from the set of minimal sentences for the
819    /// rule `ridx`.
820    pub fn min_sentence(&self, ridx: RIdx<StorageT>) -> Vec<TIdx<StorageT>> {
821        let cheapest_prod = |p_ridx: RIdx<StorageT>| -> PIdx<StorageT> {
822            let mut low_sc = None;
823            let mut low_idx = None;
824            for &pidx in self.grm.rule_to_prods(p_ridx).iter() {
825                let mut sc = 0;
826                for sym in self.grm.prod(pidx).iter() {
827                    sc += match *sym {
828                        Symbol::Rule(i) => self.min_sentence_cost(i),
829                        Symbol::Token(i) => u16::from(self.token_costs[usize::from(i)]),
830                    };
831                }
832                if low_sc.is_none() || Some(sc) < low_sc {
833                    low_sc = Some(sc);
834                    low_idx = Some(pidx);
835                }
836            }
837            low_idx.unwrap()
838        };
839
840        let mut s = vec![];
841        let mut st = vec![(cheapest_prod(ridx), 0)];
842        while let Some((pidx, sym_idx)) = st.pop() {
843            let prod = self.grm.prod(pidx);
844            for (sidx, sym) in prod.iter().enumerate().skip(sym_idx) {
845                match sym {
846                    Symbol::Rule(s_ridx) => {
847                        st.push((pidx, sidx + 1));
848                        st.push((cheapest_prod(*s_ridx), 0));
849                    }
850                    Symbol::Token(s_tidx) => {
851                        s.push(*s_tidx);
852                    }
853                }
854            }
855        }
856        s
857    }
858
    /// Return (in arbitrary order) all the minimal sentences for the rule `ridx`.
    pub fn min_sentences(&self, ridx: RIdx<StorageT>) -> Vec<Vec<TIdx<StorageT>>> {
        // Return *all* of `p_ridx`'s productions whose summed symbol cost
        // equals the minimum (there may be several joint-cheapest ones).
        let cheapest_prods = |p_ridx: RIdx<StorageT>| -> Vec<PIdx<StorageT>> {
            let mut low_sc = None;
            let mut low_idxs = vec![];
            for &pidx in self.grm.rule_to_prods(p_ridx).iter() {
                let mut sc = 0;
                for sym in self.grm.prod(pidx).iter() {
                    sc += match *sym {
                        Symbol::Rule(s_ridx) => self.min_sentence_cost(s_ridx),
                        Symbol::Token(s_tidx) => u16::from(self.token_costs[usize::from(s_tidx)]),
                    };
                }
                if low_sc.is_none() || Some(sc) <= low_sc {
                    // A strictly cheaper production invalidates everything
                    // gathered so far; an equal-cost one is simply added.
                    if Some(sc) < low_sc {
                        low_idxs.clear();
                    }
                    low_sc = Some(sc);
                    low_idxs.push(pidx);
                }
            }
            low_idxs
        };

        let mut sts = Vec::new(); // Output sentences
        for pidx in cheapest_prods(ridx) {
            let prod = self.grm.prod(pidx);
            if prod.is_empty() {
                // An empty production contributes the empty sentence directly.
                sts.push(vec![]);
                continue;
            }

            // We construct the minimal sentences in two phases.
            //
            // First, for each symbol in the production, we gather all the possible minimal
            // sentences for it. If, for the grammar:
            //   X: 'a' Y
            //   Y: 'b' | 'c'
            // we ask for the minimal sentences of X's only production we'll end up with a vec of
            // vecs as follows:
            //   [[['a']], [['b'], ['c']]]

            let mut ms = Vec::with_capacity(prod.len());
            for sym in prod {
                match *sym {
                    Symbol::Rule(s_ridx) => ms.push(self.min_sentences(s_ridx)),
                    Symbol::Token(s_tidx) => ms.push(vec![vec![s_tidx]]),
                }
            }

            // Second, we need to generate all combinations of the gathered sentences. We do this
            // by writing our own simple numeric incrementing scheme. If we rewrite the list from
            // above as follows:
            //
            //      0 1 <- call this axis "i"
            //   0: a b
            //   1:   c
            //   ^
            //   |
            //   call this axis "todo"
            //
            // this hopefully becomes easier to see. Let's call the list "ms": the combinations we
            // need to generate are thus:
            //
            //   ms[0][0] + ms[1][0]  (i.e. 'ab')
            //   ms[0][0] + ms[1][1]  (i.e. 'ac')
            //
            // The easiest way to model this is to have a list (todo) with each entry starting at
            // 0. After each iteration around the loop (i) we add 1 to the last todo column's
            // entry: if that spills over the length of the corresponding ms entry, then we reset
            // that column to zero, and try adding 1 to the previous column (as many times as
            // needed). If the first column spills, then we're done. This is basically normal
            // arithmetic but with each digit having an arbitrary base.

            let mut todo = vec![0; prod.len()];
            let mut cur = Vec::new();
            'b: loop {
                // Concatenate the currently-selected sentence of each symbol.
                for i in 0..todo.len() {
                    cur.extend(&ms[i][todo[i]]);
                }
                sts.push(std::mem::take(&mut cur));

                // Increment the mixed-radix "odometer", rightmost digit first.
                let mut j = todo.len() - 1;
                loop {
                    if todo[j] + 1 == ms[j].len() {
                        if j == 0 {
                            break 'b;
                        }
                        todo[j] = 0;
                        j -= 1;
                    } else {
                        todo[j] += 1;
                        break;
                    }
                }
            }
        }
        sts
    }
958}
959
/// Return the cost of a minimal string for each rule in this grammar. The cost of a
/// token is specified by the user-defined `token_cost` function.
#[allow(clippy::unnecessary_unwrap)]
fn rule_min_costs<StorageT: 'static + PrimInt + Unsigned>(
    grm: &YaccGrammar<StorageT>,
    token_costs: &[u8],
) -> Vec<u16>
where
    usize: AsPrimitive<StorageT>,
{
    // We use a simple(ish) fixed-point algorithm to determine costs. We maintain two lists
    // "costs" and "done". An integer costs[i] starts at 0 and monotonically increments
    // until done[i] is true, at which point costs[i] value is fixed. We also use the done
    // list as a simple "todo" list: whilst there is at least one false value in done, there is
    // still work to do.
    //
    // On each iteration of the loop, we examine each rule in the todo list to see if
    // we can get a better idea of its true cost. Some are trivial:
    //   * A rule with an empty production immediately has a cost of 0.
    //   * Rules whose productions don't reference any rules (i.e. only contain tokens) can be
    //     immediately given a cost by calculating the lowest-cost production.
    // However if a rule A references another rule B, we may need to wait until
    // we've fully analysed B before we can cost A. This might seem to cause problems with
    // recursive rules, so we introduce the concept of "incomplete costs" i.e. if a production
    // references a rule we can work out its minimum possible cost simply by counting
    // the production's token costs. Since rules can have a mix of complete and
    // incomplete productions, this is sometimes enough to allow us to assign a final cost to
    // a rule (if the lowest complete production's cost is lower than or equal to all
    // the lowest incomplete production's cost). This allows us to make progress, since it
    // means that we can iteratively improve our knowledge of a token's minimum cost:
    // eventually we will reach a point where we can determine it definitively.

    let mut costs = vec![0; usize::from(grm.rules_len())];
    let mut done = vec![false; usize::from(grm.rules_len())];
    // Fixed-point loop: iterate until every rule's cost has been finalised.
    loop {
        let mut all_done = true;
        for i in 0..done.len() {
            if done[i] {
                continue;
            }
            all_done = false;
            let mut ls_cmplt = None; // lowest completed cost
            let mut ls_noncmplt = None; // lowest non-completed cost

            // The call to as_() is guaranteed safe because done.len() == grm.rules_len(), and
            // we guarantee that grm.rules_len() can fit in StorageT.
            for pidx in grm.rule_to_prods(RIdx(i.as_())).iter() {
                let mut c: u16 = 0; // production cost
                let mut cmplt = true;
                for sym in grm.prod(*pidx) {
                    let sc = match *sym {
                        Symbol::Token(tidx) => u16::from(token_costs[usize::from(tidx)]),
                        Symbol::Rule(ridx) => {
                            // Referencing an unfinished rule makes this
                            // production's cost a lower bound, not a final one.
                            if !done[usize::from(ridx)] {
                                cmplt = false;
                            }
                            costs[usize::from(ridx)]
                        }
                    };
                    c = c
                        .checked_add(sc)
                        .expect("Overflow occurred when calculating rule costs");
                }
                if cmplt && (ls_cmplt.is_none() || Some(c) < ls_cmplt) {
                    ls_cmplt = Some(c);
                } else if !cmplt && (ls_noncmplt.is_none() || Some(c) < ls_noncmplt) {
                    ls_noncmplt = Some(c);
                }
            }
            if ls_cmplt.is_some() && (ls_noncmplt.is_none() || ls_cmplt < ls_noncmplt) {
                debug_assert!(ls_cmplt.unwrap() >= costs[i]);
                costs[i] = ls_cmplt.unwrap();
                done[i] = true;
            } else if let Some(ls_noncmplt) = ls_noncmplt {
                // Record the improved lower bound; this rule stays on the todo
                // list for another pass.
                debug_assert!(ls_noncmplt >= costs[i]);
                costs[i] = ls_noncmplt;
            }
        }
        if all_done {
            debug_assert!(done.iter().all(|x| *x));
            break;
        }
    }
    costs
}
1045
1046/// Return the cost of the maximal string for each rule in this grammar (u32::max_val()
1047/// representing "this rule can generate strings of infinite length"). The cost of a
1048/// token is specified by the user-defined `token_cost` function.
1049#[allow(clippy::unnecessary_unwrap)]
1050fn rule_max_costs<StorageT: 'static + PrimInt + Unsigned>(
1051    grm: &YaccGrammar<StorageT>,
1052    token_costs: &[u8],
1053) -> Vec<u16>
1054where
1055    usize: AsPrimitive<StorageT>,
1056{
1057    let mut done = vec![false; usize::from(grm.rules_len())];
1058    let mut costs = vec![0; usize::from(grm.rules_len())];
1059
1060    // First mark all recursive rules.
1061    for ridx in grm.iter_rules() {
1062        // Calling has_path so frequently is not exactly efficient...
1063        if grm.has_path(ridx, ridx) {
1064            costs[usize::from(ridx)] = u16::MAX;
1065            done[usize::from(ridx)] = true;
1066        }
1067    }
1068
1069    loop {
1070        let mut all_done = true;
1071        for i in 0..done.len() {
1072            if done[i] {
1073                continue;
1074            }
1075            all_done = false;
1076            let mut hs_cmplt = None; // highest completed cost
1077            let mut hs_noncmplt = None; // highest non-completed cost
1078
1079            // The call to as_() is guaranteed safe because done.len() == grm.rules_len(), and
1080            // we guarantee that grm.rules_len() can fit in StorageT.
1081            'a: for pidx in grm.rule_to_prods(RIdx(i.as_())).iter() {
1082                let mut c: u16 = 0; // production cost
1083                let mut cmplt = true;
1084                for sym in grm.prod(*pidx) {
1085                    let sc = match *sym {
1086                        Symbol::Token(s_tidx) => u16::from(token_costs[usize::from(s_tidx)]),
1087                        Symbol::Rule(s_ridx) => {
1088                            if costs[usize::from(s_ridx)] == u16::MAX {
1089                                // As soon as we find reference to an infinite rule, we
1090                                // can stop looking.
1091                                hs_cmplt = Some(u16::MAX);
1092                                break 'a;
1093                            }
1094                            if !done[usize::from(s_ridx)] {
1095                                cmplt = false;
1096                            }
1097                            costs[usize::from(s_ridx)]
1098                        }
1099                    };
1100                    c = c
1101                        .checked_add(sc)
1102                        .expect("Overflow occurred when calculating rule costs");
1103                    if c == u16::MAX {
1104                        panic!("Unable to represent cost in 64 bits.");
1105                    }
1106                }
1107                if cmplt && (hs_cmplt.is_none() || Some(c) > hs_cmplt) {
1108                    hs_cmplt = Some(c);
1109                } else if !cmplt && (hs_noncmplt.is_none() || Some(c) > hs_noncmplt) {
1110                    hs_noncmplt = Some(c);
1111                }
1112            }
1113            if hs_cmplt.is_some() && (hs_noncmplt.is_none() || hs_cmplt > hs_noncmplt) {
1114                debug_assert!(hs_cmplt.unwrap() >= costs[i]);
1115                costs[i] = hs_cmplt.unwrap();
1116                done[i] = true;
1117            } else if let Some(hs_noncmplt) = hs_noncmplt {
1118                debug_assert!(hs_noncmplt >= costs[i]);
1119                costs[i] = hs_noncmplt;
1120            }
1121        }
1122        if all_done {
1123            debug_assert!(done.iter().all(|x| *x));
1124            break;
1125        }
1126    }
1127    costs
1128}
1129
1130#[cfg(test)]
1131mod test {
1132    use super::{
1133        super::{AssocKind, Precedence, YaccGrammar, YaccKind, YaccOriginalActionKind},
1134        IMPLICIT_RULE, IMPLICIT_START_RULE, rule_max_costs, rule_min_costs,
1135    };
1136    use crate::{PIdx, RIdx, Span, Symbol, TIdx};
1137    use std::collections::HashMap;
1138    use std::str::FromStr;
1139
1140    macro_rules! bslice {
1141        () => (
1142            ::Vec::new().into_boxed_slice()
1143        );
1144        ($elem:expr; $n:expr) => (
1145            ::vec::from_elem($elem, $n).into_boxed_slice()
1146        );
1147        ($($x:expr),+ $(,)?) => (
1148            <[_]>::into_vec(
1149                Box::new([$($x),+])
1150            ).into_boxed_slice()
1151        );
1152    }
1153
    #[test]
    fn test_minimal() {
        // The smallest useful grammar: one user rule, one token. Checks the
        // synthesised start rule/production and the basic index mappings.
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "%start R %token T %% R: 'T';",
        )
        .unwrap();

        assert_eq!(grm.start_prod, PIdx(1));
        assert_eq!(grm.implicit_rule(), None);
        grm.rule_idx("^").unwrap();
        grm.rule_idx("R").unwrap();
        grm.token_idx("T").unwrap();

        assert_eq!(&*grm.rules_prods, &[bslice![PIdx(1)], bslice![PIdx(0)]]);
        let start_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("^").unwrap())][0]);
        assert_eq!(*start_prod, [Symbol::Rule(grm.rule_idx("R").unwrap())]);
        let r_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("R").unwrap())][0]);
        assert_eq!(*r_prod, [Symbol::Token(grm.token_idx("T").unwrap())]);
        assert_eq!(&*grm.prods_rules, &[RIdx(1), RIdx(0)]);

        assert_eq!(
            grm.tokens_map(),
            [("T", TIdx(0))]
                .iter()
                .cloned()
                .collect::<HashMap<&str, TIdx<_>>>()
        );
        assert_eq!(grm.iter_rules().collect::<Vec<_>>(), vec![RIdx(0), RIdx(1)]);
    }
1184
    #[test]
    fn test_rule_ref() {
        // A rule whose production references another rule; also checks that
        // the cfgrammar-inserted EOF token is nameless.
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "%start R %token T %% R : S; S: 'T';",
        )
        .unwrap();

        grm.rule_idx("^").unwrap();
        grm.rule_idx("R").unwrap();
        grm.rule_idx("S").unwrap();
        grm.token_idx("T").unwrap();
        assert!(grm.token_name(grm.eof_token_idx()).is_none());

        assert_eq!(
            &*grm.rules_prods,
            &[bslice![PIdx(2)], bslice![PIdx(0)], bslice![PIdx(1)]]
        );
        let start_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("^").unwrap())][0]);
        assert_eq!(*start_prod, [Symbol::Rule(grm.rule_idx("R").unwrap())]);
        let r_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("R").unwrap())][0]);
        assert_eq!(r_prod.len(), 1);
        assert_eq!(r_prod[0], Symbol::Rule(grm.rule_idx("S").unwrap()));
        let s_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("S").unwrap())][0]);
        assert_eq!(s_prod.len(), 1);
        assert_eq!(s_prod[0], Symbol::Token(grm.token_idx("T").unwrap()));
    }
1212
    #[test]
    #[rustfmt::skip]
    fn test_long_prod() {
        // A production with several symbols ("S 'T1' S") must preserve their
        // order, including a rule appearing twice.
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "%start R %token T1 T2 %% R : S 'T1' S; S: 'T2';"
        ).unwrap();

        grm.rule_idx("^").unwrap();
        grm.rule_idx("R").unwrap();
        grm.rule_idx("S").unwrap();
        grm.token_idx("T1").unwrap();
        grm.token_idx("T2").unwrap();

        assert_eq!(&*grm.rules_prods, &[bslice![PIdx(2)],
                                         bslice![PIdx(0)],
                                         bslice![PIdx(1)]]);
        assert_eq!(&*grm.prods_rules, &[RIdx(1),
                                         RIdx(2),
                                         RIdx(0)]);
        let start_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("^").unwrap())][0]);
        assert_eq!(*start_prod, [Symbol::Rule(grm.rule_idx("R").unwrap())]);
        let r_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("R").unwrap())][0]);
        assert_eq!(r_prod.len(), 3);
        assert_eq!(r_prod[0], Symbol::Rule(grm.rule_idx("S").unwrap()));
        assert_eq!(r_prod[1], Symbol::Token(grm.token_idx("T1").unwrap()));
        assert_eq!(r_prod[2], Symbol::Rule(grm.rule_idx("S").unwrap()));
        let s_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("S").unwrap())][0]);
        assert_eq!(s_prod.len(), 1);
        assert_eq!(s_prod[0], Symbol::Token(grm.token_idx("T2").unwrap()));
    }
1244
    #[test]
    fn test_prods_rules() {
        // Each production (in declaration order) must map back to the rule it
        // belongs to, with the synthesised start production last.
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "
            %start A
            %%
            A: B
             | C;
            B: 'x';
            C: 'y'
             | 'z';
          ",
        )
        .unwrap();

        assert_eq!(
            &*grm.prods_rules,
            &[RIdx(1), RIdx(1), RIdx(2), RIdx(3), RIdx(3), RIdx(0)]
        );
    }
1266
    #[test]
    #[rustfmt::skip]
    fn test_left_right_nonassoc_precs() {
        // Checks the precedence level and associativity assigned to each
        // production: levels increase with successive declarations, and
        // productions without a precedence-carrying token get `None`.
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "
            %start Expr
            %right '='
            %left '+' '-'
            %left '/'
            %left '*'
            %nonassoc '~'
            %%
            Expr : Expr '=' Expr
                 | Expr '+' Expr
                 | Expr '-' Expr
                 | Expr '/' Expr
                 | Expr '*' Expr
                 | Expr '~' Expr
                 | 'id' ;
          ").unwrap();

        assert_eq!(grm.prod_precs.len(), 8);
        assert_eq!(grm.prod_precs[0].unwrap(), Precedence{level: 0, kind: AssocKind::Right});
        assert_eq!(grm.prod_precs[1].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[2].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[3].unwrap(), Precedence{level: 2, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[4].unwrap(), Precedence{level: 3, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[5].unwrap(), Precedence{level: 4, kind: AssocKind::Nonassoc});
        assert!(grm.prod_precs[6].is_none());
        assert_eq!(grm.prod_precs[7], None);
    }
1299
    #[test]
    #[rustfmt::skip]
    fn test_prec_override() {
        // `%prec '*'` must override the production's default precedence
        // (cf. the unary-minus production, which takes '*''s level).
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "
            %start expr
            %left '+' '-'
            %left '*' '/'
            %%
            expr : expr '+' expr
                 | expr '-' expr
                 | expr '*' expr
                 | expr '/' expr
                 | '-'  expr %prec '*'
                 | 'id' ;
        "
        ).unwrap();
        assert_eq!(grm.prod_precs.len(), 7);
        assert_eq!(grm.prod_precs[0].unwrap(), Precedence{level: 0, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[1].unwrap(), Precedence{level: 0, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[2].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[3].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[4].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert!(grm.prod_precs[5].is_none());
        assert_eq!(grm.prod_precs[6], None);
    }
1327
1328    #[test]
1329    #[rustfmt::skip]
1330    fn test_implicit_tokens_rewrite() {
1331        let grm = YaccGrammar::new(
1332            YaccKind::Eco,
1333            "
1334          %implicit_tokens ws1 ws2
1335          %start S
1336          %%
1337          S: 'a' | T;
1338          T: 'c' |;
1339          "
1340        ).unwrap();
1341
1342        // Check that the above grammar has been rewritten to:
1343        //   ^ : ^~;
1344        //   ^~: ~ S;
1345        //   ~ : ws1 | ws2 | ;
1346        //   S : 'a' ~ | T;
1347        //   T : 'c' ~ | ;
1348
1349        assert_eq!(grm.prod_precs.len(), 9);
1350
1351        let itfs_rule_idx = grm.rule_idx(IMPLICIT_START_RULE).unwrap();
1352        assert_eq!(grm.rules_prods[usize::from(itfs_rule_idx)].len(), 1);
1353
1354        let itfs_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(itfs_rule_idx)][0])];
1355        assert_eq!(itfs_prod1.len(), 2);
1356        assert_eq!(itfs_prod1[0], Symbol::Rule(grm.rule_idx(IMPLICIT_RULE).unwrap()));
1357        assert_eq!(itfs_prod1[1], Symbol::Rule(grm.rule_idx("S").unwrap()));
1358
1359        let s_rule_idx = grm.rule_idx("S").unwrap();
1360        assert_eq!(grm.rules_prods[usize::from(s_rule_idx)].len(), 2);
1361
1362        let s_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(s_rule_idx)][0])];
1363        assert_eq!(s_prod1.len(), 2);
1364        assert_eq!(s_prod1[0], Symbol::Token(grm.token_idx("a").unwrap()));
1365        assert_eq!(s_prod1[1], Symbol::Rule(grm.rule_idx(IMPLICIT_RULE).unwrap()));
1366
1367        let s_prod2 = &grm.prods[usize::from(grm.rules_prods[usize::from(s_rule_idx)][1])];
1368        assert_eq!(s_prod2.len(), 1);
1369        assert_eq!(s_prod2[0], Symbol::Rule(grm.rule_idx("T").unwrap()));
1370
1371        let t_rule_idx = grm.rule_idx("T").unwrap();
1372        assert_eq!(grm.rules_prods[usize::from(s_rule_idx)].len(), 2);
1373
1374        let t_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(t_rule_idx)][0])];
1375        assert_eq!(t_prod1.len(), 2);
1376        assert_eq!(t_prod1[0], Symbol::Token(grm.token_idx("c").unwrap()));
1377        assert_eq!(t_prod1[1], Symbol::Rule(grm.rule_idx(IMPLICIT_RULE).unwrap()));
1378
1379        let t_prod2 = &grm.prods[usize::from(grm.rules_prods[usize::from(t_rule_idx)][1])];
1380        assert_eq!(t_prod2.len(), 0);
1381
1382        assert_eq!(Some(grm.rule_idx(IMPLICIT_RULE).unwrap()), grm.implicit_rule());
1383        let i_rule_idx = grm.rule_idx(IMPLICIT_RULE).unwrap();
1384        assert_eq!(grm.rules_prods[usize::from(i_rule_idx)].len(), 3);
1385        let i_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(i_rule_idx)][0])];
1386        let i_prod2 = &grm.prods[usize::from(grm.rules_prods[usize::from(i_rule_idx)][1])];
1387        assert_eq!(i_prod1.len(), 2);
1388        assert_eq!(i_prod2.len(), 2);
1389        // We don't know what order the implicit rule will contain our tokens in,
1390        // hence the awkward dance below.
1391        let cnd1 = bslice![
1392            Symbol::Token(grm.token_idx("ws1").unwrap()),
1393            Symbol::Rule(grm.implicit_rule().unwrap()),
1394        ];
1395        let cnd2 = bslice![
1396            Symbol::Token(grm.token_idx("ws2").unwrap()),
1397            Symbol::Rule(grm.implicit_rule().unwrap()),
1398        ];
1399        assert!((*i_prod1 == cnd1 && *i_prod2 == cnd2) || (*i_prod1 == cnd2 && *i_prod2 == cnd1));
1400        let i_prod3 = &grm.prods[usize::from(grm.rules_prods[usize::from(i_rule_idx)][2])];
1401        assert_eq!(i_prod3.len(), 0);
1402    }
1403
    #[test]
    #[rustfmt::skip]
    fn test_has_path() {
        // Reachability is directional (A -> B -> C but not backwards) and
        // self-recursive rules have a path to themselves.
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "
            %start A
            %%
            A: B;
            B: B 'x' | C;
            C: C 'y' | ;
          "
        ).unwrap();

        let a_ridx = grm.rule_idx("A").unwrap();
        let b_ridx = grm.rule_idx("B").unwrap();
        let c_ridx = grm.rule_idx("C").unwrap();
        assert!(grm.has_path(a_ridx, b_ridx));
        assert!(grm.has_path(a_ridx, c_ridx));
        assert!(grm.has_path(b_ridx, b_ridx));
        assert!(grm.has_path(b_ridx, c_ridx));
        assert!(grm.has_path(c_ridx, c_ridx));
        assert!(!grm.has_path(a_ridx, a_ridx));
        assert!(!grm.has_path(b_ridx, a_ridx));
        assert!(!grm.has_path(c_ridx, a_ridx));
    }
1430
1431    #[test]
1432    #[rustfmt::skip]
1433    fn test_rule_min_costs() {
1434        let grm = YaccGrammar::new(
1435            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1436            "
1437            %start A
1438            %%
1439            A: A B | ;
1440            B: C | D | E;
1441            C: 'x' B | 'x';
1442            D: 'y' B | 'y' 'z';
1443            E: 'x' A | 'x' 'y';
1444          "
1445        ).unwrap();
1446
1447        let scores = rule_min_costs(&grm, &[1, 1, 1]);
1448        assert_eq!(scores[usize::from(grm.rule_idx("A").unwrap())], 0);
1449        assert_eq!(scores[usize::from(grm.rule_idx("B").unwrap())], 1);
1450        assert_eq!(scores[usize::from(grm.rule_idx("C").unwrap())], 1);
1451        assert_eq!(scores[usize::from(grm.rule_idx("D").unwrap())], 2);
1452        assert_eq!(scores[usize::from(grm.rule_idx("E").unwrap())], 1);
1453    }
1454
1455    #[test]
1456    fn test_min_sentences() {
1457        let grm = YaccGrammar::new(
1458            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1459            "
1460            %start A
1461            %%
1462            A: A B | ;
1463            B: C | D;
1464            C: 'x' B | 'x';
1465            D: 'y' B | 'y' 'z';
1466          ",
1467        )
1468        .unwrap();
1469
1470        let sg = grm.sentence_generator(|_| 1);
1471
1472        let find = |nt_name: &str, str_cnds: Vec<Vec<&str>>| {
1473            let cnds = str_cnds
1474                .iter()
1475                .map(|x| {
1476                    x.iter()
1477                        .map(|y| grm.token_idx(y).unwrap())
1478                        .collect::<Vec<_>>()
1479                })
1480                .collect::<Vec<_>>();
1481
1482            let ms = sg.min_sentence(grm.rule_idx(nt_name).unwrap());
1483            if !cnds.iter().any(|x| x == &ms) {
1484                panic!("{:?} doesn't have any matches in {:?}", ms, str_cnds);
1485            }
1486
1487            let min_sts = sg.min_sentences(grm.rule_idx(nt_name).unwrap());
1488            assert_eq!(cnds.len(), min_sts.len());
1489            for ms in min_sts {
1490                if !cnds.iter().any(|x| x == &ms) {
1491                    panic!("{:?} doesn't have any matches in {:?}", ms, str_cnds);
1492                }
1493            }
1494        };
1495
1496        find("A", vec![vec![]]);
1497        find("B", vec![vec!["x"]]);
1498        find("C", vec![vec!["x"]]);
1499        find("D", vec![vec!["y", "x"], vec!["y", "z"]]);
1500    }
1501
1502    #[test]
1503    #[rustfmt::skip]
1504    fn test_rule_max_costs1() {
1505        let grm = YaccGrammar::new(
1506            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1507            "
1508            %start A
1509            %%
1510            A: A B | ;
1511            B: C | D | E;
1512            C: 'x' B | 'x';
1513            D: 'y' B | 'y' 'z';
1514            E: 'x' A | 'x' 'y';
1515          "
1516        ).unwrap();
1517
1518        let scores = rule_max_costs(&grm, &[1, 1, 1]);
1519        assert_eq!(scores[usize::from(grm.rule_idx("A").unwrap())], u16::MAX);
1520        assert_eq!(scores[usize::from(grm.rule_idx("B").unwrap())], u16::MAX);
1521        assert_eq!(scores[usize::from(grm.rule_idx("C").unwrap())], u16::MAX);
1522        assert_eq!(scores[usize::from(grm.rule_idx("D").unwrap())], u16::MAX);
1523        assert_eq!(scores[usize::from(grm.rule_idx("E").unwrap())], u16::MAX);
1524    }
1525
1526    #[test]
1527    #[rustfmt::skip]
1528    fn test_rule_max_costs2() {
1529        let grm = YaccGrammar::new(
1530            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1531            "
1532            %start A
1533            %%
1534            A: A B | B;
1535            B: C | D;
1536            C: 'x' 'y' | 'x';
1537            D: 'y' 'x' | 'y' 'x' 'z';
1538          "
1539        ).unwrap();
1540
1541        let scores = rule_max_costs(&grm, &[1, 1, 1]);
1542        assert_eq!(scores[usize::from(grm.rule_idx("A").unwrap())], u16::MAX);
1543        assert_eq!(scores[usize::from(grm.rule_idx("B").unwrap())], 3);
1544        assert_eq!(scores[usize::from(grm.rule_idx("C").unwrap())], 2);
1545        assert_eq!(scores[usize::from(grm.rule_idx("D").unwrap())], 3);
1546    }
1547
1548    #[test]
1549    fn test_out_of_order_productions() {
1550        // Example taken from p54 of Locally least-cost error repair in LR parsers, Carl Cerecke
1551        let grm = YaccGrammar::new(
1552            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1553            "
1554            %start S
1555            %%
1556            S: A 'c' 'd'
1557             | B 'c' 'e';
1558            A: 'a';
1559            B: 'a'
1560             | 'b';
1561            A: 'b';
1562            ",
1563        )
1564        .unwrap();
1565
1566        assert_eq!(
1567            &*grm.prods_rules,
1568            &[
1569                RIdx(1),
1570                RIdx(1),
1571                RIdx(2),
1572                RIdx(3),
1573                RIdx(3),
1574                RIdx(2),
1575                RIdx(0)
1576            ]
1577        );
1578    }
1579
1580    #[test]
1581    fn test_token_spans() {
1582        let src = "%%\nAB: 'a' | 'foo';";
1583        let grm =
1584            YaccGrammar::new(YaccKind::Original(YaccOriginalActionKind::NoAction), src).unwrap();
1585        let token_map = grm.tokens_map();
1586        let a_tidx = token_map.get("a");
1587        let foo_tidx = token_map.get("foo");
1588        let a_span = grm.token_span(*a_tidx.unwrap()).unwrap();
1589        let foo_span = grm.token_span(*foo_tidx.unwrap()).unwrap();
1590        let ab_span = grm.rule_name_span(grm.rule_idx("AB").unwrap());
1591        assert_eq!(a_span, Span::new(8, 9));
1592        assert_eq!(foo_span, Span::new(14, 17));
1593        assert_eq!(ab_span, Span::new(3, 5));
1594        assert_eq!(&src[a_span.start()..a_span.end()], "a");
1595        assert_eq!(&src[foo_span.start()..foo_span.end()], "foo");
1596        assert_eq!(&src[ab_span.start()..ab_span.end()], "AB");
1597    }
1598
1599    #[test]
1600    fn token_span_issue296() {
1601        let src = "%%
1602                   S: | AB;
1603                   A: 'a' 'b';
1604                   B: 'b' 'c';
1605                   AB: A AB | B ';' AB;
1606                   %%
1607                   ";
1608        let grm =
1609            YaccGrammar::new(YaccKind::Original(YaccOriginalActionKind::NoAction), src).unwrap();
1610        let token_map = grm.tokens_map();
1611        let c_tidx = token_map.get("c").unwrap();
1612        assert_eq!(grm.token_name(*c_tidx), Some("c"));
1613        let c_span = grm.token_span(*c_tidx).unwrap();
1614        assert_eq!(&src[c_span.start()..c_span.end()], "c");
1615    }
1616
1617    #[test]
1618    fn test_grmtools_section_yacckinds() {
1619        let srcs = [
1620            "%grmtools{yacckind: Original(NoAction)}
1621                 %%
1622                 Start: ;",
1623            "%grmtools{yacckind: YaccKind::Original(GenericParseTree)}
1624                 %%
1625                 Start: ;",
1626            "%grmtools{yacckind: YaccKind::Original(yaccoriginalactionkind::useraction)}
1627                 %actiontype ()
1628                 %%
1629                 Start: ;",
1630            "%grmtools{yacckind: Original(YACCOriginalActionKind::NoAction)}
1631                 %%
1632                 Start: ;",
1633            "%grmtools{yacckind: YaccKind::Grmtools}
1634                 %%
1635                 Start -> () : ;",
1636        ];
1637        for src in srcs {
1638            YaccGrammar::<u32>::from_str(src).unwrap();
1639        }
1640    }
1641
1642    #[test]
1643    fn test_grmtools_section_invalid_yacckinds() {
1644        let srcs = [
1645            "%grmtools{yacckind: Foo}",
1646            "%grmtools{yacckind: YaccKind::Foo}",
1647            "%grmtools{yacckindof: YaccKind::Grmtools}",
1648            "%grmtools{yacckindof: Grmtools}",
1649            "%grmtools{yacckindof: YaccKindFoo::Foo}",
1650            "%grmtools{yacckind: Foo::Grmtools}",
1651            "%grmtools{yacckind: YaccKind::Original}",
1652            "%grmtools{yacckind: YaccKind::OriginalFoo}",
1653            "%grmtools{yacckind: YaccKind::Original()}",
1654            "%grmtools{yacckind: YaccKind::Original(Foo)}",
1655            "%grmtools{yacckind: YaccKind::Original(YaccOriginalActionKind)}",
1656            "%grmtools{yacckind: YaccKind::Original(YaccOriginalActionKind::Foo)}",
1657            "%grmtools{yacckind: YaccKind::Original(Foo::NoActions)}",
1658            "%grmtools{yacckind: YaccKind::Original(Foo::NoActionsBar)}",
1659        ];
1660
1661        for src in srcs {
1662            let s = format!("{}\n%%\nStart();\n", src);
1663            assert!(YaccGrammar::<u32>::from_str(&s).is_err());
1664        }
1665    }
1666
1667    #[test]
1668    fn test_grmtools_section_commas() {
1669        // We can't actually test much here, because
1670        // We don't have a second value to test.
1671        //
1672        // `RecoveryKind` seemed like an option for an additional value to allow,
1673        // but that is part of `lrpar` which cfgrammar doesn't depend upon.
1674        let src = r#"
1675                %grmtools{
1676                    yacckind: YaccKind::Grmtools,
1677                }
1678                %%
1679                Start -> () : ;
1680            "#;
1681        YaccGrammar::<u32>::from_str(src).unwrap();
1682        let src = r#"
1683                %grmtools{
1684                    yacckind: YaccKind::Grmtools
1685                }
1686                %%
1687                Start -> () : ;
1688            "#;
1689        YaccGrammar::<u32>::from_str(src).unwrap();
1690    }
1691}