cfgrammar/yacc/
grammar.rs

1#![allow(clippy::derive_partial_eq_without_eq)]
2use std::{cell::RefCell, collections::HashMap, fmt::Write, str::FromStr};
3
4#[cfg(feature = "bincode")]
5use bincode::{Decode, Encode};
6use num_traits::{AsPrimitive, PrimInt, Unsigned};
7#[cfg(feature = "serde")]
8use serde::{Deserialize, Serialize};
9use vob::Vob;
10
11use super::{
12    YaccKind, ast,
13    firsts::YaccFirsts,
14    follows::YaccFollows,
15    parser::{YaccGrammarError, YaccGrammarResult},
16};
17use crate::{PIdx, RIdx, SIdx, Span, Symbol, TIdx};
18
// Names reserved for rules that cfgrammar synthesises internally. If a user grammar
// already uses one of these names, the string is repeatedly appended to itself until a
// unique name is found (see `new_from_ast_with_validity_info`).
const START_RULE: &str = "^";
const IMPLICIT_RULE: &str = "~";
const IMPLICIT_START_RULE: &str = "^~";

/// The integer type used to store a precedence level.
pub type PrecedenceLevel = u64;
/// The precedence of a token or production: a level paired with an associativity.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
pub struct Precedence {
    /// The precedence level.
    pub level: PrecedenceLevel,
    /// The associativity of this precedence.
    pub kind: AssocKind,
}
31
/// The kinds of associativity a precedence can have.
#[derive(Clone, Copy, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
pub enum AssocKind {
    /// Left associative.
    Left,
    /// Right associative.
    Right,
    /// Non-associative.
    Nonassoc,
}
40
/// Representation of a `YaccGrammar`. See the [top-level documentation](../../index.html) for the
/// guarantees this struct makes about rules, tokens, productions, and symbols.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "bincode", derive(Encode))]
pub struct YaccGrammar<StorageT = u32> {
    /// How many rules does this grammar have?
    rules_len: RIdx<StorageT>,
    /// A mapping from `RIdx` -> `(String, Span)`.
    rule_names: Box<[(String, Span)]>,
    /// A mapping from `TIdx` -> `Option<(Span, String)>`. Every user-specified token will have a name,
    /// but tokens inserted by cfgrammar (e.g. the EOF token) won't.
    token_names: Box<[Option<(Span, String)>]>,
    /// A mapping from `TIdx` -> `Option<Precedence>`
    token_precs: Box<[Option<Precedence>]>,
    /// A mapping from `TIdx` -> `Option<String>` for the %epp declaration, giving pretty-printed
    /// versions of token names that can be presented to the user in case of an error. Every
    /// user-specified token will have a name that can be presented to the user (if a token doesn't
    /// have an %epp entry, the token name will be used in lieu), but tokens inserted by cfgrammar
    /// (e.g. the EOF token) won't.
    token_epp: Box<[Option<String>]>,
    /// How many tokens does this grammar have?
    tokens_len: TIdx<StorageT>,
    /// The offset of the EOF token.
    eof_token_idx: TIdx<StorageT>,
    /// How many productions does this grammar have?
    prods_len: PIdx<StorageT>,
    /// Which production is the sole production of the start rule?
    start_prod: PIdx<StorageT>,
    /// A list of all productions.
    prods: Box<[Box<[Symbol<StorageT>]>]>,
    /// A mapping from rules to their productions. Note that 1) the order of rules is identical to
    /// that of `rule_names` 2) every rule will have at least 1 production 3) productions
    /// are not necessarily stored sequentially.
    rules_prods: Box<[Box<[PIdx<StorageT>]>]>,
    /// A mapping from productions to their corresponding rule indexes.
    prods_rules: Box<[RIdx<StorageT>]>,
    /// The precedence of each production.
    prod_precs: Box<[Option<Precedence>]>,
    /// The index of the rule added for implicit tokens, if they were specified; otherwise
    /// `None`.
    implicit_rule: Option<RIdx<StorageT>>,
    /// User defined Rust programs which can be called within actions
    actions: Box<[Option<String>]>,
    /// A `(name, type)` pair defining an extra parameter to pass to action functions.
    parse_param: Option<(String, String)>,
    /// Generic parameters (types and lifetimes) to pass to action functions.
    parse_generics: Option<String>,
    /// The programs part of the grammar (if any).
    programs: Option<String>,
    /// The actiontypes of rules (one per rule).
    actiontypes: Box<[Option<String>]>,
    /// Tokens marked as %avoid_insert (if any).
    avoid_insert: Option<Vob>,
    /// How many shift/reduce conflicts the grammar author expected (if any).
    expect: Option<usize>,
    /// How many reduce/reduce conflicts the grammar author expected (if any).
    expectrr: Option<usize>,
}
99
// The implementations of `Decode` and `BorrowDecode` below
// contain a modified copy of the output of `cargo expand` on the derivation:
// #[cfg_attr(feature = "bincode", derive(Decode))]
//
// The current version of bincode has bugs in its derive macro implementation
105// https://github.com/bincode-org/bincode/issues/763
106// https://github.com/bincode-org/bincode/issues/646
107//
108// Once those are fixed, we can replace this with derive.
109
#[cfg(feature = "bincode")]
impl<StorageT, __Context> Decode<__Context> for YaccGrammar<StorageT>
where
    StorageT: Decode<__Context> + 'static,
{
    fn decode<__D: bincode::de::Decoder<Context = __Context>>(
        decoder: &mut __D,
    ) -> Result<Self, bincode::error::DecodeError> {
        // Fields are decoded in declaration order; this must match the field order
        // produced by the `Encode` derive on `YaccGrammar`.
        Ok(Self {
            rules_len: Decode::decode(decoder)?,
            rule_names: Decode::decode(decoder)?,
            token_names: Decode::decode(decoder)?,
            token_precs: Decode::decode(decoder)?,
            token_epp: Decode::decode(decoder)?,
            tokens_len: Decode::decode(decoder)?,
            eof_token_idx: Decode::decode(decoder)?,
            prods_len: Decode::decode(decoder)?,
            start_prod: Decode::decode(decoder)?,
            prods: Decode::decode(decoder)?,
            rules_prods: Decode::decode(decoder)?,
            prods_rules: Decode::decode(decoder)?,
            prod_precs: Decode::decode(decoder)?,
            implicit_rule: Decode::decode(decoder)?,
            actions: Decode::decode(decoder)?,
            parse_param: Decode::decode(decoder)?,
            parse_generics: Decode::decode(decoder)?,
            programs: Decode::decode(decoder)?,
            actiontypes: Decode::decode(decoder)?,
            avoid_insert: Decode::decode(decoder)?,
            expect: Decode::decode(decoder)?,
            expectrr: Decode::decode(decoder)?,
        })
    }
}
144
// Eventually this should just be provided through:
// #[cfg_attr(feature = "bincode", derive(Decode))]
//
// See comment at the corresponding bincode::Decode impl.
#[cfg(feature = "bincode")]
impl<'__de, StorageT, __Context> bincode::BorrowDecode<'__de, __Context> for YaccGrammar<StorageT>
where
    StorageT: bincode::de::BorrowDecode<'__de, __Context> + '__de,
{
    fn borrow_decode<__D: ::bincode::de::BorrowDecoder<'__de, Context = __Context>>(
        decoder: &mut __D,
    ) -> Result<Self, bincode::error::DecodeError> {
        // Fields are decoded in declaration order; this must match the field order
        // produced by the `Encode` derive on `YaccGrammar`. All paths are uniformly
        // fully-qualified (`::bincode::...`), matching the `cargo expand` output.
        Ok(Self {
            rules_len: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            rule_names: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            token_names: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            token_precs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            token_epp: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            tokens_len: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            eof_token_idx: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prods_len: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            start_prod: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prods: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            rules_prods: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prods_rules: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            prod_precs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            implicit_rule: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            actions: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            parse_param: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            parse_generics: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            programs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            actiontypes: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            avoid_insert: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            expect: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
            expectrr: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?,
        })
    }
}
183
184// Internally, we assume that a grammar's start rule has a single production. Since we manually
185// create the start rule ourselves (without relying on user input), this is a safe assumption.
186
187impl YaccGrammar<u32> {
188    pub fn new(yk: YaccKind, s: &str) -> YaccGrammarResult<Self> {
189        YaccGrammar::new_with_storaget(yk, s)
190    }
191}
192
impl<StorageT: 'static + PrimInt + Unsigned> FromStr for YaccGrammar<StorageT>
where
    usize: AsPrimitive<StorageT>,
{
    type Err = Vec<YaccGrammarError>;
    /// Build a `YaccGrammar` from the grammar source `s`, first parsing and validating it
    /// into an AST. On failure, every error collected so far is returned as a `Vec`.
    fn from_str(s: &str) -> YaccGrammarResult<Self> {
        let ast_validation = ast::ASTWithValidityInfo::from_str(s)?;
        Self::new_from_ast_with_validity_info(&ast_validation)
    }
}
203
204impl<StorageT: 'static + PrimInt + Unsigned> YaccGrammar<StorageT>
205where
206    usize: AsPrimitive<StorageT>,
207{
    /// Takes as input a Yacc grammar of [`YaccKind`](enum.YaccKind.html) as a `String` `s` and returns a
    /// [`YaccGrammar`](grammar/struct.YaccGrammar.html) (or
    /// [`YaccGrammarError`](grammar/enum.YaccGrammarError.html)s on error).
    ///
    /// As we're compiling the `YaccGrammar`, we add a new start rule (which we'll refer to as `^`,
    /// though the actual name is a fresh name that is guaranteed to be unique) that references the
    /// user defined start rule.
    pub fn new_with_storaget(yk: YaccKind, s: &str) -> YaccGrammarResult<Self> {
        let ast_validation = ast::ASTWithValidityInfo::new(yk, s);
        Self::new_from_ast_with_validity_info(&ast_validation)
    }
219
    /// Build a `YaccGrammar` from a pre-parsed (and possibly invalid) AST. If
    /// `ast_validation` is invalid, its accumulated errors are returned unchanged.
    ///
    /// # Panics
    ///
    /// If `StorageT` is too small to index this grammar's rules, tokens, productions, or
    /// the symbols of any single production.
    pub fn new_from_ast_with_validity_info(
        ast_validation: &ast::ASTWithValidityInfo,
    ) -> YaccGrammarResult<Self> {
        if !ast_validation.is_valid() {
            return Err(ast_validation.errors().to_owned());
        }
        let ast = ast_validation.ast();
        // Check that StorageT is big enough to hold RIdx/PIdx/SIdx/TIdx values; after these
        // checks we can guarantee that things like RIdx(ast.rules.len().as_()) are safe.
        if ast.rules.len() > num_traits::cast(StorageT::max_value()).unwrap() {
            panic!("StorageT is not big enough to store this grammar's rules.");
        }
        if ast.tokens.len() > num_traits::cast(StorageT::max_value()).unwrap() {
            panic!("StorageT is not big enough to store this grammar's tokens.");
        }
        if ast.prods.len() > num_traits::cast(StorageT::max_value()).unwrap() {
            panic!("StorageT is not big enough to store this grammar's productions.");
        }
        for p in &ast.prods {
            if p.symbols.len() > num_traits::cast(StorageT::max_value()).unwrap() {
                panic!(
                    "StorageT is not big enough to store the symbols of at least one of this grammar's productions."
                );
            }
        }

        let mut rule_names: Vec<(String, Span)> = Vec::with_capacity(ast.rules.len() + 1);

        // Generate a guaranteed unique start rule name. We simply keep making the string longer
        // until we've hit something unique (at the very worst, this will require looping for as
        // many times as there are rules). We use the same technique later for unique end
        // token and whitespace names.
        let mut start_rule = START_RULE.to_string();
        while ast.rules.get(&start_rule).is_some() {
            start_rule += START_RULE;
        }
        rule_names.push((start_rule.clone(), Span::new(0, 0)));

        let implicit_rule;
        let implicit_start_rule;
        match ast_validation.yacc_kind() {
            YaccKind::Original(_) | YaccKind::Grmtools => {
                implicit_rule = None;
                implicit_start_rule = None;
            }
            YaccKind::Eco => {
                if ast.implicit_tokens.is_some() {
                    // Generate unique names for the implicit rule (`~`) and the
                    // intermediate start rule (`^~`), using the same lengthening
                    // technique as for the start rule above.
                    let mut n1 = IMPLICIT_RULE.to_string();
                    while ast.rules.get(&n1).is_some() {
                        n1 += IMPLICIT_RULE;
                    }
                    rule_names.push((n1.clone(), Span::new(0, 0)));
                    implicit_rule = Some(n1);
                    let mut n2 = IMPLICIT_START_RULE.to_string();
                    while ast.rules.get(&n2).is_some() {
                        n2 += IMPLICIT_START_RULE;
                    }
                    rule_names.push((n2.clone(), Span::new(0, 0)));
                    implicit_start_rule = Some(n2);
                } else {
                    implicit_rule = None;
                    implicit_start_rule = None;
                }
            }
        };

        for (
            k,
            ast::Rule {
                name: (_, name_span),
                ..
            },
        ) in &ast.rules
        {
            rule_names.push((k.clone(), *name_span));
        }
        let mut rules_prods: Vec<Vec<PIdx<StorageT>>> = Vec::with_capacity(rule_names.len());
        let mut rule_map = HashMap::<String, RIdx<StorageT>>::new();
        for (i, (v, _)) in rule_names.iter().enumerate() {
            rules_prods.push(Vec::new());
            rule_map.insert(v.clone(), RIdx(i.as_()));
        }

        let mut token_names: Vec<Option<(Span, String)>> = Vec::with_capacity(ast.tokens.len() + 1);
        let mut token_precs: Vec<Option<Precedence>> = Vec::with_capacity(ast.tokens.len() + 1);
        let mut token_epp: Vec<Option<String>> = Vec::with_capacity(ast.tokens.len() + 1);
        for (i, k) in ast.tokens.iter().enumerate() {
            token_names.push(Some((ast.spans[i], k.clone())));
            token_precs.push(ast.precs.get(k).map(|(prec, _)| prec).cloned());
            token_epp.push(Some(
                ast.epp.get(k).map(|(_, (s, _))| s).unwrap_or(k).clone(),
            ));
        }
        // The EOF token is inserted by cfgrammar itself, so it has no name, precedence,
        // or %epp entry.
        let eof_token_idx = TIdx(token_names.len().as_());
        token_names.push(None);
        token_precs.push(None);
        token_epp.push(None);
        let mut token_map = HashMap::<String, TIdx<StorageT>>::new();
        for (i, v) in token_names.iter().enumerate() {
            if let Some((_, n)) = v.as_ref() {
                token_map.insert(n.clone(), TIdx(i.as_()));
            }
        }

        // In order to avoid fiddling about with production indices from the AST, we simply map
        // them 1:1 to grammar indices. That means that any new productions are added to the *end*
        // of the list of productions.
        let mut prods = vec![None; ast.prods.len()];
        let mut prod_precs: Vec<Option<Option<Precedence>>> = vec![None; ast.prods.len()];
        let mut prods_rules = vec![None; ast.prods.len()];
        let mut actions = vec![None; ast.prods.len()];
        let mut actiontypes = vec![None; rule_names.len()];
        let (start_name, _) = ast.start.as_ref().unwrap();
        for (astrulename, _) in &rule_names {
            let ridx = rule_map[astrulename];
            if astrulename == &start_rule {
                // Add the special start rule which has a single production which references a
                // single rule.
                rules_prods[usize::from(ridx)].push(PIdx(prods.len().as_()));
                let start_prod = match implicit_start_rule {
                    None => {
                        // Add ^: S;
                        vec![Symbol::Rule(rule_map[start_name])]
                    }
                    Some(ref s) => {
                        // An implicit rule has been specified, so the special start rule
                        // needs to reference the intermediate start rule required. Therefore add:
                        //   ^: ^~;
                        vec![Symbol::Rule(rule_map[s])]
                    }
                };
                prods.push(Some(start_prod));
                prod_precs.push(Some(None));
                prods_rules.push(Some(ridx));
                actions.push(None);
                continue;
            } else if implicit_start_rule.as_ref() == Some(astrulename) {
                // Add the intermediate start rule (handling implicit tokens at the beginning of
                // the file):
                //   ^~: ~ S;
                rules_prods[usize::from(rule_map[astrulename])].push(PIdx(prods.len().as_()));
                prods.push(Some(vec![
                    Symbol::Rule(rule_map[implicit_rule.as_ref().unwrap()]),
                    Symbol::Rule(rule_map[start_name]),
                ]));
                prod_precs.push(Some(None));
                prods_rules.push(Some(ridx));
                continue;
            } else if implicit_rule.as_ref() == Some(astrulename) {
                // Add the implicit rule: ~: "IMPLICIT_TOKEN_1" ~ | ... | "IMPLICIT_TOKEN_N" ~ | ;
                let implicit_prods = &mut rules_prods[usize::from(rule_map[astrulename])];
                // Add a production for each implicit token
                for (t, _) in ast.implicit_tokens.as_ref().unwrap().iter() {
                    implicit_prods.push(PIdx(prods.len().as_()));
                    prods.push(Some(vec![Symbol::Token(token_map[t]), Symbol::Rule(ridx)]));
                    prod_precs.push(Some(None));
                    prods_rules.push(Some(ridx));
                }
                // Add an empty production
                implicit_prods.push(PIdx(prods.len().as_()));
                prods.push(Some(vec![]));
                prod_precs.push(Some(None));
                prods_rules.push(Some(ridx));
                continue;
            } else {
                actiontypes[usize::from(ridx)] = ast.rules[astrulename].actiont.clone();
            }

            let rule = &mut rules_prods[usize::from(ridx)];
            for &pidx in &ast.rules[astrulename].pidxs {
                let astprod = &ast.prods[pidx];
                let mut prod = Vec::with_capacity(astprod.symbols.len());
                for astsym in &astprod.symbols {
                    match *astsym {
                        ast::Symbol::Rule(ref n, _) => {
                            prod.push(Symbol::Rule(rule_map[n]));
                        }
                        ast::Symbol::Token(ref n, _) => {
                            prod.push(Symbol::Token(token_map[n]));
                            // In Eco mode, every token is followed by a reference to the
                            // implicit rule so implicit tokens can appear between symbols.
                            if let Some(implicit_rule) = &implicit_rule {
                                prod.push(Symbol::Rule(rule_map[implicit_rule]));
                            }
                        }
                    };
                }
                // A production's precedence is either declared explicitly (%prec) or
                // inherited from its last token, if that token has a precedence.
                let mut prec = None;
                if let Some(ref n) = astprod.precedence {
                    prec = Some(ast.precs[n]);
                } else {
                    for astsym in astprod.symbols.iter().rev() {
                        if let ast::Symbol::Token(ref n, _) = *astsym {
                            if let Some(p) = ast.precs.get(n) {
                                prec = Some(*p);
                            }
                            break;
                        }
                    }
                }
                (*rule).push(PIdx(pidx.as_()));
                prods[pidx] = Some(prod);
                prod_precs[pidx] = Some(prec.map(|(prec, _)| prec));
                prods_rules[pidx] = Some(ridx);
                if let Some(ref s) = astprod.action {
                    actions[pidx] = Some(s.clone());
                }
            }
        }

        let avoid_insert = if let Some(ai) = &ast.avoid_insert {
            let mut aiv = Vob::from_elem(false, token_names.len());
            for n in ai.keys() {
                aiv.set(usize::from(token_map[n]), true);
            }
            Some(aiv)
        } else {
            None
        };

        assert!(!token_names.is_empty());
        assert!(!rule_names.is_empty());
        Ok(YaccGrammar {
            rules_len: RIdx(rule_names.len().as_()),
            rule_names: rule_names.into_boxed_slice(),
            tokens_len: TIdx(token_names.len().as_()),
            eof_token_idx,
            token_names: token_names.into_boxed_slice(),
            token_precs: token_precs.into_boxed_slice(),
            token_epp: token_epp.into_boxed_slice(),
            prods_len: PIdx(prods.len().as_()),
            start_prod: rules_prods[usize::from(rule_map[&start_rule])][0],
            rules_prods: rules_prods
                .iter()
                .map(|x| x.iter().copied().collect())
                .collect(),
            prods_rules: prods_rules.into_iter().map(Option::unwrap).collect(),
            prods: prods
                .into_iter()
                .map(|x| x.unwrap().into_boxed_slice())
                .collect(),
            prod_precs: prod_precs.into_iter().map(Option::unwrap).collect(),
            implicit_rule: implicit_rule.map(|x| rule_map[&x]),
            actions: actions.into_boxed_slice(),
            parse_param: ast.parse_param.clone(),
            parse_generics: ast.parse_generics.clone(),
            programs: ast.programs.clone(),
            avoid_insert,
            actiontypes: actiontypes.into_boxed_slice(),
            expect: ast.expect.map(|(n, _)| n),
            expectrr: ast.expectrr.map(|(n, _)| n),
        })
    }
471
    /// How many productions does this grammar have? Note that this includes productions
    /// synthesised by cfgrammar (e.g. for the start rule), not just user-written ones.
    pub fn prods_len(&self) -> PIdx<StorageT> {
        self.prods_len
    }
476
477    /// Return an iterator which produces (in order from `0..self.prods_len()`) all this
478    /// grammar's valid `PIdx`s.
479    pub fn iter_pidxs(&self) -> impl Iterator<Item = PIdx<StorageT>> {
480        // We can use as_ safely, because we know that we're only generating integers from
481        // 0..self.rules_len() and, since rules_len() returns an RIdx<StorageT>, then by
482        // definition the integers we're creating fit within StorageT.
483        Box::new((0..usize::from(self.prods_len())).map(|x| PIdx(x.as_())))
484    }
485
    /// Get the sequence of symbols for production `pidx`. Panics if `pidx` doesn't exist.
    pub fn prod(&self, pidx: PIdx<StorageT>) -> &[Symbol<StorageT>] {
        // `&Box<[Symbol]>` deref-coerces to `&[Symbol]`.
        &self.prods[usize::from(pidx)]
    }
490
    /// How many symbols does production `pidx` have? Panics if `pidx` doesn't exist.
    /// (An empty production has length 0.)
    pub fn prod_len(&self, pidx: PIdx<StorageT>) -> SIdx<StorageT> {
        // Since we've already checked that StorageT can store all the symbols for every production
        // in the grammar, the call to as_ is safe.
        SIdx(self.prods[usize::from(pidx)].len().as_())
    }
497
    /// Return the rule index of the production `pidx`. Panics if `pidx` doesn't exist.
    /// Every production belongs to exactly one rule.
    pub fn prod_to_rule(&self, pidx: PIdx<StorageT>) -> RIdx<StorageT> {
        self.prods_rules[usize::from(pidx)]
    }
502
    /// Return the precedence of production `pidx` (where `None` indicates "no precedence specified").
    /// Panics if `pidx` doesn't exist.
    pub fn prod_precedence(&self, pidx: PIdx<StorageT>) -> Option<Precedence> {
        self.prod_precs[usize::from(pidx)]
    }
508
    /// Return the production index of the start rule's sole production (for Yacc grammars the
    /// start rule is defined to have precisely one production). The start rule here is the
    /// one cfgrammar inserted, not the user's `%start` rule.
    pub fn start_prod(&self) -> PIdx<StorageT> {
        self.start_prod
    }
514
    /// How many rules does this grammar have? Note that this includes rules synthesised
    /// by cfgrammar (e.g. the start rule), not just user-written ones.
    pub fn rules_len(&self) -> RIdx<StorageT> {
        self.rules_len
    }
519
520    /// Return an iterator which produces (in order from `0..self.rules_len()`) all this
521    /// grammar's valid `RIdx`s.
522    pub fn iter_rules(&self) -> impl Iterator<Item = RIdx<StorageT>> {
523        // We can use as_ safely, because we know that we're only generating integers from
524        // 0..self.rules_len() and, since rules_len() returns an RIdx<StorageT>, then by
525        // definition the integers we're creating fit within StorageT.
526        Box::new((0..usize::from(self.rules_len())).map(|x| RIdx(x.as_())))
527    }
528
    /// Return the productions for rule `ridx`. Panics if `ridx` doesn't exist.
    /// Every rule has at least one production.
    pub fn rule_to_prods(&self, ridx: RIdx<StorageT>) -> &[PIdx<StorageT>] {
        &self.rules_prods[usize::from(ridx)]
    }
533
    /// Return the name of rule `ridx`. Panics if `ridx` doesn't exist.
    #[deprecated(since = "0.13.0", note = "Please use rule_name_str instead")]
    pub fn rule_name(&self, ridx: RIdx<StorageT>) -> &str {
        // Deprecated alias for `rule_name_str`.
        self.rule_name_str(ridx)
    }
539
540    /// Return the name of rule `ridx`. Panics if `ridx` doesn't exist.
541    pub fn rule_name_str(&self, ridx: RIdx<StorageT>) -> &str {
542        let (name, _) = &self.rule_names[usize::from(ridx)];
543        name.as_str()
544    }
545
546    /// Return the span of rule `ridx`. Panics if `ridx` doesn't exist.
547    pub fn rule_name_span(&self, ridx: RIdx<StorageT>) -> Span {
548        let (_, span) = self.rule_names[usize::from(ridx)];
549        span
550    }
551
    /// Return the `RIdx` of the implicit rule if it exists, or `None` otherwise.
    pub fn implicit_rule(&self) -> Option<RIdx<StorageT>> {
        self.implicit_rule
    }
556
557    /// Return the index of the rule named `n` or `None` if it doesn't exist.
558    pub fn rule_idx(&self, n: &str) -> Option<RIdx<StorageT>> {
559        self.rule_names
560            .iter()
561            .position(|(x, _)| x == n)
562            // The call to as_() is safe because rule_names is guaranteed to be
563            // small enough to fit into StorageT
564            .map(|x| RIdx(x.as_()))
565    }
566
    /// What is the index of the start rule? Note that cfgrammar will have inserted at least one
    /// rule "above" the user's start rule.
    pub fn start_rule_idx(&self) -> RIdx<StorageT> {
        // The start rule is, by construction, the rule owning `start_prod`.
        self.prod_to_rule(self.start_prod)
    }
572
    /// How many tokens does this grammar have? Note that this includes the EOF token
    /// which cfgrammar inserts itself.
    pub fn tokens_len(&self) -> TIdx<StorageT> {
        self.tokens_len
    }
577
578    /// Return an iterator which produces (in order from `0..self.tokens_len()`) all this
579    /// grammar's valid `TIdx`s.
580    pub fn iter_tidxs(&self) -> impl Iterator<Item = TIdx<StorageT>> {
581        // We can use as_ safely, because we know that we're only generating integers from
582        // 0..self.rules_len() and, since rules_len() returns an TIdx<StorageT>, then by
583        // definition the integers we're creating fit within StorageT.
584        Box::new((0..usize::from(self.tokens_len())).map(|x| TIdx(x.as_())))
585    }
586
    /// Return the index of the end token (inserted by cfgrammar; it has no name,
    /// precedence, or %epp entry).
    pub fn eof_token_idx(&self) -> TIdx<StorageT> {
        self.eof_token_idx
    }
591
592    /// Return the name of token `tidx` (where `None` indicates "the rule has no name"). Panics if
593    /// `tidx` doesn't exist.
594    pub fn token_name(&self, tidx: TIdx<StorageT>) -> Option<&str> {
595        self.token_names[usize::from(tidx)]
596            .as_ref()
597            .map(|(_, x)| x.as_str())
598    }
599
    /// Return the precedence of token `tidx` (where `None` indicates "no precedence specified").
    /// Panics if `tidx` doesn't exist.
    pub fn token_precedence(&self, tidx: TIdx<StorageT>) -> Option<Precedence> {
        self.token_precs[usize::from(tidx)]
    }
605
    /// Return the %epp entry for token `tidx` (where `None` indicates "the token has no
    /// pretty-printed value"). Panics if `tidx` doesn't exist.
    pub fn token_epp(&self, tidx: TIdx<StorageT>) -> Option<&str> {
        self.token_epp[usize::from(tidx)].as_deref()
    }
611
    /// Return the span for token given by `tidx` if one exists.
    /// If `None`, the token is either implicit and not derived from a token
    /// in the source; otherwise the `YaccGrammar` itself may not be derived from a
    /// textual source, in which case the token may be explicit but still lack spans
    /// from its construction.
    pub fn token_span(&self, tidx: TIdx<StorageT>) -> Option<Span> {
        self.token_names[usize::from(tidx)]
            .as_ref()
            .map(|(span, _)| *span)
    }
622
    /// Get the action for production `pidx`. Panics if `pidx` doesn't exist.
    /// `None` means the production has no user-specified action code.
    pub fn action(&self, pidx: PIdx<StorageT>) -> &Option<String> {
        &self.actions[usize::from(pidx)]
    }
627
    /// Get the action type of rule `ridx` (`None` if the rule has no action type).
    /// Panics if `ridx` doesn't exist.
    pub fn actiontype(&self, ridx: RIdx<StorageT>) -> &Option<String> {
        &self.actiontypes[usize::from(ridx)]
    }
631
    /// Return the `(name, type)` pair defining an extra parameter to pass to action
    /// functions, if one was specified.
    pub fn parse_param(&self) -> &Option<(String, String)> {
        &self.parse_param
    }
635
    /// Return the generic parameters (types and lifetimes) to pass to action functions,
    /// if any were specified.
    pub fn parse_generics(&self) -> &Option<String> {
        &self.parse_generics
    }
639
    /// Get the programs part of the grammar (`None` if there is none).
    pub fn programs(&self) -> &Option<String> {
        &self.programs
    }
644
645    /// Returns a map from names to `TIdx`s of all tokens that a lexer will need to generate valid
646    /// inputs from this grammar.
647    pub fn tokens_map(&self) -> HashMap<&str, TIdx<StorageT>> {
648        let mut m = HashMap::with_capacity(usize::from(self.tokens_len) - 1);
649        for tidx in self.iter_tidxs() {
650            if let Some((_, n)) = self.token_names[usize::from(tidx)].as_ref() {
651                m.insert(&**n, tidx);
652            }
653        }
654        m
655    }
656
657    /// Return the index of the token named `n` or `None` if it doesn't exist.
658    pub fn token_idx(&self, n: &str) -> Option<TIdx<StorageT>> {
659        self.token_names
660            .iter()
661            .position(|x| x.as_ref().is_some_and(|(_, x)| x == n))
662            // The call to as_() is safe because token_names is guaranteed to be small
663            // enough to fit into StorageT
664            .map(|x| TIdx(x.as_()))
665    }
666
667    /// Is the token `tidx` marked as `%avoid_insert`?
668    pub fn avoid_insert(&self, tidx: TIdx<StorageT>) -> bool {
669        if let Some(ai) = &self.avoid_insert {
670            ai.get(usize::from(tidx)).unwrap()
671        } else {
672            false
673        }
674    }
675
    /// How many shift/reduce conflicts did the grammar author expect (if any)?
    pub fn expect(&self) -> Option<usize> {
        self.expect
    }
680
    /// How many reduce/reduce conflicts were expected (i.e. the value of `%expect-rr`, if given)?
    pub fn expectrr(&self) -> Option<usize> {
        self.expectrr
    }
685
686    /// Is there a path from the `from` rule to the `to` rule? Note that recursive rules
687    /// return `true` for a path from themselves to themselves.
688    pub fn has_path(&self, from: RIdx<StorageT>, to: RIdx<StorageT>) -> bool {
689        let mut seen = vec![];
690        seen.resize(usize::from(self.rules_len()), false);
691        let mut todo = vec![];
692        todo.resize(usize::from(self.rules_len()), false);
693        todo[usize::from(from)] = true;
694        loop {
695            let mut empty = true;
696            for ridx in self.iter_rules() {
697                if !todo[usize::from(ridx)] {
698                    continue;
699                }
700                seen[usize::from(ridx)] = true;
701                todo[usize::from(ridx)] = false;
702                empty = false;
703                for pidx in self.rule_to_prods(ridx).iter() {
704                    for sym in self.prod(*pidx) {
705                        if let Symbol::Rule(p_ridx) = *sym {
706                            if p_ridx == to {
707                                return true;
708                            }
709                            if !seen[usize::from(p_ridx)] {
710                                todo[usize::from(p_ridx)] = true;
711                            }
712                        }
713                    }
714                }
715            }
716            if empty {
717                return false;
718            }
719        }
720    }
721
722    /// Returns the string representation of a given production `pidx`.
723    pub fn pp_prod(&self, pidx: PIdx<StorageT>) -> String {
724        let mut sprod = String::new();
725        let ridx = self.prod_to_rule(pidx);
726        sprod.push_str(self.rule_name_str(ridx));
727        sprod.push(':');
728        for sym in self.prod(pidx) {
729            let s = match sym {
730                Symbol::Token(tidx) => self.token_name(*tidx).unwrap(),
731                Symbol::Rule(ridx) => self.rule_name_str(*ridx),
732            };
733            write!(sprod, " \"{}\"", s).ok();
734        }
735        sprod
736    }
737
    /// Return a `SentenceGenerator` which can then generate minimal sentences for any rule
    /// based on the user-defined `token_cost` function which gives the associated cost for
    /// generating each token (where the cost must be greater than 0). Note that multiple
    /// tokens can have the same score. The simplest cost function is thus `|_| 1`.
    ///
    /// The returned generator computes per-rule costs lazily and caches them, so
    /// repeated queries are cheap.
    pub fn sentence_generator<F>(&self, token_cost: F) -> SentenceGenerator<StorageT>
    where
        F: Fn(TIdx<StorageT>) -> u8,
    {
        SentenceGenerator::new(self, token_cost)
    }
748
    /// Return a [`YaccFirsts`] struct for this grammar.
    pub fn firsts(&self) -> YaccFirsts<StorageT> {
        YaccFirsts::new(self)
    }
753
    /// Return a [`YaccFollows`] struct for this grammar.
    pub fn follows(&self) -> YaccFollows<StorageT> {
        YaccFollows::new(self)
    }
758}
759
/// A `SentenceGenerator` can generate minimal sentences for any given rule. e.g. for the
/// grammar:
///
/// ```text
/// %start A
/// %%
/// A: A B | ;
/// B: C | D;
/// C: 'x' B | 'x';
/// D: 'y' B | 'y' 'z';
/// ```
///
/// the following are valid minimal sentences:
///
/// ```text
/// A: []
/// B: [x]
/// C: [x]
/// D: [y, x] or [y, z]
/// ```
pub struct SentenceGenerator<'a, StorageT> {
    grm: &'a YaccGrammar<StorageT>,
    // Per-rule minimal sentence costs, computed lazily on first use and then cached.
    rule_min_costs: RefCell<Option<Vec<u16>>>,
    // Per-rule maximal sentence costs, computed lazily on first use and then cached.
    // `u16::MAX` is the sentinel for "unbounded".
    rule_max_costs: RefCell<Option<Vec<u16>>>,
    // The cost of each token, indexed by `TIdx`.
    token_costs: Vec<u8>,
}
786
787impl<'a, StorageT: 'static + PrimInt + Unsigned> SentenceGenerator<'a, StorageT>
788where
789    usize: AsPrimitive<StorageT>,
790{
791    fn new<F>(grm: &'a YaccGrammar<StorageT>, token_cost: F) -> Self
792    where
793        F: Fn(TIdx<StorageT>) -> u8,
794    {
795        let mut token_costs = Vec::with_capacity(usize::from(grm.tokens_len()));
796        for tidx in grm.iter_tidxs() {
797            token_costs.push(token_cost(tidx));
798        }
799        SentenceGenerator {
800            grm,
801            token_costs,
802            rule_min_costs: RefCell::new(None),
803            rule_max_costs: RefCell::new(None),
804        }
805    }
806
807    /// What is the cost of a minimal sentence for the rule `ridx`? Note that, unlike
808    /// `min_sentence`, this function does not actually *build* a sentence and it is thus much
809    /// faster.
810    pub fn min_sentence_cost(&self, ridx: RIdx<StorageT>) -> u16 {
811        self.rule_min_costs
812            .borrow_mut()
813            .get_or_insert_with(|| rule_min_costs(self.grm, &self.token_costs))[usize::from(ridx)]
814    }
815
816    /// What is the cost of a maximal sentence for the rule `ridx`? Rules which can generate
817    /// sentences of unbounded length return None; rules which can only generate maximal strings of
818    /// a finite length return a `Some(u16)`.
819    pub fn max_sentence_cost(&self, ridx: RIdx<StorageT>) -> Option<u16> {
820        let v = self
821            .rule_max_costs
822            .borrow_mut()
823            .get_or_insert_with(|| rule_max_costs(self.grm, &self.token_costs))[usize::from(ridx)];
824        if v == u16::MAX { None } else { Some(v) }
825    }
826
827    /// Non-deterministically return a minimal sentence from the set of minimal sentences for the
828    /// rule `ridx`.
829    pub fn min_sentence(&self, ridx: RIdx<StorageT>) -> Vec<TIdx<StorageT>> {
830        let cheapest_prod = |p_ridx: RIdx<StorageT>| -> PIdx<StorageT> {
831            let mut low_sc = None;
832            let mut low_idx = None;
833            for &pidx in self.grm.rule_to_prods(p_ridx).iter() {
834                let mut sc = 0;
835                for sym in self.grm.prod(pidx).iter() {
836                    sc += match *sym {
837                        Symbol::Rule(i) => self.min_sentence_cost(i),
838                        Symbol::Token(i) => u16::from(self.token_costs[usize::from(i)]),
839                    };
840                }
841                if low_sc.is_none() || Some(sc) < low_sc {
842                    low_sc = Some(sc);
843                    low_idx = Some(pidx);
844                }
845            }
846            low_idx.unwrap()
847        };
848
849        let mut s = vec![];
850        let mut st = vec![(cheapest_prod(ridx), 0)];
851        while let Some((pidx, sym_idx)) = st.pop() {
852            let prod = self.grm.prod(pidx);
853            for (sidx, sym) in prod.iter().enumerate().skip(sym_idx) {
854                match sym {
855                    Symbol::Rule(s_ridx) => {
856                        st.push((pidx, sidx + 1));
857                        st.push((cheapest_prod(*s_ridx), 0));
858                    }
859                    Symbol::Token(s_tidx) => {
860                        s.push(*s_tidx);
861                    }
862                }
863            }
864        }
865        s
866    }
867
868    /// Return (in arbitrary order) all the minimal sentences for the rule `ridx`.
869    pub fn min_sentences(&self, ridx: RIdx<StorageT>) -> Vec<Vec<TIdx<StorageT>>> {
870        let cheapest_prods = |p_ridx: RIdx<StorageT>| -> Vec<PIdx<StorageT>> {
871            let mut low_sc = None;
872            let mut low_idxs = vec![];
873            for &pidx in self.grm.rule_to_prods(p_ridx).iter() {
874                let mut sc = 0;
875                for sym in self.grm.prod(pidx).iter() {
876                    sc += match *sym {
877                        Symbol::Rule(s_ridx) => self.min_sentence_cost(s_ridx),
878                        Symbol::Token(s_tidx) => u16::from(self.token_costs[usize::from(s_tidx)]),
879                    };
880                }
881                if low_sc.is_none() || Some(sc) <= low_sc {
882                    if Some(sc) < low_sc {
883                        low_idxs.clear();
884                    }
885                    low_sc = Some(sc);
886                    low_idxs.push(pidx);
887                }
888            }
889            low_idxs
890        };
891
892        let mut sts = Vec::new(); // Output sentences
893        for pidx in cheapest_prods(ridx) {
894            let prod = self.grm.prod(pidx);
895            if prod.is_empty() {
896                sts.push(vec![]);
897                continue;
898            }
899
900            // We construct the minimal sentences in two phases.
901            //
902            // First, for each symbol in the production, we gather all the possible minimal
903            // sentences for it. If, for the grammar:
904            //   X: 'a' Y
905            //   Y: 'b' | 'c'
906            // we ask for the minimal sentences of X's only production we'll end up with a vec of
907            // vecs as follows:
908            //   [[['a']], [['b'], ['c']]]
909
910            let mut ms = Vec::with_capacity(prod.len());
911            for sym in prod {
912                match *sym {
913                    Symbol::Rule(s_ridx) => ms.push(self.min_sentences(s_ridx)),
914                    Symbol::Token(s_tidx) => ms.push(vec![vec![s_tidx]]),
915                }
916            }
917
918            // Second, we need to generate all combinations of the gathered sentences. We do this
919            // by writing our own simple numeric incrementing scheme. If we rewrite the list from
920            // above as follows:
921            //
922            //      0 1 <- call this axis "i"
923            //   0: a b
924            //   1:   c
925            //   ^
926            //   |
927            //   call this axis "todo"
928            //
929            // this hopefully becomes easier to see. Let's call the list "ms": the combinations we
930            // need to generate are thus:
931            //
932            //   ms[0][0] + ms[1][0]  (i.e. 'ab')
933            //   ms[0][0] + ms[1][1]  (i.e. 'ac')
934            //
935            // The easiest way to model this is to have a list (todo) with each entry starting at
936            // 0. After each iteration around the loop (i) we add 1 to the last todo column's
937            // entry: if that spills over the length of the corresponding ms entry, then we reset
938            // that column to zero, and try adding 1 to the previous column (as many times as
939            // needed). If the first column spills, then we're done. This is basically normal
940            // arithmetic but with each digit having an arbitrary base.
941
942            let mut todo = vec![0; prod.len()];
943            let mut cur = Vec::new();
944            'b: loop {
945                for i in 0..todo.len() {
946                    cur.extend(&ms[i][todo[i]]);
947                }
948                sts.push(std::mem::take(&mut cur));
949
950                let mut j = todo.len() - 1;
951                loop {
952                    if todo[j] + 1 == ms[j].len() {
953                        if j == 0 {
954                            break 'b;
955                        }
956                        todo[j] = 0;
957                        j -= 1;
958                    } else {
959                        todo[j] += 1;
960                        break;
961                    }
962                }
963            }
964        }
965        sts
966    }
967}
968
/// Return the cost of a minimal string for each rule in this grammar. The cost of a
/// token is specified by the user-defined `token_cost` function.
#[allow(clippy::unnecessary_unwrap)]
fn rule_min_costs<StorageT: 'static + PrimInt + Unsigned>(
    grm: &YaccGrammar<StorageT>,
    token_costs: &[u8],
) -> Vec<u16>
where
    usize: AsPrimitive<StorageT>,
{
    // We use a simple(ish) fixed-point algorithm to determine costs. We maintain two lists
    // "costs" and "done". An integer costs[i] starts at 0 and monotonically increments
    // until done[i] is true, at which point costs[i] value is fixed. We also use the done
    // list as a simple "todo" list: whilst there is at least one false value in done, there is
    // still work to do.
    //
    // On each iteration of the loop, we examine each rule in the todo list to see if
    // we can get a better idea of its true cost. Some are trivial:
    //   * A rule with an empty production immediately has a cost of 0.
    //   * Rules whose productions don't reference any rules (i.e. only contain tokens) can be
    //     immediately given a cost by calculating the lowest-cost production.
    // However if a rule A references another rule B, we may need to wait until
    // we've fully analysed B before we can cost A. This might seem to cause problems with
    // recursive rules, so we introduce the concept of "incomplete costs" i.e. if a production
    // references a rule we can work out its minimum possible cost simply by counting
    // the production's token costs. Since rules can have a mix of complete and
    // incomplete productions, this is sometimes enough to allow us to assign a final cost to
    // a rule (if the lowest complete production's cost is lower than or equal to all
    // the lowest incomplete production's cost). This allows us to make progress, since it
    // means that we can iteratively improve our knowledge of a token's minimum cost:
    // eventually we will reach a point where we can determine it definitively.

    // costs[i] is rule i's best-known (monotonically increasing) lower bound; done[i] is
    // true once costs[i] is final.
    let mut costs = vec![0; usize::from(grm.rules_len())];
    let mut done = vec![false; usize::from(grm.rules_len())];
    loop {
        let mut all_done = true;
        for i in 0..done.len() {
            if done[i] {
                continue;
            }
            all_done = false;
            let mut ls_cmplt = None; // lowest completed cost
            let mut ls_noncmplt = None; // lowest non-completed cost

            // The call to as_() is guaranteed safe because done.len() == grm.rules_len(), and
            // we guarantee that grm.rules_len() can fit in StorageT.
            for pidx in grm.rule_to_prods(RIdx(i.as_())).iter() {
                let mut c: u16 = 0; // production cost
                let mut cmplt = true;
                for sym in grm.prod(*pidx) {
                    let sc = match *sym {
                        Symbol::Token(tidx) => u16::from(token_costs[usize::from(tidx)]),
                        Symbol::Rule(ridx) => {
                            // A not-yet-finalised referenced rule makes this production's
                            // cost a lower bound rather than an exact value.
                            if !done[usize::from(ridx)] {
                                cmplt = false;
                            }
                            costs[usize::from(ridx)]
                        }
                    };
                    c = c
                        .checked_add(sc)
                        .expect("Overflow occurred when calculating rule costs");
                }
                if cmplt && (ls_cmplt.is_none() || Some(c) < ls_cmplt) {
                    ls_cmplt = Some(c);
                } else if !cmplt && (ls_noncmplt.is_none() || Some(c) < ls_noncmplt) {
                    ls_noncmplt = Some(c);
                }
            }
            // The rule's cost is final when its cheapest complete production cannot be
            // beaten by any currently-incomplete production.
            if ls_cmplt.is_some() && (ls_noncmplt.is_none() || ls_cmplt < ls_noncmplt) {
                debug_assert!(ls_cmplt.unwrap() >= costs[i]);
                costs[i] = ls_cmplt.unwrap();
                done[i] = true;
            } else if let Some(ls_noncmplt) = ls_noncmplt {
                debug_assert!(ls_noncmplt >= costs[i]);
                costs[i] = ls_noncmplt;
            }
        }
        if all_done {
            debug_assert!(done.iter().all(|x| *x));
            break;
        }
    }
    costs
}
1054
1055/// Return the cost of the maximal string for each rule in this grammar (u32::max_val()
1056/// representing "this rule can generate strings of infinite length"). The cost of a
1057/// token is specified by the user-defined `token_cost` function.
1058#[allow(clippy::unnecessary_unwrap)]
1059fn rule_max_costs<StorageT: 'static + PrimInt + Unsigned>(
1060    grm: &YaccGrammar<StorageT>,
1061    token_costs: &[u8],
1062) -> Vec<u16>
1063where
1064    usize: AsPrimitive<StorageT>,
1065{
1066    let mut done = vec![false; usize::from(grm.rules_len())];
1067    let mut costs = vec![0; usize::from(grm.rules_len())];
1068
1069    // First mark all recursive rules.
1070    for ridx in grm.iter_rules() {
1071        // Calling has_path so frequently is not exactly efficient...
1072        if grm.has_path(ridx, ridx) {
1073            costs[usize::from(ridx)] = u16::MAX;
1074            done[usize::from(ridx)] = true;
1075        }
1076    }
1077
1078    loop {
1079        let mut all_done = true;
1080        for i in 0..done.len() {
1081            if done[i] {
1082                continue;
1083            }
1084            all_done = false;
1085            let mut hs_cmplt = None; // highest completed cost
1086            let mut hs_noncmplt = None; // highest non-completed cost
1087
1088            // The call to as_() is guaranteed safe because done.len() == grm.rules_len(), and
1089            // we guarantee that grm.rules_len() can fit in StorageT.
1090            'a: for pidx in grm.rule_to_prods(RIdx(i.as_())).iter() {
1091                let mut c: u16 = 0; // production cost
1092                let mut cmplt = true;
1093                for sym in grm.prod(*pidx) {
1094                    let sc = match *sym {
1095                        Symbol::Token(s_tidx) => u16::from(token_costs[usize::from(s_tidx)]),
1096                        Symbol::Rule(s_ridx) => {
1097                            if costs[usize::from(s_ridx)] == u16::MAX {
1098                                // As soon as we find reference to an infinite rule, we
1099                                // can stop looking.
1100                                hs_cmplt = Some(u16::MAX);
1101                                break 'a;
1102                            }
1103                            if !done[usize::from(s_ridx)] {
1104                                cmplt = false;
1105                            }
1106                            costs[usize::from(s_ridx)]
1107                        }
1108                    };
1109                    c = c
1110                        .checked_add(sc)
1111                        .expect("Overflow occurred when calculating rule costs");
1112                    if c == u16::MAX {
1113                        panic!("Unable to represent cost in 64 bits.");
1114                    }
1115                }
1116                if cmplt && (hs_cmplt.is_none() || Some(c) > hs_cmplt) {
1117                    hs_cmplt = Some(c);
1118                } else if !cmplt && (hs_noncmplt.is_none() || Some(c) > hs_noncmplt) {
1119                    hs_noncmplt = Some(c);
1120                }
1121            }
1122            if hs_cmplt.is_some() && (hs_noncmplt.is_none() || hs_cmplt > hs_noncmplt) {
1123                debug_assert!(hs_cmplt.unwrap() >= costs[i]);
1124                costs[i] = hs_cmplt.unwrap();
1125                done[i] = true;
1126            } else if let Some(hs_noncmplt) = hs_noncmplt {
1127                debug_assert!(hs_noncmplt >= costs[i]);
1128                costs[i] = hs_noncmplt;
1129            }
1130        }
1131        if all_done {
1132            debug_assert!(done.iter().all(|x| *x));
1133            break;
1134        }
1135    }
1136    costs
1137}
1138
1139#[cfg(test)]
1140mod test {
1141    use super::{
1142        super::{AssocKind, Precedence, YaccGrammar, YaccKind, YaccOriginalActionKind},
1143        IMPLICIT_RULE, IMPLICIT_START_RULE, rule_max_costs, rule_min_costs,
1144    };
1145    use crate::{PIdx, RIdx, Span, Symbol, TIdx};
1146    use std::collections::HashMap;
1147    use std::str::FromStr;
1148
1149    macro_rules! bslice {
1150        () => (
1151            ::Vec::new().into_boxed_slice()
1152        );
1153        ($elem:expr; $n:expr) => (
1154            ::vec::from_elem($elem, $n).into_boxed_slice()
1155        );
1156        ($($x:expr),+ $(,)?) => (
1157            <[_]>::into_vec(
1158                Box::new([$($x),+])
1159            ).into_boxed_slice()
1160        );
1161    }
1162
    #[test]
    fn test_minimal() {
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "%start R %token T %% R: 'T';",
        )
        .unwrap();

        // cfgrammar adds a synthetic start rule "^" whose sole production references
        // the user's %start rule; its production is added last, hence PIdx(1).
        assert_eq!(grm.start_prod, PIdx(1));
        assert_eq!(grm.implicit_rule(), None);
        grm.rule_idx("^").unwrap();
        grm.rule_idx("R").unwrap();
        grm.token_idx("T").unwrap();

        // Check the rule <-> production mappings in both directions.
        assert_eq!(&*grm.rules_prods, &[bslice![PIdx(1)], bslice![PIdx(0)]]);
        let start_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("^").unwrap())][0]);
        assert_eq!(*start_prod, [Symbol::Rule(grm.rule_idx("R").unwrap())]);
        let r_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("R").unwrap())][0]);
        assert_eq!(*r_prod, [Symbol::Token(grm.token_idx("T").unwrap())]);
        assert_eq!(&*grm.prods_rules, &[RIdx(1), RIdx(0)]);

        // Only the user's token "T" appears in tokens_map (the unnamed EOF token is
        // excluded).
        assert_eq!(
            grm.tokens_map(),
            [("T", TIdx(0))]
                .iter()
                .cloned()
                .collect::<HashMap<&str, TIdx<_>>>()
        );
        assert_eq!(grm.iter_rules().collect::<Vec<_>>(), vec![RIdx(0), RIdx(1)]);
    }
1193
    #[test]
    fn test_rule_ref() {
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "%start R %token T %% R : S; S: 'T';",
        )
        .unwrap();

        grm.rule_idx("^").unwrap();
        grm.rule_idx("R").unwrap();
        grm.rule_idx("S").unwrap();
        grm.token_idx("T").unwrap();
        // The EOF token is inserted by cfgrammar and has no name.
        assert!(grm.token_name(grm.eof_token_idx()).is_none());

        // The synthetic start rule's production is added last, hence PIdx(2).
        assert_eq!(
            &*grm.rules_prods,
            &[bslice![PIdx(2)], bslice![PIdx(0)], bslice![PIdx(1)]]
        );
        let start_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("^").unwrap())][0]);
        assert_eq!(*start_prod, [Symbol::Rule(grm.rule_idx("R").unwrap())]);
        // R's single production references rule S, which in turn produces token 'T'.
        let r_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("R").unwrap())][0]);
        assert_eq!(r_prod.len(), 1);
        assert_eq!(r_prod[0], Symbol::Rule(grm.rule_idx("S").unwrap()));
        let s_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("S").unwrap())][0]);
        assert_eq!(s_prod.len(), 1);
        assert_eq!(s_prod[0], Symbol::Token(grm.token_idx("T").unwrap()));
    }
1221
    #[test]
    #[rustfmt::skip]
    fn test_long_prod() {
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "%start R %token T1 T2 %% R : S 'T1' S; S: 'T2';"
        ).unwrap();

        grm.rule_idx("^").unwrap();
        grm.rule_idx("R").unwrap();
        grm.rule_idx("S").unwrap();
        grm.token_idx("T1").unwrap();
        grm.token_idx("T2").unwrap();

        // Check the rule <-> production mappings in both directions.
        assert_eq!(&*grm.rules_prods, &[bslice![PIdx(2)],
                                         bslice![PIdx(0)],
                                         bslice![PIdx(1)]]);
        assert_eq!(&*grm.prods_rules, &[RIdx(1),
                                         RIdx(2),
                                         RIdx(0)]);
        let start_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("^").unwrap())][0]);
        assert_eq!(*start_prod, [Symbol::Rule(grm.rule_idx("R").unwrap())]);
        // R's production has three symbols in source order: S 'T1' S.
        let r_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("R").unwrap())][0]);
        assert_eq!(r_prod.len(), 3);
        assert_eq!(r_prod[0], Symbol::Rule(grm.rule_idx("S").unwrap()));
        assert_eq!(r_prod[1], Symbol::Token(grm.token_idx("T1").unwrap()));
        assert_eq!(r_prod[2], Symbol::Rule(grm.rule_idx("S").unwrap()));
        let s_prod = grm.prod(grm.rules_prods[usize::from(grm.rule_idx("S").unwrap())][0]);
        assert_eq!(s_prod.len(), 1);
        assert_eq!(s_prod[0], Symbol::Token(grm.token_idx("T2").unwrap()));
    }
1253
1254    #[test]
1255    fn test_prods_rules() {
1256        let grm = YaccGrammar::new(
1257            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1258            "
1259            %start A
1260            %%
1261            A: B
1262             | C;
1263            B: 'x';
1264            C: 'y'
1265             | 'z';
1266          ",
1267        )
1268        .unwrap();
1269
1270        assert_eq!(
1271            &*grm.prods_rules,
1272            &[RIdx(1), RIdx(1), RIdx(2), RIdx(3), RIdx(3), RIdx(0)]
1273        );
1274    }
1275
    #[test]
    #[rustfmt::skip]
    fn test_left_right_nonassoc_precs() {
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "
            %start Expr
            %right '='
            %left '+' '-'
            %left '/'
            %left '*'
            %nonassoc '~'
            %%
            Expr : Expr '=' Expr
                 | Expr '+' Expr
                 | Expr '-' Expr
                 | Expr '/' Expr
                 | Expr '*' Expr
                 | Expr '~' Expr
                 | 'id' ;
          ").unwrap();

        // Seven user productions plus the synthesised start production.
        assert_eq!(grm.prod_precs.len(), 8);
        // Precedence levels are numbered from 0 upwards in declaration order: '=' is
        // level 0, '+'/'-' share level 1, and so on up to '~' at level 4.
        assert_eq!(grm.prod_precs[0].unwrap(), Precedence{level: 0, kind: AssocKind::Right});
        assert_eq!(grm.prod_precs[1].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[2].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[3].unwrap(), Precedence{level: 2, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[4].unwrap(), Precedence{level: 3, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[5].unwrap(), Precedence{level: 4, kind: AssocKind::Nonassoc});
        // The 'id' production and the start production reference no precedence token.
        assert!(grm.prod_precs[6].is_none());
        assert_eq!(grm.prod_precs[7], None);
    }
1308
    #[test]
    #[rustfmt::skip]
    fn test_prec_override() {
        let grm = YaccGrammar::new(
            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
            "
            %start expr
            %left '+' '-'
            %left '*' '/'
            %%
            expr : expr '+' expr
                 | expr '-' expr
                 | expr '*' expr
                 | expr '/' expr
                 | '-'  expr %prec '*'
                 | 'id' ;
        "
        ).unwrap();
        // Six user productions plus the synthesised start production.
        assert_eq!(grm.prod_precs.len(), 7);
        assert_eq!(grm.prod_precs[0].unwrap(), Precedence{level: 0, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[1].unwrap(), Precedence{level: 0, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[2].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert_eq!(grm.prod_precs[3].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        // The unary-minus production takes '*'s precedence (level 1) via %prec, rather
        // than '-'s level 0.
        assert_eq!(grm.prod_precs[4].unwrap(), Precedence{level: 1, kind: AssocKind::Left});
        assert!(grm.prod_precs[5].is_none());
        assert_eq!(grm.prod_precs[6], None);
    }
1336
1337    #[test]
1338    #[rustfmt::skip]
1339    fn test_implicit_tokens_rewrite() {
1340        let grm = YaccGrammar::new(
1341            YaccKind::Eco,
1342            "
1343          %implicit_tokens ws1 ws2
1344          %start S
1345          %%
1346          S: 'a' | T;
1347          T: 'c' |;
1348          "
1349        ).unwrap();
1350
1351        // Check that the above grammar has been rewritten to:
1352        //   ^ : ^~;
1353        //   ^~: ~ S;
1354        //   ~ : ws1 | ws2 | ;
1355        //   S : 'a' ~ | T;
1356        //   T : 'c' ~ | ;
1357
1358        assert_eq!(grm.prod_precs.len(), 9);
1359
1360        let itfs_rule_idx = grm.rule_idx(IMPLICIT_START_RULE).unwrap();
1361        assert_eq!(grm.rules_prods[usize::from(itfs_rule_idx)].len(), 1);
1362
1363        let itfs_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(itfs_rule_idx)][0])];
1364        assert_eq!(itfs_prod1.len(), 2);
1365        assert_eq!(itfs_prod1[0], Symbol::Rule(grm.rule_idx(IMPLICIT_RULE).unwrap()));
1366        assert_eq!(itfs_prod1[1], Symbol::Rule(grm.rule_idx("S").unwrap()));
1367
1368        let s_rule_idx = grm.rule_idx("S").unwrap();
1369        assert_eq!(grm.rules_prods[usize::from(s_rule_idx)].len(), 2);
1370
1371        let s_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(s_rule_idx)][0])];
1372        assert_eq!(s_prod1.len(), 2);
1373        assert_eq!(s_prod1[0], Symbol::Token(grm.token_idx("a").unwrap()));
1374        assert_eq!(s_prod1[1], Symbol::Rule(grm.rule_idx(IMPLICIT_RULE).unwrap()));
1375
1376        let s_prod2 = &grm.prods[usize::from(grm.rules_prods[usize::from(s_rule_idx)][1])];
1377        assert_eq!(s_prod2.len(), 1);
1378        assert_eq!(s_prod2[0], Symbol::Rule(grm.rule_idx("T").unwrap()));
1379
1380        let t_rule_idx = grm.rule_idx("T").unwrap();
1381        assert_eq!(grm.rules_prods[usize::from(s_rule_idx)].len(), 2);
1382
1383        let t_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(t_rule_idx)][0])];
1384        assert_eq!(t_prod1.len(), 2);
1385        assert_eq!(t_prod1[0], Symbol::Token(grm.token_idx("c").unwrap()));
1386        assert_eq!(t_prod1[1], Symbol::Rule(grm.rule_idx(IMPLICIT_RULE).unwrap()));
1387
1388        let t_prod2 = &grm.prods[usize::from(grm.rules_prods[usize::from(t_rule_idx)][1])];
1389        assert_eq!(t_prod2.len(), 0);
1390
1391        assert_eq!(Some(grm.rule_idx(IMPLICIT_RULE).unwrap()), grm.implicit_rule());
1392        let i_rule_idx = grm.rule_idx(IMPLICIT_RULE).unwrap();
1393        assert_eq!(grm.rules_prods[usize::from(i_rule_idx)].len(), 3);
1394        let i_prod1 = &grm.prods[usize::from(grm.rules_prods[usize::from(i_rule_idx)][0])];
1395        let i_prod2 = &grm.prods[usize::from(grm.rules_prods[usize::from(i_rule_idx)][1])];
1396        assert_eq!(i_prod1.len(), 2);
1397        assert_eq!(i_prod2.len(), 2);
1398        // We don't know what order the implicit rule will contain our tokens in,
1399        // hence the awkward dance below.
1400        let cnd1 = bslice![
1401            Symbol::Token(grm.token_idx("ws1").unwrap()),
1402            Symbol::Rule(grm.implicit_rule().unwrap()),
1403        ];
1404        let cnd2 = bslice![
1405            Symbol::Token(grm.token_idx("ws2").unwrap()),
1406            Symbol::Rule(grm.implicit_rule().unwrap()),
1407        ];
1408        assert!((*i_prod1 == cnd1 && *i_prod2 == cnd2) || (*i_prod1 == cnd2 && *i_prod2 == cnd1));
1409        let i_prod3 = &grm.prods[usize::from(grm.rules_prods[usize::from(i_rule_idx)][2])];
1410        assert_eq!(i_prod3.len(), 0);
1411    }
1412
1413    #[test]
1414    #[rustfmt::skip]
1415    fn test_has_path() {
1416        let grm = YaccGrammar::new(
1417            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1418            "
1419            %start A
1420            %%
1421            A: B;
1422            B: B 'x' | C;
1423            C: C 'y' | ;
1424          "
1425        ).unwrap();
1426
1427        let a_ridx = grm.rule_idx("A").unwrap();
1428        let b_ridx = grm.rule_idx("B").unwrap();
1429        let c_ridx = grm.rule_idx("C").unwrap();
1430        assert!(grm.has_path(a_ridx, b_ridx));
1431        assert!(grm.has_path(a_ridx, c_ridx));
1432        assert!(grm.has_path(b_ridx, b_ridx));
1433        assert!(grm.has_path(b_ridx, c_ridx));
1434        assert!(grm.has_path(c_ridx, c_ridx));
1435        assert!(!grm.has_path(a_ridx, a_ridx));
1436        assert!(!grm.has_path(b_ridx, a_ridx));
1437        assert!(!grm.has_path(c_ridx, a_ridx));
1438    }
1439
1440    #[test]
1441    #[rustfmt::skip]
1442    fn test_rule_min_costs() {
1443        let grm = YaccGrammar::new(
1444            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1445            "
1446            %start A
1447            %%
1448            A: A B | ;
1449            B: C | D | E;
1450            C: 'x' B | 'x';
1451            D: 'y' B | 'y' 'z';
1452            E: 'x' A | 'x' 'y';
1453          "
1454        ).unwrap();
1455
1456        let scores = rule_min_costs(&grm, &[1, 1, 1]);
1457        assert_eq!(scores[usize::from(grm.rule_idx("A").unwrap())], 0);
1458        assert_eq!(scores[usize::from(grm.rule_idx("B").unwrap())], 1);
1459        assert_eq!(scores[usize::from(grm.rule_idx("C").unwrap())], 1);
1460        assert_eq!(scores[usize::from(grm.rule_idx("D").unwrap())], 2);
1461        assert_eq!(scores[usize::from(grm.rule_idx("E").unwrap())], 1);
1462    }
1463
1464    #[test]
1465    fn test_min_sentences() {
1466        let grm = YaccGrammar::new(
1467            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1468            "
1469            %start A
1470            %%
1471            A: A B | ;
1472            B: C | D;
1473            C: 'x' B | 'x';
1474            D: 'y' B | 'y' 'z';
1475          ",
1476        )
1477        .unwrap();
1478
1479        let sg = grm.sentence_generator(|_| 1);
1480
1481        let find = |nt_name: &str, str_cnds: Vec<Vec<&str>>| {
1482            let cnds = str_cnds
1483                .iter()
1484                .map(|x| {
1485                    x.iter()
1486                        .map(|y| grm.token_idx(y).unwrap())
1487                        .collect::<Vec<_>>()
1488                })
1489                .collect::<Vec<_>>();
1490
1491            let ms = sg.min_sentence(grm.rule_idx(nt_name).unwrap());
1492            if !cnds.iter().any(|x| x == &ms) {
1493                panic!("{:?} doesn't have any matches in {:?}", ms, str_cnds);
1494            }
1495
1496            let min_sts = sg.min_sentences(grm.rule_idx(nt_name).unwrap());
1497            assert_eq!(cnds.len(), min_sts.len());
1498            for ms in min_sts {
1499                if !cnds.iter().any(|x| x == &ms) {
1500                    panic!("{:?} doesn't have any matches in {:?}", ms, str_cnds);
1501                }
1502            }
1503        };
1504
1505        find("A", vec![vec![]]);
1506        find("B", vec![vec!["x"]]);
1507        find("C", vec![vec!["x"]]);
1508        find("D", vec![vec!["y", "x"], vec!["y", "z"]]);
1509    }
1510
1511    #[test]
1512    #[rustfmt::skip]
1513    fn test_rule_max_costs1() {
1514        let grm = YaccGrammar::new(
1515            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1516            "
1517            %start A
1518            %%
1519            A: A B | ;
1520            B: C | D | E;
1521            C: 'x' B | 'x';
1522            D: 'y' B | 'y' 'z';
1523            E: 'x' A | 'x' 'y';
1524          "
1525        ).unwrap();
1526
1527        let scores = rule_max_costs(&grm, &[1, 1, 1]);
1528        assert_eq!(scores[usize::from(grm.rule_idx("A").unwrap())], u16::MAX);
1529        assert_eq!(scores[usize::from(grm.rule_idx("B").unwrap())], u16::MAX);
1530        assert_eq!(scores[usize::from(grm.rule_idx("C").unwrap())], u16::MAX);
1531        assert_eq!(scores[usize::from(grm.rule_idx("D").unwrap())], u16::MAX);
1532        assert_eq!(scores[usize::from(grm.rule_idx("E").unwrap())], u16::MAX);
1533    }
1534
1535    #[test]
1536    #[rustfmt::skip]
1537    fn test_rule_max_costs2() {
1538        let grm = YaccGrammar::new(
1539            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1540            "
1541            %start A
1542            %%
1543            A: A B | B;
1544            B: C | D;
1545            C: 'x' 'y' | 'x';
1546            D: 'y' 'x' | 'y' 'x' 'z';
1547          "
1548        ).unwrap();
1549
1550        let scores = rule_max_costs(&grm, &[1, 1, 1]);
1551        assert_eq!(scores[usize::from(grm.rule_idx("A").unwrap())], u16::MAX);
1552        assert_eq!(scores[usize::from(grm.rule_idx("B").unwrap())], 3);
1553        assert_eq!(scores[usize::from(grm.rule_idx("C").unwrap())], 2);
1554        assert_eq!(scores[usize::from(grm.rule_idx("D").unwrap())], 3);
1555    }
1556
1557    #[test]
1558    fn test_out_of_order_productions() {
1559        // Example taken from p54 of Locally least-cost error repair in LR parsers, Carl Cerecke
1560        let grm = YaccGrammar::new(
1561            YaccKind::Original(YaccOriginalActionKind::GenericParseTree),
1562            "
1563            %start S
1564            %%
1565            S: A 'c' 'd'
1566             | B 'c' 'e';
1567            A: 'a';
1568            B: 'a'
1569             | 'b';
1570            A: 'b';
1571            ",
1572        )
1573        .unwrap();
1574
1575        assert_eq!(
1576            &*grm.prods_rules,
1577            &[
1578                RIdx(1),
1579                RIdx(1),
1580                RIdx(2),
1581                RIdx(3),
1582                RIdx(3),
1583                RIdx(2),
1584                RIdx(0)
1585            ]
1586        );
1587    }
1588
1589    #[test]
1590    fn test_token_spans() {
1591        let src = "%%\nAB: 'a' | 'foo';";
1592        let grm =
1593            YaccGrammar::new(YaccKind::Original(YaccOriginalActionKind::NoAction), src).unwrap();
1594        let token_map = grm.tokens_map();
1595        let a_tidx = token_map.get("a");
1596        let foo_tidx = token_map.get("foo");
1597        let a_span = grm.token_span(*a_tidx.unwrap()).unwrap();
1598        let foo_span = grm.token_span(*foo_tidx.unwrap()).unwrap();
1599        let ab_span = grm.rule_name_span(grm.rule_idx("AB").unwrap());
1600        assert_eq!(a_span, Span::new(8, 9));
1601        assert_eq!(foo_span, Span::new(14, 17));
1602        assert_eq!(ab_span, Span::new(3, 5));
1603        assert_eq!(&src[a_span.start()..a_span.end()], "a");
1604        assert_eq!(&src[foo_span.start()..foo_span.end()], "foo");
1605        assert_eq!(&src[ab_span.start()..ab_span.end()], "AB");
1606    }
1607
1608    #[test]
1609    fn token_span_issue296() {
1610        let src = "%%
1611                   S: | AB;
1612                   A: 'a' 'b';
1613                   B: 'b' 'c';
1614                   AB: A AB | B ';' AB;
1615                   %%
1616                   ";
1617        let grm =
1618            YaccGrammar::new(YaccKind::Original(YaccOriginalActionKind::NoAction), src).unwrap();
1619        let token_map = grm.tokens_map();
1620        let c_tidx = token_map.get("c").unwrap();
1621        assert_eq!(grm.token_name(*c_tidx), Some("c"));
1622        let c_span = grm.token_span(*c_tidx).unwrap();
1623        assert_eq!(&src[c_span.start()..c_span.end()], "c");
1624    }
1625
1626    #[test]
1627    fn test_grmtools_section_yacckinds() {
1628        let srcs = [
1629            "%grmtools{yacckind: Original(NoAction)}
1630                 %%
1631                 Start: ;",
1632            "%grmtools{yacckind: YaccKind::Original(GenericParseTree)}
1633                 %%
1634                 Start: ;",
1635            "%grmtools{yacckind: YaccKind::Original(yaccoriginalactionkind::useraction)}
1636                 %actiontype ()
1637                 %%
1638                 Start: ;",
1639            "%grmtools{yacckind: Original(YACCOriginalActionKind::NoAction)}
1640                 %%
1641                 Start: ;",
1642            "%grmtools{yacckind: YaccKind::Grmtools}
1643                 %%
1644                 Start -> () : ;",
1645        ];
1646        for src in srcs {
1647            YaccGrammar::<u32>::from_str(src).unwrap();
1648        }
1649    }
1650
1651    #[test]
1652    fn test_grmtools_section_invalid_yacckinds() {
1653        let srcs = [
1654            "%grmtools{yacckind: Foo}",
1655            "%grmtools{yacckind: YaccKind::Foo}",
1656            "%grmtools{yacckindof: YaccKind::Grmtools}",
1657            "%grmtools{yacckindof: Grmtools}",
1658            "%grmtools{yacckindof: YaccKindFoo::Foo}",
1659            "%grmtools{yacckind: Foo::Grmtools}",
1660            "%grmtools{yacckind: YaccKind::Original}",
1661            "%grmtools{yacckind: YaccKind::OriginalFoo}",
1662            "%grmtools{yacckind: YaccKind::Original()}",
1663            "%grmtools{yacckind: YaccKind::Original(Foo)}",
1664            "%grmtools{yacckind: YaccKind::Original(YaccOriginalActionKind)}",
1665            "%grmtools{yacckind: YaccKind::Original(YaccOriginalActionKind::Foo)}",
1666            "%grmtools{yacckind: YaccKind::Original(Foo::NoActions)}",
1667            "%grmtools{yacckind: YaccKind::Original(Foo::NoActionsBar)}",
1668        ];
1669
1670        for src in srcs {
1671            let s = format!("{}\n%%\nStart();\n", src);
1672            assert!(YaccGrammar::<u32>::from_str(&s).is_err());
1673        }
1674    }
1675
1676    #[test]
1677    fn test_grmtools_section_commas() {
1678        // We can't actually test much here, because
1679        // We don't have a second value to test.
1680        //
1681        // `RecoveryKind` seemed like an option for an additional value to allow,
1682        // but that is part of `lrpar` which cfgrammar doesn't depend upon.
1683        let src = r#"
1684                %grmtools{
1685                    yacckind: YaccKind::Grmtools,
1686                }
1687                %%
1688                Start -> () : ;
1689            "#;
1690        YaccGrammar::<u32>::from_str(src).unwrap();
1691        let src = r#"
1692                %grmtools{
1693                    yacckind: YaccKind::Grmtools
1694                }
1695                %%
1696                Start -> () : ;
1697            "#;
1698        YaccGrammar::<u32>::from_str(src).unwrap();
1699    }
1700}