From 912eaf40d4efd29b7e3489d51c55b8b79206df79 Mon Sep 17 00:00:00 2001 From: Enrico Tassi Date: Tue, 5 Mar 2019 11:42:17 +0100 Subject: [parsing] Split Tok.t into Tok.t and Tok.pattern Tokens had a double role: - the output of the lexer - the items of grammar entries, especially terminals Now tokens are only the output of the lexer, and this paves the way for using a richer data type, e.g. including Loc.t Patterns, as in Plexing.pattern, only represent patterns (for tokens) and now have a bit more structure (e.g. the wildcard is represented as None, not as "", while a regular pattern for "x" is represented as Some "x") --- parsing/notation_gram.ml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'parsing/notation_gram.ml') diff --git a/parsing/notation_gram.ml b/parsing/notation_gram.ml index fc5feba58b..a50f8d69e3 100644 --- a/parsing/notation_gram.ml +++ b/parsing/notation_gram.ml @@ -21,7 +21,7 @@ type level = Constrexpr.notation_entry * precedence * tolerability list * constr (* first argument is InCustomEntry s for custom entries *) type grammar_constr_prod_item = - | GramConstrTerminal of Tok.t + | GramConstrTerminal of Tok.pattern | GramConstrNonTerminal of Extend.constr_prod_entry_key * Id.t option | GramConstrListMark of int * bool * int (* tells action rule to make a list of the n previous parsed items; -- cgit v1.2.3