| author | Emilio Jesus Gallego Arias | 2019-08-19 02:35:56 +0200 |
|---|---|---|
| committer | Emilio Jesus Gallego Arias | 2020-03-25 23:45:00 -0400 |
| commit | 13929f39f8560cfcb3aacf20c84c6dcb5295cec5 | |
| tree | a4204cd4bced576d6d846ebac908aab5092c66a5 | |
| parent | 4a88beff476d2c27eae381bc8a61f777015c0617 | |
[parsing] Make grammar extension type private.
After the gramlib merge and the type-safe interface added to it, the
grammar extension type is redundant; we thus make it private as a
first step towards consolidating it with the one in gramlib.
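
Concretely, the patch turns each use of the `Extend.*` GADT constructors into a call to the smart constructors now exported under `Pcoq.G.Symbol`, while the symbol type itself becomes private. Below is a minimal before/after sketch of the pattern, assuming the post-patch `Pcoq` interface; the "answer" terminal and the action are placeholders for illustration, not taken from the patch.

```ocaml
(* Before (constructors from parsing/extend.ml, removed by this patch):
   let r = Extend.Rule
       (Extend.Next (Extend.Stop, Extend.Atoken (CLexer.terminal "answer")),
        (fun _ _loc -> 42))
*)

(* After: the Rule/Next/Stop constructors now live in Pcoq itself, and
   symbols are built through the Pcoq.G.Symbol smart constructors. *)
let r : int Pcoq.production_rule =
  Pcoq.Rule
    (Pcoq.Next (Pcoq.Stop, Pcoq.G.Symbol.token (CLexer.terminal "answer")),
     (fun _ _loc -> 42))
```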
| -rw-r--r-- | coqpp/coqpp_main.ml | 32 |
| -rw-r--r-- | gramlib/grammar.ml | 11 |
| -rw-r--r-- | gramlib/grammar.mli | 8 |
| -rw-r--r-- | parsing/extend.ml | 33 |
| -rw-r--r-- | parsing/pcoq.ml | 393 |
| -rw-r--r-- | parsing/pcoq.mli | 36 |
| -rw-r--r-- | plugins/ltac/tacentries.ml | 34 |
| -rw-r--r-- | user-contrib/Ltac2/g_ltac2.mlg | 9 |
| -rw-r--r-- | user-contrib/Ltac2/tac2core.ml | 71 |
| -rw-r--r-- | user-contrib/Ltac2/tac2entries.ml | 14 |
| -rw-r--r-- | user-contrib/Ltac2/tac2entries.mli | 2 |
| -rw-r--r-- | vernac/egramcoq.ml | 73 |
| -rw-r--r-- | vernac/egramml.ml | 16 |
| -rw-r--r-- | vernac/egramml.mli | 4 |
| -rw-r--r-- | vernac/pvernac.ml | 5 |
| -rw-r--r-- | vernac/vernacextend.ml | 18 |
| -rw-r--r-- | vernac/vernacextend.mli | 2 |
17 files changed, 420 insertions, 341 deletions
diff --git a/coqpp/coqpp_main.ml b/coqpp/coqpp_main.ml index bdffabf0b2..5d1f0a876f 100644 --- a/coqpp/coqpp_main.ml +++ b/coqpp/coqpp_main.ml @@ -217,43 +217,43 @@ let rec print_prod fmt p = and print_extrule fmt (tkn, vars, body) = let tkn = List.rev tkn in - fprintf fmt "@[Extend.Rule@ (@[%a@],@ @[(%a)@])@]" (print_symbols ~norec:false) tkn print_fun (vars, body) + fprintf fmt "@[Pcoq.Rule@ (@[%a@],@ @[(%a)@])@]" (print_symbols ~norec:false) tkn print_fun (vars, body) and print_symbols ~norec fmt = function -| [] -> fprintf fmt "Extend.Stop" +| [] -> fprintf fmt "Pcoq.Stop" | tkn :: tkns -> - let c = if norec then "Extend.NextNoRec" else "Extend.Next" in + let c = if norec then "Pcoq.NextNoRec" else "Pcoq.Next" in fprintf fmt "%s @[(%a,@ %a)@]" c (print_symbols ~norec) tkns print_symbol tkn and print_symbol fmt tkn = match tkn with | SymbToken (t, s) -> - fprintf fmt "(Extend.Atoken (%a))" print_tok (t, s) + fprintf fmt "(Pcoq.G.Symbol.token (%a))" print_tok (t, s) | SymbEntry (e, None) -> - fprintf fmt "(Extend.Aentry %s)" e + fprintf fmt "(Pcoq.G.Symbol.nterm %s)" e | SymbEntry (e, Some l) -> - fprintf fmt "(Extend.Aentryl (%s, %a))" e print_string l + fprintf fmt "(Pcoq.G.Symbol.nterml %s (%a))" e print_string l | SymbSelf -> - fprintf fmt "Extend.Aself" + fprintf fmt "Pcoq.G.Symbol.self" | SymbNext -> - fprintf fmt "Extend.Anext" + fprintf fmt "Pcoq.G.Symbol.next" | SymbList0 (s, None) -> - fprintf fmt "(Extend.Alist0 %a)" print_symbol s + fprintf fmt "(Pcoq.G.Symbol.list0 %a)" print_symbol s | SymbList0 (s, Some sep) -> - fprintf fmt "(Extend.Alist0sep (%a, %a))" print_symbol s print_symbol sep + fprintf fmt "(Pcoq.G.Symbol.list0sep (%a) (%a) false)" print_symbol s print_symbol sep | SymbList1 (s, None) -> - fprintf fmt "(Extend.Alist1 %a)" print_symbol s + fprintf fmt "(Pcoq.G.Symbol.list1 (%a))" print_symbol s | SymbList1 (s, Some sep) -> - fprintf fmt "(Extend.Alist1sep (%a, %a))" print_symbol s print_symbol sep + fprintf fmt "(Pcoq.G.Symbol.list1sep (%a) (%a) false)" print_symbol s print_symbol sep | SymbOpt s -> - fprintf fmt "(Extend.Aopt %a)" print_symbol s + fprintf fmt "(Pcoq.G.Symbol.opt %a)" print_symbol s | SymbRules rules -> let pr fmt (r, body) = let (vars, tkn) = List.split r in let tkn = List.rev tkn in - fprintf fmt "Extend.Rules @[(%a,@ (%a))@]" (print_symbols ~norec:true) tkn print_fun (vars, body) + fprintf fmt "Pcoq.G.Rules.make @[(%a)@ (%a)@]" (print_symbols ~norec:true) tkn print_fun (vars, body) in let pr fmt rules = print_list fmt pr rules in - fprintf fmt "(Extend.Arules %a)" pr (List.rev rules) + fprintf fmt "(Pcoq.G.Symbol.rules ~warning:None %a)" pr (List.rev rules) | SymbQuote c -> fprintf fmt "(%s)" c @@ -452,7 +452,7 @@ let terminal s = let p = if s <> "" && s.[0] >= '0' && s.[0] <= '9' then "CLexer.terminal_numeral" else "CLexer.terminal" in - let c = Printf.sprintf "Extend.Atoken (%s \"%s\")" p s in + let c = Printf.sprintf "Pcoq.G.Symbol.token (%s \"%s\")" p s in SymbQuote c let rec parse_symb self = function diff --git a/gramlib/grammar.ml b/gramlib/grammar.ml index 0024d70466..818608674e 100644 --- a/gramlib/grammar.ml +++ b/gramlib/grammar.ml @@ -77,8 +77,7 @@ module type S = sig val make : ('a, _, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t end - module Unsafe : - sig + module Unsafe : sig val clear_entry : 'a Entry.t -> unit end val safe_extend : warning:(string -> unit) option -> @@ -87,6 +86,10 @@ module type S = sig list -> unit val safe_delete_rule : 'a Entry.t -> ('a, _, 'f, 'r) Rule.t -> unit + + (* Used in custom entries, should tweak? 
*) + val level_of_nonterm : ('a, norec, 'c) Symbol.t -> string option + end (* Implementation *) @@ -1666,4 +1669,8 @@ let safe_delete_rule e r = let AnyS (symbols, _) = get_symbols r in delete_rule e symbols +let level_of_nonterm sym = match sym with + | Snterml (_,l) -> Some l + | _ -> None + end diff --git a/gramlib/grammar.mli b/gramlib/grammar.mli index f0423a92af..4ac85bd358 100644 --- a/gramlib/grammar.mli +++ b/gramlib/grammar.mli @@ -86,16 +86,20 @@ module type S = sig val make : ('a, _, 'f, Loc.t -> 'a) Rule.t -> 'f -> 'a t end - module Unsafe : - sig + module Unsafe : sig val clear_entry : 'a Entry.t -> unit end + val safe_extend : warning:(string -> unit) option -> 'a Entry.t -> Gramext.position option -> (string option * Gramext.g_assoc option * 'a Production.t list) list -> unit val safe_delete_rule : 'a Entry.t -> ('a, _, 'f, 'r) Rule.t -> unit + + (* Used in custom entries, should tweak? *) + val level_of_nonterm : ('a, norec, 'c) Symbol.t -> string option + end (** Signature type of the functor [Grammar.GMake]. The types and functions are almost the same than in generic interface, but: diff --git a/parsing/extend.ml b/parsing/extend.ml index 20297fa156..9c7f5c87c4 100644 --- a/parsing/extend.ml +++ b/parsing/extend.ml @@ -77,36 +77,3 @@ type ('a,'b,'c) ty_user_symbol = | TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol | TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol | TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol - -(** {5 Type-safe grammar extension} *) - -(* Should be merged with gramlib's implementation *) - -type norec = Gramlib.Grammar.norec -type mayrec = Gramlib.Grammar.mayrec - -type ('self, 'trec, 'a) symbol = -| Atoken : 'c Tok.p -> ('self, norec, 'c) symbol -| Alist1 : ('self, 'trec, 'a) symbol -> ('self, 'trec, 'a list) symbol -| Alist1sep : ('self, 'trec, 'a) symbol * ('self, norec, _) symbol - -> ('self, 'trec, 'a list) symbol -| Alist0 : ('self, 'trec, 'a) symbol -> ('self, 'trec, 'a list) symbol -| Alist0sep : ('self, 'trec, 'a) symbol * ('self, norec, _) symbol - -> ('self, 'trec, 'a list) symbol -| Aopt : ('self, 'trec, 'a) symbol -> ('self, 'trec, 'a option) symbol -| Aself : ('self, mayrec, 'self) symbol -| Anext : ('self, mayrec, 'self) symbol -| Aentry : 'a entry -> ('self, norec, 'a) symbol -| Aentryl : 'a entry * string -> ('self, norec, 'a) symbol -| Arules : 'a rules list -> ('self, norec, 'a) symbol - -and ('self, 'trec, _, 'r) rule = -| Stop : ('self, norec, 'r, 'r) rule -| Next : ('self, _, 'a, 'r) rule * ('self, _, 'b) symbol -> ('self, mayrec, 'b -> 'a, 'r) rule -| NextNoRec : ('self, norec, 'a, 'r) rule * ('self, norec, 'b) symbol -> ('self, norec, 'b -> 'a, 'r) rule - -and 'a rules = -| Rules : (_, norec, 'act, Loc.t -> 'a) rule * 'act -> 'a rules - -type 'a production_rule = -| Rule : ('a, _, 'act, Loc.t -> 'a) rule * 'act -> 'a production_rule diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml index b3f997e1b3..5601dcb474 100644 --- a/parsing/pcoq.ml +++ b/parsing/pcoq.ml @@ -15,109 +15,256 @@ open Genarg open Gramlib (** The parser of Coq *) -module G : sig - include Grammar.S with type te = Tok.t and type 'c pattern = 'c Tok.p - -(* where Grammar.S - -module type S = - sig - type te = 'x; - type parsable = 'x; - value parsable : Stream.t char -> parsable; - value tokens : string -> list (string * int); - value glexer : Plexing.lexer te; - value set_algorithm : parse_algorithm -> unit; - module Entry : - sig - type e 'a = 'y; - value create : string -> e 
'a; - value parse : e 'a -> parsable -> 'a; - value parse_token_stream : e 'a -> Stream.t te -> 'a; - value name : e 'a -> string; - value of_parser : string -> (Stream.t te -> 'a) -> e 'a; - value print : Format.formatter -> e 'a -> unit; - external obj : e 'a -> Gramext.g_entry te = "%identity"; - end - ; - module Unsafe : - sig - value gram_reinit : Plexing.lexer te -> unit; - value clear_entry : Entry.e 'a -> unit; - end - ; - value extend : - Entry.e 'a -> option Gramext.position -> - list - (option string * option Gramext.g_assoc * - list (list (Gramext.g_symbol te) * Gramext.g_action)) -> - unit; - value delete_rule : Entry.e 'a -> list (Gramext.g_symbol te) -> unit; - end - *) +type norec = Gramlib.Grammar.norec +type mayrec = Gramlib.Grammar.mayrec + +type ('self, 'trec, 'a) symbol = +| Atoken : 'c Tok.p -> ('self, norec, 'c) symbol +| Alist1 : ('self, 'trec, 'a) symbol -> ('self, 'trec, 'a list) symbol +| Alist1sep : ('self, 'trec, 'a) symbol * ('self, norec, _) symbol + -> ('self, 'trec, 'a list) symbol +| Alist0 : ('self, 'trec, 'a) symbol -> ('self, 'trec, 'a list) symbol +| Alist0sep : ('self, 'trec, 'a) symbol * ('self, norec, _) symbol + -> ('self, 'trec, 'a list) symbol +| Aopt : ('self, 'trec, 'a) symbol -> ('self, 'trec, 'a option) symbol +| Aself : ('self, mayrec, 'self) symbol +| Anext : ('self, mayrec, 'self) symbol +| Aentry : 'a entry -> ('self, norec, 'a) symbol +| Aentryl : 'a entry * string -> ('self, norec, 'a) symbol +| Arules : 'a rules list -> ('self, norec, 'a) symbol + +and ('self, 'trec, _, 'r) rule = +| Stop : ('self, norec, 'r, 'r) rule +| Next : ('self, _, 'a, 'r) rule * ('self, _, 'b) symbol -> ('self, mayrec, 'b -> 'a, 'r) rule +| NextNoRec : ('self, norec, 'a, 'r) rule * ('self, norec, 'b) symbol -> ('self, norec, 'b -> 'a, 'r) rule + +and 'a rules = +| Rules : (_, norec, 'act, Loc.t -> 'a) rule * 'act -> 'a rules + +type 'a production_rule = +| Rule : ('a, _, 'act, Loc.t -> 'a) rule * 'act -> 'a production_rule - type coq_parsable +module G : sig - val coq_parsable : ?loc:Loc.t -> char Stream.t -> coq_parsable - val entry_create : string -> 'a entry - val entry_parse : 'a entry -> coq_parsable -> 'a + include Grammar.S + with type te = Tok.t + and type 'c pattern = 'c Tok.p + and type 'a pattern = 'a Tok.p + and type ('self, 'trec, 'a) Symbol.t = ('self, 'trec, 'a) symbol + and type ('self, 'trec, 'f, 'r) Rule.t = ('self, 'trec, 'f, 'r) rule + and type 'a Rules.t = 'a rules + and type 'a Production.t = 'a production_rule - val comment_state : coq_parsable -> ((int * int) * string) list + val comment_state : Parsable.t -> ((int * int) * string) list + val level_of_nonterm : ('a,norec,'c) Symbol.t -> string option + val generalize_symbol : ('a, 'tr, 'c) Symbol.t -> ('a, norec, 'c) symbol option end with type 'a Entry.t = 'a Extend.entry = struct - include Grammar.GMake(CLexer.Lexer) + module G_ = Grammar.GMake(CLexer.Lexer) - type coq_parsable = Parsable.t * CLexer.lexer_state ref + type te = G_.te + type 'c pattern = 'c G_.pattern + + type coq_parsable = G_.Parsable.t * CLexer.lexer_state ref let coq_parsable ?loc c = let state = ref (CLexer.init_lexer_state ()) in CLexer.set_lexer_state !state; - let a = Parsable.make ?loc c in + let a = G_.Parsable.make ?loc c in state := CLexer.get_lexer_state (); (a,state) - let entry_create = Entry.make - - let entry_parse e (p,state) = - CLexer.set_lexer_state !state; - try - let c = Entry.parse e p in - state := CLexer.get_lexer_state (); - c - with Ploc.Exc (loc,e) -> - CLexer.drop_lexer_state (); - let loc' 
= Loc.get_loc (Exninfo.info e) in - let loc = match loc' with None -> loc | Some loc -> loc in - Loc.raise ~loc e - let comment_state (p,state) = CLexer.get_comment_state !state -end + module Parsable = struct + type t = coq_parsable + let make = coq_parsable + (* let comment_state = comment_state *) + end -module Parsable = -struct - type t = G.coq_parsable - let make = G.coq_parsable - let comment_state = G.comment_state -end + let tokens = G_.tokens -module Entry = -struct + module Entry = struct + include G_.Entry + + let parse e (p,state) = + CLexer.set_lexer_state !state; + try + let c = G_.Entry.parse e p in + state := CLexer.get_lexer_state (); + c + with Ploc.Exc (loc,e) -> + CLexer.drop_lexer_state (); + let loc' = Loc.get_loc (Exninfo.info e) in + let loc = match loc' with None -> loc | Some loc -> loc in + Loc.raise ~loc e + + end + + module Symbol = struct + type ('self, 'trec, 'a) t = ('self, 'trec, 'a) symbol + let token tok = Atoken tok + let list0 e = Alist0 e + let list0sep e s _ = Alist0sep (e,s) + let list1 e = Alist1 e + let list1sep e s _ = Alist1sep (e,s) + let opt e = Aopt e + let self = Aself + let next = Anext + let nterm e = Aentry e + let nterml e s = Aentryl (e,s) + let rules ~warning:_ r = Arules r + end - type 'a t = 'a Grammar.GMake(CLexer.Lexer).Entry.t + module Rule = struct + type ('self, 'trec, 'f, 'r) t = ('self, 'trec, 'f, 'r) rule + let stop = Stop + let next a b = Next (a,b) + let next_norec a b = NextNoRec (a,b) + end - let create = G.Entry.make - let parse = G.entry_parse - let print = G.Entry.print - let of_parser = G.Entry.of_parser - let name = G.Entry.name - let parse_token_stream = G.Entry.parse_token_stream + module Rules = struct + type 'a t = 'a rules + let make a f = Rules(a,f) + end + + module Production = struct + type 'a t = 'a production_rule + let make a f = Rule(a,f) + end + + module Unsafe = struct + let clear_entry = G_.Unsafe.clear_entry + end + + (** FIXME: This is a hack around a deficient camlp5 API *) + type 'a any_production = AnyProduction : ('a, 'tr, 'f, Loc.t -> 'a) G_.Rule.t * 'f -> 'a any_production + + let rec symbol_of_prod_entry_key : type s tr a. (s, tr, a) symbol -> (s, tr, a) G_.Symbol.t = + function + | Atoken t -> G_.Symbol.token t + | Alist1 s -> + let s = symbol_of_prod_entry_key s in + G_.Symbol.list1 s + | Alist1sep (s,sep) -> + let s = symbol_of_prod_entry_key s in + let sep = symbol_of_prod_entry_key sep in + G_.Symbol.list1sep s sep false + | Alist0 s -> + let s = symbol_of_prod_entry_key s in + G_.Symbol.list0 s + | Alist0sep (s,sep) -> + let s = symbol_of_prod_entry_key s in + let sep = symbol_of_prod_entry_key sep in + G_.Symbol.list0sep s sep false + | Aopt s -> + let s = symbol_of_prod_entry_key s in + G_.Symbol.opt s + | Aself -> G_.Symbol.self + | Anext -> G_.Symbol.next + | Aentry e -> G_.Symbol.nterm e + | Aentryl (e, n) -> G_.Symbol.nterml e n + | Arules rs -> + let warning msg = Feedback.msg_warning Pp.(str msg) in + G_.Symbol.rules ~warning:(Some warning) (List.map symbol_of_rules rs) + + and symbol_of_rule : type s tr a r. (s, tr, a, r) rule -> (s, tr, a, r) G_.Rule.t = function + | Stop -> + G_.Rule.stop + | Next (r, s) -> + let r = symbol_of_rule r in + let s = symbol_of_prod_entry_key s in + G_.Rule.next r s + | NextNoRec (r, s) -> + let r = symbol_of_rule r in + let s = symbol_of_prod_entry_key s in + G_.Rule.next_norec r s + + and symbol_of_rules : type a. 
a rules -> a G_.Rules.t = function + | Rules (r, act) -> + let symb = symbol_of_rule r in + G_.Rules.make symb act + + let of_coq_production_rule : type a. a production_rule -> a any_production = function + | Rule (toks, act) -> + AnyProduction (symbol_of_rule toks, act) + + let of_coq_single_extend_statement (lvl, assoc, rule) = + (lvl, assoc, List.map of_coq_production_rule rule) + + let of_coq_extend_statement (pos, st) = + (pos, List.map of_coq_single_extend_statement st) + + let fix_extend_statement (pos, st) = + let fix_single_extend_statement (lvl, assoc, rules) = + let fix_production_rule (AnyProduction (s, act)) = G_.Production.make s act in + (lvl, assoc, List.map fix_production_rule rules) + in + (pos, List.map fix_single_extend_statement st) + + let safe_extend ~warning e pos ext = + let pos, ext = of_coq_extend_statement (pos,ext) in + let pos, ext = fix_extend_statement (pos, ext) in + G_.safe_extend ~warning e pos ext + + let safe_delete_rule e r = + let r = symbol_of_rule r in + G_.safe_delete_rule e r + + let level_of_nonterm = function + | Aentryl (_,l) -> Some l + | _ -> None + + exception SelfSymbol + let rec generalize_symbol : + type a tr s. (s, tr, a) symbol -> (s, norec, a) symbol = + function + | Atoken tok -> + Atoken tok + | Alist1 e -> + Alist1 (generalize_symbol e) + | Alist1sep (e, sep) -> + let e = generalize_symbol e in + let sep = generalize_symbol sep in + Alist1sep (e, sep) + | Alist0 e -> + Alist0 (generalize_symbol e) + | Alist0sep (e, sep) -> + let e = generalize_symbol e in + let sep = generalize_symbol sep in + Alist0sep (e, sep) + | Aopt e -> + Aopt (generalize_symbol e) + | Aself -> + raise SelfSymbol + | Anext -> + raise SelfSymbol + | Aentry e -> + Aentry e + | Aentryl (e, l) -> + Aentryl (e, l) + | Arules r -> + Arules r + + let generalize_symbol s = + try Some (generalize_symbol s) + with SelfSymbol -> None end +module Parsable = struct + include G.Parsable + let comment_state = G.comment_state +end + +module Entry = struct + include G.Entry + let create = make +end + module Lookahead = struct @@ -189,71 +336,6 @@ end (** Binding general entry keys to symbol *) -let rec symbol_of_prod_entry_key : type s tr a. (s, tr, a) symbol -> (s, tr, a) G.Symbol.t = -function -| Atoken t -> G.Symbol.token t -| Alist1 s -> - let s = symbol_of_prod_entry_key s in - G.Symbol.list1 s -| Alist1sep (s,sep) -> - let s = symbol_of_prod_entry_key s in - let sep = symbol_of_prod_entry_key sep in - G.Symbol.list1sep s sep false -| Alist0 s -> - let s = symbol_of_prod_entry_key s in - G.Symbol.list0 s -| Alist0sep (s,sep) -> - let s = symbol_of_prod_entry_key s in - let sep = symbol_of_prod_entry_key sep in - G.Symbol.list0sep s sep false -| Aopt s -> - let s = symbol_of_prod_entry_key s in - G.Symbol.opt s -| Aself -> G.Symbol.self -| Anext -> G.Symbol.next -| Aentry e -> G.Symbol.nterm e -| Aentryl (e, n) -> G.Symbol.nterml e n -| Arules rs -> - let warning msg = Feedback.msg_warning Pp.(str msg) in - G.Symbol.rules ~warning:(Some warning) (List.map symbol_of_rules rs) - -and symbol_of_rule : type s tr a r. (s, tr, a, Loc.t -> r) Extend.rule -> (s, tr, a, Loc.t -> r) G.Rule.t = function -| Stop -> - G.Rule.stop -| Next (r, s) -> - let r = symbol_of_rule r in - let s = symbol_of_prod_entry_key s in - G.Rule.next r s -| NextNoRec (r, s) -> - let r = symbol_of_rule r in - let s = symbol_of_prod_entry_key s in - G.Rule.next_norec r s - -and symbol_of_rules : type a. 
a Extend.rules -> a G.Rules.t = function -| Rules (r, act) -> - let symb = symbol_of_rule r in - G.Rules.make symb act - -(** FIXME: This is a hack around a deficient camlp5 API *) -type 'a any_production = AnyProduction : ('a, 'tr, 'f, Loc.t -> 'a) G.Rule.t * 'f -> 'a any_production - -let of_coq_production_rule : type a. a Extend.production_rule -> a any_production = function -| Rule (toks, act) -> - AnyProduction (symbol_of_rule toks, act) - -let of_coq_single_extend_statement (lvl, assoc, rule) = - (lvl, assoc, List.map of_coq_production_rule rule) - -let of_coq_extend_statement (pos, st) = - (pos, List.map of_coq_single_extend_statement st) - -let fix_extend_statement (pos, st) = - let fix_single_extend_statement (lvl, assoc, rules) = - let fix_production_rule (AnyProduction (s, act)) = G.Production.make s act in - (lvl, assoc, List.map fix_production_rule rules) - in - (pos, List.map fix_single_extend_statement st) - (** Type of reinitialization data *) type gram_reinit = Gramlib.Gramext.g_assoc * Gramlib.Gramext.position @@ -293,7 +375,7 @@ let camlp5_entries = ref EntryDataMap.empty let grammar_delete e (pos,rls) = List.iter (fun (n,ass,lev) -> - List.iter (fun (AnyProduction (pil,_)) -> G.safe_delete_rule e pil) (List.rev lev)) + List.iter (fun (Rule(pil,_)) -> G.safe_delete_rule e pil) (List.rev lev)) (List.rev rls) let grammar_delete_reinit e reinit (pos, rls) = @@ -308,22 +390,20 @@ let grammar_delete_reinit e reinit (pos, rls) = (** Extension *) -let grammar_extend e ext = - let ext = of_coq_extend_statement ext in - let undo () = grammar_delete e ext in - let pos, ext = fix_extend_statement ext in +let grammar_extend e (pos,ext) = + let undo () = grammar_delete e (pos,ext) in let redo () = G.safe_extend ~warning:None e pos ext in camlp5_state := ByEXTEND (undo, redo) :: !camlp5_state; redo () let grammar_extend_sync e ext = camlp5_state := ByGrammar (ExtendRule (e, ext)) :: !camlp5_state; - let pos, ext = fix_extend_statement (of_coq_extend_statement ext) in + let pos, ext = ext in G.safe_extend ~warning:None e pos ext let grammar_extend_sync_reinit e reinit ext = camlp5_state := ByGrammar (ExtendRuleReinit (e, reinit, ext)) :: !camlp5_state; - let pos, ext = fix_extend_statement (of_coq_extend_statement ext) in + let pos, ext = ext in G.safe_extend ~warning:None e pos ext (** The apparent parser of Coq; encapsulate G to keep track @@ -344,11 +424,11 @@ let rec remove_grammars n = match !camlp5_state with | [] -> anomaly ~label:"Pcoq.remove_grammars" (Pp.str "too many rules to remove.") | ByGrammar (ExtendRuleReinit (g, reinit, ext)) :: t -> - grammar_delete_reinit g reinit (of_coq_extend_statement ext); + grammar_delete_reinit g reinit ext; camlp5_state := t; remove_grammars (n-1) | ByGrammar (ExtendRule (g, ext)) :: t -> - grammar_delete g (of_coq_extend_statement ext); + grammar_delete g ext; camlp5_state := t; remove_grammars (n-1) | ByEXTEND (undo,redo)::t -> @@ -393,7 +473,7 @@ let map_entry f en = let parse_string f ?loc x = let strm = Stream.of_string x in - Gram.entry_parse f (Gram.coq_parsable ?loc strm) + G.Entry.parse f (G.Parsable.make ?loc strm) type gram_universe = string @@ -531,10 +611,9 @@ module Module = end let epsilon_value (type s tr a) f (e : (s, tr, a) symbol) = - let s = symbol_of_prod_entry_key e in - let r = G.Production.make (G.Rule.next G.Rule.stop s) (fun x _ -> f x) in + let r = G.Production.make (G.Rule.next G.Rule.stop e) (fun x _ -> f x) in let ext = [None, None, [r]] in - let entry = Gram.entry_create "epsilon" in + let entry = G.Entry.make 
"epsilon" in let warning msg = Feedback.msg_warning Pp.(str msg) in let () = G.safe_extend ~warning:(Some warning) entry None ext in try Some (parse_string entry "") with _ -> None @@ -601,7 +680,7 @@ let extend_entry_command (type a) (type b) (tag : (a, b) entry_command) (g : a) | (_, st) :: _ -> st in let (names, st) = modify g grammar_state in - let entries = List.map (fun name -> Gram.entry_create name) names in + let entries = List.map (fun name -> G.Entry.make name) names in let iter name e = camlp5_state := ByEntry (tag, name, e) :: !camlp5_state; let EntryData.Ex old = diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli index 87c7f168ce..63a4c1dd58 100644 --- a/parsing/pcoq.mli +++ b/parsing/pcoq.mli @@ -9,7 +9,6 @@ (************************************************************************) open Names -open Extend open Genarg open Constrexpr open Libnames @@ -222,7 +221,40 @@ module Module : val module_type : module_ast Entry.t end -val epsilon_value : ('a -> 'self) -> ('self, _, 'a) Extend.symbol -> 'self option +(** {5 Type-safe grammar extension} *) + +type ('self, 'trec, 'a) symbol + +type norec = Gramlib.Grammar.norec +type mayrec = Gramlib.Grammar.mayrec + +type ('self, 'trec, _, 'r) rule = +| Stop : ('self, norec, 'r, 'r) rule +| Next : ('self, _, 'a, 'r) rule * ('self, _, 'b) symbol -> ('self, mayrec, 'b -> 'a, 'r) rule +| NextNoRec : ('self, norec, 'a, 'r) rule * ('self, norec, 'b) symbol -> ('self, norec, 'b -> 'a, 'r) rule + +type 'a rules = + | Rules : (_, norec, 'act, Loc.t -> 'a) rule * 'act -> 'a rules + +type 'a production_rule = + | Rule : ('a, _, 'act, Loc.t -> 'a) rule * 'act -> 'a production_rule + +module G : sig + + include Gramlib.Grammar.S + + val level_of_nonterm : ('a,norec,'c) Symbol.t -> string option + val generalize_symbol : ('a, 'tr, 'c) Symbol.t -> ('a, norec, 'c) symbol option + +end with type 'a Entry.t = 'a Entry.t + and type te = Tok.t + and type 'a pattern = 'a Tok.p + and type ('self, 'trec, 'a) Symbol.t = ('self, 'trec, 'a) symbol + and type ('self, 'trec, 'f, 'r) Rule.t = ('self, 'trec, 'f, 'r) rule + and type 'a Rules.t = 'a rules + and type 'a Production.t = 'a production_rule + +val epsilon_value : ('a -> 'self) -> ('self, _, 'a) symbol -> 'self option (** {5 Extending the parser without synchronization} *) diff --git a/plugins/ltac/tacentries.ml b/plugins/ltac/tacentries.ml index 4af5699317..53b5386375 100644 --- a/plugins/ltac/tacentries.ml +++ b/plugins/ltac/tacentries.ml @@ -44,11 +44,11 @@ let coincide s pat off = !break let atactic n = - if n = 5 then Aentry Pltac.binder_tactic - else Aentryl (Pltac.tactic_expr, string_of_int n) + if n = 5 then Pcoq.G.Symbol.nterm Pltac.binder_tactic + else Pcoq.G.Symbol.nterml Pltac.tactic_expr (string_of_int n) type entry_name = EntryName : - 'a raw_abstract_argument_type * (Tacexpr.raw_tactic_expr, _, 'a) Extend.symbol -> entry_name + 'a raw_abstract_argument_type * (Tacexpr.raw_tactic_expr, _, 'a) Pcoq.symbol -> entry_name (** Quite ad-hoc *) let get_tacentry n m = @@ -57,8 +57,8 @@ let get_tacentry n m = && not (Int.equal m 5) (* Because tactic5 is at binder_tactic *) && not (Int.equal m 0) (* Because tactic0 is at simple_tactic *) in - if check_lvl n then EntryName (rawwit Tacarg.wit_tactic, Aself) - else if check_lvl (n + 1) then EntryName (rawwit Tacarg.wit_tactic, Anext) + if check_lvl n then EntryName (rawwit Tacarg.wit_tactic, Pcoq.G.Symbol.self) + else if check_lvl (n + 1) then EntryName (rawwit Tacarg.wit_tactic, Pcoq.G.Symbol.next) else EntryName (rawwit Tacarg.wit_tactic, atactic n) let 
get_separator = function @@ -140,23 +140,23 @@ let head_is_ident tg = match tg.tacgram_prods with let rec prod_item_of_symbol lev = function | Extend.Ulist1 s -> let EntryName (Rawwit typ, e) = prod_item_of_symbol lev s in - EntryName (Rawwit (ListArg typ), Alist1 e) + EntryName (Rawwit (ListArg typ), Pcoq.G.Symbol.list1 e) | Extend.Ulist0 s -> let EntryName (Rawwit typ, e) = prod_item_of_symbol lev s in - EntryName (Rawwit (ListArg typ), Alist0 e) + EntryName (Rawwit (ListArg typ), Pcoq.G.Symbol.list0 e) | Extend.Ulist1sep (s, sep) -> let EntryName (Rawwit typ, e) = prod_item_of_symbol lev s in - EntryName (Rawwit (ListArg typ), Alist1sep (e, Atoken (CLexer.terminal sep))) + EntryName (Rawwit (ListArg typ), Pcoq.G.Symbol.list1sep e (Pcoq.G.Symbol.token (CLexer.terminal sep)) false) | Extend.Ulist0sep (s, sep) -> let EntryName (Rawwit typ, e) = prod_item_of_symbol lev s in - EntryName (Rawwit (ListArg typ), Alist0sep (e, Atoken (CLexer.terminal sep))) + EntryName (Rawwit (ListArg typ), Pcoq.G.Symbol.list0sep e (Pcoq.G.Symbol.token (CLexer.terminal sep)) false) | Extend.Uopt s -> let EntryName (Rawwit typ, e) = prod_item_of_symbol lev s in - EntryName (Rawwit (OptArg typ), Aopt e) + EntryName (Rawwit (OptArg typ), Pcoq.G.Symbol.opt e) | Extend.Uentry arg -> let ArgT.Any tag = arg in let wit = ExtraArg tag in - EntryName (Rawwit wit, Extend.Aentry (genarg_grammar wit)) + EntryName (Rawwit wit, Pcoq.G.Symbol.nterm (genarg_grammar wit)) | Extend.Uentryl (s, n) -> let ArgT.Any tag = s in assert (coincide (ArgT.repr tag) "tactic" 0); @@ -399,19 +399,19 @@ let create_ltac_quotation name cast (e, l) = in let () = ltac_quotations := String.Set.add name !ltac_quotations in let entry = match l with - | None -> Aentry e - | Some l -> Aentryl (e, string_of_int l) + | None -> Pcoq.G.Symbol.nterm e + | Some l -> Pcoq.G.Symbol.nterml e (string_of_int l) in (* let level = Some "1" in *) let level = None in let assoc = None in let rule = Next (Next (Next (Next (Next (Stop, - Atoken (CLexer.terminal name)), - Atoken (CLexer.terminal ":")), - Atoken (CLexer.terminal "(")), + Pcoq.G.Symbol.token (CLexer.terminal name)), + Pcoq.G.Symbol.token (CLexer.terminal ":")), + Pcoq.G.Symbol.token (CLexer.terminal "(")), entry), - Atoken (CLexer.terminal ")")) + Pcoq.G.Symbol.token (CLexer.terminal ")")) in let action _ v _ _ _ loc = cast (Some loc, v) in let gram = (level, assoc, [Rule (rule, action)]) in diff --git a/user-contrib/Ltac2/g_ltac2.mlg b/user-contrib/Ltac2/g_ltac2.mlg index 5e04959e9a..52d92d87c0 100644 --- a/user-contrib/Ltac2/g_ltac2.mlg +++ b/user-contrib/Ltac2/g_ltac2.mlg @@ -826,12 +826,11 @@ END let () = -let open Extend in let open Tok in let (++) r s = Next (r, s) in let rules = [ Rule ( - Stop ++ Aentry test_dollar_ident ++ Atoken (PKEYWORD "$") ++ Aentry Prim.ident, + Stop ++ Pcoq.G.Symbol.nterm test_dollar_ident ++ Pcoq.G.Symbol.token (PKEYWORD "$") ++ Pcoq.G.Symbol.nterm Prim.ident, begin fun id _ _ loc -> let id = Loc.tag ~loc id in let arg = Genarg.in_gen (Genarg.rawwit Tac2env.wit_ltac2_quotation) id in @@ -840,7 +839,7 @@ let rules = [ ); Rule ( - Stop ++ Aentry test_ampersand_ident ++ Atoken (PKEYWORD "&") ++ Aentry Prim.ident, + Stop ++ Pcoq.G.Symbol.nterm test_ampersand_ident ++ Pcoq.G.Symbol.token (PKEYWORD "&") ++ Pcoq.G.Symbol.nterm Prim.ident, begin fun id _ _ loc -> let tac = Tac2quote.of_exact_hyp ~loc (CAst.make ~loc id) in let arg = Genarg.in_gen (Genarg.rawwit Tac2env.wit_ltac2_constr) tac in @@ -849,8 +848,8 @@ let rules = [ ); Rule ( - Stop ++ Atoken (PIDENT (Some "ltac2")) ++ 
Atoken (PKEYWORD ":") ++ - Atoken (PKEYWORD "(") ++ Aentry tac2expr ++ Atoken (PKEYWORD ")"), + Stop ++ Pcoq.G.Symbol.token (PIDENT (Some "ltac2")) ++ Pcoq.G.Symbol.token (PKEYWORD ":") ++ + Pcoq.G.Symbol.token (PKEYWORD "(") ++ Pcoq.G.Symbol.nterm tac2expr ++ Pcoq.G.Symbol.token (PKEYWORD ")"), begin fun _ tac _ _ _ loc -> let arg = Genarg.in_gen (Genarg.rawwit Tac2env.wit_ltac2_constr) tac in CAst.make ~loc (CHole (None, Namegen.IntroAnonymous, Some arg)) diff --git a/user-contrib/Ltac2/tac2core.ml b/user-contrib/Ltac2/tac2core.ml index 38b05bed6b..2edbcc38f5 100644 --- a/user-contrib/Ltac2/tac2core.ml +++ b/user-contrib/Ltac2/tac2core.ml @@ -1431,7 +1431,7 @@ let q_unit = CAst.make @@ CTacCst (AbsKn (Tuple 0)) let add_generic_scope s entry arg = let parse = function | [] -> - let scope = Extend.Aentry entry in + let scope = Pcoq.G.Symbol.nterm entry in let act x = CAst.make @@ CTacExt (arg, x) in Tac2entries.ScopeRule (scope, act) | arg -> scope_fail s arg @@ -1442,14 +1442,14 @@ open CAst let () = add_scope "keyword" begin function | [SexprStr {loc;v=s}] -> - let scope = Extend.Atoken (Tok.PKEYWORD s) in + let scope = Pcoq.G.Symbol.token (Tok.PKEYWORD s) in Tac2entries.ScopeRule (scope, (fun _ -> q_unit)) | arg -> scope_fail "keyword" arg end let () = add_scope "terminal" begin function | [SexprStr {loc;v=s}] -> - let scope = Extend.Atoken (CLexer.terminal s) in + let scope = Pcoq.G.Symbol.token (CLexer.terminal s) in Tac2entries.ScopeRule (scope, (fun _ -> q_unit)) | arg -> scope_fail "terminal" arg end @@ -1457,13 +1457,13 @@ end let () = add_scope "list0" begin function | [tok] -> let Tac2entries.ScopeRule (scope, act) = Tac2entries.parse_scope tok in - let scope = Extend.Alist0 scope in + let scope = Pcoq.G.Symbol.list0 scope in let act l = Tac2quote.of_list act l in Tac2entries.ScopeRule (scope, act) | [tok; SexprStr {v=str}] -> let Tac2entries.ScopeRule (scope, act) = Tac2entries.parse_scope tok in - let sep = Extend.Atoken (CLexer.terminal str) in - let scope = Extend.Alist0sep (scope, sep) in + let sep = Pcoq.G.Symbol.token (CLexer.terminal str) in + let scope = Pcoq.G.Symbol.list0sep scope sep false in let act l = Tac2quote.of_list act l in Tac2entries.ScopeRule (scope, act) | arg -> scope_fail "list0" arg @@ -1472,13 +1472,13 @@ end let () = add_scope "list1" begin function | [tok] -> let Tac2entries.ScopeRule (scope, act) = Tac2entries.parse_scope tok in - let scope = Extend.Alist1 scope in + let scope = Pcoq.G.Symbol.list1 scope in let act l = Tac2quote.of_list act l in Tac2entries.ScopeRule (scope, act) | [tok; SexprStr {v=str}] -> let Tac2entries.ScopeRule (scope, act) = Tac2entries.parse_scope tok in - let sep = Extend.Atoken (CLexer.terminal str) in - let scope = Extend.Alist1sep (scope, sep) in + let sep = Pcoq.G.Symbol.token (CLexer.terminal str) in + let scope = Pcoq.G.Symbol.list1sep scope sep false in let act l = Tac2quote.of_list act l in Tac2entries.ScopeRule (scope, act) | arg -> scope_fail "list1" arg @@ -1487,7 +1487,7 @@ end let () = add_scope "opt" begin function | [tok] -> let Tac2entries.ScopeRule (scope, act) = Tac2entries.parse_scope tok in - let scope = Extend.Aopt scope in + let scope = Pcoq.G.Symbol.opt scope in let act opt = match opt with | None -> CAst.make @@ CTacCst (AbsKn (Other Core.c_none)) @@ -1500,7 +1500,7 @@ end let () = add_scope "self" begin function | [] -> - let scope = Extend.Aself in + let scope = Pcoq.G.Symbol.self in let act tac = tac in Tac2entries.ScopeRule (scope, act) | arg -> scope_fail "self" arg @@ -1508,7 +1508,7 @@ end let 
() = add_scope "next" begin function | [] -> - let scope = Extend.Anext in + let scope = Pcoq.G.Symbol.next in let act tac = tac in Tac2entries.ScopeRule (scope, act) | arg -> scope_fail "next" arg @@ -1517,12 +1517,12 @@ end let () = add_scope "tactic" begin function | [] -> (* Default to level 5 parsing *) - let scope = Extend.Aentryl (tac2expr, "5") in + let scope = Pcoq.G.Symbol.nterml tac2expr "5" in let act tac = tac in Tac2entries.ScopeRule (scope, act) | [SexprInt {loc;v=n}] as arg -> let () = if n < 0 || n > 6 then scope_fail "tactic" arg in - let scope = Extend.Aentryl (tac2expr, string_of_int n) in + let scope = Pcoq.G.Symbol.nterml tac2expr (string_of_int n) in let act tac = tac in Tac2entries.ScopeRule (scope, act) | arg -> scope_fail "tactic" arg @@ -1543,12 +1543,12 @@ let () = add_scope "constr" (fun arg -> arg in let act e = Tac2quote.of_constr ~delimiters e in - Tac2entries.ScopeRule (Extend.Aentry Pcoq.Constr.constr, act) + Tac2entries.ScopeRule (Pcoq.G.Symbol.nterm Pcoq.Constr.constr, act) ) let add_expr_scope name entry f = add_scope name begin function - | [] -> Tac2entries.ScopeRule (Extend.Aentry entry, f) + | [] -> Tac2entries.ScopeRule (Pcoq.G.Symbol.nterm entry, f) | arg -> scope_fail name arg end @@ -1578,28 +1578,7 @@ let () = add_generic_scope "pattern" Pcoq.Constr.constr Tac2quote.wit_pattern (** seq scope, a bit hairy *) -open Extend -exception SelfSymbol - -let rec generalize_symbol : - type a tr s. (s, tr, a) Extend.symbol -> (s, Extend.norec, a) Extend.symbol = function -| Atoken tok -> Atoken tok -| Alist1 e -> Alist1 (generalize_symbol e) -| Alist1sep (e, sep) -> - let e = generalize_symbol e in - let sep = generalize_symbol sep in - Alist1sep (e, sep) -| Alist0 e -> Alist0 (generalize_symbol e) -| Alist0sep (e, sep) -> - let e = generalize_symbol e in - let sep = generalize_symbol sep in - Alist0sep (e, sep) -| Aopt e -> Aopt (generalize_symbol e) -| Aself -> raise SelfSymbol -| Anext -> raise SelfSymbol -| Aentry e -> Aentry e -| Aentryl (e, l) -> Aentryl (e, l) -| Arules r -> Arules r +open Pcoq type _ converter = | CvNil : (Loc.t -> raw_tacexpr) converter @@ -1611,14 +1590,19 @@ let rec apply : type a. 
a converter -> raw_tacexpr list -> a = function | CvCns (c, Some f) -> fun accu x -> apply c (f x :: accu) type seqrule = -| Seqrule : (Tac2expr.raw_tacexpr, Extend.norec, 'act, Loc.t -> raw_tacexpr) rule * 'act converter -> seqrule +| Seqrule : (Tac2expr.raw_tacexpr, Pcoq.norec, 'act, Loc.t -> raw_tacexpr) rule * 'act converter -> seqrule let rec make_seq_rule = function | [] -> Seqrule (Stop, CvNil) | tok :: rem -> let Tac2entries.ScopeRule (scope, f) = Tac2entries.parse_scope tok in - let scope = generalize_symbol scope in + let scope = + match Pcoq.G.generalize_symbol scope with + | None -> + CErrors.user_err (str "Recursive symbols (self / next) are not allowed in local rules") + | Some scope -> scope + in let Seqrule (r, c) = make_seq_rule rem in let r = NextNoRec (r, scope) in let f = match tok with @@ -1629,11 +1613,8 @@ let rec make_seq_rule = function let () = add_scope "seq" begin fun toks -> let scope = - try - let Seqrule (r, c) = make_seq_rule (List.rev toks) in - Arules [Rules (r, apply c [])] - with SelfSymbol -> - CErrors.user_err (str "Recursive symbols (self / next) are not allowed in local rules") + let Seqrule (r, c) = make_seq_rule (List.rev toks) in + Pcoq.G.Symbol.rules ~warning:None [Rules (r, apply c [])] in Tac2entries.ScopeRule (scope, (fun e -> e)) end diff --git a/user-contrib/Ltac2/tac2entries.ml b/user-contrib/Ltac2/tac2entries.ml index e9945794d3..d5d5bad987 100644 --- a/user-contrib/Ltac2/tac2entries.ml +++ b/user-contrib/Ltac2/tac2entries.ml @@ -558,7 +558,7 @@ type 'a token = | TacNonTerm of Name.t * 'a type scope_rule = -| ScopeRule : (raw_tacexpr, _, 'a) Extend.symbol * ('a -> raw_tacexpr) -> scope_rule +| ScopeRule : (raw_tacexpr, _, 'a) Pcoq.symbol * ('a -> raw_tacexpr) -> scope_rule type scope_interpretation = sexpr list -> scope_rule @@ -583,7 +583,7 @@ let parse_scope = function CErrors.user_err ?loc (str "Unknown scope" ++ spc () ++ Names.Id.print id) | SexprStr {v=str} -> let v_unit = CAst.make @@ CTacCst (AbsKn (Tuple 0)) in - ScopeRule (Extend.Atoken (Tok.PIDENT (Some str)), (fun _ -> v_unit)) + ScopeRule (Pcoq.G.Symbol.token (Tok.PIDENT (Some str)), (fun _ -> v_unit)) | tok -> let loc = loc_of_token tok in CErrors.user_err ?loc (str "Invalid parsing token") @@ -611,19 +611,19 @@ type synext = { type krule = | KRule : - (raw_tacexpr, _, 'act, Loc.t -> raw_tacexpr) Extend.rule * + (raw_tacexpr, _, 'act, Loc.t -> raw_tacexpr) Pcoq.rule * ((Loc.t -> (Name.t * raw_tacexpr) list -> raw_tacexpr) -> 'act) -> krule let rec get_rule (tok : scope_rule token list) : krule = match tok with -| [] -> KRule (Extend.Stop, fun k loc -> k loc []) +| [] -> KRule (Pcoq.Stop, fun k loc -> k loc []) | TacNonTerm (na, ScopeRule (scope, inj)) :: tok -> let KRule (rule, act) = get_rule tok in - let rule = Extend.Next (rule, scope) in + let rule = Pcoq.Next (rule, scope) in let act k e = act (fun loc acc -> k loc ((na, inj e) :: acc)) in KRule (rule, act) | TacTerm t :: tok -> let KRule (rule, act) = get_rule tok in - let rule = Extend.Next (rule, Extend.Atoken (CLexer.terminal t)) in + let rule = Pcoq.Next (rule, Pcoq.G.Symbol.token (CLexer.terminal t)) in let act k _ = act k in KRule (rule, act) @@ -637,7 +637,7 @@ let perform_notation syn st = let bnd = List.map map args in CAst.make ~loc @@ CTacLet (false, bnd, syn.synext_exp) in - let rule = Extend.Rule (rule, act mk) in + let rule = Pcoq.Rule (rule, act mk) in let lev = match syn.synext_lev with | None -> None | Some lev -> Some (string_of_int lev) diff --git a/user-contrib/Ltac2/tac2entries.mli 
b/user-contrib/Ltac2/tac2entries.mli index fed43a4dd5..4938e96cae 100644 --- a/user-contrib/Ltac2/tac2entries.mli +++ b/user-contrib/Ltac2/tac2entries.mli @@ -36,7 +36,7 @@ val perform_eval : pstate:Proof_global.t option -> raw_tacexpr -> unit (** {5 Notations} *) type scope_rule = -| ScopeRule : (raw_tacexpr, _, 'a) Extend.symbol * ('a -> raw_tacexpr) -> scope_rule +| ScopeRule : (raw_tacexpr, _, 'a) Pcoq.symbol * ('a -> raw_tacexpr) -> scope_rule type scope_interpretation = sexpr list -> scope_rule diff --git a/vernac/egramcoq.ml b/vernac/egramcoq.ml index 5dae389a62..ba542101c8 100644 --- a/vernac/egramcoq.ml +++ b/vernac/egramcoq.ml @@ -325,51 +325,56 @@ let is_binder_level custom (custom',from) e = match e with | _ -> false let make_sep_rules = function - | [tk] -> Atoken tk + | [tk] -> + Pcoq.G.Symbol.token tk | tkl -> - let rec mkrule : 'a Tok.p list -> 'a rules = function - | [] -> Rules (Stop, fun _ -> (* dropped anyway: *) "") - | tkn :: rem -> - let Rules (r, f) = mkrule rem in - let r = NextNoRec (r, Atoken tkn) in - Rules (r, fun _ -> f) - in - let r = mkrule (List.rev tkl) in - Arules [r] + let rec mkrule : 'a Tok.p list -> 'a rules = function + | [] -> + Rules (Stop, fun _ -> (* dropped anyway: *) "") + | tkn :: rem -> + let Rules (r, f) = mkrule rem in + let r = NextNoRec (r, Pcoq.G.Symbol.token tkn) in + Rules (r, fun _ -> f) + in + let r = mkrule (List.rev tkl) in + Pcoq.G.Symbol.rules ~warning:None [r] type ('s, 'a) mayrec_symbol = | MayRecNo : ('s, norec, 'a) symbol -> ('s, 'a) mayrec_symbol | MayRecMay : ('s, mayrec, 'a) symbol -> ('s, 'a) mayrec_symbol let symbol_of_target : type s. _ -> _ -> _ -> _ -> s target -> (s, s) mayrec_symbol = fun custom p assoc from forpat -> - if is_binder_level custom from p then (* Prevent self *) MayRecNo (Aentryl (target_entry custom forpat, "200")) - else if is_self custom from p then MayRecMay Aself + if is_binder_level custom from p + then + (* Prevent self *) + MayRecNo (Pcoq.G.Symbol.nterml (target_entry custom forpat) "200") + else if is_self custom from p then MayRecMay Pcoq.G.Symbol.self else let g = target_entry custom forpat in let lev = adjust_level custom assoc from p in begin match lev with - | DefaultLevel -> MayRecNo (Aentry g) - | NextLevel -> MayRecMay Anext - | NumLevel lev -> MayRecNo (Aentryl (g, string_of_int lev)) + | DefaultLevel -> MayRecNo (Pcoq.G.Symbol.nterm g) + | NextLevel -> MayRecMay Pcoq.G.Symbol.next + | NumLevel lev -> MayRecNo (Pcoq.G.Symbol.nterml g (string_of_int lev)) end let symbol_of_entry : type s r. 
_ -> _ -> (s, r) entry -> (s, r) mayrec_symbol = fun assoc from typ -> match typ with | TTConstr (s, p, forpat) -> symbol_of_target s p assoc from forpat | TTConstrList (s, typ', [], forpat) -> begin match symbol_of_target s typ' assoc from forpat with - | MayRecNo s -> MayRecNo (Alist1 s) - | MayRecMay s -> MayRecMay (Alist1 s) end + | MayRecNo s -> MayRecNo (Pcoq.G.Symbol.list1 s) + | MayRecMay s -> MayRecMay (Pcoq.G.Symbol.list1 s) end | TTConstrList (s, typ', tkl, forpat) -> begin match symbol_of_target s typ' assoc from forpat with - | MayRecNo s -> MayRecNo (Alist1sep (s, make_sep_rules tkl)) - | MayRecMay s -> MayRecMay (Alist1sep (s, make_sep_rules tkl)) end -| TTPattern p -> MayRecNo (Aentryl (Constr.pattern, string_of_int p)) -| TTClosedBinderList [] -> MayRecNo (Alist1 (Aentry Constr.binder)) -| TTClosedBinderList tkl -> MayRecNo (Alist1sep (Aentry Constr.binder, make_sep_rules tkl)) -| TTName -> MayRecNo (Aentry Prim.name) -| TTOpenBinderList -> MayRecNo (Aentry Constr.open_binders) -| TTBigint -> MayRecNo (Aentry Prim.bignat) -| TTReference -> MayRecNo (Aentry Constr.global) + | MayRecNo s -> MayRecNo (Pcoq.G.Symbol.list1sep s (make_sep_rules tkl) false) + | MayRecMay s -> MayRecMay (Pcoq.G.Symbol.list1sep s (make_sep_rules tkl) false) end +| TTPattern p -> MayRecNo (Pcoq.G.Symbol.nterml Constr.pattern (string_of_int p)) +| TTClosedBinderList [] -> MayRecNo (Pcoq.G.Symbol.list1 (Pcoq.G.Symbol.nterm Constr.binder)) +| TTClosedBinderList tkl -> MayRecNo (Pcoq.G.Symbol.list1sep (Pcoq.G.Symbol.nterm Constr.binder) (make_sep_rules tkl) false) +| TTName -> MayRecNo (Pcoq.G.Symbol.nterm Prim.name) +| TTOpenBinderList -> MayRecNo (Pcoq.G.Symbol.nterm Constr.open_binders) +| TTBigint -> MayRecNo (Pcoq.G.Symbol.nterm Prim.bignat) +| TTReference -> MayRecNo (Pcoq.G.Symbol.nterm Constr.global) let interp_entry forpat e = match e with | ETProdName -> TTAny TTName @@ -461,16 +466,16 @@ let rec ty_eval : type s a. (s, a, Loc.t -> s) ty_rule -> s gen_eval -> s env -> ty_eval rem f { env with constrs; constrlists; } type ('s, 'a, 'r) mayrec_rule = -| MayRecRNo : ('s, Extend.norec, 'a, 'r) Extend.rule -> ('s, 'a, 'r) mayrec_rule -| MayRecRMay : ('s, Extend.mayrec, 'a, 'r) Extend.rule -> ('s, 'a, 'r) mayrec_rule +| MayRecRNo : ('s, norec, 'a, 'r) rule -> ('s, 'a, 'r) mayrec_rule +| MayRecRMay : ('s, mayrec, 'a, 'r) rule -> ('s, 'a, 'r) mayrec_rule let rec ty_erase : type s a r. 
(s, a, r) ty_rule -> (s, a, r) mayrec_rule = function | TyStop -> MayRecRNo Stop | TyMark (_, _, _, r) -> ty_erase r | TyNext (rem, TyTerm tok) -> begin match ty_erase rem with - | MayRecRNo rem -> MayRecRMay (Next (rem, Atoken tok)) - | MayRecRMay rem -> MayRecRMay (Next (rem, Atoken tok)) end + | MayRecRNo rem -> MayRecRMay (Next (rem, Pcoq.G.Symbol.token tok)) + | MayRecRMay rem -> MayRecRMay (Next (rem, Pcoq.G.Symbol.token tok)) end | TyNext (rem, TyNonTerm (_, _, s, _)) -> begin match ty_erase rem, s with | MayRecRNo rem, MayRecNo s -> MayRecRMay (Next (rem, s)) @@ -522,7 +527,13 @@ let rec pure_sublevels' assoc from forpat level = function let rem = pure_sublevels' assoc from forpat level rem in let push where p rem = match symbol_of_target where p assoc from forpat with - | MayRecNo (Aentryl (_,i)) when different_levels (fst from,level) (where,i) -> (where,int_of_string i) :: rem + | MayRecNo sym -> + (match Pcoq.G.level_of_nonterm sym with + | None -> rem + | Some i -> + if different_levels (fst from,level) (where,i) then + (where,int_of_string i) :: rem + else rem) | _ -> rem in (match e with | ETProdPattern i -> push InConstrEntry (NumLevel i,InternalProd) rem diff --git a/vernac/egramml.ml b/vernac/egramml.ml index 793aad6b24..bcd8de1ff4 100644 --- a/vernac/egramml.ml +++ b/vernac/egramml.ml @@ -24,9 +24,9 @@ type 's grammar_prod_item = type 'a ty_arg = ('a -> raw_generic_argument) type ('self, 'tr, _, 'r) ty_rule = -| TyStop : ('self, Extend.norec, 'r, 'r) ty_rule -| TyNext : ('self, _, 'a, 'r) ty_rule * ('self, _, 'b) Extend.symbol * 'b ty_arg option -> - ('self, Extend.mayrec, 'b -> 'a, 'r) ty_rule +| TyStop : ('self, Pcoq.norec, 'r, 'r) ty_rule +| TyNext : ('self, _, 'a, 'r) ty_rule * ('self, _, 'b) Pcoq.symbol * 'b ty_arg option -> + ('self, Pcoq.mayrec, 'b -> 'a, 'r) ty_rule type ('self, 'r) any_ty_rule = | AnyTyRule : ('self, _, 'act, Loc.t -> 'r) ty_rule -> ('self, 'r) any_ty_rule @@ -35,7 +35,7 @@ let rec ty_rule_of_gram = function | [] -> AnyTyRule TyStop | GramTerminal s :: rem -> let AnyTyRule rem = ty_rule_of_gram rem in - let tok = Atoken (CLexer.terminal s) in + let tok = Pcoq.G.Symbol.token (CLexer.terminal s) in let r = TyNext (rem, tok, None) in AnyTyRule r | GramNonTerminal (_, (t, tok)) :: rem -> @@ -44,9 +44,9 @@ let rec ty_rule_of_gram = function let r = TyNext (rem, tok, inj) in AnyTyRule r -let rec ty_erase : type s tr a r. (s, tr, a, r) ty_rule -> (s, tr, a, r) Extend.rule = function -| TyStop -> Extend.Stop -| TyNext (rem, tok, _) -> Extend.Next (ty_erase rem, tok) +let rec ty_erase : type s tr a r. (s, tr, a, r) ty_rule -> (s, tr, a, r) Pcoq.rule = function +| TyStop -> Pcoq.Stop +| TyNext (rem, tok, _) -> Pcoq.Next (ty_erase rem, tok) type 'r gen_eval = Loc.t -> raw_generic_argument list -> 'r @@ -62,7 +62,7 @@ let make_rule f prod = let symb = ty_erase ty_rule in let f loc l = f loc (List.rev l) in let act = ty_eval ty_rule f in - Extend.Rule (symb, act) + Pcoq.Rule (symb, act) let rec proj_symbol : type a b c. 
(a, b, c) ty_user_symbol -> (a, b, c) genarg_type = function | TUentry a -> ExtraArg a diff --git a/vernac/egramml.mli b/vernac/egramml.mli index 7f6656b079..e6e4748b59 100644 --- a/vernac/egramml.mli +++ b/vernac/egramml.mli @@ -18,7 +18,7 @@ open Vernacexpr type 's grammar_prod_item = | GramTerminal of string | GramNonTerminal : ('a Genarg.raw_abstract_argument_type * - ('s, _, 'a) Extend.symbol) Loc.located -> 's grammar_prod_item + ('s, _, 'a) Pcoq.symbol) Loc.located -> 's grammar_prod_item val extend_vernac_command_grammar : extend_name -> vernac_expr Pcoq.Entry.t option -> @@ -32,4 +32,4 @@ val proj_symbol : ('a, 'b, 'c) Extend.ty_user_symbol -> ('a, 'b, 'c) Genarg.gena val make_rule : (Loc.t -> Genarg.raw_generic_argument list -> 'a) -> - 'a grammar_prod_item list -> 'a Extend.production_rule + 'a grammar_prod_item list -> 'a Pcoq.production_rule diff --git a/vernac/pvernac.ml b/vernac/pvernac.ml index 08625b41a6..aebded72f8 100644 --- a/vernac/pvernac.ml +++ b/vernac/pvernac.ml @@ -51,12 +51,11 @@ module Vernac_ = let noedit_mode = gec_vernac "noedit_command" let () = - let open Extend in let act_vernac v loc = Some v in let act_eoi _ loc = None in let rule = [ - Rule (Next (Stop, Atoken Tok.PEOI), act_eoi); - Rule (Next (Stop, Aentry vernac_control), act_vernac); + Rule (Next (Stop, Pcoq.G.Symbol.token Tok.PEOI), act_eoi); + Rule (Next (Stop, Pcoq.G.Symbol.nterm vernac_control), act_vernac); ] in Pcoq.grammar_extend main_entry (None, [None, None, rule]) diff --git a/vernac/vernacextend.ml b/vernac/vernacextend.ml index 0e8202da9f..fc6ece27cd 100644 --- a/vernac/vernacextend.ml +++ b/vernac/vernacextend.ml @@ -166,15 +166,15 @@ let rec untype_command : type r s. (r, s) ty_sig -> r -> plugin_args -> vernac_c | Some Refl -> untype_command ty (f v) args end -let rec untype_user_symbol : type s a b c. (a, b, c) Extend.ty_user_symbol -> (s, Extend.norec, a) Extend.symbol = +let rec untype_user_symbol : type s a b c. (a, b, c) Extend.ty_user_symbol -> (s, Pcoq.norec, a) Pcoq.symbol = let open Extend in function -| TUlist1 l -> Alist1 (untype_user_symbol l) -| TUlist1sep (l, s) -> Alist1sep (untype_user_symbol l, Atoken (CLexer.terminal s)) -| TUlist0 l -> Alist0 (untype_user_symbol l) -| TUlist0sep (l, s) -> Alist0sep (untype_user_symbol l, Atoken (CLexer.terminal s)) -| TUopt o -> Aopt (untype_user_symbol o) -| TUentry a -> Aentry (Pcoq.genarg_grammar (Genarg.ExtraArg a)) -| TUentryl (a, i) -> Aentryl (Pcoq.genarg_grammar (Genarg.ExtraArg a), string_of_int i) +| TUlist1 l -> Pcoq.G.Symbol.list1 (untype_user_symbol l) +| TUlist1sep (l, s) -> Pcoq.G.Symbol.list1sep (untype_user_symbol l) (Pcoq.G.Symbol.token (CLexer.terminal s)) false +| TUlist0 l -> Pcoq.G.Symbol.list0 (untype_user_symbol l) +| TUlist0sep (l, s) -> Pcoq.G.Symbol.list0sep (untype_user_symbol l) (Pcoq.G.Symbol.token (CLexer.terminal s)) false +| TUopt o -> Pcoq.G.Symbol.opt (untype_user_symbol o) +| TUentry a -> Pcoq.G.Symbol.nterm (Pcoq.genarg_grammar (Genarg.ExtraArg a)) +| TUentryl (a, i) -> Pcoq.G.Symbol.nterml (Pcoq.genarg_grammar (Genarg.ExtraArg a)) (string_of_int i) let rec untype_grammar : type r s. 
(r, s) ty_sig -> 'a Egramml.grammar_prod_item list = function | TyNil -> [] @@ -229,7 +229,7 @@ let vernac_extend ~command ?classifier ?entry ext = type 'a argument_rule = | Arg_alias of 'a Pcoq.Entry.t -| Arg_rules of 'a Extend.production_rule list +| Arg_rules of 'a Pcoq.production_rule list type 'a vernac_argument = { arg_printer : Environ.env -> Evd.evar_map -> 'a -> Pp.t; diff --git a/vernac/vernacextend.mli b/vernac/vernacextend.mli index 90c00415d4..0acb5f43f9 100644 --- a/vernac/vernacextend.mli +++ b/vernac/vernacextend.mli @@ -111,7 +111,7 @@ type 'a argument_rule = | Arg_alias of 'a Pcoq.Entry.t (** This is used because CAMLP5 parser can be dumb about rule factorization, which sometimes requires two entries to be the same. *) -| Arg_rules of 'a Extend.production_rule list +| Arg_rules of 'a Pcoq.production_rule list (** There is a discrepancy here as we use directly extension rules and thus entries instead of ty_user_symbol and thus arguments as roots. *) |
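
For reference, here is a hedged end-to-end sketch of registering a rule through the post-patch interface, modeled on the pvernac.ml hunk above; the entry name, terminal, and action are hypothetical and only illustrate the shape of the new API.

```ocaml
(* Hypothetical plugin-side usage after this patch; names are illustrative.
   Symbols come from Pcoq.G.Symbol, and grammar_extend now receives the
   position and levels directly, the of_coq_extend_statement conversion
   having been removed. *)
let my_entry : int Pcoq.Entry.t = Pcoq.Entry.create "my_entry"

let () =
  let rules =
    [ Pcoq.Rule
        (Pcoq.Next (Pcoq.Stop, Pcoq.G.Symbol.token (CLexer.terminal "answer")),
         (fun _kwd _loc -> 42)) ]
  in
  Pcoq.grammar_extend my_entry (None, [None, None, rules])
```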