(************************************************************************)
(*         *   The Coq Proof Assistant / The Coq Development Team       *)
(*  v      *         Copyright INRIA, CNRS and contributors             *)
(* <O___,, * (see version control and CREDITS file for authors & dates) *)
(*   \VV/  **************************************************************)
(*    //   *    This file is distributed under the terms of the         *)
(*         *     GNU Lesser General Public License Version 2.1          *)
(*         *     (see LICENSE file for the text of the license)         *)
(************************************************************************)

{

let make_bullet s =
  let open Proof_bullet in
  let n = String.length s in
  match s.[0] with
  | '-' -> Dash n
  | '+' -> Plus n
  | '*' -> Star n
  | _ -> assert false

(* For now we just keep the top-level location of the whole vernacular, that
   is to say, including attributes and control flags; this is not very
   convenient for advanced clients though, so in the future it'd be cool to
   actually locate the attributes and control flags individually too. *)
let add_control_flag ~loc ~flag { CAst.v = cmd } =
  CAst.make ~loc { cmd with control = flag :: cmd.control }

let test_hash_ident =
  let open Pcoq.Lookahead in
  to_entry "test_hash_ident" begin
    lk_kw "#" >> lk_ident >> check_no_space
  end

let test_id_colon =
  let open Pcoq.Lookahead in
  to_entry "test_id_colon" begin
    lk_ident >> lk_kw ":"
  end

}

GRAMMAR EXTEND Gram
  GLOBAL: vernac_control quoted_attributes gallina_ext noedit_mode subprf;
  vernac_control: FIRST
    [ [ IDENT "Time"; c = vernac_control ->
          { add_control_flag ~loc ~flag:(ControlTime false) c }
      | IDENT "Redirect"; s = ne_string; c = vernac_control ->
          { add_control_flag ~loc ~flag:(ControlRedirect s) c }
      | IDENT "Timeout"; n = natural; c = vernac_control ->
          { add_control_flag ~loc ~flag:(ControlTimeout n) c }
      | IDENT "Fail"; c = vernac_control ->
          { add_control_flag ~loc ~flag:ControlFail c }
      | v = decorated_vernac ->
          { let (attrs, expr) = v in
            CAst.make ~loc { control = []; attrs; expr = expr } } ] ]
  ;
  decorated_vernac:
    [ [ a = LIST0 quoted_attributes ; fv = vernac ->
          { let (f, v) = fv in (List.append (List.flatten a) f, v) } ] ]
  ;
  quoted_attributes:
    [ [ "#[" ; a = attribute_list ; "]" -> { a } ] ]
  ;
  attribute_list:
    [ [ a = LIST0 attribute SEP "," -> { a } ] ]
  ;
  attribute:
    [ [ k = ident ; v = attr_value -> { Names.Id.to_string k, v }
      (* Required because "ident" is declared a keyword when loading Ltac. *)
      | IDENT "using" ; v = attr_value -> { "using", v } ] ]
  ;
  attr_value:
    [ [ "=" ; v = string -> { VernacFlagLeaf (FlagString v) }
      | "=" ; v = IDENT -> { VernacFlagLeaf (FlagIdent v) }
      | "(" ; a = attribute_list ; ")" -> { VernacFlagList a }
      | -> { VernacFlagEmpty } ] ]
  ;
  legacy_attr:
    [ [ IDENT "Local" -> { ("local", VernacFlagEmpty) }
      | IDENT "Global" -> { ("global", VernacFlagEmpty) }
      | IDENT "Polymorphic" -> { Attributes.vernac_polymorphic_flag }
      | IDENT "Monomorphic" -> { Attributes.vernac_monomorphic_flag }
      | IDENT "Cumulative" -> { ("universes", VernacFlagList ["cumulative", VernacFlagEmpty]) }
      | IDENT "NonCumulative" -> { ("universes", VernacFlagList ["cumulative", VernacFlagLeaf (FlagIdent "no")]) }
      | IDENT "Private" -> { ("private", VernacFlagList ["matching", VernacFlagEmpty]) }
      | IDENT "Program" -> { ("program", VernacFlagEmpty) } ] ]
  ;
  vernac:
    [ [ attrs = LIST0 legacy_attr; v = vernac_aux -> { (attrs, v) } ] ]
  ;
  vernac_aux:
    (* Better to parse "." here: in case of failure (e.g. in coerce_to_var), *)
    (* "." is still in the stream and discard_to_dot works correctly         *)
    [ [ g = gallina; "." -> { g }
      | g = gallina_ext; "." -> { g }
      | c = command; "." -> { c }
      | c = syntax; "."
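(* Illustrative concrete syntax for the control-flag and attribute rules
   above; these examples are not taken from this file and the identifier
   foo is a placeholder:
     Time Timeout 10 Fail Definition foo := tt.
     #[local, universes(polymorphic)] Definition foo := tt.
   Control prefixes accumulate in the control field, bracketed attributes
   are gathered by decorated_vernac, and prefix keywords such as Local or
   Polymorphic go through legacy_attr. *)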
-> { c } | c = subprf -> { c } ] ] ; vernac_aux: LAST [ [ prfcom = command_entry -> { prfcom } ] ] ; noedit_mode: [ [ c = query_command -> { c None } ] ] ; subprf: [ [ s = BULLET -> { VernacBullet (make_bullet s) } | "{" -> { VernacSubproof None } | "}" -> { VernacEndSubproof } ] ] ; END { let warn_plural_command = CWarnings.create ~name:"plural-command" ~category:"pedantic" ~default:CWarnings.Disabled (fun kwd -> strbrk (Printf.sprintf "Command \"%s\" expects more than one assumption." kwd)) let test_plural_form loc kwd = function | [(_,([_],_))] -> warn_plural_command ~loc kwd | _ -> () let test_plural_form_types loc kwd = function | [([_],_)] -> warn_plural_command ~loc kwd | _ -> () let lname_of_lident : lident -> lname = CAst.map (fun s -> Name s) let name_of_ident_decl : ident_decl -> name_decl = on_fst lname_of_lident let test_variance_ident = let open Pcoq.Lookahead in to_entry "test_variance_ident" begin lk_kws ["=";"+";"*"] >> lk_ident end } (* Gallina declarations *) GRAMMAR EXTEND Gram GLOBAL: gallina gallina_ext thm_token def_token assumption_token def_body of_type record_field decl_notations fix_definition ident_decl univ_decl; gallina: (* Definition, Theorem, Variable, Axiom, ... *) [ [ thm = thm_token; id = ident_decl; bl = binders; ":"; c = lconstr; l = LIST0 [ "with"; id = ident_decl; bl = binders; ":"; c = lconstr -> { (id,(bl,c)) } ] -> { VernacStartTheoremProof (thm, (id,(bl,c))::l) } | stre = assumption_token; nl = inline; bl = assum_list -> { VernacAssumption (stre, nl, bl) } | tk = assumptions_token; nl = inline; bl = assum_list -> { let (kwd,stre) = tk in test_plural_form loc kwd bl; VernacAssumption (stre, nl, bl) } | d = def_token; id = ident_decl; b = def_body -> { VernacDefinition (d, name_of_ident_decl id, b) } | IDENT "Let"; id = ident_decl; b = def_body -> { VernacDefinition ((DoDischarge, Let), name_of_ident_decl id, b) } (* Gallina inductive declarations *) | f = finite_token; indl = LIST1 inductive_definition SEP "with" -> { VernacInductive (f, indl) } | "Fixpoint"; recs = LIST1 fix_definition SEP "with" -> { VernacFixpoint (NoDischarge, recs) } | IDENT "Let"; "Fixpoint"; recs = LIST1 fix_definition SEP "with" -> { VernacFixpoint (DoDischarge, recs) } | "CoFixpoint"; corecs = LIST1 cofix_definition SEP "with" -> { VernacCoFixpoint (NoDischarge, corecs) } | IDENT "Let"; "CoFixpoint"; corecs = LIST1 cofix_definition SEP "with" -> { VernacCoFixpoint (DoDischarge, corecs) } | IDENT "Scheme"; l = LIST1 scheme SEP "with" -> { VernacScheme l } | IDENT "Combined"; IDENT "Scheme"; id = identref; IDENT "from"; l = LIST1 identref SEP "," -> { VernacCombinedScheme (id, l) } | IDENT "Register"; g = global; "as"; quid = qualid -> { VernacRegister(g, RegisterCoqlib quid) } | IDENT "Register"; IDENT "Inline"; g = global -> { VernacRegister(g, RegisterInline) } | IDENT "Primitive"; id = ident_decl; typopt = OPT [ ":"; typ = lconstr -> { typ } ]; ":="; r = register_token -> { VernacPrimitive(id, r, typopt) } | IDENT "Universe"; l = LIST1 identref -> { VernacUniverse l } | IDENT "Universes"; l = LIST1 identref -> { VernacUniverse l } | IDENT "Constraint"; l = LIST1 univ_constraint SEP "," -> { VernacConstraint l } ] ] ; register_token: [ [ test_hash_ident; "#"; r = IDENT -> { CPrimitives.parse_op_or_type ~loc r } ] ] ; thm_token: [ [ "Theorem" -> { Theorem } | IDENT "Lemma" -> { Lemma } | IDENT "Fact" -> { Fact } | IDENT "Remark" -> { Remark } | IDENT "Corollary" -> { Corollary } | IDENT "Proposition" -> { Proposition } | IDENT "Property" -> { Property } ] ] ; def_token: [ 
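(* Illustrative sentences accepted by the gallina rules above; the names are
   placeholders and the examples are editorial, not taken from this file:
     Theorem foo : True with bar : True.
     Parameters (n m : nat).
     Scheme Equality for nat.
     Combined Scheme nat_mutind from nat_ind, nat_rec. *)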
[ "Definition" -> { (NoDischarge,Definition) } | IDENT "Example" -> { (NoDischarge,Example) } | IDENT "SubClass" -> { (NoDischarge,SubClass) } ] ] ; assumption_token: [ [ "Hypothesis" -> { (DoDischarge, Logical) } | "Variable" -> { (DoDischarge, Definitional) } | "Axiom" -> { (NoDischarge, Logical) } | "Parameter" -> { (NoDischarge, Definitional) } | IDENT "Conjecture" -> { (NoDischarge, Conjectural) } ] ] ; assumptions_token: [ [ IDENT "Hypotheses" -> { ("Hypotheses", (DoDischarge, Logical)) } | IDENT "Variables" -> { ("Variables", (DoDischarge, Definitional)) } | IDENT "Axioms" -> { ("Axioms", (NoDischarge, Logical)) } | IDENT "Parameters" -> { ("Parameters", (NoDischarge, Definitional)) } | IDENT "Conjectures" -> { ("Conjectures", (NoDischarge, Conjectural)) } ] ] ; inline: [ [ IDENT "Inline"; "("; i = natural; ")" -> { InlineAt i } | IDENT "Inline" -> { DefaultInline } | -> { NoInline } ] ] ; univ_constraint: [ [ l = universe_name; ord = [ "<" -> { Univ.Lt } | "=" -> { Univ.Eq } | "<=" -> { Univ.Le } ]; r = universe_name -> { (l, ord, r) } ] ] ; univ_decl: [ [ "@{" ; l = LIST0 identref; ext = [ "+" -> { true } | -> { false } ]; cs = [ "|"; l' = LIST0 univ_constraint SEP ","; ext = [ "+" -> { true } | -> { false } ]; "}" -> { (l',ext) } | ext = [ "}" -> { true } | bar_cbrace -> { false } ] -> { ([], ext) } ] -> { let open UState in { univdecl_instance = l; univdecl_extensible_instance = ext; univdecl_constraints = fst cs; univdecl_extensible_constraints = snd cs } } ] ] ; variance: [ [ "+" -> { Univ.Variance.Covariant } | "=" -> { Univ.Variance.Invariant } | "*" -> { Univ.Variance.Irrelevant } ] ] ; variance_identref: [ [ id = identref -> { (id, None) } | test_variance_ident; v = variance; id = identref -> { (id, Some v) } (* We need this test to help the parser avoid the conflict between "+" before ident (covariance) and trailing "+" (extra univs allowed) *) ] ] ; cumul_univ_decl: [ [ "@{" ; l = LIST0 variance_identref; ext = [ "+" -> { true } | -> { false } ]; cs = [ "|"; l' = LIST0 univ_constraint SEP ","; ext = [ "+" -> { true } | -> { false } ]; "}" -> { (l',ext) } | ext = [ "}" -> { true } | bar_cbrace -> { false } ] -> { ([], ext) } ] -> { let open UState in { univdecl_instance = l; univdecl_extensible_instance = ext; univdecl_constraints = fst cs; univdecl_extensible_constraints = snd cs } } ] ] ; ident_decl: [ [ i = identref; l = OPT univ_decl -> { (i, l) } ] ] ; cumul_ident_decl: [ [ i = identref; l = OPT cumul_univ_decl -> { (i, l) } ] ] ; finite_token: [ [ IDENT "Inductive" -> { Inductive_kw } | IDENT "CoInductive" -> { CoInductive } | IDENT "Variant" -> { Variant } | IDENT "Record" -> { Record } | IDENT "Structure" -> { Structure } | IDENT "Class" -> { Class true } ] ] ; (* Simple definitions *) def_body: [ [ bl = binders; ":="; red = reduce; c = lconstr -> { match c.CAst.v with | CCast(c, Glob_term.CastConv t) -> DefineBody (bl, red, c, Some t) | _ -> DefineBody (bl, red, c, None) } | bl = binders; ":"; t = lconstr; ":="; red = reduce; c = lconstr -> { DefineBody (bl, red, c, Some t) } | bl = binders; ":"; t = lconstr -> { ProveBody (bl, t) } ] ] ; reduce: [ [ IDENT "Eval"; r = red_expr; "in" -> { Some r } | -> { None } ] ] ; decl_notation: [ [ ntn = ne_lstring; ":="; c = constr; modl = syntax_modifiers; scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { { decl_ntn_string = ntn; decl_ntn_interp = c; decl_ntn_scope = scopt; decl_ntn_modifiers = modl; } } ] ] ; decl_sep: [ [ IDENT "and" -> { () } ] ] ; decl_notations: [ [ "where"; l = LIST1 decl_notation SEP decl_sep -> { l } | 
-> { [] } ] ] ; (* Inductives and records *) opt_constructors_or_fields: [ [ ":="; lc = constructors_or_record -> { lc } | -> { RecordDecl (None, []) } ] ] ; inductive_definition: [ [ oc = opt_coercion; id = cumul_ident_decl; indpar = binders; extrapar = OPT [ "|"; p = binders -> { p } ]; c = OPT [ ":"; c = lconstr -> { c } ]; lc=opt_constructors_or_fields; ntn = decl_notations -> { (((oc,id),(indpar,extrapar),c,lc),ntn) } ] ] ; constructors_or_record: [ [ "|"; l = LIST1 constructor SEP "|" -> { Constructors l } | id = identref ; c = constructor_type; "|"; l = LIST1 constructor SEP "|" -> { Constructors ((c id)::l) } | id = identref ; c = constructor_type -> { Constructors [ c id ] } | cstr = identref; "{"; fs = record_fields; "}" -> { RecordDecl (Some cstr,fs) } | "{";fs = record_fields; "}" -> { RecordDecl (None,fs) } | -> { Constructors [] } ] ] ; (* csort: [ [ s = sort -> CSort (loc,s) ] ] ; *) opt_coercion: [ [ ">" -> { true } | -> { false } ] ] ; (* (co)-fixpoints *) fix_definition: [ [ id_decl = ident_decl; bl = binders_fixannot; rtype = type_cstr; body_def = OPT [":="; def = lconstr -> { def } ]; notations = decl_notations -> { let binders, rec_order = bl in {fname = fst id_decl; univs = snd id_decl; rec_order; binders; rtype; body_def; notations} } ] ] ; cofix_definition: [ [ id_decl = ident_decl; binders = binders; rtype = type_cstr; body_def = OPT [":="; def = lconstr -> { def }]; notations = decl_notations -> { {fname = fst id_decl; univs = snd id_decl; rec_order = (); binders; rtype; body_def; notations} } ]] ; (* Inductive schemes *) scheme: [ [ kind = scheme_kind -> { (None,kind) } | id = identref; ":="; kind = scheme_kind -> { (Some id,kind) } ] ] ; scheme_kind: [ [ IDENT "Induction"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { InductionScheme(true,ind,s) } | IDENT "Minimality"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { InductionScheme(false,ind,s) } | IDENT "Elimination"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { CaseScheme(true,ind,s) } | IDENT "Case"; "for"; ind = smart_global; IDENT "Sort"; s = sort_family-> { CaseScheme(false,ind,s) } | IDENT "Equality"; "for" ; ind = smart_global -> { EqualityScheme(ind) } ] ] ; (* Various Binders *) (* (* ... without coercions *) binder_nodef: [ [ b = binder_let -> (match b with CLocalAssum(l,ty) -> (l,ty) | CLocalDef _ -> Util.user_err_loc (loc,"fix_param",Pp.str"defined binder not allowed here.")) ] ] ; *) (* ... 
with coercions *) record_field: [ [ attr = LIST0 quoted_attributes ; bd = record_binder; rf_priority = OPT [ "|"; n = natural -> { n } ]; rf_notation = decl_notations -> { let rf_canonical = attr |> List.flatten |> parse canonical_field in let rf_subclass, rf_decl = bd in rf_decl, { rf_subclass ; rf_priority ; rf_notation ; rf_canonical } } ] ] ; record_fields: [ [ f = record_field; ";"; fs = record_fields -> { f :: fs } | f = record_field -> { [f] } | -> { [] } ] ] ; field_body: [ [ l = binders; oc = of_type; t = lconstr -> { fun id -> (oc,AssumExpr (id,l,t)) } | l = binders; oc = of_type; t = lconstr; ":="; b = lconstr -> { fun id -> (oc,DefExpr (id,l,b,Some t)) } | l = binders; ":="; b = lconstr -> { fun id -> match b.CAst.v with | CCast(b', (CastConv t|CastVM t|CastNative t)) -> (NoInstance,DefExpr(id,l,b',Some t)) | _ -> (NoInstance,DefExpr(id,l,b,None)) } ] ] ; record_binder: [ [ id = name -> { (NoInstance,AssumExpr(id, [], CAst.make ~loc @@ CHole (None, IntroAnonymous, None))) } | id = name; f = field_body -> { f id } ] ] ; assum_list: [ [ bl = LIST1 assum_coe -> { bl } | b = assumpt -> { [b] } ] ] ; assum_coe: [ [ "("; a = assumpt; ")" -> { a } ] ] ; assumpt: [ [ idl = LIST1 ident_decl; oc = of_type; c = lconstr -> { (oc <> NoInstance,(idl,c)) } ] ] ; constructor_type: [[ l = binders; t= [ coe = of_type; c = lconstr -> { fun l id -> (coe <> NoInstance,(id,mkProdCN ~loc l c)) } | -> { fun l id -> (false,(id,mkProdCN ~loc l (CAst.make ~loc @@ CHole (None, IntroAnonymous, None)))) } ] -> { t l } ]] ; constructor: [ [ id = identref; c=constructor_type -> { c id } ] ] ; of_type: [ [ ":>" -> { BackInstance } | ":"; ">" -> { BackInstance } | ":" -> { NoInstance } ] ] ; END { let test_only_starredidentrefs = let open Pcoq.Lookahead in to_entry "test_only_starredidentrefs" begin lk_list (lk_ident <+> lk_kws ["Type"; "*"]) >> (lk_kws [".";")"]) end let starredidentreflist_to_expr l = match l with | [] -> SsEmpty | x :: xs -> List.fold_right (fun i acc -> SsUnion(i,acc)) xs x let warn_deprecated_include_type = CWarnings.create ~name:"deprecated-include-type" ~category:"deprecated" (fun () -> strbrk "Include Type is deprecated; use Include instead") let warn_deprecated_as_ident_kind = CWarnings.create ~name:"deprecated-as-ident-kind" ~category:"deprecated" (fun () -> strbrk "grammar kind \"as ident\" no longer accepts \"_\"; use \"as name\" instead to accept \"_\", too, or silence the warning if you actually intended to accept only identifiers.") } (* Modules and Sections *) GRAMMAR EXTEND Gram GLOBAL: gallina_ext module_expr module_type section_subset_expr; gallina_ext: [ [ (* Interactive module declaration *) IDENT "Module"; export = export_token; id = identref; bl = LIST0 module_binder; sign = of_module_type; body = is_module_expr -> { VernacDefineModule (export, id, bl, sign, body) } | IDENT "Module"; "Type"; id = identref; bl = LIST0 module_binder; sign = check_module_types; body = is_module_type -> { VernacDeclareModuleType (id, bl, sign, body) } | IDENT "Declare"; IDENT "Module"; export = export_token; id = identref; bl = LIST0 module_binder; ":"; mty = module_type_inl -> { VernacDeclareModule (export, id, bl, mty) } (* Section beginning *) | IDENT "Section"; id = identref -> { VernacBeginSection id } (* This end a Section a Module or a Module Type *) | IDENT "End"; id = identref -> { VernacEndSegment id } (* Naming a set of section hyps *) | IDENT "Collection"; id = identref; ":="; expr = section_subset_expr -> { VernacNameSectionHypSet (id, expr) } (* Requiring an already compiled 
module *) | IDENT "Require"; export = export_token; qidl = LIST1 global -> { VernacRequire (None, export, qidl) } | IDENT "From" ; ns = global ; IDENT "Require"; export = export_token ; qidl = LIST1 global -> { VernacRequire (Some ns, export, qidl) } | IDENT "Import"; qidl = LIST1 filtered_import -> { VernacImport (false,qidl) } | IDENT "Export"; qidl = LIST1 filtered_import -> { VernacImport (true,qidl) } | IDENT "Include"; e = module_type_inl; l = LIST0 ext_module_expr -> { VernacInclude(e::l) } | IDENT "Include"; "Type"; e = module_type_inl; l = LIST0 ext_module_type -> { warn_deprecated_include_type ~loc (); VernacInclude(e::l) } ] ] ; filtered_import: [ [ m = global -> { (m, ImportAll) } | m = global; "("; ns = LIST1 one_import_filter_name SEP ","; ")" -> { (m, ImportNames ns) } ] ] ; one_import_filter_name: [ [ n = global; etc = OPT [ "("; ".."; ")" -> { () } ] -> { n, Option.has_some etc } ] ] ; export_token: [ [ IDENT "Import" -> { Some false } | IDENT "Export" -> { Some true } | -> { None } ] ] ; ext_module_type: [ [ "<+"; mty = module_type_inl -> { mty } ] ] ; ext_module_expr: [ [ "<+"; mexpr = module_expr_inl -> { mexpr } ] ] ; check_module_type: [ [ "<:"; mty = module_type_inl -> { mty } ] ] ; check_module_types: [ [ mtys = LIST0 check_module_type -> { mtys } ] ] ; of_module_type: [ [ ":"; mty = module_type_inl -> { Enforce mty } | mtys = check_module_types -> { Check mtys } ] ] ; is_module_type: [ [ ":="; mty = module_type_inl ; l = LIST0 ext_module_type -> { (mty::l) } | -> { [] } ] ] ; is_module_expr: [ [ ":="; mexpr = module_expr_inl; l = LIST0 ext_module_expr -> { (mexpr::l) } | -> { [] } ] ] ; functor_app_annot: [ [ "["; IDENT "inline"; "at"; IDENT "level"; i = natural; "]" -> { InlineAt i } | "["; IDENT "no"; IDENT "inline"; "]" -> { NoInline } | -> { DefaultInline } ] ] ; module_expr_inl: [ [ "!"; me = module_expr -> { (me,NoInline) } | me = module_expr; a = functor_app_annot -> { (me,a) } ] ] ; module_type_inl: [ [ "!"; me = module_type -> { (me,NoInline) } | me = module_type; a = functor_app_annot -> { (me,a) } ] ] ; (* Module binder *) module_binder: [ [ "("; export = export_token; idl = LIST1 identref; ":"; mty = module_type_inl; ")" -> { (export,idl,mty) } ] ] ; (* Module expressions *) module_expr: [ [ me = module_expr_atom -> { me } | me1 = module_expr; me2 = module_expr_atom -> { CAst.make ~loc @@ CMapply (me1,me2) } ] ] ; module_expr_atom: [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid } | "("; me = module_expr; ")" -> { me } ] ] ; with_declaration: [ [ "Definition"; fqid = fullyqualid; udecl = OPT univ_decl; ":="; c = Constr.lconstr -> { CWith_Definition (fqid,udecl,c) } | IDENT "Module"; fqid = fullyqualid; ":="; qid = qualid -> { CWith_Module (fqid,qid) } ] ] ; module_type: [ [ qid = qualid -> { CAst.make ~loc @@ CMident qid } | "("; mt = module_type; ")" -> { mt } | mty = module_type; me = module_expr_atom -> { CAst.make ~loc @@ CMapply (mty,me) } | mty = module_type; "with"; decl = with_declaration -> { CAst.make ~loc @@ CMwith (mty,decl) } ] ] ; (* Proof using *) section_subset_expr: [ [ test_only_starredidentrefs; l = LIST0 starredidentref -> { starredidentreflist_to_expr l } | e = ssexpr -> { e } ]] ; starredidentref: [ [ i = identref -> { SsSingl i } | i = identref; "*" -> { SsFwdClose(SsSingl i) } | "Type" -> { SsType } | "Type"; "*" -> { SsFwdClose SsType } ]] ; ssexpr: [ "35" [ "-"; e = ssexpr -> { SsCompl e } ] | "50" [ e1 = ssexpr; "-"; e2 = ssexpr-> { SsSubstr(e1,e2) } | e1 = ssexpr; "+"; e2 = ssexpr-> { SsUnion(e1,e2) } ] | "0" [ i = 
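(* Illustrative module, import and section-hypothesis syntax for the rules in
   this block; M, T, N and K are placeholder module names:
     From Coq Require Import List.
     Module M : T := N <+ K.
     Import M(point, box).
     Collection SectionHyps := Type*. *)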
starredidentref -> { i } | "("; test_only_starredidentrefs; l = LIST0 starredidentref; ")"-> { starredidentreflist_to_expr l } | "("; test_only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" -> { SsFwdClose(starredidentreflist_to_expr l) } | "("; e = ssexpr; ")"-> { e } | "("; e = ssexpr; ")"; "*" -> { SsFwdClose e } ] ] ; END (* Extensions: implicits, coercions, etc. *) GRAMMAR EXTEND Gram GLOBAL: gallina_ext hint_info scope_delimiter; gallina_ext: [ [ (* Transparent and Opaque *) IDENT "Transparent"; l = LIST1 smart_global -> { VernacSetOpacity (Conv_oracle.transparent, l) } | IDENT "Opaque"; l = LIST1 smart_global -> { VernacSetOpacity (Conv_oracle.Opaque, l) } | IDENT "Strategy"; l = LIST1 [ v=strategy_level; "["; q=LIST1 smart_global; "]" -> { (v,q) } ] -> { VernacSetStrategy l } (* Canonical structure *) | IDENT "Canonical"; OPT [ IDENT "Structure" -> {()} ]; qid = global; ud = OPT [ u = OPT univ_decl; d = def_body -> { (u,d) } ] -> { match ud with | None -> VernacCanonical CAst.(make ?loc:qid.CAst.loc @@ AN qid) | Some (u,d) -> let s = coerce_reference_to_id qid in VernacDefinition ((NoDischarge,CanonicalStructure),((CAst.make ?loc:qid.CAst.loc (Name s)),u),d) } | IDENT "Canonical"; OPT [ IDENT "Structure" -> {()} ]; ntn = by_notation -> { VernacCanonical CAst.(make ~loc @@ ByNotation ntn) } (* Coercions *) | IDENT "Coercion"; qid = global; u = OPT univ_decl; d = def_body -> { let s = coerce_reference_to_id qid in VernacDefinition ((NoDischarge,Coercion),((CAst.make ?loc:qid.CAst.loc (Name s)),u),d) } | IDENT "Identity"; IDENT "Coercion"; f = identref; ":"; s = class_rawexpr; ">->"; t = class_rawexpr -> { VernacIdentityCoercion (f, s, t) } | IDENT "Coercion"; qid = global; ":"; s = class_rawexpr; ">->"; t = class_rawexpr -> { VernacCoercion (CAst.make ~loc @@ AN qid, s, t) } | IDENT "Coercion"; ntn = by_notation; ":"; s = class_rawexpr; ">->"; t = class_rawexpr -> { VernacCoercion (CAst.make ~loc @@ ByNotation ntn, s, t) } | IDENT "Context"; c = LIST1 binder -> { VernacContext (List.flatten c) } | IDENT "Instance"; namesup = instance_name; ":"; t = term LEVEL "200"; info = hint_info ; props = [ ":="; "{"; r = record_declaration; "}" -> { Some (true,r) } | ":="; c = lconstr -> { Some (false,c) } | -> { None } ] -> { VernacInstance (fst namesup,snd namesup,t,props,info) } | IDENT "Existing"; IDENT "Instance"; id = global; info = hint_info -> { VernacExistingInstance [id, info] } | IDENT "Existing"; IDENT "Instances"; ids = LIST1 global; pri = OPT [ "|"; i = natural -> { i } ] -> { let info = { Typeclasses.hint_priority = pri; hint_pattern = None } in let insts = List.map (fun i -> (i, info)) ids in VernacExistingInstance insts } | IDENT "Existing"; IDENT "Class"; is = global -> { VernacExistingClass is } (* Arguments *) | IDENT "Arguments"; qid = smart_global; args = LIST0 arg_specs; more_implicits = OPT [ ","; impl = LIST1 [ impl = LIST0 implicits_alt -> { List.flatten impl } ] SEP "," -> { impl } ]; mods = OPT [ ":"; l = LIST1 args_modifier SEP "," -> { l } ] -> { let mods = match mods with None -> [] | Some l -> List.flatten l in let more_implicits = Option.default [] more_implicits in VernacArguments (qid, List.flatten args, more_implicits, mods) } | IDENT "Implicit"; "Type"; bl = reserv_list -> { VernacReserve bl } | IDENT "Implicit"; IDENT "Types"; bl = reserv_list -> { test_plural_form_types loc "Implicit Types" bl; VernacReserve bl } | IDENT "Generalizable"; gen = [IDENT "All"; IDENT "Variables" -> { Some [] } | IDENT "No"; IDENT "Variables" -> { None } | ["Variable" -> 
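(* Illustrative coercion, instance and argument declarations for the rules
   above; placeholder names, assumed typical usage:
     Coercion b2n (b : bool) : nat := if b then 1 else 0.
     Existing Instance foo | 2.
     Arguments map {A B} f l : simpl never. *)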
{ () } | IDENT "Variables" -> { () } ]; idl = LIST1 identref -> { Some idl } ] -> { VernacGeneralizable gen } ] ] ; args_modifier: [ [ IDENT "simpl"; IDENT "nomatch" -> { [`ReductionDontExposeCase] } | IDENT "simpl"; IDENT "never" -> { [`ReductionNeverUnfold] } | IDENT "default"; IDENT "implicits" -> { [`DefaultImplicits] } | IDENT "clear"; IDENT "implicits" -> { [`ClearImplicits] } | IDENT "clear"; IDENT "scopes" -> { [`ClearScopes] } | IDENT "clear"; IDENT "bidirectionality"; IDENT "hint" -> { [`ClearBidiHint] } | IDENT "rename" -> { [`Rename] } | IDENT "assert" -> { [`Assert] } | IDENT "extra"; IDENT "scopes" -> { [`ExtraScopes] } | IDENT "clear"; IDENT "scopes"; IDENT "and"; IDENT "implicits" -> { [`ClearImplicits; `ClearScopes] } | IDENT "clear"; IDENT "implicits"; IDENT "and"; IDENT "scopes" -> { [`ClearImplicits; `ClearScopes] } ] ] ; scope_delimiter: [ [ "%"; key = IDENT -> { key } ] ] ; argument_spec: [ [ b = OPT "!"; id = name ; s = OPT scope_delimiter -> { id.CAst.v, not (Option.is_empty b), Option.map (fun x -> CAst.make ~loc x) s } ] ]; (* List of arguments implicit status, scope, modifiers *) arg_specs: [ [ item = argument_spec -> { let name, recarg_like, notation_scope = item in [RealArg { name=name; recarg_like=recarg_like; notation_scope=notation_scope; implicit_status = Explicit}] } | "/" -> { [VolatileArg] } | "&" -> { [BidiArg] } | "("; items = LIST1 argument_spec; ")"; sc = OPT scope_delimiter -> { let f x = match sc, x with | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x | Some _, Some _ -> user_err Pp.(str "scope declared twice") in List.map (fun (name,recarg_like,notation_scope) -> RealArg { name=name; recarg_like=recarg_like; notation_scope=f notation_scope; implicit_status = Explicit}) items } | "["; items = LIST1 argument_spec; "]"; sc = OPT scope_delimiter -> { let f x = match sc, x with | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x | Some _, Some _ -> user_err Pp.(str "scope declared twice") in List.map (fun (name,recarg_like,notation_scope) -> RealArg { name=name; recarg_like=recarg_like; notation_scope=f notation_scope; implicit_status = NonMaxImplicit}) items } | "{"; items = LIST1 argument_spec; "}"; sc = OPT scope_delimiter -> { let f x = match sc, x with | None, x -> x | x, None -> Option.map (fun y -> CAst.make ~loc y) x | Some _, Some _ -> user_err Pp.(str "scope declared twice") in List.map (fun (name,recarg_like,notation_scope) -> RealArg { name=name; recarg_like=recarg_like; notation_scope=f notation_scope; implicit_status = MaxImplicit}) items } ] ]; (* Same as [arg_specs], but with only implicit status and names *) implicits_alt: [ [ name = name -> { [(name.CAst.v, Explicit)] } | "["; items = LIST1 name; "]" -> { List.map (fun name -> (name.CAst.v, NonMaxImplicit)) items } | "{"; items = LIST1 name; "}" -> { List.map (fun name -> (name.CAst.v, MaxImplicit)) items } ] ]; instance_name: [ [ name = ident_decl; bl = binders -> { (CAst.map (fun id -> Name id) (fst name), snd name), bl } | -> { ((CAst.make ~loc Anonymous), None), [] } ] ] ; hint_info: [ [ "|"; i = OPT natural; pat = OPT constr_pattern -> { { Typeclasses.hint_priority = i; hint_pattern = pat } } | -> { { Typeclasses.hint_priority = None; hint_pattern = None } } ] ] ; reserv_list: [ [ bl = LIST1 reserv_tuple -> { bl } | b = simple_reserv -> { [b] } ] ] ; reserv_tuple: [ [ "("; a = simple_reserv; ")" -> { a } ] ] ; simple_reserv: [ [ idl = LIST1 identref; ":"; c = lconstr -> { (idl,c) } ] ] ; END GRAMMAR EXTEND Gram GLOBAL: command query_command 
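(* Illustrative argument-specification and implicit-type syntax for the rules
   above; placeholder names, assumed typical usage:
     Arguments nth {A} n !l default / : assert.
     Implicit Types (n m : nat) (l : list nat). *)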
class_rawexpr gallina_ext search_query search_queries; gallina_ext: [ [ IDENT "Export"; "Set"; table = setting_name; v = option_setting -> { VernacSetOption (true, table, v) } | IDENT "Export"; IDENT "Unset"; table = setting_name -> { VernacSetOption (true, table, OptionUnset) } ] ]; command: [ [ IDENT "Comments"; l = LIST0 comment -> { VernacComments l } (* Hack! Should be in grammar_ext, but camlp5 factorizes badly *) | IDENT "Declare"; IDENT "Instance"; id = ident_decl; bl = binders; ":"; t = term LEVEL "200"; info = hint_info -> { VernacDeclareInstance (id, bl, t, info) } (* Should be in syntax, but camlp5 would not factorize *) | IDENT "Declare"; IDENT "Scope"; sc = IDENT -> { VernacDeclareScope sc } (* System directory *) | IDENT "Pwd" -> { VernacChdir None } | IDENT "Cd" -> { VernacChdir None } | IDENT "Cd"; dir = ne_string -> { VernacChdir (Some dir) } | IDENT "Load"; verbosely = [ IDENT "Verbose" -> { true } | -> { false } ]; s = [ s = ne_string -> { s } | s = IDENT -> { s } ] -> { VernacLoad (verbosely, s) } | IDENT "Declare"; IDENT "ML"; IDENT "Module"; l = LIST1 ne_string -> { VernacDeclareMLModule l } | IDENT "Locate"; l = locatable -> { VernacLocate l } (* Managing load paths *) | IDENT "Add"; IDENT "LoadPath"; physical_path = ne_string; "as"; logical_path = dirpath -> { VernacAddLoadPath { implicit = false; logical_path; physical_path } } | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; physical_path = ne_string; "as"; logical_path = dirpath -> { VernacAddLoadPath { implicit = true; logical_path; physical_path } } | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string -> { VernacRemoveLoadPath dir } (* Type-Checking *) | "Type"; c = lconstr -> { VernacGlobalCheck c } (* Printing (careful factorization of entries) *) | IDENT "Print"; p = printable -> { VernacPrint p } | IDENT "Print"; qid = smart_global; l = OPT univ_name_list -> { VernacPrint (PrintName (qid,l)) } | IDENT "Print"; IDENT "Module"; "Type"; qid = global -> { VernacPrint (PrintModuleType qid) } | IDENT "Print"; IDENT "Module"; qid = global -> { VernacPrint (PrintModule qid) } | IDENT "Print"; IDENT "Namespace" ; ns = dirpath -> { VernacPrint (PrintNamespace ns) } | IDENT "Inspect"; n = natural -> { VernacPrint (PrintInspect n) } | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string -> { VernacAddMLPath dir } (* For acting on parameter tables *) | "Set"; table = setting_name; v = option_setting -> { VernacSetOption (false, table, v) } | IDENT "Unset"; table = setting_name -> { VernacSetOption (false, table, OptionUnset) } | IDENT "Print"; IDENT "Table"; table = setting_name -> { VernacPrintOption table } | IDENT "Add"; table = IDENT; field = IDENT; v = LIST1 table_value -> { VernacAddOption ([table;field], v) } (* A global value below will be hidden by a field above! *) (* In fact, we give priority to secondary tables *) (* No syntax for tertiary tables due to conflict *) (* (but they are unused anyway) *) | IDENT "Add"; table = IDENT; v = LIST1 table_value -> { VernacAddOption ([table], v) } | IDENT "Test"; table = setting_name; "for"; v = LIST1 table_value -> { VernacMemOption (table, v) } | IDENT "Test"; table = setting_name -> { VernacPrintOption table } | IDENT "Remove"; table = IDENT; field = IDENT; v= LIST1 table_value -> { VernacRemoveOption ([table;field], v) } | IDENT "Remove"; table = IDENT; v = LIST1 table_value -> { VernacRemoveOption ([table], v) } ]] ; query_command: (* TODO: rapprocher Eval et Check *) [ [ IDENT "Eval"; r = red_expr; "in"; c = lconstr; "." 
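(* Illustrative option, table and evaluation commands for the rules above;
   assumed typical usage:
     Export Set Universe Polymorphism.
     Set Printing Width 80.
     Add Search Blacklist "_subproof".
     Eval cbv in (2 + 2). *)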
-> { fun g -> VernacCheckMayEval (Some r, g, c) } | IDENT "Compute"; c = lconstr; "." -> { fun g -> VernacCheckMayEval (Some (Genredexpr.CbvVm None), g, c) } | IDENT "Check"; c = lconstr; "." -> { fun g -> VernacCheckMayEval (None, g, c) } (* Searching the environment *) | IDENT "About"; qid = smart_global; l = OPT univ_name_list; "." -> { fun g -> VernacPrint (PrintAbout (qid,l,g)) } | IDENT "SearchPattern"; c = constr_pattern; l = in_or_out_modules; "." -> { fun g -> VernacSearch (SearchPattern c,g, l) } | IDENT "SearchRewrite"; c = constr_pattern; l = in_or_out_modules; "." -> { fun g -> VernacSearch (SearchRewrite c,g, l) } | IDENT "Search"; s = search_query; l = search_queries; "." -> { let (sl,m) = l in fun g -> VernacSearch (Search (s::sl),g, m) } ] ] ; printable: [ [ IDENT "Term"; qid = smart_global; l = OPT univ_name_list -> { PrintName (qid,l) } | IDENT "All" -> { PrintFullContext } | IDENT "Section"; s = global -> { PrintSectionContext s } | IDENT "Grammar"; ent = IDENT -> (* This should be in "syntax" section but is here for factorization*) { PrintGrammar ent } | IDENT "Custom"; IDENT "Grammar"; ent = IDENT -> (* Should also be in "syntax" section *) { PrintCustomGrammar ent } | IDENT "LoadPath"; dir = OPT dirpath -> { PrintLoadPath dir } | IDENT "Modules" -> { user_err Pp.(str "Print Modules is obsolete; use Print Libraries instead") } | IDENT "Libraries" -> { PrintModules } | IDENT "ML"; IDENT "Path" -> { PrintMLLoadPath } | IDENT "ML"; IDENT "Modules" -> { PrintMLModules } | IDENT "Debug"; IDENT "GC" -> { PrintDebugGC } | IDENT "Graph" -> { PrintGraph } | IDENT "Classes" -> { PrintClasses } | IDENT "TypeClasses" -> { PrintTypeClasses } | IDENT "Instances"; qid = smart_global -> { PrintInstances qid } | IDENT "Coercions" -> { PrintCoercions } | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr -> { PrintCoercionPaths (s,t) } | IDENT "Canonical"; IDENT "Projections"; qids = LIST0 smart_global -> { PrintCanonicalConversions qids } | IDENT "Typing"; IDENT "Flags" -> { PrintTypingFlags } | IDENT "Tables" -> { PrintTables } | IDENT "Options" -> { PrintTables (* A Synonymous to Tables *) } | IDENT "Hint" -> { PrintHintGoal } | IDENT "Hint"; qid = smart_global -> { PrintHint qid } | IDENT "Hint"; "*" -> { PrintHintDb } | IDENT "HintDb"; s = IDENT -> { PrintHintDbName s } | IDENT "Scopes" -> { PrintScopes } | IDENT "Scope"; s = IDENT -> { PrintScope s } | IDENT "Visibility"; s = OPT IDENT -> { PrintVisibility s } | IDENT "Implicit"; qid = smart_global -> { PrintImplicit qid } | b = [ IDENT "Sorted" -> { true } | -> { false } ]; IDENT "Universes"; g = OPT printunivs_subgraph; fopt = OPT ne_string -> { PrintUniverses (b, g, fopt) } | IDENT "Assumptions"; qid = smart_global -> { PrintAssumptions (false, false, qid) } | IDENT "Opaque"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, false, qid) } | IDENT "Transparent"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (false, true, qid) } | IDENT "All"; IDENT "Dependencies"; qid = smart_global -> { PrintAssumptions (true, true, qid) } | IDENT "Strategy"; qid = smart_global -> { PrintStrategy (Some qid) } | IDENT "Strategies" -> { PrintStrategy None } | IDENT "Registered" -> { PrintRegistered } ] ] ; printunivs_subgraph: [ [ IDENT "Subgraph"; "("; l = LIST0 reference; ")" -> { l } ] ] ; class_rawexpr: [ [ IDENT "Funclass" -> { FunClass } | IDENT "Sortclass" -> { SortClass } | qid = smart_global -> { RefClass qid } ] ] ; locatable: [ [ qid = smart_global -> { LocateAny qid } | IDENT 
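(* Illustrative query and printing forms for the rules in this block; u and v
   are placeholder universe names:
     About nat.
     Search headconcl: "map" inside List.
     Print Sorted Universes Subgraph (u v). *)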
"Term"; qid = smart_global -> { LocateTerm qid } | IDENT "File"; f = ne_string -> { LocateFile f } | IDENT "Library"; qid = global -> { LocateLibrary qid } | IDENT "Module"; qid = global -> { LocateModule qid } ] ] ; option_setting: [ [ -> { OptionSetTrue } | n = integer -> { OptionSetInt n } | s = STRING -> { OptionSetString s } ] ] ; table_value: [ [ id = global -> { Goptions.QualidRefValue id } | s = STRING -> { Goptions.StringRefValue s } ] ] ; setting_name: [ [ fl = LIST1 [ x = IDENT -> { x } ] -> { fl } ]] ; ne_in_or_out_modules: [ [ IDENT "inside"; l = LIST1 global -> { SearchInside l } | IDENT "outside"; l = LIST1 global -> { SearchOutside l } ] ] ; in_or_out_modules: [ [ m = ne_in_or_out_modules -> { m } | -> { SearchOutside [] } ] ] ; comment: [ [ c = constr -> { CommentConstr c } | s = STRING -> { CommentString s } | n = natural -> { CommentInt n } ] ] ; positive_search_mark: [ [ "-" -> { false } | -> { true } ] ] ; search_query: [ [ b = positive_search_mark; s = search_item -> { (b, SearchLiteral s) } | b = positive_search_mark; "["; l = LIST1 (LIST1 search_query) SEP "|"; "]" -> { (b, SearchDisjConj l) } ] ] ; search_item: [ [ test_id_colon; where = search_where; ":"; s = ne_string; sc = OPT scope_delimiter -> { SearchString (where,s,sc) } | IDENT "is"; ":"; kl = logical_kind -> { SearchKind kl } | s = ne_string; sc = OPT scope_delimiter -> { SearchString ((Anywhere,false),s,sc) } | test_id_colon; where = search_where; ":"; p = constr_pattern -> { SearchSubPattern (where,p) } | p = constr_pattern -> { SearchSubPattern ((Anywhere,false),p) } ] ] ; logical_kind: [ [ k = thm_token -> { IsProof k } | k = assumption_token -> { IsAssumption (snd k) } | k = IDENT "Context" -> { IsAssumption Context } | k = extended_def_token -> { IsDefinition k } | IDENT "Primitive" -> { IsPrimitive } ] ] ; extended_def_token: [ [ k = def_token -> { snd k } | IDENT "Coercion" -> { Coercion } | IDENT "Instance" -> { Instance } | IDENT "Scheme" -> { Scheme } | IDENT "Canonical" -> { CanonicalStructure } | IDENT "Field" -> { StructureComponent } | IDENT "Method" -> { Method } ] ] ; search_where: [ [ IDENT "head" -> { Anywhere, true } | IDENT "hyp" -> { InHyp, false } | IDENT "concl" -> { InConcl, false } | IDENT "headhyp" -> { InHyp, true } | IDENT "headconcl" -> { InConcl, true } ] ] ; search_queries: [ [ m = ne_in_or_out_modules -> { ([],m) } | s = search_query; l = search_queries -> { let (sl,m) = l in (s::sl,m) } | -> { ([],SearchOutside []) } ] ] ; univ_name_list: [ [ "@{" ; l = LIST0 name; "}" -> { l } ] ] ; END GRAMMAR EXTEND Gram GLOBAL: command; command: [ [ (* State management *) IDENT "Write"; IDENT "State"; s = IDENT -> { VernacWriteState s } | IDENT "Write"; IDENT "State"; s = ne_string -> { VernacWriteState s } | IDENT "Restore"; IDENT "State"; s = IDENT -> { VernacRestoreState s } | IDENT "Restore"; IDENT "State"; s = ne_string -> { VernacRestoreState s } (* Resetting *) | IDENT "Reset"; IDENT "Initial" -> { VernacResetInitial } | IDENT "Reset"; id = identref -> { VernacResetName id } | IDENT "Back" -> { VernacBack 1 } | IDENT "Back"; n = natural -> { VernacBack n } (* Tactic Debugger *) | IDENT "Debug"; IDENT "On" -> { VernacSetOption (false, ["Ltac";"Debug"], OptionSetTrue) } | IDENT "Debug"; IDENT "Off" -> { VernacSetOption (false, ["Ltac";"Debug"], OptionUnset) } (* registration of a custom reduction *) | IDENT "Declare"; IDENT "Reduction"; s = IDENT; ":="; r = red_expr -> { VernacDeclareReduction (s,r) } (* factorized here, though relevant for syntax extensions *) | IDENT "Declare"; 
IDENT "Custom"; IDENT "Entry"; s = IDENT -> { VernacDeclareCustomEntry s } ] ]; END (* Grammar extensions *) GRAMMAR EXTEND Gram GLOBAL: syntax only_parsing syntax_modifiers; syntax: [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT -> { VernacOpenCloseScope (true,sc) } | IDENT "Close"; IDENT "Scope"; sc = IDENT -> { VernacOpenCloseScope (false,sc) } | IDENT "Delimit"; IDENT "Scope"; sc = IDENT; "with"; key = IDENT -> { VernacDelimiters (sc, Some key) } | IDENT "Undelimit"; IDENT "Scope"; sc = IDENT -> { VernacDelimiters (sc, None) } | IDENT "Bind"; IDENT "Scope"; sc = IDENT; "with"; refl = LIST1 class_rawexpr -> { VernacBindScope (sc,refl) } | IDENT "Infix"; op = ne_lstring; ":="; p = constr; modl = syntax_modifiers; sc = OPT [ ":"; sc = IDENT -> { sc } ] -> { VernacInfix ((op,modl),p,sc) } | IDENT "Notation"; id = identref; idl = LIST0 ident; ":="; c = constr; b = only_parsing -> { VernacSyntacticDefinition (id,(idl,c),{ onlyparsing = b }) } | IDENT "Notation"; s = lstring; ":="; c = constr; modl = syntax_modifiers; sc = OPT [ ":"; sc = IDENT -> { sc } ] -> { VernacNotation (c,(s,modl),sc) } | IDENT "Format"; IDENT "Notation"; n = STRING; s = STRING; fmt = STRING -> { VernacNotationAddFormat (n,s,fmt) } | IDENT "Reserved"; IDENT "Infix"; s = ne_lstring; l = syntax_modifiers -> { let s = CAst.map (fun s -> "x '"^s^"' y") s in VernacSyntaxExtension (true,(s,l)) } | IDENT "Reserved"; IDENT "Notation"; s = ne_lstring; l = syntax_modifiers -> { VernacSyntaxExtension (false, (s,l)) } (* "Print" "Grammar" and "Declare" "Scope" should be here but are in "command" entry in order to factorize with other "Print"-based or "Declare"-based vernac entries *) ] ] ; only_parsing: [ [ "("; IDENT "only"; IDENT "parsing"; ")" -> { true } | -> { false } ] ] ; level: [ [ IDENT "level"; n = natural -> { NumLevel n } | IDENT "next"; IDENT "level" -> { NextLevel } ] ] ; syntax_modifier: [ [ "at"; IDENT "level"; n = natural -> { SetLevel n } | "in"; IDENT "custom"; x = IDENT -> { SetCustomEntry (x,None) } | "in"; IDENT "custom"; x = IDENT; "at"; IDENT "level"; n = natural -> { SetCustomEntry (x,Some n) } | IDENT "left"; IDENT "associativity" -> { SetAssoc Gramlib.Gramext.LeftA } | IDENT "right"; IDENT "associativity" -> { SetAssoc Gramlib.Gramext.RightA } | IDENT "no"; IDENT "associativity" -> { SetAssoc Gramlib.Gramext.NonA } | IDENT "only"; IDENT "printing" -> { SetOnlyPrinting } | IDENT "only"; IDENT "parsing" -> { SetOnlyParsing } | IDENT "format"; s1 = [s = STRING -> { CAst.make ~loc s } ]; s2 = OPT [s = STRING -> { CAst.make ~loc s } ] -> { begin match s1, s2 with | { CAst.v = k }, Some s -> SetFormat(k,s) | s, None -> SetFormat ("text",s) end } | x = IDENT; ","; l = LIST1 [id = IDENT -> { id } ] SEP ","; "at"; lev = level -> { SetItemLevel (x::l,None,lev) } | x = IDENT; "at"; lev = level; b = OPT binder_interp -> { SetItemLevel ([x],b,lev) } | x = IDENT; b = binder_interp -> { SetItemLevel ([x],Some b,DefaultLevel) } | x = IDENT; typ = explicit_subentry -> { SetEntryType (x,typ) } ] ] ; syntax_modifiers: [ [ "("; l = LIST1 syntax_modifier SEP ","; ")" -> { l } | -> { [] } ] ] ; explicit_subentry: [ [ (* Warning to be turn into an error at the end of deprecation phase (for 8.14) *) IDENT "ident" -> { ETName false } (* To be activated at the end of transitory phase (for 8.15) | IDENT "ident" -> { ETIdent } *) | IDENT "name" -> { ETName true } (* Boolean to remove at the end of transitory phase *) | IDENT "global" -> { ETGlobal } | IDENT "bigint" -> { ETBigint } | IDENT "binder" -> { ETBinder true } | IDENT 
"constr" -> { ETConstr (InConstrEntry,None,DefaultLevel) } | IDENT "constr"; n = at_level_opt; b = OPT binder_interp -> { ETConstr (InConstrEntry,b,n) } | IDENT "pattern" -> { ETPattern (false,None) } | IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (false,Some n) } | IDENT "strict"; IDENT "pattern" -> { ETPattern (true,None) } | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (true,Some n) } | IDENT "closed"; IDENT "binder" -> { ETBinder false } | IDENT "custom"; x = IDENT; n = at_level_opt; b = OPT binder_interp -> { ETConstr (InCustomEntry x,b,n) } ] ] ; at_level_opt: [ [ "at"; n = level -> { n } | -> { DefaultLevel } ] ] ; binder_interp: [ [ "as"; IDENT "ident" -> { warn_deprecated_as_ident_kind (); Notation_term.AsIdent } | "as"; IDENT "name" -> { Notation_term.AsName } | "as"; IDENT "pattern" -> { Notation_term.AsNameOrPattern } | "as"; IDENT "strict"; IDENT "pattern" -> { Notation_term.AsStrictPattern } ] ] ; END