diff options
53 files changed, 1396 insertions, 2051 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 956d74c8c1..3a626796a6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -578,6 +578,8 @@ library:ci-bedrock2: name: "$CI_JOB_NAME" paths: - _build_ci + variables: + NJOBS: "1" library:ci-color: extends: .ci-template-flambda diff --git a/Makefile.vofiles b/Makefile.vofiles index b6e0cd0a08..fe7ca7c36f 100644 --- a/Makefile.vofiles +++ b/Makefile.vofiles @@ -31,9 +31,9 @@ ALLMODS:=$(call vo_to_mod,$(ALLVO:.$(VO)=.vo)) # Converting a stdlib filename into native compiler filenames # Used for install targets -vo_to_cm = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%, N%, $(patsubst theories/%,NCoq_%,$(patsubst plugins/%,NCoq_%,$(vo:.$(VO)=.cm*)))))) +vo_to_cm = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%,N%, $(patsubst theories/%,NCoq_%,$(patsubst plugins/%,NCoq_%,$(vo:.$(VO)=.cm*)))))) -vo_to_obj = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%, N%, $(patsubst theories/%,NCoq_%,$(patsubst plugins/%,NCoq_%,$(vo:.$(VO)=.o)))))) +vo_to_obj = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%,N%, $(patsubst theories/%,NCoq_%,$(patsubst plugins/%,NCoq_%,$(vo:.$(VO)=.o)))))) ifdef QUICK GLOBFILES:= diff --git a/dev/doc/critical-bugs b/dev/doc/critical-bugs index 67becb251a..2d187f7bae 100644 --- a/dev/doc/critical-bugs +++ b/dev/doc/critical-bugs @@ -255,6 +255,18 @@ Conversion machines GH issue number: #9925 risk: + component: "virtual machine" (compilation to bytecode ran by a C-interpreter) + summary: broken long multiplication primitive integer emulation layer on 32 bits + introduced: e43b176 + impacted released versions: 8.10.0, 8.10.1, 8.10.2 + impacted development branches: 8.11 + impacted coqchk versions: none (no virtual machine in coqchk) + fixed in: 4e176a7 + found by: Soegtrop, Melquiond + exploit: test-suite/bugs/closed/bug_11321.v + GH issue number: #11321 + risk: critical, as 
any BigN computation on 32-bit architectures is wrong + component: "native" conversion machine (translation to OCaml which compiles to native code) summary: translation of identifier from Coq to OCaml was not bijective, leading to identify True and False introduced: V8.5 diff --git a/doc/changelog/02-specification-language/11233-master+fix11231-missing-variable-pattern-matching-decompilation.rst b/doc/changelog/02-specification-language/11233-master+fix11231-missing-variable-pattern-matching-decompilation.rst new file mode 100644 index 0000000000..941469d698 --- /dev/null +++ b/doc/changelog/02-specification-language/11233-master+fix11231-missing-variable-pattern-matching-decompilation.rst @@ -0,0 +1,6 @@ +- **Fixed:** + A dependency was missing when looking for default clauses in the + algorithm for printing pattern matching clauses (`#11233 + <https://github.com/coq/coq/pull/11233>`_, by Hugo Herbelin, fixing + `#11231 <https://github.com/coq/coq/pull/11231>`_, reported by Barry + Jay). diff --git a/doc/changelog/04-tactics/11288-omega+depr.rst b/doc/changelog/04-tactics/11288-omega+depr.rst new file mode 100644 index 0000000000..2832e6db61 --- /dev/null +++ b/doc/changelog/04-tactics/11288-omega+depr.rst @@ -0,0 +1,6 @@ +- **Removed:** + The undocumented ``omega with`` tactic variant has been removed, + using ``lia`` is the recommended replacement, though the old semantics + of ``omega with *`` can be recovered with ``zify; omega`` + (`#11288 <https://github.com/coq/coq/pull/11288>`_, + by Emilio Jesus Gallego Arias). 
diff --git a/doc/changelog/04-tactics/11337-omega-with-depr.rst b/doc/changelog/04-tactics/11337-omega-with-depr.rst new file mode 100644 index 0000000000..25e929e030 --- /dev/null +++ b/doc/changelog/04-tactics/11337-omega-with-depr.rst @@ -0,0 +1,6 @@ +- **Deprecated:** + The undocumented ``omega with`` tactic variant has been deprecated, + using ``lia`` is the recommended replacement, though the old semantics + of ``omega with *`` can be recovered with ``zify; omega`` + (`#11337 <https://github.com/coq/coq/pull/11337>`_, + by Emilio Jesus Gallego Arias). diff --git a/doc/changelog/07-commands-and-options/10747-canonical-better-message.rst b/doc/changelog/07-commands-and-options/10747-canonical-better-message.rst new file mode 100644 index 0000000000..e73be9c642 --- /dev/null +++ b/doc/changelog/07-commands-and-options/10747-canonical-better-message.rst @@ -0,0 +1,5 @@ +- **Changed:** + The :cmd:`Print Canonical Projections` command now can take constants and + prints only the unification rules that involve or are synthesized from given + constants (`#10747 <https://github.com/coq/coq/pull/10747>`_, + by Kazuhiko Sakaguchi). diff --git a/doc/sphinx/README.rst b/doc/sphinx/README.rst index 549598b187..a34b2d5195 100644 --- a/doc/sphinx/README.rst +++ b/doc/sphinx/README.rst @@ -143,22 +143,24 @@ Here is the list of all objects of the Coq domain (The symbol :black_nib: indica application of a tactic. ``.. prodn::`` A grammar production. - This is useful if you intend to document individual grammar productions. - Otherwise, use Sphinx's `production lists + Use ``.. prodn`` to document grammar productions instead of Sphinx + `production lists <http://www.sphinx-doc.org/en/stable/markup/para.html#directive-productionlist>`_. - Unlike ``.. productionlist``\ s, this directive accepts notation syntax. - - - Usage:: - - .. prodn:: token += production - .. prodn:: token ::= production + prodn displays multiple productions together with alignment similar to ``.. 
productionlist``, + however unlike ``.. productionlist``\ s, this directive accepts notation syntax. Example:: - .. prodn:: term += let: @pattern := @term in @term .. prodn:: occ_switch ::= { {? {| + | - } } {* @num } } + term += let: @pattern := @term in @term + | second_production + + The first line defines "occ_switch", which must be unique in the document. The second + references and expands the definition of "term", whose main definition is elsewhere + in the document. The third form is for continuing the + definition of a nonterminal when it has multiple productions. It leaves the first + column in the output blank. ``.. table::`` :black_nib: A Coq table, i.e. a setting that is a set of values. Example:: diff --git a/doc/sphinx/_static/coqnotations.sty b/doc/sphinx/_static/coqnotations.sty index 3548b8754c..3dfe4db439 100644 --- a/doc/sphinx/_static/coqnotations.sty +++ b/doc/sphinx/_static/coqnotations.sty @@ -67,11 +67,26 @@ \newcssclass{notation-sup}{\nsup{#1}} \newcssclass{notation-sub}{\nsub{#1}} -\newcssclass{notation}{\nnotation{#1}} +\newcssclass{notation}{\nnotation{\textbf{#1}}} \newcssclass{repeat}{\nrepeat{#1}} \newcssclass{repeat-wrapper}{\nwrapper{#1}} +\newcssclass{repeat-wrapper-with-sub}{\nwrapper{#1}} \newcssclass{hole}{\nhole{#1}} \newcssclass{alternative}{\nalternative{\nbox{#1}}{0pt}} \newcssclass{alternative-block}{#1} \newcssclass{repeated-alternative}{\nalternative{#1}{\nboxsep}} \newcssclass{alternative-separator}{\quad\naltsep{}\quad} +\newcssclass{prodn-table}{% + \begin{savenotes} + \sphinxattablestart + \begin{tabulary}{\linewidth}[t]{lLL} + #1 + \end{tabulary} + \par + \sphinxattableend + \end{savenotes}} +% latex puts targets 1 line below where they should be; prodn-target corrects for this +\newcssclass{prodn-target}{\raisebox{\dimexpr \nscriptsize \relax}{#1}} +\newcssclass{prodn-cell-nonterminal}{#1 &} +\newcssclass{prodn-cell-op}{#1 &} +\newcssclass{prodn-cell-production}{#1\\} diff --git a/doc/sphinx/_static/notations.css 
b/doc/sphinx/_static/notations.css index 4a5fa0b328..3806ba6ee6 100644 --- a/doc/sphinx/_static/notations.css +++ b/doc/sphinx/_static/notations.css @@ -10,6 +10,7 @@ .notation { /* font-family: "Ubuntu Mono", "Consolas", monospace; */ white-space: pre-wrap; + font-weight: bold; } .notation .notation-sup { @@ -85,7 +86,8 @@ padding-right: 0.6em; /* Space for the left half of the sub- and sup-scripts */ } -.notation .repeat-wrapper { +.notation .repeat-wrapper, +.notation .repeat-wrapper-with-sub { display: inline-block; position: relative; margin-right: 0.4em; /* Space for the right half of the sub- and sup-scripts */ @@ -165,10 +167,52 @@ /* Overrides */ /*************/ -.rst-content table.docutils td, .rst-content table.docutils th { - padding: 8px; /* Reduce horizontal padding */ - border-left: none; - border-right: none; +.prodn-table { + display: table; + margin: 1.5em 0px; + vertical-align: baseline; + font-weight: bold; +} + +.prodn-column-group { + display: table-column-group; +} + +.prodn-column { + display: table-column; +} + +.prodn-row-group { + display: table-row-group; +} + +.prodn-row { + display: table-row; +} + +.prodn-cell-nonterminal, +.prodn-cell-op, +.prodn-cell-production +{ + display: table-cell; +} + +.prodn-cell-nonterminal { + padding-right: 0.49em; +} + +.prodn-cell-op { + padding-right: 0.90em; + font-weight: normal; +} + +.prodn-table .notation > .repeat-wrapper { + margin-top: 0.28em; +} + +.prodn-table .notation > .repeat-wrapper-with-sub { + margin-top: 0.28em; + margin-bottom: 0.28em; } /* We can't display nested blocks otherwise */ diff --git a/doc/sphinx/addendum/extended-pattern-matching.rst b/doc/sphinx/addendum/extended-pattern-matching.rst index 45b3f6f161..15f42591ce 100644 --- a/doc/sphinx/addendum/extended-pattern-matching.rst +++ b/doc/sphinx/addendum/extended-pattern-matching.rst @@ -192,7 +192,7 @@ Disjunctive patterns -------------------- Multiple patterns that share the same right-hand-side can be -factorized using the 
notation :n:`{+| @patterns_comma}`. For +factorized using the notation :n:`{+| {+, @pattern } }`. For instance, :g:`max` can be rewritten as follows: .. coqtop:: in reset diff --git a/doc/sphinx/language/coq-library.rst b/doc/sphinx/language/coq-library.rst index cad5e4e67e..80f209fcf1 100644 --- a/doc/sphinx/language/coq-library.rst +++ b/doc/sphinx/language/coq-library.rst @@ -95,25 +95,23 @@ Logic The basic library of |Coq| comes with the definitions of standard (intuitionistic) logical connectives (they are defined as inductive constructions). They are equipped with an appealing syntax enriching the -subclass :token:`form` of the syntactic class :token:`term`. The syntax of -:token:`form` is shown below: - -.. /!\ Please keep the blanks in the lines below, experimentally they produce - a nice last column. Or even better, find a proper way to do this! - -.. productionlist:: - form : True (True) - : False (False) - : ~ `form` (not) - : `form` /\ `form` (and) - : `form` \/ `form` (or) - : `form` -> `form` (primitive implication) - : `form` <-> `form` (iff) - : forall `ident` : `type`, `form` (primitive for all) - : exists `ident` [: `specif`], `form` (ex) - : exists2 `ident` [: `specif`], `form` & `form` (ex2) - : `term` = `term` (eq) - : `term` = `term` :> `specif` (eq) +subclass :token:`form` of the syntactic class :token:`term`. The constructs +for :production:`form` are: + +============================================== ======= +True True +False False +:n:`~ @form` not +:n:`@form /\ @form` and +:n:`@form \/ @form` or +:n:`@form -> @form` primitive implication +:n:`@form <-> @form` iff +:n:`forall @ident : @type, @form` primitive for all +:n:`exists @ident {? @specif}, @form` ex +:n:`exists2 @ident {? @specif}, @form & @form` ex2 +:n:`@term = @term` eq +:n:`@term = @term :> @specif` eq +============================================== ======= .. 
note:: @@ -281,19 +279,20 @@ In the basic library, we find in ``Datatypes.v`` the definition of the basic data-types of programming, defined as inductive constructions over the sort ``Set``. Some of them come with a special syntax shown below (this syntax table is common with -the next section :ref:`specification`): - -.. productionlist:: - specif : `specif` * `specif` (prod) - : `specif` + `specif` (sum) - : `specif` + { `specif` } (sumor) - : { `specif` } + { `specif` } (sumbool) - : { `ident` : `specif` | `form` } (sig) - : { `ident` : `specif` | `form` & `form` } (sig2) - : { `ident` : `specif` & `specif` } (sigT) - : { `ident` : `specif` & `specif` & `specif` } (sigT2) - term : (`term`, `term`) (pair) - +the next section :ref:`specification`). The constructs for :production:`specif` are: + +============================================= ======= +:n:`@specif * @specif` prod +:n:`@specif + @specif` sum +:n:`@specif + { @specif }` sumor +:n:`{ @specif } + { @specif }` sumbool +:n:`{ @ident : @specif | @form }` sig +:n:`{ @ident : @specif | @form & @form }` sig2 +:n:`{ @ident : @specif & @specif }` sigT +:n:`{ @ident : @specif & @specif & @specif }` sigT2 +============================================= ======= + +The notation for pairs (elements of type prod) is: :n:`(@term, @term)` Programming +++++++++++ diff --git a/doc/sphinx/language/gallina-extensions.rst b/doc/sphinx/language/gallina-extensions.rst index 8caa289a47..78428be18f 100644 --- a/doc/sphinx/language/gallina-extensions.rst +++ b/doc/sphinx/language/gallina-extensions.rst @@ -155,19 +155,19 @@ available: .. _record_projections_grammar: - .. insertgram term_projection term_projection + .. insertprodn term_projection term_projection - .. productionlist:: coq - term_projection : `term0` .( `qualid` `args_opt` ) - : `term0` .( @ `qualid` `term1_list_opt` ) + .. 
prodn:: + term_projection ::= @term0 .( @qualid {* @arg } ) + | @term0 .( @ @qualid {* @term1 } ) Syntax of Record projections The corresponding grammar rules are given in the preceding grammar. When :token:`qualid` -denotes a projection, the syntax :n:`@term.(@qualid)` is equivalent to :n:`@qualid @term`, -the syntax :n:`@term.(@qualid {+ @arg })` to :n:`@qualid {+ @arg } @term`. -and the syntax :n:`@term.(@@qualid {+ @term })` to :n:`@@qualid {+ @term } @term`. -In each case, :token:`term` is the object projected and the +denotes a projection, the syntax :n:`@term0.(@qualid)` is equivalent to :n:`@qualid @term0`, +the syntax :n:`@term0.(@qualid {+ @arg })` to :n:`@qualid {+ @arg } @term0`. +and the syntax :n:`@term0.(@@qualid {+ @term0 })` to :n:`@@qualid {+ @term0 } @term0`. +In each case, :token:`term0` is the object projected and the other arguments are the parameters of the inductive type. @@ -1878,27 +1878,16 @@ Controlling the insertion of implicit arguments not followed by explicit argumen Explicit applications ~~~~~~~~~~~~~~~~~~~~~ -In presence of non-strict or contextual argument, or in presence of +In presence of non-strict or contextual arguments, or in presence of partial applications, the synthesis of implicit arguments may fail, so -one may have to give explicitly certain implicit arguments of an -application. The syntax for this is :n:`(@ident := @term)` where :token:`ident` is the -name of the implicit argument and term is its corresponding explicit -term. Alternatively, one can locally deactivate the hiding of implicit -arguments of a function by using the notation :n:`@qualid {+ @term }`. -This syntax extension is given in the following grammar: +one may have to explicitly give certain implicit arguments of an +application. Use the :n:`(@ident := @term)` form of :token:`arg` to do so, +where :token:`ident` is the name of the implicit argument and :token:`term` +is its corresponding explicit term. 
Alternatively, one can deactivate +the hiding of implicit arguments for a single function application using the +:n:`@ @qualid {? @univ_annot } {* @term1 }` form of :token:`term10`. -.. _explicit_app_grammar: - - .. productionlist:: explicit_apps - term : @ `qualid` `term` … `term` - : @ `qualid` - : `qualid` `argument` … `argument` - argument : `term` - : (`ident` := `term`) - - Syntax for explicitly giving implicit arguments - -.. example:: (continued) +.. example:: Syntax for explicitly giving implicit arguments (continued) .. coqtop:: all @@ -2075,11 +2064,13 @@ in :ref:`canonicalstructures`; here only a simple example is given. This is equivalent to a regular definition of :token:`ident` followed by the declaration :n:`Canonical @ident`. -.. cmd:: Print Canonical Projections +.. cmd:: Print Canonical Projections {* @ident} This displays the list of global names that are components of some canonical structure. For each of them, the canonical structure of - which it is a projection is indicated. + which it is a projection is indicated. If constants are given as + its arguments, only the unification rules that involve or are + synthesized from simultaneously all given constants will be shown. .. example:: @@ -2089,10 +2080,15 @@ in :ref:`canonicalstructures`; here only a simple example is given. Print Canonical Projections. + .. coqtop:: all + + Print Canonical Projections nat. + .. note:: - The last line would not show up if the corresponding projection (namely - :g:`Prf_equiv`) were annotated as not canonical, as described above. + The last line in the first example would not show up if the + corresponding projection (namely :g:`Prf_equiv`) were annotated as not + canonical, as described above. Implicit types of variables ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2312,17 +2308,13 @@ Printing universes Existential variables --------------------- -.. insertgram term_evar evar_binding - -.. 
productionlist:: coq - term_evar : ?[ `ident` ] - : ?[ ?`ident` ] - : ?`ident` `evar_bindings_opt` - evar_bindings_opt : @{ `evar_bindings_semi` } - : `empty` - evar_bindings_semi : `evar_bindings_semi` ; `evar_binding` - : `evar_binding` - evar_binding : `ident` := `term` +.. insertprodn term_evar evar_binding + +.. prodn:: + term_evar ::= ?[ @ident ] + | ?[ ?@ident ] + | ?@ident {? @%{ {+; @evar_binding } %} } + evar_binding ::= @ident := @term |Coq| terms can include existential variables which represents unknown subterms to eventually be replaced by actual subterms. diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst index 3cc101d06b..368724f9d2 100644 --- a/doc/sphinx/language/gallina-specification-language.rst +++ b/doc/sphinx/language/gallina-specification-language.rst @@ -16,27 +16,27 @@ In Coq, logical objects are typed to ensure their logical correctness. The rules implemented by the typing algorithm are described in Chapter :ref:`calculusofinductiveconstructions`. -About the grammars in the manual -================================ +.. About the grammars in the manual + ================================ -Grammars are presented in Backus-Naur form (BNF). Terminal symbols are -set in black ``typewriter font``. In addition, there are special notations for -regular expressions. + Grammars are presented in Backus-Naur form (BNF). Terminal symbols are + set in black ``typewriter font``. In addition, there are special notations for + regular expressions. -An expression enclosed in square brackets ``[…]`` means at most one -occurrence of this expression (this corresponds to an optional -component). + An expression enclosed in square brackets ``[…]`` means at most one + occurrence of this expression (this corresponds to an optional + component). -The notation “``entry sep … sep entry``” stands for a non empty sequence -of expressions parsed by entry and separated by the literal “``sep``” [1]_. 
+ The notation “``entry sep … sep entry``” stands for a non empty sequence + of expressions parsed by entry and separated by the literal “``sep``” [1]_. -Similarly, the notation “``entry … entry``” stands for a non empty -sequence of expressions parsed by the “``entry``” entry, without any -separator between. + Similarly, the notation “``entry … entry``” stands for a non empty + sequence of expressions parsed by the “``entry``” entry, without any + separator between. -At the end, the notation “``[entry sep … sep entry]``” stands for a -possibly empty sequence of expressions parsed by the “``entry``” entry, -separated by the literal “``sep``”. + At the end, the notation “``[entry sep … sep entry]``” stands for a + possibly empty sequence of expressions parsed by the “``entry``” entry, + separated by the literal “``sep``”. .. _lexical-conventions: @@ -58,10 +58,12 @@ Identifiers recognized by the following grammar (except that the string ``_`` is reserved; it is not a valid identifier): - .. productionlist:: coq - ident : `first_letter`[`subsequent_letter`…`subsequent_letter`] - first_letter : a..z ∣ A..Z ∣ _ ∣ `unicode_letter` - subsequent_letter : `first_letter` ∣ 0..9 ∣ ' ∣ `unicode_id_part` + .. insertprodn ident subsequent_letter + + .. prodn:: + ident ::= @first_letter {* @subsequent_letter } + first_letter ::= {| a .. z | A .. Z | _ | @unicode_letter } + subsequent_letter ::= {| @first_letter | @digit | ' | @unicode_id_part } All characters are meaningful. In particular, identifiers are case-sensitive. :production:`unicode_letter` non-exhaustively includes Latin, @@ -77,13 +79,13 @@ Numerals integer. Underscores embedded in the digits are ignored, for example ``1_000_000`` is the same as ``1000000``. - .. productionlist:: coq - numeral : `num`[. `num`][`exp`[`sign`]`num`] - int : [-]`num` - num : `digit`…`digit` - digit : 0..9 - exp : e | E - sign : + | - + .. insertprodn numeral digit + + .. prodn:: + numeral ::= {+ @digit } {? . {+ @digit } } {? 
{| e | E } {? {| + | - } } {+ @digit } } + int ::= {? - } {+ @digit } + num ::= {+ @digit } + digit ::= 0 .. 9 Strings Strings begin and end with ``"`` (double quote). Use ``""`` to represent @@ -139,50 +141,39 @@ presentation of Cic is given in Chapter :ref:`calculusofinductiveconstructions`. are given in Chapter :ref:`extensionsofgallina`. How to customize the syntax is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`. -.. insertgram term binders_opt - -.. productionlist:: coq - term : forall `open_binders` , `term` - : fun `open_binders` => `term` - : `term_let` - : if `term` `as_return_type_opt` then `term` else `term` - : `term_fix` - : `term100` - term100 : `term_cast` - : `term10` - term10 : `term1` `args` - : @ `qualid` `universe_annot_opt` `term1_list_opt` - : `term1` - args : `args` `arg` - : `arg` - arg : ( `ident` := `term` ) - : `term1` - term1_list_opt : `term1_list_opt` `term1` - : `empty` - empty : - term1 : `term_projection` - : `term0` % `ident` - : `term0` - args_opt : `args` - : `empty` - term0 : `qualid` `universe_annot_opt` - : `sort` - : `numeral` - : `string` - : _ - : `term_evar` - : `term_match` - : ( `term` ) - : {| `fields_def` |} - : `{ `term` } - : `( `term` ) - : ltac : ( `ltac_expr` ) - fields_def : `field_def` ; `fields_def` - : `field_def` - : `empty` - field_def : `qualid` `binders_opt` := `term` - binders_opt : `binders` - : `empty` +.. insertprodn term field_def + +.. prodn:: + term ::= forall @open_binders , @term + | fun @open_binders => @term + | @term_let + | if @term {? {? as @name } return @term100 } then @term else @term + | @term_fix + | @term_cofix + | @term100 + term100 ::= @term_cast + | @term10 + term10 ::= @term1 {+ @arg } + | @ @qualid {? @univ_annot } {* @term1 } + | @term1 + arg ::= ( @ident := @term ) + | @term1 + term1 ::= @term_projection + | @term0 % @ident + | @term0 + term0 ::= @qualid {? 
@univ_annot } + | @sort + | @numeral + | @string + | _ + | @term_evar + | @term_match + | ( @term ) + | %{%| {* @field_def } %|%} + | `%{ @term %} + | `( @term ) + | ltac : ( @ltac_expr ) + field_def ::= @qualid {* @binder } := @term Types ----- @@ -196,12 +187,11 @@ of types inside the syntactic class :token:`term`. Qualified identifiers and simple identifiers -------------------------------------------- -.. insertgram qualid field +.. insertprodn qualid field_ident -.. productionlist:: coq - qualid : `qualid` `field` - : `ident` - field : .`ident` +.. prodn:: + qualid ::= @ident {* @field_ident } + field_ident ::= .@ident *Qualified identifiers* (:token:`qualid`) denote *global constants* (definitions, lemmas, theorems, remarks or facts), *global variables* @@ -210,7 +200,7 @@ types*. *Simple identifiers* (or shortly :token:`ident`) are a syntactic subset of qualified identifiers. Identifiers may also denote *local variables*, while qualified identifiers do not. -Field identifiers, written :token:`field`, are identifiers prefixed by +Field identifiers, written :token:`field_ident`, are identifiers prefixed by `.` (dot) with no blank between the dot and the identifier. @@ -237,34 +227,27 @@ numbers (see :ref:`datatypes`). Sorts ----- -.. insertgram sort universe_level - -.. productionlist:: coq - sort : Set - : Prop - : SProp - : Type - : Type @{ _ } - : Type @{ `universe` } - universe : max ( `universe_exprs_comma` ) - : `universe_expr` - universe_exprs_comma : `universe_exprs_comma` , `universe_expr` - : `universe_expr` - universe_expr : `universe_name` `universe_increment_opt` - universe_name : `qualid` - : Set - : Prop - universe_increment_opt : + `num` - : `empty` - universe_annot_opt : @{ `universe_levels_opt` } - : `empty` - universe_levels_opt : `universe_levels_opt` `universe_level` - : `empty` - universe_level : Set - : Prop - : Type - : _ - : `qualid` +.. insertprodn sort univ_annot + +.. 
prodn:: + sort ::= Set + | Prop + | SProp + | Type + | Type @%{ _ %} + | Type @%{ @universe %} + universe ::= max ( {+, @universe_expr } ) + | @universe_expr + universe_expr ::= @universe_name {? + @num } + universe_name ::= @qualid + | Set + | Prop + universe_level ::= Set + | Prop + | Type + | _ + | @qualid + univ_annot ::= @%{ {* @universe_level } %} There are four sorts :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. @@ -272,12 +255,12 @@ There are four sorts :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. propositions* (also called *strict propositions*). - :g:`Prop` is the universe of *logical propositions*. The logical propositions - themselves are typing the proofs. We denote propositions by :production:`form`. + themselves are typing the proofs. We denote propositions by :token:`form`. This constitutes a semantic subclass of the syntactic class :token:`term`. - :g:`Set` is the universe of *program types* or *specifications*. The specifications themselves are typing the programs. We denote - specifications by :production:`specif`. This constitutes a semantic subclass of + specifications by :token:`specif`. This constitutes a semantic subclass of the syntactic class :token:`term`. - :g:`Type` is the type of sorts. @@ -289,34 +272,24 @@ More on sorts can be found in Section :ref:`sorts`. Binders ------- -.. insertgram open_binders exclam_opt - -.. 
productionlist:: coq - open_binders : `names` : `term` - : `binders` - names : `names` `name` - : `name` - name : _ - : `ident` - binders : `binders` `binder` - : `binder` - binder : `name` - : ( `names` : `term` ) - : ( `name` `colon_term_opt` := `term` ) - : { `name` } - : { `names` `colon_term_opt` } - : `( `typeclass_constraints_comma` ) - : `{ `typeclass_constraints_comma` } - : ' `pattern0` - : ( `name` : `term` | `term` ) - typeclass_constraints_comma : `typeclass_constraints_comma` , `typeclass_constraint` - : `typeclass_constraint` - typeclass_constraint : `exclam_opt` `term` - : { `name` } : `exclam_opt` `term` - : `name` : `exclam_opt` `term` - exclam_opt : ! - : `empty` - +.. insertprodn open_binders typeclass_constraint + +.. prodn:: + open_binders ::= {+ @name } : @term + | {+ @binder } + name ::= _ + | @ident + binder ::= @name + | ( {+ @name } : @term ) + | ( @name {? : @term } := @term ) + | %{ {+ @name } {? : @term } %} + | `( {+, @typeclass_constraint } ) + | `%{ {+, @typeclass_constraint } %} + | ' @pattern0 + | ( @name : @term %| @term ) + typeclass_constraint ::= {? ! } @term + | %{ @name %} : {? ! } @term + | @name : {? ! } @term Various constructions such as :g:`fun`, :g:`forall`, :g:`fix` and :g:`cofix` *bind* variables. A binding is represented by an identifier. If the binding @@ -335,7 +308,7 @@ variable can be introduced at the same time. It is also possible to give the type of the variable as follows: :n:`(@ident : @type := @term)`. -Lists of :token:`binder` are allowed. In the case of :g:`fun` and :g:`forall`, +Lists of :token:`binder`\s are allowed. In the case of :g:`fun` and :g:`forall`, it is intended that at least one binder of the list is an assumption otherwise fun and forall gets identical. Moreover, parentheses can be omitted in the case of a single sequence of bindings sharing the same type (e.g.: @@ -411,13 +384,13 @@ Section :ref:`explicit-applications`). Type cast --------- -.. insertgram term_cast term_cast +.. 
insertprodn term_cast term_cast -.. productionlist:: coq - term_cast : `term10` <: `term` - : `term10` <<: `term` - : `term10` : `term` - : `term10` :> +.. prodn:: + term_cast ::= @term10 <: @term + | @term10 <<: @term + | @term10 : @term + | @term10 :> The expression :n:`@term : @type` is a type cast expression. It enforces the type of :token:`term` to be :token:`type`. @@ -444,21 +417,14 @@ guess the missing piece of information. Let-in definitions ------------------ -.. insertgram term_let names_comma +.. insertprodn term_let term_let -.. productionlist:: coq - term_let : let `name` `colon_term_opt` := `term` in `term` - : let `name` `binders` `colon_term_opt` := `term` in `term` - : let `single_fix` in `term` - : let `names_tuple` `as_return_type_opt` := `term` in `term` - : let ' `pattern` := `term` `return_type_opt` in `term` - : let ' `pattern` in `pattern` := `term` `return_type` in `term` - colon_term_opt : : `term` - : `empty` - names_tuple : ( `names_comma` ) - : () - names_comma : `names_comma` , `name` - : `name` +.. prodn:: + term_let ::= let @name {? : @term } := @term in @term + | let @name {+ @binder } {? : @term } := @term in @term + | let ( {*, @name } ) {? {? as @name } return @term100 } := @term in @term + | let ' @pattern := @term {? return @term100 } in @term + | let ' @pattern in @pattern := @term return @term100 in @term :n:`let @ident := @term in @term’` denotes the local binding of :token:`term` to the variable @@ -471,57 +437,25 @@ stands for :n:`let @ident := fun {+ @binder} => @term in @term’`. Definition by cases: match -------------------------- -.. insertgram term_match record_pattern - -.. 
productionlist:: coq - term_match : match `case_items_comma` `return_type_opt` with `or_opt` `eqns_or_opt` end - case_items_comma : `case_items_comma` , `case_item` - : `case_item` - return_type_opt : `return_type` - : `empty` - as_return_type_opt : `as_name_opt` `return_type` - : `empty` - return_type : return `term100` - case_item : `term100` `as_name_opt` `in_opt` - as_name_opt : as `name` - : `empty` - in_opt : in `pattern` - : `empty` - or_opt : | - : `empty` - eqns_or_opt : `eqns_or` - : `empty` - eqns_or : `eqns_or` | `eqn` - : `eqn` - eqn : `patterns_comma_list_or` => `term` - patterns_comma_list_or : `patterns_comma_list_or` | `patterns_comma` - : `patterns_comma` - patterns_comma : `patterns_comma` , `pattern` - : `pattern` - pattern : `pattern10` : `term` - : `pattern10` - pattern10 : `pattern1` as `name` - : `pattern1_list` - : @ `qualid` `pattern1_list_opt` - : `pattern1` - pattern1_list : `pattern1_list` `pattern1` - : `pattern1` - pattern1_list_opt : `pattern1_list` - : `empty` - pattern1 : `pattern0` % `ident` - : `pattern0` - pattern0 : `qualid` - : {| `record_patterns_opt` |} - : _ - : ( `patterns_or` ) - : `numeral` - : `string` - patterns_or : `patterns_or` | `pattern` - : `pattern` - record_patterns_opt : `record_patterns_opt` ; `record_pattern` - : `record_pattern` - : `empty` - record_pattern : `qualid` := `pattern` +.. insertprodn term_match pattern0 + +.. prodn:: + term_match ::= match {+, @case_item } {? return @term100 } with {? %| } {*| @eqn } end + case_item ::= @term100 {? as @name } {? 
in @pattern } + eqn ::= {+| {+, @pattern } } => @term + pattern ::= @pattern10 : @term + | @pattern10 + pattern10 ::= @pattern1 as @name + | @pattern1 {* @pattern1 } + | @ @qualid {* @pattern1 } + pattern1 ::= @pattern0 % @ident + | @pattern0 + pattern0 ::= @qualid + | %{%| {* @qualid := @pattern } %|%} + | _ + | ( {+| @pattern } ) + | @numeral + | @string Objects of inductive types can be destructured by a case-analysis construction called *pattern matching* expression. A pattern matching @@ -531,31 +465,30 @@ to apply specific treatments accordingly. This paragraph describes the basic form of pattern matching. See Section :ref:`Mult-match` and Chapter :ref:`extendedpatternmatching` for the description of the general form. The basic form of pattern matching is characterized -by a single :token:`case_item` expression, a :token:`patterns_comma` restricted to a +by a single :token:`case_item` expression, an :token:`eqn` restricted to a single :token:`pattern` and :token:`pattern` restricted to the form :n:`@qualid {* @ident}`. -The expression match ":token:`term`:math:`_0` :token:`return_type_opt` with -:token:`pattern`:math:`_1` => :token:`term`:math:`_1` :math:`|` … :math:`|` -:token:`pattern`:math:`_n` => :token:`term`:math:`_n` end" denotes a -*pattern matching* over the term :token:`term`:math:`_0` (expected to be -of an inductive type :math:`I`). The terms :token:`term`:math:`_1`\ …\ -:token:`term`:math:`_n` are the *branches* of the pattern matching -expression. Each of :token:`pattern`:math:`_i` has a form :token:`qualid` -:token:`ident` where :token:`qualid` must denote a constructor. There should be +The expression +:n:`match @term {? return @term100 } with {+| @pattern__i => @term__i } end` denotes a +*pattern matching* over the term :n:`@term` (expected to be +of an inductive type :math:`I`). The :n:`@term__i` +are the *branches* of the pattern matching +expression. 
Each :n:`@pattern__i` has the form :n:`@qualid @ident` +where :n:`@qualid` must denote a constructor. There should be exactly one branch for every constructor of :math:`I`. -The :token:`return_type_opt` expresses the type returned by the whole match +The :n:`return @term100` clause gives the type returned by the whole match expression. There are several cases. In the *non dependent* case, all -branches have the same type, and the :token:`return_type_opt` is the common type of -branches. In this case, :token:`return_type_opt` can usually be omitted as it can be -inferred from the type of the branches [2]_. +branches have the same type, and the :n:`return @term100` specifies that type. +In this case, :n:`return @term100` can usually be omitted as it can be +inferred from the type of the branches [1]_. In the *dependent* case, there are three subcases. In the first subcase, the type in each branch may depend on the exact value being matched in the branch. In this case, the whole pattern matching itself depends on the term being matched. This dependency of the term being matched in the -return type is expressed with an “as :token:`ident`” clause where :token:`ident` +return type is expressed with an :n:`@ident` clause where :token:`ident` is dependent in the return type. For instance, in the following example: .. coqtop:: in @@ -604,19 +537,19 @@ type of each branch can depend on the type dependencies specific to the branch and the whole pattern matching expression has a type determined by the specific dependencies in the type of the term being matched. 
This dependency of the return type in the annotations of the inductive type -is expressed using a “:g:`in` :math:`I` :g:`_ … _` :token:`pattern`:math:`_1` … -:token:`pattern`:math:`_n`” clause, where +is expressed with a clause in the form +:n:`in @qualid {+ _ } {+ @pattern }`, where -- :math:`I` is the inductive type of the term being matched; +- :token:`qualid` is the inductive type of the term being matched; -- the :g:`_` are matching the parameters of the inductive type: the +- the holes :n:`_` match the parameters of the inductive type: the return type is not dependent on them. -- the :token:`pattern`:math:`_i` are matching the annotations of the +- each :n:`@pattern` matches the annotations of the inductive type: the return type is dependent on them -- in the basic case which we describe below, each :token:`pattern`:math:`_i` - is a name :token:`ident`:math:`_i`; see :ref:`match-in-patterns` for the +- in the basic case which we describe below, each :n:`@pattern` + is a name :n:`@ident`; see :ref:`match-in-patterns` for the general case For instance, in the following example: @@ -651,27 +584,18 @@ Sections :ref:`if-then-else` and :ref:`irrefutable-patterns`). Recursive and co-recursive functions: fix and cofix --------------------------------------------------- -.. insertgram term_fix term1_extended_opt +.. insertprodn term_fix term1_extended + +.. prodn:: + term_fix ::= let fix @fix_body in @term + | fix @fix_body {? {+ with @fix_body } for @ident } + fix_body ::= @ident {* @binder } {? @fixannot } {? : @term } := @term + fixannot ::= %{ struct @ident %} + | %{ wf @term1_extended @ident %} + | %{ measure @term1_extended {? @ident } {? @term1_extended } %} + term1_extended ::= @term1 + | @ @qualid {? @univ_annot } -.. 
productionlist:: coq - term_fix : `single_fix` - : `single_fix` with `fix_bodies` for `ident` - single_fix : fix `fix_body` - : cofix `fix_body` - fix_bodies : `fix_bodies` with `fix_body` - : `fix_body` - fix_body : `ident` `binders_opt` `fixannot_opt` `colon_term_opt` := `term` - fixannot_opt : `fixannot` - : `empty` - fixannot : { struct `ident` } - : { wf `term1_extended` `ident` } - : { measure `term1_extended` `ident_opt` `term1_extended_opt` } - term1_extended : `term1` - : @ `qualid` `universe_annot_opt` - ident_opt : `ident` - : `empty` - term1_extended_opt : `term1_extended` - : `empty` The expression “``fix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``:`` :token:`type`:math:`_1` ``:=`` :token:`term`:math:`_1` ``with … with`` @@ -681,6 +605,17 @@ The expression “``fix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``: recursion. It is the local counterpart of the :cmd:`Fixpoint` command. When :math:`n=1`, the “``for`` :token:`ident`:math:`_i`” clause is omitted. +The association of a single fixpoint and a local definition have a special +syntax: :n:`let fix @ident @binders := @term in` stands for +:n:`let @ident := fix @ident @binders := @term in`. The same applies for co-fixpoints. + +.. insertprodn term_cofix cofix_body + +.. prodn:: + term_cofix ::= let cofix @cofix_body in @term + | cofix @cofix_body {? {+ with @cofix_body } for @ident } + cofix_body ::= @ident {* @binder } {? : @term } := @term + The expression “``cofix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``:`` :token:`type`:math:`_1` ``with … with`` :token:`ident`:math:`_n` :token:`binder`:math:`_n` : :token:`type`:math:`_n` ``for`` :token:`ident`:math:`_i`” denotes the @@ -688,10 +623,6 @@ The expression “``cofix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ` co-recursion. It is the local counterpart of the :cmd:`CoFixpoint` command. When :math:`n=1`, the “``for`` :token:`ident`:math:`_i`” clause is omitted. 
-The association of a single fixpoint and a local definition have a special -syntax: :n:`let fix @ident @binders := @term in` stands for -:n:`let @ident := fix @ident @binders := @term in`. The same applies for co-fixpoints. - .. _vernacular: The Vernacular @@ -715,6 +646,8 @@ The Vernacular : ( `ident` … `ident` : `term` ) … ( `ident` … `ident` : `term` ) definition : [Local] Definition `ident` [`binders`] [: `term`] := `term` . : Let `ident` [`binders`] [: `term`] := `term` . + binders : binders binder + : binder inductive : Inductive `ind_body` with … with `ind_body` . : CoInductive `ind_body` with … with `ind_body` . ind_body : `ident` [`binders`] : `term` := @@ -1545,7 +1478,7 @@ Chapter :ref:`Tactics`. The basic assertion command is: The name you provided is already defined. You have then to choose another name. - .. exn:: Nested proofs are not allowed unless you turn the :flag:`Nested Proofs Allowed` flag on. + .. exn:: Nested proofs are not allowed unless you turn the Nested Proofs Allowed flag on. You are asserting a new statement while already being in proof editing mode. This feature, called nested proofs, is disabled by default. @@ -1715,10 +1648,5 @@ variety of commands: command with some attribute it does not understand. .. [1] - This is similar to the expression “*entry* :math:`\{` sep *entry* - :math:`\}`” in standard BNF, or “*entry* :math:`(` sep *entry* - :math:`)`\ \*” in the syntax of regular expressions. - -.. [2] Except if the inductive type is empty in which case there is no equation that can be used to infer the return type. diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py index eff70bdac5..1f9178f4b6 100644 --- a/doc/tools/coqrst/coqdomain.py +++ b/doc/tools/coqrst/coqdomain.py @@ -403,55 +403,60 @@ class TableObject(NotationObject): class ProductionObject(CoqObject): r"""A grammar production. - This is useful if you intend to document individual grammar productions. 
- Otherwise, use Sphinx's `production lists + Use ``.. prodn`` to document grammar productions instead of Sphinx + `production lists <http://www.sphinx-doc.org/en/stable/markup/para.html#directive-productionlist>`_. - Unlike ``.. productionlist``\ s, this directive accepts notation syntax. - - - Usage:: - - .. prodn:: token += production - .. prodn:: token ::= production + prodn displays multiple productions together with alignment similar to ``.. productionlist``, + however unlike ``.. productionlist``\ s, this directive accepts notation syntax. Example:: - .. prodn:: term += let: @pattern := @term in @term .. prodn:: occ_switch ::= { {? {| + | - } } {* @num } } + term += let: @pattern := @term in @term + | second_production + + The first line defines "occ_switch", which must be unique in the document. The second + references and expands the definition of "term", whose main definition is elsewhere + in the document. The third form is for continuing the + definition of a nonterminal when it has multiple productions. It leaves the first + column in the output blank. """ subdomain = "prodn" #annotation = "Grammar production" + # handle_signature is called for each line of input in the prodn:: + # 'signatures' accumulates them in order to combine the lines into a single table: + signatures = None + def _render_signature(self, signature, signode): raise NotImplementedError(self) SIG_ERROR = ("{}: Invalid syntax in ``.. prodn::`` directive" + "\nExpected ``name ::= ...`` or ``name += ...``" - + " (e.g. ``pattern += constr:(@ident)``)") + + " (e.g. 
``pattern += constr:(@ident)``)\n" + + " in `{}`") def handle_signature(self, signature, signode): - nsplits = 2 - parts = signature.split(maxsplit=nsplits) - if len(parts) != 3: - loc = os.path.basename(get_node_location(signode)) - raise ExtensionError(ProductionObject.SIG_ERROR.format(loc)) - - lhs, op, rhs = (part.strip() for part in parts) - if op not in ["::=", "+="]: - loc = os.path.basename(get_node_location(signode)) - raise ExtensionError(ProductionObject.SIG_ERROR.format(loc)) - - self._render_annotation(signode) - - lhs_op = '{} {} '.format(lhs, op) - lhs_node = nodes.literal(lhs_op, lhs_op) - - position = self.state_machine.get_source_and_line(self.lineno) - rhs_node = notation_to_sphinx(rhs, *position) - signode += addnodes.desc_name(signature, '', lhs_node, rhs_node) + parts = signature.split(maxsplit=1) + if parts[0].strip() == "|" and len(parts) == 2: + lhs = "" + op = "|" + rhs = parts[1].strip() + else: + nsplits = 2 + parts = signature.split(maxsplit=nsplits) + if len(parts) != 3: + loc = os.path.basename(get_node_location(signode)) + raise ExtensionError(ProductionObject.SIG_ERROR.format(loc, signature)) + else: + lhs, op, rhs = (part.strip() for part in parts) + if op not in ["::=", "+="]: + loc = os.path.basename(get_node_location(signode)) + raise ExtensionError(ProductionObject.SIG_ERROR.format(loc, signature)) + self.signatures.append((lhs, op, rhs)) return ('token', lhs) if op == '::=' else None def _add_index_entry(self, name, target): @@ -468,6 +473,49 @@ class ProductionObject(CoqObject): self._warn_if_duplicate_name(objects, name) objects[name] = env.docname, targetid + def run(self): + self.signatures = [] + indexnode = super().run()[0] # makes calls to handle_signature + + table = nodes.inline(classes=['prodn-table']) + tgroup = nodes.inline(classes=['prodn-column-group']) + for i in range(3): + tgroup += nodes.inline(classes=['prodn-column']) + table += tgroup + tbody = nodes.inline(classes=['prodn-row-group']) + table += tbody + + 
# create rows + for signature in self.signatures: + lhs, op, rhs = signature + position = self.state_machine.get_source_and_line(self.lineno) + + row = nodes.inline(classes=['prodn-row']) + entry = nodes.inline(classes=['prodn-cell-nonterminal']) + if lhs != "": + target_name = 'grammar-token-' + lhs + target = nodes.target('', '', ids=[target_name], names=[target_name]) + # putting prodn-target on the target node won't appear in the tex file + inline = nodes.inline(classes=['prodn-target']) + inline += target + entry += inline + entry += notation_to_sphinx('@'+lhs, *position) + else: + entry += nodes.literal('', '') + row += entry + + entry = nodes.inline(classes=['prodn-cell-op']) + entry += nodes.literal(op, op) + row += entry + + entry = nodes.inline(classes=['prodn-cell-production']) + entry += notation_to_sphinx(rhs, *position) + row += entry + + tbody += row + + return [indexnode, table] # only this node goes into the doc + class ExceptionObject(NotationObject): """An error raised by a Coq command or tactic. diff --git a/doc/tools/coqrst/notations/TacticNotations.g b/doc/tools/coqrst/notations/TacticNotations.g index 905b52525a..f9cf26a21e 100644 --- a/doc/tools/coqrst/notations/TacticNotations.g +++ b/doc/tools/coqrst/notations/TacticNotations.g @@ -42,7 +42,8 @@ LALT: '{|'; LGROUP: '{+' | '{*' | '{?'; LBRACE: '{'; RBRACE: '}'; -ESCAPED: '%{' | '%}' | '%|'; +// todo: need a cleaner way to escape the 3-character strings here +ESCAPED: '%{' | '%}' | '%|' | '`%{' | '@%{'; PIPE: '|'; ATOM: '@' | '_' | ~[@_{}| ]+; ID: '@' ('_'? 
[a-zA-Z0-9])+; diff --git a/doc/tools/coqrst/notations/TacticNotationsLexer.py b/doc/tools/coqrst/notations/TacticNotationsLexer.py index e3a115e32a..7bda849010 100644 --- a/doc/tools/coqrst/notations/TacticNotationsLexer.py +++ b/doc/tools/coqrst/notations/TacticNotationsLexer.py @@ -8,33 +8,35 @@ import sys def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\f") - buf.write("M\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("S\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\3\2\3\2\3\3\3\3") buf.write("\3\3\3\3\3\3\3\3\5\3!\n\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6") - buf.write("\3\6\3\6\3\6\5\6-\n\6\3\7\3\7\3\b\3\b\6\b\63\n\b\r\b\16") - buf.write("\b\64\5\b\67\n\b\3\t\3\t\5\t;\n\t\3\t\6\t>\n\t\r\t\16") - buf.write("\t?\3\n\3\n\3\n\6\nE\n\n\r\n\16\nF\3\13\6\13J\n\13\r\13") - buf.write("\16\13K\2\2\f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13") - buf.write("\25\f\3\2\5\4\2BBaa\6\2\"\"BBaa}\177\5\2\62;C\\c|\2V\2") - buf.write("\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3") - buf.write("\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2") - buf.write("\2\2\2\25\3\2\2\2\3\27\3\2\2\2\5 \3\2\2\2\7\"\3\2\2\2") - buf.write("\t$\3\2\2\2\13,\3\2\2\2\r.\3\2\2\2\17\66\3\2\2\2\218\3") - buf.write("\2\2\2\23A\3\2\2\2\25I\3\2\2\2\27\30\7}\2\2\30\31\7~\2") - buf.write("\2\31\4\3\2\2\2\32\33\7}\2\2\33!\7-\2\2\34\35\7}\2\2\35") - buf.write("!\7,\2\2\36\37\7}\2\2\37!\7A\2\2 \32\3\2\2\2 \34\3\2\2") - buf.write("\2 \36\3\2\2\2!\6\3\2\2\2\"#\7}\2\2#\b\3\2\2\2$%\7\177") - buf.write("\2\2%\n\3\2\2\2&\'\7\'\2\2\'-\7}\2\2()\7\'\2\2)-\7\177") - buf.write("\2\2*+\7\'\2\2+-\7~\2\2,&\3\2\2\2,(\3\2\2\2,*\3\2\2\2") - buf.write("-\f\3\2\2\2./\7~\2\2/\16\3\2\2\2\60\67\t\2\2\2\61\63\n") - buf.write("\3\2\2\62\61\3\2\2\2\63\64\3\2\2\2\64\62\3\2\2\2\64\65") - buf.write("\3\2\2\2\65\67\3\2\2\2\66\60\3\2\2\2\66\62\3\2\2\2\67") - 
buf.write("\20\3\2\2\28=\7B\2\29;\7a\2\2:9\3\2\2\2:;\3\2\2\2;<\3") - buf.write("\2\2\2<>\t\4\2\2=:\3\2\2\2>?\3\2\2\2?=\3\2\2\2?@\3\2\2") - buf.write("\2@\22\3\2\2\2AB\7a\2\2BD\7a\2\2CE\t\4\2\2DC\3\2\2\2E") - buf.write("F\3\2\2\2FD\3\2\2\2FG\3\2\2\2G\24\3\2\2\2HJ\7\"\2\2IH") - buf.write("\3\2\2\2JK\3\2\2\2KI\3\2\2\2KL\3\2\2\2L\26\3\2\2\2\13") - buf.write("\2 ,\64\66:?FK\2") + buf.write("\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\63\n\6\3\7\3") + buf.write("\7\3\b\3\b\6\b9\n\b\r\b\16\b:\5\b=\n\b\3\t\3\t\5\tA\n") + buf.write("\t\3\t\6\tD\n\t\r\t\16\tE\3\n\3\n\3\n\6\nK\n\n\r\n\16") + buf.write("\nL\3\13\6\13P\n\13\r\13\16\13Q\2\2\f\3\3\5\4\7\5\t\6") + buf.write("\13\7\r\b\17\t\21\n\23\13\25\f\3\2\5\4\2BBaa\6\2\"\"B") + buf.write("Baa}\177\5\2\62;C\\c|\2^\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3") + buf.write("\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2") + buf.write("\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\3\27\3\2\2\2") + buf.write("\5 \3\2\2\2\7\"\3\2\2\2\t$\3\2\2\2\13\62\3\2\2\2\r\64") + buf.write("\3\2\2\2\17<\3\2\2\2\21>\3\2\2\2\23G\3\2\2\2\25O\3\2\2") + buf.write("\2\27\30\7}\2\2\30\31\7~\2\2\31\4\3\2\2\2\32\33\7}\2\2") + buf.write("\33!\7-\2\2\34\35\7}\2\2\35!\7,\2\2\36\37\7}\2\2\37!\7") + buf.write("A\2\2 \32\3\2\2\2 \34\3\2\2\2 \36\3\2\2\2!\6\3\2\2\2\"") + buf.write("#\7}\2\2#\b\3\2\2\2$%\7\177\2\2%\n\3\2\2\2&\'\7\'\2\2") + buf.write("\'\63\7}\2\2()\7\'\2\2)\63\7\177\2\2*+\7\'\2\2+\63\7~") + buf.write("\2\2,-\7b\2\2-.\7\'\2\2.\63\7}\2\2/\60\7B\2\2\60\61\7") + buf.write("\'\2\2\61\63\7}\2\2\62&\3\2\2\2\62(\3\2\2\2\62*\3\2\2") + buf.write("\2\62,\3\2\2\2\62/\3\2\2\2\63\f\3\2\2\2\64\65\7~\2\2\65") + buf.write("\16\3\2\2\2\66=\t\2\2\2\679\n\3\2\28\67\3\2\2\29:\3\2") + buf.write("\2\2:8\3\2\2\2:;\3\2\2\2;=\3\2\2\2<\66\3\2\2\2<8\3\2\2") + buf.write("\2=\20\3\2\2\2>C\7B\2\2?A\7a\2\2@?\3\2\2\2@A\3\2\2\2A") + buf.write("B\3\2\2\2BD\t\4\2\2C@\3\2\2\2DE\3\2\2\2EC\3\2\2\2EF\3") + buf.write("\2\2\2F\22\3\2\2\2GH\7a\2\2HJ\7a\2\2IK\t\4\2\2JI\3\2\2") + 
buf.write("\2KL\3\2\2\2LJ\3\2\2\2LM\3\2\2\2M\24\3\2\2\2NP\7\"\2\2") + buf.write("ON\3\2\2\2PQ\3\2\2\2QO\3\2\2\2QR\3\2\2\2R\26\3\2\2\2\13") + buf.write("\2 \62:<@ELQ\2") return buf.getvalue() diff --git a/doc/tools/coqrst/notations/html.py b/doc/tools/coqrst/notations/html.py index d9c5383774..1136ee4997 100644 --- a/doc/tools/coqrst/notations/html.py +++ b/doc/tools/coqrst/notations/html.py @@ -61,7 +61,7 @@ class TacticNotationsToHTMLVisitor(TacticNotationsVisitor): tags.sub(sub.getText()[1:]) def visitEscaped(self, ctx:TacticNotationsParser.EscapedContext): - tags.span(ctx.ESCAPED().getText()[1:]) + tags.span(ctx.ESCAPED().getText().replace("%", "")) def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): text(" ") diff --git a/doc/tools/coqrst/notations/plain.py b/doc/tools/coqrst/notations/plain.py index 93a7ec4683..23996b0d63 100644 --- a/doc/tools/coqrst/notations/plain.py +++ b/doc/tools/coqrst/notations/plain.py @@ -53,7 +53,7 @@ class TacticNotationsToDotsVisitor(TacticNotationsVisitor): self.buffer.write("‘{}’".format(ctx.ID().getText()[1:])) def visitEscaped(self, ctx:TacticNotationsParser.EscapedContext): - self.buffer.write(ctx.ESCAPED().getText()[1:]) + self.buffer.write(ctx.ESCAPED().getText().replace("%", "")) def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): self.buffer.write(" ") diff --git a/doc/tools/coqrst/notations/sphinx.py b/doc/tools/coqrst/notations/sphinx.py index 4ca0a2ef83..ab18d136b8 100644 --- a/doc/tools/coqrst/notations/sphinx.py +++ b/doc/tools/coqrst/notations/sphinx.py @@ -45,7 +45,11 @@ class TacticNotationsToSphinxVisitor(TacticNotationsVisitor): def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): # Uses inline nodes instead of subscript and superscript to ensure that # we get the right customization hooks at the LaTeX level - wrapper = nodes.inline('', '', classes=['repeat-wrapper']) + separator = ctx.ATOM() or ctx.PIPE() + # I wanted to have 2 independent classes 
"repeat-wrapper" and "with-sub" here, + # but that breaks the latex build (invalid .tex file) + classes = 'repeat-wrapper-with-sub' if separator else 'repeat-wrapper' + wrapper = nodes.inline('', '', classes=[classes]) children = self.visitChildren(ctx) if len(children) == 1 and self.is_alternative(children[0]): @@ -58,7 +62,6 @@ class TacticNotationsToSphinxVisitor(TacticNotationsVisitor): repeat_marker = ctx.LGROUP().getText()[1] wrapper += nodes.inline(repeat_marker, repeat_marker, classes=['notation-sup']) - separator = ctx.ATOM() or ctx.PIPE() if separator: sep = separator.getText() wrapper += nodes.inline(sep, sep, classes=['notation-sub']) @@ -72,10 +75,33 @@ class TacticNotationsToSphinxVisitor(TacticNotationsVisitor): sp += nodes.Text("}") return [sp] + def escape(self, atom): + node = nodes.inline("","") + while atom != "": + if atom[0] == "'": + node += nodes.raw("\\textquotesingle{}", "\\textquotesingle{}", format="latex") + atom = atom[1:] + elif atom[0] == "`": + node += nodes.raw("\\`{}", "\\`{}", format="latex") + atom = atom[1:] + else: + index_ap = atom.find("'") + index_bt = atom.find("`") + if index_ap == -1: + index = index_bt + elif index_bt == -1: + index = index_ap + else: + index = min(index_ap, index_bt) + lit = atom if index == -1 else atom[:index] + node += nodes.inline(lit, lit) + atom = atom[len(lit):] + return node + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): atom = ctx.ATOM().getText() sub = ctx.SUB() - node = nodes.inline(atom, atom) + node = self.escape(atom) if sub: sub_index = sub.getText()[2:] @@ -101,7 +127,7 @@ class TacticNotationsToSphinxVisitor(TacticNotationsVisitor): def visitEscaped(self, ctx:TacticNotationsParser.EscapedContext): escaped = ctx.ESCAPED().getText() - return [nodes.inline(escaped, escaped[1:])] + return [self.escape(escaped.replace("%", ""))] def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): return [nodes.Text(" ")] diff --git a/doc/tools/docgram/README.md 
b/doc/tools/docgram/README.md index a0a1809133..182532e413 100644 --- a/doc/tools/docgram/README.md +++ b/doc/tools/docgram/README.md @@ -194,14 +194,15 @@ to the grammar. ### `.rst` file updates -`doc_grammar` updates `.rst` files when it sees the following 3 lines +`doc_grammar` updates `.rst` files where it sees the following 3 lines ``` -.. insertgram <start> <end> -.. productionlist:: XXX +.. insertprodn <start> <end> + +.. prodn:: ``` -The end of the existing `productionlist` is recognized by a blank line. +The end of the existing `prodn` is recognized by a blank line. ### Other details diff --git a/doc/tools/docgram/common.edit_mlg b/doc/tools/docgram/common.edit_mlg index 06b49a0a18..9c1827f5b7 100644 --- a/doc/tools/docgram/common.edit_mlg +++ b/doc/tools/docgram/common.edit_mlg @@ -65,6 +65,7 @@ DELETE: [ | test_lpar_idnum_coloneq | test_nospace_pipe_closedcurly | test_show_goal +| ensure_fixannot (* SSR *) (* | ssr_null_entry *) @@ -101,18 +102,8 @@ hyp: [ | var ] -empty: [ -| -] - -or_opt: [ -| "|" -| empty -] - ltac_expr_opt: [ -| tactic_expr5 -| empty +| OPT tactic_expr5 ] ltac_expr_opt_list_or: [ @@ -124,7 +115,7 @@ tactic_then_gen: [ | EDIT ADD_OPT tactic_expr5 "|" tactic_then_gen | EDIT ADD_OPT tactic_expr5 ".." tactic_then_last | REPLACE OPT tactic_expr5 ".." tactic_then_last -| WITH ltac_expr_opt ".." or_opt ltac_expr_opt_list_or +| WITH ltac_expr_opt ".." OPT "|" ltac_expr_opt_list_or ] ltac_expr_opt_list_or: [ @@ -144,24 +135,23 @@ fullyqualid: [ | qualid ] - -field: [ | DELETENT ] - -field: [ +field_ident: [ | "." 
ident ] basequalid: [ | REPLACE ident fields -| WITH qualid field +| WITH ident LIST0 field_ident +| DELETE ident ] +field: [ | DELETENT ] fields: [ | DELETENT ] dirpath: [ | REPLACE ident LIST0 field | WITH ident -| dirpath field +| dirpath field_ident ] binders: [ @@ -172,45 +162,37 @@ lconstr: [ | DELETE l_constr ] -let_type_cstr: [ -| DELETE OPT [ ":" lconstr ] -| rec_type_cstr +type_cstr: [ +| REPLACE ":" lconstr +| WITH OPT ( ":" lconstr ) +| DELETE (* empty *) ] -as_name_opt: [ -| "as" name -| empty +let_type_cstr: [ +| DELETE OPT [ ":" lconstr ] +| type_cstr ] (* rename here because we want to use "return_type" for something else *) RENAME: [ -| return_type as_return_type_opt -] - -as_return_type_opt: [ -| REPLACE OPT [ OPT [ "as" name ] case_type ] -| WITH as_name_opt case_type -| empty +| return_type as_return_type ] case_item: [ | REPLACE operconstr100 OPT [ "as" name ] OPT [ "in" pattern200 ] -| WITH operconstr100 as_name_opt OPT [ "in" pattern200 ] -] - -as_dirpath: [ -| DELETE OPT [ "as" dirpath ] -| "as" dirpath -| empty +| WITH operconstr100 OPT ("as" name) OPT [ "in" pattern200 ] ] binder_constr: [ | MOVETO term_let "let" name binders let_type_cstr ":=" operconstr200 "in" operconstr200 -| MOVETO term_let "let" single_fix "in" operconstr200 -| MOVETO term_let "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type_opt ":=" operconstr200 "in" operconstr200 +| MOVETO term_fix "let" "fix" fix_decl "in" operconstr200 +| MOVETO term_cofix "let" "cofix" cofix_decl "in" operconstr200 +| MOVETO term_let "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type ":=" operconstr200 "in" operconstr200 | MOVETO term_let "let" "'" pattern200 ":=" operconstr200 "in" operconstr200 | MOVETO term_let "let" "'" pattern200 ":=" operconstr200 case_type "in" operconstr200 | MOVETO term_let "let" "'" pattern200 "in" pattern200 ":=" operconstr200 case_type "in" operconstr200 +| MOVETO term_fix "fix" fix_decls +| MOVETO term_cofix "cofix" cofix_decls ] term_let: [ @@ 
-218,8 +200,8 @@ term_let: [ | WITH "let" name let_type_cstr ":=" operconstr200 "in" operconstr200 | "let" name LIST1 binder let_type_cstr ":=" operconstr200 "in" operconstr200 (* Don't need to document that "( )" is equivalent to "()" *) -| REPLACE "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type_opt ":=" operconstr200 "in" operconstr200 -| WITH "let" [ "(" LIST1 name SEP "," ")" | "()" ] as_return_type_opt ":=" operconstr200 "in" operconstr200 +| REPLACE "let" [ "(" LIST0 name SEP "," ")" | "()" ] as_return_type ":=" operconstr200 "in" operconstr200 +| WITH "let" "(" LIST0 name SEP "," ")" as_return_type ":=" operconstr200 "in" operconstr200 | REPLACE "let" "'" pattern200 ":=" operconstr200 "in" operconstr200 | WITH "let" "'" pattern200 ":=" operconstr200 OPT case_type "in" operconstr200 | DELETE "let" "'" pattern200 ":=" operconstr200 case_type "in" operconstr200 @@ -228,6 +210,8 @@ term_let: [ atomic_constr: [ (* @Zimmi48: "string" used only for notations, but keep to be consistent with patterns *) (* | DELETE string *) +| REPLACE global univ_instance +| WITH global OPT univ_instance | REPLACE "?" "[" ident "]" | WITH "?[" ident "]" | MOVETO term_evar "?[" ident "]" @@ -253,6 +237,8 @@ operconstr10: [ (* fixme: add in as a prodn somewhere *) | MOVETO dangling_pattern_extension_rule "@" pattern_identref LIST1 identref | DELETE dangling_pattern_extension_rule +| REPLACE "@" global univ_instance LIST0 operconstr9 +| WITH "@" global OPT univ_instance LIST0 operconstr9 ] operconstr9: [ @@ -260,64 +246,45 @@ operconstr9: [ | DELETE ".." operconstr0 ".." 
] -arg_list: [ -| arg_list appl_arg -| appl_arg -] - -arg_list_opt: [ -| arg_list -| empty -] - operconstr1: [ | REPLACE operconstr0 ".(" global LIST0 appl_arg ")" -| WITH operconstr0 ".(" global arg_list_opt ")" -| MOVETO term_projection operconstr0 ".(" global arg_list_opt ")" +| WITH operconstr0 ".(" global LIST0 appl_arg ")" +| MOVETO term_projection operconstr0 ".(" global LIST0 appl_arg ")" | MOVETO term_projection operconstr0 ".(" "@" global LIST0 ( operconstr9 ) ")" ] operconstr0: [ (* @Zimmi48: This rule is a hack, according to Hugo, and should not be shown in the manual. *) | DELETE "{" binder_constr "}" +| REPLACE "{|" record_declaration bar_cbrace +| WITH "{|" LIST0 field_def bar_cbrace ] -single_fix: [ -| DELETE fix_kw fix_decl -| "fix" fix_decl -| "cofix" fix_decl +fix_decls: [ +| DELETE fix_decl +| REPLACE fix_decl "with" LIST1 fix_decl SEP "with" "for" identref +| WITH fix_decl OPT ( LIST1 ("with" fix_decl) "for" identref ) ] -fix_kw: [ | DELETENT ] +cofix_decls: [ +| DELETE cofix_decl +| REPLACE cofix_decl "with" LIST1 cofix_decl SEP "with" "for" identref +| WITH cofix_decl OPT ( LIST1 ( "with" cofix_decl ) "for" identref ) +] -binders_fixannot: [ -(* -| REPLACE impl_name_head impl_ident_tail binders_fixannot -| WITH impl_name_head impl_ident_tail "}" binders_fixannot -*) -(* Omit this complex detail. 
See https://github.com/coq/coq/pull/10614#discussion_r344118146 *) -| DELETE impl_name_head impl_ident_tail binders_fixannot +fields_def: [ +| REPLACE field_def ";" fields_def +| WITH LIST1 field_def SEP ";" +| DELETE field_def +] -| DELETE fixannot +binders_fixannot: [ | DELETE binder binders_fixannot +| DELETE fixannot | DELETE (* empty *) - | LIST0 binder OPT fixannot ] -impl_ident_tail: [ -| DELETENT -(* -| REPLACE "}" -| WITH empty -| REPLACE LIST1 name ":" lconstr "}" -| WITH LIST1 name ":" lconstr -| REPLACE LIST1 name "}" -| WITH LIST1 name -| REPLACE ":" lconstr "}" -| WITH ":" lconstr -*) -] of_type_with_opt_coercion: [ | DELETE ":>" ">" @@ -347,18 +314,28 @@ closed_binder: [ | DELETE "(" name ":" lconstr ")" | DELETE "(" name ":=" lconstr ")" + | REPLACE "(" name ":" lconstr ":=" lconstr ")" -| WITH "(" name rec_type_cstr ":=" lconstr ")" +| WITH "(" name type_cstr ":=" lconstr ")" +| DELETE "{" name "}" | DELETE "{" name LIST1 name "}" | REPLACE "{" name LIST1 name ":" lconstr "}" -| WITH "{" LIST1 name rec_type_cstr "}" +| WITH "{" LIST1 name type_cstr "}" | DELETE "{" name ":" lconstr "}" ] +name_colon: [ +| name ":" +] + typeclass_constraint: [ | EDIT ADD_OPT "!" operconstr200 +| REPLACE "{" name "}" ":" [ "!" | ] operconstr200 +| WITH "{" name "}" ":" OPT "!" operconstr200 +| REPLACE name_colon [ "!" | ] operconstr200 +| WITH name_colon OPT "!" operconstr200 ] (* ?? 
From the grammar, Prim.name seems to be only "_" but ident is also accepted "*) @@ -376,62 +353,54 @@ DELETE: [ | orient_rw ] -pattern1_list: [ -| pattern1_list pattern1 -| pattern1 -] - -pattern1_list_opt: [ -| pattern1_list -| empty -] - pattern10: [ | REPLACE pattern1 LIST1 pattern1 -| WITH LIST1 pattern1 -| REPLACE "@" reference LIST0 pattern1 -| WITH "@" reference pattern1_list_opt +| WITH pattern1 LIST0 pattern1 +| DELETE pattern1 ] pattern0: [ | REPLACE "(" pattern200 ")" | WITH "(" LIST1 pattern200 SEP "|" ")" | DELETE "(" pattern200 "|" LIST1 pattern200 SEP "|" ")" +| REPLACE "{|" record_patterns bar_cbrace +| WITH "{|" LIST0 record_pattern bar_cbrace ] -patterns_comma: [ -| patterns_comma "," pattern100 -| pattern100 -] - -patterns_comma_list_or: [ -| patterns_comma_list_or "|" patterns_comma -| patterns_comma +DELETE: [ +| record_patterns ] eqn: [ | REPLACE LIST1 mult_pattern SEP "|" "=>" lconstr -| WITH patterns_comma_list_or "=>" lconstr +| WITH LIST1 [ LIST1 pattern100 SEP "," ] SEP "|" "=>" lconstr ] -record_patterns: [ -| REPLACE record_pattern ";" record_patterns -| WITH record_patterns ";" record_pattern +universe_increment: [ +| REPLACE "+" natural +| WITH OPT ( "+" natural ) +| DELETE (* empty *) ] -(* todo: binders should be binders_opt *) - +evar_instance: [ +| REPLACE "@{" LIST1 inst SEP ";" "}" +| WITH OPT ( "@{" LIST1 inst SEP ";" "}" ) +| DELETE (* empty *) +] -(* lexer stuff *) -bigint: [ -| DELETE NUMERAL -| num +univ_instance: [ +| DELETE (* empty *) ] -ident: [ -| DELETENT +constr: [ +| REPLACE "@" global univ_instance +| WITH "@" global OPT univ_instance ] +(* todo: binders should be binders_opt *) + + +(* lexer stuff *) IDENT: [ | ident ] @@ -445,11 +414,45 @@ LEFTQMARK: [ | "?" ] +digit: [ +| "0" ".." "9" +] + +num: [ +| LIST1 digit +] + natural: [ | DELETENT ] natural: [ | num (* todo: or should it be "nat"? *) ] +numeral: [ +| LIST1 digit OPT ("." 
LIST1 digit) OPT [ [ "e" | "E" ] OPT [ "+" | "-" ] LIST1 digit ] +] + +int: [ +| OPT "-" LIST1 digit +] + +bigint: [ +| DELETE NUMERAL +| num +] + +first_letter: [ +| [ "a" ".." "z" | "A" ".." "Z" | "_" | unicode_letter ] +] + +subsequent_letter: [ +| [ first_letter | digit | "'" | unicode_id_part ] +] + +ident: [ +| DELETE IDENT +| first_letter LIST0 subsequent_letter +] + NUMERAL: [ | numeral ] @@ -467,10 +470,6 @@ STRING: [ (* added productions *) -name_colon: [ -| name ":" -] - command_entry: [ | noedit_mode ] @@ -528,12 +527,6 @@ simple_tactic: [ | WITH "eintros" intropatterns ] -intropatterns: [ -| DELETE LIST0 intropattern -| intropatterns intropattern -| empty -] - (* todo: don't use DELETENT for this *) ne_intropatterns: [ | DELETENT ] @@ -594,7 +587,6 @@ SPLICE: [ | reference | bar_cbrace | lconstr -| impl_name_head (* | ast_closure_term @@ -665,6 +657,15 @@ SPLICE: [ | name_colon | closed_binder | binders_fixannot +| as_return_type +| case_type +| fields_def +| universe_increment +| type_cstr +| record_pattern +| evar_instance +| fix_decls +| cofix_decls ] RENAME: [ @@ -703,20 +704,13 @@ RENAME: [ | BULLET bullet | nat_or_var num_or_var | fix_decl fix_body -| instance universe_annot_opt -| rec_type_cstr colon_term_opt -| fix_constr term_fix +| cofix_decl cofix_body | constr term1_extended -| case_type return_type | appl_arg arg -| record_patterns record_patterns_opt -| universe_increment universe_increment_opt | rec_definition fix_definition | corec_definition cofix_definition -| record_field_instance field_def -| record_fields_instance fields_def -| evar_instance evar_bindings_opt | inst evar_binding +| univ_instance univ_annot ] diff --git a/doc/tools/docgram/doc_grammar.ml b/doc/tools/docgram/doc_grammar.ml index 70976e705e..b50c427742 100644 --- a/doc/tools/docgram/doc_grammar.ml +++ b/doc/tools/docgram/doc_grammar.ml @@ -49,7 +49,7 @@ let default_args = { } let start_symbols = ["vernac_toplevel"] -let tokens = [ "bullet"; "ident"; "int"; "num"; 
"numeral"; "string" ] +let tokens = [ "bullet"; "string"; "unicode_id_part"; "unicode_letter" ] (* translated symbols *) @@ -148,8 +148,10 @@ module DocGram = struct let g_add_prod_after g ins_after nt prod = let prods = try NTMap.find nt !g.map with Not_found -> [] in - (* todo: add check for duplicates *) - g_add_after g ~update:true ins_after nt (prods @ [prod]) + if prods <> [] then + g_update_prods g nt (prods @ [prod]) + else + g_add_after g ~update:true ins_after nt [prod] (* replace the map and order *) let g_reorder g map order = @@ -237,7 +239,17 @@ and prod_to_str ?(plist=false) prod = let rec output_prodn = function - | Sterm s -> let s = if List.mem s ["{"; "{|"; "|"; "}"] then "%" ^ s else s in + | Sterm s -> + let s = match s with + | "|}" -> "%|%}" + | "{|" -> "%{%|" + | "`{" -> "`%{" + | "@{" -> "@%{" + | "{" + | "}" + | "|" -> "%" ^ s + | _ -> s + in sprintf "%s" s | Snterm s -> sprintf "@%s" s | Slist1 sym -> sprintf "{+ %s }" (output_prodn sym) @@ -266,7 +278,14 @@ and output_sep sep = | Sterm s -> sprintf "%s" s (* avoid escaping separator *) | _ -> output_prodn sep -and prod_to_prodn prod = String.concat " " (List.map output_prodn prod) +and prod_to_prodn_r prod = + match prod with + | Sterm s :: Snterm "ident" :: tl when List.mem s ["?"; "."] -> + (sprintf "%s@ident" s) :: (prod_to_prodn_r tl) + | p :: tl -> (output_prodn p) :: (prod_to_prodn_r tl) + | [] -> [] + +and prod_to_prodn prod = String.concat " " (prod_to_prodn_r prod) let pr_prods nt prods = (* duplicative *) Printf.printf "%s: [\n" nt; @@ -304,11 +323,11 @@ let print_in_order out g fmt nt_order hide = fprintf out "%s%s\n" pfx str) prods; | `PRODN -> - fprintf out "\n%s:\n" nt; - List.iter (fun prod -> + fprintf out "\n%s:\n%s " nt nt; + List.iteri (fun i prod -> let str = prod_to_prodn prod in - let pfx = if str = "" then "" else " " in - fprintf out "%s%s\n" pfx str) + let op = if i = 0 then "::=" else "+=" in + fprintf out "%s %s\n" op str) prods; with Not_found -> error 
"Missing nt '%s' in print_in_order\n" nt) nt_order @@ -458,8 +477,10 @@ let ematch prod edit = -> ematchr [psym] [sym] && ematchr [psep] [sep] | (Sparen psyml, Sparen syml) -> ematchr psyml syml - | (Sprod psymll, Sprod symll) - -> List.fold_left (&&) true (List.map2 ematchr psymll symll) + | (Sprod psymll, Sprod symll) -> + if List.compare_lengths psymll symll != 0 then false + else + List.fold_left (&&) true (List.map2 ematchr psymll symll) | _, _ -> phd = hd in m && ematchr ptl tl @@ -691,17 +712,22 @@ let rec edit_prod g top edit_map prod = | _ -> [Snterm binding] with Not_found -> [sym0] in + let maybe_wrap syms = + match syms with + | s :: [] -> List.hd syms + | s -> Sparen (List.rev syms) + in let rec edit_symbol sym0 = match sym0 with | Sterm s -> [sym0] | Snterm s -> edit_nt edit_map sym0 s - | Slist1 sym -> [Slist1 (List.hd (edit_symbol sym))] + | Slist1 sym -> [Slist1 (maybe_wrap (edit_symbol sym))] (* you'll get a run-time failure deleting a SEP symbol *) - | Slist1sep (sym, sep) -> [Slist1sep (List.hd (edit_symbol sym), (List.hd (edit_symbol sep)))] - | Slist0 sym -> [Slist0 (List.hd (edit_symbol sym))] - | Slist0sep (sym, sep) -> [Slist0sep (List.hd (edit_symbol sym), (List.hd (edit_symbol sep)))] - | Sopt sym -> [Sopt (List.hd (edit_symbol sym))] + | Slist1sep (sym, sep) -> [Slist1sep (maybe_wrap (edit_symbol sym), (List.hd (edit_symbol sep)))] + | Slist0 sym -> [Slist0 (maybe_wrap (edit_symbol sym))] + | Slist0sep (sym, sep) -> [Slist0sep (maybe_wrap (edit_symbol sym), (List.hd (edit_symbol sep)))] + | Sopt sym -> [Sopt (maybe_wrap (edit_symbol sym))] | Sparen slist -> [Sparen (List.hd (edit_prod g false edit_map slist))] | Sprod slistlist -> let (_, prods) = edit_rule g edit_map "" slistlist in [Sprod prods] @@ -1079,7 +1105,9 @@ let apply_edit_file g edits = g_add_prod_after g (Some nt) nt2 oprod; let prods' = (try let posn = find_first oprod prods nt in - let prods = insert_after posn [[Snterm nt2]] prods in (* insert new prod *) + let prods = if 
List.mem [Snterm nt2] prods then prods + else insert_after posn [[Snterm nt2]] prods (* insert new prod *) + in remove_prod oprod prods nt (* remove orig prod *) with Not_found -> prods) in @@ -1091,6 +1119,7 @@ let apply_edit_file g edits = aux tl (edit_single_prod g oprod prods nt) add_nt | (Snterm "REPLACE" :: oprod) :: (Snterm "WITH" :: rprod) :: tl -> report_undef_nts g rprod ""; + (* todo: check result not already present *) let prods' = (try let posn = find_first oprod prods nt in let prods = insert_after posn [rprod] prods in (* insert new prod *) @@ -1580,7 +1609,7 @@ let process_rst g file args seen tac_prods cmd_prods = line in (* todo: maybe pass end_index? *) - let output_insertgram start_index end_ indent is_coq_group = + let output_insertprodn start_index end_ indent = let rec copy_prods list = match list with | [] -> () @@ -1590,21 +1619,21 @@ let process_rst g file args seen tac_prods cmd_prods = warn "%s line %d: '%s' already included at %s line %d\n" file !linenum nt prev_file prev_linenum; with Not_found -> - if is_coq_group then - seen := { !seen with nts = (NTMap.add nt (file, !linenum) !seen.nts)} ); + seen := { !seen with nts = (NTMap.add nt (file, !linenum) !seen.nts)} ); let prods = NTMap.find nt !g.map in List.iteri (fun i prod -> - let rhs = String.trim (sprintf ": %s" (prod_to_str ~plist:true prod)) in - fprintf new_rst "%s %s %s\n" indent (if i = 0 then nt else String.make (String.length nt) ' ') rhs) + let rhs = String.trim (prod_to_prodn prod) in + let sep = if i = 0 then " ::=" else "|" in + fprintf new_rst "%s %s%s %s\n" indent (if i = 0 then nt else "") sep rhs) prods; if nt <> end_ then copy_prods tl in copy_prods (nthcdr start_index !g.order) in - let process_insertgram line rhs = + let process_insertprodn line rhs = if not (Str.string_match ig_args_regex rhs 0) then - error "%s line %d: bad arguments '%s' for 'insertgram'\n" file !linenum rhs + error "%s line %d: bad arguments '%s' for 'insertprodn'\n" file !linenum rhs else 
begin let start = Str.matched_group 1 rhs in let end_ = Str.matched_group 2 rhs in @@ -1624,19 +1653,18 @@ let process_rst g file args seen tac_prods cmd_prods = try let line2 = getline() in if not (Str.string_match blank_regex line2 0) then - error "%s line %d: expecting a blank line after 'insertgram'\n" file !linenum + error "%s line %d: expecting a blank line after 'insertprodn'\n" file !linenum else begin let line3 = getline() in - if not (Str.string_match dir_regex line3 0) || (Str.matched_group 2 line3) <> "productionlist::" then - error "%s line %d: expecting 'productionlist' after 'insertgram'\n" file !linenum + if not (Str.string_match dir_regex line3 0) || (Str.matched_group 2 line3) <> "prodn::" then + error "%s line %d: expecting 'prodn' after 'insertprodn'\n" file !linenum else begin let indent = Str.matched_group 1 line3 in - let is_coq_group = ("coq" = String.trim (Str.matched_group 3 line3)) in let rec skip_to_end () = let endline = getline() in if Str.string_match end_prodlist_regex endline 0 then begin fprintf new_rst "%s\n\n%s\n" line line3; - output_insertgram start_index end_ indent is_coq_group; + output_insertprodn start_index end_ indent; fprintf new_rst "%s\n" endline end else skip_to_end () @@ -1657,9 +1685,9 @@ let process_rst g file args seen tac_prods cmd_prods = let dir = Str.matched_group 2 line in let rhs = String.trim (Str.matched_group 3 line) in match dir with - | "productionlist::" -> + | "prodn::" -> if rhs = "coq" then - warn "%s line %d: Missing 'insertgram' before 'productionlist:: coq'\n" file !linenum; + warn "%s line %d: Missing 'insertprodn' before 'prodn:: coq'\n" file !linenum; fprintf new_rst "%s\n" line; | "tacn::" when args.check_tacs -> if not (StringSet.mem rhs tac_prods) then @@ -1675,8 +1703,8 @@ let process_rst g file args seen tac_prods cmd_prods = warn "%s line %d: Repeated command: '%s'\n" file !linenum rhs; seen := { !seen with cmds = (NTMap.add rhs (file, !linenum) !seen.cmds)}; fprintf new_rst "%s\n" line 
- | "insertgram" -> - process_insertgram line rhs + | "insertprodn" -> + process_insertprodn line rhs | _ -> fprintf new_rst "%s\n" line end else fprintf new_rst "%s\n" line; diff --git a/doc/tools/docgram/fullGrammar b/doc/tools/docgram/fullGrammar index ebaeb392a5..e12589bb89 100644 --- a/doc/tools/docgram/fullGrammar +++ b/doc/tools/docgram/fullGrammar @@ -64,7 +64,7 @@ lconstr: [ constr: [ | operconstr8 -| "@" global instance +| "@" global univ_instance ] operconstr200: [ @@ -90,7 +90,7 @@ operconstr90: [ operconstr10: [ | operconstr9 LIST1 appl_arg -| "@" global instance LIST0 operconstr9 +| "@" global univ_instance LIST0 operconstr9 | "@" pattern_identref LIST1 identref | operconstr9 ] @@ -123,16 +123,16 @@ operconstr0: [ ] record_declaration: [ -| record_fields_instance +| fields_def ] -record_fields_instance: [ -| record_field_instance ";" record_fields_instance -| record_field_instance +fields_def: [ +| field_def ";" fields_def +| field_def | ] -record_field_instance: [ +field_def: [ | global binders ":=" lconstr ] @@ -140,13 +140,15 @@ binder_constr: [ | "forall" open_binders "," operconstr200 | "fun" open_binders "=>" operconstr200 | "let" name binders let_type_cstr ":=" operconstr200 "in" operconstr200 -| "let" single_fix "in" operconstr200 +| "let" "fix" fix_decl "in" operconstr200 +| "let" "cofix" cofix_decl "in" operconstr200 | "let" [ "(" LIST0 name SEP "," ")" | "()" ] return_type ":=" operconstr200 "in" operconstr200 | "let" "'" pattern200 ":=" operconstr200 "in" operconstr200 | "let" "'" pattern200 ":=" operconstr200 case_type "in" operconstr200 | "let" "'" pattern200 "in" pattern200 ":=" operconstr200 case_type "in" operconstr200 | "if" operconstr200 return_type "then" operconstr200 "else" operconstr200 -| fix_constr +| "fix" fix_decls +| "cofix" cofix_decls ] appl_arg: [ @@ -155,7 +157,7 @@ appl_arg: [ ] atomic_constr: [ -| global instance +| global univ_instance | sort | NUMERAL | string @@ -174,7 +176,7 @@ evar_instance: [ | ] -instance: [ 
+univ_instance: [ | "@{" LIST0 universe_level "}" | ] @@ -187,22 +189,22 @@ universe_level: [ | global ] -fix_constr: [ -| single_fix -| single_fix "with" LIST1 fix_decl SEP "with" "for" identref +fix_decls: [ +| fix_decl +| fix_decl "with" LIST1 fix_decl SEP "with" "for" identref ] -single_fix: [ -| fix_kw fix_decl +cofix_decls: [ +| cofix_decl +| cofix_decl "with" LIST1 cofix_decl SEP "with" "for" identref ] -fix_kw: [ -| "fix" -| "cofix" +fix_decl: [ +| identref binders_fixannot type_cstr ":=" operconstr200 ] -fix_decl: [ -| identref binders_fixannot let_type_cstr ":=" operconstr200 +cofix_decl: [ +| identref binders type_cstr ":=" operconstr200 ] match_constr: [ @@ -282,26 +284,14 @@ pattern0: [ | string ] -impl_ident_tail: [ -| "}" -| LIST1 name ":" lconstr "}" -| LIST1 name "}" -| ":" lconstr "}" -] - fixannot: [ | "{" "struct" identref "}" | "{" "wf" constr identref "}" | "{" "measure" constr OPT identref OPT constr "}" ] -impl_name_head: [ -| impl_ident_head -] - binders_fixannot: [ -| impl_name_head impl_ident_tail binders_fixannot -| fixannot +| ensure_fixannot fixannot | binder binders_fixannot | ] @@ -344,6 +334,11 @@ typeclass_constraint: [ | operconstr200 ] +type_cstr: [ +| ":" lconstr +| +] + let_type_cstr: [ | OPT [ ":" lconstr ] ] @@ -514,9 +509,6 @@ command: [ | "Add" "LoadPath" ne_string as_dirpath | "Add" "Rec" "LoadPath" ne_string as_dirpath | "Remove" "LoadPath" ne_string -| "AddPath" ne_string "as" as_dirpath -| "AddRecPath" ne_string "as" as_dirpath -| "DelPath" ne_string | "Type" lconstr | "Print" printable | "Print" smart_global OPT univ_name_list @@ -963,16 +955,11 @@ opt_coercion: [ ] rec_definition: [ -| ident_decl binders_fixannot rec_type_cstr OPT [ ":=" lconstr ] decl_notation +| ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation ] corec_definition: [ -| ident_decl binders rec_type_cstr OPT [ ":=" lconstr ] decl_notation -] - -rec_type_cstr: [ -| ":" lconstr -| +| ident_decl binders type_cstr OPT [ ":=" lconstr 
] decl_notation ] scheme: [ @@ -994,7 +981,6 @@ record_field: [ record_fields: [ | record_field ";" record_fields -| record_field ";" | record_field | ] @@ -1395,7 +1381,6 @@ syntax: [ only_parsing: [ | "(" "only" "parsing" ")" -| "(" "compat" STRING ")" | ] @@ -1413,7 +1398,6 @@ syntax_modifier: [ | "no" "associativity" | "only" "printing" | "only" "parsing" -| "compat" STRING | "format" STRING OPT STRING | IDENT; "," LIST1 IDENT SEP "," "at" level | IDENT; "at" level diff --git a/doc/tools/docgram/orderedGrammar b/doc/tools/docgram/orderedGrammar index 545ccde03a..63e0ca129c 100644 --- a/doc/tools/docgram/orderedGrammar +++ b/doc/tools/docgram/orderedGrammar @@ -8,30 +8,15 @@ vernac_toplevel: [ | "Quit" "." | "BackTo" num "." | "Show" "Goal" num "at" num "." -| "Show" "Proof" "Diffs" removed_opt "." +| "Show" "Proof" "Diffs" OPT "removed" "." | vernac_control ] -removed_opt: [ -| "removed" -| empty -] - tactic_mode: [ -| toplevel_selector_opt query_command -| toplevel_selector_opt "{" -| toplevel_selector_opt ltac_info_opt ltac_expr ltac_use_default -| "par" ":" ltac_info_opt ltac_expr ltac_use_default -] - -toplevel_selector_opt: [ -| toplevel_selector -| empty -] - -ltac_info_opt: [ -| "Info" num -| empty +| OPT toplevel_selector query_command +| OPT toplevel_selector "{" +| OPT toplevel_selector OPT ( "Info" num ) ltac_expr ltac_use_default +| "par" ":" OPT ( "Info" num ) ltac_expr ltac_use_default ] ltac_use_default: [ @@ -44,15 +29,16 @@ vernac_control: [ | "Redirect" string vernac_control | "Timeout" num vernac_control | "Fail" vernac_control -| quoted_attributes_list_opt vernac +| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) vernac ] term: [ | "forall" open_binders "," term | "fun" open_binders "=>" term | term_let -| "if" term as_return_type_opt "then" term "else" term +| "if" term OPT [ OPT [ "as" name ] "return" term100 ] "then" term "else" term | term_fix +| term_cofix | term100 ] @@ -62,43 +48,24 @@ term100: [ ] term10: [ -| term1 args -| "@" qualid 
universe_annot_opt term1_list_opt +| term1 LIST1 arg +| "@" qualid OPT univ_annot LIST0 term1 | term1 ] -args: [ -| args arg -| arg -] - arg: [ | "(" ident ":=" term ")" | term1 ] -term1_list_opt: [ -| term1_list_opt term1 -| empty -] - -empty: [ -| -] - term1: [ | term_projection | term0 "%" ident | term0 ] -args_opt: [ -| args -| empty -] - term0: [ -| qualid universe_annot_opt +| qualid OPT univ_annot | sort | numeral | string @@ -106,46 +73,25 @@ term0: [ | term_evar | term_match | "(" term ")" -| "{|" fields_def "|}" +| "{|" LIST0 field_def "|}" | "`{" term "}" | "`(" term ")" | "ltac" ":" "(" ltac_expr ")" ] -fields_def: [ -| field_def ";" fields_def -| field_def -| empty -] - field_def: [ -| qualid binders_opt ":=" term -] - -binders_opt: [ -| binders -| empty +| qualid LIST0 binder ":=" term ] term_projection: [ -| term0 ".(" qualid args_opt ")" -| term0 ".(" "@" qualid term1_list_opt ")" +| term0 ".(" qualid LIST0 arg ")" +| term0 ".(" "@" qualid LIST0 ( term1 ) ")" ] term_evar: [ | "?[" ident "]" | "?[" "?" ident "]" -| "?" ident evar_bindings_opt -] - -evar_bindings_opt: [ -| "@{" evar_bindings_semi "}" -| empty -] - -evar_bindings_semi: [ -| evar_bindings_semi ";" evar_binding -| evar_binding +| "?" ident OPT ( "@{" LIST1 evar_binding SEP ";" "}" ) ] evar_binding: [ @@ -153,42 +99,26 @@ evar_binding: [ ] dangling_pattern_extension_rule: [ -| "@" "?" ident ident_list -] - -ident_list: [ -| ident_list ident -| ident +| "@" "?" 
ident LIST1 ident ] record_fields: [ | record_field ";" record_fields -| record_field ";" | record_field -| empty +| ] record_field: [ -| quoted_attributes_list_opt record_binder num_opt2 decl_notation +| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) record_binder OPT [ "|" num ] decl_notation ] decl_notation: [ -| "where" one_decl_notation_list -| empty -] - -one_decl_notation_list: [ -| one_decl_notation_list "and" one_decl_notation -| one_decl_notation +| "where" LIST1 one_decl_notation SEP "and" +| ] one_decl_notation: [ -| string ":=" term1_extended ident_opt3 -] - -ident_opt3: [ -| ":" ident -| empty +| string ":=" term1_extended OPT [ ":" ident ] ] record_binder: [ @@ -197,9 +127,9 @@ record_binder: [ ] record_binder_body: [ -| binders_opt of_type_with_opt_coercion term -| binders_opt of_type_with_opt_coercion term ":=" term -| binders_opt ":=" term +| LIST0 binder of_type_with_opt_coercion term +| LIST0 binder of_type_with_opt_coercion term ":=" term +| LIST0 binder ":=" term ] of_type_with_opt_coercion: [ @@ -208,43 +138,50 @@ of_type_with_opt_coercion: [ | ":" ] -num_opt2: [ -| "|" num -| empty +attribute: [ +| ident attribute_value ] -quoted_attributes_list_opt: [ -| quoted_attributes_list_opt "#[" attribute_list_comma_opt "]" -| empty +attribute_value: [ +| "=" string +| "(" LIST0 attribute SEP "," ")" +| ] -attribute_list_comma_opt: [ -| attribute_list_comma -| empty +qualid: [ +| ident LIST0 field_ident ] -attribute_list_comma: [ -| attribute_list_comma "," attribute -| attribute +field_ident: [ +| "." ident ] -attribute: [ -| ident attribute_value +numeral: [ +| LIST1 digit OPT ( "." LIST1 digit ) OPT [ [ "e" | "E" ] OPT [ "+" | "-" ] LIST1 digit ] ] -attribute_value: [ -| "=" string -| "(" attribute_list_comma_opt ")" -| empty +int: [ +| OPT "-" LIST1 digit ] -qualid: [ -| qualid field -| ident +num: [ +| LIST1 digit ] -field: [ -| "." ident +digit: [ +| "0" ".." 
"9" +] + +ident: [ +| first_letter LIST0 subsequent_letter +] + +first_letter: [ +| [ "a" ".." "z" | "A" ".." "Z" | "_" | unicode_letter ] +] + +subsequent_letter: [ +| [ first_letter | digit | "'" | unicode_id_part ] ] sort: [ @@ -257,17 +194,12 @@ sort: [ ] universe: [ -| "max" "(" universe_exprs_comma ")" -| universe_expr -] - -universe_exprs_comma: [ -| universe_exprs_comma "," universe_expr +| "max" "(" LIST1 universe_expr SEP "," ")" | universe_expr ] universe_expr: [ -| universe_name universe_increment_opt +| universe_name OPT ( "+" num ) ] universe_name: [ @@ -276,21 +208,6 @@ universe_name: [ | "Prop" ] -universe_increment_opt: [ -| "+" num -| empty -] - -universe_annot_opt: [ -| "@{" universe_levels_opt "}" -| empty -] - -universe_levels_opt: [ -| universe_levels_opt universe_level -| empty -] - universe_level: [ | "Set" | "Prop" @@ -299,83 +216,50 @@ universe_level: [ | qualid ] -term_fix: [ -| single_fix -| single_fix "with" fix_bodies "for" ident -] - -single_fix: [ -| "fix" fix_body -| "cofix" fix_body +univ_annot: [ +| "@{" LIST0 universe_level "}" ] -fix_bodies: [ -| fix_bodies "with" fix_body -| fix_body +term_fix: [ +| "let" "fix" fix_body "in" term +| "fix" fix_body OPT ( LIST1 ( "with" fix_body ) "for" ident ) ] fix_body: [ -| ident binders_opt fixannot_opt colon_term_opt ":=" term -] - -fixannot_opt: [ -| fixannot -| empty +| ident LIST0 binder OPT fixannot OPT ( ":" term ) ":=" term ] fixannot: [ | "{" "struct" ident "}" | "{" "wf" term1_extended ident "}" -| "{" "measure" term1_extended ident_opt term1_extended_opt "}" +| "{" "measure" term1_extended OPT ident OPT term1_extended "}" ] term1_extended: [ | term1 -| "@" qualid universe_annot_opt +| "@" qualid OPT univ_annot ] -ident_opt: [ -| ident -| empty +term_cofix: [ +| "let" "cofix" cofix_body "in" term +| "cofix" cofix_body OPT ( LIST1 ( "with" cofix_body ) "for" ident ) ] -term1_extended_opt: [ -| term1_extended -| empty +cofix_body: [ +| ident LIST0 binder OPT ( ":" term ) ":=" term ] 
term_let: [ -| "let" name colon_term_opt ":=" term "in" term -| "let" name binders colon_term_opt ":=" term "in" term -| "let" single_fix "in" term -| "let" names_tuple as_return_type_opt ":=" term "in" term -| "let" "'" pattern ":=" term return_type_opt "in" term -| "let" "'" pattern "in" pattern ":=" term return_type "in" term -] - -colon_term_opt: [ -| ":" term -| empty -] - -names_tuple: [ -| "(" names_comma ")" -| "()" -] - -names_comma: [ -| names_comma "," name -| name +| "let" name OPT ( ":" term ) ":=" term "in" term +| "let" name LIST1 binder OPT ( ":" term ) ":=" term "in" term +| "let" "(" LIST0 name SEP "," ")" OPT [ OPT [ "as" name ] "return" term100 ] ":=" term "in" term +| "let" "'" pattern ":=" term OPT ( "return" term100 ) "in" term +| "let" "'" pattern "in" pattern ":=" term "return" term100 "in" term ] open_binders: [ -| names ":" term -| binders -] - -names: [ -| names name -| name +| LIST1 name ":" term +| LIST1 binder ] name: [ @@ -383,37 +267,21 @@ name: [ | ident ] -binders: [ -| binders binder -| binder -] - binder: [ | name -| "(" names ":" term ")" -| "(" name colon_term_opt ":=" term ")" -| "{" name "}" -| "{" names colon_term_opt "}" -| "`(" typeclass_constraints_comma ")" -| "`{" typeclass_constraints_comma "}" +| "(" LIST1 name ":" term ")" +| "(" name OPT ( ":" term ) ":=" term ")" +| "{" LIST1 name OPT ( ":" term ) "}" +| "`(" LIST1 typeclass_constraint SEP "," ")" +| "`{" LIST1 typeclass_constraint SEP "," "}" | "'" pattern0 | "(" name ":" term "|" term ")" ] -typeclass_constraints_comma: [ -| typeclass_constraints_comma "," typeclass_constraint -| typeclass_constraint -] - typeclass_constraint: [ -| exclam_opt term -| "{" name "}" ":" exclam_opt term -| name ":" exclam_opt term -] - -exclam_opt: [ -| "!" -| empty +| OPT "!" term +| "{" name "}" ":" OPT "!" term +| name ":" OPT "!" 
term ] term_cast: [ @@ -424,69 +292,15 @@ term_cast: [ ] term_match: [ -| "match" case_items_comma return_type_opt "with" or_opt eqns_or_opt "end" -] - -case_items_comma: [ -| case_items_comma "," case_item -| case_item -] - -return_type_opt: [ -| return_type -| empty -] - -as_return_type_opt: [ -| as_name_opt return_type -| empty -] - -return_type: [ -| "return" term100 +| "match" LIST1 case_item SEP "," OPT ( "return" term100 ) "with" OPT "|" LIST0 eqn SEP "|" "end" ] case_item: [ -| term100 as_name_opt in_opt -] - -as_name_opt: [ -| "as" name -| empty -] - -in_opt: [ -| "in" pattern -| empty -] - -or_opt: [ -| "|" -| empty -] - -eqns_or_opt: [ -| eqns_or -| empty -] - -eqns_or: [ -| eqns_or "|" eqn -| eqn +| term100 OPT ( "as" name ) OPT [ "in" pattern ] ] eqn: [ -| patterns_comma_list_or "=>" term -] - -patterns_comma_list_or: [ -| patterns_comma_list_or "|" patterns_comma -| patterns_comma -] - -patterns_comma: [ -| patterns_comma "," pattern -| pattern +| LIST1 [ LIST1 pattern SEP "," ] SEP "|" "=>" term ] pattern: [ @@ -496,19 +310,8 @@ pattern: [ pattern10: [ | pattern1 "as" name -| pattern1_list -| "@" qualid pattern1_list_opt -| pattern1 -] - -pattern1_list: [ -| pattern1_list pattern1 -| pattern1 -] - -pattern1_list_opt: [ -| pattern1_list -| empty +| pattern1 LIST0 pattern1 +| "@" qualid LIST0 pattern1 ] pattern1: [ @@ -518,28 +321,13 @@ pattern1: [ pattern0: [ | qualid -| "{|" record_patterns_opt "|}" +| "{|" LIST0 ( qualid ":=" pattern ) "|}" | "_" -| "(" patterns_or ")" +| "(" LIST1 pattern SEP "|" ")" | numeral | string ] -patterns_or: [ -| patterns_or "|" pattern -| pattern -] - -record_patterns_opt: [ -| record_patterns_opt ";" record_pattern -| record_pattern -| empty -] - -record_pattern: [ -| qualid ":=" pattern -] - vernac: [ | "Local" vernac_poly | "Global" vernac_poly @@ -571,78 +359,28 @@ subprf: [ ] gallina: [ -| thm_token ident_decl binders_opt ":" term with_list_opt +| thm_token ident_decl LIST0 binder ":" term LIST0 [ "with" ident_decl 
LIST0 binder ":" term ] | assumption_token inline assum_list | assumptions_token inline assum_list | def_token ident_decl def_body | "Let" ident def_body -| cumulativity_token_opt private_token finite_token inductive_definition_list -| "Fixpoint" fix_definition_list -| "Let" "Fixpoint" fix_definition_list -| "CoFixpoint" cofix_definition_list -| "Let" "CoFixpoint" cofix_definition_list -| "Scheme" scheme_list -| "Combined" "Scheme" ident "from" ident_list_comma +| OPT cumulativity_token private_token finite_token LIST1 inductive_definition SEP "with" +| "Fixpoint" LIST1 fix_definition SEP "with" +| "Let" "Fixpoint" LIST1 fix_definition SEP "with" +| "CoFixpoint" LIST1 cofix_definition SEP "with" +| "Let" "CoFixpoint" LIST1 cofix_definition SEP "with" +| "Scheme" LIST1 scheme SEP "with" +| "Combined" "Scheme" ident "from" LIST1 ident SEP "," | "Register" qualid "as" qualid | "Register" "Inline" qualid -| "Primitive" ident term_opt ":=" register_token -| "Universe" ident_list -| "Universes" ident_list -| "Constraint" univ_constraint_list_comma -] - -term_opt: [ -| ":" term -| empty -] - -univ_constraint_list_comma: [ -| univ_constraint_list_comma "," univ_constraint -| univ_constraint -] - -with_list_opt: [ -| with_list_opt "with" ident_decl binders_opt ":" term -| empty -] - -cumulativity_token_opt: [ -| cumulativity_token -| empty -] - -inductive_definition_list: [ -| inductive_definition_list "with" inductive_definition -| inductive_definition -] - -fix_definition_list: [ -| fix_definition_list "with" fix_definition -| fix_definition +| "Primitive" ident OPT [ ":" term ] ":=" register_token +| "Universe" LIST1 ident +| "Universes" LIST1 ident +| "Constraint" LIST1 univ_constraint SEP "," ] fix_definition: [ -| ident_decl binders_opt fixannot_opt colon_term_opt term_opt2 decl_notation -] - -term_opt2: [ -| ":=" term -| empty -] - -cofix_definition_list: [ -| cofix_definition_list "with" cofix_definition -| cofix_definition -] - -scheme_list: [ -| scheme_list "with" 
scheme -| scheme -] - -ident_list_comma: [ -| ident_list_comma "," ident -| ident +| ident_decl LIST0 binder OPT fixannot OPT ( ":" term ) OPT [ ":=" term ] decl_notation ] register_token: [ @@ -731,21 +469,15 @@ assumptions_token: [ inline: [ | "Inline" "(" num ")" | "Inline" -| empty +| ] univ_constraint: [ -| universe_name lt_alt universe_name -] - -lt_alt: [ -| "<" -| "=" -| "<=" +| universe_name [ "<" | "=" | "<=" ] universe_name ] ident_decl: [ -| ident univ_decl_opt +| ident OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) ] finite_token: [ @@ -764,46 +496,41 @@ cumulativity_token: [ private_token: [ | "Private" -| empty +| ] def_body: [ -| binders_opt ":=" reduce term -| binders_opt ":" term ":=" reduce term -| binders_opt ":" term +| LIST0 binder ":=" reduce term +| LIST0 binder ":" term ":=" reduce term +| LIST0 binder ":" term ] reduce: [ | "Eval" red_expr "in" -| empty +| ] red_expr: [ | "red" | "hnf" -| "simpl" delta_flag ref_or_pattern_occ_opt +| "simpl" delta_flag OPT ref_or_pattern_occ | "cbv" strategy_flag | "cbn" strategy_flag | "lazy" strategy_flag | "compute" delta_flag -| "vm_compute" ref_or_pattern_occ_opt -| "native_compute" ref_or_pattern_occ_opt -| "unfold" unfold_occ_list_comma -| "fold" term1_extended_list -| "pattern" pattern_occ_list_comma +| "vm_compute" OPT ref_or_pattern_occ +| "native_compute" OPT ref_or_pattern_occ +| "unfold" LIST1 unfold_occ SEP "," +| "fold" LIST1 term1_extended +| "pattern" LIST1 pattern_occ SEP "," | ident ] strategy_flag: [ -| red_flags_list +| LIST1 red_flags | delta_flag ] -red_flags_list: [ -| red_flags_list red_flags -| red_flags -] - red_flags: [ | "beta" | "iota" @@ -815,14 +542,9 @@ red_flags: [ ] delta_flag: [ -| "-" "[" smart_global_list "]" -| "[" smart_global_list "]" -| empty -] - -ref_or_pattern_occ_opt: [ -| ref_or_pattern_occ -| empty +| "-" "[" LIST1 smart_global "]" +| "[" LIST1 smart_global "]" +| ] ref_or_pattern_occ: [ @@ -830,83 
+552,48 @@ ref_or_pattern_occ: [ | term1_extended occs ] -unfold_occ_list_comma: [ -| unfold_occ_list_comma "," unfold_occ -| unfold_occ -] - unfold_occ: [ | smart_global occs ] -pattern_occ_list_comma: [ -| pattern_occ_list_comma "," pattern_occ -| pattern_occ -] - opt_constructors_or_fields: [ | ":=" constructor_list_or_record_decl -| empty +| ] inductive_definition: [ -| opt_coercion ident_decl binders_opt term_opt opt_constructors_or_fields decl_notation +| opt_coercion ident_decl LIST0 binder OPT [ ":" term ] opt_constructors_or_fields decl_notation ] opt_coercion: [ | ">" -| empty +| ] constructor_list_or_record_decl: [ -| "|" constructor_list_or -| ident constructor_type "|" constructor_list_or_opt +| "|" LIST1 constructor SEP "|" +| ident constructor_type "|" LIST0 constructor SEP "|" | ident constructor_type | ident "{" record_fields "}" | "{" record_fields "}" -| empty -] - -constructor_list_or: [ -| constructor_list_or "|" constructor -| constructor -] - -constructor_list_or_opt: [ -| constructor_list_or -| empty +| ] assum_list: [ -| assum_coe_list +| LIST1 assum_coe | simple_assum_coe ] -assum_coe_list: [ -| assum_coe_list assum_coe -| assum_coe -] - assum_coe: [ | "(" simple_assum_coe ")" ] simple_assum_coe: [ -| ident_decl_list of_type_with_opt_coercion term -] - -ident_decl_list: [ -| ident_decl_list ident_decl -| ident_decl +| LIST1 ident_decl of_type_with_opt_coercion term ] constructor_type: [ -| binders_opt of_type_with_opt_coercion_opt -] - -of_type_with_opt_coercion_opt: [ -| of_type_with_opt_coercion term -| empty +| LIST0 binder [ of_type_with_opt_coercion term | ] ] constructor: [ @@ -914,7 +601,7 @@ constructor: [ ] cofix_definition: [ -| ident_decl binders_opt colon_term_opt term_opt2 decl_notation +| ident_decl LIST0 binder OPT ( ":" term ) OPT [ ":=" term ] decl_notation ] scheme: [ @@ -943,67 +630,47 @@ smart_global: [ ] by_notation: [ -| string ident_opt2 -] - -ident_opt2: [ -| "%" ident -| empty +| string OPT [ "%" ident ] ] 
gallina_ext: [ -| "Module" export_token ident module_binder_list_opt of_module_type is_module_expr -| "Module" "Type" ident module_binder_list_opt module_type_inl_list_opt is_module_type -| "Declare" "Module" export_token ident module_binder_list_opt ":" module_type_inl +| "Module" export_token ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) of_module_type is_module_expr +| "Module" "Type" ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) LIST0 ( "<:" module_type_inl ) is_module_type +| "Declare" "Module" export_token ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) ":" module_type_inl | "Section" ident | "Chapter" ident | "End" ident | "Collection" ident ":=" section_subset_expr -| "Require" export_token qualid_list -| "From" qualid "Require" export_token qualid_list -| "Import" qualid_list -| "Export" qualid_list -| "Include" module_type_inl module_expr_inl_list_opt -| "Include" "Type" module_type_inl module_type_inl_list_opt -| "Transparent" smart_global_list -| "Opaque" smart_global_list -| "Strategy" strategy_level_list -| "Canonical" Structure_opt qualid univ_decl_opt2 -| "Canonical" Structure_opt by_notation -| "Coercion" qualid univ_decl_opt def_body +| "Require" export_token LIST1 qualid +| "From" qualid "Require" export_token LIST1 qualid +| "Import" LIST1 qualid +| "Export" LIST1 qualid +| "Include" module_type_inl LIST0 ( "<+" module_expr_inl ) +| "Include" "Type" module_type_inl LIST0 ( "<+" module_type_inl ) +| "Transparent" LIST1 smart_global +| "Opaque" LIST1 smart_global +| "Strategy" LIST1 [ strategy_level "[" LIST1 smart_global "]" ] +| "Canonical" OPT "Structure" qualid OPT [ OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) def_body ] +| "Canonical" OPT "Structure" by_notation +| "Coercion" qualid OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) def_body | "Identity" "Coercion" 
ident ":" class_rawexpr ">->" class_rawexpr | "Coercion" qualid ":" class_rawexpr ">->" class_rawexpr | "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr -| "Context" binders -| "Instance" instance_name ":" term hint_info fields_def_opt +| "Context" LIST1 binder +| "Instance" instance_name ":" term hint_info [ ":=" "{" [ LIST1 field_def SEP ";" | ] "}" | ":=" term | ] | "Existing" "Instance" qualid hint_info -| "Existing" "Instances" qualid_list num_opt2 +| "Existing" "Instances" LIST1 qualid OPT [ "|" num ] | "Existing" "Class" qualid -| "Arguments" smart_global argument_spec_block_list_opt more_implicits_block_opt arguments_modifier_opt +| "Arguments" smart_global LIST0 argument_spec_block OPT [ "," LIST1 [ LIST0 more_implicits_block ] SEP "," ] OPT [ ":" LIST1 arguments_modifier SEP "," ] | "Implicit" "Type" reserv_list | "Implicit" "Types" reserv_list -| "Generalizable" All_alt -| "Export" "Set" ident_list option_setting -| "Export" "Unset" ident_list -] - -smart_global_list: [ -| smart_global_list smart_global -| smart_global -] - -num_opt: [ -| num -| empty -] - -qualid_list: [ -| qualid_list qualid -| qualid +| "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 ident ] +| "Export" "Set" LIST1 ident option_setting +| "Export" "Unset" LIST1 ident ] option_setting: [ -| empty +| | int | string ] @@ -1015,132 +682,35 @@ class_rawexpr: [ ] hint_info: [ -| "|" num_opt term1_extended_opt -| empty -] - -module_binder_list_opt: [ -| module_binder_list_opt "(" export_token ident_list ":" module_type_inl ")" -| empty -] - -module_type_inl_list_opt: [ -| module_type_inl_list_opt module_type_inl -| empty -] - -module_expr_inl_list_opt: [ -| module_expr_inl_list_opt module_expr_inl -| empty -] - -strategy_level_list: [ -| strategy_level_list strategy_level "[" smart_global_list "]" -| strategy_level "[" smart_global_list "]" -] - -fields_def_opt: [ -| ":=" "{" fields_def "}" -| ":=" term -| empty -] - 
-argument_spec_block_list_opt: [ -| argument_spec_block_list_opt argument_spec_block -| empty -] - -more_implicits_block_opt: [ -| "," more_implicits_block_list_comma -| empty -] - -more_implicits_block_list_comma: [ -| more_implicits_block_list_comma "," more_implicits_block_list_opt -| more_implicits_block_list_opt -] - -arguments_modifier_opt: [ -| ":" arguments_modifier_list_comma -| empty -] - -arguments_modifier_list_comma: [ -| arguments_modifier_list_comma "," arguments_modifier -| arguments_modifier -] - -All_alt: [ -| "All" "Variables" -| "No" "Variables" -| Variable_alt ident_list -] - -Variable_alt: [ -| "Variable" -| "Variables" -] - -more_implicits_block_list_opt: [ -| more_implicits_block_list_opt more_implicits_block -| empty -] - -univ_decl_opt2: [ -| univ_decl_opt def_body -| empty -] - -univ_decl_opt: [ -| "@{" ident_list_opt plus_opt univ_constraint_alt -| empty -] - -plus_opt: [ -| "+" -| empty -] - -univ_constraint_alt: [ -| "|" univ_constraint_list_comma_opt plus_opt "}" -| rbrace_alt -] - -univ_constraint_list_comma_opt: [ -| univ_constraint_list_comma -| empty -] - -rbrace_alt: [ -| "}" -| "|}" +| "|" OPT num OPT term1_extended +| ] export_token: [ | "Import" | "Export" -| empty +| ] of_module_type: [ | ":" module_type_inl -| module_type_inl_list_opt +| LIST0 ( "<:" module_type_inl ) ] is_module_type: [ -| ":=" module_type_inl module_type_inl_list_opt -| empty +| ":=" module_type_inl LIST0 ( "<+" module_type_inl ) +| ] is_module_expr: [ -| ":=" module_expr_inl module_expr_inl_list_opt -| empty +| ":=" module_expr_inl LIST0 ( "<+" module_expr_inl ) +| ] functor_app_annot: [ | "[" "inline" "at" "level" num "]" | "[" "no" "inline" "]" -| empty +| ] module_expr_inl: [ @@ -1171,33 +741,23 @@ module_type: [ ] with_declaration: [ -| "Definition" qualid univ_decl_opt ":=" term +| "Definition" qualid OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) ":=" term | "Module" qualid ":=" qualid ] 
argument_spec_block: [ -| exclam_opt name scope_delimiter_opt +| OPT "!" name OPT ( "%" ident ) | "/" | "&" -| "(" scope_delimiter_list ")" scope_delimiter_opt -| "[" scope_delimiter_list "]" scope_delimiter_opt -| "{" scope_delimiter_list "}" scope_delimiter_opt -] - -scope_delimiter_opt: [ -| "%" ident -| empty -] - -scope_delimiter_list: [ -| scope_delimiter_list scope_delimiter_opt -| scope_delimiter_opt +| "(" LIST1 ( OPT "!" name OPT ( "%" ident ) ) ")" OPT ( "%" ident ) +| "[" LIST1 ( OPT "!" name OPT ( "%" ident ) ) "]" OPT ( "%" ident ) +| "{" LIST1 ( OPT "!" name OPT ( "%" ident ) ) "}" OPT ( "%" ident ) ] more_implicits_block: [ | name -| "[" names "]" -| "{" names "}" +| "[" LIST1 name "]" +| "{" LIST1 name "}" ] strategy_level: [ @@ -1208,26 +768,21 @@ strategy_level: [ ] instance_name: [ -| ident_decl binders_opt -| empty +| ident_decl LIST0 binder +| ] reserv_list: [ -| reserv_tuple_list +| LIST1 reserv_tuple | simple_reserv ] -reserv_tuple_list: [ -| reserv_tuple_list reserv_tuple -| reserv_tuple -] - reserv_tuple: [ | "(" simple_reserv ")" ] simple_reserv: [ -| ident_list ":" term +| LIST1 ident ":" term ] arguments_modifier: [ @@ -1244,46 +799,36 @@ arguments_modifier: [ | "clear" "implicits" "and" "scopes" ] -Structure_opt: [ -| "Structure" -| empty -] - command: [ | "Goal" term -| "Comments" comment_list_opt -| "Declare" "Instance" ident_decl binders_opt ":" term hint_info | "Declare" "Scope" ident | "Pwd" | "Cd" | "Cd" string -| "Load" Verbose_opt string_alt -| "Declare" "ML" "Module" string_list +| "Load" [ "Verbose" | ] [ string | ident ] +| "Declare" "ML" "Module" LIST1 string | "Locate" locatable | "Add" "LoadPath" string as_dirpath | "Add" "Rec" "LoadPath" string as_dirpath | "Remove" "LoadPath" string -| "AddPath" string "as" as_dirpath -| "AddRecPath" string "as" as_dirpath -| "DelPath" string | "Type" term | "Print" printable -| "Print" smart_global univ_name_list_opt +| "Print" smart_global OPT ( "@{" LIST0 name "}" ) | "Print" 
"Module" "Type" qualid | "Print" "Module" qualid | "Print" "Namespace" dirpath | "Inspect" num | "Add" "ML" "Path" string | "Add" "Rec" "ML" "Path" string -| "Set" ident_list option_setting -| "Unset" ident_list -| "Print" "Table" ident_list -| "Add" ident ident option_ref_value_list -| "Add" ident option_ref_value_list -| "Test" ident_list "for" option_ref_value_list -| "Test" ident_list -| "Remove" ident ident option_ref_value_list -| "Remove" ident option_ref_value_list +| "Set" LIST1 ident option_setting +| "Unset" LIST1 ident +| "Print" "Table" LIST1 ident +| "Add" ident ident LIST1 option_ref_value +| "Add" ident LIST1 option_ref_value +| "Test" LIST1 ident "for" LIST1 option_ref_value +| "Test" LIST1 ident +| "Remove" ident ident LIST1 option_ref_value +| "Remove" ident LIST1 option_ref_value | "Write" "State" ident | "Write" "State" string | "Restore" "State" ident @@ -1328,9 +873,11 @@ command: [ | "Show" "Intros" | "Show" "Match" qualid | "Guarded" -| "Create" "HintDb" ident discriminated_opt -| "Remove" "Hints" qualid_list opt_hintbases +| "Create" "HintDb" ident [ "discriminated" | ] +| "Remove" "Hints" LIST1 qualid opt_hintbases | "Hint" hint opt_hintbases +| "Comments" LIST0 comment +| "Declare" "Instance" ident_decl LIST0 binder ":" term hint_info | "Obligation" int "of" ident ":" term withtac | "Obligation" int "of" ident withtac | "Obligation" int ":" term withtac @@ -1360,20 +907,20 @@ command: [ | "Add" "Relation" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident | "Add" "Relation" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident | "Add" "Relation" term1_extended term1_extended "transitivity" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended 
"symmetry" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident -| "Add" "Parametric" "Relation" binders_opt ":" term1_extended term1_extended "transitivity" "proved" "by" term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident +| "Add" 
"Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident +| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "transitivity" "proved" "by" term1_extended "as" ident | "Add" "Setoid" term1_extended term1_extended term1_extended "as" ident -| "Add" "Parametric" "Setoid" binders_opt ":" term1_extended term1_extended term1_extended "as" ident +| "Add" "Parametric" "Setoid" LIST0 binder ":" term1_extended term1_extended term1_extended "as" ident | "Add" "Morphism" term1_extended ":" ident | "Declare" "Morphism" term1_extended ":" ident | "Add" "Morphism" term1_extended "with" "signature" term "as" ident -| "Add" "Parametric" "Morphism" binders_opt ":" term1_extended "with" "signature" term "as" ident +| "Add" "Parametric" "Morphism" LIST0 binder ":" term1_extended "with" "signature" term "as" ident | "Grab" "Existential" "Variables" | "Unshelve" | "Declare" "Equivalent" "Keys" term1_extended term1_extended @@ -1401,49 +948,49 @@ command: [ | "Show" "Zify" "CstOp" (* micromega plugin *) | "Show" "Zify" "BinRel" (* micromega plugin *) | "Show" "Zify" "Spec" (* micromega plugin *) -| "Add" "Ring" ident ":" term1_extended ring_mods_opt (* setoid_ring plugin *) +| "Add" "Ring" ident ":" term1_extended OPT ( "(" LIST1 ring_mod SEP "," ")" ) (* setoid_ring plugin *) | "Hint" "Cut" "[" hints_path "]" opthints -| "Typeclasses" "Transparent" qualid_list_opt -| "Typeclasses" "Opaque" qualid_list_opt -| "Typeclasses" "eauto" ":=" debug eauto_search_strategy int_opt +| "Typeclasses" "Transparent" LIST0 qualid +| "Typeclasses" "Opaque" LIST0 qualid +| "Typeclasses" "eauto" ":=" debug eauto_search_strategy OPT int +| "Proof" "with" ltac_expr OPT [ "using" section_subset_expr ] +| "Proof" "using" section_subset_expr OPT [ "with" ltac_expr ] +| "Tactic" "Notation" OPT ( "(" "at" "level" num ")" ) LIST1 
ltac_production_item ":=" ltac_expr | "Print" "Rewrite" "HintDb" ident -| "Proof" "with" ltac_expr using_opt -| "Proof" "using" section_subset_expr with_opt -| "Tactic" "Notation" ltac_tactic_level_opt ltac_production_item_list ":=" ltac_expr | "Print" "Ltac" qualid | "Locate" "Ltac" qualid -| "Ltac" tacdef_body_list +| "Ltac" LIST1 tacdef_body SEP "with" | "Print" "Ltac" "Signatures" | "Set" "Firstorder" "Solver" ltac_expr | "Print" "Firstorder" "Solver" +| "Function" LIST1 fix_definition SEP "with" (* funind plugin *) +| "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) | "Extraction" qualid (* extraction plugin *) -| "Recursive" "Extraction" qualid_list (* extraction plugin *) -| "Extraction" string qualid_list (* extraction plugin *) -| "Extraction" "TestCompile" qualid_list (* extraction plugin *) -| "Separate" "Extraction" qualid_list (* extraction plugin *) +| "Recursive" "Extraction" LIST1 qualid (* extraction plugin *) +| "Extraction" string LIST1 qualid (* extraction plugin *) +| "Extraction" "TestCompile" LIST1 qualid (* extraction plugin *) +| "Separate" "Extraction" LIST1 qualid (* extraction plugin *) | "Extraction" "Library" ident (* extraction plugin *) | "Recursive" "Extraction" "Library" ident (* extraction plugin *) | "Extraction" "Language" language (* extraction plugin *) -| "Extraction" "Inline" qualid_list (* extraction plugin *) -| "Extraction" "NoInline" qualid_list (* extraction plugin *) +| "Extraction" "Inline" LIST1 qualid (* extraction plugin *) +| "Extraction" "NoInline" LIST1 qualid (* extraction plugin *) | "Print" "Extraction" "Inline" (* extraction plugin *) | "Reset" "Extraction" "Inline" (* extraction plugin *) -| "Extraction" "Implicit" qualid "[" int_or_id_list_opt "]" (* extraction plugin *) -| "Extraction" "Blacklist" ident_list (* extraction plugin *) +| "Extraction" "Implicit" qualid "[" LIST0 int_or_id "]" (* extraction plugin *) +| "Extraction" "Blacklist" LIST1 ident (* extraction plugin *) | 
"Print" "Extraction" "Blacklist" (* extraction plugin *) | "Reset" "Extraction" "Blacklist" (* extraction plugin *) -| "Extract" "Constant" qualid string_list_opt "=>" mlname (* extraction plugin *) +| "Extract" "Constant" qualid LIST0 string "=>" mlname (* extraction plugin *) | "Extract" "Inlined" "Constant" qualid "=>" mlname (* extraction plugin *) -| "Extract" "Inductive" qualid "=>" mlname "[" mlname_list_opt "]" string_opt (* extraction plugin *) +| "Extract" "Inductive" qualid "=>" mlname "[" LIST0 mlname "]" OPT string (* extraction plugin *) | "Show" "Extraction" (* extraction plugin *) -| "Function" fix_definition_list (* funind plugin *) -| "Functional" "Scheme" fun_scheme_arg_list (* funind plugin *) | "Functional" "Case" fun_scheme_arg (* funind plugin *) | "Generate" "graph" "for" qualid (* funind plugin *) -| "Hint" "Rewrite" orient term1_extended_list ":" ident_list_opt -| "Hint" "Rewrite" orient term1_extended_list "using" ltac_expr ":" ident_list_opt -| "Hint" "Rewrite" orient term1_extended_list -| "Hint" "Rewrite" orient term1_extended_list "using" ltac_expr +| "Hint" "Rewrite" orient LIST1 term1_extended ":" LIST0 ident +| "Hint" "Rewrite" orient LIST1 term1_extended "using" ltac_expr ":" LIST0 ident +| "Hint" "Rewrite" orient LIST1 term1_extended +| "Hint" "Rewrite" orient LIST1 term1_extended "using" ltac_expr | "Derive" "Inversion_clear" ident "with" term1_extended "Sort" sort_family | "Derive" "Inversion_clear" ident "with" term1_extended | "Derive" "Inversion" ident "with" term1_extended "Sort" sort_family @@ -1453,7 +1000,7 @@ command: [ | "Declare" "Left" "Step" term1_extended | "Declare" "Right" "Step" term1_extended | "Print" "Rings" (* setoid_ring plugin *) -| "Add" "Field" ident ":" term1_extended field_mods_opt (* setoid_ring plugin *) +| "Add" "Field" ident ":" term1_extended OPT ( "(" LIST1 field_mod SEP "," ")" ) (* setoid_ring plugin *) | "Print" "Fields" (* setoid_ring plugin *) | "Numeral" "Notation" qualid qualid qualid ":" 
ident numnotoption | "String" "Notation" qualid qualid qualid ":" ident @@ -1462,31 +1009,11 @@ command: [ orient: [ | "->" | "<-" -| empty -] - -string_opt: [ -| string -| empty -] - -qualid_list_opt: [ -| qualid_list_opt qualid -| empty -] - -univ_name_list_opt: [ -| "@{" name_list_opt "}" -| empty -] - -name_list_opt: [ -| name_list_opt name -| empty +| ] section_subset_expr: [ -| starredidentref_list_opt +| LIST0 starredidentref | ssexpr ] @@ -1503,17 +1030,12 @@ ssexpr50: [ ssexpr0: [ | starredidentref -| "(" starredidentref_list_opt ")" -| "(" starredidentref_list_opt ")" "*" +| "(" LIST0 starredidentref ")" +| "(" LIST0 starredidentref ")" "*" | "(" ssexpr ")" | "(" ssexpr ")" "*" ] -starredidentref_list_opt: [ -| starredidentref_list_opt starredidentref -| empty -] - starredidentref: [ | ident | ident "*" @@ -1521,43 +1043,13 @@ starredidentref: [ | "Type" "*" ] -int_opt: [ -| int -| empty -] - -using_opt: [ -| "using" section_subset_expr -| empty -] - -with_opt: [ -| "with" ltac_expr -| empty -] - -ltac_tactic_level_opt: [ -| "(" "at" "level" num ")" -| empty -] - -ltac_production_item_list: [ -| ltac_production_item_list ltac_production_item -| ltac_production_item -] - -tacdef_body_list: [ -| tacdef_body_list "with" tacdef_body -| tacdef_body -] - printable: [ -| "Term" smart_global univ_name_list_opt +| "Term" smart_global OPT ( "@{" LIST0 name "}" ) | "All" | "Section" qualid | "Grammar" ident | "Custom" "Grammar" ident -| "LoadPath" dirpath_opt +| "LoadPath" OPT dirpath | "Modules" | "Libraries" | "ML" "Path" @@ -1579,9 +1071,9 @@ printable: [ | "HintDb" ident | "Scopes" | "Scope" ident -| "Visibility" ident_opt +| "Visibility" OPT ident | "Implicit" smart_global -| Sorted_opt "Universes" printunivs_subgraph_opt string_opt +| [ "Sorted" | ] "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string | "Assumptions" smart_global | "Opaque" "Dependencies" smart_global | "Transparent" "Dependencies" smart_global @@ -1591,84 +1083,9 @@ printable: [ | 
"Registered" ] -dirpath_opt: [ -| dirpath -| empty -] - dirpath: [ | ident -| dirpath field -] - -Sorted_opt: [ -| "Sorted" -| empty -] - -printunivs_subgraph_opt: [ -| "Subgraph" "(" qualid_list_opt ")" -| empty -] - -comment_list_opt: [ -| comment_list_opt comment -| empty -] - -Verbose_opt: [ -| "Verbose" -| empty -] - -string_alt: [ -| string -| ident -] - -string_list: [ -| string_list string -| string -] - -option_ref_value_list: [ -| option_ref_value_list option_ref_value -| option_ref_value -] - -discriminated_opt: [ -| "discriminated" -| empty -] - -string_list_opt: [ -| string_list_opt string -| empty -] - -mlname_list_opt: [ -| mlname_list_opt mlname -| empty -] - -fun_scheme_arg_list: [ -| fun_scheme_arg_list "with" fun_scheme_arg -| fun_scheme_arg -] - -term1_extended_list: [ -| term1_extended_list term1_extended -| term1_extended -] - -ring_mods_opt: [ -| "(" ring_mod_list_comma ")" (* setoid_ring plugin *) -| empty -] - -field_mods_opt: [ -| "(" field_mod_list_comma ")" (* setoid_ring plugin *) -| empty +| dirpath field_ident ] locatable: [ @@ -1685,8 +1102,7 @@ option_ref_value: [ ] as_dirpath: [ -| "as" dirpath -| empty +| OPT [ "as" dirpath ] ] comment: [ @@ -1701,25 +1117,20 @@ reference_or_constr: [ ] hint: [ -| "Resolve" reference_or_constr_list hint_info -| "Resolve" "->" qualid_list num_opt -| "Resolve" "<-" qualid_list num_opt -| "Immediate" reference_or_constr_list +| "Resolve" LIST1 reference_or_constr hint_info +| "Resolve" "->" LIST1 qualid OPT num +| "Resolve" "<-" LIST1 qualid OPT num +| "Immediate" LIST1 reference_or_constr | "Variables" "Transparent" | "Variables" "Opaque" | "Constants" "Transparent" | "Constants" "Opaque" -| "Transparent" qualid_list -| "Opaque" qualid_list -| "Mode" qualid plus_list -| "Unfold" qualid_list -| "Constructors" qualid_list -| "Extern" num term1_extended_opt "=>" ltac_expr -] - -reference_or_constr_list: [ -| reference_or_constr_list reference_or_constr -| reference_or_constr +| "Transparent" LIST1 
qualid +| "Opaque" LIST1 qualid +| "Mode" qualid LIST1 [ "+" | "!" | "-" ] +| "Unfold" LIST1 qualid +| "Constructors" LIST1 qualid +| "Extern" num OPT term1_extended "=>" ltac_expr ] constr_body: [ @@ -1727,20 +1138,9 @@ constr_body: [ | ":" term ":=" term ] -plus_list: [ -| plus_list plus_alt -| plus_alt -] - -plus_alt: [ -| "+" -| "!" -| "-" -] - withtac: [ | "with" ltac_expr -| empty +| ] ltac_def_kind: [ @@ -1749,23 +1149,18 @@ ltac_def_kind: [ ] tacdef_body: [ -| qualid fun_var_list ltac_def_kind ltac_expr +| qualid LIST1 fun_var ltac_def_kind ltac_expr | qualid ltac_def_kind ltac_expr ] ltac_production_item: [ | string -| ident "(" ident ltac_production_sep_opt ")" +| ident "(" ident OPT ( "," string ) ")" | ident ] -ltac_production_sep_opt: [ -| "," string -| empty -] - numnotoption: [ -| empty +| | "(" "warning" "after" num ")" | "(" "abstract" "after" num ")" ] @@ -1797,44 +1192,34 @@ ring_mod: [ | "abstract" (* setoid_ring plugin *) | "morphism" term1_extended (* setoid_ring plugin *) | "constants" "[" ltac_expr "]" (* setoid_ring plugin *) -| "closed" "[" qualid_list "]" (* setoid_ring plugin *) +| "closed" "[" LIST1 qualid "]" (* setoid_ring plugin *) | "preprocess" "[" ltac_expr "]" (* setoid_ring plugin *) | "postprocess" "[" ltac_expr "]" (* setoid_ring plugin *) | "setoid" term1_extended term1_extended (* setoid_ring plugin *) | "sign" term1_extended (* setoid_ring plugin *) -| "power" term1_extended "[" qualid_list "]" (* setoid_ring plugin *) +| "power" term1_extended "[" LIST1 qualid "]" (* setoid_ring plugin *) | "power_tac" term1_extended "[" ltac_expr "]" (* setoid_ring plugin *) | "div" term1_extended (* setoid_ring plugin *) ] -ring_mod_list_comma: [ -| ring_mod_list_comma "," ring_mod -| ring_mod -] - field_mod: [ | ring_mod (* setoid_ring plugin *) | "completeness" term1_extended (* setoid_ring plugin *) ] -field_mod_list_comma: [ -| field_mod_list_comma "," field_mod -| field_mod -] - debug: [ | "debug" -| empty +| ] 
eauto_search_strategy: [ | "(bfs)" | "(dfs)" -| empty +| ] hints_path_atom: [ -| qualid_list +| LIST1 qualid | "_" ] @@ -1849,62 +1234,52 @@ hints_path: [ ] opthints: [ -| ":" ident_list -| empty +| ":" LIST1 ident +| ] opt_hintbases: [ -| empty -| ":" ident_list -] - -int_or_id_list_opt: [ -| int_or_id_list_opt int_or_id -| empty +| +| ":" LIST1 ident ] query_command: [ | "Eval" red_expr "in" term "." | "Compute" term "." | "Check" term "." -| "About" smart_global univ_name_list_opt "." +| "About" smart_global OPT ( "@{" LIST0 name "}" ) "." | "SearchHead" term1_extended in_or_out_modules "." | "SearchPattern" term1_extended in_or_out_modules "." | "SearchRewrite" term1_extended in_or_out_modules "." | "Search" searchabout_query searchabout_queries "." | "SearchAbout" searchabout_query searchabout_queries "." -| "SearchAbout" "[" searchabout_query_list "]" in_or_out_modules "." +| "SearchAbout" "[" LIST1 searchabout_query "]" in_or_out_modules "." ] ne_in_or_out_modules: [ -| "inside" qualid_list -| "outside" qualid_list +| "inside" LIST1 qualid +| "outside" LIST1 qualid ] in_or_out_modules: [ | ne_in_or_out_modules -| empty +| ] positive_search_mark: [ | "-" -| empty +| ] searchabout_query: [ -| positive_search_mark string scope_delimiter_opt +| positive_search_mark string OPT ( "%" ident ) | positive_search_mark term1_extended ] searchabout_queries: [ | ne_in_or_out_modules | searchabout_query searchabout_queries -| empty -] - -searchabout_query_list: [ -| searchabout_query_list searchabout_query -| searchabout_query +| ] syntax: [ @@ -1912,34 +1287,18 @@ syntax: [ | "Close" "Scope" ident | "Delimit" "Scope" ident "with" ident | "Undelimit" "Scope" ident -| "Bind" "Scope" ident "with" class_rawexpr_list -| "Infix" string ":=" term1_extended syntax_modifier_opt ident_opt3 -| "Notation" ident ident_list_opt ":=" term1_extended only_parsing -| "Notation" string ":=" term1_extended syntax_modifier_opt ident_opt3 +| "Bind" "Scope" ident "with" LIST1 class_rawexpr +| 
"Infix" string ":=" term1_extended [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" ident ] +| "Notation" ident LIST0 ident ":=" term1_extended only_parsing +| "Notation" string ":=" term1_extended [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" ident ] | "Format" "Notation" string string string -| "Reserved" "Infix" string syntax_modifier_opt -| "Reserved" "Notation" string syntax_modifier_opt -] - -class_rawexpr_list: [ -| class_rawexpr_list class_rawexpr -| class_rawexpr -] - -syntax_modifier_opt: [ -| "(" syntax_modifier_list_comma ")" -| empty -] - -syntax_modifier_list_comma: [ -| syntax_modifier_list_comma "," syntax_modifier -| syntax_modifier +| "Reserved" "Infix" string [ "(" LIST1 syntax_modifier SEP "," ")" | ] +| "Reserved" "Notation" string [ "(" LIST1 syntax_modifier SEP "," ")" | ] ] only_parsing: [ | "(" "only" "parsing" ")" -| "(" "compat" string ")" -| empty +| ] level: [ @@ -1956,9 +1315,8 @@ syntax_modifier: [ | "no" "associativity" | "only" "printing" | "only" "parsing" -| "compat" string -| "format" string string_opt -| ident "," ident_list_comma "at" level +| "format" string OPT string +| ident "," LIST1 ident SEP "," "at" level | ident "at" level | ident "at" level constr_as_binder_kind | ident constr_as_binder_kind @@ -1971,23 +1329,13 @@ syntax_extension_type: [ | "bigint" | "binder" | "constr" -| "constr" level_opt constr_as_binder_kind_opt +| "constr" OPT ( "at" level ) OPT constr_as_binder_kind | "pattern" | "pattern" "at" "level" num | "strict" "pattern" | "strict" "pattern" "at" "level" num | "closed" "binder" -| "custom" ident level_opt constr_as_binder_kind_opt -] - -level_opt: [ -| level -| empty -] - -constr_as_binder_kind_opt: [ -| constr_as_binder_kind -| empty +| "custom" ident OPT ( "at" level ) OPT constr_as_binder_kind ] constr_as_binder_kind: [ @@ -2032,9 +1380,9 @@ simple_tactic: [ | "split" "with" bindings | "esplit" "with" bindings | "exists" -| "exists" bindings_list_comma +| "exists" LIST1 bindings SEP "," 
| "eexists" -| "eexists" bindings_list_comma +| "eexists" LIST1 bindings SEP "," | "intros" "until" quantified_hypothesis | "intro" | "intro" ident @@ -2050,17 +1398,17 @@ simple_tactic: [ | "move" ident "at" "bottom" | "move" ident "after" ident | "move" ident "before" ident -| "rename" rename_list_comma -| "revert" ident_list +| "rename" LIST1 rename SEP "," +| "revert" LIST1 ident | "simple" "induction" quantified_hypothesis | "simple" "destruct" quantified_hypothesis | "double" "induction" quantified_hypothesis quantified_hypothesis | "admit" | "fix" ident num | "cofix" ident -| "clear" ident_list_opt -| "clear" "-" ident_list -| "clearbody" ident_list +| "clear" LIST0 ident +| "clear" "-" LIST1 ident +| "clearbody" LIST1 ident | "generalize" "dependent" term1_extended | "replace" term1_extended "with" term1_extended clause_dft_concl by_arg_tac | "replace" "->" term1_extended clause_dft_concl @@ -2078,10 +1426,10 @@ simple_tactic: [ | "injection" destruction_arg | "einjection" | "einjection" destruction_arg -| "injection" "as" simple_intropattern_list_opt -| "injection" destruction_arg "as" simple_intropattern_list_opt -| "einjection" "as" simple_intropattern_list_opt -| "einjection" destruction_arg "as" simple_intropattern_list_opt +| "injection" "as" LIST0 simple_intropattern +| "injection" destruction_arg "as" LIST0 simple_intropattern +| "einjection" "as" LIST0 simple_intropattern +| "einjection" destruction_arg "as" LIST0 simple_intropattern | "simple" "injection" | "simple" "injection" destruction_arg | "dependent" "rewrite" orient term1_extended @@ -2091,11 +1439,11 @@ simple_tactic: [ | "decompose" "sum" term1_extended | "decompose" "record" term1_extended | "absurd" term1_extended -| "contradiction" constr_with_bindings_opt -| "autorewrite" "with" ident_list clause_dft_concl -| "autorewrite" "with" ident_list clause_dft_concl "using" ltac_expr -| "autorewrite" "*" "with" ident_list clause_dft_concl -| "autorewrite" "*" "with" ident_list 
clause_dft_concl "using" ltac_expr +| "contradiction" OPT constr_with_bindings +| "autorewrite" "with" LIST1 ident clause_dft_concl +| "autorewrite" "with" LIST1 ident clause_dft_concl "using" ltac_expr +| "autorewrite" "*" "with" LIST1 ident clause_dft_concl +| "autorewrite" "*" "with" LIST1 ident clause_dft_concl "using" ltac_expr | "rewrite" "*" orient term1_extended "in" ident "at" occurrences by_arg_tac | "rewrite" "*" orient term1_extended "at" occurrences "in" ident by_arg_tac | "rewrite" "*" orient term1_extended "in" ident by_arg_tac @@ -2106,7 +1454,7 @@ simple_tactic: [ | "notypeclasses" "refine" term1_extended | "simple" "notypeclasses" "refine" term1_extended | "solve_constraints" -| "subst" ident_list +| "subst" LIST1 ident | "subst" | "simple" "subst" | "evar" "(" ident ":" term ")" @@ -2150,7 +1498,7 @@ simple_tactic: [ | "swap" int_or_var int_or_var | "revgoals" | "guard" int_or_var comparison int_or_var -| "decompose" "[" term1_extended_list "]" term1_extended +| "decompose" "[" LIST1 term1_extended "]" term1_extended | "optimize_heap" | "start" "ltac" "profiling" | "stop" "ltac" "profiling" @@ -2158,32 +1506,32 @@ simple_tactic: [ | "show" "ltac" "profile" | "show" "ltac" "profile" "cutoff" int | "show" "ltac" "profile" string -| "restart_timer" string_opt -| "finish_timing" string_opt -| "finish_timing" "(" string ")" string_opt +| "restart_timer" OPT string +| "finish_timing" OPT string +| "finish_timing" "(" string ")" OPT string | "eassumption" | "eexact" term1_extended | "trivial" auto_using hintbases | "info_trivial" auto_using hintbases | "debug" "trivial" auto_using hintbases -| "auto" int_or_var_opt auto_using hintbases -| "info_auto" int_or_var_opt auto_using hintbases -| "debug" "auto" int_or_var_opt auto_using hintbases -| "prolog" "[" term1_extended_list_opt "]" int_or_var -| "eauto" int_or_var_opt int_or_var_opt auto_using hintbases -| "new" "auto" int_or_var_opt auto_using hintbases -| "debug" "eauto" int_or_var_opt int_or_var_opt 
auto_using hintbases -| "info_eauto" int_or_var_opt int_or_var_opt auto_using hintbases -| "dfs" "eauto" int_or_var_opt auto_using hintbases +| "auto" OPT int_or_var auto_using hintbases +| "info_auto" OPT int_or_var auto_using hintbases +| "debug" "auto" OPT int_or_var auto_using hintbases +| "prolog" "[" LIST0 term1_extended "]" int_or_var +| "eauto" OPT int_or_var OPT int_or_var auto_using hintbases +| "new" "auto" OPT int_or_var auto_using hintbases +| "debug" "eauto" OPT int_or_var OPT int_or_var auto_using hintbases +| "info_eauto" OPT int_or_var OPT int_or_var auto_using hintbases +| "dfs" "eauto" OPT int_or_var auto_using hintbases | "autounfold" hintbases clause_dft_concl | "autounfold_one" hintbases "in" ident | "autounfold_one" hintbases | "unify" term1_extended term1_extended | "unify" term1_extended term1_extended "with" ident | "convert_concl_no_check" term1_extended -| "typeclasses" "eauto" "bfs" int_or_var_opt "with" ident_list -| "typeclasses" "eauto" int_or_var_opt "with" ident_list -| "typeclasses" "eauto" int_or_var_opt +| "typeclasses" "eauto" "bfs" OPT int_or_var "with" LIST1 ident +| "typeclasses" "eauto" OPT int_or_var "with" LIST1 ident +| "typeclasses" "eauto" OPT int_or_var | "head_of_constr" ident term1_extended | "not_evar" term1_extended | "is_ground" term1_extended @@ -2209,16 +1557,16 @@ simple_tactic: [ | "rewrite_strat" rewstrategy "in" ident | "intros" intropattern_list_opt | "eintros" intropattern_list_opt -| "apply" constr_with_bindings_arg_list_comma in_hyp_as -| "eapply" constr_with_bindings_arg_list_comma in_hyp_as -| "simple" "apply" constr_with_bindings_arg_list_comma in_hyp_as -| "simple" "eapply" constr_with_bindings_arg_list_comma in_hyp_as -| "elim" constr_with_bindings_arg eliminator_opt -| "eelim" constr_with_bindings_arg eliminator_opt +| "apply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as +| "eapply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as +| "simple" "apply" LIST1 constr_with_bindings_arg SEP "," 
in_hyp_as +| "simple" "eapply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as +| "elim" constr_with_bindings_arg OPT ( "using" constr_with_bindings ) +| "eelim" constr_with_bindings_arg OPT ( "using" constr_with_bindings ) | "case" induction_clause_list | "ecase" induction_clause_list -| "fix" ident num "with" fixdecl_list -| "cofix" ident "with" cofixdecl_list +| "fix" ident num "with" LIST1 fixdecl +| "cofix" ident "with" LIST1 cofixdecl | "pose" bindings_with_parameters | "pose" term1_extended as_name | "epose" bindings_with_parameters @@ -2242,47 +1590,47 @@ simple_tactic: [ | "enough" term1_extended as_ipat by_tactic | "eenough" term1_extended as_ipat by_tactic | "generalize" term1_extended -| "generalize" term1_extended term1_extended_list -| "generalize" term1_extended occs as_name pattern_occ_list_opt +| "generalize" term1_extended LIST1 term1_extended +| "generalize" term1_extended occs as_name LIST0 [ "," pattern_occ as_name ] | "induction" induction_clause_list | "einduction" induction_clause_list | "destruct" induction_clause_list | "edestruct" induction_clause_list -| "rewrite" oriented_rewriter_list_comma clause_dft_concl by_tactic -| "erewrite" oriented_rewriter_list_comma clause_dft_concl by_tactic -| "dependent" simple_alt quantified_hypothesis as_or_and_ipat with_opt2 +| "rewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic +| "erewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic +| "dependent" [ "simple" "inversion" | "inversion" | "inversion_clear" ] quantified_hypothesis as_or_and_ipat OPT [ "with" term1_extended ] | "simple" "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list | "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list | "inversion_clear" quantified_hypothesis as_or_and_ipat in_hyp_list | "inversion" quantified_hypothesis "using" term1_extended in_hyp_list | "red" clause_dft_concl | "hnf" clause_dft_concl -| "simpl" delta_flag ref_or_pattern_occ_opt clause_dft_concl +| "simpl" 
delta_flag OPT ref_or_pattern_occ clause_dft_concl | "cbv" strategy_flag clause_dft_concl | "cbn" strategy_flag clause_dft_concl | "lazy" strategy_flag clause_dft_concl | "compute" delta_flag clause_dft_concl -| "vm_compute" ref_or_pattern_occ_opt clause_dft_concl -| "native_compute" ref_or_pattern_occ_opt clause_dft_concl -| "unfold" unfold_occ_list_comma clause_dft_concl -| "fold" term1_extended_list clause_dft_concl -| "pattern" pattern_occ_list_comma clause_dft_concl +| "vm_compute" OPT ref_or_pattern_occ clause_dft_concl +| "native_compute" OPT ref_or_pattern_occ clause_dft_concl +| "unfold" LIST1 unfold_occ SEP "," clause_dft_concl +| "fold" LIST1 term1_extended clause_dft_concl +| "pattern" LIST1 pattern_occ SEP "," clause_dft_concl | "change" conversion clause_dft_concl | "change_no_check" conversion clause_dft_concl | "btauto" | "rtauto" | "congruence" | "congruence" int -| "congruence" "with" term1_extended_list -| "congruence" int "with" term1_extended_list +| "congruence" "with" LIST1 term1_extended +| "congruence" int "with" LIST1 term1_extended | "f_equal" -| "firstorder" ltac_expr_opt firstorder_using -| "firstorder" ltac_expr_opt "with" ident_list -| "firstorder" ltac_expr_opt firstorder_using "with" ident_list -| "gintuition" ltac_expr_opt -| "functional" "inversion" quantified_hypothesis qualid_opt (* funind plugin *) -| "functional" "induction" term1_extended_list fun_ind_using with_names (* funind plugin *) -| "soft" "functional" "induction" term1_extended_list fun_ind_using with_names (* funind plugin *) +| "firstorder" OPT ltac_expr firstorder_using +| "firstorder" OPT ltac_expr "with" LIST1 ident +| "firstorder" OPT ltac_expr firstorder_using "with" LIST1 ident +| "gintuition" OPT ltac_expr +| "functional" "inversion" quantified_hypothesis OPT qualid (* funind plugin *) +| "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *) +| "soft" "functional" "induction" LIST1 term1_extended fun_ind_using with_names 
(* funind plugin *) | "myred" (* micromega plugin *) | "psatz_Z" int_or_var ltac_expr (* micromega plugin *) | "psatz_Z" ltac_expr (* micromega plugin *) @@ -2304,12 +1652,12 @@ simple_tactic: [ | "saturate" (* micromega plugin *) | "nsatz_compute" term1_extended (* nsatz plugin *) | "omega" (* omega plugin *) -| "omega" "with" ident_list (* omega plugin *) +| "omega" "with" LIST1 ident (* omega plugin *) | "omega" "with" "*" (* omega plugin *) | "protect_fv" string "in" ident (* setoid_ring plugin *) | "protect_fv" string (* setoid_ring plugin *) -| "ring_lookup" ltac_expr0 "[" term1_extended_list_opt "]" term1_extended_list (* setoid_ring plugin *) -| "field_lookup" ltac_expr "[" term1_extended_list_opt "]" term1_extended_list (* setoid_ring plugin *) +| "ring_lookup" ltac_expr0 "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *) +| "field_lookup" ltac_expr "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *) ] int_or_var: [ @@ -2317,13 +1665,8 @@ int_or_var: [ | ident ] -constr_with_bindings_opt: [ -| constr_with_bindings -| empty -] - hloc: [ -| empty +| | "in" "|-" "*" | "in" ident | "in" "(" "Type" "of" ident ")" @@ -2338,30 +1681,25 @@ rename: [ by_arg_tac: [ | "by" ltac_expr3 -| empty +| ] in_clause: [ | in_clause | "*" occs | "*" "|-" concl_occ -| hypident_occ_list_comma_opt "|-" concl_occ -| hypident_occ_list_comma_opt +| LIST0 hypident_occ SEP "," "|-" concl_occ +| LIST0 hypident_occ SEP "," ] occs: [ | "at" occs_nums -| empty -] - -hypident_occ_list_comma_opt: [ -| hypident_occ_list_comma -| empty +| ] as_ipat: [ | "as" simple_intropattern -| empty +| ] or_and_intropattern_loc: [ @@ -2371,40 +1709,35 @@ or_and_intropattern_loc: [ as_or_and_ipat: [ | "as" or_and_intropattern_loc -| empty +| ] eqn_ipat: [ | "eqn" ":" naming_intropattern | "_eqn" ":" naming_intropattern | "_eqn" -| empty +| ] as_name: [ | "as" ident -| empty +| ] by_tactic: [ | "by" ltac_expr3 -| empty +| ] rewriter: [ | "!" 
constr_with_bindings_arg -| qmark_alt constr_with_bindings_arg +| [ "?" | "?" ] constr_with_bindings_arg | num "!" constr_with_bindings_arg -| num qmark_alt constr_with_bindings_arg +| num [ "?" | "?" ] constr_with_bindings_arg | num constr_with_bindings_arg | constr_with_bindings_arg ] -qmark_alt: [ -| "?" -| "?" -] - oriented_rewriter: [ | orient rewriter ] @@ -2414,53 +1747,22 @@ induction_clause: [ ] induction_clause_list: [ -| induction_clause_list_comma eliminator_opt opt_clause -] - -induction_clause_list_comma: [ -| induction_clause_list_comma "," induction_clause -| induction_clause -] - -eliminator_opt: [ -| "using" constr_with_bindings -| empty +| LIST1 induction_clause SEP "," OPT ( "using" constr_with_bindings ) opt_clause ] auto_using: [ -| "using" term1_extended_list_comma -| empty -] - -term1_extended_list_comma: [ -| term1_extended_list_comma "," term1_extended -| term1_extended +| "using" LIST1 term1_extended SEP "," +| ] intropattern_list_opt: [ -| intropattern_list_opt intropattern -| empty +| LIST0 intropattern ] or_and_intropattern: [ | "[" intropattern_or_list_or "]" -| "(" simple_intropattern_list_comma_opt ")" -| "(" simple_intropattern "&" simple_intropattern_list_ ")" -] - -simple_intropattern_list_comma_opt: [ -| simple_intropattern_list_comma -| empty -] - -simple_intropattern_list_comma: [ -| simple_intropattern_list_comma "," simple_intropattern -| simple_intropattern -] - -simple_intropattern_list_: [ -| simple_intropattern_list_ "&" simple_intropattern -| simple_intropattern +| "(" LIST0 simple_intropattern SEP "," ")" +| "(" simple_intropattern "&" LIST1 simple_intropattern SEP "&" ")" ] intropattern_or_list_or: [ @@ -2468,11 +1770,6 @@ intropattern_or_list_or: [ | intropattern_list_opt ] -simple_intropattern_list_opt: [ -| simple_intropattern_list_opt simple_intropattern -| empty -] - equality_intropattern: [ | "->" | "<-" @@ -2492,12 +1789,7 @@ intropattern: [ ] simple_intropattern: [ -| simple_intropattern_closed term0_list_opt 
-] - -term0_list_opt: [ -| term0_list_opt "%" term0 -| empty +| simple_intropattern_closed LIST0 [ "%" term0 ] ] simple_intropattern_closed: [ @@ -2513,65 +1805,14 @@ simple_binding: [ ] bindings: [ -| simple_binding_list -| term1_extended_list -] - -simple_binding_list: [ -| simple_binding_list simple_binding -| simple_binding -] - -constr_with_bindings_arg_list_comma: [ -| constr_with_bindings_arg_list_comma "," constr_with_bindings_arg -| constr_with_bindings_arg -] - -fixdecl_list: [ -| fixdecl_list fixdecl -| fixdecl -] - -cofixdecl_list: [ -| cofixdecl_list cofixdecl -| cofixdecl -] - -pattern_occ_list_opt: [ -| pattern_occ_list_opt "," pattern_occ as_name -| empty +| LIST1 simple_binding +| LIST1 term1_extended ] pattern_occ: [ | term1_extended occs ] -oriented_rewriter_list_comma: [ -| oriented_rewriter_list_comma "," oriented_rewriter -| oriented_rewriter -] - -simple_alt: [ -| "simple" "inversion" -| "inversion" -| "inversion_clear" -] - -with_opt2: [ -| "with" term1_extended -| empty -] - -bindings_list_comma: [ -| bindings_list_comma "," bindings -| bindings -] - -rename_list_comma: [ -| rename_list_comma "," rename -| rename -] - comparison: [ | "=" | "<" @@ -2582,22 +1823,12 @@ comparison: [ hintbases: [ | "with" "*" -| "with" ident_list -| empty -] - -qualid_opt: [ -| qualid -| empty +| "with" LIST1 ident +| ] bindings_with_parameters: [ -| "(" ident simple_binder_list_opt ":=" term ")" -] - -simple_binder_list_opt: [ -| simple_binder_list_opt simple_binder -| empty +| "(" ident LIST0 simple_binder ":=" term ")" ] hypident: [ @@ -2613,23 +1844,23 @@ hypident_occ: [ clause_dft_concl: [ | "in" in_clause | occs -| empty +| ] clause_dft_all: [ | "in" in_clause -| empty +| ] opt_clause: [ | "in" in_clause | "at" occs_nums -| empty +| ] occs_nums: [ -| num_or_var_list -| "-" num_or_var int_or_var_list_opt +| LIST1 num_or_var +| "-" num_or_var LIST0 int_or_var ] num_or_var: [ @@ -2637,47 +1868,37 @@ num_or_var: [ | ident ] -int_or_var_list_opt: [ -| 
int_or_var_list_opt int_or_var -| empty -] - -num_or_var_list: [ -| num_or_var_list num_or_var -| num_or_var -] - concl_occ: [ | "*" occs -| empty +| ] in_hyp_list: [ -| "in" ident_list -| empty +| "in" LIST1 ident +| ] in_hyp_as: [ | "in" ident as_ipat -| empty +| ] simple_binder: [ | name -| "(" names ":" term ")" +| "(" LIST1 name ":" term ")" ] fixdecl: [ -| "(" ident simple_binder_list_opt struct_annot ":" term ")" +| "(" ident LIST0 simple_binder struct_annot ":" term ")" ] struct_annot: [ | "{" "struct" name "}" -| empty +| ] cofixdecl: [ -| "(" ident simple_binder_list_opt ":" term ")" +| "(" ident LIST0 simple_binder ":" term ")" ] constr_with_bindings: [ @@ -2686,7 +1907,7 @@ constr_with_bindings: [ with_bindings: [ | "with" bindings -| empty +| ] destruction_arg: [ @@ -2713,36 +1934,26 @@ conversion: [ firstorder_using: [ | "using" qualid -| "using" qualid "," qualid_list_comma -| "using" qualid qualid qualid_list_opt -| empty -] - -qualid_list_comma: [ -| qualid_list_comma "," qualid -| qualid +| "using" qualid "," LIST1 qualid SEP "," +| "using" qualid qualid LIST0 qualid +| ] fun_ind_using: [ | "using" constr_with_bindings (* funind plugin *) -| empty (* funind plugin *) +| (* funind plugin *) ] with_names: [ | "as" simple_intropattern (* funind plugin *) -| empty (* funind plugin *) +| (* funind plugin *) ] occurrences: [ -| int_list +| LIST1 int | ident ] -int_list: [ -| int_list int -| int -] - rewstrategy: [ | term1_extended | "<-" term1_extended @@ -2764,51 +1975,31 @@ rewstrategy: [ | "choice" rewstrategy rewstrategy | "old_hints" ident | "hints" ident -| "terms" term1_extended_list_opt +| "terms" LIST0 term1_extended | "eval" red_expr | "fold" term1_extended ] -hypident_occ_list_comma: [ -| hypident_occ_list_comma "," hypident_occ -| hypident_occ -] - ltac_expr: [ | binder_tactic | ltac_expr4 ] binder_tactic: [ -| "fun" fun_var_list "=>" ltac_expr -| "let" rec_opt let_clause_list "in" ltac_expr +| "fun" LIST1 fun_var "=>" ltac_expr +| "let" [ 
"rec" | ] LIST1 let_clause SEP "with" "in" ltac_expr | "info" ltac_expr ] -fun_var_list: [ -| fun_var_list fun_var -| fun_var -] - fun_var: [ | ident | "_" ] -rec_opt: [ -| "rec" -| empty -] - -let_clause_list: [ -| let_clause_list "with" let_clause -| let_clause -] - let_clause: [ | ident ":=" ltac_expr | "_" ":=" ltac_expr -| ident fun_var_list ":=" ltac_expr +| ident LIST1 fun_var ":=" ltac_expr ] ltac_expr4: [ @@ -2820,27 +2011,28 @@ ltac_expr4: [ ] multi_goal_tactics: [ -| ltac_expr_opt "|" multi_goal_tactics -| ltac_expr_opt ".." or_opt ltac_expr_opt_list_or +| OPT ltac_expr "|" multi_goal_tactics +| ltac_expr_opt ".." OPT "|" ltac_expr_opt_list_or | ltac_expr -| empty +| ] ltac_expr_opt: [ -| ltac_expr -| empty +| OPT ltac_expr ] ltac_expr_opt_list_or: [ | ltac_expr_opt_list_or "|" ltac_expr_opt | ltac_expr_opt +| ltac_expr_opt_list_or "|" OPT ltac_expr +| OPT ltac_expr ] ltac_expr3: [ | "try" ltac_expr3 | "do" int_or_var ltac_expr3 | "timeout" int_or_var ltac_expr3 -| "time" string_opt ltac_expr3 +| "time" OPT string ltac_expr3 | "repeat" ltac_expr3 | "progress" ltac_expr3 | "once" ltac_expr3 @@ -2863,48 +2055,23 @@ ltac_expr2: [ ltac_expr1: [ | ltac_match_term +| "first" "[" LIST0 ltac_expr SEP "|" "]" +| "solve" "[" LIST0 ltac_expr SEP "|" "]" +| "idtac" LIST0 message_token +| failkw [ int_or_var | ] LIST0 message_token | ltac_match_goal -| "first" "[" ltac_expr_list_or_opt "]" -| "solve" "[" ltac_expr_list_or_opt "]" -| "idtac" message_token_list_opt -| failkw int_or_var_opt message_token_list_opt | simple_tactic | tactic_arg -| qualid tactic_arg_compat_list_opt +| qualid LIST0 tactic_arg_compat | ltac_expr0 ] -ltac_expr_list_or_opt: [ -| ltac_expr_list_or -| empty -] - -ltac_expr_list_or: [ -| ltac_expr_list_or "|" ltac_expr -| ltac_expr -] - -message_token_list_opt: [ -| message_token_list_opt message_token -| empty -] - message_token: [ | ident | string | int ] -int_or_var_opt: [ -| int_or_var -| empty -] - -term1_extended_list_opt: [ -| 
term1_extended_list_opt term1_extended -| empty -] - failkw: [ | "fail" | "gfail" @@ -2914,26 +2081,16 @@ tactic_arg: [ | "eval" red_expr "in" term | "context" ident "[" term "]" | "type" "of" term -| "fresh" fresh_id_list_opt +| "fresh" LIST0 fresh_id | "type_term" term1_extended | "numgoals" ] -fresh_id_list_opt: [ -| fresh_id_list_opt fresh_id -| empty -] - fresh_id: [ | string | qualid ] -tactic_arg_compat_list_opt: [ -| tactic_arg_compat_list_opt tactic_arg_compat -| empty -] - tactic_arg_compat: [ | tactic_arg | term @@ -2963,22 +2120,17 @@ only_selector: [ ] selector: [ -| range_selector_list_comma +| LIST1 range_selector SEP "," | "[" ident "]" ] -range_selector_list_comma: [ -| range_selector_list_comma "," range_selector -| range_selector -] - range_selector: [ | num "-" num | num ] ltac_match_term: [ -| match_key ltac_expr "with" or_opt match_rule_list_or "end" +| match_key ltac_expr "with" OPT "|" LIST1 match_rule SEP "|" "end" ] match_key: [ @@ -2987,67 +2139,27 @@ match_key: [ | "lazymatch" ] -match_rule_list_or: [ -| match_rule_list_or "|" match_rule -| match_rule -] - match_rule: [ -| match_pattern_alt "=>" ltac_expr -] - -match_pattern_alt: [ -| match_pattern -| "_" +| [ match_pattern | "_" ] "=>" ltac_expr ] match_pattern: [ -| "context" ident_opt "[" term "]" +| "context" OPT ident "[" term "]" | term ] ltac_match_goal: [ -| match_key reverse_opt "goal" "with" or_opt match_context_rule_list_or "end" -] - -reverse_opt: [ -| "reverse" -| empty -] - -match_context_rule_list_or: [ -| match_context_rule_list_or "|" match_context_rule -| match_context_rule +| match_key OPT "reverse" "goal" "with" OPT "|" LIST1 match_context_rule SEP "|" "end" ] match_context_rule: [ -| match_hyp_list_comma_opt "|-" match_pattern "=>" ltac_expr -| "[" match_hyp_list_comma_opt "|-" match_pattern "]" "=>" ltac_expr +| LIST0 match_hyp SEP "," "|-" match_pattern "=>" ltac_expr +| "[" LIST0 match_hyp SEP "," "|-" match_pattern "]" "=>" ltac_expr | "_" "=>" ltac_expr ] 
-match_hyp_list_comma_opt: [ -| match_hyp_list_comma -| empty -] - -match_hyp_list_comma: [ -| match_hyp_list_comma "," match_hyp -| match_hyp -] - match_hyp: [ | name ":" match_pattern -| name ":=" match_pattern_opt match_pattern -] - -match_pattern_opt: [ -| "[" match_pattern "]" ":" -| empty -] - -ident_list_opt: [ -| ident_list_opt ident -| empty +| name ":=" OPT ( "[" match_pattern "]" ":" ) match_pattern ] diff --git a/doc/tools/docgram/productionlist.edit_mlg b/doc/tools/docgram/productionlist.edit_mlg index 42d94e76bb..8170b71e7a 100644 --- a/doc/tools/docgram/productionlist.edit_mlg +++ b/doc/tools/docgram/productionlist.edit_mlg @@ -13,32 +13,6 @@ DOC_GRAMMAR -EXPAND: [ | ] - -RENAME: [ -| name_alt names_tuple -| binder_list binders -| binder_list_opt binders_opt -| typeclass_constraint_list_comma typeclass_constraints_comma -| universe_expr_list_comma universe_exprs_comma -| universe_level_list_opt universe_levels_opt -| name_list names -| name_list_comma names_comma -| case_item_list_comma case_items_comma -| eqn_list_or_opt eqns_or_opt -| eqn_list_or eqns_or -| pattern_list_or patterns_or -| fix_body_list fix_bodies -| arg_list args -| arg_list_opt args_opt -| evar_binding_list_semi evar_bindings_semi -] - -binders_opt: [ -| REPLACE binders_opt binder -| WITH binders -] - (* this is here because they're inside _opt generated by EXPAND *) SPLICE: [ | ltac_info diff --git a/kernel/uint63_31.ml b/kernel/uint63_31.ml index e38389ca13..445166f6af 100644 --- a/kernel/uint63_31.ml +++ b/kernel/uint63_31.ml @@ -15,8 +15,8 @@ let _ = assert (Sys.word_size = 32) let uint_size = 63 -let maxuint63 = Int64.of_string "0x7FFFFFFFFFFFFFFF" -let maxuint31 = Int64.of_string "0x7FFFFFFF" +let maxuint63 = 0x7FFF_FFFF_FFFF_FFFFL +let maxuint31 = 0x7FFF_FFFFL let zero = Int64.zero let one = Int64.one @@ -118,27 +118,30 @@ let div21 xh xl y = let div21 xh xl y = if Int64.compare y xh <= 0 then zero, zero else div21 xh xl y - (* exact multiplication *) +(* exact 
multiplication *) let mulc x y = - let lx = ref (Int64.logand x maxuint31) in - let ly = ref (Int64.logand y maxuint31) in + let lx = Int64.logand x maxuint31 in + let ly = Int64.logand y maxuint31 in let hx = Int64.shift_right x 31 in let hy = Int64.shift_right y 31 in - let hr = ref (Int64.mul hx hy) in - let lr = ref (Int64.logor (Int64.mul !lx !ly) (Int64.shift_left !hr 62)) in - hr := (Int64.shift_right_logical !hr 1); - lx := Int64.mul !lx hy; - ly := Int64.mul hx !ly; - hr := Int64.logor !hr (Int64.add (Int64.shift_right !lx 32) (Int64.shift_right !ly 32)); - lr := Int64.add !lr (Int64.shift_left !lx 31); - hr := Int64.add !hr (Int64.shift_right_logical !lr 63); - lr := Int64.add (Int64.shift_left !ly 31) (mask63 !lr); - hr := Int64.add !hr (Int64.shift_right_logical !lr 63); - if Int64.logand !lr Int64.min_int <> 0L - then Int64.(sub !hr one, mask63 !lr) - else (!hr, !lr) - -let equal x y = mask63 x = mask63 y + (* compute the median products *) + let s = Int64.add (Int64.mul lx hy) (Int64.mul hx ly) in + (* s fits on 64 bits, split it into a 33-bit high part and a 31-bit low part *) + let lr = Int64.shift_left (Int64.logand s maxuint31) 31 in + let hr = Int64.shift_right_logical s 31 in + (* add the outer products *) + let lr = Int64.add (Int64.mul lx ly) lr in + let hr = Int64.add (Int64.mul hx hy) hr in + (* hr fits on 64 bits, since the final result fits on 126 bits *) + (* now x * y = hr * 2^62 + lr and lr < 2^63 *) + let lr = Int64.add lr (Int64.shift_left (Int64.logand hr 1L) 62) in + let hr = Int64.shift_right_logical hr 1 in + (* now x * y = hr * 2^63 + lr, but lr might be too large *) + if Int64.logand lr Int64.min_int <> 0L + then Int64.add hr 1L, mask63 lr + else hr, lr + +let equal (x : t) y = x = y let compare x y = Int64.compare x y diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml index 734dd8ee8a..26afdcb9d5 100644 --- a/parsing/pcoq.ml +++ b/parsing/pcoq.ml @@ -533,6 +533,7 @@ let extend_entry_command (type a) (type b) (tag : (a, b) 
entry_command) (g : a) try EntryDataMap.find tag !camlp5_entries with Not_found -> EntryData.Ex String.Map.empty in + let () = assert (not @@ String.Map.mem name old) in let entries = String.Map.add name e old in camlp5_entries := EntryDataMap.add tag (EntryData.Ex entries) !camlp5_entries in diff --git a/plugins/omega/g_omega.mlg b/plugins/omega/g_omega.mlg index 84964a7bd2..7c653b223e 100644 --- a/plugins/omega/g_omega.mlg +++ b/plugins/omega/g_omega.mlg @@ -21,40 +21,9 @@ DECLARE PLUGIN "omega_plugin" { open Ltac_plugin -open Names -open Coq_omega -open Stdarg - -let eval_tactic name = - let dp = DirPath.make (List.map Id.of_string ["PreOmega"; "omega"; "Coq"]) in - let kn = KerName.make (ModPath.MPfile dp) (Label.make name) in - let tac = Tacenv.interp_ltac kn in - Tacinterp.eval_tactic tac - -let omega_tactic l = - let tacs = List.map - (function - | "nat" -> eval_tactic "zify_nat" - | "positive" -> eval_tactic "zify_positive" - | "N" -> eval_tactic "zify_N" - | "Z" -> eval_tactic "zify_op" - | s -> CErrors.user_err Pp.(str ("No Omega knowledge base for type "^s))) - (Util.List.sort_uniquize String.compare l) - in - Tacticals.New.tclTHEN - (Tacticals.New.tclREPEAT (Tacticals.New.tclTHENLIST tacs)) - (omega_solver) } TACTIC EXTEND omega -| [ "omega" ] -> { omega_tactic [] } +| [ "omega" ] -> { Coq_omega.omega_solver } END - -TACTIC EXTEND omega' -| [ "omega" "with" ne_ident_list(l) ] -> - { omega_tactic (List.map Names.Id.to_string l) } -| [ "omega" "with" "*" ] -> - { Tacticals.New.tclTHEN (eval_tactic "zify") (omega_tactic []) } -END - diff --git a/pretyping/detyping.ml b/pretyping/detyping.ml index 862865bd90..037006bc47 100644 --- a/pretyping/detyping.ml +++ b/pretyping/detyping.ml @@ -455,7 +455,9 @@ let rec decomp_branch tags nal flags (avoid,env as e) sigma c = (avoid', add_name_opt na' body t env) sigma c let rec build_tree na isgoal e sigma ci cl = - let mkpat n rhs pl = DAst.make @@ PatCstr((ci.ci_ind,n+1),pl,update_name sigma na rhs) in + let mkpat n 
rhs pl = + let na = update_name sigma na rhs in + na, DAst.make @@ PatCstr((ci.ci_ind,n+1),pl,na) in let cnl = ci.ci_pp_info.cstr_tags in List.flatten (List.init (Array.length cl) @@ -485,7 +487,9 @@ and align_tree nal isgoal (e,c as rhs) sigma = match nal with and contract_branch isgoal e sigma (cdn,mkpat,rhs) = let nal,rhs = decomp_branch cdn [] isgoal e sigma rhs in let mat = align_tree nal isgoal rhs sigma in - List.map (fun (ids,hd,rhs) -> ids,mkpat rhs hd,rhs) mat + List.map (fun (ids,hd,rhs) -> + let na, pat = mkpat rhs hd in + (Nameops.Name.fold_right Id.Set.add na ids, pat, rhs)) mat (**********************************************************************) (* Transform internal representation of pattern-matching into list of *) diff --git a/pretyping/recordops.ml b/pretyping/recordops.ml index 5b416a99f9..35e182840b 100644 --- a/pretyping/recordops.ml +++ b/pretyping/recordops.ml @@ -114,7 +114,7 @@ let find_primitive_projection c = (* the effective components of a structure and the projections of the *) (* structure *) -(* Table des definitions "object" : pour chaque object c, +(* Table of "object" definitions: for each object c, c := [x1:B1]...[xk:Bk](Build_R a1...am t1...t_n) @@ -127,16 +127,19 @@ let find_primitive_projection c = that maps the pair (Li,ci) to the following data + o_ORIGIN = c (the constant name which this conversion rule is + synthesized from) o_DEF = c o_TABS = B1...Bk o_INJ = Some n (when ci is a reference to the parameter xi) - o_PARAMS = a1...am - o_NARAMS = m + o_TPARAMS = a1...am + o_NPARAMS = m o_TCOMP = ui1...uir *) type obj_typ = { + o_ORIGIN : Constant.t; o_DEF : constr; o_CTX : Univ.AUContext.t; o_INJ : int option; (* position of trivial argument if any *) @@ -224,7 +227,7 @@ let compute_canonical_projections env ~warn (con,ind) = match cs_pattern_of_constr nenv t with | patt, o_INJ, o_TCOMPS -> ((GlobRef.ConstRef proji_sp, (patt, t)), - { o_DEF ; o_CTX ; o_INJ ; o_TABS ; o_TPARAMS ; o_NPARAMS ; o_TCOMPS }) + { o_ORIGIN = con 
; o_DEF ; o_CTX ; o_INJ ; o_TABS ; o_TPARAMS ; o_NPARAMS ; o_TCOMPS }) :: acc | exception Not_found -> if warn then warn_projection_no_head_constant (sign, env, t, con, proji_sp); diff --git a/pretyping/recordops.mli b/pretyping/recordops.mli index e8b0d771aa..aaba7cc3e5 100644 --- a/pretyping/recordops.mli +++ b/pretyping/recordops.mli @@ -73,6 +73,7 @@ type cs_pattern = | Default_cs type obj_typ = { + o_ORIGIN : Constant.t; o_DEF : constr; o_CTX : Univ.AUContext.t; o_INJ : int option; (** position of trivial argument *) diff --git a/printing/ppconstr.ml b/printing/ppconstr.ml index 2da163b8ee..b55a41471a 100644 --- a/printing/ppconstr.ml +++ b/printing/ppconstr.ml @@ -275,7 +275,7 @@ let tag_var = tag Tag.variable pr_reference r, latom | CPatOr pl -> - let pp = pr_patt mt (lpator,Any) in + let pp p = hov 0 (pr_patt mt (lpator,Any) p) in surround (hov 0 (prlist_with_sep pr_spcbar pp pl)), lpator | CPatNotation ((_,"( _ )"),([p],[]),[]) -> @@ -304,7 +304,8 @@ let tag_var = tag Tag.variable spc() ++ hov 4 (pr_with_comments ?loc (str "| " ++ - hov 0 (prlist_with_sep pr_spcbar (prlist_with_sep sep_v (pr_patt ltop)) pl + hov 0 (prlist_with_sep pr_spcbar + (fun p -> hov 0 (prlist_with_sep sep_v (pr_patt ltop) p)) pl ++ str " =>") ++ pr_sep_com spc (pr ltop) rhs)) diff --git a/test-suite/bugs/closed/bug_11321.v b/test-suite/bugs/closed/bug_11321.v new file mode 100644 index 0000000000..ce95280fb1 --- /dev/null +++ b/test-suite/bugs/closed/bug_11321.v @@ -0,0 +1,10 @@ +Require Import Cyclic63. + +Goal False. +Proof. +assert (4294967296 *c 2147483648 = WW 2 0)%int63 as H. + vm_cast_no_check (@eq_refl (zn2z int) (WW 2 0)%int63). +generalize (f_equal (zn2z_to_Z wB to_Z) H). +now rewrite mulc_WW_spec. +Fail Qed. +Abort. 
diff --git a/test-suite/output/Cases.out b/test-suite/output/Cases.out index e84ac85aa8..6976610b22 100644 --- a/test-suite/output/Cases.out +++ b/test-suite/output/Cases.out @@ -166,3 +166,15 @@ fun x : K => match x with : K -> nat The command has indeed failed with message: Pattern "S _, _" is redundant in this clause. +stray = +fun N : Tree => +match N with +| App (App Node (Node as strayvariable)) _ | + App (App Node (App Node _ as strayvariable)) _ | + App (App Node (App (App Node Node) (App _ _) as strayvariable)) _ | + App (App Node (App (App Node (App _ _)) _ as strayvariable)) _ | + App (App Node (App (App (App _ _) _) _ as strayvariable)) _ => + strayvariable +| _ => Node +end + : Tree -> Tree diff --git a/test-suite/output/Cases.v b/test-suite/output/Cases.v index a040b69b44..262ec2b677 100644 --- a/test-suite/output/Cases.v +++ b/test-suite/output/Cases.v @@ -222,3 +222,23 @@ Check fun x => match x with a3 | a4 | a1 => 3 | _ => 2 end. (* Test redundant clause within a disjunctive pattern *) Fail Check fun n m => match n, m with 0, 0 | _, S _ | S 0, _ | S (S _ | _), _ => false end. + +Module Bug11231. + +(* Missing dependency in computing if a clause is a default clause *) + +Inductive Tree: Set := +| Node : Tree +| App : Tree -> Tree -> Tree +. + +Definition stray N := +match N with +| App (App Node (App (App Node Node) Node)) _ => Node +| App (App Node strayvariable) _ => strayvariable +| _ => Node +end. + +Print stray. + +End Bug11231. 
diff --git a/test-suite/output/PrintCanonicalProjections.out b/test-suite/output/PrintCanonicalProjections.out new file mode 100644 index 0000000000..a4e2251440 --- /dev/null +++ b/test-suite/output/PrintCanonicalProjections.out @@ -0,0 +1,18 @@ +bool <- sort_eq ( bool_eqType ) +bool <- sort_TYPE ( bool_TYPE ) +nat <- sort_eq ( nat_eqType ) +nat <- sort_TYPE ( nat_TYPE ) +prod <- sort_eq ( prod_eqType ) +prod <- sort_TYPE ( prod_TYPE ) +sum <- sort_eq ( sum_eqType ) +sum <- sort_TYPE ( sum_TYPE ) +sum <- sort_TYPE ( sum_TYPE ) +prod <- sort_TYPE ( prod_TYPE ) +nat <- sort_TYPE ( nat_TYPE ) +bool <- sort_TYPE ( bool_TYPE ) +sum <- sort_eq ( sum_eqType ) +prod <- sort_eq ( prod_eqType ) +nat <- sort_eq ( nat_eqType ) +bool <- sort_eq ( bool_eqType ) +bool <- sort_TYPE ( bool_TYPE ) +bool <- sort_eq ( bool_eqType ) diff --git a/test-suite/output/PrintCanonicalProjections.v b/test-suite/output/PrintCanonicalProjections.v new file mode 100644 index 0000000000..808cdffe39 --- /dev/null +++ b/test-suite/output/PrintCanonicalProjections.v @@ -0,0 +1,46 @@ +Record TYPE := Pack_TYPE { sort_TYPE :> Type }. +Record eqType := Pack_eq { sort_eq :> Type; _ : sort_eq -> sort_eq -> bool }. + +Definition eq_op (T : eqType) : T -> T -> bool := + match T with Pack_eq _ op => op end. + +Definition bool_eqb b1 b2 := + match b1, b2 with + | false, false => true + | true, true => true + | _, _ => false + end. + +Canonical bool_TYPE := Pack_TYPE bool. +Canonical bool_eqType := Pack_eq bool bool_eqb. + +Canonical nat_TYPE := Pack_TYPE nat. +Canonical nat_eqType := Pack_eq nat Nat.eqb. + +Definition prod_eqb (T U : eqType) (x y : T * U) := + match x, y with + | (x1, x2), (y1, y2) => + andb (eq_op _ x1 y1) (eq_op _ x2 y2) + end. + +Canonical prod_TYPE (T U : TYPE) := Pack_TYPE (T * U). +Canonical prod_eqType (T U : eqType) := Pack_eq (T * U) (prod_eqb T U). 
+ +Definition sum_eqb (T U : eqType) (x y : T + U) := + match x, y with + | inl x, inl y => eq_op _ x y + | inr x, inr y => eq_op _ x y + | _, _ => false + end. + +Canonical sum_TYPE (T U : TYPE) := Pack_TYPE (T + U). +Canonical sum_eqType (T U : eqType) := Pack_eq (T + U) (sum_eqb T U). + +Print Canonical Projections bool. +Print Canonical Projections nat. +Print Canonical Projections prod. +Print Canonical Projections sum. +Print Canonical Projections sort_TYPE. +Print Canonical Projections sort_eq. +Print Canonical Projections sort_TYPE bool. +Print Canonical Projections bool_eqType. diff --git a/test-suite/output/unification.out b/test-suite/output/unification.out new file mode 100644 index 0000000000..dfd755da61 --- /dev/null +++ b/test-suite/output/unification.out @@ -0,0 +1,11 @@ +The command has indeed failed with message: +In environment +x : T +T : Type +a : T +Unable to unify "T" with "?X@{x0:=x; x:=C a}" (cannot instantiate +"?X" because "T" is not in its scope: available arguments are +"x" "C a"). +The command has indeed failed with message: +The term "id" has type "ID" while it is expected to have type +"Type -> ?T" (cannot instantiate "?T" because "A" is not in its scope). diff --git a/test-suite/output/unification.v b/test-suite/output/unification.v new file mode 100644 index 0000000000..ff99f2e23c --- /dev/null +++ b/test-suite/output/unification.v @@ -0,0 +1,12 @@ +(* Unification error tests *) + +Module A. + +(* Check regression of an UNBOUND_REL bug *) +Inductive T := C : forall {A}, A -> T. +Fail Check fun x => match x return ?[X] with C a => a end. + +(* Bug #3634 *) +Fail Check (id:Type -> _). + +End A. diff --git a/test-suite/success/Omega.v b/test-suite/success/Omega.v index 470e4f0580..5e0f90d59b 100644 --- a/test-suite/success/Omega.v +++ b/test-suite/success/Omega.v @@ -90,5 +90,5 @@ Qed. (* Postponed... problem with goals of the form "(n*m=0)%nat -> (n*m=0)%Z" *) Lemma lem10 : forall n m:nat, le n (plus n (mult n m)). Proof. 
-intros; omega with *. +intros; zify; omega. Qed. diff --git a/test-suite/success/OmegaPre.v b/test-suite/success/OmegaPre.v index 17531064cc..0223255067 100644 --- a/test-suite/success/OmegaPre.v +++ b/test-suite/success/OmegaPre.v @@ -16,112 +16,112 @@ Open Scope Z_scope. Goal forall a:Z, Z.max a a = a. intros. -omega with *. +zify; omega. Qed. Goal forall a b:Z, Z.max a b = Z.max b a. intros. -omega with *. +zify; omega. Qed. Goal forall a b c:Z, Z.max a (Z.max b c) = Z.max (Z.max a b) c. intros. -omega with *. +zify; omega. Qed. Goal forall a b:Z, Z.max a b + Z.min a b = a + b. intros. -omega with *. +zify; omega. Qed. Goal forall a:Z, (Z.abs a)*(Z.sgn a) = a. intros. zify. -intuition; subst; omega. (* pure multiplication: omega alone can't do it *) +intuition; subst; zify; omega. (* pure multiplication: zify; omega alone can't do it *) Qed. Goal forall a:Z, Z.abs a = a -> a >= 0. intros. -omega with *. +zify; omega. Qed. Goal forall a:Z, Z.sgn a = a -> a = 1 \/ a = 0 \/ a = -1. intros. -omega with *. +zify; omega. Qed. (* zify_nat *) Goal forall m: nat, (m<2)%nat -> (0<= m+m <=2)%nat. intros. -omega with *. +zify; omega. Qed. Goal forall m:nat, (m<1)%nat -> (m=0)%nat. intros. -omega with *. +zify; omega. Qed. Goal forall m: nat, (m<=100)%nat -> (0<= m+m <=200)%nat. intros. -omega with *. +zify; omega. Qed. (* 2000 instead of 200: works, but quite slow *) Goal forall m: nat, (m*m>=0)%nat. intros. -omega with *. +zify; omega. Qed. (* zify_positive *) Goal forall m: positive, (m<2)%positive -> (2 <= m+m /\ m+m <= 2)%positive. intros. -omega with *. +zify; omega. Qed. Goal forall m:positive, (m<2)%positive -> (m=1)%positive. intros. -omega with *. +zify; omega. Qed. Goal forall m: positive, (m<=1000)%positive -> (2<=m+m/\m+m <=2000)%positive. intros. -omega with *. +zify; omega. Qed. Goal forall m: positive, (m*m>=1)%positive. intros. -omega with *. +zify; omega. Qed. (* zify_N *) Goal forall m:N, (m<2)%N -> (0 <= m+m /\ m+m <= 2)%N. intros. -omega with *. 
+zify; omega. Qed. Goal forall m:N, (m<1)%N -> (m=0)%N. intros. -omega with *. +zify; omega. Qed. Goal forall m:N, (m<=1000)%N -> (0<=m+m/\m+m <=2000)%N. intros. -omega with *. +zify; omega. Qed. Goal forall m:N, (m*m>=0)%N. intros. -omega with *. +zify; omega. Qed. (* mix of datatypes *) Goal forall p, Z.of_N (N.of_nat (N.to_nat (Npos p))) = Zpos p. intros. -omega with *. +zify; omega. Qed. diff --git a/test-suite/success/custom_entry.v b/test-suite/success/custom_entry.v new file mode 100644 index 0000000000..e88ae65e18 --- /dev/null +++ b/test-suite/success/custom_entry.v @@ -0,0 +1,13 @@ +Declare Custom Entry foo. + +Print Custom Grammar foo. + +Notation "[ e ]" := e (e custom foo at level 0). + +Print Custom Grammar foo. + +Notation "1" := O (in custom foo at level 0). + +Print Custom Grammar foo. + +Fail Declare Custom Entry foo. diff --git a/vernac/egramcoq.ml b/vernac/egramcoq.ml index b65a126f55..07656f9715 100644 --- a/vernac/egramcoq.ml +++ b/vernac/egramcoq.ml @@ -278,6 +278,10 @@ let find_custom_entry s = try (find_custom_entry constr_custom_entry sc, find_custom_entry pattern_custom_entry sp) with Not_found -> user_err Pp.(str "Undeclared custom entry: " ++ str s ++ str ".") +let exists_custom_entry s = match find_custom_entry s with +| _ -> true +| exception _ -> false + let locality_of_custom_entry s = String.Set.mem s !custom_entry_locality (* This computes the name of the level where to add a new rule *) diff --git a/vernac/egramcoq.mli b/vernac/egramcoq.mli index f879d51660..6768d24d5c 100644 --- a/vernac/egramcoq.mli +++ b/vernac/egramcoq.mli @@ -19,4 +19,7 @@ val extend_constr_grammar : Notation_gram.one_notation_grammar -> unit (** Add a term notation rule to the parsing system. 
*) val create_custom_entry : local:bool -> string -> unit + +val exists_custom_entry : string -> bool + val locality_of_custom_entry : string -> bool diff --git a/vernac/g_vernac.mlg b/vernac/g_vernac.mlg index 436648c163..69ab0fafe9 100644 --- a/vernac/g_vernac.mlg +++ b/vernac/g_vernac.mlg @@ -1026,7 +1026,8 @@ GRAMMAR EXTEND Gram | IDENT "Coercions" -> { PrintCoercions } | IDENT "Coercion"; IDENT "Paths"; s = class_rawexpr; t = class_rawexpr -> { PrintCoercionPaths (s,t) } - | IDENT "Canonical"; IDENT "Projections" -> { PrintCanonicalConversions } + | IDENT "Canonical"; IDENT "Projections"; qids = LIST0 smart_global + -> { PrintCanonicalConversions qids } | IDENT "Typing"; IDENT "Flags" -> { PrintTypingFlags } | IDENT "Tables" -> { PrintTables } | IDENT "Options" -> { PrintTables (* A Synonymous to Tables *) } diff --git a/vernac/himsg.ml b/vernac/himsg.ml index c8e308cb9c..ba7ae5069b 100644 --- a/vernac/himsg.ml +++ b/vernac/himsg.ml @@ -297,6 +297,7 @@ let explain_unification_error env sigma p1 p2 = function strbrk " with term " ++ pr_leconstr_env env sigma rhs ++ strbrk " that would depend on itself"] | NotClean ((evk,args),env,c) -> + let env = make_all_name_different env sigma in [str "cannot instantiate " ++ quote (pr_existential_key sigma evk) ++ strbrk " because " ++ pr_leconstr_env env sigma c ++ strbrk " is not in its scope" ++ diff --git a/vernac/metasyntax.ml b/vernac/metasyntax.ml index 35681aed13..222e9eb825 100644 --- a/vernac/metasyntax.ml +++ b/vernac/metasyntax.ml @@ -1654,10 +1654,16 @@ let add_syntactic_definition ~local deprecation env ident (vars,c) { onlyparsing (**********************************************************************) (* Declaration of custom entry *) +let warn_custom_entry = + CWarnings.create ~name:"custom-entry-overriden" ~category:"parsing" + (fun s -> + strbrk "Custom entry " ++ str s ++ strbrk " has been overriden.") + let load_custom_entry _ _ = () let open_custom_entry _ (_,(local,s)) = - 
Egramcoq.create_custom_entry ~local s + if Egramcoq.exists_custom_entry s then warn_custom_entry s + else Egramcoq.create_custom_entry ~local s let cache_custom_entry o = load_custom_entry 1 o; @@ -1677,4 +1683,7 @@ let inCustomEntry : locality_flag * string -> obj = classify_function = classify_custom_entry} let declare_custom_entry local s = - Lib.add_anonymous_leaf (inCustomEntry (local,s)) + if Egramcoq.exists_custom_entry s then + user_err Pp.(str "Custom entry " ++ str s ++ str " already exists") + else + Lib.add_anonymous_leaf (inCustomEntry (local,s)) diff --git a/vernac/ppvernac.ml b/vernac/ppvernac.ml index 1742027076..a1bd99c237 100644 --- a/vernac/ppvernac.ml +++ b/vernac/ppvernac.ml @@ -513,8 +513,8 @@ let string_of_theorem_kind = let open Decls in function keyword "Print Coercion Paths" ++ spc() ++ pr_class_rawexpr s ++ spc() ++ pr_class_rawexpr t - | PrintCanonicalConversions -> - keyword "Print Canonical Structures" + | PrintCanonicalConversions qids -> + keyword "Print Canonical Structures" ++ prlist pr_smart_global qids | PrintTypingFlags -> keyword "Print Typing Flags" | PrintTables -> diff --git a/vernac/prettyp.ml b/vernac/prettyp.ml index d04a02febc..8ced35c3be 100644 --- a/vernac/prettyp.ml +++ b/vernac/prettyp.ml @@ -996,12 +996,26 @@ let print_path_between cls clt = in print_path ((i,j),p) -let print_canonical_projections env sigma = +let print_canonical_projections env sigma grefs = + let match_proj_gref ((x,y),c) gr = + GlobRef.equal x gr || + begin match y with + | Const_cs y -> GlobRef.equal y gr + | _ -> false + end || + match gr with + | GlobRef.ConstRef con -> Names.Constant.equal c.o_ORIGIN con + | _ -> false + in + let projs = + List.filter (fun p -> List.for_all (match_proj_gref p) grefs) + (canonical_projections ()) + in prlist_with_sep fnl (fun ((r1,r2),o) -> pr_cs_pattern r2 ++ str " <- " ++ pr_global r1 ++ str " ( " ++ pr_lconstr_env env sigma o.o_DEF ++ str " )") - (canonical_projections ()) + projs 
(*************************************************************************) diff --git a/vernac/prettyp.mli b/vernac/prettyp.mli index 2ee9c4ed33..ac41f30c5d 100644 --- a/vernac/prettyp.mli +++ b/vernac/prettyp.mli @@ -53,7 +53,7 @@ val print_graph : unit -> Pp.t val print_classes : unit -> Pp.t val print_coercions : unit -> Pp.t val print_path_between : Coercionops.cl_typ -> Coercionops.cl_typ -> Pp.t -val print_canonical_projections : env -> Evd.evar_map -> Pp.t +val print_canonical_projections : env -> Evd.evar_map -> GlobRef.t list -> Pp.t (** Pretty-printing functions for type classes and instances *) val print_typeclasses : unit -> Pp.t diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml index d3bc06cd6c..99d74f16cc 100644 --- a/vernac/vernacentries.ml +++ b/vernac/vernacentries.ml @@ -1701,7 +1701,9 @@ let vernac_print ~pstate ~atts = | PrintCoercions -> Prettyp.print_coercions () | PrintCoercionPaths (cls,clt) -> Prettyp.print_path_between (cl_of_qualid cls) (cl_of_qualid clt) - | PrintCanonicalConversions -> Prettyp.print_canonical_projections env sigma + | PrintCanonicalConversions qids -> + let grefs = List.map Smartlocate.smart_global qids in + Prettyp.print_canonical_projections env sigma grefs | PrintUniverses (sort, subgraph, dst) -> print_universes ~sort ~subgraph dst | PrintHint r -> Hints.pr_hint_ref env sigma (smart_global r) | PrintHintGoal -> diff --git a/vernac/vernacexpr.ml b/vernac/vernacexpr.ml index 32ff8b8fb2..1daa244986 100644 --- a/vernac/vernacexpr.ml +++ b/vernac/vernacexpr.ml @@ -46,7 +46,7 @@ type printable = | PrintInstances of qualid or_by_notation | PrintCoercions | PrintCoercionPaths of class_rawexpr * class_rawexpr - | PrintCanonicalConversions + | PrintCanonicalConversions of qualid or_by_notation list | PrintUniverses of bool * qualid list option * string option | PrintHint of qualid or_by_notation | PrintHintGoal |
