diff options
146 files changed, 3181 insertions, 2371 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4fdbe6e818..68bb24ac77 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -18,7 +18,7 @@ stages: variables: # Format: $IMAGE-V$DATE [Cache is not used as of today but kept here # for reference] - CACHEKEY: "bionic_coq-V2019-12-08-V82" + CACHEKEY: "bionic_coq-V2019-03-01-V43" IMAGE: "$CI_REGISTRY_IMAGE:$CACHEKEY" # By default, jobs run in the base switch; override to select another switch OPAM_SWITCH: "base" diff --git a/Makefile.build b/Makefile.build index 6590f5f308..9e0a402730 100644 --- a/Makefile.build +++ b/Makefile.build @@ -555,7 +555,7 @@ $(FAKEIDEBYTE): $(FAKEIDECMO) | $(IDETOPBYTE) # votour: a small vo explorer (based on the checker) -VOTOURCMO:=clib/cObj.cmo kernel/uint63.cmo kernel/float64.cmo checker/analyze.cmo checker/values.cmo checker/votour.cmo +VOTOURCMO:=clib/cObj.cmo checker/analyze.cmo checker/values.cmo checker/votour.cmo bin/votour: $(call bestobj, $(VOTOURCMO)) $(LIBCOQRUN) $(SHOW)'OCAMLBEST -o $@' diff --git a/Makefile.doc b/Makefile.doc index eb58162cc0..1249555cd7 100644 --- a/Makefile.doc +++ b/Makefile.doc @@ -247,7 +247,7 @@ PLUGIN_MLGS := $(wildcard plugins/*/*.mlg) OMITTED_PLUGIN_MLGS := plugins/ssr/ssrparser.mlg plugins/ssr/ssrvernac.mlg plugins/ssrmatching/g_ssrmatching.mlg DOC_MLGS := */*.mlg $(sort $(filter-out $(OMITTED_PLUGIN_MLGS), $(PLUGIN_MLGS))) DOC_EDIT_MLGS := doc/tools/docgram/*.edit_mlg -DOC_RSTS := doc/sphinx/*.rst doc/sphinx/*/*.rst +DOC_RSTS := doc/sphinx/*/*.rst doc/tools/docgram/fullGrammar: $(DOC_GRAM) $(DOC_MLGS) $(SHOW)'DOC_GRAM' @@ -256,7 +256,7 @@ doc/tools/docgram/fullGrammar: $(DOC_GRAM) $(DOC_MLGS) #todo: add a dependency of sphinx on updated_rsts when we're ready doc/tools/docgram/orderedGrammar doc/tools/docgram/updated_rsts: $(DOC_GRAM) $(DOC_EDIT_MLGS) $(SHOW)'DOC_GRAM_RSTS' - $(HIDE)$(DOC_GRAM) $(DOC_MLGS) $(DOC_RSTS) + $(HIDE)$(DOC_GRAM) -check-cmds $(DOC_MLGS) $(DOC_RSTS) doc/tools/docgram/updated_rsts: doc/tools/docgram/orderedGrammar diff --git 
a/configure.ml b/configure.ml index 6e15cdbe4e..55d71f6c2e 100644 --- a/configure.ml +++ b/configure.ml @@ -975,7 +975,7 @@ let config_runtime () = | Some flags -> string_split ',' flags | _ when use_custom -> [custom_flag] | _ when !prefs.local -> - ["-dllib";"-lcoqrun";"-dllpath";coqtop/"kernel/byterun"] + ["-dllib";"-lcoqrun";"-dllpath";("\"" ^ coqtop ^ "/kernel/byterun\"")] | _ -> let ld="CAML_LD_LIBRARY_PATH" in build_loadpath := sprintf "export %s:='%s/kernel/byterun':$(%s)" ld coqtop ld; diff --git a/dev/ci/docker/bionic_coq/Dockerfile b/dev/ci/docker/bionic_coq/Dockerfile index b8f9d99702..41392b4b8c 100644 --- a/dev/ci/docker/bionic_coq/Dockerfile +++ b/dev/ci/docker/bionic_coq/Dockerfile @@ -1,4 +1,4 @@ -# CACHEKEY: "bionic_coq-V2019-12-08-V82" +# CACHEKEY: "bionic_coq-V2019-03-01-V43" # ^^ Update when modifying this file. FROM ubuntu:bionic @@ -37,9 +37,9 @@ ENV COMPILER="4.05.0" # Common OPAM packages. # `num` does not have a version number as the right version to install varies # with the compiler version. -ENV BASE_OPAM="num ocamlfind.1.8.1 dune.2.0.0 ounit.2.0.8 odoc.1.4.2" \ +ENV BASE_OPAM="num ocamlfind.1.8.1 dune.2.0.1 ounit.2.0.8 odoc.1.4.2" \ CI_OPAM="menhir.20190626 ocamlgraph.1.8.8" \ - BASE_ONLY_OPAM="elpi.1.8.0" + BASE_ONLY_OPAM="elpi.1.10.2" # BASE switch; CI_OPAM contains Coq's CI dependencies. 
ENV COQIDE_OPAM="cairo2.0.6.1 lablgtk3-sourceview3.3.0.beta6" diff --git a/dev/ci/user-overlays/11051-gares-elpi-1.8.sh b/dev/ci/user-overlays/11051-gares-elpi-1.8.sh deleted file mode 100644 index 7845654375..0000000000 --- a/dev/ci/user-overlays/11051-gares-elpi-1.8.sh +++ /dev/null @@ -1,6 +0,0 @@ -if [ "$CI_PULL_REQUEST" = "11051" ] || [ "$CI_BRANCH" = "elpi-1.8" ]; then - - elpi_CI_REF="coq-master+v1.2" - elpi_CI_GITURL=https://github.com/LPCIC/coq-elpi - -fi diff --git a/dev/ci/user-overlays/11602-herbelin-master+support-only-parsing-where-clause.sh b/dev/ci/user-overlays/11602-herbelin-master+support-only-parsing-where-clause.sh new file mode 100644 index 0000000000..8b0f86d951 --- /dev/null +++ b/dev/ci/user-overlays/11602-herbelin-master+support-only-parsing-where-clause.sh @@ -0,0 +1,6 @@ +if [ "$CI_PULL_REQUEST" = "11602" ] || [ "$CI_BRANCH" = "master+support-only-parsing-where-clause" ]; then + + equations_CI_REF=master+adapt-coq-pr11602-only-parsing-where-notation + equations_CI_GITURL=https://github.com/herbelin/Coq-Equations + +fi diff --git a/dev/ci/user-overlays/11708-gares-elpi-1.10.sh b/dev/ci/user-overlays/11708-gares-elpi-1.10.sh new file mode 100644 index 0000000000..121190e5f6 --- /dev/null +++ b/dev/ci/user-overlays/11708-gares-elpi-1.10.sh @@ -0,0 +1,6 @@ +if [ "$CI_PULL_REQUEST" = "11708" ] || [ "$CI_BRANCH" = "elpi-1.10+coq-elpi-1.3" ]; then + + elpi_CI_REF="coq-master+coq-elpi-1.3" + elpi_CI_GITURL=https://github.com/LPCIC/coq-elpi + +fi diff --git a/dev/doc/build-system.dune.md b/dev/doc/build-system.dune.md index cd35064b18..777eec97c6 100644 --- a/dev/doc/build-system.dune.md +++ b/dev/doc/build-system.dune.md @@ -46,21 +46,27 @@ of build threads `(jobs N)` and display options `(display _mode_)`. ## Running binaries [coqtop / coqide] -There are two special targets `states` and `quickide` that will -generate "shims" for running `coqtop` and `coqide` in a fast build. 
In -order to use them, do: +Running `coqtop` directly with `dune exec -- coqtop` won't in general +work well unless you are using `dune exec -- coqtop -noinit`. The +`coqtop` binary doesn't depend itself on Coq's prelude, so plugins / +vo files may go stale if you rebuild only `coqtop`. + +Instead, you should use the provided "shims" for running `coqtop` and +`coqide` in a fast build. In order to use them, do: ``` $ make -f Makefile.dune voboot # Only once per session $ dune exec -- dev/shim/coqtop-prelude ``` -or `quickide` / `dev/shim/coqide-prelude` for CoqIDE. These targets -enjoy quick incremental compilation thanks to `-opaque` so they tend -to be very fast while developing. +or `quickide` / `dev/shim/coqide-prelude` for CoqIDE, etc.... See +`dev/shim/dune` for a complete list of targets. These targets enjoy +quick incremental compilation thanks to `-opaque` so they tend to be +very fast while developing. Note that for a fast developer build of ML files, the `check` target -will be faster. +is faster, as it doesn't link the binaries and uses the non-optimizing +compiler. ## Targets @@ -214,3 +220,12 @@ useful to Coq, some examples are: - Cross-compilation. - Automatic Generation of OPAM files. - Multi-directory libraries. + +## FAQ + +- I get "Error: Dynlink error: Interface mismatch": + + You are likely running a partial build which doesn't include + implicitly loaded plugins / vo files. See the "Running binaries + [coqtop / coqide]" section above as to how to correctly call Coq's + binaries. 
diff --git a/doc/changelog/03-notations/11602-master+support-only-parsing-where-clause.rst b/doc/changelog/03-notations/11602-master+support-only-parsing-where-clause.rst new file mode 100644 index 0000000000..1d30d16664 --- /dev/null +++ b/doc/changelog/03-notations/11602-master+support-only-parsing-where-clause.rst @@ -0,0 +1,6 @@ +- **Added:** + Notations declared with the ``where`` clause in the declaration of + inductive types, coinductive types, record fields, fixpoints and + cofixpoints now support the ``only parsing`` modifier + (`#11602 <https://github.com/coq/coq/pull/11602>`_, + by Hugo Herbelin). diff --git a/doc/changelog/04-tactics/11429-zify-optimisation.rst b/doc/changelog/04-tactics/11429-zify-optimisation.rst new file mode 100644 index 0000000000..ce5bfa4aad --- /dev/null +++ b/doc/changelog/04-tactics/11429-zify-optimisation.rst @@ -0,0 +1,3 @@ +- **Changed:** + Improve the efficiency of `zify` by rewriting the remaining Ltac code in OCaml. + (`#11429 <https://github.com/coq/coq/pull/11429>`_, by Frédéric Besson). diff --git a/doc/changelog/04-tactics/11522-master+pose-proof-wo-as-syntax.rst b/doc/changelog/04-tactics/11522-master+pose-proof-wo-as-syntax.rst new file mode 100644 index 0000000000..3dd103b115 --- /dev/null +++ b/doc/changelog/04-tactics/11522-master+pose-proof-wo-as-syntax.rst @@ -0,0 +1,6 @@ +- **Added:** + Syntax :n:`pose proof (@ident:=@term)` as an + alternative to :n:`pose proof @term as @ident`, following the model of + :n:`pose (@ident:=@term)`. See documentation of :tacn:`pose proof` + (`#11522 <https://github.com/coq/coq/pull/11522>`_, + by Hugo Herbelin). 
diff --git a/doc/changelog/09-coqide/10008-snyke7+escape_spaces.rst b/doc/changelog/09-coqide/10008-snyke7+escape_spaces.rst new file mode 100644 index 0000000000..99b1592fb3 --- /dev/null +++ b/doc/changelog/09-coqide/10008-snyke7+escape_spaces.rst @@ -0,0 +1,4 @@ +- **Fixed:** + Fix file paths containing spaces when compiling + (`#10008 <https://github.com/coq/coq/pull/10008>`_, + by snyke7, fixing `#11595 <https://github.com/coq/coq/pull/11595>`_). diff --git a/doc/changelog/11-infrastructure-and-dependencies/11618-loadpath+split_ml_handling.rst b/doc/changelog/11-infrastructure-and-dependencies/11618-loadpath+split_ml_handling.rst new file mode 100644 index 0000000000..77fa556321 --- /dev/null +++ b/doc/changelog/11-infrastructure-and-dependencies/11618-loadpath+split_ml_handling.rst @@ -0,0 +1,9 @@ +- **Changed:** + Recursive OCaml loadpaths are not supported anymore; the command + ``Add Rec ML Path`` has been removed; :cmd:`Add ML Path` is now the + preferred one. We have also dropped support for the non-qualified + version of the :cmd:`Add LoadPath` command, that is to say, + the ``Add LoadPath dir`` version; now, + you must always specify a prefix now using ``Add Loadpath dir as Prefix.`` + (`#11618 <https://github.com/coq/coq/pull/11618>`_, + by Emilio Jesus Gallego Arias). diff --git a/doc/sphinx/README.rst b/doc/sphinx/README.rst index a34b2d5195..89b4bda71a 100644 --- a/doc/sphinx/README.rst +++ b/doc/sphinx/README.rst @@ -34,14 +34,14 @@ Names (link targets) are auto-generated for most simple objects, though they can Most objects should have a body (i.e. a block of indented text following the signature, called “contents” in Sphinx terms). Undocumented objects should have the ``:undocumented:`` flag instead, as shown above. When multiple objects have a single description, they can be grouped into a single object, like this (semicolons can be used to separate the names of the objects; names starting with ``_`` will be omitted from the indexes):: - .. 
cmdv:: Lemma @ident {? @binders} : @type - Remark @ident {? @binders} : @type - Fact @ident {? @binders} : @type - Corollary @ident {? @binders} : @type - Proposition @ident {? @binders} : @type + .. cmdv:: Lemma @ident {* @binder } : @type + Remark @ident {* @binder } : @type + Fact @ident {* @binder } : @type + Corollary @ident {* @binder } : @type + Proposition @ident {* @binder } : @type :name: Lemma; Remark; Fact; Corollary; Proposition - These commands are all synonyms of :n:`Theorem @ident {? @binders } : type`. + These commands are all synonyms of :n:`Theorem @ident {* @binder } : type`. Notations --------- @@ -89,10 +89,15 @@ Objects Here is the list of all objects of the Coq domain (The symbol :black_nib: indicates an object whose signature can be written using the notations DSL): +``.. attr::`` :black_nib: An attribute. + Example:: + + .. attr:: local + ``.. cmd::`` :black_nib: A Coq command. Example:: - .. cmd:: Infix "@symbol" := @term ({+, @modifier}). + .. cmd:: Infix @string := @term1_extended {? ( {+, @syntax_modifier } ) } {? : @ident } This command is equivalent to :n:`…`. diff --git a/doc/sphinx/README.template.rst b/doc/sphinx/README.template.rst index 51d5174567..c5e0007e78 100644 --- a/doc/sphinx/README.template.rst +++ b/doc/sphinx/README.template.rst @@ -34,14 +34,14 @@ Names (link targets) are auto-generated for most simple objects, though they can Most objects should have a body (i.e. a block of indented text following the signature, called “contents” in Sphinx terms). Undocumented objects should have the ``:undocumented:`` flag instead, as shown above. When multiple objects have a single description, they can be grouped into a single object, like this (semicolons can be used to separate the names of the objects; names starting with ``_`` will be omitted from the indexes):: - .. cmdv:: Lemma @ident {? @binders} : @type - Remark @ident {? @binders} : @type - Fact @ident {? @binders} : @type - Corollary @ident {? 
@binders} : @type - Proposition @ident {? @binders} : @type + .. cmdv:: Lemma @ident {* @binder } : @type + Remark @ident {* @binder } : @type + Fact @ident {* @binder } : @type + Corollary @ident {* @binder } : @type + Proposition @ident {* @binder } : @type :name: Lemma; Remark; Fact; Corollary; Proposition - These commands are all synonyms of :n:`Theorem @ident {? @binders } : type`. + These commands are all synonyms of :n:`Theorem @ident {* @binder } : type`. Notations --------- diff --git a/doc/sphinx/addendum/generalized-rewriting.rst b/doc/sphinx/addendum/generalized-rewriting.rst index 9f5741f72a..94ab6e789c 100644 --- a/doc/sphinx/addendum/generalized-rewriting.rst +++ b/doc/sphinx/addendum/generalized-rewriting.rst @@ -170,7 +170,7 @@ compatibility constraints. Adding new relations and morphisms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmd:: Add Parametric Relation @binders : (A t1 ... tn) (Aeq t′1 ... t′m) {? reflexivity proved by @term} {? symmetry proved by @term} {? transitivity proved by @term} as @ident +.. cmd:: Add Parametric Relation {* @binder } : (A t1 ... tn) (Aeq t′1 ... t′m) {? reflexivity proved by @term} {? symmetry proved by @term} {? transitivity proved by @term} as @ident This command declares a parametric relation :g:`Aeq: forall (y1 : β1 ... ym : βm)`, :g:`relation (A t1 ... tn)` over :g:`(A : αi -> ... αn -> Type)`. @@ -219,7 +219,7 @@ replace terms with related ones only in contexts that are syntactic compositions of parametric morphism instances declared with the following command. -.. cmd:: Add Parametric Morphism @binders : (@ident {+ @term__1}) with signature @term__2 as @ident +.. cmd:: Add Parametric Morphism {* @binder } : (@ident {+ @term__1}) with signature @term__2 as @ident This command declares a parametric morphism :n:`@ident {+ @term__1}` of signature :n:`@term__2`. 
The final identifier :token:`ident` gives a unique diff --git a/doc/sphinx/addendum/implicit-coercions.rst b/doc/sphinx/addendum/implicit-coercions.rst index 19b33f0d90..b007509b2e 100644 --- a/doc/sphinx/addendum/implicit-coercions.rst +++ b/doc/sphinx/addendum/implicit-coercions.rst @@ -198,7 +198,7 @@ Figure :ref:`vernacular` as follows: \comindex{Hypothesis \mbox{\rm (and coercions)}} .. productionlist:: - assumption : `assumption_keyword` `assums` . + assumption : `assumption_token` `assums` . assums : `simple_assums` : (`simple_assums`) ... (`simple_assums`) simple_assums : `ident` ... `ident` :[>] `term` @@ -215,12 +215,6 @@ grammar of inductive types from Figure :ref:`vernacular` as follows: \comindex{Inductive \mbox{\rm (and coercions)}} \comindex{CoInductive \mbox{\rm (and coercions)}} -.. productionlist:: - inductive : Inductive `ind_body` with ... with `ind_body` - : CoInductive `ind_body` with ... with `ind_body` - ind_body : `ident` [ `binders` ] : `term` := [[|] `constructor` | ... | `constructor` ] - constructor : `ident` [ `binders` ] [:[>] `term` ] - Especially, if the extra ``>`` is present in a constructor declaration, this constructor is declared as a coercion. @@ -240,7 +234,7 @@ declaration, this constructor is declared as a coercion. Same as :cmd:`Identity Coercion` but locally to the current section. - .. cmdv:: SubClass @ident := @type + .. cmd:: SubClass @ident_decl @def_body :name: SubClass If :n:`@type` is a class :n:`@ident'` applied to some arguments then @@ -251,7 +245,7 @@ declaration, this constructor is declared as a coercion. :n:`Definition @ident := @type.` :n:`Identity Coercion Id_@ident_@ident' : @ident >-> @ident'`. - .. cmdv:: Local SubClass @ident := @type + .. cmdv:: Local SubClass @ident_decl @def_body Same as before but locally to the current section. @@ -299,7 +293,7 @@ Classes as Records We allow the definition of *Structures with Inheritance* (or classes as records) by extending the existing :cmd:`Record` macro. 
Its new syntax is: -.. cmdv:: Record {? >} @ident {? @binders} : @sort := {? @ident} { {+; @ident :{? >} @term } } +.. cmdv:: {| Record | Structure } {? >} @ident {* @binder } : @sort := {? @ident} { {+; @ident :{? >} @term } } The first identifier :token:`ident` is the name of the defined record and :token:`sort` is its type. The optional identifier after ``:=`` is the name @@ -315,12 +309,6 @@ by extending the existing :cmd:`Record` macro. Its new syntax is: (this may fail if the uniform inheritance condition is not satisfied). -.. cmdv:: Structure {? >} @ident {? @binders} : @sort := {? @ident} { {+; @ident :{? >} @term } } - :name: Structure - - This is a synonym of :cmd:`Record`. - - Coercions and Sections ---------------------- diff --git a/doc/sphinx/addendum/program.rst b/doc/sphinx/addendum/program.rst index a17dca1693..549249d25c 100644 --- a/doc/sphinx/addendum/program.rst +++ b/doc/sphinx/addendum/program.rst @@ -174,7 +174,7 @@ Program Definition .. exn:: In environment … the term: @term does not have type @type. Actually, it has type ... :undocumented: - .. cmdv:: Program Definition @ident @binders : @type := @term + .. cmdv:: Program Definition @ident {* @binder } : @type := @term This is equivalent to: @@ -189,7 +189,7 @@ Program Definition Program Fixpoint ~~~~~~~~~~~~~~~~ -.. cmd:: Program Fixpoint @ident @binders {? {@order}} : @type := @term +.. cmd:: Program Fixpoint @ident {* @binder } {? {@order}} : @type := @term The optional order annotation follows the grammar: diff --git a/doc/sphinx/addendum/type-classes.rst b/doc/sphinx/addendum/type-classes.rst index 57a2254100..af4e9051bb 100644 --- a/doc/sphinx/addendum/type-classes.rst +++ b/doc/sphinx/addendum/type-classes.rst @@ -295,14 +295,18 @@ the Existing Instance command to achieve the same effect. Summary of the commands ----------------------- -.. cmd:: Class @ident {? @binders} : {? @sort} := {? @ident} { {+; @ident :{? >} @term } } +.. 
cmd:: Class @inductive_definition {* with @inductive_definition } The :cmd:`Class` command is used to declare a typeclass with parameters :token:`binders` and fields the declared record fields. + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + .. _singleton-class: - .. cmdv:: Class @ident {? @binders} : {? @sort} := @ident : @term + .. cmdv:: Class @ident {* @binder } : {? @sort} := @ident : @term This variant declares a *singleton* class with a single method. This singleton class is a so-called definitional class, represented simply @@ -324,7 +328,7 @@ Summary of the commands This command has no effect when used on a typeclass. -.. cmd:: Instance @ident {? @binders} : @term__0 {+ @term} {? | @num} := { {*; @field_def} } +.. cmd:: Instance @ident {* @binder } : @term__0 {+ @term} {? | @num} := { {*; @field_def} } This command is used to declare a typeclass instance named :token:`ident` of the class :n:`@term__0` with parameters :token:`term` and @@ -337,10 +341,10 @@ Summary of the commands :tacn:`auto` hints. If the priority :token:`num` is not specified, it defaults to the number of non-dependent binders of the instance. - .. cmdv:: Instance @ident {? @binders} : forall {? @binders}, @term__0 {+ @term} {? | @num } := @term + .. cmdv:: Instance @ident {* @binder } : forall {* @binder }, @term__0 {+ @term} {? | @num } := @term This syntax is used for declaration of singleton class instances or - for directly giving an explicit term of type :n:`forall @binders, @term__0 + for directly giving an explicit term of type :n:`forall {* @binder }, @term__0 {+ @term}`. One need not even mention the unique field name for singleton classes. 
diff --git a/doc/sphinx/addendum/universe-polymorphism.rst b/doc/sphinx/addendum/universe-polymorphism.rst index f9cc25959c..c069782add 100644 --- a/doc/sphinx/addendum/universe-polymorphism.rst +++ b/doc/sphinx/addendum/universe-polymorphism.rst @@ -152,6 +152,8 @@ Many other commands support the ``Polymorphic`` flag, including: - :cmd:`Hint Resolve` and :cmd:`Hint Rewrite` will use the auto/rewrite hint polymorphically, not at a single instance. +.. _cumulative: + Cumulative, NonCumulative ------------------------- diff --git a/doc/sphinx/conf.py b/doc/sphinx/conf.py index 77cb3ecc21..22102aa3ab 100755 --- a/doc/sphinx/conf.py +++ b/doc/sphinx/conf.py @@ -183,16 +183,21 @@ todo_include_todos = False nitpicky = True nitpick_ignore = [ ('token', token) for token in [ + 'binders', 'collection', 'command', + 'definition', 'dirpath', + 'inductive', + 'ind_body', 'modpath', 'module', - 'red_expr', + 'simple_tactic', 'symbol', 'tactic', 'term_pattern', 'term_pattern_string', + 'toplevel_selector', ]] # -- Options for HTML output ---------------------------------------------- diff --git a/doc/sphinx/coq-cmdindex.rst b/doc/sphinx/coq-cmdindex.rst index fd0b342ae4..18d2e379ac 100644 --- a/doc/sphinx/coq-cmdindex.rst +++ b/doc/sphinx/coq-cmdindex.rst @@ -2,6 +2,8 @@ .. hack to get index in TOC +.. _command_index: + ----------------- Command index ----------------- diff --git a/doc/sphinx/coq-tacindex.rst b/doc/sphinx/coq-tacindex.rst index 31b2f7f8cb..cddcdf83e8 100644 --- a/doc/sphinx/coq-tacindex.rst +++ b/doc/sphinx/coq-tacindex.rst @@ -2,6 +2,8 @@ .. hack to get index in TOC +.. _tactic_index: + ------------- Tactic index ------------- diff --git a/doc/sphinx/language/coq-library.rst b/doc/sphinx/language/coq-library.rst index 80f209fcf1..39f2ccec29 100644 --- a/doc/sphinx/language/coq-library.rst +++ b/doc/sphinx/language/coq-library.rst @@ -23,7 +23,7 @@ These libraries and developments are available for download at http://coq.inria.fr (see :ref:`userscontributions`). 
This chapter briefly reviews the |Coq| libraries whose contents can -also be browsed at http://coq.inria.fr/stdlib. +also be browsed at http://coq.inria.fr/stdlib/. diff --git a/doc/sphinx/language/gallina-extensions.rst b/doc/sphinx/language/gallina-extensions.rst index 9686500a35..b9e181dd94 100644 --- a/doc/sphinx/language/gallina-extensions.rst +++ b/doc/sphinx/language/gallina-extensions.rst @@ -24,22 +24,31 @@ expressions. In this sense, the :cmd:`Record` construction allows defining record : `record_keyword` `record_body` with … with `record_body` record_keyword : Record | Inductive | CoInductive record_body : `ident` [ `binders` ] [: `sort` ] := [ `ident` ] { [ `field` ; … ; `field` ] }. - field : `ident` [ `binders` ] : `type` [ where `notation` ] + field : `ident` [ `binders` ] : `type` [ `decl_notations` ] : `ident` [ `binders` ] [: `type` ] := `term` -.. cmd:: Record @ident @binders {? : @sort} := {? @ident} { {*; @ident @binders : @type } } +.. cmd:: {| Record | Structure } @inductive_definition {* with @inductive_definition } + :name: Record; Structure The first identifier :token:`ident` is the name of the defined record and :token:`sort` is its type. The optional identifier following ``:=`` is the name of its constructor. If it is omitted, the default name :n:`Build_@ident`, where :token:`ident` is the record name, is used. If :token:`sort` is omitted, the default sort is :math:`\Type`. The identifiers inside the brackets are the names of - fields. For a given field :token:`ident`, its type is :n:`forall @binders, @type`. - Remark that the type of a particular identifier may depend on a previously-given identifier. Thus the - order of the fields is important. Finally, :token:`binders` are parameters of the record. + fields. For a given field :token:`ident`, its type is :n:`forall {* @binder }, @type`. + Notice that the type of a particular identifier may depend on a previously-given identifier. Thus the + order of the fields is important. 
The record can depend as a whole on parameters :token:`binders` + and each field can also depend on its own :token:`binders`. Finally, notations can be attached to + fields using the :n:`decl_notations` annotation. + + :cmd:`Record` and :cmd:`Structure` are synonyms. + + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. More generally, a record may have explicitly defined (a.k.a. manifest) fields. For instance, we might have: -:n:`Record @ident @binders : @sort := { @ident__1 : @type__1 ; @ident__2 := @term__2 ; @ident__3 : @type__3 }`. +:n:`Record @ident {* @binder } : @sort := { @ident__1 : @type__1 ; @ident__2 := @term__2 ; @ident__3 : @type__3 }`. in which case the correctness of :n:`@type__3` may rely on the instance :n:`@term__2` of :n:`@ident__2` and :n:`@term__2` may in turn depend on :n:`@ident__1`. .. example:: @@ -62,7 +71,7 @@ in which case the correctness of :n:`@type__3` may rely on the instance :n:`@ter Let us now see the work done by the ``Record`` macro. First the macro generates a variant type definition with just one constructor: -:n:`Variant @ident {? @binders } : @sort := @ident__0 {? @binders }`. +:n:`Variant @ident {* @binder } : @sort := @ident__0 {* @binder }`. To build an object of type :token:`ident`, one should provide the constructor :n:`@ident__0` with the appropriate number of terms filling the fields of the record. @@ -183,8 +192,6 @@ other arguments are the parameters of the inductive type. defined with the ``Record`` keyword can be activated with the :flag:`Nonrecursive Elimination Schemes` flag (see :ref:`proofschemes-induction-principles`). -.. note:: ``Structure`` is a synonym of the keyword ``Record``. - .. warn:: @ident cannot be defined. It can happen that the definition of a projection is impossible. @@ -696,7 +703,7 @@ used by ``Function``. 
A more precise description is given below. .. cmdv:: Function @ident {* @binder } : @type := @term - Defines the not recursive function :token:`ident` as if declared with + Defines the nonrecursive function :token:`ident` as if it was declared with :cmd:`Definition`. Moreover the following are defined: + :token:`ident`\ ``_rect``, :token:`ident`\ ``_rec`` and :token:`ident`\ ``_ind``, @@ -817,32 +824,6 @@ Sections create local contexts which can be shared across multiple definitions. Most commands, like :cmd:`Hint`, :cmd:`Notation`, option management, … which appear inside a section are canceled when the section is closed. -.. cmd:: Variable @ident : @type - - This command links :token:`type` to the name :token:`ident` in the context of - the current section. When the current section is closed, name :token:`ident` - will be unknown and every object using this variable will be explicitly - parameterized (the variable is *discharged*). - - .. exn:: @ident already exists. - :name: @ident already exists. (Variable) - :undocumented: - - .. cmdv:: Variable {+ @ident } : @type - - Links :token:`type` to each :token:`ident`. - - .. cmdv:: Variable {+ ( {+ @ident } : @type ) } - - Declare one or more variables with various types. - - .. cmdv:: Variables {+ ( {+ @ident } : @type) } - Hypothesis {+ ( {+ @ident } : @type) } - Hypotheses {+ ( {+ @ident } : @type) } - :name: Variables; Hypothesis; Hypotheses - - These variants are synonyms of :n:`Variable {+ ( {+ @ident } : @type) }`. - .. cmd:: Let @ident := @term This command binds the value :token:`term` to the name :token:`ident` in the @@ -855,7 +836,7 @@ Sections create local contexts which can be shared across multiple definitions. :name: @ident already exists. (Let) :undocumented: - .. cmdv:: Let @ident {? @binders } {? : @type } := @term + .. cmdv:: Let @ident {* @binder } {? : @type } := @term :undocumented: .. 
cmdv:: Let Fixpoint @ident @fix_body {* with @fix_body} @@ -866,7 +847,7 @@ Sections create local contexts which can be shared across multiple definitions. :name: Let CoFixpoint :undocumented: -.. cmd:: Context @binders +.. cmd:: Context {* @binder } Declare variables in the context of the current section, like :cmd:`Variable`, but also allowing implicit variables, :ref:`implicit-generalization`, and @@ -1011,7 +992,7 @@ Reserved commands inside an interactive module type: This is a shortcut for the command :n:`Include @module` for each :token:`module`. -.. cmd:: @assumption_keyword Inline @assums +.. cmd:: @assumption_token Inline @assums :name: Inline The instance of this assumption will be automatically expanded at functor application, except when @@ -1673,11 +1654,11 @@ The syntax is also supported in internal binders. For instance, in the following kinds of expressions, the type of each declaration present in :token:`binders` can be bracketed to mark the declaration as implicit: -:n:`fun (@ident:forall @binders, @type) => @term`, -:n:`forall (@ident:forall @binders, @type), @type`, -:n:`let @ident @binders := @term in @term`, -:n:`fix @ident @binders := @term in @term` and -:n:`cofix @ident @binders := @term in @term`. +:n:`fun (@ident:forall {* @binder }, @type) => @term`, +:n:`forall (@ident:forall {* @binder }, @type), @type`, +:n:`let @ident {* @binder } := @term in @term`, +:n:`fix @ident {* @binder } := @term in @term` and +:n:`cofix @ident {* @binder } := @term in @term`. Here is an example: .. coqtop:: all diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst index 7ce4538a4d..e12ff1ba98 100644 --- a/doc/sphinx/language/gallina-specification-language.rst +++ b/doc/sphinx/language/gallina-specification-language.rst @@ -49,11 +49,11 @@ Blanks Comments Comments are enclosed between ``(*`` and ``*)``. They can be nested. - They can contain any character. 
However, embedded :token:`string` literals must be + They can contain any character. However, embedded :n:`@string` literals must be correctly closed. Comments are treated as blanks. Identifiers - Identifiers, written :token:`ident`, are sequences of letters, digits, ``_`` and + Identifiers, written :n:`@ident`, are sequences of letters, digits, ``_`` and ``'``, that do not start with a digit or ``'``. That is, they are recognized by the following grammar (except that the string ``_`` is reserved; it is not a valid identifier): @@ -74,8 +74,8 @@ Identifiers Numerals Numerals are sequences of digits with an optional fractional part - and exponent, optionally preceded by a minus sign. :token:`int` is an integer; - a numeral without fractional or exponent parts. :token:`num` is a non-negative + and exponent, optionally preceded by a minus sign. :n:`@int` is an integer; + a numeral without fractional or exponent parts. :n:`@num` is a non-negative integer. Underscores embedded in the digits are ignored, for example ``1_000_000`` is the same as ``1000000``. @@ -175,12 +175,19 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`. | ltac : ( @ltac_expr ) field_def ::= @qualid {* @binder } := @term +.. _types: + Types ----- -Coq terms are typed. Coq types are recognized by the same syntactic -class as :token:`term`. We denote by :production:`type` the semantic subclass -of types inside the syntactic class :token:`term`. +.. prodn:: + type ::= @term + +:n:`@type`\s are a subset of :n:`@term`\s; not every :n:`@term` is a :n:`@type`. +Every term has an associated type, which +can be determined by applying the :ref:`typing-rules`. Distinct terms +may share the same type, for example 0 and 1 are both of type `nat`, the +natural numbers. .. 
_gallina-identifiers: @@ -193,14 +200,14 @@ Qualified identifiers and simple identifiers qualid ::= @ident {* @field_ident } field_ident ::= .@ident -*Qualified identifiers* (:token:`qualid`) denote *global constants* +*Qualified identifiers* (:n:`@qualid`) denote *global constants* (definitions, lemmas, theorems, remarks or facts), *global variables* (parameters or axioms), *inductive types* or *constructors of inductive -types*. *Simple identifiers* (or shortly :token:`ident`) are a syntactic subset +types*. *Simple identifiers* (or shortly :n:`@ident`) are a syntactic subset of qualified identifiers. Identifiers may also denote *local variables*, while qualified identifiers do not. -Field identifiers, written :token:`field_ident`, are identifiers prefixed by +Field identifiers, written :n:`@field_ident`, are identifiers prefixed by `.` (dot) with no blank between the dot and the identifier. @@ -215,7 +222,7 @@ numbers (see :ref:`datatypes`). .. note:: - Negative integers are not at the same level as :token:`num`, for this + Negative integers are not at the same level as :n:`@num`, for this would make precedence unnatural. .. index:: @@ -227,7 +234,7 @@ numbers (see :ref:`datatypes`). Sorts ----- -.. insertprodn sort univ_annot +.. insertprodn sort univ_constraint .. prodn:: sort ::= Set @@ -242,12 +249,14 @@ Sorts universe_name ::= @qualid | Set | Prop + univ_annot ::= @%{ {* @universe_level } %} universe_level ::= Set | Prop | Type | _ | @qualid - univ_annot ::= @%{ {* @universe_level } %} + univ_decl ::= @%{ {* @ident } {? + } {? %| {*, @univ_constraint } {? + } } %} + univ_constraint ::= @universe_name {| < | = | <= } @universe_name There are four sorts :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. @@ -255,13 +264,13 @@ There are four sorts :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. propositions* (also called *strict propositions*). - :g:`Prop` is the universe of *logical propositions*. The logical propositions - themselves are typing the proofs. 
We denote propositions by :token:`form`. - This constitutes a semantic subclass of the syntactic class :token:`term`. + themselves are typing the proofs. We denote propositions by :n:`@form`. + This constitutes a semantic subclass of the syntactic class :n:`@term`. - :g:`Set` is the universe of *program types* or *specifications*. The specifications themselves are typing the programs. We denote - specifications by :token:`specif`. This constitutes a semantic subclass of - the syntactic class :token:`term`. + specifications by :n:`@specif`. This constitutes a semantic subclass of + the syntactic class :n:`@term`. - :g:`Type` is the type of sorts. @@ -280,15 +289,15 @@ Binders name ::= _ | @ident binder ::= @name - | ( {+ @name } : @term ) - | ( @name {? : @term } := @term ) - | %{ {+ @name } {? : @term } %} - | [ {+ @name } {? : @term } ] + | ( {+ @name } : @type ) + | ( @name {? : @type } := @term ) + | ( @name : @type %| @term ) + | %{ {+ @name } {? : @type } %} + | [ {+ @name } {? : @type } ] | `( {+, @typeclass_constraint } ) | `%{ {+, @typeclass_constraint } %} | `[ {+, @typeclass_constraint } ] | ' @pattern0 - | ( @name : @term %| @term ) typeclass_constraint ::= {? ! } @term | %{ @name %} : {? ! } @term | @name : {? ! } @term @@ -303,14 +312,14 @@ a notation for a sequence of binding variables sharing the same type: binder can also be any pattern prefixed by a quote, e.g. :g:`'(x,y)`. Some constructions allow the binding of a variable to value. This is -called a “let-binder”. The entry :token:`binder` of the grammar accepts +called a “let-binder”. The entry :n:`@binder` of the grammar accepts either an assumption binder as defined above or a let-binder. The notation in the latter case is :n:`(@ident := @term)`. In a let-binder, only one variable can be introduced at the same time. It is also possible to give the type of the variable as follows: :n:`(@ident : @type := @term)`. -Lists of :token:`binder`\s are allowed. 
In the case of :g:`fun` and :g:`forall`, +Lists of :n:`@binder`\s are allowed. In the case of :g:`fun` and :g:`forall`, it is intended that at least one binder of the list is an assumption otherwise fun and forall gets identical. Moreover, parentheses can be omitted in the case of a single sequence of bindings sharing the same type (e.g.: @@ -322,9 +331,9 @@ Abstractions: fun ----------------- The expression :n:`fun @ident : @type => @term` defines the -*abstraction* of the variable :token:`ident`, of type :token:`type`, over the term -:token:`term`. It denotes a function of the variable :token:`ident` that evaluates to -the expression :token:`term` (e.g. :g:`fun x : A => x` denotes the identity +*abstraction* of the variable :n:`@ident`, of type :n:`@type`, over the term +:n:`@term`. It denotes a function of the variable :n:`@ident` that evaluates to +the expression :n:`@term` (e.g. :g:`fun x : A => x` denotes the identity function on type :g:`A`). The keyword :g:`fun` can be followed by several binders as given in Section :ref:`binders`. Functions over several variables are equivalent to an iteration of one-variable @@ -341,12 +350,12 @@ Products: forall ---------------- The expression :n:`forall @ident : @type, @term` denotes the -*product* of the variable :token:`ident` of type :token:`type`, over the term :token:`term`. +*product* of the variable :n:`@ident` of type :n:`@type`, over the term :n:`@term`. As for abstractions, :g:`forall` is followed by a binder list, and products over several variables are equivalent to an iteration of one-variable -products. Note that :token:`term` is intended to be a type. +products. Note that :n:`@term` is intended to be a type. -If the variable :token:`ident` occurs in :token:`term`, the product is called +If the variable :n:`@ident` occurs in :n:`@term`, the product is called *dependent product*. The intention behind a dependent product :g:`forall x : A, B` is twofold. 
It denotes either the universal quantification of the variable :g:`x` of type :g:`A` @@ -391,13 +400,13 @@ Type cast | @term10 :> The expression :n:`@term : @type` is a type cast expression. It enforces -the type of :token:`term` to be :token:`type`. +the type of :n:`@term` to be :n:`@type`. :n:`@term <: @type` locally sets up the virtual machine for checking that -:token:`term` has type :token:`type`. +:n:`@term` has type :n:`@type`. -:n:`@term <<: @type` uses native compilation for checking that :token:`term` -has type :token:`type`. +:n:`@term <<: @type` uses native compilation for checking that :n:`@term` +has type :n:`@type`. .. index:: _ @@ -418,15 +427,15 @@ Let-in definitions .. insertprodn term_let term_let .. prodn:: - term_let ::= let @name {? : @term } := @term in @term - | let @name {+ @binder } {? : @term } := @term in @term + term_let ::= let @name {? : @type } := @term in @term + | let @name {+ @binder } {? : @type } := @term in @term | let ( {*, @name } ) {? {? as @name } return @term100 } := @term in @term | let ' @pattern := @term {? return @term100 } in @term | let ' @pattern in @pattern := @term return @term100 in @term :n:`let @ident := @term in @term’` -denotes the local binding of :token:`term` to the variable -:token:`ident` in :token:`term`’. There is a syntactic sugar for let-in +denotes the local binding of :n:`@term` to the variable +:n:`@ident` in :n:`@term`’. There is a syntactic sugar for let-in definition of functions: :n:`let @ident {+ @binder} := @term in @term’` stands for :n:`let @ident := fun {+ @binder} => @term in @term’`. @@ -463,8 +472,8 @@ to apply specific treatments accordingly. This paragraph describes the basic form of pattern matching. See Section :ref:`Mult-match` and Chapter :ref:`extendedpatternmatching` for the description of the general form. 
The basic form of pattern matching is characterized -by a single :token:`case_item` expression, an :token:`eqn` restricted to a -single :token:`pattern` and :token:`pattern` restricted to the form +by a single :n:`@case_item` expression, an :n:`@eqn` restricted to a +single :n:`@pattern` and :n:`@pattern` restricted to the form :n:`@qualid {* @ident}`. The expression @@ -486,7 +495,7 @@ In the *dependent* case, there are three subcases. In the first subcase, the type in each branch may depend on the exact value being matched in the branch. In this case, the whole pattern matching itself depends on the term being matched. This dependency of the term being matched in the -return type is expressed with an :n:`@ident` clause where :token:`ident` +return type is expressed with an :n:`@ident` clause where :n:`@ident` is dependent in the return type. For instance, in the following example: .. coqtop:: in @@ -538,7 +547,7 @@ dependency of the return type in the annotations of the inductive type is expressed with a clause in the form :n:`in @qualid {+ _ } {+ @pattern }`, where -- :token:`qualid` is the inductive type of the term being matched; +- :n:`@qualid` is the inductive type of the term being matched; - the holes :n:`_` match the parameters of the inductive type: the return type is not dependent on them. @@ -587,7 +596,7 @@ Recursive and co-recursive functions: fix and cofix .. prodn:: term_fix ::= let fix @fix_body in @term | fix @fix_body {? {+ with @fix_body } for @ident } - fix_body ::= @ident {* @binder } {? @fixannot } {? : @term } := @term + fix_body ::= @ident {* @binder } {? @fixannot } {? : @type } := @term fixannot ::= %{ struct @ident %} | %{ wf @term1_extended @ident %} | %{ measure @term1_extended {? @ident } {? @term1_extended } %} @@ -595,92 +604,55 @@ Recursive and co-recursive functions: fix and cofix | @ @qualid {? 
@univ_annot } -The expression “``fix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``:`` -:token:`type`:math:`_1` ``:=`` :token:`term`:math:`_1` ``with … with`` -:token:`ident`:math:`_n` :token:`binder`:math:`_n` : :token:`type`:math:`_n` -``:=`` :token:`term`:math:`_n` ``for`` :token:`ident`:math:`_i`” denotes the +The expression ":n:`fix @ident__1 @binder__1 : @type__1 := @term__1 with … with @ident__n @binder__n : @type__n := @term__n for @ident__i`" denotes the :math:`i`-th component of a block of functions defined by mutual structural recursion. It is the local counterpart of the :cmd:`Fixpoint` command. When -:math:`n=1`, the “``for`` :token:`ident`:math:`_i`” clause is omitted. +:math:`n=1`, the ":n:`for @ident__i`" clause is omitted. The association of a single fixpoint and a local definition have a special -syntax: :n:`let fix @ident @binders := @term in` stands for -:n:`let @ident := fix @ident @binders := @term in`. The same applies for co-fixpoints. +syntax: :n:`let fix @ident {* @binder } := @term in` stands for +:n:`let @ident := fix @ident {* @binder } := @term in`. The same applies for co-fixpoints. .. insertprodn term_cofix cofix_body .. prodn:: term_cofix ::= let cofix @cofix_body in @term | cofix @cofix_body {? {+ with @cofix_body } for @ident } - cofix_body ::= @ident {* @binder } {? : @term } := @term + cofix_body ::= @ident {* @binder } {? : @type } := @term -The expression “``cofix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``:`` -:token:`type`:math:`_1` ``with … with`` :token:`ident`:math:`_n` :token:`binder`:math:`_n` -: :token:`type`:math:`_n` ``for`` :token:`ident`:math:`_i`” denotes the -:math:`i`-th component of a block of terms defined by a mutual guarded +The expression +":n:`cofix @ident__1 @binder__1 : @type__1 with … with @ident__n @binder__n : @type__n for @ident__i`" +denotes the :math:`i`-th component of a block of terms defined by a mutual guarded co-recursion. 
It is the local counterpart of the :cmd:`CoFixpoint` command. When -:math:`n=1`, the “``for`` :token:`ident`:math:`_i`” clause is omitted. +:math:`n=1`, the ":n:`for @ident__i`" clause is omitted. .. _vernacular: The Vernacular ============== -.. insertgramXX vernac ident_opt2 - -.. productionlist:: coq - decorated-sentence : [ `decoration` … `decoration` ] `sentence` - sentence : `assumption` - : `definition` - : `inductive` - : `fixpoint` - : `assertion` `proof` - assumption : `assumption_keyword` `assums`. - assumption_keyword : Axiom | Conjecture - : Parameter | Parameters - : Variable | Variables - : Hypothesis | Hypotheses - assums : `ident` … `ident` : `term` - : ( `ident` … `ident` : `term` ) … ( `ident` … `ident` : `term` ) - definition : [Local] Definition `ident` [`binders`] [: `term`] := `term` . - : Let `ident` [`binders`] [: `term`] := `term` . - binders : binders binder - : binder - inductive : Inductive `ind_body` with … with `ind_body` . - : CoInductive `ind_body` with … with `ind_body` . - ind_body : `ident` [`binders`] : `term` := - : [[|] `ident` [`binders`] [:`term`] | … | `ident` [`binders`] [:`term`]] - fixpoint : Fixpoint `fix_body` with … with `fix_body` . - : CoFixpoint `fix_body` with … with `fix_body` . - assertion : `assertion_keyword` `ident` [`binders`] : `term` . - assertion_keyword : Theorem | Lemma - : Remark | Fact - : Corollary | Property | Proposition - : Definition | Example - proof : Proof . … Qed . - : Proof . … Defined . - : Proof . … Admitted . - decoration : #[ `attributes` ] - attributes : [`attribute`, … , `attribute`] - attribute : `ident` - : `ident` = `string` - : `ident` ( `attributes` ) - -.. todo:: This use of … in this grammar is inconsistent - What about removing the proof part of this grammar from this chapter - and putting it somewhere where top-level tactics can be described as well? - See also #7583. - -This grammar describes *The Vernacular* which is the language of -commands of Gallina. 
A sentence of the vernacular language, like in -many natural languages, begins with a capital letter and ends with a -dot. - -Sentences may be *decorated* with so-called *attributes*, -which are described in the corresponding section (:ref:`gallina-attributes`). - -The different kinds of command are described hereafter. They all suppose -that the terms occurring in the sentences are well-typed. +.. insertprodn vernacular vernacular + +.. prodn:: + vernacular ::= {* {? @all_attrs } {| @command | @ltac_expr } . } + +The top-level input to |Coq| is a series of :production:`command`\s and :production:`tactic`\s, +each terminated with a period +and optionally decorated with :ref:`gallina-attributes`. :n:`@ltac_expr` syntax supports both simple +and compound tactics. For example: ``split.`` is a simple tactic while ``split; auto.`` combines two +simple tactics. + +Tactics specify how to transform the current proof state as a step in creating a proof. They +are syntactically valid only when |Coq| is in proof mode, such as after a :cmd:`Theorem` command +and before any subsequent proof-terminating command such as :cmd:`Qed`. See :ref:`proofhandling` for more +on proof mode. + +By convention, command names begin with uppercase letters, while +tactic names begin with lowercase letters. Commands appear in the +HTML documentation in blue boxes after the label "Command". In the pdf, they appear +after the boldface label "Command:". Commands are listed in the :ref:`command_index`. + +Similarly, tactics appear after the label "Tactic". Tactics are listed in the :ref:`tactic_index`. .. _gallina-assumptions: @@ -688,73 +660,68 @@ Assumptions ----------- Assumptions extend the environment with axioms, parameters, hypotheses -or variables. An assumption binds an :token:`ident` to a :token:`type`. 
It is accepted -by Coq if and only if this :token:`type` is a correct type in the environment -preexisting the declaration and if :token:`ident` was not previously defined in -the same module. This :token:`type` is considered to be the type (or -specification, or statement) assumed by :token:`ident` and we say that :token:`ident` -has type :token:`type`. +or variables. An assumption binds an :n:`@ident` to a :n:`@type`. It is accepted +by Coq if and only if this :n:`@type` is a correct type in the environment +preexisting the declaration and if :n:`@ident` was not previously defined in +the same module. This :n:`@type` is considered to be the type (or +specification, or statement) assumed by :n:`@ident` and we say that :n:`@ident` +has type :n:`@type`. .. _Axiom: -.. cmd:: Parameter @ident : @type - - This command links :token:`type` to the name :token:`ident` as its specification in - the global context. The fact asserted by :token:`type` is thus assumed as a - postulate. - - .. exn:: @ident already exists. - :name: @ident already exists. (Axiom) - :undocumented: - - .. cmdv:: Parameter {+ @ident } : @type +.. cmd:: @assumption_token {? Inline {? ( @num ) } } {| {+ ( @assumpt ) } | @assumpt } + :name: Axiom; Axioms; Conjecture; Conjectures; Hypothesis; Hypotheses; Parameter; Parameters; Variable; Variables - Adds several parameters with specification :token:`type`. + .. insertprodn assumption_token of_type - .. cmdv:: Parameter {+ ( {+ @ident } : @type ) } + .. prodn:: + assumption_token ::= {| Axiom | Axioms } + | {| Conjecture | Conjectures } + | {| Parameter | Parameters } + | {| Hypothesis | Hypotheses } + | {| Variable | Variables } + assumpt ::= {+ @ident_decl } @of_type + ident_decl ::= @ident {? @univ_decl } + of_type ::= {| : | :> | :>> } @type - Adds blocks of parameters with different specifications. + These commands bind one or more :n:`@ident`\(s) to specified :n:`@type`\(s) as their specifications in + the global context. 
The fact asserted by the :n:`@type` (or, equivalently, the existence + of an object of this type) is accepted as a postulate. - .. cmdv:: Local Parameter {+ ( {+ @ident } : @type ) } - :name: Local Parameter + :cmd:`Axiom`, :cmd:`Conjecture`, :cmd:`Parameter` and their plural forms + are equivalent. They can take the :attr:`local` attribute (see :ref:`gallina-attributes`), + which makes the defined :n:`@ident`\s accessible by :cmd:`Import` and its variants + only through their fully qualified names. - Such parameters are never made accessible through their unqualified name by - :cmd:`Import` and its variants. You have to explicitly give their fully - qualified name to refer to them. + Similarly, :cmd:`Hypothesis`, :cmd:`Variable` and their plural forms are equivalent. Outside + of a section, these are equivalent to :n:`Local Parameter`. Inside a section, the + :n:`@ident`\s defined are only accessible within the section. When the current section + is closed, the :n:`@ident`\(s) become undefined and every object depending on them will be explicitly + parameterized (i.e., the variables are *discharged*). See Section :ref:`section-mechanism`. - .. cmdv:: {? Local } Parameters {+ ( {+ @ident } : @type ) } - {? Local } Axiom {+ ( {+ @ident } : @type ) } - {? Local } Axioms {+ ( {+ @ident } : @type ) } - {? Local } Conjecture {+ ( {+ @ident } : @type ) } - {? Local } Conjectures {+ ( {+ @ident } : @type ) } - :name: Parameters; Axiom; Axioms; Conjecture; Conjectures +.. example:: Simple assumptions - These variants are synonyms of :n:`{? Local } Parameter {+ ( {+ @ident } : @type ) }`. + .. coqtop:: reset in - .. cmdv:: Variable {+ ( {+ @ident } : @type ) } - Variables {+ ( {+ @ident } : @type ) } - Hypothesis {+ ( {+ @ident } : @type ) } - Hypotheses {+ ( {+ @ident } : @type ) } - :name: Variable (outside a section); Variables (outside a section); Hypothesis (outside a section); Hypotheses (outside a section) + Parameter X Y : Set. 
+ Parameter (R : X -> Y -> Prop) (S : Y -> X -> Prop). + Axiom R_S_inv : forall x y, R x y <-> S y x. - Outside of any section, these variants are synonyms of - :n:`Local Parameter {+ ( {+ @ident } : @type ) }`. - For their meaning inside a section, see :cmd:`Variable` in - :ref:`section-mechanism`. +.. exn:: @ident already exists. + :name: @ident already exists. (Axiom) + :undocumented: - .. warn:: @ident is declared as a local axiom [local-declaration,scope] +.. warn:: @ident is declared as a local axiom [local-declaration,scope] - Warning generated when using :cmd:`Variable` instead of - :cmd:`Local Parameter`. + Warning generated when using :cmd:`Variable` or its equivalent + instead of :n:`Local Parameter` or its equivalent. .. note:: - It is advised to use the commands :cmd:`Axiom`, :cmd:`Conjecture` and + We advise using the commands :cmd:`Axiom`, :cmd:`Conjecture` and :cmd:`Hypothesis` (and their plural forms) for logical postulates (i.e. when - the assertion :token:`type` is of sort :g:`Prop`), and to use the commands + the assertion :n:`@type` is of sort :g:`Prop`), and to use the commands :cmd:`Parameter` and :cmd:`Variable` (and their plural forms) in other cases - (corresponding to the declaration of an abstract mathematical entity). - -.. seealso:: Section :ref:`section-mechanism`. + (corresponding to the declaration of an abstract object of the given type). .. _gallina-definitions: @@ -777,85 +744,92 @@ type which is the type of its body. A formal presentation of constants and environments is given in Section :ref:`typing-rules`. -.. cmd:: Definition @ident := @term - - This command binds :token:`term` to the name :token:`ident` in the environment, - provided that :token:`term` is well-typed. - - .. exn:: @ident already exists. - :name: @ident already exists. (Definition) - :undocumented: - - .. 
cmdv:: Definition @ident : @type := @term - - This variant checks that the type of :token:`term` is definitionally equal to - :token:`type`, and registers :token:`ident` as being of type - :token:`type`, and bound to value :token:`term`. - - .. exn:: The term @term has type @type while it is expected to have type @type'. - :undocumented: - - .. cmdv:: Definition @ident @binders {? : @type } := @term +.. cmd:: {| Definition | Example } @ident_decl @def_body + :name: Definition; Example - This is equivalent to - :n:`Definition @ident : forall @binders, @type := fun @binders => @term`. + .. insertprodn def_body def_body - .. cmdv:: Local Definition @ident {? @binders } {? : @type } := @term - :name: Local Definition + .. prodn:: + def_body ::= {* @binder } {? : @type } := {? @reduce } @term + | {* @binder } : @type - Such definitions are never made accessible through their - unqualified name by :cmd:`Import` and its variants. - You have to explicitly give their fully qualified name to refer to them. + These commands bind :n:`@term` to the name :n:`@ident` in the environment, + provided that :n:`@term` is well-typed. They can take the :attr:`local` attribute (see :ref:`gallina-attributes`), + which makes the defined :n:`@ident` accessible by :cmd:`Import` and its variants + only through their fully qualified names. + If :n:`@reduce` is present then :n:`@ident` is bound to the result of the specified + computation on :n:`@term`. - .. cmdv:: {? Local } Example @ident {? @binders } {? : @type } := @term - :name: Example + If :n:`@term` is omitted, Coq enters the proof editing mode. This can be + used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. + In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant + for which the computational behavior is relevant. See :ref:`proof-editing-mode`. - This is equivalent to :cmd:`Definition`. 
+ The form :n:`Definition @ident : @type := @term` checks that the type of :n:`@term` + is definitionally equal to :n:`@type`, and registers :n:`@ident` as being of type + :n:`@type`, and bound to value :n:`@term`. - .. cmdv:: Let @ident := @term - :name: Let (outside a section) + The form :n:`Definition @ident {* @binder } : @type := @term` is equivalent to + :n:`Definition @ident : forall {* @binder }, @type := fun {* @binder } => @term`. - Outside of any section, this variant is a synonym of - :n:`Local Definition @ident := @term`. - For its meaning inside a section, see :cmd:`Let` in - :ref:`section-mechanism`. - - .. warn:: @ident is declared as a local definition [local-declaration,scope] + .. seealso:: :cmd:`Opaque`, :cmd:`Transparent`, :tacn:`unfold`. - Warning generated when using :cmd:`Let` instead of - :cmd:`Local Definition`. + .. exn:: @ident already exists. + :name: @ident already exists. (Definition) + :undocumented: -.. seealso:: Section :ref:`section-mechanism`, commands :cmd:`Opaque`, - :cmd:`Transparent`, and tactic :tacn:`unfold`. + .. exn:: The term @term has type @type while it is expected to have type @type'. + :undocumented: .. _gallina-inductive-definitions: -Inductive definitions ---------------------- - -We gradually explain simple inductive types, simple annotated inductive -types, simple parametric inductive types, mutually inductive types. We -explain also co-inductive types. - -Simple inductive types -~~~~~~~~~~~~~~~~~~~~~~ +Inductive types +--------------- -.. cmd:: Inductive @ident : {? @sort } := {? | } @ident : @type {* | @ident : @type } - - This command defines a simple inductive type and its constructors. - The first :token:`ident` is the name of the inductively defined type - and :token:`sort` is the universe where it lives. The next :token:`ident`\s - are the names of its constructors and :token:`type` their respective types. - Depending on the universe where the inductive type :token:`ident` lives - (e.g. 
its type :token:`sort`), Coq provides a number of destructors. - Destructors are named :token:`ident`\ ``_sind``,:token:`ident`\ ``_ind``, - :token:`ident`\ ``_rec`` or :token:`ident`\ ``_rect`` which respectively - correspond to elimination principles on :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. - The type of the destructors expresses structural induction/recursion - principles over objects of type :token:`ident`. - The constant :token:`ident`\ ``_ind`` is always provided, - whereas :token:`ident`\ ``_rec`` and :token:`ident`\ ``_rect`` can be - impossible to derive (for example, when :token:`ident` is a proposition). +.. cmd:: Inductive @inductive_definition {* with @inductive_definition } + + .. insertprodn inductive_definition field_body + + .. prodn:: + inductive_definition ::= {? > } @ident_decl {* @binder } {? %| {* @binder } } {? : @type } {? := {? @constructors_or_record } } {? @decl_notations } + constructors_or_record ::= {? %| } {+| @constructor } + | {? @ident } %{ {+; @record_field } %} + constructor ::= @ident {* @binder } {? @of_type } + record_field ::= {* #[ {*, @attr } ] } @name {? @field_body } {? %| @num } {? @decl_notations } + field_body ::= {* @binder } @of_type + | {* @binder } @of_type := @term + | {* @binder } := @term + + This command defines one or more + inductive types and its constructors. Coq generates destructors + depending on the universe that the inductive type belongs to. + + The destructors are named :n:`@ident`\ ``_rect``, :n:`@ident`\ ``_ind``, + :n:`@ident`\ ``_rec`` and :n:`@ident`\ ``_sind``, which + respectively correspond to elimination principles on :g:`Type`, :g:`Prop`, + :g:`Set` and :g:`SProp`. The type of the destructors + expresses structural induction/recursion principles over objects of + type :n:`@ident`. The constant :n:`@ident`\ ``_ind`` is always + generated, whereas :n:`@ident`\ ``_rec`` and :n:`@ident`\ ``_rect`` + may be impossible to derive (for example, when :n:`@ident` is a + proposition). 
+ + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + + Mutually inductive types can be defined by including multiple :n:`@inductive_definition`\s. + The :n:`@ident`\s are simultaneously added to the environment before the types of constructors are checked. + Each :n:`@ident` can be used independently thereafter. + See :ref:`mutually_inductive_types`. + + If the entire inductive definition is parameterized with :n:`@binder`\s, the parameters correspond + to a local context in which the entire set of inductive declarations is interpreted. + For this reason, the parameters must be strictly the same for each inductive type. + See :ref:`parametrized-inductive-types`. + + Constructor :n:`@ident`\s can come with :n:`@binder`\s, in which case + the actual type of the constructor is :n:`forall {* @binder }, @type`. .. exn:: Non strictly positive occurrence of @ident in @type. @@ -867,66 +841,71 @@ Simple inductive types .. exn:: The conclusion of @type is not valid; it must be built from @ident. The conclusion of the type of the constructors must be the inductive type - :token:`ident` being defined (or :token:`ident` applied to arguments in + :n:`@ident` being defined (or :n:`@ident` applied to arguments in the case of annotated inductive types — cf. next section). - .. example:: +The following subsections show examples of simple inductive types, simple annotated +inductive types, simple parametric inductive types and mutually inductive +types. - The set of natural numbers is defined as: +.. _simple-inductive-types: - .. coqtop:: all +Simple inductive types +~~~~~~~~~~~~~~~~~~~~~~ - Inductive nat : Set := - | O : nat - | S : nat -> nat. +A simple inductive type belongs to a universe that is a simple :n:`@sort`.
- The type nat is defined as the least :g:`Set` containing :g:`O` and closed by - the :g:`S` constructor. The names :g:`nat`, :g:`O` and :g:`S` are added to the - environment. +.. example:: - Now let us have a look at the elimination principles. They are three of them: - :g:`nat_ind`, :g:`nat_rec` and :g:`nat_rect`. The type of :g:`nat_ind` is: + The set of natural numbers is defined as: - .. coqtop:: all + .. coqtop:: reset all - Check nat_ind. + Inductive nat : Set := + | O : nat + | S : nat -> nat. - This is the well known structural induction principle over natural - numbers, i.e. the second-order form of Peano’s induction principle. It - allows proving some universal property of natural numbers (:g:`forall - n:nat, P n`) by induction on :g:`n`. + The type nat is defined as the least :g:`Set` containing :g:`O` and closed by + the :g:`S` constructor. The names :g:`nat`, :g:`O` and :g:`S` are added to the + environment. - The types of :g:`nat_rec` and :g:`nat_rect` are similar, except that they pertain - to :g:`(P:nat->Set)` and :g:`(P:nat->Type)` respectively. They correspond to - primitive induction principles (allowing dependent types) respectively - over sorts ``Set`` and ``Type``. + This definition generates four elimination principles: + :g:`nat_rect`, :g:`nat_ind`, :g:`nat_rec` and :g:`nat_sind`. The type of :g:`nat_ind` is: - .. cmdv:: Inductive @ident {? : @sort } := {? | } {*| @ident {? @binders } {? : @type } } + .. coqtop:: all - Constructors :token:`ident`\s can come with :token:`binders` in which case, - the actual type of the constructor is :n:`forall @binders, @type`. + Check nat_ind. - In the case where inductive types have no annotations (next section - gives an example of such annotations), a constructor can be defined - by only giving the type of its arguments. + This is the well known structural induction principle over natural + numbers, i.e. the second-order form of Peano’s induction principle. 
It + allows proving universal properties of natural numbers (:g:`forall + n:nat, P n`) by induction on :g:`n`. - The types of :g:`nat_rec` and :g:`nat_rect` are similar, except that they pertain - to :g:`(P:nat->Set)` and :g:`(P:nat->Type)` respectively. They correspond to - primitive induction principles (allowing dependent types) respectively - over sorts ``Set`` and ``Type``. + The types of :g:`nat_rect`, :g:`nat_rec` and :g:`nat_sind` are similar, except that they + apply to, respectively, :g:`(P:nat->Type)`, :g:`(P:nat->Set)` and :g:`(P:nat->SProp)`. They correspond to + primitive induction principles (allowing dependent types) respectively + over sorts ``Type``, ``Set`` and ``SProp``. -In the case where inductive types have no annotations (next section -gives an example of such annotations), a constructor can be defined -by only giving the type of its arguments. +In the case where inductive types don't have annotations (the next section +gives an example of annotations), a constructor can be defined +by giving the type of its arguments alone. - .. example:: +.. example:: - .. coqtop:: none +.. coqtop:: reset none - Reset nat. + Reset nat. - .. coqtop:: in + .. coqtop:: in - Inductive nat : Set := O | S (_:nat). + Inductive nat : Set := O | S (_:nat). Simple annotated inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In an annotated inductive types, the universe where the inductive type -is defined is no longer a simple sort, but what is called an arity, -which is a type whose conclusion is a sort. +In annotated inductive types, the universe where the inductive type +is defined is no longer a simple :n:`@sort`, but what is called an arity, +which is a type whose conclusion is a :n:`@sort`. .. example:: @@ -939,72 +918,74 @@ which is a type whose conclusion is a sort. | even_0 : even O | even_SS : forall n:nat, even n -> even (S (S n)). - The type :g:`nat->Prop` means that even is a unary predicate (inductively + The type :g:`nat->Prop` means that :g:`even` is a unary predicate (inductively defined) over natural numbers. The type of its two constructors are the - defining clauses of the predicate even. The type of :g:`even_ind` is: + defining clauses of the predicate :g:`even`. The type of :g:`even_ind` is: .. coqtop:: all Check even_ind.
- From a mathematical point of view it asserts that the natural numbers satisfying - the predicate even are exactly in the smallest set of naturals satisfying the + From a mathematical point of view, this asserts that the natural numbers satisfying + the predicate :g:`even` are exactly in the smallest set of naturals satisfying the clauses :g:`even_0` or :g:`even_SS`. This is why, when we want to prove any predicate :g:`P` over elements of :g:`even`, it is enough to prove it for :g:`O` and to prove that if any natural number :g:`n` satisfies :g:`P` its double - successor :g:`(S (S n))` satisfies also :g:`P`. This is indeed analogous to the + successor :g:`(S (S n))` satisfies also :g:`P`. This is analogous to the structural induction principle we got for :g:`nat`. +.. _parametrized-inductive-types: + Parameterized inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmdv:: Inductive @ident @binders {? : @type } := {? | } @ident : @type {* | @ident : @type} +In the previous example, each constructor introduces a different +instance of the predicate :g:`even`. In some cases, all the constructors +introduce the same generic instance of the inductive definition, in +which case, instead of an annotation, we use a context of parameters +which are :n:`@binder`\s shared by all the constructors of the definition. - In the previous example, each constructor introduces a different - instance of the predicate :g:`even`. In some cases, all the constructors - introduce the same generic instance of the inductive definition, in - which case, instead of an annotation, we use a context of parameters - which are :token:`binders` shared by all the constructors of the definition. +Parameters differ from inductive type annotations in that the +conclusion of each type of constructor invokes the inductive type with +the same parameter values of its specification. 
- Parameters differ from inductive type annotations in the fact that the - conclusion of each type of constructor invoke the inductive type with - the same values of parameters as its specification. +.. example:: - .. example:: + A typical example is the definition of polymorphic lists: - A typical example is the definition of polymorphic lists: + .. coqtop:: all - .. coqtop:: in + Inductive list (A:Set) : Set := + | nil : list A + | cons : A -> list A -> list A. - Inductive list (A:Set) : Set := - | nil : list A - | cons : A -> list A -> list A. + In the type of :g:`nil` and :g:`cons`, we write ":g:`list A`" and not + just ":g:`list`". The constructors :g:`nil` and :g:`cons` have these types: - In the type of :g:`nil` and :g:`cons`, we write :g:`(list A)` and not - just :g:`list`. The constructors :g:`nil` and :g:`cons` will have respectively - types: + .. coqtop:: all - .. coqtop:: all + Check nil. + Check cons. - Check nil. - Check cons. + Observe that the destructors are also quantified with :g:`(A:Set)`, for example: - Types of destructors are also quantified with :g:`(A:Set)`. + .. coqtop:: all - Once again, it is possible to specify only the type of the arguments - of the constructors, and to omit the type of the conclusion: + Check list_ind. - .. coqtop:: none + Once again, the types of the constructor arguments and of the conclusion can be omitted: + + .. coqtop:: none - Reset list. + Reset list. - .. coqtop:: in + .. coqtop:: in - Inductive list (A:Set) : Set := nil | cons (_:A) (_:list A). + Inductive list (A:Set) : Set := nil | cons (_:A) (_:list A). .. note:: - + It is possible in the type of a constructor, to - invoke recursively the inductive definition on an argument which is not + + The constructor type can + recursively invoke the inductive definition on an argument which is not the parameter itself. One can define : @@ -1019,7 +1000,9 @@ Parameterized inductive types .. 
coqtop:: all reset - Inductive list2 (A:Set) : Set := nil2 | cons2 (_:A) (_:list2 (A*A)). + Inductive list2 (A:Set) : Set := + | nil2 + | cons2 (_:A) (_:list2 (A*A)). But the following definition will give an error: @@ -1078,42 +1061,16 @@ Parameterized inductive types .. seealso:: Section :ref:`inductive-definitions` and the :tacn:`induction` tactic. -Variants -~~~~~~~~ - -.. cmd:: Variant @ident @binders {? : @type } := {? | } @ident : @type {* | @ident : @type} - - The :cmd:`Variant` command is identical to the :cmd:`Inductive` command, except - that it disallows recursive definition of types (for instance, lists cannot - be defined using :cmd:`Variant`). No induction scheme is generated for - this variant, unless the :flag:`Nonrecursive Elimination Schemes` flag is on. - - .. exn:: The @num th argument of @ident must be @ident in @type. - :undocumented: +.. _mutually_inductive_types: Mutually defined inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmdv:: Inductive @ident {? : @type } := {? | } {*| @ident : @type } {* with {? | } {*| @ident {? : @type } } } - - This variant allows defining a block of mutually inductive types. - It has the same semantics as the above :cmd:`Inductive` definition for each - :token:`ident`. All :token:`ident` are simultaneously added to the environment. - Then well-typing of constructors can be checked. Each one of the :token:`ident` - can be used on its own. +.. example:: Mutually defined inductive types - .. cmdv:: Inductive @ident @binders {? : @type } := {? | } {*| @ident : @type } {* with {? | } {*| @ident @binders {? : @type } } } - - In this variant, the inductive definitions are parameterized - with :token:`binders`. However, parameters correspond to a local context - in which the whole set of inductive declarations is done. For this - reason, the parameters must be strictly the same for each inductive types. - -.. example:: - - The typical example of a mutual inductive data type is the one for trees and - forests. 
We assume given two types :g:`A` and :g:`B` as variables. It can - be declared the following way. + A typical example of mutually inductive data types is trees and + forests. We assume two types :g:`A` and :g:`B` that are given as variables. The types can + be declared like this: .. coqtop:: in @@ -1125,13 +1082,10 @@ Mutually defined inductive types | leaf : B -> forest | cons : tree -> forest -> forest. - This declaration generates automatically six induction principles. They are - respectively called :g:`tree_rec`, :g:`tree_ind`, :g:`tree_rect`, - :g:`forest_rec`, :g:`forest_ind`, :g:`forest_rect`. These ones are not the most - general ones but are just the induction principles corresponding to each - inductive part seen as a single inductive definition. + This declaration automatically generates eight induction principles. They are not the most + general principles, but they correspond to each inductive part seen as a single inductive definition. - To illustrate this point on our example, we give the types of :g:`tree_rec` + To illustrate this point on our example, here are the types of :g:`tree_rec` and :g:`forest_rec`. .. coqtop:: all @@ -1142,7 +1096,7 @@ Mutually defined inductive types Assume we want to parameterize our mutual inductive definitions with the two type variables :g:`A` and :g:`B`, the declaration should be - done the following way: + done as follows: .. coqdoc:: @@ -1161,10 +1115,27 @@ Mutually defined inductive types A generic command :cmd:`Scheme` is useful to build automatically various mutual induction principles. +Variants +~~~~~~~~ + +.. cmd:: Variant @inductive_definition {* with @inductive_definition } + + The :cmd:`Variant` command is identical to the :cmd:`Inductive` command, except + that it disallows recursive definition of types (for instance, lists cannot + be defined using :cmd:`Variant`). No induction scheme is generated for + this variant, unless the :flag:`Nonrecursive Elimination Schemes` flag is on. 
+ + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + + .. exn:: The @num th argument of @ident must be @ident in @type. + :undocumented: + .. _coinductive-types: Co-inductive types -~~~~~~~~~~~~~~~~~~ +------------------ The objects of an inductive type are well-founded with respect to the constructors of the type. In other words, such objects contain only a @@ -1174,7 +1145,7 @@ constructors. Infinite objects are introduced by a non-ending (but effective) process of construction, defined in terms of the constructors of the type. -.. cmd:: CoInductive @ident @binders {? : @type } := {? | } @ident : @type {* | @ident : @type} +.. cmd:: CoInductive @inductive_definition {* with @inductive_definition } This command introduces a co-inductive type. The syntax of the command is the same as the command :cmd:`Inductive`. @@ -1182,10 +1153,14 @@ of the type. type, since such principles only make sense for inductive types. For co-inductive types, the only elimination principle is case analysis. + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + .. example:: - An example of a co-inductive type is the type of infinite sequences of - natural numbers, usually called streams. + The type of infinite sequences of natural numbers, usually called streams, + is an example of a co-inductive type. .. coqtop:: in @@ -1199,13 +1174,12 @@ of the type. Definition hd (x:Stream) := let (a,s) := x in a. Definition tl (x:Stream) := let (a,s) := x in s. -Definition of co-inductive predicates and blocks of mutually +Definitions of co-inductive predicates and blocks of mutually co-inductive definitions are also allowed. .. 
example:: - An example of a co-inductive predicate is the extensional equality on - streams: + The extensional equality on streams is an example of a co-inductive type: .. coqtop:: in @@ -1219,7 +1193,7 @@ co-inductive definitions are also allowed. objects in Section :ref:`cofixpoint`. Caveat -++++++ +~~~~~~ The ability to define co-inductive types by constructors, hereafter called *positive co-inductive types*, is known to break subject reduction. The story is @@ -1287,27 +1261,41 @@ constructions. .. _Fixpoint: -.. cmd:: Fixpoint @ident @binders {? {struct @ident} } {? : @type } := @term +.. cmd:: Fixpoint @fix_definition {* with @fix_definition } + + .. insertprodn fix_definition fix_definition + + .. prodn:: + fix_definition ::= @ident_decl {* @binder } {? @fixannot } {? : @type } {? := @term } {? @decl_notations } This command allows defining functions by pattern matching over inductive objects using a fixed point construction. The meaning of this declaration is - to define :token:`ident` a recursive function with arguments specified by - the :token:`binders` such that :token:`ident` applied to arguments - corresponding to these :token:`binders` has type :token:`type`, and is - equivalent to the expression :token:`term`. The type of :token:`ident` is - consequently :n:`forall @binders, @type` and its value is equivalent - to :n:`fun @binders => @term`. + to define :n:`@ident` as a recursive function with arguments specified by + the :n:`@binder`\s such that :n:`@ident` applied to arguments + corresponding to these :n:`@binder`\s has type :n:`@type`, and is + equivalent to the expression :n:`@term`. The type of :n:`@ident` is + consequently :n:`forall {* @binder }, @type` and its value is equivalent + to :n:`fun {* @binder } => @term`. To be accepted, a :cmd:`Fixpoint` definition has to satisfy some syntactical constraints on a special argument called the decreasing argument. They are needed to ensure that the :cmd:`Fixpoint` definition always terminates. 
- The point of the :n:`{struct @ident}` annotation is to let the user tell the - system which argument decreases along the recursive calls. + The point of the :n:`{struct @ident}` annotation (see :n:`@fixannot`) is to + let the user tell the system which argument decreases along the recursive calls. - The :n:`{struct @ident}` annotation may be left implicit, in this case the - system tries successively arguments from left to right until it finds one + The :n:`{struct @ident}` annotation may be left implicit, in which case the + system successively tries arguments from left to right until it finds one that satisfies the decreasing condition. + The :n:`with` clause allows simultaneously defining several mutual fixpoints. + It is especially useful when defining functions over mutually defined + inductive types. Example: :ref:`Mutual Fixpoints<example_mutual_fixpoints>`. + + If :n:`@term` is omitted, :n:`@type` is required and Coq enters the proof editing mode. + This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. + In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant + for which the computational behavior is relevant. See :ref:`proof-editing-mode`. + .. note:: + Some fixpoints may have several arguments that fit as decreasing @@ -1388,35 +1376,35 @@ constructions. end end. +.. _example_mutual_fixpoints: - .. cmdv:: Fixpoint @ident @binders {? {struct @ident} } {? : @type } := @term {* with @ident @binders {? : @type } := @term } + .. example:: Mutual fixpoints - This variant allows defining simultaneously several mutual fixpoints. - It is especially useful when defining functions over mutually defined - inductive types. + The size of trees and forests can be defined the following way: - .. example:: - - The size of trees and forests can be defined the following way: - - .. coqtop:: all + .. 
coqtop:: all - Fixpoint tree_size (t:tree) : nat := - match t with - | node a f => S (forest_size f) - end - with forest_size (f:forest) : nat := - match f with - | leaf b => 1 - | cons t f' => (tree_size t + forest_size f') - end. + Fixpoint tree_size (t:tree) : nat := + match t with + | node a f => S (forest_size f) + end + with forest_size (f:forest) : nat := + match f with + | leaf b => 1 + | cons t f' => (tree_size t + forest_size f') + end. .. _cofixpoint: Definitions of recursive objects in co-inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmd:: CoFixpoint @ident {? @binders } {? : @type } := @term +.. cmd:: CoFixpoint @cofix_definition {* with @cofix_definition } + + .. insertprodn cofix_definition cofix_definition + + .. prodn:: + cofix_definition ::= @ident_decl {* @binder } {? : @type } {? := @term } {? @decl_notations } This command introduces a method for constructing an infinite object of a coinductive type. For example, the stream containing all natural numbers can @@ -1428,7 +1416,7 @@ Definitions of recursive objects in co-inductive types CoFixpoint from (n:nat) : Stream := Seq n (from (S n)). - Oppositely to recursive ones, there is no decreasing argument in a + Unlike recursive definitions, there is no decreasing argument in a co-recursive definition. To be admissible, a method of construction must provide at least one extra constructor of the infinite object for each iteration. A syntactical guard condition is imposed on co-recursive @@ -1457,10 +1445,63 @@ Definitions of recursive objects in co-inductive types Eval compute in (hd (from 0)). Eval compute in (tl (from 0)). - .. cmdv:: CoFixpoint @ident {? @binders } {? : @type } := @term {* with @ident {? @binders } : {? @type } := @term } + As in the :cmd:`Fixpoint` command, the :n:`with` clause allows simultaneously + defining several mutual cofixpoints. + + If :n:`@term` is omitted, :n:`@type` is required and Coq enters the proof editing mode. 
+ This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. + In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant + for which the computational behavior is relevant. See :ref:`proof-editing-mode`. + +.. _Computations: + +Computations +------------ + +.. insertprodn reduce pattern_occ + +.. prodn:: + reduce ::= Eval @red_expr in + red_expr ::= red + | hnf + | simpl {? @delta_flag } {? @ref_or_pattern_occ } + | cbv {? @strategy_flag } + | cbn {? @strategy_flag } + | lazy {? @strategy_flag } + | compute {? @delta_flag } + | vm_compute {? @ref_or_pattern_occ } + | native_compute {? @ref_or_pattern_occ } + | unfold {+, @unfold_occ } + | fold {+ @term1_extended } + | pattern {+, @pattern_occ } + | @ident + delta_flag ::= {? - } [ {+ @smart_global } ] + smart_global ::= @qualid + | @by_notation + by_notation ::= @string {? % @ident } + strategy_flag ::= {+ @red_flags } + | @delta_flag + red_flags ::= beta + | iota + | match + | fix + | cofix + | zeta + | delta {? @delta_flag } + ref_or_pattern_occ ::= @smart_global {? at @occs_nums } + | @term1_extended {? at @occs_nums } + occs_nums ::= {+ @num_or_var } + | - @num_or_var {* @int_or_var } + num_or_var ::= @num + | @ident + int_or_var ::= @int + | @ident + unfold_occ ::= @smart_global {? at @occs_nums } + pattern_occ ::= @term1_extended {? at @occs_nums } - As in the :cmd:`Fixpoint` command, it is possible to introduce a block of - mutually dependent methods. +See :ref:`Conversion-rules`. + +.. todo:: Add text here .. _Assertions: @@ -1472,40 +1513,26 @@ inhabitant of the type) is interactively built using tactics. The interactive proof mode is described in Chapter :ref:`proofhandling` and the tactics in Chapter :ref:`Tactics`. The basic assertion command is: -.. cmd:: Theorem @ident {? @binders } : @type +.. 
cmd:: @thm_token @ident_decl {* @binder } : @type {* with @ident_decl {* @binder } : @type } + :name: Theorem; Lemma; Fact; Remark; Corollary; Proposition; Property - After the statement is asserted, Coq needs a proof. Once a proof of - :token:`type` under the assumptions represented by :token:`binders` is given and - validated, the proof is generalized into a proof of :n:`forall @binders, @type` and - the theorem is bound to the name :token:`ident` in the environment. + .. insertprodn thm_token thm_token - .. exn:: The term @term has type @type which should be Set, Prop or Type. - :undocumented: + .. prodn:: + thm_token ::= Theorem + | Lemma + | Fact + | Remark + | Corollary + | Proposition + | Property - .. exn:: @ident already exists. - :name: @ident already exists. (Theorem) - - The name you provided is already defined. You have then to choose - another name. - - .. exn:: Nested proofs are not allowed unless you turn the Nested Proofs Allowed flag on. - - You are asserting a new statement while already being in proof editing mode. - This feature, called nested proofs, is disabled by default. - To activate it, turn the :flag:`Nested Proofs Allowed` flag on. - - .. cmdv:: Lemma @ident {? @binders } : @type - Remark @ident {? @binders } : @type - Fact @ident {? @binders } : @type - Corollary @ident {? @binders } : @type - Proposition @ident {? @binders } : @type - :name: Lemma; Remark; Fact; Corollary; Proposition - - These commands are all synonyms of :n:`Theorem @ident {? @binders } : type`. - -.. cmdv:: Theorem @ident {? @binders } : @type {* with @ident {? @binders } : @type} + After the statement is asserted, Coq needs a proof. Once a proof of + :n:`@type` under the assumptions represented by :n:`@binder`\s is given and + validated, the proof is generalized into a proof of :n:`forall {* @binder }, @type` and + the theorem is bound to the name :n:`@ident` in the environment. 
- This command is useful for theorems that are proved by simultaneous induction + Forms using the :n:`with` clause are useful for theorems that are proved by simultaneous induction over a mutually inductive assumption, or that assert mutually dependent statements in some mutual co-inductive type. It is equivalent to :cmd:`Fixpoint` or :cmd:`CoFixpoint` but using tactics to build the proof of @@ -1522,46 +1549,29 @@ Chapter :ref:`Tactics`. The basic assertion command is: correct at some time of the interactive development of a proof, use the command :cmd:`Guarded`. - The command can be used also with :cmd:`Lemma`, :cmd:`Remark`, etc. instead of - :cmd:`Theorem`. - -.. cmdv:: Definition @ident {? @binders } : @type - - This allows defining a term of type :token:`type` using the proof editing - mode. It behaves as :cmd:`Theorem` but is intended to be used in conjunction with - :cmd:`Defined` in order to define a constant of which the computational - behavior is relevant. - - The command can be used also with :cmd:`Example` instead of :cmd:`Definition`. - - .. seealso:: :cmd:`Opaque`, :cmd:`Transparent`, :tacn:`unfold`. - -.. cmdv:: Let @ident {? @binders } : @type - - Like :n:`Definition @ident {? @binders } : @type` except that the definition is - turned into a let-in definition generalized over the declarations depending - on it after closing the current section. + .. exn:: The term @term has type @type which should be Set, Prop or Type. + :undocumented: -.. cmdv:: Fixpoint @ident @binders : @type {* with @ident @binders : @type} + .. exn:: @ident already exists. + :name: @ident already exists. (Theorem) - This generalizes the syntax of :cmd:`Fixpoint` so that one or more bodies - can be defined interactively using the proof editing mode (when a - body is omitted, its type is mandatory in the syntax). When the block - of proofs is completed, it is intended to be ended by :cmd:`Defined`. + The name you provided is already defined. 
You then have to choose
-A value is either a :token:`string` (in which case it is specified with an equal ``=`` sign), -or a list of attributes, enclosed within brackets. +.. prodn:: + all_attrs ::= {* #[ {*, @attr } ] } {? @legacy_attrs } + attr ::= @ident {? @attr_value } + attr_value ::= = @string + | ( {*, @attr } ) + legacy_attrs ::= {? {| Local | Global } } {? {| Polymorphic | Monomorphic } } {? Program } {? {| Cumulative | NonCumulative } } {? Private } + +Attributes modify the behavior of a command or tactic. +Syntactically, most commands and tactics can be decorated with attributes, but +attributes not supported by the command or tactic will be flagged as errors. + +The order of top-level attributes doesn't affect their meaning. ``#[foo,bar]``, ``#[bar,foo]``, +``#[foo]#[bar]`` and ``#[bar]#[foo]`` are equivalent. + +The legacy attributes (:n:`@legacy_attrs`) provide an older, alternate syntax +for certain attributes. They are equivalent to new attributes as follows: + +================ ================================ +Legacy attribute New attribute +================ ================================ +`Local` :attr:`local` +`Global` :attr:`global` +`Polymorphic` :attr:`universes(polymorphic)` +`Monomorphic` :attr:`universes(monomorphic)` +`Cumulative` none +`NonCumulative` none +`Private` none +`Program` :attr:`program` +================ ================================ Some attributes are specific to a command, and so are described with -that command. Currently, the following attributes are recognized by a -variety of commands: +that command. Currently, the following attributes are recognized: + +.. attr:: universes(monomorphic) + :name: universes(monomorphic) + + See :ref:`polymorphicuniverses`. + +.. attr:: universes(polymorphic) + :name: universes(polymorphic) + + See :ref:`polymorphicuniverses`. + +.. attr:: universes(template) + :name: universes(template) + + See :ref:`Template-polymorphism` + +.. 
attr:: universes(notemplate) + :name: universes(notemplate) + + See :ref:`Template-polymorphism` + +.. attr:: program -``universes(monomorphic)``, ``universes(polymorphic)`` - Equivalent to the ``Monomorphic`` and - ``Polymorphic`` flags (see :ref:`polymorphicuniverses`). + See :ref:`programs`. -``program`` - Takes no value, equivalent to the ``Program`` flag - (see :ref:`programs`). +.. attr:: global -``global``, ``local`` - Take no value, equivalent to the ``Global`` and ``Local`` flags - (see :ref:`controlling-locality-of-commands`). + See :ref:`controlling-locality-of-commands`. -``deprecated`` - Takes as value the optional attributes ``since`` and ``note``; - both have a string value. +.. attr:: local + + See :ref:`controlling-locality-of-commands`. + +.. attr:: Cumulative + + Legacy attribute, only allowed in a polymorphic context. + Specifies that two instances of the same inductive type (family) are convertible + based on the universe variances; they do not need to be equal. + See :ref:`cumulative`. + +.. attr:: NonCumulative + + Legacy attribute, only allowed in a polymorphic context. + Specifies that two instances of the same inductive type (family) are convertible + only if all the universes are equal. + See :ref:`cumulative`. + +.. attr:: Private + + Legacy attribute. Documentation to be added. + +.. attr:: deprecated ( {? since = @string , } {? note = @string } ) + :name: deprecated + + At least one of :n:`since` or :n:`note` must be present. If both are present, + either one may appear first and they must be separated by a comma. This attribute is supported by the following commands: :cmd:`Ltac`, :cmd:`Tactic Notation`, :cmd:`Notation`, :cmd:`Infix`. @@ -1634,12 +1707,13 @@ variety of commands: :n:`@string__1` is the actual notation, :n:`@string__2` is the version number, :n:`@string__3` is the note. -``canonical`` +.. attr:: canonical + This attribute can decorate a :cmd:`Definition` or :cmd:`Let` command. 
It is equivalent to having a :cmd:`Canonical Structure` declaration just after the command. - This attirbute can take the value ``false`` when decorating a record field + This attribute can take the value ``false`` when decorating a record field declaration with the effect of preventing the field from being involved in the inference of canonical instances. diff --git a/doc/sphinx/proof-engine/ltac2.rst b/doc/sphinx/proof-engine/ltac2.rst index 9362a7729e..06106a6b4c 100644 --- a/doc/sphinx/proof-engine/ltac2.rst +++ b/doc/sphinx/proof-engine/ltac2.rst @@ -909,9 +909,9 @@ Ltac2 from Ltac1 Same as above by switching Ltac1 by Ltac2 and using the `ltac2` quotation instead. -.. productionlist:: coq - ltac_expr : ltac2 : ( `ltac2_term` ) - : ltac2 : ( `ident` ... `ident` |- `ltac2_term` ) +.. prodn:: + ltac_expr += ltac2 : ( `ltac2_term` ) + | ltac2 : ( `ident` ... `ident` |- `ltac2_term` ) The typing rules are dual, that is, the optional identifiers are bound with type `Ltac2.Ltac1.t` in the Ltac2 expression, which is expected to have diff --git a/doc/sphinx/proof-engine/proof-handling.rst b/doc/sphinx/proof-engine/proof-handling.rst index b1734b3f19..03eebc32f9 100644 --- a/doc/sphinx/proof-engine/proof-handling.rst +++ b/doc/sphinx/proof-engine/proof-handling.rst @@ -80,20 +80,19 @@ list of assertion commands is given in :ref:`Assertions`. The command a while when the proof is large. In some exceptional cases one may even incur a memory overflow. - .. cmdv:: Defined - :name: Defined +.. cmd:: Defined - Same as :cmd:`Qed` but the proof is then declared transparent, which means - that its content can be explicitly used for type checking and that it can be - unfolded in conversion tactics (see :ref:`performingcomputations`, - :cmd:`Opaque`, :cmd:`Transparent`). 
+ Same as :cmd:`Qed`, except the proof is made *transparent*, which means + that its content can be explicitly used for type checking and that it can be + unfolded in conversion tactics (see :ref:`performingcomputations`, + :cmd:`Opaque`, :cmd:`Transparent`). - .. cmdv:: Save @ident - :name: Save +.. cmd:: Save @ident + :name: Save - Forces the name of the original goal to be :token:`ident`. This - command (and the following ones) can only be used if the original goal - has been opened using the :cmd:`Goal` command. + Forces the name of the original goal to be :token:`ident`. This + command can only be used if the original goal + was opened using the :cmd:`Goal` command. .. cmd:: Admitted diff --git a/doc/sphinx/proof-engine/ssreflect-proof-language.rst b/doc/sphinx/proof-engine/ssreflect-proof-language.rst index 46215f16a6..90a991794f 100644 --- a/doc/sphinx/proof-engine/ssreflect-proof-language.rst +++ b/doc/sphinx/proof-engine/ssreflect-proof-language.rst @@ -1702,7 +1702,7 @@ Intro patterns when it is the very *first* :token:`i_pattern` after tactic ``=>`` tactical *and* tactic is not a move, is a *branching*:token:`i_pattern`. It executes the sequence - :token:`i_item`:math:`_i` on the i-th subgoal produced by tactic. The + :n:`@i_item__i` on the i-th subgoal produced by tactic. The execution of tactic should thus generate exactly m subgoals, unless the ``[…]`` :token:`i_pattern` comes after an initial ``//`` or ``//=`` :token:`s_item` that closes some of the goals produced by ``tactic``, in diff --git a/doc/sphinx/proof-engine/tactics.rst b/doc/sphinx/proof-engine/tactics.rst index aee62f1b27..2bfd810ea1 100644 --- a/doc/sphinx/proof-engine/tactics.rst +++ b/doc/sphinx/proof-engine/tactics.rst @@ -1281,16 +1281,16 @@ Managing the local context is an occurrence clause whose syntax and behavior are described in :ref:`goal occurrences <occurrencessets>`. - .. tacv:: set (@ident @binders := @term) {? in @goal_occurrences } + .. 
tacv:: set (@ident {* @binder } := @term) {? in @goal_occurrences } - This is equivalent to :n:`set (@ident := fun @binders => @term) {? in @goal_occurrences }`. + This is equivalent to :n:`set (@ident := fun {* @binder } => @term) {? in @goal_occurrences }`. .. tacv:: set @term {? in @goal_occurrences } This behaves as :n:`set (@ident := @term) {? in @goal_occurrences }` but :token:`ident` is generated by Coq. - .. tacv:: eset (@ident {? @binders } := @term) {? in @goal_occurrences } + .. tacv:: eset (@ident {* @binder } := @term) {? in @goal_occurrences } eset @term {? in @goal_occurrences } :name: eset; _ @@ -1326,16 +1326,16 @@ Managing the local context without performing any replacement in the goal or in the hypotheses. It is equivalent to :n:`set (@ident := @term) in |-`. - .. tacv:: pose (@ident @binders := @term) + .. tacv:: pose (@ident {* @binder } := @term) - This is equivalent to :n:`pose (@ident := fun @binders => @term)`. + This is equivalent to :n:`pose (@ident := fun {* @binder } => @term)`. .. tacv:: pose @term This behaves as :n:`pose (@ident := @term)` but :token:`ident` is generated by Coq. - .. tacv:: epose (@ident {? @binders} := @term) + .. tacv:: epose (@ident {* @binder } := @term) epose @term :name: epose; _ @@ -1452,6 +1452,19 @@ Controlling the proof flow While :tacn:`pose proof` expects that no existential variables are generated by the tactic, :tacn:`epose proof` removes this constraint. +.. tacv:: pose proof (@ident := @term) + + This is an alternative syntax for :n:`assert (@ident := @term)` and + :n:`pose proof @term as @ident`, following the model of :n:`pose + (@ident := @term)` but dropping the value of :token:`ident`. + +.. tacv:: epose proof (@ident := @term) + + This is an alternative syntax for :n:`eassert (@ident := @term)` + and :n:`epose proof @term as @ident`, following the model of + :n:`epose (@ident := @term)` but dropping the value of + :token:`ident`. + .. 
tacv:: enough (@ident : @type) :name: enough diff --git a/doc/sphinx/proof-engine/vernacular-commands.rst b/doc/sphinx/proof-engine/vernacular-commands.rst index a38c26c2b3..d1f3dcc309 100644 --- a/doc/sphinx/proof-engine/vernacular-commands.rst +++ b/doc/sphinx/proof-engine/vernacular-commands.rst @@ -745,11 +745,6 @@ the toplevel, and using them in source files is discouraged. :n:`-Q @string @dirpath`. It adds the physical directory string to the current |Coq| loadpath and maps it to the logical directory dirpath. - .. cmdv:: Add LoadPath @string - - Performs as :n:`Add LoadPath @string @dirpath` but - for the empty directory path. - .. cmd:: Add Rec LoadPath @string as @dirpath @@ -757,11 +752,6 @@ the toplevel, and using them in source files is discouraged. :n:`-R @string @dirpath`. It adds the physical directory string and all its subdirectories to the current |Coq| loadpath. - .. cmdv:: Add Rec LoadPath @string - - Works as :n:`Add Rec LoadPath @string as @dirpath` but for the empty - logical directory path. - .. cmd:: Remove LoadPath @string @@ -784,12 +774,6 @@ the toplevel, and using them in source files is discouraged. loadpath (see the command `Declare ML Module`` in Section :ref:`compiled-files`). -.. cmd:: Add Rec ML Path @string - - This command adds the directory :n:`@string` and all its subdirectories to - the current OCaml loadpath (see the command :cmd:`Declare ML Module`). - - .. cmd:: Print ML Path @string This command displays the current OCaml loadpath. This diff --git a/doc/sphinx/user-extensions/proof-schemes.rst b/doc/sphinx/user-extensions/proof-schemes.rst index 5b0b3c51b0..34197c4fcf 100644 --- a/doc/sphinx/user-extensions/proof-schemes.rst +++ b/doc/sphinx/user-extensions/proof-schemes.rst @@ -337,31 +337,31 @@ Generation of inversion principles with ``Derive`` ``Inversion`` ----------------------------------------------------------------- .. 
cmd:: Derive Inversion @ident with @ident Sort @sort - Derive Inversion @ident with (forall @binders, @ident @term) Sort @sort + Derive Inversion @ident with (forall {* @binder }, @ident @term) Sort @sort This command generates an inversion principle for the :tacn:`inversion ... using ...` tactic. The first :token:`ident` is the name of the generated principle. The second :token:`ident` should be an inductive predicate, and :token:`binders` the variables occurring in the term :token:`term`. This command generates the inversion lemma for the sort - :token:`sort` corresponding to the instance :n:`forall @binders, @ident @term`. + :token:`sort` corresponding to the instance :n:`forall {* @binder }, @ident @term`. When applied, it is equivalent to having inverted the instance with the tactic :g:`inversion`. .. cmdv:: Derive Inversion_clear @ident with @ident Sort @sort - Derive Inversion_clear @ident with (forall @binders, @ident @term) Sort @sort + Derive Inversion_clear @ident with (forall {* @binder }, @ident @term) Sort @sort When applied, it is equivalent to having inverted the instance with the tactic inversion replaced by the tactic `inversion_clear`. .. cmdv:: Derive Dependent Inversion @ident with @ident Sort @sort - Derive Dependent Inversion @ident with (forall @binders, @ident @term) Sort @sort + Derive Dependent Inversion @ident with (forall {* @binder }, @ident @term) Sort @sort When applied, it is equivalent to having inverted the instance with the tactic `dependent inversion`. .. cmdv:: Derive Dependent Inversion_clear @ident with @ident Sort @sort - Derive Dependent Inversion_clear @ident with (forall @binders, @ident @term) Sort @sort + Derive Dependent Inversion_clear @ident with (forall {* @binder }, @ident @term) Sort @sort When applied, it is equivalent to having inverted the instance with the tactic `dependent inversion_clear`. 
diff --git a/doc/sphinx/user-extensions/syntax-extensions.rst b/doc/sphinx/user-extensions/syntax-extensions.rst index c568f13c12..fd95a5cef4 100644 --- a/doc/sphinx/user-extensions/syntax-extensions.rst +++ b/doc/sphinx/user-extensions/syntax-extensions.rst @@ -310,10 +310,10 @@ at the time of use of the notation. The Infix command ~~~~~~~~~~~~~~~~~~ -The :cmd:`Infix` command is a shortening for declaring notations of infix +The :cmd:`Infix` command is a shortcut for declaring notations for infix symbols. -.. cmd:: Infix "@symbol" := @term {? (@modifiers) }. +.. cmd:: Infix @string := @term {? (@modifiers) } This command is equivalent to @@ -366,7 +366,7 @@ Thanks to reserved notations, the inductive, co-inductive, record, recursive and corecursive definitions can benefit from customized notations. To do this, insert a ``where`` notation clause after the definition of the (co)inductive type or (co)recursive term (or after the definition of each of them in case of mutual -definitions). The exact syntax is given by :token:`decl_notation` for inductive, +definitions). The exact syntax is given by :n:`@decl_notation` for inductive, co-inductive, recursive and corecursive definitions and in :ref:`record-types` for records. Here are examples: @@ -909,12 +909,11 @@ notations are given below. The optional :production:`scope` is described in notation : [Local] Notation `string` := `term` [(`modifiers`)] [: `scope`]. : [Local] Infix `string` := `qualid` [(`modifiers`)] [: `scope`]. : [Local] Reserved Notation `string` [(`modifiers`)] . - : Inductive `ind_body` [`decl_notation`] with … with `ind_body` [`decl_notation`]. - : CoInductive `ind_body` [`decl_notation`] with … with `ind_body` [`decl_notation`]. - : Fixpoint `fix_body` [`decl_notation`] with … with `fix_body` [`decl_notation`]. - : CoFixpoint `fix_body` [`decl_notation`] with … with `fix_body` [`decl_notation`]. + : Inductive `ind_body` [`decl_notations`] with … with `ind_body` [`decl_notations`]. 
+ : CoInductive `ind_body` [`decl_notations`] with … with `ind_body` [`decl_notations`]. + : Fixpoint `fix_body` [`decl_notations`] with … with `fix_body` [`decl_notations`]. + : CoFixpoint `fix_body` [`decl_notations`] with … with `fix_body` [`decl_notations`]. : [Local] Declare Custom Entry `ident`. - decl_notation : [where `string` := `term` [: `scope`] and … and `string` := `term` [: `scope`]]. modifiers : `modifier`, … , `modifier` modifier : at level `num` : in custom `ident` @@ -944,6 +943,12 @@ notations are given below. The optional :production:`scope` is described in : as pattern : as strict pattern +.. insertprodn decl_notations decl_notation + +.. prodn:: + decl_notations ::= where @decl_notation {* and @decl_notation } + decl_notation ::= @string := @term1_extended [(only parsing)] {? : @ident } + .. note:: No typing of the denoted expression is performed at definition time. Type checking is done only at the time of use of the notation. diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py index 85d86bed62..d6ecf311f1 100644 --- a/doc/tools/coqrst/coqdomain.py +++ b/doc/tools/coqrst/coqdomain.py @@ -198,12 +198,23 @@ class CoqObject(ObjectDescription): index_text += " " + self.index_suffix self.indexnode['entries'].append(('single', index_text, target, '', None)) + aliases = None # additional indexed names for a command or other object + def add_target_and_index(self, name, _, signode): """Attach a link target to `signode` and an index entry for `name`. 
This is only called (from ``ObjectDescription.run``) if ``:noindex:`` isn't specified.""" if name: target = self._add_target(signode, name) self._add_index_entry(name, target) + if self.aliases is not None: + parent = signode.parent + for alias in self.aliases: + aliasnode = nodes.inline('', '') + signode.parent.append(aliasnode) + target2 = self._add_target(aliasnode, alias) + self._add_index_entry(name, target2) + parent.remove(signode) # move to the end + parent.append(signode) return target def _prepare_names(self): @@ -213,10 +224,15 @@ class CoqObject(ObjectDescription): self._names = {} else: names = [n.strip() for n in names.split(";")] - if len(names) != len(sigs): + if len(sigs) > 1 and len(names) != len(sigs): ERR = ("Expected {} semicolon-separated names, got {}. " + "Please provide one name per signature line.") raise self.error(ERR.format(len(names), len(sigs))) + if len(sigs) == 1 and len(names) > 1: + self.aliases = names[:-1] + names = names[-1:] + else: + self.aliases = None self._names = dict(zip(sigs, names)) def run(self): @@ -278,7 +294,7 @@ class VernacObject(NotationObject): Example:: - .. cmd:: Infix "@symbol" := @term ({+, @modifier}). + .. cmd:: Infix @string := @term1_extended {? ( {+, @syntax_modifier } ) } {? : @ident } This command is equivalent to :n:`…`. """ @@ -325,6 +341,20 @@ class TacticNotationObject(NotationObject): index_suffix = "(tactic)" annotation = None +class AttributeNotationObject(NotationObject): + """An attribute. + + Example:: + + .. attr:: local + """ + subdomain = "attr" + index_suffix = "(attribute)" + annotation = "Attribute" + + def _name_from_signature(self, signature): + return notation_to_string(signature) + class TacticNotationVariantObject(TacticNotationObject): """A variant of a tactic. 
@@ -1066,6 +1096,10 @@ class CoqVernacIndex(CoqSubdomainsIndex): class CoqTacticIndex(CoqSubdomainsIndex): name, localname, shortname, subdomains = "tacindex", "Tactic Index", "tactics", ["tacn"] +# Attribute index is generated but not included in output +class CoqAttributeIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "attrindex", "Attribute Index", "attributes", ["attr"] + class CoqOptionIndex(CoqSubdomainsIndex): name, localname, shortname, subdomains = "optindex", "Flags, options and Tables Index", "options", ["flag", "opt", "table"] @@ -1142,6 +1176,7 @@ class CoqDomain(Domain): 'opt': ObjType('opt', 'opt'), 'flag': ObjType('flag', 'flag'), 'table': ObjType('table', 'table'), + 'attr': ObjType('attr', 'attr'), 'thm': ObjType('thm', 'thm'), 'prodn': ObjType('prodn', 'prodn'), 'exn': ObjType('exn', 'exn'), @@ -1160,6 +1195,7 @@ class CoqDomain(Domain): 'opt': OptionObject, 'flag': FlagObject, 'table': TableObject, + 'attr': AttributeNotationObject, 'thm': GallinaObject, 'prodn' : ProductionObject, 'exn': ExceptionObject, @@ -1173,6 +1209,7 @@ class CoqDomain(Domain): 'opt': XRefRole(warn_dangling=True), 'flag': XRefRole(warn_dangling=True), 'table': XRefRole(warn_dangling=True), + 'attr': XRefRole(warn_dangling=True), 'thm': XRefRole(warn_dangling=True), 'prodn' : XRefRole(warn_dangling=True), 'exn': XRefRole(warn_dangling=True), @@ -1196,6 +1233,7 @@ class CoqDomain(Domain): 'opt': {}, 'flag': {}, 'table': {}, + 'attr': {}, 'thm': {}, 'prodn' : {}, 'exn': {}, diff --git a/doc/tools/docgram/README.md b/doc/tools/docgram/README.md index 182532e413..fc6d0ace0d 100644 --- a/doc/tools/docgram/README.md +++ b/doc/tools/docgram/README.md @@ -176,9 +176,13 @@ that appear in the specified production: production **without** `<grammar_symbol>`. If found, both productions are replaced with single production with `OPT <grammar_symbol>` - The current version handles a single USE_NT or ADD_OPT per EDIT. 
+ The current version handles a single USE_NT or ADD_OPT per EDIT. These symbols + may appear in the middle of the production given in the EDIT. -* `REPLACE` - (2 sequential productions) - removes `<oldprod>` and +`INSERTALL <symbols>` - inserts <symbols> at the beginning of every production in +<edited_nt>. + +`REPLACE` - (2 sequential productions) - removes `<oldprod>` and inserts `<newprod>` in its place. ``` @@ -186,7 +190,14 @@ that appear in the specified production: | WITH <newprod> ``` -* `PRINT` <nonterminal> - prints the nonterminal definition at that point in +`MOVETO <destination> <production>` - moves the production to `<destination>` and, + if needed, creates a new production <edited_nt> -> <destination>. + +`OPTINREF` - verifies that <edited_nt> has an empty production. If so, it removes +the empty production and replaces all references to <edited_nt> throughout the +grammar with `OPT <edited_nt>` + +`PRINT` <nonterminal> - prints the nonterminal definition at that point in applying the edits. Most useful when the edits get a bit complicated to follow. * (any other nonterminal name) - adds a new production (and possibly a new nonterminal) diff --git a/doc/tools/docgram/common.edit_mlg b/doc/tools/docgram/common.edit_mlg index 9c1827f5b7..3524d77380 100644 --- a/doc/tools/docgram/common.edit_mlg +++ b/doc/tools/docgram/common.edit_mlg @@ -39,7 +39,7 @@ RENAME: [ | Prim.natural natural *) | Vernac.rec_definition rec_definition - +(* todo: hmm, rename adds 1 prodn to closed_binder?? *) | Constr.closed_binder closed_binder ] @@ -162,11 +162,6 @@ lconstr: [ | DELETE l_constr ] -type_cstr: [ -| REPLACE ":" lconstr -| WITH OPT ( ":" lconstr ) -| DELETE (* empty *) -] let_type_cstr: [ | DELETE OPT [ ":" lconstr ] @@ -210,8 +205,6 @@ term_let: [ atomic_constr: [ (* @Zimmi48: "string" used only for notations, but keep to be consistent with patterns *) (* | DELETE string *) -| REPLACE global univ_instance -| WITH global OPT univ_instance | REPLACE "?" 
"[" ident "]" | WITH "?[" ident "]" | MOVETO term_evar "?[" ident "]" @@ -237,8 +230,6 @@ operconstr10: [ (* fixme: add in as a prodn somewhere *) | MOVETO dangling_pattern_extension_rule "@" pattern_identref LIST1 identref | DELETE dangling_pattern_extension_rule -| REPLACE "@" global univ_instance LIST0 operconstr9 -| WITH "@" global OPT univ_instance LIST0 operconstr9 ] operconstr9: [ @@ -285,13 +276,6 @@ binders_fixannot: [ | LIST0 binder OPT fixannot ] - -of_type_with_opt_coercion: [ -| DELETE ":>" ">" -| DELETE ":" ">" ">" -| DELETE ":" ">" -] - binder: [ | DELETE name ] @@ -306,11 +290,15 @@ open_binders: [ | DELETE closed_binder binders ] +type: [ +| operconstr200 +] + closed_binder: [ | name | REPLACE "(" name LIST1 name ":" lconstr ")" -| WITH "(" LIST1 name ":" lconstr ")" +| WITH "(" LIST1 name ":" type ")" | DELETE "(" name ":" lconstr ")" | DELETE "(" name ":=" lconstr ")" @@ -324,6 +312,16 @@ closed_binder: [ | REPLACE "{" name LIST1 name ":" lconstr "}" | WITH "{" LIST1 name type_cstr "}" | DELETE "{" name ":" lconstr "}" + +| DELETE "[" name "]" +| DELETE "[" name LIST1 name "]" + +| REPLACE "[" name LIST1 name ":" lconstr "]" +| WITH "[" LIST1 name type_cstr "]" +| DELETE "[" name ":" lconstr "]" + +| REPLACE "(" Prim.name ":" lconstr "|" lconstr ")" +| WITH "(" Prim.name ":" type "|" lconstr ")" ] name_colon: [ @@ -377,28 +375,151 @@ eqn: [ ] universe_increment: [ -| REPLACE "+" natural -| WITH OPT ( "+" natural ) -| DELETE (* empty *) +| OPTINREF ] evar_instance: [ -| REPLACE "@{" LIST1 inst SEP ";" "}" -| WITH OPT ( "@{" LIST1 inst SEP ";" "}" ) +| OPTINREF +] + +gallina: [ +| REPLACE thm_token ident_decl binders ":" lconstr LIST0 [ "with" ident_decl binders ":" lconstr ] +| WITH thm_token ident_decl binders ":" type LIST0 [ "with" ident_decl binders ":" type ] +| DELETE assumptions_token inline assum_list +| REPLACE OPT cumulativity_token private_token finite_token LIST1 inductive_definition SEP "with" +| WITH "Inductive" inductive_definition 
LIST0 ( "with" inductive_definition ) +| "CoInductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "Variant" inductive_definition LIST0 ( "with" inductive_definition ) +| [ "Record" | "Structure" ] inductive_definition LIST0 ( "with" inductive_definition ) +| "Class" inductive_definition LIST0 ( "with" inductive_definition ) +| REPLACE "Fixpoint" LIST1 rec_definition SEP "with" +| WITH "Fixpoint" rec_definition LIST0 ( "with" rec_definition ) +| REPLACE "Let" "Fixpoint" LIST1 rec_definition SEP "with" +| WITH "Let" "Fixpoint" rec_definition LIST0 ( "with" rec_definition ) +| REPLACE "CoFixpoint" LIST1 corec_definition SEP "with" +| WITH "CoFixpoint" corec_definition LIST0 ( "with" corec_definition ) +| REPLACE "Let" "CoFixpoint" LIST1 corec_definition SEP "with" +| WITH "Let" "CoFixpoint" corec_definition LIST0 ( "with" corec_definition ) +| REPLACE "Scheme" LIST1 scheme SEP "with" +| WITH "Scheme" scheme LIST0 ( "with" scheme ) +] + +DELETE: [ +| private_token +| cumulativity_token +] + +opt_coercion: [ +| OPTINREF +] + +opt_constructors_or_fields: [ +| OPTINREF +] + +SPLICE: [ +| opt_coercion +| opt_constructors_or_fields +] + +constructor_list_or_record_decl: [ +| OPTINREF +] + +record_fields: [ +| REPLACE record_field ";" record_fields +| WITH LIST1 record_field SEP ";" +| DELETE record_field | DELETE (* empty *) ] +decl_notation: [ +| REPLACE "where" LIST1 one_decl_notation SEP decl_sep +| WITH "where" one_decl_notation LIST0 ( decl_sep one_decl_notation ) +] + +assumptions_token: [ +| DELETENT +] + +inline: [ +| REPLACE "Inline" "(" natural ")" +| WITH "Inline" OPT ( "(" natural ")" ) +| DELETE "Inline" +| OPTINREF +] + univ_instance: [ -| DELETE (* empty *) +| OPTINREF +] + +univ_decl: [ +| REPLACE "@{" LIST0 identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ] +| WITH "@{" LIST0 identref OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" +] + +of_type_with_opt_coercion: [ +| DELETENT +] + 
+of_type_with_opt_coercion: [ +| [ ":" | ":>" | ":>>" ] type +] + +attribute_value: [ +| OPTINREF +] + +def_body: [ +| DELETE binders ":=" reduce lconstr +| REPLACE binders ":" lconstr ":=" reduce lconstr +| WITH LIST0 binder OPT (":" type) ":=" reduce lconstr +| REPLACE binders ":" lconstr +| WITH LIST0 binder ":" type +] + +reduce: [ +| OPTINREF +] + +occs: [ +| OPTINREF +] + +delta_flag: [ +| REPLACE "-" "[" LIST1 smart_global "]" +| WITH OPT "-" "[" LIST1 smart_global "]" +| DELETE "[" LIST1 smart_global "]" +| OPTINREF +] + +strategy_flag: [ +| REPLACE OPT delta_flag +| WITH delta_flag +| (* empty *) +| OPTINREF ] -constr: [ -| REPLACE "@" global univ_instance -| WITH "@" global OPT univ_instance +export_token: [ +| OPTINREF ] -(* todo: binders should be binders_opt *) +functor_app_annot: [ +| OPTINREF +] + +is_module_expr: [ +| OPTINREF +] +is_module_type: [ +| OPTINREF +] + +gallina_ext: [ +| REPLACE "Arguments" smart_global LIST0 argument_spec_block OPT [ "," LIST1 [ LIST0 more_implicits_block ] SEP "," ] OPT [ ":" LIST1 arguments_modifier SEP "," ] +| WITH "Arguments" smart_global LIST0 argument_spec_block LIST0 [ "," LIST0 more_implicits_block ] OPT [ ":" LIST1 arguments_modifier SEP "," ] +] (* lexer stuff *) IDENT: [ @@ -478,6 +599,8 @@ tactic_expr1: [ | EDIT match_key ADD_OPT "reverse" "goal" "with" match_context_list "end" | MOVETO ltac_match_goal match_key OPT "reverse" "goal" "with" match_context_list "end" | MOVETO ltac_match_term match_key tactic_expr5 "with" match_list "end" +| REPLACE failkw [ int_or_var | ] LIST0 message_token +| WITH failkw OPT int_or_var LIST0 message_token ] DELETE: [ @@ -544,12 +667,146 @@ bar_cbrace: [ | WITH "|}" ] +printable: [ +| REPLACE [ "Sorted" | ] "Universes" OPT printunivs_subgraph OPT ne_string +| WITH OPT "Sorted" "Universes" OPT printunivs_subgraph OPT ne_string +| INSERTALL "Print" +] + +command: [ +| REPLACE "Print" printable +| WITH printable +| "SubClass" ident_decl def_body +| REPLACE "Ltac" LIST1 
ltac_tacdef_body SEP "with" +| WITH "Ltac" ltac_tacdef_body LIST0 ( "with" ltac_tacdef_body ) +| REPLACE "Function" LIST1 function_rec_definition_loc SEP "with" (* funind plugin *) +| WITH "Function" function_rec_definition_loc LIST0 ( "with" function_rec_definition_loc ) (* funind plugin *) +| REPLACE "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) +| WITH "Functional" "Scheme" fun_scheme_arg LIST0 ( "with" fun_scheme_arg ) (* funind plugin *) + +] + +only_parsing: [ +| OPTINREF +] + +syntax: [ +| REPLACE "Infix" ne_lstring ":=" constr [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" IDENT ] +| WITH "Infix" ne_lstring ":=" constr OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" IDENT ] +| REPLACE "Notation" lstring ":=" constr [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" IDENT ] +| WITH "Notation" lstring ":=" constr OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" IDENT ] +| REPLACE "Reserved" "Infix" ne_lstring [ "(" LIST1 syntax_modifier SEP "," ")" | ] +| WITH "Reserved" "Infix" ne_lstring OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] +| REPLACE "Reserved" "Notation" ne_lstring [ "(" LIST1 syntax_modifier SEP "," ")" | ] +| WITH "Reserved" "Notation" ne_lstring OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] +] + +numnotoption: [ +| OPTINREF +] + +binder_tactic: [ +| REPLACE "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" tactic_expr5 +| WITH "let" OPT "rec" let_clause LIST0 ( "with" let_clause ) "in" tactic_expr5 +] + +tactic_then_gen: [ +| OPTINREF +] + +record_binder_body: [ +| REPLACE binders of_type_with_opt_coercion lconstr +| WITH binders of_type_with_opt_coercion +| REPLACE binders of_type_with_opt_coercion lconstr ":=" lconstr +| WITH binders of_type_with_opt_coercion ":=" lconstr +] + +simple_assum_coe: [ +| REPLACE LIST1 ident_decl of_type_with_opt_coercion lconstr +| WITH LIST1 ident_decl of_type_with_opt_coercion +] + +constructor_type: [ +| REPLACE binders [ of_type_with_opt_coercion lconstr 
| ] +| WITH binders OPT of_type_with_opt_coercion +] + (* todo: is this really correct? Search for "Pvernac.register_proof_mode" *) (* consider tactic_command vs tac2mode *) vernac_aux: [ | tactic_mode "." ] +def_token: [ +| DELETE "SubClass" (* document separately from Definition and Example *) +] + +assumption_token: [ +| REPLACE "Axiom" +| WITH [ "Axiom" | "Axioms" ] +| REPLACE "Conjecture" +| WITH [ "Conjecture" | "Conjectures" ] +| REPLACE "Hypothesis" +| WITH [ "Hypothesis" | "Hypotheses" ] +| REPLACE "Parameter" +| WITH [ "Parameter" | "Parameters" ] +| REPLACE "Variable" +| WITH [ "Variable" | "Variables" ] +] + +legacy_attrs: [ +| OPT [ "Local" | "Global" ] OPT [ "Polymorphic" | "Monomorphic" ] OPT "Program" OPT [ "Cumulative" | "NonCumulative" ] OPT "Private" +] + +all_attrs: [ +| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) OPT legacy_attrs +] + +vernacular: [ +| LIST0 ( OPT all_attrs [ command | tactic ] "." ) +] + +rec_definition: [ +| REPLACE ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation +| WITH ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation +] + +corec_definition: [ +| REPLACE ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation +| WITH ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation +] + +type_cstr: [ +| REPLACE ":" lconstr +| WITH ":" type +| OPTINREF +] + +decl_notation: [ +| OPTINREF +] + +inductive_definition: [ +| REPLACE OPT ">" ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] OPT ( ":=" OPT constructor_list_or_record_decl ) OPT decl_notation +| WITH OPT ">" ident_decl binders OPT [ "|" binders ] OPT [ ":" type ] OPT ( ":=" OPT constructor_list_or_record_decl ) OPT decl_notation +] + +constructor_list_or_record_decl: [ +| DELETE "|" LIST1 constructor SEP "|" +| REPLACE identref constructor_type "|" LIST1 constructor SEP "|" +| WITH OPT "|" LIST1 constructor SEP "|" +| DELETE identref constructor_type +| REPLACE identref "{" record_fields "}" +| WITH OPT 
identref "{" record_fields "}" +| DELETE "{" record_fields "}" +] + +record_binder: [ +| REPLACE name record_binder_body +| WITH name OPT record_binder_body +| DELETE name +] + SPLICE: [ | noedit_mode | command_entry @@ -642,8 +899,6 @@ SPLICE: [ | tactic | uconstr | impl_ident_head -| argument_spec -| at_level | branches | check_module_type | decorated_vernac @@ -666,7 +921,27 @@ SPLICE: [ | evar_instance | fix_decls | cofix_decls -] +| assum_list +| assum_coe +| inline +| occs +| univ_name_list +| ltac_info +| field_mods +| ltac_production_sep +| ltac_tactic_level +| printunivs_subgraph +| ring_mods +| scope_delimiter +| eliminator (* todo: splice or not? *) +| quoted_attributes (* todo: splice or not? *) +| printable +| only_parsing +| def_token +| record_fields +| constructor_type +| record_binder +] (* end SPLICE *) RENAME: [ | clause clause_dft_concl @@ -711,6 +986,14 @@ RENAME: [ | corec_definition cofix_definition | inst evar_binding | univ_instance univ_annot +| simple_assum_coe assumpt +| of_type_with_opt_coercion of_type +| decl_notation decl_notations +| one_decl_notation decl_notation +| attribute attr +| attribute_value attr_value +| constructor_list_or_record_decl constructors_or_record +| record_binder_body field_body ] diff --git a/doc/tools/docgram/doc_grammar.ml b/doc/tools/docgram/doc_grammar.ml index b50c427742..5fcb56f5f2 100644 --- a/doc/tools/docgram/doc_grammar.ml +++ b/doc/tools/docgram/doc_grammar.ml @@ -693,6 +693,58 @@ let create_edit_map g op edits = let remove_Sedit2 p = List.filter (fun sym -> match sym with | Sedit2 _ -> false | _ -> true) p +(* don't deal with Sedit, Sedit2 yet (ever?) *) +let rec pmatch fullprod fullpat repl = + let map_prod prod = List.concat (List.map (fun s -> pmatch [s] fullpat repl) prod) in + let pmatch_wrap sym = + let r = pmatch [sym] fullpat repl in + match r with + | a :: b :: tl -> Sparen r + | [a] -> a + | x -> error "pmatch: should not happen"; Sterm "??" 
+ in + + let symmatch_r s = + let res = match s with + | Slist1 sym -> Slist1 (pmatch_wrap sym) + | Slist1sep (sym, sep) -> Slist1sep (pmatch_wrap sym, sep) + | Slist0 sym -> Slist0 (pmatch_wrap sym) + | Slist0sep (sym, sep) -> Slist0sep (pmatch_wrap sym, sep) + | Sopt sym -> Sopt (pmatch_wrap sym) + | Sparen prod -> Sparen (map_prod prod) + | Sprod prods -> Sprod (List.map map_prod prods) + | sym -> sym + in +(* Printf.printf "symmatch of %s gives %s\n" (prod_to_str [s]) (prod_to_str [res]);*) + res + in + + let rec pmatch_r prod pat match_start start_res res = +(* Printf.printf "pmatch_r: prod = %s; pat = %s; res = %s\n" (prod_to_str prod) (prod_to_str pat) (prod_to_str res);*) + match prod, pat with + | _, [] -> + let new_res = (List.rev repl) @ res in + pmatch_r prod fullpat prod new_res new_res (* subst and continue *) + | [], _ -> (List.rev ((List.rev match_start) @ res)) (* leftover partial match *) + | hdprod :: tlprod, hdpat :: tlpat -> + if hdprod = hdpat then + pmatch_r tlprod tlpat match_start start_res res + else + (* match from the next starting position *) + match match_start with + | hd :: tl -> + let new_res = (symmatch_r hd) :: start_res in + pmatch_r tl fullpat tl new_res new_res + | [] -> List.rev res (* done *) + in + pmatch_r fullprod fullpat fullprod [] [] + +(* global replace of production substrings, rhs only *) +let global_repl g pat repl = + List.iter (fun nt -> + g_update_prods g nt (List.map (fun prod -> pmatch prod pat repl) (NTMap.find nt !g.map)) + ) !g.order + (* edit a production: rename nonterminals, drop nonterminals, substitute nonterminals *) let rec edit_prod g top edit_map prod = let edit_nt edit_map sym0 nt = @@ -1112,8 +1164,15 @@ let apply_edit_file g edits = with Not_found -> prods) in aux tl prods' add_nt + | (Snterm "OPTINREF" :: _) :: tl -> + if not (List.mem [] prods) then + error "OPTINREF but no empty production for %s\n" nt; + global_repl g [(Snterm nt)] [(Sopt (Snterm nt))]; + aux tl (remove_prod [] prods nt) 
add_nt + | (Snterm "INSERTALL" :: syms) :: tl -> + aux tl (List.map (fun p -> syms @ p) prods) add_nt | (Snterm "PRINT" :: _) :: tl -> - pr_prods nt (NTMap.find nt !g.map); + pr_prods nt prods; aux tl prods add_nt | (Snterm "EDIT" :: oprod) :: tl -> aux tl (edit_single_prod g oprod prods nt) add_nt @@ -1593,6 +1652,7 @@ type seen = { nts: (string * int) NTMap.t; tacs: (string * int) NTMap.t; cmds: (string * int) NTMap.t; + cmdvs: (string * int) NTMap.t; } let process_rst g file args seen tac_prods cmd_prods = @@ -1640,9 +1700,9 @@ let process_rst g file args seen tac_prods cmd_prods = let start_index = index_of start !g.order in let end_index = index_of end_ !g.order in if start_index = None then - error "%s line %d: '%s' is undefined\n" file !linenum start; + error "%s line %d: '%s' is undefined in insertprodn\n" file !linenum start; if end_index = None then - error "%s line %d: '%s' is undefined\n" file !linenum end_; + error "%s line %d: '%s' is undefined in insertprodn\n" file !linenum end_; if start_index <> None && end_index <> None then check_range_consistency g start end_; match start_index, end_index with @@ -1697,12 +1757,17 @@ let process_rst g file args seen tac_prods cmd_prods = seen := { !seen with tacs = (NTMap.add rhs (file, !linenum) !seen.tacs)}; fprintf new_rst "%s\n" line | "cmd::" when args.check_cmds -> +(* if not (StringSet.mem rhs cmd_prods) then warn "%s line %d: Unknown command: '%s'\n" file !linenum rhs; if NTMap.mem rhs !seen.cmds then warn "%s line %d: Repeated command: '%s'\n" file !linenum rhs; +*) seen := { !seen with cmds = (NTMap.add rhs (file, !linenum) !seen.cmds)}; fprintf new_rst "%s\n" line + | "cmdv::" when args.check_cmds -> + seen := { !seen with cmdvs = (NTMap.add rhs (file, !linenum) !seen.cmdvs)}; + fprintf new_rst "%s\n" line | "insertprodn" -> process_insertprodn line rhs | _ -> fprintf new_rst "%s\n" line @@ -1818,15 +1883,43 @@ let process_grammar args = list, StringSet.of_list list in let tac_list, tac_prods = plist 
"simple_tactic" in let cmd_list, cmd_prods = plist "command" in - let seen = ref { nts=NTMap.empty; tacs=NTMap.empty; cmds=NTMap.empty } in + let seen = ref { nts=NTMap.empty; tacs=NTMap.empty; cmds=NTMap.empty; cmdvs=NTMap.empty } in List.iter (fun file -> process_rst g file args seen tac_prods cmd_prods) args.rst_files; report_omitted_prods !g.order !seen.nts "Nonterminal" ""; let out = open_out (dir "updated_rsts") in close_out out; +(* if args.check_tacs then report_omitted_prods tac_list !seen.tacs "Tactic" "\n "; if args.check_cmds then - report_omitted_prods cmd_list !seen.cmds "Command" "\n " + report_omitted_prods cmd_list !seen.cmds "Command" "\n "; +*) + + let rstCmds = StringSet.of_list (List.map (fun b -> let c, _ = b in c) (NTMap.bindings !seen.cmds)) in + let rstCmdvs = StringSet.of_list (List.map (fun b -> let c, _ = b in c) (NTMap.bindings !seen.cmdvs)) in + let command_nts = ["command"; "gallina"; "gallina_ext"; "query_command"; "syntax"] in + (* TODO: need to handle tactic_mode (overlaps with query_command) and subprf *) + let gramCmds = List.fold_left (fun set nt -> + StringSet.union set (StringSet.of_list (List.map (fun p -> String.trim (prod_to_prodn p)) (NTMap.find nt !prodn_gram.map))) + ) StringSet.empty command_nts in + + let allCmds = StringSet.union rstCmdvs (StringSet.union rstCmds gramCmds) in + let out = open_out_bin (dir "prodnCommands") in + StringSet.iter (fun c -> + let rsts = StringSet.mem c rstCmds in + let gram = StringSet.mem c gramCmds in + let pfx = match rsts, gram with + | true, false -> "+" + | false, true -> "-" + | false, false -> "?" + | _, _ -> " " + in + let var = if StringSet.mem c rstCmdvs then "v" else " " in + fprintf out "%s%s %s\n" pfx var c) + allCmds; + close_out out; + Printf.printf "# cmds in rsts, gram, total = %d %d %d\n" (StringSet.cardinal gramCmds) + (StringSet.cardinal rstCmds) (StringSet.cardinal allCmds); end; (* generate output for prodn: simple_tactic, command, also for Ltac?? 
*) diff --git a/doc/tools/docgram/fullGrammar b/doc/tools/docgram/fullGrammar index e12589bb89..529d81e424 100644 --- a/doc/tools/docgram/fullGrammar +++ b/doc/tools/docgram/fullGrammar @@ -322,8 +322,13 @@ closed_binder: [ | "{" name LIST1 name ":" lconstr "}" | "{" name ":" lconstr "}" | "{" name LIST1 name "}" +| "[" name "]" +| "[" name LIST1 name ":" lconstr "]" +| "[" name ":" lconstr "]" +| "[" name LIST1 name "]" | "`(" LIST1 typeclass_constraint SEP "," ")" | "`{" LIST1 typeclass_constraint SEP "," "}" +| "`[" LIST1 typeclass_constraint SEP "," "]" | "'" pattern0 ] @@ -643,6 +648,7 @@ command: [ | "Show" "Ltac" "Profile" | "Show" "Ltac" "Profile" "CutOff" int | "Show" "Ltac" "Profile" string +| "Show" "Lia" "Profile" (* micromega plugin *) | "Add" "InjTyp" constr (* micromega plugin *) | "Add" "BinOp" constr (* micromega plugin *) | "Add" "UnOp" constr (* micromega plugin *) @@ -937,12 +943,12 @@ opt_constructors_or_fields: [ ] inductive_definition: [ -| opt_coercion ident_decl binders OPT [ ":" lconstr ] opt_constructors_or_fields decl_notation +| opt_coercion ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notation ] constructor_list_or_record_decl: [ | "|" LIST1 constructor SEP "|" -| identref constructor_type "|" LIST0 constructor SEP "|" +| identref constructor_type "|" LIST1 constructor SEP "|" | identref constructor_type | identref "{" record_fields "}" | "{" record_fields "}" @@ -1270,7 +1276,7 @@ printable: [ | "Instances" smart_global | "Coercions" | "Coercion" "Paths" class_rawexpr class_rawexpr -| "Canonical" "Projections" +| "Canonical" "Projections" LIST0 smart_global | "Typing" "Flags" | "Tables" | "Options" @@ -1400,8 +1406,7 @@ syntax_modifier: [ | "only" "parsing" | "format" STRING OPT STRING | IDENT; "," LIST1 IDENT SEP "," "at" level -| IDENT; "at" level -| IDENT; "at" level constr_as_binder_kind +| IDENT; "at" level OPT constr_as_binder_kind | IDENT constr_as_binder_kind | IDENT 
syntax_extension_type ] @@ -1412,17 +1417,18 @@ syntax_extension_type: [ | "bigint" | "binder" | "constr" -| "constr" OPT at_level OPT constr_as_binder_kind +| "constr" at_level_opt OPT constr_as_binder_kind | "pattern" | "pattern" "at" "level" natural | "strict" "pattern" | "strict" "pattern" "at" "level" natural | "closed" "binder" -| "custom" IDENT OPT at_level OPT constr_as_binder_kind +| "custom" IDENT at_level_opt OPT constr_as_binder_kind ] -at_level: [ +at_level_opt: [ | "at" level +| ] constr_as_binder_kind: [ @@ -1719,7 +1725,6 @@ simple_tactic: [ | "restart_timer" OPT string | "finish_timing" OPT string | "finish_timing" "(" string ")" OPT string -| "myred" (* micromega plugin *) | "psatz_Z" int_or_var tactic (* micromega plugin *) | "psatz_Z" tactic (* micromega plugin *) | "xlia" tactic (* micromega plugin *) @@ -1735,13 +1740,12 @@ simple_tactic: [ | "psatz_R" tactic (* micromega plugin *) | "psatz_Q" int_or_var tactic (* micromega plugin *) | "psatz_Q" tactic (* micromega plugin *) -| "iter_specs" tactic (* micromega plugin *) +| "zify_iter_specs" tactic (* micromega plugin *) | "zify_op" (* micromega plugin *) -| "saturate" (* micromega plugin *) +| "zify_saturate" (* micromega plugin *) +| "zify_iter_let" tactic (* micromega plugin *) | "nsatz_compute" constr (* nsatz plugin *) | "omega" (* omega plugin *) -| "omega" "with" LIST1 ident (* omega plugin *) -| "omega" "with" "*" (* omega plugin *) | "rtauto" | "protect_fv" string "in" ident (* setoid_ring plugin *) | "protect_fv" string (* setoid_ring plugin *) diff --git a/doc/tools/docgram/orderedGrammar b/doc/tools/docgram/orderedGrammar index 63e0ca129c..908e3ccd51 100644 --- a/doc/tools/docgram/orderedGrammar +++ b/doc/tools/docgram/orderedGrammar @@ -29,7 +29,7 @@ vernac_control: [ | "Redirect" string vernac_control | "Timeout" num vernac_control | "Fail" vernac_control -| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) vernac +| LIST0 ( "#[" LIST0 attr SEP "," "]" ) vernac ] term: [ @@ -102,50 
+102,24 @@ dangling_pattern_extension_rule: [ | "@" "?" ident LIST1 ident ] -record_fields: [ -| record_field ";" record_fields -| record_field -| -] - -record_field: [ -| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) record_binder OPT [ "|" num ] decl_notation -] - -decl_notation: [ -| "where" LIST1 one_decl_notation SEP "and" -| -] - -one_decl_notation: [ -| string ":=" term1_extended OPT [ ":" ident ] +assumption_token: [ +| [ "Axiom" | "Axioms" ] +| [ "Conjecture" | "Conjectures" ] +| [ "Parameter" | "Parameters" ] +| [ "Hypothesis" | "Hypotheses" ] +| [ "Variable" | "Variables" ] ] -record_binder: [ -| name -| name record_binder_body +assumpt: [ +| LIST1 ident_decl of_type ] -record_binder_body: [ -| LIST0 binder of_type_with_opt_coercion term -| LIST0 binder of_type_with_opt_coercion term ":=" term -| LIST0 binder ":=" term -] - -of_type_with_opt_coercion: [ -| ":>>" -| ":>" -| ":" +ident_decl: [ +| ident OPT univ_decl ] -attribute: [ -| ident attribute_value -] - -attribute_value: [ -| "=" string -| "(" LIST0 attribute SEP "," ")" -| +of_type: [ +| [ ":" | ":>" | ":>>" ] type ] qualid: [ @@ -156,6 +130,10 @@ field_ident: [ | "." ident ] +type: [ +| term +] + numeral: [ | LIST1 digit OPT ( "." LIST1 digit ) OPT [ [ "e" | "E" ] OPT [ "+" | "-" ] LIST1 digit ] ] @@ -184,6 +162,27 @@ subsequent_letter: [ | [ first_letter | digit | "'" | unicode_id_part ] ] +vernacular: [ +| LIST0 ( OPT all_attrs [ command | ltac_expr ] "." 
) +] + +all_attrs: [ +| LIST0 ( "#[" LIST0 attr SEP "," "]" ) OPT legacy_attrs +] + +attr: [ +| ident OPT attr_value +] + +attr_value: [ +| "=" string +| "(" LIST0 attr SEP "," ")" +] + +legacy_attrs: [ +| OPT [ "Local" | "Global" ] OPT [ "Polymorphic" | "Monomorphic" ] OPT "Program" OPT [ "Cumulative" | "NonCumulative" ] OPT "Private" +] + sort: [ | "Set" | "Prop" @@ -208,6 +207,10 @@ universe_name: [ | "Prop" ] +univ_annot: [ +| "@{" LIST0 universe_level "}" +] + universe_level: [ | "Set" | "Prop" @@ -216,8 +219,12 @@ universe_level: [ | qualid ] -univ_annot: [ -| "@{" LIST0 universe_level "}" +univ_decl: [ +| "@{" LIST0 ident OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" +] + +univ_constraint: [ +| universe_name [ "<" | "=" | "<=" ] universe_name ] term_fix: [ @@ -226,7 +233,7 @@ term_fix: [ ] fix_body: [ -| ident LIST0 binder OPT fixannot OPT ( ":" term ) ":=" term +| ident LIST0 binder OPT fixannot OPT ( ":" type ) ":=" term ] fixannot: [ @@ -246,12 +253,12 @@ term_cofix: [ ] cofix_body: [ -| ident LIST0 binder OPT ( ":" term ) ":=" term +| ident LIST0 binder OPT ( ":" type ) ":=" term ] term_let: [ -| "let" name OPT ( ":" term ) ":=" term "in" term -| "let" name LIST1 binder OPT ( ":" term ) ":=" term "in" term +| "let" name OPT ( ":" type ) ":=" term "in" term +| "let" name LIST1 binder OPT ( ":" type ) ":=" term "in" term | "let" "(" LIST0 name SEP "," ")" OPT [ OPT [ "as" name ] "return" term100 ] ":=" term "in" term | "let" "'" pattern ":=" term OPT ( "return" term100 ) "in" term | "let" "'" pattern "in" pattern ":=" term "return" term100 "in" term @@ -269,13 +276,15 @@ name: [ binder: [ | name -| "(" LIST1 name ":" term ")" -| "(" name OPT ( ":" term ) ":=" term ")" -| "{" LIST1 name OPT ( ":" term ) "}" +| "(" LIST1 name ":" type ")" +| "(" name OPT ( ":" type ) ":=" term ")" +| "(" name ":" type "|" term ")" +| "{" LIST1 name OPT ( ":" type ) "}" +| "[" LIST1 name OPT ( ":" type ) "]" | "`(" LIST1 typeclass_constraint SEP "," ")" | "`{" 
LIST1 typeclass_constraint SEP "," "}" +| "`[" LIST1 typeclass_constraint SEP "," "]" | "'" pattern0 -| "(" name ":" term "|" term ")" ] typeclass_constraint: [ @@ -359,17 +368,20 @@ subprf: [ ] gallina: [ -| thm_token ident_decl LIST0 binder ":" term LIST0 [ "with" ident_decl LIST0 binder ":" term ] -| assumption_token inline assum_list -| assumptions_token inline assum_list -| def_token ident_decl def_body +| thm_token ident_decl LIST0 binder ":" type LIST0 [ "with" ident_decl LIST0 binder ":" type ] +| assumption_token OPT ( "Inline" OPT ( "(" num ")" ) ) [ LIST1 ( "(" assumpt ")" ) | assumpt ] +| [ "Definition" | "Example" ] ident_decl def_body | "Let" ident def_body -| OPT cumulativity_token private_token finite_token LIST1 inductive_definition SEP "with" -| "Fixpoint" LIST1 fix_definition SEP "with" -| "Let" "Fixpoint" LIST1 fix_definition SEP "with" -| "CoFixpoint" LIST1 cofix_definition SEP "with" -| "Let" "CoFixpoint" LIST1 cofix_definition SEP "with" -| "Scheme" LIST1 scheme SEP "with" +| "Inductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "CoInductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "Variant" inductive_definition LIST0 ( "with" inductive_definition ) +| [ "Record" | "Structure" ] inductive_definition LIST0 ( "with" inductive_definition ) +| "Class" inductive_definition LIST0 ( "with" inductive_definition ) +| "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) +| "Let" "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) +| "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) +| "Let" "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) +| "Scheme" scheme LIST0 ( "with" scheme ) | "Combined" "Scheme" ident "from" LIST1 ident SEP "," | "Register" qualid "as" qualid | "Register" "Inline" qualid @@ -380,7 +392,15 @@ gallina: [ ] fix_definition: [ -| ident_decl LIST0 binder OPT fixannot OPT ( ":" term ) OPT [ ":=" term ] decl_notation +| ident_decl LIST0 binder OPT fixannot 
OPT ( ":" type ) OPT [ ":=" term ] OPT decl_notations +] + +decl_notations: [ +| "where" decl_notation LIST0 ( "and" decl_notation ) +] + +decl_notation: [ +| string ":=" term1_extended OPT [ ":" ident ] ] register_token: [ @@ -444,80 +464,23 @@ thm_token: [ | "Property" ] -def_token: [ -| "Definition" -| "Example" -| "SubClass" -] - -assumption_token: [ -| "Hypothesis" -| "Variable" -| "Axiom" -| "Parameter" -| "Conjecture" -] - -assumptions_token: [ -| "Hypotheses" -| "Variables" -| "Axioms" -| "Parameters" -| "Conjectures" -] - -inline: [ -| "Inline" "(" num ")" -| "Inline" -| -] - -univ_constraint: [ -| universe_name [ "<" | "=" | "<=" ] universe_name -] - -ident_decl: [ -| ident OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) -] - -finite_token: [ -| "Inductive" -| "CoInductive" -| "Variant" -| "Record" -| "Structure" -| "Class" -] - -cumulativity_token: [ -| "Cumulative" -| "NonCumulative" -] - -private_token: [ -| "Private" -| -] - def_body: [ -| LIST0 binder ":=" reduce term -| LIST0 binder ":" term ":=" reduce term -| LIST0 binder ":" term +| LIST0 binder OPT ( ":" type ) ":=" OPT reduce term +| LIST0 binder ":" type ] reduce: [ | "Eval" red_expr "in" -| ] red_expr: [ | "red" | "hnf" -| "simpl" delta_flag OPT ref_or_pattern_occ -| "cbv" strategy_flag -| "cbn" strategy_flag -| "lazy" strategy_flag -| "compute" delta_flag +| "simpl" OPT delta_flag OPT ref_or_pattern_occ +| "cbv" OPT strategy_flag +| "cbn" OPT strategy_flag +| "lazy" OPT strategy_flag +| "compute" OPT delta_flag | "vm_compute" OPT ref_or_pattern_occ | "native_compute" OPT ref_or_pattern_occ | "unfold" LIST1 unfold_occ SEP "," @@ -526,6 +489,19 @@ red_expr: [ | ident ] +delta_flag: [ +| OPT "-" "[" LIST1 smart_global "]" +] + +smart_global: [ +| qualid +| by_notation +] + +by_notation: [ +| string OPT [ "%" ident ] +] + strategy_flag: [ | LIST1 red_flags | delta_flag @@ -538,70 +514,71 @@ red_flags: [ | "fix" | "cofix" | "zeta" -| "delta" 
delta_flag +| "delta" OPT delta_flag ] -delta_flag: [ -| "-" "[" LIST1 smart_global "]" -| "[" LIST1 smart_global "]" -| +ref_or_pattern_occ: [ +| smart_global OPT ( "at" occs_nums ) +| term1_extended OPT ( "at" occs_nums ) ] -ref_or_pattern_occ: [ -| smart_global occs -| term1_extended occs +occs_nums: [ +| LIST1 num_or_var +| "-" num_or_var LIST0 int_or_var ] -unfold_occ: [ -| smart_global occs +num_or_var: [ +| num +| ident ] -opt_constructors_or_fields: [ -| ":=" constructor_list_or_record_decl -| +int_or_var: [ +| int +| ident ] -inductive_definition: [ -| opt_coercion ident_decl LIST0 binder OPT [ ":" term ] opt_constructors_or_fields decl_notation +unfold_occ: [ +| smart_global OPT ( "at" occs_nums ) ] -opt_coercion: [ -| ">" -| +pattern_occ: [ +| term1_extended OPT ( "at" occs_nums ) ] -constructor_list_or_record_decl: [ -| "|" LIST1 constructor SEP "|" -| ident constructor_type "|" LIST0 constructor SEP "|" -| ident constructor_type -| ident "{" record_fields "}" -| "{" record_fields "}" -| +finite_token: [ +| "Inductive" +| "CoInductive" +| "Variant" +| "Record" +| "Structure" +| "Class" ] -assum_list: [ -| LIST1 assum_coe -| simple_assum_coe +inductive_definition: [ +| OPT ">" ident_decl LIST0 binder OPT [ "|" LIST0 binder ] OPT [ ":" type ] OPT ( ":=" OPT constructors_or_record ) OPT decl_notations ] -assum_coe: [ -| "(" simple_assum_coe ")" +constructors_or_record: [ +| OPT "|" LIST1 constructor SEP "|" +| OPT ident "{" LIST1 record_field SEP ";" "}" ] -simple_assum_coe: [ -| LIST1 ident_decl of_type_with_opt_coercion term +constructor: [ +| ident LIST0 binder OPT of_type ] -constructor_type: [ -| LIST0 binder [ of_type_with_opt_coercion term | ] +record_field: [ +| LIST0 ( "#[" LIST0 attr SEP "," "]" ) name OPT field_body OPT [ "|" num ] OPT decl_notations ] -constructor: [ -| ident constructor_type +field_body: [ +| LIST0 binder of_type +| LIST0 binder of_type ":=" term +| LIST0 binder ":=" term ] cofix_definition: [ -| ident_decl LIST0 binder OPT ( 
":" term ) OPT [ ":=" term ] decl_notation +| ident_decl LIST0 binder OPT ( ":" type ) OPT [ ":=" term ] OPT decl_notations ] scheme: [ @@ -624,25 +601,16 @@ sort_family: [ | "Type" ] -smart_global: [ -| qualid -| by_notation -] - -by_notation: [ -| string OPT [ "%" ident ] -] - gallina_ext: [ -| "Module" export_token ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) of_module_type is_module_expr -| "Module" "Type" ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) LIST0 ( "<:" module_type_inl ) is_module_type -| "Declare" "Module" export_token ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) ":" module_type_inl +| "Module" OPT export_token ident LIST0 module_binder of_module_type OPT is_module_expr +| "Module" "Type" ident LIST0 module_binder LIST0 ( "<:" module_type_inl ) OPT is_module_type +| "Declare" "Module" OPT export_token ident LIST0 module_binder ":" module_type_inl | "Section" ident | "Chapter" ident | "End" ident | "Collection" ident ":=" section_subset_expr -| "Require" export_token LIST1 qualid -| "From" qualid "Require" export_token LIST1 qualid +| "Require" OPT export_token LIST1 qualid +| "From" qualid "Require" OPT export_token LIST1 qualid | "Import" LIST1 qualid | "Export" LIST1 qualid | "Include" module_type_inl LIST0 ( "<+" module_expr_inl ) @@ -650,9 +618,9 @@ gallina_ext: [ | "Transparent" LIST1 smart_global | "Opaque" LIST1 smart_global | "Strategy" LIST1 [ strategy_level "[" LIST1 smart_global "]" ] -| "Canonical" OPT "Structure" qualid OPT [ OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) def_body ] +| "Canonical" OPT "Structure" qualid OPT [ OPT univ_decl def_body ] | "Canonical" OPT "Structure" by_notation -| "Coercion" qualid OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) def_body +| "Coercion" qualid OPT univ_decl def_body | "Identity" "Coercion" ident ":" 
class_rawexpr ">->" class_rawexpr | "Coercion" qualid ":" class_rawexpr ">->" class_rawexpr | "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr @@ -661,7 +629,7 @@ gallina_ext: [ | "Existing" "Instance" qualid hint_info | "Existing" "Instances" LIST1 qualid OPT [ "|" num ] | "Existing" "Class" qualid -| "Arguments" smart_global LIST0 argument_spec_block OPT [ "," LIST1 [ LIST0 more_implicits_block ] SEP "," ] OPT [ ":" LIST1 arguments_modifier SEP "," ] +| "Arguments" smart_global LIST0 argument_spec_block LIST0 [ "," LIST0 more_implicits_block ] OPT [ ":" LIST1 arguments_modifier SEP "," ] | "Implicit" "Type" reserv_list | "Implicit" "Types" reserv_list | "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 ident ] @@ -689,43 +657,41 @@ hint_info: [ export_token: [ | "Import" | "Export" -| ] -of_module_type: [ -| ":" module_type_inl -| LIST0 ( "<:" module_type_inl ) +module_binder: [ +| "(" OPT export_token LIST1 ident ":" module_type_inl ")" ] -is_module_type: [ -| ":=" module_type_inl LIST0 ( "<+" module_type_inl ) -| +module_type_inl: [ +| "!" module_type +| module_type OPT functor_app_annot ] -is_module_expr: [ -| ":=" module_expr_inl LIST0 ( "<+" module_expr_inl ) -| +module_type: [ +| qualid +| "(" module_type ")" +| module_type module_expr_atom +| module_type "with" with_declaration +] + +with_declaration: [ +| "Definition" qualid OPT univ_decl ":=" term +| "Module" qualid ":=" qualid ] functor_app_annot: [ | "[" "inline" "at" "level" num "]" | "[" "no" "inline" "]" -| -] - -module_expr_inl: [ -| "!" module_expr -| module_expr functor_app_annot ] -module_type_inl: [ -| "!" 
module_type -| module_type functor_app_annot +of_module_type: [ +| ":" module_type_inl +| LIST0 ( "<:" module_type_inl ) ] -module_expr: [ -| module_expr_atom -| module_expr module_expr_atom +is_module_type: [ +| ":=" module_type_inl LIST0 ( "<+" module_type_inl ) ] module_expr_atom: [ @@ -733,25 +699,31 @@ module_expr_atom: [ | "(" module_expr ")" ] -module_type: [ -| qualid -| "(" module_type ")" -| module_type module_expr_atom -| module_type "with" with_declaration +module_expr: [ +| module_expr_atom +| module_expr module_expr_atom ] -with_declaration: [ -| "Definition" qualid OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) ":=" term -| "Module" qualid ":=" qualid +is_module_expr: [ +| ":=" module_expr_inl LIST0 ( "<+" module_expr_inl ) +] + +module_expr_inl: [ +| "!" module_expr +| module_expr OPT functor_app_annot ] argument_spec_block: [ -| OPT "!" name OPT ( "%" ident ) +| argument_spec | "/" | "&" -| "(" LIST1 ( OPT "!" name OPT ( "%" ident ) ) ")" OPT ( "%" ident ) -| "[" LIST1 ( OPT "!" name OPT ( "%" ident ) ) "]" OPT ( "%" ident ) -| "{" LIST1 ( OPT "!" name OPT ( "%" ident ) ) "}" OPT ( "%" ident ) +| "(" LIST1 argument_spec ")" OPT ( "%" ident ) +| "[" LIST1 argument_spec "]" OPT ( "%" ident ) +| "{" LIST1 argument_spec "}" OPT ( "%" ident ) +] + +argument_spec: [ +| OPT "!" 
name OPT ( "%" ident ) ] more_implicits_block: [ @@ -760,6 +732,20 @@ more_implicits_block: [ | "{" LIST1 name "}" ] +arguments_modifier: [ +| "simpl" "nomatch" +| "simpl" "never" +| "default" "implicits" +| "clear" "bidirectionality" "hint" +| "clear" "implicits" +| "clear" "scopes" +| "clear" "scopes" "and" "implicits" +| "clear" "implicits" "and" "scopes" +| "rename" +| "assert" +| "extra" "scopes" +] + strategy_level: [ | "expand" | "opaque" @@ -785,20 +771,6 @@ simple_reserv: [ | LIST1 ident ":" term ] -arguments_modifier: [ -| "simpl" "nomatch" -| "simpl" "never" -| "default" "implicits" -| "clear" "implicits" -| "clear" "scopes" -| "clear" "bidirectionality" "hint" -| "rename" -| "assert" -| "extra" "scopes" -| "clear" "scopes" "and" "implicits" -| "clear" "implicits" "and" "scopes" -] - command: [ | "Goal" term | "Declare" "Scope" ident @@ -812,7 +784,43 @@ command: [ | "Add" "Rec" "LoadPath" string as_dirpath | "Remove" "LoadPath" string | "Type" term -| "Print" printable +| "Print" "Term" smart_global OPT ( "@{" LIST0 name "}" ) +| "Print" "All" +| "Print" "Section" qualid +| "Print" "Grammar" ident +| "Print" "Custom" "Grammar" ident +| "Print" "LoadPath" OPT dirpath +| "Print" "Modules" +| "Print" "Libraries" +| "Print" "ML" "Path" +| "Print" "ML" "Modules" +| "Print" "Debug" "GC" +| "Print" "Graph" +| "Print" "Classes" +| "Print" "TypeClasses" +| "Print" "Instances" smart_global +| "Print" "Coercions" +| "Print" "Coercion" "Paths" class_rawexpr class_rawexpr +| "Print" "Canonical" "Projections" LIST0 smart_global +| "Print" "Typing" "Flags" +| "Print" "Tables" +| "Print" "Options" +| "Print" "Hint" +| "Print" "Hint" smart_global +| "Print" "Hint" "*" +| "Print" "HintDb" ident +| "Print" "Scopes" +| "Print" "Scope" ident +| "Print" "Visibility" OPT ident +| "Print" "Implicit" smart_global +| "Print" OPT "Sorted" "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string +| "Print" "Assumptions" smart_global +| "Print" "Opaque" "Dependencies" 
smart_global +| "Print" "Transparent" "Dependencies" smart_global +| "Print" "All" "Dependencies" smart_global +| "Print" "Strategy" smart_global +| "Print" "Strategies" +| "Print" "Registered" | "Print" smart_global OPT ( "@{" LIST0 name "}" ) | "Print" "Module" "Type" qualid | "Print" "Module" qualid @@ -931,6 +939,7 @@ command: [ | "Show" "Ltac" "Profile" | "Show" "Ltac" "Profile" "CutOff" int | "Show" "Ltac" "Profile" string +| "Show" "Lia" "Profile" (* micromega plugin *) | "Add" "InjTyp" term1_extended (* micromega plugin *) | "Add" "BinOp" term1_extended (* micromega plugin *) | "Add" "UnOp" term1_extended (* micromega plugin *) @@ -959,12 +968,12 @@ command: [ | "Print" "Rewrite" "HintDb" ident | "Print" "Ltac" qualid | "Locate" "Ltac" qualid -| "Ltac" LIST1 tacdef_body SEP "with" +| "Ltac" tacdef_body LIST0 ( "with" tacdef_body ) | "Print" "Ltac" "Signatures" | "Set" "Firstorder" "Solver" ltac_expr | "Print" "Firstorder" "Solver" -| "Function" LIST1 fix_definition SEP "with" (* funind plugin *) -| "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) +| "Function" fix_definition LIST0 ( "with" fix_definition ) +| "Functional" "Scheme" fun_scheme_arg LIST0 ( "with" fun_scheme_arg ) | "Extraction" qualid (* extraction plugin *) | "Recursive" "Extraction" LIST1 qualid (* extraction plugin *) | "Extraction" string LIST1 qualid (* extraction plugin *) @@ -1002,8 +1011,9 @@ command: [ | "Print" "Rings" (* setoid_ring plugin *) | "Add" "Field" ident ":" term1_extended OPT ( "(" LIST1 field_mod SEP "," ")" ) (* setoid_ring plugin *) | "Print" "Fields" (* setoid_ring plugin *) -| "Numeral" "Notation" qualid qualid qualid ":" ident numnotoption +| "Numeral" "Notation" qualid qualid qualid ":" ident OPT numnotoption | "String" "Notation" qualid qualid qualid ":" ident +| "SubClass" ident_decl def_body ] orient: [ @@ -1043,46 +1053,6 @@ starredidentref: [ | "Type" "*" ] -printable: [ -| "Term" smart_global OPT ( "@{" LIST0 name "}" ) -| "All" -| 
"Section" qualid -| "Grammar" ident -| "Custom" "Grammar" ident -| "LoadPath" OPT dirpath -| "Modules" -| "Libraries" -| "ML" "Path" -| "ML" "Modules" -| "Debug" "GC" -| "Graph" -| "Classes" -| "TypeClasses" -| "Instances" smart_global -| "Coercions" -| "Coercion" "Paths" class_rawexpr class_rawexpr -| "Canonical" "Projections" -| "Typing" "Flags" -| "Tables" -| "Options" -| "Hint" -| "Hint" smart_global -| "Hint" "*" -| "HintDb" ident -| "Scopes" -| "Scope" ident -| "Visibility" OPT ident -| "Implicit" smart_global -| [ "Sorted" | ] "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string -| "Assumptions" smart_global -| "Opaque" "Dependencies" smart_global -| "Transparent" "Dependencies" smart_global -| "All" "Dependencies" smart_global -| "Strategy" smart_global -| "Strategies" -| "Registered" -] - dirpath: [ | ident | dirpath field_ident @@ -1160,7 +1130,6 @@ ltac_production_item: [ ] numnotoption: [ -| | "(" "warning" "after" num ")" | "(" "abstract" "after" num ")" ] @@ -1288,17 +1257,12 @@ syntax: [ | "Delimit" "Scope" ident "with" ident | "Undelimit" "Scope" ident | "Bind" "Scope" ident "with" LIST1 class_rawexpr -| "Infix" string ":=" term1_extended [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" ident ] -| "Notation" ident LIST0 ident ":=" term1_extended only_parsing -| "Notation" string ":=" term1_extended [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" ident ] +| "Infix" string ":=" term1_extended OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ] +| "Notation" ident LIST0 ident ":=" term1_extended OPT ( "(" "only" "parsing" ")" ) +| "Notation" string ":=" term1_extended OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ] | "Format" "Notation" string string string -| "Reserved" "Infix" string [ "(" LIST1 syntax_modifier SEP "," ")" | ] -| "Reserved" "Notation" string [ "(" LIST1 syntax_modifier SEP "," ")" | ] -] - -only_parsing: [ -| "(" "only" "parsing" ")" -| +| "Reserved" "Infix" string OPT [ "(" LIST1 
syntax_modifier SEP "," ")" ] +| "Reserved" "Notation" string OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] ] level: [ @@ -1317,31 +1281,35 @@ syntax_modifier: [ | "only" "parsing" | "format" string OPT string | ident "," LIST1 ident SEP "," "at" level -| ident "at" level -| ident "at" level constr_as_binder_kind +| ident "at" level OPT constr_as_binder_kind | ident constr_as_binder_kind | ident syntax_extension_type ] +constr_as_binder_kind: [ +| "as" "ident" +| "as" "pattern" +| "as" "strict" "pattern" +] + syntax_extension_type: [ | "ident" | "global" | "bigint" | "binder" | "constr" -| "constr" OPT ( "at" level ) OPT constr_as_binder_kind +| "constr" at_level_opt OPT constr_as_binder_kind | "pattern" | "pattern" "at" "level" num | "strict" "pattern" | "strict" "pattern" "at" "level" num | "closed" "binder" -| "custom" ident OPT ( "at" level ) OPT constr_as_binder_kind +| "custom" ident at_level_opt OPT constr_as_binder_kind ] -constr_as_binder_kind: [ -| "as" "ident" -| "as" "pattern" -| "as" "strict" "pattern" +at_level_opt: [ +| "at" level +| ] simple_tactic: [ @@ -1591,7 +1559,7 @@ simple_tactic: [ | "eenough" term1_extended as_ipat by_tactic | "generalize" term1_extended | "generalize" term1_extended LIST1 term1_extended -| "generalize" term1_extended occs as_name LIST0 [ "," pattern_occ as_name ] +| "generalize" term1_extended OPT ( "at" occs_nums ) as_name LIST0 [ "," pattern_occ as_name ] | "induction" induction_clause_list | "einduction" induction_clause_list | "destruct" induction_clause_list @@ -1605,11 +1573,11 @@ simple_tactic: [ | "inversion" quantified_hypothesis "using" term1_extended in_hyp_list | "red" clause_dft_concl | "hnf" clause_dft_concl -| "simpl" delta_flag OPT ref_or_pattern_occ clause_dft_concl -| "cbv" strategy_flag clause_dft_concl -| "cbn" strategy_flag clause_dft_concl -| "lazy" strategy_flag clause_dft_concl -| "compute" delta_flag clause_dft_concl +| "simpl" OPT delta_flag OPT ref_or_pattern_occ clause_dft_concl +| "cbv" OPT 
strategy_flag clause_dft_concl +| "cbn" OPT strategy_flag clause_dft_concl +| "lazy" OPT strategy_flag clause_dft_concl +| "compute" OPT delta_flag clause_dft_concl | "vm_compute" OPT ref_or_pattern_occ clause_dft_concl | "native_compute" OPT ref_or_pattern_occ clause_dft_concl | "unfold" LIST1 unfold_occ SEP "," clause_dft_concl @@ -1631,7 +1599,6 @@ simple_tactic: [ | "functional" "inversion" quantified_hypothesis OPT qualid (* funind plugin *) | "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *) | "soft" "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *) -| "myred" (* micromega plugin *) | "psatz_Z" int_or_var ltac_expr (* micromega plugin *) | "psatz_Z" ltac_expr (* micromega plugin *) | "xlia" ltac_expr (* micromega plugin *) @@ -1647,24 +1614,18 @@ simple_tactic: [ | "psatz_R" ltac_expr (* micromega plugin *) | "psatz_Q" int_or_var ltac_expr (* micromega plugin *) | "psatz_Q" ltac_expr (* micromega plugin *) -| "iter_specs" ltac_expr (* micromega plugin *) +| "zify_iter_specs" ltac_expr (* micromega plugin *) | "zify_op" (* micromega plugin *) -| "saturate" (* micromega plugin *) +| "zify_saturate" (* micromega plugin *) +| "zify_iter_let" ltac_expr (* micromega plugin *) | "nsatz_compute" term1_extended (* nsatz plugin *) | "omega" (* omega plugin *) -| "omega" "with" LIST1 ident (* omega plugin *) -| "omega" "with" "*" (* omega plugin *) | "protect_fv" string "in" ident (* setoid_ring plugin *) | "protect_fv" string (* setoid_ring plugin *) | "ring_lookup" ltac_expr0 "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *) | "field_lookup" ltac_expr "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *) ] -int_or_var: [ -| int -| ident -] - hloc: [ | | "in" "|-" "*" @@ -1686,17 +1647,12 @@ by_arg_tac: [ in_clause: [ | in_clause -| "*" occs +| "*" OPT ( "at" occs_nums ) | "*" "|-" concl_occ | LIST0 hypident_occ SEP "," "|-" concl_occ | LIST0 
hypident_occ SEP "," ] -occs: [ -| "at" occs_nums -| -] - as_ipat: [ | "as" simple_intropattern | @@ -1809,10 +1765,6 @@ bindings: [ | LIST1 term1_extended ] -pattern_occ: [ -| term1_extended occs -] - comparison: [ | "=" | "<" @@ -1838,12 +1790,12 @@ hypident: [ ] hypident_occ: [ -| hypident occs +| hypident OPT ( "at" occs_nums ) ] clause_dft_concl: [ | "in" in_clause -| occs +| OPT ( "at" occs_nums ) | ] @@ -1858,18 +1810,8 @@ opt_clause: [ | ] -occs_nums: [ -| LIST1 num_or_var -| "-" num_or_var LIST0 int_or_var -] - -num_or_var: [ -| num -| ident -] - concl_occ: [ -| "*" occs +| "*" OPT ( "at" occs_nums ) | ] @@ -1987,7 +1929,7 @@ ltac_expr: [ binder_tactic: [ | "fun" LIST1 fun_var "=>" ltac_expr -| "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" ltac_expr +| "let" OPT "rec" let_clause LIST0 ( "with" let_clause ) "in" ltac_expr | "info" ltac_expr ] @@ -2005,16 +1947,15 @@ let_clause: [ ltac_expr4: [ | ltac_expr3 ";" binder_tactic | ltac_expr3 ";" ltac_expr3 -| ltac_expr3 ";" "[" multi_goal_tactics "]" -| ltac_expr3 ";" "[" ">" multi_goal_tactics "]" +| ltac_expr3 ";" "[" OPT multi_goal_tactics "]" | ltac_expr3 +| ltac_expr3 ";" "[" ">" OPT multi_goal_tactics "]" ] multi_goal_tactics: [ | OPT ltac_expr "|" multi_goal_tactics | ltac_expr_opt ".." 
OPT "|" ltac_expr_opt_list_or | ltac_expr -| ] ltac_expr_opt: [ @@ -2044,6 +1985,20 @@ ltac_expr3: [ | ltac_expr2 ] +only_selector: [ +| "only" selector ":" +] + +selector: [ +| LIST1 range_selector SEP "," +| "[" ident "]" +] + +range_selector: [ +| num "-" num +| num +] + ltac_expr2: [ | ltac_expr1 "+" binder_tactic | ltac_expr1 "+" ltac_expr2 @@ -2058,7 +2013,7 @@ ltac_expr1: [ | "first" "[" LIST0 ltac_expr SEP "|" "]" | "solve" "[" LIST0 ltac_expr SEP "|" "]" | "idtac" LIST0 message_token -| failkw [ int_or_var | ] LIST0 message_token +| failkw OPT int_or_var LIST0 message_token | ltac_match_goal | simple_tactic | tactic_arg @@ -2099,7 +2054,7 @@ tactic_arg_compat: [ ltac_expr0: [ | "(" ltac_expr ")" -| "[>" multi_goal_tactics "]" +| "[>" OPT multi_goal_tactics "]" | tactic_atom ] @@ -2115,20 +2070,6 @@ toplevel_selector: [ | "!" ":" ] -only_selector: [ -| "only" selector ":" -] - -selector: [ -| LIST1 range_selector SEP "," -| "[" ident "]" -] - -range_selector: [ -| num "-" num -| num -] - ltac_match_term: [ | match_key ltac_expr "with" OPT "|" LIST1 match_rule SEP "|" "end" ] diff --git a/doc/tools/docgram/productionlist.edit_mlg b/doc/tools/docgram/productionlist.edit_mlg index 8170b71e7a..93eb38d1a0 100644 --- a/doc/tools/docgram/productionlist.edit_mlg +++ b/doc/tools/docgram/productionlist.edit_mlg @@ -12,19 +12,3 @@ (* Contents used to generate productionlists in doc *) DOC_GRAMMAR - -(* this is here because they're inside _opt generated by EXPAND *) -SPLICE: [ -| ltac_info -| eliminator -| field_mods -| ltac_production_sep -| ltac_tactic_level -| module_binder -| printunivs_subgraph -| quoted_attributes -| ring_mods -| scope_delimiter -| univ_decl -| univ_name_list -] diff --git a/engine/logic_monad.ml b/engine/logic_monad.ml index 1caf2c2722..76d98c5ddd 100644 --- a/engine/logic_monad.ml +++ b/engine/logic_monad.ml @@ -89,12 +89,12 @@ struct let catch = fun s h -> (); fun () -> try s () with Exception e as src -> - let (src, info) = CErrors.push src 
in + let (src, info) = Exninfo.capture src in h (e, info) () let read_line = fun () -> try read_line () with e -> - let (e, info) = CErrors.push e in - raise (e, info) () + let (e, info) = Exninfo.capture e in + raise (e,info) () let print_char = fun c -> (); fun () -> print_char c @@ -104,8 +104,8 @@ struct let make f = (); fun () -> try f () with e when CErrors.noncritical e -> - let (e, info) = CErrors.push e in - Util.iraise (Exception e, info) + let (e, info) = Exninfo.capture e in + Exninfo.iraise (Exception e, info) (** Use the current logger. The buffer is also flushed. *) let print_debug s = make (fun _ -> Feedback.msg_debug s) @@ -115,8 +115,8 @@ struct let run = fun x -> try x () with Exception e as src -> - let (src, info) = CErrors.push src in - Util.iraise (e, info) + let (src, info) = Exninfo.capture src in + Exninfo.iraise (e, info) end (** {6 Logical layer} *) diff --git a/engine/namegen.ml b/engine/namegen.ml index bcc8c34a4d..d2c37fb716 100644 --- a/engine/namegen.ml +++ b/engine/namegen.ml @@ -259,15 +259,17 @@ let visible_ids sigma (nenv, c) = let (gseen, vseen, ids) = !accu in let g = global_of_constr c in if not (GlobRef.Set_env.mem g gseen) then - begin - try let gseen = GlobRef.Set_env.add g gseen in - let short = Nametab.shortest_qualid_of_global Id.Set.empty g in - let dir, id = repr_qualid short in - let ids = if DirPath.is_empty dir then Id.Set.add id ids else ids in + let ids = match Nametab.shortest_qualid_of_global Id.Set.empty g with + | short -> + let dir, id = repr_qualid short in + if DirPath.is_empty dir then Id.Set.add id ids else ids + | exception Not_found -> + (* This may happen if given pathological terms or when manipulating + open modules *) + ids + in accu := (gseen, vseen, ids) - with Not_found when !Flags.in_debugger || !Flags.in_toplevel -> () - end | Rel p -> let (gseen, vseen, ids) = !accu in if p > n && not (Int.Set.mem p vseen) then diff --git a/engine/proofview.ml b/engine/proofview.ml index 
a26ce71141..6a4e490408 100644 --- a/engine/proofview.ml +++ b/engine/proofview.ml @@ -228,7 +228,7 @@ let apply ~name ~poly env t sp = let ans = Proof.repr (Proof.run t P.{trace=false; name; poly} (sp,env)) in let ans = Logic_monad.NonLogical.run ans in match ans with - | Nil (e, info) -> iraise (TacticFailure e, info) + | Nil (e, info) -> Exninfo.iraise (TacticFailure e, info) | Cons ((r, (state, _), status, info), _) -> let (status, gaveup) = status in let status = (status, state.shelf, gaveup) in @@ -328,8 +328,8 @@ let tclEXACTLY_ONCE e t = (** [tclCASE t] wraps the {!Proofview_monad.Logical.split} primitive. *) type 'a case = -| Fail of iexn -| Next of 'a * (iexn -> 'a tactic) +| Fail of Exninfo.iexn +| Next of 'a * (Exninfo.iexn -> 'a tactic) let tclCASE t = let open Logic_monad in let map = function @@ -1096,7 +1096,7 @@ module Goal = struct let (gl, sigma) = nf_gmake env sigma goal in tclTHEN (Unsafe.tclEVARS sigma) (InfoL.tag (Info.DBranch) (f gl)) with e when catchable_exception e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in tclZERO ~info e end end @@ -1114,7 +1114,7 @@ module Goal = struct tclEVARMAP >>= fun sigma -> try f (gmake env sigma goal) with e when catchable_exception e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in tclZERO ~info e end end @@ -1127,7 +1127,7 @@ module Goal = struct tclEVARMAP >>= fun sigma -> try f (gmake env sigma goal) with e when catchable_exception e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in tclZERO ~info e end | _ -> @@ -1218,7 +1218,7 @@ module V82 = struct InfoL.leaf (Info.Tactic (fun _ _ -> Pp.str"<unknown>")) >> Pv.set { ps with solution = evd; comb = sgs; } with e when catchable_exception e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in tclZERO ~info e @@ -1261,8 +1261,8 @@ module V82 = struct let (_,final,_,_) = apply ~name ~poly (goal_env env gls.Evd.sigma gls.Evd.it) t init in { 
Evd.sigma = final.solution ; it = CList.map drop_state final.comb } with Logic_monad.TacticFailure e as src -> - let (_, info) = CErrors.push src in - iraise (e, info) + let (_, info) = Exninfo.capture src in + Exninfo.iraise (e, info) let put_status = Status.put @@ -1271,7 +1271,7 @@ module V82 = struct let wrap_exceptions f = try f () with e when catchable_exception e -> - let (e, info) = CErrors.push e in tclZERO ~info e + let (e, info) = Exninfo.capture e in tclZERO ~info e end diff --git a/engine/proofview.mli b/engine/proofview.mli index a92179ab5b..5bfbc6a649 100644 --- a/engine/proofview.mli +++ b/engine/proofview.mli @@ -14,7 +14,6 @@ ['a tactic] is the (abstract) type of tactics modifying the proof state and returning a value of type ['a]. *) -open Util open EConstr (** Main state of tactics *) @@ -194,18 +193,18 @@ val tclZERO : ?info:Exninfo.info -> exn -> 'a tactic the successes of [t1] have been depleted and it failed with [e], then it behaves as [t2 e]. In other words, [tclOR] inserts a backtracking point. *) -val tclOR : 'a tactic -> (iexn -> 'a tactic) -> 'a tactic +val tclOR : 'a tactic -> (Exninfo.iexn -> 'a tactic) -> 'a tactic (** [tclORELSE t1 t2] is equal to [t1] if [t1] has at least one success or [t2 e] if [t1] fails with [e]. It is analogous to [try/with] handler of exception in that it is not a backtracking point. *) -val tclORELSE : 'a tactic -> (iexn -> 'a tactic) -> 'a tactic +val tclORELSE : 'a tactic -> (Exninfo.iexn -> 'a tactic) -> 'a tactic (** [tclIFCATCH a s f] is a generalisation of {!tclORELSE}: if [a] succeeds at least once then it behaves as [tclBIND a s] otherwise, if [a] fails with [e], then it behaves as [f e]. *) -val tclIFCATCH : 'a tactic -> ('a -> 'b tactic) -> (iexn -> 'b tactic) -> 'b tactic +val tclIFCATCH : 'a tactic -> ('a -> 'b tactic) -> (Exninfo.iexn -> 'b tactic) -> 'b tactic (** [tclONCE t] behave like [t] except it has at most one success: [tclONCE t] stops after the first success of [t]. 
If [t] fails @@ -227,8 +226,8 @@ val tclEXACTLY_ONCE : exn -> 'a tactic -> 'a tactic continuation. It is the most general primitive to control backtracking. *) type 'a case = - | Fail of iexn - | Next of 'a * (iexn -> 'a tactic) + | Fail of Exninfo.iexn + | Next of 'a * (Exninfo.iexn -> 'a tactic) val tclCASE : 'a tactic -> 'a case tactic (** [tclBREAK p t] is a generalization of [tclONCE t]. Instead of @@ -236,7 +235,7 @@ val tclCASE : 'a tactic -> 'a case tactic failure with an exception [e] such that [p e = Some e'] is raised. At which point it drops the remaining successes, failing with [e']. [tclONCE t] is equivalent to [tclBREAK (fun e -> Some e) t]. *) -val tclBREAK : (iexn -> iexn option) -> 'a tactic -> 'a tactic +val tclBREAK : (Exninfo.iexn -> Exninfo.iexn option) -> 'a tactic -> 'a tactic (** {7 Focusing tactics} *) @@ -508,8 +507,8 @@ end module UnsafeRepr : sig type state = Proofview_monad.Logical.Unsafe.state - val repr : 'a tactic -> ('a, state, state, iexn) Logic_monad.BackState.t - val make : ('a, state, state, iexn) Logic_monad.BackState.t -> 'a tactic + val repr : 'a tactic -> ('a, state, state, Exninfo.iexn) Logic_monad.BackState.t + val make : ('a, state, state, Exninfo.iexn) Logic_monad.BackState.t -> 'a tactic end (** {6 Goal-dependent tactics} *) diff --git a/ide/coq_commands.ml b/ide/coq_commands.ml index 5b9ea17ba7..790b427e4c 100644 --- a/ide/coq_commands.ml +++ b/ide/coq_commands.ml @@ -21,7 +21,6 @@ let commands = [ "Add Printing Let"; "Add Printing Record"; "Add Rec LoadPath"; - "Add Rec ML Path"; "Add Ring A Aplus Amult Aone Azero Ainv Aeq T [ c1 ... cn ]. "; "Add Semi Ring A Aplus Amult Aone Azero Aeq T [ c1 ... 
cn ]."; "Add Relation"; diff --git a/ide/coqide.ml b/ide/coqide.ml index 143a12deeb..61e95c21b1 100644 --- a/ide/coqide.ml +++ b/ide/coqide.ml @@ -460,7 +460,7 @@ let compile sn = |Some f -> let args = Coq.get_arguments sn.coqtop in let cmd = cmd_coqc#get - ^ " " ^ String.concat " " args + ^ " " ^ String.concat " " (List.map Filename.quote args) ^ " " ^ (Filename.quote f) ^ " 2>&1" in let buf = Buffer.create 1024 in @@ -474,7 +474,7 @@ let compile sn = flash_info (f ^ " successfully compiled") else begin flash_info (f ^ " failed to compile"); - sn.messages#default_route#set (Pp.str "Compilation output:\n"); + sn.messages#default_route#set (Pp.str ("Compilation output:\n" ^ cmd ^ "\n")); sn.messages#default_route#add (Pp.str (Buffer.contents buf)); end in diff --git a/ide/fake_ide.ml b/ide/fake_ide.ml index dfc16d39f3..4292e91252 100644 --- a/ide/fake_ide.ml +++ b/ide/fake_ide.ml @@ -327,11 +327,7 @@ let main = { xml_printer = op; xml_parser = ip } in let init () = match base_eval_call ~print:false (Xmlprotocol.init None) coq with - | Interface.Good id -> - let dir = Filename.dirname input_file in - let phrase = Printf.sprintf "Add LoadPath \"%s\". 
" dir in - let eid, tip = add_sentence ~name:"initial" phrase in - after_add (base_eval_call (Xmlprotocol.add ((phrase,eid),(tip,true))) coq) + | Interface.Good id -> () | Interface.Fail _ -> error "init call failed" in let finish () = match base_eval_call (Xmlprotocol.status true) coq with diff --git a/ide/idetop.ml b/ide/idetop.ml index 9eb0b972b6..57e9792845 100644 --- a/ide/idetop.ml +++ b/ide/idetop.ml @@ -69,7 +69,7 @@ let ide_cmd_checks ~last_valid { CAst.loc; v } = let user_error s = try CErrors.user_err ?loc ~hdr:"IDE" (str s) with e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in let info = Stateid.add info ~valid:last_valid Stateid.dummy in Exninfo.iraise (e, info) in @@ -477,7 +477,7 @@ let print_xml = fun oc xml -> Mutex.lock m; try Control.protect_sigalrm (Xml_printer.print oc) xml; Mutex.unlock m - with e -> let e = CErrors.push e in Mutex.unlock m; iraise e + with e -> let e = Exninfo.capture e in Mutex.unlock m; Exninfo.iraise e let slave_feeder fmt xml_oc msg = let xml = Xmlprotocol.(of_feedback fmt msg) in diff --git a/ide/protocol/xmlprotocol.ml b/ide/protocol/xmlprotocol.ml index a2c80ea118..2e78642f2e 100644 --- a/ide/protocol/xmlprotocol.ml +++ b/ide/protocol/xmlprotocol.ml @@ -679,7 +679,7 @@ let abstract_eval_call : type a. 
_ -> a call -> a value = fun handler c -> | PrintAst x -> mkGood (handler.print_ast x) | Annotate x -> mkGood (handler.annotate x) with any -> - let any = CErrors.push any in + let any = Exninfo.capture any in Fail (handler.handle_exn any) (** brain dead code, edit if protocol messages are added/removed *) diff --git a/interp/impargs.ml b/interp/impargs.ml index 78c4b21920..1365b97d82 100644 --- a/interp/impargs.ml +++ b/interp/impargs.ml @@ -77,9 +77,9 @@ let with_implicit_protection f x = implicit_args := oflags; rslt with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = implicit_args := oflags in - iraise reraise + Exninfo.iraise reraise type on_trailing_implicit = Error | Info | Silent diff --git a/interp/notation.ml b/interp/notation.ml index 2086e08f79..b869cb2a36 100644 --- a/interp/notation.ml +++ b/interp/notation.ml @@ -1959,6 +1959,6 @@ let with_notation_protection f x = let fs = freeze ~marshallable:false in try let a = f x in unfreeze fs; a with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = unfreeze fs in - iraise reraise + Exninfo.iraise reraise diff --git a/kernel/inductive.ml b/kernel/inductive.ml index c6035f78ff..1be86f2bf8 100644 --- a/kernel/inductive.ml +++ b/kernel/inductive.ml @@ -962,7 +962,7 @@ let check_one_fix renv recpos trees def = let stack_br = push_stack_args case_spec.(k) stack' in check_rec_call renv stack_br br') with (FixGuardError _ as exn) -> - let exn = CErrors.push exn in + let exn = Exninfo.capture exn in (* we try hard to reduce the match away by looking for a constructor in c_0 (we unfold definitions too) *) let c_0 = whd_all renv.env c_0 in @@ -1007,7 +1007,7 @@ let check_one_fix renv recpos trees def = check_nested_fix_body illformed renv' (decrArg+1) arg_sp body else check_rec_call renv' [] body) with (FixGuardError _ as exn) -> - let exn = CErrors.push exn in + let exn = Exninfo.capture exn in (* we try hard 
to reduce the fix away by looking for a constructor in l[decrArg] (we unfold definitions too) *) if List.length l <= decrArg then Exninfo.iraise exn; @@ -1055,7 +1055,7 @@ let check_one_fix renv recpos trees def = List.iter (check_rec_call renv []) l; check_rec_call renv [] c with (FixGuardError _ as exn) -> - let exn = CErrors.push exn in + let exn = Exninfo.capture exn in (* we try hard to reduce the proj away by looking for a constructor in c (we unfold definitions too) *) let c = whd_all renv.env c in diff --git a/kernel/nativelib.ml b/kernel/nativelib.ml index 86eaaddc90..3f2e63b984 100644 --- a/kernel/nativelib.ml +++ b/kernel/nativelib.ml @@ -180,8 +180,8 @@ let call_linker ?(fatal=true) env ~prefix f upds = if Dynlink.is_native then Dynlink.loadfile f else !load_obj f; register_native_file prefix with Dynlink.Error _ as exn -> - let exn = CErrors.push exn in - if fatal then iraise exn + let exn = Exninfo.capture exn in + if fatal then Exninfo.iraise exn else if !Flags.debug then Feedback.msg_debug CErrors.(iprint exn)); match upds with Some upds -> update_locations upds | _ -> () diff --git a/kernel/safe_typing.ml b/kernel/safe_typing.ml index 8db8a044a8..d8e1b6537e 100644 --- a/kernel/safe_typing.ml +++ b/kernel/safe_typing.ml @@ -1262,7 +1262,7 @@ let export ?except ~output_native_objects senv dir = let senv = try join_safe_environment ?except senv with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in CErrors.user_err ~hdr:"export" (CErrors.iprint e) in assert(senv.future_cst = []); diff --git a/lib/cErrors.mli b/lib/cErrors.mli index 1660a00244..ec81694177 100644 --- a/lib/cErrors.mli +++ b/lib/cErrors.mli @@ -14,7 +14,7 @@ (** {6 Error handling} *) val push : exn -> Exninfo.iexn -(** Alias for [Backtrace.add_backtrace]. 
*) +[@@ocaml.deprecated "please use [Exninfo.capture]"] (** {6 Generic errors.} diff --git a/lib/future.ml b/lib/future.ml index ddf841b7fc..e8d232ad96 100644 --- a/lib/future.ml +++ b/lib/future.ml @@ -131,7 +131,7 @@ let rec compute ck : 'a value = let data = f () in c := Val data; `Val data with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in let e = fix_exn e in match e with | (NotReady _, _) -> `Exn e diff --git a/lib/system.ml b/lib/system.ml index 2d68fd2fdf..9089eda564 100644 --- a/lib/system.ml +++ b/lib/system.ml @@ -248,9 +248,9 @@ let extern_state magic filename val_0 = marshal_out channel val_0; close_out channel with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = try_remove filename in - iraise reraise + Exninfo.iraise reraise with Sys_error s -> CErrors.user_err ~hdr:"System.extern_state" (str "System error: " ++ str s) diff --git a/lib/util.mli b/lib/util.mli index 2f1a03a19c..1417d6dfcb 100644 --- a/lib/util.mli +++ b/lib/util.mli @@ -119,8 +119,10 @@ val delayed_force : 'a delayed -> 'a (** {6 Enriched exceptions} *) type iexn = Exninfo.iexn +[@@ocaml.deprecated "please use [Exninfo.iexn]"] -val iraise : iexn -> 'a +val iraise : Exninfo.iexn -> 'a +[@@ocaml.deprecated "please use [Exninfo.iraise]"] (** {6 Misc. 
} *) diff --git a/library/states.ml b/library/states.ml index 90303a2a5c..c656dfb952 100644 --- a/library/states.ml +++ b/library/states.ml @@ -8,8 +8,6 @@ (* * (see LICENSE file for the text of the license) *) (************************************************************************) -open Util - type state = Lib.frozen * Summary.frozen let lib_of_state = fst @@ -31,5 +29,5 @@ let with_state_protection f x = try let a = f x in unfreeze st; a with reraise -> - let reraise = CErrors.push reraise in - (unfreeze st; iraise reraise) + let reraise = Exninfo.capture reraise in + (unfreeze st; Exninfo.iraise reraise) diff --git a/parsing/g_constr.mlg b/parsing/g_constr.mlg index d6c6c365cb..48e6f1f213 100644 --- a/parsing/g_constr.mlg +++ b/parsing/g_constr.mlg @@ -17,7 +17,6 @@ open Glob_term open Constrexpr open Constrexpr_ops open Util -open Tok open Namegen open Pcoq @@ -54,48 +53,25 @@ let err () = raise Stream.Failure (* Hack to parse "(x:=t)" as an explicit argument without conflicts with the *) (* admissible notation "(x t)" *) -let lpar_id_coloneq = - Pcoq.Entry.of_parser "test_lpar_id_coloneq" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD "(" -> - (match stream_nth 1 strm with - | IDENT s -> - (match stream_nth 2 strm with - | KEYWORD ":=" -> - stream_njunk 3 strm; - Names.Id.of_string s - | _ -> err ()) - | _ -> err ()) - | _ -> err ()) +let test_lpar_id_coloneq = + let open Pcoq.Lookahead in + to_entry "test_lpar_id_coloneq" begin + lk_kw "(" >> lk_ident >> lk_kw ":=" + end let ensure_fixannot = - Pcoq.Entry.of_parser "check_fixannot" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD "{" -> - (match stream_nth 1 strm with - | IDENT ("wf"|"struct"|"measure") -> () - | _ -> err ()) - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "check_fixannot" begin + lk_kw "{" >> lk_kws ["wf"; "struct"; "measure"] + end -let name_colon = - Pcoq.Entry.of_parser "name_colon" - (fun _ strm -> - match stream_nth 0 strm with - | IDENT s -> - (match 
stream_nth 1 strm with - | KEYWORD ":" -> - stream_njunk 2 strm; - Name (Names.Id.of_string s) - | _ -> err ()) - | KEYWORD "_" -> - (match stream_nth 1 strm with - | KEYWORD ":" -> - stream_njunk 2 strm; - Anonymous - | _ -> err ()) - | _ -> err ()) +let lk_name = Pcoq.Lookahead.(lk_ident <+> lk_kw "_") + +let test_name_colon = + let open Pcoq.Lookahead in + to_entry "test_name_colon" begin + lk_name >> lk_kw ":" + end let aliasvar = function { CAst.v = CPatAlias (_, na) } -> Some na | _ -> None @@ -266,7 +242,7 @@ GRAMMAR EXTEND Gram | "cofix"; c = cofix_decls -> { let (id,dcls) = c in CAst.make ~loc @@ CCoFix (id,dcls) } ] ] ; appl_arg: - [ [ id = lpar_id_coloneq; c = lconstr; ")" -> { (c,Some (CAst.make ~loc @@ ExplByName id)) } + [ [ test_lpar_id_coloneq; "("; id = ident; ":="; c = lconstr; ")" -> { (c,Some (CAst.make ~loc @@ ExplByName id)) } | c=operconstr LEVEL "9" -> { (c,None) } ] ] ; atomic_constr: @@ -464,8 +440,8 @@ GRAMMAR EXTEND Gram [ [ "!" ; c = operconstr LEVEL "200" -> { (CAst.make ~loc Anonymous), true, c } | "{"; id = name; "}"; ":" ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" -> { id, expl, c } - | iid = name_colon ; expl = [ "!" -> { true } | -> { false } ] ; c = operconstr LEVEL "200" -> - { (CAst.make ~loc iid), expl, c } + | test_name_colon; iid = name; ":" ; expl = [ "!" 
-> { true } | -> { false } ] ; c = operconstr LEVEL "200" -> + { iid, expl, c } | c = operconstr LEVEL "200" -> { (CAst.make ~loc Anonymous), false, c } ] ] ; diff --git a/parsing/g_prim.mlg b/parsing/g_prim.mlg index 020501aedf..5f61b9a047 100644 --- a/parsing/g_prim.mlg +++ b/parsing/g_prim.mlg @@ -31,35 +31,11 @@ let my_int_of_string loc s = with Failure _ -> CErrors.user_err ~loc (Pp.str "This number is too large.") -let rec contiguous tok n m = - n == m - || - let (_, ep) = Loc.unloc (tok n) in - let (bp, _) = Loc.unloc (tok (n + 1)) in - Int.equal ep bp && contiguous tok (succ n) m - -let rec lookahead_kwds strm n = function - | [] -> () - | x :: xs -> - let toks = Stream.npeek (n+1) strm in - match List.nth toks n with - | Tok.KEYWORD y -> - if String.equal x y then lookahead_kwds strm (succ n) xs - else raise Stream.Failure - | _ -> raise Stream.Failure - | exception (Failure _) -> raise Stream.Failure - -(* [test_nospace m] fails if the next m tokens are not contiguous keywords *) -let test_nospace m = assert(m <> []); Pcoq.Entry.of_parser "test_nospace" - (fun tok strm -> - let n = Stream.count strm in - lookahead_kwds strm 0 m; - if contiguous tok n (n + List.length m - 1) then () - else raise Stream.Failure) - -let test_nospace_pipe_closedcurly = - test_nospace ["|"; "}"] - +let test_pipe_closedcurly = + let open Pcoq.Lookahead in + to_entry "test_pipe_closedcurly" begin + lk_kw "|" >> lk_kw "}" >> check_no_space + end } @@ -155,6 +131,6 @@ GRAMMAR EXTEND Gram [ [ i = NUMERAL -> { check_int loc i } ] ] ; bar_cbrace: - [ [ test_nospace_pipe_closedcurly; "|"; "}" -> { () } ] ] + [ [ test_pipe_closedcurly; "|"; "}" -> { () } ] ] ; END diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml index 92c3b7c6e8..55558eaed0 100644 --- a/parsing/pcoq.ml +++ b/parsing/pcoq.ml @@ -118,6 +118,64 @@ struct end +module Lookahead = +struct + + let err () = raise Stream.Failure + + type t = Gramlib.Plexing.location_function -> int -> Tok.t Stream.t -> int option + + let rec 
contiguous tok n m = + n == m || + let (_, ep) = Loc.unloc (tok n) in + let (bp, _) = Loc.unloc (tok (n + 1)) in + Int.equal ep bp && contiguous tok (succ n) m + + let check_no_space tok m strm = + let n = Stream.count strm in + if contiguous tok n (n+m-1) then Some m else None + + let to_entry s (lk : t) = + let run tok strm = match lk tok 0 strm with None -> err () | Some _ -> () in + Entry.of_parser s run + + let (>>) (lk1 : t) lk2 tok n strm = match lk1 tok n strm with + | None -> None + | Some n -> lk2 tok n strm + + let (<+>) (lk1 : t) lk2 tok n strm = match lk1 tok n strm with + | None -> lk2 tok n strm + | Some n -> Some n + + let lk_empty tok n strm = Some n + + let lk_kw kw tok n strm = match stream_nth n strm with + | Tok.KEYWORD kw' | Tok.IDENT kw' -> if String.equal kw kw' then Some (n + 1) else None + | _ -> None + + let lk_kws kws tok n strm = match stream_nth n strm with + | Tok.KEYWORD kw | Tok.IDENT kw -> if List.mem_f String.equal kw kws then Some (n + 1) else None + | _ -> None + + let lk_ident tok n strm = match stream_nth n strm with + | Tok.IDENT _ -> Some (n + 1) + | _ -> None + + let lk_ident_except idents tok n strm = match stream_nth n strm with + | Tok.IDENT ident when not (List.mem_f String.equal ident idents) -> Some (n + 1) + | _ -> None + + let lk_nat tok n strm = match stream_nth n strm with + | Tok.NUMERAL { NumTok.int = _; frac = ""; exp = "" } -> Some (n + 1) + | _ -> None + + let rec lk_list lk_elem n strm = + ((lk_elem >> lk_list lk_elem) <+> lk_empty) n strm + + let lk_ident_list = lk_list lk_ident + +end + (** Grammar extensions *) (** NB: [extend_statement = @@ -627,9 +685,9 @@ let with_grammar_rule_protection f x = let fs = freeze ~marshallable:false in try let a = f x in unfreeze fs; a with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = unfreeze fs in - iraise reraise + Exninfo.iraise reraise (** Registering grammar of generic arguments *) diff --git 
a/parsing/pcoq.mli b/parsing/pcoq.mli index f2fc007a7b..6543a69b50 100644 --- a/parsing/pcoq.mli +++ b/parsing/pcoq.mli @@ -34,6 +34,21 @@ module Entry : sig val name : 'a t -> string end +module Lookahead : sig + type t + val to_entry : string -> t -> unit Entry.t + val (>>) : t -> t -> t + val (<+>) : t -> t -> t + val lk_list : t -> t + val check_no_space : t + val lk_kw : string -> t + val lk_kws : string list -> t + val lk_nat : t + val lk_ident : t + val lk_ident_except : string list -> t + val lk_ident_list : t +end + (** The parser of Coq is built from three kinds of rule declarations: - dynamic rules declared at the evaluation of Coq files (using diff --git a/plugins/funind/indfun_common.ml b/plugins/funind/indfun_common.ml index b2ee0f9370..45fafd2872 100644 --- a/plugins/funind/indfun_common.ml +++ b/plugins/funind/indfun_common.ml @@ -520,7 +520,7 @@ let funind_purify f x = let st = Vernacstate.freeze_interp_state ~marshallable:false in try f x with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in Vernacstate.unfreeze_interp_state st; Exninfo.iraise e diff --git a/plugins/ltac/extraargs.mlg b/plugins/ltac/extraargs.mlg index 5835d75c79..f97c291c79 100644 --- a/plugins/ltac/extraargs.mlg +++ b/plugins/ltac/extraargs.mlg @@ -330,18 +330,10 @@ END { let local_test_lpar_id_colon = - let err () = raise Stream.Failure in - Pcoq.Entry.of_parser "lpar_id_colon" - (fun _ strm -> - match Util.stream_nth 0 strm with - | Tok.KEYWORD "(" -> - (match Util.stream_nth 1 strm with - | Tok.IDENT _ -> - (match Util.stream_nth 2 strm with - | Tok.KEYWORD ":" -> () - | _ -> err ()) - | _ -> err ()) - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "lpar_id_colon" begin + lk_kw "(" >> lk_ident >> lk_kw ":" + end let pr_lpar_id_colon _ _ _ _ = mt () diff --git a/plugins/ltac/extratactics.mlg b/plugins/ltac/extratactics.mlg index 513f5ca77b..d0c94e7903 100644 --- a/plugins/ltac/extratactics.mlg +++ b/plugins/ltac/extratactics.mlg @@ -670,7 +670,7 @@ let 
hResolve id c occ t = Pretyping.understand env sigma t_hole with | Pretype_errors.PretypeError (_,_,Pretype_errors.UnsolvableImplicit _) as e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in let loc_begin = Option.cata (fun l -> fst (Loc.unloc l)) 0 (Loc.get_loc info) in resolve_hole (subst_hole_with_term loc_begin c_raw t_hole) in diff --git a/plugins/ltac/g_auto.mlg b/plugins/ltac/g_auto.mlg index 8344f9dae3..82c64a9857 100644 --- a/plugins/ltac/g_auto.mlg +++ b/plugins/ltac/g_auto.mlg @@ -114,7 +114,7 @@ END (** Eauto *) -TACTIC EXTEND prolog +TACTIC EXTEND prolog DEPRECATED { Deprecation.make ~note:"Use eauto instead" () } | [ "prolog" "[" uconstr_list(l) "]" int_or_var(n) ] -> { Eauto.prolog_tac (eval_uconstrs ist l) n } END @@ -253,4 +253,3 @@ VERNAC COMMAND EXTEND HintCut CLASSIFIED AS SIDEFF (match dbnames with None -> ["core"] | Some l -> l) entry; } END - diff --git a/plugins/ltac/g_ltac.mlg b/plugins/ltac/g_ltac.mlg index 7ea843ca69..c163438718 100644 --- a/plugins/ltac/g_ltac.mlg +++ b/plugins/ltac/g_ltac.mlg @@ -20,7 +20,6 @@ open Tacexpr open Namegen open Genarg open Genredexpr -open Tok (* necessary for camlp5 *) open Names open Attributes @@ -63,14 +62,10 @@ let classic_proof_mode = Pvernac.register_proof_mode "Classic" tactic_mode (* Hack to parse "[ id" without dropping [ *) let test_bracket_ident = - Pcoq.Entry.of_parser "test_bracket_ident" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD "[" -> - (match stream_nth 1 strm with - | IDENT _ -> () - | _ -> raise Stream.Failure) - | _ -> raise Stream.Failure) + let open Pcoq.Lookahead in + to_entry "test_bracket_ident" begin + lk_kw "[" >> lk_ident + end (* Tactics grammar rules *) diff --git a/plugins/ltac/g_tactic.mlg b/plugins/ltac/g_tactic.mlg index f0d6258cd1..8e1e5559af 100644 --- a/plugins/ltac/g_tactic.mlg +++ b/plugins/ltac/g_tactic.mlg @@ -38,45 +38,24 @@ let err () = raise Stream.Failure (* Hack to parse "(x:=t)" as an explicit argument without 
conflicts with the *) (* admissible notation "(x t)" *) let test_lpar_id_coloneq = - Pcoq.Entry.of_parser "lpar_id_coloneq" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD "(" -> - (match stream_nth 1 strm with - | IDENT _ -> - (match stream_nth 2 strm with - | KEYWORD ":=" -> () - | _ -> err ()) - | _ -> err ()) - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "lpar_id_coloneq" begin + lk_kw "(" >> lk_ident >> lk_kw ":=" + end (* Hack to recognize "(x)" *) let test_lpar_id_rpar = - Pcoq.Entry.of_parser "lpar_id_coloneq" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD "(" -> - (match stream_nth 1 strm with - | IDENT _ -> - (match stream_nth 2 strm with - | KEYWORD ")" -> () - | _ -> err ()) - | _ -> err ()) - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "lpar_id_coloneq" begin + lk_kw "(" >> lk_ident >> lk_kw ")" + end (* idem for (x:=t) and (1:=t) *) let test_lpar_idnum_coloneq = - Pcoq.Entry.of_parser "test_lpar_idnum_coloneq" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD "(" -> - (match stream_nth 1 strm with - | IDENT _ | NUMERAL _ -> - (match stream_nth 2 strm with - | KEYWORD ":=" -> () - | _ -> err ()) - | _ -> err ()) - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "test_lpar_idnum_coloneq" begin + lk_kw "(" >> (lk_ident <+> lk_nat) >> lk_kw ":=" + end (* idem for (x:t) *) open Extraargs @@ -107,11 +86,10 @@ let check_for_coloneq = | _ -> err ()) let lookup_at_as_comma = - Pcoq.Entry.of_parser "lookup_at_as_comma" - (fun _ strm -> - match stream_nth 0 strm with - | KEYWORD (","|"at"|"as") -> () - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "lookup_at_as_comma" begin + lk_kws [",";"at";"as"] + end open Constr open Prim @@ -611,6 +589,16 @@ GRAMMAR EXTEND Gram { TacAtom (CAst.make ~loc @@ TacAssert (false,true,Some tac,ipat,c)) } | IDENT "eassert"; c = constr; ipat = as_ipat; tac = by_tactic -> { TacAtom (CAst.make ~loc @@ TacAssert (true,true,Some tac,ipat,c)) } + + (* Alternative 
syntax for "pose proof c as id by tac" *) + | IDENT "pose"; IDENT "proof"; test_lpar_id_coloneq; "("; lid = identref; ":="; + c = lconstr; ")" -> + { let { CAst.loc = loc; v = id } = lid in + TacAtom (CAst.make ?loc @@ TacAssert (false,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) } + | IDENT "epose"; IDENT "proof"; test_lpar_id_coloneq; "("; lid = identref; ":="; + c = lconstr; ")" -> + { let { CAst.loc = loc; v = id } = lid in + TacAtom (CAst.make ?loc @@ TacAssert (true,true,None,Some (CAst.make ?loc @@ IntroNaming (IntroIdentifier id)),c)) } | IDENT "pose"; IDENT "proof"; c = lconstr; ipat = as_ipat -> { TacAtom (CAst.make ~loc @@ TacAssert (false,true,None,ipat,c)) } | IDENT "epose"; IDENT "proof"; c = lconstr; ipat = as_ipat -> diff --git a/plugins/ltac/tacinterp.ml b/plugins/ltac/tacinterp.ml index 6e620b71db..1d7fe335d1 100644 --- a/plugins/ltac/tacinterp.ml +++ b/plugins/ltac/tacinterp.ml @@ -165,8 +165,8 @@ let catching_error call_trace fail (e, info) = let catch_error call_trace f x = try f x with e when CErrors.noncritical e -> - let e = CErrors.push e in - catching_error call_trace iraise e + let e = Exninfo.capture e in + catching_error call_trace Exninfo.iraise e let wrap_error tac k = if is_traced () then Proofview.tclORELSE tac k else tac @@ -717,13 +717,13 @@ let interp_may_eval f ist env sigma = function try f ist env sigma c with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in (* spiwack: to avoid unnecessary modifications of tacinterp, as this function already use effect, I call [run] hoping it doesn't mess up with any assumption. 
*) Proofview.NonLogical.run (debugging_exception_step ist false (fst reraise) (fun () -> str"interpretation of term " ++ pr_glob_constr_env env (fst c))); - iraise reraise + Exninfo.iraise reraise (* Interprets a constr expression possibly to first evaluate *) let interp_constr_may_eval ist env sigma c = @@ -731,12 +731,12 @@ let interp_constr_may_eval ist env sigma c = try interp_may_eval interp_constr ist env sigma c with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in (* spiwack: to avoid unnecessary modifications of tacinterp, as this function already use effect, I call [run] hoping it doesn't mess up with any assumption. *) Proofview.NonLogical.run (debugging_exception_step ist false (fst reraise) (fun () -> str"evaluation of term")); - iraise reraise + Exninfo.iraise reraise in begin (* spiwack: to avoid unnecessary modifications of tacinterp, as this diff --git a/plugins/micromega/certificate.ml b/plugins/micromega/certificate.ml index 61234145e1..e946ffd8bc 100644 --- a/plugins/micromega/certificate.ml +++ b/plugins/micromega/certificate.ml @@ -413,8 +413,14 @@ let bound_monomials (sys : WithProof.t list) = (fun acc ((p, o), _) -> ISet.union (LinPoly.monomials p) acc) ISet.empty sys in + let module SetWP = Set.Make (struct + type t = int * WithProof.t + + let compare (_, x) (_, y) = WithProof.compare x y + end) in let bounds = saturate_bin + (module SetWP : Set.S with type elt = int * WithProof.t) (fun (i1, w1) (i2, w2) -> if i1 + i2 > deg then None else diff --git a/plugins/micromega/g_zify.mlg b/plugins/micromega/g_zify.mlg index 2b5fac32a2..5e4a847e6b 100644 --- a/plugins/micromega/g_zify.mlg +++ b/plugins/micromega/g_zify.mlg @@ -25,7 +25,8 @@ VERNAC COMMAND EXTEND DECLAREINJECTION CLASSIFIED AS SIDEFF | ["Add" "UnOp" constr(t) ] -> { Zify.UnOp.register t } | ["Add" "CstOp" constr(t) ] -> { Zify.CstOp.register t } | ["Add" "BinRel" constr(t) ] -> { Zify.BinRel.register t } -| ["Add" "PropOp" constr(t) ] -> { 
Zify.PropOp.register t } +| ["Add" "PropOp" constr(t) ] -> { Zify.PropBinOp.register t } +| ["Add" "PropBinOp" constr(t) ] -> { Zify.PropBinOp.register t } | ["Add" "PropUOp" constr(t) ] -> { Zify.PropUnOp.register t } | ["Add" "Spec" constr(t) ] -> { Zify.Spec.register t } | ["Add" "BinOpSpec" constr(t) ] -> { Zify.Spec.register t } @@ -34,13 +35,14 @@ VERNAC COMMAND EXTEND DECLAREINJECTION CLASSIFIED AS SIDEFF END TACTIC EXTEND ITER -| [ "zify_iter_specs" tactic(t)] -> { Zify.iter_specs t } +| [ "zify_iter_specs"] -> { Zify.iter_specs} END TACTIC EXTEND TRANS | [ "zify_op" ] -> { Zify.zify_tac } | [ "zify_saturate" ] -> { Zify.saturate } | [ "zify_iter_let" tactic(t)] -> { Zify.iter_let t } +| [ "zify_elim_let" ] -> { Zify.elim_let } END VERNAC COMMAND EXTEND ZifyPrint CLASSIFIED AS SIDEFF diff --git a/plugins/micromega/mutils.ml b/plugins/micromega/mutils.ml index 160b492d3d..51f0328e4b 100644 --- a/plugins/micromega/mutils.ml +++ b/plugins/micromega/mutils.ml @@ -140,24 +140,24 @@ let saturate p f sys = Printexc.print_backtrace stdout; raise x -let saturate_bin (f : 'a -> 'a -> 'a option) (l : 'a list) = - let rec map_with acc e l = +let saturate_bin (type a) (module Set : Set.S with type elt = a) + (f : a -> a -> a option) (l : a list) = + let rec map_with (acc : Set.t) e l = match l with | [] -> acc - | e' :: l' -> ( + | e' :: l -> ( match f e e' with - | None -> map_with acc e l' - | Some r -> map_with (r :: acc) e l' ) - in - let rec map2_with acc l' = - match l' with [] -> acc | e' :: l' -> map2_with (map_with acc e' l) l' + | None -> map_with acc e l + | Some r -> map_with (Set.add r acc) e l ) in + let map2_with acc l' = Set.fold (fun e' acc -> map_with acc e' l) l' acc in let rec iterate acc l' = - match map2_with [] l' with - | [] -> List.rev_append l' acc - | res -> iterate (List.rev_append l' acc) res + let res = map2_with Set.empty l' in + if Set.is_empty res then Set.union l' acc + else iterate (Set.union l' acc) res in - iterate [] l + let s0 = 
List.fold_left (fun acc e -> Set.add e acc) Set.empty l in + Set.elements (Set.diff (iterate Set.empty s0) s0) open Num open Big_int diff --git a/plugins/micromega/mutils.mli b/plugins/micromega/mutils.mli index 5dcaf3be44..9badddc255 100644 --- a/plugins/micromega/mutils.mli +++ b/plugins/micromega/mutils.mli @@ -116,7 +116,12 @@ val simplify : ('a -> 'a option) -> 'a list -> 'a list option val saturate : ('a -> 'b option) -> ('b * 'a -> 'a -> 'a option) -> 'a list -> 'a list -val saturate_bin : ('a -> 'a -> 'a option) -> 'a list -> 'a list +val saturate_bin : + (module Set.S with type elt = 'a) + -> ('a -> 'a -> 'a option) + -> 'a list + -> 'a list + val generate : ('a -> 'b option) -> 'a list -> 'b list val app_funs : ('a -> 'b option) list -> 'a -> 'b option val command : string -> string array -> 'a -> 'b diff --git a/plugins/micromega/polynomial.ml b/plugins/micromega/polynomial.ml index b20213979b..f83b36d847 100644 --- a/plugins/micromega/polynomial.ml +++ b/plugins/micromega/polynomial.ml @@ -864,6 +864,12 @@ end module WithProof = struct type t = (LinPoly.t * op) * ProofFormat.prf_rule + (* The comparison ignores proofs on purpose *) + let compare : t -> t -> int = + fun ((lp1, o1), _) ((lp2, o2), _) -> + let c = Vect.compare lp1 lp2 in + if c = 0 then compare o1 o2 else c + let annot s (p, prf) = (p, ProofFormat.Annot (s, prf)) let output o ((lp, op), prf) = diff --git a/plugins/micromega/polynomial.mli b/plugins/micromega/polynomial.mli index 4b56b037e0..797ff5827d 100644 --- a/plugins/micromega/polynomial.mli +++ b/plugins/micromega/polynomial.mli @@ -320,6 +320,7 @@ module WithProof : sig exception InvalidProof (** [InvalidProof] is raised if the operation is invalid. 
*) + val compare : t -> t -> int val annot : string -> t -> t val of_cstr : cstr * ProofFormat.prf_rule -> t diff --git a/plugins/micromega/zify.ml b/plugins/micromega/zify.ml index e71c89b4db..b3b627be14 100644 --- a/plugins/micromega/zify.ml +++ b/plugins/micromega/zify.ml @@ -12,11 +12,43 @@ open Constr open Names open Pp open Lazy +module NamedDecl = Context.Named.Declaration -(** [get_type_of] performs beta reduction ; - Is it ok for Retyping.get_type_of (Zpower_nat n q) to return (fun _ : nat => Z) q ? *) -let get_type_of env evd e = - Tacred.cbv_beta env evd (Retyping.get_type_of env evd e) +let debug = false + +(* The following [constr] are necessary for constructing the proof terms *) + +let zify str = + EConstr.of_constr + (UnivGen.constr_of_monomorphic_global + (Coqlib.lib_ref ("ZifyClasses." ^ str))) + +(* morphism like lemma *) + +let mkapp2 = lazy (zify "mkapp2") +let mkapp = lazy (zify "mkapp") +let eq_refl = lazy (zify "eq_refl") +let eq = lazy (zify "eq") +let mkrel = lazy (zify "mkrel") +let iff_refl = lazy (zify "iff_refl") +let eq_iff = lazy (zify "eq_iff") +let rew_iff = lazy (zify "rew_iff") + +(* propositional logic *) + +let op_and = lazy (zify "and") +let op_and_morph = lazy (zify "and_morph") +let op_or = lazy (zify "or") +let op_or_morph = lazy (zify "or_morph") +let op_impl_morph = lazy (zify "impl_morph") +let op_iff = lazy (zify "iff") +let op_iff_morph = lazy (zify "iff_morph") +let op_not = lazy (zify "not") +let op_not_morph = lazy (zify "not_morph") + +(* identity function *) +(*let identity = lazy (zify "identity")*) +let whd = Reductionops.clos_whd_flags CClosure.all (** [unsafe_to_constr c] returns a [Constr.t] without considering an evar_map. 
This is useful for calling Constr.hash *) @@ -24,6 +56,18 @@ let unsafe_to_constr = EConstr.Unsafe.to_constr let pr_constr env evd e = Printer.pr_econstr_env env evd e +let gl_pr_constr e = + let genv = Global.env () in + let evd = Evd.from_env genv in + pr_constr genv evd e + +let is_convertible env evd t1 t2 = Reductionops.(is_conv env evd t1 t2) + +(** [get_type_of] performs beta reduction ; + Is it ok for Retyping.get_type_of (Zpower_nat n q) to return (fun _ : nat => Z) q ? *) +let get_type_of env evd e = + Tacred.cbv_beta env evd (Retyping.get_type_of env evd e) + let rec find_option pred l = match l with | [] -> raise Not_found @@ -62,10 +106,7 @@ end *) let get_projections_from_constant (evd, i) = - match - EConstr.kind evd - (Reductionops.clos_whd_flags CClosure.all (Global.env ()) evd i) - with + match EConstr.kind evd (whd (Global.env ()) evd i) with | App (c, a) -> Some a | _ -> raise @@ -98,6 +139,109 @@ module EInjT = struct cstr : EConstr.t option (* forall x, pred (inj x) *) } end +(** [classify_op] classify injected operators and detect special cases. *) + +type classify_op = + | OpInj (* e.g. Z.of_nat -> \x.x *) + | OpSame (* e.g. Z.add -> Z.add *) + | OpConv (* e.g. Pos.ge == \x.y. Z.ge (Z.pos x) (Z.pos y) + \x.y. Z.pos (Pos.add x y) == \x.y. 
Z.add (Z.pos x) (Z.pos y) + Z.succ == (\x.x + 1) + *) + | OpOther + +(*let pp_classify_op = function + | OpInj -> Pp.str "Identity" + | OpSame -> Pp.str "Same" + | OpConv -> Pp.str "Conv" + | OpOther -> Pp.str "Other" + *) + +let name x = + Context.make_annot (Name.mk_name (Names.Id.of_string x)) Sorts.Relevant + +let mkconvert_unop i1 i2 op top = + (* fun x => inj (op x) *) + let op = + EConstr.mkLambda + ( name "x" + , i1.EInjT.source + , EConstr.mkApp (i2.EInjT.inj, [|EConstr.mkApp (op, [|EConstr.mkRel 1|])|]) + ) + in + (* fun x => top (inj x) *) + let top = + EConstr.mkLambda + ( name "x" + , i1.EInjT.source + , EConstr.mkApp + (top, [|EConstr.mkApp (i1.EInjT.inj, [|EConstr.mkRel 1|])|]) ) + in + (op, top) + +let mkconvert_binop i1 i2 i3 op top = + (* fun x y => inj (op x y) *) + let op = + EConstr.mkLambda + ( name "x" + , i1.EInjT.source + , EConstr.mkLambda + ( name "y" + , i1.EInjT.source + , EConstr.mkApp + ( i3.EInjT.inj + , [|EConstr.mkApp (op, [|EConstr.mkRel 2; EConstr.mkRel 1|])|] ) + ) ) + in + (* fun x y => top (inj x) (inj y) *) + let top = + EConstr.mkLambda + ( name "x" + , i1.EInjT.source + , EConstr.mkLambda + ( name "y" + , i2.EInjT.source + , EConstr.mkApp + ( top + , [| EConstr.mkApp (i1.EInjT.inj, [|EConstr.mkRel 2|]) + ; EConstr.mkApp (i2.EInjT.inj, [|EConstr.mkRel 1|]) |] ) ) ) + in + (op, top) + +let mkconvert_rel i r tr = + let tr = + EConstr.mkLambda + ( name "x" + , i.EInjT.source + , EConstr.mkLambda + ( name "y" + , i.EInjT.source + , EConstr.mkApp + ( tr + , [| EConstr.mkApp (i.EInjT.inj, [|EConstr.mkRel 2|]) + ; EConstr.mkApp (i.EInjT.inj, [|EConstr.mkRel 1|]) |] ) ) ) + in + (r, tr) + +(** [classify_op mkconvert op top] takes the injection [inj] for the origin operator [op] + and the destination operator [top] -- both [op] and [top] are closed terms *) +let classify_op mkconvert inj op top = + let env = Global.env () in + let evd = Evd.from_env env in + if is_convertible env evd inj op then OpInj + else if EConstr.eq_constr evd 
op top then OpSame + else + let op, top = mkconvert op top in + if is_convertible env evd op top then OpConv else OpOther + +(*let classify_op mkconvert tysrc op top = + let res = classify_op mkconvert tysrc op top in + Feedback.msg_debug + Pp.( + str "classify_op:" ++ gl_pr_constr op ++ str " " ++ gl_pr_constr top + ++ str " " ++ pp_classify_op res ++ fnl ()); + res + *) module EBinOpT = struct type t = { (* Op : source1 -> source2 -> source3 *) @@ -105,17 +249,23 @@ module EBinOpT = struct ; source2 : EConstr.t ; source3 : EConstr.t ; target : EConstr.t - ; inj1 : EConstr.t - ; (* InjTyp source1 target *) - inj2 : EConstr.t - ; (* InjTyp source2 target *) - inj3 : EConstr.t - ; (* InjTyp source3 target *) - tbop : EConstr.t (* TBOpInj *) } + ; inj1 : EInjT.t (* InjTyp source1 target *) + ; inj2 : EInjT.t (* InjTyp source2 target *) + ; inj3 : EInjT.t (* InjTyp source3 target *) + ; bop : EConstr.t (* BOP *) + ; tbop : EConstr.t (* TBOP *) + ; tbopinj : EConstr.t (* TBOpInj *) + ; classify_binop : classify_op } end module ECstOpT = struct - type t = {source : EConstr.t; target : EConstr.t; inj : EConstr.t} + type t = + { source : EConstr.t + ; target : EConstr.t + ; inj : EInjT.t + ; cst : EConstr.t + ; cstinj : EConstr.t + ; is_construct : bool } end module EUnOpT = struct @@ -123,28 +273,42 @@ module EUnOpT = struct { source1 : EConstr.t ; source2 : EConstr.t ; target : EConstr.t - ; inj1_t : EConstr.t - ; inj2_t : EConstr.t - ; unop : EConstr.t } + ; uop : EConstr.t + ; inj1_t : EInjT.t + ; inj2_t : EInjT.t + ; tuop : EConstr.t + ; tuopinj : EConstr.t + ; classify_unop : classify_op + ; is_construct : bool } end module EBinRelT = struct type t = - {source : EConstr.t; target : EConstr.t; inj : EConstr.t; brel : EConstr.t} + { source : EConstr.t + ; target : EConstr.t + ; inj : EInjT.t + ; brel : EConstr.t + ; tbrel : EConstr.t + ; brelinj : EConstr.t + ; classify_rel : classify_op } end module EPropBinOpT = struct - type t = EConstr.t + type t = {op : 
EConstr.t; op_iff : EConstr.t} end module EPropUnOpT = struct - type t = EConstr.t + type t = {op : EConstr.t; op_iff : EConstr.t} end module ESatT = struct type t = {parg1 : EConstr.t; parg2 : EConstr.t; satOK : EConstr.t} end +module ESpecT = struct + type t = {spec : EConstr.t} +end + (* Different type of declarations *) type decl_kind = | PropOp of EPropBinOpT.t decl @@ -155,16 +319,7 @@ type decl_kind = | UnOp of EUnOpT.t decl | CstOp of ECstOpT.t decl | Saturate of ESatT.t decl - -let get_decl = function - | PropOp d -> d.decl - | PropUnOp d -> d.decl - | InjTyp d -> d.decl - | BinRel d -> d.decl - | BinOp d -> d.decl - | UnOp d -> d.decl - | CstOp d -> d.decl - | Saturate d -> d.decl + | Spec of ESpecT.t decl type term_kind = Application of EConstr.constr | OtherTerm of EConstr.constr @@ -191,8 +346,10 @@ end let table = Summary.ref ~name:"zify_table" HConstr.empty let saturate = Summary.ref ~name:"zify_saturate" HConstr.empty +let specs = Summary.ref ~name:"zify_specs" HConstr.empty let table_cache = ref HConstr.empty let saturate_cache = ref HConstr.empty +let specs_cache = ref HConstr.empty (** Each type-class gives rise to a different table. They only differ on how projections are extracted. 
*) @@ -207,7 +364,7 @@ module EInj = struct let dest = function InjTyp x -> Some x | _ -> None let mk_elt evd i (a : EConstr.t array) = - let isid = EConstr.eq_constr evd a.(0) a.(1) in + let isid = EConstr.eq_constr_nounivs evd a.(0) a.(1) in { isid ; source = a.(0) ; target = a.(1) @@ -218,6 +375,14 @@ module EInj = struct let get_key = 0 end +let get_inj evd c = + match get_projections_from_constant (evd, c) with + | None -> + let env = Global.env () in + let t = string_of_ppcmds (pr_constr env evd c) in + failwith ("Cannot register term " ^ t) + | Some a -> EInj.mk_elt evd c a + module EBinOp = struct type elt = EBinOpT.t @@ -227,20 +392,34 @@ module EBinOp = struct let table = table let mk_elt evd i a = + let i1 = get_inj evd a.(5) in + let i2 = get_inj evd a.(6) in + let i3 = get_inj evd a.(7) in + let tbop = a.(8) in { source1 = a.(0) ; source2 = a.(1) ; source3 = a.(2) ; target = a.(3) - ; inj1 = a.(5) - ; inj2 = a.(6) - ; inj3 = a.(7) - ; tbop = a.(9) } + ; inj1 = i1 + ; inj2 = i2 + ; inj3 = i3 + ; bop = a.(4) + ; tbop = a.(8) + ; tbopinj = a.(9) + ; classify_binop = + classify_op (mkconvert_binop i1 i2 i3) i1.EInjT.inj a.(4) tbop } let get_key = 4 let cast x = BinOp x let dest = function BinOp x -> Some x | _ -> None end +(*let debug_term msg c = + let genv = Global.env () in + Feedback.msg_debug + Pp.(str msg ++ str " " ++ pr_constr genv (Evd.from_env genv) c); + c + *) module ECstOp = struct type elt = ECstOpT.t @@ -250,7 +429,15 @@ module ECstOp = struct let table = table let cast x = CstOp x let dest = function CstOp x -> Some x | _ -> None - let mk_elt evd i a = {source = a.(0); target = a.(1); inj = a.(3)} + + let mk_elt evd i a = + { source = a.(0) + ; target = a.(1) + ; inj = get_inj evd a.(3) + ; cst = a.(4) + ; cstinj = a.(5) + ; is_construct = EConstr.isConstruct evd a.(2) } + let get_key = 2 end @@ -265,12 +452,21 @@ module EUnOp = struct let dest = function UnOp x -> Some x | _ -> None let mk_elt evd i a = + let i1 = get_inj evd a.(4) in + 
let i2 = get_inj evd a.(5) in + let uop = a.(3) in + let tuop = a.(6) in { source1 = a.(0) ; source2 = a.(1) ; target = a.(2) - ; inj1_t = a.(4) - ; inj2_t = a.(5) - ; unop = a.(6) } + ; uop + ; inj1_t = i1 + ; inj2_t = i2 + ; tuop + ; tuopinj = a.(7) + ; is_construct = EConstr.isConstruct evd uop + ; classify_unop = classify_op (mkconvert_unop i1 i2) i1.EInjT.inj uop tuop + } let get_key = 3 end @@ -286,40 +482,48 @@ module EBinRel = struct let dest = function BinRel x -> Some x | _ -> None let mk_elt evd i a = - {source = a.(0); target = a.(1); inj = a.(3); brel = a.(4)} + let i = get_inj evd a.(3) in + let brel = a.(2) in + let tbrel = a.(4) in + { source = a.(0) + ; target = a.(1) + ; inj = get_inj evd a.(3) + ; brel + ; tbrel + ; brelinj = a.(5) + ; classify_rel = classify_op (mkconvert_rel i) i.EInjT.inj brel tbrel } let get_key = 2 end -module EPropOp = struct - type elt = EConstr.t +module EPropBinOp = struct + type elt = EPropBinOpT.t + + open EPropBinOpT let name = "PropBinOp" let table = table let cast x = PropOp x let dest = function PropOp x -> Some x | _ -> None - let mk_elt evd i a = i + let mk_elt evd i a = {op = a.(0); op_iff = a.(1)} let get_key = 0 end module EPropUnOp = struct - type elt = EConstr.t + type elt = EPropUnOpT.t + + open EPropUnOpT let name = "PropUnOp" let table = table let cast x = PropUnOp x let dest = function PropUnOp x -> Some x | _ -> None - let mk_elt evd i a = i + let mk_elt evd i a = {op = a.(0); op_iff = a.(1)} let get_key = 0 end let constr_of_term_kind = function Application c -> c | OtherTerm c -> c -let fold_declared_const f evd acc = - HConstr.fold - (fun _ (_, e) acc -> f (fst (EConstr.destConst evd (get_decl e))) acc) - !table_cache acc - module type S = sig val register : Constrexpr.constr_expr -> unit val print : unit -> unit @@ -417,118 +621,37 @@ module ESat = struct let get_key = 1 end +module ESpec = struct + open ESpecT + + type elt = ESpecT.t + + let name = "Spec" + let table = specs + let cast x = Spec x + 
let dest = function Spec x -> Some x | _ -> None + let mk_elt evd i a = {spec = a.(5)} + let get_key = 2 +end + module BinOp = MakeTable (EBinOp) module UnOp = MakeTable (EUnOp) module CstOp = MakeTable (ECstOp) module BinRel = MakeTable (EBinRel) -module PropOp = MakeTable (EPropOp) +module PropBinOp = MakeTable (EPropBinOp) module PropUnOp = MakeTable (EPropUnOp) module Saturate = MakeTable (ESat) +module Spec = MakeTable (ESpec) let init_cache () = table_cache := !table; - saturate_cache := !saturate - -(** The module [Spec] is used to register - the instances of [BinOpSpec], [UnOpSpec]. - They are not indexed and stored in a list. *) - -module Spec = struct - let table = Summary.ref ~name:"zify_Spec" [] - - let register_obj : Constr.constr -> Libobject.obj = - let cache_constr (_, c) = table := EConstr.of_constr c :: !table in - let subst_constr (subst, c) = Mod_subst.subst_mps subst c in - Libobject.declare_object - @@ Libobject.superglobal_object_nodischarge "register-zify-Spec" - ~cache:cache_constr ~subst:(Some subst_constr) - - let register c = - let env = Global.env () in - let evd = Evd.from_env env in - let _, c = Constrintern.interp_open_constr env evd c in - let _ = Lib.add_anonymous_leaf (register_obj (EConstr.to_constr evd c)) in - () - - let get () = !table - - let print () = - let env = Global.env () in - let evd = Evd.from_env env in - let constr_of_spec c = - let t = get_type_of env evd c in - match EConstr.kind evd t with - | App (intyp, args) -> pr_constr env evd args.(2) - | _ -> Pp.str "" - in - let l = - List.fold_left - (fun acc c -> Pp.(constr_of_spec c ++ str " " ++ acc)) - (Pp.str "") !table - in - Feedback.msg_notice l -end - -let unfold_decl evd = - let f cst acc = cst :: acc in - fold_declared_const f evd [] + saturate_cache := !saturate; + specs_cache := !specs open EInjT (** Get constr of lemma and projections in ZifyClasses. 
*) -let zify str = - EConstr.of_constr - (UnivGen.constr_of_monomorphic_global - (Coqlib.lib_ref ("ZifyClasses." ^ str))) - -let locate_const str = - let rf = "ZifyClasses." ^ str in - match Coqlib.lib_ref rf with - | GlobRef.ConstRef c -> c - | _ -> CErrors.anomaly Pp.(str rf ++ str " should be a constant") - -(* The following [constr] are necessary for constructing the proof terms *) -let mkapp2 = lazy (zify "mkapp2") -let mkapp = lazy (zify "mkapp") -let mkapp0 = lazy (zify "mkapp0") -let mkdp = lazy (zify "mkinjterm") -let eq_refl = lazy (zify "eq_refl") -let mkrel = lazy (zify "mkrel") -let mkprop_op = lazy (zify "mkprop_op") -let mkuprop_op = lazy (zify "mkuprop_op") -let mkdpP = lazy (zify "mkinjprop") -let iff_refl = lazy (zify "iff_refl") -let q = lazy (zify "target_prop") -let ieq = lazy (zify "injprop_ok") -let iff = lazy (zify "iff") - -(* A super-set of the previous are needed to unfold the generated proof terms. *) - -let to_unfold = - lazy - (List.rev_map locate_const - [ "source_prop" - ; "target_prop" - ; "uop_iff" - ; "op_iff" - ; "mkuprop_op" - ; "TUOp" - ; "inj_ok" - ; "TRInj" - ; "inj" - ; "source" - ; "injprop_ok" - ; "TR" - ; "TBOp" - ; "TCst" - ; "target" - ; "mkrel" - ; "mkapp2" - ; "mkapp" - ; "mkapp0" - ; "mkprop_op" ]) - (** Module [CstrTable] records terms [x] injected into [inj x] together with the corresponding type constraint. 
The terms are stored by side-effect during the traversal @@ -563,7 +686,10 @@ module CstrTable = struct List.iter (fun (_, (t : EConstr.types)) -> HConstr.add hyps_table t ()) (Tacmach.New.pf_hyps_types gl); - fun c -> HConstr.mem hyps_table c + fun c -> + let m = HConstr.mem hyps_table c in + if not m then HConstr.add hyps_table c (); + m in (* Add the constraint (cstr k) if it is not already present *) let gen k cstr = @@ -585,97 +711,183 @@ module CstrTable = struct Tacticals.New.tclIDTAC table) end -let mkvar red evd inj v = - ( if not red then - match inj.cstr with None -> () | Some ctr -> CstrTable.register evd v ctr ); - let iv = EConstr.mkApp (inj.inj, [|v|]) in - let iv = if red then Tacred.compute (Global.env ()) evd iv else iv in - EConstr.mkApp - ( force mkdp - , [| inj.source - ; inj.target - ; inj.inj - ; v - ; iv - ; EConstr.mkApp (force eq_refl, [|inj.target; iv|]) |] ) - -type texpr = - | Var of EInj.elt * EConstr.t - (** Var is a term that cannot be injected further *) - | Constant of EInj.elt * EConstr.t - (** Constant is a term that is solely built from constructors *) - | Injterm of EConstr.t - (** Injected is an injected term represented by a term of type [injterm] *) - -let is_constant = function Constant _ -> true | _ -> false - -let constr_of_texpr = function - | Constant (i, e) | Var (i, e) -> if i.isid then Some e else None - | _ -> None - -let inj_term_of_texpr evd = function - | Injterm e -> e - | Var (inj, e) -> mkvar false evd inj e - | Constant (inj, e) -> mkvar true evd inj e - -let mkapp2_id evd i (* InjTyp S3 T *) inj (* deriv i *) t (* S1 -> S2 -> S3 *) b - (* Binop S1 S2 S3 t ... 
*) dbop (* deriv b *) e1 e2 = - let default () = - let e1' = inj_term_of_texpr evd e1 in - let e2' = inj_term_of_texpr evd e2 in - EBinOpT.( - Injterm - (EConstr.mkApp - ( force mkapp2 - , [| dbop.source1 - ; dbop.source2 - ; dbop.source3 - ; dbop.target - ; t - ; dbop.inj1 - ; dbop.inj2 - ; dbop.inj3 - ; b - ; e1' - ; e2' |] ))) +type prf = + | Term (* source is built from constructors. + target = compute(inj source) + inj source == target *) + | Same (* target = source + inj source == inj target *) + | Conv of EConstr.t (* inj source == target *) + | Prf of EConstr.t * EConstr.t + +(** [eq_proof typ source target] returns (target = target : source = target) *) +let eq_proof typ source target = + EConstr.mkCast + ( EConstr.mkApp (force eq_refl, [|typ; target|]) + , DEFAULTcast + , EConstr.mkApp (force eq, [|typ; source; target|]) ) + +let interp_prf evd inj source prf = + let inj_source = + if inj.EInjT.isid then source else EConstr.mkApp (inj.EInjT.inj, [|source|]) in - if not inj.isid then default () - else - match (e1, e2) with - | Constant (_, e1), Constant (_, e2) - |Var (_, e1), Var (_, e2) - |Constant (_, e1), Var (_, e2) - |Var (_, e1), Constant (_, e2) -> - Var (inj, EConstr.mkApp (t, [|e1; e2|])) - | _, _ -> default () - -let mkapp_id evd i inj (unop, u) f e1 = - EUnOpT.( - if EConstr.eq_constr evd u.unop f then - (* Injection does nothing *) - match e1 with - | Constant (_, e) | Var (_, e) -> Var (inj, EConstr.mkApp (f, [|e|])) - | Injterm e1 -> - Injterm - (EConstr.mkApp - ( force mkapp - , [| u.source1 - ; u.source2 - ; u.target - ; f - ; u.inj1_t - ; u.inj2_t - ; unop - ; e1 |] )) + match prf with + | Term -> + let target = Tacred.compute (Global.env ()) evd inj_source in + (target, EConstr.mkApp (force eq_refl, [|inj.target; target|])) + | Same -> + (inj_source, EConstr.mkApp (force eq_refl, [|inj.target; inj_source|])) + | Conv trm -> (trm, eq_proof inj.target inj_source trm) + | Prf (target, prf) -> (target, prf) + +let pp_prf prf = + match prf 
with + | Term -> Pp.str "Term" + | Same -> Pp.str "Same" + | Conv t -> Pp.(str "Conv " ++ gl_pr_constr t) + | Prf (_, _) -> Pp.str "Prf " + +let interp_prf evd inj source prf = + let t, prf' = interp_prf evd inj source prf in + if debug then + Feedback.msg_debug + Pp.( + str "interp_prf " ++ gl_pr_constr inj.EInjT.inj ++ str " " + ++ gl_pr_constr source ++ str " = " ++ gl_pr_constr t ++ str " by " + ++ gl_pr_constr prf' ++ str " from " ++ pp_prf prf ++ fnl ()); + (t, prf') + +let mkvar evd inj e = + (match inj.cstr with None -> () | Some ctr -> CstrTable.register evd e ctr); + Same + +let pp_prf evd inj src prf = + let t, prf' = interp_prf evd inj src prf in + Pp.( + gl_pr_constr inj.EInjT.inj ++ str " " ++ gl_pr_constr src ++ str " = " + ++ gl_pr_constr t ++ str " by " + ++ + match prf with + | Term -> Pp.str "Term" + | Same -> Pp.str "Same" + | Conv t -> Pp.str "Conv" + | Prf (_, p) -> Pp.str "Prf " ++ gl_pr_constr p) + +let conv_of_term evd op isid arg = + Tacred.compute (Global.env ()) evd + (if isid then arg else EConstr.mkApp (op, [|arg|])) + +let app_unop evd src unop arg prf = + let cunop = unop.EUnOpT.classify_unop in + let default a' prf' = + let target = EConstr.mkApp (unop.EUnOpT.tuop, [|a'|]) in + EUnOpT.( + Prf + ( target + , EConstr.mkApp + ( force mkapp + , [| unop.source1 + ; unop.source2 + ; unop.target + ; unop.uop + ; unop.inj1_t.EInjT.inj + ; unop.inj2_t.EInjT.inj + ; unop.tuop + ; unop.tuopinj + ; arg + ; a' + ; prf' |] ) )) + in + match prf with + | Term -> ( + if unop.EUnOpT.is_construct then Term (* Keep rebuilding *) else - let e1 = inj_term_of_texpr evd e1 in - Injterm + match cunop with + | OpInj -> Conv (conv_of_term evd unop.EUnOpT.uop false arg) + | OpSame -> Same + | _ -> + let a', prf = interp_prf evd unop.EUnOpT.inj1_t arg prf in + default a' prf ) + | Same -> ( + match cunop with + | OpSame -> Same + | OpInj -> Same + | OpConv -> + Conv (EConstr.mkApp - ( force mkapp - , [|u.source1; u.source2; u.target; f; u.inj1_t; u.inj2_t; 
unop; e1|] - ))) - -type typed_constr = {constr : EConstr.t; typ : EConstr.t} + ( unop.EUnOpT.tuop + , [|EConstr.mkApp (unop.EUnOpT.inj1_t.EInjT.inj, [|arg|])|] )) + | OpOther -> + let a', prf' = interp_prf evd unop.EUnOpT.inj1_t arg prf in + default a' prf' ) + | Conv a' -> ( + match cunop with + | OpSame | OpConv -> Conv (EConstr.mkApp (unop.EUnOpT.tuop, [|a'|])) + | OpInj -> Conv a' + | _ -> + let a', prf = interp_prf evd unop.EUnOpT.inj1_t arg prf in + default a' prf ) + | Prf (a', prf') -> default a' prf' + +let app_unop evd src unop arg prf = + let res = app_unop evd src unop arg prf in + if debug then + Feedback.msg_debug + Pp.( + str "\napp_unop " + ++ pp_prf evd unop.EUnOpT.inj1_t arg prf + ++ str " => " + ++ pp_prf evd unop.EUnOpT.inj2_t src res); + res + +let app_binop evd src binop arg1 prf1 arg2 prf2 = + EBinOpT.( + let mkApp a1 a2 = EConstr.mkApp (binop.tbop, [|a1; a2|]) in + let to_conv inj arg = function + | Term -> conv_of_term evd inj.EInjT.inj inj.EInjT.isid arg + | Same -> + if inj.EInjT.isid then arg else EConstr.mkApp (inj.EInjT.inj, [|arg|]) + | Conv t -> t + | Prf _ -> failwith "Prf is not convertible" + in + let default a1 prf1 a2 prf2 = + let res = mkApp a1 a2 in + let prf = + EBinOpT.( + EConstr.mkApp + ( force mkapp2 + , [| binop.source1 + ; binop.source2 + ; binop.source3 + ; binop.target + ; binop.bop + ; binop.inj1.EInjT.inj + ; binop.inj2.EInjT.inj + ; binop.inj3.EInjT.inj + ; binop.tbop + ; binop.tbopinj + ; arg1 + ; a1 + ; prf1 + ; arg2 + ; a2 + ; prf2 |] )) + in + Prf (res, prf) + in + match (binop.EBinOpT.classify_binop, prf1, prf2) with + | OpSame, Same, Same -> Same + | OpSame, Term, Same | OpSame, Same, Term -> Same + | OpSame, (Term | Same | Conv _), (Term | Same | Conv _) -> + let t1 = to_conv binop.EBinOpT.inj1 arg1 prf1 in + let t2 = to_conv binop.EBinOpT.inj1 arg2 prf2 in + Conv (mkApp t1 t2) + | _, _, _ -> + let a1, prf1 = interp_prf evd binop.inj1 arg1 prf1 in + let a2, prf2 = interp_prf evd binop.inj2 arg2 prf2 in + 
default a1 prf1 a2 prf2) + +type typed_constr = {constr : EConstr.t; typ : EConstr.t; inj : EInjT.t} let get_injection env evd t = match snd (HConstr.find t !table_cache) with @@ -702,23 +914,68 @@ let is_prop env sigma term = let sort = Retyping.get_sort_of env sigma term in Sorts.is_prop sort -(** [get_application env evd e] expresses [e] as an application (c a) +let is_arrow env evd a p1 p2 = + is_prop env evd p1 + && is_prop + (EConstr.push_rel (Context.Rel.Declaration.LocalAssum (a, p1)) env) + evd p2 + && (a.Context.binder_name = Names.Anonymous || EConstr.Vars.noccurn evd 1 p2) + +(** [get_operator env evd e] expresses [e] as an application (c a) where c is the head symbol and [a] is the array of arguments. The function also transforms (x -> y) as (arrow x y) *) -let get_operator env evd e = - let is_arrow a p1 p2 = - is_prop env evd p1 - && is_prop - (EConstr.push_rel (Context.Rel.Declaration.LocalAssum (a, p1)) env) - evd p2 - && (a.Context.binder_name = Names.Anonymous || EConstr.Vars.noccurn evd 1 p2) - in +let get_operator barrow env evd e = match EConstr.kind evd e with - | Prod (a, p1, p2) when is_arrow a p1 p2 -> (arrow, [|p1; p2|]) + | Prod (a, p1, p2) -> + if barrow && is_arrow env evd a p1 p2 then (arrow, [|p1; p2|]) + else raise Not_found + | App (c, a) -> ( + match EConstr.kind evd c with + | Construct _ (* e.g. Z0 , Z.pos *) | Const _ (* e.g. Z.max *) | Proj _ + |Lambda _ (* e.g projections *) | Ind _ (* e.g. 
eq *) -> + (c, a) + | _ -> raise Not_found ) + | Construct _ -> (EConstr.whd_evar evd e, [||]) + | _ -> raise Not_found + +let decompose_app env evd e = + match EConstr.kind evd e with + | Prod (a, p1, p2) when is_arrow env evd a p1 p2 -> (arrow, [|p1; p2|]) | App (c, a) -> (c, a) - | _ -> (e, [||]) + | _ -> (EConstr.whd_evar evd e, [||]) + +type 'op propop = {op : 'op; op_constr : EConstr.t; op_iff : EConstr.t} -let is_convertible env evd k t = Reductionops.check_conv env evd k t +let mk_propop op c1 c2 = {op; op_constr = c1; op_iff = c2} + +type prop_binop = AND | OR | IFF | IMPL +type prop_unop = NOT + +type prop_op = + | BINOP of prop_binop propop * EConstr.t * EConstr.t + | UNOP of prop_unop propop * EConstr.t + | OTHEROP of EConstr.t * EConstr.t array + +let classify_prop env evd e = + match EConstr.kind evd e with + | Prod (a, p1, p2) when is_arrow env evd a p1 p2 -> + BINOP (mk_propop IMPL arrow (force op_impl_morph), p1, p2) + | App (c, a) -> ( + match Array.length a with + | 1 -> + if EConstr.eq_constr_nounivs evd (force op_not) c then + UNOP (mk_propop NOT c (force op_not_morph), a.(0)) + else OTHEROP (c, a) + | 2 -> + if EConstr.eq_constr_nounivs evd (force op_and) c then + BINOP (mk_propop AND c (force op_and_morph), a.(0), a.(1)) + else if EConstr.eq_constr_nounivs evd (force op_or) c then + BINOP (mk_propop OR c (force op_or_morph), a.(0), a.(1)) + else if EConstr.eq_constr_nounivs evd (force op_iff) c then + BINOP (mk_propop IFF c (force op_iff_morph), a.(0), a.(1)) + else OTHEROP (c, a) + | _ -> OTHEROP (c, a) ) + | _ -> OTHEROP (e, [||]) (** [match_operator env evd hd arg (t,d)] - hd is head operator of t @@ -744,223 +1001,242 @@ let match_operator env evd hd args (t, d) = | PropUnOp _ -> decomp t 1 | _ -> None ) +let pp_trans_expr env evd e res = + let {deriv = inj} = get_injection env evd e.typ in + if debug then + Feedback.msg_debug Pp.(str "\ntrans_expr " ++ pp_prf evd inj e.constr res); + res + let rec trans_expr env evd e = - (* Get the 
injection *) - let {decl = i; deriv = inj} = get_injection env evd e.typ in + let inj = e.inj in let e = e.constr in - if EConstr.isConstruct evd e then Constant (inj, e) (* Evaluate later *) - else - let c, a = get_operator env evd e in - try - let k, t = - find_option - (match_operator env evd c a) - (HConstr.find_all c !table_cache) + try + let c, a = get_operator false env evd e in + let k, t = + find_option (match_operator env evd c a) (HConstr.find_all c !table_cache) + in + let n = Array.length a in + match k with + | CstOp {deriv = c'} -> + ECstOpT.(if c'.is_construct then Term else Prf (c'.cst, c'.cstinj)) + | UnOp {deriv = unop} -> + let prf = + trans_expr env evd + { constr = a.(n - 1) + ; typ = unop.EUnOpT.source1 + ; inj = unop.EUnOpT.inj1_t } in - let n = Array.length a in - match k with - | CstOp {decl = c'} -> - Injterm - (EConstr.mkApp (force mkapp0, [|inj.source; inj.target; e; i; c'|])) - | UnOp {decl = unop; deriv = u} -> - let a' = - trans_expr env evd {constr = a.(n - 1); typ = u.EUnOpT.source1} - in - if is_constant a' && EConstr.isConstruct evd t then Constant (inj, e) - else mkapp_id evd i inj (unop, u) t a' - | BinOp {decl = binop; deriv = b} -> - let a0 = - trans_expr env evd {constr = a.(n - 2); typ = b.EBinOpT.source1} - in - let a1 = - trans_expr env evd {constr = a.(n - 1); typ = b.EBinOpT.source2} - in - if is_constant a0 && is_constant a1 && EConstr.isConstruct evd t then - Constant (inj, e) - else mkapp2_id evd i inj t binop b a0 a1 - | d -> Var (inj, e) - with Not_found -> Var (inj, e) + app_unop evd e unop a.(n - 1) prf + | BinOp {deriv = binop} -> + let prf1 = + trans_expr env evd + { constr = a.(n - 2) + ; typ = binop.EBinOpT.source1 + ; inj = binop.EBinOpT.inj1 } + in + let prf2 = + trans_expr env evd + { constr = a.(n - 1) + ; typ = binop.EBinOpT.source2 + ; inj = binop.EBinOpT.inj2 } + in + app_binop evd e binop a.(n - 2) prf1 a.(n - 1) prf2 + | d -> mkvar evd inj e + with Not_found -> + (* Feedback.msg_debug + Pp.(str "Not 
found " ++ Termops.Internal.debug_print_constr e); *) + mkvar evd inj e let trans_expr env evd e = - try trans_expr env evd e + try pp_trans_expr env evd e (trans_expr env evd e) with Not_found -> raise (CErrors.user_err ( Pp.str "Missing injection for type " ++ Printer.pr_leconstr_env env evd e.typ )) -type tprop = - | TProp of EConstr.t (** Transformed proposition *) - | IProp of EConstr.t (** Identical proposition *) - -let mk_iprop e = - EConstr.mkApp (force mkdpP, [|e; e; EConstr.mkApp (force iff_refl, [|e|])|]) - -let inj_prop_of_tprop = function TProp p -> p | IProp e -> mk_iprop e +type prfp = + | TProof of EConstr.t * EConstr.t (** Proof of tranformed proposition *) + | CProof of EConstr.t (** Transformed proposition is convertible *) + | IProof (** Transformed proposition is identical *) + +let pp_prfp = function + | TProof (t, prf) -> + Pp.str "TProof " ++ gl_pr_constr t ++ Pp.str " by " ++ gl_pr_constr prf + | CProof t -> Pp.str "CProof " ++ gl_pr_constr t + | IProof -> Pp.str "IProof" + +let trans_binrel evd src rop a1 prf1 a2 prf2 = + EBinRelT.( + match (rop.classify_rel, prf1, prf2) with + | OpSame, Same, Same -> IProof + | (OpSame | OpConv), Conv t1, Conv t2 -> + CProof (EConstr.mkApp (rop.tbrel, [|t1; t2|])) + | (OpSame | OpConv), (Same | Term | Conv _), (Same | Term | Conv _) -> + let a1', _ = interp_prf evd rop.inj a1 prf1 in + let a2', _ = interp_prf evd rop.inj a2 prf2 in + CProof (EConstr.mkApp (rop.tbrel, [|a1'; a2'|])) + | _, _, _ -> + let a1', prf1 = interp_prf evd rop.inj a1 prf1 in + let a2', prf2 = interp_prf evd rop.inj a2 prf2 in + TProof + ( EConstr.mkApp (rop.EBinRelT.tbrel, [|a1'; a2'|]) + , EConstr.mkApp + ( force mkrel + , [| rop.source + ; rop.target + ; rop.brel + ; rop.EBinRelT.inj.EInjT.inj + ; rop.EBinRelT.tbrel + ; rop.EBinRelT.brelinj + ; a1 + ; a1' + ; prf1 + ; a2 + ; a2' + ; prf2 |] ) )) + +let trans_binrel evd src rop a1 prf1 a2 prf2 = + let res = trans_binrel evd src rop a1 prf1 a2 prf2 in + if debug then 
Feedback.msg_debug Pp.(str "\ntrans_binrel " ++ pp_prfp res); + res + +let mkprf t p = + EConstr.( + match p with + | IProof -> (t, mkApp (force iff_refl, [|t|])) + | CProof t' -> (t', mkApp (force eq_iff, [|t; t'; eq_proof mkProp t t'|])) + | TProof (t', p) -> (t', p)) + +let mkprf t p = + let t', p = mkprf t p in + if debug then + Feedback.msg_debug + Pp.( + str "mkprf " ++ gl_pr_constr t ++ str " <-> " ++ gl_pr_constr t' + ++ str " by " ++ gl_pr_constr p); + (t', p) + +let trans_bin_prop op_constr op_iff t1 p1 t2 p2 = + match (p1, p2) with + | IProof, IProof -> IProof + | CProof t1', IProof -> CProof (EConstr.mkApp (op_constr, [|t1'; t2|])) + | IProof, CProof t2' -> CProof (EConstr.mkApp (op_constr, [|t1; t2'|])) + | CProof t1', CProof t2' -> CProof (EConstr.mkApp (op_constr, [|t1'; t2'|])) + | _, _ -> + let t1', p1 = mkprf t1 p1 in + let t2', p2 = mkprf t2 p2 in + TProof + ( EConstr.mkApp (op_constr, [|t1'; t2'|]) + , EConstr.mkApp (op_iff, [|t1; t2; t1'; t2'; p1; p2|]) ) + +let trans_bin_prop op_constr op_iff t1 p1 t2 p2 = + let prf = trans_bin_prop op_constr op_iff t1 p1 t2 p2 in + if debug then Feedback.msg_debug (pp_prfp prf); + prf + +let trans_un_prop op_constr op_iff p1 prf1 = + match prf1 with + | IProof -> IProof + | CProof p1' -> CProof (EConstr.mkApp (op_constr, [|p1'|])) + | TProof (p1', prf) -> + TProof + ( EConstr.mkApp (op_constr, [|p1'|]) + , EConstr.mkApp (op_iff, [|p1; p1'; prf|]) ) let rec trans_prop env evd e = - let c, a = get_operator env evd e in - try - let k, t = - find_option (match_operator env evd c a) (HConstr.find_all c !table_cache) - in - let n = Array.length a in - match k with - | PropOp {decl = rop} -> ( - try - let t1 = trans_prop env evd a.(n - 2) in - let t2 = trans_prop env evd a.(n - 1) in - match (t1, t2) with - | IProp _, IProp _ -> IProp e - | _, _ -> - let t1 = inj_prop_of_tprop t1 in - let t2 = inj_prop_of_tprop t2 in - TProp (EConstr.mkApp (force mkprop_op, [|t; rop; t1; t2|])) - with Not_found -> IProp e ) - | 
BinRel {decl = br; deriv = rop} -> ( - try + match classify_prop env evd e with + | BINOP ({op_constr; op_iff}, p1, p2) -> + let prf1 = trans_prop env evd p1 in + let prf2 = trans_prop env evd p2 in + trans_bin_prop op_constr op_iff p1 prf1 p2 prf2 + | UNOP ({op_constr; op_iff}, p1) -> + let prf1 = trans_prop env evd p1 in + trans_un_prop op_constr op_iff p1 prf1 + | OTHEROP (c, a) -> ( + try + let k, t = + find_option + (match_operator env evd c a) + (HConstr.find_all c !table_cache) + in + let n = Array.length a in + match k with + | BinRel {decl = br; deriv = rop} -> let a1 = - trans_expr env evd {constr = a.(n - 2); typ = rop.EBinRelT.source} + trans_expr env evd + { constr = a.(n - 2) + ; typ = rop.EBinRelT.source + ; inj = rop.EBinRelT.inj } in let a2 = - trans_expr env evd {constr = a.(n - 1); typ = rop.EBinRelT.source} + trans_expr env evd + { constr = a.(n - 1) + ; typ = rop.EBinRelT.source + ; inj = rop.EBinRelT.inj } in - if EConstr.eq_constr evd t rop.EBinRelT.brel then - match (constr_of_texpr a1, constr_of_texpr a2) with - | Some e1, Some e2 -> IProp (EConstr.mkApp (t, [|e1; e2|])) - | _, _ -> - let a1 = inj_term_of_texpr evd a1 in - let a2 = inj_term_of_texpr evd a2 in - TProp - (EConstr.mkApp - ( force mkrel - , [| rop.EBinRelT.source - ; rop.EBinRelT.target - ; t - ; rop.EBinRelT.inj - ; br - ; a1 - ; a2 |] )) - else - let a1 = inj_term_of_texpr evd a1 in - let a2 = inj_term_of_texpr evd a2 in - TProp - (EConstr.mkApp - ( force mkrel - , [| rop.EBinRelT.source - ; rop.EBinRelT.target - ; t - ; rop.EBinRelT.inj - ; br - ; a1 - ; a2 |] )) - with Not_found -> IProp e ) - | PropUnOp {decl = rop} -> ( - try - let t1 = trans_prop env evd a.(n - 1) in - match t1 with - | IProp _ -> IProp e - | _ -> - let t1 = inj_prop_of_tprop t1 in - TProp (EConstr.mkApp (force mkuprop_op, [|t; rop; t1|])) - with Not_found -> IProp e ) - | _ -> IProp e - with Not_found -> IProp e - -let unfold n env evd c = - let cbv l = - CClosure.RedFlags.( - Tacred.cbv_norm_flags - 
(mkflags - (fBETA :: fMATCH :: fFIX :: fCOFIX :: fZETA :: List.rev_map fCONST l))) - in - let unfold_decl = unfold_decl evd in - (* Unfold the let binding *) - let c = - match n with - | None -> c - | Some n -> - Tacred.unfoldn [(Locus.AllOccurrences, Names.EvalVarRef n)] env evd c - in - (* Reduce the term *) - let c = cbv (List.rev_append (force to_unfold) unfold_decl) env evd c in - c + trans_binrel evd e rop a.(n - 2) a1 a.(n - 1) a2 + | _ -> IProof + with Not_found -> IProof ) let trans_check_prop env evd t = - if is_prop env evd t then - (*let t = Tacred.unfoldn [Locus.AllOccurrences, Names.EvalConstRef coq_not] env evd t in*) - match trans_prop env evd t with IProp e -> None | TProp e -> Some e - else None + if is_prop env evd t then Some (trans_prop env evd t) else None + +let get_hyp_typ = function + | NamedDecl.LocalDef (h, _, ty) | NamedDecl.LocalAssum (h, ty) -> + (h.Context.binder_name, EConstr.of_constr ty) let trans_hyps env evd l = List.fold_left - (fun acc (h, p) -> - match trans_check_prop env evd p with + (fun acc decl -> + let h, ty = get_hyp_typ decl in + match trans_check_prop env evd ty with | None -> acc - | Some p' -> (h, p, p') :: acc) - [] (List.rev l) - -(* Only used if a direct rewrite fails *) -let trans_hyp h t = - Tactics.( + | Some p' -> (h, ty, p') :: acc) + [] l + +let trans_hyp h t0 prfp = + if debug then + Feedback.msg_debug Pp.(str "trans_hyp: " ++ pp_prfp prfp ++ fnl ()); + match prfp with + | IProof -> Tacticals.New.tclIDTAC (* Should detect before *) + | CProof t' -> + Proofview.Goal.enter (fun gl -> + let env = Tacmach.New.pf_env gl in + let evd = Tacmach.New.project gl in + let t' = Reductionops.nf_betaiota env evd t' in + Tactics.change_in_hyp ~check:true None + (Tactics.make_change_arg t') + (h, Locus.InHypTypeOnly)) + | TProof (t', prf) -> Tacticals.New.( Proofview.Goal.enter (fun gl -> let env = Tacmach.New.pf_env gl in - let n = - fresh_id_in_env Id.Set.empty (Names.Id.of_string "__zify") env + let evd = 
Tacmach.New.project gl in + let target = Reductionops.nf_betaiota env evd t' in + let h' = Tactics.fresh_id_in_env Id.Set.empty h env in + let prf = + EConstr.mkApp (force rew_iff, [|t0; target; prf; EConstr.mkVar h|]) in - let h' = fresh_id_in_env Id.Set.empty h env in - tclTHENLIST - [ letin_tac None (Names.Name n) t None - Locus.{onhyps = None; concl_occs = NoOccurrences} - ; assert_by (Name.Name h') - (EConstr.mkApp (force q, [|EConstr.mkVar n|])) - (tclTHEN - (Equality.rewriteRL - (EConstr.mkApp (force ieq, [|EConstr.mkVar n|]))) - (exact_check (EConstr.mkVar h))) - ; reduct_in_hyp ~check:true ~reorder:false (unfold (Some n)) - (h', Locus.InHyp) - ; clear [n] - ; (* [clear H] may fail if [h] has dependencies *) - tclTRY (clear [h]) ]))) - -let is_progress_rewrite evd t rew = - match EConstr.kind evd rew with - | App (c, [|lhs; rhs|]) -> - if EConstr.eq_constr evd (force iff) c then - (* This is a successful rewriting *) - not (EConstr.eq_constr evd lhs rhs) - else - CErrors.anomaly - Pp.( - str "is_progress_rewrite: not a rewrite" - ++ pr_constr (Global.env ()) evd rew) - | _ -> failwith "is_progress_rewrite: not even an application" - -let trans_hyp h t0 t = - Tacticals.New.( + tclTHEN + (Tactics.pose_proof (Name.Name h') prf) + (tclTRY + (tclTHEN (Tactics.clear [h]) (Tactics.rename_hyp [(h', h)]))))) + +let trans_concl prfp = + if debug then + Feedback.msg_debug Pp.(str "trans_concl: " ++ pp_prfp prfp ++ fnl ()); + match prfp with + | IProof -> Tacticals.New.tclIDTAC + | CProof t -> Proofview.Goal.enter (fun gl -> let env = Tacmach.New.pf_env gl in let evd = Tacmach.New.project gl in - let t' = unfold None env evd (EConstr.mkApp (force ieq, [|t|])) in - if is_progress_rewrite evd t0 (get_type_of env evd t') then - tclFIRST - [ Equality.general_rewrite_in true Locus.AllOccurrences true false h - t' false - ; trans_hyp h t ] - else tclIDTAC)) - -let trans_concl t = - Tacticals.New.( + let t' = Reductionops.nf_betaiota env evd t in + Tactics.change_concl t') + 
| TProof (t, prf) -> Proofview.Goal.enter (fun gl -> - let concl = Tacmach.New.pf_concl gl in - let env = Tacmach.New.pf_env gl in - let evd = Tacmach.New.project gl in - let t' = unfold None env evd (EConstr.mkApp (force ieq, [|t|])) in - if is_progress_rewrite evd concl (get_type_of env evd t') then - Equality.general_rewrite true Locus.AllOccurrences true false t' - else tclIDTAC)) + Equality.general_rewrite true Locus.AllOccurrences true false prf) let tclTHENOpt e tac tac' = match e with None -> tac' | Some e' -> Tacticals.New.tclTHEN (tac e') tac' @@ -976,6 +1252,16 @@ let assert_inj t = with Not_found -> Tacticals.New.tclFAIL 0 (Pp.str " InjTyp does not exist")) +let elim_binding x t ty = + Proofview.Goal.enter (fun gl -> + let env = Tacmach.New.pf_env gl in + let h = + Tactics.fresh_id_in_env Id.Set.empty (Nameops.add_prefix "heq_" x) env + in + Tacticals.New.tclTHEN + (Tactics.pose_proof (Name h) (eq_proof ty (EConstr.mkVar x) t)) + (Tacticals.New.tclTRY (Tactics.clear_body [x]))) + let do_let tac (h : Constr.named_declaration) = match h with | Context.Named.Declaration.LocalAssum _ -> Tacticals.New.tclIDTAC @@ -985,22 +1271,25 @@ let do_let tac (h : Constr.named_declaration) = let evd = Tacmach.New.project gl in try ignore (get_injection env evd (EConstr.of_constr ty)); - tac id.Context.binder_name t ty + tac id.Context.binder_name (EConstr.of_constr t) + (EConstr.of_constr ty) with Not_found -> Tacticals.New.tclIDTAC) -let iter_let tac = +let iter_let_aux tac = Proofview.Goal.enter (fun gl -> let env = Tacmach.New.pf_env gl in let sign = Environ.named_context env in + init_cache (); Tacticals.New.tclMAP (do_let tac) sign) let iter_let (tac : Ltac_plugin.Tacinterp.Value.t) = - init_cache (); - iter_let (fun (id : Names.Id.t) (t : Constr.types) (ty : Constr.types) -> + iter_let_aux (fun (id : Names.Id.t) t ty -> Ltac_plugin.Tacinterp.Value.apply tac [ Ltac_plugin.Tacinterp.Value.of_constr (EConstr.mkVar id) - ; Ltac_plugin.Tacinterp.Value.of_constr 
(EConstr.of_constr t) - ; Ltac_plugin.Tacinterp.Value.of_constr (EConstr.of_constr ty) ]) + ; Ltac_plugin.Tacinterp.Value.of_constr t + ; Ltac_plugin.Tacinterp.Value.of_constr ty ]) + +let elim_let = iter_let_aux elim_binding let zify_tac = Proofview.Goal.enter (fun gl -> @@ -1009,8 +1298,9 @@ let zify_tac = init_cache (); let evd = Tacmach.New.project gl in let env = Tacmach.New.pf_env gl in + let sign = Environ.named_context env in let concl = trans_check_prop env evd (Tacmach.New.pf_concl gl) in - let hyps = trans_hyps env evd (Tacmach.New.pf_hyps_types gl) in + let hyps = trans_hyps env evd sign in let l = CstrTable.get () in tclTHENOpt concl trans_concl (Tacticals.New.tclTHEN @@ -1018,14 +1308,101 @@ let zify_tac = (List.rev_map (fun (h, p, t) -> trans_hyp h p t) hyps)) (CstrTable.gen_cstr l))) -let iter_specs tac = - Tacticals.New.tclTHENLIST - (List.fold_left (fun acc d -> tac d :: acc) [] (Spec.get ())) +type pscript = Set of Names.Id.t * EConstr.t | Pose of Names.Id.t * EConstr.t + +type spec_env = + { map : Names.Id.t HConstr.t + ; spec_name : Names.Id.t + ; term_name : Names.Id.t + ; fresh : Nameops.Subscript.t + ; proofs : pscript list } + +let register_constr {map; spec_name; term_name; fresh; proofs} c thm = + let tname = Nameops.add_subscript term_name fresh in + let sname = Nameops.add_subscript spec_name fresh in + ( EConstr.mkVar tname + , { map = HConstr.add c tname map + ; spec_name + ; term_name + ; fresh = Nameops.Subscript.succ fresh + ; proofs = Set (tname, c) :: Pose (sname, thm) :: proofs } ) + +let fresh_subscript env = + let ctx = (Environ.named_context_val env).Environ.env_named_map in + Nameops.Subscript.succ + (Names.Id.Map.fold + (fun id _ s -> + let _, s' = Nameops.get_subscript id in + let cmp = Nameops.Subscript.compare s s' in + if cmp = 0 then s else if cmp < 0 then s' else s) + ctx Nameops.Subscript.zero) + +let init_env sname tname s = + { map = HConstr.empty + ; spec_name = sname + ; term_name = tname + ; fresh = s + ; proofs 
= [] } + +let rec spec_of_term env evd (senv : spec_env) t = + let get_name t env = + try EConstr.mkVar (HConstr.find t senv.map) with Not_found -> t + in + let c, a = decompose_app env evd t in + if a = [||] then (* The term cannot be decomposed. *) + (get_name t senv, senv) + else + (* recursively analyse the sub-terms *) + let a', senv' = + Array.fold_right + (fun e (l, senv) -> + let r, senv = spec_of_term env evd senv e in + (r :: l, senv)) + a ([], senv) + in + let a' = Array.of_list a' in + let t' = EConstr.mkApp (c, a') in + try (EConstr.mkVar (HConstr.find t' senv'.map), senv') + with Not_found -> ( + try + match snd (HConstr.find c !specs_cache) with + | Spec s -> + let thm = EConstr.mkApp (s.deriv.ESpecT.spec, a') in + register_constr senv' t' thm + | _ -> (get_name t' senv', senv') + with Not_found -> (t', senv') ) + +let interp_pscript s = + match s with + | Set (id, c) -> + Tacticals.New.tclTHEN + (Tactics.letin_tac None (Names.Name id) c None + {Locus.onhyps = None; Locus.concl_occs = Locus.AllOccurrences}) + (Tactics.clear_body [id]) + | Pose (id, c) -> Tactics.pose_proof (Names.Name id) c + +let rec interp_pscripts l = + match l with + | [] -> Tacticals.New.tclIDTAC + | s :: l -> Tacticals.New.tclTHEN (interp_pscript s) (interp_pscripts l) -let iter_specs (tac : Ltac_plugin.Tacinterp.Value.t) = - iter_specs (fun c -> - Ltac_plugin.Tacinterp.Value.apply tac - [Ltac_plugin.Tacinterp.Value.of_constr c]) +let spec_of_hyps = + Proofview.Goal.enter (fun gl -> + let terms = + Tacmach.New.pf_concl gl :: List.map snd (Tacmach.New.pf_hyps_types gl) + in + let env = Tacmach.New.pf_env gl in + let evd = Tacmach.New.project gl in + let s = fresh_subscript env in + let env = + List.fold_left + (fun acc t -> snd (spec_of_term env evd acc t)) + (init_env (Names.Id.of_string "H") (Names.Id.of_string "z") s) + terms + in + interp_pscripts (List.rev env.proofs)) + +let iter_specs = spec_of_hyps let find_hyp evd t l = try Some (fst (List.find (fun (h, t') -> 
EConstr.eq_constr evd t t') l)) diff --git a/plugins/micromega/zify.mli b/plugins/micromega/zify.mli index 4930a845c9..2cec9d6f91 100644 --- a/plugins/micromega/zify.mli +++ b/plugins/micromega/zify.mli @@ -19,13 +19,14 @@ module UnOp : S module BinOp : S module CstOp : S module BinRel : S -module PropOp : S +module PropBinOp : S module PropUnOp : S module Spec : S module Saturate : S val zify_tac : unit Proofview.tactic val saturate : unit Proofview.tactic -val iter_specs : Ltac_plugin.Tacinterp.Value.t -> unit Proofview.tactic +val iter_specs : unit Proofview.tactic val assert_inj : EConstr.constr -> unit Proofview.tactic val iter_let : Ltac_plugin.Tacinterp.Value.t -> unit Proofview.tactic +val elim_let : unit Proofview.tactic diff --git a/plugins/ssr/ssrcommon.ml b/plugins/ssr/ssrcommon.ml index f95672a15d..6ff79ebb9b 100644 --- a/plugins/ssr/ssrcommon.ml +++ b/plugins/ssr/ssrcommon.ml @@ -1095,11 +1095,11 @@ let tclDO n tac = try tac gl with | CErrors.UserError (l, s) as e -> - let _, info = CErrors.push e in - let e' = CErrors.UserError (l, prefix i ++ s) in - Util.iraise (e', info) + let _, info = Exninfo.capture e in + let e' = CErrors.UserError (l, prefix i ++ s) in + Exninfo.iraise (e', info) | Gramlib.Ploc.Exc(loc, CErrors.UserError (l, s)) -> - raise (Gramlib.Ploc.Exc(loc, CErrors.UserError (l, prefix i ++ s))) in + raise (Gramlib.Ploc.Exc(loc, CErrors.UserError (l, prefix i ++ s))) in let rec loop i gl = if i = n then tac_err_at i gl else (tclTHEN (tac_err_at i) (loop (i + 1))) gl in diff --git a/plugins/ssr/ssrparser.mlg b/plugins/ssr/ssrparser.mlg index 3f67d55e73..cd7c7d660e 100644 --- a/plugins/ssr/ssrparser.mlg +++ b/plugins/ssr/ssrparser.mlg @@ -142,25 +142,6 @@ let add_genarg tag pr = Pptactic.declare_extra_genarg_pprule wit gen_pr gen_pr gen_pr; wit -(** Primitive parsing to avoid syntax conflicts with basic tactics. 
*) - -let accept_before_syms syms strm = - match Util.stream_nth 1 strm with - | Tok.KEYWORD sym when List.mem sym syms -> () - | _ -> raise Stream.Failure - -let accept_before_syms_or_any_id syms strm = - match Util.stream_nth 1 strm with - | Tok.KEYWORD sym when List.mem sym syms -> () - | Tok.IDENT _ -> () - | _ -> raise Stream.Failure - -let accept_before_syms_or_ids syms ids strm = - match Util.stream_nth 1 strm with - | Tok.KEYWORD sym when List.mem sym syms -> () - | Tok.IDENT id when List.mem id ids -> () - | _ -> raise Stream.Failure - open Ssrast let pr_id = Ppconstr.pr_id let pr_name = function Name id -> pr_id id | Anonymous -> str "_" @@ -746,13 +727,11 @@ let pushIPatNoop = function | pats :: orpat -> (IPatNoop :: pats) :: orpat | [] -> [] -let test_ident_no_do _ strm = - match Util.stream_nth 0 strm with - | Tok.IDENT s when s <> "do" -> () - | _ -> raise Stream.Failure - let test_ident_no_do = - Pcoq.Entry.of_parser "test_ident_no_do" test_ident_no_do + let open Pcoq.Lookahead in + to_entry "test_ident_no_do" begin + lk_ident_except ["do"] + end } @@ -1005,12 +984,11 @@ END { -let accept_ssrfwdid _ strm = - match stream_nth 0 strm with - | Tok.IDENT id -> accept_before_syms_or_any_id [":"; ":="; "("] strm - | _ -> raise Stream.Failure - -let test_ssrfwdid = Pcoq.Entry.of_parser "test_ssrfwdid" accept_ssrfwdid +let test_ssrfwdid = + let open Pcoq.Lookahead in + to_entry "test_ssrfwdid" begin + lk_ident >> (lk_ident <+> lk_kws [":"; ":="; "("]) + end } @@ -1589,13 +1567,12 @@ END let sq_brace_tacnames = ["first"; "solve"; "do"; "rewrite"; "have"; "suffices"; "wlog"] (* "by" is a keyword *) -let accept_ssrseqvar _ strm = - match stream_nth 0 strm with - | Tok.IDENT id when not (List.mem id sq_brace_tacnames) -> - accept_before_syms_or_ids ["["] ["first";"last"] strm - | _ -> raise Stream.Failure -let test_ssrseqvar = Pcoq.Entry.of_parser "test_ssrseqvar" accept_ssrseqvar +let test_ssrseqvar = + let open Pcoq.Lookahead in + to_entry "test_ssrseqvar" 
begin + lk_ident_except sq_brace_tacnames >> (lk_kws ["[";"first";"last"]) + end let swaptacarg (loc, b) = (b, []), Some (TacId []) @@ -1985,15 +1962,11 @@ END { -let accept_ssreqid _ strm = - match Util.stream_nth 0 strm with - | Tok.IDENT _ -> accept_before_syms [":"] strm - | Tok.KEYWORD ":" -> () - | Tok.KEYWORD pat when List.mem pat ["_"; "?"; "->"; "<-"] -> - accept_before_syms [":"] strm - | _ -> raise Stream.Failure - -let test_ssreqid = Pcoq.Entry.of_parser "test_ssreqid" accept_ssreqid +let test_ssreqid = + let open Pcoq.Lookahead in + to_entry "test_ssreqid" begin + ((lk_ident <+> lk_kws ["_"; "?"; "->"; "<-"]) >> lk_kw ":") <+> lk_kw ":" + end } @@ -2614,12 +2587,11 @@ END { -let accept_idcomma _ strm = - match stream_nth 0 strm with - | Tok.IDENT _ | Tok.KEYWORD "_" -> accept_before_syms [","] strm - | _ -> raise Stream.Failure - -let test_idcomma = Pcoq.Entry.of_parser "test_idcomma" accept_idcomma +let test_idcomma = + let open Pcoq.Lookahead in + to_entry "test_idcomma" begin + (lk_ident <+> lk_kw "_") >> lk_kw "," + end } diff --git a/pretyping/cases.ml b/pretyping/cases.ml index 55c1f41c2c..afe776dced 100644 --- a/pretyping/cases.ml +++ b/pretyping/cases.ml @@ -73,11 +73,11 @@ let error_wrong_numarg_inductive ?loc env c n = let list_try_compile f l = let rec aux errors = function - | [] -> if errors = [] then anomaly (str "try_find_f.") else iraise (List.last errors) + | [] -> if errors = [] then anomaly (str "try_find_f.") else Exninfo.iraise (List.last errors) | h::t -> try f h with UserError _ | TypeError _ | PretypeError _ | PatternMatchingError _ as e -> - let e = CErrors.push e in + let e = Exninfo.capture e in aux (e::errors) t in aux [] l diff --git a/pretyping/pretyping.ml b/pretyping/pretyping.ml index ac1a4e88ef..1269488af3 100644 --- a/pretyping/pretyping.ml +++ b/pretyping/pretyping.ml @@ -87,9 +87,9 @@ let search_guard ?loc env possible_indexes fixdefs = let fix = ((indexes, 0),fixdefs) in (try check_fix env fix with reraise -> - let 
(e, info) = CErrors.push reraise in + let (e, info) = Exninfo.capture reraise in let info = Option.cata (fun loc -> Loc.add_loc info loc) info loc in - iraise (e, info)); + Exninfo.iraise (e, info)); indexes else (* we now search recursively among all combinations *) @@ -266,8 +266,8 @@ let apply_heuristics env sigma fail_evar = let flags = default_flags_of (Typeclasses.classes_transparent_state ()) in try solve_unif_constraints_with_heuristics ~flags env sigma with e when CErrors.noncritical e -> - let e = CErrors.push e in - if fail_evar then iraise e else sigma + let e = Exninfo.capture e in + if fail_evar then Exninfo.iraise e else sigma let check_typeclasses_instances_are_solved ~program_mode env current_sigma frozen = (* Naive way, call resolution again with failure flag *) @@ -753,9 +753,9 @@ struct let cofix = (i, fixdecls) in (try check_cofix !!env (i, nf_fix sigma fixdecls) with reraise -> - let (e, info) = CErrors.push reraise in + let (e, info) = Exninfo.capture reraise in let info = Option.cata (Loc.add_loc info) info loc in - iraise (e, info)); + Exninfo.iraise (e, info)); make_judge (mkCoFix cofix) ftys.(i) in discard_trace @@ inh_conv_coerce_to_tycon ?loc ~program_mode resolve_tc env sigma fixj tycon @@ -946,9 +946,9 @@ struct try judge_of_product !!env name j j' with TypeError _ as e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in let info = Option.cata (Loc.add_loc info) info loc in - iraise (e, info) in + Exninfo.iraise (e, info) in discard_trace @@ inh_conv_coerce_to_tycon ?loc ~program_mode resolve_tc env sigma resj tycon let pretype_letin self (name, c1, t, c2) = diff --git a/pretyping/reductionops.ml b/pretyping/reductionops.ml index 838bf22c66..b07ae8788a 100644 --- a/pretyping/reductionops.ml +++ b/pretyping/reductionops.ml @@ -1457,12 +1457,15 @@ let pb_equal = function | Reduction.CUMUL -> Reduction.CONV | Reduction.CONV -> Reduction.CONV -let report_anomaly e = - let msg = Pp.(str "Conversion test raised an 
anomaly:" ++ - spc () ++ CErrors.print e) in - let e = UserError (None,msg) in - let e = CErrors.push e in - iraise e +let report_anomaly (e, info) = + let e = + if is_anomaly e then + let msg = Pp.(str "Conversion test raised an anomaly:" ++ + spc () ++ CErrors.print e) in + UserError (None, msg) + else e + in + Exninfo.iraise (e, info) let f_conv ?l2r ?reds env ?evars x y = let inj = EConstr.Unsafe.to_constr in @@ -1478,7 +1481,9 @@ let test_trans_conversion (f: constr Reduction.extended_conversion_function) red let _ = f ~reds env ~evars:(evars, Evd.universes sigma) x y in true with Reduction.NotConvertible -> false - | e when is_anomaly e -> report_anomaly e + | e -> + let e = Exninfo.capture e in + report_anomaly e let is_conv ?(reds=TransparentState.full) env sigma = test_trans_conversion f_conv reds env sigma let is_conv_leq ?(reds=TransparentState.full) env sigma = test_trans_conversion f_conv_leq reds env sigma @@ -1494,7 +1499,9 @@ let check_conv ?(pb=Reduction.CUMUL) ?(ts=TransparentState.full) env sigma x y = try f ~reds:ts env ~evars:(safe_evar_value sigma, Evd.universes sigma) x y; true with Reduction.NotConvertible -> false | Univ.UniverseInconsistency _ -> false - | e when is_anomaly e -> report_anomaly e + | e -> + let e = Exninfo.capture e in + report_anomaly e let sigma_compare_sorts env pb s0 s1 sigma = match pb with @@ -1547,7 +1554,9 @@ let infer_conv_gen conv_fun ?(catch_incon=true) ?(pb=Reduction.CUMUL) with | Reduction.NotConvertible -> None | Univ.UniverseInconsistency _ when catch_incon -> None - | e when is_anomaly e -> report_anomaly e + | e -> + let e = Exninfo.capture e in + report_anomaly e let infer_conv = infer_conv_gen (fun pb ~l2r sigma -> Reduction.generic_conv pb ~l2r (safe_evar_value sigma)) diff --git a/pretyping/unification.ml b/pretyping/unification.ml index 5b87603d54..1df377b20e 100644 --- a/pretyping/unification.ml +++ b/pretyping/unification.ml @@ -1149,10 +1149,9 @@ let rec unify_0_with_initial_metas (sigma,ms,es as 
subst : subst0) conv_at_top e if !debug_unification then Feedback.msg_debug (str "Leaving unification with success"); a with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in if !debug_unification then Feedback.msg_debug (str "Leaving unification with failure"); - iraise e - + Exninfo.iraise e let unify_0 env sigma = unify_0_with_initial_metas (sigma,[],[]) true env diff --git a/proofs/refiner.ml b/proofs/refiner.ml index 832a749ef2..fd73ab1b5a 100644 --- a/proofs/refiner.ml +++ b/proofs/refiner.ml @@ -209,8 +209,8 @@ let catch_failerror (e, info) = | FailError (0,_) -> Control.check_for_interrupt () | FailError (lvl,s) -> - iraise (FailError (lvl - 1, s), info) - | e -> iraise (e, info) + Exninfo.iraise (FailError (lvl - 1, s), info) + | e -> Exninfo.iraise (e, info) (** FIXME: do we need to add a [Errors.push] here? *) (* ORELSE0 t1 t2 tries to apply t1 and if it fails, applies t2 *) @@ -219,7 +219,7 @@ let tclORELSE0 t1 t2 g = t1 g with (* Breakpoint *) | e when CErrors.noncritical e -> - let e = CErrors.push e in catch_failerror e; t2 g + let e = Exninfo.capture e in catch_failerror e; t2 g (* ORELSE t1 t2 tries to apply t1 and if it fails or does not progress, then applies t2 *) @@ -232,7 +232,7 @@ let tclORELSE_THEN t1 t2then t2else gls = match try Some(tclPROGRESS t1 gls) with e when CErrors.noncritical e -> - let e = CErrors.push e in catch_failerror e; None + let e = Exninfo.capture e in catch_failerror e; None with | None -> t2else gls | Some sgl -> diff --git a/stm/asyncTaskQueue.ml b/stm/asyncTaskQueue.ml index fd689602df..9eb0924bd6 100644 --- a/stm/asyncTaskQueue.ml +++ b/stm/asyncTaskQueue.ml @@ -372,7 +372,7 @@ module Make(T : Task) () = struct let with_n_workers n priority f = let q = create n priority in try let rc = f q in destroy q; rc - with e -> let e = CErrors.push e in destroy q; iraise e + with e -> let e = Exninfo.capture e in destroy q; Exninfo.iraise e let n_workers { active } = Pool.n_workers active diff --git a/stm/stm.ml 
b/stm/stm.ml index c79a1eed3d..a5b868343d 100644 --- a/stm/stm.ml +++ b/stm/stm.ml @@ -1014,7 +1014,7 @@ end = struct (* {{{ *) if PG_compat.there_are_pending_proofs () then VCS.goals id (PG_compat.get_open_goals ()) with e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in let good_id = !cur_id in invalidate_cur_state (); VCS.reached id; @@ -1046,7 +1046,7 @@ end = struct (* {{{ *) unfreeze st; res with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in Vernacstate.invalidate_cache (); unfreeze st; Exninfo.iraise e @@ -1540,7 +1540,7 @@ end = struct (* {{{ *) RespBuiltProof(proof,time) with | e when CErrors.noncritical e || e = Stack_overflow -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in (* This can happen if the proof is broken. The error has also been * signalled as a feedback, hence we can silently recover *) let e_error_at, e_safe_id = match Stateid.get info with @@ -1687,7 +1687,7 @@ end = struct (* {{{ *) `OK proof end with e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in (try match Stateid.get info with | None -> msg_warning Pp.( @@ -2092,7 +2092,7 @@ end = struct (* {{{ *) ignore(stm_vernac_interp r_for st { r_what with verbose = true }); feedback ~id:r_for Processed with e when CErrors.noncritical e -> - let e = CErrors.push e in + let e = Exninfo.capture e in let msg = iprint e in feedback ~id:r_for (Message (Error, None, msg)) @@ -2157,22 +2157,23 @@ let collect_proof keep cur hd brkind id = let has_default_proof_using = Option.has_some (Proof_using.get_default_proof_using ()) in let proof_using_ast = function | VernacProof(_,Some _) -> true + | VernacProof(_,None) -> has_default_proof_using | _ -> false in let proof_using_ast = function | Some (_, v) when proof_using_ast v.expr.CAst.v.expr && (not (Vernacprop.has_Fail v.expr)) -> Some v | _ -> None in - let has_proof_using x = has_default_proof_using || (proof_using_ast x <> None) in + let 
has_proof_using x = proof_using_ast x <> None in let proof_no_using = function - | VernacProof(t,None) -> t + | VernacProof(t,None) -> if has_default_proof_using then None else t | _ -> assert false in let proof_no_using = function | Some (_, v) -> proof_no_using v.expr.CAst.v.expr, v | _ -> assert false in let has_proof_no_using = function - | VernacProof(_,None) -> true + | VernacProof(_,None) -> not has_default_proof_using | _ -> false in let has_proof_no_using = function @@ -2336,7 +2337,7 @@ let known_state ~doc ?(redefine_qed=false) ~cache id = else try f () with e when CErrors.noncritical e -> - let ie = CErrors.push e in + let ie = Exninfo.capture e in error_absorbing_tactic id blockname ie in (* Absorb errors from f x *) let resilient_command f x = @@ -2434,7 +2435,7 @@ let known_state ~doc ?(redefine_qed=false) ~cache id = let st = Vernacstate.freeze_interp_state ~marshallable:false in ignore(stm_vernac_interp id st x); with e when CErrors.noncritical e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in let info = Stateid.add info ~valid:prev id in Exninfo.iraise (e, info)); wall_clock_last_fork := Unix.gettimeofday () @@ -2568,28 +2569,32 @@ end (* }}} *) (********************************* STM API ************************************) (******************************************************************************) -(* Main initialization routine *) -type stm_init_options = { - (* The STM will set some internal flags differently depending on the - specified [doc_type]. This distinction should disappear at some - some point. *) - doc_type : stm_doc_type; +(** STM initialization options: *) +type stm_init_options = + { doc_type : stm_doc_type + (** The STM does set some internal flags differently depending on + the specified [doc_type]. This distinction should disappear at + some some point. *) - (* Initial load path in scope for the document. 
Usually extracted - from -R options / _CoqProject *) - iload_path : Loadpath.coq_path list; + ; ml_load_path : CUnix.physical_path list + (** OCaml load paths for the document. *) - (* Require [require_libs] before the initial state is + ; vo_load_path : Loadpath.vo_path list + (** [vo] load paths for the document. Usually extracted from -R + options / _CoqProject *) + + ; require_libs : (string * string option * bool option) list + (** Require [require_libs] before the initial state is ready. Parameters follow [Library], that is to say, [lib,prefix,import_export] means require library [lib] from optional [prefix] and [import_export] if [Some false/Some true] is used. *) - require_libs : (string * string option * bool option) list; - (* STM options that apply to the current document. *) - stm_options : AsyncOpts.stm_opt; -} -(* fb_handler : Feedback.feedback -> unit; *) + ; stm_options : AsyncOpts.stm_opt + (** Low-level STM options *) + } + + (* fb_handler : Feedback.feedback -> unit; *) (* let doc_type_module_name (std : stm_doc_type) = @@ -2614,7 +2619,7 @@ let dirpath_of_file f = let ldir = Libnames.add_dirpath_suffix ldir0 id in ldir -let new_doc { doc_type ; iload_path; require_libs; stm_options } = +let new_doc { doc_type ; ml_load_path; vo_load_path; require_libs; stm_options } = let require_file (dir, from, exp) = let mp = Libnames.qualid_of_string dir in @@ -2632,7 +2637,8 @@ let new_doc { doc_type ; iload_path; require_libs; stm_options } = (* Set load path; important, this has to happen before we declare the library below as [Declaremods/Library] will infer the module name by looking at the load path! 
*) - List.iter Loadpath.add_coq_path iload_path; + List.iter Mltop.add_ml_dir ml_load_path; + List.iter Loadpath.add_vo_path vo_load_path; Safe_typing.allow_delayed_constants := !cur_opt.async_proofs_mode <> APoff; @@ -2687,7 +2693,7 @@ let observe ~doc id = VCS.print (); doc with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in VCS.print (); VCS.restore vcs; Exninfo.iraise e @@ -2762,7 +2768,7 @@ let finish_tasks name u p (t,rcbackup as tasks) = let a, _ = List.fold_left finish_task u (info_tasks tasks) in (a,true), p with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in msg_warning (str"File " ++ str name ++ str ":" ++ spc () ++ iprint e); exit 1 @@ -2986,7 +2992,7 @@ let process_transaction ~doc ?(newtip=Stateid.fresh ()) VCS.print (); rc with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in handle_failure e vcs let get_ast ~doc id = @@ -3196,7 +3202,7 @@ let edit_at ~doc id = VCS.print (); doc, rc with e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in match Stateid.get info with | None -> VCS.print (); diff --git a/stm/stm.mli b/stm/stm.mli index 841adcf05b..e56bac6e0f 100644 --- a/stm/stm.mli +++ b/stm/stm.mli @@ -52,38 +52,30 @@ type stm_doc_type = | VioDoc of string (* file path *) | Interactive of interactive_top (* module path *) -(** Coq initialization options: - - - [doc_type]: Type of document being created. - - - [require_libs]: list of libraries/modules to be pre-loaded at - startup. A tuple [(modname,modfrom,import)] is equivalent to [From - modfrom Require modname]; [import] works similarly to - [Library.require_library_from_dirpath], [Some false] will import - the module, [Some true] will additionally export it. - -*) -type stm_init_options = { - (* The STM will set some internal flags differently depending on the - specified [doc_type]. This distinction should disappear at some - some point. *) - doc_type : stm_doc_type; - - (* Initial load path in scope for the document. 
Usually extracted - from -R options / _CoqProject *) - iload_path : Loadpath.coq_path list; - - (* Require [require_libs] before the initial state is +(** STM initialization options: *) +type stm_init_options = + { doc_type : stm_doc_type + (** The STM does set some internal flags differently depending on + the specified [doc_type]. This distinction should disappear at + some some point. *) + + ; ml_load_path : CUnix.physical_path list + (** OCaml load paths for the document. *) + + ; vo_load_path : Loadpath.vo_path list + (** [vo] load paths for the document. Usually extracted from -R + options / _CoqProject *) + + ; require_libs : (string * string option * bool option) list + (** Require [require_libs] before the initial state is ready. Parameters follow [Library], that is to say, [lib,prefix,import_export] means require library [lib] from optional [prefix] and [import_export] if [Some false/Some true] is used. *) - require_libs : (string * string option * bool option) list; - (* STM options that apply to the current document. *) - stm_options : AsyncOpts.stm_opt; -} -(* fb_handler : Feedback.feedback -> unit; *) + ; stm_options : AsyncOpts.stm_opt + (** Low-level STM options *) + } (** The type of a STM document *) type doc diff --git a/tactics/abstract.ml b/tactics/abstract.ml index 1e18028e7b..86e6a92a22 100644 --- a/tactics/abstract.ml +++ b/tactics/abstract.ml @@ -97,8 +97,8 @@ let cache_term_by_tactic_then ~opaque ~name_op ?(goal_type=None) tac tacK = which is an error irrelevant to the proof system (in fact it means that [e] comes from [tac] failing to yield enough success). Hence it reraises [e]. 
*) - let (_, info) = CErrors.push src in - iraise (e, info) + let (_, info) = Exninfo.capture src in + Exninfo.iraise (e, info) in let body, effs = Future.force const.Declare.proof_entry_body in (* We drop the side-effects from the entry, they already exist in the ambient environment *) diff --git a/tactics/class_tactics.ml b/tactics/class_tactics.ml index 02186f6a98..e3f2f18b44 100644 --- a/tactics/class_tactics.ml +++ b/tactics/class_tactics.ml @@ -236,7 +236,7 @@ let unify_resolve_refine poly flags gl clenv = Tacticals.New.tclZEROMSG (str "Unable to unify") | e when CErrors.noncritical e -> Tacticals.New.tclZEROMSG (str "Unexpected error") - | _ -> iraise ie) + | _ -> Exninfo.iraise ie) (** Dealing with goals of the form A -> B and hints of the form C -> A -> B. @@ -771,7 +771,7 @@ module Search = struct (fun e' -> if CErrors.noncritical (fst e') then (pr_error e'; aux (merge_exceptions e e') tl) - else iraise e') + else Exninfo.iraise e') and aux e = function | x :: xs -> onetac e x xs | [] -> diff --git a/tactics/eauto.ml b/tactics/eauto.ml index 9a1e6a6736..aca6b4734a 100644 --- a/tactics/eauto.ml +++ b/tactics/eauto.ml @@ -235,7 +235,7 @@ module SearchProblem = struct (* msg (hov 1 (pptac ++ str" gives: \n" ++ pr_goals lgls ++ str"\n")); *) (lgls, cost, pptac) :: aux tacl with e when CErrors.noncritical e -> - let e = CErrors.push e in + let e = Exninfo.capture e in Refiner.catch_failerror e; aux tacl in aux l diff --git a/tactics/pfedit.ml b/tactics/pfedit.ml index dbabc4e4e0..7cdfd0637a 100644 --- a/tactics/pfedit.ml +++ b/tactics/pfedit.ml @@ -130,8 +130,8 @@ let build_constant_by_tactic ~name ?(opaque=Proof_global.Transparent) ctx sign ~ | _ -> CErrors.anomaly Pp.(str "[build_constant_by_tactic] close_proof returned more than one proof term") with reraise -> - let reraise = CErrors.push reraise in - iraise reraise + let reraise = Exninfo.capture reraise in + Exninfo.iraise reraise let build_by_tactic ?(side_eff=true) env sigma ~poly typ tac = let name = 
Id.of_string ("temporary_proof"^string_of_int (next())) in @@ -160,8 +160,8 @@ let refine_by_tactic ~name ~poly env sigma ty tac = try Proof.run_tactic env tac prf with Logic_monad.TacticFailure e as src -> (* Catch the inner error of the monad tactic *) - let (_, info) = CErrors.push src in - iraise (e, info) + let (_, info) = Exninfo.capture src in + Exninfo.iraise (e, info) in (* Plug back the retrieved sigma *) let Proof.{ goals; stack; shelf; given_up; sigma; entry } = Proof.data prf in diff --git a/tactics/tacticals.mli b/tactics/tacticals.mli index 4b93b81d1c..5fde6d2178 100644 --- a/tactics/tacticals.mli +++ b/tactics/tacticals.mli @@ -144,7 +144,7 @@ module New : sig (** [catch_failerror e] fails and decreases the level if [e] is an Ltac error with level more than 0. Otherwise succeeds. *) - val catch_failerror : Util.iexn -> unit tactic + val catch_failerror : Exninfo.iexn -> unit tactic val tclIDTAC : unit tactic val tclTHEN : unit tactic -> unit tactic -> unit tactic diff --git a/tactics/tactics.ml b/tactics/tactics.ml index 8371da76b2..83423b7556 100644 --- a/tactics/tactics.ml +++ b/tactics/tactics.ml @@ -1848,12 +1848,12 @@ let apply_in_once_main flags innerclause env sigma (loc,d,lbind) = let rec aux clause = try progress_with_clause flags innerclause clause with e when CErrors.noncritical e -> - let e' = CErrors.push e in + let e' = Exninfo.capture e in try aux (clenv_push_prod clause) with NotExtensibleClause -> match e with | UnableToApply -> explain_unable_to_apply_lemma ?loc env sigma thm innerclause - | _ -> iraise e' + | _ -> Exninfo.iraise e' in aux (make_clenv_binding env sigma (d,thm) lbind) @@ -1886,7 +1886,7 @@ let apply_in_once ?(respect_opaque = false) with_delta tac id ]) with e when with_destruct && CErrors.noncritical e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in (descend_in_conjunctions (Id.Set.singleton targetid) (fun b id -> aux (id::idstoclear) b (mkVar id)) (e, info) c) @@ -3155,7 +3155,7 @@ 
let clear_for_destruct ids = (clear_gen (fun env sigma id err inglobal -> raise (ClearDependencyError (id,err,inglobal))) ids) (function | ClearDependencyError (id,err,inglobal),_ -> warn_cannot_remove_as_expected (id,inglobal); Proofview.tclUNIT () - | e -> iraise e) + | e -> Exninfo.iraise e) (* Either unfold and clear if defined or simply clear if not a definition *) let expand_hyp id = diff --git a/test-suite/.csdp.cache b/test-suite/.csdp.cache Binary files differindex b3bcb5b056..046cb067c5 100644 --- a/test-suite/.csdp.cache +++ b/test-suite/.csdp.cache diff --git a/test-suite/Makefile b/test-suite/Makefile index 265c2eafa7..1681150f7b 100644 --- a/test-suite/Makefile +++ b/test-suite/Makefile @@ -32,11 +32,15 @@ include ../Makefile.common # Variables ####################################################################### +# Using quotes to anticipate the possibility of spaces in the directory name +# Note that this will later need an eval in shell to interpret the quotes +ROOT='$(shell cd ..; pwd)' + ifneq ($(wildcard ../_build),) -BIN:=$(shell cd ..; pwd)/_build/install/default/bin/ -COQLIB:=$(shell cd ..; pwd)/_build/install/default/lib/coq +BIN:=$(ROOT)/_build/install/default/bin/ +COQLIB:=$(ROOT)/_build/install/default/lib/coq else -BIN := $(shell cd ..; pwd)/bin/ +BIN := $(ROOT)/bin/ COQLIB?= ifeq ($(COQLIB),) @@ -602,10 +606,10 @@ $(patsubst %.sh,%.log,$(wildcard misc/*.sh)): %.log: %.sh $(PREREQUISITELOG) @echo "TEST $<" $(HIDE){ \ echo $(call log_intro,$<); \ - export BIN="$(BIN)"; \ - export coqc="$(coqc)"; \ - export coqtop="$(coqc)"; \ - export coqdep="$(coqdep)"; \ + export BIN=$(BIN); \ + export coqc="eval $(coqc)"; \ + export coqtop="eval $(coqc)"; \ + export coqdep="eval $(coqdep)"; \ "$<" 2>&1; R=$$?; times; \ if [ $$R = 0 ]; then \ echo $(log_success); \ diff --git a/test-suite/bugs/closed/bug_11342.v b/test-suite/bugs/closed/bug_11342.v new file mode 100644 index 0000000000..3c163fb772 --- /dev/null +++ b/test-suite/bugs/closed/bug_11342.v 
@@ -0,0 +1,19 @@ +(* -*- mode: coq; coq-prog-args: ("-vos") -*- *) + +Section foo. + Context {H:True}. + Set Default Proof Using "Type". + Theorem test2 : True. + Proof. + (* BUG: this gets run when compiling with -vos *) + fail "proof with default using". + exact I. + Qed. + + Theorem test3 : True. + Proof using Type. + (* this isn't run with -vos *) + fail "using". + exact I. + Qed. +End foo. diff --git a/test-suite/ide/debug_ltac.fake b/test-suite/ide/debug_ltac.fake index aa68fad39e..38c610a5a6 100644 --- a/test-suite/ide/debug_ltac.fake +++ b/test-suite/ide/debug_ltac.fake @@ -1,2 +1,3 @@ +ADD { Comments "fakeide doesn't support fail as the first sentence". } FAILADD { Debug On. } ADD { Set Debug On. } diff --git a/test-suite/ide/undo002.fake b/test-suite/ide/undo002.fake index 5284c5d3a5..eb553f9dfa 100644 --- a/test-suite/ide/undo002.fake +++ b/test-suite/ide/undo002.fake @@ -3,6 +3,7 @@ # # Simple backtrack by 2 before two global definitions # +ADD initial { Comments "initial sentence". } ADD { Definition foo := 0. } ADD { Definition bar := 1. } EDIT_AT initial diff --git a/test-suite/output/Inductive.out b/test-suite/output/Inductive.out index 8ff571ae55..ff2556c5dc 100644 --- a/test-suite/output/Inductive.out +++ b/test-suite/output/Inductive.out @@ -5,3 +5,5 @@ Inductive foo (A : Type) (x : A) (y : A := x) : Prop := Foo : foo A x Arguments foo _%type_scope Arguments Foo _%type_scope +myprod unit bool + : Set diff --git a/test-suite/output/Inductive.v b/test-suite/output/Inductive.v index 9eec9a7dad..db1276cb6c 100644 --- a/test-suite/output/Inductive.v +++ b/test-suite/output/Inductive.v @@ -5,3 +5,11 @@ Fail Inductive list' (A:Set) : Set := (* Check printing of let-ins *) #[universes(template)] Inductive foo (A : Type) (x : A) (y := x) := Foo. Print foo. + +(* Check where clause *) +Reserved Notation "x ** y" (at level 40, left associativity). +Inductive myprod A B := + mypair : A -> B -> A ** B + where "A ** B" := (myprod A B) (only parsing). 
+ +Check unit ** bool. diff --git a/test-suite/output/bug_11342.out b/test-suite/output/bug_11342.out new file mode 100644 index 0000000000..9aac16de0d --- /dev/null +++ b/test-suite/output/bug_11342.out @@ -0,0 +1 @@ +without using diff --git a/test-suite/output/bug_11342.v b/test-suite/output/bug_11342.v new file mode 100644 index 0000000000..73131a3190 --- /dev/null +++ b/test-suite/output/bug_11342.v @@ -0,0 +1,12 @@ +(* -*- mode: coq; coq-prog-args: ("-vos") -*- *) + +Section foo. + Context {H:True}. + Theorem test1 : True. + Proof. + (* this gets printed with -vos because there's no annotation (either [Set + Default Proof Using ...] or an explicit [Proof using ...]) *) + idtac "without using". + exact I. + Qed. +End foo. diff --git a/test-suite/output/bug_11608.out b/test-suite/output/bug_11608.out new file mode 100644 index 0000000000..793ff768d4 --- /dev/null +++ b/test-suite/output/bug_11608.out @@ -0,0 +1 @@ +creating x without [Proof.] diff --git a/test-suite/output/bug_11608.v b/test-suite/output/bug_11608.v new file mode 100644 index 0000000000..3929082913 --- /dev/null +++ b/test-suite/output/bug_11608.v @@ -0,0 +1,13 @@ +(* -*- mode: coq; coq-prog-args: ("-vos") -*- *) + +Set Default Proof Using "Type". + +Section foo. + Context (A:Type). + Definition x : option A. + (* this can get printed with -vos since without "Proof." there's no Proof + using, even with a default annotation. *) + idtac "creating x without [Proof.]". + exact None. + Qed. +End foo. diff --git a/test-suite/output/bug_8206.out b/test-suite/output/bug_8206.out new file mode 100644 index 0000000000..6015fe32f9 --- /dev/null +++ b/test-suite/output/bug_8206.out @@ -0,0 +1,5 @@ +File "stdin", line 11, characters 0-23: +Error: Signature components for label homework do not match: expected type +"forall a b : nat, bug_8206.M.add a b = bug_8206.M.add b a" but found type +"nat -> forall b : nat, bug_8206.M.add 0 b = bug_8206.M.add b 0". 
+ diff --git a/test-suite/output/bug_8206.v b/test-suite/output/bug_8206.v new file mode 100644 index 0000000000..8d4e73dfac --- /dev/null +++ b/test-suite/output/bug_8206.v @@ -0,0 +1,11 @@ +Module Type Sig. + Parameter add: nat -> nat -> nat. + Axiom homework: forall (a b: nat), add a b = add b a. +End Sig. + +Module Impl. + Definition add(a b: nat) := plus a b. + Axiom homework: forall (a b: nat), add 0 b = add b 0. +End Impl. + +Module M : Sig := Impl. diff --git a/test-suite/success/pose.v b/test-suite/success/pose.v new file mode 100644 index 0000000000..17007915fe --- /dev/null +++ b/test-suite/success/pose.v @@ -0,0 +1,9 @@ +(* Test syntax *) + +Goal 0=0. +pose proof (a := I). +Fail clearbody a. +epose proof (b := fun _ => eq_refl). +Fail clearbody b. +exact (b a). +Qed. diff --git a/theories/micromega/Lia.v b/theories/micromega/Lia.v index e53800d07d..5d97fc46ef 100644 --- a/theories/micromega/Lia.v +++ b/theories/micromega/Lia.v @@ -14,11 +14,8 @@ (* *) (************************************************************************) -Require Import ZMicromega. -Require Import ZArith_base. -Require Import RingMicromega. -Require Import VarMap. -Require Import DeclConstant. +Require Import ZMicromega RingMicromega VarMap DeclConstant. +Require Import BinNums. Require Coq.micromega.Tauto. Declare ML Module "micromega_plugin". @@ -29,9 +26,9 @@ Ltac zchecker := (@eq_refl bool true <: @eq bool (ZTautoChecker __ff __wit) true) (@find Z Z0 __varmap)). -Ltac lia := PreOmega.zify; xlia zchecker. +Ltac lia := Zify.zify; xlia zchecker. -Ltac nia := PreOmega.zify; xnlia zchecker. +Ltac nia := Zify.zify; xnlia zchecker. (* Local Variables: *) diff --git a/theories/micromega/ZMicromega.v b/theories/micromega/ZMicromega.v index 9bedb47371..38f3d3e0c0 100644 --- a/theories/micromega/ZMicromega.v +++ b/theories/micromega/ZMicromega.v @@ -1549,7 +1549,7 @@ Proof. apply H ; auto. unfold ltof in *. simpl in *. - PreOmega.zify. + Zify.zify. intuition subst. assumption. 
eapply Z.lt_le_trans. eassumption. apply Z.add_le_mono_r. assumption. diff --git a/theories/micromega/Zify.v b/theories/micromega/Zify.v index 18cd196148..494d5e5623 100644 --- a/theories/micromega/Zify.v +++ b/theories/micromega/Zify.v @@ -8,83 +8,16 @@ (* * (see LICENSE file for the text of the license) *) (************************************************************************) -Require Import ZifyClasses. -Require Export ZifyInst. -Require Import InitialRing. - -(** From PreOmega *) - -(** I) translation of Z.max, Z.min, Z.abs, Z.sgn into recognized equations *) - -Ltac zify_unop_core t thm a := - (* Let's introduce the specification theorem for t *) - pose proof (thm a); - (* Then we replace (t a) everywhere with a fresh variable *) - let z := fresh "z" in set (z:=t a) in *; clearbody z. - -Ltac zify_unop_var_or_term t thm a := - (* If a is a variable, no need for aliasing *) - let za := fresh "z" in - (rename a into za; rename za into a; zify_unop_core t thm a) || - (* Otherwise, a is a complex term: we alias it. *) - (remember a as za; zify_unop_core t thm za). - -Ltac zify_unop t thm a := - (* If a is a scalar, we can simply reduce the unop. *) - (* Note that simpl wasn't enough to reduce [Z.max 0 0] (#5439) *) - let isz := isZcst a in - match isz with - | true => - let u := eval compute in (t a) in - change (t a) with u in * - | _ => zify_unop_var_or_term t thm a - end. - -Ltac zify_unop_nored t thm a := - (* in this version, we don't try to reduce the unop (that can be (Z.add x)) *) - let isz := isZcst a in - match isz with - | true => zify_unop_core t thm a - | _ => zify_unop_var_or_term t thm a - end. 
- -Ltac zify_binop t thm a b:= - (* works as zify_unop, except that we should be careful when - dealing with b, since it can be equal to a *) - let isza := isZcst a in - match isza with - | true => zify_unop (t a) (thm a) b - | _ => - let za := fresh "z" in - (rename a into za; rename za into a; zify_unop_nored (t a) (thm a) b) || - (remember a as za; match goal with - | H : za = b |- _ => zify_unop_nored (t za) (thm za) za - | _ => zify_unop_nored (t za) (thm za) b - end) - end. - -(* end from PreOmega *) - -Ltac applySpec S := - let t := type of S in - match t with - | @BinOpSpec _ _ ?Op _ => - let Spec := (eval unfold S, BSpec in (@BSpec _ _ Op _ S)) in - repeat - match goal with - | H : context[Op ?X ?Y] |- _ => zify_binop Op Spec X Y - | |- context[Op ?X ?Y] => zify_binop Op Spec X Y - end - | @UnOpSpec _ _ ?Op _ => - let Spec := (eval unfold S, USpec in (@USpec _ _ Op _ S)) in - repeat - match goal with - | H : context[Op ?X] |- _ => zify_unop Op Spec X - | |- context[Op ?X ] => zify_unop Op Spec X - end - end. +Require Import ZifyClasses ZifyInst. +Declare ML Module "zify_plugin". (** [zify_post_hook] is there to be redefined. *) Ltac zify_post_hook := idtac. -Ltac zify := zify_op ; (zify_iter_specs applySpec) ; zify_post_hook. +Ltac iter_specs := zify_iter_specs. + +Ltac zify := intros; + zify_elim_let ; + zify_op ; + (zify_iter_specs) ; + zify_saturate ; zify_post_hook. diff --git a/theories/micromega/ZifyClasses.v b/theories/micromega/ZifyClasses.v index d3f7f91074..988205a891 100644 --- a/theories/micromega/ZifyClasses.v +++ b/theories/micromega/ZifyClasses.v @@ -73,6 +73,7 @@ Class BinRel {S:Type} {T:Type} (R : S -> S -> Prop) {I : InjTyp S T} := (** [PropOp Op] declares morphisms for [<->]. This will be used to deal with e.g. [and], [or],... 
*) + Class PropOp (Op : Prop -> Prop -> Prop) := mkprop { op_iff : forall (p1 p2 q1 q2:Prop), (p1 <-> q1) -> (p2 <-> q2) -> (Op p1 p2 <-> Op q1 q2) @@ -80,7 +81,7 @@ Class PropOp (Op : Prop -> Prop -> Prop) := Class PropUOp (Op : Prop -> Prop) := mkuprop { - uop_iff : forall (p1 q1 :Prop), (p1 <-> q1) -> (Op p1 <-> Op q1) + uop_iff : forall (p1 q1 :Prop), (p1 <-> q1) -> (Op p1 <-> Op q1) }. @@ -131,7 +132,7 @@ Class Saturate {T: Type} (Op : T -> T -> T) := are used to store source and target expressions together with a correctness proof. *) -Record injterm {S T: Type} {I : S -> T} := +Record injterm {S T: Type} (I : S -> T) := mkinjterm { source : S ; target : T ; inj_ok : I source = target}. Record injprop := @@ -139,82 +140,104 @@ Record injprop := source_prop : Prop ; target_prop : Prop ; injprop_ok : source_prop <-> target_prop}. -(** Lemmas for building [injterm] and [injprop]. *) -Definition mkprop_op (Op : Prop -> Prop -> Prop) (POp : PropOp Op) - (p1 :injprop) (p2: injprop) : injprop := - {| source_prop := (Op (source_prop p1) (source_prop p2)) ; - target_prop := (Op (target_prop p1) (target_prop p2)) ; - injprop_ok := (op_iff (source_prop p1) (source_prop p2) (target_prop p1) (target_prop p2) - (injprop_ok p1) (injprop_ok p2)) - |}. -Definition mkuprop_op (Op : Prop -> Prop) (POp : PropUOp Op) - (p1 :injprop) : injprop := - {| source_prop := (Op (source_prop p1)) ; - target_prop := (Op (target_prop p1)) ; - injprop_ok := (uop_iff (source_prop p1) (target_prop p1) (injprop_ok p1)) - |}. +(** Lemmas for building rewrite rules. *) + +Definition PropOp_iff (Op : Prop -> Prop -> Prop) := + forall (p1 p2 q1 q2:Prop), (p1 <-> q1) -> (p2 <-> q2) -> (Op p1 p2 <-> Op q1 q2). + +Definition PropUOp_iff (Op : Prop -> Prop) := + forall (p1 q1 :Prop), (p1 <-> q1) -> (Op p1 <-> Op q1). 
Lemma mkapp2 (S1 S2 S3 T : Type) (Op : S1 -> S2 -> S3) - {I1 : InjTyp S1 T} {I2 : InjTyp S2 T} {I3 : InjTyp S3 T} - (B : @BinOp S1 S2 S3 T Op I1 I2 I3) - (t1 : @injterm S1 T inj) (t2 : @injterm S2 T inj) - : @injterm S3 T inj. + (I1 : S1 -> T) (I2 : S2 -> T) (I3 : S3 -> T) + (TBOP : T -> T -> T) + (TBOPINJ : forall n m, I3 (Op n m) = TBOP (I1 n) (I2 m)) + (s1 : S1) (t1 : T) (P1: I1 s1 = t1) + (s2 : S2) (t2 : T) (P2: I2 s2 = t2): I3 (Op s1 s2) = TBOP t1 t2. Proof. - apply (mkinjterm _ _ inj (Op (source t1) (source t2)) (TBOp (target t1) (target t2))). - (rewrite <- inj_ok; - rewrite <- inj_ok; - apply TBOpInj). -Defined. + subst. apply TBOPINJ. +Qed. -Lemma mkapp (S1 S2 T : Type) (Op : S1 -> S2) - {I1 : InjTyp S1 T} - {I2 : InjTyp S2 T} - (B : @UnOp S1 S2 T Op I1 I2 ) - (t1 : @injterm S1 T inj) - : @injterm S2 T inj. +Lemma mkapp (S1 S2 T : Type) (OP : S1 -> S2) + (I1 : S1 -> T) + (I2 : S2 -> T) + (TUOP : T -> T) + (TUOPINJ : forall n, I2 (OP n) = TUOP (I1 n)) + (s1: S1) (t1: T) (P1: I1 s1 = t1): I2 (OP s1) = TUOP t1. Proof. - apply (mkinjterm _ _ inj (Op (source t1)) (TUOp (target t1))). - (rewrite <- inj_ok; apply TUOpInj). -Defined. + subst. apply TUOPINJ. +Qed. + +Lemma mkrel (S T : Type) (R : S -> S -> Prop) + (I : S -> T) + (TR : T -> T -> Prop) + (TRINJ : forall n m : S, R n m <-> TR (I n) (I m)) + (s1 : S) (t1 : T) (P1 : I s1 = t1) + (s2 : S) (t2 : T) (P2 : I s2 = t2): + R s1 s2 <-> TR t1 t2. +Proof. + subst. + apply TRINJ. +Qed. + +(** Hardcoded support and lemma for propositional logic *) -Lemma mkapp0 (S T : Type) (Op : S) - {I : InjTyp S T} - (B : @CstOp S T Op I) - : @injterm S T inj. +Lemma and_morph : forall (s1 s2 t1 t2:Prop), s1 <-> t1 -> s2 <-> t2 -> ((s1 /\ s2) <-> (t1 /\ t2)). Proof. - apply (mkinjterm _ _ inj Op TCst). - (apply TCstInj). -Defined. + intros. tauto. +Qed. -Lemma mkrel (S T : Type) (R : S -> S -> Prop) - {Inj : InjTyp S T} - (B : @BinRel S T R Inj) - (t1 : @injterm S T inj) (t2 : @injterm S T inj) - : @injprop. 
+Lemma or_morph : forall (s1 s2 t1 t2:Prop), s1 <-> t1 -> s2 <-> t2 -> ((s1 \/ s2) <-> (t1 \/ t2)). +Proof. + intros. tauto. +Qed. + +Lemma impl_morph : forall (s1 s2 t1 t2:Prop), s1 <-> t1 -> s2 <-> t2 -> ((s1 -> s2) <-> (t1 -> t2)). +Proof. + intros. tauto. +Qed. + +Lemma iff_morph : forall (s1 s2 t1 t2:Prop), s1 <-> t1 -> s2 <-> t2 -> ((s1 <-> s2) <-> (t1 <-> t2)). +Proof. + intros. tauto. +Qed. + +Lemma not_morph : forall (s1 t1:Prop), s1 <-> t1 -> (not s1) <-> (not t1). +Proof. + intros. tauto. +Qed. + +Lemma eq_iff : forall (P Q : Prop), P = Q -> (P <-> Q). Proof. - apply (mkinjprop (R (source t1) (source t2)) (TR (target t1) (target t2))). - (rewrite <- inj_ok; rewrite <- inj_ok;apply TRInj). + intros. + rewrite H. + apply iff_refl. Defined. +Lemma rew_iff (P Q : Prop) (IFF : P <-> Q) : P -> Q. +Proof. + exact (fun H => proj1 IFF H). +Qed. + +Definition identity (A : Type) : A -> A := fun x => x. + (** Registering constants for use by the plugin *) +Register eq_iff as ZifyClasses.eq_iff. Register target_prop as ZifyClasses.target_prop. Register mkrel as ZifyClasses.mkrel. Register target as ZifyClasses.target. Register mkapp2 as ZifyClasses.mkapp2. Register mkapp as ZifyClasses.mkapp. -Register mkapp0 as ZifyClasses.mkapp0. Register op_iff as ZifyClasses.op_iff. Register uop_iff as ZifyClasses.uop_iff. Register TR as ZifyClasses.TR. Register TBOp as ZifyClasses.TBOp. Register TUOp as ZifyClasses.TUOp. Register TCst as ZifyClasses.TCst. -Register mkprop_op as ZifyClasses.mkprop_op. -Register mkuprop_op as ZifyClasses.mkuprop_op. Register injprop_ok as ZifyClasses.injprop_ok. Register inj_ok as ZifyClasses.inj_ok. Register source as ZifyClasses.source. @@ -225,8 +248,26 @@ Register TUOpInj as ZifyClasses.TUOpInj. Register not as ZifyClasses.not. Register mkinjterm as ZifyClasses.mkinjterm. Register eq_refl as ZifyClasses.eq_refl. +Register eq as ZifyClasses.eq. Register mkinjprop as ZifyClasses.mkinjprop. Register iff_refl as ZifyClasses.iff_refl. 
+Register rew_iff as ZifyClasses.rew_iff. Register source_prop as ZifyClasses.source_prop. Register injprop_ok as ZifyClasses.injprop_ok. Register iff as ZifyClasses.iff. +Register BinOpSpec as ZifyClasses.BinOpSpec. +Register UnOpSpec as ZifyClasses.UnOpSpec. + +(** Propositional logic *) +Register and as ZifyClasses.and. +Register and_morph as ZifyClasses.and_morph. +Register or as ZifyClasses.or. +Register or_morph as ZifyClasses.or_morph. +Register iff as ZifyClasses.iff. +Register iff_morph as ZifyClasses.iff_morph. +Register impl_morph as ZifyClasses.impl_morph. +Register not as ZifyClasses.not. +Register not_morph as ZifyClasses.not_morph. + +(** Identify function *) +Register identity as ZifyClasses.identity. diff --git a/theories/micromega/ZifyInst.v b/theories/micromega/ZifyInst.v index edfb5a2a94..fa486f3abc 100644 --- a/theories/micromega/ZifyInst.v +++ b/theories/micromega/ZifyInst.v @@ -17,44 +17,10 @@ Require Import ZifyClasses. Declare ML Module "zify_plugin". Local Open Scope Z_scope. -(** Propositional logic *) -Instance PropAnd : PropOp and. -Proof. - constructor. - tauto. -Defined. -Add PropOp PropAnd. - -Instance PropOr : PropOp or. -Proof. - constructor. - tauto. -Defined. -Add PropOp PropOr. - -Instance PropArrow : PropOp (fun x y => x -> y). -Proof. - constructor. - intros. - tauto. -Defined. -Add PropOp PropArrow. - -Instance PropIff : PropOp iff. -Proof. - constructor. - intros. - tauto. -Defined. -Add PropOp PropIff. - -Instance PropNot : PropUOp not. -Proof. - constructor. - intros. - tauto. -Defined. -Add PropUOp PropNot. +Ltac refl := + abstract (intros ; match goal with + | |- context[@inj _ _ ?X] => unfold X, inj + end ; reflexivity). Instance Inj_Z_Z : InjTyp Z Z := @@ -162,13 +128,18 @@ Instance Op_pos_le : BinRel Pos.le := {| TR := Z.le; TRInj := fun x y => iff_refl (Z.pos x <= Z.pos y) |}. Add BinRel Op_pos_le. +Lemma eq_pos_inj : forall (x y:positive), x = y <-> Z.pos x = Z.pos y. +Proof. + intros. 
+ apply (iff_sym (Pos2Z.inj_iff x y)). +Qed. + Instance Op_eq_pos : BinRel (@eq positive) := - {| TR := @eq Z ; TRInj := fun x y => iff_sym (Pos2Z.inj_iff x y) |}. + { TR := @eq Z ; TRInj := eq_pos_inj }. Add BinRel Op_eq_pos. (* zify_positive_op *) - Instance Op_Z_of_N : UnOp Z.of_N := { TUOp := (fun x => x) ; TUOpInj := fun x => eq_refl (Z.of_N x) }. Add UnOp Op_Z_of_N. @@ -189,8 +160,11 @@ Instance Op_pos_succ : UnOp Pos.succ := { TUOp := fun x => x + 1; TUOpInj := Pos2Z.inj_succ }. Add UnOp Op_pos_succ. + + + Instance Op_pos_pred_double : UnOp Pos.pred_double := - { TUOp := fun x => 2 * x - 1; TUOpInj := ltac:(reflexivity) }. +{ TUOp := fun x => 2 * x - 1; TUOpInj := ltac:(refl) }. Add UnOp Op_pos_pred_double. Instance Op_pos_pred : UnOp Pos.pred := @@ -217,7 +191,7 @@ Instance Op_pos_of_nat : UnOp Pos.of_nat := Add UnOp Op_pos_of_nat. Instance Op_pos_add : BinOp Pos.add := - { TBOp := Z.add ; TBOpInj := ltac: (reflexivity) }. + { TBOp := Z.add ; TBOpInj := ltac: (refl) }. Add BinOp Op_pos_add. Instance Op_pos_add_carry : BinOp Pos.add_carry := @@ -230,7 +204,7 @@ Instance Op_pos_sub : BinOp Pos.sub := Add BinOp Op_pos_sub. Instance Op_pos_mul : BinOp Pos.mul := - { TBOp := Z.mul ; TBOpInj := ltac: (reflexivity) }. + { TBOp := Z.mul ; TBOpInj := ltac: (refl) }. Add BinOp Op_pos_mul. Instance Op_pos_min : BinOp Pos.min := @@ -250,19 +224,19 @@ Instance Op_pos_square : UnOp Pos.square := Add UnOp Op_pos_square. Instance Op_xO : UnOp xO := - { TUOp := fun x => 2 * x ; TUOpInj := ltac: (reflexivity) }. + { TUOp := fun x => 2 * x ; TUOpInj := ltac: (refl) }. Add UnOp Op_xO. Instance Op_xI : UnOp xI := - { TUOp := fun x => 2 * x + 1 ; TUOpInj := ltac: (reflexivity) }. + { TUOp := fun x => 2 * x + 1 ; TUOpInj := ltac: (refl) }. Add UnOp Op_xI. Instance Op_xH : CstOp xH := - { TCst := 1%Z ; TCstInj := ltac:(reflexivity)}. + { TCst := 1%Z ; TCstInj := ltac:(refl)}. Add CstOp Op_xH. 
Instance Op_Z_of_nat : UnOp Z.of_nat:= - { TUOp := fun x => x ; TUOpInj := ltac:(reflexivity) }. + { TUOp := fun x => x ; TUOpInj := (fun x : nat => @eq_refl Z (Z.of_nat x)) }. Add UnOp Op_Z_of_nat. (* zify_N_rel *) @@ -287,6 +261,14 @@ Instance Op_eq_N : BinRel (@eq N) := Add BinRel Op_eq_N. (* zify_N_op *) +Instance Op_N_N0 : CstOp N0 := + { TCst := Z0 ; TCstInj := eq_refl }. +Add CstOp Op_N_N0. + +Instance Op_N_Npos : UnOp Npos := + { TUOp := (fun x => x) ; TUOpInj := ltac:(refl) }. +Add UnOp Op_N_Npos. + Instance Op_N_of_nat : UnOp N.of_nat := { TUOp := fun x => x ; TUOpInj := nat_N_Z }. Add UnOp Op_N_of_nat. @@ -296,7 +278,7 @@ Instance Op_Z_abs_N : UnOp Z.abs_N := Add UnOp Op_Z_abs_N. Instance Op_N_pos : UnOp N.pos := - { TUOp := fun x => x ; TUOpInj := ltac:(reflexivity)}. + { TUOp := fun x => x ; TUOpInj := ltac:(refl)}. Add UnOp Op_N_pos. Instance Op_N_add : BinOp N.add := @@ -360,68 +342,72 @@ Instance Op_eqZ : BinRel (@eq Z) := { TR := @eq Z ; TRInj := fun x y => iff_refl (x = y) }. Add BinRel Op_eqZ. +Instance Op_Z_Z0 : CstOp Z0 := + { TCst := Z0 ; TCstInj := eq_refl }. +Add CstOp Op_Z_Z0. + Instance Op_Z_add : BinOp Z.add := - { TBOp := Z.add ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.add ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_add. Instance Op_Z_min : BinOp Z.min := - { TBOp := Z.min ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.min ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_min. Instance Op_Z_max : BinOp Z.max := - { TBOp := Z.max ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.max ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_max. Instance Op_Z_mul : BinOp Z.mul := - { TBOp := Z.mul ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.mul ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_mul. Instance Op_Z_sub : BinOp Z.sub := - { TBOp := Z.sub ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.sub ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_sub. Instance Op_Z_div : BinOp Z.div := - { TBOp := Z.div ; TBOpInj := ltac:(reflexivity) }. 
+ { TBOp := Z.div ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_div. Instance Op_Z_mod : BinOp Z.modulo := - { TBOp := Z.modulo ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.modulo ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_mod. Instance Op_Z_rem : BinOp Z.rem := - { TBOp := Z.rem ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.rem ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_rem. Instance Op_Z_quot : BinOp Z.quot := - { TBOp := Z.quot ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.quot ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_quot. Instance Op_Z_succ : UnOp Z.succ := - { TUOp := fun x => x + 1 ; TUOpInj := ltac:(reflexivity) }. + { TUOp := fun x => x + 1 ; TUOpInj := ltac:(refl) }. Add UnOp Op_Z_succ. Instance Op_Z_pred : UnOp Z.pred := - { TUOp := fun x => x - 1 ; TUOpInj := ltac:(reflexivity) }. + { TUOp := fun x => x - 1 ; TUOpInj := ltac:(refl) }. Add UnOp Op_Z_pred. Instance Op_Z_opp : UnOp Z.opp := - { TUOp := Z.opp ; TUOpInj := ltac:(reflexivity) }. + { TUOp := Z.opp ; TUOpInj := ltac:(refl) }. Add UnOp Op_Z_opp. Instance Op_Z_abs : UnOp Z.abs := - { TUOp := Z.abs ; TUOpInj := ltac:(reflexivity) }. + { TUOp := Z.abs ; TUOpInj := ltac:(refl) }. Add UnOp Op_Z_abs. Instance Op_Z_sgn : UnOp Z.sgn := - { TUOp := Z.sgn ; TUOpInj := ltac:(reflexivity) }. + { TUOp := Z.sgn ; TUOpInj := ltac:(refl) }. Add UnOp Op_Z_sgn. Instance Op_Z_pow : BinOp Z.pow := - { TBOp := Z.pow ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.pow ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_pow. Instance Op_Z_pow_pos : BinOp Z.pow_pos := - { TBOp := Z.pow ; TBOpInj := ltac:(reflexivity) }. + { TBOp := Z.pow ; TBOpInj := ltac:(refl) }. Add BinOp Op_Z_pow_pos. Instance Op_Z_double : UnOp Z.double := diff --git a/theories/omega/PreOmega.v b/theories/omega/PreOmega.v index 34533670f8..bd9caa801c 100644 --- a/theories/omega/PreOmega.v +++ b/theories/omega/PreOmega.v @@ -573,16 +573,4 @@ Ltac zify_N := repeat zify_N_rel; repeat zify_N_op; unfold Z_of_N' in *. 
Require Import ZifyClasses ZifyInst. Require Zify. -(* [elim_let] replaces a let binding (x := e : t) - by an equation (x = e) if t is an injected type *) - -Ltac elim_binding x t ty := - let h := fresh "heq_" x in - pose proof (@eq_refl ty x : @eq ty x t) as h; - try clearbody x. - -Ltac elim_let := zify_iter_let elim_binding. - -Ltac zify := - intros ; elim_let ; - Zify.zify ; ZifyInst.zify_saturate. +Ltac zify := Zify.zify. diff --git a/toplevel/ccompile.ml b/toplevel/ccompile.ml index dceb811d66..f75a706041 100644 --- a/toplevel/ccompile.ml +++ b/toplevel/ccompile.ml @@ -104,7 +104,7 @@ let compile opts copts ~echo ~f_in ~f_out = |> prlist_with_sep pr_comma Names.Id.print) ++ str ".") in - let iload_path = build_load_path opts in + let ml_load_path, vo_load_path = build_load_path opts in let require_libs = require_libs opts in let stm_options = opts.config.stm_flags in let output_native_objects = match opts.config.native_compiler with @@ -129,8 +129,8 @@ let compile opts copts ~echo ~f_in ~f_out = | BuildVo | BuildVok -> let doc, sid = Topfmt.(in_phase ~phase:LoadingPrelude) Stm.new_doc - Stm.{ doc_type = VoDoc long_f_dot_out; - iload_path; require_libs; stm_options; + Stm.{ doc_type = VoDoc long_f_dot_out; ml_load_path; + vo_load_path; require_libs; stm_options; } in let state = { doc; sid; proof = None; time = opts.config.time } in let state = load_init_vernaculars opts ~state in @@ -181,8 +181,8 @@ let compile opts copts ~echo ~f_in ~f_out = let doc, sid = Topfmt.(in_phase ~phase:LoadingPrelude) Stm.new_doc - Stm.{ doc_type = VioDoc long_f_dot_out; - iload_path; require_libs; stm_options; + Stm.{ doc_type = VioDoc long_f_dot_out; ml_load_path; + vo_load_path; require_libs; stm_options; } in let state = { doc; sid; proof = None; time = opts.config.time } in @@ -252,8 +252,9 @@ let do_vio opts copts = (* We must initialize the loadpath here as the vio scheduling process happens outside of the STM *) if copts.vio_files <> [] || copts.vio_tasks <> [] then - let 
iload_path = build_load_path opts in - List.iter Loadpath.add_coq_path iload_path; + let ml_lp, vo_lp = build_load_path opts in + List.iter Mltop.add_ml_dir ml_lp; + List.iter Loadpath.add_vo_path vo_lp; (* Vio compile pass *) if copts.vio_files <> [] then schedule_vio copts; diff --git a/toplevel/coqargs.ml b/toplevel/coqargs.ml index 949a13974c..24cfecd057 100644 --- a/toplevel/coqargs.ml +++ b/toplevel/coqargs.ml @@ -70,8 +70,8 @@ type coqargs_pre = { load_init : bool; load_rcfile : bool; - ml_includes : Loadpath.coq_path list; - vo_includes : Loadpath.coq_path list; + ml_includes : string list; + vo_includes : Loadpath.vo_path list; vo_requires : (string * string option * bool option) list; (* None = No Import; Some false = Import; Some true = Export *) @@ -164,14 +164,13 @@ let default = { (* Functional arguments *) (******************************************************************************) let add_ml_include opts s = - Loadpath.{ opts with pre = { opts.pre with ml_includes = {recursive = false; path_spec = MlPath s} :: opts.pre.ml_includes }} + { opts with pre = { opts.pre with ml_includes = s :: opts.pre.ml_includes }} let add_vo_include opts unix_path coq_path implicit = let open Loadpath in let coq_path = Libnames.dirpath_of_string coq_path in { opts with pre = { opts.pre with vo_includes = { - recursive = true; - path_spec = VoPath { unix_path; coq_path; has_ml = AddNoML; implicit } } :: opts.pre.vo_includes }} + unix_path; coq_path; has_ml = false; implicit; recursive = true } :: opts.pre.vo_includes }} let add_vo_require opts d p export = { opts with pre = { opts.pre with vo_requires = (d, p, export) :: opts.pre.vo_requires }} @@ -582,9 +581,7 @@ let prelude_data = "Prelude", Some "Coq", Some false let require_libs opts = if opts.pre.load_init then prelude_data :: opts.pre.vo_requires else opts.pre.vo_requires -let cmdline_load_path opts = - opts.pre.ml_includes @ opts.pre.vo_includes - let build_load_path opts = - (if opts.pre.boot then [] else 
Coqinit.libs_init_load_path ()) @ - cmdline_load_path opts + let ml_path, vo_path = if opts.pre.boot then [],[] else Coqinit.libs_init_load_path () in + ml_path @ opts.pre.ml_includes , + vo_path @ opts.pre.vo_includes diff --git a/toplevel/coqargs.mli b/toplevel/coqargs.mli index aba6811f43..88de48967a 100644 --- a/toplevel/coqargs.mli +++ b/toplevel/coqargs.mli @@ -46,8 +46,8 @@ type coqargs_pre = { load_init : bool; load_rcfile : bool; - ml_includes : Loadpath.coq_path list; - vo_includes : Loadpath.coq_path list; + ml_includes : CUnix.physical_path list; + vo_includes : Loadpath.vo_path list; vo_requires : (string * string option * bool option) list; (* None = No Import; Some false = Import; Some true = Export *) @@ -83,4 +83,4 @@ val parse_args : help:Usage.specific_usage -> init:t -> string list -> t * strin val error_wrong_arg : string -> unit val require_libs : t -> (string * string option * bool option) list -val build_load_path : t -> Loadpath.coq_path list +val build_load_path : t -> CUnix.physical_path list * Loadpath.vo_path list diff --git a/toplevel/coqinit.ml b/toplevel/coqinit.ml index 7f3d4b570f..f6b173968f 100644 --- a/toplevel/coqinit.ml +++ b/toplevel/coqinit.ml @@ -47,38 +47,26 @@ let load_rcfile ~rcfile ~state = " found. 
Skipping rcfile loading.")) *) with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = Feedback.msg_info (str"Load of rcfile failed.") in - iraise reraise + Exninfo.iraise reraise -(* Recursively puts `.v` files in the LoadPath if -nois was not passed *) +(* Recursively puts `.v` files in the LoadPath *) let build_stdlib_vo_path ~unix_path ~coq_path = let open Loadpath in - { recursive = true; - path_spec = VoPath { unix_path; coq_path ; has_ml = AddNoML; implicit = true } - } - -let build_stdlib_ml_path ~dir = - let open Loadpath in - { recursive = true - ; path_spec = MlPath dir - } + { unix_path; coq_path ; has_ml = false; implicit = true; recursive = true } let build_userlib_path ~unix_path = let open Loadpath in - { recursive = true; - path_spec = VoPath { - unix_path; - coq_path = Libnames.default_root_prefix; - has_ml = AddRecML; - implicit = false; - } + { unix_path + ; coq_path = Libnames.default_root_prefix + ; has_ml = true + ; implicit = false + ; recursive = true } let ml_path_if c p = - let open Loadpath in - let f x = { recursive = false; path_spec = MlPath x } in - if c then List.map f p else [] + if c then p else [] (* LoadPath for developers *) let toplevel_init_load_path () = @@ -97,16 +85,19 @@ let libs_init_load_path () = let coqpath = Envars.coqpath in let coq_path = Names.DirPath.make [Libnames.coq_root] in + (* ML includes *) + let plugins_dirs = System.all_subdirs ~unix_path:(coqlib/"plugins") in + List.map fst plugins_dirs, + (* current directory (not recursively!) *) - [ { recursive = false; - path_spec = VoPath { unix_path = "."; - coq_path = Libnames.default_root_prefix; - implicit = false; - has_ml = AddTopML } + [ { unix_path = "." 
+ ; coq_path = Libnames.default_root_prefix + ; implicit = false + ; has_ml = true + ; recursive = false } ] @ (* then standard library *) - [build_stdlib_ml_path ~dir:(coqlib/"plugins")] @ [build_stdlib_vo_path ~unix_path:(coqlib/"theories") ~coq_path] @ (* then user-contrib *) @@ -120,10 +111,8 @@ let libs_init_load_path () = (* Initialises the Ocaml toplevel before launching it, so that it can find the "include" file in the *source* directory *) let init_ocaml_path () = - let open Loadpath in - let lp s = { recursive = false; path_spec = MlPath s } in let add_subdir dl = - Loadpath.add_coq_path (lp (List.fold_left (/) (Envars.coqlib()) [dl])) + Mltop.add_ml_dir (List.fold_left (/) (Envars.coqlib()) [dl]) in - Loadpath.add_coq_path (lp (Envars.coqlib ())); - List.iter add_subdir Coq_config.all_src_dirs + Mltop.add_ml_dir (Envars.coqlib ()); + List.iter add_subdir Coq_config.all_src_dirs diff --git a/toplevel/coqinit.mli b/toplevel/coqinit.mli index f3a007d987..f099173808 100644 --- a/toplevel/coqinit.mli +++ b/toplevel/coqinit.mli @@ -17,7 +17,7 @@ val load_rcfile : rcfile:(string option) -> state:Vernac.State.t -> Vernac.State val init_ocaml_path : unit -> unit (* LoadPath for toploop toplevels *) -val toplevel_init_load_path : unit -> Loadpath.coq_path list +val toplevel_init_load_path : unit -> CUnix.physical_path list (* LoadPath for Coq user libraries *) -val libs_init_load_path : unit -> Loadpath.coq_path list +val libs_init_load_path : unit -> CUnix.physical_path list * Loadpath.vo_path list diff --git a/toplevel/coqloop.ml b/toplevel/coqloop.ml index e4508e9bfc..b0012f8978 100644 --- a/toplevel/coqloop.ml +++ b/toplevel/coqloop.ml @@ -265,7 +265,7 @@ let read_sentence ~state input = let open Vernac.State in try Stm.parse_sentence ~doc:state.doc state.sid ~entry:G_toplevel.vernac_toplevel input with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in discard_to_dot (); (* The caller of read_sentence does the error 
printing now, this should be re-enabled once we rely on the feedback error @@ -360,7 +360,7 @@ let top_goal_print ~doc c oldp newp = end with | exn -> - let (e, info) = CErrors.push exn in + let (e, info) = Exninfo.capture exn in let loc = Loc.get_loc info in let msg = CErrors.iprint (e, info) in TopErr.print_error_for_buffer ?loc Feedback.Error msg top_buffer @@ -484,7 +484,7 @@ let read_and_execute ~state = TopErr.print_error_for_buffer Feedback.Error msg top_buffer; exit 1 | any -> - let (e, info) = CErrors.push any in + let (e, info) = Exninfo.capture any in let loc = Loc.get_loc info in let msg = CErrors.iprint (e, info) in TopErr.print_error_for_buffer ?loc Feedback.Error msg top_buffer; diff --git a/toplevel/coqtop.ml b/toplevel/coqtop.ml index 1ea48ee766..1e6b1023fe 100644 --- a/toplevel/coqtop.ml +++ b/toplevel/coqtop.ml @@ -226,7 +226,7 @@ let init_execution opts custom_init = Spawned.init_channels (); if opts.post.memory_stat then at_exit print_memory_stat; let top_lp = Coqinit.toplevel_init_load_path () in - List.iter Loadpath.add_coq_path top_lp; + List.iter Mltop.add_ml_dir top_lp; CoqworkmgrApi.(init opts.config.stm_flags.Stm.AsyncOpts.async_proofs_worker_priority); Mltop.init_known_plugins (); (* Configuration *) @@ -281,14 +281,14 @@ let init_document opts = *) (* Next line allows loading .vos files when in interactive mode *) Flags.load_vos_libraries := true; - let iload_path = build_load_path opts in + let ml_load_path, vo_load_path = build_load_path opts in let require_libs = require_libs opts in let stm_options = opts.config.stm_flags in let open Vernac.State in let doc, sid = Stm.(new_doc { doc_type = Interactive opts.config.logic.toplevel_name; - iload_path; require_libs; stm_options; + ml_load_path; vo_load_path; require_libs; stm_options; }) in { doc; sid; proof = None; time = opts.config.time } diff --git a/toplevel/g_toplevel.mlg b/toplevel/g_toplevel.mlg index 56fda58a25..d3768eb1e3 100644 --- a/toplevel/g_toplevel.mlg +++ 
b/toplevel/g_toplevel.mlg @@ -11,8 +11,6 @@ { open Pcoq open Pcoq.Prim -open Tok -open Util open Vernacexpr (* Vernaculars specific to the toplevel *) @@ -36,17 +34,10 @@ open Toplevel_ let err () = raise Stream.Failure let test_show_goal = - Pcoq.Entry.of_parser "test_show_goal" - (fun _ strm -> - match stream_nth 0 strm with - | IDENT "Show" -> - (match stream_nth 1 strm with - | IDENT "Goal" -> - (match stream_nth 2 strm with - | NUMERAL _ -> () - | _ -> err ()) - | _ -> err ()) - | _ -> err ()) + let open Pcoq.Lookahead in + to_entry "test_show_goal" begin + lk_kw "Show" >> lk_kw "Goal" >> lk_nat + end } diff --git a/toplevel/vernac.ml b/toplevel/vernac.ml index adcce67b0d..8e6cd8f4c7 100644 --- a/toplevel/vernac.ml +++ b/toplevel/vernac.ml @@ -69,7 +69,7 @@ let interp_vernac ~check ~interactive ~state ({CAst.loc;_} as com) = let new_proof = Vernacstate.Proof_global.give_me_the_proof_opt () [@ocaml.warning "-3"] in { state with doc = ndoc; sid = nsid; proof = new_proof; } with reraise -> - let (reraise, info) = CErrors.push reraise in + let (reraise, info) = Exninfo.capture reraise in (* XXX: In non-interactive mode edit_at seems to do very weird things, so we better avoid it while we investigate *) if interactive then ignore(Stm.edit_at ~doc:state.doc state.sid); @@ -77,7 +77,8 @@ let interp_vernac ~check ~interactive ~state ({CAst.loc;_} as com) = match Loc.get_loc info with | None -> Option.cata (Loc.add_loc info) info loc | Some _ -> info - end in iraise (reraise, info) + end in + Exninfo.iraise (reraise, info) (* Load a vernac file. 
CErrors are annotated with file and location *) let load_vernac_core ~echo ~check ~interactive ~state file = @@ -113,9 +114,9 @@ let load_vernac_core ~echo ~check ~interactive ~state file = in try loop state [] with any -> (* whatever the exception *) - let (e, info) = CErrors.push any in + let (e, info) = Exninfo.capture any in input_cleanup (); - iraise (e, info) + Exninfo.iraise (e, info) let process_expr ~state loc_ast = interp_vernac ~interactive:true ~check:true ~state loc_ast diff --git a/user-contrib/Ltac2/g_ltac2.mlg b/user-contrib/Ltac2/g_ltac2.mlg index e95ac3b02b..a0984aa2a9 100644 --- a/user-contrib/Ltac2/g_ltac2.mlg +++ b/user-contrib/Ltac2/g_ltac2.mlg @@ -13,7 +13,6 @@ open Pp open Util open Names -open Tok open Pcoq open Attributes open Constrexpr @@ -21,81 +20,52 @@ open Tac2expr open Tac2qexpr open Ltac_plugin -let err () = raise Stream.Failure - -type lookahead = Gramlib.Plexing.location_function -> int -> Tok.t Stream.t -> int option - -let check_no_space tok m strm = - let n = Stream.count strm in - if G_prim.contiguous tok n (n+m-1) then Some m else None - -let entry_of_lookahead s (lk : lookahead) = - let run tok strm = match lk tok 0 strm with None -> err () | Some _ -> () in - Pcoq.Entry.of_parser s run - -let (>>) (lk1 : lookahead) lk2 tok n strm = match lk1 tok n strm with -| None -> None -| Some n -> lk2 tok n strm - -let (<+>) (lk1 : lookahead) lk2 tok n strm = match lk1 tok n strm with -| None -> lk2 tok n strm -| Some n -> Some n - -let lk_empty tok n strm = Some n - -let lk_kw kw tok n strm = match stream_nth n strm with -| KEYWORD kw' | IDENT kw' -> if String.equal kw kw' then Some (n + 1) else None -| _ -> None - -let lk_ident tok n strm = match stream_nth n strm with -| IDENT _ -> Some (n + 1) -| _ -> None - -let lk_int tok n strm = match stream_nth n strm with -| NUMERAL { NumTok.int = _; frac = ""; exp = "" } -> Some (n + 1) -| _ -> None - -let lk_ident_or_anti = lk_ident <+> (lk_kw "$" >> lk_ident >> check_no_space) - -let rec 
lk_ident_list n strm = - ((lk_ident >> lk_ident_list) <+> lk_empty) n strm +let lk_ident_or_anti = + Pcoq.Lookahead.(lk_ident <+> (lk_kw "$" >> lk_ident >> check_no_space)) (* lookahead for (x:=t), (?x:=t) and (1:=t) *) let test_lpar_idnum_coloneq = - entry_of_lookahead "test_lpar_idnum_coloneq" begin - lk_kw "(" >> (lk_ident_or_anti <+> lk_int) >> lk_kw ":=" + let open Pcoq.Lookahead in + to_entry "test_lpar_idnum_coloneq" begin + lk_kw "(" >> (lk_ident_or_anti <+> lk_nat) >> lk_kw ":=" end (* lookahead for (x:t), (?x:t) *) let test_lpar_id_colon = - entry_of_lookahead "test_lpar_id_colon" begin + let open Pcoq.Lookahead in + to_entry "test_lpar_id_colon" begin lk_kw "(" >> lk_ident_or_anti >> lk_kw ":" end (* Hack to recognize "(x := t)" and "($x := t)" *) let test_lpar_id_coloneq = - entry_of_lookahead "test_lpar_id_coloneq" begin + let open Pcoq.Lookahead in + to_entry "test_lpar_id_coloneq" begin lk_kw "(" >> lk_ident_or_anti >> lk_kw ":=" end (* Hack to recognize "(x)" *) let test_lpar_id_rpar = - entry_of_lookahead "test_lpar_id_rpar" begin + let open Pcoq.Lookahead in + to_entry "test_lpar_id_rpar" begin lk_kw "(" >> lk_ident >> lk_kw ")" end let test_ampersand_ident = - entry_of_lookahead "test_ampersand_ident" begin + let open Pcoq.Lookahead in + to_entry "test_ampersand_ident" begin lk_kw "&" >> lk_ident >> check_no_space end let test_dollar_ident = - entry_of_lookahead "test_dollar_ident" begin + let open Pcoq.Lookahead in + to_entry "test_dollar_ident" begin lk_kw "$" >> lk_ident >> check_no_space end let test_ltac1_env = - entry_of_lookahead "test_ltac1_env" begin + let open Pcoq.Lookahead in + to_entry "test_ltac1_env" begin lk_ident_list >> lk_kw "|-" end diff --git a/user-contrib/Ltac2/tac2core.ml b/user-contrib/Ltac2/tac2core.ml index 196b28b274..f66ed7b4cf 100644 --- a/user-contrib/Ltac2/tac2core.ml +++ b/user-contrib/Ltac2/tac2core.ml @@ -573,7 +573,7 @@ let () = define1 "constr_check" constr begin fun c -> Proofview.Unsafe.tclEVARS sigma >>= 
fun () -> return (of_result Value.of_constr (Inl c)) with e when CErrors.noncritical e -> - let e = CErrors.push e in + let e = Exninfo.capture e in return (of_result Value.of_constr (Inr e)) end end @@ -1079,7 +1079,7 @@ let interp_constr flags ist c = Proofview.Unsafe.tclEVARS sigma >>= fun () -> Proofview.tclUNIT c with e when catchable_exception e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in set_bt info >>= fun info -> match Exninfo.get info fatal_flag with | None -> Proofview.tclZERO ~info e diff --git a/vernac/declareDef.ml b/vernac/declareDef.ml index e57c324c9a..2b6f987fa6 100644 --- a/vernac/declareDef.ml +++ b/vernac/declareDef.ml @@ -37,9 +37,9 @@ module Hook = struct let call ?hook ?fix_exn x = try Option.iter (fun hook -> CEphemeron.get hook x) hook with e when CErrors.noncritical e -> - let e = CErrors.push e in + let e = Exninfo.capture e in let e = Option.cata (fun fix -> fix e) e fix_exn in - Util.iraise e + Exninfo.iraise e end (* Locality stuff *) diff --git a/vernac/declaremods.ml b/vernac/declaremods.ml index c816a4eb4f..e645fc552b 100644 --- a/vernac/declaremods.ml +++ b/vernac/declaremods.ml @@ -935,9 +935,9 @@ let protect_summaries f = try f fs with reraise -> (* Something wrong: undo the whole process *) - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = Summary.unfreeze_summaries fs in - iraise reraise + Exninfo.iraise reraise let start_module export id args res = protect_summaries (RawModOps.start_module export id args res) diff --git a/vernac/g_vernac.mlg b/vernac/g_vernac.mlg index def4ed942a..c1414c552a 100644 --- a/vernac/g_vernac.mlg +++ b/vernac/g_vernac.mlg @@ -22,7 +22,6 @@ open Extend open Decls open Declaremods open Namegen -open Tok (* necessary for camlp5 *) open Pcoq open Pcoq.Prim @@ -45,11 +44,12 @@ let quoted_attributes = Entry.create "vernac:quoted_attributes" let class_rawexpr = Entry.create "vernac:class_rawexpr" let thm_token = Entry.create 
"vernac:thm_token" let def_body = Entry.create "vernac:def_body" -let decl_notation = Entry.create "vernac:decl_notation" +let decl_notations = Entry.create "vernac:decl_notations" let record_field = Entry.create "vernac:record_field" let of_type_with_opt_coercion = Entry.create "vernac:of_type_with_opt_coercion" let section_subset_expr = Entry.create "vernac:section_subset_expr" let scope_delimiter = Entry.create "vernac:scope_delimiter" +let only_parsing = Entry.create "vernac:only_parsing" let make_bullet s = let open Proof_bullet in @@ -177,7 +177,7 @@ let name_of_ident_decl : ident_decl -> name_decl = (* Gallina declarations *) GRAMMAR EXTEND Gram GLOBAL: gallina gallina_ext thm_token def_body of_type_with_opt_coercion - record_field decl_notation rec_definition ident_decl univ_decl; + record_field decl_notations rec_definition ident_decl univ_decl; gallina: (* Definition, Theorem, Variable, Axiom, ... *) @@ -377,15 +377,17 @@ GRAMMAR EXTEND Gram [ [ IDENT "Eval"; r = red_expr; "in" -> { Some r } | -> { None } ] ] ; - one_decl_notation: - [ [ ntn = ne_lstring; ":="; c = constr; - scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { (ntn,c,scopt) } ] ] + decl_notation: + [ [ ntn = ne_lstring; ":="; c = constr; b = only_parsing; + scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> + { { decl_ntn_string = ntn; decl_ntn_interp = c; + decl_ntn_only_parsing = b; decl_ntn_scope = scopt } } ] ] ; decl_sep: [ [ IDENT "and" -> { () } ] ] ; - decl_notation: - [ [ "where"; l = LIST1 one_decl_notation SEP decl_sep -> { l } + decl_notations: + [ [ "where"; l = LIST1 decl_notation SEP decl_sep -> { l } | -> { [] } ] ] ; (* Inductives and records *) @@ -397,7 +399,7 @@ GRAMMAR EXTEND Gram [ [ oc = opt_coercion; id = ident_decl; indpar = binders; extrapar = OPT [ "|"; p = binders -> { p } ]; c = OPT [ ":"; c = lconstr -> { c } ]; - lc=opt_constructors_or_fields; ntn = decl_notation -> + lc=opt_constructors_or_fields; ntn = decl_notations -> { (((oc,id),(indpar,extrapar),c,lc),ntn) } ] ] ; 
constructor_list_or_record_decl: @@ -424,14 +426,14 @@ GRAMMAR EXTEND Gram [ [ id_decl = ident_decl; bl = binders_fixannot; rtype = type_cstr; - body_def = OPT [":="; def = lconstr -> { def } ]; notations = decl_notation -> + body_def = OPT [":="; def = lconstr -> { def } ]; notations = decl_notations -> { let binders, rec_order = bl in {fname = fst id_decl; univs = snd id_decl; rec_order; binders; rtype; body_def; notations} } ] ] ; corec_definition: [ [ id_decl = ident_decl; binders = binders; rtype = type_cstr; - body_def = OPT [":="; def = lconstr -> { def }]; notations = decl_notation -> + body_def = OPT [":="; def = lconstr -> { def }]; notations = decl_notations -> { {fname = fst id_decl; univs = snd id_decl; rec_order = (); binders; rtype; body_def; notations} } ]] ; @@ -467,7 +469,7 @@ GRAMMAR EXTEND Gram record_field: [ [ attr = LIST0 quoted_attributes ; bd = record_binder; rf_priority = OPT [ "|"; n = natural -> { n } ]; - rf_notation = decl_notation -> { + rf_notation = decl_notations -> { let rf_canonical = attr |> List.flatten |> parse canonical_field in let rf_subclass, rf_decl = bd in rf_decl, { rf_subclass ; rf_priority ; rf_notation ; rf_canonical } } ] ] @@ -531,16 +533,12 @@ END { -let only_starredidentrefs = - Pcoq.Entry.of_parser "test_only_starredidentrefs" - (fun _ strm -> - let rec aux n = - match Util.stream_nth n strm with - | KEYWORD "." 
-> () - | KEYWORD ")" -> () - | (IDENT _ | KEYWORD "Type" | KEYWORD "*") -> aux (n+1) - | _ -> raise Stream.Failure in - aux 0) +let test_only_starredidentrefs = + let open Pcoq.Lookahead in + to_entry "test_only_starredidentrefs" begin + lk_list (lk_ident <+> lk_kws ["Type"; "*"]) >> (lk_kws [".";")"]) + end + let starredidentreflist_to_expr l = match l with | [] -> SsEmpty @@ -670,7 +668,7 @@ GRAMMAR EXTEND Gram ; (* Proof using *) section_subset_expr: - [ [ only_starredidentrefs; l = LIST0 starredidentref -> + [ [ test_only_starredidentrefs; l = LIST0 starredidentref -> { starredidentreflist_to_expr l } | e = ssexpr -> { e } ]] ; @@ -688,9 +686,9 @@ GRAMMAR EXTEND Gram | e1 = ssexpr; "+"; e2 = ssexpr-> { SsUnion(e1,e2) } ] | "0" [ i = starredidentref -> { i } - | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"-> + | "("; test_only_starredidentrefs; l = LIST0 starredidentref; ")"-> { starredidentreflist_to_expr l } - | "("; only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" -> + | "("; test_only_starredidentrefs; l = LIST0 starredidentref; ")"; "*" -> { SsFwdClose(starredidentreflist_to_expr l) } | "("; e = ssexpr; ")"-> { e } | "("; e = ssexpr; ")"; "*" -> { SsFwdClose e } ] ] @@ -917,10 +915,11 @@ GRAMMAR EXTEND Gram | IDENT "Locate"; l = locatable -> { VernacLocate l } (* Managing load paths *) - | IDENT "Add"; IDENT "LoadPath"; dir = ne_string; alias = as_dirpath -> - { VernacAddLoadPath (false, dir, alias) } - | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; dir = ne_string; - alias = as_dirpath -> { VernacAddLoadPath (true, dir, alias) } + | IDENT "Add"; IDENT "LoadPath"; physical_path = ne_string; "as"; logical_path = dirpath -> + { VernacAddLoadPath { implicit = false; logical_path; physical_path } } + | IDENT "Add"; IDENT "Rec"; IDENT "LoadPath"; physical_path = ne_string; "as"; logical_path = dirpath -> + { VernacAddLoadPath { implicit = true; logical_path; physical_path } } + | IDENT "Remove"; IDENT "LoadPath"; dir = ne_string -> { 
VernacRemoveLoadPath dir } @@ -939,9 +938,7 @@ GRAMMAR EXTEND Gram | IDENT "Inspect"; n = natural -> { VernacPrint (PrintInspect n) } | IDENT "Add"; IDENT "ML"; IDENT "Path"; dir = ne_string -> - { VernacAddMLPath (false, dir) } - | IDENT "Add"; IDENT "Rec"; IDENT "ML"; IDENT "Path"; dir = ne_string -> - { VernacAddMLPath (true, dir) } + { VernacAddMLPath dir } (* For acting on parameter tables *) | "Set"; table = option_table; v = option_setting -> @@ -1075,9 +1072,6 @@ GRAMMAR EXTEND Gram option_table: [ [ fl = LIST1 [ x = IDENT -> { x } ] -> { fl } ]] ; - as_dirpath: - [ [ d = OPT [ "as"; d = dirpath -> { d } ] -> { d } ] ] - ; ne_in_or_out_modules: [ [ IDENT "inside"; l = LIST1 global -> { SearchInside l } | IDENT "outside"; l = LIST1 global -> { SearchOutside l } ] ] @@ -1154,7 +1148,7 @@ GRAMMAR EXTEND Gram (* Grammar extensions *) GRAMMAR EXTEND Gram - GLOBAL: syntax; + GLOBAL: syntax only_parsing; syntax: [ [ IDENT "Open"; IDENT "Scope"; sc = IDENT -> diff --git a/vernac/indschemes.ml b/vernac/indschemes.ml index 227d2f1554..80616ecc2a 100644 --- a/vernac/indschemes.ml +++ b/vernac/indschemes.ml @@ -118,7 +118,7 @@ let alarm what internal msg = let try_declare_scheme what f internal names kn = try f internal names kn with e -> - let e = CErrors.push e in + let e = Exninfo.capture e in let rec extract_exn = function Logic_monad.TacticFailure e -> extract_exn e | e -> e in let msg = match extract_exn (fst e) with | ParameterWithoutEquality cst -> @@ -166,11 +166,11 @@ let try_declare_scheme what f internal names kn = | e when CErrors.noncritical e -> alarm what internal (str "Unexpected error during scheme creation: " ++ CErrors.print e) - | _ -> iraise e + | _ -> Exninfo.iraise e in match msg with | None -> () - | Some msg -> iraise (UserError (None, msg), snd e) + | Some msg -> Exninfo.iraise (UserError (None, msg), snd e) let beq_scheme_msg mind = let mib = Global.lookup_mind mind in diff --git a/vernac/lemmas.ml b/vernac/lemmas.ml index 
f7606f4ede..68f4f55d0e 100644 --- a/vernac/lemmas.ml +++ b/vernac/lemmas.ml @@ -375,8 +375,8 @@ let finish_proved env sigma idopt po info = (* This takes care of the implicits and hook for the current constant*) process_recthms ~fix_exn ?hook env sigma universes ~udecl ~poly ~scope r impargs other_thms with e when CErrors.noncritical e -> - let e = CErrors.push e in - iraise (fix_exn e) + let e = Exninfo.capture e in + Exninfo.iraise (fix_exn e) in () | _ -> CErrors.anomaly Pp.(str "[standard_proof_terminator] close_proof returned more than one proof term") diff --git a/vernac/library.ml b/vernac/library.ml index 0f7e7d2aa0..5aff86c50c 100644 --- a/vernac/library.ml +++ b/vernac/library.ml @@ -440,11 +440,11 @@ let save_library_base f sum lib univs tasks proofs = System.marshal_out_segment f ch (proofs : seg_proofs); close_out ch with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in close_out ch; Feedback.msg_warning (str "Removed file " ++ str f); Sys.remove f; - iraise reraise + Exninfo.iraise reraise type ('document,'counters) todo_proofs = | ProofsTodoNone (* for .vo *) diff --git a/vernac/loadpath.ml b/vernac/loadpath.ml index 506b3bc505..38aa42c349 100644 --- a/vernac/loadpath.ml +++ b/vernac/loadpath.ml @@ -218,24 +218,18 @@ let try_locate_absolute_library dir = (** { 5 Extending the load path } *) -(* Adds a path to the Coq and ML paths *) -type add_ml = AddNoML | AddTopML | AddRecML - -type vo_path_spec = { - unix_path : string; (* Filesystem path containing vo/ml files *) - coq_path : DP.t; (* Coq prefix for the path *) - implicit : bool; (* [implicit = true] avoids having to qualify with [coq_path] *) - has_ml : add_ml; (* If [has_ml] is true, the directory will also be search for plugins *) -} - -type coq_path_spec = - | VoPath of vo_path_spec - | MlPath of string - -type coq_path = { - path_spec: coq_path_spec; - recursive: bool; -} +type vo_path = + { unix_path : string + (** Filesystem path containing 
vo/ml files *) + ; coq_path : DP.t + (** Coq prefix for the path *) + ; implicit : bool + (** [implicit = true] avoids having to qualify with [coq_path] *) + ; has_ml : bool + (** If [has_ml] is true, the directory will also be added to the ml include path *) + ; recursive : bool + (** [recursive] will determine whether we explore sub-directories *) + } let warn_cannot_open_path = CWarnings.create ~name:"cannot-open-path" ~category:"filesystem" @@ -255,9 +249,10 @@ let convert_string d = warn_cannot_use_directory d; raise Exit -let add_vo_path ~recursive lp = +let add_vo_path lp = let unix_path = lp.unix_path in let implicit = lp.implicit in + let recursive = lp.recursive in if System.exists_dir unix_path then let dirs = if recursive then System.all_subdirs ~unix_path else [] in let prefix = DP.repr lp.coq_path in @@ -268,22 +263,17 @@ let add_vo_path ~recursive lp = with Exit -> None in let dirs = List.map_filter convert_dirs dirs in - let add_ml_dir = Mltop.add_ml_dir ~recursive:false in - let () = match lp.has_ml with - | AddNoML -> () - | AddTopML -> - Mltop.add_ml_dir ~recursive:false unix_path - | AddRecML -> - List.iter (fun (lp,_) -> add_ml_dir lp) dirs; - add_ml_dir unix_path in + let () = + if lp.has_ml && not lp.recursive then + Mltop.add_ml_dir unix_path + else if lp.has_ml && lp.recursive then + (List.iter (fun (lp,_) -> Mltop.add_ml_dir lp) dirs; + Mltop.add_ml_dir unix_path) + else + () + in let add (path, dir) = add_load_path path ~implicit dir in let () = List.iter add dirs in add_load_path unix_path ~implicit lp.coq_path else warn_cannot_open_path unix_path - -let add_coq_path { recursive; path_spec } = match path_spec with - | VoPath lp -> - add_vo_path ~recursive lp - | MlPath dir -> - Mltop.add_ml_dir ~recursive dir diff --git a/vernac/loadpath.mli b/vernac/loadpath.mli index 47d2d34125..68212b9a47 100644 --- a/vernac/loadpath.mli +++ b/vernac/loadpath.mli @@ -64,26 +64,17 @@ val try_locate_absolute_library : DirPath.t -> string (** {6 
Extending the Load Path } *) (** Adds a path to the Coq and ML paths *) -type add_ml = AddNoML | AddTopML | AddRecML - -type vo_path_spec = { - unix_path : string; +type vo_path = + { unix_path : string (** Filesystem path containing vo/ml files *) - coq_path : Names.DirPath.t; + ; coq_path : DirPath.t (** Coq prefix for the path *) - implicit : bool; + ; implicit : bool (** [implicit = true] avoids having to qualify with [coq_path] *) - has_ml : add_ml; - (** If [has_ml] is true, the directory will also be search for plugins *) -} - -type coq_path_spec = - | VoPath of vo_path_spec - | MlPath of string - -type coq_path = { - path_spec: coq_path_spec; - recursive: bool; -} + ; has_ml : bool + (** If [has_ml] is true, the directory will also be added to the ml include path *) + ; recursive : bool + (** [recursive] will determine whether we explore sub-directories *) + } -val add_coq_path : coq_path -> unit +val add_vo_path : vo_path -> unit diff --git a/vernac/metasyntax.ml b/vernac/metasyntax.ml index 3937f887ad..33fd14a731 100644 --- a/vernac/metasyntax.ml +++ b/vernac/metasyntax.ml @@ -1466,9 +1466,9 @@ let with_lib_stk_protection f x = let fs = Lib.freeze () in try let a = f x in Lib.unfreeze fs; a with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in let () = Lib.unfreeze fs in - iraise reraise + Exninfo.iraise reraise let with_syntax_protection f x = with_lib_stk_protection @@ -1654,13 +1654,23 @@ let add_syntax_extension ~local ({CAst.loc;v=df},mods) = let open SynData in (* Notations with only interpretation *) -let add_notation_interpretation env ({CAst.loc;v=df},c,sc) = - let df' = add_notation_interpretation_core ~local:false df env c sc false false None in +let add_notation_interpretation env decl_ntn = + let + { decl_ntn_string = { CAst.loc ; v = df }; + decl_ntn_interp = c; + decl_ntn_only_parsing = onlyparse; + decl_ntn_scope = sc } = decl_ntn in + let df' = add_notation_interpretation_core ~local:false df 
env c sc onlyparse false None in Dumpglob.dump_notation (loc,df') sc true -let set_notation_for_interpretation env impls ({CAst.v=df},c,sc) = +let set_notation_for_interpretation env impls decl_ntn = + let + { decl_ntn_string = { CAst.v = df }; + decl_ntn_interp = c; + decl_ntn_only_parsing = onlyparse; + decl_ntn_scope = sc } = decl_ntn in (try ignore - (Flags.silently (fun () -> add_notation_interpretation_core ~local:false df env ~impls c sc false false None) ()); + (Flags.silently (fun () -> add_notation_interpretation_core ~local:false df env ~impls c sc onlyparse false None) ()); with NoSyntaxRule -> user_err Pp.(str "Parsing rule for this notation has to be previously declared.")); Option.iter (fun sc -> Notation.open_close_scope (false,true,sc)) sc diff --git a/vernac/metasyntax.mli b/vernac/metasyntax.mli index e3e733a4b7..d76820b033 100644 --- a/vernac/metasyntax.mli +++ b/vernac/metasyntax.mli @@ -37,12 +37,12 @@ val add_class_scope : locality_flag -> scope_name -> scope_class list -> unit (** Add only the interpretation of a notation that already has pa/pp rules *) val add_notation_interpretation : - env -> (lstring * constr_expr * scope_name option) -> unit + env -> decl_notation -> unit (** Add a notation interpretation for supporting the "where" clause *) -val set_notation_for_interpretation : env -> Constrintern.internalization_env -> - (lstring * constr_expr * scope_name option) -> unit +val set_notation_for_interpretation : + env -> Constrintern.internalization_env -> decl_notation -> unit (** Add only the parsing/printing rule of a notation *) diff --git a/vernac/mltop.ml b/vernac/mltop.ml index 5046248e11..671dae7876 100644 --- a/vernac/mltop.ml +++ b/vernac/mltop.ml @@ -128,11 +128,6 @@ let add_ml_dir s = | WithoutTop when has_dynlink -> keep_copy_mlpath s | _ -> () -(* For Rec Add ML Path (-R) *) -let add_ml_dir ~recursive unix_path = - let dirs = if recursive then (all_subdirs ~unix_path) else [unix_path,[]] in - List.iter (fun (lp,_) -> 
add_ml_dir lp) dirs - (* convertit un nom quelconque en nom de fichier ou de module *) let mod_of_name name = if Filename.check_suffix name ".cmo" then diff --git a/vernac/mltop.mli b/vernac/mltop.mli index 271772d7ba..633a5c241d 100644 --- a/vernac/mltop.mli +++ b/vernac/mltop.mli @@ -32,7 +32,7 @@ val ocaml_toploop : unit -> unit (** {5 ML Dynlink} *) (** Adds a dir to the plugin search path *) -val add_ml_dir : recursive:bool -> string -> unit +val add_ml_dir : string -> unit (** Tests if we can load ML files *) val has_dynlink : bool diff --git a/vernac/obligations.ml b/vernac/obligations.ml index 76dbf1ad5a..27eb821a6a 100644 --- a/vernac/obligations.ml +++ b/vernac/obligations.ml @@ -428,7 +428,7 @@ let solve_by_tac ?loc name evi t poly ctx = Some (body, entry.Declare.proof_entry_type, ctx') with | Refiner.FailError (_, s) as exn -> - let _ = CErrors.push exn in + let _ = Exninfo.capture exn in user_err ?loc ~hdr:"solve_obligation" (Lazy.force s) (* If the proof is open we absorb the error and leave the obligation open *) | Proof.OpenProof _ -> diff --git a/vernac/ppvernac.ml b/vernac/ppvernac.ml index 314c423f65..84ae04e4cc 100644 --- a/vernac/ppvernac.ml +++ b/vernac/ppvernac.ml @@ -297,11 +297,6 @@ open Pputils | { v = CHole (k, Namegen.IntroAnonymous, _) } -> mt() | _ as c -> brk(0,2) ++ str" :" ++ pr_c c - let pr_decl_notation prc ({loc; v=ntn},c,scopt) = - fnl () ++ keyword "where " ++ qs ntn ++ str " := " - ++ Flags.without_option Flags.beautify prc c ++ - pr_opt (fun sc -> str ": " ++ str sc) scopt - let pr_binders_arg = let env = Global.env () in let sigma = Evd.from_env env in @@ -418,6 +413,21 @@ let string_of_theorem_kind = let open Decls in function | l -> spc() ++ hov 1 (str"(" ++ prlist_with_sep sep_v2 pr_syntax_modifier l ++ str")") + let pr_only_parsing_clause onlyparsing = + pr_syntax_modifiers (if onlyparsing then [SetOnlyParsing] else []) + + let pr_decl_notation prc decl_ntn = + let open Vernacexpr in + let + { decl_ntn_string = 
{CAst.loc;v=ntn}; + decl_ntn_interp = c; + decl_ntn_only_parsing = onlyparsing; + decl_ntn_scope = scopt } = decl_ntn in + fnl () ++ keyword "where " ++ qs ntn ++ str " := " + ++ Flags.without_option Flags.beautify prc c + ++ pr_only_parsing_clause onlyparsing + ++ pr_opt (fun sc -> str ": " ++ str sc) scopt + let pr_rec_definition { fname; univs; rec_order; binders; rtype; body_def; notations } = let env = Global.env () in let sigma = Evd.from_env env in @@ -1016,22 +1026,18 @@ let string_of_definition_object_kind = let open Decls in function return (keyword "Existential" ++ spc () ++ int i ++ pr_lconstrarg c) (* Auxiliary file and library management *) - | VernacAddLoadPath (fl,s,d) -> + | VernacAddLoadPath { implicit; physical_path; logical_path } -> return ( hov 2 (keyword "Add" ++ - (if fl then spc () ++ keyword "Rec" ++ spc () else spc()) ++ - keyword "LoadPath" ++ spc() ++ qs s ++ - (match d with - | None -> mt() - | Some dir -> spc() ++ keyword "as" ++ spc() ++ DirPath.print dir)) - ) + (if implicit then spc () ++ keyword "Rec" ++ spc () else spc()) ++ + keyword "LoadPath" ++ spc() ++ qs physical_path ++ + spc() ++ keyword "as" ++ spc() ++ DirPath.print logical_path)) | VernacRemoveLoadPath s -> return (keyword "Remove LoadPath" ++ qs s) - | VernacAddMLPath (fl,s) -> + | VernacAddMLPath (s) -> return ( keyword "Add" - ++ (if fl then spc () ++ keyword "Rec" ++ spc () else spc()) ++ keyword "ML Path" ++ qs s ) @@ -1061,7 +1067,7 @@ let string_of_definition_object_kind = let open Decls in function hov 2 (keyword "Notation" ++ spc () ++ pr_lident id ++ spc () ++ prlist_with_sep spc pr_id ids ++ str":=" ++ pr_constrarg c ++ - pr_syntax_modifiers (if onlyparsing then [SetOnlyParsing] else [])) + pr_only_parsing_clause onlyparsing) ) | VernacArguments (q, args, more_implicits, mods) -> return ( diff --git a/vernac/topfmt.ml b/vernac/topfmt.ml index de02f7ecfb..509c164af8 100644 --- a/vernac/topfmt.ml +++ b/vernac/topfmt.ml @@ -361,7 +361,7 @@ let in_phase ~phase f 
x = with exn -> let iexn = Exninfo.capture exn in default_phase := op; - Util.iraise iexn + Exninfo.iraise iexn let pr_loc loc = let fname = loc.Loc.fname in @@ -394,7 +394,7 @@ let pr_phase ?loc () = None let print_err_exn any = - let (e, info) = CErrors.push any in + let (e, info) = Exninfo.capture any in let loc = Loc.get_loc info in let pre_hdr = pr_phase ?loc () in let msg = CErrors.iprint (e, info) ++ fnl () in diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml index 2eb1aa39b0..953faf6fdb 100644 --- a/vernac/vernacentries.ml +++ b/vernac/vernacentries.ml @@ -342,9 +342,9 @@ let dump_universes_gen prl g s = close (); str "Universes written to file \"" ++ str s ++ str "\"." with reraise -> - let reraise = CErrors.push reraise in + let reraise = Exninfo.capture reraise in close (); - iraise reraise + Exninfo.iraise reraise let universe_subgraph ?loc g univ = let open Univ in @@ -1120,20 +1120,17 @@ let vernac_set_used_variables ~pstate e : Proof_global.t = let expand filename = Envars.expand_path_macros ~warn:(fun x -> Feedback.msg_warning (str x)) filename -let vernac_add_loadpath implicit pdir ldiropt = +let vernac_add_loadpath ~implicit pdir coq_path = let open Loadpath in let pdir = expand pdir in - let alias = Option.default Libnames.default_root_prefix ldiropt in - add_coq_path { recursive = true; - path_spec = VoPath { unix_path = pdir; coq_path = alias; has_ml = AddTopML; implicit } } + add_vo_path { unix_path = pdir; coq_path; has_ml = true; implicit; recursive = true } let vernac_remove_loadpath path = Loadpath.remove_load_path (expand path) (* Coq syntax for ML or system commands *) -let vernac_add_ml_path isrec path = - let open Loadpath in - add_coq_path { recursive = isrec; path_spec = MlPath (expand path) } +let vernac_add_ml_path path = + Mltop.add_ml_dir (expand path) let vernac_declare_ml_module ~local l = let local = Option.default false local in @@ -2106,18 +2103,18 @@ let translate_vernac ~atts v = let open Vernacextend in match 
v with unsupported_attributes atts; vernac_solve_existential ~pstate n c) (* Auxiliary file and library management *) - | VernacAddLoadPath (isrec,s,alias) -> + | VernacAddLoadPath { implicit; physical_path; logical_path } -> VtDefault(fun () -> unsupported_attributes atts; - vernac_add_loadpath isrec s alias) + vernac_add_loadpath ~implicit physical_path logical_path) | VernacRemoveLoadPath s -> VtDefault(fun () -> unsupported_attributes atts; vernac_remove_loadpath s) - | VernacAddMLPath (isrec,s) -> + | VernacAddMLPath (s) -> VtDefault(fun () -> unsupported_attributes atts; - vernac_add_ml_path isrec s) + vernac_add_ml_path s) | VernacDeclareMLModule l -> VtDefault(fun () -> with_locality ~atts vernac_declare_ml_module l) | VernacChdir s -> diff --git a/vernac/vernacexpr.ml b/vernac/vernacexpr.ml index 45018a246c..7169ea834a 100644 --- a/vernac/vernacexpr.ml +++ b/vernac/vernacexpr.ml @@ -98,7 +98,6 @@ type search_restriction = | SearchInside of qualid list | SearchOutside of qualid list -type rec_flag = bool (* true = Rec; false = NoRec *) type verbose_flag = bool (* true = Verbose; false = Silent *) type coercion_flag = bool (* true = AddCoercion false = NoCoercion *) type instance_flag = bool option @@ -129,7 +128,12 @@ type definition_expr = | DefineBody of local_binder_expr list * Genredexpr.raw_red_expr option * constr_expr * constr_expr option -type decl_notation = lstring * constr_expr * scope_name option +type decl_notation = + { decl_ntn_string : lstring + ; decl_ntn_interp : constr_expr + ; decl_ntn_only_parsing : bool + ; decl_ntn_scope : scope_name option + } type 'a fix_expr_gen = { fname : lident @@ -363,9 +367,13 @@ type nonrec vernac_expr = | VernacSolveExistential of int * constr_expr (* Auxiliary file and library management *) - | VernacAddLoadPath of rec_flag * string * DirPath.t option + | VernacAddLoadPath of { implicit : bool + ; physical_path : CUnix.physical_path + ; logical_path : DirPath.t + } + | VernacRemoveLoadPath of string - | 
VernacAddMLPath of rec_flag * string + | VernacAddMLPath of string | VernacDeclareMLModule of string list | VernacChdir of string option diff --git a/vernac/vernacextend.ml b/vernac/vernacextend.ml index f41df06f85..e0afb7f483 100644 --- a/vernac/vernacextend.ml +++ b/vernac/vernacextend.ml @@ -109,11 +109,11 @@ let type_vernac opn converted_args ~atts = phase := "Executing command"; hunk ~atts with - | reraise -> - let reraise = CErrors.push reraise in - if !Flags.debug then - Feedback.msg_debug (str"Vernac Interpreter " ++ str !phase); - iraise reraise + | reraise -> + let reraise = Exninfo.capture reraise in + if !Flags.debug then + Feedback.msg_debug (str"Vernac Interpreter " ++ str !phase); + Exninfo.iraise reraise (** VERNAC EXTEND registering *) diff --git a/vernac/vernacinterp.ml b/vernac/vernacinterp.ml index 1ec09b6beb..8083098022 100644 --- a/vernac/vernacinterp.ml +++ b/vernac/vernacinterp.ml @@ -96,7 +96,7 @@ let with_fail f : (Pp.t, unit) result = (* Fail Timeout is a common pattern so we need to support it. 
*) | e when CErrors.noncritical e || e = CErrors.Timeout -> (* The error has to be printed in the failing state *) - Ok CErrors.(iprint (push e)) + Ok CErrors.(iprint (Exninfo.capture e)) (* We restore the state always *) let with_fail ~st f = @@ -262,10 +262,10 @@ let interp_gen ~verbosely ~st ~interp_fn cmd = Vernacstate.freeze_interp_state ~marshallable:false ) st with exn -> - let exn = CErrors.push exn in + let exn = Exninfo.capture exn in let exn = locate_if_not_already ?loc:cmd.CAst.loc exn in Vernacstate.invalidate_cache (); - Util.iraise exn + Exninfo.iraise exn (* Regular interp *) let interp ?(verbosely=true) ~st cmd = diff --git a/vernac/vernacstate.ml b/vernac/vernacstate.ml index 3c70961e06..59557a60a6 100644 --- a/vernac/vernacstate.ml +++ b/vernac/vernacstate.ml @@ -21,7 +21,7 @@ module Parser = struct Flags.with_option Flags.we_are_parsing (fun () -> try Pcoq.Entry.parse entry pa with e when CErrors.noncritical e -> - let (e, info) = CErrors.push e in + let (e, info) = Exninfo.capture e in Exninfo.iraise (e, info)) () |
