diff options
Diffstat (limited to 'doc')
67 files changed, 2545 insertions, 774 deletions
diff --git a/doc/README.md b/doc/README.md index 3db1261656..c41d269437 100644 --- a/doc/README.md +++ b/doc/README.md @@ -101,18 +101,21 @@ Alternatively, you can use some specific targets: Also note the `-with-doc yes` option of `./configure` to enable the build of the documentation as part of the default make target. -If you're editing Sphinx documentation, set SPHINXWARNERROR to 0 -to avoid treating Sphinx warnings as errors. Otherwise, Sphinx quits -upon detecting the first warning. You can set this on the Sphinx `make` -command line or as an environment variable: - -- `make refman SPHINXWARNERROR=0` - -- ~~~ - export SPHINXWARNERROR=0 - ⋮ - make refman - ~~~ +To build the Sphinx documentation without stopping at the first +warning with the legacy Makefile, set `SPHINXWARNERROR` to 0 as such: + +``` +SPHINXWARNERROR=0 make refman-html +``` + +To do the same with the Dune build system, change the value of the +`SPHINXWARNOPT` variable (default is `-W`). The following will build +the Sphinx documentation without stopping at the first warning, and +store all the warnings in the file `/tmp/warn.log`: + +``` +SPHINXWARNOPT="-w/tmp/warn.log" dune build @refman-html +``` Installation ------------ diff --git a/doc/common/macros.tex b/doc/common/macros.tex index 6a28c5b3d1..927a912fbf 100644 --- a/doc/common/macros.tex +++ b/doc/common/macros.tex @@ -242,7 +242,7 @@ \newcommand{\vref}{\nterm{ref}} \newcommand{\zarithformula}{\nterm{zarith\_formula}} \newcommand{\zarith}{\nterm{zarith}} -\newcommand{\ltac}{\mbox{${\cal L}_{tac}$}} +\newcommand{\ltac}{\mbox{${\mathcal{L}}_{tac}$}} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % \mbox{\sf } series for roman text in maths formulas % @@ -373,15 +373,15 @@ \newcommand{\sumbool}[2]{\{#1\}+\{#2\}} \newcommand{\myifthenelse}[3]{\kw{if} ~ #1 ~\kw{then} ~ #2 ~ \kw{else} ~ #3} \newcommand{\fun}[2]{\item[]{\tt {#1}}. 
\quad\\ #2} -\newcommand{\WF}[2]{\ensuremath{{\cal W\!F}(#1)[#2]}} -\newcommand{\WFTWOLINES}[2]{\ensuremath{{\cal W\!F}\begin{array}{l}(#1)\\\mbox{}[{#2}]\end{array}}} +\newcommand{\WF}[2]{\ensuremath{{\mathcal{W\!F}}(#1)[#2]}} +\newcommand{\WFTWOLINES}[2]{\ensuremath{{\mathcal{W\!F}}\begin{array}{l}(#1)\\\mbox{}[{#2}]\end{array}}} \newcommand{\WFE}[1]{\WF{E}{#1}} \newcommand{\WT}[4]{\ensuremath{#1[#2] \vdash #3 : #4}} \newcommand{\WTE}[3]{\WT{E}{#1}{#2}{#3}} \newcommand{\WTEG}[2]{\WTE{\Gamma}{#1}{#2}} \newcommand{\WTM}[3]{\WT{#1}{}{#2}{#3}} -\newcommand{\WFT}[2]{\ensuremath{#1[] \vdash {\cal W\!F}(#2)}} +\newcommand{\WFT}[2]{\ensuremath{#1[] \vdash {\mathcal{W\!F}}(#2)}} \newcommand{\WS}[3]{\ensuremath{#1[] \vdash #2 <: #3}} \newcommand{\WSE}[2]{\WS{E}{#1}{#2}} \newcommand{\WEV}[3]{\mbox{$#1[] \vdash #2 \lra #3$}} @@ -427,7 +427,7 @@ \newcommand{\letin}[3]{\kw{let}~#1:=#2~\kw{in}~#3} \newcommand{\subst}[3]{#1\{#2/#3\}} \newcommand{\substs}[4]{#1\{(#2/#3)_{#4}\}} -\newcommand{\Sort}{\mbox{$\cal S$}} +\newcommand{\Sort}{\mbox{$\mathcal{S}$}} \newcommand{\convert}{=_{\beta\delta\iota\zeta\eta}} \newcommand{\leconvert}{\leq_{\beta\delta\iota\zeta\eta}} \newcommand{\NN}{\mathbb{N}} @@ -10,8 +10,10 @@ ; + tools/coqdoc/coqdoc.css (package coq) (source_tree sphinx) - (source_tree tools)) - (action (run sphinx-build -j4 -b html -d sphinx_build/doctrees sphinx sphinx_build/html))) + (source_tree tools) + (env_var SPHINXWARNOPT)) + (action + (run sphinx-build -j4 %{env:SPHINXWARNOPT=-W} -b html -d sphinx_build/doctrees sphinx sphinx_build/html))) (alias (name refman-html) diff --git a/doc/plugin_tutorial/.gitignore b/doc/plugin_tutorial/.gitignore new file mode 100644 index 0000000000..3e4978fac4 --- /dev/null +++ b/doc/plugin_tutorial/.gitignore @@ -0,0 +1,13 @@ +*.ml*.d +*.cm[ixt]* +Makefile.coq* +*~ +*.[ao] +.coqdeps.d +*.vo +*.glob +*.aux +*/*/.merlin + +# by convention g_foo.ml is generated +g_*.ml diff --git a/doc/plugin_tutorial/.travis.yml 
b/doc/plugin_tutorial/.travis.yml new file mode 100644 index 0000000000..556e0ac45a --- /dev/null +++ b/doc/plugin_tutorial/.travis.yml @@ -0,0 +1,38 @@ +dist: trusty +sudo: required +language: generic + +services: + - docker + +env: + global: + - NJOBS="2" + - CONTRIB_NAME="plugin_tutorials" + matrix: + - COQ_IMAGE="coqorg/coq:dev" + +install: | + # Prepare the COQ container + docker run -d -i --init --name=COQ -v ${TRAVIS_BUILD_DIR}:/home/coq/$CONTRIB_NAME -w /home/coq/$CONTRIB_NAME ${COQ_IMAGE} + docker exec COQ /bin/bash --login -c " + # This bash script is double-quoted to interpolate Travis CI env vars: + echo \"Build triggered by ${TRAVIS_EVENT_TYPE}\" + export PS4='+ \e[33;1m(\$0 @ line \$LINENO) \$\e[0m ' + set -ex # -e = exit on failure; -x = trace for debug + opam list + " +script: +- echo -e "${ANSI_YELLOW}Building $CONTRIB_NAME...${ANSI_RESET}" && echo -en 'travis_fold:start:testbuild\\r' +- | + docker exec COQ /bin/bash --login -c " + export PS4='+ \e[33;1m(\$0 @ line \$LINENO) \$\e[0m ' + set -ex + sudo chown -R coq:coq /home/coq/$CONTRIB_NAME + ( cd tuto0 && make ) + ( cd tuto1 && make ) + ( cd tuto2 && make ) + ( cd tuto3 && make ) + " +- docker stop COQ # optional +- echo -en 'travis_fold:end:testbuild\\r' diff --git a/doc/plugin_tutorial/LICENSE b/doc/plugin_tutorial/LICENSE new file mode 100644 index 0000000000..cf1ab25da0 --- /dev/null +++ b/doc/plugin_tutorial/LICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. 
We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to <http://unlicense.org> diff --git a/doc/plugin_tutorial/Makefile b/doc/plugin_tutorial/Makefile new file mode 100644 index 0000000000..7f1833fadd --- /dev/null +++ b/doc/plugin_tutorial/Makefile @@ -0,0 +1,21 @@ + +TUTOS:= \ + tuto0 \ + tuto1 \ + tuto2 \ + tuto3 + +all: $(TUTOS) + +.PHONY: $(TUTOS) all + +$(TUTOS): %: + +$(MAKE) -C $@ + +CLEANS:=$(addsuffix -clean, $(TUTOS)) +.PHONY: clean $(CLEANS) + +clean: $(CLEANS) + +%-clean: + +$(MAKE) -C $* clean diff --git a/doc/plugin_tutorial/README.md b/doc/plugin_tutorial/README.md new file mode 100644 index 0000000000..f82edb2352 --- /dev/null +++ b/doc/plugin_tutorial/README.md @@ -0,0 +1,86 @@ +How to write plugins in Coq +=========================== + # Working environment : merlin, tuareg (open question) + + ## OCaml & related tools + + These instructions use [OPAM](http://opam.ocaml.org/doc/Install.html) + +```shell +opam init --root=$PWD/CIW2018 --compiler=4.06.0 -j2 +eval `opam config env --root=$PWD/CIW2018` +opam install camlp5 ocamlfind num # Coq's dependencies +opam install lablgtk # Coqide's dependencies (optional) +opam install merlin # prints instructions for vim and emacs +``` + + ## Coq + +```shell +git clone git@github.com:coq/coq.git +cd coq +./configure 
-profile devel +make -j2 +cd .. +export PATH=$PWD/coq/bin:$PATH +``` + + ## This tutorial + +```shell +git clone git@github.com:ybertot/plugin_tutorials.git +cd plugin_tutorials/tuto0 +make .merlin # run before opening .ml files in your editor +make # build +``` + + + + # tuto0 : basics of project organization + package a ml4 file in a plugin, organize a `Makefile`, `_CoqProject` + - Example of syntax to add a new toplevel command + - Example of function call to print a simple message + - Example of syntax to add a simple tactic + (that does nothing and prints a message) + - To use it: + +```bash + cd tuto0; make + coqtop -I src -R theories Tuto0 +``` + + In the Coq session type: +```coq + Require Import Tuto0.Loader. HelloWorld. +``` + + # tuto1 : Ocaml to Coq communication + Explore the memory of Coq, modify it + - Commands that take arguments: strings, symbols, expressions of the calculus of constructions + - Commands that interact with type-checking in Coq + - A command that adds a new definition or theorem + - A command that uses a name and exploits the existing definitions + or theorems + - A command that exploits an existing ongoing proof + - A command that defines a new tactic + + Compilation and loading must be performed as for `tuto0`. + + # tuto2 : Ocaml to Coq communication + A more step by step introduction to writing commands + - Explanation of the syntax of entries + - Adding a new type to and parsing to the available choices + - Handling commands that store information in user-chosen registers and tables + + Compilation and loading must be performed as for `tuto1`. + + # tuto3 : manipulating terms of the calculus of constructions + Manipulating terms, inside commands and tactics. 
+ - Obtaining existing values from memory + - Composing values + - Verifying types + - Using these terms in commands + - Using these terms in tactics + - Automatic proofs without tactics using type classes and canonical structures + + compilation and loading must be performed as for `tuto0`. diff --git a/doc/plugin_tutorial/tuto0/Makefile b/doc/plugin_tutorial/tuto0/Makefile new file mode 100644 index 0000000000..e0e197650d --- /dev/null +++ b/doc/plugin_tutorial/tuto0/Makefile @@ -0,0 +1,14 @@ +ifeq "$(COQBIN)" "" + COQBIN=$(dir $(shell which coqtop))/ +endif + +%: Makefile.coq + +Makefile.coq: _CoqProject + $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq + +tests: all + @$(MAKE) -C tests -s clean + @$(MAKE) -C tests -s all + +-include Makefile.coq diff --git a/doc/plugin_tutorial/tuto0/_CoqProject b/doc/plugin_tutorial/tuto0/_CoqProject new file mode 100644 index 0000000000..76368e3ac7 --- /dev/null +++ b/doc/plugin_tutorial/tuto0/_CoqProject @@ -0,0 +1,10 @@ +-R theories/ Tuto0 +-I src + +theories/Loader.v +theories/Demo.v + +src/tuto0_main.ml +src/tuto0_main.mli +src/g_tuto0.mlg +src/tuto0_plugin.mlpack diff --git a/doc/plugin_tutorial/tuto0/src/dune b/doc/plugin_tutorial/tuto0/src/dune new file mode 100644 index 0000000000..79d561061d --- /dev/null +++ b/doc/plugin_tutorial/tuto0/src/dune @@ -0,0 +1,9 @@ +(library + (name tuto0_plugin) + (public_name coq.plugins.tutorial.p0) + (libraries coq.plugins.ltac)) + +(rule + (targets g_tuto0.ml) + (deps (:pp-file g_tuto0.mlg) ) + (action (run coqpp %{pp-file}))) diff --git a/doc/plugin_tutorial/tuto0/src/g_tuto0.mlg b/doc/plugin_tutorial/tuto0/src/g_tuto0.mlg new file mode 100644 index 0000000000..5c633fe862 --- /dev/null +++ b/doc/plugin_tutorial/tuto0/src/g_tuto0.mlg @@ -0,0 +1,18 @@ +DECLARE PLUGIN "tuto0_plugin" + +{ + +open Pp +open Ltac_plugin + +} + +VERNAC COMMAND EXTEND HelloWorld CLASSIFIED AS QUERY +| [ "HelloWorld" ] -> { Feedback.msg_notice (strbrk Tuto0_main.message) } +END + +TACTIC EXTEND 
hello_world_tactic +| [ "hello_world" ] -> + { let _ = Feedback.msg_notice (str Tuto0_main.message) in + Tacticals.New.tclIDTAC } +END diff --git a/doc/plugin_tutorial/tuto0/src/tuto0_main.ml b/doc/plugin_tutorial/tuto0/src/tuto0_main.ml new file mode 100644 index 0000000000..93a807a800 --- /dev/null +++ b/doc/plugin_tutorial/tuto0/src/tuto0_main.ml @@ -0,0 +1 @@ +let message = "Hello world!" diff --git a/doc/plugin_tutorial/tuto0/src/tuto0_main.mli b/doc/plugin_tutorial/tuto0/src/tuto0_main.mli new file mode 100644 index 0000000000..846af3ed8c --- /dev/null +++ b/doc/plugin_tutorial/tuto0/src/tuto0_main.mli @@ -0,0 +1 @@ +val message : string diff --git a/doc/plugin_tutorial/tuto0/src/tuto0_plugin.mlpack b/doc/plugin_tutorial/tuto0/src/tuto0_plugin.mlpack new file mode 100644 index 0000000000..73be1bb561 --- /dev/null +++ b/doc/plugin_tutorial/tuto0/src/tuto0_plugin.mlpack @@ -0,0 +1,2 @@ +Tuto0_main +G_tuto0 diff --git a/doc/plugin_tutorial/tuto0/theories/Demo.v b/doc/plugin_tutorial/tuto0/theories/Demo.v new file mode 100644 index 0000000000..bdc61986af --- /dev/null +++ b/doc/plugin_tutorial/tuto0/theories/Demo.v @@ -0,0 +1,8 @@ +From Tuto0 Require Import Loader. + +HelloWorld. + +Lemma test : True. +Proof. +hello_world. +Abort. diff --git a/doc/plugin_tutorial/tuto0/theories/Loader.v b/doc/plugin_tutorial/tuto0/theories/Loader.v new file mode 100644 index 0000000000..7bce38382b --- /dev/null +++ b/doc/plugin_tutorial/tuto0/theories/Loader.v @@ -0,0 +1 @@ +Declare ML Module "tuto0_plugin". 
diff --git a/doc/plugin_tutorial/tuto1/Makefile b/doc/plugin_tutorial/tuto1/Makefile new file mode 100644 index 0000000000..e0e197650d --- /dev/null +++ b/doc/plugin_tutorial/tuto1/Makefile @@ -0,0 +1,14 @@ +ifeq "$(COQBIN)" "" + COQBIN=$(dir $(shell which coqtop))/ +endif + +%: Makefile.coq + +Makefile.coq: _CoqProject + $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq + +tests: all + @$(MAKE) -C tests -s clean + @$(MAKE) -C tests -s all + +-include Makefile.coq diff --git a/doc/plugin_tutorial/tuto1/_CoqProject b/doc/plugin_tutorial/tuto1/_CoqProject new file mode 100644 index 0000000000..585d1360be --- /dev/null +++ b/doc/plugin_tutorial/tuto1/_CoqProject @@ -0,0 +1,13 @@ +-R theories Tuto1 +-I src + +theories/Loader.v + +src/simple_check.mli +src/simple_check.ml +src/simple_declare.mli +src/simple_declare.ml +src/simple_print.ml +src/simple_print.mli +src/g_tuto1.mlg +src/tuto1_plugin.mlpack diff --git a/doc/plugin_tutorial/tuto1/src/dune b/doc/plugin_tutorial/tuto1/src/dune new file mode 100644 index 0000000000..cf9c674b14 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/dune @@ -0,0 +1,9 @@ +(library + (name tuto1_plugin) + (public_name coq.plugins.tutorial.p1) + (libraries coq.plugins.ltac)) + +(rule + (targets g_tuto1.ml) + (deps (:pp-file g_tuto1.mlg) ) + (action (run coqpp %{pp-file}))) diff --git a/doc/plugin_tutorial/tuto1/src/g_tuto1.mlg b/doc/plugin_tutorial/tuto1/src/g_tuto1.mlg new file mode 100644 index 0000000000..4df284d2d9 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/g_tuto1.mlg @@ -0,0 +1,154 @@ +DECLARE PLUGIN "tuto1_plugin" + +{ + +(* If we forget this line and include our own tactic definition using + TACTIC EXTEND, as below, then we get the strange error message + no implementation available for Tacentries, only when compiling + theories/Loader.v +*) +open Ltac_plugin +open Attributes +open Pp +(* This module defines the types of arguments to be used in the + EXTEND directives below, for example the string one. 
*) +open Stdarg + +} + +VERNAC COMMAND EXTEND HelloWorld CLASSIFIED AS QUERY +| [ "Hello" string(s) ] -> + { Feedback.msg_notice (strbrk "Hello " ++ str s) } +END + +(* reference is allowed as a syntactic entry, but so are all the entries + found the signature of module Prim in file coq/parsing/pcoq.mli *) + +VERNAC COMMAND EXTEND HelloAgain CLASSIFIED AS QUERY +| [ "HelloAgain" reference(r)] -> +(* The function Ppconstr.pr_qualid was found by searching all mli files + for a function of type qualid -> Pp.t *) + { Feedback.msg_notice + (strbrk "Hello again " ++ Ppconstr.pr_qualid r)} +END + +(* According to parsing/pcoq.mli, e has type constr_expr *) +(* this type is defined in pretyping/constrexpr.ml *) +(* Question for the developers: why is the file constrexpr.ml and not + constrexpr.mli --> Easier for packing the software in components. *) +VERNAC COMMAND EXTEND TakingConstr CLASSIFIED AS QUERY +| [ "Cmd1" constr(e) ] -> + { let _ = e in Feedback.msg_notice (strbrk "Cmd1 parsed something") } +END + +(* The next step is to make something of parsed expression. + Interesting information in interp/constrintern.mli *) + +(* There are several phases of transforming a parsed expression into + the final internal data-type (constr). There exists a collection of + functions that combine all the phases *) + +VERNAC COMMAND EXTEND TakingConstr2 CLASSIFIED AS QUERY +| [ "Cmd2" constr(e) ] -> + { let _ = Constrintern.interp_constr + (Global.env()) + (* Make sure you don't use Evd.empty here, as this does not + check consistency with existing universe constraints. *) + (Evd.from_env (Global.env())) e in + Feedback.msg_notice (strbrk "Cmd2 parsed something legitimate") } +END + +(* This is to show what happens when typing in an empty environment + with an empty evd. + Question for the developers: why does "Cmd3 (fun x : nat => x)." + raise an anomaly, not the same error as "Cmd3 (fun x : a => x)." 
*) + +VERNAC COMMAND EXTEND TakingConstr3 CLASSIFIED AS QUERY +| [ "Cmd3" constr(e) ] -> + { let _ = Constrintern.interp_constr Environ.empty_env + Evd.empty e in + Feedback.msg_notice + (strbrk "Cmd3 accepted something in the empty context")} +END + +(* When adding a definition, we have to be careful that just + the operation of constructing a well-typed term may already change + the environment, at the level of universe constraints (which + are recorded in the evd component). The function + Constrintern.interp_constr ignores this side-effect, so it should + not be used here. *) + +(* Looking at the interface file interp/constrintern.ml4, I lost + some time because I did not see that the "constr" type appearing + there was "EConstr.constr" and not "Constr.constr". *) + +VERNAC COMMAND EXTEND Define1 CLASSIFIED AS SIDEFF +| #[ poly = polymorphic ] [ "Cmd4" ident(i) constr(e) ] -> + { let v = Constrintern.interp_constr (Global.env()) + (Evd.from_env (Global.env())) e in + Simple_declare.packed_declare_definition ~poly i v } +END + +VERNAC COMMAND EXTEND Check1 CLASSIFIED AS QUERY +| [ "Cmd5" constr(e) ] -> + { let v = Constrintern.interp_constr (Global.env()) + (Evd.from_env (Global.env())) e in + let (_, ctx) = v in + let evd = Evd.from_ctx ctx in + Feedback.msg_notice + (Printer.pr_econstr_env (Global.env()) evd + (Simple_check.simple_check1 v)) } +END + +VERNAC COMMAND EXTEND Check2 CLASSIFIED AS QUERY +| [ "Cmd6" constr(e) ] -> + { let v = Constrintern.interp_constr (Global.env()) + (Evd.from_env (Global.env())) e in + let evd, ty = Simple_check.simple_check2 v in + Feedback.msg_notice + (Printer.pr_econstr_env (Global.env()) evd ty) } +END + +VERNAC COMMAND EXTEND Check1 CLASSIFIED AS QUERY +| [ "Cmd7" constr(e) ] -> + { let v = Constrintern.interp_constr (Global.env()) + (Evd.from_env (Global.env())) e in + let (a, ctx) = v in + let evd = Evd.from_ctx ctx in + Feedback.msg_notice + (Printer.pr_econstr_env (Global.env()) evd + (Simple_check.simple_check3 v)) } 
+END + +(* This command takes a name and return its value. It does less + than Print, because it fails on constructors, axioms, and inductive types. + This should be improved, because the error message is an anomaly. + Anomalies should never appear even when using a command outside of its + intended use. *) +VERNAC COMMAND EXTEND ExamplePrint CLASSIFIED AS QUERY +| [ "Cmd8" reference(r) ] -> + { let env = Global.env() in + let evd = Evd.from_env env in + Feedback.msg_notice + (Printer.pr_econstr_env env evd + (EConstr.of_constr + (Simple_print.simple_body_access (Nametab.global r)))) } +END + +TACTIC EXTEND my_intro +| [ "my_intro" ident(i) ] -> + { Tactics.introduction i } +END + +(* if one write this: + VERNAC COMMAND EXTEND exploreproof CLASSIFIED AS QUERY + it gives an error message that is basically impossible to understand. *) + +VERNAC COMMAND EXTEND ExploreProof CLASSIFIED AS QUERY +| [ "Cmd9" ] -> + { let p = Proof_global.give_me_the_proof () in + let sigma, env = Pfedit.get_current_context () in + let pprf = Proof.partial_proof p in + Feedback.msg_notice + (Pp.prlist_with_sep Pp.fnl (Printer.pr_econstr_env env sigma) pprf) } +END diff --git a/doc/plugin_tutorial/tuto1/src/simple_check.ml b/doc/plugin_tutorial/tuto1/src/simple_check.ml new file mode 100644 index 0000000000..1f636c531a --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/simple_check.ml @@ -0,0 +1,32 @@ +let simple_check1 value_with_constraints = + begin + let evalue, st = value_with_constraints in + let evd = Evd.from_ctx st in +(* This is reverse engineered from vernacentries.ml *) +(* The point of renaming is to make sure the bound names printed by Check + can be re-used in `apply with` tactics that use bound names to + refer to arguments. 
*) + let j = Termops.on_judgment EConstr.of_constr + (Arguments_renaming.rename_typing (Global.env()) + (EConstr.to_constr evd evalue)) in + let {Environ.uj_type=x}=j in x + end + +let simple_check2 value_with_constraints = + let evalue, st = value_with_constraints in + let evd = Evd.from_ctx st in +(* This version should be preferred if bound variable names are not so + important, you want to really verify that the input is well-typed, + and if you want to obtain the type. *) +(* Note that the output value is a pair containing a new evar_map: + typing will fill out blanks in the term by add evar bindings. *) + Typing.type_of (Global.env()) evd evalue + +let simple_check3 value_with_constraints = + let evalue, st = value_with_constraints in + let evd = Evd.from_ctx st in +(* This version should be preferred if bound variable names are not so + important and you already expect the input to have been type-checked + before. Set ~lax to false if you want an anomaly to be raised in + case of a type error. Otherwise a ReTypeError exception is raised. *) + Retyping.get_type_of ~lax:true (Global.env()) evd evalue diff --git a/doc/plugin_tutorial/tuto1/src/simple_check.mli b/doc/plugin_tutorial/tuto1/src/simple_check.mli new file mode 100644 index 0000000000..bcf1bf56cf --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/simple_check.mli @@ -0,0 +1,8 @@ +val simple_check1 : + EConstr.constr Evd.in_evar_universe_context -> EConstr.constr + +val simple_check2 : + EConstr.constr Evd.in_evar_universe_context -> Evd.evar_map * EConstr.constr + +val simple_check3 : + EConstr.constr Evd.in_evar_universe_context -> EConstr.constr diff --git a/doc/plugin_tutorial/tuto1/src/simple_declare.ml b/doc/plugin_tutorial/tuto1/src/simple_declare.ml new file mode 100644 index 0000000000..9d10a8ba72 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/simple_declare.ml @@ -0,0 +1,24 @@ +(* Ideally coq/coq#8811 would get merged and then this function could be much simpler. 
*) +let edeclare ident (_, poly, _ as k) ~opaque sigma udecl body tyopt imps hook = + let sigma = Evd.minimize_universes sigma in + let body = EConstr.to_constr sigma body in + let tyopt = Option.map (EConstr.to_constr sigma) tyopt in + let uvars_fold uvars c = + Univ.LSet.union uvars (Vars.universes_of_constr c) in + let uvars = List.fold_left uvars_fold Univ.LSet.empty + (Option.List.cons tyopt [body]) in + let sigma = Evd.restrict_universe_context sigma uvars in + let univs = Evd.check_univ_decl ~poly sigma udecl in + let ubinders = Evd.universe_binders sigma in + let ce = Declare.definition_entry ?types:tyopt ~univs body in + DeclareDef.declare_definition ident k ce ubinders imps ~hook + +let packed_declare_definition ~poly ident value_with_constraints = + let body, ctx = value_with_constraints in + let sigma = Evd.from_ctx ctx in + let k = (Decl_kinds.Global, poly, Decl_kinds.Definition) in + let udecl = UState.default_univ_decl in + let nohook = Lemmas.mk_hook (fun _ x -> ()) in + ignore (edeclare ident k ~opaque:false sigma udecl body None [] nohook) + +(* But this definition cannot be undone by Reset ident *) diff --git a/doc/plugin_tutorial/tuto1/src/simple_declare.mli b/doc/plugin_tutorial/tuto1/src/simple_declare.mli new file mode 100644 index 0000000000..fd74e81526 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/simple_declare.mli @@ -0,0 +1,5 @@ +open Names +open EConstr + +val packed_declare_definition : + poly:bool -> Id.t -> constr Evd.in_evar_universe_context -> unit diff --git a/doc/plugin_tutorial/tuto1/src/simple_print.ml b/doc/plugin_tutorial/tuto1/src/simple_print.ml new file mode 100644 index 0000000000..cfc38ff9c9 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/simple_print.ml @@ -0,0 +1,17 @@ +(* A more advanced example of how to explore the structure of terms of + type constr is given in the coq-dpdgraph plugin. 
*) + +let simple_body_access gref = + match gref with + | Globnames.VarRef _ -> + failwith "variables are not covered in this example" + | Globnames.IndRef _ -> + failwith "inductive types are not covered in this example" + | Globnames.ConstructRef _ -> + failwith "constructors are not covered in this example" + | Globnames.ConstRef cst -> + let cb = Environ.lookup_constant cst (Global.env()) in + match Global.body_of_constant_body cb with + | Some(e, _) -> e + | None -> failwith "This term has no value" + diff --git a/doc/plugin_tutorial/tuto1/src/simple_print.mli b/doc/plugin_tutorial/tuto1/src/simple_print.mli new file mode 100644 index 0000000000..254b56ff79 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/simple_print.mli @@ -0,0 +1 @@ +val simple_body_access : Names.GlobRef.t -> Constr.constr diff --git a/doc/plugin_tutorial/tuto1/src/tuto1_plugin.mlpack b/doc/plugin_tutorial/tuto1/src/tuto1_plugin.mlpack new file mode 100644 index 0000000000..a797a509e0 --- /dev/null +++ b/doc/plugin_tutorial/tuto1/src/tuto1_plugin.mlpack @@ -0,0 +1,4 @@ +Simple_check +Simple_declare +Simple_print +G_tuto1 diff --git a/doc/plugin_tutorial/tuto1/theories/Loader.v b/doc/plugin_tutorial/tuto1/theories/Loader.v new file mode 100644 index 0000000000..6e8e308b3f --- /dev/null +++ b/doc/plugin_tutorial/tuto1/theories/Loader.v @@ -0,0 +1 @@ +Declare ML Module "tuto1_plugin". 
diff --git a/doc/plugin_tutorial/tuto2/Makefile b/doc/plugin_tutorial/tuto2/Makefile new file mode 100644 index 0000000000..e0e197650d --- /dev/null +++ b/doc/plugin_tutorial/tuto2/Makefile @@ -0,0 +1,14 @@ +ifeq "$(COQBIN)" "" + COQBIN=$(dir $(shell which coqtop))/ +endif + +%: Makefile.coq + +Makefile.coq: _CoqProject + $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq + +tests: all + @$(MAKE) -C tests -s clean + @$(MAKE) -C tests -s all + +-include Makefile.coq diff --git a/doc/plugin_tutorial/tuto2/_CoqProject b/doc/plugin_tutorial/tuto2/_CoqProject new file mode 100644 index 0000000000..cf9cb5cc26 --- /dev/null +++ b/doc/plugin_tutorial/tuto2/_CoqProject @@ -0,0 +1,6 @@ +-R theories/ Tuto +-I src + +theories/Test.v +src/demo.mlg +src/demo_plugin.mlpack diff --git a/doc/plugin_tutorial/tuto2/src/.gitignore b/doc/plugin_tutorial/tuto2/src/.gitignore new file mode 100644 index 0000000000..5b1b6a902e --- /dev/null +++ b/doc/plugin_tutorial/tuto2/src/.gitignore @@ -0,0 +1 @@ +/demo.ml diff --git a/doc/plugin_tutorial/tuto2/src/demo.mlg b/doc/plugin_tutorial/tuto2/src/demo.mlg new file mode 100644 index 0000000000..966c05acdc --- /dev/null +++ b/doc/plugin_tutorial/tuto2/src/demo.mlg @@ -0,0 +1,375 @@ +(* -------------------------------------------------------------------------- *) +(* *) +(* Initial ritual dance *) +(* *) +(* -------------------------------------------------------------------------- *) + +DECLARE PLUGIN "demo_plugin" + +(* + Use this macro before any of the other OCaml macros. + + Each plugin has a unique name. + We have decided to name this plugin as "demo_plugin". + That means that: + + (1) If we want to load this particular plugin to Coq toplevel, + we must use the following command. + + Declare ML Module "demo_plugin". + + (2) The above command will succeed only if there is "demo_plugin.cmxs" + in some of the directories that Coq is supposed to look + (i.e. the ones we specified via "-I ..." command line options). 
+ + (3) The file "demo_plugin.mlpack" lists the OCaml modules to be linked in + "demo_plugin.cmxs". + + (4) The file "demo_plugin.mlpack" as well as all .ml, .mli and .mlg files + are listed in the "_CoqProject" file. +*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to define a new Vernacular command? *) +(* *) +(* -------------------------------------------------------------------------- *) + +VERNAC COMMAND EXTEND Cmd1 CLASSIFIED AS QUERY +| [ "Cmd1" ] -> { () } +END + +(* + These: + + VERNAC COMMAND EXTEND + + and + + END + + mark the beginning and the end of the definition of a new Vernacular command. + + Cmd1 is a unique identifier (which must start with an upper-case letter) + associated with the new Vernacular command we are defining. + + CLASSIFIED AS QUERY tells Coq that the new Vernacular command: + - changes neither the global environment + - nor does it modify the plugin's state. + + If the new command could: + - change the global environment + - or modify a plugin's state + then one would have to use CLASSIFIED AS SIDEFF instead. + + This: + + [ "Cmd1" ] -> { () } + + defines: + - the parsing rule + - the interpretation rule + + The parsing rule and the interpretation rule are separated by -> token. + + The parsing rule, in this case, is: + + [ "Cmd1" ] + + By convention, all vernacular command start with an upper-case letter. + + The [ and ] characters mark the beginning and the end of the parsing rule. + The parsing rule itself says that the syntax of the newly defined command + is composed from a single terminal Cmd1. + + The interpretation rule, in this case, is: + + { () } + + Similarly to the case of the parsing rule, + { and } characters mark the beginning and the end of the interpretation rule. + In this case, the following Ocaml expression: + + () + + defines the effect of the Vernacular command we have just defined. + That is, it behaves is no-op. 
+*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to define a new Vernacular command with some terminal parameters? *) +(* *) +(* -------------------------------------------------------------------------- *) + +VERNAC COMMAND EXTEND Cmd2 CLASSIFIED AS QUERY +| [ "Cmd2" "With" "Some" "Terminal" "Parameters" ] -> { () } +END + +(* + As shown above, the Vernacular command can be composed from + any number of terminals. + + By convention, each of these terminals starts with an upper-case letter. +*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to define a new Vernacular command with some non-terminal parameter? *) +(* *) +(* -------------------------------------------------------------------------- *) + +{ + +open Stdarg + +} + +VERNAC COMMAND EXTEND Cmd3 CLASSIFIED AS QUERY +| [ "Cmd3" int(i) ] -> { () } +END + +(* + This: + + open Stdarg + + is needed as some identifiers in the Ocaml code generated by the + + VERNAC COMMAND EXTEND ... END + + macros are not fully qualified. + + This: + + int(i) + + means that the new command is expected to be followed by an integer. + The integer is bound in the parsing rule to variable i. + This variable i then can be used in the interpretation rule. + + To see value of which Ocaml types can be bound this way, + look at the wit_* function declared in interp/stdarg.mli + (in the Coq's codebase). + + If we drop the wit_ prefix, we will get the token + that we can use in the parsing rule. + That is, since there exists wit_int, we know that + we can write: + + int(i) + + By looking at the signature of the wit_int function: + + val wit_int : int uniform_genarg_type + + we also know that variable i will have the type int. + + The types of wit_* functions are either: + + 'c uniform_genarg_type + + or + + ('a,'b,'c) genarg_type + + In both cases, the bound variable will have type 'c. 
+*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to define a new Vernacular command with variable number of arguments? *) +(* *) +(* -------------------------------------------------------------------------- *) + +VERNAC COMMAND EXTEND Cmd4 CLASSIFIED AS QUERY +| [ "Cmd4" int_list(l) ] -> { () } +END + +(* + This: + + int_list(l) + + means that the new Vernacular command is expected to be followed + by a (whitespace separated) list of integers. + This list of integers is bound to the indicated l. + + In this case, as well as in the cases we point out below, instead of int + in int_list we could use any other supported type, e.g. ident, bool, ... + + To see which other Ocaml type constructors (in addition to list) + are supported, have a look at the parse_user_entry function defined + in grammar/q_util.mlp file. + + E.g.: + - ne_int_list(x) would represent a non-empty list of integers, + - int_list(x) would represent a list of integers, + - int_opt(x) would represent a value of type int option, + - ··· +*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to define a new Vernacular command that takes values of a custom type? *) +(* *) +(* -------------------------------------------------------------------------- *) + +{ + +open Ltac_plugin + +} + +(* + If we want to avoid a compilation failure + + "no implementation available for Tacenv" + + then we have to open the Ltac_plugin module. +*) + +(* + Pp module must be opened because some of the macros that are part of the API + do not expand to fully qualified names. +*) + +{ + +type type_5 = Foo_5 | Bar_5 + +} + +(* + We define a type of values that we want to pass to our Vernacular command. +*) + +(* + By default, we are able to define new Vernacular commands that can take + parameters of some of the supported types. Which types are supported, + that was discussed earlier. 
+ + If we want to be able to define Vernacular command that takes parameters + of a type that is not supported by default, we must use the following macro: +*) + +{ + +open Pp + +} + +VERNAC ARGUMENT EXTEND custom5 +| [ "Foo_5" ] -> { Foo_5 } +| [ "Bar_5" ] -> { Bar_5 } +END + +(* + where: + + custom5 + + indicates that, from now on, in our parsing rules we can write: + + custom5(some_variable) + + in those places where we expect user to provide an input + that can be parsed by the parsing rules above + (and interpreted by the interpretations rules above). +*) + +(* Here: *) + +VERNAC COMMAND EXTEND Cmd5 CLASSIFIED AS QUERY +| [ "Cmd5" custom5(x) ] -> { () } +END + +(* + we define a new Vernacular command whose parameters, provided by the user, + can be mapped to values of type_5. +*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to give a feedback to the user? *) +(* *) +(* -------------------------------------------------------------------------- *) + +VERNAC COMMAND EXTEND Cmd6 CLASSIFIED AS QUERY +| [ "Cmd6" ] -> { Feedback.msg_notice (Pp.str "Everything is awesome!") } +END + +(* + The following functions: + + - Feedback.msg_info : Pp.t -> unit + - Feedback.msg_notice : Pp.t -> unit + - Feedback.msg_warning : Pp.t -> unit + - Feedback.msg_error : Pp.t -> unit + - Feedback.msg_debug : Pp.t -> unit + + enable us to give user a textual feedback. + + Pp module enable us to represent and construct pretty-printing instructions. + The concepts defined and the services provided by the Pp module are in + various respects related to the concepts and services provided + by the Format module that is part of the Ocaml standard library. +*) + +(* -------------------------------------------------------------------------- *) +(* *) +(* How to implement a Vernacular command with (undoable) side-effects? 
*) +(* *) +(* -------------------------------------------------------------------------- *) + +{ + +open Summary.Local + +} + +(* + By opening Summary.Local module we shadow the original functions + that we traditionally use for implementing stateful behavior. + + ref + ! + := + + are now shadowed by their counterparts in Summary.Local. *) + +{ + +let counter = ref ~name:"counter" 0 + +} + +VERNAC COMMAND EXTEND Cmd7 CLASSIFIED AS SIDEFF +| [ "Cmd7" ] -> { counter := succ !counter; + Feedback.msg_notice (Pp.str "counter = " ++ Pp.str (string_of_int (!counter))) } +END + +TACTIC EXTEND tactic1 +| [ "tactic1" ] -> { Proofview.tclUNIT () } +END + +(* ---- *) + +{ + +type custom = Foo_2 | Bar_2 + +let pr_custom _ _ _ = function + | Foo_2 -> Pp.str "Foo_2" + | Bar_2 -> Pp.str "Bar_2" + +} + +ARGUMENT EXTEND custom2 PRINTED BY { pr_custom } +| [ "Foo_2" ] -> { Foo_2 } +| [ "Bar_2" ] -> { Bar_2 } +END + +TACTIC EXTEND tactic2 +| [ "tactic2" custom2(x) ] -> { Proofview.tclUNIT () } +END diff --git a/doc/plugin_tutorial/tuto2/src/demo_plugin.mlpack b/doc/plugin_tutorial/tuto2/src/demo_plugin.mlpack new file mode 100644 index 0000000000..4f0b8480b5 --- /dev/null +++ b/doc/plugin_tutorial/tuto2/src/demo_plugin.mlpack @@ -0,0 +1 @@ +Demo diff --git a/doc/plugin_tutorial/tuto2/src/dune b/doc/plugin_tutorial/tuto2/src/dune new file mode 100644 index 0000000000..f2bc405455 --- /dev/null +++ b/doc/plugin_tutorial/tuto2/src/dune @@ -0,0 +1,9 @@ +(library + (name tuto2_plugin) + (public_name coq.plugins.tutorial.p2) + (libraries coq.plugins.ltac)) + +(rule + (targets demo.ml) + (deps (:pp-file demo.mlg) ) + (action (run coqpp %{pp-file}))) diff --git a/doc/plugin_tutorial/tuto2/theories/Test.v b/doc/plugin_tutorial/tuto2/theories/Test.v new file mode 100644 index 0000000000..38e83bfff1 --- /dev/null +++ b/doc/plugin_tutorial/tuto2/theories/Test.v @@ -0,0 +1,19 @@ +Declare ML Module "demo_plugin". + +Cmd1. +Cmd2 With Some Terminal Parameters. +Cmd3 42. +Cmd4 100 200 300 400. 
+Cmd5 Foo_5. +Cmd5 Bar_5. +Cmd6. +Cmd7. +Cmd7. +Cmd7. + +Goal True. +Proof. + tactic1. + tactic2 Foo_2. + tactic2 Bar_2. +Abort. diff --git a/doc/plugin_tutorial/tuto3/Makefile b/doc/plugin_tutorial/tuto3/Makefile new file mode 100644 index 0000000000..e0e197650d --- /dev/null +++ b/doc/plugin_tutorial/tuto3/Makefile @@ -0,0 +1,14 @@ +ifeq "$(COQBIN)" "" + COQBIN=$(dir $(shell which coqtop))/ +endif + +%: Makefile.coq + +Makefile.coq: _CoqProject + $(COQBIN)coq_makefile -f _CoqProject -o Makefile.coq + +tests: all + @$(MAKE) -C tests -s clean + @$(MAKE) -C tests -s all + +-include Makefile.coq diff --git a/doc/plugin_tutorial/tuto3/_CoqProject b/doc/plugin_tutorial/tuto3/_CoqProject new file mode 100644 index 0000000000..e2a60a430f --- /dev/null +++ b/doc/plugin_tutorial/tuto3/_CoqProject @@ -0,0 +1,12 @@ +-R theories Tuto3 +-I src + +theories/Data.v +theories/Loader.v + +src/tuto_tactic.ml +src/tuto_tactic.mli +src/construction_game.ml +src/construction_game.mli +src/g_tuto3.mlg +src/tuto3_plugin.mlpack diff --git a/doc/plugin_tutorial/tuto3/src/construction_game.ml b/doc/plugin_tutorial/tuto3/src/construction_game.ml new file mode 100644 index 0000000000..9d9f894e18 --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/construction_game.ml @@ -0,0 +1,186 @@ +open Pp + +let find_reference = Coqlib.find_reference [@ocaml.warning "-3"] + +let example_sort evd = +(* creating a new sort requires that universes should be recorded + in the evd datastructure, so this datastructure also needs to be + passed around. *) + let evd, s = Evd.new_sort_variable Evd.univ_rigid evd in + let new_type = EConstr.mkSort s in + evd, new_type + +let c_one evd = +(* In the general case, global references may refer to universe polymorphic + objects, and their universe has to be made afresh when creating an instance. *) + let gr_S = + find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "S" in +(* the long name of "S" was found with the command "About S." 
*) + let gr_O = + find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "O" in + let evd, c_O = Evarutil.new_global evd gr_O in + let evd, c_S = Evarutil.new_global evd gr_S in +(* Here is the construction of a new term by applying functions to argument. *) + evd, EConstr.mkApp (c_S, [| c_O |]) + +let dangling_identity env evd = +(* I call this a dangling identity, because it is not polymorph, but + the type on which it applies is left unspecified, as it is + represented by an existential variable. The declaration for this + existential variable needs to be added in the evd datastructure. *) + let evd, type_type = example_sort evd in + let evd, arg_type = Evarutil.new_evar env evd type_type in +(* Notice the use of a De Bruijn index for the inner occurrence of the + bound variable. *) + evd, EConstr.mkLambda(Names.Name (Names.Id.of_string "x"), arg_type, + EConstr.mkRel 1) + +let dangling_identity2 env evd = +(* This example uses directly a function that produces an evar that + is meant to be a type. *) + let evd, (arg_type, type_type) = + Evarutil.new_type_evar env evd Evd.univ_rigid in + evd, EConstr.mkLambda(Names.Name (Names.Id.of_string "x"), arg_type, + EConstr.mkRel 1) + +let example_sort_app_lambda () = + let env = Global.env () in + let evd = Evd.from_env env in + let evd, c_v = c_one evd in +(* dangling_identity and dangling_identity2 can be used interchangeably here *) + let evd, c_f = dangling_identity2 env evd in + let c_1 = EConstr.mkApp (c_f, [| c_v |]) in + let _ = Feedback.msg_notice + (Printer.pr_econstr_env env evd c_1) in + (* type verification happens here. Type verification will update + existential variable information in the evd part. *) + let evd, the_type = Typing.type_of env evd c_1 in +(* At display time, you will notice that the system knows about the + existential variable being instantiated to the "nat" type, even + though c_1 still contains the meta-variable. 
*) + Feedback.msg_notice + ((Printer.pr_econstr_env env evd c_1) ++ + str " has type " ++ + (Printer.pr_econstr_env env evd the_type)) + + +let c_S evd = + let gr = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "S" in + Evarutil.new_global evd gr + +let c_O evd = + let gr = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "O" in + Evarutil.new_global evd gr + +let c_E evd = + let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "EvenNat" in + Evarutil.new_global evd gr + +let c_D evd = + let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "tuto_div2" in + Evarutil.new_global evd gr + +let c_Q evd = + let gr = find_reference "Tuto3" ["Coq"; "Init"; "Logic"] "eq" in + Evarutil.new_global evd gr + +let c_R evd = + let gr = find_reference "Tuto3" ["Coq"; "Init"; "Logic"] "eq_refl" in + Evarutil.new_global evd gr + +let c_N evd = + let gr = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "nat" in + Evarutil.new_global evd gr + +let c_C evd = + let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "C" in + Evarutil.new_global evd gr + +let c_F evd = + let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "S_ev" in + Evarutil.new_global evd gr + +let c_P evd = + let gr = find_reference "Tuto3" ["Tuto3"; "Data"] "s_half_proof" in + Evarutil.new_global evd gr + +(* If c_S was universe polymorphic, we should have created a new constant + at each iteration of buildup. 
*) +let mk_nat evd n = + let evd, c_S = c_S evd in + let evd, c_O = c_O evd in + let rec buildup = function + | 0 -> c_O + | n -> EConstr.mkApp (c_S, [| buildup (n - 1) |]) in + if n <= 0 then evd, c_O else evd, buildup n + +let example_classes n = + let env = Global.env () in + let evd = Evd.from_env env in + let evd, c_n = mk_nat evd n in + let evd, n_half = mk_nat evd (n / 2) in + let evd, c_N = c_N evd in + let evd, c_div = c_D evd in + let evd, c_even = c_E evd in + let evd, c_Q = c_Q evd in + let evd, c_R = c_R evd in + let arg_type = EConstr.mkApp (c_even, [| c_n |]) in + let evd0 = evd in + let evd, instance = Evarutil.new_evar env evd arg_type in + let c_half = EConstr.mkApp (c_div, [|c_n; instance|]) in + let _ = Feedback.msg_notice (Printer.pr_econstr_env env evd c_half) in + let evd, the_type = Typing.type_of env evd c_half in + let _ = Feedback.msg_notice (Printer.pr_econstr_env env evd c_half) in + let proved_equality = + EConstr.mkCast(EConstr.mkApp (c_R, [| c_N; c_half |]), Constr.DEFAULTcast, + EConstr.mkApp (c_Q, [| c_N; c_half; n_half|])) in +(* This is where we force the system to compute with type classes. *) +(* Question to coq developers: why do we pass two evd arguments to + solve_remaining_evars? Is the choice of evd0 relevant here? *) + let evd = Pretyping.solve_remaining_evars + (Pretyping.default_inference_flags true) env evd ~initial:evd0 in + let evd, final_type = Typing.type_of env evd proved_equality in + Feedback.msg_notice (Printer.pr_econstr_env env evd proved_equality) + +(* This function, together with definitions in Data.v, shows how to + trigger automatic proofs at the time of typechecking, based on + canonical structures. + + n is a number for which we want to find the half (and a proof that + this half is indeed the half) +*) +let example_canonical n = + let env = Global.env () in + let evd = Evd.from_env env in +(* Construct a natural representation of this integer. 
*) + let evd, c_n = mk_nat evd n in +(* terms for "nat", "eq", "S_ev", "eq_refl", "C" *) + let evd, c_N = c_N evd in + let evd, c_F = c_F evd in + let evd, c_R = c_R evd in + let evd, c_C = c_C evd in + let evd, c_P = c_P evd in +(* the last argument of C *) + let refl_term = EConstr.mkApp (c_R, [|c_N; c_n |]) in +(* Now we build two existential variables, for the value of the half and for + the "S_ev" structure that triggers the proof search. *) + let evd, ev1 = Evarutil.new_evar env evd c_N in +(* This is the type for the second existential variable *) + let csev = EConstr.mkApp (c_F, [| ev1 |]) in + let evd, ev2 = Evarutil.new_evar env evd csev in +(* Now we build the C structure. *) + let test_term = EConstr.mkApp (c_C, [| c_n; ev1; ev2; refl_term |]) in +(* Type-checking this term will compute values for the existential variables *) + let evd, final_type = Typing.type_of env evd test_term in +(* The computed type has two parameters, the second one is the proof. *) + let value = match EConstr.kind evd final_type with + | Constr.App(_, [| _; the_half |]) -> the_half + | _ -> failwith "expecting the whole type to be \"cmp _ the_half\"" in + let _ = Feedback.msg_notice (Printer.pr_econstr_env env evd value) in +(* I wish for a nicer way to get the value of ev2 in the evar_map *) + let prf_struct = EConstr.of_constr (EConstr.to_constr evd ev2) in + let the_prf = EConstr.mkApp (c_P, [| ev1; prf_struct |]) in + let evd, the_statement = Typing.type_of env evd the_prf in + Feedback.msg_notice + (Printer.pr_econstr_env env evd the_prf ++ str " has type " ++ + Printer.pr_econstr_env env evd the_statement) diff --git a/doc/plugin_tutorial/tuto3/src/construction_game.mli b/doc/plugin_tutorial/tuto3/src/construction_game.mli new file mode 100644 index 0000000000..1832ed6630 --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/construction_game.mli @@ -0,0 +1,4 @@ +val dangling_identity : Environ.env -> Evd.evar_map -> Evd.evar_map * EConstr.t +val example_sort_app_lambda : 
unit -> unit +val example_classes : int -> unit +val example_canonical : int -> unit diff --git a/doc/plugin_tutorial/tuto3/src/dune b/doc/plugin_tutorial/tuto3/src/dune new file mode 100644 index 0000000000..ba6d8b288f --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/dune @@ -0,0 +1,10 @@ +(library + (name tuto3_plugin) + (public_name coq.plugins.tutorial.p3) + (flags :standard -warn-error -3) + (libraries coq.plugins.ltac)) + +(rule + (targets g_tuto3.ml) + (deps (:pp-file g_tuto3.mlg)) + (action (run coqpp %{pp-file}))) diff --git a/doc/plugin_tutorial/tuto3/src/g_tuto3.mlg b/doc/plugin_tutorial/tuto3/src/g_tuto3.mlg new file mode 100644 index 0000000000..82ba45726e --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/g_tuto3.mlg @@ -0,0 +1,46 @@ +DECLARE PLUGIN "tuto3_plugin" + +{ + +open Ltac_plugin + +open Construction_game + +(* This one is necessary, to avoid message about missing wit_string *) +open Stdarg + +} + +VERNAC COMMAND EXTEND ShowTypeConstruction CLASSIFIED AS QUERY +| [ "Tuto3_1" ] -> + { let env = Global.env () in + let evd = Evd.from_env env in + let evd, s = Evd.new_sort_variable Evd.univ_rigid evd in + let new_type_2 = EConstr.mkSort s in + let evd, _ = + Typing.type_of (Global.env()) (Evd.from_env (Global.env())) new_type_2 in + Feedback.msg_notice + (Printer.pr_econstr_env env evd new_type_2) } +END + +VERNAC COMMAND EXTEND ShowOneConstruction CLASSIFIED AS QUERY +| [ "Tuto3_2" ] -> { example_sort_app_lambda () } +END + +TACTIC EXTEND collapse_hyps +| [ "pack" "hypothesis" ident(i) ] -> + { Tuto_tactic.pack_tactic i } +END + +(* More advanced examples, where automatic proof happens but + no tactic is being called explicitely. The first one uses + type classes. *) +VERNAC COMMAND EXTEND TriggerClasses CLASSIFIED AS QUERY +| [ "Tuto3_3" int(n) ] -> { example_classes n } +END + +(* The second one uses canonical structures. 
*) +VERNAC COMMAND EXTEND TriggerCanonical CLASSIFIED AS QUERY +| [ "Tuto3_4" int(n) ] -> { example_canonical n } +END + diff --git a/doc/plugin_tutorial/tuto3/src/tuto3_plugin.mlpack b/doc/plugin_tutorial/tuto3/src/tuto3_plugin.mlpack new file mode 100644 index 0000000000..f4645ad7ed --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/tuto3_plugin.mlpack @@ -0,0 +1,3 @@ +Construction_game +Tuto_tactic +G_tuto3 diff --git a/doc/plugin_tutorial/tuto3/src/tuto_tactic.ml b/doc/plugin_tutorial/tuto3/src/tuto_tactic.ml new file mode 100644 index 0000000000..8f2c387d09 --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/tuto_tactic.ml @@ -0,0 +1,143 @@ +open Proofview + +let constants = ref ([] : EConstr.t list) + +(* This is a pattern to collect terms from the Coq memory of valid terms + and proofs. This pattern extends all the way to the definition of function + c_U *) +let collect_constants () = + if (!constants = []) then + let open EConstr in + let open UnivGen in + let find_reference = Coqlib.find_reference [@ocaml.warning "-3"] in + let gr_H = find_reference "Tuto3" ["Tuto3"; "Data"] "pack" in + let gr_M = find_reference "Tuto3" ["Tuto3"; "Data"] "packer" in + let gr_R = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "pair" in + let gr_P = find_reference "Tuto3" ["Coq"; "Init"; "Datatypes"] "prod" in + let gr_U = find_reference "Tuto3" ["Tuto3"; "Data"] "uncover" in + constants := List.map (fun x -> of_constr (constr_of_monomorphic_global x)) + [gr_H; gr_M; gr_R; gr_P; gr_U]; + !constants + else + !constants + +let c_H () = + match collect_constants () with + it :: _ -> it + | _ -> failwith "could not obtain an internal representation of pack" + +let c_M () = + match collect_constants () with + _ :: it :: _ -> it + | _ -> failwith "could not obtain an internal representation of pack_marker" + +let c_R () = + match collect_constants () with + _ :: _ :: it :: _ -> it + | _ -> failwith "could not obtain an internal representation of pair" + +let c_P () = + match 
collect_constants () with + _ :: _ :: _ :: it :: _ -> it + | _ -> failwith "could not obtain an internal representation of prod" + +let c_U () = + match collect_constants () with + _ :: _ :: _ :: _ :: it :: _ -> it + | _ -> failwith "could not obtain an internal representation of prod" + +(* The following tactic is meant to pack an hypothesis when no other + data is already packed. + + The main difficulty in defining this tactic is to understand how to + construct the input expected by apply_in. *) +let package i = Goal.enter begin fun gl -> + Tactics.apply_in true false i + [(* this means that the applied theorem is not to be cleared. *) + None, (CAst.make (c_M (), + (* we don't specialize the theorem with extra values. *) + Tactypes.NoBindings))] + (* we don't destruct the result according to any intro_pattern *) + None + end + +(* This function is meant to observe a type of shape (f a) + and return the value a. *) + +(* Remark by Maxime: look for destApp combinator. *) +let unpack_type evd term = + let report () = + CErrors.user_err (Pp.str "expecting a packed type") in + match EConstr.kind evd term with + | Constr.App (_, [| ty |]) -> ty + | _ -> report () + +(* This function is meant to observe a type of shape + A -> pack B -> C and return A, B, C + but it is not used in the current version of our tactic. + It is kept as an example. *) +let two_lambda_pattern evd term = + let report () = + CErrors.user_err (Pp.str "expecting two nested implications") in +(* Note that pattern-matching is always done through the EConstr.kind function, + which only provides one-level deep patterns. 
*) + match EConstr.kind evd term with + (* Here we recognize the outer implication *) + | Constr.Prod (_, ty1, l1) -> + (* Here we recognize the inner implication *) + (match EConstr.kind evd l1 with + | Constr.Prod (n2, packed_ty2, deep_conclusion) -> + (* Here we recognized that the second type is an application *) + ty1, unpack_type evd packed_ty2, deep_conclusion + | _ -> report ()) + | _ -> report () + +(* In the environment of the goal, we can get the type of an assumption + directly by a lookup. The other solution is to call a low-cost retyping + function like *) +let get_type_of_hyp env id = + match EConstr.lookup_named id env with + | Context.Named.Declaration.LocalAssum (_, ty) -> ty + | _ -> CErrors.user_err (let open Pp in + str (Names.Id.to_string id) ++ + str " is not a plain hypothesis") + +let repackage i h_hyps_id = Goal.enter begin fun gl -> + let env = Goal.env gl in + let evd = Tacmach.New.project gl in + let concl = Tacmach.New.pf_concl gl in + let (ty1 : EConstr.t) = get_type_of_hyp env i in + let (packed_ty2 : EConstr.t) = get_type_of_hyp env h_hyps_id in + let ty2 = unpack_type evd packed_ty2 in + let new_packed_type = EConstr.mkApp (c_P (), [| ty1; ty2 |]) in + let open EConstr in + let new_packed_value = + mkApp (c_R (), [| ty1; ty2; mkVar i; + mkApp (c_U (), [| ty2; mkVar h_hyps_id|]) |]) in + Refine.refine ~typecheck:true begin fun evd -> + let evd, new_goal = Evarutil.new_evar env evd + (mkProd (Names.Name.Anonymous, + mkApp(c_H (), [| new_packed_type |]), + Vars.lift 1 concl)) in + evd, mkApp (new_goal, + [|mkApp(c_M (), [|new_packed_type; new_packed_value |]) |]) + end + end + +let pack_tactic i = + let h_hyps_id = (Names.Id.of_string "packed_hyps") in + Proofview.Goal.enter begin fun gl -> + let hyps = Environ.named_context_val (Proofview.Goal.env gl) in + if not (Termops.mem_named_context_val i hyps) then + (CErrors.user_err + (Pp.str ("no hypothesis named" ^ (Names.Id.to_string i)))) + else + if Termops.mem_named_context_val 
h_hyps_id hyps then + tclTHEN (repackage i h_hyps_id) + (tclTHEN (Tactics.clear [h_hyps_id; i]) + (Tactics.introduction h_hyps_id)) + else + tclTHEN (package i) + (tclTHEN (Tactics.rename_hyp [i, h_hyps_id]) + (Tactics.move_hyp h_hyps_id Logic.MoveLast)) + end diff --git a/doc/plugin_tutorial/tuto3/src/tuto_tactic.mli b/doc/plugin_tutorial/tuto3/src/tuto_tactic.mli new file mode 100644 index 0000000000..dbf6cf48e2 --- /dev/null +++ b/doc/plugin_tutorial/tuto3/src/tuto_tactic.mli @@ -0,0 +1,3 @@ +val two_lambda_pattern : + Evd.evar_map -> EConstr.t -> EConstr.t * EConstr.t * EConstr.t +val pack_tactic : Names.Id.t -> unit Proofview.tactic diff --git a/doc/plugin_tutorial/tuto3/theories/Data.v b/doc/plugin_tutorial/tuto3/theories/Data.v new file mode 100644 index 0000000000..f7395d686b --- /dev/null +++ b/doc/plugin_tutorial/tuto3/theories/Data.v @@ -0,0 +1,73 @@ + + +Inductive pack (A: Type) : Type := + packer : A -> pack A. + +Arguments packer {A}. + +Definition uncover (A : Type) (packed : pack A) : A := + match packed with packer v => v end. + +Notation "!!!" := (pack _) (at level 0, only printing). + +(* The following data is used as material for automatic proofs + based on type classes. *) + +Class EvenNat the_even := {half : nat; half_prop : 2 * half = the_even}. + +Instance EvenNat0 : EvenNat 0 := {half := 0; half_prop := eq_refl}. + +Lemma even_rec n h : 2 * h = n -> 2 * S h = S (S n). +Proof. + intros []. + simpl. rewrite <-plus_n_O, <-plus_n_Sm. + reflexivity. +Qed. + +Instance EvenNat_rec n (p : EvenNat n) : EvenNat (S (S n)) := + {half := S (@half _ p); half_prop := even_rec n (@half _ p) (@half_prop _ p)}. + +Definition tuto_div2 n (p : EvenNat n) := @half _ p. + +(* to be used in the following examples +Compute (@half 8 _). + +Check (@half_prop 8 _). + +Check (@half_prop 7 _). + +and in command Tuto3_3 8. *) + +(* The following data is used as material for automatic proofs + based on canonical structures. 
*) + +Record S_ev n := Build_S_ev {double_of : nat; _ : 2 * n = double_of}. + +Definition s_half_proof n (r : S_ev n) : 2 * n = double_of n r := + match r with Build_S_ev _ _ h => h end. + +Canonical Structure can_ev_default n d (Pd : 2 * n = d) : S_ev n := + Build_S_ev n d Pd. + +Canonical Structure can_ev0 : S_ev 0 := + Build_S_ev 0 0 (@eq_refl _ 0). + +Lemma can_ev_rec n : forall (s : S_ev n), S_ev (S n). +Proof. +intros s; exists (S (S (double_of _ s))). +destruct s as [a P]. +exact (even_rec _ _ P). +Defined. + +Canonical Structure can_ev_rec. + +Record cmp (n : nat) (k : nat) := + C {h : S_ev k; _ : double_of k h = n}. + +(* To be used in, e.g., + +Check (C _ _ _ eq_refl : cmp 6 _). + +Check (C _ _ _ eq_refl : cmp 7 _). + +*) diff --git a/doc/plugin_tutorial/tuto3/theories/Loader.v b/doc/plugin_tutorial/tuto3/theories/Loader.v new file mode 100644 index 0000000000..1351cff63b --- /dev/null +++ b/doc/plugin_tutorial/tuto3/theories/Loader.v @@ -0,0 +1,3 @@ +From Tuto3 Require Export Data. + +Declare ML Module "tuto3_plugin". diff --git a/doc/plugin_tutorial/tuto3/theories/test.v b/doc/plugin_tutorial/tuto3/theories/test.v new file mode 100644 index 0000000000..43204ddf34 --- /dev/null +++ b/doc/plugin_tutorial/tuto3/theories/test.v @@ -0,0 +1,23 @@ +(* to be used e.g. in : coqtop -I src -R theories Tuto3 < theories/test.v *) + +Require Import Tuto3.Loader. + +(* This should print Type. *) +Tuto3_1. + +(* This should print a term that contains an existential variable. *) +(* And then print the same term, where the variable has been correctly + instantiated. *) +Tuto3_2. + +Lemma tutu x y (A : 0 < x) (B : 10 < y) : True. +Proof. +pack hypothesis A. +(* Hypothesis A should have disappeared and a "packed_hyps" hypothesis + should have appeared, with unreadable content. *) +pack hypothesis B. +(* Hypothesis B should have disappeared *) +destruct packed_hyps as [unpacked_hyps]. +(* Hypothesis unpacked_hyps should contain the previous contents of A and B. *) +exact I. 
+Qed. diff --git a/doc/sphinx/README.rst b/doc/sphinx/README.rst index a20b74822c..e4f078c1d6 100644 --- a/doc/sphinx/README.rst +++ b/doc/sphinx/README.rst @@ -416,12 +416,12 @@ Omitting annotations DO .. code:: - .. tacv:: assert @form as @intro_pattern + .. tacv:: assert @form as @simple_intropattern DON'T .. code:: - .. tacv:: assert form as intro_pattern + .. tacv:: assert form as simple_intropattern Using the ``.. coqtop::`` directive for syntax highlighting ----------------------------------------------------------- diff --git a/doc/sphinx/README.template.rst b/doc/sphinx/README.template.rst index 11f0cdc008..81f25bf274 100644 --- a/doc/sphinx/README.template.rst +++ b/doc/sphinx/README.template.rst @@ -172,12 +172,12 @@ Omitting annotations DO .. code:: - .. tacv:: assert @form as @intro_pattern + .. tacv:: assert @form as @simple_intropattern DON'T .. code:: - .. tacv:: assert form as intro_pattern + .. tacv:: assert form as simple_intropattern Using the ``.. coqtop::`` directive for syntax highlighting ----------------------------------------------------------- diff --git a/doc/sphinx/addendum/generalized-rewriting.rst b/doc/sphinx/addendum/generalized-rewriting.rst index e468cc63cd..b606fb4dd2 100644 --- a/doc/sphinx/addendum/generalized-rewriting.rst +++ b/doc/sphinx/addendum/generalized-rewriting.rst @@ -714,47 +714,47 @@ following grammar: .. 
productionlist:: rewriting s, t, u : `strategy` - : | `lemma` - : | `lemma_right_to_left` - : | `failure` - : | `identity` - : | `reflexivity` - : | `progress` - : | `failure_catch` - : | `composition` - : | `left_biased_choice` - : | `iteration_one_or_more` - : | `iteration_zero_or_more` - : | `one_subterm` - : | `all_subterms` - : | `innermost_first` - : | `outermost_first` - : | `bottom_up` - : | `top_down` - : | `apply_hint` - : | `any_of_the_terms` - : | `apply_reduction` - : | `fold_expression` + : `lemma` + : `lemma_right_to_left` + : `failure` + : `identity` + : `reflexivity` + : `progress` + : `failure_catch` + : `composition` + : `left_biased_choice` + : `iteration_one_or_more` + : `iteration_zero_or_more` + : `one_subterm` + : `all_subterms` + : `innermost_first` + : `outermost_first` + : `bottom_up` + : `top_down` + : `apply_hint` + : `any_of_the_terms` + : `apply_reduction` + : `fold_expression` .. productionlist:: rewriting - strategy : "(" `s` ")" + strategy : ( `s` ) lemma : `c` - lemma_right_to_left : "<-" `c` - failure : `fail` - identity : `id` - reflexivity : `refl` - progress : `progress` `s` - failure_catch : `try` `s` - composition : `s` ";" `u` + lemma_right_to_left : <- `c` + failure : fail + identity : id + reflexivity : refl + progress : progress `s` + failure_catch : try `s` + composition : `s` ; `u` left_biased_choice : choice `s` `t` - iteration_one_or_more : `repeat` `s` - iteration_zero_or_more : `any` `s` + iteration_one_or_more : repeat `s` + iteration_zero_or_more : any `s` one_subterm : subterm `s` all_subterms : subterms `s` - innermost_first : `innermost` `s` - outermost_first : `outermost` `s` - bottom_up : `bottomup` `s` - top_down : `topdown` `s` + innermost_first : innermost `s` + outermost_first : outermost `s` + bottom_up : bottomup `s` + top_down : topdown `s` apply_hint : hints `hintdb` any_of_the_terms : terms (`c`)+ apply_reduction : eval `redexpr` @@ -767,7 +767,7 @@ primitive fixpoint operator: .. 
productionlist:: rewriting try `s` : choice `s` `id` any `s` : fix `u`. try (`s` ; `u`) - repeat `s` : `s` ; `any` `s` + repeat `s` : `s` ; any `s` bottomup s : fix `bu`. (choice (progress (subterms bu)) s) ; try bu topdown s : fix `td`. (choice s (progress (subterms td))) ; try td innermost s : fix `i`. (choice (subterm i) s) diff --git a/doc/sphinx/addendum/implicit-coercions.rst b/doc/sphinx/addendum/implicit-coercions.rst index 64e2d7c4ab..e5b41be691 100644 --- a/doc/sphinx/addendum/implicit-coercions.rst +++ b/doc/sphinx/addendum/implicit-coercions.rst @@ -41,8 +41,8 @@ Formally, the syntax of a classes is defined as: .. productionlist:: class: `qualid` - : | Sortclass - : | Funclass + : Sortclass + : Funclass Coercions @@ -184,10 +184,10 @@ Figure :ref:`vernacular` as follows: \comindex{Hypothesis \mbox{\rm (and coercions)}} .. productionlist:: - assumption : assumption_keyword assums . - assums : simple_assums - : | (simple_assums) ... (simple_assums) - simple_assums : ident ... ident :[>] term + assumption : `assumption_keyword` `assums` . + assums : `simple_assums` + : (`simple_assums`) ... (`simple_assums`) + simple_assums : `ident` ... `ident` :[>] `term` If the extra ``>`` is present before the type of some assumptions, these assumptions are declared as coercions. @@ -203,7 +203,7 @@ grammar of inductive types from Figure :ref:`vernacular` as follows: .. productionlist:: inductive : Inductive `ind_body` with ... with `ind_body` - : | CoInductive `ind_body` with ... with `ind_body` + : CoInductive `ind_body` with ... with `ind_body` ind_body : `ident` [ `binders` ] : `term` := [[|] `constructor` | ... 
| `constructor` ] constructor : `ident` [ `binders` ] [:[>] `term` ] diff --git a/doc/sphinx/addendum/micromega.rst b/doc/sphinx/addendum/micromega.rst index fd66de427c..b076aac1ed 100644 --- a/doc/sphinx/addendum/micromega.rst +++ b/doc/sphinx/addendum/micromega.rst @@ -38,7 +38,7 @@ The tactics solve propositional formulas parameterized by atomic arithmetic expressions interpreted over a domain :math:`D \in \{\mathbb{Z},\mathbb{Q},\mathbb{R}\}`. The syntax of the formulas is the following: - .. productionlist:: `F` + .. productionlist:: F F : A ∣ P ∣ True ∣ False ∣ F ∧ F ∣ F ∨ F ∣ F ↔ F ∣ F → F ∣ ¬ F A : p = p ∣ p > p ∣ p < p ∣ p ≥ p ∣ p ≤ p p : c ∣ x ∣ −p ∣ p − p ∣ p + p ∣ p × p ∣ p ^ n @@ -145,7 +145,7 @@ weakness, the :tacn:`lia` tactic is using recursively a combination of: + linear *positivstellensatz* refutations; + cutting plane proofs; + case split. - + Cutting plane proofs ~~~~~~~~~~~~~~~~~~~~~~ @@ -250,6 +250,16 @@ obtain :math:`-1`. By Theorem :ref:`Psatz <psatz_thm>`, the goal is valid. .. [#] Support for :g:`nat` and :g:`N` is obtained by pre-processing the goal with the ``zify`` tactic. +.. [#] Support for :g:`Z.div` and :g:`Z.modulo` may be obtained by + pre-processing the goal with the ``Z.div_mod_to_equations`` tactic (you may + need to manually run ``zify`` first). +.. [#] Support for :g:`Z.quot` and :g:`Z.rem` may be obtained by pre-processing + the goal with the ``Z.quot_rem_to_equations`` tactic (you may need to manually + run ``zify`` first). +.. [#] Note that support for :g:`Z.div`, :g:`Z.modulo`, :g:`Z.quot`, and + :g:`Z.rem` may be simultaneously obtained by pre-processing the goal with the + ``Z.to_euclidean_division_equations`` tactic (you may need to manually run + ``zify`` first). .. [#] Sources and binaries can be found at https://projects.coin-or.org/Csdp .. [#] Variants deal with equalities and strict inequalities. .. [#] In practice, the oracle might fail to produce such a refutation. 
diff --git a/doc/sphinx/addendum/parallel-proof-processing.rst b/doc/sphinx/addendum/parallel-proof-processing.rst index 8b7214e2ab..903ee115c9 100644 --- a/doc/sphinx/addendum/parallel-proof-processing.rst +++ b/doc/sphinx/addendum/parallel-proof-processing.rst @@ -52,7 +52,7 @@ in interactive mode. It is not strictly mandatory in batch mode if it is not the first time the file is compiled and if the file itself did not change. When the proof does not begin with Proof using, the system records in an -auxiliary file, produced along with the `.vo` file, the list of section +auxiliary file, produced along with the ``.vo`` file, the list of section variables used. Automatic suggestion of proof annotations @@ -154,22 +154,22 @@ to a worker process. The threshold can be configured with Batch mode --------------- -When |Coq| is used as a batch compiler by running `coqc` or `coqtop` --compile, it produces a `.vo` file for each `.v` file. A `.vo` file contains, -among other things, theorem statements and proofs. Hence to produce a -.vo |Coq| need to process all the proofs of the `.v` file. +When |Coq| is used as a batch compiler by running ``coqc``, it produces +a ``.vo`` file for each ``.v`` file. A ``.vo`` file contains, among other +things, theorem statements and proofs. Hence to produce a .vo |Coq| +need to process all the proofs of the ``.v`` file. The asynchronous processing of proofs can decouple the generation of a -compiled file (like the `.vo` one) that can be loaded by ``Require`` from the +compiled file (like the ``.vo`` one) that can be loaded by ``Require`` from the generation and checking of the proof objects. The ``-quick`` flag can be -passed to `coqc` or `coqtop` to produce, quickly, `.vio` files. -Alternatively, when using a Makefile produced by `coq_makefile`, +passed to ``coqc`` or ``coqtop`` to produce, quickly, ``.vio`` files. 
+Alternatively, when using a Makefile produced by ``coq_makefile``, the ``quick`` target can be used to compile all files using the ``-quick`` flag. -A `.vio` file can be loaded using ``Require`` exactly as a `.vo` file but +A ``.vio`` file can be loaded using ``Require`` exactly as a ``.vo`` file but proofs will not be available (the Print command produces an error). Moreover, some universe constraints might be missing, so universes -inconsistencies might go unnoticed. A `.vio` file does not contain proof +inconsistencies might go unnoticed. A ``.vio`` file does not contain proof objects, but proof tasks, i.e. what a worker process can transform into a proof object. @@ -177,52 +177,52 @@ Compiling a set of files with the ``-quick`` flag allows one to work, interactively, on any file without waiting for all the proofs to be checked. -When working interactively, one can fully check all the `.v` files by -running `coqc` as usual. +When working interactively, one can fully check all the ``.v`` files by +running ``coqc`` as usual. -Alternatively one can turn each `.vio` into the corresponding `.vo`. All +Alternatively one can turn each ``.vio`` into the corresponding ``.vo``. All .vio files can be processed in parallel, hence this alternative might be faster. The command ``coqtop -schedule-vio2vo 2 a b c`` can be used to -obtain a good scheduling for two workers to produce `a.vo`, `b.vo`, and -`c.vo`. When using a Makefile produced by `coq_makefile`, the ``vio2vo`` target -can be used for that purpose. Variable `J` should be set to the number +obtain a good scheduling for two workers to produce ``a.vo``, ``b.vo``, and +``c.vo``. When using a Makefile produced by ``coq_makefile``, the ``vio2vo`` target +can be used for that purpose. Variable ``J`` should be set to the number of workers, e.g. ``make vio2vo J=2``. 
The only caveat is that, while the -.vo files obtained from `.vio` files are complete (they contain all proof +.vo files obtained from ``.vio`` files are complete (they contain all proof terms and universe constraints), the satisfiability of all universe constraints has not been checked globally (they are checked to be consistent for every single proof). Constraints will be checked when -these `.vo` files are (recursively) loaded with ``Require``. +these ``.vo`` files are (recursively) loaded with ``Require``. There is an extra, possibly even faster, alternative: just check the -proof tasks stored in `.vio` files without producing the `.vo` files. This +proof tasks stored in ``.vio`` files without producing the ``.vo`` files. This is possibly faster because all the proof tasks are independent, hence one can further partition the job to be done between workers. The ``coqtop -schedule-vio-checking 6 a b c`` command can be used to obtain a -good scheduling for 6 workers to check all the proof tasks of `a.vio`, -`b.vio`, and `c.vio`. Auxiliary files are used to predict how long a proof +good scheduling for 6 workers to check all the proof tasks of ``a.vio``, +``b.vio``, and ``c.vio``. Auxiliary files are used to predict how long a proof task will take, assuming it will take the same amount of time it took last time. When using a Makefile produced by coq_makefile, the -``checkproofs`` target can be used to check all `.vio` files. Variable `J` +``checkproofs`` target can be used to check all ``.vio`` files. Variable ``J`` should be set to the number of workers, e.g. ``make checkproofs J=6``. As -when converting `.vio` files to `.vo` files, universe constraints are not +when converting ``.vio`` files to ``.vo`` files, universe constraints are not checked to be globally consistent. Hence this compilation mode is only useful for quick regression testing and on developments not making -heavy use of the `Type` hierarchy. +heavy use of the ``Type`` hierarchy. 
Limiting the number of parallel workers -------------------------------------------- Many |Coq| processes may run on the same computer, and each of them may -start many additional worker processes. The `coqworkmgr` utility lets +start many additional worker processes. The ``coqworkmgr`` utility lets one limit the number of workers, globally. The utility accepts the ``-j`` argument to specify the maximum number of -workers (defaults to 2). `coqworkmgr` automatically starts in the +workers (defaults to 2). ``coqworkmgr`` automatically starts in the background and prints an environment variable assignment like ``COQWORKMGR_SOCKET=localhost:45634``. The user must set this variable in all the shells from which |Coq| processes will be started. If one uses just one terminal running the bash shell, then ``export ‘coqworkmgr -j 4‘`` will do the job. -After that, all |Coq| processes, e.g. `coqide` and `coqc`, will respect the +After that, all |Coq| processes, e.g. ``coqide`` and ``coqc``, will respect the limit, globally. diff --git a/doc/sphinx/addendum/ring.rst b/doc/sphinx/addendum/ring.rst index 99d689132d..8204d93fa7 100644 --- a/doc/sphinx/addendum/ring.rst +++ b/doc/sphinx/addendum/ring.rst @@ -308,13 +308,13 @@ The syntax for adding a new ring is .. productionlist:: coq ring_mod : abstract | decidable `term` | morphism `term` - : | setoid `term` `term` - : | constants [`ltac`] - : | preprocess [`ltac`] - : | postprocess [`ltac`] - : | power_tac `term` [`ltac`] - : | sign `term` - : | div `term` + : setoid `term` `term` + : constants [`ltac`] + : preprocess [`ltac`] + : postprocess [`ltac`] + : power_tac `term` [`ltac`] + : sign `term` + : div `term` abstract declares the ring as abstract. This is the default. diff --git a/doc/sphinx/language/cic.rst b/doc/sphinx/language/cic.rst index cc5d9d6205..67683902cd 100644 --- a/doc/sphinx/language/cic.rst +++ b/doc/sphinx/language/cic.rst @@ -84,7 +84,7 @@ implemented using *algebraic universes*. 
An algebraic universe :math:`u` is either a variable (a qualified identifier with a number) or a successor of an algebraic universe (an expression :math:`u+1`), or an upper bound of algebraic universes (an -expression :math:`\max(u 1 ,...,u n )`), or the base universe (the expression +expression :math:`\max(u_1 ,...,u_n )`), or the base universe (the expression :math:`0`) which corresponds, in the arity of template polymorphic inductive types (see Section :ref:`well-formed-inductive-definitions`), @@ -117,24 +117,24 @@ the following rules. #. variables, hereafter ranged over by letters :math:`x`, :math:`y`, etc., are terms #. constants, hereafter ranged over by letters :math:`c`, :math:`d`, etc., are terms. #. if :math:`x` is a variable and :math:`T`, :math:`U` are terms then - :math:`∀ x:T,U` (:g:`forall x:T, U` in |Coq| concrete syntax) is a term. - If :math:`x` occurs in :math:`U`, :math:`∀ x:T,U` reads as + :math:`∀ x:T,~U` (:g:`forall x:T, U` in |Coq| concrete syntax) is a term. + If :math:`x` occurs in :math:`U`, :math:`∀ x:T,~U` reads as “for all :math:`x` of type :math:`T`, :math:`U`”. - As :math:`U` depends on :math:`x`, one says that :math:`∀ x:T,U` is + As :math:`U` depends on :math:`x`, one says that :math:`∀ x:T,~U` is a *dependent product*. If :math:`x` does not occur in :math:`U` then - :math:`∀ x:T,U` reads as + :math:`∀ x:T,~U` reads as “if :math:`T` then :math:`U`”. A *non dependent product* can be written: :math:`T \rightarrow U`. #. if :math:`x` is a variable and :math:`T`, :math:`u` are terms then - :math:`λ x:T . u` (:g:`fun x:T => u` + :math:`λ x:T .~u` (:g:`fun x:T => u` in |Coq| concrete syntax) is a term. This is a notation for the - λ-abstraction of λ-calculus :cite:`Bar81`. The term :math:`λ x:T . u` is a function + λ-abstraction of λ-calculus :cite:`Bar81`. The term :math:`λ x:T .~u` is a function which maps elements of :math:`T` to the expression :math:`u`. #. 
if :math:`t` and :math:`u` are terms then :math:`(t~u)` is a term (:g:`t u` in |Coq| concrete syntax). The term :math:`(t~u)` reads as “t applied to u”. -#. if :g:`x` is a variable, and :math:`t`, :math:`T` and :math:`u` are - terms then :g:`let x:=t:T in u` is +#. if :math:`x` is a variable, and :math:`t`, :math:`T` and :math:`u` are + terms then :math:`\letin{x}{t:T}{u}` is a term which denotes the term :math:`u` where the variable :math:`x` is locally bound to :math:`t` of type :math:`T`. This stands for the common “let-in” construction of functional programs such as ML or Scheme. @@ -145,7 +145,7 @@ the following rules. **Free variables.** The notion of free variables is defined as usual. In the expressions -:g:`λx:T. U` and :g:`∀ x:T, U` the occurrences of :math:`x` in :math:`U` are bound. +:math:`λx:T.~U` and :math:`∀ x:T,~U` the occurrences of :math:`x` in :math:`U` are bound. .. _Substitution: @@ -172,11 +172,11 @@ implicative proposition, to denote :math:`\nat →\Prop` which is the type of unary predicates over the natural numbers, etc. Let us assume that ``mult`` is a function of type :math:`\nat→\nat→\nat` and ``eqnat`` a -predicate of type \nat→\nat→ \Prop. The λ-abstraction can serve to build -“ordinary” functions as in :math:`λ x:\nat.(\kw{mult}~x~x)` (i.e. +predicate of type :math:`\nat→\nat→ \Prop`. The λ-abstraction can serve to build +“ordinary” functions as in :math:`λ x:\nat.~(\kw{mult}~x~x)` (i.e. :g:`fun x:nat => mult x x` in |Coq| notation) but may build also predicates over the natural -numbers. For instance :math:`λ x:\nat.(\kw{eqnat}~x~0)` +numbers. For instance :math:`λ x:\nat.~(\kw{eqnat}~x~0)` (i.e. :g:`fun x:nat => eqnat x 0` in |Coq| notation) will represent the predicate of one variable :math:`x` which asserts the equality of :math:`x` with :math:`0`. This predicate has type @@ -186,7 +186,7 @@ object :math:`P~t` of type :math:`\Prop`, namely a proposition. 
Furthermore :g:`forall x:nat, P x` will represent the type of functions which associate to each natural number :math:`n` an object of type :math:`(P~n)` and -consequently represent the type of proofs of the formula “:math:`∀ x. P(x`)”. +consequently represent the type of proofs of the formula “:math:`∀ x.~P(x)`”. .. _Typing-rules: @@ -206,7 +206,7 @@ A *local context* is an ordered list of *local declarations* of names which we call *variables*. The declaration of some variable :math:`x` is either a *local assumption*, written :math:`x:T` (:math:`T` is a type) or a *local definition*, written :math:`x:=t:T`. We use brackets to write local contexts. -A typical example is :math:`[x:T;y:=u:U;z:V]`. Notice that the variables +A typical example is :math:`[x:T;~y:=u:U;~z:V]`. Notice that the variables declared in a local context must be distinct. If :math:`Γ` is a local context that declares some :math:`x`, we write :math:`x ∈ Γ`. By writing :math:`(x:T) ∈ Γ` we mean that either :math:`x:T` is an @@ -232,9 +232,9 @@ A *global assumption* will be represented in the global environment as :math:`(c:T)` which assumes the name :math:`c` to be of some type :math:`T`. A *global definition* will be represented in the global environment as :math:`c:=t:T` which defines the name :math:`c` to have value :math:`t` and type :math:`T`. We shall call -such names *constants*. For the rest of the chapter, the :math:`E;c:T` denotes +such names *constants*. For the rest of the chapter, the :math:`E;~c:T` denotes the global environment :math:`E` enriched with the global assumption :math:`c:T`. -Similarly, :math:`E;c:=t:T` denotes the global environment :math:`E` enriched with the +Similarly, :math:`E;~c:=t:T` denotes the global environment :math:`E` enriched with the global definition :math:`(c:=t:T)`. The rules for inductive definitions (see Section @@ -284,14 +284,14 @@ following rules. s \in \Sort c \notin E ------------ - \WF{E;c:T}{} + \WF{E;~c:T}{} .. 
can decide if two programs are *intensionally* equal (one says
In any global environment :math:`E` and local context :math:`Γ`, we want to identify any object :math:`a` (of type -:math:`T`) with the application :math:`((λ x:T. x) a)`. We define for +:math:`T`) with the application :math:`((λ x:T.~x)~a)`. We define for this a *reduction* (or a *conversion*) rule we call :math:`β`: .. math:: - E[Γ] ⊢ ((λx:T. t) u)~\triangleright_β~\subst{t}{x}{u} + E[Γ] ⊢ ((λx:T.~t)~u)~\triangleright_β~\subst{t}{x}{u} We say that :math:`\subst{t}{x}{u}` is the *β-contraction* of -:math:`((λx:T. t) u)` and, conversely, that :math:`((λ x:T. t) u)` is the +:math:`((λx:T.~t)~u)` and, conversely, that :math:`((λ x:T.~t)~u)` is the *β-expansion* of :math:`\subst{t}{x}{u}`. According to β-reduction, terms of the *Calculus of Inductive @@ -481,7 +481,7 @@ destroyed, this reduction differs from δ-reduction. It is called \WTEG{u}{U} \WTE{\Gamma::(x:=u:U)}{t}{T} -------------- - E[Γ] ⊢ \letin{x}{u}{t}~\triangleright_ζ~\subst{t}{x}{u} + E[Γ] ⊢ \letin{x}{u:U}{t}~\triangleright_ζ~\subst{t}{x}{u} .. _eta-expansion: @@ -490,10 +490,10 @@ destroyed, this reduction differs from δ-reduction. It is called ~~~~~~~~~~~ Another important concept is η-expansion. It is legal to identify any -term :math:`t` of functional type :math:`∀ x:T, U` with its so-called η-expansion +term :math:`t` of functional type :math:`∀ x:T,~U` with its so-called η-expansion .. math:: - λx:T. (t~x) + λx:T.~(t~x) for :math:`x` an arbitrary variable name fresh in :math:`t`. @@ -503,26 +503,26 @@ for :math:`x` an arbitrary variable name fresh in :math:`t`. We deliberately do not define η-reduction: .. math:: - λ x:T. (t~x) \not\triangleright_η t + λ x:T.~(t~x)~\not\triangleright_η~t This is because, in general, the type of :math:`t` need not to be convertible - to the type of :math:`λ x:T. (t~x)`. E.g., if we take :math:`f` such that: + to the type of :math:`λ x:T.~(t~x)`. E.g., if we take :math:`f` such that: .. math:: - f : ∀ x:\Type(2),\Type(1) + f ~:~ ∀ x:\Type(2),~\Type(1) then .. 
math:: - λ x:\Type(1),(f~x) : ∀ x:\Type(1),\Type(1) + λ x:\Type(1).~(f~x) ~:~ ∀ x:\Type(1),~\Type(1) We could not allow .. math:: - λ x:Type(1),(f x) \triangleright_η f + λ x:\Type(1).~(f~x) ~\triangleright_η~ f - because the type of the reduced term :math:`∀ x:\Type(2),\Type(1)` would not be - convertible to the type of the original term :math:`∀ x:\Type(1),\Type(1).` + because the type of the reduced term :math:`∀ x:\Type(2),~\Type(1)` would not be + convertible to the type of the original term :math:`∀ x:\Type(1),~\Type(1)`. .. _convertibility: @@ -541,10 +541,10 @@ global environment :math:`E` and local context :math:`Γ` iff there exist terms :math:`u_1` and :math:`u_2` such that :math:`E[Γ] ⊢ t_1 \triangleright … \triangleright u_1` and :math:`E[Γ] ⊢ t_2 \triangleright … \triangleright u_2` and either :math:`u_1` and :math:`u_2` are identical, or they are convertible up to η-expansion, -i.e. :math:`u_1` is :math:`λ x:T. u_1'` and :math:`u_2 x` is +i.e. :math:`u_1` is :math:`λ x:T.~u_1'` and :math:`u_2 x` is recursively convertible to :math:`u_1'` , or, symmetrically, -:math:`u_2` is :math:`λx:T. u_2'` -and :math:`u_1 x` is recursively convertible to u_2′ . We then write +:math:`u_2` is :math:`λx:T.~u_2'` +and :math:`u_1 x` is recursively convertible to :math:`u_2'`. We then write :math:`E[Γ] ⊢ t_1 =_{βδιζη} t_2` . Apart from this we consider two instances of polymorphic and @@ -601,8 +601,8 @@ Subtyping rules ------------------- At the moment, we did not take into account one rule between universes -which says that any term in a universe of index i is also a term in -the universe of index i+1 (this is the *cumulativity* rule of |Cic|). +which says that any term in a universe of index :math:`i` is also a term in +the universe of index :math:`i+1` (this is the *cumulativity* rule of |Cic|). 
This property extends the equivalence relation of convertibility into a *subtyping* relation inductively defined by: @@ -614,25 +614,25 @@ a *subtyping* relation inductively defined by: :math:`E[Γ] ⊢ \Prop ≤_{βδιζη} \Type(i)`, for any :math:`i` #. if :math:`E[Γ] ⊢ T =_{βδιζη} U` and :math:`E[Γ::(x:T)] ⊢ T' ≤_{βδιζη} U'` then - :math:`E[Γ] ⊢ ∀x:T, T′ ≤_{βδιζη} ∀ x:U, U′`. + :math:`E[Γ] ⊢ ∀x:T,~T′ ≤_{βδιζη} ∀ x:U,~U′`. #. if :math:`\ind{p}{Γ_I}{Γ_C}` is a universe polymorphic and cumulative (see Chapter :ref:`polymorphicuniverses`) inductive type (see below) and - :math:`(t : ∀Γ_P ,∀Γ_{\mathit{Arr}(t)}, \Sort)∈Γ_I` + :math:`(t : ∀Γ_P ,∀Γ_{\mathit{Arr}(t)}, S)∈Γ_I` and - :math:`(t' : ∀Γ_P' ,∀Γ_{\mathit{Arr}(t)}', \Sort')∈Γ_I` + :math:`(t' : ∀Γ_P' ,∀Γ_{\mathit{Arr}(t)}', S')∈Γ_I` are two different instances of *the same* inductive type (differing only in universe levels) with constructors .. math:: - [c_1 : ∀Γ_P ,∀ T_{1,1} … T_{1,n_1} , t~v_{1,1} … v_{1,m} ;…; - c_k : ∀Γ_P ,∀ T_{k,1} … T_{k,n_k} ,t~v_{n,1} … v_{n,m} ] + [c_1 : ∀Γ_P ,∀ T_{1,1} … T_{1,n_1} ,~t~v_{1,1} … v_{1,m} ;~…;~ + c_k : ∀Γ_P ,∀ T_{k,1} … T_{k,n_k} ,~t~v_{k,1} … v_{k,m} ] and .. math:: - [c_1 : ∀Γ_P' ,∀ T_{1,1}' … T_{1,n_1}' , t'~v_{1,1}' … v_{1,m}' ;…; - c_k : ∀Γ_P' ,∀ T_{k,1}' … T_{k,n_k}' ,t'~v_{n,1}' … v_{n,m}' ] + [c_1 : ∀Γ_P' ,∀ T_{1,1}' … T_{1,n_1}' ,~t'~v_{1,1}' … v_{1,m}' ;~…;~ + c_k : ∀Γ_P' ,∀ T_{k,1}' … T_{k,n_k}' ,~t'~v_{k,1}' … v_{k,m}' ] respectively then @@ -656,8 +656,8 @@ a *subtyping* relation inductively defined by: .. math:: E[Γ] ⊢ A_i ≤_{βδιζη} A_i' - where :math:`Γ_{\mathit{Arr}(t)} = [a_1 : A_1 ; … ; a_l : A_l ]` and - :math:`Γ_{\mathit{Arr}(t)}' = [a_1 : A_1'; … ; a_l : A_l']`. + where :math:`Γ_{\mathit{Arr}(t)} = [a_1 : A_1 ;~ … ;~a_l : A_l ]` and + :math:`Γ_{\mathit{Arr}(t)}' = [a_1 : A_1';~ … ;~a_l : A_l']`. The conversion rule up to subtyping is now exactly: @@ -677,19 +677,19 @@ The conversion rule up to subtyping is now exactly: form*. 
There are several ways (or strategies) to apply the reduction rules. Among them, we have to mention the *head reduction* which will play an important role (see Chapter :ref:`tactics`). Any term :math:`t` can be written as -:math:`λ x_1 :T_1 . … λ x_k :T_k . (t_0~t_1 … t_n )` where :math:`t_0` is not an +:math:`λ x_1 :T_1 .~… λ x_k :T_k .~(t_0~t_1 … t_n )` where :math:`t_0` is not an application. We say then that :math:`t_0` is the *head of* :math:`t`. If we assume -that :math:`t_0` is :math:`λ x:T. u_0` then one step of β-head reduction of :math:`t` is: +that :math:`t_0` is :math:`λ x:T.~u_0` then one step of β-head reduction of :math:`t` is: .. math:: - λ x_1 :T_1 . … λ x_k :T_k . (λ x:T. u_0~t_1 … t_n ) \triangleright - λ (x_1 :T_1 )…(x_k :T_k ). (\subst{u_0}{x}{t_1}~t_2 … t_n ) + λ x_1 :T_1 .~… λ x_k :T_k .~(λ x:T.~u_0~t_1 … t_n ) ~\triangleright~ + λ (x_1 :T_1 )…(x_k :T_k ).~(\subst{u_0}{x}{t_1}~t_2 … t_n ) Iterating the process of head reduction until the head of the reduced term is no more an abstraction leads to the *β-head normal form* of :math:`t`: .. math:: - t \triangleright … \triangleright λ x_1 :T_1 . …λ x_k :T_k . (v~u_1 … u_m ) + t \triangleright … \triangleright λ x_1 :T_1 .~…λ x_k :T_k .~(v~u_1 … u_m ) where :math:`v` is not an abstraction (nor an application). Note that the head normal form must not be confused with the normal form since some :math:`u_i` @@ -713,12 +713,12 @@ Formally, we can represent any *inductive definition* as These inductive definitions, together with global assumptions and global definitions, then form the global environment. Additionally, -for any :math:`p` there always exists :math:`Γ_P =[a_1 :A_1 ;…;a_p :A_p ]` such that +for any :math:`p` there always exists :math:`Γ_P =[a_1 :A_1 ;~…;~a_p :A_p ]` such that each :math:`T` in :math:`(t:T)∈Γ_I \cup Γ_C` can be written as: :math:`∀Γ_P , T'` where :math:`Γ_P` is called the *context of parameters*. 
Furthermore, we must have that each :math:`T` in :math:`(t:T)∈Γ_I` can be written as: :math:`∀Γ_P,∀Γ_{\mathit{Arr}(t)}, S` where -:math:`Γ_{\mathit{Arr}(t)}` is called the *Arity* of the inductive type t and :math:`S` is called -the sort of the inductive type t (not to be confused with :math:`\Sort` which is the set of sorts). +:math:`Γ_{\mathit{Arr}(t)}` is called the *Arity* of the inductive type :math:`t` and :math:`S` is called +the sort of the inductive type :math:`t` (not to be confused with :math:`\Sort` which is the set of sorts). .. example:: @@ -726,8 +726,8 @@ the sort of the inductive type t (not to be confused with :math:`\Sort` which is .. math:: \ind{1}{[\List:\Set→\Set]}{\left[\begin{array}{rcl} - \Nil & : & \forall A:\Set,\List~A \\ - \cons & : & \forall A:\Set, A→ \List~A→ \List~A + \Nil & : & ∀ A:\Set,~\List~A \\ + \cons & : & ∀ A:\Set,~A→ \List~A→ \List~A \end{array} \right]} @@ -771,8 +771,8 @@ the sort of the inductive type t (not to be confused with :math:`\Sort` which is \odd&:&\nat → \Prop \end{array}\right]} {\left[\begin{array}{rcl} \evenO &:& \even~0\\ - \evenS &:& \forall n, \odd~n → \even~(\kw{S}~n)\\ - \oddS &:& \forall n, \even~n → \odd~(\kw{S}~n) + \evenS &:& ∀ n,~\odd~n → \even~(\nS~n)\\ + \oddS &:& ∀ n,~\even~n → \odd~(\nS~n) \end{array}\right]} which corresponds to the result of the |Coq| declaration: @@ -792,7 +792,7 @@ the sort of the inductive type t (not to be confused with :math:`\Sort` which is Types of inductive objects ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -We have to give the type of constants in a global environment E which +We have to give the type of constants in a global environment :math:`E` which contains an inductive declaration. .. inference:: Ind @@ -820,9 +820,9 @@ contains an inductive declaration. 
\begin{array}{l} E[Γ] ⊢ \even : \nat→\Prop\\ E[Γ] ⊢ \odd : \nat→\Prop\\ - E[Γ] ⊢ \even\_O : \even~O\\ - E[Γ] ⊢ \even\_S : \forall~n:\nat, \odd~n → \even~(S~n)\\ - E[Γ] ⊢ \odd\_S : \forall~n:\nat, \even~n → \odd~(S~n) + E[Γ] ⊢ \evenO : \even~\nO\\ + E[Γ] ⊢ \evenS : ∀ n:\nat,~\odd~n → \even~(\nS~n)\\ + E[Γ] ⊢ \oddS : ∀ n:\nat,~\even~n → \odd~(\nS~n) \end{array} @@ -842,11 +842,11 @@ Arity of a given sort +++++++++++++++++++++ A type :math:`T` is an *arity of sort* :math:`s` if it converts to the sort :math:`s` or to a -product :math:`∀ x:T,U` with :math:`U` an arity of sort :math:`s`. +product :math:`∀ x:T,~U` with :math:`U` an arity of sort :math:`s`. .. example:: - :math:`A→\Set` is an arity of sort :math:`\Set`. :math:`∀ A:\Prop,A→ \Prop` is an arity of sort + :math:`A→\Set` is an arity of sort :math:`\Set`. :math:`∀ A:\Prop,~A→ \Prop` is an arity of sort :math:`\Prop`. @@ -858,21 +858,21 @@ sort :math:`s`. .. example:: - :math:`A→ Set` and :math:`∀ A:\Prop,A→ \Prop` are arities. + :math:`A→ \Set` and :math:`∀ A:\Prop,~A→ \Prop` are arities. -Type constructor -++++++++++++++++ -We say that T is a *type of constructor of I* in one of the following +Type of constructor ++++++++++++++++++++ +We say that :math:`T` is a *type of constructor of* :math:`I` in one of the following two cases: + :math:`T` is :math:`(I~t_1 … t_n )` -+ :math:`T` is :math:`∀ x:U,T'` where :math:`T'` is also a type of constructor of :math:`I` ++ :math:`T` is :math:`∀ x:U,~T'` where :math:`T'` is also a type of constructor of :math:`I` .. example:: :math:`\nat` and :math:`\nat→\nat` are types of constructor of :math:`\nat`. - :math:`∀ A:Type,\List~A` and :math:`∀ A:Type,A→\List~A→\List~A` are types of constructor of :math:`\List`. + :math:`∀ A:\Type,~\List~A` and :math:`∀ A:\Type,~A→\List~A→\List~A` are types of constructor of :math:`\List`. .. 
_positivity: @@ -883,7 +883,7 @@ The type of constructor :math:`T` will be said to *satisfy the positivity condition* for a constant :math:`X` in the following cases: + :math:`T=(X~t_1 … t_n )` and :math:`X` does not occur free in any :math:`t_i` -+ :math:`T=∀ x:U,V` and :math:`X` occurs only strictly positively in :math:`U` and the type :math:`V` ++ :math:`T=∀ x:U,~V` and :math:`X` occurs only strictly positively in :math:`U` and the type :math:`V` satisfies the positivity condition for :math:`X`. Strict positivity @@ -895,13 +895,13 @@ cases: + :math:`X` does not occur in :math:`T` + :math:`T` converts to :math:`(X~t_1 … t_n )` and :math:`X` does not occur in any of :math:`t_i` -+ :math:`T` converts to :math:`∀ x:U,V` and :math:`X` does not occur in type :math:`U` but occurs ++ :math:`T` converts to :math:`∀ x:U,~V` and :math:`X` does not occur in type :math:`U` but occurs strictly positively in type :math:`V` + :math:`T` converts to :math:`(I~a_1 … a_m~t_1 … t_p )` where :math:`I` is the name of an inductive declaration of the form .. 
math:: - \ind{m}{I:A}{c_1 :∀ p_1 :P_1 ,… ∀p_m :P_m ,C_1 ;…;c_n :∀ p_1 :P_1 ,… ∀p_m :P_m ,C_n} + \ind{m}{I:A}{c_1 :∀ p_1 :P_1 ,… ∀p_m :P_m ,~C_1 ;~…;~c_n :∀ p_1 :P_1 ,… ∀p_m :P_m ,~C_n} (in particular, it is not mutually defined and it has :math:`m` parameters) and :math:`X` does not occur in @@ -916,7 +916,7 @@ condition* for a constant :math:`X` in the following cases: + :math:`T=(I~b_1 … b_m~u_1 … u_p)`, :math:`I` is an inductive definition with :math:`m` parameters and :math:`X` does not occur in any :math:`u_i` -+ :math:`T=∀ x:U,V` and :math:`X` occurs only strictly positively in :math:`U` and the type :math:`V` ++ :math:`T=∀ x:U,~V` and :math:`X` occurs only strictly positively in :math:`U` and the type :math:`V` satisfies the nested positivity condition for :math:`X` @@ -930,7 +930,6 @@ condition* for a constant :math:`X` in the following cases: Inductive nattree (A:Type) : Type := | leaf : nattree A | node : A -> (nat -> nattree A) -> nattree A. - End TreeExample. Then every instantiated constructor of ``nattree A`` satisfies the nested positivity condition for ``nattree``: @@ -943,7 +942,7 @@ condition* for a constant :math:`X` in the following cases: + Type ``A → (nat → nattree A) → nattree A`` of constructor ``node`` satisfies the positivity condition for ``nattree`` because: - - ``nattree`` occurs only strictly positively in ``A`` ... (bullet 3) + - ``nattree`` occurs only strictly positively in ``A`` ... (bullet 1) - ``nattree`` occurs only strictly positively in ``nat → nattree A`` ... (bullet 3 + 2) @@ -958,8 +957,8 @@ We shall now describe the rules allowing the introduction of a new inductive definition. Let :math:`E` be a global environment and :math:`Γ_P`, :math:`Γ_I`, :math:`Γ_C` be contexts -such that :math:`Γ_I` is :math:`[I_1 :∀ Γ_P ,A_1 ;…;I_k :∀ Γ_P ,A_k]`, and -:math:`Γ_C` is :math:`[c_1:∀ Γ_P ,C_1 ;…;c_n :∀ Γ_P ,C_n ]`. 
Then +such that :math:`Γ_I` is :math:`[I_1 :∀ Γ_P ,A_1 ;~…;~I_k :∀ Γ_P ,A_k]`, and +:math:`Γ_C` is :math:`[c_1:∀ Γ_P ,C_1 ;~…;~c_n :∀ Γ_P ,C_n ]`. Then .. inference:: W-Ind @@ -967,7 +966,7 @@ such that :math:`Γ_I` is :math:`[I_1 :∀ Γ_P ,A_1 ;…;I_k :∀ Γ_P ,A_k]`, (E[Γ_P ] ⊢ A_j : s_j )_{j=1… k} (E[Γ_I ;Γ_P ] ⊢ C_i : s_{q_i} )_{i=1… n} ------------------------------------------ - \WF{E;\ind{p}{Γ_I}{Γ_C}}{Γ} + \WF{E;~\ind{p}{Γ_I}{Γ_C}}{Γ} provided that the following side conditions hold: @@ -990,8 +989,8 @@ the Type hierarchy. .. example:: It is well known that the existential quantifier can be encoded as an - inductive definition. The following declaration introduces the second- - order existential quantifier :math:`∃ X.P(X)`. + inductive definition. The following declaration introduces the + second-order existential quantifier :math:`∃ X.P(X)`. .. coqtop:: in @@ -1028,7 +1027,7 @@ in :math:`\Type`. .. flag:: Auto Template Polymorphism This option, enabled by default, makes every inductive type declared - at level :math:`Type` (without annotations or hiding it behind a + at level :math:`\Type` (without annotations or hiding it behind a definition) template polymorphic. This can be prevented using the ``notemplate`` attribute. @@ -1041,6 +1040,12 @@ in :math:`\Type`. enabled it will prevail over automatic template polymorphism and cause an error when using the ``template`` attribute. +.. warn:: Automatically declaring @ident as template polymorphic. + + Warning ``auto-template`` can be used to find which types are + implicitly declared template polymorphic by :flag:`Auto Template + Polymorphism`. + If :math:`A` is an arity of some sort and :math:`s` is a sort, we write :math:`A_{/s}` for the arity obtained from :math:`A` by replacing its sort with :math:`s`. Especially, if :math:`A` is well-typed in some global environment and local @@ -1049,9 +1054,9 @@ Calculus of Inductive Constructions. The following typing rule is added to the theory. 
Let :math:`\ind{p}{Γ_I}{Γ_C}` be an inductive definition. Let -:math:`Γ_P = [p_1 :P_1 ;…;p_p :P_p ]` be its context of parameters, -:math:`Γ_I = [I_1:∀ Γ_P ,A_1 ;…;I_k :∀ Γ_P ,A_k ]` its context of definitions and -:math:`Γ_C = [c_1 :∀ Γ_P ,C_1 ;…;c_n :∀ Γ_P ,C_n]` its context of constructors, +:math:`Γ_P = [p_1 :P_1 ;~…;~p_p :P_p ]` be its context of parameters, +:math:`Γ_I = [I_1:∀ Γ_P ,A_1 ;~…;~I_k :∀ Γ_P ,A_k ]` its context of definitions and +:math:`Γ_C = [c_1 :∀ Γ_P ,C_1 ;~…;~c_n :∀ Γ_P ,C_n]` its context of constructors, with :math:`c_i` a constructor of :math:`I_{q_i}`. Let :math:`m ≤ p` be the length of the longest prefix of parameters such that the :math:`m` first arguments of all occurrences of all :math:`I_j` in all :math:`C_k` (even the occurrences in the @@ -1071,15 +1076,15 @@ uniform parameters of :math:`Γ_P` . We have: \end{array} \right. ----------------------------- - E[] ⊢ I_j~q_1 … q_r :∀ [p_{r+1} :P_{r+1} ;…;p_p :P_p], (A_j)_{/s_j} + E[] ⊢ I_j~q_1 … q_r :∀ [p_{r+1} :P_{r+1} ;~…;~p_p :P_p], (A_j)_{/s_j} provided that the following side conditions hold: + :math:`Γ_{P′}` is the context obtained from :math:`Γ_P` by replacing each :math:`P_l` that is an arity with :math:`P_l'` for :math:`1≤ l ≤ r` (notice that :math:`P_l` arity implies :math:`P_l'` - arity since :math:`(E[] ⊢ P_l' ≤_{βδιζη} \subst{P_l}{p_u}{q_u}_{u=1\ldots l-1} )`; + arity since :math:`E[] ⊢ P_l' ≤_{βδιζη} \subst{P_l}{p_u}{q_u}_{u=1\ldots l-1}`); + there are sorts :math:`s_i` , for :math:`1 ≤ i ≤ k` such that, for - :math:`Γ_{I'} = [I_1 :∀ Γ_{P'} ,(A_1)_{/s_1} ;…;I_k :∀ Γ_{P'} ,(A_k)_{/s_k}]` + :math:`Γ_{I'} = [I_1 :∀ Γ_{P'} ,(A_1)_{/s_1} ;~…;~I_k :∀ Γ_{P'} ,(A_k)_{/s_k}]` we have :math:`(E[Γ_{I′} ;Γ_{P′}] ⊢ C_i : s_{q_i})_{i=1… n}` ; + the sorts :math:`s_i` are such that all eliminations, to :math:`\Prop`, :math:`\Set` and :math:`\Type(j)`, are allowed @@ -1097,7 +1102,7 @@ replacements of sorts, needed for this derivation, in the parameters that are arities (this is possible 
because :math:`\ind{p}{Γ_I}{Γ_C}` well-formed implies that :math:`\ind{p}{Γ_{I'}}{Γ_{C'}}` is well-formed and has the same allowed eliminations, where :math:`Γ_{I′}` is defined as above and -:math:`Γ_{C′} = [c_1 :∀ Γ_{P′} ,C_1 ;…;c_n :∀ Γ_{P′} ,C_n ]`). That is, the changes in the +:math:`Γ_{C′} = [c_1 :∀ Γ_{P′} ,C_1 ;~…;~c_n :∀ Γ_{P′} ,C_n ]`). That is, the changes in the types of each partial instance :math:`q_1 … q_r` can be characterized by the ordered sets of arity sorts among the types of parameters, and to each signature is associated a new inductive definition with fresh names. @@ -1199,10 +1204,11 @@ a strongly normalizing reduction, we cannot accept any sort of recursion (even terminating). So the basic idea is to restrict ourselves to primitive recursive functions and functionals. -For instance, assuming a parameter :g:`A:Set` exists in the local context, -we want to build a function length of type :g:`list A -> nat` which computes -the length of the list, such that :g:`(length (nil A)) = O` and :g:`(length -(cons A a l)) = (S (length l))`. We want these equalities to be +For instance, assuming a parameter :math:`A:\Set` exists in the local context, +we want to build a function :math:`\length` of type :math:`\List~A → \nat` which computes +the length of the list, such that :math:`(\length~(\Nil~A)) = \nO` and +:math:`(\length~(\cons~A~a~l)) = (\nS~(\length~l))`. +We want these equalities to be recognized implicitly and taken into account in the conversion rule. From the logical point of view, we have built a type family by giving @@ -1216,22 +1222,22 @@ In case the inductive definition is effectively a recursive one, we want to capture the extra property that we have built the smallest fixed point of this recursive equation. This says that we are only manipulating finite objects. This analysis provides induction -principles. For instance, in order to prove :g:`∀ l:list A,(has_length A l -(length l))` it is enough to prove: +principles. 
For instance, in order to prove +:math:`∀ l:\List~A,~(\kw{has}\_\kw{length}~A~l~(\length~l))` it is enough to prove: -+ :g:`(has_length A (nil A) (length (nil A)))` -+ :g:`∀ a:A, ∀ l:list A, (has_length A l (length l)) →` - :g:`(has_length A (cons A a l) (length (cons A a l)))` ++ :math:`(\kw{has}\_\kw{length}~A~(\Nil~A)~(\length~(\Nil~A)))` ++ :math:`∀ a:A,~∀ l:\List~A,~(\kw{has}\_\kw{length}~A~l~(\length~l)) →` + :math:`(\kw{has}\_\kw{length}~A~(\cons~A~a~l)~(\length~(\cons~A~a~l)))` -which given the conversion equalities satisfied by length is the same +which given the conversion equalities satisfied by :math:`\length` is the same as proving: -+ :g:`(has_length A (nil A) O)` -+ :g:`∀ a:A, ∀ l:list A, (has_length A l (length l)) →` - :g:`(has_length A (cons A a l) (S (length l)))` ++ :math:`(\kw{has}\_\kw{length}~A~(\Nil~A)~\nO)` ++ :math:`∀ a:A,~∀ l:\List~A,~(\kw{has}\_\kw{length}~A~l~(\length~l)) →` + :math:`(\kw{has}\_\kw{length}~A~(\cons~A~a~l)~(\nS~(\length~l)))` One conceptually simple way to do that, following the basic scheme @@ -1261,7 +1267,7 @@ The |Coq| term for this proof will be written: .. math:: - \Match~m~\with~(c_1~x_{11} ... x_{1p_1} ) ⇒ f_1 | … | (c_n~x_{n1} ... x_{np_n} ) ⇒ f_n \kwend + \Match~m~\with~(c_1~x_{11} ... x_{1p_1} ) ⇒ f_1 | … | (c_n~x_{n1} ... x_{np_n} ) ⇒ f_n~\kwend In this expression, if :math:`m` eventually happens to evaluate to :math:`(c_i~u_1 … u_{p_i})` then the expression will behave as specified in its :math:`i`-th branch @@ -1269,7 +1275,7 @@ and it will reduce to :math:`f_i` where the :math:`x_{i1} …x_{ip_i}` are repla :math:`u_1 … u_{p_i}` according to the ι-reduction. Actually, for type checking a :math:`\Match…\with…\kwend` expression we also need -to know the predicate P to be proved by case analysis. In the general +to know the predicate :math:`P` to be proved by case analysis. 
In the general case where :math:`I` is an inductively defined :math:`n`-ary relation, :math:`P` is a predicate over :math:`n+1` arguments: the :math:`n` first ones correspond to the arguments of :math:`I` (parameters excluded), and the last one corresponds to object :math:`m`. |Coq| @@ -1303,7 +1309,7 @@ inference rules, we use a more compact notation: .. _Allowed-elimination-sorts: -**Allowed elimination sorts.** An important question for building the typing rule for match is what +**Allowed elimination sorts.** An important question for building the typing rule for :math:`\Match` is what can be the type of :math:`λ a x . P` with respect to the type of :math:`m`. If :math:`m:I` and :math:`I:A` and :math:`λ a x . P : B` then by :math:`[I:A|B]` we mean that one can use :math:`λ a x . P` with :math:`m` in the above match-construct. @@ -1321,7 +1327,7 @@ There is no restriction on the sort of the predicate to be eliminated. [(I~x):A′|B′] ----------------------- - [I:∀ x:A, A′|∀ x:A, B′] + [I:∀ x:A,~A′|∀ x:A,~B′] .. inference:: Set & Type @@ -1341,7 +1347,7 @@ sort :math:`\Prop`. ~ --------------- - [I:Prop|I→Prop] + [I:\Prop|I→\Prop] :math:`\Prop` is the type of logical propositions, the proofs of properties :math:`P` in @@ -1370,7 +1376,7 @@ the proof of :g:`or A B` is not accepted: From the computational point of view, the structure of the proof of :g:`(or A B)` in this term is needed for computing the boolean value. -In general, if :math:`I` has type :math:`\Prop` then :math:`P` cannot have type :math:`I→Set,` because +In general, if :math:`I` has type :math:`\Prop` then :math:`P` cannot have type :math:`I→\Set`, because it will mean to build an informative proof of type :math:`(P~m)` doing a case analysis over a non-computational object that will disappear in the extracted program. 
But the other way is safe with respect to our @@ -1378,11 +1384,11 @@ interpretation we can have :math:`I` a computational object and :math:`P` a non-computational one, it just corresponds to proving a logical property of a computational object. -In the same spirit, elimination on :math:`P` of type :math:`I→Type` cannot be allowed -because it trivially implies the elimination on :math:`P` of type :math:`I→ Set` by +In the same spirit, elimination on :math:`P` of type :math:`I→\Type` cannot be allowed +because it trivially implies the elimination on :math:`P` of type :math:`I→ \Set` by cumulativity. It also implies that there are two proofs of the same -property which are provably different, contradicting the proof- -irrelevance property which is sometimes a useful axiom: +property which are provably different, contradicting the +proof-irrelevance property which is sometimes a useful axiom: .. example:: @@ -1391,7 +1397,7 @@ irrelevance property which is sometimes a useful axiom: Axiom proof_irrelevance : forall (P : Prop) (x y : P), x=y. The elimination of an inductive definition of type :math:`\Prop` on a predicate -:math:`P` of type :math:`I→ Type` leads to a paradox when applied to impredicative +:math:`P` of type :math:`I→ \Type` leads to a paradox when applied to impredicative inductive definition like the second-order existential quantifier :g:`exProp` defined above, because it gives access to the two projections on this type. @@ -1407,7 +1413,7 @@ this type. I~\kw{is an empty or singleton definition} s ∈ \Sort ------------------------------------- - [I:Prop|I→ s] + [I:\Prop|I→ s] A *singleton definition* has only one constructor and all the arguments of this constructor have type :math:`\Prop`. In that case, there is a @@ -1444,7 +1450,7 @@ corresponding to the :math:`c:C` constructor. .. 
math:: \begin{array}{ll} \{c:(I~p_1\ldots p_r\ t_1 \ldots t_p)\}^P &\equiv (P~t_1\ldots ~t_p~c) \\ - \{c:\forall~x:T,C\}^P &\equiv \forall~x:T,\{(c~x):C\}^P + \{c:∀ x:T,~C\}^P &\equiv ∀ x:T,~\{(c~x):C\}^P \end{array} We write :math:`\{c\}^P` for :math:`\{c:C\}^P` with :math:`C` the type of :math:`c`. @@ -1463,7 +1469,7 @@ We write :math:`\{c\}^P` for :math:`\{c:C\}^P` with :math:`C` the type of :math: can be represented in abstract syntax as .. math:: - \case(t,P,f 1 | f 2 ) + \case(t,P,f_1 | f_2 ) where @@ -1471,27 +1477,27 @@ We write :math:`\{c\}^P` for :math:`\{c:C\}^P` with :math:`C` the type of :math: :nowrap: \begin{eqnarray*} - P & = & \lambda~l~.~P^\prime\\ + P & = & λ l.~P^\prime\\ f_1 & = & t_1\\ - f_2 & = & \lambda~(hd:\nat)~.~\lambda~(tl:\List~\nat)~.~t_2 + f_2 & = & λ (hd:\nat).~λ (tl:\List~\nat).~t_2 \end{eqnarray*} According to the definition: .. math:: - \{(\kw{nil}~\nat)\}^P ≡ \{(\kw{nil}~\nat) : (\List~\nat)\}^P ≡ (P~(\kw{nil}~\nat)) + \{(\Nil~\nat)\}^P ≡ \{(\Nil~\nat) : (\List~\nat)\}^P ≡ (P~(\Nil~\nat)) .. math:: \begin{array}{rl} - \{(\kw{cons}~\nat)\}^P & ≡\{(\kw{cons}~\nat) : (\nat→\List~\nat→\List~\nat)\}^P \\ - & ≡∀ n:\nat, \{(\kw{cons}~\nat~n) : \List~\nat→\List~\nat)\}^P \\ - & ≡∀ n:\nat, ∀ l:\List~\nat, \{(\kw{cons}~\nat~n~l) : \List~\nat)\}^P \\ - & ≡∀ n:\nat, ∀ l:\List~\nat,(P~(\kw{cons}~\nat~n~l)). + \{(\cons~\nat)\}^P & ≡\{(\cons~\nat) : (\nat→\List~\nat→\List~\nat)\}^P \\ + & ≡∀ n:\nat,~\{(\cons~\nat~n) : (\List~\nat→\List~\nat)\}^P \\ + & ≡∀ n:\nat,~∀ l:\List~\nat,~\{(\cons~\nat~n~l) : (\List~\nat)\}^P \\ + & ≡∀ n:\nat,~∀ l:\List~\nat,~(P~(\cons~\nat~n~l)). \end{array} - Given some :math:`P` then :math:`\{(\kw{nil}~\nat)\}^P` represents the expected type of :math:`f_1` , - and :math:`\{(\kw{cons}~\nat)\}^P` represents the expected type of :math:`f_2`. + Given some :math:`P` then :math:`\{(\Nil~\nat)\}^P` represents the expected type of :math:`f_1` , + and :math:`\{(\cons~\nat)\}^P` represents the expected type of :math:`f_2`. .. 
_Typing-rule: @@ -1512,7 +1518,7 @@ following typing rule E[Γ] ⊢ \case(c,P,f_1 |… |f_l ) : (P~t_1 … t_s~c) provided :math:`I` is an inductive type in a -definition :math:`\ind{r}{Γ_I}{Γ_C}` with :math:`Γ_C = [c_1 :C_1 ;…;c_n :C_n ]` and +definition :math:`\ind{r}{Γ_I}{Γ_C}` with :math:`Γ_C = [c_1 :C_1 ;~…;~c_n :C_n ]` and :math:`c_{p_1} … c_{p_l}` are the only constructors of :math:`I`. @@ -1527,8 +1533,8 @@ definition :math:`\ind{r}{Γ_I}{Γ_C}` with :math:`Γ_C = [c_1 :C_1 ;…;c_n :C_ E[Γ] ⊢ t : (\List ~\nat) \\ E[Γ] ⊢ P : B \\ [(\List ~\nat)|B] \\ - E[Γ] ⊢ f_1 : {(\kw{nil} ~\nat)}^P \\ - E[Γ] ⊢ f_2 : {(\kw{cons} ~\nat)}^P + E[Γ] ⊢ f_1 : \{(\Nil ~\nat)\}^P \\ + E[Γ] ⊢ f_2 : \{(\cons ~\nat)\}^P \end{array} ------------------------------------------------ E[Γ] ⊢ \case(t,P,f_1 |f_2 ) : (P~t) @@ -1551,7 +1557,7 @@ The ι-contraction of this term is :math:`(f_i~a_1 … a_m )` leading to the general reduction rule: .. math:: - \case((c_{p_i}~q_1 … q_r~a_1 … a_m ),P,f_1 |… |f_n ) \triangleright_ι (f_i~a_1 … a_m ) + \case((c_{p_i}~q_1 … q_r~a_1 … a_m ),P,f_1 |… |f_l ) \triangleright_ι (f_i~a_1 … a_m ) .. _Fixpoint-definitions: @@ -1565,14 +1571,14 @@ concrete syntax for a recursive set of mutually recursive declarations is (with :math:`Γ_i` contexts): .. math:: - \fix~f_1 (Γ_1 ) :A_1 :=t_1 \with … \with~f_n (Γ_n ) :A_n :=t_n + \fix~f_1 (Γ_1 ) :A_1 :=t_1~\with … \with~f_n (Γ_n ) :A_n :=t_n The terms are obtained by projections from this set of declarations and are written .. math:: - \fix~f_1 (Γ_1 ) :A_1 :=t_1 \with … \with~f_n (Γ_n ) :A_n :=t_n \for~f_i + \fix~f_1 (Γ_1 ) :A_1 :=t_1~\with … \with~f_n (Γ_n ) :A_n :=t_n~\for~f_i In the inference rules, we represent such a term by @@ -1580,7 +1586,7 @@ In the inference rules, we represent such a term by \Fix~f_i\{f_1 :A_1':=t_1' … f_n :A_n':=t_n'\} with :math:`t_i'` (resp. :math:`A_i'`) representing the term :math:`t_i` abstracted (resp. 
-generalized) with respect to the bindings in the context Γ_i , namely +generalized) with respect to the bindings in the context :math:`Γ_i`, namely :math:`t_i'=λ Γ_i . t_i` and :math:`A_i'=∀ Γ_i , A_i`. @@ -1592,7 +1598,7 @@ The typing rule is the expected one for a fixpoint. .. inference:: Fix (E[Γ] ⊢ A_i : s_i )_{i=1… n} - (E[Γ,f_1 :A_1 ,…,f_n :A_n ] ⊢ t_i : A_i )_{i=1… n} + (E[Γ;~f_1 :A_1 ;~…;~f_n :A_n ] ⊢ t_i : A_i )_{i=1… n} ------------------------------------------------------- E[Γ] ⊢ \Fix~f_i\{f_1 :A_1 :=t_1 … f_n :A_n :=t_n \} : A_i @@ -1608,14 +1614,14 @@ instance in the case of natural numbers, a proof of the induction principle of type .. math:: - ∀ P:\nat→\Prop, (P~O)→(∀ n:\nat, (P~n)→(P~(\kw{S}~n)))→ ∀ n:\nat, (P~n) + ∀ P:\nat→\Prop,~(P~\nO)→(∀ n:\nat,~(P~n)→(P~(\nS~n)))→ ∀ n:\nat,~(P~n) can be represented by the term: .. math:: \begin{array}{l} - λ P:\nat→\Prop. λ f:(P~O). λ g:(∀ n:\nat, (P~n)→(P~(S~n))).\\ - \Fix~h\{h:∀ n:\nat, (P~n):=λ n:\nat. \case(n,P,f | λp:\nat. (g~p~(h~p)))\} + λ P:\nat→\Prop.~λ f:(P~\nO).~λ g:(∀ n:\nat,~(P~n)→(P~(\nS~n))).\\ + \Fix~h\{h:∀ n:\nat,~(P~n):=λ n:\nat.~\case(n,P,f | λp:\nat.~(g~p~(h~p)))\} \end{array} Before accepting a fixpoint definition as being correctly typed, we @@ -1632,7 +1638,7 @@ fixpoints is extended and becomes where :math:`k_i` are positive integers. Each :math:`k_i` represents the index of parameter of :math:`f_i` , on which :math:`f_i` is decreasing. Each :math:`A_i` should be a type (reducible to a term) starting with at least :math:`k_i` products -:math:`∀ y_1 :B_1 ,… ∀ y_{k_i} :B_{k_i} , A_i'` and :math:`B_{k_i}` an inductive type. +:math:`∀ y_1 :B_1 ,~… ∀ y_{k_i} :B_{k_i} ,~A_i'` and :math:`B_{k_i}` an inductive type. Now in the definition :math:`t_i`, if :math:`f_j` occurs then it should be applied to at least :math:`k_j` arguments and the :math:`k_j`-th argument should be @@ -1642,23 +1648,23 @@ The definition of being structurally smaller is a bit technical. 
One needs first to define the notion of *recursive arguments of a constructor*. For an inductive definition :math:`\ind{r}{Γ_I}{Γ_C}`, if the type of a constructor :math:`c` has the form -:math:`∀ p_1 :P_1 ,… ∀ p_r :P_r, ∀ x_1:T_1, … ∀ x_r :T_r, (I_j~p_1 … p_r~t_1 … t_s )`, +:math:`∀ p_1 :P_1 ,~… ∀ p_r :P_r,~∀ x_1:T_1,~… ∀ x_r :T_r,~(I_j~p_1 … p_r~t_1 … t_s )`, then the recursive arguments will correspond to :math:`T_i` in which one of the :math:`I_l` occurs. The main rules for being structurally smaller are the following. Given a variable :math:`y` of an inductively defined type in a declaration -:math:`\ind{r}{Γ_I}{Γ_C}` where :math:`Γ_I` is :math:`[I_1 :A_1 ;…;I_k :A_k]`, and :math:`Γ_C` is -:math:`[c_1 :C_1 ;…;c_n :C_n ]`, the terms structurally smaller than :math:`y` are: +:math:`\ind{r}{Γ_I}{Γ_C}` where :math:`Γ_I` is :math:`[I_1 :A_1 ;~…;~I_k :A_k]`, and :math:`Γ_C` is +:math:`[c_1 :C_1 ;~…;~c_n :C_n ]`, the terms structurally smaller than :math:`y` are: -+ :math:`(t~u)` and :math:`λ x:u . t` when :math:`t` is structurally smaller than :math:`y`. ++ :math:`(t~u)` and :math:`λ x:U .~t` when :math:`t` is structurally smaller than :math:`y`. + :math:`\case(c,P,f_1 … f_n)` when each :math:`f_i` is structurally smaller than :math:`y`. If :math:`c` is :math:`y` or is structurally smaller than :math:`y`, its type is an inductive definition :math:`I_p` part of the inductive declaration corresponding to :math:`y`. Each :math:`f_i` corresponds to a type of constructor - :math:`C_q ≡ ∀ p_1 :P_1 ,…,∀ p_r :P_r , ∀ y_1 :B_1 , … ∀ y_k :B_k , (I~a_1 … a_k )` - and can consequently be written :math:`λ y_1 :B_1' . … λ y_k :B_k'. g_i`. (:math:`B_i'` is + :math:`C_q ≡ ∀ p_1 :P_1 ,~…,∀ p_r :P_r ,~∀ y_1 :B_1 ,~… ∀ y_k :B_k ,~(I~a_1 … a_k )` + and can consequently be written :math:`λ y_1 :B_1' .~… λ y_k :B_k'.~g_i`. 
(:math:`B_i'` is obtained from :math:`B_i` by substituting parameters for variables) the variables :math:`y_j` occurring in :math:`g_i` corresponding to recursive arguments :math:`B_i` (the ones in which one of the :math:`I_l` occurs) are structurally smaller than y. @@ -1702,7 +1708,7 @@ Let :math:`F` be the set of declarations: The reduction for fixpoints is: .. math:: - (\Fix~f_i \{F\} a_1 …a_{k_i}) \triangleright_ι \subst{t_i}{f_k}{\Fix~f_k \{F\}}_{k=1… n} ~a_1 … a_{k_i} + (\Fix~f_i \{F\}~a_1 …a_{k_i}) ~\triangleright_ι~ \subst{t_i}{f_k}{\Fix~f_k \{F\}}_{k=1… n} ~a_1 … a_{k_i} when :math:`a_{k_i}` starts with a constructor. This last restriction is needed in order to keep strong normalization and corresponds to the reduction @@ -1712,13 +1718,11 @@ possible: .. math:: :nowrap: - {\def\plus{\mathsf{plus}} - \def\tri{\triangleright_\iota} - \begin{eqnarray*} - \plus~(\nS~(\nS~\nO))~(\nS~\nO) & \tri & \nS~(\plus~(\nS~\nO)~(\nS~\nO))\\ - & \tri & \nS~(\nS~(\plus~\nO~(\nS~\nO)))\\ - & \tri & \nS~(\nS~(\nS~\nO))\\ - \end{eqnarray*}} + \begin{eqnarray*} + \plus~(\nS~(\nS~\nO))~(\nS~\nO)~& \trii & \nS~(\plus~(\nS~\nO)~(\nS~\nO))\\ + & \trii & \nS~(\nS~(\plus~\nO~(\nS~\nO)))\\ + & \trii & \nS~(\nS~(\nS~\nO))\\ + \end{eqnarray*} .. _Mutual-induction: @@ -1748,9 +1752,9 @@ reference to the global declaration in the subsequent global environment and local context by explicitly applying this constant to the constant :math:`c'`. -Below, if :math:`Γ` is a context of the form :math:`[y_1 :A_1 ;…;y_n :A_n]`, we write -:math:`∀x:U,\subst{Γ}{c}{x}` to mean -:math:`[y_1 :∀ x:U,\subst{A_1}{c}{x};…;y_n :∀ x:U,\subst{A_n}{c}{x}]` +Below, if :math:`Γ` is a context of the form :math:`[y_1 :A_1 ;~…;~y_n :A_n]`, we write +:math:`∀x:U,~\subst{Γ}{c}{x}` to mean +:math:`[y_1 :∀ x:U,~\subst{A_1}{c}{x};~…;~y_n :∀ x:U,~\subst{A_n}{c}{x}]` and :math:`\subst{E}{|Γ|}{|Γ|c}` to mean the parallel substitution :math:`E\{y_1 /(y_1~c)\}…\{y_n/(y_n~c)\}`. 
@@ -1760,25 +1764,25 @@ and :math:`\subst{E}{|Γ|}{|Γ|c}` to mean the parallel substitution **First abstracting property:** .. math:: - \frac{\WF{E;c:U;E′;c′:=t:T;E″}{Γ}} - {\WF{E;c:U;E′;c′:=λ x:U. \subst{t}{c}{x}:∀x:U,\subst{T}{c}{x};\subst{E″}{c′}{(c′~c)}} - {\subst{Γ}{c}{(c~c′)}}} + \frac{\WF{E;~c:U;~E′;~c′:=t:T;~E″}{Γ}} + {\WF{E;~c:U;~E′;~c′:=λ x:U.~\subst{t}{c}{x}:∀x:U,~\subst{T}{c}{x};~\subst{E″}{c′}{(c′~c)}} + {\subst{Γ}{c′}{(c′~c)}}} .. math:: - \frac{\WF{E;c:U;E′;c′:T;E″}{Γ}} - {\WF{E;c:U;E′;c′:∀ x:U,\subst{T}{c}{x};\subst{E″}{c′}{(c′~c)}}{Γ{c/(c~c′)}}} + \frac{\WF{E;~c:U;~E′;~c′:T;~E″}{Γ}} + {\WF{E;~c:U;~E′;~c′:∀ x:U,~\subst{T}{c}{x};~\subst{E″}{c′}{(c′~c)}}{\subst{Γ}{c′}{(c′~c)}}} .. math:: - \frac{\WF{E;c:U;E′;\ind{p}{Γ_I}{Γ_C};E″}{Γ}} - {\WFTWOLINES{E;c:U;E′;\ind{p+1}{∀ x:U,\subst{Γ_I}{c}{x}}{∀ x:U,\subst{Γ_C}{c}{x}}; - \subst{E″}{|Γ_I ,Γ_C |}{|Γ_I ,Γ_C | c}} - {\subst{Γ}{|Γ_I ,Γ_C|}{|Γ_I ,Γ_C | c}}} + \frac{\WF{E;~c:U;~E′;~\ind{p}{Γ_I}{Γ_C};~E″}{Γ}} + {\WFTWOLINES{E;~c:U;~E′;~\ind{p+1}{∀ x:U,~\subst{Γ_I}{c}{x}}{∀ x:U,~\subst{Γ_C}{c}{x}};~ + \subst{E″}{|Γ_I ;Γ_C |}{|Γ_I ;Γ_C | c}} + {\subst{Γ}{|Γ_I ;Γ_C|}{|Γ_I ;Γ_C | c}}} One can similarly modify a global declaration by generalizing it over a previously defined constant :math:`c′`. Below, if :math:`Γ` is a context of the form -:math:`[y_1 :A_1 ;…;y_n :A_n]`, we write :math:`\subst{Γ}{c}{u}` to mean -:math:`[y_1 :\subst{A_1} {c}{u};…;y_n:\subst{A_n} {c}{u}]`. +:math:`[y_1 :A_1 ;~…;~y_n :A_n]`, we write :math:`\subst{Γ}{c}{u}` to mean +:math:`[y_1 :\subst{A_1} {c}{u};~…;~y_n:\subst{A_n} {c}{u}]`. .. _Second-abstracting-property: @@ -1786,16 +1790,16 @@ a previously defined constant :math:`c′`. Below, if :math:`Γ` is a context of **Second abstracting property:** .. 
math:: - \frac{\WF{E;c:=u:U;E′;c′:=t:T;E″}{Γ}} - {\WF{E;c:=u:U;E′;c′:=(\letin{x}{u:U}{\subst{t}{c}{x}}):\subst{T}{c}{u};E″}{Γ}} + \frac{\WF{E;~c:=u:U;~E′;~c′:=t:T;~E″}{Γ}} + {\WF{E;~c:=u:U;~E′;~c′:=(\letin{x}{u:U}{\subst{t}{c}{x}}):\subst{T}{c}{u};~E″}{Γ}} .. math:: - \frac{\WF{E;c:=u:U;E′;c′:T;E″}{Γ}} - {\WF{E;c:=u:U;E′;c′:\subst{T}{c}{u};E″}{Γ}} + \frac{\WF{E;~c:=u:U;~E′;~c′:T;~E″}{Γ}} + {\WF{E;~c:=u:U;~E′;~c′:\subst{T}{c}{u};~E″}{Γ}} .. math:: - \frac{\WF{E;c:=u:U;E′;\ind{p}{Γ_I}{Γ_C};E″}{Γ}} - {\WF{E;c:=u:U;E′;\ind{p}{\subst{Γ_I}{c}{u}}{\subst{Γ_C}{c}{u}};E″}{Γ}} + \frac{\WF{E;~c:=u:U;~E′;~\ind{p}{Γ_I}{Γ_C};~E″}{Γ}} + {\WF{E;~c:=u:U;~E′;~\ind{p}{\subst{Γ_I}{c}{u}}{\subst{Γ_C}{c}{u}};~E″}{Γ}} .. _Pruning-the-local-context: @@ -1810,7 +1814,7 @@ One can consequently derive the following property. .. inference:: First pruning property: - \WF{E;c:U;E′}{Γ} + \WF{E;~c:U;~E′}{Γ} c~\kw{does not occur in}~E′~\kw{and}~Γ -------------------------------------- \WF{E;E′}{Γ} @@ -1820,7 +1824,7 @@ One can consequently derive the following property. .. inference:: Second pruning property: - \WF{E;c:=u:U;E′}{Γ} + \WF{E;~c:=u:U;~E′}{Γ} c~\kw{does not occur in}~E′~\kw{and}~Γ -------------------------------------- \WF{E;E′}{Γ} @@ -1861,10 +1865,10 @@ in the sort :math:`\Set`, which is extended to a domain in any sort: .. inference:: ProdImp E[Γ] ⊢ T : s - s ∈ {\Sort} - E[Γ::(x:T)] ⊢ U : Set + s ∈ \Sort + E[Γ::(x:T)] ⊢ U : \Set --------------------- - E[Γ] ⊢ ∀ x:T,U : Set + E[Γ] ⊢ ∀ x:T,~U : \Set This extension has consequences on the inductive definitions which are allowed. In the impredicative system, one can build so-called *large @@ -1879,15 +1883,15 @@ impredicative system for sort :math:`\Set` become: .. inference:: Set1 - s ∈ \{Prop, Set\} + s ∈ \{\Prop, \Set\} ----------------- - [I:Set|I→ s] + [I:\Set|I→ s] .. 
inference:: Set2 I~\kw{is a small inductive definition} s ∈ \{\Type(i)\} ---------------- - [I:Set|I→ s] + [I:\Set|I→ s] diff --git a/doc/sphinx/language/coq-library.rst b/doc/sphinx/language/coq-library.rst index 10650af1d1..b82b3b0e80 100644 --- a/doc/sphinx/language/coq-library.rst +++ b/doc/sphinx/language/coq-library.rst @@ -104,18 +104,18 @@ subclass :token:`form` of the syntactic class :token:`term`. The syntax of a nice last column. Or even better, find a proper way to do this! .. productionlist:: - form : True (True) - : | False (False) - : | ~ `form` (not) - : | `form` /\ `form` (and) - : | `form` \/ `form` (or) - : | `form` -> `form` (primitive implication) - : | `form` <-> `form` (iff) - : | forall `ident` : `type`, `form` (primitive for all) - : | exists `ident` [: `specif`], `form` (ex) - : | exists2 `ident` [: `specif`], `form` & `form` (ex2) - : | `term` = `term` (eq) - : | `term` = `term` :> `specif` (eq) + form : True (True) + : False (False) + : ~ `form` (not) + : `form` /\ `form` (and) + : `form` \/ `form` (or) + : `form` -> `form` (primitive implication) + : `form` <-> `form` (iff) + : forall `ident` : `type`, `form` (primitive for all) + : exists `ident` [: `specif`], `form` (ex) + : exists2 `ident` [: `specif`], `form` & `form` (ex2) + : `term` = `term` (eq) + : `term` = `term` :> `specif` (eq) .. note:: @@ -287,13 +287,13 @@ the next section :ref:`specification`): .. 
productionlist:: specif : `specif` * `specif` (prod) - : | `specif` + `specif` (sum) - : | `specif` + { `specif` } (sumor) - : | { `specif` } + { `specif` } (sumbool) - : | { `ident` : `specif` | `form` } (sig) - : | { `ident` : `specif` | `form` & `form` } (sig2) - : | { `ident` : `specif` & `specif` } (sigT) - : | { `ident` : `specif` & `specif` & `specif` } (sigT2) + : `specif` + `specif` (sum) + : `specif` + { `specif` } (sumor) + : { `specif` } + { `specif` } (sumbool) + : { `ident` : `specif` | `form` } (sig) + : { `ident` : `specif` | `form` & `form` } (sig2) + : { `ident` : `specif` & `specif` } (sigT) + : { `ident` : `specif` & `specif` & `specif` } (sigT2) term : (`term`, `term`) (pair) diff --git a/doc/sphinx/language/gallina-extensions.rst b/doc/sphinx/language/gallina-extensions.rst index 376a6b8eed..50a56f1d51 100644 --- a/doc/sphinx/language/gallina-extensions.rst +++ b/doc/sphinx/language/gallina-extensions.rst @@ -25,7 +25,7 @@ expressions. In this sense, the :cmd:`Record` construction allows defining record_keyword : Record | Inductive | CoInductive record_body : `ident` [ `binders` ] [: `sort` ] := [ `ident` ] { [ `field` ; … ; `field` ] }. field : `ident` [ `binders` ] : `type` [ where `notation` ] - : | `ident` [ `binders` ] [: `type` ] := `term` + : `ident` [ `binders` ] [: `type` ] := `term` .. cmd:: Record @ident @binders {? : @sort} := {? @ident} { {*; @ident @binders : @type } } @@ -165,8 +165,8 @@ available: .. productionlist:: terms projection : `term` `.` ( `qualid` ) - : | `term` `.` ( `qualid` `arg` … `arg` ) - : | `term` `.` ( @`qualid` `term` … `term` ) + : `term` `.` ( `qualid` `arg` … `arg` ) + : `term` `.` ( @`qualid` `term` … `term` ) Syntax of Record projections @@ -234,7 +234,8 @@ Primitive Projections extended the Calculus of Inductive Constructions with a new binary term constructor `r.(p)` representing a primitive projection `p` applied to a record object `r` (i.e., primitive projections are always applied). 
- Even if the record type has parameters, these do not appear at + Even if the record type has parameters, these do not appear + in the internal representation of applications of the projection, considerably reducing the sizes of terms when manipulating parameterized records and type checking time. On the user level, primitive projections can be used as a replacement @@ -818,14 +819,14 @@ together, as well as a means of massive abstraction. .. productionlist:: modules module_type : `qualid` - : | `module_type` with Definition `qualid` := `term` - : | `module_type` with Module `qualid` := `qualid` - : | `qualid` `qualid` … `qualid` - : | !`qualid` `qualid` … `qualid` + : `module_type` with Definition `qualid` := `term` + : `module_type` with Module `qualid` := `qualid` + : `qualid` `qualid` … `qualid` + : !`qualid` `qualid` … `qualid` module_binding : ( [Import|Export] `ident` … `ident` : `module_type` ) module_bindings : `module_binding` … `module_binding` module_expression : `qualid` … `qualid` - : | !`qualid` … `qualid` + : !`qualid` … `qualid` Syntax of modules @@ -1814,10 +1815,10 @@ This syntax extension is given in the following grammar: .. productionlist:: explicit_apps term : @ `qualid` `term` … `term` - : | @ `qualid` - : | `qualid` `argument` … `argument` + : @ `qualid` + : `qualid` `argument` … `argument` argument : `term` - : | (`ident` := `term`) + : (`ident` := `term`) Syntax for explicitly giving implicit arguments diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst index 1a33a9a46e..5ecf007eff 100644 --- a/doc/sphinx/language/gallina-specification-language.rst +++ b/doc/sphinx/language/gallina-specification-language.rst @@ -127,43 +127,43 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`. .. 
productionlist:: coq term : forall `binders` , `term` - : | fun `binders` => `term` - : | fix `fix_bodies` - : | cofix `cofix_bodies` - : | let `ident` [`binders`] [: `term`] := `term` in `term` - : | let fix `fix_body` in `term` - : | let cofix `cofix_body` in `term` - : | let ( [`name` , … , `name`] ) [`dep_ret_type`] := `term` in `term` - : | let ' `pattern` [in `term`] := `term` [`return_type`] in `term` - : | if `term` [`dep_ret_type`] then `term` else `term` - : | `term` : `term` - : | `term` <: `term` - : | `term` :> - : | `term` -> `term` - : | `term` `arg` … `arg` - : | @ `qualid` [`term` … `term`] - : | `term` % `ident` - : | match `match_item` , … , `match_item` [`return_type`] with + : fun `binders` => `term` + : fix `fix_bodies` + : cofix `cofix_bodies` + : let `ident` [`binders`] [: `term`] := `term` in `term` + : let fix `fix_body` in `term` + : let cofix `cofix_body` in `term` + : let ( [`name` , … , `name`] ) [`dep_ret_type`] := `term` in `term` + : let ' `pattern` [in `term`] := `term` [`return_type`] in `term` + : if `term` [`dep_ret_type`] then `term` else `term` + : `term` : `term` + : `term` <: `term` + : `term` :> + : `term` -> `term` + : `term` `arg` … `arg` + : @ `qualid` [`term` … `term`] + : `term` % `ident` + : match `match_item` , … , `match_item` [`return_type`] with : [[|] `equation` | … | `equation`] end - : | `qualid` - : | `sort` - : | `num` - : | _ - : | ( `term` ) + : `qualid` + : `sort` + : `num` + : _ + : ( `term` ) arg : `term` - : | ( `ident` := `term` ) + : ( `ident` := `term` ) binders : `binder` … `binder` binder : `name` - : | ( `name` … `name` : `term` ) - : | ( `name` [: `term`] := `term` ) - : | ' `pattern` + : ( `name` … `name` : `term` ) + : ( `name` [: `term`] := `term` ) + : ' `pattern` name : `ident` | _ qualid : `ident` | `qualid` `access_ident` sort : Prop | Set | Type fix_bodies : `fix_body` - : | `fix_body` with `fix_body` with … with `fix_body` for `ident` + : `fix_body` with `fix_body` with … with `fix_body` 
for `ident` cofix_bodies : `cofix_body` - : | `cofix_body` with `cofix_body` with … with `cofix_body` for `ident` + : `cofix_body` with `cofix_body` with … with `cofix_body` for `ident` fix_body : `ident` `binders` [`annotation`] [: `term`] := `term` cofix_body : `ident` [`binders`] [: `term`] := `term` annotation : { struct `ident` } @@ -173,13 +173,13 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`. equation : `mult_pattern` | … | `mult_pattern` => `term` mult_pattern : `pattern` , … , `pattern` pattern : `qualid` `pattern` … `pattern` - : | @ `qualid` `pattern` … `pattern` - : | `pattern` as `ident` - : | `pattern` % `ident` - : | `qualid` - : | _ - : | `num` - : | ( `or_pattern` , … , `or_pattern` ) + : @ `qualid` `pattern` … `pattern` + : `pattern` as `ident` + : `pattern` % `ident` + : `qualid` + : _ + : `num` + : ( `or_pattern` , … , `or_pattern` ) or_pattern : `pattern` | … | `pattern` @@ -230,7 +230,7 @@ There are three sorts :g:`Set`, :g:`Prop` and :g:`Type`. themselves are typing the proofs. We denote propositions by :production:`form`. This constitutes a semantic subclass of the syntactic class :token:`term`. -- :g:`Set` is is the universe of *program types* or *specifications*. The +- :g:`Set` is the universe of *program types* or *specifications*. The specifications themselves are typing the programs. We denote specifications by :production:`specif`. This constitutes a semantic subclass of the syntactic class :token:`term`. @@ -524,38 +524,38 @@ The Vernacular .. productionlist:: coq decorated-sentence : [ `decoration` … `decoration` ] `sentence` sentence : `assumption` - : | `definition` - : | `inductive` - : | `fixpoint` - : | `assertion` `proof` + : `definition` + : `inductive` + : `fixpoint` + : `assertion` `proof` assumption : `assumption_keyword` `assums`. 
assumption_keyword : Axiom | Conjecture - : | Parameter | Parameters - : | Variable | Variables - : | Hypothesis | Hypotheses + : Parameter | Parameters + : Variable | Variables + : Hypothesis | Hypotheses assums : `ident` … `ident` : `term` - : | ( `ident` … `ident` : `term` ) … ( `ident` … `ident` : `term` ) + : ( `ident` … `ident` : `term` ) … ( `ident` … `ident` : `term` ) definition : [Local] Definition `ident` [`binders`] [: `term`] := `term` . - : | Let `ident` [`binders`] [: `term`] := `term` . + : Let `ident` [`binders`] [: `term`] := `term` . inductive : Inductive `ind_body` with … with `ind_body` . - : | CoInductive `ind_body` with … with `ind_body` . + : CoInductive `ind_body` with … with `ind_body` . ind_body : `ident` [`binders`] : `term` := : [[|] `ident` [`binders`] [:`term`] | … | `ident` [`binders`] [:`term`]] fixpoint : Fixpoint `fix_body` with … with `fix_body` . - : | CoFixpoint `cofix_body` with … with `cofix_body` . + : CoFixpoint `cofix_body` with … with `cofix_body` . assertion : `assertion_keyword` `ident` [`binders`] : `term` . assertion_keyword : Theorem | Lemma - : | Remark | Fact - : | Corollary | Proposition - : | Definition | Example + : Remark | Fact + : Corollary | Proposition + : Definition | Example proof : Proof . … Qed . - : | Proof . … Defined . - : | Proof . … Admitted . + : Proof . … Defined . + : Proof . … Admitted . decoration : #[ `attributes` ] attributes : [`attribute`, … , `attribute`] attribute : `ident` - :| `ident` = `string` - :| `ident` ( `attributes` ) + : `ident` = `string` + : `ident` ( `attributes` ) .. 
todo:: This use of … in this grammar is inconsistent What about removing the proof part of this grammar from this chapter diff --git a/doc/sphinx/practical-tools/coq-commands.rst b/doc/sphinx/practical-tools/coq-commands.rst index 9bc67147f7..1b4d2315aa 100644 --- a/doc/sphinx/practical-tools/coq-commands.rst +++ b/doc/sphinx/practical-tools/coq-commands.rst @@ -163,14 +163,14 @@ and ``coqtop``, unless stated otherwise: is equivalent to runningRequire dirpath. :-require dirpath: Load |Coq| compiled library dirpath and import it. This is equivalent to running Require Import dirpath. -:-batch: Exit just after argument parsing. Available for `coqtop` only. -:-compile *file.v*: Compile file *file.v* into *file.vo*. This option +:-batch: Exit just after argument parsing. Available for ``coqtop`` only. +:-compile *file.v*: Deprecated; use ``coqc`` instead. Compile file *file.v* into *file.vo*. This option implies -batch (exit just after argument parsing). It is available only - for `coqtop`, as this behavior is the purpose of `coqc`. -:-compile-verbose *file.v*: Same as -compile but also output the + for `coqtop`, as this behavior is the purpose of ``coqc``. +:-compile-verbose *file.v*: Deprecated. Use ``coqc -verbose``. Same as -compile but also output the content of *file.v* as it is compiled. :-verbose: Output the content of the input file as it is compiled. - This option is available for `coqc` only; it is the counterpart of + This option is available for ``coqc`` only; it is the counterpart of -compile-verbose. :-w (all|none|w₁,…,wₙ): Configure the display of warnings. This option expects all, none or a comma-separated list of warning names or @@ -211,11 +211,11 @@ and ``coqtop``, unless stated otherwise: (to be used by coqdoc, see :ref:`coqdoc`). By default, if *file.v* is being compiled, *file.glob* is used. :-no-glob: Disable the dumping of references for global names. 
-:-image *file*: Set the binary image to be used by `coqc` to be *file* +:-image *file*: Set the binary image to be used by ``coqc`` to be *file* instead of the standard one. Not of general use. :-bindir *directory*: Set the directory containing |Coq| binaries to be - used by `coqc`. It is equivalent to doing export COQBIN= *directory* - before launching `coqc`. + used by ``coqc``. It is equivalent to doing export COQBIN= *directory* + before launching ``coqc``. :-where: Print the location of |Coq|’s standard library and exit. :-config: Print the locations of |Coq|’s binaries, dependencies, and libraries, then exit. diff --git a/doc/sphinx/proof-engine/ltac.rst b/doc/sphinx/proof-engine/ltac.rst index 1071682ead..442077616f 100644 --- a/doc/sphinx/proof-engine/ltac.rst +++ b/doc/sphinx/proof-engine/ltac.rst @@ -41,117 +41,121 @@ mode but it can also be used in toplevel definitions as shown below. .. note:: - - The infix tacticals “… \|\| …”, “… + …”, and “… ; …” are associative. + - The infix tacticals ``… || …`` , ``… + …`` , and ``… ; …`` are associative. - - In :token:`tacarg`, there is an overlap between qualid as a direct tactic - argument and :token:`qualid` as a particular case of term. The resolution is - done by first looking for a reference of the tactic language and if - it fails, for a reference to a term. To force the resolution as a - reference of the tactic language, use the form :g:`ltac:(@qualid)`. To - force the resolution as a reference to a term, use the syntax - :g:`(@qualid)`. + .. example:: - - As shown by the figure, tactical ``\|\|`` binds more than the prefix - tacticals try, repeat, do and abstract which themselves bind more - than the postfix tactical “… ;[ … ]” which binds more than “… ; …”. 
+ If you want that :n:`@tactic__2; @tactic__3` be fully run on the first + subgoal generated by :n:`@tactic__1`, before running on the other + subgoals, then you should not write + :n:`@tactic__1; (@tactic__2; @tactic__3)` but rather + :n:`@tactic__1; [> @tactic__2; @tactic__3 .. ]`. - For instance + - In :token:`tacarg`, there is an overlap between :token:`qualid` as a + direct tactic argument and :token:`qualid` as a particular case of + :token:`term`. The resolution is done by first looking for a reference + of the tactic language and if it fails, for a reference to a term. + To force the resolution as a reference of the tactic language, use the + form :n:`ltac:(@qualid)`. To force the resolution as a reference to a + term, use the syntax :n:`(@qualid)`. - .. coqtop:: in + - As shown by the figure, tactical ``… || …`` binds more than the prefix + tacticals :tacn:`try`, :tacn:`repeat`, :tacn:`do` and :tacn:`abstract` + which themselves bind more than the postfix tactical ``… ;[ … ]`` + which binds at the same level as ``… ; …`` . - try repeat tac1 || tac2; tac3; [tac31 | ... | tac3n]; tac4. + .. example:: - is understood as + :n:`try repeat @tactic__1 || @tactic__2; @tactic__3; [ {+| @tactic } ]; @tactic__4` - .. coqtop:: in + is understood as: - try (repeat (tac1 || tac2)); - ((tac3; [tac31 | ... | tac3n]); tac4). + :n:`((try (repeat (@tactic__1 || @tactic__2)); @tactic__3); [ {+| @tactic } ]); @tactic__4` .. productionlist:: coq expr : `expr` ; `expr` - : | [> `expr` | ... | `expr` ] - : | `expr` ; [ `expr` | ... | `expr` ] - : | `tacexpr3` - tacexpr3 : do (`natural` | `ident`) tacexpr3 - : | progress `tacexpr3` - : | repeat `tacexpr3` - : | try `tacexpr3` - : | once `tacexpr3` - : | exactly_once `tacexpr3` - : | timeout (`natural` | `ident`) `tacexpr3` - : | time [`string`] `tacexpr3` - : | only `selector`: `tacexpr3` - : | `tacexpr2` + : [> `expr` | ... | `expr` ] + : `expr` ; [ `expr` | ... 
| `expr` ] + : `tacexpr3` + tacexpr3 : do (`natural` | `ident`) `tacexpr3` + : progress `tacexpr3` + : repeat `tacexpr3` + : try `tacexpr3` + : once `tacexpr3` + : exactly_once `tacexpr3` + : timeout (`natural` | `ident`) `tacexpr3` + : time [`string`] `tacexpr3` + : only `selector`: `tacexpr3` + : `tacexpr2` tacexpr2 : `tacexpr1` || `tacexpr3` - : | `tacexpr1` + `tacexpr3` - : | tryif `tacexpr1` then `tacexpr1` else `tacexpr1` - : | `tacexpr1` + : `tacexpr1` + `tacexpr3` + : tryif `tacexpr1` then `tacexpr1` else `tacexpr1` + : `tacexpr1` tacexpr1 : fun `name` ... `name` => `atom` - : | let [rec] `let_clause` with ... with `let_clause` in `atom` - : | match goal with `context_rule` | ... | `context_rule` end - : | match reverse goal with `context_rule` | ... | `context_rule` end - : | match `expr` with `match_rule` | ... | `match_rule` end - : | lazymatch goal with `context_rule` | ... | `context_rule` end - : | lazymatch reverse goal with `context_rule` | ... | `context_rule` end - : | lazymatch `expr` with `match_rule` | ... | `match_rule` end - : | multimatch goal with `context_rule` | ... | `context_rule` end - : | multimatch reverse goal with `context_rule` | ... | `context_rule` end - : | multimatch `expr` with `match_rule` | ... | `match_rule` end - : | abstract `atom` - : | abstract `atom` using `ident` - : | first [ `expr` | ... | `expr` ] - : | solve [ `expr` | ... | `expr` ] - : | idtac [ `message_token` ... `message_token`] - : | fail [`natural`] [`message_token` ... `message_token`] - : | fresh [ `component` … `component` ] - : | context `ident` [`term`] - : | eval `redexpr` in `term` - : | type of `term` - : | constr : `term` - : | uconstr : `term` - : | type_term `term` - : | numgoals - : | guard `test` - : | assert_fails `tacexpr3` - : | assert_succeeds `tacexpr3` - : | `atomic_tactic` - : | `qualid` `tacarg` ... `tacarg` - : | `atom` + : let [rec] `let_clause` with ... with `let_clause` in `atom` + : match goal with `context_rule` | ... 
| `context_rule` end + : match reverse goal with `context_rule` | ... | `context_rule` end + : match `expr` with `match_rule` | ... | `match_rule` end + : lazymatch goal with `context_rule` | ... | `context_rule` end + : lazymatch reverse goal with `context_rule` | ... | `context_rule` end + : lazymatch `expr` with `match_rule` | ... | `match_rule` end + : multimatch goal with `context_rule` | ... | `context_rule` end + : multimatch reverse goal with `context_rule` | ... | `context_rule` end + : multimatch `expr` with `match_rule` | ... | `match_rule` end + : abstract `atom` + : abstract `atom` using `ident` + : first [ `expr` | ... | `expr` ] + : solve [ `expr` | ... | `expr` ] + : idtac [ `message_token` ... `message_token`] + : fail [`natural`] [`message_token` ... `message_token`] + : fresh [ `component` … `component` ] + : context `ident` [`term`] + : eval `redexpr` in `term` + : type of `term` + : constr : `term` + : uconstr : `term` + : type_term `term` + : numgoals + : guard `test` + : assert_fails `tacexpr3` + : assert_succeeds `tacexpr3` + : `atomic_tactic` + : `qualid` `tacarg` ... `tacarg` + : `atom` atom : `qualid` - : | () - : | `integer` - : | ( `expr` ) + : () + : `integer` + : ( `expr` ) component : `string` | `qualid` message_token : `string` | `ident` | `integer` tacarg : `qualid` - : | () - : | ltac : `atom` - : | `term` + : () + : ltac : `atom` + : `term` let_clause : `ident` [`name` ... 
`name`] := `expr` context_rule : `context_hyp`, ..., `context_hyp` |- `cpattern` => `expr` - : | `cpattern` => `expr` - : | |- `cpattern` => `expr` - : | _ => `expr` + : `cpattern` => `expr` + : |- `cpattern` => `expr` + : _ => `expr` context_hyp : `name` : `cpattern` - : | `name` := `cpattern` [: `cpattern`] + : `name` := `cpattern` [: `cpattern`] match_rule : `cpattern` => `expr` - : | context [ident] [ `cpattern` ] => `expr` - : | _ => `expr` + : context [`ident`] [ `cpattern` ] => `expr` + : _ => `expr` test : `integer` = `integer` - : | `integer` (< | <= | > | >=) `integer` + : `integer` (< | <= | > | >=) `integer` selector : [`ident`] - : | `integer` - : | (`integer` | `integer` - `integer`), ..., (`integer` | `integer` - `integer`) + : `integer` + : (`integer` | `integer` - `integer`), ..., (`integer` | `integer` - `integer`) toplevel_selector : `selector` - : | all - : | par - : | ! + : all + : par + : ! .. productionlist:: coq top : [Local] Ltac `ltac_def` with ... with `ltac_def` ltac_def : `ident` [`ident` ... `ident`] := `expr` - : | `qualid` [`ident` ... `ident`] ::= `expr` + : `qualid` [`ident` ... `ident`] ::= `expr` .. _ltac-semantics: diff --git a/doc/sphinx/proof-engine/ssreflect-proof-language.rst b/doc/sphinx/proof-engine/ssreflect-proof-language.rst index bffbe3e284..483dbd311d 100644 --- a/doc/sphinx/proof-engine/ssreflect-proof-language.rst +++ b/doc/sphinx/proof-engine/ssreflect-proof-language.rst @@ -1445,6 +1445,16 @@ section constant. If tactic is ``move`` or ``case`` and an equation :token:`ident` is given, then clear (step 3) for :token:`d_item` is suppressed (see section :ref:`generation_of_equations_ssr`). +Intro patterns (see section :ref:`introduction_ssr`) +and the ``rewrite`` tactic (see section :ref:`rewriting_ssr`) +let one place a :token:`clear_switch` in the middle of other items +(namely identifiers, views and rewrite rules). 
This can trigger the +addition of proof context items to the ones being explicitly +cleared, and in turn this can result in clear errors (e.g. if the +context item automatically added occurs in the goal). The +relevant sections describe ways to avoid the unintended clear of +context items. + Matching for apply and exact ```````````````````````````` @@ -1559,7 +1569,7 @@ whose general syntax is i_view ::= {? %{%} } /@term %| /ltac:( @tactic ) .. prodn:: - i_pattern ::= @ident %| > @ident %| _ %| ? %| * %| + %| {? @occ_switch } -> %| {? @occ_switch }<- %| [ {?| @i_item } ] %| - %| [: {+ @ident } ] + i_pattern ::= @ident %| > %| _ %| ? %| * %| + %| {? @occ_switch } -> %| {? @occ_switch }<- %| [ {?| @i_item } ] %| - %| [: {+ @ident } ] .. prodn:: i_block ::= [^ @ident ] %| [^~ @ident ] %| [^~ @num ] @@ -1572,6 +1582,9 @@ The :token:`i_pattern`\s can be seen as a variant of *intro patterns* (see :tacn:`intros`:) each performs an introduction operation, i.e., pops some variables or assumptions from the goal. +Simplification items +````````````````````` + An :token:`s_item` can simplify the set of subgoals or the subgoals themselves: + ``//`` removes all the “trivial” subgoals that can be resolved by the @@ -1583,18 +1596,32 @@ An :token:`s_item` can simplify the set of subgoals or the subgoals themselves: ``/= //``, i.e., ``simpl; try done``. -When an :token:`s_item` bears a :token:`clear_switch`, then the +When an :token:`s_item` immediately precedes a :token:`clear_switch`, then the :token:`clear_switch` is executed *after* the :token:`s_item`, e.g., ``{IHn}//`` will solve some subgoals, possibly using the fact ``IHn``, and will erase ``IHn`` from the context of the remaining subgoals. +Views +````` + The first entry in the :token:`i_view` grammar rule, :n:`/@term`, represents a view (see section :ref:`views_and_reflection_ssr`). It interprets the top of the stack with the view :token:`term`. -It is equivalent to ``move/term``. 
The optional flag ``{}`` can -be used to signal that the :token:`term`, when it is a context entry, -has to be cleared. +It is equivalent to :n:`move/@term`. + +A :token:`clear_switch` that immediately precedes an :token:`i_view` +is complemented with the name of the view if and only if the :token:`i_view` +is a simple proof context entry [#10]_. +E.g. ``{}/v`` is equivalent to ``/v{v}``. +This behavior can be avoided by separating the :token:`clear_switch` +from the :token:`i_view` with the ``-`` intro pattern or by putting +parentheses around the view. + +A :token:`clear_switch` that immediately precedes an :token:`i_view` +is executed after the view application. + + If the next :token:`i_item` is a view, then the view is applied to the assumption in top position once all the previous :token:`i_item` have been performed. @@ -1608,6 +1635,9 @@ Notations can be used to name tactics, for example:: lets one write just ``/myop`` in the intro pattern. Note the scope annotation: views are interpreted opening the ``ssripat`` scope. +Intro patterns +`````````````` + |SSR| supports the following :token:`i_pattern`\s: :token:`ident` @@ -1615,16 +1645,24 @@ annotation: views are interpreted opening the ``ssripat`` scope. a new constant, fact, or defined constant :token:`ident`, respectively. Note that defined constants cannot be introduced when δ-expansion is required to expose the top variable or assumption. -``>``:token:`ident` - pops the first assumption. Type class instances are not considered - as assumptions. - The tactic ``move=> >H`` is equivalent to - ``move=> ? ? H`` with enough ``?`` to name ``H`` the first assumption. - On a goal:: + A :token:`clear_switch` (even an empty one) immediately preceding an + :token:`ident` is complemented with that :token:`ident` if and only if + the identifier is a simple proof context entry [#10]_. + As a consequence by prefixing the + :token:`ident` with ``{}`` one can *replace* a context entry.
+ This behavior can be avoided by separating the :token:`clear_switch` + from the :token:`ident` with the ``-`` intro pattern. +``>`` + pops every variable occurring in the rest of the stack. + Type class instances are popped even if they don't occur + in the rest of the stack. + The tactic ``move=> >`` is equivalent to + ``move=> ? ?`` on a goal such as:: forall x y, x < y -> G - it names ``H`` the assumption ``x < y``. + A typical use is ``move=>> H`` to name ``H`` the first assumption, + in the example above ``x < y``. ``?`` pops the top variable into an anonymous constant or fact, whose name is picked by the tactic interpreter. |SSR| only generates names that cannot @@ -1707,6 +1745,9 @@ annotation: views are interpreted opening the ``ssripat`` scope. Note that |SSR| does not support the syntax ``(ipat, …, ipat)`` for destructing intro-patterns. +Clear switch +```````````` + Clears are deferred until the end of the intro pattern. .. example:: @@ -1729,6 +1770,9 @@ is performed behind the scenes. Facts mentioned in a clear switch must be valid names in the proof context (excluding the section context). +Branching and destructuring +``````````````````````````` + The rules for interpreting branching and destructing :token:`i_pattern` are motivated by the fact that it would be pointless to have a branching pattern if tactic is a ``move``, and in most of the remaining cases @@ -1753,6 +1797,9 @@ interpretation, e.g.: are all equivalent. +Block introduction +`````````````````` + |SSR| supports the following :token:`i_block`\s: :n:`[^ @ident ]` @@ -3029,13 +3076,22 @@ operation should be performed: pattern. In its simplest form, it is a regular term. If no explicit redex switch is present the rewrite pattern to be matched is inferred from the :token:`r_item`.
-+ This optional term, or the :token:`r_item`, may be preceded by an occurrence - switch (see section :ref:`selectors_ssr`) or a clear item - (see section :ref:`discharge_ssr`), - these two possibilities being exclusive. An occurrence switch selects ++ This optional term, or the :token:`r_item`, may be preceded by an + :token:`occ_switch` (see section :ref:`selectors_ssr`) or a + :token:`clear_switch` (see section :ref:`discharge_ssr`), + these two possibilities being exclusive. + + An occurrence switch selects the occurrences of the rewrite pattern which should be affected by the rewrite operation. + A clear switch, even an empty one, is performed *after* the + :token:`r_item` is actually processed and is complemented with the name of + the rewrite rule if and only if it is a simple proof context entry [#10]_. + As a consequence one can + write ``rewrite {}H`` to rewrite with ``H`` and dispose ``H`` immediately + afterwards. + This behavior can be avoided by putting parentheses around the rewrite rule. An :token:`r_item` can be: @@ -3290,10 +3346,6 @@ the rewrite tactic. The effect of the tactic on the initial goal is to rewrite this lemma at the second occurrence of the first matching ``x + y + 0`` of the explicit rewrite redex ``_ + y + 0``. -An empty occurrence switch ``{}`` is not interpreted as a valid occurrence -switch. It has the effect of clearing the :token:`r_item` (when it is the name -of a context entry). - Occurrence selection and repetition ``````````````````````````````````` @@ -5305,7 +5357,7 @@ discharge item see :ref:`discharge_ssr` generalization item see :ref:`structure_ssr` -.. prodn:: i_pattern ::= @ident %| > @ident %| _ %| ? %| * %| + %| {? @occ_switch } -> %| {? @occ_switch } <- %| [ {?| @i_item } ] %| - %| [: {+ @ident } ] +.. prodn:: i_pattern ::= @ident %| > %| _ %| ? %| * %| + %| {? @occ_switch } -> %| {?
@occ_switch } <- %| [ {?| @i_item } ] %| - %| [: {+ @ident } ] intro pattern :ref:`introduction_ssr` @@ -5519,3 +5571,5 @@ Settings in the metatheory .. [#9] The current state of the proof shall be displayed by the Show Proof command of |Coq| proof mode. +.. [#10] A simple proof context entry is a naked identifier (i.e. not between + parentheses) designating a context entry that is not a section variable. diff --git a/doc/sphinx/proof-engine/tactics.rst b/doc/sphinx/proof-engine/tactics.rst index 59602581c7..7eef504ea9 100644 --- a/doc/sphinx/proof-engine/tactics.rst +++ b/doc/sphinx/proof-engine/tactics.rst @@ -33,10 +33,13 @@ extends the folklore notion of tactical) to combine those atomic tactics. This chapter is devoted to atomic tactics. The tactic language will be described in Chapter :ref:`ltac`. +Common elements of tactics +-------------------------- + .. _invocation-of-tactics: Invocation of tactics -------------------------- +~~~~~~~~~~~~~~~~~~~~~ A tactic is applied as an ordinary command. It may be preceded by a goal selector (see Section :ref:`ltac-semantics`). If no selector is @@ -44,9 +47,9 @@ specified, the default selector is used. .. _tactic_invocation_grammar: - .. productionlist:: `sentence` - tactic_invocation : toplevel_selector : tactic. - : |tactic . + .. productionlist:: sentence + tactic_invocation : `toplevel_selector` : `tactic`. + : `tactic`. .. opt:: Default Goal Selector "@toplevel_selector" :name: Default Goal Selector @@ -71,29 +74,31 @@ specified, the default selector is used. Bindings list ~~~~~~~~~~~~~~~~~~~ -Tactics that take a term as argument may also support a bindings list, -so as to instantiate some parameters of the term by name or position. -The general form of a term equipped with a bindings list is ``term with -bindings_list`` where ``bindings_list`` may be of two different forms: +Tactics that take a term as an argument may also support a bindings list +to instantiate some parameters of the term by name or position. 
+The general form of a term with a bindings list is +:n:`@term with @bindings_list` where :token:`bindings_list` can take two different forms: .. _bindings_list_grammar: - .. productionlist:: `bindings_list` - bindings_list : (ref := `term`) ... (ref := `term`) + .. productionlist:: bindings_list + ref : `ident` + : `num` + bindings_list : (`ref` := `term`) ... (`ref` := `term`) : `term` ... `term` -+ In a bindings list of the form :n:`{* (ref:= term)}`, :n:`ref` is either an ++ In a bindings list of the form :n:`{+ (@ref:= @term)}`, :n:`@ref` is either an :n:`@ident` or a :n:`@num`. The references are determined according to the type of - ``term``. If :n:`ref` is an identifier, this identifier has to be bound in the - type of ``term`` and the binding provides the tactic with an instance for the - parameter of this name. If :n:`ref` is some number ``n``, this number denotes - the ``n``-th non dependent premise of the ``term``, as determined by the type - of ``term``. + :n:`@term`. If :n:`@ref` is an identifier, this identifier has to be bound in the + type of :n:`@term` and the binding provides the tactic with an instance for the + parameter of this name. If :n:`@ref` is a number ``n``, it refers to + the ``n``-th non dependent premise of the :n:`@term`, as determined by the type + of :n:`@term`. .. exn:: No such binder. :undocumented: -+ A bindings list can also be a simple list of terms :n:`{* term}`. ++ A bindings list can also be a simple list of terms :n:`{* @term}`. In that case the references to which these terms correspond are determined by the tactic. In case of :tacn:`induction`, :tacn:`destruct`, :tacn:`elim` and :tacn:`case`, the terms have to @@ -105,6 +110,350 @@ bindings_list`` where ``bindings_list`` may be of two different forms: .. exn:: Not the right number of missing arguments. :undocumented: +.. 
_intropatterns: + +Intro patterns +~~~~~~~~~~~~~~ + +Intro patterns let you specify the name to assign to variables and hypotheses +introduced by tactics. They also let you split an introduced hypothesis into +multiple hypotheses or subgoals. Common tactics that accept intro patterns +include :tacn:`assert`, :tacn:`intros` and :tacn:`destruct`. + +.. productionlist:: coq + intropattern_list : `intropattern` ... `intropattern` + : `empty` + empty : + intropattern : * + : ** + : `simple_intropattern` + simple_intropattern : `simple_intropattern_closed` [ % `term` ... % `term` ] + simple_intropattern_closed : `naming_intropattern` + : _ + : `or_and_intropattern` + : `equality_intropattern` + naming_intropattern : `ident` + : ? + : ?`ident` + or_and_intropattern : [ `intropattern_list` | ... | `intropattern_list` ] + : ( `simple_intropattern` , ... , `simple_intropattern` ) + : ( `simple_intropattern` & ... & `simple_intropattern` ) + equality_intropattern : -> + : <- + : [= `intropattern_list` ] + or_and_intropattern_loc : `or_and_intropattern` + : `ident` + +Note that the intro pattern syntax varies between tactics. +Most tactics use :n:`@simple_intropattern` in the grammar. +:tacn:`destruct`, :tacn:`edestruct`, :tacn:`induction`, +:tacn:`einduction`, :tacn:`case`, :tacn:`ecase` and the various +:tacn:`inversion` tactics use :n:`@or_and_intropattern_loc`, while +:tacn:`intros` and :tacn:`eintros` use :n:`@intropattern_list`. +The :n:`eqn:` construct in various tactics uses :n:`@naming_intropattern`. 
+ +**Naming patterns** + +Use these elementary patterns to specify a name: + +* :n:`@ident` - use the specified name +* :n:`?` - let Coq choose a name +* :n:`?@ident` - generate a name that begins with :n:`@ident` +* :n:`_` - discard the matched part (unless it is required for another + hypothesis) +* if a disjunction pattern omits a name, such as :g:`[|H2]`, Coq will choose a name + +**Splitting patterns** + +The most common splitting patterns are: + +* split a hypothesis in the form :n:`A /\ B` into two + hypotheses :g:`H1: A` and :g:`H2: B` using the pattern :g:`(H1 & H2)` or + :g:`(H1, H2)` or :g:`[H1 H2]`. + :ref:`Example <intropattern_conj_ex>`. This also works on :n:`A <-> B`, which + is just a notation representing :n:`(A -> B) /\ (B -> A)`. +* split a hypothesis in the form :g:`A \/ B` into two + subgoals using the pattern :g:`[H1|H2]`. The first subgoal will have the hypothesis + :g:`H1: A` and the second subgoal will have the hypothesis :g:`H2: B`. + :ref:`Example <intropattern_disj_ex>` +* split a hypothesis in either of the forms :g:`A /\ B` or :g:`A \/ B` using the pattern :g:`[]`. + +Patterns can be nested: :n:`[[Ha|Hb] H]` can be used to split :n:`(A \/ B) /\ C`. + +Note that there is no equivalent to intro patterns for goals. For a goal :g:`A /\ B`, +use the :tacn:`split` tactic to replace the current goal with subgoals :g:`A` and :g:`B`. +For a goal :g:`A \/ B`, use :tacn:`left` to replace the current goal with :g:`A`, or +:tacn:`right` to replace the current goal with :g:`B`. + +* :n:`( {+, @simple_intropattern}` ) - matches + a product over an inductive type with a + :ref:`single constructor <intropattern_cons_note>`. + If the number of patterns + equals the number of constructor arguments, then it applies the patterns only to + the arguments, and + :n:`( {+, @simple_intropattern} )` is equivalent to :n:`[{+ @simple_intropattern}]`. 
+ If the number of patterns equals the number of constructor arguments plus the number + of :n:`let-ins`, the patterns are applied to the arguments and :n:`let-in` variables. + +* :n:`( {+& @simple_intropattern} )` - matches a right-hand nested term that consists + of one or more nested binary inductive types such as :g:`a1 OP1 a2 OP2 ...` + (where the :g:`OPn` are right-associative). + (If the :g:`OPn` are left-associative, additional parentheses will be needed to make the + term right-hand nested, such as :g:`a1 OP1 (a2 OP2 ...)`.) + The splitting pattern can have more than 2 names, for example :g:`(H1 & H2 & H3)` + matches :g:`A /\ B /\ C`. + The inductive types must have a + :ref:`single constructor with two parameters <intropattern_cons_note>`. + :ref:`Example <intropattern_ampersand_ex>` + +* :n:`[ {+| @intropattern_list} ]` - splits an inductive type that has + :ref:`multiple constructors <intropattern_cons_note>` + such as :n:`A \/ B` + into multiple subgoals. The number of :token:`intropattern_list` must be the same as the number of + constructors for the matched part. +* :n:`[ {+ @intropattern} ]` - splits an inductive type that has a + :ref:`single constructor with multiple parameters <intropattern_cons_note>` + such as :n:`A /\ B` into multiple hypotheses. Use :n:`[H1 [H2 H3]]` to match :g:`A /\ B /\ C`. +* :n:`[]` - splits an inductive type: If the inductive + type has multiple constructors, such as :n:`A \/ B`, + create one subgoal for each constructor. If the inductive type has a single constructor with + multiple parameters, such as :n:`A /\ B`, split it into multiple hypotheses. 
+ +**Equality patterns** + +These patterns can be used when the hypothesis is an equality: + +* :n:`->` - replaces the right-hand side of the hypothesis with the left-hand + side of the hypothesis in the conclusion of the goal; the hypothesis is + cleared; if the left-hand side of the hypothesis is a variable, it is + substituted everywhere in the context and the variable is removed. + :ref:`Example <intropattern_rarrow_ex>` +* :n:`<-` - similar to :n:`->`, but replaces the left-hand side of the hypothesis + with the right-hand side of the hypothesis. +* :n:`[= {*, @intropattern} ]` - If the product is over an equality type, + applies either :tacn:`injection` or :tacn:`discriminate`. + If :tacn:`injection` is applicable, the intropattern + is used on the hypotheses generated by :tacn:`injection`. If the + number of patterns is smaller than the number of hypotheses generated, the + pattern :n:`?` is used to complete the list. + :ref:`Example <intropattern_inj_discr_ex>` + +**Other patterns** + +* :n:`*` - introduces one or more quantified variables from the result + until there are no more quantified variables. + :ref:`Example <intropattern_star_ex>` + +* :n:`**` - introduces one or more quantified variables or hypotheses from the result until there are + no more quantified variables or implications (:g:`->`). :g:`intros **` is equivalent + to :g:`intros`. + :ref:`Example <intropattern_2stars_ex>` + +* :n:`@simple_intropattern_closed {* % @term}` - first applies each of the terms + with the :tacn:`apply ... in` tactic on the hypothesis to be introduced, then it uses + :n:`@simple_intropattern_closed`. + :ref:`Example <intropattern_injection_ex>` + +.. flag:: Bracketing Last Introduction Pattern + + For :n:`intros @intropattern_list`, controls how to handle a + conjunctive pattern that doesn't give enough simple patterns to match + all the arguments in the constructor. If set (the default), |Coq| generates + additional names to match the number of arguments. 
+ Unsetting the option will put the additional hypotheses in the goal instead, behavior that is more + similar to |SSR|'s intro patterns. + + .. deprecated:: 8.10 + +.. _intropattern_cons_note: + +.. note:: + + :n:`A \/ B` and :n:`A /\ B` use infix notation to refer to the inductive + types :n:`or` and :n:`and`. + :n:`or` has multiple constructors (:n:`or_introl` and :n:`or_intror`), + while :n:`and` has a single constructor (:n:`conj`) with multiple parameters + (:n:`A` and :n:`B`). + These are defined in theories/Init/Logic.v. The "where" clauses define the + infix notation for "or" and "and". + + .. coqdoc:: + + Inductive or (A B:Prop) : Prop := + | or_introl : A -> A \/ B + | or_intror : B -> A \/ B + where "A \/ B" := (or A B) : type_scope. + + Inductive and (A B:Prop) : Prop := + conj : A -> B -> A /\ B + where "A /\ B" := (and A B) : type_scope. + +.. note:: + + :n:`intros {+ p}` is not always equivalent to :n:`intros p; ... ; intros p` + if some of the :n:`p` are :g:`_`. In the first form, all erasures are done + at once, while they're done sequentially for each tactic in the second form. + If the second matched term depends on the first matched term and the pattern + for both is :g:`_` (i.e., both will be erased), the first :n:`intros` in the second + form will fail because the second matched term still has the dependency on the first. + +Examples: + +.. _intropattern_conj_ex: + + .. example:: intro pattern for /\\ + + .. coqtop:: reset none + + Goal forall (A: Prop) (B: Prop), (A /\ B) -> True. + + .. coqtop:: out + + intros. + + .. coqtop:: all + + destruct H as (HA & HB). + +.. _intropattern_disj_ex: + + .. example:: intro pattern for \\/ + + .. coqtop:: reset none + + Goal forall (A: Prop) (B: Prop), (A \/ B) -> True. + + .. coqtop:: out + + intros. + + .. coqtop:: all + + destruct H as [HA|HB]. all: swap 1 2. + +.. _intropattern_rarrow_ex: + + .. example:: -> intro pattern + + .. 
coqtop:: reset none + + Goal forall (x:nat) (y:nat) (z:nat), (x = y) -> (y = z) -> (x = z). + + .. coqtop:: out + + intros * H. + + .. coqtop:: all + + intros ->. + +.. _intropattern_inj_discr_ex: + + .. example:: [=] intro pattern + + The first :n:`intros [=]` uses :tacn:`injection` to strip :n:`(S ...)` from + both sides of the matched equality. The second uses :tacn:`discriminate` on + the contradiction :n:`1 = 2` (internally represented as :n:`(S O) = (S (S O))`) + to complete the goal. + + .. coqtop:: reset none + + Goal forall (n m:nat), (S n) = (S m) -> (S O)=(S (S O)) -> False. + + .. coqtop:: out + + intros *. + + .. coqtop:: all + + intros [= H]. + + .. coqtop:: all + + intros [=]. + +.. _intropattern_ampersand_ex: + + .. example:: (A & B & ...) intro pattern + + .. coqtop:: reset none + + Variables (A : Prop) (B: nat -> Prop) (C: Prop). + + .. coqtop:: out + + Goal A /\ (exists x:nat, B x /\ C) -> True. + + .. coqtop:: all + + intros (a & x & b & c). + +.. _intropattern_star_ex: + + .. example:: * intro pattern + + .. coqtop:: reset out + + Goal forall (A: Prop) (B: Prop), A -> B. + + .. coqtop:: all + + intros *. + +.. _intropattern_2stars_ex: + + .. example:: ** pattern ("intros \**" is equivalent to "intros") + + .. coqtop:: reset out + + Goal forall (A: Prop) (B: Prop), A -> B. + + .. coqtop:: all + + intros **. + + .. example:: compound intro pattern + + .. coqtop:: reset out + + Goal forall A B C:Prop, A \/ B /\ C -> (A -> C) -> C. + + .. coqtop:: all + + intros * [a | (_,c)] f. + all: swap 1 2. + +.. _intropattern_injection_ex: + + .. example:: combined intro pattern using [=] -> and % + + .. coqtop:: reset none + + Require Import Coq.Lists.List. + Section IntroPatterns. + Variables (A : Type) (xs ys : list A). + + .. coqtop:: out + + Example ThreeIntroPatternsCombined : + S (length ys) = 1 -> xs ++ ys = xs. + + .. coqtop:: all + + intros [=->%length_zero_iff_nil]. 
+ + * `intros` would add :g:`H : S (length ys) = 1` + * `intros [=]` would additionally apply :tacn:`injection` to :g:`H` to yield :g:`H0 : length ys = 0` + * `intros [=->%length_zero_iff_nil]` applies the theorem, making H the equality :g:`l=nil`, + which is then applied as for :g:`->`. + + .. coqdoc:: + + Theorem length_zero_iff_nil (l : list A): + length l = 0 <-> l=nil. + + The example is based on `Tej Chajed's coq-tricks <https://github.com/tchajed/coq-tricks/blob/8e6efe4971ed828ac8bdb5512c1f615d7d62691e/src/IntroPatterns.v>`_ + .. _occurrencessets: Occurrence sets and occurrence clauses @@ -113,11 +462,11 @@ Occurrence sets and occurrence clauses An occurrence clause is a modifier to some tactics that obeys the following syntax: - .. productionlist:: `sentence` + .. productionlist:: sentence occurrence_clause : in `goal_occurrences` - goal_occurrences : [`ident` [`at_occurrences`], ... , ident [`at_occurrences`] [|- [* [`at_occurrences`]]]] - :| * |- [* [`at_occurrences`]] - :| * + goal_occurrences : [`ident` [`at_occurrences`], ... , `ident` [`at_occurrences`] [|- [* [`at_occurrences`]]]] + : * |- [* [`at_occurrences`]] + : * at_occurrences : at `occurrences` occurrences : [-] `num` ... `num` @@ -508,10 +857,10 @@ Applying theorems This works as :tacn:`apply ... in` but turns unresolved bindings into existential variables, if any, instead of failing. - .. tacv:: apply {+, @term {? with @bindings_list } } in @ident as @intro_pattern + .. tacv:: apply {+, @term {? with @bindings_list } } in @ident as @simple_intropattern :name: apply ... in ... as - This works as :tacn:`apply ... in` then applies the :token:`intro_pattern` + This works as :tacn:`apply ... in` then applies the :token:`simple_intropattern` to the hypothesis :token:`ident`. .. tacv:: simple apply @term in @ident @@ -525,8 +874,8 @@ Applying theorems Tactic :n:`simple apply @term in @ident` does not either traverse tuples as :n:`apply @term in @ident` does. - .. tacv:: {? 
simple} apply {+, @term {? with @bindings_list}} in @ident {? as @intro_pattern} - {? simple} eapply {+, @term {? with @bindings_list}} in @ident {? as @intro_pattern} + .. tacv:: {? simple} apply {+, @term {? with @bindings_list}} in @ident {? as @simple_intropattern} + {? simple} eapply {+, @term {? with @bindings_list}} in @ident {? as @simple_intropattern} This summarizes the different syntactic variants of :n:`apply @term in @ident` and :n:`eapply @term in @ident`. @@ -726,149 +1075,17 @@ Managing the local context .. exn:: No such hypothesis: @ident. :undocumented: -.. tacn:: intros @intro_pattern_list +.. tacn:: intros @intropattern_list :name: intros ... - This extension of the tactic :n:`intros` allows to apply tactics on the fly - on the variables or hypotheses which have been introduced. An - *introduction pattern list* :n:`@intro_pattern_list` is a list of - introduction patterns possibly containing the filling introduction - patterns `*` and `**`. An *introduction pattern* is either: - - + a *naming introduction pattern*, i.e. either one of: - - + the pattern :n:`?` - - + the pattern :n:`?ident` - - + an identifier - - + an *action introduction pattern* which itself classifies into: - - + a *disjunctive/conjunctive introduction pattern*, i.e. either one of - - + a disjunction of lists of patterns - :n:`[@intro_pattern_list | ... | @intro_pattern_list]` - - + a conjunction of patterns: :n:`({+, p})` - - + a list of patterns - :n:`({+& p})` - for sequence of right-associative binary constructs - - + an *equality introduction pattern*, i.e. 
either one of: - - + a pattern for decomposing an equality: :n:`[= {+ p}]` - + the rewriting orientations: :n:`->` or :n:`<-` - - + the on-the-fly application of lemmas: :n:`p{+ %term}` where :n:`p` - itself is not a pattern for on-the-fly application of lemmas (note: - syntax is in experimental stage) - - + the wildcard: :n:`_` - - - Assuming a goal of type :g:`Q → P` (non-dependent product), or of type - :g:`forall x:T, P` (dependent product), the behavior of - :n:`intros p` is defined inductively over the structure of the introduction - pattern :n:`p`: - - Introduction on :n:`?` performs the introduction, and lets Coq choose a fresh - name for the variable; - - Introduction on :n:`?@ident` performs the introduction, and lets Coq choose a - fresh name for the variable based on :n:`@ident`; - - Introduction on :n:`@ident` behaves as described in :tacn:`intro` - - Introduction over a disjunction of list of patterns - :n:`[@intro_pattern_list | ... | @intro_pattern_list ]` expects the product - to be over an inductive type whose number of constructors is `n` (or more - generally over a type of conclusion an inductive type built from `n` - constructors, e.g. :g:`C -> A\/B` with `n=2` since :g:`A\/B` has `2` - constructors): it destructs the introduced hypothesis as :n:`destruct` (see - :tacn:`destruct`) would and applies on each generated subgoal the - corresponding tactic; - - The introduction patterns in :n:`@intro_pattern_list` are expected to consume - no more than the number of arguments of the `i`-th constructor. If it - consumes less, then Coq completes the pattern so that all the arguments of - the constructors of the inductive type are introduced (for instance, the - list of patterns :n:`[ | ] H` applied on goal :g:`forall x:nat, x=0 -> 0=x` - behaves the same as the list of patterns :n:`[ | ? 
] H`); - - Introduction over a conjunction of patterns :n:`({+, p})` expects - the goal to be a product over an inductive type :g:`I` with a single - constructor that itself has at least `n` arguments: It performs a case - analysis over the hypothesis, as :n:`destruct` would, and applies the - patterns :n:`{+ p}` to the arguments of the constructor of :g:`I` (observe - that :n:`({+ p})` is an alternative notation for :n:`[{+ p}]`); - - Introduction via :n:`({+& p})` is a shortcut for introduction via - :n:`(p,( ... ,( ..., p ) ... ))`; it expects the hypothesis to be a sequence of - right-associative binary inductive constructors such as :g:`conj` or - :g:`ex_intro`; for instance, a hypothesis with type - :g:`A /\(exists x, B /\ C /\ D)` can be introduced via pattern - :n:`(a & x & b & c & d)`; - - If the product is over an equality type, then a pattern of the form - :n:`[= {+ p}]` applies either :tacn:`injection` or :tacn:`discriminate` - instead of :tacn:`destruct`; if :tacn:`injection` is applicable, the patterns - :n:`{+, p}` are used on the hypotheses generated by :tacn:`injection`; if the - number of patterns is smaller than the number of hypotheses generated, the - pattern :n:`?` is used to complete the list. - - Introduction over ``->`` (respectively over ``<-``) - expects the hypothesis to be an equality and the right-hand-side - (respectively the left-hand-side) is replaced by the left-hand-side - (respectively the right-hand-side) in the conclusion of the goal; - the hypothesis itself is erased; if the term to substitute is a variable, it - is substituted also in the context of goal and the variable is removed too. 
- - Introduction over a pattern :n:`p{+ %term}` first applies :n:`{+ term}` - on the hypothesis to be introduced (as in :n:`apply {+, term}`) prior to the - application of the introduction pattern :n:`p`; - - Introduction on the wildcard depends on whether the product is dependent or not: - in the non-dependent case, it erases the corresponding hypothesis (i.e. it - behaves as an :tacn:`intro` followed by a :tacn:`clear`) while in the - dependent case, it succeeds and erases the variable only if the wildcard is part - of a more complex list of introduction patterns that also erases the hypotheses - depending on this variable; - - Introduction over :n:`*` introduces all forthcoming quantified variables - appearing in a row; introduction over :n:`**` introduces all forthcoming - quantified variables or hypotheses until the goal is not any more a - quantification or an implication. - - .. example:: - - .. coqtop:: reset all - - Goal forall A B C:Prop, A \/ B /\ C -> (A -> C) -> C. - intros * [a | (_,c)] f. - -.. note:: - - :n:`intros {+ p}` is not equivalent to :n:`intros p; ... ; intros p` - for the following reason: If one of the :n:`p` is a wildcard pattern, it - might succeed in the first case because the further hypotheses it - depends on are eventually erased too while it might fail in the second - case because of dependencies in hypotheses which are not yet - introduced (and a fortiori not yet erased). - -.. note:: - - In :n:`intros @intro_pattern_list`, if the last introduction pattern - is a disjunctive or conjunctive pattern - :n:`[{+| @intro_pattern_list}]`, the completion of :n:`@intro_pattern_list` - so that all the arguments of the i-th constructors of the corresponding - inductive type are introduced can be controlled with the following option: + Introduces one or more variables or hypotheses from the goal by matching the + intro patterns. See the description in :ref:`intropatterns`. - .. flag:: Bracketing Last Introduction Pattern +.. 
tacn:: eintros @intropattern_list + :name: eintros - Force completion, if needed, when the last introduction pattern is a - disjunctive or conjunctive pattern (on by default). + Works just like :tacn:`intros ...` except that it creates existential variables + for any unresolved variables rather than failing. .. tacn:: clear @ident :name: clear @@ -1057,19 +1274,19 @@ Managing the local context used as a synonym of :tacn:`epose`, i.e. when the :token:`term` does not occur in the goal. -.. tacn:: remember @term as @ident__1 {? eqn:@ident__2 } +.. tacn:: remember @term as @ident__1 {? eqn:@naming_intropattern } :name: remember - This behaves as :n:`set (@ident__1 := @term) in *`, using a logical + This behaves as :n:`set (@ident := @term) in *`, using a logical (Leibniz’s) equality instead of a local definition. - If :n:`@ident__2` is provided, it will be the name of the new equation. + Use :n:`@naming_intropattern` to name or split up the new equation. - .. tacv:: remember @term as @ident__1 {? eqn:@ident__2 } in @goal_occurrences + .. tacv:: remember @term as @ident__1 {? eqn:@naming_intropattern } in @goal_occurrences This is a more general form of :tacn:`remember` that remembers the occurrences of :token:`term` specified by an occurrence set. - .. tacv:: eremember @term as @ident__1 {? eqn:@ident__2 } {? in @goal_occurrences } + .. tacv:: eremember @term as @ident__1 {? eqn:@naming_intropattern } {? in @goal_occurrences } :name: eremember While the different variants of :tacn:`remember` expect that no @@ -1163,16 +1380,16 @@ Controlling the proof flow :name: Proof is not complete. (assert) :undocumented: - .. tacv:: assert @type as @intro_pattern + .. 
tacv:: assert @type as @simple_intropattern - If :n:`intro_pattern` is a naming introduction pattern (see :tacn:`intro`), + If :n:`simple_intropattern` is an intro pattern (see :ref:`intropatterns`), the hypothesis is named after this introduction pattern (in particular, if - :n:`intro_pattern` is :n:`@ident`, the tactic behaves like - :n:`assert (@ident : @type)`). If :n:`intro_pattern` is an action + :n:`simple_intropattern` is :n:`@ident`, the tactic behaves like + :n:`assert (@ident : @type)`). If :n:`simple_intropattern` is an action introduction pattern, the tactic behaves like :n:`assert @type` followed by the action done by this introduction pattern. - .. tacv:: assert @type as @intro_pattern by @tactic + .. tacv:: assert @type as @simple_intropattern by @tactic This combines the two previous variants of :tacn:`assert`. @@ -1186,7 +1403,7 @@ Controlling the proof flow .. exn:: Variable @ident is already declared. :undocumented: -.. tacv:: eassert @type as @intro_pattern by @tactic +.. tacv:: eassert @type as @simple_intropattern by @tactic :name: eassert While the different variants of :tacn:`assert` expect that no existential @@ -1194,16 +1411,16 @@ Controlling the proof flow This allows not to specify the asserted statement completeley before starting to prove it. -.. tacv:: pose proof @term {? as @intro_pattern} +.. tacv:: pose proof @term {? as @simple_intropattern} :name: pose proof - This tactic behaves like :n:`assert @type {? as @intro_pattern} by exact @term` + This tactic behaves like :n:`assert @type {? as @simple_intropattern} by exact @term` where :token:`type` is the type of :token:`term`. In particular, :n:`pose proof @term as @ident` behaves as :n:`assert (@ident := @term)` - and :n:`pose proof @term as @intro_pattern` is the same as applying the - :token:`intro_pattern` to :token:`term`. + and :n:`pose proof @term as @simple_intropattern` is the same as applying the + :token:`simple_intropattern` to :token:`term`. -.. 
tacv:: epose proof @term {? as @intro_pattern} +.. tacv:: epose proof @term {? as @simple_intropattern} :name: epose proof While :tacn:`pose proof` expects that no existential variables are generated by @@ -1221,20 +1438,20 @@ Controlling the proof flow This behaves like :n:`enough (@ident : @type)` with the name :token:`ident` of the hypothesis generated by Coq. -.. tacv:: enough @type as @intro_pattern +.. tacv:: enough @type as @simple_intropattern - This behaves like :n:`enough @type` using :token:`intro_pattern` to name or + This behaves like :n:`enough @type` using :token:`simple_intropattern` to name or destruct the new hypothesis. .. tacv:: enough (@ident : @type) by @tactic - enough @type {? as @intro_pattern } by @tactic + enough @type {? as @simple_intropattern } by @tactic This behaves as above but with :token:`tactic` expected to solve the initial goal after the extra assumption :token:`type` is added and possibly destructed. If the - :n:`as @intro_pattern` clause generates more than one subgoal, :token:`tactic` is + :n:`as @simple_intropattern` clause generates more than one subgoal, :token:`tactic` is applied to all of them. -.. tacv:: eenough @type {? as @intro_pattern } {? by @tactic } +.. tacv:: eenough @type {? as @simple_intropattern } {? by @tactic } eenough (@ident : @type) {? by @tactic } :name: eenough; _ @@ -1250,8 +1467,8 @@ Controlling the proof flow subgoals: :g:`U -> T` and :g:`U`. The subgoal :g:`U -> T` comes first in the list of remaining subgoal to prove. -.. tacv:: specialize (@ident {* @term}) {? as @intro_pattern} - specialize @ident with @bindings_list {? as @intro_pattern} +.. tacv:: specialize (@ident {* @term}) {? as @simple_intropattern} + specialize @ident with @bindings_list {? as @simple_intropattern} :name: specialize; _ This tactic works on local hypothesis :n:`@ident`. 
The @@ -1264,7 +1481,7 @@ Controlling the proof flow uninstantiated arguments are inferred by unification if possible or left quantified in the hypothesis otherwise. With the :n:`as` clause, the local hypothesis :n:`@ident` is left unchanged and instead, the modified hypothesis - is introduced as specified by the :token:`intro_pattern`. The name :n:`@ident` + is introduced as specified by the :token:`simple_intropattern`. The name :n:`@ident` can also refer to a global lemma or hypothesis. In this case, for compatibility reasons, the behavior of :tacn:`specialize` is close to that of :tacn:`generalize`: the instantiated statement becomes an additional premise of @@ -1477,11 +1694,11 @@ analysis on inductive or co-inductive objects (see :ref:`inductive-definitions`) This is a shortcut for :n:`destruct @term; ...; destruct @term`. - .. tacv:: destruct @term as @disj_conj_intro_pattern + .. tacv:: destruct @term as @or_and_intropattern_loc This behaves as :n:`destruct @term` but uses the names - in :token:`disj_conj_intro_pattern` to name the variables introduced in the - context. The :token:`disj_conj_intro_pattern` must have the + in :token:`or_and_intropattern_loc` to name the variables introduced in the + context. The :token:`or_and_intropattern_loc` must have the form :n:`[p11 ... p1n | ... | pm1 ... pmn ]` with ``m`` being the number of constructors of the type of :token:`term`. Each variable introduced by :tacn:`destruct` in the context of the ``i``-th goal @@ -1491,13 +1708,13 @@ analysis on inductive or co-inductive objects (see :ref:`inductive-definitions`) pattern (see :tacn:`intros`). This provides a concise notation for chaining destruction of a hypothesis. - .. tacv:: destruct @term eqn:@naming_intro_pattern + .. tacv:: destruct @term eqn:@naming_intropattern :name: destruct ... eqn: This behaves as :n:`destruct @term` but adds an equation between :token:`term` and the value that it takes in each of the possible cases. 
The name of the equation is specified - by :token:`naming_intro_pattern` (see :tacn:`intros`), + by :token:`naming_intropattern` (see :tacn:`intros`), in particular ``?`` can be used to let Coq generate a fresh name. .. tacv:: destruct @term with @bindings_list @@ -1525,8 +1742,8 @@ analysis on inductive or co-inductive objects (see :ref:`inductive-definitions`) clause is an occurrence clause whose syntax and behavior is described in :ref:`occurrences sets <occurrencessets>`. - .. tacv:: destruct @term {? with @bindings_list } {? as @disj_conj_intro_pattern } {? eqn:@naming_intro_pattern } {? using @term {? with @bindings_list } } {? in @goal_occurrences } - edestruct @term {? with @bindings_list } {? as @disj_conj_intro_pattern } {? eqn:@naming_intro_pattern } {? using @term {? with @bindings_list } } {? in @goal_occurrences } + .. tacv:: destruct @term {? with @bindings_list } {? as @or_and_intropattern_loc } {? eqn:@naming_intropattern } {? using @term {? with @bindings_list } } {? in @goal_occurrences } + edestruct @term {? with @bindings_list } {? as @or_and_intropattern_loc } {? eqn:@naming_intropattern } {? using @term {? with @bindings_list } } {? in @goal_occurrences } These are the general forms of :tacn:`destruct` and :tacn:`edestruct`. They combine the effects of the ``with``, ``as``, ``eqn:``, ``using``, @@ -1622,11 +1839,11 @@ analysis on inductive or co-inductive objects (see :ref:`inductive-definitions`) Use in this case the variant :tacn:`elim ... with` below. -.. tacv:: induction @term as @disj_conj_intro_pattern +.. tacv:: induction @term as @or_and_intropattern_loc This behaves as :tacn:`induction` but uses the names in - :n:`@disj_conj_intro_pattern` to name the variables introduced in the - context. The :n:`@disj_conj_intro_pattern` must typically be of the form + :n:`@or_and_intropattern_loc` to name the variables introduced in the + context. The :n:`@or_and_intropattern_loc` must typically be of the form :n:`[ p` :sub:`11` :n:`... 
p` :sub:`1n` :n:`| ... | p`:sub:`m1` :n:`... p`:sub:`mn` :n:`]` with :n:`m` being the number of constructors of the type of :n:`@term`. Each variable introduced by induction in the context of the i-th goal gets its @@ -1686,8 +1903,8 @@ analysis on inductive or co-inductive objects (see :ref:`inductive-definitions`) induction y in x |- *. Show 2. -.. tacv:: induction @term with @bindings_list as @disj_conj_intro_pattern using @term with @bindings_list in @goal_occurrences - einduction @term with @bindings_list as @disj_conj_intro_pattern using @term with @bindings_list in @goal_occurrences +.. tacv:: induction @term with @bindings_list as @or_and_intropattern_loc using @term with @bindings_list in @goal_occurrences + einduction @term with @bindings_list as @or_and_intropattern_loc using @term with @bindings_list in @goal_occurrences These are the most general forms of :tacn:`induction` and :tacn:`einduction`. It combines the effects of the with, as, using, and in clauses. @@ -1898,7 +2115,7 @@ and an explanation of the underlying technique. .. exn:: Not the right number of induction arguments. :undocumented: -.. tacv:: functional induction (@qualid {+ @term}) as @disj_conj_intro_pattern using @term with @bindings_list +.. tacv:: functional induction (@qualid {+ @term}) as @simple_intropattern using @term with @bindings_list Similarly to :tacn:`induction` and :tacn:`elim`, this allows giving explicitly the name of the introduced variables, the induction principle, and @@ -2053,18 +2270,18 @@ and an explanation of the underlying technique. .. exn:: goal does not satisfy the expected preconditions. :undocumented: - .. tacv:: injection @term {? with @bindings_list} as {+ @intro_pattern} - injection @num as {+ intro_pattern} - injection as {+ intro_pattern} - einjection @term {? with @bindings_list} as {+ intro_pattern} - einjection @num as {+ intro_pattern} - einjection as {+ intro_pattern} + .. tacv:: injection @term {? 
with @bindings_list} as {+ @simple_intropattern} + injection @num as {+ simple_intropattern} + injection as {+ simple_intropattern} + einjection @term {? with @bindings_list} as {+ simple_intropattern} + einjection @num as {+ simple_intropattern} + einjection as {+ simple_intropattern} - These variants apply :n:`intros {+ @intro_pattern}` after the call to + These variants apply :n:`intros {+ @simple_intropattern}` after the call to :tacn:`injection` or :tacn:`einjection` so that all equalities generated are moved in - the context of hypotheses. The number of :n:`@intro_pattern` must not exceed + the context of hypotheses. The number of :n:`@simple_intropattern` must not exceed the number of equalities newly generated. If it is smaller, fresh - names are automatically generated to adjust the list of :n:`@intro_pattern` + names are automatically generated to adjust the list of :n:`@simple_intropattern` to the number of new equalities. The original equality is erased if it corresponds to a hypothesis. @@ -2118,10 +2335,10 @@ and an explanation of the underlying technique. This behaves as :n:`inversion` and then erases :n:`@ident` from the context. -.. tacv:: inversion @ident as @intro_pattern +.. tacv:: inversion @ident as @or_and_intropattern_loc - This generally behaves as inversion but using names in :n:`@intro_pattern` - for naming hypotheses. The :n:`@intro_pattern` must have the form + This generally behaves as inversion but using names in :n:`@or_and_intropattern_loc` + for naming hypotheses. The :n:`@or_and_intropattern_loc` must have the form :n:`[p`:sub:`11` :n:`... p`:sub:`1n` :n:`| ... | p`:sub:`m1` :n:`... p`:sub:`mn` :n:`]` with `m` being the number of constructors of the type of :n:`@ident`. Be careful that the list must be of length `m` even if ``inversion`` discards @@ -2153,12 +2370,12 @@ and an explanation of the underlying technique. Goal forall l:list nat, contains0 (1 :: l) -> contains0 l. intros l H; inversion H as [ | l' p Hl' [Heqp Heql'] ]. 
-.. tacv:: inversion @num as @intro_pattern +.. tacv:: inversion @num as @or_and_intropattern_loc This allows naming the hypotheses introduced by :n:`inversion @num` in the context. -.. tacv:: inversion_clear @ident as @intro_pattern +.. tacv:: inversion_clear @ident as @or_and_intropattern_loc This allows naming the hypotheses introduced by ``inversion_clear`` in the context. Notice that hypothesis names can be provided as if ``inversion`` @@ -2170,7 +2387,7 @@ and an explanation of the underlying technique. Let :n:`{+ @ident}` be identifiers in the local context. This tactic behaves as generalizing :n:`{+ @ident}`, and then performing ``inversion``. -.. tacv:: inversion @ident as @intro_pattern in {+ @ident} +.. tacv:: inversion @ident as @or_and_intropattern_loc in {+ @ident} This allows naming the hypotheses introduced in the context by :n:`inversion @ident in {+ @ident}`. @@ -2180,7 +2397,7 @@ and an explanation of the underlying technique. Let :n:`{+ @ident}` be identifiers in the local context. This tactic behaves as generalizing :n:`{+ @ident}`, and then performing ``inversion_clear``. -.. tacv:: inversion_clear @ident as @intro_pattern in {+ @ident} +.. tacv:: inversion_clear @ident as @or_and_intropattern_loc in {+ @ident} This allows naming the hypotheses introduced in the context by :n:`inversion_clear @ident in {+ @ident}`. @@ -2192,7 +2409,7 @@ and an explanation of the underlying technique. ``inversion`` and then substitutes :n:`@ident` for the corresponding :n:`@@term` in the goal. -.. tacv:: dependent inversion @ident as @intro_pattern +.. tacv:: dependent inversion @ident as @or_and_intropattern_loc This allows naming the hypotheses introduced in the context by :n:`dependent inversion @ident`. @@ -2202,7 +2419,7 @@ and an explanation of the underlying technique. Like ``dependent inversion``, except that :n:`@ident` is cleared from the local context. -.. tacv:: dependent inversion_clear @ident as @intro_pattern +.. 
tacv:: dependent inversion_clear @ident as @or_and_intropattern_loc This allows naming the hypotheses introduced in the context by :n:`dependent inversion_clear @ident`. @@ -2216,7 +2433,7 @@ and an explanation of the underlying technique. then :n:`@term` must be of type :g:`I:forall (x:T), I x -> s'` where :g:`s'` is the type of the goal. -.. tacv:: dependent inversion @ident as @intro_pattern with @term +.. tacv:: dependent inversion @ident as @or_and_intropattern_loc with @term This allows naming the hypotheses introduced in the context by :n:`dependent inversion @ident with @term`. @@ -2226,7 +2443,7 @@ and an explanation of the underlying technique. Like :tacn:`dependent inversion ... with ...` with but clears :n:`@ident` from the local context. -.. tacv:: dependent inversion_clear @ident as @intro_pattern with @term +.. tacv:: dependent inversion_clear @ident as @or_and_intropattern_loc with @term This allows naming the hypotheses introduced in the context by :n:`dependent inversion_clear @ident with @term`. @@ -2237,7 +2454,7 @@ and an explanation of the underlying technique. It is a very primitive inversion tactic that derives all the necessary equalities but it does not simplify the constraints as ``inversion`` does. -.. tacv:: simple inversion @ident as @intro_pattern +.. tacv:: simple inversion @ident as @or_and_intropattern_loc This allows naming the hypotheses introduced in the context by ``simple inversion``. @@ -3171,7 +3388,7 @@ Automation :name: auto This tactic implements a Prolog-like resolution procedure to solve the - current goal. It first tries to solve the goal using the assumption + current goal. It first tries to solve the goal using the :tacn:`assumption` tactic, then it reduces the goal to an atomic one using intros and introduces the newly generated hypotheses as hints. 
Then it looks at the list of tactics associated to the head symbol of the goal and @@ -3586,15 +3803,15 @@ The general command to add a hint to some databases :n:`{+ @ident}` is the following. Beware, there is no operator precedence during parsing, one can check with :cmd:`Print HintDb` to verify the current cut expression: - .. productionlist:: `regexp` - e : ident hint or instance identifier - :| _ any hint - :| e\|e′ disjunction - :| e e′ sequence - :| e * Kleene star - :| emp empty - :| eps epsilon - :| ( e ) + .. productionlist:: regexp + e : `ident` hint or instance identifier + : _ any hint + : `e` | `e` disjunction + : `e` `e` sequence + : `e` * Kleene star + : emp empty + : eps epsilon + : ( `e` ) The `emp` regexp does not match any search path while `eps` matches the empty path. During proof search, the path of @@ -4299,15 +4516,15 @@ Automating .. _btauto_grammar: - .. productionlist:: `sentence` - t : x - :∣ true - :∣ false - :∣ orb t1 t2 - :∣ andb t1 t2 - :∣ xorb t1 t2 - :∣ negb t - :∣ if t1 then t2 else t3 + .. productionlist:: sentence + t : `x` + : true + : false + : orb `t` `t` + : andb `t` `t` + : xorb `t` `t` + : negb `t` + : if `t` then `t` else `t` Whenever the formula supplied is not a tautology, it also provides a counter-example. @@ -4343,7 +4560,7 @@ Automating distributivity, constant propagation) and comparing syntactically the results. -.. tacn:: ring_simplify {+ @term} +.. tacn:: ring_simplify {* @term} :name: ring_simplify This tactic applies the normalization procedure described above to @@ -4357,7 +4574,7 @@ the tactic and how to declare new ring structures. All declared field structures can be printed with the ``Print Rings`` command. .. 
tacn:: field - field_simplify {+ @term} + field_simplify {* @term} field_simplify_eq :name: field; field_simplify; field_simplify_eq diff --git a/doc/sphinx/refman-preamble.sty b/doc/sphinx/refman-preamble.sty index b4fc608e47..8f7b1bb1e8 100644 --- a/doc/sphinx/refman-preamble.sty +++ b/doc/sphinx/refman-preamble.sty @@ -56,27 +56,29 @@ \newcommand{\oddS}{\textsf{odd}_\textsf{S}} \newcommand{\ovl}[1]{\overline{#1}} \newcommand{\Pair}{\textsf{pair}} +\newcommand{\plus}{\mathsf{plus}} \newcommand{\Prod}{\textsf{prod}} \newcommand{\Prop}{\textsf{Prop}} \newcommand{\return}{\kw{return}} \newcommand{\Set}{\textsf{Set}} \newcommand{\si}{\textsf{if}} \newcommand{\sinon}{\textsf{else}} -\newcommand{\Sort}{\cal S} +\newcommand{\Sort}{\mathcal{S}} \newcommand{\Str}{\textsf{Stream}} \newcommand{\Struct}{\kw{Struct}} \newcommand{\subst}[3]{#1\{#2/#3\}} \newcommand{\tl}{\textsf{tl}} \newcommand{\tree}{\textsf{tree}} +\newcommand{\trii}{\triangleright_\iota} \newcommand{\true}{\textsf{true}} \newcommand{\Type}{\textsf{Type}} \newcommand{\unfold}{\textsf{unfold}} \newcommand{\WEV}[3]{\mbox{$#1[] \vdash #2 \lra #3$}} \newcommand{\WEVT}[3]{\mbox{$#1[] \vdash #2 \lra$}\\ \mbox{$ #3$}} -\newcommand{\WF}[2]{{\cal W\!F}(#1)[#2]} +\newcommand{\WF}[2]{{\mathcal{W\!F}}(#1)[#2]} \newcommand{\WFE}[1]{\WF{E}{#1}} -\newcommand{\WFT}[2]{#1[] \vdash {\cal W\!F}(#2)} -\newcommand{\WFTWOLINES}[2]{{\cal W\!F}\begin{array}{l}(#1)\\\mbox{}[{#2}]\end{array}} +\newcommand{\WFT}[2]{#1[] \vdash {\mathcal{W\!F}}(#2)} +\newcommand{\WFTWOLINES}[2]{{\mathcal{W\!F}}\begin{array}{l}(#1)\\\mbox{}[{#2}]\end{array}} \newcommand{\with}{\kw{with}} \newcommand{\WS}[3]{#1[] \vdash #2 <: #3} \newcommand{\WSE}[2]{\WS{E}{#1}{#2}} diff --git a/doc/sphinx/user-extensions/syntax-extensions.rst b/doc/sphinx/user-extensions/syntax-extensions.rst index 47afa5ba0c..ae66791b0c 100644 --- a/doc/sphinx/user-extensions/syntax-extensions.rst +++ b/doc/sphinx/user-extensions/syntax-extensions.rst @@ -859,41 +859,41 @@ notations 
are given below. The optional :production:`scope` is described in .. productionlist:: coq notation : [Local] Notation `string` := `term` [`modifiers`] [: `scope`]. - : | [Local] Infix `string` := `qualid` [`modifiers`] [: `scope`]. - : | [Local] Reserved Notation `string` [`modifiers`] . - : | Inductive `ind_body` [`decl_notation`] with … with `ind_body` [`decl_notation`]. - : | CoInductive `ind_body` [`decl_notation`] with … with `ind_body` [`decl_notation`]. - : | Fixpoint `fix_body` [`decl_notation`] with … with `fix_body` [`decl_notation`]. - : | CoFixpoint `cofix_body` [`decl_notation`] with … with `cofix_body` [`decl_notation`]. - : | [Local] Declare Custom Entry `ident`. + : [Local] Infix `string` := `qualid` [`modifiers`] [: `scope`]. + : [Local] Reserved Notation `string` [`modifiers`] . + : Inductive `ind_body` [`decl_notation`] with … with `ind_body` [`decl_notation`]. + : CoInductive `ind_body` [`decl_notation`] with … with `ind_body` [`decl_notation`]. + : Fixpoint `fix_body` [`decl_notation`] with … with `fix_body` [`decl_notation`]. + : CoFixpoint `cofix_body` [`decl_notation`] with … with `cofix_body` [`decl_notation`]. + : [Local] Declare Custom Entry `ident`. decl_notation : [where `string` := `term` [: `scope`] and … and `string` := `term` [: `scope`]]. 
modifiers : at level `num` : in custom `ident` : in custom `ident` at level `num` - : | `ident` , … , `ident` at level `num` [`binderinterp`] - : | `ident` , … , `ident` at next level [`binderinterp`] - : | `ident` `explicit_subentry` - : | left associativity - : | right associativity - : | no associativity - : | only parsing - : | only printing - : | format `string` + : `ident` , … , `ident` at level `num` [`binderinterp`] + : `ident` , … , `ident` at next level [`binderinterp`] + : `ident` `explicit_subentry` + : left associativity + : right associativity + : no associativity + : only parsing + : only printing + : format `string` explicit_subentry : ident - : | global - : | bigint - : | [strict] pattern [at level `num`] - : | binder - : | closed binder - : | constr [`binderinterp`] - : | constr at level `num` [`binderinterp`] - : | constr at next level [`binderinterp`] - : | custom [`binderinterp`] - : | custom at level `num` [`binderinterp`] - : | custom at next level [`binderinterp`] + : global + : bigint + : [strict] pattern [at level `num`] + : binder + : closed binder + : constr [`binderinterp`] + : constr at level `num` [`binderinterp`] + : constr at next level [`binderinterp`] + : custom [`binderinterp`] + : custom at level `num` [`binderinterp`] + : custom at next level [`binderinterp`] binderinterp : as ident - : | as pattern - : | as strict pattern + : as pattern + : as strict pattern .. note:: No typing of the denoted expression is performed at definition time. Type checking is done only at the time of use of the notation. @@ -1496,12 +1496,13 @@ Numeral notations function returns :g:`None`, or if the interpretation is registered for only non-negative integers, and the given numeral is negative. - .. exn:: @ident should go from Decimal.int to @type or (option @type). Instead of Decimal.int, the types Decimal.uint or Z could be used{? (require BinNums first)}. + + .. exn:: @ident should go from Decimal.int to @type or (option @type). 
Instead of Decimal.int, the types Decimal.uint or Z could be used (you may need to require BinNums or Decimal first). The parsing function given to the :cmd:`Numeral Notation` vernacular is not of the right type. - .. exn:: @ident should go from @type to Decimal.int or (option Decimal.int). Instead of Decimal.int, the types Decimal.uint or Z could be used{? (require BinNums first)}. + .. exn:: @ident should go from @type to Decimal.int or (option Decimal.int). Instead of Decimal.int, the types Decimal.uint or Z could be used (you may need to require BinNums or Decimal first). The printing function given to the :cmd:`Numeral Notation` vernacular is not of the right type. @@ -1692,13 +1693,13 @@ Tactic notations allow to customize the syntax of tactics. They have the followi tacn : Tactic Notation [`tactic_level`] [`prod_item` … `prod_item`] := `tactic`. prod_item : `string` | `tactic_argument_type`(`ident`) tactic_level : (at level `num`) - tactic_argument_type : ident | simple_intropattern | reference - : | hyp | hyp_list | ne_hyp_list - : | constr | uconstr | constr_list | ne_constr_list - : | integer | integer_list | ne_integer_list - : | int_or_var | int_or_var_list | ne_int_or_var_list - : | tactic | tactic0 | tactic1 | tactic2 | tactic3 - : | tactic4 | tactic5 + tactic_argument_type : `ident` | `simple_intropattern` | `reference` + : `hyp` | `hyp_list` | `ne_hyp_list` + : `constr` | `uconstr` | `constr_list` | `ne_constr_list` + : `integer` | `integer_list` | `ne_integer_list` + : `int_or_var` | `int_or_var_list` | `ne_int_or_var_list` + : `tactic` | `tactic0` | `tactic1` | `tactic2` | `tactic3` + : `tactic4` | `tactic5` .. cmd:: Tactic Notation {? (at level @level)} {+ @prod_item} := @tactic. 
diff --git a/doc/stdlib/index-list.html.template b/doc/stdlib/index-list.html.template index 51f94d7e5a..c33df52038 100644 --- a/doc/stdlib/index-list.html.template +++ b/doc/stdlib/index-list.html.template @@ -618,5 +618,6 @@ through the <tt>Require Import</tt> command.</p> theories/Compat/Coq87.v theories/Compat/Coq88.v theories/Compat/Coq89.v + theories/Compat/Coq810.v </dd> </dl> diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py index 827b7c13c1..067af954ad 100644 --- a/doc/tools/coqrst/coqdomain.py +++ b/doc/tools/coqrst/coqdomain.py @@ -1189,7 +1189,6 @@ def setup(app): app.connect('doctree-resolved', CoqtopBlocksTransform.merge_consecutive_coqtop_blocks) # Add extra styles - app.add_stylesheet("fonts.css") app.add_stylesheet("ansi.css") app.add_stylesheet("coqdoc.css") app.add_javascript("notations.js") |
