diff options
326 files changed, 4980 insertions, 2937 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 63d6ccc240..4a126c4e5a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -121,33 +121,48 @@ ########## Plugins ########## /plugins/btauto/ @coq/btauto-maintainers +/theories/btauto/ @coq/btauto-maintainers /plugins/cc/ @coq/cc-maintainers +/theories/cc/ @coq/cc-maintainers /plugins/derive/ @coq/derive-maintainers +/theories/derive/ @coq/derive-maintainers /plugins/extraction/ @coq/extraction-maintainers +/theories/extraction/ @coq/extraction-maintainers /plugins/firstorder/ @coq/firstorder-maintainers +/theories/firstorder/ @coq/firstorder-maintainers /plugins/funind/ @coq/funind-maintainers +/theories/funind/ @coq/funind-maintainers /plugins/ltac/ @coq/ltac-maintainers +/theories/ltac/ @coq/ltac-maintainers /plugins/micromega/ @coq/micromega-maintainers +/theories/micromega/ @coq/micromega-maintainers /test-suite/micromega/ @coq/micromega-maintainers /plugins/nsatz/ @coq/nsatz-maintainers +/theories/nsatz/ @coq/nsatz-maintainers /plugins/setoid_ring/ @coq/ring-maintainers +/theories/setoid_ring/ @coq/ring-maintainers /plugins/ssrmatching/ @coq/ssreflect-maintainers +/theories/ssrmatching/ @coq/ssreflect-maintainers + /plugins/ssr/ @coq/ssreflect-maintainers +/theories/ssr/ @coq/ssreflect-maintainers + /test-suite/ssr/ @coq/ssreflect-maintainers /plugins/syntax/ @coq/parsing-maintainers /plugins/rtauto/ @coq/rtauto-maintainers +/theories/rtauto/ @coq/rtauto-maintainers /user-contrib/Ltac2 @coq/ltac2-maintainers diff --git a/.gitignore b/.gitignore index b99d2a0d45..b665b2f86d 100644 --- a/.gitignore +++ b/.gitignore @@ -64,12 +64,9 @@ plugins/micromega/.micromega.ml.generated kernel/byterun/dllcoqrun.so coqdoc.sty coqdoc.css -time-of-build.log -time-of-build-pretty.log -time-of-build-before.log -time-of-build-after.log -time-of-build-pretty.log2 -time-of-build-pretty.log3 +time-of-build*.log +time-of-build*.log2 +time-of-build*.log3 .csdp.cache test-suite/.lia.cache test-suite/.nra.cache @@ -118,9 
+115,7 @@ doc/stdlib/index-list.html doc/tools/docgram/productionlistGrammar doc/tools/docgram/editedGrammar doc/tools/docgram/prodnGrammar -doc/tutorial/Tutorial.v.out -doc/RecTutorial/RecTutorial.html -doc/RecTutorial/RecTutorial.ps +doc/unreleased.rst # .mll files diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8fd5eb3972..68bb24ac77 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -18,7 +18,7 @@ stages: variables: # Format: $IMAGE-V$DATE [Cache is not used as of today but kept here # for reference] - CACHEKEY: "bionic_coq-V2019-12-08-V82" + CACHEKEY: "bionic_coq-V2019-03-01-V43" IMAGE: "$CI_REGISTRY_IMAGE:$CACHEKEY" # By default, jobs run in the base switch; override to select another switch OPAM_SWITCH: "base" @@ -258,7 +258,7 @@ build:base: variables: COQ_EXTRA_CONF: "-native-compiler yes -coqide opt" # coqdoc for stdlib, until we know how to build it from installed Coq - EXTRA_TARGET: "stdlib" + EXTRA_TARGET: "doc-stdlib" EXTRA_INSTALL: "install-doc-stdlib-html install-doc-printable" # no coqide for 32bit: libgtk installation problems diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8cff8f66b7..d9adaf5dc7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -17,8 +17,8 @@ well. - [Writing tutorials and blog posts](#writing-tutorials-and-blog-posts) - [Contributing to the wiki](#contributing-to-the-wiki) - [Creating and maintaining Coq packages](#creating-and-maintaining-coq-packages) - - [Distribution](#distribution) - - [Support](#support) + - [Distribution of Coq packages](#distribution-of-coq-packages) + - [Support for plugin and library authors](#support-for-plugin-and-library-authors) - [Standard libraries](#standard-libraries) - [Maintaining existing packages in coq-community](#maintaining-existing-packages-in-coq-community) - [Contributing to the editor support packages](#contributing-to-the-editor-support-packages) @@ -161,14 +161,14 @@ tools is great so that others can start building new things on top. 
Having an extensive and healthy package ecosystem will be key to the success of Coq. -#### Distribution #### +#### Distribution of Coq packages #### You can distribute your library or plugin through the [Coq package index][Coq-package-index]. Tools can be advertised on the [tools page][tools-website] of the Coq website, or the [tools page][tools-wiki] of the wiki. -#### Support #### +#### Support for plugin and library authors #### You can find advice and best practices about maintaining a Coq project on the [coq-community wiki][coq-community-wiki]. @@ -529,10 +529,20 @@ how you can anticipate the results of the CI before opening a PR. Such a failure can indicate either a bug in your branch, or a breaking change that you introduced voluntarily. All such breaking changes should be properly documented in the [user changelog][user-changelog]. -Furthermore, a backward-compatible fix should be found, and this fix -should be merged in the broken projects *before* your PR to the Coq -repository can be. You can use the same documentation as above to -learn about testing and fixing locally the broken libraries. +Furthermore, a backward-compatible fix should be found, properly +documented in the changelog when non-obvious, and this fix should be +merged in the broken projects *before* your PR to the Coq repository +can be. + +Note that once the breaking change is well understood, it should not +feel like it is your role to fix every project that is affected: as +long as reviewers have approved and are ready to integrate your +breaking change, you are entitled to (politely) request project +authors / maintainers to fix the breakage on their own, or help you +fix it. Obviously, you should leave enough time for this to happen +(you cannot expect a project maintainer to allocate time for this as +soon as you request it) and you should be ready to listen to more +feedback and reconsider the impact of your change. 
#### Understanding reviewers' feedback #### diff --git a/Makefile.build b/Makefile.build index 1b3d99c81d..9e0a402730 100644 --- a/Makefile.build +++ b/Makefile.build @@ -50,6 +50,12 @@ VALIDATE ?= # When non-empty, passed as extra arguments to coqtop/coqc: COQUSERFLAGS ?= +# Option for changing sorting of timing output file +TIMING_SORT_BY ?= auto +# Option for changing the fuzz parameter on the output file +TIMING_FUZZ ?= 0 +# Option for changing whether to use real or user time for timing tables +TIMING_REAL?= # Output file names for timed builds TIME_OF_BUILD_FILE ?= time-of-build.log TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log @@ -104,6 +110,19 @@ include Makefile.dev ## provides the 'printers' and 'revision' rules ########################################################################### # Timing targets ########################################################################### +ifeq (0,$(TIMING_REAL)) +TIMING_REAL_ARG := +TIMING_USER_ARG := --user +else +ifeq (1,$(TIMING_REAL)) +TIMING_REAL_ARG := --real +TIMING_USER_ARG := +else +TIMING_REAL_ARG := +TIMING_USER_ARG := +endif +endif + make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE) make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE) make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: @@ -111,21 +130,21 @@ make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: $(HIDE)($(MAKE) --no-print-directory $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE) $(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed print-pretty-timed:: - $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) + $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) print-pretty-timed-diff:: - $(HIDE)$(COQMAKE_BOTH_TIME_FILES) 
$(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) + $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) ifeq (,$(BEFORE)) print-pretty-single-time-diff:: - @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff BEFORE=path/to/file.v.before-timing AFTER=path/to/file.v.after-timing' + @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else ifeq (,$(AFTER)) print-pretty-single-time-diff:: - @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff BEFORE=path/to/file.v.before-timing AFTER=path/to/file.v.after-timing' + @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' $(HIDE)false else print-pretty-single-time-diff:: - $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) $(BEFORE) $(AFTER) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) + $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) endif endif pretty-timed: @@ -809,21 +828,20 @@ $(USERCONTRIBMLLIBDFILE).d: $(D_DEPEND_BEFORE_SRC) $(filter user-contrib/%, $(ML # NB: for make world, no need to mention explicitly the .cmxs of the plugins, # since they are all mentioned in at least one Declare ML Module in some .v -coqlib: theories plugins +coqlib: stdlib-vo contrib-vo ifdef QUICK - $(SHOW)'COQC -schedule-vio2vo $(NJOBS) theories/**.vio plugins/**.vio' - $(HIDE)$(BOOTCOQC) -schedule-vio2vo $(NJOBS) $(THEORIESVO) $(PLUGINSVO) + $(SHOW)'COQC -schedule-vio2vo $(NJOBS) theories/**.vio' + $(HIDE)$(BOOTCOQC) -schedule-vio2vo $(NJOBS) $(THEORIESVO) $(CONTRIBVO) endif 
-coqlib.timing.diff: theories.timing.diff plugins.timing.diff +coqlib.timing.diff: stdlib.timing.diff -theories: $(THEORIESVO) -plugins: $(PLUGINSVO) +stdlib-vo: $(THEORIESVO) +contrib-vo: $(CONTRIBVO) -theories.timing.diff: $(THEORIESVO:.$(VO)=.v.timing.diff) -plugins.timing.diff: $(PLUGINSVO:.$(VO)=.v.timing.diff) +stdlib.timing.diff: $(ALLVO:.$(VO)=.v.timing.diff) -.PHONY: coqlib theories plugins coqlib.timing.diff theories.timing.diff plugins.timing.diff +.PHONY: coqlib stdlib-vo contrib-vo coqlib.timing.diff stdlib.timing.diff # The .vo files in Init are built with the -noinit option @@ -859,7 +877,7 @@ endif $(HIDE)$(BOOTCOQC) $< -vio -noglob %.v.timing.diff: %.v.before-timing %.v.after-timing - $(SHOW)PYTHON TIMING-DIFF $< + $(SHOW)'PYTHON TIMING-DIFF $*.v.{before,after}-timing' $(HIDE)$(MAKE) --no-print-directory print-pretty-single-time-diff BEFORE=$*.v.before-timing AFTER=$*.v.after-timing TIME_OF_PRETTY_BUILD_FILE="$@" diff --git a/Makefile.dev b/Makefile.dev index b1e142333a..6e9b5356ab 100644 --- a/Makefile.dev +++ b/Makefile.dev @@ -140,17 +140,17 @@ noreal: unicode logic arith bool zarith qarith lists sets fsets \ ### 4) plugins ################ -OMEGAVO:=$(filter plugins/omega/%, $(PLUGINSVO)) -MICROMEGAVO:=$(filter plugins/micromega/%, $(PLUGINSVO)) -RINGVO:=$(filter plugins/setoid_ring/%, $(PLUGINSVO)) -NSATZVO:=$(filter plugins/nsatz/%, $(PLUGINSVO)) -FUNINDVO:=$(filter plugins/funind/%, $(PLUGINSVO)) -BTAUTOVO:=$(filter plugins/btauto/%, $(PLUGINSVO)) -RTAUTOVO:=$(filter plugins/rtauto/%, $(PLUGINSVO)) -EXTRACTIONVO:=$(filter plugins/extraction/%, $(PLUGINSVO)) +OMEGAVO:=$(filter theories/omega/%, $(THEORIESVO)) +MICROMEGAVO:=$(filter theories/micromega/%, $(THEORIESVO)) +RINGVO:=$(filter theories/setoid_ring/%, $(THEORIESVO)) +NSATZVO:=$(filter theories/nsatz/%, $(THEORIESVO)) +FUNINDVO:=$(filter theories/funind/%, $(THEORIESVO)) +BTAUTOVO:=$(filter theories/btauto/%, $(THEORIESVO)) +RTAUTOVO:=$(filter theories/rtauto/%, $(THEORIESVO)) 
+EXTRACTIONVO:=$(filter theories/extraction/%, $(THEORIESVO)) CCVO:= -DERIVEVO:=$(filter plugins/derive/%, $(PLUGINSVO)) -LTACVO:=$(filter plugins/ltac/%, $(PLUGINSVO)) +DERIVEVO:=$(filter theories/derive/%, $(THEORIESVO)) +LTACVO:=$(filter theories/ltac/%, $(THEORIESVO)) omega: $(OMEGAVO) $(OMEGACMO) micromega: $(MICROMEGAVO) $(MICROMEGACMO) $(CSDPCERT) diff --git a/Makefile.doc b/Makefile.doc index 50c4acb416..1249555cd7 100644 --- a/Makefile.doc +++ b/Makefile.doc @@ -56,9 +56,9 @@ DOCCOMMON:=doc/common/version.tex doc/common/title.tex doc/common/macros.tex ###################################################################### .PHONY: doc doc-html doc-pdf doc-ps -.PHONY: stdlib full-stdlib sphinx +.PHONY: doc-stdlib full-stdlib sphinx -doc: refman stdlib +doc: refman doc-stdlib SPHINX_DEPS ?= ifndef QUICK @@ -93,7 +93,7 @@ doc-pdf:\ doc-ps:\ doc/stdlib/Library.ps -stdlib: \ +doc-stdlib: \ doc/stdlib/html/index.html doc/stdlib/Library.ps doc/stdlib/Library.pdf full-stdlib: \ @@ -129,7 +129,7 @@ doc/unreleased.rst: $(wildcard doc/changelog/00-title.rst doc/changelog/*/*.rst) # Standard library ###################################################################### -DOCLIBS=-R theories Coq -R plugins Coq -Q user-contrib/Ltac2 Ltac2 +DOCLIBS=-R theories Coq -Q user-contrib/Ltac2 Ltac2 ### Standard library (browsable html format) @@ -247,7 +247,7 @@ PLUGIN_MLGS := $(wildcard plugins/*/*.mlg) OMITTED_PLUGIN_MLGS := plugins/ssr/ssrparser.mlg plugins/ssr/ssrvernac.mlg plugins/ssrmatching/g_ssrmatching.mlg DOC_MLGS := */*.mlg $(sort $(filter-out $(OMITTED_PLUGIN_MLGS), $(PLUGIN_MLGS))) DOC_EDIT_MLGS := doc/tools/docgram/*.edit_mlg -DOC_RSTS := doc/sphinx/*.rst doc/sphinx/*/*.rst +DOC_RSTS := doc/sphinx/*/*.rst doc/tools/docgram/fullGrammar: $(DOC_GRAM) $(DOC_MLGS) $(SHOW)'DOC_GRAM' @@ -256,7 +256,7 @@ doc/tools/docgram/fullGrammar: $(DOC_GRAM) $(DOC_MLGS) #todo: add a dependency of sphinx on updated_rsts when we're ready doc/tools/docgram/orderedGrammar 
doc/tools/docgram/updated_rsts: $(DOC_GRAM) $(DOC_EDIT_MLGS) $(SHOW)'DOC_GRAM_RSTS' - $(HIDE)$(DOC_GRAM) $(DOC_MLGS) $(DOC_RSTS) + $(HIDE)$(DOC_GRAM) -check-cmds $(DOC_MLGS) $(DOC_RSTS) doc/tools/docgram/updated_rsts: doc/tools/docgram/orderedGrammar diff --git a/Makefile.make b/Makefile.make index e63a578e37..e020ffc5be 100644 --- a/Makefile.make +++ b/Makefile.make @@ -275,7 +275,7 @@ depclean: find . $(FIND_SKIP_DIRS) '(' -name '*.d' ')' -exec rm -f {} + cacheclean: - find theories plugins test-suite -name '.*.aux' -exec rm -f {} + + find theories test-suite -name '.*.aux' -exec rm -f {} + cleanconfig: rm -f config/Makefile config/coq_config.ml dev/ocamldebug-coq config/Info-*.plist diff --git a/Makefile.vofiles b/Makefile.vofiles index fe7ca7c36f..04bc2cf105 100644 --- a/Makefile.vofiles +++ b/Makefile.vofiles @@ -13,8 +13,8 @@ endif ########################################################################### THEORIESVO := $(patsubst %.v,%.$(VO),$(shell find theories -type f -name "*.v")) -PLUGINSVO := $(patsubst %.v,%.$(VO),$(shell find plugins $(addprefix user-contrib/, $(USERCONTRIBDIRS)) -type f -name "*.v")) -ALLVO := $(THEORIESVO) $(PLUGINSVO) +CONTRIBVO := $(patsubst %.v,%.$(VO),$(shell find $(addprefix user-contrib/, $(USERCONTRIBDIRS)) -type f -name "*.v")) +ALLVO := $(THEORIESVO) $(CONTRIBVO) VFILES := $(ALLVO:.$(VO)=.v) ## More specific targets @@ -23,17 +23,15 @@ THEORIESLIGHTVO:= \ $(filter theories/Init/% theories/Logic/% theories/Unicode/% theories/Arith/%, $(THEORIESVO)) # convert a (stdlib) filename into a module name: -# remove .vo, replace theories and plugins by Coq, and replace slashes by dots -vo_to_mod = $(subst /,.,$(patsubst user-contrib/%,%,$(patsubst theories/%,Coq.%,$(patsubst plugins/%,Coq.%,$(1:.vo=))))) +# remove .vo, replace stdlib by Coq, and replace slashes by dots +vo_to_mod = $(subst /,.,$(patsubst user-contrib/%,%,$(patsubst theories/%,Coq.%,$(1:.vo=)))) ALLMODS:=$(call vo_to_mod,$(ALLVO:.$(VO)=.vo)) - # Converting a stdlib 
filename into native compiler filenames # Used for install targets -vo_to_cm = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%,N%, $(patsubst theories/%,NCoq_%,$(patsubst plugins/%,NCoq_%,$(vo:.$(VO)=.cm*)))))) - -vo_to_obj = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%,N%, $(patsubst theories/%,NCoq_%,$(patsubst plugins/%,NCoq_%,$(vo:.$(VO)=.o)))))) +vo_to_cm = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%,N%, $(patsubst theories/%,NCoq_%,$(vo:.$(VO)=.cm*))))) +vo_to_obj = $(foreach vo,$(1),$(dir $(vo)).coq-native/$(subst /,_,$(patsubst user-contrib/%,N%, $(patsubst theories/%,NCoq_%,$(vo:.$(VO)=.o))))) ifdef QUICK GLOBFILES:= diff --git a/clib/exninfo.ml b/clib/exninfo.ml index ee998c2f17..34a4555a9a 100644 --- a/clib/exninfo.ml +++ b/clib/exninfo.ml @@ -81,16 +81,6 @@ let iraise (e,i) = | Some bt -> Printexc.raise_with_backtrace e bt -let raise ?info e = match info with -| None -> - let () = Mutex.lock lock in - let id = Thread.id (Thread.self ()) in - let () = current := remove_assoc id !current in - let () = Mutex.unlock lock in - raise e -| Some i -> - iraise (e,i) - let find_and_remove () = let () = Mutex.lock lock in let id = Thread.id (Thread.self ()) in diff --git a/clib/exninfo.mli b/clib/exninfo.mli index 36cc44cf82..725cd82809 100644 --- a/clib/exninfo.mli +++ b/clib/exninfo.mli @@ -79,6 +79,3 @@ val capture : exn -> iexn val iraise : iexn -> 'a (** Raise the given enriched exception. *) - -val raise : ?info:info -> exn -> 'a -(** Raise the given exception with additional information. 
*) diff --git a/configure.ml b/configure.ml index 89d9ed9d2a..55d71f6c2e 100644 --- a/configure.ml +++ b/configure.ml @@ -923,7 +923,7 @@ let datadir,datadirsuffix = let (_,_,d,s) = select "DATADIR" in d,s (** * CC runtime flags *) -let cflags_dflt = "-Wall -Wno-unused -g -O2 -fexcess-precision=standard" +let cflags_dflt = "-Wall -Wno-unused -g -O2 -std=c99 -fasm" let cflags_sse2 = "-msse2 -mfpmath=sse" @@ -975,7 +975,7 @@ let config_runtime () = | Some flags -> string_split ',' flags | _ when use_custom -> [custom_flag] | _ when !prefs.local -> - ["-dllib";"-lcoqrun";"-dllpath";coqtop/"kernel/byterun"] + ["-dllib";"-lcoqrun";"-dllpath";("\"" ^ coqtop ^ "/kernel/byterun\"")] | _ -> let ld="CAML_LD_LIBRARY_PATH" in build_loadpath := sprintf "export %s:='%s/kernel/byterun':$(%s)" ld coqtop ld; diff --git a/coqpp/coqpp_main.ml b/coqpp/coqpp_main.ml index 72b7cb2f84..e723d4ea1b 100644 --- a/coqpp/coqpp_main.ml +++ b/coqpp/coqpp_main.ml @@ -266,7 +266,7 @@ let print_rule fmt r = let print_entry fmt e = let print_position_opt fmt pos = print_opt fmt print_position pos in let print_rules fmt rules = print_list fmt print_rule rules in - fprintf fmt "let () =@ @[Pcoq.grammar_extend@ %s@ None@ @[(%a, %a)@]@]@ in@ " + fprintf fmt "let () =@ @[Pcoq.grammar_extend@ %s@ @[(%a, %a)@]@]@ in@ " e.gentry_name print_position_opt e.gentry_pos print_rules e.gentry_rules let print_ast fmt ext = diff --git a/dev/ci/README-developers.md b/dev/ci/README-developers.md index 9ed7180807..6a740b9033 100644 --- a/dev/ci/README-developers.md +++ b/dev/ci/README-developers.md @@ -43,9 +43,10 @@ See also [`test-suite/README.md`](../../test-suite/README.md) for information ab ### Breaking changes When your PR breaks an external project we test in our CI, you must -prepare a patch (or ask someone to prepare a patch) to fix the -project. 
There is experimental support for an improved workflow, see -[the next section](#experimental-automatic-overlay-creation-and-building), below +prepare a patch (or ask someone—possibly the project author—to +prepare a patch) to fix the project. There is experimental support for +an improved workflow, see [the next +section](#experimental-automatic-overlay-creation-and-building), below are the steps to manually prepare a patch: 1. Fork the external project, create a new branch, push a commit adapting diff --git a/dev/ci/README-users.md b/dev/ci/README-users.md index 06b617d4c1..6649820f22 100644 --- a/dev/ci/README-users.md +++ b/dev/ci/README-users.md @@ -1,36 +1,40 @@ Information for external library / Coq plugin authors ----------------------------------------------------- -You are encouraged to consider submitting your development for addition to +You are encouraged to consider submitting your project for addition to Coq's CI. This means that: -- Any time that a proposed change is breaking your development, Coq developers - will send you patches to adapt it or, at the very least, will work with you - to see how to adapt it. +- Any time that a proposed change is breaking your project, Coq + developers and contributors will send you patches to adapt it or + will explain how to adapt it and work with you to ensure that you + manage to do it. On the condition that: -- At the time of the submission, your development works with Coq's +- At the time of the submission, your project works with Coq's `master` branch. -- Your development is publicly available in a git repository and we can easily +- Your project is publicly available in a git repository and we can easily send patches to you (e.g. through pull / merge requests). - You react in a timely manner to discuss / integrate those patches. 
+ When seeking your help for preparing such patches, we will accept + that it takes longer than when we are just requesting to integrate a + simple (and already fully prepared) patch. - You do not push, to the branches that we test, commits that haven't been first tested to compile with the corresponding branch(es) of Coq. - For that, we recommend setting a CI system for you development, see + For that, we recommend setting a CI system for your project, see [supported CI images for Coq](#supported-ci-images-for-coq) below. -- You maintain a reasonable build time for your development, or you provide +- You maintain a reasonable build time for your project, or you provide a "lite" target that we can use. In case you forget to comply with these last three conditions, we would reach -out to you and give you a 30-day grace period during which your development +out to you and give you a 30-day grace period during which your project would be moved into our "allow failure" category. At the end of the grace -period, in the absence of progress, the development would be removed from our +period, in the absence of progress, the project would be removed from our CI. ### Timely merging of overlays @@ -47,7 +51,7 @@ these kind of merges. ### OCaml and plugin-specific considerations -Developments that link against Coq's OCaml API [most of them are known +Projects that link against Coq's OCaml API [most of them are known as "plugins"] do have some special requirements: - Coq's OCaml API is not stable. We hope to improve this in the future
-### Add your development by submitting a pull request +### Add your project by submitting a pull request Add a new `ci-mydev.sh` script to [`dev/ci`](.); set the corresponding variables in [`ci-basic-overlay.sh`](ci-basic-overlay.sh); add the @@ -75,7 +79,7 @@ Have a look at [#7656](https://github.com/coq/coq/pull/7656/files) for an example. **Do not hesitate to submit an incomplete pull request if you need help to finish it.** -You may also be interested in having your development tested in our +You may also be interested in having your project tested in our performance benchmark. Currently this is done by providing an OPAM package in https://github.com/coq/opam-coq-archive and opening an issue at https://github.com/coq/coq-bench/issues. @@ -83,13 +87,13 @@ https://github.com/coq/coq-bench/issues. ### Recommended branching policy. It is sometimes the case that you will need to maintain a branch of -your development for particular Coq versions. This is in fact very -likely if your development includes a Coq ML plugin. +your project for particular Coq versions. This is in fact very likely +if your project includes a Coq ML plugin. -We thus recommend a branching convention that mirrors Coq's branching -policy. Then, you would have a `master` branch that follows Coq's -`master`, a `v8.8` branch that works with Coq's `v8.8` branch and so -on. +For such projects, we recommend a branching convention that mirrors +Coq's branching policy. Then, you would have a `master` branch that +follows Coq's `master`, a `v8.8` branch that works with Coq's `v8.8` +branch and so on. This convention will be supported by tools in the future to make some developer commands work more seamlessly. 
diff --git a/dev/ci/ci-coquelicot.sh b/dev/ci/ci-coquelicot.sh index 6cb8dad604..ffe92dcecf 100755 --- a/dev/ci/ci-coquelicot.sh +++ b/dev/ci/ci-coquelicot.sh @@ -7,4 +7,4 @@ install_ssreflect git_download coquelicot -( cd "${CI_BUILD_DIR}/coquelicot" && ./autogen.sh && ./configure && ./remake "-j${NJOBS}" ) +( cd "${CI_BUILD_DIR}/coquelicot" && autoreconf -i -s && ./configure && ./remake "-j${NJOBS}" ) diff --git a/dev/ci/docker/bionic_coq/Dockerfile b/dev/ci/docker/bionic_coq/Dockerfile index b8f9d99702..41392b4b8c 100644 --- a/dev/ci/docker/bionic_coq/Dockerfile +++ b/dev/ci/docker/bionic_coq/Dockerfile @@ -1,4 +1,4 @@ -# CACHEKEY: "bionic_coq-V2019-12-08-V82" +# CACHEKEY: "bionic_coq-V2019-03-01-V43" # ^^ Update when modifying this file. FROM ubuntu:bionic @@ -37,9 +37,9 @@ ENV COMPILER="4.05.0" # Common OPAM packages. # `num` does not have a version number as the right version to install varies # with the compiler version. -ENV BASE_OPAM="num ocamlfind.1.8.1 dune.2.0.0 ounit.2.0.8 odoc.1.4.2" \ +ENV BASE_OPAM="num ocamlfind.1.8.1 dune.2.0.1 ounit.2.0.8 odoc.1.4.2" \ CI_OPAM="menhir.20190626 ocamlgraph.1.8.8" \ - BASE_ONLY_OPAM="elpi.1.8.0" + BASE_ONLY_OPAM="elpi.1.10.2" # BASE switch; CI_OPAM contains Coq's CI dependencies. 
ENV COQIDE_OPAM="cairo2.0.6.1 lablgtk3-sourceview3.3.0.beta6" diff --git a/dev/ci/user-overlays/10204-rm-unsafe-type-of-coercion.sh b/dev/ci/user-overlays/10204-rm-unsafe-type-of-coercion.sh new file mode 100644 index 0000000000..87dad61dbc --- /dev/null +++ b/dev/ci/user-overlays/10204-rm-unsafe-type-of-coercion.sh @@ -0,0 +1,6 @@ +if [ "$CI_PULL_REQUEST" = "10204" ] || [ "$CI_BRANCH" = "rm-unsafe-type-of-coercion" ]; then + + paramcoq_CI_REF=fix-papp + paramcoq_CI_GITURL=https://github.com/maximedenes/paramcoq + +fi diff --git a/dev/ci/user-overlays/10832-herbelin-master+fix6082-7766-overriding-notation-format.sh b/dev/ci/user-overlays/10832-herbelin-master+fix6082-7766-overriding-notation-format.sh new file mode 100644 index 0000000000..c17fe4fcba --- /dev/null +++ b/dev/ci/user-overlays/10832-herbelin-master+fix6082-7766-overriding-notation-format.sh @@ -0,0 +1,9 @@ +if [ "$CI_PULL_REQUEST" = "10832" ] || [ "$CI_BRANCH" = "master+fix6082-7766-overriding-notation-format" ]; then + + equations_CI_REF=master+fix-interpretation-notation-format-pr10832 + equations_CI_GITURL=https://github.com/herbelin/Coq-Equations + + quickchick_CI_REF=master+fix-interpretation-notation-format-pr10832 + quickchick_CI_GITURL=https://github.com/herbelin/QuickChick + +fi diff --git a/dev/ci/user-overlays/11051-gares-elpi-1.8.sh b/dev/ci/user-overlays/11051-gares-elpi-1.8.sh deleted file mode 100644 index 7845654375..0000000000 --- a/dev/ci/user-overlays/11051-gares-elpi-1.8.sh +++ /dev/null @@ -1,6 +0,0 @@ -if [ "$CI_PULL_REQUEST" = "11051" ] || [ "$CI_BRANCH" = "elpi-1.8" ]; then - - elpi_CI_REF="coq-master+v1.2" - elpi_CI_GITURL=https://github.com/LPCIC/coq-elpi - -fi diff --git a/dev/ci/user-overlays/11708-gares-elpi-1.10.sh b/dev/ci/user-overlays/11708-gares-elpi-1.10.sh new file mode 100644 index 0000000000..121190e5f6 --- /dev/null +++ b/dev/ci/user-overlays/11708-gares-elpi-1.10.sh @@ -0,0 +1,6 @@ +if [ "$CI_PULL_REQUEST" = "11708" ] || [ "$CI_BRANCH" = " 
elpi-1.10+coq-elpi-1.3" ]; then + + elpi_CI_REF="coq-master+coq-elpi-1.3" + elpi_CI_GITURL=https://github.com/LPCIC/coq-elpi + +fi diff --git a/dev/doc/INSTALL.make.md b/dev/doc/INSTALL.make.md index 3db5d0b14f..f81630c55d 100644 --- a/dev/doc/INSTALL.make.md +++ b/dev/doc/INSTALL.make.md @@ -102,6 +102,14 @@ Detailed Installation Procedure. it is recommended to compile in parallel, via make -jN where N is your number of cores. + If you wish to create timing logs for the standard library, you can + pass `TIMING=1` (for per-line timing files) or `TIMED=1` (for + per-file timing on stdout). Further variables and targets are + available for more detailed timing analysis; see the section of the + reference manual on `coq_makefile`. If there is any timing target + or variable supported by `coq_makefile`-made Makefiles which is not + supported by Coq's own Makefile, please report that as a bug. + 5. You can now install the Coq system. Executables, libraries, and manual pages are copied in some standard places of your system, defined at configuration time (step 3). Just do diff --git a/dev/doc/build-system.dune.md b/dev/doc/build-system.dune.md index cd35064b18..777eec97c6 100644 --- a/dev/doc/build-system.dune.md +++ b/dev/doc/build-system.dune.md @@ -46,21 +46,27 @@ of build threads `(jobs N)` and display options `(display _mode_)`. ## Running binaries [coqtop / coqide] -There are two special targets `states` and `quickide` that will -generate "shims" for running `coqtop` and `coqide` in a fast build. In -order to use them, do: +Running `coqtop` directly with `dune exec -- coqtop` won't in general +work well unless you are using `dune exec -- coqtop -noinit`. The +`coqtop` binary doesn't depend itself on Coq's prelude, so plugins / +vo files may go stale if you rebuild only `coqtop`. + +Instead, you should use the provided "shims" for running `coqtop` and +`coqide` in a fast build. 
In order to use them, do: ``` $ make -f Makefile.dune voboot # Only once per session $ dune exec -- dev/shim/coqtop-prelude ``` -or `quickide` / `dev/shim/coqide-prelude` for CoqIDE. These targets -enjoy quick incremental compilation thanks to `-opaque` so they tend -to be very fast while developing. +or `quickide` / `dev/shim/coqide-prelude` for CoqIDE, etc.... See +`dev/shim/dune` for a complete list of targets. These targets enjoy +quick incremental compilation thanks to `-opaque` so they tend to be +very fast while developing. Note that for a fast developer build of ML files, the `check` target -will be faster. +is faster, as it doesn't link the binaries and uses the non-optimizing +compiler. ## Targets @@ -214,3 +220,12 @@ useful to Coq, some examples are: - Cross-compilation. - Automatic Generation of OPAM files. - Multi-directory libraries. + +## FAQ + +- I get "Error: Dynlink error: Interface mismatch": + + You are likely running a partial build which doesn't include + implicitly loaded plugins / vo files. See the "Running binaries + [coqtop / coqide]" section above as to how to correctly call Coq's + binaries. diff --git a/dev/doc/changes.md b/dev/doc/changes.md index cb6e695865..d5938713d6 100644 --- a/dev/doc/changes.md +++ b/dev/doc/changes.md @@ -2,6 +2,12 @@ ### ML API +Types `precedence`, `parenRelation`, `tolerability` in +`notgram_ops.ml` have been reworked. See `entry_level` and +`entry_relative_level` in `constrexpr.ml`. + +### ML API + Exception handling: - Coq's custom `Backtrace` module has been removed in favor of OCaml's @@ -9,6 +15,12 @@ Exception handling: `Exninfo.capture` and `iraise` when re-raising inside an exception handler. +- Registration of exception printers now follows more closely OCaml's + API, thus: + + + printers are of type `exn -> Pp.t option` [`None` == not handled] + + it is forbidden for exception printers to raise. 
+ Printers: - Functions such as Printer.pr_lconstr_goal_style_env have been diff --git a/dev/tools/merge-pr.sh b/dev/tools/merge-pr.sh index a888998ebf..ce64aebdc7 100755 --- a/dev/tools/merge-pr.sh +++ b/dev/tools/merge-pr.sh @@ -22,7 +22,6 @@ fi RED="\033[31m" RESET="\033[0m" GREEN="\033[32m" -BLUE="\033[34m" YELLOW="\033[33m" info() { echo -e "${GREEN}info:${RESET} $1 ${RESET}" @@ -74,17 +73,17 @@ fi PRDATA=$(curl -s "$API/pulls/$PR") TITLE=$(echo "$PRDATA" | jq -r '.title') -info "title for PR $PR is ${BLUE}$TITLE" +info "title for PR $PR is $TITLE" BASE_BRANCH=$(echo "$PRDATA" | jq -r '.base.label') -info "PR $PR targets branch ${BLUE}$BASE_BRANCH" +info "PR $PR targets branch $BASE_BRANCH" CURRENT_LOCAL_BRANCH=$(git rev-parse --abbrev-ref HEAD) -info "you are merging in ${BLUE}$CURRENT_LOCAL_BRANCH" +info "you are merging in $CURRENT_LOCAL_BRANCH" REMOTE=$(git config --get "branch.$CURRENT_LOCAL_BRANCH.remote") if [ -z "$REMOTE" ]; then - error "branch ${BLUE}$CURRENT_LOCAL_BRANCH${RESET} has not associated remote" + error "branch $CURRENT_LOCAL_BRANCH has not associated remote" error "don't know where to fetch the PR from" error "please run: git branch --set-upstream-to=THE_REMOTE/$CURRENT_LOCAL_BRANCH" exit 1 @@ -96,12 +95,12 @@ if [ "$REMOTE_URL" != "${OFFICIAL_REMOTE_GIT_URL}" ] && \ [ "$REMOTE_URL" != "https://${OFFICIAL_REMOTE_HTTPS_URL}.git" ] && \ [[ "$REMOTE_URL" != "https://"*"@${OFFICIAL_REMOTE_HTTPS_URL}" ]] && \ [[ "$REMOTE_URL" != "https://"*"@${OFFICIAL_REMOTE_HTTPS_URL}.git" ]] ; then - error "remote ${BLUE}$REMOTE${RESET} does not point to the official Coq repo" - error "that is ${BLUE}$OFFICIAL_REMOTE_GIT_URL" - error "it points to ${BLUE}$REMOTE_URL${RESET} instead" + error "remote $REMOTE does not point to the official Coq repo" + error "that is $OFFICIAL_REMOTE_GIT_URL" + error "it points to $REMOTE_URL instead" ask_confirmation fi -info "remote for $CURRENT_LOCAL_BRANCH is ${BLUE}$REMOTE" +info "remote for $CURRENT_LOCAL_BRANCH is 
$REMOTE" info "fetching from $REMOTE the PR" git remote update "$REMOTE" @@ -112,12 +111,12 @@ if ! git ls-remote "$REMOTE" | grep pull >/dev/null; then fi git fetch "$REMOTE" "refs/pull/$PR/head" COMMIT=$(git rev-parse FETCH_HEAD) -info "commit for PR $PR is ${BLUE}$COMMIT" +info "commit for PR $PR is $COMMIT" # Sanity check: merge to a different branch if [ "$BASE_BRANCH" != "coq:$CURRENT_LOCAL_BRANCH" ]; then - error "PR requests merge in ${BLUE}$BASE_BRANCH${RESET} but you are merging in ${BLUE}$CURRENT_LOCAL_BRANCH" + error "PR requests merge in $BASE_BRANCH but you are merging in $CURRENT_LOCAL_BRANCH" ask_confirmation fi; @@ -132,7 +131,8 @@ if [ "$LOCAL_BRANCH_COMMIT" != "$UPSTREAM_COMMIT" ]; then if git merge-base --is-ancestor -- "$UPSTREAM_COMMIT" "$LOCAL_BRANCH_COMMIT"; then warning "Your branch is ahead of ${REMOTE}." - warning "You should see this warning only if you've just merged another PR and did not push yet." + warning "On master, GitHub's branch protection rule prevents merging several PRs at once." + warning "You should run [git push ${REMOTE}] between each call to the merge script." ask_confirmation else error "Local branch is not up-to-date with ${REMOTE}." 
@@ -165,7 +165,7 @@ fi STATUS=$(curl -s "$API/commits/$COMMIT/status") if [ "$(echo "$STATUS" | jq -r '.state')" != "success" ]; then - error "CI unsuccessful on ${BLUE}$(echo "$STATUS" | + error "CI unsuccessful on $(echo "$STATUS" | jq -r -c '.statuses|map(select(.state != "success"))|map(.context)')" ask_confirmation fi; @@ -174,7 +174,7 @@ fi; NEEDS_LABELS=$(echo "$PRDATA" | jq -rc '.labels | map(select(.name | match("needs:"))) | map(.name)') if [ "$NEEDS_LABELS" != "[]" ]; then - error "needs:something labels still present: ${BLUE}$NEEDS_LABELS" + error "needs:something labels still present: $NEEDS_LABELS" ask_confirmation fi diff --git a/dev/top_printers.dbg b/dev/top_printers.dbg index 82f2e79549..da224aa5ab 100644 --- a/dev/top_printers.dbg +++ b/dev/top_printers.dbg @@ -74,6 +74,7 @@ install_printer Top_printers.ppuniverse_context_future install_printer Top_printers.ppuniverses install_printer Top_printers.ppnamedcontextval install_printer Top_printers.ppenv +install_printer Top_printers.ppglobenv install_printer Top_printers.pptac install_printer Top_printers.ppobj install_printer Top_printers.pploc diff --git a/dev/top_printers.ml b/dev/top_printers.ml index f640a33773..e8129938a1 100644 --- a/dev/top_printers.ml +++ b/dev/top_printers.ml @@ -247,6 +247,8 @@ let ppenv e = pp (str "[" ++ pr_named_context_of e Evd.empty ++ str "]" ++ spc() ++ str "[" ++ pr_rel_context e Evd.empty (rel_context e) ++ str "]") +let ppglobenv e = ppenv (GlobEnv.env e) + let ppenvwithcst e = pp (str "[" ++ pr_named_context_of e Evd.empty ++ str "]" ++ spc() ++ str "[" ++ pr_rel_context e Evd.empty (rel_context e) ++ str "]" ++ spc() ++ diff --git a/dev/top_printers.mli b/dev/top_printers.mli index 133326523b..ac9b63f60a 100644 --- a/dev/top_printers.mli +++ b/dev/top_printers.mli @@ -150,6 +150,7 @@ val ppuniverses : UGraph.t -> unit val ppnamedcontextval : Environ.named_context_val -> unit val ppenv : Environ.env -> unit +val ppglobenv : GlobEnv.t -> unit val ppenvwithcst : 
Environ.env -> unit val pptac : Ltac_plugin.Tacexpr.glob_tactic_expr -> unit diff --git a/doc/README.md b/doc/README.md index ef3ccc2105..7fa6f5cf3d 100644 --- a/doc/README.md +++ b/doc/README.md @@ -96,7 +96,7 @@ Alternatively, you can use some specific targets: - `make refman-{html,pdf}` to produce only one format of the reference manual -- `make stdlib` +- `make doc-stdlib` to produce all formats of the Coq standard library diff --git a/doc/changelog/02-specification-language/11261-master+implicit-type-used-printing.rst b/doc/changelog/02-specification-language/11261-master+implicit-type-used-printing.rst new file mode 100644 index 0000000000..51818c666b --- /dev/null +++ b/doc/changelog/02-specification-language/11261-master+implicit-type-used-printing.rst @@ -0,0 +1,5 @@ +- **Added:** + :cmd:`Implicit Types` are now taken into account for printing. To inhibit it, + unset the :flag:`Printing Use Implicit Types` flag + (`#11261 <https://github.com/coq/coq/pull/11261>`_, + by Hugo Herbelin, granting `#10366 <https://github.com/coq/coq/pull/10366>`_). diff --git a/doc/changelog/02-specification-language/11600-uniform-syntax.rst b/doc/changelog/02-specification-language/11600-uniform-syntax.rst new file mode 100644 index 0000000000..3fa3f80301 --- /dev/null +++ b/doc/changelog/02-specification-language/11600-uniform-syntax.rst @@ -0,0 +1,4 @@ +- **Added:** + New syntax :g:`Inductive Acc A R | x : Prop := ...` to specify which + parameters of an inductive are uniform. + (`#11600 <https://github.com/coq/coq/pull/11600>`_, by Gaëtan Gilbert). 
diff --git a/doc/changelog/03-notations/10832-master+fix6082-7766-overriding-notation-format.rst b/doc/changelog/03-notations/10832-master+fix6082-7766-overriding-notation-format.rst new file mode 100644 index 0000000000..5393fb3d8c --- /dev/null +++ b/doc/changelog/03-notations/10832-master+fix6082-7766-overriding-notation-format.rst @@ -0,0 +1 @@ +- Different interpretations in different scopes of the same notation string can now be associated to different printing formats; this fixes bug #6092 and #7766 (`#10832 <https://github.com/coq/coq/pull/10832>`_, by Hugo Herbelin). diff --git a/doc/changelog/03-notations/11120-master+refactoring-application-printing.rst b/doc/changelog/03-notations/11120-master+refactoring-application-printing.rst new file mode 100644 index 0000000000..d95f554766 --- /dev/null +++ b/doc/changelog/03-notations/11120-master+refactoring-application-printing.rst @@ -0,0 +1,17 @@ +- **Fixed:** Parsing and printing consistently handle inheritance of implicit + arguments in notations. With the exception of notations of + the form :n:`Notation @string := @@qualid` and :n:`Notation @ident := @@qualid` which + inhibit implicit arguments, all notations binding a partially + applied constant, as e.g. in :n:`Notation @string := (@qualid {+ @arg })`, + or :n:`Notation @string := (@@qualid {+ @arg })`, or + :n:`Notation @ident := (@qualid {+ @arg })`, or :n:`Notation @ident + := (@@qualid {+ @arg })`, inherit the remaining implicit arguments + (`#11120 <https://github.com/coq/coq/pull/11120>`_, by Hugo + Herbelin, fixing `#4690 <https://github.com/coq/coq/pull/4690>`_ and + `#11091 <https://github.com/coq/coq/pull/11091>`_). + +- **Changed:** Interpretation scopes are now always inherited in + notations binding a partially applied constant, including for + notations binding an expression of the form :n:`@@qualid`. The latter was + not the case beforehand + (part of `#11120 <https://github.com/coq/coq/pull/11120>`_). 
diff --git a/doc/changelog/03-notations/11530-master+fix11331-custom-entries-precedence.rst b/doc/changelog/03-notations/11530-master+fix11331-custom-entries-precedence.rst new file mode 100644 index 0000000000..b105928b22 --- /dev/null +++ b/doc/changelog/03-notations/11530-master+fix11331-custom-entries-precedence.rst @@ -0,0 +1,8 @@ +- **Fixed:** + Bugs in dealing with precedences of notations in custom entries + (`#11530 <https://github.com/coq/coq/pull/11530>`_, + by Hugo Herbelin, fixing in particular + `#9517 <https://github.com/coq/coq/pull/9517>`_, + `#9519 <https://github.com/coq/coq/pull/9519>`_, + `#9521 <https://github.com/coq/coq/pull/9521>`_, + `#11331 <https://github.com/coq/coq/pull/11331>`_). diff --git a/doc/changelog/03-notations/11590-master+fix9741-only-printing-does-not-reserve-keyword.rst b/doc/changelog/03-notations/11590-master+fix9741-only-printing-does-not-reserve-keyword.rst new file mode 100644 index 0000000000..1d94cbf21b --- /dev/null +++ b/doc/changelog/03-notations/11590-master+fix9741-only-printing-does-not-reserve-keyword.rst @@ -0,0 +1,4 @@ +- **Fixed:** + Notations in onlyprinting mode do not uselessly reserve parsing keywords + (`#11590 <https://github.com/coq/coq/pull/11590>`_, + by Hugo Herbelin, fixes `#9741 <https://github.com/coq/coq/pull/9741>`_). diff --git a/doc/changelog/03-notations/11650-parensNew.rst b/doc/changelog/03-notations/11650-parensNew.rst new file mode 100644 index 0000000000..5e2da594c6 --- /dev/null +++ b/doc/changelog/03-notations/11650-parensNew.rst @@ -0,0 +1,4 @@ +- **Added:** + added option Set Printing Parentheses to print parentheses even when implied by associativity or precedence. + (`#11650 <https://github.com/coq/coq/pull/11650>`_, + by Hugo Herbelin and Abhishek Anand). 
diff --git a/doc/changelog/07-commands-and-options/11617-toplevel+boot.rst b/doc/changelog/07-commands-and-options/11617-toplevel+boot.rst new file mode 100644 index 0000000000..49dd0ee2d8 --- /dev/null +++ b/doc/changelog/07-commands-and-options/11617-toplevel+boot.rst @@ -0,0 +1,5 @@ +- **Added:** + New ``coqc`` / ``coqtop`` option ``-boot`` that will not bind the + `Coq` library prefix by default + (`#11617 <https://github.com/coq/coq/pull/11617>`_, + by Emilio Jesus Gallego Arias). diff --git a/doc/changelog/07-commands-and-options/11663-remove-polymorphic-unqualified.rst b/doc/changelog/07-commands-and-options/11663-remove-polymorphic-unqualified.rst new file mode 100644 index 0000000000..1f8dcd3992 --- /dev/null +++ b/doc/changelog/07-commands-and-options/11663-remove-polymorphic-unqualified.rst @@ -0,0 +1,5 @@ +- **Removed:** + Unqualified ``polymorphic``, ``monomorphic``, ``template``, + ``notemplate`` attributes (they were deprecated since Coq 8.10). + Use them as sub-attributes of the ``universes`` attribute (`#11663 + <https://github.com/coq/coq/pull/11663>`_, by Théo Zimmermann). diff --git a/doc/changelog/08-tools/11302-better-timing-scripts-options.rst b/doc/changelog/08-tools/11302-better-timing-scripts-options.rst new file mode 100644 index 0000000000..3b20bbf75d --- /dev/null +++ b/doc/changelog/08-tools/11302-better-timing-scripts-options.rst @@ -0,0 +1,35 @@ +- **Added:** + The ``make-both-single-timing-files.py`` script now accepts a + ``--fuzz=N`` parameter on the command line which determines how many + characters two lines may be offset in the "before" and "after" + timing logs while still being considered the same line. When + invoking this script via the ``print-pretty-single-time-diff`` + target in a ``Makefile`` made by ``coq_makefile``, you can set this + argument by passing ``TIMING_FUZZ=N`` to ``make`` (`#11302 + <https://github.com/coq/coq/pull/11302>`_, by Jason Gross). 
+ +- **Added:** + The ``make-one-time-file.py`` and ``make-both-time-files.py`` + scripts now accept a ``--real`` parameter on the command line to + print real times rather than user times in the tables. The + ``make-both-single-timing-files.py`` script accepts a ``--user`` + parameter to use user times. When invoking these scripts via the + ``print-pretty-timed`` or ``print-pretty-timed-diff`` or + ``print-pretty-single-time-diff`` targets in a ``Makefile`` made by + ``coq_makefile``, you can set this argument by passing + ``TIMING_REAL=1`` (to pass ``--real``) or ``TIMING_REAL=0`` (to pass + ``--user``) to ``make`` (`#11302 + <https://github.com/coq/coq/pull/11302>`_, by Jason Gross). + +- **Added:** + Coq's build system now supports both ``TIMING_FUZZ``, + ``TIMING_SORT_BY``, and ``TIMING_REAL`` just like a ``Makefile`` + made by ``coq_makefile`` (`#11302 + <https://github.com/coq/coq/pull/11302>`_, by Jason Gross). + +- **Fixed:** + The various timing targets for Coq's standard library now correctly + display and label the "before" and "after" columns, rather than + mixing them up (`#11302 <https://github.com/coq/coq/pull/11302>`_ + fixes `#11301 <https://github.com/coq/coq/issues/11301>`_, by Jason + Gross). diff --git a/doc/changelog/08-tools/11523-coqdep+refactor2.rst b/doc/changelog/08-tools/11523-coqdep+refactor2.rst index 90c23d8b76..32a4750b73 100644 --- a/doc/changelog/08-tools/11523-coqdep+refactor2.rst +++ b/doc/changelog/08-tools/11523-coqdep+refactor2.rst @@ -1,7 +1,10 @@ - **Changed:** - Internal options and behavior of ``coqdep`` have changed, in particular - options ``-w``, ``-D``, ``-mldep``, and ``-dumpbox`` have been removed, - and ``-boot`` will not load any path by default, ``-R/-Q`` should be - used instead - (`#11523 <https://github.com/coq/coq/pull/11523>`_, + Internal options and behavior of ``coqdep`` have changed. ``coqdep`` + no longer works as a replacement for ``ocamldep``, thus ``.ml`` + files are not supported as input. 
Also, several deprecated options + have been removed: ``-w``, ``-D``, ``-mldep``, ``-prefix``, + ``-slash``, and ``-dumpbox``. Passing ``-boot`` to ``coqdep`` will + not load any path by default now, ``-R/-Q`` should be used instead. + (`#11523 <https://github.com/coq/coq/pull/11523>`_ and + `#11589 <https://github.com/coq/coq/pull/11589>`_, by Emilio Jesus Gallego Arias). diff --git a/doc/changelog/09-coqide/10008-snyke7+escape_spaces.rst b/doc/changelog/09-coqide/10008-snyke7+escape_spaces.rst new file mode 100644 index 0000000000..99b1592fb3 --- /dev/null +++ b/doc/changelog/09-coqide/10008-snyke7+escape_spaces.rst @@ -0,0 +1,4 @@ +- **Fixed:** + Fix file paths containing spaces when compiling + (`#10008 <https://github.com/coq/coq/pull/10008>`_, + by snyke7, fixing `#11595 <https://github.com/coq/coq/pull/11595>`_). diff --git a/doc/changelog/10-standard-library/11686-fix-int-notations.rst b/doc/changelog/10-standard-library/11686-fix-int-notations.rst new file mode 100644 index 0000000000..cc820c5a25 --- /dev/null +++ b/doc/changelog/10-standard-library/11686-fix-int-notations.rst @@ -0,0 +1,6 @@ +- **Changed:** + Notations :n:`[|@term|]` and :n:`[||@term||]` for morphisms from 63-bit + integers to :g:`Z` and :g:`zn2z int` have been removed in favor of + :n:`φ(@term)` and :n:`Φ(@term)` respectively. These notations were + breaking Ltac parsing. (`#11686 <https://github.com/coq/coq/pull/11686>`_, + by Maxime Dénès). 
diff --git a/doc/changelog/12-misc/11329-master+fix11114-extraction-anomaly-implicit-record.rst b/doc/changelog/12-misc/11329-master+fix11114-extraction-anomaly-implicit-record.rst new file mode 100644 index 0000000000..0a686dd87d --- /dev/null +++ b/doc/changelog/12-misc/11329-master+fix11114-extraction-anomaly-implicit-record.rst @@ -0,0 +1,4 @@ +- **Fixed:** + :cmd:`Extraction Implicit` on the constructor of a record was leading to an anomaly + (`#11329 <https://github.com/coq/coq/pull/11329>`_, + by Hugo Herbelin, fixes `#11114 <https://github.com/coq/coq/pull/11114>`_). diff --git a/doc/sphinx/README.rst b/doc/sphinx/README.rst index a34b2d5195..89b4bda71a 100644 --- a/doc/sphinx/README.rst +++ b/doc/sphinx/README.rst @@ -34,14 +34,14 @@ Names (link targets) are auto-generated for most simple objects, though they can Most objects should have a body (i.e. a block of indented text following the signature, called “contents” in Sphinx terms). Undocumented objects should have the ``:undocumented:`` flag instead, as shown above. When multiple objects have a single description, they can be grouped into a single object, like this (semicolons can be used to separate the names of the objects; names starting with ``_`` will be omitted from the indexes):: - .. cmdv:: Lemma @ident {? @binders} : @type - Remark @ident {? @binders} : @type - Fact @ident {? @binders} : @type - Corollary @ident {? @binders} : @type - Proposition @ident {? @binders} : @type + .. cmdv:: Lemma @ident {* @binder } : @type + Remark @ident {* @binder } : @type + Fact @ident {* @binder } : @type + Corollary @ident {* @binder } : @type + Proposition @ident {* @binder } : @type :name: Lemma; Remark; Fact; Corollary; Proposition - These commands are all synonyms of :n:`Theorem @ident {? @binders } : type`. + These commands are all synonyms of :n:`Theorem @ident {* @binder } : type`. 
Notations --------- @@ -89,10 +89,15 @@ Objects Here is the list of all objects of the Coq domain (The symbol :black_nib: indicates an object whose signature can be written using the notations DSL): +``.. attr::`` :black_nib: An attribute. + Example:: + + .. attr:: local + ``.. cmd::`` :black_nib: A Coq command. Example:: - .. cmd:: Infix "@symbol" := @term ({+, @modifier}). + .. cmd:: Infix @string := @term1_extended {? ( {+, @syntax_modifier } ) } {? : @ident } This command is equivalent to :n:`…`. diff --git a/doc/sphinx/README.template.rst b/doc/sphinx/README.template.rst index 51d5174567..c5e0007e78 100644 --- a/doc/sphinx/README.template.rst +++ b/doc/sphinx/README.template.rst @@ -34,14 +34,14 @@ Names (link targets) are auto-generated for most simple objects, though they can Most objects should have a body (i.e. a block of indented text following the signature, called “contents” in Sphinx terms). Undocumented objects should have the ``:undocumented:`` flag instead, as shown above. When multiple objects have a single description, they can be grouped into a single object, like this (semicolons can be used to separate the names of the objects; names starting with ``_`` will be omitted from the indexes):: - .. cmdv:: Lemma @ident {? @binders} : @type - Remark @ident {? @binders} : @type - Fact @ident {? @binders} : @type - Corollary @ident {? @binders} : @type - Proposition @ident {? @binders} : @type + .. cmdv:: Lemma @ident {* @binder } : @type + Remark @ident {* @binder } : @type + Fact @ident {* @binder } : @type + Corollary @ident {* @binder } : @type + Proposition @ident {* @binder } : @type :name: Lemma; Remark; Fact; Corollary; Proposition - These commands are all synonyms of :n:`Theorem @ident {? @binders } : type`. + These commands are all synonyms of :n:`Theorem @ident {* @binder } : type`. 
Notations --------- diff --git a/doc/sphinx/addendum/generalized-rewriting.rst b/doc/sphinx/addendum/generalized-rewriting.rst index 9f5741f72a..94ab6e789c 100644 --- a/doc/sphinx/addendum/generalized-rewriting.rst +++ b/doc/sphinx/addendum/generalized-rewriting.rst @@ -170,7 +170,7 @@ compatibility constraints. Adding new relations and morphisms ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmd:: Add Parametric Relation @binders : (A t1 ... tn) (Aeq t′1 ... t′m) {? reflexivity proved by @term} {? symmetry proved by @term} {? transitivity proved by @term} as @ident +.. cmd:: Add Parametric Relation {* @binder } : (A t1 ... tn) (Aeq t′1 ... t′m) {? reflexivity proved by @term} {? symmetry proved by @term} {? transitivity proved by @term} as @ident This command declares a parametric relation :g:`Aeq: forall (y1 : β1 ... ym : βm)`, :g:`relation (A t1 ... tn)` over :g:`(A : αi -> ... αn -> Type)`. @@ -219,7 +219,7 @@ replace terms with related ones only in contexts that are syntactic compositions of parametric morphism instances declared with the following command. -.. cmd:: Add Parametric Morphism @binders : (@ident {+ @term__1}) with signature @term__2 as @ident +.. cmd:: Add Parametric Morphism {* @binder } : (@ident {+ @term__1}) with signature @term__2 as @ident This command declares a parametric morphism :n:`@ident {+ @term__1}` of signature :n:`@term__2`. The final identifier :token:`ident` gives a unique diff --git a/doc/sphinx/addendum/implicit-coercions.rst b/doc/sphinx/addendum/implicit-coercions.rst index 19b33f0d90..b007509b2e 100644 --- a/doc/sphinx/addendum/implicit-coercions.rst +++ b/doc/sphinx/addendum/implicit-coercions.rst @@ -198,7 +198,7 @@ Figure :ref:`vernacular` as follows: \comindex{Hypothesis \mbox{\rm (and coercions)}} .. productionlist:: - assumption : `assumption_keyword` `assums` . + assumption : `assumption_token` `assums` . assums : `simple_assums` : (`simple_assums`) ... (`simple_assums`) simple_assums : `ident` ... 
`ident` :[>] `term` @@ -215,12 +215,6 @@ grammar of inductive types from Figure :ref:`vernacular` as follows: \comindex{Inductive \mbox{\rm (and coercions)}} \comindex{CoInductive \mbox{\rm (and coercions)}} -.. productionlist:: - inductive : Inductive `ind_body` with ... with `ind_body` - : CoInductive `ind_body` with ... with `ind_body` - ind_body : `ident` [ `binders` ] : `term` := [[|] `constructor` | ... | `constructor` ] - constructor : `ident` [ `binders` ] [:[>] `term` ] - Especially, if the extra ``>`` is present in a constructor declaration, this constructor is declared as a coercion. @@ -240,7 +234,7 @@ declaration, this constructor is declared as a coercion. Same as :cmd:`Identity Coercion` but locally to the current section. - .. cmdv:: SubClass @ident := @type + .. cmd:: SubClass @ident_decl @def_body :name: SubClass If :n:`@type` is a class :n:`@ident'` applied to some arguments then @@ -251,7 +245,7 @@ declaration, this constructor is declared as a coercion. :n:`Definition @ident := @type.` :n:`Identity Coercion Id_@ident_@ident' : @ident >-> @ident'`. - .. cmdv:: Local SubClass @ident := @type + .. cmdv:: Local SubClass @ident_decl @def_body Same as before but locally to the current section. @@ -299,7 +293,7 @@ Classes as Records We allow the definition of *Structures with Inheritance* (or classes as records) by extending the existing :cmd:`Record` macro. Its new syntax is: -.. cmdv:: Record {? >} @ident {? @binders} : @sort := {? @ident} { {+; @ident :{? >} @term } } +.. cmdv:: {| Record | Structure } {? >} @ident {* @binder } : @sort := {? @ident} { {+; @ident :{? >} @term } } The first identifier :token:`ident` is the name of the defined record and :token:`sort` is its type. The optional identifier after ``:=`` is the name @@ -315,12 +309,6 @@ by extending the existing :cmd:`Record` macro. Its new syntax is: (this may fail if the uniform inheritance condition is not satisfied). -.. cmdv:: Structure {? >} @ident {? @binders} : @sort := {? 
@ident} { {+; @ident :{? >} @term } } - :name: Structure - - This is a synonym of :cmd:`Record`. - - Coercions and Sections ---------------------- diff --git a/doc/sphinx/addendum/program.rst b/doc/sphinx/addendum/program.rst index a17dca1693..549249d25c 100644 --- a/doc/sphinx/addendum/program.rst +++ b/doc/sphinx/addendum/program.rst @@ -174,7 +174,7 @@ Program Definition .. exn:: In environment … the term: @term does not have type @type. Actually, it has type ... :undocumented: - .. cmdv:: Program Definition @ident @binders : @type := @term + .. cmdv:: Program Definition @ident {* @binder } : @type := @term This is equivalent to: @@ -189,7 +189,7 @@ Program Definition Program Fixpoint ~~~~~~~~~~~~~~~~ -.. cmd:: Program Fixpoint @ident @binders {? {@order}} : @type := @term +.. cmd:: Program Fixpoint @ident {* @binder } {? {@order}} : @type := @term The optional order annotation follows the grammar: diff --git a/doc/sphinx/addendum/type-classes.rst b/doc/sphinx/addendum/type-classes.rst index 57a2254100..af4e9051bb 100644 --- a/doc/sphinx/addendum/type-classes.rst +++ b/doc/sphinx/addendum/type-classes.rst @@ -295,14 +295,18 @@ the Existing Instance command to achieve the same effect. Summary of the commands ----------------------- -.. cmd:: Class @ident {? @binders} : {? @sort} := {? @ident} { {+; @ident :{? >} @term } } +.. cmd:: Class @inductive_definition {* with @inductive_definition } The :cmd:`Class` command is used to declare a typeclass with parameters :token:`binders` and fields the declared record fields. + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + .. _singleton-class: - .. cmdv:: Class @ident {? @binders} : {? @sort} := @ident : @term + .. cmdv:: Class @ident {* @binder } : {? @sort} := @ident : @term This variant declares a *singleton* class with a single method. 
This singleton class is a so-called definitional class, represented simply @@ -324,7 +328,7 @@ Summary of the commands This command has no effect when used on a typeclass. -.. cmd:: Instance @ident {? @binders} : @term__0 {+ @term} {? | @num} := { {*; @field_def} } +.. cmd:: Instance @ident {* @binder } : @term__0 {+ @term} {? | @num} := { {*; @field_def} } This command is used to declare a typeclass instance named :token:`ident` of the class :n:`@term__0` with parameters :token:`term` and @@ -337,10 +341,10 @@ Summary of the commands :tacn:`auto` hints. If the priority :token:`num` is not specified, it defaults to the number of non-dependent binders of the instance. - .. cmdv:: Instance @ident {? @binders} : forall {? @binders}, @term__0 {+ @term} {? | @num } := @term + .. cmdv:: Instance @ident {* @binder } : forall {* @binder }, @term__0 {+ @term} {? | @num } := @term This syntax is used for declaration of singleton class instances or - for directly giving an explicit term of type :n:`forall @binders, @term__0 + for directly giving an explicit term of type :n:`forall {* @binder }, @term__0 {+ @term}`. One need not even mention the unique field name for singleton classes. diff --git a/doc/sphinx/addendum/universe-polymorphism.rst b/doc/sphinx/addendum/universe-polymorphism.rst index f9cc25959c..c069782add 100644 --- a/doc/sphinx/addendum/universe-polymorphism.rst +++ b/doc/sphinx/addendum/universe-polymorphism.rst @@ -152,6 +152,8 @@ Many other commands support the ``Polymorphic`` flag, including: - :cmd:`Hint Resolve` and :cmd:`Hint Rewrite` will use the auto/rewrite hint polymorphically, not at a single instance. +.. _cumulative: + Cumulative, NonCumulative ------------------------- diff --git a/doc/sphinx/conf.py b/doc/sphinx/conf.py index f1dd7479c5..22102aa3ab 100755 --- a/doc/sphinx/conf.py +++ b/doc/sphinx/conf.py @@ -108,7 +108,7 @@ master_doc = "index" # General information about the project. 
project = 'Coq' -copyright = '1999-2019, Inria, CNRS and contributors' +copyright = '1999-2020, Inria, CNRS and contributors' author = 'The Coq Development Team' # The version info for the project you're documenting, acts as replacement for @@ -183,16 +183,21 @@ todo_include_todos = False nitpicky = True nitpick_ignore = [ ('token', token) for token in [ + 'binders', 'collection', 'command', + 'definition', 'dirpath', + 'inductive', + 'ind_body', 'modpath', 'module', - 'red_expr', + 'simple_tactic', 'symbol', 'tactic', 'term_pattern', 'term_pattern_string', + 'toplevel_selector', ]] # -- Options for HTML output ---------------------------------------------- diff --git a/doc/sphinx/coq-cmdindex.rst b/doc/sphinx/coq-cmdindex.rst index fd0b342ae4..18d2e379ac 100644 --- a/doc/sphinx/coq-cmdindex.rst +++ b/doc/sphinx/coq-cmdindex.rst @@ -2,6 +2,8 @@ .. hack to get index in TOC +.. _command_index: + ----------------- Command index ----------------- diff --git a/doc/sphinx/coq-tacindex.rst b/doc/sphinx/coq-tacindex.rst index 31b2f7f8cb..cddcdf83e8 100644 --- a/doc/sphinx/coq-tacindex.rst +++ b/doc/sphinx/coq-tacindex.rst @@ -2,6 +2,8 @@ .. hack to get index in TOC +.. _tactic_index: + ------------- Tactic index ------------- diff --git a/doc/sphinx/language/coq-library.rst b/doc/sphinx/language/coq-library.rst index 80f209fcf1..39f2ccec29 100644 --- a/doc/sphinx/language/coq-library.rst +++ b/doc/sphinx/language/coq-library.rst @@ -23,7 +23,7 @@ These libraries and developments are available for download at http://coq.inria.fr (see :ref:`userscontributions`). This chapter briefly reviews the |Coq| libraries whose contents can -also be browsed at http://coq.inria.fr/stdlib. +also be browsed at http://coq.inria.fr/stdlib/. 
diff --git a/doc/sphinx/language/gallina-extensions.rst b/doc/sphinx/language/gallina-extensions.rst index f0bbaed8f3..6c1d83b3b8 100644 --- a/doc/sphinx/language/gallina-extensions.rst +++ b/doc/sphinx/language/gallina-extensions.rst @@ -27,19 +27,26 @@ expressions. In this sense, the :cmd:`Record` construction allows defining field : `ident` [ `binders` ] : `type` [ where `notation` ] : `ident` [ `binders` ] [: `type` ] := `term` -.. cmd:: Record @ident @binders {? : @sort} := {? @ident} { {*; @ident @binders : @type } } +.. cmd:: {| Record | Structure } @inductive_definition {* with @inductive_definition } + :name: Record; Structure The first identifier :token:`ident` is the name of the defined record and :token:`sort` is its type. The optional identifier following ``:=`` is the name of its constructor. If it is omitted, the default name :n:`Build_@ident`, where :token:`ident` is the record name, is used. If :token:`sort` is omitted, the default sort is :math:`\Type`. The identifiers inside the brackets are the names of - fields. For a given field :token:`ident`, its type is :n:`forall @binders, @type`. + fields. For a given field :token:`ident`, its type is :n:`forall {* @binder }, @type`. Remark that the type of a particular identifier may depend on a previously-given identifier. Thus the order of the fields is important. Finally, :token:`binders` are parameters of the record. + :cmd:`Record` and :cmd:`Structure` are synonyms. + + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + More generally, a record may have explicitly defined (a.k.a. manifest) fields. For instance, we might have: -:n:`Record @ident @binders : @sort := { @ident__1 : @type__1 ; @ident__2 := @term__2 ; @ident__3 : @type__3 }`. 
+:n:`Record @ident {* @binder } : @sort := { @ident__1 : @type__1 ; @ident__2 := @term__2 ; @ident__3 : @type__3 }`. in which case the correctness of :n:`@type__3` may rely on the instance :n:`@term__2` of :n:`@ident__2` and :n:`@term__2` may in turn depend on :n:`@ident__1`. .. example:: @@ -62,7 +69,7 @@ in which case the correctness of :n:`@type__3` may rely on the instance :n:`@ter Let us now see the work done by the ``Record`` macro. First the macro generates a variant type definition with just one constructor: -:n:`Variant @ident {? @binders } : @sort := @ident__0 {? @binders }`. +:n:`Variant @ident {* @binder } : @sort := @ident__0 {* @binder }`. To build an object of type :token:`ident`, one should provide the constructor :n:`@ident__0` with the appropriate number of terms filling the fields of the record. @@ -183,8 +190,6 @@ other arguments are the parameters of the inductive type. defined with the ``Record`` keyword can be activated with the :flag:`Nonrecursive Elimination Schemes` flag (see :ref:`proofschemes-induction-principles`). -.. note:: ``Structure`` is a synonym of the keyword ``Record``. - .. warn:: @ident cannot be defined. It can happen that the definition of a projection is impossible. @@ -696,7 +701,7 @@ used by ``Function``. A more precise description is given below. .. cmdv:: Function @ident {* @binder } : @type := @term - Defines the not recursive function :token:`ident` as if declared with + Defines the nonrecursive function :token:`ident` as if it was declared with :cmd:`Definition`. Moreover the following are defined: + :token:`ident`\ ``_rect``, :token:`ident`\ ``_rec`` and :token:`ident`\ ``_ind``, @@ -817,32 +822,6 @@ Sections create local contexts which can be shared across multiple definitions. Most commands, like :cmd:`Hint`, :cmd:`Notation`, option management, … which appear inside a section are canceled when the section is closed. -.. 
cmd:: Variable @ident : @type - - This command links :token:`type` to the name :token:`ident` in the context of - the current section. When the current section is closed, name :token:`ident` - will be unknown and every object using this variable will be explicitly - parameterized (the variable is *discharged*). - - .. exn:: @ident already exists. - :name: @ident already exists. (Variable) - :undocumented: - - .. cmdv:: Variable {+ @ident } : @type - - Links :token:`type` to each :token:`ident`. - - .. cmdv:: Variable {+ ( {+ @ident } : @type ) } - - Declare one or more variables with various types. - - .. cmdv:: Variables {+ ( {+ @ident } : @type) } - Hypothesis {+ ( {+ @ident } : @type) } - Hypotheses {+ ( {+ @ident } : @type) } - :name: Variables; Hypothesis; Hypotheses - - These variants are synonyms of :n:`Variable {+ ( {+ @ident } : @type) }`. - .. cmd:: Let @ident := @term This command binds the value :token:`term` to the name :token:`ident` in the @@ -855,7 +834,7 @@ Sections create local contexts which can be shared across multiple definitions. :name: @ident already exists. (Let) :undocumented: - .. cmdv:: Let @ident {? @binders } {? : @type } := @term + .. cmdv:: Let @ident {* @binder } {? : @type } := @term :undocumented: .. cmdv:: Let Fixpoint @ident @fix_body {* with @fix_body} @@ -866,7 +845,7 @@ Sections create local contexts which can be shared across multiple definitions. :name: Let CoFixpoint :undocumented: -.. cmd:: Context @binders +.. cmd:: Context {* @binder } Declare variables in the context of the current section, like :cmd:`Variable`, but also allowing implicit variables, :ref:`implicit-generalization`, and @@ -1011,7 +990,7 @@ Reserved commands inside an interactive module type: This is a shortcut for the command :n:`Include @module` for each :token:`module`. -.. cmd:: @assumption_keyword Inline @assums +.. 
cmd:: @assumption_token Inline @assums :name: Inline The instance of this assumption will be automatically expanded at functor application, except when @@ -1673,11 +1652,11 @@ The syntax is also supported in internal binders. For instance, in the following kinds of expressions, the type of each declaration present in :token:`binders` can be bracketed to mark the declaration as implicit: -:n:`fun (@ident:forall @binders, @type) => @term`, -:n:`forall (@ident:forall @binders, @type), @type`, -:n:`let @ident @binders := @term in @term`, -:n:`fix @ident @binders := @term in @term` and -:n:`cofix @ident @binders := @term in @term`. +:n:`fun (@ident:forall {* @binder }, @type) => @term`, +:n:`forall (@ident:forall {* @binder }, @type), @type`, +:n:`let @ident {* @binder } := @term in @term`, +:n:`fix @ident {* @binder } := @term in @term` and +:n:`cofix @ident {* @binder } := @term in @term`. Here is an example: .. coqtop:: all @@ -2208,6 +2187,13 @@ or :g:`m` to the type :g:`nat` of natural numbers). Adds blocks of implicit types with different specifications. +.. flag:: Printing Use Implicit Types + + By default, the type of bound variables is not printed when + the variable name is associated to an implicit type which matches the + actual type of the variable. This feature can be deactivated by + turning this flag off. + .. _implicit-generalization: Implicit generalization diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst index 721c7a7a51..e12ff1ba98 100644 --- a/doc/sphinx/language/gallina-specification-language.rst +++ b/doc/sphinx/language/gallina-specification-language.rst @@ -49,11 +49,11 @@ Blanks Comments Comments are enclosed between ``(*`` and ``*)``. They can be nested. - They can contain any character. However, embedded :token:`string` literals must be + They can contain any character. However, embedded :n:`@string` literals must be correctly closed. Comments are treated as blanks. 
Identifiers - Identifiers, written :token:`ident`, are sequences of letters, digits, ``_`` and + Identifiers, written :n:`@ident`, are sequences of letters, digits, ``_`` and ``'``, that do not start with a digit or ``'``. That is, they are recognized by the following grammar (except that the string ``_`` is reserved; it is not a valid identifier): @@ -74,8 +74,8 @@ Identifiers Numerals Numerals are sequences of digits with an optional fractional part - and exponent, optionally preceded by a minus sign. :token:`int` is an integer; - a numeral without fractional or exponent parts. :token:`num` is a non-negative + and exponent, optionally preceded by a minus sign. :n:`@int` is an integer; + a numeral without fractional or exponent parts. :n:`@num` is a non-negative integer. Underscores embedded in the digits are ignored, for example ``1_000_000`` is the same as ``1000000``. @@ -175,12 +175,19 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`. | ltac : ( @ltac_expr ) field_def ::= @qualid {* @binder } := @term +.. _types: + Types ----- -Coq terms are typed. Coq types are recognized by the same syntactic -class as :token:`term`. We denote by :production:`type` the semantic subclass -of types inside the syntactic class :token:`term`. +.. prodn:: + type ::= @term + +:n:`@type`\s are a subset of :n:`@term`\s; not every :n:`@term` is a :n:`@type`. +Every term has an associated type, which +can be determined by applying the :ref:`typing-rules`. Distinct terms +may share the same type, for example 0 and 1 are both of type `nat`, the +natural numbers. .. 
_gallina-identifiers: @@ -193,14 +200,14 @@ Qualified identifiers and simple identifiers qualid ::= @ident {* @field_ident } field_ident ::= .@ident -*Qualified identifiers* (:token:`qualid`) denote *global constants* +*Qualified identifiers* (:n:`@qualid`) denote *global constants* (definitions, lemmas, theorems, remarks or facts), *global variables* (parameters or axioms), *inductive types* or *constructors of inductive -types*. *Simple identifiers* (or shortly :token:`ident`) are a syntactic subset +types*. *Simple identifiers* (or shortly :n:`@ident`) are a syntactic subset of qualified identifiers. Identifiers may also denote *local variables*, while qualified identifiers do not. -Field identifiers, written :token:`field_ident`, are identifiers prefixed by +Field identifiers, written :n:`@field_ident`, are identifiers prefixed by `.` (dot) with no blank between the dot and the identifier. @@ -215,7 +222,7 @@ numbers (see :ref:`datatypes`). .. note:: - Negative integers are not at the same level as :token:`num`, for this + Negative integers are not at the same level as :n:`@num`, for this would make precedence unnatural. .. index:: @@ -227,7 +234,7 @@ numbers (see :ref:`datatypes`). Sorts ----- -.. insertprodn sort univ_annot +.. insertprodn sort univ_constraint .. prodn:: sort ::= Set @@ -242,12 +249,14 @@ Sorts universe_name ::= @qualid | Set | Prop + univ_annot ::= @%{ {* @universe_level } %} universe_level ::= Set | Prop | Type | _ | @qualid - univ_annot ::= @%{ {* @universe_level } %} + univ_decl ::= @%{ {* @ident } {? + } {? %| {*, @univ_constraint } {? + } } %} + univ_constraint ::= @universe_name {| < | = | <= } @universe_name There are four sorts :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. @@ -255,13 +264,13 @@ There are four sorts :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. propositions* (also called *strict propositions*). - :g:`Prop` is the universe of *logical propositions*. The logical propositions - themselves are typing the proofs. 
We denote propositions by :token:`form`. - This constitutes a semantic subclass of the syntactic class :token:`term`. + themselves are typing the proofs. We denote propositions by :n:`@form`. + This constitutes a semantic subclass of the syntactic class :n:`@term`. - :g:`Set` is the universe of *program types* or *specifications*. The specifications themselves are typing the programs. We denote - specifications by :token:`specif`. This constitutes a semantic subclass of - the syntactic class :token:`term`. + specifications by :n:`@specif`. This constitutes a semantic subclass of + the syntactic class :n:`@term`. - :g:`Type` is the type of sorts. @@ -280,15 +289,15 @@ Binders name ::= _ | @ident binder ::= @name - | ( {+ @name } : @term ) - | ( @name {? : @term } := @term ) - | %{ {+ @name } {? : @term } %} - | [ {+ @name } {? : @term } ] + | ( {+ @name } : @type ) + | ( @name {? : @type } := @term ) + | ( @name : @type %| @term ) + | %{ {+ @name } {? : @type } %} + | [ {+ @name } {? : @type } ] | `( {+, @typeclass_constraint } ) | `%{ {+, @typeclass_constraint } %} | `[ {+, @typeclass_constraint } ] | ' @pattern0 - | ( @name : @term %| @term ) typeclass_constraint ::= {? ! } @term | %{ @name %} : {? ! } @term | @name : {? ! } @term @@ -303,14 +312,14 @@ a notation for a sequence of binding variables sharing the same type: binder can also be any pattern prefixed by a quote, e.g. :g:`'(x,y)`. Some constructions allow the binding of a variable to value. This is -called a “let-binder”. The entry :token:`binder` of the grammar accepts +called a “let-binder”. The entry :n:`@binder` of the grammar accepts either an assumption binder as defined above or a let-binder. The notation in the latter case is :n:`(@ident := @term)`. In a let-binder, only one variable can be introduced at the same time. It is also possible to give the type of the variable as follows: :n:`(@ident : @type := @term)`. -Lists of :token:`binder`\s are allowed. 
In the case of :g:`fun` and :g:`forall`, +Lists of :n:`@binder`\s are allowed. In the case of :g:`fun` and :g:`forall`, it is intended that at least one binder of the list is an assumption otherwise fun and forall gets identical. Moreover, parentheses can be omitted in the case of a single sequence of bindings sharing the same type (e.g.: @@ -322,9 +331,9 @@ Abstractions: fun ----------------- The expression :n:`fun @ident : @type => @term` defines the -*abstraction* of the variable :token:`ident`, of type :token:`type`, over the term -:token:`term`. It denotes a function of the variable :token:`ident` that evaluates to -the expression :token:`term` (e.g. :g:`fun x : A => x` denotes the identity +*abstraction* of the variable :n:`@ident`, of type :n:`@type`, over the term +:n:`@term`. It denotes a function of the variable :n:`@ident` that evaluates to +the expression :n:`@term` (e.g. :g:`fun x : A => x` denotes the identity function on type :g:`A`). The keyword :g:`fun` can be followed by several binders as given in Section :ref:`binders`. Functions over several variables are equivalent to an iteration of one-variable @@ -341,12 +350,12 @@ Products: forall ---------------- The expression :n:`forall @ident : @type, @term` denotes the -*product* of the variable :token:`ident` of type :token:`type`, over the term :token:`term`. +*product* of the variable :n:`@ident` of type :n:`@type`, over the term :n:`@term`. As for abstractions, :g:`forall` is followed by a binder list, and products over several variables are equivalent to an iteration of one-variable -products. Note that :token:`term` is intended to be a type. +products. Note that :n:`@term` is intended to be a type. -If the variable :token:`ident` occurs in :token:`term`, the product is called +If the variable :n:`@ident` occurs in :n:`@term`, the product is called *dependent product*. The intention behind a dependent product :g:`forall x : A, B` is twofold. 
It denotes either the universal quantification of the variable :g:`x` of type :g:`A` @@ -391,13 +400,13 @@ Type cast | @term10 :> The expression :n:`@term : @type` is a type cast expression. It enforces -the type of :token:`term` to be :token:`type`. +the type of :n:`@term` to be :n:`@type`. :n:`@term <: @type` locally sets up the virtual machine for checking that -:token:`term` has type :token:`type`. +:n:`@term` has type :n:`@type`. -:n:`@term <<: @type` uses native compilation for checking that :token:`term` -has type :token:`type`. +:n:`@term <<: @type` uses native compilation for checking that :n:`@term` +has type :n:`@type`. .. index:: _ @@ -418,15 +427,15 @@ Let-in definitions .. insertprodn term_let term_let .. prodn:: - term_let ::= let @name {? : @term } := @term in @term - | let @name {+ @binder } {? : @term } := @term in @term + term_let ::= let @name {? : @type } := @term in @term + | let @name {+ @binder } {? : @type } := @term in @term | let ( {*, @name } ) {? {? as @name } return @term100 } := @term in @term | let ' @pattern := @term {? return @term100 } in @term | let ' @pattern in @pattern := @term return @term100 in @term :n:`let @ident := @term in @term’` -denotes the local binding of :token:`term` to the variable -:token:`ident` in :token:`term`’. There is a syntactic sugar for let-in +denotes the local binding of :n:`@term` to the variable +:n:`@ident` in :n:`@term`’. There is a syntactic sugar for let-in definition of functions: :n:`let @ident {+ @binder} := @term in @term’` stands for :n:`let @ident := fun {+ @binder} => @term in @term’`. @@ -463,8 +472,8 @@ to apply specific treatments accordingly. This paragraph describes the basic form of pattern matching. See Section :ref:`Mult-match` and Chapter :ref:`extendedpatternmatching` for the description of the general form. 
The basic form of pattern matching is characterized -by a single :token:`case_item` expression, an :token:`eqn` restricted to a -single :token:`pattern` and :token:`pattern` restricted to the form +by a single :n:`@case_item` expression, an :n:`@eqn` restricted to a +single :n:`@pattern` and :n:`@pattern` restricted to the form :n:`@qualid {* @ident}`. The expression @@ -486,7 +495,7 @@ In the *dependent* case, there are three subcases. In the first subcase, the type in each branch may depend on the exact value being matched in the branch. In this case, the whole pattern matching itself depends on the term being matched. This dependency of the term being matched in the -return type is expressed with an :n:`@ident` clause where :token:`ident` +return type is expressed with an :n:`@ident` clause where :n:`@ident` is dependent in the return type. For instance, in the following example: .. coqtop:: in @@ -538,7 +547,7 @@ dependency of the return type in the annotations of the inductive type is expressed with a clause in the form :n:`in @qualid {+ _ } {+ @pattern }`, where -- :token:`qualid` is the inductive type of the term being matched; +- :n:`@qualid` is the inductive type of the term being matched; - the holes :n:`_` match the parameters of the inductive type: the return type is not dependent on them. @@ -587,7 +596,7 @@ Recursive and co-recursive functions: fix and cofix .. prodn:: term_fix ::= let fix @fix_body in @term | fix @fix_body {? {+ with @fix_body } for @ident } - fix_body ::= @ident {* @binder } {? @fixannot } {? : @term } := @term + fix_body ::= @ident {* @binder } {? @fixannot } {? : @type } := @term fixannot ::= %{ struct @ident %} | %{ wf @term1_extended @ident %} | %{ measure @term1_extended {? @ident } {? @term1_extended } %} @@ -595,92 +604,55 @@ Recursive and co-recursive functions: fix and cofix | @ @qualid {? 
@univ_annot } -The expression “``fix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``:`` -:token:`type`:math:`_1` ``:=`` :token:`term`:math:`_1` ``with … with`` -:token:`ident`:math:`_n` :token:`binder`:math:`_n` : :token:`type`:math:`_n` -``:=`` :token:`term`:math:`_n` ``for`` :token:`ident`:math:`_i`” denotes the +The expression ":n:`fix @ident__1 @binder__1 : @type__1 := @term__1 with … with @ident__n @binder__n : @type__n := @term__n for @ident__i`" denotes the :math:`i`-th component of a block of functions defined by mutual structural recursion. It is the local counterpart of the :cmd:`Fixpoint` command. When -:math:`n=1`, the “``for`` :token:`ident`:math:`_i`” clause is omitted. +:math:`n=1`, the ":n:`for @ident__i`" clause is omitted. The association of a single fixpoint and a local definition have a special -syntax: :n:`let fix @ident @binders := @term in` stands for -:n:`let @ident := fix @ident @binders := @term in`. The same applies for co-fixpoints. +syntax: :n:`let fix @ident {* @binder } := @term in` stands for +:n:`let @ident := fix @ident {* @binder } := @term in`. The same applies for co-fixpoints. .. insertprodn term_cofix cofix_body .. prodn:: term_cofix ::= let cofix @cofix_body in @term | cofix @cofix_body {? {+ with @cofix_body } for @ident } - cofix_body ::= @ident {* @binder } {? : @term } := @term + cofix_body ::= @ident {* @binder } {? : @type } := @term -The expression “``cofix`` :token:`ident`:math:`_1` :token:`binder`:math:`_1` ``:`` -:token:`type`:math:`_1` ``with … with`` :token:`ident`:math:`_n` :token:`binder`:math:`_n` -: :token:`type`:math:`_n` ``for`` :token:`ident`:math:`_i`” denotes the -:math:`i`-th component of a block of terms defined by a mutual guarded +The expression +":n:`cofix @ident__1 @binder__1 : @type__1 with … with @ident__n @binder__n : @type__n for @ident__i`" +denotes the :math:`i`-th component of a block of terms defined by a mutual guarded co-recursion. 
It is the local counterpart of the :cmd:`CoFixpoint` command. When -:math:`n=1`, the “``for`` :token:`ident`:math:`_i`” clause is omitted. +:math:`n=1`, the ":n:`for @ident__i`" clause is omitted. .. _vernacular: The Vernacular ============== -.. insertgramXX vernac ident_opt2 - -.. productionlist:: coq - decorated-sentence : [ `decoration` … `decoration` ] `sentence` - sentence : `assumption` - : `definition` - : `inductive` - : `fixpoint` - : `assertion` `proof` - assumption : `assumption_keyword` `assums`. - assumption_keyword : Axiom | Conjecture - : Parameter | Parameters - : Variable | Variables - : Hypothesis | Hypotheses - assums : `ident` … `ident` : `term` - : ( `ident` … `ident` : `term` ) … ( `ident` … `ident` : `term` ) - definition : [Local] Definition `ident` [`binders`] [: `term`] := `term` . - : Let `ident` [`binders`] [: `term`] := `term` . - binders : binders binder - : binder - inductive : Inductive `ind_body` with … with `ind_body` . - : CoInductive `ind_body` with … with `ind_body` . - ind_body : `ident` [`binders`] : `term` := - : [[|] `ident` [`binders`] [:`term`] | … | `ident` [`binders`] [:`term`]] - fixpoint : Fixpoint `fix_body` with … with `fix_body` . - : CoFixpoint `fix_body` with … with `fix_body` . - assertion : `assertion_keyword` `ident` [`binders`] : `term` . - assertion_keyword : Theorem | Lemma - : Remark | Fact - : Corollary | Property | Proposition - : Definition | Example - proof : Proof . … Qed . - : Proof . … Defined . - : Proof . … Admitted . - decoration : #[ `attributes` ] - attributes : [`attribute`, … , `attribute`] - attribute : `ident` - : `ident` = `string` - : `ident` ( `attributes` ) - -.. todo:: This use of … in this grammar is inconsistent - What about removing the proof part of this grammar from this chapter - and putting it somewhere where top-level tactics can be described as well? - See also #7583. - -This grammar describes *The Vernacular* which is the language of -commands of Gallina. 
A sentence of the vernacular language, like in -many natural languages, begins with a capital letter and ends with a -dot. - -Sentences may be *decorated* with so-called *attributes*, -which are described in the corresponding section (:ref:`gallina-attributes`). - -The different kinds of command are described hereafter. They all suppose -that the terms occurring in the sentences are well-typed. +.. insertprodn vernacular vernacular + +.. prodn:: + vernacular ::= {* {? @all_attrs } {| @command | @ltac_expr } . } + +The top-level input to |Coq| is a series of :production:`command`\s and :production:`tactic`\s, +each terminated with a period +and optionally decorated with :ref:`gallina-attributes`. :n:`@ltac_expr` syntax supports both simple +and compound tactics. For example: ``split.`` is a simple tactic while ``split; auto.`` combines two +simple tactics. + +Tactics specify how to transform the current proof state as a step in creating a proof. They +are syntactically valid only when |Coq| is in proof mode, such as after a :cmd:`Theorem` command +and before any subsequent proof-terminating command such as :cmd:`Qed`. See :ref:`proofhandling` for more +on proof mode. + +By convention, command names begin with uppercase letters, while +tactic names begin with lowercase letters. Commands appear in the +HTML documentation in blue boxes after the label "Command". In the pdf, they appear +after the boldface label "Command:". Commands are listed in the :ref:`command_index`. + +Similarly, tactics appear after the label "Tactic". Tactics are listed in the :ref:`tactic_index`. .. _gallina-assumptions: @@ -688,73 +660,68 @@ Assumptions ----------- Assumptions extend the environment with axioms, parameters, hypotheses -or variables. An assumption binds an :token:`ident` to a :token:`type`. 
It is accepted -by Coq if and only if this :token:`type` is a correct type in the environment -preexisting the declaration and if :token:`ident` was not previously defined in -the same module. This :token:`type` is considered to be the type (or -specification, or statement) assumed by :token:`ident` and we say that :token:`ident` -has type :token:`type`. +or variables. An assumption binds an :n:`@ident` to a :n:`@type`. It is accepted +by Coq if and only if this :n:`@type` is a correct type in the environment +preexisting the declaration and if :n:`@ident` was not previously defined in +the same module. This :n:`@type` is considered to be the type (or +specification, or statement) assumed by :n:`@ident` and we say that :n:`@ident` +has type :n:`@type`. .. _Axiom: -.. cmd:: Parameter @ident : @type - - This command links :token:`type` to the name :token:`ident` as its specification in - the global context. The fact asserted by :token:`type` is thus assumed as a - postulate. - - .. exn:: @ident already exists. - :name: @ident already exists. (Axiom) - :undocumented: - - .. cmdv:: Parameter {+ @ident } : @type +.. cmd:: @assumption_token {? Inline {? ( @num ) } } {| {+ ( @assumpt ) } | @assumpt } + :name: Axiom; Axioms; Conjecture; Conjectures; Hypothesis; Hypotheses; Parameter; Parameters; Variable; Variables - Adds several parameters with specification :token:`type`. + .. insertprodn assumption_token of_type - .. cmdv:: Parameter {+ ( {+ @ident } : @type ) } + .. prodn:: + assumption_token ::= {| Axiom | Axioms } + | {| Conjecture | Conjectures } + | {| Parameter | Parameters } + | {| Hypothesis | Hypotheses } + | {| Variable | Variables } + assumpt ::= {+ @ident_decl } @of_type + ident_decl ::= @ident {? @univ_decl } + of_type ::= {| : | :> | :>> } @type - Adds blocks of parameters with different specifications. + These commands bind one or more :n:`@ident`\(s) to specified :n:`@type`\(s) as their specifications in + the global context. 
The fact asserted by the :n:`@type` (or, equivalently, the existence + of an object of this type) is accepted as a postulate. - .. cmdv:: Local Parameter {+ ( {+ @ident } : @type ) } - :name: Local Parameter + :cmd:`Axiom`, :cmd:`Conjecture`, :cmd:`Parameter` and their plural forms + are equivalent. They can take the :attr:`local` attribute (see :ref:`gallina-attributes`), + which makes the defined :n:`@ident`\s accessible by :cmd:`Import` and its variants + only through their fully qualified names. - Such parameters are never made accessible through their unqualified name by - :cmd:`Import` and its variants. You have to explicitly give their fully - qualified name to refer to them. + Similarly, :cmd:`Hypothesis`, :cmd:`Variable` and their plural forms are equivalent. Outside + of a section, these are equivalent to :n:`Local Parameter`. Inside a section, the + :n:`@ident`\s defined are only accessible within the section. When the current section + is closed, the :n:`@ident`\(s) become undefined and every object depending on them will be explicitly + parameterized (i.e., the variables are *discharged*). See Section :ref:`section-mechanism`. - .. cmdv:: {? Local } Parameters {+ ( {+ @ident } : @type ) } - {? Local } Axiom {+ ( {+ @ident } : @type ) } - {? Local } Axioms {+ ( {+ @ident } : @type ) } - {? Local } Conjecture {+ ( {+ @ident } : @type ) } - {? Local } Conjectures {+ ( {+ @ident } : @type ) } - :name: Parameters; Axiom; Axioms; Conjecture; Conjectures +.. example:: Simple assumptions - These variants are synonyms of :n:`{? Local } Parameter {+ ( {+ @ident } : @type ) }`. + .. coqtop:: reset in - .. cmdv:: Variable {+ ( {+ @ident } : @type ) } - Variables {+ ( {+ @ident } : @type ) } - Hypothesis {+ ( {+ @ident } : @type ) } - Hypotheses {+ ( {+ @ident } : @type ) } - :name: Variable (outside a section); Variables (outside a section); Hypothesis (outside a section); Hypotheses (outside a section) + Parameter X Y : Set. 
+ Parameter (R : X -> Y -> Prop) (S : Y -> X -> Prop). + Axiom R_S_inv : forall x y, R x y <-> S y x. - Outside of any section, these variants are synonyms of - :n:`Local Parameter {+ ( {+ @ident } : @type ) }`. - For their meaning inside a section, see :cmd:`Variable` in - :ref:`section-mechanism`. +.. exn:: @ident already exists. + :name: @ident already exists. (Axiom) + :undocumented: - .. warn:: @ident is declared as a local axiom [local-declaration,scope] +.. warn:: @ident is declared as a local axiom [local-declaration,scope] - Warning generated when using :cmd:`Variable` instead of - :cmd:`Local Parameter`. + Warning generated when using :cmd:`Variable` or its equivalent + instead of :n:`Local Parameter` or its equivalent. .. note:: - It is advised to use the commands :cmd:`Axiom`, :cmd:`Conjecture` and + We advise using the commands :cmd:`Axiom`, :cmd:`Conjecture` and :cmd:`Hypothesis` (and their plural forms) for logical postulates (i.e. when - the assertion :token:`type` is of sort :g:`Prop`), and to use the commands + the assertion :n:`@type` is of sort :g:`Prop`), and to use the commands :cmd:`Parameter` and :cmd:`Variable` (and their plural forms) in other cases - (corresponding to the declaration of an abstract mathematical entity). - -.. seealso:: Section :ref:`section-mechanism`. + (corresponding to the declaration of an abstract object of the given type). .. _gallina-definitions: @@ -777,85 +744,92 @@ type which is the type of its body. A formal presentation of constants and environments is given in Section :ref:`typing-rules`. -.. cmd:: Definition @ident := @term - - This command binds :token:`term` to the name :token:`ident` in the environment, - provided that :token:`term` is well-typed. - - .. exn:: @ident already exists. - :name: @ident already exists. (Definition) - :undocumented: - - .. 
cmdv:: Definition @ident : @type := @term - - This variant checks that the type of :token:`term` is definitionally equal to - :token:`type`, and registers :token:`ident` as being of type - :token:`type`, and bound to value :token:`term`. - - .. exn:: The term @term has type @type while it is expected to have type @type'. - :undocumented: - - .. cmdv:: Definition @ident @binders {? : @type } := @term +.. cmd:: {| Definition | Example } @ident_decl @def_body + :name: Definition; Example - This is equivalent to - :n:`Definition @ident : forall @binders, @type := fun @binders => @term`. + .. insertprodn def_body def_body - .. cmdv:: Local Definition @ident {? @binders } {? : @type } := @term - :name: Local Definition + .. prodn:: + def_body ::= {* @binder } {? : @type } := {? @reduce } @term + | {* @binder } : @type - Such definitions are never made accessible through their - unqualified name by :cmd:`Import` and its variants. - You have to explicitly give their fully qualified name to refer to them. + These commands bind :n:`@term` to the name :n:`@ident` in the environment, + provided that :n:`@term` is well-typed. They can take the :attr:`local` attribute (see :ref:`gallina-attributes`), + which makes the defined :n:`@ident` accessible by :cmd:`Import` and its variants + only through their fully qualified names. + If :n:`@reduce` is present then :n:`@ident` is bound to the result of the specified + computation on :n:`@term`. - .. cmdv:: {? Local } Example @ident {? @binders } {? : @type } := @term - :name: Example + If :n:`@term` is omitted, Coq enters the proof editing mode. This can be + used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. + In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant + for which the computational behavior is relevant. See :ref:`proof-editing-mode`. - This is equivalent to :cmd:`Definition`. 
+ The form :n:`Definition @ident : @type := @term` checks that the type of :n:`@term` + is definitionally equal to :n:`@type`, and registers :n:`@ident` as being of type + :n:`@type`, and bound to value :n:`@term`. - .. cmdv:: Let @ident := @term - :name: Let (outside a section) + The form :n:`Definition @ident {* @binder } : @type := @term` is equivalent to + :n:`Definition @ident : forall {* @binder }, @type := fun {* @binder } => @term`. - Outside of any section, this variant is a synonym of - :n:`Local Definition @ident := @term`. - For its meaning inside a section, see :cmd:`Let` in - :ref:`section-mechanism`. - - .. warn:: @ident is declared as a local definition [local-declaration,scope] + .. seealso:: :cmd:`Opaque`, :cmd:`Transparent`, :tacn:`unfold`. - Warning generated when using :cmd:`Let` instead of - :cmd:`Local Definition`. + .. exn:: @ident already exists. + :name: @ident already exists. (Definition) + :undocumented: -.. seealso:: Section :ref:`section-mechanism`, commands :cmd:`Opaque`, - :cmd:`Transparent`, and tactic :tacn:`unfold`. + .. exn:: The term @term has type @type while it is expected to have type @type'. + :undocumented: .. _gallina-inductive-definitions: -Inductive definitions ---------------------- - -We gradually explain simple inductive types, simple annotated inductive -types, simple parametric inductive types, mutually inductive types. We -explain also co-inductive types. - -Simple inductive types -~~~~~~~~~~~~~~~~~~~~~~ +Inductive types +--------------- -.. cmd:: Inductive @ident : {? @sort } := {? | } @ident : @type {* | @ident : @type } - - This command defines a simple inductive type and its constructors. - The first :token:`ident` is the name of the inductively defined type - and :token:`sort` is the universe where it lives. The next :token:`ident`\s - are the names of its constructors and :token:`type` their respective types. - Depending on the universe where the inductive type :token:`ident` lives - (e.g. 
its type :token:`sort`), Coq provides a number of destructors. - Destructors are named :token:`ident`\ ``_sind``,:token:`ident`\ ``_ind``, - :token:`ident`\ ``_rec`` or :token:`ident`\ ``_rect`` which respectively - correspond to elimination principles on :g:`SProp`, :g:`Prop`, :g:`Set` and :g:`Type`. - The type of the destructors expresses structural induction/recursion - principles over objects of type :token:`ident`. - The constant :token:`ident`\ ``_ind`` is always provided, - whereas :token:`ident`\ ``_rec`` and :token:`ident`\ ``_rect`` can be - impossible to derive (for example, when :token:`ident` is a proposition). +.. cmd:: Inductive @inductive_definition {* with @inductive_definition } + + .. insertprodn inductive_definition field_body + + .. prodn:: + inductive_definition ::= {? > } @ident_decl {* @binder } {? %| {* @binder } } {? : @type } {? := {? @constructors_or_record } } {? @decl_notations } + constructors_or_record ::= {? %| } {+| @constructor } + | {? @ident } %{ {+; @record_field } %} + constructor ::= @ident {* @binder } {? @of_type } + record_field ::= {* #[ {*, @attr } ] } @name {? @field_body } {? %| @num } {? @decl_notations } + field_body ::= {* @binder } @of_type + | {* @binder } @of_type := @term + | {* @binder } := @term + + This command defines one or more + inductive types and its constructors. Coq generates destructors + depending on the universe that the inductive type belongs to. + + The destructors are named :n:`@ident`\ ``_rect``, :n:`@ident`\ ``_ind``, + :n:`@ident`\ ``_rec`` and :n:`@ident`\ ``_sind``, which + respectively correspond to elimination principles on :g:`Type`, :g:`Prop`, + :g:`Set` and :g:`SProp`. The type of the destructors + expresses structural induction/recursion principles over objects of + type :n:`@ident`. The constant :n:`@ident`\ ``_ind`` is always + generated, whereas :n:`@ident`\ ``_rec`` and :n:`@ident`\ ``_rect`` + may be impossible to derive (for example, when :n:`@ident` is a + proposition). 
+ + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + + Mutually inductive types can be defined by including multiple :n:`@inductive_definition`\s. + The :n:`@ident`\s are simultaneously added to the environment before the types of constructors are checked. + Each :n:`@ident` can be used independently thereafter. + See :ref:`mutually_inductive_types`. + + If the entire inductive definition is parameterized with :n:`@binder`\s, the parameters correspond + to a local context in which the entire set of inductive declarations is interpreted. + For this reason, the parameters must be strictly the same for each inductive type. + See :ref:`parametrized-inductive-types`. + + Constructor :n:`@ident`\s can come with :n:`@binder`\s, in which case + the actual type of the constructor is :n:`forall {* @binder }, @type`. .. exn:: Non strictly positive occurrence of @ident in @type. @@ -867,66 +841,71 @@ Simple inductive types .. exn:: The conclusion of @type is not valid; it must be built from @ident. The conclusion of the type of the constructors must be the inductive type - :token:`ident` being defined (or :token:`ident` applied to arguments in + :n:`@ident` being defined (or :n:`@ident` applied to arguments in the case of annotated inductive types — cf. next section). - .. example:: +The following subsections show examples of simple inductive types, simple annotated +inductive types, simple parametric inductive types and mutually inductive +types. - The set of natural numbers is defined as: +.. _simple-inductive-types: - .. coqtop:: all +Simple inductive types +~~~~~~~~~~~~~~~~~~~~~~ - Inductive nat : Set := - | O : nat - | S : nat -> nat. +A simple inductive type belongs to a universe that is a simple :n:`sort`. 
- The type nat is defined as the least :g:`Set` containing :g:`O` and closed by - the :g:`S` constructor. The names :g:`nat`, :g:`O` and :g:`S` are added to the - environment. +.. example:: - Now let us have a look at the elimination principles. They are three of them: - :g:`nat_ind`, :g:`nat_rec` and :g:`nat_rect`. The type of :g:`nat_ind` is: + The set of natural numbers is defined as: - .. coqtop:: all + .. coqtop:: reset all - Check nat_ind. + Inductive nat : Set := + | O : nat + | S : nat -> nat. - This is the well known structural induction principle over natural - numbers, i.e. the second-order form of Peano’s induction principle. It - allows proving some universal property of natural numbers (:g:`forall - n:nat, P n`) by induction on :g:`n`. + The type nat is defined as the least :g:`Set` containing :g:`O` and closed by + the :g:`S` constructor. The names :g:`nat`, :g:`O` and :g:`S` are added to the + environment. - The types of :g:`nat_rec` and :g:`nat_rect` are similar, except that they pertain - to :g:`(P:nat->Set)` and :g:`(P:nat->Type)` respectively. They correspond to - primitive induction principles (allowing dependent types) respectively - over sorts ``Set`` and ``Type``. + This definition generates four elimination principles: + :g:`nat_rect`, :g:`nat_ind`, :g:`nat_rec` and :g:`nat_sind`. The type of :g:`nat_ind` is: - .. cmdv:: Inductive @ident {? : @sort } := {? | } {*| @ident {? @binders } {? : @type } } + .. coqtop:: all - Constructors :token:`ident`\s can come with :token:`binders` in which case, - the actual type of the constructor is :n:`forall @binders, @type`. + Check nat_ind. - In the case where inductive types have no annotations (next section - gives an example of such annotations), a constructor can be defined - by only giving the type of its arguments. + This is the well known structural induction principle over natural + numbers, i.e. the second-order form of Peano’s induction principle. 
It + allows proving universal properties of natural numbers (:g:`forall + n:nat, P n`) by induction on :g:`n`. - .. example:: + The types of :g:`nat_rect`, :g:`nat_rec` and :g:`nat_sind` are similar, except that they + apply to, respectively, :g:`(P:nat->Type)`, :g:`(P:nat->Set)` and :g:`(P:nat->SProp)`. They correspond to + primitive induction principles (allowing dependent types) respectively + over sorts ```Type``, ``Set`` and ``SProp``. - .. coqtop:: none +In the case where inductive types don't have annotations (the next section +gives an example of annotations), a constructor can be defined +by giving the type of its arguments alone. - Reset nat. +.. example:: - .. coqtop:: in + .. coqtop:: reset none - Inductive nat : Set := O | S (_:nat). + Reset nat. + .. coqtop:: in + + Inductive nat : Set := O | S (_:nat). Simple annotated inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In an annotated inductive types, the universe where the inductive type -is defined is no longer a simple sort, but what is called an arity, -which is a type whose conclusion is a sort. +In annotated inductive types, the universe where the inductive type +is defined is no longer a simple :n:`@sort`, but what is called an arity, +which is a type whose conclusion is a :n:`@sort`. .. example:: @@ -939,72 +918,74 @@ which is a type whose conclusion is a sort. | even_0 : even O | even_SS : forall n:nat, even n -> even (S (S n)). - The type :g:`nat->Prop` means that even is a unary predicate (inductively + The type :g:`nat->Prop` means that :g:`even` is a unary predicate (inductively defined) over natural numbers. The type of its two constructors are the - defining clauses of the predicate even. The type of :g:`even_ind` is: + defining clauses of the predicate :g:`even`. The type of :g:`even_ind` is: .. coqtop:: all Check even_ind. 
- From a mathematical point of view it asserts that the natural numbers satisfying - the predicate even are exactly in the smallest set of naturals satisfying the + From a mathematical point of view, this asserts that the natural numbers satisfying + the predicate :g:`even` are exactly in the smallest set of naturals satisfying the clauses :g:`even_0` or :g:`even_SS`. This is why, when we want to prove any predicate :g:`P` over elements of :g:`even`, it is enough to prove it for :g:`O` and to prove that if any natural number :g:`n` satisfies :g:`P` its double - successor :g:`(S (S n))` satisfies also :g:`P`. This is indeed analogous to the + successor :g:`(S (S n))` satisfies also :g:`P`. This is analogous to the structural induction principle we got for :g:`nat`. +.. _parametrized-inductive-types: + Parameterized inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmdv:: Inductive @ident @binders {? : @type } := {? | } @ident : @type {* | @ident : @type} +In the previous example, each constructor introduces a different +instance of the predicate :g:`even`. In some cases, all the constructors +introduce the same generic instance of the inductive definition, in +which case, instead of an annotation, we use a context of parameters +which are :n:`@binder`\s shared by all the constructors of the definition. - In the previous example, each constructor introduces a different - instance of the predicate :g:`even`. In some cases, all the constructors - introduce the same generic instance of the inductive definition, in - which case, instead of an annotation, we use a context of parameters - which are :token:`binders` shared by all the constructors of the definition. +Parameters differ from inductive type annotations in that the +conclusion of each type of constructor invokes the inductive type with +the same parameter values of its specification. 
- Parameters differ from inductive type annotations in the fact that the - conclusion of each type of constructor invoke the inductive type with - the same values of parameters as its specification. +.. example:: - .. example:: + A typical example is the definition of polymorphic lists: - A typical example is the definition of polymorphic lists: + .. coqtop:: all - .. coqtop:: in + Inductive list (A:Set) : Set := + | nil : list A + | cons : A -> list A -> list A. - Inductive list (A:Set) : Set := - | nil : list A - | cons : A -> list A -> list A. + In the type of :g:`nil` and :g:`cons`, we write ":g:`list A`" and not + just ":g:`list`". The constructors :g:`nil` and :g:`cons` have these types: - In the type of :g:`nil` and :g:`cons`, we write :g:`(list A)` and not - just :g:`list`. The constructors :g:`nil` and :g:`cons` will have respectively - types: + .. coqtop:: all - .. coqtop:: all + Check nil. + Check cons. - Check nil. - Check cons. + Observe that the destructors are also quantified with :g:`(A:Set)`, for example: - Types of destructors are also quantified with :g:`(A:Set)`. + .. coqtop:: all - Once again, it is possible to specify only the type of the arguments - of the constructors, and to omit the type of the conclusion: + Check list_ind. - .. coqtop:: none + Once again, the types of the constructor arguments and of the conclusion can be omitted: + + .. coqtop:: none - Reset list. + Reset list. - .. coqtop:: in + .. coqtop:: in - Inductive list (A:Set) : Set := nil | cons (_:A) (_:list A). + Inductive list (A:Set) : Set := nil | cons (_:A) (_:list A). .. note:: - + It is possible in the type of a constructor, to - invoke recursively the inductive definition on an argument which is not + + The constructor type can + recursively invoke the inductive definition on an argument which is not the parameter itself. One can define : @@ -1019,7 +1000,9 @@ Parameterized inductive types .. 
coqtop:: all reset - Inductive list2 (A:Set) : Set := nil2 | cons2 (_:A) (_:list2 (A*A)). + Inductive list2 (A:Set) : Set := + | nil2 + | cons2 (_:A) (_:list2 (A*A)). But the following definition will give an error: @@ -1063,48 +1046,31 @@ Parameterized inductive types | cons3 : A -> list3 -> list3. End list3. - Attributes ``uniform`` and ``nonuniform`` respectively enable and - disable uniform parameters for a single inductive declaration block. + For finer control, you can use a ``|`` between the uniform and + the non-uniform parameters: -.. seealso:: - Section :ref:`inductive-definitions` and the :tacn:`induction` tactic. + .. coqtop:: in reset -Variants -~~~~~~~~ + Inductive Acc {A:Type} (R:A->A->Prop) | (x:A) : Prop + := Acc_in : (forall y, R y x -> Acc y) -> Acc x. -.. cmd:: Variant @ident @binders {? : @type } := {? | } @ident : @type {* | @ident : @type} + The flag can then be seen as deciding whether the ``|`` is at the + beginning (when the flag is unset) or at the end (when it is set) + of the parameters when not explicitly given. - The :cmd:`Variant` command is identical to the :cmd:`Inductive` command, except - that it disallows recursive definition of types (for instance, lists cannot - be defined using :cmd:`Variant`). No induction scheme is generated for - this variant, unless the :flag:`Nonrecursive Elimination Schemes` flag is on. +.. seealso:: + Section :ref:`inductive-definitions` and the :tacn:`induction` tactic. - .. exn:: The @num th argument of @ident must be @ident in @type. - :undocumented: +.. _mutually_inductive_types: Mutually defined inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmdv:: Inductive @ident {? : @type } := {? | } {*| @ident : @type } {* with {? | } {*| @ident {? : @type } } } - - This variant allows defining a block of mutually inductive types. - It has the same semantics as the above :cmd:`Inductive` definition for each - :token:`ident`. All :token:`ident` are simultaneously added to the environment. 
- Then well-typing of constructors can be checked. Each one of the :token:`ident` - can be used on its own. - - .. cmdv:: Inductive @ident @binders {? : @type } := {? | } {*| @ident : @type } {* with {? | } {*| @ident @binders {? : @type } } } +.. example:: Mutually defined inductive types - In this variant, the inductive definitions are parameterized - with :token:`binders`. However, parameters correspond to a local context - in which the whole set of inductive declarations is done. For this - reason, the parameters must be strictly the same for each inductive types. - -.. example:: - - The typical example of a mutual inductive data type is the one for trees and - forests. We assume given two types :g:`A` and :g:`B` as variables. It can - be declared the following way. + A typical example of mutually inductive data types is trees and + forests. We assume two types :g:`A` and :g:`B` that are given as variables. The types can + be declared like this: .. coqtop:: in @@ -1116,13 +1082,10 @@ Mutually defined inductive types | leaf : B -> forest | cons : tree -> forest -> forest. - This declaration generates automatically six induction principles. They are - respectively called :g:`tree_rec`, :g:`tree_ind`, :g:`tree_rect`, - :g:`forest_rec`, :g:`forest_ind`, :g:`forest_rect`. These ones are not the most - general ones but are just the induction principles corresponding to each - inductive part seen as a single inductive definition. + This declaration automatically generates eight induction principles. They are not the most + general principles, but they correspond to each inductive part seen as a single inductive definition. - To illustrate this point on our example, we give the types of :g:`tree_rec` + To illustrate this point on our example, here are the types of :g:`tree_rec` and :g:`forest_rec`. .. 
coqtop:: all @@ -1133,7 +1096,7 @@ Mutually defined inductive types Assume we want to parameterize our mutual inductive definitions with the two type variables :g:`A` and :g:`B`, the declaration should be - done the following way: + done as follows: .. coqdoc:: @@ -1152,10 +1115,27 @@ Mutually defined inductive types A generic command :cmd:`Scheme` is useful to build automatically various mutual induction principles. +Variants +~~~~~~~~ + +.. cmd:: Variant @inductive_definition {* with @inductive_definition } + + The :cmd:`Variant` command is identical to the :cmd:`Inductive` command, except + that it disallows recursive definition of types (for instance, lists cannot + be defined using :cmd:`Variant`). No induction scheme is generated for + this variant, unless the :flag:`Nonrecursive Elimination Schemes` flag is on. + + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + + .. exn:: The @num th argument of @ident must be @ident in @type. + :undocumented: + .. _coinductive-types: Co-inductive types -~~~~~~~~~~~~~~~~~~ +------------------ The objects of an inductive type are well-founded with respect to the constructors of the type. In other words, such objects contain only a @@ -1165,7 +1145,7 @@ constructors. Infinite objects are introduced by a non-ending (but effective) process of construction, defined in terms of the constructors of the type. -.. cmd:: CoInductive @ident @binders {? : @type } := {? | } @ident : @type {* | @ident : @type} +.. cmd:: CoInductive @inductive_definition {* with @inductive_definition } This command introduces a co-inductive type. The syntax of the command is the same as the command :cmd:`Inductive`. @@ -1173,10 +1153,14 @@ of the type. type, since such principles only make sense for inductive types. 
For co-inductive types, the only elimination principle is case analysis. + This command supports the :attr:`universes(polymorphic)`, :attr:`universes(monomorphic)`, + :attr:`universes(template)`, :attr:`universes(notemplate)`, + :attr:`Cumulative`, :attr:`NonCumulative` and :attr:`Private` attributes. + .. example:: - An example of a co-inductive type is the type of infinite sequences of - natural numbers, usually called streams. + The type of infinite sequences of natural numbers, usually called streams, + is an example of a co-inductive type. .. coqtop:: in @@ -1190,13 +1174,12 @@ of the type. Definition hd (x:Stream) := let (a,s) := x in a. Definition tl (x:Stream) := let (a,s) := x in s. -Definition of co-inductive predicates and blocks of mutually +Definitions of co-inductive predicates and blocks of mutually co-inductive definitions are also allowed. .. example:: - An example of a co-inductive predicate is the extensional equality on - streams: + The extensional equality on streams is an example of a co-inductive type: .. coqtop:: in @@ -1210,7 +1193,7 @@ co-inductive definitions are also allowed. objects in Section :ref:`cofixpoint`. Caveat -++++++ +~~~~~~ The ability to define co-inductive types by constructors, hereafter called *positive co-inductive types*, is known to break subject reduction. The story is @@ -1278,27 +1261,41 @@ constructions. .. _Fixpoint: -.. cmd:: Fixpoint @ident @binders {? {struct @ident} } {? : @type } := @term +.. cmd:: Fixpoint @fix_definition {* with @fix_definition } + + .. insertprodn fix_definition fix_definition + + .. prodn:: + fix_definition ::= @ident_decl {* @binder } {? @fixannot } {? : @type } {? := @term } {? @decl_notations } This command allows defining functions by pattern matching over inductive objects using a fixed point construction. 
The meaning of this declaration is - to define :token:`ident` a recursive function with arguments specified by - the :token:`binders` such that :token:`ident` applied to arguments - corresponding to these :token:`binders` has type :token:`type`, and is - equivalent to the expression :token:`term`. The type of :token:`ident` is - consequently :n:`forall @binders, @type` and its value is equivalent - to :n:`fun @binders => @term`. + to define :n:`@ident` as a recursive function with arguments specified by + the :n:`@binder`\s such that :n:`@ident` applied to arguments + corresponding to these :n:`@binder`\s has type :n:`@type`, and is + equivalent to the expression :n:`@term`. The type of :n:`@ident` is + consequently :n:`forall {* @binder }, @type` and its value is equivalent + to :n:`fun {* @binder } => @term`. To be accepted, a :cmd:`Fixpoint` definition has to satisfy some syntactical constraints on a special argument called the decreasing argument. They are needed to ensure that the :cmd:`Fixpoint` definition always terminates. - The point of the :n:`{struct @ident}` annotation is to let the user tell the - system which argument decreases along the recursive calls. + The point of the :n:`{struct @ident}` annotation (see :n:`@fixannot`) is to + let the user tell the system which argument decreases along the recursive calls. - The :n:`{struct @ident}` annotation may be left implicit, in this case the - system tries successively arguments from left to right until it finds one + The :n:`{struct @ident}` annotation may be left implicit, in which case the + system successively tries arguments from left to right until it finds one that satisfies the decreasing condition. + The :n:`with` clause allows simultaneously defining several mutual fixpoints. + It is especially useful when defining functions over mutually defined + inductive types. Example: :ref:`Mutual Fixpoints<example_mutual_fixpoints>`. 
+ + If :n:`@term` is omitted, :n:`@type` is required and Coq enters the proof editing mode. + This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. + In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant + for which the computational behavior is relevant. See :ref:`proof-editing-mode`. + .. note:: + Some fixpoints may have several arguments that fit as decreasing @@ -1379,35 +1376,35 @@ constructions. end end. +.. _example_mutual_fixpoints: - .. cmdv:: Fixpoint @ident @binders {? {struct @ident} } {? : @type } := @term {* with @ident @binders {? : @type } := @term } - - This variant allows defining simultaneously several mutual fixpoints. - It is especially useful when defining functions over mutually defined - inductive types. - - .. example:: + .. example:: Mutual fixpoints - The size of trees and forests can be defined the following way: + The size of trees and forests can be defined the following way: - .. coqtop:: all + .. coqtop:: all - Fixpoint tree_size (t:tree) : nat := - match t with - | node a f => S (forest_size f) - end - with forest_size (f:forest) : nat := - match f with - | leaf b => 1 - | cons t f' => (tree_size t + forest_size f') - end. + Fixpoint tree_size (t:tree) : nat := + match t with + | node a f => S (forest_size f) + end + with forest_size (f:forest) : nat := + match f with + | leaf b => 1 + | cons t f' => (tree_size t + forest_size f') + end. .. _cofixpoint: Definitions of recursive objects in co-inductive types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -.. cmd:: CoFixpoint @ident {? @binders } {? : @type } := @term +.. cmd:: CoFixpoint @cofix_definition {* with @cofix_definition } + + .. insertprodn cofix_definition cofix_definition + + .. prodn:: + cofix_definition ::= @ident_decl {* @binder } {? : @type } {? := @term } {? @decl_notations } This command introduces a method for constructing an infinite object of a coinductive type. 
For example, the stream containing all natural numbers can @@ -1419,7 +1416,7 @@ Definitions of recursive objects in co-inductive types CoFixpoint from (n:nat) : Stream := Seq n (from (S n)). - Oppositely to recursive ones, there is no decreasing argument in a + Unlike recursive definitions, there is no decreasing argument in a co-recursive definition. To be admissible, a method of construction must provide at least one extra constructor of the infinite object for each iteration. A syntactical guard condition is imposed on co-recursive @@ -1448,10 +1445,63 @@ Definitions of recursive objects in co-inductive types Eval compute in (hd (from 0)). Eval compute in (tl (from 0)). - .. cmdv:: CoFixpoint @ident {? @binders } {? : @type } := @term {* with @ident {? @binders } : {? @type } := @term } + As in the :cmd:`Fixpoint` command, the :n:`with` clause allows simultaneously + defining several mutual cofixpoints. + + If :n:`@term` is omitted, :n:`@type` is required and Coq enters the proof editing mode. + This can be used to define a term incrementally, in particular by relying on the :tacn:`refine` tactic. + In this case, the proof should be terminated with :cmd:`Defined` in order to define a constant + for which the computational behavior is relevant. See :ref:`proof-editing-mode`. + +.. _Computations: + +Computations +------------ + +.. insertprodn reduce pattern_occ - As in the :cmd:`Fixpoint` command, it is possible to introduce a block of - mutually dependent methods. +.. prodn:: + reduce ::= Eval @red_expr in + red_expr ::= red + | hnf + | simpl {? @delta_flag } {? @ref_or_pattern_occ } + | cbv {? @strategy_flag } + | cbn {? @strategy_flag } + | lazy {? @strategy_flag } + | compute {? @delta_flag } + | vm_compute {? @ref_or_pattern_occ } + | native_compute {? @ref_or_pattern_occ } + | unfold {+, @unfold_occ } + | fold {+ @term1_extended } + | pattern {+, @pattern_occ } + | @ident + delta_flag ::= {? 
- } [ {+ @smart_global } ] + smart_global ::= @qualid + | @by_notation + by_notation ::= @string {? % @ident } + strategy_flag ::= {+ @red_flags } + | @delta_flag + red_flags ::= beta + | iota + | match + | fix + | cofix + | zeta + | delta {? @delta_flag } + ref_or_pattern_occ ::= @smart_global {? at @occs_nums } + | @term1_extended {? at @occs_nums } + occs_nums ::= {+ @num_or_var } + | - @num_or_var {* @int_or_var } + num_or_var ::= @num + | @ident + int_or_var ::= @int + | @ident + unfold_occ ::= @smart_global {? at @occs_nums } + pattern_occ ::= @term1_extended {? at @occs_nums } + +See :ref:`Conversion-rules`. + +.. todo:: Add text here .. _Assertions: @@ -1463,40 +1513,26 @@ inhabitant of the type) is interactively built using tactics. The interactive proof mode is described in Chapter :ref:`proofhandling` and the tactics in Chapter :ref:`Tactics`. The basic assertion command is: -.. cmd:: Theorem @ident {? @binders } : @type - - After the statement is asserted, Coq needs a proof. Once a proof of - :token:`type` under the assumptions represented by :token:`binders` is given and - validated, the proof is generalized into a proof of :n:`forall @binders, @type` and - the theorem is bound to the name :token:`ident` in the environment. - - .. exn:: The term @term has type @type which should be Set, Prop or Type. - :undocumented: - - .. exn:: @ident already exists. - :name: @ident already exists. (Theorem) - - The name you provided is already defined. You have then to choose - another name. - - .. exn:: Nested proofs are not allowed unless you turn the Nested Proofs Allowed flag on. - - You are asserting a new statement while already being in proof editing mode. - This feature, called nested proofs, is disabled by default. - To activate it, turn the :flag:`Nested Proofs Allowed` flag on. +.. cmd:: @thm_token @ident_decl {* @binder } : @type {* with @ident_decl {* @binder } : @type } + :name: Theorem; Lemma; Fact; Remark; Corollary; Proposition; Property - .. 
cmdv:: Lemma @ident {? @binders } : @type - Remark @ident {? @binders } : @type - Fact @ident {? @binders } : @type - Corollary @ident {? @binders } : @type - Proposition @ident {? @binders } : @type - :name: Lemma; Remark; Fact; Corollary; Proposition + .. insertprodn thm_token thm_token - These commands are all synonyms of :n:`Theorem @ident {? @binders } : type`. + .. prodn:: + thm_token ::= Theorem + | Lemma + | Fact + | Remark + | Corollary + | Proposition + | Property -.. cmdv:: Theorem @ident {? @binders } : @type {* with @ident {? @binders } : @type} + After the statement is asserted, Coq needs a proof. Once a proof of + :n:`@type` under the assumptions represented by :n:`@binder`\s is given and + validated, the proof is generalized into a proof of :n:`forall {* @binder }, @type` and + the theorem is bound to the name :n:`@ident` in the environment. - This command is useful for theorems that are proved by simultaneous induction + Forms using the :n:`with` clause are useful for theorems that are proved by simultaneous induction over a mutually inductive assumption, or that assert mutually dependent statements in some mutual co-inductive type. It is equivalent to :cmd:`Fixpoint` or :cmd:`CoFixpoint` but using tactics to build the proof of @@ -1513,46 +1549,29 @@ Chapter :ref:`Tactics`. The basic assertion command is: correct at some time of the interactive development of a proof, use the command :cmd:`Guarded`. - The command can be used also with :cmd:`Lemma`, :cmd:`Remark`, etc. instead of - :cmd:`Theorem`. - -.. cmdv:: Definition @ident {? @binders } : @type - - This allows defining a term of type :token:`type` using the proof editing - mode. It behaves as :cmd:`Theorem` but is intended to be used in conjunction with - :cmd:`Defined` in order to define a constant of which the computational - behavior is relevant. - - The command can be used also with :cmd:`Example` instead of :cmd:`Definition`. - - .. 
seealso:: :cmd:`Opaque`, :cmd:`Transparent`, :tacn:`unfold`. - -.. cmdv:: Let @ident {? @binders } : @type - - Like :n:`Definition @ident {? @binders } : @type` except that the definition is - turned into a let-in definition generalized over the declarations depending - on it after closing the current section. + .. exn:: The term @term has type @type which should be Set, Prop or Type. + :undocumented: -.. cmdv:: Fixpoint @ident @binders : @type {* with @ident @binders : @type} + .. exn:: @ident already exists. + :name: @ident already exists. (Theorem) - This generalizes the syntax of :cmd:`Fixpoint` so that one or more bodies - can be defined interactively using the proof editing mode (when a - body is omitted, its type is mandatory in the syntax). When the block - of proofs is completed, it is intended to be ended by :cmd:`Defined`. + The name you provided is already defined. You have then to choose + another name. -.. cmdv:: CoFixpoint @ident {? @binders } : @type {* with @ident {? @binders } : @type} + .. exn:: Nested proofs are not allowed unless you turn the Nested Proofs Allowed flag on. - This generalizes the syntax of :cmd:`CoFixpoint` so that one or more bodies - can be defined interactively using the proof editing mode. + You are asserting a new statement while already being in proof editing mode. + This feature, called nested proofs, is disabled by default. + To activate it, turn the :flag:`Nested Proofs Allowed` flag on. -A proof starts by the keyword :cmd:`Proof`. Then Coq enters the proof editing mode -until the proof is completed. The proof editing mode essentially contains -tactics that are described in chapter :ref:`Tactics`. Besides tactics, there -are commands to manage the proof editing mode. They are described in Chapter +Proofs start with the keyword :cmd:`Proof`. Then Coq enters the proof editing mode +until the proof is completed. In proof editing mode, the user primarily enters +tactics, which are described in chapter :ref:`Tactics`. 
The user may also enter +commands to manage the proof editing mode. They are described in Chapter :ref:`proofhandling`. -When the proof is completed it should be validated and put in the environment -using the keyword :cmd:`Qed`. +When the proof is complete, use the :cmd:`Qed` command so the kernel verifies +the proof and adds it to the environment. .. note:: @@ -1581,33 +1600,96 @@ using the keyword :cmd:`Qed`. Attributes ----------- -Any vernacular command can be decorated with a list of attributes, enclosed -between ``#[`` (hash and opening square bracket) and ``]`` (closing square bracket) -and separated by commas ``,``. Multiple space-separated blocks may be provided. +.. insertprodn all_attrs legacy_attrs -Each attribute has a name (an identifier) and may have a value. -A value is either a :token:`string` (in which case it is specified with an equal ``=`` sign), -or a list of attributes, enclosed within brackets. +.. prodn:: + all_attrs ::= {* #[ {*, @attr } ] } {? @legacy_attrs } + attr ::= @ident {? @attr_value } + attr_value ::= = @string + | ( {*, @attr } ) + legacy_attrs ::= {? {| Local | Global } } {? {| Polymorphic | Monomorphic } } {? Program } {? {| Cumulative | NonCumulative } } {? Private } + +Attributes modify the behavior of a command or tactic. +Syntactically, most commands and tactics can be decorated with attributes, but +attributes not supported by the command or tactic will be flagged as errors. + +The order of top-level attributes doesn't affect their meaning. ``#[foo,bar]``, ``#[bar,foo]``, +``#[foo]#[bar]`` and ``#[bar]#[foo]`` are equivalent. + +The legacy attributes (:n:`@legacy_attrs`) provide an older, alternate syntax +for certain attributes. 
They are equivalent to new attributes as follows: + +================ ================================ +Legacy attribute New attribute +================ ================================ +`Local` :attr:`local` +`Global` :attr:`global` +`Polymorphic` :attr:`universes(polymorphic)` +`Monomorphic` :attr:`universes(monomorphic)` +`Cumulative` none +`NonCumulative` none +`Private` none +`Program` :attr:`program` +================ ================================ Some attributes are specific to a command, and so are described with -that command. Currently, the following attributes are recognized by a -variety of commands: +that command. Currently, the following attributes are recognized: + +.. attr:: universes(monomorphic) + :name: universes(monomorphic) + + See :ref:`polymorphicuniverses`. + +.. attr:: universes(polymorphic) + :name: universes(polymorphic) + + See :ref:`polymorphicuniverses`. + +.. attr:: universes(template) + :name: universes(template) + + See :ref:`Template-polymorphism` + +.. attr:: universes(notemplate) + :name: universes(notemplate) + + See :ref:`Template-polymorphism` + +.. attr:: program -``universes(monomorphic)``, ``universes(polymorphic)`` - Equivalent to the ``Monomorphic`` and - ``Polymorphic`` flags (see :ref:`polymorphicuniverses`). + See :ref:`programs`. -``program`` - Takes no value, equivalent to the ``Program`` flag - (see :ref:`programs`). +.. attr:: global -``global``, ``local`` - Take no value, equivalent to the ``Global`` and ``Local`` flags - (see :ref:`controlling-locality-of-commands`). + See :ref:`controlling-locality-of-commands`. -``deprecated`` - Takes as value the optional attributes ``since`` and ``note``; - both have a string value. +.. attr:: local + + See :ref:`controlling-locality-of-commands`. + +.. attr:: Cumulative + + Legacy attribute, only allowed in a polymorphic context. 
+ Specifies that two instances of the same inductive type (family) are convertible + based on the universe variances; they do not need to be equal. + See :ref:`cumulative`. + +.. attr:: NonCumulative + + Legacy attribute, only allowed in a polymorphic context. + Specifies that two instances of the same inductive type (family) are convertible + only if all the universes are equal. + See :ref:`cumulative`. + +.. attr:: Private + + Legacy attribute. Documentation to be added. + +.. attr:: deprecated ( {? since = @string , } {? note = @string } ) + :name: deprecated + + At least one of :n:`since` or :n:`note` must be present. If both are present, + either one may appear first and they must be separated by a comma. This attribute is supported by the following commands: :cmd:`Ltac`, :cmd:`Tactic Notation`, :cmd:`Notation`, :cmd:`Infix`. @@ -1625,12 +1707,13 @@ variety of commands: :n:`@string__1` is the actual notation, :n:`@string__2` is the version number, :n:`@string__3` is the note. -``canonical`` +.. attr:: canonical + This attribute can decorate a :cmd:`Definition` or :cmd:`Let` command. It is equivalent to having a :cmd:`Canonical Structure` declaration just after the command. - This attirbute can take the value ``false`` when decorating a record field + This attribute can take the value ``false`` when decorating a record field declaration with the effect of preventing the field from being involved in the inference of canonical instances. diff --git a/doc/sphinx/practical-tools/utilities.rst b/doc/sphinx/practical-tools/utilities.rst index 3d1fc6d4b9..179dff9959 100644 --- a/doc/sphinx/practical-tools/utilities.rst +++ b/doc/sphinx/practical-tools/utilities.rst @@ -255,14 +255,20 @@ file timing data: one, you can pass them via the variable ``TGTS``, e.g., ``make pretty-timed TGTS="a.vo b.vo"``. - .. :: + .. note:: This target requires ``python`` to build the table. .. 
note:: This target will *append* to the timing log; if you want a - fresh start, you must remove the ``filetime-of-build.log`` or + fresh start, you must remove the file ``time-of-build.log`` or ``run make cleanall``. + .. note:: + By default the table displays user times. If the build log + contains real times (which it does by default), passing + ``TIMING_REAL=1`` to ``make pretty-timed`` will use real times + rather than user times in the table. + .. example:: For example, the output of ``make pretty-timed`` may look like this: @@ -310,6 +316,15 @@ file timing data: (which are frequently noise); lexicographic sorting forces an order on files which take effectively no time to compile. + If you prefer a different sorting order, you can pass + ``TIMING_SORT_BY=absolute`` to sort by the total time taken, or + ``TIMING_SORT_BY=diff`` to sort by the signed difference in + time. + + .. note:: + Just like ``pretty-timed``, this table defaults to using user + times. Pass ``TIMING_REAL=1`` to ``make`` on the command line to show real times instead. + .. example:: For example, the output table from @@ -349,7 +364,7 @@ line timing data: :: - print-pretty-single-time-diff BEFORE=path/to/file.v.before-timing AFTER=path/to/file.v.after-timing + print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing this target will make a sorted table of the per-line timing differences between the timing logs in the ``BEFORE`` and ``AFTER`` files, display it, and @@ -364,6 +379,28 @@ line timing data: .. note:: This target requires python to build the table. + .. note:: + This target follows the same sorting order as the + ``print-pretty-timed-diff`` target, and supports the same + options for the ``TIMING_SORT_BY`` variable. + + .. note:: + By default, two lines are only considered the same if the + character offsets and initial code strings are identical. 
Passing + ``TIMING_FUZZ=N`` relaxes this constraint by allowing the + character locations to differ by up to ``N``, as long + as the total number of characters and initial code strings + continue to match. This is useful when there are small changes + to a file, and you want to match later lines that have not + changed even though the character offsets have changed. + + .. note:: + By default the table picks up real times, under the assumption + that when comparing line-by-line, the real time is a more + accurate representation as it includes disk time and time spent + in the native compiler. Passing ``TIMING_REAL=0`` to ``make`` + will use user times rather than real times in the table. + .. example:: For example, running ``print-pretty-single-time-diff`` might give a table like this: diff --git a/doc/sphinx/proof-engine/ltac2.rst b/doc/sphinx/proof-engine/ltac2.rst index b722b1af74..06106a6b4c 100644 --- a/doc/sphinx/proof-engine/ltac2.rst +++ b/doc/sphinx/proof-engine/ltac2.rst @@ -3,10 +3,6 @@ Ltac2 ===== -.. coqtop:: none - - From Ltac2 Require Import Ltac2. - The Ltac tactic language is probably one of the ingredients of the success of Coq, yet it is at the same time its Achilles' heel. Indeed, Ltac: @@ -88,6 +84,12 @@ which allows to ensure that Ltac2 satisfies the same equations as a generic ML with unspecified effects would do, e.g. function reduction is substitution by a value. +To import Ltac2, use the following command: + +.. coqtop:: in + + From Ltac2 Require Import Ltac2. + Type Syntax ~~~~~~~~~~~ @@ -907,9 +909,9 @@ Ltac2 from Ltac1 Same as above by switching Ltac1 by Ltac2 and using the `ltac2` quotation instead. -.. productionlist:: coq - ltac_expr : ltac2 : ( `ltac2_term` ) - : ltac2 : ( `ident` ... `ident` |- `ltac2_term` ) +.. prodn:: + ltac_expr += ltac2 : ( `ltac2_term` ) + | ltac2 : ( `ident` ... 
`ident` |- `ltac2_term` ) The typing rules are dual, that is, the optional identifiers are bound with type `Ltac2.Ltac1.t` in the Ltac2 expression, which is expected to have diff --git a/doc/sphinx/proof-engine/proof-handling.rst b/doc/sphinx/proof-engine/proof-handling.rst index b1734b3f19..03eebc32f9 100644 --- a/doc/sphinx/proof-engine/proof-handling.rst +++ b/doc/sphinx/proof-engine/proof-handling.rst @@ -80,20 +80,19 @@ list of assertion commands is given in :ref:`Assertions`. The command a while when the proof is large. In some exceptional cases one may even incur a memory overflow. - .. cmdv:: Defined - :name: Defined +.. cmd:: Defined - Same as :cmd:`Qed` but the proof is then declared transparent, which means - that its content can be explicitly used for type checking and that it can be - unfolded in conversion tactics (see :ref:`performingcomputations`, - :cmd:`Opaque`, :cmd:`Transparent`). + Same as :cmd:`Qed`, except the proof is made *transparent*, which means + that its content can be explicitly used for type checking and that it can be + unfolded in conversion tactics (see :ref:`performingcomputations`, + :cmd:`Opaque`, :cmd:`Transparent`). - .. cmdv:: Save @ident - :name: Save +.. cmd:: Save @ident + :name: Save - Forces the name of the original goal to be :token:`ident`. This - command (and the following ones) can only be used if the original goal - has been opened using the :cmd:`Goal` command. + Forces the name of the original goal to be :token:`ident`. This + command can only be used if the original goal + was opened using the :cmd:`Goal` command. .. 
cmd:: Admitted diff --git a/doc/sphinx/proof-engine/ssreflect-proof-language.rst b/doc/sphinx/proof-engine/ssreflect-proof-language.rst index 46215f16a6..90a991794f 100644 --- a/doc/sphinx/proof-engine/ssreflect-proof-language.rst +++ b/doc/sphinx/proof-engine/ssreflect-proof-language.rst @@ -1702,7 +1702,7 @@ Intro patterns when it is the very *first* :token:`i_pattern` after tactic ``=>`` tactical *and* tactic is not a move, is a *branching*:token:`i_pattern`. It executes the sequence - :token:`i_item`:math:`_i` on the i-th subgoal produced by tactic. The + :n:`@i_item__i` on the i-th subgoal produced by tactic. The execution of tactic should thus generate exactly m subgoals, unless the ``[…]`` :token:`i_pattern` comes after an initial ``//`` or ``//=`` :token:`s_item` that closes some of the goals produced by ``tactic``, in diff --git a/doc/sphinx/proof-engine/tactics.rst b/doc/sphinx/proof-engine/tactics.rst index 36a5916868..6a0ce20c79 100644 --- a/doc/sphinx/proof-engine/tactics.rst +++ b/doc/sphinx/proof-engine/tactics.rst @@ -275,7 +275,7 @@ These patterns can be used when the hypothesis is an equality: :n:`or` has multiple constructors (:n:`or_introl` and :n:`or_intror`), while :n:`and` has a single constructor (:n:`conj`) with multiple parameters (:n:`A` and :n:`B`). - These are defined in theories/Init/Logic.v. The "where" clauses define the + These are defined in ``theories/Init/Logic.v``. The "where" clauses define the infix notation for "or" and "and". .. coqdoc:: @@ -1281,16 +1281,16 @@ Managing the local context is an occurrence clause whose syntax and behavior are described in :ref:`goal occurrences <occurrencessets>`. - .. tacv:: set (@ident @binders := @term) {? in @goal_occurrences } + .. tacv:: set (@ident {* @binder } := @term) {? in @goal_occurrences } - This is equivalent to :n:`set (@ident := fun @binders => @term) {? in @goal_occurrences }`. + This is equivalent to :n:`set (@ident := fun {* @binder } => @term) {? in @goal_occurrences }`. 
.. tacv:: set @term {? in @goal_occurrences } This behaves as :n:`set (@ident := @term) {? in @goal_occurrences }` but :token:`ident` is generated by Coq. - .. tacv:: eset (@ident {? @binders } := @term) {? in @goal_occurrences } + .. tacv:: eset (@ident {* @binder } := @term) {? in @goal_occurrences } eset @term {? in @goal_occurrences } :name: eset; _ @@ -1326,16 +1326,16 @@ Managing the local context without performing any replacement in the goal or in the hypotheses. It is equivalent to :n:`set (@ident := @term) in |-`. - .. tacv:: pose (@ident @binders := @term) + .. tacv:: pose (@ident {* @binder } := @term) - This is equivalent to :n:`pose (@ident := fun @binders => @term)`. + This is equivalent to :n:`pose (@ident := fun {* @binder } => @term)`. .. tacv:: pose @term This behaves as :n:`pose (@ident := @term)` but :token:`ident` is generated by Coq. - .. tacv:: epose (@ident {? @binders} := @term) + .. tacv:: epose (@ident {* @binder } := @term) epose @term :name: epose; _ diff --git a/doc/sphinx/user-extensions/proof-schemes.rst b/doc/sphinx/user-extensions/proof-schemes.rst index 5b0b3c51b0..34197c4fcf 100644 --- a/doc/sphinx/user-extensions/proof-schemes.rst +++ b/doc/sphinx/user-extensions/proof-schemes.rst @@ -337,31 +337,31 @@ Generation of inversion principles with ``Derive`` ``Inversion`` ----------------------------------------------------------------- .. cmd:: Derive Inversion @ident with @ident Sort @sort - Derive Inversion @ident with (forall @binders, @ident @term) Sort @sort + Derive Inversion @ident with (forall {* @binder }, @ident @term) Sort @sort This command generates an inversion principle for the :tacn:`inversion ... using ...` tactic. The first :token:`ident` is the name of the generated principle. The second :token:`ident` should be an inductive predicate, and :token:`binders` the variables occurring in the term :token:`term`. 
This command generates the inversion lemma for the sort - :token:`sort` corresponding to the instance :n:`forall @binders, @ident @term`. + :token:`sort` corresponding to the instance :n:`forall {* @binder }, @ident @term`. When applied, it is equivalent to having inverted the instance with the tactic :g:`inversion`. .. cmdv:: Derive Inversion_clear @ident with @ident Sort @sort - Derive Inversion_clear @ident with (forall @binders, @ident @term) Sort @sort + Derive Inversion_clear @ident with (forall {* @binder }, @ident @term) Sort @sort When applied, it is equivalent to having inverted the instance with the tactic inversion replaced by the tactic `inversion_clear`. .. cmdv:: Derive Dependent Inversion @ident with @ident Sort @sort - Derive Dependent Inversion @ident with (forall @binders, @ident @term) Sort @sort + Derive Dependent Inversion @ident with (forall {* @binder }, @ident @term) Sort @sort When applied, it is equivalent to having inverted the instance with the tactic `dependent inversion`. .. cmdv:: Derive Dependent Inversion_clear @ident with @ident Sort @sort - Derive Dependent Inversion_clear @ident with (forall @binders, @ident @term) Sort @sort + Derive Dependent Inversion_clear @ident with (forall {* @binder }, @ident @term) Sort @sort When applied, it is equivalent to having inverted the instance with the tactic `dependent inversion_clear`. diff --git a/doc/sphinx/user-extensions/syntax-extensions.rst b/doc/sphinx/user-extensions/syntax-extensions.rst index dbe714c388..9b4d7cf5fa 100644 --- a/doc/sphinx/user-extensions/syntax-extensions.rst +++ b/doc/sphinx/user-extensions/syntax-extensions.rst @@ -163,6 +163,10 @@ Grammar constr` is at level 100. To avoid ``x : A`` being parsed as a type cast, it is necessary to put ``x`` at a level below 100, typically 99. Hence, a correct definition is the following: +.. coqtop:: none + + Reset Initial. + .. coqtop:: all Notation "{ x : A | P }" := (sig A (fun x => P)) (x at level 99). 
@@ -306,10 +310,10 @@ at the time of use of the notation. The Infix command ~~~~~~~~~~~~~~~~~~ -The :cmd:`Infix` command is a shortening for declaring notations of infix +The :cmd:`Infix` command is a shortcut for declaring notations for infix symbols. -.. cmd:: Infix "@symbol" := @term {? (@modifiers) }. +.. cmd:: Infix @string := @term {? (@modifiers) } This command is equivalent to @@ -350,6 +354,11 @@ Reserving notations This command declares an infix parsing rule without giving its interpretation. + When a format is attached to a reserved notation, it is used by + default by all subsequent interpretations of the corresponding + notation. A specific interpretation can provide its own format + overriding the default format though. + Simultaneous definition of terms and notations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Thanks to reserved notations, the inductive, co-inductive, record, recursive and corecursive definitions can benefit from customized notations. To do this, insert a ``where`` notation clause after the definition of the (co)inductive type or (co)recursive term (or after the definition of each of them in case of mutual -definitions). The exact syntax is given by :token:`decl_notation` for inductive, +definitions). The exact syntax is given by :n:`@decl_notation` for inductive, co-inductive, recursive and corecursive definitions and in :ref:`record-types` for records. Here are examples: @@ -387,6 +396,11 @@ Displaying information about notations Controls whether to use notations for printing terms wherever possible. Default is on. +.. flag:: Printing Parentheses + + If on, parentheses are printed even if implied by associativity and precedence. + Default is off. + .. seealso:: :flag:`Printing All` @@ -408,6 +422,27 @@ identifiers or tokens starting with a single quote are dropped. Locate "exists". Locate "exists _ .. _ , _". 
+Inheritance of the properties of arguments of constants bound to a notation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If the right-hand side of a notation is a partially applied constant, +the notation inherits the implicit arguments (see +:ref:`ImplicitArguments`) and interpretation scopes (see +:ref:`Scopes`) of the constant. For instance: + +.. coqtop:: in reset + +   Record R := {dom : Type; op : forall {A}, A -> dom}. +   Notation "# x" := (@op x) (at level 8). + +.. coqtop:: all + +   Check fun x:R => # x 3. + +As an exception, if the right-hand side is just of the form +:n:`@@qualid`, this conventionally stops the inheritance of implicit +arguments (but not of interpretation scopes). + Notations and binders ~~~~~~~~~~~~~~~~~~~~~ @@ -798,7 +833,13 @@ associated to the custom entry ``expr``. The level can be omitted, as in Notation "[ e ]" := e (e custom expr). -in which case Coq tries to infer it. +in which case Coq infers it. If the sub-expression is at a border of +the notation (as e.g. ``x`` and ``y`` in ``x + y``), the level is +determined by the associativity. If the sub-expression is not at the +border of the notation (as e.g. ``e`` in ``[ e ]``), the level is +inferred to be the highest level used for the entry. In particular, +this level depends on the highest level existing in the entry at the +time of use of the notation. In the absence of an explicit entry for parsing or printing a sub-expression of a notation in a custom entry, the default is to @@ -873,7 +914,6 @@ notations are given below. The optional :production:`scope` is described in : Fixpoint `fix_body` [`decl_notation`] with … with `fix_body` [`decl_notation`]. : CoFixpoint `fix_body` [`decl_notation`] with … with `fix_body` [`decl_notation`]. : [Local] Declare Custom Entry `ident`. - decl_notation : [where `string` := `term` [: `scope`] and … and `string` := `term` [: `scope`]]. 
modifiers : `modifier`, … , `modifier` modifier : at level `num` : in custom `ident` @@ -903,6 +943,12 @@ notations are given below. The optional :production:`scope` is described in : as pattern : as strict pattern +.. insertprodn decl_notations decl_notation + +.. prodn:: + decl_notations ::= where @decl_notation {* and @decl_notation } + decl_notation ::= @string := @term1_extended {? : @ident } + .. note:: No typing of the denoted expression is performed at definition time. Type checking is done only at the time of use of the notation. @@ -1400,6 +1446,12 @@ Abbreviations denoted expression is performed at definition time. Type checking is done only at the time of use of the abbreviation. + Like for notations, if the right-hand side of an abbreviation is a + partially applied constant, the abbreviation inherits the implicit + arguments and interpretation scopes of the constant. As an + exception, if the right-hand side is just of the form :n:`@@qualid`, + this conventionally stops the inheritance of implicit arguments. + .. 
_numeral-notations: Numeral notations diff --git a/doc/stdlib/dune b/doc/stdlib/dune index 828caecabc..093c7a62b2 100644 --- a/doc/stdlib/dune +++ b/doc/stdlib/dune @@ -5,7 +5,6 @@ (deps make-library-index index-list.html.template hidden-files (source_tree %{project_root}/theories) - (source_tree %{project_root}/plugins) (source_tree %{project_root}/user-contrib)) (action (chdir %{project_root} @@ -17,7 +16,6 @@ (deps ; This will be replaced soon by `theories/**/*.v` soon, thanks to rgrinberg (source_tree %{project_root}/theories) - (source_tree %{project_root}/plugins) (source_tree %{project_root}/user-contrib) (:header %{project_root}/doc/common/styles/html/coqremote/header.html) (:footer %{project_root}/doc/common/styles/html/coqremote/footer.html) @@ -26,7 +24,7 @@ (action (progn (run mkdir -p html) - (bash "%{bin:coqdoc} -q -d html --with-header %{header} --with-footer %{footer} --multi-index --html -g -R %{project_root}/theories Coq -R %{project_root}/plugins Coq -Q %{project_root}/user-contrib/Ltac2 Ltac2 $(find %{project_root}/theories %{project_root}/plugins %{project_root}/user-contrib -name *.v)") + (bash "%{bin:coqdoc} -q -d html --with-header %{header} --with-footer %{footer} --multi-index --html -g -R %{project_root}/theories Coq -Q %{project_root}/user-contrib/Ltac2 Ltac2 $(find %{project_root}/theories %{project_root}/user-contrib -name *.v)") (run mv html/index.html html/genindex.html) (with-stdout-to _index.html diff --git a/doc/stdlib/hidden-files b/doc/stdlib/hidden-files index dbc3a42ee9..60d6039b0f 100644 --- a/doc/stdlib/hidden-files +++ b/doc/stdlib/hidden-files @@ -1,90 +1,90 @@ -plugins/btauto/Algebra.v -plugins/btauto/Btauto.v -plugins/btauto/Reflect.v -plugins/derive/Derive.v -plugins/extraction/ExtrHaskellBasic.v -plugins/extraction/ExtrHaskellNatInt.v -plugins/extraction/ExtrHaskellNatInteger.v -plugins/extraction/ExtrHaskellNatNum.v -plugins/extraction/ExtrHaskellString.v -plugins/extraction/ExtrHaskellZInt.v 
-plugins/extraction/ExtrHaskellZInteger.v -plugins/extraction/ExtrHaskellZNum.v -plugins/extraction/ExtrOcamlBasic.v -plugins/extraction/ExtrOcamlBigIntConv.v -plugins/extraction/ExtrOcamlChar.v -plugins/extraction/ExtrOCamlInt63.v -plugins/extraction/ExtrOCamlFloats.v -plugins/extraction/ExtrOcamlIntConv.v -plugins/extraction/ExtrOcamlNatBigInt.v -plugins/extraction/ExtrOcamlNatInt.v -plugins/extraction/ExtrOcamlString.v -plugins/extraction/ExtrOcamlNativeString.v -plugins/extraction/ExtrOcamlZBigInt.v -plugins/extraction/ExtrOcamlZInt.v -plugins/extraction/Extraction.v -plugins/funind/FunInd.v -plugins/funind/Recdef.v -plugins/ltac/Ltac.v -plugins/micromega/Ztac.v -plugins/micromega/DeclConstant.v -plugins/micromega/Env.v -plugins/micromega/EnvRing.v -plugins/micromega/Fourier.v -plugins/micromega/Fourier_util.v -plugins/micromega/Lia.v -plugins/micromega/Lqa.v -plugins/micromega/Lra.v -plugins/micromega/MExtraction.v -plugins/micromega/OrderedRing.v -plugins/micromega/Psatz.v -plugins/micromega/QMicromega.v -plugins/micromega/RMicromega.v -plugins/micromega/Refl.v -plugins/micromega/RingMicromega.v -plugins/micromega/Tauto.v -plugins/micromega/VarMap.v -plugins/micromega/ZCoeff.v -plugins/micromega/ZMicromega.v -plugins/micromega/ZifyInst.v -plugins/micromega/ZifyBool.v -plugins/micromega/ZifyComparison.v -plugins/micromega/ZifyClasses.v -plugins/micromega/Zify.v -plugins/nsatz/Nsatz.v -plugins/omega/Omega.v -plugins/omega/OmegaLemmas.v -plugins/omega/OmegaPlugin.v -plugins/omega/OmegaTactic.v -plugins/omega/PreOmega.v -plugins/quote/Quote.v -plugins/romega/ROmega.v -plugins/romega/ReflOmegaCore.v -plugins/rtauto/Bintree.v -plugins/rtauto/Rtauto.v -plugins/setoid_ring/Algebra_syntax.v -plugins/setoid_ring/ArithRing.v -plugins/setoid_ring/BinList.v -plugins/setoid_ring/Cring.v -plugins/setoid_ring/Field.v -plugins/setoid_ring/Field_tac.v -plugins/setoid_ring/Field_theory.v -plugins/setoid_ring/InitialRing.v -plugins/setoid_ring/Integral_domain.v 
-plugins/setoid_ring/NArithRing.v -plugins/setoid_ring/Ncring.v -plugins/setoid_ring/Ncring_initial.v -plugins/setoid_ring/Ncring_polynom.v -plugins/setoid_ring/Ncring_tac.v -plugins/setoid_ring/RealField.v -plugins/setoid_ring/Ring.v -plugins/setoid_ring/Ring_base.v -plugins/setoid_ring/Ring_polynom.v -plugins/setoid_ring/Ring_tac.v -plugins/setoid_ring/Ring_theory.v -plugins/setoid_ring/Rings_Q.v -plugins/setoid_ring/Rings_R.v -plugins/setoid_ring/Rings_Z.v -plugins/setoid_ring/ZArithRing.v -plugins/ssr/ssrunder.v -plugins/ssr/ssrsetoid.v +theories/btauto/Algebra.v +theories/btauto/Btauto.v +theories/btauto/Reflect.v +theories/derive/Derive.v +theories/extraction/ExtrHaskellBasic.v +theories/extraction/ExtrHaskellNatInt.v +theories/extraction/ExtrHaskellNatInteger.v +theories/extraction/ExtrHaskellNatNum.v +theories/extraction/ExtrHaskellString.v +theories/extraction/ExtrHaskellZInt.v +theories/extraction/ExtrHaskellZInteger.v +theories/extraction/ExtrHaskellZNum.v +theories/extraction/ExtrOcamlBasic.v +theories/extraction/ExtrOcamlBigIntConv.v +theories/extraction/ExtrOcamlChar.v +theories/extraction/ExtrOCamlInt63.v +theories/extraction/ExtrOCamlFloats.v +theories/extraction/ExtrOcamlIntConv.v +theories/extraction/ExtrOcamlNatBigInt.v +theories/extraction/ExtrOcamlNatInt.v +theories/extraction/ExtrOcamlString.v +theories/extraction/ExtrOcamlNativeString.v +theories/extraction/ExtrOcamlZBigInt.v +theories/extraction/ExtrOcamlZInt.v +theories/extraction/Extraction.v +theories/funind/FunInd.v +theories/funind/Recdef.v +theories/ltac/Ltac.v +theories/micromega/Ztac.v +theories/micromega/DeclConstant.v +theories/micromega/Env.v +theories/micromega/EnvRing.v +theories/micromega/Fourier.v +theories/micromega/Fourier_util.v +theories/micromega/Lia.v +theories/micromega/Lqa.v +theories/micromega/Lra.v +theories/micromega/MExtraction.v +theories/micromega/OrderedRing.v +theories/micromega/Psatz.v +theories/micromega/QMicromega.v +theories/micromega/RMicromega.v 
+theories/micromega/Refl.v +theories/micromega/RingMicromega.v +theories/micromega/Tauto.v +theories/micromega/VarMap.v +theories/micromega/ZCoeff.v +theories/micromega/ZMicromega.v +theories/micromega/ZifyInst.v +theories/micromega/ZifyBool.v +theories/micromega/ZifyComparison.v +theories/micromega/ZifyClasses.v +theories/micromega/Zify.v +theories/nsatz/Nsatz.v +theories/omega/Omega.v +theories/omega/OmegaLemmas.v +theories/omega/OmegaPlugin.v +theories/omega/OmegaTactic.v +theories/omega/PreOmega.v +theories/quote/Quote.v +theories/romega/ROmega.v +theories/romega/ReflOmegaCore.v +theories/rtauto/Bintree.v +theories/rtauto/Rtauto.v +theories/setoid_ring/Algebra_syntax.v +theories/setoid_ring/ArithRing.v +theories/setoid_ring/BinList.v +theories/setoid_ring/Cring.v +theories/setoid_ring/Field.v +theories/setoid_ring/Field_tac.v +theories/setoid_ring/Field_theory.v +theories/setoid_ring/InitialRing.v +theories/setoid_ring/Integral_domain.v +theories/setoid_ring/NArithRing.v +theories/setoid_ring/Ncring.v +theories/setoid_ring/Ncring_initial.v +theories/setoid_ring/Ncring_polynom.v +theories/setoid_ring/Ncring_tac.v +theories/setoid_ring/RealField.v +theories/setoid_ring/Ring.v +theories/setoid_ring/Ring_base.v +theories/setoid_ring/Ring_polynom.v +theories/setoid_ring/Ring_tac.v +theories/setoid_ring/Ring_theory.v +theories/setoid_ring/Rings_Q.v +theories/setoid_ring/Rings_R.v +theories/setoid_ring/Rings_Z.v +theories/setoid_ring/ZArithRing.v +theories/ssr/ssrunder.v +theories/ssr/ssrsetoid.v diff --git a/doc/stdlib/index-list.html.template b/doc/stdlib/index-list.html.template index b2ddf36b65..51688e2d9e 100644 --- a/doc/stdlib/index-list.html.template +++ b/doc/stdlib/index-list.html.template @@ -619,11 +619,11 @@ through the <tt>Require Import</tt> command.</p> small scale reflection formalization technique </dt> <dd> - plugins/ssrmatching/ssrmatching.v - plugins/ssr/ssrclasses.v - plugins/ssr/ssreflect.v - plugins/ssr/ssrbool.v - plugins/ssr/ssrfun.v + 
theories/ssrmatching/ssrmatching.v + theories/ssr/ssrclasses.v + theories/ssr/ssreflect.v + theories/ssr/ssrbool.v + theories/ssr/ssrfun.v </dd> <dt> <b>Ltac2</b>: diff --git a/doc/stdlib/make-library-index b/doc/stdlib/make-library-index index 732f15b78a..a51308f153 100755 --- a/doc/stdlib/make-library-index +++ b/doc/stdlib/make-library-index @@ -8,7 +8,7 @@ HIDDEN=$2 cp -f $FILE.template tmp echo -n "Building file index-list.prehtml... " -LIBDIRS=`find theories/* plugins/* user-contrib/* -type d ! -name .coq-native` +LIBDIRS=`find theories/* user-contrib/* -type d ! -name .coq-native` for k in $LIBDIRS; do if [[ $k =~ "user-contrib" ]]; then diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py index 85d86bed62..d6ecf311f1 100644 --- a/doc/tools/coqrst/coqdomain.py +++ b/doc/tools/coqrst/coqdomain.py @@ -198,12 +198,23 @@ class CoqObject(ObjectDescription): index_text += " " + self.index_suffix self.indexnode['entries'].append(('single', index_text, target, '', None)) + aliases = None # additional indexed names for a command or other object + def add_target_and_index(self, name, _, signode): """Attach a link target to `signode` and an index entry for `name`. This is only called (from ``ObjectDescription.run``) if ``:noindex:`` isn't specified.""" if name: target = self._add_target(signode, name) self._add_index_entry(name, target) + if self.aliases is not None: + parent = signode.parent + for alias in self.aliases: + aliasnode = nodes.inline('', '') + signode.parent.append(aliasnode) + target2 = self._add_target(aliasnode, alias) + self._add_index_entry(name, target2) + parent.remove(signode) # move to the end + parent.append(signode) return target def _prepare_names(self): @@ -213,10 +224,15 @@ class CoqObject(ObjectDescription): self._names = {} else: names = [n.strip() for n in names.split(";")] - if len(names) != len(sigs): + if len(sigs) > 1 and len(names) != len(sigs): ERR = ("Expected {} semicolon-separated names, got {}. 
" + "Please provide one name per signature line.") raise self.error(ERR.format(len(names), len(sigs))) + if len(sigs) == 1 and len(names) > 1: + self.aliases = names[:-1] + names = names[-1:] + else: + self.aliases = None self._names = dict(zip(sigs, names)) def run(self): @@ -278,7 +294,7 @@ class VernacObject(NotationObject): Example:: - .. cmd:: Infix "@symbol" := @term ({+, @modifier}). + .. cmd:: Infix @string := @term1_extended {? ( {+, @syntax_modifier } ) } {? : @ident } This command is equivalent to :n:`…`. """ @@ -325,6 +341,20 @@ class TacticNotationObject(NotationObject): index_suffix = "(tactic)" annotation = None +class AttributeNotationObject(NotationObject): + """An attribute. + + Example:: + + .. attr:: local + """ + subdomain = "attr" + index_suffix = "(attribute)" + annotation = "Attribute" + + def _name_from_signature(self, signature): + return notation_to_string(signature) + class TacticNotationVariantObject(TacticNotationObject): """A variant of a tactic. @@ -1066,6 +1096,10 @@ class CoqVernacIndex(CoqSubdomainsIndex): class CoqTacticIndex(CoqSubdomainsIndex): name, localname, shortname, subdomains = "tacindex", "Tactic Index", "tactics", ["tacn"] +# Attribute index is generated but not included in output +class CoqAttributeIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "attrindex", "Attribute Index", "attributes", ["attr"] + class CoqOptionIndex(CoqSubdomainsIndex): name, localname, shortname, subdomains = "optindex", "Flags, options and Tables Index", "options", ["flag", "opt", "table"] @@ -1142,6 +1176,7 @@ class CoqDomain(Domain): 'opt': ObjType('opt', 'opt'), 'flag': ObjType('flag', 'flag'), 'table': ObjType('table', 'table'), + 'attr': ObjType('attr', 'attr'), 'thm': ObjType('thm', 'thm'), 'prodn': ObjType('prodn', 'prodn'), 'exn': ObjType('exn', 'exn'), @@ -1160,6 +1195,7 @@ class CoqDomain(Domain): 'opt': OptionObject, 'flag': FlagObject, 'table': TableObject, + 'attr': AttributeNotationObject, 'thm': 
GallinaObject, 'prodn' : ProductionObject, 'exn': ExceptionObject, @@ -1173,6 +1209,7 @@ class CoqDomain(Domain): 'opt': XRefRole(warn_dangling=True), 'flag': XRefRole(warn_dangling=True), 'table': XRefRole(warn_dangling=True), + 'attr': XRefRole(warn_dangling=True), 'thm': XRefRole(warn_dangling=True), 'prodn' : XRefRole(warn_dangling=True), 'exn': XRefRole(warn_dangling=True), @@ -1196,6 +1233,7 @@ class CoqDomain(Domain): 'opt': {}, 'flag': {}, 'table': {}, + 'attr': {}, 'thm': {}, 'prodn' : {}, 'exn': {}, diff --git a/doc/tools/docgram/README.md b/doc/tools/docgram/README.md index 182532e413..fc6d0ace0d 100644 --- a/doc/tools/docgram/README.md +++ b/doc/tools/docgram/README.md @@ -176,9 +176,13 @@ that appear in the specified production: production **without** `<grammar_symbol>`. If found, both productions are replaced with single production with `OPT <grammar_symbol>` - The current version handles a single USE_NT or ADD_OPT per EDIT. + The current version handles a single USE_NT or ADD_OPT per EDIT. These symbols + may appear in the middle of the production given in the EDIT. -* `REPLACE` - (2 sequential productions) - removes `<oldprod>` and +`INSERTALL <symbols>` - inserts <symbols> at the beginning of every production in +<edited_nt>. + +`REPLACE` - (2 sequential productions) - removes `<oldprod>` and inserts `<newprod>` in its place. ``` @@ -186,7 +190,14 @@ that appear in the specified production: | WITH <newprod> ``` -* `PRINT` <nonterminal> - prints the nonterminal definition at that point in +`MOVETO <destination> <production>` - moves the production to `<destination>` and, + if needed, creates a new production <edited_nt> -> <destination>. + +`OPTINREF` - verifies that <edited_nt> has an empty production. If so, it removes +the empty production and replaces all references to <edited_nt> throughout the +grammar with `OPT <edited_nt>` + +`PRINT` <nonterminal> - prints the nonterminal definition at that point in applying the edits. 
Most useful when the edits get a bit complicated to follow. * (any other nonterminal name) - adds a new production (and possibly a new nonterminal) diff --git a/doc/tools/docgram/common.edit_mlg b/doc/tools/docgram/common.edit_mlg index 9c1827f5b7..3524d77380 100644 --- a/doc/tools/docgram/common.edit_mlg +++ b/doc/tools/docgram/common.edit_mlg @@ -39,7 +39,7 @@ RENAME: [ | Prim.natural natural *) | Vernac.rec_definition rec_definition - +(* todo: hmm, rename adds 1 prodn to closed_binder?? *) | Constr.closed_binder closed_binder ] @@ -162,11 +162,6 @@ lconstr: [ | DELETE l_constr ] -type_cstr: [ -| REPLACE ":" lconstr -| WITH OPT ( ":" lconstr ) -| DELETE (* empty *) -] let_type_cstr: [ | DELETE OPT [ ":" lconstr ] @@ -210,8 +205,6 @@ term_let: [ atomic_constr: [ (* @Zimmi48: "string" used only for notations, but keep to be consistent with patterns *) (* | DELETE string *) -| REPLACE global univ_instance -| WITH global OPT univ_instance | REPLACE "?" "[" ident "]" | WITH "?[" ident "]" | MOVETO term_evar "?[" ident "]" @@ -237,8 +230,6 @@ operconstr10: [ (* fixme: add in as a prodn somewhere *) | MOVETO dangling_pattern_extension_rule "@" pattern_identref LIST1 identref | DELETE dangling_pattern_extension_rule -| REPLACE "@" global univ_instance LIST0 operconstr9 -| WITH "@" global OPT univ_instance LIST0 operconstr9 ] operconstr9: [ @@ -285,13 +276,6 @@ binders_fixannot: [ | LIST0 binder OPT fixannot ] - -of_type_with_opt_coercion: [ -| DELETE ":>" ">" -| DELETE ":" ">" ">" -| DELETE ":" ">" -] - binder: [ | DELETE name ] @@ -306,11 +290,15 @@ open_binders: [ | DELETE closed_binder binders ] +type: [ +| operconstr200 +] + closed_binder: [ | name | REPLACE "(" name LIST1 name ":" lconstr ")" -| WITH "(" LIST1 name ":" lconstr ")" +| WITH "(" LIST1 name ":" type ")" | DELETE "(" name ":" lconstr ")" | DELETE "(" name ":=" lconstr ")" @@ -324,6 +312,16 @@ closed_binder: [ | REPLACE "{" name LIST1 name ":" lconstr "}" | WITH "{" LIST1 name type_cstr "}" | DELETE "{" 
name ":" lconstr "}" + +| DELETE "[" name "]" +| DELETE "[" name LIST1 name "]" + +| REPLACE "[" name LIST1 name ":" lconstr "]" +| WITH "[" LIST1 name type_cstr "]" +| DELETE "[" name ":" lconstr "]" + +| REPLACE "(" Prim.name ":" lconstr "|" lconstr ")" +| WITH "(" Prim.name ":" type "|" lconstr ")" ] name_colon: [ @@ -377,28 +375,151 @@ eqn: [ ] universe_increment: [ -| REPLACE "+" natural -| WITH OPT ( "+" natural ) -| DELETE (* empty *) +| OPTINREF ] evar_instance: [ -| REPLACE "@{" LIST1 inst SEP ";" "}" -| WITH OPT ( "@{" LIST1 inst SEP ";" "}" ) +| OPTINREF +] + +gallina: [ +| REPLACE thm_token ident_decl binders ":" lconstr LIST0 [ "with" ident_decl binders ":" lconstr ] +| WITH thm_token ident_decl binders ":" type LIST0 [ "with" ident_decl binders ":" type ] +| DELETE assumptions_token inline assum_list +| REPLACE OPT cumulativity_token private_token finite_token LIST1 inductive_definition SEP "with" +| WITH "Inductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "CoInductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "Variant" inductive_definition LIST0 ( "with" inductive_definition ) +| [ "Record" | "Structure" ] inductive_definition LIST0 ( "with" inductive_definition ) +| "Class" inductive_definition LIST0 ( "with" inductive_definition ) +| REPLACE "Fixpoint" LIST1 rec_definition SEP "with" +| WITH "Fixpoint" rec_definition LIST0 ( "with" rec_definition ) +| REPLACE "Let" "Fixpoint" LIST1 rec_definition SEP "with" +| WITH "Let" "Fixpoint" rec_definition LIST0 ( "with" rec_definition ) +| REPLACE "CoFixpoint" LIST1 corec_definition SEP "with" +| WITH "CoFixpoint" corec_definition LIST0 ( "with" corec_definition ) +| REPLACE "Let" "CoFixpoint" LIST1 corec_definition SEP "with" +| WITH "Let" "CoFixpoint" corec_definition LIST0 ( "with" corec_definition ) +| REPLACE "Scheme" LIST1 scheme SEP "with" +| WITH "Scheme" scheme LIST0 ( "with" scheme ) +] + +DELETE: [ +| private_token +| cumulativity_token +] + 
+opt_coercion: [ +| OPTINREF +] + +opt_constructors_or_fields: [ +| OPTINREF +] + +SPLICE: [ +| opt_coercion +| opt_constructors_or_fields +] + +constructor_list_or_record_decl: [ +| OPTINREF +] + +record_fields: [ +| REPLACE record_field ";" record_fields +| WITH LIST1 record_field SEP ";" +| DELETE record_field | DELETE (* empty *) ] +decl_notation: [ +| REPLACE "where" LIST1 one_decl_notation SEP decl_sep +| WITH "where" one_decl_notation LIST0 ( decl_sep one_decl_notation ) +] + +assumptions_token: [ +| DELETENT +] + +inline: [ +| REPLACE "Inline" "(" natural ")" +| WITH "Inline" OPT ( "(" natural ")" ) +| DELETE "Inline" +| OPTINREF +] + univ_instance: [ -| DELETE (* empty *) +| OPTINREF +] + +univ_decl: [ +| REPLACE "@{" LIST0 identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ] +| WITH "@{" LIST0 identref OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" +] + +of_type_with_opt_coercion: [ +| DELETENT +] + +of_type_with_opt_coercion: [ +| [ ":" | ":>" | ":>>" ] type +] + +attribute_value: [ +| OPTINREF +] + +def_body: [ +| DELETE binders ":=" reduce lconstr +| REPLACE binders ":" lconstr ":=" reduce lconstr +| WITH LIST0 binder OPT (":" type) ":=" reduce lconstr +| REPLACE binders ":" lconstr +| WITH LIST0 binder ":" type +] + +reduce: [ +| OPTINREF +] + +occs: [ +| OPTINREF +] + +delta_flag: [ +| REPLACE "-" "[" LIST1 smart_global "]" +| WITH OPT "-" "[" LIST1 smart_global "]" +| DELETE "[" LIST1 smart_global "]" +| OPTINREF +] + +strategy_flag: [ +| REPLACE OPT delta_flag +| WITH delta_flag +| (* empty *) +| OPTINREF ] -constr: [ -| REPLACE "@" global univ_instance -| WITH "@" global OPT univ_instance +export_token: [ +| OPTINREF ] -(* todo: binders should be binders_opt *) +functor_app_annot: [ +| OPTINREF +] + +is_module_expr: [ +| OPTINREF +] +is_module_type: [ +| OPTINREF +] + +gallina_ext: [ +| REPLACE "Arguments" smart_global LIST0 argument_spec_block OPT [ "," LIST1 [ LIST0 more_implicits_block ] 
SEP "," ] OPT [ ":" LIST1 arguments_modifier SEP "," ] +| WITH "Arguments" smart_global LIST0 argument_spec_block LIST0 [ "," LIST0 more_implicits_block ] OPT [ ":" LIST1 arguments_modifier SEP "," ] +] (* lexer stuff *) IDENT: [ @@ -478,6 +599,8 @@ tactic_expr1: [ | EDIT match_key ADD_OPT "reverse" "goal" "with" match_context_list "end" | MOVETO ltac_match_goal match_key OPT "reverse" "goal" "with" match_context_list "end" | MOVETO ltac_match_term match_key tactic_expr5 "with" match_list "end" +| REPLACE failkw [ int_or_var | ] LIST0 message_token +| WITH failkw OPT int_or_var LIST0 message_token ] DELETE: [ @@ -544,12 +667,146 @@ bar_cbrace: [ | WITH "|}" ] +printable: [ +| REPLACE [ "Sorted" | ] "Universes" OPT printunivs_subgraph OPT ne_string +| WITH OPT "Sorted" "Universes" OPT printunivs_subgraph OPT ne_string +| INSERTALL "Print" +] + +command: [ +| REPLACE "Print" printable +| WITH printable +| "SubClass" ident_decl def_body +| REPLACE "Ltac" LIST1 ltac_tacdef_body SEP "with" +| WITH "Ltac" ltac_tacdef_body LIST0 ( "with" ltac_tacdef_body ) +| REPLACE "Function" LIST1 function_rec_definition_loc SEP "with" (* funind plugin *) +| WITH "Function" function_rec_definition_loc LIST0 ( "with" function_rec_definition_loc ) (* funind plugin *) +| REPLACE "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) +| WITH "Functional" "Scheme" fun_scheme_arg LIST0 ( "with" fun_scheme_arg ) (* funind plugin *) + +] + +only_parsing: [ +| OPTINREF +] + +syntax: [ +| REPLACE "Infix" ne_lstring ":=" constr [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" IDENT ] +| WITH "Infix" ne_lstring ":=" constr OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" IDENT ] +| REPLACE "Notation" lstring ":=" constr [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" IDENT ] +| WITH "Notation" lstring ":=" constr OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" IDENT ] +| REPLACE "Reserved" "Infix" ne_lstring [ "(" LIST1 syntax_modifier SEP "," ")" | ] 
+| WITH "Reserved" "Infix" ne_lstring OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] +| REPLACE "Reserved" "Notation" ne_lstring [ "(" LIST1 syntax_modifier SEP "," ")" | ] +| WITH "Reserved" "Notation" ne_lstring OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] +] + +numnotoption: [ +| OPTINREF +] + +binder_tactic: [ +| REPLACE "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" tactic_expr5 +| WITH "let" OPT "rec" let_clause LIST0 ( "with" let_clause ) "in" tactic_expr5 +] + +tactic_then_gen: [ +| OPTINREF +] + +record_binder_body: [ +| REPLACE binders of_type_with_opt_coercion lconstr +| WITH binders of_type_with_opt_coercion +| REPLACE binders of_type_with_opt_coercion lconstr ":=" lconstr +| WITH binders of_type_with_opt_coercion ":=" lconstr +] + +simple_assum_coe: [ +| REPLACE LIST1 ident_decl of_type_with_opt_coercion lconstr +| WITH LIST1 ident_decl of_type_with_opt_coercion +] + +constructor_type: [ +| REPLACE binders [ of_type_with_opt_coercion lconstr | ] +| WITH binders OPT of_type_with_opt_coercion +] + (* todo: is this really correct? Search for "Pvernac.register_proof_mode" *) (* consider tactic_command vs tac2mode *) vernac_aux: [ | tactic_mode "." ] +def_token: [ +| DELETE "SubClass" (* document separately from Definition and Example *) +] + +assumption_token: [ +| REPLACE "Axiom" +| WITH [ "Axiom" | "Axioms" ] +| REPLACE "Conjecture" +| WITH [ "Conjecture" | "Conjectures" ] +| REPLACE "Hypothesis" +| WITH [ "Hypothesis" | "Hypotheses" ] +| REPLACE "Parameter" +| WITH [ "Parameter" | "Parameters" ] +| REPLACE "Variable" +| WITH [ "Variable" | "Variables" ] +] + +legacy_attrs: [ +| OPT [ "Local" | "Global" ] OPT [ "Polymorphic" | "Monomorphic" ] OPT "Program" OPT [ "Cumulative" | "NonCumulative" ] OPT "Private" +] + +all_attrs: [ +| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) OPT legacy_attrs +] + +vernacular: [ +| LIST0 ( OPT all_attrs [ command | tactic ] "." 
) +] + +rec_definition: [ +| REPLACE ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation +| WITH ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation +] + +corec_definition: [ +| REPLACE ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation +| WITH ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation +] + +type_cstr: [ +| REPLACE ":" lconstr +| WITH ":" type +| OPTINREF +] + +decl_notation: [ +| OPTINREF +] + +inductive_definition: [ +| REPLACE OPT ">" ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] OPT ( ":=" OPT constructor_list_or_record_decl ) OPT decl_notation +| WITH OPT ">" ident_decl binders OPT [ "|" binders ] OPT [ ":" type ] OPT ( ":=" OPT constructor_list_or_record_decl ) OPT decl_notation +] + +constructor_list_or_record_decl: [ +| DELETE "|" LIST1 constructor SEP "|" +| REPLACE identref constructor_type "|" LIST1 constructor SEP "|" +| WITH OPT "|" LIST1 constructor SEP "|" +| DELETE identref constructor_type +| REPLACE identref "{" record_fields "}" +| WITH OPT identref "{" record_fields "}" +| DELETE "{" record_fields "}" +] + +record_binder: [ +| REPLACE name record_binder_body +| WITH name OPT record_binder_body +| DELETE name +] + SPLICE: [ | noedit_mode | command_entry @@ -642,8 +899,6 @@ SPLICE: [ | tactic | uconstr | impl_ident_head -| argument_spec -| at_level | branches | check_module_type | decorated_vernac @@ -666,7 +921,27 @@ SPLICE: [ | evar_instance | fix_decls | cofix_decls -] +| assum_list +| assum_coe +| inline +| occs +| univ_name_list +| ltac_info +| field_mods +| ltac_production_sep +| ltac_tactic_level +| printunivs_subgraph +| ring_mods +| scope_delimiter +| eliminator (* todo: splice or not? *) +| quoted_attributes (* todo: splice or not? 
*) +| printable +| only_parsing +| def_token +| record_fields +| constructor_type +| record_binder +] (* end SPLICE *) RENAME: [ | clause clause_dft_concl @@ -711,6 +986,14 @@ RENAME: [ | corec_definition cofix_definition | inst evar_binding | univ_instance univ_annot +| simple_assum_coe assumpt +| of_type_with_opt_coercion of_type +| decl_notation decl_notations +| one_decl_notation decl_notation +| attribute attr +| attribute_value attr_value +| constructor_list_or_record_decl constructors_or_record +| record_binder_body field_body ] diff --git a/doc/tools/docgram/doc_grammar.ml b/doc/tools/docgram/doc_grammar.ml index b50c427742..5fcb56f5f2 100644 --- a/doc/tools/docgram/doc_grammar.ml +++ b/doc/tools/docgram/doc_grammar.ml @@ -693,6 +693,58 @@ let create_edit_map g op edits = let remove_Sedit2 p = List.filter (fun sym -> match sym with | Sedit2 _ -> false | _ -> true) p +(* don't deal with Sedit, Sedit2 yet (ever?) *) +let rec pmatch fullprod fullpat repl = + let map_prod prod = List.concat (List.map (fun s -> pmatch [s] fullpat repl) prod) in + let pmatch_wrap sym = + let r = pmatch [sym] fullpat repl in + match r with + | a :: b :: tl -> Sparen r + | [a] -> a + | x -> error "pmatch: should not happen"; Sterm "??" 
+ in + + let symmatch_r s = + let res = match s with + | Slist1 sym -> Slist1 (pmatch_wrap sym) + | Slist1sep (sym, sep) -> Slist1sep (pmatch_wrap sym, sep) + | Slist0 sym -> Slist0 (pmatch_wrap sym) + | Slist0sep (sym, sep) -> Slist0sep (pmatch_wrap sym, sep) + | Sopt sym -> Sopt (pmatch_wrap sym) + | Sparen prod -> Sparen (map_prod prod) + | Sprod prods -> Sprod (List.map map_prod prods) + | sym -> sym + in +(* Printf.printf "symmatch of %s gives %s\n" (prod_to_str [s]) (prod_to_str [res]);*) + res + in + + let rec pmatch_r prod pat match_start start_res res = +(* Printf.printf "pmatch_r: prod = %s; pat = %s; res = %s\n" (prod_to_str prod) (prod_to_str pat) (prod_to_str res);*) + match prod, pat with + | _, [] -> + let new_res = (List.rev repl) @ res in + pmatch_r prod fullpat prod new_res new_res (* subst and continue *) + | [], _ -> (List.rev ((List.rev match_start) @ res)) (* leftover partial match *) + | hdprod :: tlprod, hdpat :: tlpat -> + if hdprod = hdpat then + pmatch_r tlprod tlpat match_start start_res res + else + (* match from the next starting position *) + match match_start with + | hd :: tl -> + let new_res = (symmatch_r hd) :: start_res in + pmatch_r tl fullpat tl new_res new_res + | [] -> List.rev res (* done *) + in + pmatch_r fullprod fullpat fullprod [] [] + +(* global replace of production substrings, rhs only *) +let global_repl g pat repl = + List.iter (fun nt -> + g_update_prods g nt (List.map (fun prod -> pmatch prod pat repl) (NTMap.find nt !g.map)) + ) !g.order + (* edit a production: rename nonterminals, drop nonterminals, substitute nonterminals *) let rec edit_prod g top edit_map prod = let edit_nt edit_map sym0 nt = @@ -1112,8 +1164,15 @@ let apply_edit_file g edits = with Not_found -> prods) in aux tl prods' add_nt + | (Snterm "OPTINREF" :: _) :: tl -> + if not (List.mem [] prods) then + error "OPTINREF but no empty production for %s\n" nt; + global_repl g [(Snterm nt)] [(Sopt (Snterm nt))]; + aux tl (remove_prod [] prods nt) 
add_nt + | (Snterm "INSERTALL" :: syms) :: tl -> + aux tl (List.map (fun p -> syms @ p) prods) add_nt | (Snterm "PRINT" :: _) :: tl -> - pr_prods nt (NTMap.find nt !g.map); + pr_prods nt prods; aux tl prods add_nt | (Snterm "EDIT" :: oprod) :: tl -> aux tl (edit_single_prod g oprod prods nt) add_nt @@ -1593,6 +1652,7 @@ type seen = { nts: (string * int) NTMap.t; tacs: (string * int) NTMap.t; cmds: (string * int) NTMap.t; + cmdvs: (string * int) NTMap.t; } let process_rst g file args seen tac_prods cmd_prods = @@ -1640,9 +1700,9 @@ let process_rst g file args seen tac_prods cmd_prods = let start_index = index_of start !g.order in let end_index = index_of end_ !g.order in if start_index = None then - error "%s line %d: '%s' is undefined\n" file !linenum start; + error "%s line %d: '%s' is undefined in insertprodn\n" file !linenum start; if end_index = None then - error "%s line %d: '%s' is undefined\n" file !linenum end_; + error "%s line %d: '%s' is undefined in insertprodn\n" file !linenum end_; if start_index <> None && end_index <> None then check_range_consistency g start end_; match start_index, end_index with @@ -1697,12 +1757,17 @@ let process_rst g file args seen tac_prods cmd_prods = seen := { !seen with tacs = (NTMap.add rhs (file, !linenum) !seen.tacs)}; fprintf new_rst "%s\n" line | "cmd::" when args.check_cmds -> +(* if not (StringSet.mem rhs cmd_prods) then warn "%s line %d: Unknown command: '%s'\n" file !linenum rhs; if NTMap.mem rhs !seen.cmds then warn "%s line %d: Repeated command: '%s'\n" file !linenum rhs; +*) seen := { !seen with cmds = (NTMap.add rhs (file, !linenum) !seen.cmds)}; fprintf new_rst "%s\n" line + | "cmdv::" when args.check_cmds -> + seen := { !seen with cmdvs = (NTMap.add rhs (file, !linenum) !seen.cmdvs)}; + fprintf new_rst "%s\n" line | "insertprodn" -> process_insertprodn line rhs | _ -> fprintf new_rst "%s\n" line @@ -1818,15 +1883,43 @@ let process_grammar args = list, StringSet.of_list list in let tac_list, tac_prods = plist 
"simple_tactic" in let cmd_list, cmd_prods = plist "command" in - let seen = ref { nts=NTMap.empty; tacs=NTMap.empty; cmds=NTMap.empty } in + let seen = ref { nts=NTMap.empty; tacs=NTMap.empty; cmds=NTMap.empty; cmdvs=NTMap.empty } in List.iter (fun file -> process_rst g file args seen tac_prods cmd_prods) args.rst_files; report_omitted_prods !g.order !seen.nts "Nonterminal" ""; let out = open_out (dir "updated_rsts") in close_out out; +(* if args.check_tacs then report_omitted_prods tac_list !seen.tacs "Tactic" "\n "; if args.check_cmds then - report_omitted_prods cmd_list !seen.cmds "Command" "\n " + report_omitted_prods cmd_list !seen.cmds "Command" "\n "; +*) + + let rstCmds = StringSet.of_list (List.map (fun b -> let c, _ = b in c) (NTMap.bindings !seen.cmds)) in + let rstCmdvs = StringSet.of_list (List.map (fun b -> let c, _ = b in c) (NTMap.bindings !seen.cmdvs)) in + let command_nts = ["command"; "gallina"; "gallina_ext"; "query_command"; "syntax"] in + (* TODO: need to handle tactic_mode (overlaps with query_command) and subprf *) + let gramCmds = List.fold_left (fun set nt -> + StringSet.union set (StringSet.of_list (List.map (fun p -> String.trim (prod_to_prodn p)) (NTMap.find nt !prodn_gram.map))) + ) StringSet.empty command_nts in + + let allCmds = StringSet.union rstCmdvs (StringSet.union rstCmds gramCmds) in + let out = open_out_bin (dir "prodnCommands") in + StringSet.iter (fun c -> + let rsts = StringSet.mem c rstCmds in + let gram = StringSet.mem c gramCmds in + let pfx = match rsts, gram with + | true, false -> "+" + | false, true -> "-" + | false, false -> "?" + | _, _ -> " " + in + let var = if StringSet.mem c rstCmdvs then "v" else " " in + fprintf out "%s%s %s\n" pfx var c) + allCmds; + close_out out; + Printf.printf "# cmds in rsts, gram, total = %d %d %d\n" (StringSet.cardinal gramCmds) + (StringSet.cardinal rstCmds) (StringSet.cardinal allCmds); end; (* generate output for prodn: simple_tactic, command, also for Ltac?? 
*) diff --git a/doc/tools/docgram/fullGrammar b/doc/tools/docgram/fullGrammar index e12589bb89..529d81e424 100644 --- a/doc/tools/docgram/fullGrammar +++ b/doc/tools/docgram/fullGrammar @@ -322,8 +322,13 @@ closed_binder: [ | "{" name LIST1 name ":" lconstr "}" | "{" name ":" lconstr "}" | "{" name LIST1 name "}" +| "[" name "]" +| "[" name LIST1 name ":" lconstr "]" +| "[" name ":" lconstr "]" +| "[" name LIST1 name "]" | "`(" LIST1 typeclass_constraint SEP "," ")" | "`{" LIST1 typeclass_constraint SEP "," "}" +| "`[" LIST1 typeclass_constraint SEP "," "]" | "'" pattern0 ] @@ -643,6 +648,7 @@ command: [ | "Show" "Ltac" "Profile" | "Show" "Ltac" "Profile" "CutOff" int | "Show" "Ltac" "Profile" string +| "Show" "Lia" "Profile" (* micromega plugin *) | "Add" "InjTyp" constr (* micromega plugin *) | "Add" "BinOp" constr (* micromega plugin *) | "Add" "UnOp" constr (* micromega plugin *) @@ -937,12 +943,12 @@ opt_constructors_or_fields: [ ] inductive_definition: [ -| opt_coercion ident_decl binders OPT [ ":" lconstr ] opt_constructors_or_fields decl_notation +| opt_coercion ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notation ] constructor_list_or_record_decl: [ | "|" LIST1 constructor SEP "|" -| identref constructor_type "|" LIST0 constructor SEP "|" +| identref constructor_type "|" LIST1 constructor SEP "|" | identref constructor_type | identref "{" record_fields "}" | "{" record_fields "}" @@ -1270,7 +1276,7 @@ printable: [ | "Instances" smart_global | "Coercions" | "Coercion" "Paths" class_rawexpr class_rawexpr -| "Canonical" "Projections" +| "Canonical" "Projections" LIST0 smart_global | "Typing" "Flags" | "Tables" | "Options" @@ -1400,8 +1406,7 @@ syntax_modifier: [ | "only" "parsing" | "format" STRING OPT STRING | IDENT; "," LIST1 IDENT SEP "," "at" level -| IDENT; "at" level -| IDENT; "at" level constr_as_binder_kind +| IDENT; "at" level OPT constr_as_binder_kind | IDENT constr_as_binder_kind | IDENT 
syntax_extension_type ] @@ -1412,17 +1417,18 @@ syntax_extension_type: [ | "bigint" | "binder" | "constr" -| "constr" OPT at_level OPT constr_as_binder_kind +| "constr" at_level_opt OPT constr_as_binder_kind | "pattern" | "pattern" "at" "level" natural | "strict" "pattern" | "strict" "pattern" "at" "level" natural | "closed" "binder" -| "custom" IDENT OPT at_level OPT constr_as_binder_kind +| "custom" IDENT at_level_opt OPT constr_as_binder_kind ] -at_level: [ +at_level_opt: [ | "at" level +| ] constr_as_binder_kind: [ @@ -1719,7 +1725,6 @@ simple_tactic: [ | "restart_timer" OPT string | "finish_timing" OPT string | "finish_timing" "(" string ")" OPT string -| "myred" (* micromega plugin *) | "psatz_Z" int_or_var tactic (* micromega plugin *) | "psatz_Z" tactic (* micromega plugin *) | "xlia" tactic (* micromega plugin *) @@ -1735,13 +1740,12 @@ simple_tactic: [ | "psatz_R" tactic (* micromega plugin *) | "psatz_Q" int_or_var tactic (* micromega plugin *) | "psatz_Q" tactic (* micromega plugin *) -| "iter_specs" tactic (* micromega plugin *) +| "zify_iter_specs" tactic (* micromega plugin *) | "zify_op" (* micromega plugin *) -| "saturate" (* micromega plugin *) +| "zify_saturate" (* micromega plugin *) +| "zify_iter_let" tactic (* micromega plugin *) | "nsatz_compute" constr (* nsatz plugin *) | "omega" (* omega plugin *) -| "omega" "with" LIST1 ident (* omega plugin *) -| "omega" "with" "*" (* omega plugin *) | "rtauto" | "protect_fv" string "in" ident (* setoid_ring plugin *) | "protect_fv" string (* setoid_ring plugin *) diff --git a/doc/tools/docgram/orderedGrammar b/doc/tools/docgram/orderedGrammar index 63e0ca129c..908e3ccd51 100644 --- a/doc/tools/docgram/orderedGrammar +++ b/doc/tools/docgram/orderedGrammar @@ -29,7 +29,7 @@ vernac_control: [ | "Redirect" string vernac_control | "Timeout" num vernac_control | "Fail" vernac_control -| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) vernac +| LIST0 ( "#[" LIST0 attr SEP "," "]" ) vernac ] term: [ @@ -102,50 
+102,24 @@ dangling_pattern_extension_rule: [ | "@" "?" ident LIST1 ident ] -record_fields: [ -| record_field ";" record_fields -| record_field -| -] - -record_field: [ -| LIST0 ( "#[" LIST0 attribute SEP "," "]" ) record_binder OPT [ "|" num ] decl_notation -] - -decl_notation: [ -| "where" LIST1 one_decl_notation SEP "and" -| -] - -one_decl_notation: [ -| string ":=" term1_extended OPT [ ":" ident ] +assumption_token: [ +| [ "Axiom" | "Axioms" ] +| [ "Conjecture" | "Conjectures" ] +| [ "Parameter" | "Parameters" ] +| [ "Hypothesis" | "Hypotheses" ] +| [ "Variable" | "Variables" ] ] -record_binder: [ -| name -| name record_binder_body +assumpt: [ +| LIST1 ident_decl of_type ] -record_binder_body: [ -| LIST0 binder of_type_with_opt_coercion term -| LIST0 binder of_type_with_opt_coercion term ":=" term -| LIST0 binder ":=" term -] - -of_type_with_opt_coercion: [ -| ":>>" -| ":>" -| ":" +ident_decl: [ +| ident OPT univ_decl ] -attribute: [ -| ident attribute_value -] - -attribute_value: [ -| "=" string -| "(" LIST0 attribute SEP "," ")" -| +of_type: [ +| [ ":" | ":>" | ":>>" ] type ] qualid: [ @@ -156,6 +130,10 @@ field_ident: [ | "." ident ] +type: [ +| term +] + numeral: [ | LIST1 digit OPT ( "." LIST1 digit ) OPT [ [ "e" | "E" ] OPT [ "+" | "-" ] LIST1 digit ] ] @@ -184,6 +162,27 @@ subsequent_letter: [ | [ first_letter | digit | "'" | unicode_id_part ] ] +vernacular: [ +| LIST0 ( OPT all_attrs [ command | ltac_expr ] "." 
) +] + +all_attrs: [ +| LIST0 ( "#[" LIST0 attr SEP "," "]" ) OPT legacy_attrs +] + +attr: [ +| ident OPT attr_value +] + +attr_value: [ +| "=" string +| "(" LIST0 attr SEP "," ")" +] + +legacy_attrs: [ +| OPT [ "Local" | "Global" ] OPT [ "Polymorphic" | "Monomorphic" ] OPT "Program" OPT [ "Cumulative" | "NonCumulative" ] OPT "Private" +] + sort: [ | "Set" | "Prop" @@ -208,6 +207,10 @@ universe_name: [ | "Prop" ] +univ_annot: [ +| "@{" LIST0 universe_level "}" +] + universe_level: [ | "Set" | "Prop" @@ -216,8 +219,12 @@ universe_level: [ | qualid ] -univ_annot: [ -| "@{" LIST0 universe_level "}" +univ_decl: [ +| "@{" LIST0 ident OPT "+" OPT [ "|" LIST0 univ_constraint SEP "," OPT "+" ] "}" +] + +univ_constraint: [ +| universe_name [ "<" | "=" | "<=" ] universe_name ] term_fix: [ @@ -226,7 +233,7 @@ term_fix: [ ] fix_body: [ -| ident LIST0 binder OPT fixannot OPT ( ":" term ) ":=" term +| ident LIST0 binder OPT fixannot OPT ( ":" type ) ":=" term ] fixannot: [ @@ -246,12 +253,12 @@ term_cofix: [ ] cofix_body: [ -| ident LIST0 binder OPT ( ":" term ) ":=" term +| ident LIST0 binder OPT ( ":" type ) ":=" term ] term_let: [ -| "let" name OPT ( ":" term ) ":=" term "in" term -| "let" name LIST1 binder OPT ( ":" term ) ":=" term "in" term +| "let" name OPT ( ":" type ) ":=" term "in" term +| "let" name LIST1 binder OPT ( ":" type ) ":=" term "in" term | "let" "(" LIST0 name SEP "," ")" OPT [ OPT [ "as" name ] "return" term100 ] ":=" term "in" term | "let" "'" pattern ":=" term OPT ( "return" term100 ) "in" term | "let" "'" pattern "in" pattern ":=" term "return" term100 "in" term @@ -269,13 +276,15 @@ name: [ binder: [ | name -| "(" LIST1 name ":" term ")" -| "(" name OPT ( ":" term ) ":=" term ")" -| "{" LIST1 name OPT ( ":" term ) "}" +| "(" LIST1 name ":" type ")" +| "(" name OPT ( ":" type ) ":=" term ")" +| "(" name ":" type "|" term ")" +| "{" LIST1 name OPT ( ":" type ) "}" +| "[" LIST1 name OPT ( ":" type ) "]" | "`(" LIST1 typeclass_constraint SEP "," ")" | "`{" 
LIST1 typeclass_constraint SEP "," "}" +| "`[" LIST1 typeclass_constraint SEP "," "]" | "'" pattern0 -| "(" name ":" term "|" term ")" ] typeclass_constraint: [ @@ -359,17 +368,20 @@ subprf: [ ] gallina: [ -| thm_token ident_decl LIST0 binder ":" term LIST0 [ "with" ident_decl LIST0 binder ":" term ] -| assumption_token inline assum_list -| assumptions_token inline assum_list -| def_token ident_decl def_body +| thm_token ident_decl LIST0 binder ":" type LIST0 [ "with" ident_decl LIST0 binder ":" type ] +| assumption_token OPT ( "Inline" OPT ( "(" num ")" ) ) [ LIST1 ( "(" assumpt ")" ) | assumpt ] +| [ "Definition" | "Example" ] ident_decl def_body | "Let" ident def_body -| OPT cumulativity_token private_token finite_token LIST1 inductive_definition SEP "with" -| "Fixpoint" LIST1 fix_definition SEP "with" -| "Let" "Fixpoint" LIST1 fix_definition SEP "with" -| "CoFixpoint" LIST1 cofix_definition SEP "with" -| "Let" "CoFixpoint" LIST1 cofix_definition SEP "with" -| "Scheme" LIST1 scheme SEP "with" +| "Inductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "CoInductive" inductive_definition LIST0 ( "with" inductive_definition ) +| "Variant" inductive_definition LIST0 ( "with" inductive_definition ) +| [ "Record" | "Structure" ] inductive_definition LIST0 ( "with" inductive_definition ) +| "Class" inductive_definition LIST0 ( "with" inductive_definition ) +| "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) +| "Let" "Fixpoint" fix_definition LIST0 ( "with" fix_definition ) +| "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) +| "Let" "CoFixpoint" cofix_definition LIST0 ( "with" cofix_definition ) +| "Scheme" scheme LIST0 ( "with" scheme ) | "Combined" "Scheme" ident "from" LIST1 ident SEP "," | "Register" qualid "as" qualid | "Register" "Inline" qualid @@ -380,7 +392,15 @@ gallina: [ ] fix_definition: [ -| ident_decl LIST0 binder OPT fixannot OPT ( ":" term ) OPT [ ":=" term ] decl_notation +| ident_decl LIST0 binder OPT fixannot 
OPT ( ":" type ) OPT [ ":=" term ] OPT decl_notations +] + +decl_notations: [ +| "where" decl_notation LIST0 ( "and" decl_notation ) +] + +decl_notation: [ +| string ":=" term1_extended OPT [ ":" ident ] ] register_token: [ @@ -444,80 +464,23 @@ thm_token: [ | "Property" ] -def_token: [ -| "Definition" -| "Example" -| "SubClass" -] - -assumption_token: [ -| "Hypothesis" -| "Variable" -| "Axiom" -| "Parameter" -| "Conjecture" -] - -assumptions_token: [ -| "Hypotheses" -| "Variables" -| "Axioms" -| "Parameters" -| "Conjectures" -] - -inline: [ -| "Inline" "(" num ")" -| "Inline" -| -] - -univ_constraint: [ -| universe_name [ "<" | "=" | "<=" ] universe_name -] - -ident_decl: [ -| ident OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) -] - -finite_token: [ -| "Inductive" -| "CoInductive" -| "Variant" -| "Record" -| "Structure" -| "Class" -] - -cumulativity_token: [ -| "Cumulative" -| "NonCumulative" -] - -private_token: [ -| "Private" -| -] - def_body: [ -| LIST0 binder ":=" reduce term -| LIST0 binder ":" term ":=" reduce term -| LIST0 binder ":" term +| LIST0 binder OPT ( ":" type ) ":=" OPT reduce term +| LIST0 binder ":" type ] reduce: [ | "Eval" red_expr "in" -| ] red_expr: [ | "red" | "hnf" -| "simpl" delta_flag OPT ref_or_pattern_occ -| "cbv" strategy_flag -| "cbn" strategy_flag -| "lazy" strategy_flag -| "compute" delta_flag +| "simpl" OPT delta_flag OPT ref_or_pattern_occ +| "cbv" OPT strategy_flag +| "cbn" OPT strategy_flag +| "lazy" OPT strategy_flag +| "compute" OPT delta_flag | "vm_compute" OPT ref_or_pattern_occ | "native_compute" OPT ref_or_pattern_occ | "unfold" LIST1 unfold_occ SEP "," @@ -526,6 +489,19 @@ red_expr: [ | ident ] +delta_flag: [ +| OPT "-" "[" LIST1 smart_global "]" +] + +smart_global: [ +| qualid +| by_notation +] + +by_notation: [ +| string OPT [ "%" ident ] +] + strategy_flag: [ | LIST1 red_flags | delta_flag @@ -538,70 +514,71 @@ red_flags: [ | "fix" | "cofix" | "zeta" -| "delta" 
delta_flag +| "delta" OPT delta_flag ] -delta_flag: [ -| "-" "[" LIST1 smart_global "]" -| "[" LIST1 smart_global "]" -| +ref_or_pattern_occ: [ +| smart_global OPT ( "at" occs_nums ) +| term1_extended OPT ( "at" occs_nums ) ] -ref_or_pattern_occ: [ -| smart_global occs -| term1_extended occs +occs_nums: [ +| LIST1 num_or_var +| "-" num_or_var LIST0 int_or_var ] -unfold_occ: [ -| smart_global occs +num_or_var: [ +| num +| ident ] -opt_constructors_or_fields: [ -| ":=" constructor_list_or_record_decl -| +int_or_var: [ +| int +| ident ] -inductive_definition: [ -| opt_coercion ident_decl LIST0 binder OPT [ ":" term ] opt_constructors_or_fields decl_notation +unfold_occ: [ +| smart_global OPT ( "at" occs_nums ) ] -opt_coercion: [ -| ">" -| +pattern_occ: [ +| term1_extended OPT ( "at" occs_nums ) ] -constructor_list_or_record_decl: [ -| "|" LIST1 constructor SEP "|" -| ident constructor_type "|" LIST0 constructor SEP "|" -| ident constructor_type -| ident "{" record_fields "}" -| "{" record_fields "}" -| +finite_token: [ +| "Inductive" +| "CoInductive" +| "Variant" +| "Record" +| "Structure" +| "Class" ] -assum_list: [ -| LIST1 assum_coe -| simple_assum_coe +inductive_definition: [ +| OPT ">" ident_decl LIST0 binder OPT [ "|" LIST0 binder ] OPT [ ":" type ] OPT ( ":=" OPT constructors_or_record ) OPT decl_notations ] -assum_coe: [ -| "(" simple_assum_coe ")" +constructors_or_record: [ +| OPT "|" LIST1 constructor SEP "|" +| OPT ident "{" LIST1 record_field SEP ";" "}" ] -simple_assum_coe: [ -| LIST1 ident_decl of_type_with_opt_coercion term +constructor: [ +| ident LIST0 binder OPT of_type ] -constructor_type: [ -| LIST0 binder [ of_type_with_opt_coercion term | ] +record_field: [ +| LIST0 ( "#[" LIST0 attr SEP "," "]" ) name OPT field_body OPT [ "|" num ] OPT decl_notations ] -constructor: [ -| ident constructor_type +field_body: [ +| LIST0 binder of_type +| LIST0 binder of_type ":=" term +| LIST0 binder ":=" term ] cofix_definition: [ -| ident_decl LIST0 binder OPT ( 
":" term ) OPT [ ":=" term ] decl_notation +| ident_decl LIST0 binder OPT ( ":" type ) OPT [ ":=" term ] OPT decl_notations ] scheme: [ @@ -624,25 +601,16 @@ sort_family: [ | "Type" ] -smart_global: [ -| qualid -| by_notation -] - -by_notation: [ -| string OPT [ "%" ident ] -] - gallina_ext: [ -| "Module" export_token ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) of_module_type is_module_expr -| "Module" "Type" ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) LIST0 ( "<:" module_type_inl ) is_module_type -| "Declare" "Module" export_token ident LIST0 ( "(" export_token LIST1 ident ":" module_type_inl ")" ) ":" module_type_inl +| "Module" OPT export_token ident LIST0 module_binder of_module_type OPT is_module_expr +| "Module" "Type" ident LIST0 module_binder LIST0 ( "<:" module_type_inl ) OPT is_module_type +| "Declare" "Module" OPT export_token ident LIST0 module_binder ":" module_type_inl | "Section" ident | "Chapter" ident | "End" ident | "Collection" ident ":=" section_subset_expr -| "Require" export_token LIST1 qualid -| "From" qualid "Require" export_token LIST1 qualid +| "Require" OPT export_token LIST1 qualid +| "From" qualid "Require" OPT export_token LIST1 qualid | "Import" LIST1 qualid | "Export" LIST1 qualid | "Include" module_type_inl LIST0 ( "<+" module_expr_inl ) @@ -650,9 +618,9 @@ gallina_ext: [ | "Transparent" LIST1 smart_global | "Opaque" LIST1 smart_global | "Strategy" LIST1 [ strategy_level "[" LIST1 smart_global "]" ] -| "Canonical" OPT "Structure" qualid OPT [ OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) def_body ] +| "Canonical" OPT "Structure" qualid OPT [ OPT univ_decl def_body ] | "Canonical" OPT "Structure" by_notation -| "Coercion" qualid OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) def_body +| "Coercion" qualid OPT univ_decl def_body | "Identity" "Coercion" ident ":" 
class_rawexpr ">->" class_rawexpr | "Coercion" qualid ":" class_rawexpr ">->" class_rawexpr | "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr @@ -661,7 +629,7 @@ gallina_ext: [ | "Existing" "Instance" qualid hint_info | "Existing" "Instances" LIST1 qualid OPT [ "|" num ] | "Existing" "Class" qualid -| "Arguments" smart_global LIST0 argument_spec_block OPT [ "," LIST1 [ LIST0 more_implicits_block ] SEP "," ] OPT [ ":" LIST1 arguments_modifier SEP "," ] +| "Arguments" smart_global LIST0 argument_spec_block LIST0 [ "," LIST0 more_implicits_block ] OPT [ ":" LIST1 arguments_modifier SEP "," ] | "Implicit" "Type" reserv_list | "Implicit" "Types" reserv_list | "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 ident ] @@ -689,43 +657,41 @@ hint_info: [ export_token: [ | "Import" | "Export" -| ] -of_module_type: [ -| ":" module_type_inl -| LIST0 ( "<:" module_type_inl ) +module_binder: [ +| "(" OPT export_token LIST1 ident ":" module_type_inl ")" ] -is_module_type: [ -| ":=" module_type_inl LIST0 ( "<+" module_type_inl ) -| +module_type_inl: [ +| "!" module_type +| module_type OPT functor_app_annot ] -is_module_expr: [ -| ":=" module_expr_inl LIST0 ( "<+" module_expr_inl ) -| +module_type: [ +| qualid +| "(" module_type ")" +| module_type module_expr_atom +| module_type "with" with_declaration +] + +with_declaration: [ +| "Definition" qualid OPT univ_decl ":=" term +| "Module" qualid ":=" qualid ] functor_app_annot: [ | "[" "inline" "at" "level" num "]" | "[" "no" "inline" "]" -| -] - -module_expr_inl: [ -| "!" module_expr -| module_expr functor_app_annot ] -module_type_inl: [ -| "!" 
module_type -| module_type functor_app_annot +of_module_type: [ +| ":" module_type_inl +| LIST0 ( "<:" module_type_inl ) ] -module_expr: [ -| module_expr_atom -| module_expr module_expr_atom +is_module_type: [ +| ":=" module_type_inl LIST0 ( "<+" module_type_inl ) ] module_expr_atom: [ @@ -733,25 +699,31 @@ module_expr_atom: [ | "(" module_expr ")" ] -module_type: [ -| qualid -| "(" module_type ")" -| module_type module_expr_atom -| module_type "with" with_declaration +module_expr: [ +| module_expr_atom +| module_expr module_expr_atom ] -with_declaration: [ -| "Definition" qualid OPT ( "@{" LIST0 ident [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | "|}" ] ] ) ":=" term -| "Module" qualid ":=" qualid +is_module_expr: [ +| ":=" module_expr_inl LIST0 ( "<+" module_expr_inl ) +] + +module_expr_inl: [ +| "!" module_expr +| module_expr OPT functor_app_annot ] argument_spec_block: [ -| OPT "!" name OPT ( "%" ident ) +| argument_spec | "/" | "&" -| "(" LIST1 ( OPT "!" name OPT ( "%" ident ) ) ")" OPT ( "%" ident ) -| "[" LIST1 ( OPT "!" name OPT ( "%" ident ) ) "]" OPT ( "%" ident ) -| "{" LIST1 ( OPT "!" name OPT ( "%" ident ) ) "}" OPT ( "%" ident ) +| "(" LIST1 argument_spec ")" OPT ( "%" ident ) +| "[" LIST1 argument_spec "]" OPT ( "%" ident ) +| "{" LIST1 argument_spec "}" OPT ( "%" ident ) +] + +argument_spec: [ +| OPT "!" 
name OPT ( "%" ident ) ] more_implicits_block: [ @@ -760,6 +732,20 @@ more_implicits_block: [ | "{" LIST1 name "}" ] +arguments_modifier: [ +| "simpl" "nomatch" +| "simpl" "never" +| "default" "implicits" +| "clear" "bidirectionality" "hint" +| "clear" "implicits" +| "clear" "scopes" +| "clear" "scopes" "and" "implicits" +| "clear" "implicits" "and" "scopes" +| "rename" +| "assert" +| "extra" "scopes" +] + strategy_level: [ | "expand" | "opaque" @@ -785,20 +771,6 @@ simple_reserv: [ | LIST1 ident ":" term ] -arguments_modifier: [ -| "simpl" "nomatch" -| "simpl" "never" -| "default" "implicits" -| "clear" "implicits" -| "clear" "scopes" -| "clear" "bidirectionality" "hint" -| "rename" -| "assert" -| "extra" "scopes" -| "clear" "scopes" "and" "implicits" -| "clear" "implicits" "and" "scopes" -] - command: [ | "Goal" term | "Declare" "Scope" ident @@ -812,7 +784,43 @@ command: [ | "Add" "Rec" "LoadPath" string as_dirpath | "Remove" "LoadPath" string | "Type" term -| "Print" printable +| "Print" "Term" smart_global OPT ( "@{" LIST0 name "}" ) +| "Print" "All" +| "Print" "Section" qualid +| "Print" "Grammar" ident +| "Print" "Custom" "Grammar" ident +| "Print" "LoadPath" OPT dirpath +| "Print" "Modules" +| "Print" "Libraries" +| "Print" "ML" "Path" +| "Print" "ML" "Modules" +| "Print" "Debug" "GC" +| "Print" "Graph" +| "Print" "Classes" +| "Print" "TypeClasses" +| "Print" "Instances" smart_global +| "Print" "Coercions" +| "Print" "Coercion" "Paths" class_rawexpr class_rawexpr +| "Print" "Canonical" "Projections" LIST0 smart_global +| "Print" "Typing" "Flags" +| "Print" "Tables" +| "Print" "Options" +| "Print" "Hint" +| "Print" "Hint" smart_global +| "Print" "Hint" "*" +| "Print" "HintDb" ident +| "Print" "Scopes" +| "Print" "Scope" ident +| "Print" "Visibility" OPT ident +| "Print" "Implicit" smart_global +| "Print" OPT "Sorted" "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string +| "Print" "Assumptions" smart_global +| "Print" "Opaque" "Dependencies" 
smart_global +| "Print" "Transparent" "Dependencies" smart_global +| "Print" "All" "Dependencies" smart_global +| "Print" "Strategy" smart_global +| "Print" "Strategies" +| "Print" "Registered" | "Print" smart_global OPT ( "@{" LIST0 name "}" ) | "Print" "Module" "Type" qualid | "Print" "Module" qualid @@ -931,6 +939,7 @@ command: [ | "Show" "Ltac" "Profile" | "Show" "Ltac" "Profile" "CutOff" int | "Show" "Ltac" "Profile" string +| "Show" "Lia" "Profile" (* micromega plugin *) | "Add" "InjTyp" term1_extended (* micromega plugin *) | "Add" "BinOp" term1_extended (* micromega plugin *) | "Add" "UnOp" term1_extended (* micromega plugin *) @@ -959,12 +968,12 @@ command: [ | "Print" "Rewrite" "HintDb" ident | "Print" "Ltac" qualid | "Locate" "Ltac" qualid -| "Ltac" LIST1 tacdef_body SEP "with" +| "Ltac" tacdef_body LIST0 ( "with" tacdef_body ) | "Print" "Ltac" "Signatures" | "Set" "Firstorder" "Solver" ltac_expr | "Print" "Firstorder" "Solver" -| "Function" LIST1 fix_definition SEP "with" (* funind plugin *) -| "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *) +| "Function" fix_definition LIST0 ( "with" fix_definition ) +| "Functional" "Scheme" fun_scheme_arg LIST0 ( "with" fun_scheme_arg ) | "Extraction" qualid (* extraction plugin *) | "Recursive" "Extraction" LIST1 qualid (* extraction plugin *) | "Extraction" string LIST1 qualid (* extraction plugin *) @@ -1002,8 +1011,9 @@ command: [ | "Print" "Rings" (* setoid_ring plugin *) | "Add" "Field" ident ":" term1_extended OPT ( "(" LIST1 field_mod SEP "," ")" ) (* setoid_ring plugin *) | "Print" "Fields" (* setoid_ring plugin *) -| "Numeral" "Notation" qualid qualid qualid ":" ident numnotoption +| "Numeral" "Notation" qualid qualid qualid ":" ident OPT numnotoption | "String" "Notation" qualid qualid qualid ":" ident +| "SubClass" ident_decl def_body ] orient: [ @@ -1043,46 +1053,6 @@ starredidentref: [ | "Type" "*" ] -printable: [ -| "Term" smart_global OPT ( "@{" LIST0 name "}" ) -| "All" -| 
"Section" qualid -| "Grammar" ident -| "Custom" "Grammar" ident -| "LoadPath" OPT dirpath -| "Modules" -| "Libraries" -| "ML" "Path" -| "ML" "Modules" -| "Debug" "GC" -| "Graph" -| "Classes" -| "TypeClasses" -| "Instances" smart_global -| "Coercions" -| "Coercion" "Paths" class_rawexpr class_rawexpr -| "Canonical" "Projections" -| "Typing" "Flags" -| "Tables" -| "Options" -| "Hint" -| "Hint" smart_global -| "Hint" "*" -| "HintDb" ident -| "Scopes" -| "Scope" ident -| "Visibility" OPT ident -| "Implicit" smart_global -| [ "Sorted" | ] "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string -| "Assumptions" smart_global -| "Opaque" "Dependencies" smart_global -| "Transparent" "Dependencies" smart_global -| "All" "Dependencies" smart_global -| "Strategy" smart_global -| "Strategies" -| "Registered" -] - dirpath: [ | ident | dirpath field_ident @@ -1160,7 +1130,6 @@ ltac_production_item: [ ] numnotoption: [ -| | "(" "warning" "after" num ")" | "(" "abstract" "after" num ")" ] @@ -1288,17 +1257,12 @@ syntax: [ | "Delimit" "Scope" ident "with" ident | "Undelimit" "Scope" ident | "Bind" "Scope" ident "with" LIST1 class_rawexpr -| "Infix" string ":=" term1_extended [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" ident ] -| "Notation" ident LIST0 ident ":=" term1_extended only_parsing -| "Notation" string ":=" term1_extended [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" ident ] +| "Infix" string ":=" term1_extended OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ] +| "Notation" ident LIST0 ident ":=" term1_extended OPT ( "(" "only" "parsing" ")" ) +| "Notation" string ":=" term1_extended OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ] | "Format" "Notation" string string string -| "Reserved" "Infix" string [ "(" LIST1 syntax_modifier SEP "," ")" | ] -| "Reserved" "Notation" string [ "(" LIST1 syntax_modifier SEP "," ")" | ] -] - -only_parsing: [ -| "(" "only" "parsing" ")" -| +| "Reserved" "Infix" string OPT [ "(" LIST1 
syntax_modifier SEP "," ")" ] +| "Reserved" "Notation" string OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] ] level: [ @@ -1317,31 +1281,35 @@ syntax_modifier: [ | "only" "parsing" | "format" string OPT string | ident "," LIST1 ident SEP "," "at" level -| ident "at" level -| ident "at" level constr_as_binder_kind +| ident "at" level OPT constr_as_binder_kind | ident constr_as_binder_kind | ident syntax_extension_type ] +constr_as_binder_kind: [ +| "as" "ident" +| "as" "pattern" +| "as" "strict" "pattern" +] + syntax_extension_type: [ | "ident" | "global" | "bigint" | "binder" | "constr" -| "constr" OPT ( "at" level ) OPT constr_as_binder_kind +| "constr" at_level_opt OPT constr_as_binder_kind | "pattern" | "pattern" "at" "level" num | "strict" "pattern" | "strict" "pattern" "at" "level" num | "closed" "binder" -| "custom" ident OPT ( "at" level ) OPT constr_as_binder_kind +| "custom" ident at_level_opt OPT constr_as_binder_kind ] -constr_as_binder_kind: [ -| "as" "ident" -| "as" "pattern" -| "as" "strict" "pattern" +at_level_opt: [ +| "at" level +| ] simple_tactic: [ @@ -1591,7 +1559,7 @@ simple_tactic: [ | "eenough" term1_extended as_ipat by_tactic | "generalize" term1_extended | "generalize" term1_extended LIST1 term1_extended -| "generalize" term1_extended occs as_name LIST0 [ "," pattern_occ as_name ] +| "generalize" term1_extended OPT ( "at" occs_nums ) as_name LIST0 [ "," pattern_occ as_name ] | "induction" induction_clause_list | "einduction" induction_clause_list | "destruct" induction_clause_list @@ -1605,11 +1573,11 @@ simple_tactic: [ | "inversion" quantified_hypothesis "using" term1_extended in_hyp_list | "red" clause_dft_concl | "hnf" clause_dft_concl -| "simpl" delta_flag OPT ref_or_pattern_occ clause_dft_concl -| "cbv" strategy_flag clause_dft_concl -| "cbn" strategy_flag clause_dft_concl -| "lazy" strategy_flag clause_dft_concl -| "compute" delta_flag clause_dft_concl +| "simpl" OPT delta_flag OPT ref_or_pattern_occ clause_dft_concl +| "cbv" OPT 
strategy_flag clause_dft_concl +| "cbn" OPT strategy_flag clause_dft_concl +| "lazy" OPT strategy_flag clause_dft_concl +| "compute" OPT delta_flag clause_dft_concl | "vm_compute" OPT ref_or_pattern_occ clause_dft_concl | "native_compute" OPT ref_or_pattern_occ clause_dft_concl | "unfold" LIST1 unfold_occ SEP "," clause_dft_concl @@ -1631,7 +1599,6 @@ simple_tactic: [ | "functional" "inversion" quantified_hypothesis OPT qualid (* funind plugin *) | "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *) | "soft" "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *) -| "myred" (* micromega plugin *) | "psatz_Z" int_or_var ltac_expr (* micromega plugin *) | "psatz_Z" ltac_expr (* micromega plugin *) | "xlia" ltac_expr (* micromega plugin *) @@ -1647,24 +1614,18 @@ simple_tactic: [ | "psatz_R" ltac_expr (* micromega plugin *) | "psatz_Q" int_or_var ltac_expr (* micromega plugin *) | "psatz_Q" ltac_expr (* micromega plugin *) -| "iter_specs" ltac_expr (* micromega plugin *) +| "zify_iter_specs" ltac_expr (* micromega plugin *) | "zify_op" (* micromega plugin *) -| "saturate" (* micromega plugin *) +| "zify_saturate" (* micromega plugin *) +| "zify_iter_let" ltac_expr (* micromega plugin *) | "nsatz_compute" term1_extended (* nsatz plugin *) | "omega" (* omega plugin *) -| "omega" "with" LIST1 ident (* omega plugin *) -| "omega" "with" "*" (* omega plugin *) | "protect_fv" string "in" ident (* setoid_ring plugin *) | "protect_fv" string (* setoid_ring plugin *) | "ring_lookup" ltac_expr0 "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *) | "field_lookup" ltac_expr "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *) ] -int_or_var: [ -| int -| ident -] - hloc: [ | | "in" "|-" "*" @@ -1686,17 +1647,12 @@ by_arg_tac: [ in_clause: [ | in_clause -| "*" occs +| "*" OPT ( "at" occs_nums ) | "*" "|-" concl_occ | LIST0 hypident_occ SEP "," "|-" concl_occ | LIST0 
hypident_occ SEP "," ] -occs: [ -| "at" occs_nums -| -] - as_ipat: [ | "as" simple_intropattern | @@ -1809,10 +1765,6 @@ bindings: [ | LIST1 term1_extended ] -pattern_occ: [ -| term1_extended occs -] - comparison: [ | "=" | "<" @@ -1838,12 +1790,12 @@ hypident: [ ] hypident_occ: [ -| hypident occs +| hypident OPT ( "at" occs_nums ) ] clause_dft_concl: [ | "in" in_clause -| occs +| OPT ( "at" occs_nums ) | ] @@ -1858,18 +1810,8 @@ opt_clause: [ | ] -occs_nums: [ -| LIST1 num_or_var -| "-" num_or_var LIST0 int_or_var -] - -num_or_var: [ -| num -| ident -] - concl_occ: [ -| "*" occs +| "*" OPT ( "at" occs_nums ) | ] @@ -1987,7 +1929,7 @@ ltac_expr: [ binder_tactic: [ | "fun" LIST1 fun_var "=>" ltac_expr -| "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" ltac_expr +| "let" OPT "rec" let_clause LIST0 ( "with" let_clause ) "in" ltac_expr | "info" ltac_expr ] @@ -2005,16 +1947,15 @@ let_clause: [ ltac_expr4: [ | ltac_expr3 ";" binder_tactic | ltac_expr3 ";" ltac_expr3 -| ltac_expr3 ";" "[" multi_goal_tactics "]" -| ltac_expr3 ";" "[" ">" multi_goal_tactics "]" +| ltac_expr3 ";" "[" OPT multi_goal_tactics "]" | ltac_expr3 +| ltac_expr3 ";" "[" ">" OPT multi_goal_tactics "]" ] multi_goal_tactics: [ | OPT ltac_expr "|" multi_goal_tactics | ltac_expr_opt ".." 
OPT "|" ltac_expr_opt_list_or | ltac_expr -| ] ltac_expr_opt: [ @@ -2044,6 +1985,20 @@ ltac_expr3: [ | ltac_expr2 ] +only_selector: [ +| "only" selector ":" +] + +selector: [ +| LIST1 range_selector SEP "," +| "[" ident "]" +] + +range_selector: [ +| num "-" num +| num +] + ltac_expr2: [ | ltac_expr1 "+" binder_tactic | ltac_expr1 "+" ltac_expr2 @@ -2058,7 +2013,7 @@ ltac_expr1: [ | "first" "[" LIST0 ltac_expr SEP "|" "]" | "solve" "[" LIST0 ltac_expr SEP "|" "]" | "idtac" LIST0 message_token -| failkw [ int_or_var | ] LIST0 message_token +| failkw OPT int_or_var LIST0 message_token | ltac_match_goal | simple_tactic | tactic_arg @@ -2099,7 +2054,7 @@ tactic_arg_compat: [ ltac_expr0: [ | "(" ltac_expr ")" -| "[>" multi_goal_tactics "]" +| "[>" OPT multi_goal_tactics "]" | tactic_atom ] @@ -2115,20 +2070,6 @@ toplevel_selector: [ | "!" ":" ] -only_selector: [ -| "only" selector ":" -] - -selector: [ -| LIST1 range_selector SEP "," -| "[" ident "]" -] - -range_selector: [ -| num "-" num -| num -] - ltac_match_term: [ | match_key ltac_expr "with" OPT "|" LIST1 match_rule SEP "|" "end" ] diff --git a/doc/tools/docgram/productionlist.edit_mlg b/doc/tools/docgram/productionlist.edit_mlg index 8170b71e7a..93eb38d1a0 100644 --- a/doc/tools/docgram/productionlist.edit_mlg +++ b/doc/tools/docgram/productionlist.edit_mlg @@ -12,19 +12,3 @@ (* Contents used to generate productionlists in doc *) DOC_GRAMMAR - -(* this is here because they're inside _opt generated by EXPAND *) -SPLICE: [ -| ltac_info -| eliminator -| field_mods -| ltac_production_sep -| ltac_tactic_level -| module_binder -| printunivs_subgraph -| quoted_attributes -| ring_mods -| scope_delimiter -| univ_decl -| univ_name_list -] @@ -27,9 +27,9 @@ (source_tree user-contrib)) (action (with-stdout-to .vfiles.d - (bash "%{bin:coqdep} -dyndep both -noglob -boot -R theories Coq -R plugins Coq -Q user-contrib/Ltac2 Ltac2 -I user-contrib/Ltac2 \ + (bash "%{bin:coqdep} -dyndep both -noglob -boot -R theories Coq -Q 
user-contrib/Ltac2 Ltac2 -I user-contrib/Ltac2 \ `find plugins/ -maxdepth 1 -mindepth 1 -type d -printf '-I %p '` \ - `find theories plugins user-contrib -type f -name *.v`")))) + `find theories user-contrib -type f -name *.v`")))) (alias (name vodeps) diff --git a/engine/logic_monad.ml b/engine/logic_monad.ml index 3c383b2e00..1caf2c2722 100644 --- a/engine/logic_monad.ml +++ b/engine/logic_monad.ml @@ -38,9 +38,9 @@ exception Tac_Timeout exception TacticFailure of exn let _ = CErrors.register_handler begin function - | Exception e -> CErrors.print e - | TacticFailure e -> CErrors.print e - | _ -> raise CErrors.Unhandled + | Exception e -> Some (CErrors.print e) + | TacticFailure e -> Some (CErrors.print e) + | _ -> None end (** {6 Non-logical layer} *) @@ -83,7 +83,7 @@ struct (** [Pervasives.raise]. Except that exceptions are wrapped with {!Exception}. *) - let raise ?info = fun e -> (); fun () -> Exninfo.raise ?info (Exception e) + let raise (e, info) () = Exninfo.iraise (Exception e, info) (** [try ... with ...] but restricted to {!Exception}. *) let catch = fun s h -> (); @@ -93,7 +93,8 @@ struct h (e, info) () let read_line = fun () -> try read_line () with e -> - let (e, info) = CErrors.push e in raise ~info e () + let (e, info) = CErrors.push e in + raise (e, info) () let print_char = fun c -> (); fun () -> print_char c diff --git a/engine/logic_monad.mli b/engine/logic_monad.mli index 75920455ce..5002d24af0 100644 --- a/engine/logic_monad.mli +++ b/engine/logic_monad.mli @@ -70,7 +70,7 @@ module NonLogical : sig (** [Pervasives.raise]. Except that exceptions are wrapped with {!Exception}. *) - val raise : ?info:Exninfo.info -> exn -> 'a t + val raise : Exninfo.iexn -> 'a t (** [try ... with ...] but restricted to {!Exception}. 
*) val catch : 'a t -> (Exninfo.iexn -> 'a t) -> 'a t diff --git a/engine/proofview.ml b/engine/proofview.ml index b0ea75ac60..a26ce71141 100644 --- a/engine/proofview.ml +++ b/engine/proofview.ml @@ -303,8 +303,8 @@ let tclONCE = Proof.once exception MoreThanOneSuccess let _ = CErrors.register_handler begin function | MoreThanOneSuccess -> - Pp.str "This tactic has more than one success." - | _ -> raise CErrors.Unhandled + Some (Pp.str "This tactic has more than one success.") + | _ -> None end (** [tclEXACTLY_ONCE e t] succeeds as [t] if [t] has exactly one @@ -348,8 +348,8 @@ exception NoSuchGoals of int let _ = CErrors.register_handler begin function | NoSuchGoals n -> - str "No such " ++ str (String.plural n "goal") ++ str "." - | _ -> raise CErrors.Unhandled + Some (str "No such " ++ str (String.plural n "goal") ++ str ".") + | _ -> None end (** [tclFOCUS ?nosuchgoal i j t] applies [t] in a context where @@ -421,9 +421,10 @@ exception SizeMismatch of int*int let _ = CErrors.register_handler begin function | SizeMismatch (i,j) -> let open Pp in - str"Incorrect number of goals" ++ spc() ++ - str"(expected "++int i++str(String.plural i " tactic") ++ str", was given "++ int j++str")." - | _ -> raise CErrors.Unhandled + Some ( + str"Incorrect number of goals" ++ spc() ++ + str"(expected "++int i++str(String.plural i " tactic") ++ str", was given "++ int j++str").") + | _ -> None end (** A variant of [Monad.List.iter] where we iter over the focused list @@ -908,8 +909,8 @@ let tclPROGRESS t = let _ = CErrors.register_handler begin function | Logic_monad.Tac_Timeout -> - Pp.str "[Proofview.tclTIMEOUT] Tactic timeout!" 
- | _ -> raise CErrors.Unhandled + Some (Pp.str "[Proofview.tclTIMEOUT] Tactic timeout!") + | _ -> None end let tclTIMEOUT n t = @@ -937,7 +938,7 @@ let tclTIMEOUT n t = return (Util.Inr (Logic_monad.Tac_Timeout, info)) | Logic_monad.TacticFailure e -> return (Util.Inr (e, info)) - | e -> Logic_monad.NonLogical.raise ~info e + | e -> Logic_monad.NonLogical.raise (e, info) end end >>= function | Util.Inl (res,s,m,i) -> diff --git a/ide/coq.ml b/ide/coq.ml index 4d6ba55d76..5b66cb745e 100644 --- a/ide/coq.ml +++ b/ide/coq.ml @@ -133,10 +133,10 @@ and asks_for_coqtop args = ~filter:false ~filename:(coqtop_path ()) () in match file with - | Some _ -> - let () = custom_coqtop := file in + | [file] -> + let () = custom_coqtop := Some file in filter_coq_opts args - | None -> exit 0 + | _ -> exit 0 exception WrongExitStatus of string @@ -558,6 +558,7 @@ struct { opts = [raw_matching]; init = true; label = "Display raw _matching expressions" }; { opts = [notations]; init = true; label = "Display _notations" }; + { opts = [notations]; init = true; label = "Display _parentheses" }; { opts = [all_basic]; init = false; label = "Display _all basic low-level contents" }; { opts = [existential]; init = false; diff --git a/ide/coqide.ml b/ide/coqide.ml index ccf6d40b2b..61e95c21b1 100644 --- a/ide/coqide.ml +++ b/ide/coqide.ml @@ -282,9 +282,8 @@ let load ?parent _ = let filename = try notebook#current_term.fileops#filename with Invalid_argument _ -> None in - match select_file_for_open ~title:"Load file" ?parent ?filename () with - | None -> () - | Some f -> FileAux.load_file f + let filenames = select_file_for_open ~title:"Load file" ~multiple:true ?parent ?filename () in + List.iter FileAux.load_file filenames let save ?parent _ = on_current_term (FileAux.check_save ?parent ~saveas:false) @@ -461,7 +460,7 @@ let compile sn = |Some f -> let args = Coq.get_arguments sn.coqtop in let cmd = cmd_coqc#get - ^ " " ^ String.concat " " args + ^ " " ^ String.concat " " (List.map 
Filename.quote args) ^ " " ^ (Filename.quote f) ^ " 2>&1" in let buf = Buffer.create 1024 in @@ -475,7 +474,7 @@ let compile sn = flash_info (f ^ " successfully compiled") else begin flash_info (f ^ " failed to compile"); - sn.messages#default_route#set (Pp.str "Compilation output:\n"); + sn.messages#default_route#set (Pp.str ("Compilation output:\n" ^ cmd ^ "\n")); sn.messages#default_route#add (Pp.str (Buffer.contents buf)); end in diff --git a/ide/coqide_ui.ml b/ide/coqide_ui.ml index f22821c6ea..e9ff1bbba1 100644 --- a/ide/coqide_ui.ml +++ b/ide/coqide_ui.ml @@ -79,6 +79,7 @@ let init () = \n <menuitem action='Display coercions' />\ \n <menuitem action='Display raw matching expressions' />\ \n <menuitem action='Display notations' />\ +\n <menuitem action='Display parentheses' />\ \n <menuitem action='Display all basic low-level contents' />\ \n <menuitem action='Display existential variable instances' />\ \n <menuitem action='Display universe levels' />\ diff --git a/ide/idetop.ml b/ide/idetop.ml index ae2301a0a7..9eb0b972b6 100644 --- a/ide/idetop.ml +++ b/ide/idetop.ml @@ -49,6 +49,7 @@ let coqide_known_option table = List.mem table [ ["Printing";"Matching"]; ["Printing";"Synth"]; ["Printing";"Notations"]; + ["Printing";"Parentheses"]; ["Printing";"All"]; ["Printing";"Records"]; ["Printing";"Existential";"Instances"]; @@ -70,7 +71,7 @@ let ide_cmd_checks ~last_valid { CAst.loc; v } = with e -> let (e, info) = CErrors.push e in let info = Stateid.add info ~valid:last_valid Stateid.dummy in - Exninfo.raise ~info e + Exninfo.iraise (e, info) in if is_debug v.expr then user_error "Debug mode not available in the IDE" diff --git a/ide/ideutils.ml b/ide/ideutils.ml index 1cf065cf25..38da229d61 100644 --- a/ide/ideutils.ml +++ b/ide/ideutils.ml @@ -259,7 +259,7 @@ let current_dir () = match project_path#get with | None -> "" | Some dir -> dir -let select_file_for_open ~title ?(filter=true) ?parent ?filename () = +let select_file_for_open ~title ?(filter=true) 
?(multiple=false) ?parent ?filename () = let file_chooser = GWindow.file_chooser_dialog ~action:`OPEN ~modal:true ~title ?parent () in @@ -271,6 +271,7 @@ let select_file_for_open ~title ?(filter=true) ?parent ?filename () = file_chooser#add_filter (filter_all_files ()) end; file_chooser#set_default_response `OPEN; + file_chooser#set_select_multiple multiple; let dir = match filename with | None -> current_dir () | Some f -> Filename.dirname f in @@ -279,12 +280,12 @@ let select_file_for_open ~title ?(filter=true) ?parent ?filename () = match file_chooser#run () with | `OPEN -> begin - match file_chooser#filename with - | None -> None - | Some _ as f -> - project_path#set file_chooser#current_folder; f + let filenames = file_chooser#get_filenames in + (if filenames <> [] then + project_path#set file_chooser#current_folder); + filenames end - | `DELETE_EVENT | `CANCEL -> None in + | `DELETE_EVENT | `CANCEL -> [] in file_chooser#destroy (); file diff --git a/ide/ideutils.mli b/ide/ideutils.mli index bacb273657..af30cd2b05 100644 --- a/ide/ideutils.mli +++ b/ide/ideutils.mli @@ -31,7 +31,7 @@ val find_tag_start : GText.tag -> GText.iter -> GText.iter val find_tag_stop : GText.tag -> GText.iter -> GText.iter val select_file_for_open : - title:string -> ?filter:bool -> ?parent:GWindow.window -> ?filename:string -> unit -> string option + title:string -> ?filter:bool -> ?multiple:bool -> ?parent:GWindow.window -> ?filename:string -> unit -> string list val select_file_for_save : title:string -> ?parent:GWindow.window -> ?filename:string -> unit -> string option val try_convert : string -> string diff --git a/interp/constrexpr.ml b/interp/constrexpr.ml index b96ef7c4e5..4bdacda529 100644 --- a/interp/constrexpr.ml +++ b/interp/constrexpr.ml @@ -19,11 +19,21 @@ type universe_decl_expr = (lident list, Glob_term.glob_constraint list) UState.g type ident_decl = lident * universe_decl_expr option type name_decl = lname * universe_decl_expr option +type 
notation_with_optional_scope = LastLonelyNotation | NotationInScope of string + +type entry_level = int +type entry_relative_level = LevelLt of entry_level | LevelLe of entry_level | LevelSome + type notation_entry = InConstrEntry | InCustomEntry of string -type notation_entry_level = InConstrEntrySomeLevel | InCustomEntryLevel of string * int +type notation_entry_level = InConstrEntrySomeLevel | InCustomEntryLevel of string * entry_level type notation_key = string + +(* A notation associated to a given parsing rule *) type notation = notation_entry_level * notation_key +(* A notation associated to a given interpretation *) +type specific_notation = notation_with_optional_scope * notation + type 'a or_by_notation_r = | AN of 'a | ByNotation of (string * string option) @@ -78,7 +88,7 @@ type cases_pattern_expr_r = (** [CPatCstr (_, c, Some l1, l2)] represents [(@ c l1) l2] *) | CPatAtom of qualid option | CPatOr of cases_pattern_expr list - | CPatNotation of notation * cases_pattern_notation_substitution + | CPatNotation of notation_with_optional_scope option * notation * cases_pattern_notation_substitution * cases_pattern_expr list (** CPatNotation (_, n, l1 ,l2) represents (notation n applied with substitution l1) applied to arguments l2 *) @@ -119,7 +129,7 @@ and constr_expr_r = | CEvar of Glob_term.existential_name * (Id.t * constr_expr) list | CSort of Glob_term.glob_sort | CCast of constr_expr * constr_expr Glob_term.cast_type - | CNotation of notation * constr_notation_substitution + | CNotation of notation_with_optional_scope option * notation * constr_notation_substitution | CGeneralization of Glob_term.binding_kind * abstraction_kind option * constr_expr | CPrim of prim_token | CDelimiters of string * constr_expr diff --git a/interp/constrexpr_ops.ml b/interp/constrexpr_ops.ml index b4798127f9..401853b625 100644 --- a/interp/constrexpr_ops.ml +++ b/interp/constrexpr_ops.ml @@ -75,7 +75,8 @@ let rec cases_pattern_expr_eq p1 p2 = Option.equal qualid_eq r1 r2 
| CPatOr a1, CPatOr a2 -> List.equal cases_pattern_expr_eq a1 a2 - | CPatNotation (n1, s1, l1), CPatNotation (n2, s2, l2) -> + | CPatNotation (inscope1, n1, s1, l1), CPatNotation (inscope2, n2, s2, l2) -> + Option.equal notation_with_optional_scope_eq inscope1 inscope2 && notation_eq n1 n2 && cases_pattern_notation_substitution_eq s1 s2 && List.equal cases_pattern_expr_eq l1 l2 @@ -160,7 +161,8 @@ let rec constr_expr_eq e1 e2 = Glob_ops.glob_sort_eq s1 s2 | CCast(t1,c1), CCast(t2,c2) -> constr_expr_eq t1 t2 && cast_expr_eq c1 c2 - | CNotation(n1, s1), CNotation(n2, s2) -> + | CNotation(inscope1, n1, s1), CNotation(inscope2, n2, s2) -> + Option.equal notation_with_optional_scope_eq inscope1 inscope2 && notation_eq n1 n2 && constr_notation_substitution_eq s1 s2 | CPrim i1, CPrim i2 -> @@ -271,7 +273,7 @@ let rec cases_pattern_fold_names f a pt = match CAst.(pt.v) with | CPatCstr (_,patl1,patl2) -> List.fold_left (cases_pattern_fold_names f) (Option.fold_left (List.fold_left (cases_pattern_fold_names f)) a patl1) patl2 - | CPatNotation (_,(patl,patll),patl') -> + | CPatNotation (_,_,(patl,patll),patl') -> List.fold_left (cases_pattern_fold_names f) (List.fold_left (cases_pattern_fold_names f) a (patl@List.flatten patll)) patl' | CPatDelimiters (_,pat) -> cases_pattern_fold_names f a pat @@ -320,7 +322,7 @@ let fold_constr_expr_with_binders g f n acc = CAst.with_val (function f (Name.fold_right g (na.CAst.v) n) (Option.fold_left (f n) (f n acc a) t) b | CCast (a,(CastConv b|CastVM b|CastNative b)) -> f n (f n acc a) b | CCast (a,CastCoerce) -> f n acc a - | CNotation (_,(l,ll,bl,bll)) -> + | CNotation (_,_,(l,ll,bl,bll)) -> (* The following is an approximation: we don't know exactly if an ident is binding nor to which subterms bindings apply *) let acc = List.fold_left (f n) acc (l@List.flatten ll) in @@ -399,9 +401,9 @@ let map_constr_expr_with_binders g f e = CAst.map (function | CLetIn (na,a,t,b) -> CLetIn (na,f e a,Option.map (f e) t,f (Name.fold_right g 
(na.CAst.v) e) b) | CCast (a,c) -> CCast (f e a, Glob_ops.map_cast_type (f e) c) - | CNotation (n,(l,ll,bl,bll)) -> + | CNotation (inscope,n,(l,ll,bl,bll)) -> (* This is an approximation because we don't know what binds what *) - CNotation (n,(List.map (f e) l,List.map (List.map (f e)) ll, bl, + CNotation (inscope,n,(List.map (f e) l,List.map (List.map (f e)) ll, bl, List.map (fun bl -> snd (map_local_binders f g e bl)) bll)) | CGeneralization (b,a,c) -> CGeneralization (b,a,f e c) | CDelimiters (s,a) -> CDelimiters (s,f e a) @@ -574,7 +576,7 @@ let mkAppPattern ?loc p lp = CErrors.user_err ?loc:p.loc ~hdr:"compound_pattern" (Pp.str "Nested applications not supported.") | CPatCstr (r, l1, l2) -> CPatCstr (r, l1 , l2@lp) - | CPatNotation (n, s, l) -> CPatNotation (n , s, l@lp) + | CPatNotation (inscope, n, s, l) -> CPatNotation (inscope, n , s, l@lp) | _ -> CErrors.user_err ?loc:p.loc ~hdr:"compound_pattern" (Pp.str "Such pattern cannot have arguments.")) @@ -591,8 +593,8 @@ let rec coerce_to_cases_pattern_expr c = CAst.map_with_loc (fun ?loc -> function (mkAppPattern (coerce_to_cases_pattern_expr p) (List.map (fun (a,_) -> coerce_to_cases_pattern_expr a) args)).CAst.v | CAppExpl ((None,r,i),args) -> CPatCstr (r,Some (List.map coerce_to_cases_pattern_expr args),[]) - | CNotation (ntn,(c,cl,[],[])) -> - CPatNotation (ntn,(List.map coerce_to_cases_pattern_expr c, + | CNotation (inscope,ntn,(c,cl,[],[])) -> + CPatNotation (inscope,ntn,(List.map coerce_to_cases_pattern_expr c, List.map (List.map coerce_to_cases_pattern_expr) cl),[]) | CPrim p -> CPatPrim p diff --git a/interp/constrextern.ml b/interp/constrextern.ml index c198c4eb9b..44aacd62d8 100644 --- a/interp/constrextern.ml +++ b/interp/constrextern.ml @@ -57,12 +57,38 @@ let print_implicits_defensive = ref true (* This forces printing of coercions *) let print_coercions = ref false +(* This forces printing of parentheses even when + it is implied by associativity/precedence *) +let print_parentheses = 
Notation_ops.print_parentheses + (* This forces printing universe names of Type{.} *) let print_universes = Detyping.print_universes (* This suppresses printing of primitive tokens (e.g. numeral) and notations *) let print_no_symbol = ref false +(* This tells to skip types if a variable has this type by default *) +let print_use_implicit_types = + Goptions.declare_bool_option_and_ref + ~depr:false + ~key:["Printing";"Use";"Implicit";"Types"] + ~value:true + +(**********************************************************************) + +let hole = CAst.make @@ CHole (None, IntroAnonymous, None) + +let is_reserved_type na t = + not !Flags.raw_print && print_use_implicit_types () && + match na with + | Anonymous -> false + | Name id -> + try + let pat = Reserve.find_reserved_type id in + let _ = match_notation_constr false t ([],pat) in + true + with Not_found | No_match -> false + (**********************************************************************) (* Turning notations and scopes on and off for printing *) module IRuleSet = Set.Make(struct @@ -75,10 +101,10 @@ let inactive_notations_table = let inactive_scopes_table = Summary.ref ~name:"inactive_scopes_table" CString.Set.empty -let show_scope scopt = - match scopt with - | None -> str "" - | Some sc -> spc () ++ str "in scope" ++ spc () ++ str sc +let show_scope inscope = + match inscope with + | LastLonelyNotation -> str "" + | NotationInScope sc -> spc () ++ str "in scope" ++ spc () ++ str sc let _show_inactive_notations () = begin @@ -97,8 +123,8 @@ let _show_inactive_notations () = let _ = Feedback.msg_notice (str "Inactive notations:") in IRuleSet.iter (function - | NotationRule (scopt, ntn) -> - Feedback.msg_notice (pr_notation ntn ++ show_scope scopt) + | NotationRule (inscope, ntn) -> + Feedback.msg_notice (pr_notation ntn ++ show_scope inscope) | SynDefRule kn -> Feedback.msg_notice (str (string_of_qualid (Nametab.shortest_qualid_of_syndef Id.Set.empty kn)))) !inactive_notations_table @@ -107,18 +133,19 @@ 
let deactivate_notation nr = | SynDefRule kn -> (* shouldn't we check whether it is well defined? *) inactive_notations_table := IRuleSet.add nr !inactive_notations_table - | NotationRule (scopt, ntn) -> - match availability_of_notation (scopt, ntn) (scopt, []) with + | NotationRule (inscope, ntn) -> + let scopt = match inscope with NotationInScope sc -> Some sc | LastLonelyNotation -> None in + match availability_of_notation (inscope, ntn) (scopt, []) with | None -> user_err ~hdr:"Notation" (pr_notation ntn ++ spc () ++ str "does not exist" - ++ (match scopt with - | None -> spc () ++ str "in the empty scope." - | Some _ -> show_scope scopt ++ str ".")) + ++ (match inscope with + | LastLonelyNotation -> spc () ++ str "in the empty scope." + | NotationInScope _ -> show_scope inscope ++ str ".")) | Some _ -> if IRuleSet.mem nr !inactive_notations_table then Feedback.msg_warning (str "Notation" ++ spc () ++ pr_notation ntn ++ spc () - ++ str "is already inactive" ++ show_scope scopt ++ str ".") + ++ str "is already inactive" ++ show_scope inscope ++ str ".") else inactive_notations_table := IRuleSet.add nr !inactive_notations_table let reactivate_notation nr = @@ -127,9 +154,9 @@ let reactivate_notation nr = IRuleSet.remove nr !inactive_notations_table with Not_found -> match nr with - | NotationRule (scopt, ntn) -> + | NotationRule (inscope, ntn) -> Feedback.msg_warning (str "Notation" ++ spc () ++ pr_notation ntn ++ spc () - ++ str "is already active" ++ show_scope scopt ++ + ++ str "is already active" ++ show_scope inscope ++ str ".") | SynDefRule kn -> let s = string_of_qualid (Nametab.shortest_qualid_of_syndef Id.Set.empty kn) in @@ -157,8 +184,8 @@ let reactivate_scope sc = let is_inactive_rule nr = IRuleSet.mem nr !inactive_notations_table || match nr with - | NotationRule (Some sc, ntn) -> CString.Set.mem sc !inactive_scopes_table - | NotationRule (None, ntn) -> false + | NotationRule (NotationInScope sc, ntn) -> CString.Set.mem sc !inactive_scopes_table + | 
NotationRule (LastLonelyNotation, ntn) -> false | SynDefRule _ -> false (* args: notation, scope, activate/deactivate *) @@ -169,10 +196,13 @@ let toggle_scope_printing ~scope ~activate = deactivate_scope scope let toggle_notation_printing ?scope ~notation ~activate = + let inscope = match scope with + | None -> LastLonelyNotation + | Some sc -> NotationInScope sc in if activate then - reactivate_notation (NotationRule (scope, notation)) + reactivate_notation (NotationRule (inscope, notation)) else - deactivate_notation (NotationRule (scope, notation)) + deactivate_notation (NotationRule (inscope, notation)) (* This governs printing of projections using the dot notation symbols *) let print_projections = ref false @@ -254,11 +284,11 @@ let insert_pat_alias ?loc p = function let rec insert_coercion ?loc l c = match l with | [] -> c - | ntn::l -> CAst.make ?loc @@ CNotation (ntn,([insert_coercion ?loc l c],[],[],[])) + | (inscope,ntn)::l -> CAst.make ?loc @@ CNotation (Some inscope,ntn,([insert_coercion ?loc l c],[],[],[])) let rec insert_pat_coercion ?loc l c = match l with | [] -> c - | ntn::l -> CAst.make ?loc @@ CPatNotation (ntn,([insert_pat_coercion ?loc l c],[]),[]) + | (inscope,ntn)::l -> CAst.make ?loc @@ CPatNotation (Some inscope,ntn,([insert_pat_coercion ?loc l c],[]),[]) (**********************************************************************) (* conversion of references *) @@ -348,19 +378,19 @@ let make_notation_gen loc ntn mknot mkprim destprim l bl = | None -> mknot (loc,ntn,l,bl) end | _ -> mknot (loc,ntn,l,bl) -let make_notation loc ntn (terms,termlists,binders,binderlists as subst) = +let make_notation loc (inscope,ntn) (terms,termlists,binders,binderlists as subst) = if not (List.is_empty termlists) || not (List.is_empty binderlists) then - CAst.make ?loc @@ CNotation (ntn,subst) + CAst.make ?loc @@ CNotation (Some inscope,ntn,subst) else make_notation_gen loc ntn - (fun (loc,ntn,l,bl) -> CAst.make ?loc @@ CNotation (ntn,(l,[],bl,[]))) + (fun 
(loc,ntn,l,bl) -> CAst.make ?loc @@ CNotation (Some inscope,ntn,(l,[],bl,[]))) (fun (loc,p) -> CAst.make ?loc @@ CPrim p) destPrim terms binders -let make_pat_notation ?loc ntn (terms,termlists as subst) args = - if not (List.is_empty termlists) then (CAst.make ?loc @@ CPatNotation (ntn,subst,args)) else +let make_pat_notation ?loc (inscope,ntn) (terms,termlists as subst) args = + if not (List.is_empty termlists) then (CAst.make ?loc @@ CPatNotation (Some inscope,ntn,subst,args)) else make_notation_gen loc ntn - (fun (loc,ntn,l,_) -> CAst.make ?loc @@ CPatNotation (ntn,(l,[]),args)) + (fun (loc,ntn,l,_) -> CAst.make ?loc @@ CPatNotation (Some inscope,ntn,(l,[]),args)) (fun (loc,p) -> CAst.make ?loc @@ CPatPrim p) destPatPrim terms [] @@ -377,6 +407,36 @@ let pattern_printable_in_both_syntax (ind,_ as c) = (List.for_all is_status_implicit params)&&(List.for_all (fun x -> not (is_status_implicit x)) args) ) impl_st +let extern_record_pattern cstrsp args = + try + if !Flags.raw_print then raise Exit; + let projs = Recordops.lookup_projections (fst cstrsp) in + if PrintingRecord.active (fst cstrsp) then + () + else if PrintingConstructor.active (fst cstrsp) then + raise Exit + else if not (get_record_print ()) then + raise Exit; + let rec ip projs args acc = + match projs, args with + | [], [] -> acc + | proj :: q, pat :: tail -> + let acc = + match proj, pat with + | _, { CAst.v = CPatAtom None } -> + (* we don't want to have 'x := _' in our patterns *) + acc + | Some c, _ -> + let loc = pat.CAst.loc in + (extern_reference ?loc Id.Set.empty (GlobRef.ConstRef c), pat) :: acc + | _ -> raise No_match in + ip q tail acc + | _ -> assert false + in + Some (List.rev (ip projs args [])) + with + Not_found | No_match | Exit -> None + (* Better to use extern_glob_constr composed with injection/retraction ?? 
*) let rec extern_cases_pattern_in_scope (custom,scopes as allscopes) vars pat = try @@ -411,27 +471,9 @@ let rec extern_cases_pattern_in_scope (custom,scopes as allscopes) vars pat = | PatCstr(cstrsp,args,na) -> let args = List.map (extern_cases_pattern_in_scope allscopes vars) args in let p = - try - if !Flags.raw_print then raise Exit; - let projs = Recordops.lookup_projections (fst cstrsp) in - let rec ip projs args acc = - match projs, args with - | [], [] -> acc - | proj :: q, pat :: tail -> - let acc = - match proj, pat with - | _, { CAst.v = CPatAtom None } -> - (* we don't want to have 'x := _' in our patterns *) - acc - | Some c, _ -> - ((extern_reference ?loc Id.Set.empty (GlobRef.ConstRef c), pat) :: acc) - | _ -> raise No_match in - ip q tail acc - | _ -> assert false - in - CPatRecord(List.rev (ip projs args [])) - with - Not_found | No_match | Exit -> + match extern_record_pattern cstrsp args with + | Some l -> CPatRecord l + | None -> let c = extern_reference Id.Set.empty (GlobRef.ConstructRef cstrsp) in if Constrintern.get_asymmetric_patterns () then if pattern_printable_in_both_syntax cstrsp @@ -447,15 +489,15 @@ let rec extern_cases_pattern_in_scope (custom,scopes as allscopes) vars pat = in insert_pat_coercion coercion pat -and apply_notation_to_pattern ?loc gr ((subst,substlist),(nb_to_drop,more_args)) +and apply_notation_to_pattern ?loc gr ((subst,substlist),(no_implicit,nb_to_drop,more_args)) (custom, (tmp_scope, scopes) as allscopes) vars = function - | NotationRule (sc,ntn) -> + | NotationRule (_,ntn as specific_ntn) -> begin match availability_of_entry_coercion custom (fst ntn) with | None -> raise No_match | Some coercion -> - match availability_of_notation (sc,ntn) (tmp_scope,scopes) with + match availability_of_notation specific_ntn (tmp_scope,scopes) with (* Uninterpretation is not allowed in current context *) | None -> raise No_match (* Uninterpretation is allowed in current context *) @@ -470,16 +512,20 @@ and 
apply_notation_to_pattern ?loc gr ((subst,substlist),(nb_to_drop,more_args)) let subscope = (subentry,(scopt,scl@scopes')) in List.map (extern_cases_pattern_in_scope subscope vars) c) substlist in - let l2 = List.map (extern_cases_pattern_in_scope allscopes vars) more_args in + let subscopes = find_arguments_scope gr in + let more_args_scopes = try List.skipn nb_to_drop subscopes with Failure _ -> [] in + let more_args = fill_arg_scopes more_args more_args_scopes allscopes in + let l2 = List.map (fun (c,allscopes) -> extern_cases_pattern_in_scope allscopes vars c) more_args in let l2' = if Constrintern.get_asymmetric_patterns () || not (List.is_empty ll) then l2 else - match drop_implicits_in_patt gr nb_to_drop l2 with + if no_implicit then l2 else + match drop_implicits_in_patt gr nb_to_drop l2 with |Some true_args -> true_args |None -> raise No_match in insert_pat_coercion coercion (insert_pat_delimiters ?loc - (make_pat_notation ?loc ntn (l,ll) l2') key) + (make_pat_notation ?loc specific_ntn (l,ll) l2') key) end | SynDefRule kn -> match availability_of_entry_coercion custom InConstrEntrySomeLevel with @@ -490,10 +536,14 @@ and apply_notation_to_pattern ?loc gr ((subst,substlist),(nb_to_drop,more_args)) List.rev_map (fun (c,(subentry,(scopt,scl))) -> extern_cases_pattern_in_scope (subentry,(scopt,scl@scopes)) vars c) subst in - let l2 = List.map (extern_cases_pattern_in_scope allscopes vars) more_args in + let subscopes = find_arguments_scope gr in + let more_args_scopes = try List.skipn nb_to_drop subscopes with Failure _ -> [] in + let more_args = fill_arg_scopes more_args more_args_scopes allscopes in + let l2 = List.map (fun (c,allscopes) -> extern_cases_pattern_in_scope allscopes vars c) more_args in let l2' = if Constrintern.get_asymmetric_patterns () then l2 else - match drop_implicits_in_patt gr (nb_to_drop + List.length l1) l2 with + if no_implicit then l2 else + match drop_implicits_in_patt gr nb_to_drop l2 with |Some true_args -> true_args |None -> 
raise No_match in @@ -716,7 +766,7 @@ let extern_applied_ref inctx impl (cf,f) us args = let extern_applied_syntactic_definition n extraimpl (cf,f) syndefargs extraargs = try let syndefargs = List.map (fun a -> (a,None)) syndefargs in - let extraargs = adjust_implicit_arguments false (List.length extraargs) 1 extraargs extraimpl in + let extraargs = adjust_implicit_arguments false n (n-List.length extraargs+1) extraargs extraimpl in let args = syndefargs @ extraargs in if args = [] then cf else CApp ((None, CAst.make cf), args) with Expl -> @@ -736,8 +786,10 @@ let extern_applied_notation n impl f args = if List.is_empty args then f.CAst.v else - let args = adjust_implicit_arguments false (List.length args) 1 args impl in + try + let args = adjust_implicit_arguments false n (n-List.length args+1) args impl in mkFlattenedCApp (f,args) + with Expl -> raise No_match let extern_args extern env args = let map (arg, argscopes) = lazy (extern argscopes env arg) in @@ -786,6 +838,12 @@ let rec flatten_application c = match DAst.get c with end | a -> c +let same_binder_type ty nal c = + match nal, DAst.get c with + | _::_, GProd (_,_,ty',_) -> glob_constr_eq ty ty' + | [], _ -> true + | _ -> assert false + (**********************************************************************) (* mapping glob_constr to numerals (in presence of coercions, choose the *) (* one with no delimiter if possible) *) @@ -935,12 +993,10 @@ let rec extern inctx scopes vars r = extern inctx scopes (add_vname vars na) c) | GProd (na,bk,t,c) -> - let t = extern_typ scopes vars t in - factorize_prod scopes (add_vname vars na) na bk t c + factorize_prod scopes vars na bk t c | GLambda (na,bk,t,c) -> - let t = extern_typ scopes vars t in - factorize_lambda inctx scopes (add_vname vars na) na bk t c + factorize_lambda inctx scopes vars na bk t c | GCases (sty,rtntypopt,tml,eqns) -> let vars' = @@ -969,17 +1025,19 @@ let rec extern inctx scopes vars r = ) x)) tml in - let eqns = List.map (extern_eqn inctx 
scopes vars) (factorize_eqns eqns) in + let eqns = List.map (extern_eqn (inctx || rtntypopt <> None) scopes vars) (factorize_eqns eqns) in CCases (sty,rtntypopt',tml,eqns) | GLetTuple (nal,(na,typopt),tm,b) -> - CLetTuple (List.map CAst.make nal, + let inctx = inctx || typopt <> None in + CLetTuple (List.map CAst.make nal, (Option.map (fun _ -> (make na)) typopt, Option.map (extern_typ scopes (add_vname vars na)) typopt), sub_extern false scopes vars tm, extern inctx scopes (List.fold_left add_vname vars nal) b) | GIf (c,(na,typopt),b1,b2) -> + let inctx = inctx || typopt <> None in CIf (sub_extern false scopes vars c, (Option.map (fun _ -> (CAst.make na)) typopt, Option.map (extern_typ scopes (add_vname vars na)) typopt), @@ -1002,7 +1060,7 @@ let rec extern inctx scopes vars r = | Some x -> Some (CAst.make @@ CStructRec (CAst.make @@ Name.get_id (List.nth assums x))) in ((CAst.make fi), n, bl, extern_typ scopes vars0 ty, - extern false scopes vars1 def)) idv + sub_extern true scopes vars1 def)) idv in CFix (CAst.(make ?loc idv.(n)), Array.to_list listdecl) | GCoFix n -> @@ -1013,7 +1071,7 @@ let rec extern inctx scopes vars r = let vars0 = List.fold_right (Name.fold_right Id.Set.add) ids vars in let vars1 = List.fold_right (Name.fold_right Id.Set.add) ids vars' in ((CAst.make fi),bl,extern_typ scopes vars0 tyv.(i), - sub_extern false scopes vars1 bv.(i))) idv + sub_extern true scopes vars1 bv.(i))) idv in CCoFix (CAst.(make ?loc idv.(n)),Array.to_list listdecl)) @@ -1039,7 +1097,10 @@ and extern_typ (subentry,(_,scopes)) = and sub_extern inctx (subentry,(_,scopes)) = extern inctx (subentry,(None,scopes)) -and factorize_prod scopes vars na bk aty c = +and factorize_prod scopes vars na bk t c = + let implicit_type = is_reserved_type na t in + let aty = extern_typ scopes vars t in + let vars = add_vname vars na in let store, get = set_temporary_memory () in match na, DAst.get c with | Name id, GCases (Constr.LetPatternStyle, None, [(e,(Anonymous,None))],(_::_ as 
eqns)) @@ -1056,18 +1117,25 @@ and factorize_prod scopes vars na bk aty c = | _ -> CProdN ([binder],b)) | _ -> assert false) | _, _ -> - let c = extern_typ scopes vars c in - match na, c.v with + let c' = extern_typ scopes vars c in + match na, c'.v with | Name id, CProdN (CLocalAssum(nal,Default bk',ty)::bl,b) - when binding_kind_eq bk bk' && constr_expr_eq aty ty - && not (occur_var_constr_expr id ty) (* avoid na in ty escapes scope *) -> - CProdN (CLocalAssum(make na::nal,Default bk,aty)::bl,b) + when binding_kind_eq bk bk' + && not (occur_var_constr_expr id ty) (* avoid na in ty escapes scope *) + && (constr_expr_eq aty ty || (constr_expr_eq ty hole && same_binder_type t nal c)) -> + let ty = if implicit_type then ty else aty in + CProdN (CLocalAssum(make na::nal,Default bk,ty)::bl,b) | _, CProdN (bl,b) -> - CProdN (CLocalAssum([make na],Default bk,aty)::bl,b) + let ty = if implicit_type then hole else aty in + CProdN (CLocalAssum([make na],Default bk,ty)::bl,b) | _, _ -> - CProdN ([CLocalAssum([make na],Default bk,aty)],c) + let ty = if implicit_type then hole else aty in + CProdN ([CLocalAssum([make na],Default bk,ty)],c') -and factorize_lambda inctx scopes vars na bk aty c = +and factorize_lambda inctx scopes vars na bk t c = + let implicit_type = is_reserved_type na t in + let aty = extern_typ scopes vars t in + let vars = add_vname vars na in let store, get = set_temporary_memory () in match na, DAst.get c with | Name id, GCases (Constr.LetPatternStyle, None, [(e,(Anonymous,None))],(_::_ as eqns)) @@ -1084,16 +1152,20 @@ and factorize_lambda inctx scopes vars na bk aty c = | _ -> CLambdaN ([binder],b)) | _ -> assert false) | _, _ -> - let c = sub_extern inctx scopes vars c in - match c.v with + let c' = sub_extern inctx scopes vars c in + match c'.v with | CLambdaN (CLocalAssum(nal,Default bk',ty)::bl,b) - when binding_kind_eq bk bk' && constr_expr_eq aty ty - && not (occur_name na ty) (* avoid na in ty escapes scope *) -> + when binding_kind_eq bk bk' + 
&& not (occur_name na ty) (* avoid na in ty escapes scope *) + && (constr_expr_eq aty ty || (constr_expr_eq ty hole && same_binder_type t nal c)) -> + let aty = if implicit_type then ty else aty in CLambdaN (CLocalAssum(make na::nal,Default bk,aty)::bl,b) | CLambdaN (bl,b) -> - CLambdaN (CLocalAssum([make na],Default bk,aty)::bl,b) + let ty = if implicit_type then hole else aty in + CLambdaN (CLocalAssum([make na],Default bk,ty)::bl,b) | _ -> - CLambdaN ([CLocalAssum([make na],Default bk,aty)],c) + let ty = if implicit_type then hole else aty in + CLambdaN ([CLocalAssum([make na],Default bk,ty)],c') and extern_local_binder scopes vars = function [] -> ([],[],[]) @@ -1107,15 +1179,17 @@ and extern_local_binder scopes vars = function Option.map (extern false scopes vars) ty) :: l) | GLocalAssum (na,bk,ty) -> + let implicit_type = is_reserved_type na ty in let ty = extern_typ scopes vars ty in (match extern_local_binder scopes (Name.fold_right Id.Set.add na vars) l with (assums,ids,CLocalAssum(nal,k,ty')::l) - when constr_expr_eq ty ty' && + when (constr_expr_eq ty ty' || implicit_type && constr_expr_eq ty' hole) && match na with Name id -> not (occur_var_constr_expr id ty') | _ -> true -> (na::assums,na::ids, CLocalAssum(CAst.make na::nal,k,ty')::l) | (assums,ids,l) -> + let ty = if implicit_type then hole else ty in (na::assums,na::ids, CLocalAssum([CAst.make na],Default bk,ty) :: l)) @@ -1159,35 +1233,32 @@ and extern_notation (custom,scopes as allscopes) vars t rules = [], [] in (* Adjust to the number of arguments expected by the notation *) let (t,args,argsscopes,argsimpls) = match n with - | Some n when nallargs >= n && nallargs > 0 -> + | Some n when nallargs >= n -> let args1, args2 = List.chop n args in let args2scopes = try List.skipn n argsscopes with Failure _ -> [] in - let args2impls = try List.skipn n argsimpls with Failure _ -> [] in - (* Note: NApp(NRef f,[]), hence n=0, encodes @f *) - (if Int.equal n 0 then f else DAst.make @@ GApp (f,args1)), - 
args2, args2scopes, args2impls - | None when nallargs > 0 -> + let args2impls = + if n = 0 then + (* Note: NApp(NRef f,[]), hence n=0, encodes @f and + conventionally deactivates implicit arguments *) + [] + else try List.skipn n argsimpls with Failure _ -> [] in + DAst.make @@ GApp (f,args1), args2, args2scopes, args2impls + | None -> begin match DAst.get f with | GRef (ref,us) -> f, args, argsscopes, argsimpls | _ -> t, [], [], [] end - | Some 0 when nallargs = 0 -> - begin match DAst.get f with - | GRef (ref,us) -> DAst.make @@ GApp (t,[]), [], [], [] - | _ -> t, [], [], [] - end - | None -> t, [], [], [] | _ -> raise No_match in (* Try matching ... *) let terms,termlists,binders,binderlists = match_notation_constr !print_universes t pat in (* Try availability of interpretation ... *) match keyrule with - | NotationRule (sc,ntn) -> + | NotationRule (_,ntn as specific_ntn) -> (match availability_of_entry_coercion custom (fst ntn) with | None -> raise No_match | Some coercion -> - match availability_of_notation (sc,ntn) scopes with + match availability_of_notation specific_ntn scopes with (* Uninterpretation is not allowed in current context *) | None -> raise No_match (* Uninterpretation is allowed in current context *) @@ -1210,7 +1281,7 @@ and extern_notation (custom,scopes as allscopes) vars t rules = List.map (fun (bl,(subentry,(scopt,scl))) -> pi3 (extern_local_binder (subentry,(scopt,scl@scopes')) vars bl)) binderlists in - let c = make_notation loc ntn (l,ll,bl,bll) in + let c = make_notation loc specific_ntn (l,ll,bl,bll) in let c = insert_coercion coercion (insert_delimiters c key) in let args = fill_arg_scopes args argsscopes allscopes in let args = extern_args (extern true) vars args in diff --git a/interp/constrextern.mli b/interp/constrextern.mli index fa263cbeb7..0eca287c1d 100644 --- a/interp/constrextern.mli +++ b/interp/constrextern.mli @@ -53,6 +53,7 @@ val print_implicits_defensive : bool ref val print_arguments : bool ref val 
print_evar_arguments : bool ref val print_coercions : bool ref +val print_parentheses : bool ref val print_universes : bool ref val print_no_symbol : bool ref val print_projections : bool ref diff --git a/interp/constrintern.ml b/interp/constrintern.ml index b2c572d290..8a820293a0 100644 --- a/interp/constrintern.ml +++ b/interp/constrintern.ml @@ -226,7 +226,7 @@ let contract_curly_brackets ntn (l,ll,bl,bll) = let ntn' = ref ntn in let rec contract_squash n = function | [] -> [] - | { CAst.v = CNotation ((InConstrEntrySomeLevel,"{ _ }"),([a],[],[],[])) } :: l -> + | { CAst.v = CNotation (None,(InConstrEntrySomeLevel,"{ _ }"),([a],[],[],[])) } :: l -> ntn' := expand_notation_string !ntn' n; contract_squash n (a::l) | a :: l -> @@ -242,7 +242,7 @@ let contract_curly_brackets_pat ntn (l,ll) = let ntn' = ref ntn in let rec contract_squash n = function | [] -> [] - | { CAst.v = CPatNotation ((InConstrEntrySomeLevel,"{ _ }"),([a],[]),[]) } :: l -> + | { CAst.v = CPatNotation (None,(InConstrEntrySomeLevel,"{ _ }"),([a],[]),[]) } :: l -> ntn' := expand_notation_string !ntn' n; contract_squash n (a::l) | a :: l -> @@ -637,6 +637,14 @@ let rec expand_binders ?loc mk bl c = (**********************************************************************) (* Syntax extensions *) +let check_not_notation_variable f ntnvars = + (* Check bug #4690 *) + match DAst.get f with + | GVar id when Id.Map.mem id ntnvars -> + user_err (str "Prefix @ is not applicable to notation variables.") + | _ -> + () + let option_mem_assoc id = function | Some (id',c) -> Id.equal id id' | None -> false @@ -1071,11 +1079,11 @@ let find_appl_head_data c = c, impls, scopes, [] | GApp (r, l) -> begin match DAst.get r with - | GRef (ref,_) when l != [] -> + | GRef (ref,_) -> let n = List.length l in let impls = implicits_of_global ref in let scopes = find_arguments_scope ref in - c, List.map (drop_first_implicits n) impls, + c, (if n = 0 then [] else List.map (drop_first_implicits n) impls), List.skipn_at_least n 
scopes,[] | _ -> c,[],[],[] end @@ -1659,10 +1667,11 @@ let drop_notations_pattern looked_for genv = let () = assert (List.is_empty vars) in let (_,argscs) = find_remaining_scopes [] pats g in Some (g, [], List.map2 (in_pat_sc scopes) argscs pats) - | NApp (NRef g,[]) -> (* special case: Syndef for @Cstr, this deactivates *) + | NApp (NRef g,[]) -> (* special case: Syndef for @Cstr deactivates implicit arguments *) test_kind top g; let () = assert (List.is_empty vars) in - Some (g, List.map (in_pat false scopes) pats, []) + let (_,argscs) = find_remaining_scopes [] pats g in + Some (g, List.map2 (in_pat_sc scopes) argscs pats, []) | NApp (NRef g,args) -> (* Convention: do not deactivate implicit arguments and scopes for further arguments *) test_kind top g; @@ -1680,7 +1689,7 @@ let drop_notations_pattern looked_for genv = test_kind top g; Dumpglob.add_glob ?loc:qid.loc g; let (_,argscs) = find_remaining_scopes [] pats g in - Some (g,[],List.map2 (fun x -> in_pat false (x,snd scopes)) argscs pats) + Some (g,[],List.map2 (in_pat_sc scopes) argscs pats) with Not_found -> None and in_pat top scopes pt = let open CAst in @@ -1719,13 +1728,13 @@ let drop_notations_pattern looked_for genv = (* but not scopes in expl_pl *) let (argscs1,_) = find_remaining_scopes expl_pl pl g in DAst.make ?loc @@ RCPatCstr (g, List.map2 (in_pat_sc scopes) argscs1 expl_pl @ List.map (in_pat false scopes) pl, []) - | CPatNotation ((InConstrEntrySomeLevel,"- _"),([a],[]),[]) when is_non_zero_pat a -> + | CPatNotation (_,(InConstrEntrySomeLevel,"- _"),([a],[]),[]) when is_non_zero_pat a -> let p = match a.CAst.v with CPatPrim (Numeral (_, p)) -> p | _ -> assert false in let pat, _df = Notation.interp_prim_token_cases_pattern_expr ?loc (ensure_kind false loc) (Numeral (SMinus,p)) scopes in rcp_of_glob scopes pat - | CPatNotation ((InConstrEntrySomeLevel,"( _ )"),([a],[]),[]) -> + | CPatNotation (_,(InConstrEntrySomeLevel,"( _ )"),([a],[]),[]) -> in_pat top scopes a - | CPatNotation 
(ntn,fullargs,extrargs) -> + | CPatNotation (_,ntn,fullargs,extrargs) -> let ntn,(terms,termlists) = contract_curly_brackets_pat ntn fullargs in let ((ids',c),df) = Notation.interp_notation ?loc ntn scopes in let (terms,termlists) = split_by_type_pat ?loc ids' (terms,termlists) in @@ -1780,7 +1789,15 @@ let drop_notations_pattern looked_for genv = let (argscs1,argscs2) = find_remaining_scopes pl args g in let pl = List.map2 (fun x -> in_not false loc (x,snd scopes) fullsubst []) argscs1 pl in let pl = add_local_defs_and_check_length loc genv g pl args in - DAst.make ?loc @@ RCPatCstr (g, pl @ List.map (in_pat false scopes) args, []) + let args = List.map2 (fun x -> in_pat false (x,snd scopes)) argscs2 args in + let pat = + if List.length pl = 0 then + (* Convention: if notation is @f, encoded as NApp(Nref g,[]), then + implicit arguments are not inherited *) + RCPatCstr (g, pl @ args, []) + else + RCPatCstr (g, pl, args) in + DAst.make ?loc @@ pat | NList (x,y,iter,terminator,revert) -> if not (List.is_empty args) then user_err ?loc (strbrk "Application of arguments to a recursive notation not supported in patterns."); @@ -2035,12 +2052,14 @@ let internalize globalenv env pattern_mode (_, ntnvars as lvar) c = DAst.make ?loc @@ GLetIn (na.CAst.v, inc1, int, intern_restart_binders (push_name_env ntnvars (impls_term_list 1 inc1) env na) c2) - | CNotation ((InConstrEntrySomeLevel,"- _"), ([a],[],[],[])) when is_non_zero a -> + | CNotation (_,(InConstrEntrySomeLevel,"- _"), ([a],[],[],[])) when is_non_zero a -> let p = match a.CAst.v with CPrim (Numeral (_, p)) -> p | _ -> assert false in intern env (CAst.make ?loc @@ CPrim (Numeral (SMinus,p))) - | CNotation ((InConstrEntrySomeLevel,"( _ )"),([a],[],[],[])) -> intern env a - | CNotation (ntn,args) -> - intern_notation intern env ntnvars loc ntn args + | CNotation (_,(InConstrEntrySomeLevel,"( _ )"),([a],[],[],[])) -> intern env a + | CNotation (_,ntn,args) -> + let c = intern_notation intern env ntnvars loc ntn args in 
+ let x, impl, scopes, l = find_appl_head_data c in + apply_impargs x env impl scopes l loc | CGeneralization (b,a,c) -> intern_generalization intern env ntnvars loc b a c | CPrim p -> @@ -2054,6 +2073,7 @@ let internalize globalenv env pattern_mode (_, ntnvars as lvar) c = intern_applied_reference ~isproj intern env (Environ.named_context_val globalenv) lvar us args ref in + check_not_notation_variable f ntnvars; (* Rem: GApp(_,f,[]) stands for @f *) if args = [] then DAst.make ?loc @@ GApp (f,[]) else smart_gapp f loc (intern_args env args_scopes (List.map fst args)) @@ -2070,9 +2090,9 @@ let internalize globalenv env pattern_mode (_, ntnvars as lvar) c = | CRef (ref,us) -> intern_applied_reference ~isproj intern env (Environ.named_context_val globalenv) lvar us args ref - | CNotation (ntn,([],[],[],[])) -> + | CNotation (_,ntn,ntnargs) -> assert (Option.is_empty isproj); - let c = intern_notation intern env ntnvars loc ntn ([],[],[],[]) in + let c = intern_notation intern env ntnvars loc ntn ntnargs in let x, impl, scopes, l = find_appl_head_data c in (x,impl,scopes,l), args | _ -> assert (Option.is_empty isproj); (intern_no_implicit env f,[],[],[]), args in diff --git a/interp/implicit_quantifiers.ml b/interp/implicit_quantifiers.ml index ffbb982ab7..e6f12f7eb2 100644 --- a/interp/implicit_quantifiers.ml +++ b/interp/implicit_quantifiers.ml @@ -92,7 +92,7 @@ let free_vars_of_constr_expr c ?(bound=Id.Set.empty) l = let rec aux bdvars l c = match CAst.(c.v) with | CRef (qid,_) when qualid_is_ident qid -> found c.CAst.loc (qualid_basename qid) bdvars l - | CNotation ((InConstrEntrySomeLevel,"{ _ : _ | _ }"), ({ CAst.v = CRef (qid,_) } :: _, [], [], [])) when + | CNotation (_,(InConstrEntrySomeLevel,"{ _ : _ | _ }"), ({ CAst.v = CRef (qid,_) } :: _, [], [], [])) when qualid_is_ident qid && not (Id.Set.mem (qualid_basename qid) bdvars) -> Constrexpr_ops.fold_constr_expr_with_binders (fun a l -> Id.Set.add a l) aux (Id.Set.add (qualid_basename qid) bdvars) l c | _ -> 
Constrexpr_ops.fold_constr_expr_with_binders (fun a l -> Id.Set.add a l) aux bdvars l c diff --git a/interp/notation.ml b/interp/notation.ml index 93969f3718..2086e08f79 100644 --- a/interp/notation.ml +++ b/interp/notation.ml @@ -49,6 +49,11 @@ let notation_entry_level_eq s1 s2 = match (s1,s2) with | InCustomEntryLevel (s1,n1), InCustomEntryLevel (s2,n2) -> String.equal s1 s2 && n1 = n2 | (InConstrEntrySomeLevel | InCustomEntryLevel _), _ -> false +let notation_with_optional_scope_eq inscope1 inscope2 = match (inscope1,inscope2) with + | LastLonelyNotation, LastLonelyNotation -> true + | NotationInScope s1, NotationInScope s2 -> String.equal s1 s2 + | (LastLonelyNotation | NotationInScope _), _ -> false + let notation_eq (from1,ntn1) (from2,ntn2) = notation_entry_level_eq from1 from2 && String.equal ntn1 ntn2 @@ -63,6 +68,15 @@ module NotationOrd = module NotationSet = Set.Make(NotationOrd) module NotationMap = CMap.Make(NotationOrd) +module SpecificNotationOrd = + struct + type t = specific_notation + let compare = pervasives_compare + end + +module SpecificNotationSet = Set.Make(SpecificNotationOrd) +module SpecificNotationMap = CMap.Make(SpecificNotationOrd) + (**********************************************************************) (* Scope of symbols *) @@ -148,21 +162,21 @@ let normalize_scope sc = (**********************************************************************) (* The global stack of scopes *) -type scope_elem = Scope of scope_name | SingleNotation of notation -type scopes = scope_elem list +type scope_item = OpenScopeItem of scope_name | LonelyNotationItem of notation +type scopes = scope_item list let scope_eq s1 s2 = match s1, s2 with -| Scope s1, Scope s2 -> String.equal s1 s2 -| SingleNotation s1, SingleNotation s2 -> notation_eq s1 s2 -| Scope _, SingleNotation _ -| SingleNotation _, Scope _ -> false +| OpenScopeItem s1, OpenScopeItem s2 -> String.equal s1 s2 +| LonelyNotationItem s1, LonelyNotationItem s2 -> notation_eq s1 s2 +| OpenScopeItem 
_, LonelyNotationItem _ +| LonelyNotationItem _, OpenScopeItem _ -> false let scope_stack = ref [] let current_scopes () = !scope_stack let scope_is_open_in_scopes sc l = - List.exists (function Scope sc' -> String.equal sc sc' | _ -> false) l + List.exists (function OpenScopeItem sc' -> String.equal sc sc' | _ -> false) l let scope_is_open sc = scope_is_open_in_scopes sc (!scope_stack) @@ -188,7 +202,7 @@ let discharge_scope (_,(local,_,_ as o)) = let classify_scope (local,_,_ as o) = if local then Dispose else Substitute o -let inScope : bool * bool * scope_elem -> obj = +let inScope : bool * bool * scope_item -> obj = declare_object {(default_object "SCOPE") with cache_function = cache_scope; open_function = open_scope; @@ -197,11 +211,11 @@ let inScope : bool * bool * scope_elem -> obj = classify_function = classify_scope } let open_close_scope (local,opening,sc) = - Lib.add_anonymous_leaf (inScope (local,opening,Scope (normalize_scope sc))) + Lib.add_anonymous_leaf (inScope (local,opening,OpenScopeItem (normalize_scope sc))) let empty_scope_stack = [] -let push_scope sc scopes = Scope sc :: scopes +let push_scope sc scopes = OpenScopeItem sc :: scopes let push_scopes = List.fold_right push_scope @@ -254,7 +268,7 @@ let find_delimiters_scope ?loc key = (* Uninterpretation tables *) type interp_rule = - | NotationRule of scope_name option * notation + | NotationRule of specific_notation | SynDefRule of KerName.t (* We define keys for glob_constr and aconstr to split the syntax entries @@ -1064,17 +1078,17 @@ let check_required_module ?loc sc (sp,d) = the scope stack [scopes], and if yes, using delimiters or not *) let find_with_delimiters = function - | None -> None - | Some scope -> + | LastLonelyNotation -> None + | NotationInScope scope -> match (String.Map.find scope !scope_map).delimiters with | Some key -> Some (Some scope, Some key) | None -> None let rec find_without_delimiters find (ntn_scope,ntn) = function - | Scope scope :: scopes -> + | 
OpenScopeItem scope :: scopes -> (* Is the expected ntn/numpr attached to the most recently open scope? *) begin match ntn_scope with - | Some scope' when String.equal scope scope' -> + | NotationInScope scope' when String.equal scope scope' -> Some (None,None) | _ -> (* If the most recently open scope has a notation/numeral printer @@ -1084,9 +1098,9 @@ let rec find_without_delimiters find (ntn_scope,ntn) = function else find_without_delimiters find (ntn_scope,ntn) scopes end - | SingleNotation ntn' :: scopes -> + | LonelyNotationItem ntn' :: scopes -> begin match ntn_scope, ntn with - | None, Some ntn when notation_eq ntn ntn' -> + | LastLonelyNotation, Some ntn when notation_eq ntn ntn' -> Some (None, None) | _ -> find_without_delimiters find (ntn_scope,ntn) scopes @@ -1123,7 +1137,7 @@ let declare_notation_interpretation ntn scopt pat df ~onlyprint deprecation = scope_map := String.Map.add scope sc !scope_map end; begin match scopt with - | None -> scope_stack := SingleNotation ntn :: !scope_stack + | None -> scope_stack := LonelyNotationItem ntn :: !scope_stack | Some _ -> () end @@ -1133,15 +1147,15 @@ let declare_uninterpretation rule (metas,c as pat) = let rec find_interpretation ntn find = function | [] -> raise Not_found - | Scope scope :: scopes -> + | OpenScopeItem scope :: scopes -> (try let n = find scope in (n,Some scope) with Not_found -> find_interpretation ntn find scopes) - | SingleNotation ntn'::scopes when notation_eq ntn' ntn -> + | LonelyNotationItem ntn'::scopes when notation_eq ntn' ntn -> (try let n = find default_scope in (n,None) with Not_found -> (* e.g. 
because single notation only for constr, not cases_pattern *) find_interpretation ntn find scopes) - | SingleNotation _::scopes -> + | LonelyNotationItem _::scopes -> find_interpretation ntn find scopes let find_notation ntn sc = @@ -1244,7 +1258,7 @@ let availability_of_notation (ntn_scope,ntn) scopes = commonly from the lowest level of the source entry to the highest level of the target entry. *) -type entry_coercion = notation list +type entry_coercion = (notation_with_optional_scope * notation) list module EntryCoercionOrd = struct @@ -1295,7 +1309,7 @@ let rec insert_coercion_path path = function else if shorter_path path path' then path::allpaths else path'::insert_coercion_path path paths -let declare_entry_coercion (entry,_ as ntn) entry' = +let declare_entry_coercion (scope,(entry,_) as specific_ntn) entry' = let entry, lev = decompose_custom_entry entry in let entry', lev' = decompose_custom_entry entry' in (* Transitive closure *) @@ -1304,19 +1318,19 @@ let declare_entry_coercion (entry,_ as ntn) entry' = List.fold_right (fun ((lev'',lev'''),path) l -> if notation_entry_eq entry entry''' && level_ord lev lev''' && not (notation_entry_eq entry' entry'') - then ((entry'',entry'),((lev'',lev'),path@[ntn]))::l else l) paths l) + then ((entry'',entry'),((lev'',lev'),path@[specific_ntn]))::l else l) paths l) !entry_coercion_map [] in let toaddright = EntryCoercionMap.fold (fun (entry'',entry''') paths l -> List.fold_right (fun ((lev'',lev'''),path) l -> if entry' = entry'' && level_ord lev' lev'' && entry <> entry''' - then ((entry,entry'''),((lev,lev'''),path@[ntn]))::l else l) paths l) + then ((entry,entry'''),((lev,lev'''),path@[specific_ntn]))::l else l) paths l) !entry_coercion_map [] in entry_coercion_map := List.fold_right (fun (pair,path) -> let olds = try EntryCoercionMap.find pair !entry_coercion_map with Not_found -> [] in EntryCoercionMap.add pair (insert_coercion_path path olds)) - (((entry,entry'),((lev,lev'),[ntn]))::toaddright@toaddleft) + 
(((entry,entry'),((lev,lev'),[specific_ntn]))::toaddright@toaddleft) !entry_coercion_map let entry_has_global_map = ref String.Map.empty @@ -1389,7 +1403,7 @@ let availability_of_prim_token n printer_scope local_scopes = with Not_found -> false in let scopes = make_current_scopes local_scopes in - Option.map snd (find_without_delimiters f (Some printer_scope,None) scopes) + Option.map snd (find_without_delimiters f (NotationInScope printer_scope,None) scopes) (* Miscellaneous *) @@ -1705,11 +1719,11 @@ let pr_scopes prglob = let rec find_default ntn = function | [] -> None - | Scope scope :: scopes -> + | OpenScopeItem scope :: scopes -> if NotationMap.mem ntn (find_scope scope).notations then Some scope else find_default ntn scopes - | SingleNotation ntn' :: scopes -> + | LonelyNotationItem ntn' :: scopes -> if notation_eq ntn ntn' then Some default_scope else find_default ntn scopes @@ -1863,13 +1877,13 @@ let collect_notation_in_scope scope sc known = let collect_notations stack = fst (List.fold_left (fun (all,knownntn as acc) -> function - | Scope scope -> + | OpenScopeItem scope -> if String.List.mem_assoc scope all then acc else let (l,knownntn) = collect_notation_in_scope scope (find_scope scope) knownntn in ((scope,l)::all,knownntn) - | SingleNotation ntn -> + | LonelyNotationItem ntn -> if List.mem_f notation_eq ntn knownntn then (all,knownntn) else try diff --git a/interp/notation.mli b/interp/notation.mli index ea5125f7ec..26c45d5896 100644 --- a/interp/notation.mli +++ b/interp/notation.mli @@ -25,11 +25,15 @@ val notation_entry_eq : notation_entry -> notation_entry -> bool val notation_entry_level_eq : notation_entry_level -> notation_entry_level -> bool (** Equality on [notation_entry_level]. *) +val notation_with_optional_scope_eq : notation_with_optional_scope -> notation_with_optional_scope -> bool + val notation_eq : notation -> notation -> bool (** Equality on [notation]. 
*) module NotationSet : Set.S with type elt = notation module NotationMap : CMap.ExtS with type key = notation and module Set := NotationSet +module SpecificNotationSet : Set.S with type elt = specific_notation +module SpecificNotationMap : CMap.ExtS with type key = specific_notation and module Set := SpecificNotationSet (** {6 Scopes } *) (** A scope is a set of interpreters for symbols + optional @@ -213,7 +217,7 @@ val availability_of_prim_token : (** Binds a notation in a given scope to an interpretation *) type interp_rule = - | NotationRule of scope_name option * notation + | NotationRule of specific_notation | SynDefRule of KerName.t val declare_notation_interpretation : notation -> scope_name option -> @@ -236,7 +240,7 @@ val uninterp_ind_pattern_notations : inductive -> notation_rule list (** Test if a notation is available in the scopes context [scopes]; if available, the result is not None; the first argument is itself not None if a delimiters is needed *) -val availability_of_notation : scope_name option * notation -> subscopes -> +val availability_of_notation : specific_notation -> subscopes -> (scope_name option * delimiters option) option (** {6 Miscellaneous} *) @@ -299,8 +303,8 @@ val locate_notation : (glob_constr -> Pp.t) -> notation_key -> val pr_visibility: (glob_constr -> Pp.t) -> scope_name option -> Pp.t -type entry_coercion = notation list -val declare_entry_coercion : notation -> notation_entry_level -> unit +type entry_coercion = (notation_with_optional_scope * notation) list +val declare_entry_coercion : specific_notation -> notation_entry_level -> unit val availability_of_entry_coercion : notation_entry_level -> notation_entry_level -> entry_coercion option val declare_custom_entry_has_global : string -> int -> unit diff --git a/interp/notation_ops.ml b/interp/notation_ops.ml index d1eb50d370..8f47e9276b 100644 --- a/interp/notation_ops.ml +++ b/interp/notation_ops.ml @@ -258,6 +258,8 @@ let glob_constr_of_notation_constr ?loc x = 
(******************************************************************************) (* Translating a glob_constr into a notation, interpreting recursive patterns *) +let print_parentheses = ref false + type found_variables = { vars : Id.t list; recursive_term_vars : (Id.t * Id.t) list; @@ -1092,6 +1094,7 @@ let match_termlist match_fun alp metas sigma rest x y iter termin revert = let rest = Id.List.assoc ldots_var terms in let t = Id.List.assoc y terms in let sigma = remove_sigma y (remove_sigma ldots_var sigma) in + if !print_parentheses && not (List.is_empty acc) then raise No_match; aux sigma (t::acc) rest with No_match when not (List.is_empty acc) -> acc, match_fun metas sigma rest termin in @@ -1364,35 +1367,37 @@ let match_cases_pattern_list match_fun metas sigma rest x y iter termin revert = let rec match_cases_pattern metas (terms,termlists,(),() as sigma) a1 a2 = match DAst.get a1, a2 with - | r1, NVar id2 when Id.List.mem_assoc id2 metas -> (bind_env_cases_pattern sigma id2 a1),(0,[]) - | PatVar Anonymous, NHole _ -> sigma,(0,[]) + | r1, NVar id2 when Id.List.mem_assoc id2 metas -> (bind_env_cases_pattern sigma id2 a1),(false,0,[]) + | PatVar Anonymous, NHole _ -> sigma,(false,0,[]) | PatCstr ((ind,_ as r1),largs,Anonymous), NRef (GlobRef.ConstructRef r2) when eq_constructor r1 r2 -> let l = try add_patterns_for_params_remove_local_defs (Global.env ()) r1 largs with Not_found -> raise No_match in - sigma,(0,l) + sigma,(false,0,l) | PatCstr ((ind,_ as r1),args1,Anonymous), NApp (NRef (GlobRef.ConstructRef r2),l2) when eq_constructor r1 r2 -> let l1 = try add_patterns_for_params_remove_local_defs (Global.env()) r1 args1 with Not_found -> raise No_match in let le2 = List.length l2 in - if Int.equal le2 0 (* Special case of a notation for a @Cstr *) || le2 > List.length l1 + if le2 > List.length l1 then raise No_match else let l1',more_args = Util.List.chop le2 l1 in - (List.fold_left2 (match_cases_pattern_no_more_args metas) sigma l1' l2),(le2,more_args) + (* 
Convention: notations to @f don't keep implicit arguments *) + let no_implicit = le2 = 0 in + (List.fold_left2 (match_cases_pattern_no_more_args metas) sigma l1' l2),(no_implicit,le2,more_args) | r1, NList (x,y,iter,termin,revert) -> (match_cases_pattern_list (match_cases_pattern_no_more_args) - metas (terms,termlists,(),()) a1 x y iter termin revert),(0,[]) + metas (terms,termlists,(),()) a1 x y iter termin revert),(false,0,[]) | _ -> raise No_match and match_cases_pattern_no_more_args metas sigma a1 a2 = match match_cases_pattern metas sigma a1 a2 with - | out,(_,[]) -> out + | out,(_,_,[]) -> out | _ -> raise No_match let match_ind_pattern metas sigma ind pats a2 = match a2 with | NRef (GlobRef.IndRef r2) when eq_ind ind r2 -> - sigma,(0,pats) + sigma,(false,0,pats) | NApp (NRef (GlobRef.IndRef r2),l2) when eq_ind ind r2 -> let le2 = List.length l2 in @@ -1401,7 +1406,8 @@ let match_ind_pattern metas sigma ind pats a2 = raise No_match else let l1',more_args = Util.List.chop le2 pats in - (List.fold_left2 (match_cases_pattern_no_more_args metas) sigma l1' l2),(le2,more_args) + let no_implicit = le2 = 0 in + (List.fold_left2 (match_cases_pattern_no_more_args metas) sigma l1' l2),(no_implicit,le2,more_args) |_ -> raise No_match let reorder_canonically_substitution terms termlists metas = diff --git a/interp/notation_ops.mli b/interp/notation_ops.mli index c62dac013b..0ef51b65a2 100644 --- a/interp/notation_ops.mli +++ b/interp/notation_ops.mli @@ -61,6 +61,8 @@ val glob_constr_of_notation_constr : ?loc:Loc.t -> notation_constr -> glob_const exception No_match +val print_parentheses : bool ref + val match_notation_constr : bool -> 'a glob_constr_g -> interpretation -> ('a glob_constr_g * extended_subscopes) list * ('a glob_constr_g list * extended_subscopes) list * ('a cases_pattern_disjunction_g * extended_subscopes) list * @@ -69,12 +71,12 @@ val match_notation_constr : bool -> 'a glob_constr_g -> interpretation -> val match_notation_constr_cases_pattern : 'a 
cases_pattern_g -> interpretation -> (('a cases_pattern_g * extended_subscopes) list * ('a cases_pattern_g list * extended_subscopes) list) * - (int * 'a cases_pattern_g list) + (bool * int * 'a cases_pattern_g list) val match_notation_constr_ind_pattern : inductive -> 'a cases_pattern_g list -> interpretation -> (('a cases_pattern_g * extended_subscopes) list * ('a cases_pattern_g list * extended_subscopes) list) * - (int * 'a cases_pattern_g list) + (bool * int * 'a cases_pattern_g list) (** {5 Matching a notation pattern against a [glob_constr]} *) diff --git a/kernel/nativelib.ml b/kernel/nativelib.ml index a62b51e8aa..86eaaddc90 100644 --- a/kernel/nativelib.ml +++ b/kernel/nativelib.ml @@ -25,7 +25,7 @@ let open_header = ["Nativevalues"; let open_header = List.map mk_open open_header (* Directory where compiled files are stored *) -let output_dir = ".coq-native" +let output_dir = ref ".coq-native" (* Extension of generated ml files, stored for debugging purposes *) let source_ext = ".native" @@ -51,8 +51,13 @@ let () = at_exit (fun () -> be guessed until flags have been properly initialized. It also lets us avoid forcing [my_temp_dir] if we don't need it (eg stdlib file without native compute or native conv uses). 
*) -let include_dirs () = - let base = [Envars.coqlib () / "kernel"; Envars.coqlib () / "library"] in +let include_dirs = ref [] +let get_include_dirs () = + let base = match !include_dirs with + | [] -> + [Envars.coqlib () / "kernel"; Envars.coqlib () / "library"] + | _::_ as l -> l + in if Lazy.is_val my_temp_dir then (Lazy.force my_temp_dir) :: base else base @@ -88,8 +93,8 @@ let error_native_compiler_failed e = let call_compiler ?profile:(profile=false) ml_filename = let load_path = !get_load_paths () in - let load_path = List.map (fun dn -> dn / output_dir) load_path in - let include_dirs = List.flatten (List.map (fun x -> ["-I"; x]) (include_dirs () @ load_path)) in + let load_path = List.map (fun dn -> dn / !output_dir) load_path in + let include_dirs = List.flatten (List.map (fun x -> ["-I"; x]) (get_include_dirs () @ load_path)) in let f = Filename.chop_extension ml_filename in let link_filename = f ^ ".cmo" in let link_filename = Dynlink.adapt_filename link_filename in @@ -139,7 +144,7 @@ let compile_library dir code fn = let fn = fn ^ source_ext in let basename = Filename.basename fn in let dirname = Filename.dirname fn in - let dirname = dirname / output_dir in + let dirname = dirname / !output_dir in let () = try Unix.mkdir dirname 0o755 with Unix.Unix_error (Unix.EEXIST, _, _) -> () @@ -181,5 +186,5 @@ let call_linker ?(fatal=true) env ~prefix f upds = match upds with Some upds -> update_locations upds | _ -> () let link_library env ~prefix ~dirname ~basename = - let f = dirname / output_dir / basename in + let f = dirname / !output_dir / basename in call_linker env ~fatal:false ~prefix f None diff --git a/kernel/nativelib.mli b/kernel/nativelib.mli index 52d18acca6..155fde54e9 100644 --- a/kernel/nativelib.mli +++ b/kernel/nativelib.mli @@ -13,7 +13,8 @@ open Nativecode used by the native compiler. 
*) (* Directory where compiled files are stored *) -val output_dir : string +val output_dir : CUnix.physical_path ref +val include_dirs : CUnix.physical_path list ref val get_load_paths : (unit -> string list) ref diff --git a/kernel/term_typing.ml b/kernel/term_typing.ml index faa601e277..2ecd4880f7 100644 --- a/kernel/term_typing.ml +++ b/kernel/term_typing.ml @@ -61,7 +61,7 @@ let feedback_completion_typecheck = Feedback.feedback ~id:state_id Feedback.Complete) type typing_context = -| MonoTyCtx of Environ.env * unsafe_type_judgment * Univ.ContextSet.t * Id.Set.t * Stateid.t option +| MonoTyCtx of Environ.env * unsafe_type_judgment * Id.Set.t * Stateid.t option | PolyTyCtx of Environ.env * unsafe_type_judgment * Univ.universe_level_subst * Univ.AUContext.t * Id.Set.t * Stateid.t option let infer_declaration env (dcl : constant_entry) = @@ -155,7 +155,7 @@ let infer_opaque env = function let env = push_context_set ~strict:true univs env in let { opaque_entry_feedback = feedback_id; _ } = c in let tyj = Typeops.infer_type env typ in - let context = MonoTyCtx (env, tyj, univs, c.opaque_entry_secctx, feedback_id) in + let context = MonoTyCtx (env, tyj, c.opaque_entry_secctx, feedback_id) in let def = OpaqueDef () in { Cooking.cook_body = def; @@ -257,10 +257,8 @@ let build_constant_declaration env result = const_typing_flags = Environ.typing_flags env } let check_delayed (type a) (handle : a effect_handler) tyenv (body : a proof_output) = match tyenv with -| MonoTyCtx (env, tyj, univs, declared, feedback_id) -> +| MonoTyCtx (env, tyj, declared, feedback_id) -> let ((body, uctx), side_eff) = body in - (* don't redeclare universes which are declared for the type *) - let uctx = Univ.ContextSet.diff uctx univs in let (body, uctx', valid_signatures) = handle env body side_eff in let uctx = Univ.ContextSet.union uctx uctx' in let env = push_context_set uctx env in diff --git a/lib/cErrors.ml b/lib/cErrors.ml index 9f496f5845..323dc8c1a4 100644 --- a/lib/cErrors.ml +++ 
b/lib/cErrors.ml @@ -66,12 +66,10 @@ let print_anomaly askreport e = let handle_stack = ref [] -exception Unhandled - let register_handler h = handle_stack := h::!handle_stack let is_handled e = - let is_handled_by h = (try let _ = h e in true with | Unhandled -> false) in + let is_handled_by h = Option.has_some (h e) in List.exists is_handled_by !handle_stack let is_anomaly = function @@ -88,30 +86,31 @@ let register_additional_error_info (f : Exninfo.info -> (Pp.t option Loc.located all the handlers of a list, and finally a [bottom] handler if all others have failed *) -let rec print_gen ~anomaly ~extra_msg stk (e, info) = +let rec print_gen ~anomaly ~extra_msg stk e = match stk with | [] -> print_anomaly anomaly e | h::stk' -> - try - let err_msg = h e in + match h e with + | Some err_msg -> Option.cata (fun msg -> msg ++ err_msg) err_msg extra_msg - with - | Unhandled -> print_gen ~anomaly ~extra_msg stk' (e,info) - | any -> print_gen ~anomaly ~extra_msg stk' (any,info) + | None -> + print_gen ~anomaly ~extra_msg stk' e let print_gen ~anomaly (e, info) = let extra_info = try CList.find_map (fun f -> Some (f info)) !additional_error_info_handler with Not_found -> None in - let extra_msg, info = match extra_info with - | None -> None, info - | Some (loc, msg) -> - let info = Option.cata (fun l -> Loc.add_loc info l) info loc in - msg, info + let extra_msg = match extra_info with + | None -> None + | Some (loc, msg) -> msg in - print_gen ~anomaly ~extra_msg !handle_stack (e,info) + try + print_gen ~anomaly ~extra_msg !handle_stack e + with exn -> + (* exception in error printer *) + str "<in exception printer>" ++ fnl () ++ print_anomaly anomaly exn (** The standard exception printer *) let iprint (e, info) = @@ -130,8 +129,8 @@ let print_no_report e = iprint_no_report (e, Exninfo.info e) let _ = register_handler begin function | UserError(s, pps) -> - where s ++ pps - | _ -> raise Unhandled + Some (where s ++ pps) + | _ -> None end (** Critical exceptions should 
not be caught and ignored by mistake diff --git a/lib/cErrors.mli b/lib/cErrors.mli index 02eaf6bd0b..1660a00244 100644 --- a/lib/cErrors.mli +++ b/lib/cErrors.mli @@ -46,19 +46,14 @@ exception Timeout recent first) until a handle deals with it. Handles signal that they don't deal with some exception - by raising [Unhandled]. + by returning None. Raising any other exception is + forbidden and will result in an anomaly. - Handles can raise exceptions themselves, in which - case, the exception is passed to the handles which - were registered before. - - The exception that are considered anomalies should not be + Exceptions that are considered anomalies should not be handled by registered handlers. *) -exception Unhandled - -val register_handler : (exn -> Pp.t) -> unit +val register_handler : (exn -> Pp.t option) -> unit (** The standard exception printer *) val print : exn -> Pp.t diff --git a/lib/future.ml b/lib/future.ml index 5cccd2038d..ddf841b7fc 100644 --- a/lib/future.ml +++ b/lib/future.ml @@ -28,9 +28,9 @@ exception NotReady of string exception NotHere of string let _ = CErrors.register_handler (function - | NotReady name -> !not_ready_msg name - | NotHere name -> !not_here_msg name - | _ -> raise CErrors.Unhandled) + | NotReady name -> Some (!not_ready_msg name) + | NotHere name -> Some (!not_here_msg name) + | _ -> None) type fix_exn = Exninfo.iexn -> Exninfo.iexn let id x = x diff --git a/parsing/extend.ml b/parsing/extend.ml index dcdaa25c33..848861238a 100644 --- a/parsing/extend.ml +++ b/parsing/extend.ml @@ -21,6 +21,7 @@ type production_position = type production_level = | NextLevel | NumLevel of int + | DefaultLevel (** Interpreted differently at the border or inside a rule *) (** User-level types used to tell how to parse or interpret of the non-terminal *) @@ -40,7 +41,7 @@ type constr_entry_key = (** Entries used in productions, vernac side (e.g. 
"x bigint" or "x ident") *) type simple_constr_prod_entry_key = - production_level option constr_entry_key_gen + production_level constr_entry_key_gen (** Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals *) diff --git a/parsing/g_constr.mlg b/parsing/g_constr.mlg index dcc3a87b11..d6c6c365cb 100644 --- a/parsing/g_constr.mlg +++ b/parsing/g_constr.mlg @@ -199,11 +199,11 @@ GRAMMAR EXTEND Gram collapse -(3) into the numeral -3. *) (match c.CAst.v with | CPrim (Numeral (SPlus,n)) -> - CAst.make ~loc @@ CNotation((InConstrEntrySomeLevel,"( _ )"),([c],[],[],[])) + CAst.make ~loc @@ CNotation(None,(InConstrEntrySomeLevel,"( _ )"),([c],[],[],[])) | _ -> c) } | "{|"; c = record_declaration; bar_cbrace -> { c } | "{"; c = binder_constr ; "}" -> - { CAst.make ~loc @@ CNotation((InConstrEntrySomeLevel,"{ _ }"),([c],[],[],[])) } + { CAst.make ~loc @@ CNotation(None,(InConstrEntrySomeLevel,"{ _ }"),([c],[],[],[])) } | "`{"; c = operconstr LEVEL "200"; "}" -> { CAst.make ~loc @@ CGeneralization (MaxImplicit, None, c) } | "`("; c = operconstr LEVEL "200"; ")" -> @@ -380,7 +380,7 @@ GRAMMAR EXTEND Gram collapse -(3) into the numeral -3. 
*) match p.CAst.v with | CPatPrim (Numeral (SPlus,n)) -> - CAst.make ~loc @@ CPatNotation((InConstrEntrySomeLevel,"( _ )"),([p],[]),[]) + CAst.make ~loc @@ CPatNotation(None,(InConstrEntrySomeLevel,"( _ )"),([p],[]),[]) | _ -> p } | "("; p = pattern LEVEL "200"; "|" ; pl = LIST1 pattern LEVEL "200" SEP "|"; ")" -> { CAst.make ~loc @@ CPatOr (p::pl) } diff --git a/parsing/notation_gram.ml b/parsing/notation_gram.ml index 9f133ca9d4..427505c199 100644 --- a/parsing/notation_gram.ml +++ b/parsing/notation_gram.ml @@ -10,14 +10,11 @@ open Names open Extend +open Constrexpr (** Dealing with precedences *) -type precedence = int -type parenRelation = L | E | Any | Prec of precedence -type tolerability = precedence * parenRelation - -type level = Constrexpr.notation_entry * precedence * tolerability list * constr_entry_key list +type level = notation_entry * entry_level * entry_relative_level list * constr_entry_key list (* first argument is InCustomEntry s for custom entries *) type grammar_constr_prod_item = @@ -37,7 +34,4 @@ type one_notation_grammar = { notgram_prods : grammar_constr_prod_item list list; } -type notation_grammar = { - notgram_onlyprinting : bool; - notgram_rules : one_notation_grammar list -} +type notation_grammar = one_notation_grammar list diff --git a/parsing/notgram_ops.ml b/parsing/notgram_ops.ml index 009dafdb13..b6699493bb 100644 --- a/parsing/notgram_ops.ml +++ b/parsing/notgram_ops.ml @@ -12,33 +12,36 @@ open Pp open CErrors open Util open Notation -open Notation_gram +open Constrexpr -(* Uninterpreted notation levels *) +(* Register the level of notation for parsing and printing + (also register the parsing rule if not onlyprinting) *) let notation_level_map = Summary.ref ~name:"notation_level_map" NotationMap.empty -let declare_notation_level ?(onlyprint=false) ntn level = +let declare_notation_level ntn parsing_rule level = try - let (level,onlyprint) = NotationMap.find ntn !notation_level_map in - if not onlyprint then anomaly (str 
"Notation " ++ pr_notation ntn ++ str " is already assigned a level.") + let _ = NotationMap.find ntn !notation_level_map in + anomaly (str "Notation " ++ pr_notation ntn ++ str " is already assigned a level.") with Not_found -> - notation_level_map := NotationMap.add ntn (level,onlyprint) !notation_level_map + notation_level_map := NotationMap.add ntn (parsing_rule,level) !notation_level_map -let level_of_notation ?(onlyprint=false) ntn = - let (level,onlyprint') = NotationMap.find ntn !notation_level_map in - if onlyprint' && not onlyprint then raise Not_found; - level +let level_of_notation ntn = + NotationMap.find ntn !notation_level_map + +let get_defined_notations () = + NotationSet.elements @@ NotationMap.domain !notation_level_map (**********************************************************************) (* Equality *) open Extend -let parenRelation_eq t1 t2 = match t1, t2 with -| L, L | E, E | Any, Any -> true -| Prec l1, Prec l2 -> Int.equal l1 l2 -| _ -> false +let entry_relative_level_eq t1 t2 = match t1, t2 with +| LevelLt n1, LevelLt n2 -> Int.equal n1 n2 +| LevelLe n1, LevelLe n2 -> Int.equal n1 n2 +| LevelSome, LevelSome -> true +| (LevelLt _ | LevelLe _ | LevelSome), _ -> false let production_position_eq pp1 pp2 = match (pp1,pp2) with | BorderProd (side1,assoc1), BorderProd (side2,assoc2) -> side1 = side2 && assoc1 = assoc2 @@ -48,7 +51,8 @@ let production_position_eq pp1 pp2 = match (pp1,pp2) with let production_level_eq l1 l2 = match (l1,l2) with | NextLevel, NextLevel -> true | NumLevel n1, NumLevel n2 -> Int.equal n1 n2 -| (NextLevel | NumLevel _), _ -> false +| DefaultLevel, DefaultLevel -> true +| (NextLevel | NumLevel _ | DefaultLevel), _ -> false let constr_entry_key_eq eq v1 v2 = match v1, v2 with | ETIdent, ETIdent -> true @@ -61,11 +65,10 @@ let constr_entry_key_eq eq v1 v2 = match v1, v2 with | (ETIdent | ETGlobal | ETBigint | ETBinder _ | ETConstr _ | ETPattern _), _ -> false let level_eq_gen strict (s1, l1, t1, u1) (s2, l2, t2, u2) = - 
let tolerability_eq (i1, r1) (i2, r2) = Int.equal i1 i2 && parenRelation_eq r1 r2 in let prod_eq (l1,pp1) (l2,pp2) = not strict || (production_level_eq l1 l2 && production_position_eq pp1 pp2) in - notation_entry_eq s1 s2 && Int.equal l1 l2 && List.equal tolerability_eq t1 t2 + notation_entry_eq s1 s2 && Int.equal l1 l2 && List.equal entry_relative_level_eq t1 t2 && List.equal (constr_entry_key_eq prod_eq) u1 u2 let level_eq = level_eq_gen false diff --git a/parsing/notgram_ops.mli b/parsing/notgram_ops.mli index c31f4505e7..d8b7e8e4c1 100644 --- a/parsing/notgram_ops.mli +++ b/parsing/notgram_ops.mli @@ -13,8 +13,13 @@ open Constrexpr open Notation_gram val level_eq : level -> level -> bool +val entry_relative_level_eq : entry_relative_level -> entry_relative_level -> bool (** {6 Declare and test the level of a (possibly uninterpreted) notation } *) -val declare_notation_level : ?onlyprint:bool -> notation -> level -> unit -val level_of_notation : ?onlyprint:bool -> notation -> level (** raise [Not_found] if no level or not respecting onlyprint *) +val declare_notation_level : notation -> notation_grammar option -> level -> unit +val level_of_notation : notation -> notation_grammar option * level + (** raise [Not_found] if not declared *) + +(** Returns notations with defined parsing/printing rules *) +val get_defined_notations : unit -> notation list diff --git a/parsing/parsing.mllib b/parsing/parsing.mllib index 2154f2f881..12311f9cd9 100644 --- a/parsing/parsing.mllib +++ b/parsing/parsing.mllib @@ -2,8 +2,8 @@ Tok CLexer Extend Notation_gram -Ppextend Notgram_ops +Ppextend Pcoq G_constr G_prim diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml index 26afdcb9d5..92c3b7c6e8 100644 --- a/parsing/pcoq.ml +++ b/parsing/pcoq.ml @@ -212,7 +212,8 @@ type 'a extend_statement = 'a single_extend_statement list type extend_rule = -| ExtendRule : 'a G.Entry.e * gram_reinit option * 'a extend_statement -> extend_rule +| ExtendRule : 'a Entry.t * 'a extend_statement -> 
extend_rule +| ExtendRuleReinit : 'a Entry.t * gram_reinit * 'a extend_statement -> extend_rule module EntryCommand = Dyn.Make () module EntryData = struct type _ t = Ex : 'b G.Entry.e String.Map.t -> ('a * 'b) t end @@ -231,33 +232,39 @@ let camlp5_entries = ref EntryDataMap.empty (** Deletion *) -let grammar_delete e reinit (pos,rls) = +let grammar_delete e (pos,rls) = List.iter (fun (n,ass,lev) -> List.iter (fun (AnyProduction (pil,_)) -> G.safe_delete_rule e pil) (List.rev lev)) - (List.rev rls); - match reinit with - | Some (a,ext) -> - let lev = match pos with + (List.rev rls) + +let grammar_delete_reinit e reinit (pos, rls) = + grammar_delete e (pos, rls); + let a, ext = reinit in + let lev = match pos with | Some (Gramext.Level n) -> n | _ -> assert false - in - let warning msg = Feedback.msg_warning Pp.(str msg) in - (G.safe_extend ~warning:(Some warning) e) (Some ext) [Some lev,Some a,[]] - | None -> () + in + let warning msg = Feedback.msg_warning Pp.(str msg) in + (G.safe_extend ~warning:(Some warning) e) (Some ext) [Some lev,Some a,[]] (** Extension *) -let grammar_extend e reinit ext = +let grammar_extend e ext = let ext = of_coq_extend_statement ext in - let undo () = grammar_delete e reinit ext in + let undo () = grammar_delete e ext in let pos, ext = fix_extend_statement ext in let redo () = G.safe_extend ~warning:None e pos ext in camlp5_state := ByEXTEND (undo, redo) :: !camlp5_state; redo () -let grammar_extend_sync e reinit ext = - camlp5_state := ByGrammar (ExtendRule (e, reinit, ext)) :: !camlp5_state; +let grammar_extend_sync e ext = + camlp5_state := ByGrammar (ExtendRule (e, ext)) :: !camlp5_state; + let pos, ext = fix_extend_statement (of_coq_extend_statement ext) in + G.safe_extend ~warning:None e pos ext + +let grammar_extend_sync_reinit e reinit ext = + camlp5_state := ByGrammar (ExtendRuleReinit (e, reinit, ext)) :: !camlp5_state; let pos, ext = fix_extend_statement (of_coq_extend_statement ext) in G.safe_extend ~warning:None e pos 
ext @@ -278,8 +285,12 @@ let rec remove_grammars n = if n>0 then match !camlp5_state with | [] -> anomaly ~label:"Pcoq.remove_grammars" (Pp.str "too many rules to remove.") - | ByGrammar (ExtendRule (g, reinit, ext)) :: t -> - grammar_delete g reinit (of_coq_extend_statement ext); + | ByGrammar (ExtendRuleReinit (g, reinit, ext)) :: t -> + grammar_delete_reinit g reinit (of_coq_extend_statement ext); + camlp5_state := t; + remove_grammars (n-1) + | ByGrammar (ExtendRule (g, ext)) :: t -> + grammar_delete g (of_coq_extend_statement ext); camlp5_state := t; remove_grammars (n-1) | ByEXTEND (undo,redo)::t -> @@ -507,6 +518,12 @@ let create_entry_command name (interp : ('a, 'b) entry_extension) : ('a, 'b) ent let () = entry_interp := EntryInterpMap.add obj (EntryInterp.Ex interp) !entry_interp in obj +let iter_extend_sync = function + | ExtendRule (e, ext) -> + grammar_extend_sync e ext + | ExtendRuleReinit (e, reinit, ext) -> + grammar_extend_sync_reinit e reinit ext + let extend_grammar_command tag g = let modify = GrammarInterpMap.find tag !grammar_interp in let grammar_state = match !grammar_stack with @@ -514,8 +531,7 @@ let extend_grammar_command tag g = | (_, st) :: _ -> st in let (rules, st) = modify g grammar_state in - let iter (ExtendRule (e, reinit, ext)) = grammar_extend_sync e reinit ext in - let () = List.iter iter rules in + let () = List.iter iter_extend_sync rules in let nb = List.length rules in grammar_stack := (GramExt (nb, GrammarCommand.Dyn (tag, g)), st) :: !grammar_stack diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli index 404fbdb4fd..f2fc007a7b 100644 --- a/parsing/pcoq.mli +++ b/parsing/pcoq.mli @@ -225,7 +225,7 @@ type 'a extend_statement = Gramlib.Gramext.position option * 'a single_extend_statement list -val grammar_extend : 'a Entry.t -> gram_reinit option -> 'a extend_statement -> unit +val grammar_extend : 'a Entry.t -> 'a extend_statement -> unit (** Extend the grammar of Coq, without synchronizing it with the backtracking mechanism. 
This means that grammar extensions defined this way will survive an undo. *) @@ -242,7 +242,8 @@ type 'a grammar_command marshallable. *) type extend_rule = -| ExtendRule : 'a Entry.t * gram_reinit option * 'a extend_statement -> extend_rule +| ExtendRule : 'a Entry.t * 'a extend_statement -> extend_rule +| ExtendRuleReinit : 'a Entry.t * gram_reinit * 'a extend_statement -> extend_rule type 'a grammar_extension = 'a -> GramState.t -> extend_rule list * GramState.t (** Grammar extension entry point. Given some ['a] and a current grammar state, diff --git a/parsing/ppextend.ml b/parsing/ppextend.ml index 7368f4109e..bb6693a239 100644 --- a/parsing/ppextend.ml +++ b/parsing/ppextend.ml @@ -12,7 +12,8 @@ open Util open Pp open CErrors open Notation -open Notation_gram +open Constrexpr +open Notgram_ops (*s Pretty-print. *) @@ -37,41 +38,67 @@ let ppcmd_of_cut = function | PpBrk(n1,n2) -> brk(n1,n2) type unparsing = - | UnpMetaVar of int * parenRelation - | UnpBinderMetaVar of int * parenRelation - | UnpListMetaVar of int * parenRelation * unparsing list - | UnpBinderListMetaVar of int * bool * unparsing list + | UnpMetaVar of entry_relative_level * Extend.side option + | UnpBinderMetaVar of entry_relative_level + | UnpListMetaVar of entry_relative_level * unparsing list * Extend.side option + | UnpBinderListMetaVar of bool * unparsing list | UnpTerminal of string | UnpBox of ppbox * unparsing Loc.located list | UnpCut of ppcut -type unparsing_rule = unparsing list * precedence +type unparsing_rule = unparsing list * entry_level type extra_unparsing_rules = (string * string) list -(* Concrete syntax for symbolic-extension table *) -let notation_rules = - Summary.ref ~name:"notation-rules" (NotationMap.empty : (unparsing_rule * extra_unparsing_rules * notation_grammar) NotationMap.t) - -let declare_notation_rule ntn ~extra unpl gram = - notation_rules := NotationMap.add ntn (unpl,extra,gram) !notation_rules - -let find_notation_printing_rule ntn = - try pi1 
(NotationMap.find ntn !notation_rules) - with Not_found -> anomaly (str "No printing rule found for " ++ pr_notation ntn ++ str ".") -let find_notation_extra_printing_rules ntn = - try pi2 (NotationMap.find ntn !notation_rules) - with Not_found -> [] -let find_notation_parsing_rules ntn = - try pi3 (NotationMap.find ntn !notation_rules) - with Not_found -> anomaly (str "No parsing rule found for " ++ pr_notation ntn ++ str ".") - -let get_defined_notations () = - NotationSet.elements @@ NotationMap.domain !notation_rules + +let rec unparsing_eq unp1 unp2 = match (unp1,unp2) with + | UnpMetaVar (p1,s1), UnpMetaVar (p2,s2) -> entry_relative_level_eq p1 p2 && s1 = s2 + | UnpBinderMetaVar p1, UnpBinderMetaVar p2 -> entry_relative_level_eq p1 p2 + | UnpListMetaVar (p1,l1,s1), UnpListMetaVar (p2,l2,s2) -> entry_relative_level_eq p1 p2 && List.for_all2eq unparsing_eq l1 l2 && s1 = s2 + | UnpBinderListMetaVar (b1,l1), UnpBinderListMetaVar (b2,l2) -> b1 = b2 && List.for_all2eq unparsing_eq l1 l2 + | UnpTerminal s1, UnpTerminal s2 -> String.equal s1 s2 + | UnpBox (b1,l1), UnpBox (b2,l2) -> b1 = b2 && List.for_all2eq unparsing_eq (List.map snd l1) (List.map snd l2) + | UnpCut p1, UnpCut p2 -> p1 = p2 + | (UnpMetaVar _ | UnpBinderMetaVar _ | UnpListMetaVar _ | + UnpBinderListMetaVar _ | UnpTerminal _ | UnpBox _ | UnpCut _), _ -> false + +(* Register generic and specific printing rules *) + +let generic_notation_printing_rules = + Summary.ref ~name:"generic-notation-printing-rules" (NotationMap.empty : (unparsing_rule * bool * extra_unparsing_rules) NotationMap.t) + +let specific_notation_printing_rules = + Summary.ref ~name:"specific-notation-printing-rules" (SpecificNotationMap.empty : (unparsing_rule * extra_unparsing_rules) SpecificNotationMap.t) + +let declare_generic_notation_printing_rules ntn ~reserved ~extra unpl = + generic_notation_printing_rules := NotationMap.add ntn (unpl,reserved,extra) !generic_notation_printing_rules +let 
declare_specific_notation_printing_rules specific_ntn ~extra unpl = + specific_notation_printing_rules := SpecificNotationMap.add specific_ntn (unpl,extra) !specific_notation_printing_rules + +let has_generic_notation_printing_rule ntn = + NotationMap.mem ntn !generic_notation_printing_rules + +let find_generic_notation_printing_rule ntn = + NotationMap.find ntn !generic_notation_printing_rules + +let find_specific_notation_printing_rule specific_ntn = + SpecificNotationMap.find specific_ntn !specific_notation_printing_rules + +let find_notation_printing_rule which ntn = + try match which with + | None -> raise Not_found (* Normally not the case *) + | Some which -> fst (find_specific_notation_printing_rule (which,ntn)) + with Not_found -> pi1 (find_generic_notation_printing_rule ntn) + +let find_notation_extra_printing_rules which ntn = + try match which with + | None -> raise Not_found + | Some which -> snd (find_specific_notation_printing_rule (which,ntn)) + with Not_found -> pi3 (find_generic_notation_printing_rule ntn) let add_notation_extra_printing_rule ntn k v = try - notation_rules := - let p, pp, gr = NotationMap.find ntn !notation_rules in - NotationMap.add ntn (p, (k,v) :: pp, gr) !notation_rules + generic_notation_printing_rules := + let p, b, pp = NotationMap.find ntn !generic_notation_printing_rules in + NotationMap.add ntn (p, b, (k,v) :: pp) !generic_notation_printing_rules with Not_found -> user_err ~hdr:"add_notation_extra_printing_rule" (str "No such Notation.") diff --git a/parsing/ppextend.mli b/parsing/ppextend.mli index be5af75e72..18e48942c6 100644 --- a/parsing/ppextend.mli +++ b/parsing/ppextend.mli @@ -9,7 +9,6 @@ (************************************************************************) open Constrexpr -open Notation_gram (** {6 Pretty-print. 
} *) @@ -31,21 +30,24 @@ val ppcmd_of_cut : ppcut -> Pp.t (** Declare and look for the printing rule for symbolic notations *) type unparsing = - | UnpMetaVar of int * parenRelation - | UnpBinderMetaVar of int * parenRelation - | UnpListMetaVar of int * parenRelation * unparsing list - | UnpBinderListMetaVar of int * bool * unparsing list + | UnpMetaVar of entry_relative_level * Extend.side option + | UnpBinderMetaVar of entry_relative_level + | UnpListMetaVar of entry_relative_level * unparsing list * Extend.side option + | UnpBinderListMetaVar of bool * unparsing list | UnpTerminal of string | UnpBox of ppbox * unparsing Loc.located list | UnpCut of ppcut -type unparsing_rule = unparsing list * precedence +type unparsing_rule = unparsing list * entry_level type extra_unparsing_rules = (string * string) list -val declare_notation_rule : notation -> extra:extra_unparsing_rules -> unparsing_rule -> notation_grammar -> unit -val find_notation_printing_rule : notation -> unparsing_rule -val find_notation_extra_printing_rules : notation -> extra_unparsing_rules -val find_notation_parsing_rules : notation -> notation_grammar -val add_notation_extra_printing_rule : notation -> string -> string -> unit -(** Returns notations with defined parsing/printing rules *) -val get_defined_notations : unit -> notation list +val unparsing_eq : unparsing -> unparsing -> bool + +val declare_generic_notation_printing_rules : notation -> reserved:bool -> extra:extra_unparsing_rules -> unparsing_rule -> unit +val declare_specific_notation_printing_rules : specific_notation -> extra:extra_unparsing_rules -> unparsing_rule -> unit +val has_generic_notation_printing_rule : notation -> bool +val find_generic_notation_printing_rule : notation -> unparsing_rule * bool * extra_unparsing_rules +val find_specific_notation_printing_rule : specific_notation -> unparsing_rule * extra_unparsing_rules +val find_notation_printing_rule : notation_with_optional_scope option -> notation -> unparsing_rule 
+val find_notation_extra_printing_rules : notation_with_optional_scope option -> notation -> extra_unparsing_rules +val add_notation_extra_printing_rule : notation -> string -> string -> unit diff --git a/plugins/extraction/extraction.ml b/plugins/extraction/extraction.ml index 9b30ddd958..71a3dcfef2 100644 --- a/plugins/extraction/extraction.ml +++ b/plugins/extraction/extraction.ml @@ -507,21 +507,22 @@ and extract_really_ind env kn mib = assert (Int.equal (List.length field_names) (List.length typ)); let projs = ref Cset.empty in let mp = MutInd.modpath kn in - let rec select_fields l typs = match l,typs with + let implicits = implicits_of_global (GlobRef.ConstructRef (ip,1)) in + let rec select_fields i l typs = match l,typs with | [],[] -> [] - | _::l, typ::typs when isTdummy (expand env typ) -> - select_fields l typs + | _::l, typ::typs when isTdummy (expand env typ) || Int.Set.mem i implicits -> + select_fields (i+1) l typs | {binder_name=Anonymous}::l, typ::typs -> - None :: (select_fields l typs) + None :: (select_fields (i+1) l typs) | {binder_name=Name id}::l, typ::typs -> let knp = Constant.make2 mp (Label.of_id id) in (* Is it safe to use [id] for projections [foo.id] ? *) if List.for_all ((==) Keep) (type2signature env typ) then projs := Cset.add knp !projs; - Some (GlobRef.ConstRef knp) :: (select_fields l typs) + Some (GlobRef.ConstRef knp) :: (select_fields (i+1) l typs) | _ -> assert false in - let field_glob = select_fields field_names typ + let field_glob = select_fields (1+npar) field_names typ in (* Is this record officially declared with its projections ? *) (* If so, we use this information. 
*) diff --git a/plugins/fourier/plugin_base.dune b/plugins/fourier/plugin_base.dune deleted file mode 100644 index 8cc76f6f9e..0000000000 --- a/plugins/fourier/plugin_base.dune +++ /dev/null @@ -1,5 +0,0 @@ -(library - (name fourier_plugin) - (public_name coq.plugins.fourier) - (synopsis "Coq's fourier plugin") - (libraries coq.plugins.ltac)) diff --git a/plugins/funind/glob_term_to_relation.ml b/plugins/funind/glob_term_to_relation.ml index fdbad2ab9e..c7dfe69fb1 100644 --- a/plugins/funind/glob_term_to_relation.ml +++ b/plugins/funind/glob_term_to_relation.ml @@ -1476,7 +1476,7 @@ let do_build_inductive in let rel_ind i ext_rel_constructors = ((CAst.make @@ relnames.(i)), - rel_params, + (rel_params,None), Some rel_arities.(i), ext_rel_constructors),[] in diff --git a/plugins/ltac/extraargs.mlg b/plugins/ltac/extraargs.mlg index bab6bfd78e..5835d75c79 100644 --- a/plugins/ltac/extraargs.mlg +++ b/plugins/ltac/extraargs.mlg @@ -298,7 +298,7 @@ END let pr_by_arg_tac env sigma _prc _prlc prtac opt_c = match opt_c with | None -> mt () - | Some t -> hov 2 (str "by" ++ spc () ++ prtac env sigma (3,Notation_gram.E) t) + | Some t -> hov 2 (str "by" ++ spc () ++ prtac env sigma (Constrexpr.LevelLe 3) t) } diff --git a/plugins/ltac/extraargs.mli b/plugins/ltac/extraargs.mli index 6dd51e4e01..dd4195f2ef 100644 --- a/plugins/ltac/extraargs.mli +++ b/plugins/ltac/extraargs.mli @@ -67,7 +67,7 @@ val wit_by_arg_tac : val pr_by_arg_tac : Environ.env -> Evd.evar_map -> - (Environ.env -> Evd.evar_map -> int * Notation_gram.parenRelation -> raw_tactic_expr -> Pp.t) -> + (Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> raw_tactic_expr -> Pp.t) -> raw_tactic_expr option -> Pp.t val test_lpar_id_colon : unit Pcoq.Entry.t diff --git a/plugins/ltac/pptactic.ml b/plugins/ltac/pptactic.ml index 7843faaef3..e2b8bcb5a9 100644 --- a/plugins/ltac/pptactic.ml +++ b/plugins/ltac/pptactic.ml @@ -17,7 +17,6 @@ open Constrexpr open Genarg open Geninterp open Stdarg -open 
Notation_gram open Tactypes open Locus open Genredexpr @@ -73,43 +72,43 @@ type 'a raw_extra_genarg_printer = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> raw_tactic_expr -> Pp.t) -> + (Environ.env -> Evd.evar_map -> entry_relative_level -> raw_tactic_expr -> Pp.t) -> 'a -> Pp.t type 'a glob_extra_genarg_printer = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> glob_tactic_expr -> Pp.t) -> + (Environ.env -> Evd.evar_map -> entry_relative_level -> glob_tactic_expr -> Pp.t) -> 'a -> Pp.t type 'a extra_genarg_printer = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> Val.t -> Pp.t) -> + (Environ.env -> Evd.evar_map -> entry_relative_level -> Val.t -> Pp.t) -> 'a -> Pp.t type 'a raw_extra_genarg_printer_with_level = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> raw_tactic_expr -> Pp.t) -> - tolerability -> 'a -> Pp.t + (Environ.env -> Evd.evar_map -> entry_relative_level -> raw_tactic_expr -> Pp.t) -> + entry_relative_level -> 'a -> Pp.t type 'a glob_extra_genarg_printer_with_level = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> glob_tactic_expr -> Pp.t) -> - tolerability -> 'a -> Pp.t + (Environ.env -> Evd.evar_map -> entry_relative_level -> glob_tactic_expr -> Pp.t) -> + entry_relative_level -> 'a -> Pp.t 
type 'a extra_genarg_printer_with_level = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> Val.t -> Pp.t) -> - tolerability -> 'a -> Pp.t + (Environ.env -> Evd.evar_map -> entry_relative_level -> Val.t -> Pp.t) -> + entry_relative_level -> 'a -> Pp.t let string_of_genarg_arg (ArgumentType arg) = let rec aux : type a b c. (a, b, c) genarg_type -> string = function @@ -294,7 +293,7 @@ let string_of_genarg_arg (ArgumentType arg) = let pr _ = str "_" in KerName.print key ++ spc() ++ pr_sequence pr l ++ str" (* Generic printer *)" - let pr_farg prtac arg = prtac (1, Any) (TacArg (CAst.make arg)) + let pr_farg prtac arg = prtac LevelSome (TacArg (CAst.make arg)) let is_genarg tag wit = let ArgT.Any tag = tag in @@ -314,35 +313,35 @@ let string_of_genarg_arg (ArgumentType arg) = let rec pr_any_arg : type l. (_ -> l generic_argument -> Pp.t) -> _ -> l generic_argument -> Pp.t = fun prtac symb arg -> match symb with - | Extend.Uentry tag when is_genarg tag (genarg_tag arg) -> prtac (1, Any) arg + | Extend.Uentry tag when is_genarg tag (genarg_tag arg) -> prtac LevelSome arg | Extend.Ulist1 s | Extend.Ulist0 s -> begin match get_list arg with - | None -> str "ltac:(" ++ prtac (1, Any) arg ++ str ")" + | None -> str "ltac:(" ++ prtac LevelSome arg ++ str ")" | Some l -> pr_sequence (pr_any_arg prtac s) l end | Extend.Ulist1sep (s, sep) | Extend.Ulist0sep (s, sep) -> begin match get_list arg with - | None -> str "ltac:(" ++ prtac (1, Any) arg ++ str ")" + | None -> str "ltac:(" ++ prtac LevelSome arg ++ str ")" | Some l -> prlist_with_sep (fun () -> str sep) (pr_any_arg prtac s) l end | Extend.Uopt s -> begin match get_opt arg with - | None -> str "ltac:(" ++ prtac (1, Any) arg ++ str ")" + | None -> str "ltac:(" ++ prtac LevelSome arg ++ str ")" | Some l -> pr_opt (pr_any_arg prtac s) l end | Extend.Uentry _ | Extend.Uentryl _ 
-> - str "ltac:(" ++ prtac (1, Any) arg ++ str ")" + str "ltac:(" ++ prtac LevelSome arg ++ str ")" let pr_targ prtac symb arg = match symb with | Extend.Uentry tag when is_genarg tag (ArgumentType wit_tactic) -> - prtac (1, Any) arg - | Extend.Uentryl (_, l) -> prtac (l, Any) arg + prtac LevelSome arg + | Extend.Uentryl (_, l) -> prtac LevelSome arg | _ -> match arg with | TacGeneric arg -> let pr l arg = prtac l (TacGeneric arg) in pr_any_arg pr symb arg - | _ -> str "ltac:(" ++ prtac (1, Any) arg ++ str ")" + | _ -> str "ltac:(" ++ prtac LevelSome arg ++ str ")" let pr_raw_extend_rec prtac = pr_extend_gen (pr_farg prtac) @@ -630,7 +629,7 @@ let pr_goal_selector ~toplevel s = let pr_then () = str ";" - let ltop = (5,E) + let ltop = LevelLe 5 let lseq = 4 let ltactical = 3 let lorelse = 2 @@ -645,13 +644,13 @@ let pr_goal_selector ~toplevel s = let ltatom = 1 let linfo = 5 - let level_of (n,p) = match p with E -> n | L -> n-1 | Prec n -> n | Any -> lseq + let level_of p = match p with LevelLe n -> n | LevelLt n -> n-1 | LevelSome -> lseq (** A printer for tactics that polymorphically works on the three "raw", "glob" and "typed" levels *) type 'a printer = { - pr_tactic : tolerability -> 'tacexpr -> Pp.t; + pr_tactic : entry_relative_level -> 'tacexpr -> Pp.t; pr_constr : Environ.env -> Evd.evar_map -> 'trm -> Pp.t; pr_lconstr : Environ.env -> Evd.evar_map -> 'trm -> Pp.t; pr_dconstr : Environ.env -> Evd.evar_map -> 'dtrm -> Pp.t; @@ -780,7 +779,7 @@ let pr_goal_selector ~toplevel s = hov 1 ( primitive (if b then if ev then "eassert" else "assert" else if ev then "eenough" else "enough") ++ pr_assumption (pr.pr_constr env sigma) (pr.pr_dconstr env sigma) (pr.pr_lconstr env sigma) ipat c ++ - pr_non_empty_arg (pr_by_tactic (pr.pr_tactic (ltactical,E))) tac + pr_non_empty_arg (pr_by_tactic (pr.pr_tactic (LevelLe ltactical))) tac ) | TacAssert (ev,_,None,ipat,c) -> hov 1 ( @@ -857,7 +856,7 @@ let pr_goal_selector ~toplevel s = pr_with_bindings_arg_full (pr.pr_dconstr 
env sigma) (pr.pr_dconstr env sigma) c) l ++ pr_non_empty_arg (pr_clauses (Some true) pr.pr_name) cl - ++ pr_non_empty_arg (pr_by_tactic (pr.pr_tactic (ltactical,E))) tac + ++ pr_non_empty_arg (pr_by_tactic (pr.pr_tactic (LevelLe ltactical))) tac ) | TacInversion (DepInversion (k,c,ids),hyp) -> hov 1 ( @@ -893,11 +892,11 @@ let pr_goal_selector ~toplevel s = let return (doc, l) = (tag tac doc, l) in let (strm, prec) = return (match tac with | TacAbstract (t,None) -> - keyword "abstract " ++ pr_tac (labstract,L) t, labstract + keyword "abstract " ++ pr_tac (LevelLt labstract) t, labstract | TacAbstract (t,Some s) -> hov 0 ( keyword "abstract" - ++ str" (" ++ pr_tac (labstract,L) t ++ str")" ++ spc () + ++ str" (" ++ pr_tac (LevelLt labstract) t ++ str")" ++ spc () ++ keyword "using" ++ spc () ++ pr_id s), labstract | TacLetIn (recflag,llc,u) -> @@ -906,7 +905,7 @@ let pr_goal_selector ~toplevel s = (hv 0 ( pr_let_clauses recflag (pr.pr_generic env sigma) (pr_tac ltop) llc ++ spc () ++ keyword "in" - ) ++ fnl () ++ pr_tac (llet,E) u), + ) ++ fnl () ++ pr_tac (LevelLe llet) u), llet | TacMatch (lz,t,lrul) -> hov 0 ( @@ -931,17 +930,17 @@ let pr_goal_selector ~toplevel s = hov 2 ( keyword "fun" ++ prlist pr_funvar lvar ++ str " =>" ++ spc () - ++ pr_tac (lfun,E) body), + ++ pr_tac (LevelLe lfun) body), lfun | TacThens (t,tl) -> hov 1 ( - pr_tac (lseq,E) t ++ pr_then () ++ spc () + pr_tac (LevelLe lseq) t ++ pr_then () ++ spc () ++ pr_seq_body (pr_opt_tactic (pr_tac ltop)) tl), lseq | TacThen (t1,t2) -> hov 1 ( - pr_tac (lseq,E) t1 ++ pr_then () ++ spc () - ++ pr_tac (lseq,L) t2), + pr_tac (LevelLe lseq) t1 ++ pr_then () ++ spc () + ++ pr_tac (LevelLt lseq) t2), lseq | TacDispatch tl -> pr_dispatch (pr_tac ltop) tl, lseq @@ -949,78 +948,78 @@ let pr_goal_selector ~toplevel s = pr_tac_extend (pr_tac ltop) tf t tr , lseq | TacThens3parts (t1,tf,t2,tl) -> hov 1 ( - pr_tac (lseq,E) t1 ++ pr_then () ++ spc () + pr_tac (LevelLe lseq) t1 ++ pr_then () ++ spc () ++ pr_then_gen 
(pr_tac ltop) tf t2 tl), lseq | TacTry t -> hov 1 ( - keyword "try" ++ spc () ++ pr_tac (ltactical,E) t), + keyword "try" ++ spc () ++ pr_tac (LevelLe ltactical) t), ltactical | TacDo (n,t) -> hov 1 ( str "do" ++ spc () ++ pr_or_var int n ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacTimeout (n,t) -> hov 1 ( keyword "timeout " ++ pr_or_var int n ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacTime (s,t) -> hov 1 ( keyword "time" ++ pr_opt qstring s ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacRepeat t -> hov 1 ( keyword "repeat" ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacProgress t -> hov 1 ( keyword "progress" ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacShowHyps t -> hov 1 ( keyword "infoH" ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacInfo t -> hov 1 ( keyword "info" ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), linfo | TacOr (t1,t2) -> hov 1 ( - pr_tac (lorelse,L) t1 ++ spc () + pr_tac (LevelLt lorelse) t1 ++ spc () ++ str "+" ++ brk (1,1) - ++ pr_tac (lorelse,E) t2), + ++ pr_tac (LevelLe lorelse) t2), lorelse | TacOnce t -> hov 1 ( keyword "once" ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacExactlyOnce t -> hov 1 ( keyword "exactly_once" ++ spc () - ++ pr_tac (ltactical,E) t), + ++ pr_tac (LevelLe ltactical) t), ltactical | TacIfThenCatch (t,tt,te) -> hov 1 ( - str"tryif" ++ spc() ++ pr_tac (ltactical,E) t ++ brk(1,1) ++ - str"then" ++ spc() ++ pr_tac (ltactical,E) tt ++ brk(1,1) ++ - str"else" ++ spc() ++ pr_tac (ltactical,E) te ++ brk(1,1)), + str"tryif" ++ spc() ++ pr_tac (LevelLe ltactical) t ++ brk(1,1) ++ + str"then" ++ spc() ++ pr_tac (LevelLe ltactical) tt ++ brk(1,1) ++ + str"else" ++ spc() ++ pr_tac (LevelLe 
ltactical) te ++ brk(1,1)), ltactical | TacOrelse (t1,t2) -> hov 1 ( - pr_tac (lorelse,L) t1 ++ spc () + pr_tac (LevelLt lorelse) t1 ++ spc () ++ str "||" ++ brk (1,1) - ++ pr_tac (lorelse,E) t2), + ++ pr_tac (LevelLe lorelse) t2), lorelse | TacFail (g,n,l) -> let arg = @@ -1042,7 +1041,7 @@ let pr_goal_selector ~toplevel s = | TacSolve tl -> keyword "solve" ++ spc () ++ pr_seq_body (pr_tac ltop) tl, llet | TacComplete t -> - pr_tac (lcomplete,E) t, lcomplete + pr_tac (LevelLe lcomplete) t, lcomplete | TacSelect (s, tac) -> pr_goal_selector ~toplevel:false s ++ spc () ++ pr_tac ltop tac, latom | TacId l -> keyword "idtac" ++ prlist (pr_arg (pr_message_token pr.pr_name)) l, latom @@ -1398,10 +1397,10 @@ let () = let () = let printer env sigma _ _ prtac = prtac env sigma in declare_extra_genarg_pprule_with_level wit_tactic printer printer printer - ltop (0,E) + ltop (LevelLe 0) let () = let pr_unit _env _sigma _ _ _ _ () = str "()" in let printer env sigma _ _ prtac = prtac env sigma in declare_extra_genarg_pprule_with_level wit_ltac printer printer pr_unit - ltop (0,E) + ltop (LevelLe 0) diff --git a/plugins/ltac/pptactic.mli b/plugins/ltac/pptactic.mli index 9cff3ea1eb..33db933168 100644 --- a/plugins/ltac/pptactic.mli +++ b/plugins/ltac/pptactic.mli @@ -16,7 +16,6 @@ open Geninterp open Names open Environ open Constrexpr -open Notation_gram open Genintern open Tacexpr open Tactypes @@ -29,43 +28,43 @@ type 'a raw_extra_genarg_printer = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> raw_tactic_expr -> Pp.t) -> + (Environ.env -> Evd.evar_map -> entry_relative_level -> raw_tactic_expr -> Pp.t) -> 'a -> Pp.t type 'a glob_extra_genarg_printer = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> - (Environ.env -> 
Evd.evar_map -> tolerability -> glob_tactic_expr -> Pp.t) -> + (Environ.env -> Evd.evar_map -> entry_relative_level -> glob_tactic_expr -> Pp.t) -> 'a -> Pp.t type 'a extra_genarg_printer = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> Val.t -> Pp.t) -> + (Environ.env -> Evd.evar_map -> entry_relative_level -> Val.t -> Pp.t) -> 'a -> Pp.t type 'a raw_extra_genarg_printer_with_level = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> constr_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> raw_tactic_expr -> Pp.t) -> - tolerability -> 'a -> Pp.t + (Environ.env -> Evd.evar_map -> entry_relative_level -> raw_tactic_expr -> Pp.t) -> + entry_relative_level -> 'a -> Pp.t type 'a glob_extra_genarg_printer_with_level = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> (Environ.env -> Evd.evar_map -> glob_constr_and_expr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> glob_tactic_expr -> Pp.t) -> - tolerability -> 'a -> Pp.t + (Environ.env -> Evd.evar_map -> entry_relative_level -> glob_tactic_expr -> Pp.t) -> + entry_relative_level -> 'a -> Pp.t type 'a extra_genarg_printer_with_level = Environ.env -> Evd.evar_map -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> (Environ.env -> Evd.evar_map -> EConstr.constr -> Pp.t) -> - (Environ.env -> Evd.evar_map -> tolerability -> Val.t -> Pp.t) -> - tolerability -> 'a -> Pp.t + (Environ.env -> Evd.evar_map -> entry_relative_level -> Val.t -> Pp.t) -> + entry_relative_level -> 'a -> Pp.t val declare_extra_genarg_pprule : ('a, 'b, 'c) genarg_type -> @@ -78,7 +77,7 @@ val declare_extra_genarg_pprule_with_level : 'a raw_extra_genarg_printer_with_level -> 'b glob_extra_genarg_printer_with_level -> 'c extra_genarg_printer_with_level 
-> - (* surroounded *) tolerability -> (* non-surroounded *) tolerability -> unit + (* surroounded *) entry_relative_level -> (* non-surroounded *) entry_relative_level -> unit val declare_extra_vernac_genarg_pprule : ('a, 'b, 'c) genarg_type -> @@ -140,7 +139,7 @@ val pr_ltac_constant : ltac_constant -> Pp.t val pr_raw_tactic : env -> Evd.evar_map -> raw_tactic_expr -> Pp.t -val pr_raw_tactic_level : env -> Evd.evar_map -> tolerability -> raw_tactic_expr -> Pp.t +val pr_raw_tactic_level : env -> Evd.evar_map -> entry_relative_level -> raw_tactic_expr -> Pp.t val pr_glob_tactic : env -> glob_tactic_expr -> Pp.t @@ -155,10 +154,10 @@ val pr_match_pattern : ('a -> Pp.t) -> 'a match_pattern -> Pp.t val pr_match_rule : bool -> ('a -> Pp.t) -> ('b -> Pp.t) -> ('b, 'a) match_rule -> Pp.t -val pr_value : tolerability -> Val.t -> Pp.t +val pr_value : entry_relative_level -> Val.t -> Pp.t -val ltop : tolerability +val ltop : entry_relative_level -val make_constr_printer : (env -> Evd.evar_map -> tolerability -> 'a -> Pp.t) -> +val make_constr_printer : (env -> Evd.evar_map -> entry_relative_level -> 'a -> Pp.t) -> 'a Genprint.top_printer diff --git a/plugins/ltac/tacentries.ml b/plugins/ltac/tacentries.ml index 13a2f3b8c0..8b4520947b 100644 --- a/plugins/ltac/tacentries.ml +++ b/plugins/ltac/tacentries.ml @@ -191,7 +191,7 @@ let add_tactic_entry (kn, ml, tg) state = in let prods = List.map map tg.tacgram_prods in let rules = make_rule mkact prods in - let r = ExtendRule (entry, None, (pos, [(None, None, [rules])])) in + let r = ExtendRule (entry, (pos, [(None, None, [rules])])) in ([r], state) let tactic_grammar = @@ -415,7 +415,7 @@ let create_ltac_quotation name cast (e, l) = in let action _ v _ _ _ loc = cast (Some loc, v) in let gram = (level, assoc, [Rule (rule, action)]) in - Pcoq.grammar_extend Pltac.tactic_arg None (None, [gram]) + Pcoq.grammar_extend Pltac.tactic_arg (None, [gram]) (** Command *) @@ -759,7 +759,7 @@ let argument_extend (type a b c) ~name (arg : (a, 
b, c) tactic_argument) = e | Vernacextend.Arg_rules rules -> let e = Pcoq.create_generic_entry Pcoq.utactic name (Genarg.rawwit wit) in - let () = Pcoq.grammar_extend e None (None, [(None, None, rules)]) in + let () = Pcoq.grammar_extend e (None, [(None, None, rules)]) in e in let (rpr, gpr, tpr) = arg.arg_printer in diff --git a/plugins/ltac/tactic_debug.ml b/plugins/ltac/tactic_debug.ml index 0e9465839a..392f9b2ffd 100644 --- a/plugins/ltac/tactic_debug.ml +++ b/plugins/ltac/tactic_debug.ml @@ -107,13 +107,29 @@ let db_initialize = let int_of_string s = try Proofview.NonLogical.return (int_of_string s) - with e -> Proofview.NonLogical.raise e + with e -> + let e = Exninfo.capture e in + Proofview.NonLogical.raise e let string_get s i = try Proofview.NonLogical.return (String.get s i) - with e -> Proofview.NonLogical.raise e + with e -> + let e = Exninfo.capture e in + Proofview.NonLogical.raise e + +let check_positive n = + try + if n < 0 then + raise (Invalid_argument "number must be positive") + else + Proofview.NonLogical.return () + with e -> + let e = Exninfo.capture e in + Proofview.NonLogical.raise e -let run_invalid_arg () = Proofview.NonLogical.raise (Invalid_argument "run_com") +let run_invalid_arg () = + let info = Exninfo.null in + Proofview.NonLogical.raise (Invalid_argument "run_com", info) (* Gives the number of steps or next breakpoint of a run command *) let run_com inst = @@ -125,7 +141,7 @@ let run_com inst = let s = String.sub inst i (String.length inst - i) in if inst.[0] >= '0' && inst.[0] <= '9' then int_of_string s >>= fun num -> - (if num<0 then run_invalid_arg () else return ()) >> + check_positive num >> (skip:=num) >> (skipped:=0) else breakpoint:=Some (possibly_unquote s) @@ -156,11 +172,11 @@ let rec prompt level = let open Proofview.NonLogical in Proofview.NonLogical.print_notice (fnl () ++ str "TcDebug (" ++ int level ++ str ") > ") >> if Util.(!batch) then return (DebugOn (level+1)) else - let exit = (skip:=0) >> (skipped:=0) >> 
raise Sys.Break in + let exit = (skip:=0) >> (skipped:=0) >> raise (Sys.Break, Exninfo.null) in Proofview.NonLogical.catch Proofview.NonLogical.read_line begin function (e, info) -> match e with | End_of_file -> exit - | e -> raise ~info e + | e -> raise (e, info) end >>= fun inst -> match inst with @@ -176,7 +192,7 @@ let rec prompt level = Proofview.NonLogical.catch (run_com inst >> runtrue >> return (DebugOn (level+1))) begin function (e, info) -> match e with | Failure _ | Invalid_argument _ -> prompt level - | e -> raise ~info e + | e -> raise (e, info) end end diff --git a/plugins/ltac/tactic_matching.ml b/plugins/ltac/tactic_matching.ml index eabfe2f540..2d5e9e53ce 100644 --- a/plugins/ltac/tactic_matching.ml +++ b/plugins/ltac/tactic_matching.ml @@ -227,11 +227,9 @@ module PatternMatching (E:StaticEnvironment) = struct substitution. *) let wildcard_match_term = return - (** [pattern_match_term refresh pat term lhs] returns the possible - matchings of [term] with the pattern [pat => lhs]. If refresh is - true, refreshes the universes of [term]. *) - let pattern_match_term refresh pat term lhs = -(* let term = if refresh then Termops.refresh_universes_strict term else term in *) + (** [pattern_match_term pat term lhs] returns the possible + matchings of [term] with the pattern [pat => lhs]. *) + let pattern_match_term pat term lhs = match pat with | Term p -> begin @@ -262,7 +260,7 @@ module PatternMatching (E:StaticEnvironment) = struct matching rule [rule]. *) let rule_match_term term = function | All lhs -> wildcard_match_term lhs - | Pat ([],pat,lhs) -> pattern_match_term false pat term lhs + | Pat ([],pat,lhs) -> pattern_match_term pat term lhs | Pat _ -> (* Rules with hypotheses, only work in match goal. 
*) fail @@ -286,8 +284,7 @@ module PatternMatching (E:StaticEnvironment) = struct let hyp_match_type hypname pat hyps = pick hyps >>= fun decl -> let id = NamedDecl.get_id decl in - let refresh = is_local_def decl in - pattern_match_term refresh pat (NamedDecl.get_type decl) () <*> + pattern_match_term pat (NamedDecl.get_type decl) () <*> put_terms (id_map_try_add_name hypname (EConstr.mkVar id) empty_term_subst) <*> return id @@ -298,8 +295,8 @@ module PatternMatching (E:StaticEnvironment) = struct let hyp_match_body_and_type hypname bodypat typepat hyps = pick hyps >>= function | LocalDef (id,body,hyp) -> - pattern_match_term false bodypat body () <*> - pattern_match_term true typepat hyp () <*> + pattern_match_term bodypat body () <*> + pattern_match_term typepat hyp () <*> put_terms (id_map_try_add_name hypname (EConstr.mkVar id.binder_name) empty_term_subst) <*> return id.binder_name | LocalAssum (id,hyp) -> fail @@ -337,7 +334,7 @@ module PatternMatching (E:StaticEnvironment) = struct (* the rules are applied from the topmost one (in the concrete syntax) to the bottommost. 
*) let hyppats = List.rev hyppats in - pattern_match_term false conclpat concl () <*> + pattern_match_term conclpat concl () <*> hyp_pattern_list_match hyppats hyps lhs (** [match_goal hyps concl rules] matches the goal [hyps|-concl] diff --git a/plugins/ssr/ssrparser.mlg b/plugins/ssr/ssrparser.mlg index 21b832a326..3f67d55e73 100644 --- a/plugins/ssr/ssrparser.mlg +++ b/plugins/ssr/ssrparser.mlg @@ -90,7 +90,7 @@ let cast_arg wit v = Taccoerce.Value.cast (Genarg.topwit wit) v * we thus save the lexer to restore it at the end of the file *) let frozen_lexer = CLexer.get_keyword_state () ;; -let tacltop = (5,Notation_gram.E) +let tacltop = (LevelLe 5) let pr_ssrtacarg env sigma _ _ prt = prt env sigma tacltop diff --git a/plugins/ssr/ssrparser.mli b/plugins/ssr/ssrparser.mli index e6b1706b41..53c895f9d9 100644 --- a/plugins/ssr/ssrparser.mli +++ b/plugins/ssr/ssrparser.mli @@ -15,12 +15,12 @@ open Ltac_plugin val ssrtacarg : Tacexpr.raw_tactic_expr Pcoq.Entry.t val wit_ssrtacarg : (Tacexpr.raw_tactic_expr, Tacexpr.glob_tactic_expr, Geninterp.Val.t) Genarg.genarg_type val pr_ssrtacarg : Environ.env -> Evd.evar_map -> 'a -> 'b -> - (Environ.env -> Evd.evar_map -> Notation_gram.tolerability -> 'c) -> 'c + (Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> 'c) -> 'c val ssrtclarg : Tacexpr.raw_tactic_expr Pcoq.Entry.t val wit_ssrtclarg : (Tacexpr.raw_tactic_expr, Tacexpr.glob_tactic_expr, Geninterp.Val.t) Genarg.genarg_type val pr_ssrtclarg : Environ.env -> Evd.evar_map -> 'a -> 'b -> - (Environ.env -> Evd.evar_map -> Notation_gram.tolerability -> 'c -> 'd) -> 'c -> 'd + (Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> 'c -> 'd) -> 'c -> 'd val add_genarg : string -> (Environ.env -> Evd.evar_map -> 'a -> Pp.t) -> 'a Genarg.uniform_genarg_type diff --git a/plugins/ssrmatching/ssrmatching.ml b/plugins/ssrmatching/ssrmatching.ml index e45bae19ca..96f8cb12ba 100644 --- a/plugins/ssrmatching/ssrmatching.ml +++ 
b/plugins/ssrmatching/ssrmatching.ml @@ -857,7 +857,7 @@ let glob_cpattern gs p = | k, (v, Some t), _ as orig -> if k = 'x' then glob_ssrterm gs ('(', (v, Some t), None) else match t.CAst.v with - | CNotation((InConstrEntrySomeLevel,"( _ in _ )"), ([t1; t2], [], [], [])) -> + | CNotation(_,(InConstrEntrySomeLevel,"( _ in _ )"), ([t1; t2], [], [], [])) -> (try match glob t1, glob t2 with | (r1, None), (r2, None) -> encode k "In" [r1;r2] | (r1, Some _), (r2, Some _) when isCVar t1 -> @@ -865,11 +865,11 @@ let glob_cpattern gs p = | (r1, Some _), (r2, Some _) -> encode k "In" [r1; r2] | _ -> CErrors.anomaly (str"where are we?.") with _ when isCVar t1 -> encode k "In" [bind_in t1 t2]) - | CNotation((InConstrEntrySomeLevel,"( _ in _ in _ )"), ([t1; t2; t3], [], [], [])) -> + | CNotation(_,(InConstrEntrySomeLevel,"( _ in _ in _ )"), ([t1; t2; t3], [], [], [])) -> check_var t2; encode k "In" [fst (glob t1); bind_in t2 t3] - | CNotation((InConstrEntrySomeLevel,"( _ as _ )"), ([t1; t2], [], [], [])) -> + | CNotation(_,(InConstrEntrySomeLevel,"( _ as _ )"), ([t1; t2], [], [], [])) -> encode k "As" [fst (glob t1); fst (glob t2)] - | CNotation((InConstrEntrySomeLevel,"( _ as _ in _ )"), ([t1; t2; t3], [], [], [])) -> + | CNotation(_,(InConstrEntrySomeLevel,"( _ as _ in _ )"), ([t1; t2; t3], [], [], [])) -> check_var t2; encode k "As" [fst (glob t1); bind_in t2 t3] | _ -> glob_ssrterm gs orig ;; diff --git a/pretyping/cases.ml b/pretyping/cases.ml index 29d6726262..55c1f41c2c 100644 --- a/pretyping/cases.ml +++ b/pretyping/cases.ml @@ -1817,6 +1817,7 @@ let build_inversion_problem ~program_mode loc env sigma tms t = List.rev_append patl patl',acc_sign,acc | (t, NotInd (bo,typ)) :: tms -> let pat,acc = make_patvar t acc in + let typ = lift n typ in let d = LocalAssum (annotR (alias_of_pat pat),typ) in let patl,acc_sign,acc = aux (n+1) (snd (push_rel ~hypnaming:KeepUserNameAndRenameExistingButSectionNames sigma d env)) (d::acc_sign) tms acc in pat::patl,acc_sign,acc in @@ 
-2126,11 +2127,6 @@ let eq_id avoid id = let hid' = next_ident_away hid avoid in hid' -let papp sigma gr args = - let evdref = ref sigma in - let ans = papp evdref gr args in - !evdref, ans - let mk_eq sigma typ x y = papp sigma coq_eq_ind [| typ; x ; y |] let mk_eq_refl sigma typ x = papp sigma coq_eq_refl [| typ; x |] let mk_JMeq sigma typ x typ' y = diff --git a/pretyping/coercion.ml b/pretyping/coercion.ml index c4aa3479bf..62bc27cd3c 100644 --- a/pretyping/coercion.ml +++ b/pretyping/coercion.ml @@ -92,19 +92,23 @@ let inh_pattern_coerce_to ?loc env pat ind1 ind2 = open Program -let make_existential ?loc ?(opaque = not (get_proofs_transparency ())) na env evdref c = +let make_existential ?loc ?(opaque = not (get_proofs_transparency ())) na env sigma c = let src = Loc.tag ?loc (Evar_kinds.QuestionMark { Evar_kinds.default_question_mark with Evar_kinds.qm_obligation=Evar_kinds.Define opaque; Evar_kinds.qm_name=na; }) in - let evd, v = Evarutil.new_evar env !evdref ~src c in - let evd = Evd.set_obligation_evar evd (fst (destEvar evd v)) in - evdref := evd; - v - -let app_opt env evdref f t = - whd_betaiota !evdref (app_opt f t) + let sigma, v = Evarutil.new_evar env sigma ~src c in + let sigma = Evd.set_obligation_evar sigma (fst (destEvar sigma v)) in + sigma, v + +let app_opt env sigma f t = + let sigma, t = + match f with + | None -> sigma, t + | Some f -> f sigma t + in + sigma, whd_betaiota sigma t let pair_of_array a = (a.(0), a.(1)) @@ -125,8 +129,8 @@ let disc_subset sigma x = exception NoSubtacCoercion -let hnf env evd c = whd_all env evd c -let hnf_nodelta env evd c = whd_betaiota evd c +let hnf env sigma c = whd_all env sigma c +let hnf_nodelta env sigma c = whd_betaiota sigma c let lift_args n sign = let rec liftrec k = function @@ -135,222 +139,219 @@ let lift_args n sign = in liftrec (List.length sign) sign -let coerce ?loc env evdref (x : EConstr.constr) (y : EConstr.constr) - : (EConstr.constr -> EConstr.constr) option - = +let coerce ?loc env 
sigma (x : EConstr.constr) (y : EConstr.constr) + : evar_map * (evar_map -> EConstr.constr -> evar_map * EConstr.constr) option + = let open Context.Rel.Declaration in - let rec coerce_unify env x y = - let x = hnf env !evdref x and y = hnf env !evdref y in - try - evdref := Evarconv.unify_leq_delay env !evdref x y; - None - with UnableToUnify _ -> coerce' env x y - and coerce' env x y : (EConstr.constr -> EConstr.constr) option = - let subco () = subset_coerce env evdref x y in + let rec coerce_unify env sigma x y = + let x = hnf env sigma x and y = hnf env sigma y in + try + (Evarconv.unify_leq_delay env sigma x y, None) + with UnableToUnify _ -> coerce' env sigma x y + and coerce' env sigma x y : evar_map * (evar_map -> EConstr.constr -> evar_map * EConstr.constr) option = + let subco sigma = subset_coerce env sigma x y in let dest_prod c = - match Reductionops.splay_prod_n env (!evdref) 1 c with + match Reductionops.splay_prod_n env sigma 1 c with | [LocalAssum (na,t) | LocalDef (na,_,t)], c -> (na, t), c | _ -> raise NoSubtacCoercion in - let coerce_application typ typ' c c' l l' = + let coerce_application sigma typ typ' c c' l l' = let len = Array.length l in - let rec aux tele typ typ' i co = + let rec aux sigma tele typ typ' i co = if i < len then let hdx = l.(i) and hdy = l'.(i) in - try evdref := unify_leq_delay env !evdref hdx hdy; - let (n, eqT), restT = dest_prod typ in - let (n', eqT'), restT' = dest_prod typ' in - aux (hdx :: tele) (subst1 hdx restT) (subst1 hdy restT') (succ i) co - with UnableToUnify _ -> - let (n, eqT), restT = dest_prod typ in - let (n', eqT'), restT' = dest_prod typ' in - let () = - try evdref := unify_leq_delay env !evdref eqT eqT' - with UnableToUnify _ -> raise NoSubtacCoercion - in - (* Disallow equalities on arities *) - if Reductionops.is_arity env !evdref eqT then raise NoSubtacCoercion; - let restargs = lift_args 1 - (List.rev (Array.to_list (Array.sub l (succ i) (len - (succ i))))) - in - let args = List.rev (restargs @ 
mkRel 1 :: List.map (lift 1) tele) in - let pred = mkLambda (n, eqT, applist (lift 1 c, args)) in - let eq = papp evdref coq_eq_ind [| eqT; hdx; hdy |] in - let evar = make_existential ?loc n.binder_name env evdref eq in - let eq_app x = papp evdref coq_eq_rect - [| eqT; hdx; pred; x; hdy; evar|] - in - aux (hdy :: tele) (subst1 hdx restT) - (subst1 hdy restT') (succ i) (fun x -> eq_app (co x)) - else Some (fun x -> - let term = co x in - let sigma, term = Typing.solve_evars env !evdref term in - evdref := sigma; term) + try + let sigma = unify_leq_delay env sigma hdx hdy in + let (n, eqT), restT = dest_prod typ in + let (n', eqT'), restT' = dest_prod typ' in + aux sigma (hdx :: tele) (subst1 hdx restT) (subst1 hdy restT') (succ i) co + with UnableToUnify _ -> + let (n, eqT), restT = dest_prod typ in + let (n', eqT'), restT' = dest_prod typ' in + let sigma = + try + unify_leq_delay env sigma eqT eqT' + with UnableToUnify _ -> raise NoSubtacCoercion + in + (* Disallow equalities on arities *) + if Reductionops.is_arity env sigma eqT then raise NoSubtacCoercion; + let restargs = lift_args 1 + (List.rev (Array.to_list (Array.sub l (succ i) (len - (succ i))))) + in + let args = List.rev (restargs @ mkRel 1 :: List.map (lift 1) tele) in + let pred = mkLambda (n, eqT, applist (lift 1 c, args)) in + let sigma, eq = papp sigma coq_eq_ind [| eqT; hdx; hdy |] in + let sigma, evar = make_existential ?loc n.binder_name env sigma eq in + let eq_app sigma x = papp sigma coq_eq_rect + [| eqT; hdx; pred; x; hdy; evar|] + in + aux sigma (hdy :: tele) (subst1 hdx restT) + (subst1 hdy restT') (succ i) (fun sigma x -> let sigma, x = co sigma x in eq_app sigma x) + else + sigma, Some (fun sigma x -> + let sigma, term = co sigma x in + let sigma, term = Typing.solve_evars env sigma term in + sigma, term) in - if isEvar !evdref c || isEvar !evdref c' || not (Program.is_program_generalized_coercion ()) then - (* Second-order unification needed. 
*) - raise NoSubtacCoercion; - aux [] typ typ' 0 (fun x -> x) + if isEvar sigma c || isEvar sigma c' || not (Program.is_program_generalized_coercion ()) then + (* Second-order unification needed. *) + raise NoSubtacCoercion; + aux sigma [] typ typ' 0 (fun sigma x -> sigma, x) in - match (EConstr.kind !evdref x, EConstr.kind !evdref y) with - | Sort s, Sort s' -> - (match ESorts.kind !evdref s, ESorts.kind !evdref s' with - | Prop, Prop | Set, Set -> None - | (Prop | Set), Type _ -> None - | Type x, Type y when Univ.Universe.equal x y -> None (* false *) - | _ -> subco ()) - | Prod (name, a, b), Prod (name', a', b') -> - let name' = - {name' with - binder_name = - Name (Namegen.next_ident_away - Namegen.default_dependent_ident (Termops.vars_of_env env))} - in - let env' = push_rel (LocalAssum (name', a')) env in - let c1 = coerce_unify env' (lift 1 a') (lift 1 a) in - (* env, x : a' |- c1 : lift 1 a' > lift 1 a *) - let coec1 = app_opt env' evdref c1 (mkRel 1) in - (* env, x : a' |- c1[x] : lift 1 a *) - let c2 = coerce_unify env' (subst1 coec1 (liftn 1 2 b)) b' in - (* env, x : a' |- c2 : b[c1[x]/x]] > b' *) - (match c1, c2 with - | None, None -> None - | _, _ -> - Some - (fun f -> - mkLambda (name', a', - app_opt env' evdref c2 - (mkApp (lift 1 f, [| coec1 |]))))) - - | App (c, l), App (c', l') -> - (match EConstr.kind !evdref c, EConstr.kind !evdref c' with - Ind (i, u), Ind (i', u') -> (* Inductive types *) - let len = Array.length l in - let sigT = delayed_force sigT_typ in - let prod = delayed_force prod_typ in - (* Sigma types *) - if Int.equal len (Array.length l') && Int.equal len 2 && eq_ind i i' - && (eq_ind i (destIndRef sigT) || eq_ind i (destIndRef prod)) - then - if eq_ind i (destIndRef sigT) - then - begin - let (a, pb), (a', pb') = - pair_of_array l, pair_of_array l' - in - let c1 = coerce_unify env a a' in - let remove_head a c = - match EConstr.kind !evdref c with - | Lambda (n, t, t') -> c, t' - | Evar (k, args) -> - let (evs, t) = 
Evardefine.define_evar_as_lambda env !evdref (k,args) in - evdref := evs; - let (n, dom, rng) = destLambda !evdref t in - if isEvar !evdref dom then - let (domk, args) = destEvar !evdref dom in - evdref := define domk a !evdref; - else (); - t, rng - | _ -> raise NoSubtacCoercion - in - let (pb, b), (pb', b') = remove_head a pb, remove_head a' pb' in - let ra = Retyping.relevance_of_type env !evdref a in - let env' = push_rel - (LocalAssum (make_annot (Name Namegen.default_dependent_ident) ra, a)) - env - in - let c2 = coerce_unify env' b b' in - match c1, c2 with - | None, None -> None - | _, _ -> - Some - (fun x -> - let x, y = - app_opt env' evdref c1 (papp evdref sigT_proj1 - [| a; pb; x |]), - app_opt env' evdref c2 (papp evdref sigT_proj2 - [| a; pb; x |]) - in - papp evdref sigT_intro [| a'; pb'; x ; y |]) - end - else - begin - let (a, b), (a', b') = - pair_of_array l, pair_of_array l' - in - let c1 = coerce_unify env a a' in - let c2 = coerce_unify env b b' in - match c1, c2 with - | None, None -> None - | _, _ -> - Some - (fun x -> - let x, y = - app_opt env evdref c1 (papp evdref prod_proj1 - [| a; b; x |]), - app_opt env evdref c2 (papp evdref prod_proj2 - [| a; b; x |]) - in - papp evdref prod_intro [| a'; b'; x ; y |]) - end - else - if eq_ind i i' && Int.equal len (Array.length l') then - let evm = !evdref in - (try subco () - with NoSubtacCoercion -> - let typ = Typing.unsafe_type_of env evm c in - let typ' = Typing.unsafe_type_of env evm c' in - coerce_application typ typ' c c' l l') - else - subco () - | x, y when EConstr.eq_constr !evdref c c' -> - if Int.equal (Array.length l) (Array.length l') then - let evm = !evdref in - let lam_type = Typing.unsafe_type_of env evm c in - let lam_type' = Typing.unsafe_type_of env evm c' in - coerce_application lam_type lam_type' c c' l l' - else subco () - | _ -> subco ()) - | _, _ -> subco () - - and subset_coerce env evdref x y = - match disc_subset !evdref x with - Some (u, p) -> - let c = coerce_unify env 
u y in - let f x = - app_opt env evdref c (papp evdref sig_proj1 [| u; p; x |]) - in Some f + match (EConstr.kind sigma x, EConstr.kind sigma y) with + | Sort s, Sort s' -> + (match ESorts.kind sigma s, ESorts.kind sigma s' with + | Prop, Prop | Set, Set -> sigma, None + | (Prop | Set), Type _ -> sigma, None + | Type x, Type y when Univ.Universe.equal x y -> sigma, None (* false *) + | _ -> subco sigma) + | Prod (name, a, b), Prod (name', a', b') -> + let name' = + {name' with + binder_name = + Name (Namegen.next_ident_away + Namegen.default_dependent_ident (Termops.vars_of_env env))} + in + let env' = push_rel (LocalAssum (name', a')) env in + let sigma, c1 = coerce_unify env' sigma (lift 1 a') (lift 1 a) in + (* env, x : a' |- c1 : lift 1 a' > lift 1 a *) + let sigma, coec1 = app_opt env' sigma c1 (mkRel 1) in + (* env, x : a' |- c1[x] : lift 1 a *) + let sigma, c2 = coerce_unify env' sigma (subst1 coec1 (liftn 1 2 b)) b' in + (* env, x : a' |- c2 : b[c1[x]/x]] > b' *) + (match c1, c2 with + | None, None -> sigma, None + | _, _ -> + sigma, + Some (fun sigma f -> + let sigma, t = app_opt env' sigma c2 + (mkApp (lift 1 f, [| coec1 |])) in + sigma, mkLambda (name', a', t))) + + | App (c, l), App (c', l') -> + (match EConstr.kind sigma c, EConstr.kind sigma c' with + Ind (i, u), Ind (i', u') -> (* Inductive types *) + let len = Array.length l in + let sigT = delayed_force sigT_typ in + let prod = delayed_force prod_typ in + (* Sigma types *) + if Int.equal len (Array.length l') && Int.equal len 2 && eq_ind i i' + && (eq_ind i (destIndRef sigT) || eq_ind i (destIndRef prod)) + then + if eq_ind i (destIndRef sigT) + then + begin + let (a, pb), (a', pb') = + pair_of_array l, pair_of_array l' + in + let sigma, c1 = coerce_unify env sigma a a' in + let remove_head sigma a c = + match EConstr.kind sigma c with + | Lambda (n, t, t') -> sigma, (c, t') + | Evar (k, args) -> + let (sigma, t) = Evardefine.define_evar_as_lambda env sigma (k,args) in + let (n, dom, rng) = 
destLambda sigma t in + let sigma = + if isEvar sigma dom then + let (domk, args) = destEvar sigma dom in + define domk a sigma + else sigma + in + sigma, (t, rng) + | _ -> raise NoSubtacCoercion + in + let sigma, (pb, b) = remove_head sigma a pb in + let sigma, (pb', b') = remove_head sigma a' pb' in + let ra = Retyping.relevance_of_type env sigma a in + let env' = push_rel + (LocalAssum (make_annot (Name Namegen.default_dependent_ident) ra, a)) + env + in + let sigma, c2 = coerce_unify env' sigma b b' in + match c1, c2 with + | None, None -> sigma, None + | _, _ -> + sigma, + Some (fun sigma x -> + let sigma, t1 = papp sigma sigT_proj1 [| a; pb; x |] in + let sigma, t2 = papp sigma sigT_proj2 [| a; pb; x |] in + let sigma, x = app_opt env' sigma c1 t1 in + let sigma, y = app_opt env' sigma c2 t2 in + papp sigma sigT_intro [| a'; pb'; x ; y |]) + end + else + begin + let (a, b), (a', b') = + pair_of_array l, pair_of_array l' + in + let sigma, c1 = coerce_unify env sigma a a' in + let sigma, c2 = coerce_unify env sigma b b' in + match c1, c2 with + | None, None -> sigma, None + | _, _ -> + sigma, + Some (fun sigma x -> + let sigma, t1 = papp sigma prod_proj1 [| a; b; x |] in + let sigma, t2 = papp sigma prod_proj2 [| a; b; x |] in + let sigma, x = app_opt env sigma c1 t1 in + let sigma, y = app_opt env sigma c2 t2 in + papp sigma prod_intro [| a'; b'; x ; y |]) + end + else + if eq_ind i i' && Int.equal len (Array.length l') then + (try subco sigma + with NoSubtacCoercion -> + let sigma, typ = Typing.type_of env sigma c in + let sigma, typ' = Typing.type_of env sigma c' in + coerce_application sigma typ typ' c c' l l') + else + subco sigma + | x, y when EConstr.eq_constr sigma c c' -> + if Int.equal (Array.length l) (Array.length l') then + let sigma, lam_type = Typing.type_of env sigma c in + let sigma, lam_type' = Typing.type_of env sigma c' in + coerce_application sigma lam_type lam_type' c c' l l' + else subco sigma + | _ -> subco sigma) + | _, _ -> subco sigma 
+ + and subset_coerce env sigma x y = + match disc_subset sigma x with + Some (u, p) -> + let sigma, c = coerce_unify env sigma u y in + let f sigma x = + let sigma, t = papp sigma sig_proj1 [| u; p; x |] in + app_opt env sigma c t + in sigma, Some f | None -> - match disc_subset !evdref y with + match disc_subset sigma y with Some (u, p) -> - let c = coerce_unify env x u in - Some - (fun x -> - let cx = app_opt env evdref c x in - let evar = make_existential ?loc Anonymous env evdref (mkApp (p, [| cx |])) - in - (papp evdref sig_intro [| u; p; cx; evar |])) - | None -> - raise NoSubtacCoercion - in coerce_unify env x y - -let app_coercion env evdref coercion v = + let sigma, c = coerce_unify env sigma x u in + sigma, Some + (fun sigma x -> + let sigma, cx = app_opt env sigma c x in + let sigma, evar = make_existential ?loc Anonymous env sigma (mkApp (p, [| cx |])) + in + (papp sigma sig_intro [| u; p; cx; evar |])) + | None -> + raise NoSubtacCoercion + in coerce_unify env sigma x y + +let app_coercion env sigma coercion v = match coercion with - | None -> v + | None -> sigma, v | Some f -> - let sigma, v' = Typing.solve_evars env !evdref (f v) in - evdref := sigma; - whd_betaiota !evdref v' + let sigma, v' = f sigma v in + let sigma, v' = Typing.solve_evars env sigma v' in + sigma, whd_betaiota sigma v' -let coerce_itf ?loc env evd v t c1 = - let evdref = ref evd in - let coercion = coerce ?loc env evdref t c1 in - let t = app_coercion env evdref coercion v in - !evdref, t +let coerce_itf ?loc env sigma v t c1 = + let sigma, coercion = coerce ?loc env sigma t c1 in + app_coercion env sigma coercion v -let saturate_evd env evd = +let saturate_evd env sigma = Typeclasses.resolve_typeclasses - ~filter:Typeclasses.no_goals ~split:true ~fail:false env evd + ~filter:Typeclasses.no_goals ~split:true ~fail:false env sigma type coercion_trace = | IdCoe @@ -388,7 +389,7 @@ let rec reapply_coercions sigma trace c = match trace with (* Apply coercion path from p to hj; raise 
NoCoercion if not applicable *) let apply_coercion env sigma p hj typ_cl = - let j,t,trace,evd = + let j,t,trace,sigma = List.fold_left (fun (ja,typ_cl,trace,sigma) i -> let isid = i.coe_is_identity in @@ -415,129 +416,126 @@ let apply_coercion env sigma p hj typ_cl = in jres, jres.uj_type, trace, sigma) (hj,typ_cl,IdCoe,sigma) p - in evd, j, trace + in sigma, j, trace -let mu env evdref t = +let mu env sigma t = let rec aux v = - let v' = hnf env !evdref v in - match disc_subset !evdref v' with + let v' = hnf env sigma v in + match disc_subset sigma v' with | Some (u, p) -> - let f, ct, trace = aux u in - let p = hnf_nodelta env !evdref p in + let sigma, (f, ct, trace) = aux u in + let p = hnf_nodelta env sigma p in let p1 = delayed_force sig_proj1 in - let evd, p1 = Evarutil.new_global !evdref p1 in - evdref := evd; - (Some (fun x -> - app_opt env evdref + let sigma, p1 = Evarutil.new_global sigma p1 in + sigma, + (Some (fun sigma x -> + app_opt env sigma f (mkApp (p1, [| u; p; x |]))), ct, Coe {head=p1; args=[u;p]; previous=trace}) - | None -> (None, v, IdCoe) + | None -> sigma, (None, v, IdCoe) in aux t (* Try to coerce to a funclass; raise NoCoercion if not possible *) -let inh_app_fun_core ~program_mode env evd j = - let t = whd_all env evd j.uj_type in - match EConstr.kind evd t with - | Prod _ -> (evd,j,IdCoe) +let inh_app_fun_core ~program_mode env sigma j = + let t = whd_all env sigma j.uj_type in + match EConstr.kind sigma t with + | Prod _ -> (sigma,j,IdCoe) | Evar ev -> - let (evd',t) = Evardefine.define_evar_as_product env evd ev in - (evd',{ uj_val = j.uj_val; uj_type = t },IdCoe) + let (sigma,t) = Evardefine.define_evar_as_product env sigma ev in + (sigma,{ uj_val = j.uj_val; uj_type = t },IdCoe) | _ -> try let t,p = - lookup_path_to_fun_from env evd j.uj_type in - apply_coercion env evd p j t + lookup_path_to_fun_from env sigma j.uj_type in + apply_coercion env sigma p j t with Not_found | NoCoercion -> if program_mode then try - let evdref = ref 
evd in - let coercef, t, trace = mu env evdref t in - let res = { uj_val = app_opt env evdref coercef j.uj_val; uj_type = t } in - (!evdref, res, trace) + let sigma, (coercef, t, trace) = mu env sigma t in + let sigma, uj_val = app_opt env sigma coercef j.uj_val in + let res = { uj_val ; uj_type = t } in + (sigma, res, trace) with NoSubtacCoercion | NoCoercion -> - (evd,j,IdCoe) + (sigma,j,IdCoe) else raise NoCoercion (* Try to coerce to a funclass; returns [j] if no coercion is applicable *) -let inh_app_fun ~program_mode resolve_tc env evd j = - try inh_app_fun_core ~program_mode env evd j +let inh_app_fun ~program_mode resolve_tc env sigma j = + try inh_app_fun_core ~program_mode env sigma j with | NoCoercion when not resolve_tc - || not (get_use_typeclasses_for_conversion ()) -> (evd, j, IdCoe) + || not (get_use_typeclasses_for_conversion ()) -> (sigma, j, IdCoe) | NoCoercion -> - try inh_app_fun_core ~program_mode env (saturate_evd env evd) j - with NoCoercion -> (evd, j, IdCoe) + try inh_app_fun_core ~program_mode env (saturate_evd env sigma) j + with NoCoercion -> (sigma, j, IdCoe) let type_judgment env sigma j = match EConstr.kind sigma (whd_all env sigma j.uj_type) with | Sort s -> {utj_val = j.uj_val; utj_type = ESorts.kind sigma s } | _ -> error_not_a_type env sigma j -let inh_tosort_force ?loc env evd j = +let inh_tosort_force ?loc env sigma j = try - let t,p = lookup_path_to_sort_from env evd j.uj_type in - let evd,j1,_trace = apply_coercion env evd p j t in - let j2 = Environ.on_judgment_type (whd_evar evd) j1 in - (evd,type_judgment env evd j2) + let t,p = lookup_path_to_sort_from env sigma j.uj_type in + let sigma,j1,_trace = apply_coercion env sigma p j t in + let j2 = Environ.on_judgment_type (whd_evar sigma) j1 in + (sigma,type_judgment env sigma j2) with Not_found | NoCoercion -> - error_not_a_type ?loc env evd j + error_not_a_type ?loc env sigma j -let inh_coerce_to_sort ?loc env evd j = - let typ = whd_all env evd j.uj_type in - match 
EConstr.kind evd typ with - | Sort s -> (evd,{ utj_val = j.uj_val; utj_type = ESorts.kind evd s }) +let inh_coerce_to_sort ?loc env sigma j = + let typ = whd_all env sigma j.uj_type in + match EConstr.kind sigma typ with + | Sort s -> (sigma,{ utj_val = j.uj_val; utj_type = ESorts.kind sigma s }) | Evar ev -> - let (evd',s) = Evardefine.define_evar_as_sort env evd ev in - (evd',{ utj_val = j.uj_val; utj_type = s }) + let (sigma,s) = Evardefine.define_evar_as_sort env sigma ev in + (sigma,{ utj_val = j.uj_val; utj_type = s }) | _ -> - inh_tosort_force ?loc env evd j + inh_tosort_force ?loc env sigma j -let inh_coerce_to_base ?loc ~program_mode env evd j = +let inh_coerce_to_base ?loc ~program_mode env sigma j = if program_mode then - let evdref = ref evd in - let ct, typ', trace = mu env evdref j.uj_type in - let res = - { uj_val = (app_coercion env evdref ct j.uj_val); - uj_type = typ' } - in !evdref, res - else (evd, j) - -let inh_coerce_to_prod ?loc ~program_mode env evd t = + let sigma, (ct, typ', _trace) = mu env sigma j.uj_type in + let sigma, uj_val = app_coercion env sigma ct j.uj_val in + let res = { uj_val; uj_type = typ' } in + sigma, res + else (sigma, j) + +let inh_coerce_to_prod ?loc ~program_mode env sigma t = if program_mode then - let evdref = ref evd in - let _, typ', _trace = mu env evdref t in - !evdref, typ' - else (evd, t) + let sigma, (_, typ', _trace) = mu env sigma t in + sigma, typ' + else (sigma, t) -let inh_coerce_to_fail flags env evd rigidonly v t c1 = +let inh_coerce_to_fail flags env sigma rigidonly v t c1 = if rigidonly && not (Heads.is_rigid env (EConstr.Unsafe.to_constr c1) && Heads.is_rigid env (EConstr.Unsafe.to_constr t)) then raise NoCoercion else - let evd, v', t', trace = + let sigma, v', t', trace = try - let t2,t1,p = lookup_path_between env evd (t,c1) in - let evd,j,trace = - apply_coercion env evd p + let t2,t1,p = lookup_path_between env sigma (t,c1) in + let sigma,j,trace = + apply_coercion env sigma p {uj_val = v; 
uj_type = t} t2 in - evd, j.uj_val, j.uj_type, trace + sigma, j.uj_val, j.uj_type, trace with Not_found -> raise NoCoercion in - try (unify_leq_delay ~flags env evd t' c1, v', trace) + try (unify_leq_delay ~flags env sigma t' c1, v', trace) with UnableToUnify _ -> raise NoCoercion let default_flags_of env = default_flags_of TransparentState.full -let rec inh_conv_coerce_to_fail ?loc env evd ?(flags=default_flags_of env) rigidonly v t c1 = - try (unify_leq_delay ~flags env evd t c1, v, IdCoe) - with UnableToUnify (best_failed_evd,e) -> - try inh_coerce_to_fail flags env evd rigidonly v t c1 +let rec inh_conv_coerce_to_fail ?loc env sigma ?(flags=default_flags_of env) rigidonly v t c1 = + try (unify_leq_delay ~flags env sigma t c1, v, IdCoe) + with UnableToUnify (best_failed_sigma,e) -> + try inh_coerce_to_fail flags env sigma rigidonly v t c1 with NoCoercion -> match - EConstr.kind evd (whd_all env evd t), - EConstr.kind evd (whd_all env evd c1) + EConstr.kind sigma (whd_all env sigma t), + EConstr.kind sigma (whd_all env sigma c1) with | Prod (name,t1,t2), Prod (_,u1,u2) -> (* Conversion did not work, we may succeed with a coercion. 
*) @@ -551,45 +549,46 @@ let rec inh_conv_coerce_to_fail ?loc env evd ?(flags=default_flags_of env) rigid | na -> na) name in let open Context.Rel.Declaration in let env1 = push_rel (LocalAssum (name,u1)) env in - let (evd', v1, trace1) = - inh_conv_coerce_to_fail ?loc env1 evd rigidonly + let (sigma, v1, trace1) = + inh_conv_coerce_to_fail ?loc env1 sigma rigidonly (mkRel 1) (lift 1 u1) (lift 1 t1) in - let v2 = beta_applist evd' (lift 1 v,[v1]) in - let t2 = Retyping.get_type_of env1 evd' v2 in - let (evd'',v2',trace2) = inh_conv_coerce_to_fail ?loc env1 evd' rigidonly v2 t2 u2 in + let v2 = beta_applist sigma (lift 1 v,[v1]) in + let t2 = Retyping.get_type_of env1 sigma v2 in + let (sigma,v2',trace2) = inh_conv_coerce_to_fail ?loc env1 sigma rigidonly v2 t2 u2 in let trace = ProdCoe { na=name; ty=u1; dom=trace1; body=trace2 } in - (evd'', mkLambda (name, u1, v2'), trace) - | _ -> raise (NoCoercionNoUnifier (best_failed_evd,e)) + (sigma, mkLambda (name, u1, v2'), trace) + | _ -> raise (NoCoercionNoUnifier (best_failed_sigma,e)) (* Look for cj' obtained from cj by inserting coercions, s.t. 
cj'.typ = t *) -let inh_conv_coerce_to_gen ?loc ~program_mode resolve_tc rigidonly flags env evd cj t = - let (evd', val', otrace) = +let inh_conv_coerce_to_gen ?loc ~program_mode resolve_tc rigidonly flags env sigma cj t = + let (sigma, val', otrace) = try - let (evd', val', trace) = inh_conv_coerce_to_fail ?loc env evd ~flags rigidonly cj.uj_val cj.uj_type t in - (evd', val', Some trace) - with NoCoercionNoUnifier (best_failed_evd,e) -> + let (sigma, val', trace) = inh_conv_coerce_to_fail ?loc env sigma ~flags rigidonly cj.uj_val cj.uj_type t in + (sigma, val', Some trace) + with NoCoercionNoUnifier (best_failed_sigma,e) -> try if program_mode then - let (evd', val') = coerce_itf ?loc env evd cj.uj_val cj.uj_type t in - (evd', val', None) + let (sigma, val') = coerce_itf ?loc env sigma cj.uj_val cj.uj_type t in + (sigma, val', None) else raise NoSubtacCoercion with | NoSubtacCoercion when not resolve_tc || not (get_use_typeclasses_for_conversion ()) -> - error_actual_type ?loc env best_failed_evd cj t e + error_actual_type ?loc env best_failed_sigma cj t e | NoSubtacCoercion -> - let evd' = saturate_evd env evd in + let sigma' = saturate_evd env sigma in try - if evd' == evd then - error_actual_type ?loc env best_failed_evd cj t e + if sigma' == sigma then + error_actual_type ?loc env best_failed_sigma cj t e else - let (evd', val', trace) = inh_conv_coerce_to_fail ?loc env evd' rigidonly cj.uj_val cj.uj_type t in - (evd', val', Some trace) - with NoCoercionNoUnifier (_evd,_error) -> - error_actual_type ?loc env best_failed_evd cj t e + let sigma = sigma' in + let (sigma, val', trace) = inh_conv_coerce_to_fail ?loc env sigma rigidonly cj.uj_val cj.uj_type t in + (sigma, val', Some trace) + with NoCoercionNoUnifier (_sigma,_error) -> + error_actual_type ?loc env best_failed_sigma cj t e in - (evd',{ uj_val = val'; uj_type = t },otrace) + (sigma,{ uj_val = val'; uj_type = t },otrace) -let inh_conv_coerce_to ?loc ~program_mode resolve_tc env evd 
?(flags=default_flags_of env) = - inh_conv_coerce_to_gen ?loc ~program_mode resolve_tc false flags env evd -let inh_conv_coerce_rigid_to ?loc ~program_mode resolve_tc env evd ?(flags=default_flags_of env) = - inh_conv_coerce_to_gen ?loc ~program_mode resolve_tc true flags env evd +let inh_conv_coerce_to ?loc ~program_mode resolve_tc env sigma ?(flags=default_flags_of env) = + inh_conv_coerce_to_gen ?loc ~program_mode resolve_tc false flags env sigma +let inh_conv_coerce_rigid_to ?loc ~program_mode resolve_tc env sigma ?(flags=default_flags_of env) = + inh_conv_coerce_to_gen ?loc ~program_mode resolve_tc true flags env sigma diff --git a/pretyping/program.ml b/pretyping/program.ml index 9c478844aa..909ba6e44a 100644 --- a/pretyping/program.ml +++ b/pretyping/program.ml @@ -11,12 +11,11 @@ open CErrors open Util -let papp evdref r args = +let papp sigma r args = let open EConstr in let gr = delayed_force r in - let evd, hd = Evarutil.new_global !evdref gr in - evdref := evd; - mkApp (hd, args) + let evd, hd = Evarutil.new_global sigma gr in + sigma, mkApp (hd, args) let sig_typ () = Coqlib.lib_ref "core.sig.type" let sig_intro () = Coqlib.lib_ref "core.sig.intro" diff --git a/pretyping/program.mli b/pretyping/program.mli index 6604b3a854..7da0da1297 100644 --- a/pretyping/program.mli +++ b/pretyping/program.mli @@ -10,6 +10,7 @@ open Names open EConstr +open Evd (** A bunch of Coq constants used by Program *) @@ -38,7 +39,7 @@ val mk_coq_and : Evd.evar_map -> constr list -> Evd.evar_map * constr val mk_coq_not : Evd.evar_map -> constr -> Evd.evar_map * constr (** Polymorphic application of delayed references *) -val papp : Evd.evar_map ref -> (unit -> GlobRef.t) -> constr array -> constr +val papp : evar_map -> (unit -> GlobRef.t) -> constr array -> evar_map * constr val get_proofs_transparency : unit -> bool val is_program_cases : unit -> bool diff --git a/pretyping/reductionops.ml b/pretyping/reductionops.ml index 838bf22c66..98eb33273f 100644 --- 
a/pretyping/reductionops.ml +++ b/pretyping/reductionops.ml @@ -1457,12 +1457,15 @@ let pb_equal = function | Reduction.CUMUL -> Reduction.CONV | Reduction.CONV -> Reduction.CONV -let report_anomaly e = - let msg = Pp.(str "Conversion test raised an anomaly:" ++ - spc () ++ CErrors.print e) in - let e = UserError (None,msg) in - let e = CErrors.push e in - iraise e +let report_anomaly (e, info) = + let e = + if is_anomaly e then + let msg = Pp.(str "Conversion test raised an anomaly:" ++ + spc () ++ CErrors.print e) in + UserError (None, msg) + else e + in + iraise (e, info) let f_conv ?l2r ?reds env ?evars x y = let inj = EConstr.Unsafe.to_constr in @@ -1478,7 +1481,9 @@ let test_trans_conversion (f: constr Reduction.extended_conversion_function) red let _ = f ~reds env ~evars:(evars, Evd.universes sigma) x y in true with Reduction.NotConvertible -> false - | e when is_anomaly e -> report_anomaly e + | e -> + let e = Exninfo.capture e in + report_anomaly e let is_conv ?(reds=TransparentState.full) env sigma = test_trans_conversion f_conv reds env sigma let is_conv_leq ?(reds=TransparentState.full) env sigma = test_trans_conversion f_conv_leq reds env sigma @@ -1494,7 +1499,9 @@ let check_conv ?(pb=Reduction.CUMUL) ?(ts=TransparentState.full) env sigma x y = try f ~reds:ts env ~evars:(safe_evar_value sigma, Evd.universes sigma) x y; true with Reduction.NotConvertible -> false | Univ.UniverseInconsistency _ -> false - | e when is_anomaly e -> report_anomaly e + | e -> + let e = Exninfo.capture e in + report_anomaly e let sigma_compare_sorts env pb s0 s1 sigma = match pb with @@ -1547,7 +1554,9 @@ let infer_conv_gen conv_fun ?(catch_incon=true) ?(pb=Reduction.CUMUL) with | Reduction.NotConvertible -> None | Univ.UniverseInconsistency _ when catch_incon -> None - | e when is_anomaly e -> report_anomaly e + | e -> + let e = Exninfo.capture e in + report_anomaly e let infer_conv = infer_conv_gen (fun pb ~l2r sigma -> Reduction.generic_conv pb ~l2r (safe_evar_value 
sigma)) diff --git a/pretyping/typing.mli b/pretyping/typing.mli index fd2dc7c2fc..e85f6327f8 100644 --- a/pretyping/typing.mli +++ b/pretyping/typing.mli @@ -19,6 +19,7 @@ open Evd (** Typecheck a term and return its type. May trigger an evarmap leak. *) val unsafe_type_of : env -> evar_map -> constr -> types +[@@ocaml.deprecated "Use [type_of] or retyping according to your needs."] (** Typecheck a term and return its type + updated evars, optionally refreshing universes *) diff --git a/printing/genprint.ml b/printing/genprint.ml index a04df31d30..a673cbd933 100644 --- a/printing/genprint.ml +++ b/printing/genprint.ml @@ -19,15 +19,15 @@ open Geninterp (* Printing generic values *) type 'a with_level = - { default_already_surrounded : Notation_gram.tolerability; - default_ensure_surrounded : Notation_gram.tolerability; + { default_already_surrounded : Constrexpr.entry_relative_level; + default_ensure_surrounded : Constrexpr.entry_relative_level; printer : 'a } type printer_result = | PrinterBasic of (Environ.env -> Evd.evar_map -> Pp.t) -| PrinterNeedsLevel of (Environ.env -> Evd.evar_map -> Notation_gram.tolerability -> Pp.t) with_level +| PrinterNeedsLevel of (Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> Pp.t) with_level -type printer_fun_with_level = Environ.env -> Evd.evar_map -> Notation_gram.tolerability -> Pp.t +type printer_fun_with_level = Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> Pp.t type top_printer_result = | TopPrinterBasic of (unit -> Pp.t) diff --git a/printing/genprint.mli b/printing/genprint.mli index 21b8417ffa..59e36baeb6 100644 --- a/printing/genprint.mli +++ b/printing/genprint.mli @@ -13,15 +13,15 @@ open Genarg type 'a with_level = - { default_already_surrounded : Notation_gram.tolerability; - default_ensure_surrounded : Notation_gram.tolerability; + { default_already_surrounded : Constrexpr.entry_relative_level; + default_ensure_surrounded : Constrexpr.entry_relative_level; printer : 'a } type 
printer_result = | PrinterBasic of (Environ.env -> Evd.evar_map -> Pp.t) -| PrinterNeedsLevel of (Environ.env -> Evd.evar_map -> Notation_gram.tolerability -> Pp.t) with_level +| PrinterNeedsLevel of (Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> Pp.t) with_level -type printer_fun_with_level = Environ.env -> Evd.evar_map -> Notation_gram.tolerability -> Pp.t +type printer_fun_with_level = Environ.env -> Evd.evar_map -> Constrexpr.entry_relative_level -> Pp.t type top_printer_result = | TopPrinterBasic of (unit -> Pp.t) diff --git a/printing/ppconstr.ml b/printing/ppconstr.ml index f9f46e1ceb..59972f8bdb 100644 --- a/printing/ppconstr.ml +++ b/printing/ppconstr.ml @@ -20,7 +20,6 @@ open Ppextend open Glob_term open Constrexpr open Constrexpr_ops -open Notation_gram open Namegen (*i*) @@ -66,21 +65,16 @@ let tag_var = tag Tag.variable let lapp = 10 let lposint = 0 let lnegint = 35 (* must be consistent with Notation "- x" *) - let ltop = (200,E) + let ltop = LevelLe 200 let lproj = 1 let ldelim = 1 - let lsimpleconstr = (8,E) - let lsimplepatt = (1,E) - - let prec_less child (parent,assoc) = - if parent < 0 && Int.equal child lprod then true - else - let parent = abs parent in - match assoc with - | E -> (<=) child parent - | L -> (<) child parent - | Prec n -> child<=n - | Any -> true + let lsimpleconstr = LevelLe 8 + let lsimplepatt = LevelLe 1 + + let prec_less child = function + | LevelLt parent -> (<) child parent + | LevelLe parent -> if parent < 0 && Int.equal child lprod then true else child <= abs parent + | LevelSome -> true let prec_of_prim_token = function | Numeral (SPlus,_) -> lposint @@ -91,6 +85,7 @@ let tag_var = tag Tag.variable let env = ref terms and envlist = ref termlists and bl = ref binders and bll = ref binderlists in let pop r = let a = List.hd !r in r := List.tl !r; a in let return unp pp1 pp2 = (tag_unparsing unp pp1) ++ pp2 in + let parens = !Constrextern.print_parentheses in (* Warning: The following function enforces 
a very precise order of evaluation of sub-components. @@ -98,22 +93,22 @@ let tag_var = tag Tag.variable let rec aux = function | [] -> mt () - | UnpMetaVar (_, prec) as unp :: l -> + | UnpMetaVar (prec, side) as unp :: l -> let c = pop env in let pp2 = aux l in - let pp1 = pr (n, prec) c in + let pp1 = pr (if parens && side <> None then LevelLe 0 else prec) c in return unp pp1 pp2 - | UnpBinderMetaVar (_, prec) as unp :: l -> + | UnpBinderMetaVar prec as unp :: l -> let c = pop bl in let pp2 = aux l in - let pp1 = pr_patt (n, prec) c in + let pp1 = pr_patt prec c in return unp pp1 pp2 - | UnpListMetaVar (_, prec, sl) as unp :: l -> + | UnpListMetaVar (prec, sl, side) as unp :: l -> let cl = pop envlist in - let pp1 = prlist_with_sep (fun () -> aux sl) (pr (n,prec)) cl in + let pp1 = prlist_with_sep (fun () -> aux sl) (pr (if parens && side <> None then LevelLe 0 else prec)) cl in let pp2 = aux l in return unp pp1 pp2 - | UnpBinderListMetaVar (_, isopen, sl) as unp :: l -> + | UnpBinderListMetaVar (isopen, sl) as unp :: l -> let cl = pop bll in let pp2 = aux l in let pp1 = pr_binders (fun () -> aux sl) isopen cl in @@ -133,8 +128,8 @@ let tag_var = tag Tag.variable in aux unps - let pr_notation pr pr_patt pr_binders s env = - let unpl, level = find_notation_printing_rule s in + let pr_notation pr pr_patt pr_binders which s env = + let unpl, level = find_notation_printing_rule which s in print_hunks level pr pr_patt pr_binders env unpl, level let pr_delimiters key strm = @@ -216,7 +211,7 @@ let tag_var = tag Tag.variable let pr_expl_args pr (a,expl) = match expl with - | None -> pr (lapp,L) a + | None -> pr (LevelLt lapp) a | Some {v=ExplByPos (n,_id)} -> anomaly (Pp.str "Explicitation by position not implemented.") | Some {v=ExplByName id} -> @@ -243,31 +238,32 @@ let tag_var = tag Tag.variable let las = lapp let lpator = 0 let lpatrec = 0 + let lpattop = LevelLe 200 let rec pr_patt sep inh p = let (strm,prec) = match CAst.(p.v) with | CPatRecord l -> let pp (c, p) 
= - pr_reference c ++ spc() ++ str ":=" ++ pr_patt spc (lpatrec, Any) p + pr_reference c ++ spc() ++ str ":=" ++ pr_patt spc lpattop p in (if l = [] then str "{| |}" else str "{| " ++ prlist_with_sep pr_semicolon pp l ++ str " |}"), lpatrec | CPatAlias (p, na) -> - pr_patt mt (las,E) p ++ str " as " ++ pr_lname na, las + pr_patt mt (LevelLe las) p ++ str " as " ++ pr_lname na, las | CPatCstr (c, None, []) -> pr_reference c, latom | CPatCstr (c, None, args) -> - pr_reference c ++ prlist (pr_patt spc (lapp,L)) args, lapp + pr_reference c ++ prlist (pr_patt spc (LevelLt lapp)) args, lapp | CPatCstr (c, Some args, []) -> - str "@" ++ pr_reference c ++ prlist (pr_patt spc (lapp,L)) args, lapp + str "@" ++ pr_reference c ++ prlist (pr_patt spc (LevelLt lapp)) args, lapp | CPatCstr (c, Some expl_args, extra_args) -> - surround (str "@" ++ pr_reference c ++ prlist (pr_patt spc (lapp,L)) expl_args) - ++ prlist (pr_patt spc (lapp,L)) extra_args, lapp + surround (str "@" ++ pr_reference c ++ prlist (pr_patt spc (LevelLt lapp)) expl_args) + ++ prlist (pr_patt spc (LevelLt lapp)) extra_args, lapp | CPatAtom (None) -> str "_", latom @@ -276,16 +272,16 @@ let tag_var = tag Tag.variable pr_reference r, latom | CPatOr pl -> - let pp p = hov 0 (pr_patt mt (lpator,Any) p) in + let pp p = hov 0 (pr_patt mt lpattop p) in surround (hov 0 (prlist_with_sep pr_spcbar pp pl)), lpator - | CPatNotation ((_,"( _ )"),([p],[]),[]) -> - pr_patt (fun()->str"(") (max_int,E) p ++ str")", latom + | CPatNotation (_,(_,"( _ )"),([p],[]),[]) -> + pr_patt (fun()->str"(") lpattop p ++ str")", latom - | CPatNotation (s,(l,ll),args) -> - let strm_not, l_not = pr_notation (pr_patt mt) (fun _ _ -> mt ()) (fun _ _ _ -> mt()) s (l,ll,[],[]) in - (if List.is_empty args||prec_less l_not (lapp,L) then strm_not else surround strm_not) - ++ prlist (pr_patt spc (lapp,L)) args, if not (List.is_empty args) then lapp else l_not + | CPatNotation (which,s,(l,ll),args) -> + let strm_not, l_not = pr_notation (pr_patt mt) 
(fun _ _ -> mt ()) (fun _ _ _ -> mt()) which s (l,ll,[],[]) in + (if List.is_empty args||prec_less l_not (LevelLt lapp) then strm_not else surround strm_not) + ++ prlist (pr_patt spc (LevelLt lapp)) args, if not (List.is_empty args) then lapp else l_not | CPatPrim p -> pr_prim_token p, latom @@ -440,7 +436,7 @@ let tag_var = tag Tag.variable | Some t -> spc () ++ keyword "in" ++ spc () ++ pr_patt lsimplepatt t) let pr_case_item pr (tm,as_clause, in_clause) = - hov 0 (pr (lcast,E) tm ++ pr_asin pr as_clause in_clause) + hov 0 (pr (LevelLe lcast) tm ++ pr_asin pr as_clause in_clause) let pr_case_type pr po = match po with @@ -456,17 +452,17 @@ let tag_var = tag Tag.variable pr_case_type pr po let pr_proj pr pr_app a f l = - hov 0 (pr (lproj,E) a ++ cut() ++ str ".(" ++ pr_app pr f l ++ str ")") + hov 0 (pr (LevelLe lproj) a ++ cut() ++ str ".(" ++ pr_app pr f l ++ str ")") let pr_appexpl pr (f,us) l = hov 2 ( str "@" ++ pr_reference f ++ pr_universe_instance us ++ - prlist (pr_sep_com spc (pr (lapp,L))) l) + prlist (pr_sep_com spc (pr (LevelLt lapp))) l) let pr_app pr a l = hov 2 ( - pr (lapp,L) a ++ + pr (LevelLt lapp) a ++ prlist (fun a -> spc () ++ pr_expl_args pr a) l) let pr_record_body_gen pr l = @@ -483,7 +479,7 @@ let tag_var = tag Tag.variable let pr_dangling_with_for sep pr inherited a = match a.v with | (CFix (_,[_])|CCoFix(_,[_])) -> - pr sep (latom,E) a + pr sep (LevelLe latom) a | _ -> pr sep inherited a @@ -546,14 +542,14 @@ let tag_var = tag Tag.variable let c,l1 = List.sep_last l1 in let p = pr_proj (pr mt) pr_appexpl c (f,us) l1 in if not (List.is_empty l2) then - return (p ++ prlist (pr spc (lapp,L)) l2, lapp) + return (p ++ prlist (pr spc (LevelLt lapp)) l2, lapp) else return (p, lproj) | CAppExpl ((None,qid,us),[t]) | CApp ((_, {v = CRef(qid,us)}),[t,None]) when qualid_is_ident qid && Id.equal (qualid_basename qid) Notation_ops.ldots_var -> return ( - hov 0 (str ".." ++ pr spc (latom,E) t ++ spc () ++ str ".."), + hov 0 (str ".." 
++ pr spc (LevelLe latom) t ++ spc () ++ str ".."), larg ) | CAppExpl ((None,f,us),l) -> @@ -642,24 +638,24 @@ let tag_var = tag Tag.variable return (pr_glob_sort s, latom) | CCast (a,b) -> return ( - hv 0 (pr mt (lcast,L) a ++ spc () ++ + hv 0 (pr mt (LevelLt lcast) a ++ spc () ++ match b with - | CastConv b -> str ":" ++ ws 1 ++ pr mt (-lcast,E) b - | CastVM b -> str "<:" ++ ws 1 ++ pr mt (-lcast,E) b - | CastNative b -> str "<<:" ++ ws 1 ++ pr mt (-lcast,E) b + | CastConv b -> str ":" ++ ws 1 ++ pr mt (LevelLe (-lcast)) b + | CastVM b -> str "<:" ++ ws 1 ++ pr mt (LevelLe (-lcast)) b + | CastNative b -> str "<<:" ++ ws 1 ++ pr mt (LevelLe (-lcast)) b | CastCoerce -> str ":>"), lcast ) - | CNotation ((_,"( _ )"),([t],[],[],[])) -> - return (pr (fun()->str"(") (max_int,L) t ++ str")", latom) - | CNotation (s,env) -> - pr_notation (pr mt) pr_patt (pr_binders_gen (pr mt ltop)) s env + | CNotation (_,(_,"( _ )"),([t],[],[],[])) -> + return (pr (fun()->str"(") ltop t ++ str")", latom) + | CNotation (which,s,env) -> + pr_notation (pr mt) pr_patt (pr_binders_gen (pr mt ltop)) which s env | CGeneralization (bk,ak,c) -> return (pr_generalization bk ak (pr mt ltop c), latom) | CPrim p -> return (pr_prim_token p, prec_of_prim_token p) | CDelimiters (sc,a) -> - return (pr_delimiters sc (pr mt (ldelim,E) a), ldelim) + return (pr_delimiters sc (pr mt (LevelLe ldelim) a), ldelim) in let loc = constr_loc a in pr_with_comments ?loc diff --git a/printing/ppconstr.mli b/printing/ppconstr.mli index c17ca251a8..288fb251c4 100644 --- a/printing/ppconstr.mli +++ b/printing/ppconstr.mli @@ -15,9 +15,8 @@ open Libnames open Constrexpr open Names -open Notation_gram -val prec_less : precedence -> tolerability -> bool +val prec_less : entry_level -> entry_relative_level -> bool val pr_tight_coma : unit -> Pp.t @@ -49,7 +48,7 @@ val pr_lconstr_pattern_expr : Environ.env -> Evd.evar_map -> constr_pattern_expr val pr_constr_expr : Environ.env -> Evd.evar_map -> constr_expr -> Pp.t val 
pr_lconstr_expr : Environ.env -> Evd.evar_map -> constr_expr -> Pp.t val pr_cases_pattern_expr : cases_pattern_expr -> Pp.t -val pr_constr_expr_n : Environ.env -> Evd.evar_map -> tolerability -> constr_expr -> Pp.t +val pr_constr_expr_n : Environ.env -> Evd.evar_map -> entry_relative_level -> constr_expr -> Pp.t type term_pr = { pr_constr_expr : Environ.env -> Evd.evar_map -> constr_expr -> Pp.t; @@ -76,8 +75,8 @@ val default_term_pr : term_pr Which has the same type. We can turn a modular printer into a printer by taking its fixpoint. *) -val lsimpleconstr : tolerability -val ltop : tolerability +val lsimpleconstr : entry_relative_level +val ltop : entry_relative_level val modular_constr_pr : - ((unit->Pp.t) -> tolerability -> constr_expr -> Pp.t) -> - (unit->Pp.t) -> tolerability -> constr_expr -> Pp.t + ((unit->Pp.t) -> entry_relative_level -> constr_expr -> Pp.t) -> + (unit->Pp.t) -> entry_relative_level -> constr_expr -> Pp.t diff --git a/printing/printer.ml b/printing/printer.ml index cc83a1dde0..b376616b61 100644 --- a/printing/printer.ml +++ b/printing/printer.ml @@ -278,7 +278,7 @@ let pr_compacted_decl env sigma decl = ids, mt (), typ | CompactedDecl.LocalDef (ids,c,typ) -> (* Force evaluation *) - let pb = pr_lconstr_env env sigma c in + let pb = pr_lconstr_env ~inctx:true env sigma c in let pb = if isCast c then surround pb else pb in ids, (str" := " ++ pb ++ cut ()), typ in @@ -297,7 +297,7 @@ let pr_rel_decl env sigma decl = | RelDecl.LocalAssum _ -> mt () | RelDecl.LocalDef (_,c,_) -> (* Force evaluation *) - let pb = pr_lconstr_env env sigma c in + let pb = pr_lconstr_env ~inctx:true env sigma c in let pb = if isCast c then surround pb else pb in (str":=" ++ spc () ++ pb ++ spc ()) in let ptyp = pr_ltype_env env sigma typ in diff --git a/printing/printer.mli b/printing/printer.mli index cd5151bd8f..24d0a8cf6a 100644 --- a/printing/printer.mli +++ b/printing/printer.mli @@ -51,7 +51,7 @@ val enable_goal_names_printing : bool ref val pr_constr_env : 
?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> constr -> Pp.t val pr_lconstr_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> constr -> Pp.t -val pr_constr_n_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> Notation_gram.tolerability -> constr -> Pp.t +val pr_constr_n_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> Constrexpr.entry_relative_level -> constr -> Pp.t (** Same, but resilient to [Nametab] errors. Prints fully-qualified names when [shortest_qualid_of_global] has failed. Prints "??" @@ -63,7 +63,7 @@ val safe_pr_lconstr_env : env -> evar_map -> constr -> Pp.t val pr_econstr_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> EConstr.t -> Pp.t val pr_leconstr_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> EConstr.t -> Pp.t -val pr_econstr_n_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> Notation_gram.tolerability -> EConstr.t -> Pp.t +val pr_econstr_n_env : ?lax:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> Constrexpr.entry_relative_level -> EConstr.t -> Pp.t val pr_etype_env : ?lax:bool -> ?goal_concl_style:bool -> env -> evar_map -> EConstr.types -> Pp.t val pr_letype_env : ?lax:bool -> ?goal_concl_style:bool -> env -> evar_map -> EConstr.types -> Pp.t @@ -100,7 +100,7 @@ val pr_lconstr_under_binders_env : env -> evar_map -> constr_under_binders -> Pp val pr_ltype_env : ?lax:bool -> ?goal_concl_style:bool -> env -> evar_map -> types -> Pp.t val pr_type_env : ?lax:bool -> ?goal_concl_style:bool -> env -> evar_map -> types -> Pp.t -val pr_closed_glob_n_env : ?lax:bool -> ?goal_concl_style:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> Notation_gram.tolerability -> closed_glob_constr -> Pp.t +val pr_closed_glob_n_env : ?lax:bool -> ?goal_concl_style:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> Constrexpr.entry_relative_level -> 
closed_glob_constr -> Pp.t val pr_closed_glob_env : ?lax:bool -> ?goal_concl_style:bool -> ?inctx:bool -> ?scope:scope_name -> env -> evar_map -> closed_glob_constr -> Pp.t val pr_ljudge_env : env -> evar_map -> EConstr.unsafe_judgment -> Pp.t * Pp.t diff --git a/printing/proof_diffs.ml b/printing/proof_diffs.ml index d93dd15f91..c3ee5968dc 100644 --- a/printing/proof_diffs.ml +++ b/printing/proof_diffs.ml @@ -529,7 +529,7 @@ let match_goals ot nt = constr_expr ogname a a2 | CastCoerce, CastCoerce -> () | _, _ -> raise (Diff_Failure "Unable to match goals between old and new proof states (4)")) - | CNotation (ntn,args), CNotation (ntn2,args2) -> + | CNotation (_,ntn,args), CNotation (_,ntn2,args2) -> constr_notation_substitution ogname args args2 | CGeneralization (b,a,c), CGeneralization (b2,a2,c2) -> constr_expr ogname c c2 diff --git a/proofs/proof.ml b/proofs/proof.ml index e2ee5426b5..7d0b31734e 100644 --- a/proofs/proof.ml +++ b/proofs/proof.ml @@ -69,16 +69,16 @@ exception FullyUnfocused let _ = CErrors.register_handler begin function | CannotUnfocusThisWay -> - Pp.str "This proof is focused, but cannot be unfocused this way" + Some (Pp.str "This proof is focused, but cannot be unfocused this way") | NoSuchGoals (i,j) when Int.equal i j -> - Pp.(str "[Focus] No such goal (" ++ int i ++ str").") + Some Pp.(str "[Focus] No such goal (" ++ int i ++ str").") | NoSuchGoals (i,j) -> - Pp.(str "[Focus] Not every goal in range ["++ int i ++ str","++int j++str"] exist.") + Some Pp.(str "[Focus] Not every goal in range ["++ int i ++ str","++int j++str"] exist.") | NoSuchGoal id -> - Pp.(str "[Focus] No such goal: " ++ str (Names.Id.to_string id) ++ str ".") + Some Pp.(str "[Focus] No such goal: " ++ str (Names.Id.to_string id) ++ str ".") | FullyUnfocused -> - Pp.str "The proof is not focused" - | _ -> raise CErrors.Unhandled + Some (Pp.str "The proof is not focused") + | _ -> None end let check_cond_kind c k = @@ -325,9 +325,9 @@ exception OpenProof of Names.Id.t 
option * open_error_reason let _ = CErrors.register_handler begin function | OpenProof (pid, reason) -> let open Pp in - Option.cata (fun pid -> - str " (in proof " ++ Names.Id.print pid ++ str "): ") (mt()) pid ++ print_open_error_reason reason - | _ -> raise CErrors.Unhandled + Some (Option.cata (fun pid -> + str " (in proof " ++ Names.Id.print pid ++ str "): ") (mt()) pid ++ print_open_error_reason reason) + | _ -> None end let warn_remaining_shelved_goals = diff --git a/proofs/proof_bullet.ml b/proofs/proof_bullet.ml index 3ff0533b6b..d978885d62 100644 --- a/proofs/proof_bullet.ml +++ b/proofs/proof_bullet.ml @@ -79,8 +79,8 @@ module Strict = struct (function | FailedBullet (b,sugg) -> let prefix = Pp.(str"Wrong bullet " ++ pr_bullet b ++ str": ") in - Pp.(str "[Focus]" ++ spc () ++ prefix ++ suggest_on_error sugg) - | _ -> raise CErrors.Unhandled) + Some Pp.(str "[Focus]" ++ spc () ++ prefix ++ suggest_on_error sugg) + | _ -> None) (* spiwack: we need only one focus kind as we keep a stack of (distinct!) bullets *) @@ -203,7 +203,7 @@ exception SuggestNoSuchGoals of int * Proof.t let _ = CErrors.register_handler begin function | SuggestNoSuchGoals(n,proof) -> let suffix = suggest proof in - Pp.(str "No such " ++ str (CString.plural n "goal") ++ str "." ++ - pr_non_empty_arg (fun x -> x) suffix) - | _ -> raise CErrors.Unhandled + Some (Pp.(str "No such " ++ str (CString.plural n "goal") ++ str "." 
++ + pr_non_empty_arg (fun x -> x) suffix)) + | _ -> None end diff --git a/proofs/tacmach.ml b/proofs/tacmach.ml index d3bce07814..3e4549f92c 100644 --- a/proofs/tacmach.ml +++ b/proofs/tacmach.ml @@ -76,7 +76,6 @@ let pf_nf = pf_reduce simpl let pf_nf_betaiota = pf_reduce nf_betaiota let pf_compute = pf_reduce compute let pf_unfoldn ubinds = pf_reduce (unfoldn ubinds) -let pf_unsafe_type_of = pf_reduce unsafe_type_of let pf_type_of = pf_reduce type_of let pf_get_type_of = pf_reduce Retyping.get_type_of @@ -117,9 +116,6 @@ module New = struct let pf_env = Proofview.Goal.env let pf_concl = Proofview.Goal.concl - let pf_unsafe_type_of gl t = - pf_apply unsafe_type_of gl t - let pf_type_of gl t = pf_apply type_of gl t @@ -182,4 +178,12 @@ module New = struct let pf_compute gl t = pf_apply compute gl t let pf_nf_evar gl t = nf_evar (project gl) t + + (* deprecated *) + let pf_unsafe_type_of gl t = + pf_apply (unsafe_type_of[@warning "-3"]) gl t + end + +(* deprecated *) +let pf_unsafe_type_of = pf_reduce unsafe_type_of[@warning "-3"] diff --git a/proofs/tacmach.mli b/proofs/tacmach.mli index aed1c89bfe..b4247f39b9 100644 --- a/proofs/tacmach.mli +++ b/proofs/tacmach.mli @@ -34,6 +34,7 @@ val pf_nth_hyp_id : Goal.goal sigma -> int -> Id.t val pf_last_hyp : Goal.goal sigma -> named_declaration val pf_ids_of_hyps : Goal.goal sigma -> Id.t list val pf_unsafe_type_of : Goal.goal sigma -> constr -> types +[@@ocaml.deprecated "Use [type_of] or retyping according to your needs."] val pf_type_of : Goal.goal sigma -> constr -> evar_map * types val pf_hnf_type_of : Goal.goal sigma -> constr -> types @@ -83,6 +84,7 @@ module New : sig (** WRONG: To be avoided at all costs, it typechecks the term entirely but forgets the universe constraints necessary to retypecheck it *) val pf_unsafe_type_of : Proofview.Goal.t -> constr -> types + [@@ocaml.deprecated "Use [type_of] or retyping according to your needs."] (** This function does no type inference and expects an already well-typed 
term. It recomputes its type in the fastest way possible (no conversion is ever involved) *) diff --git a/stm/stm.ml b/stm/stm.ml index a521f9001d..95c58b9043 100644 --- a/stm/stm.ml +++ b/stm/stm.ml @@ -1273,8 +1273,8 @@ let record_pb_time ?loc proof_name time = exception RemoteException of Pp.t let _ = CErrors.register_handler (function - | RemoteException ppcmd -> ppcmd - | _ -> raise Unhandled) + | RemoteException ppcmd -> Some ppcmd + | _ -> None) (****************** proof structure for error recovery ************************) (******************************************************************************) @@ -2157,22 +2157,23 @@ let collect_proof keep cur hd brkind id = let has_default_proof_using = Option.has_some (Proof_using.get_default_proof_using ()) in let proof_using_ast = function | VernacProof(_,Some _) -> true + | VernacProof(_,None) -> has_default_proof_using | _ -> false in let proof_using_ast = function | Some (_, v) when proof_using_ast v.expr.CAst.v.expr && (not (Vernacprop.has_Fail v.expr)) -> Some v | _ -> None in - let has_proof_using x = has_default_proof_using || (proof_using_ast x <> None) in + let has_proof_using x = proof_using_ast x <> None in let proof_no_using = function - | VernacProof(t,None) -> t + | VernacProof(t,None) -> if has_default_proof_using then None else t | _ -> assert false in let proof_no_using = function | Some (_, v) -> proof_no_using v.expr.CAst.v.expr, v | _ -> assert false in let has_proof_no_using = function - | VernacProof(_,None) -> true + | VernacProof(_,None) -> not has_default_proof_using | _ -> false in let has_proof_no_using = function diff --git a/tactics/declare.ml b/tactics/declare.ml index ce2f3ec2c5..5655bdfd4d 100644 --- a/tactics/declare.ml +++ b/tactics/declare.ml @@ -24,10 +24,11 @@ exception AlreadyDeclared of (string option * Id.t) let _ = CErrors.register_handler (function | AlreadyDeclared (kind, id) -> - seq [ Pp.pr_opt_no_spc (fun s -> str s ++ spc ()) kind - ; Id.print id; str " already 
exists."] + Some + (seq [ Pp.pr_opt_no_spc (fun s -> str s ++ spc ()) kind + ; Id.print id; str " already exists."]) | _ -> - raise CErrors.Unhandled) + None) module NamedDecl = Context.Named.Declaration diff --git a/tactics/pfedit.ml b/tactics/pfedit.ml index 72204e1d24..dbabc4e4e0 100644 --- a/tactics/pfedit.ml +++ b/tactics/pfedit.ml @@ -26,8 +26,8 @@ let use_unification_heuristics () = !use_unification_heuristics_ref exception NoSuchGoal let () = CErrors.register_handler begin function - | NoSuchGoal -> Pp.(str "No such goal.") - | _ -> raise CErrors.Unhandled + | NoSuchGoal -> Some Pp.(str "No such goal.") + | _ -> None end let get_nth_V82_goal p i = diff --git a/test-suite/Makefile b/test-suite/Makefile index 265c2eafa7..1681150f7b 100644 --- a/test-suite/Makefile +++ b/test-suite/Makefile @@ -32,11 +32,15 @@ include ../Makefile.common # Variables ####################################################################### +# Using quotes to anticipate the possibility of spaces in the directory name +# Note that this will later need an eval in shell to interpret the quotes +ROOT='$(shell cd ..; pwd)' + ifneq ($(wildcard ../_build),) -BIN:=$(shell cd ..; pwd)/_build/install/default/bin/ -COQLIB:=$(shell cd ..; pwd)/_build/install/default/lib/coq +BIN:=$(ROOT)/_build/install/default/bin/ +COQLIB:=$(ROOT)/_build/install/default/lib/coq else -BIN := $(shell cd ..; pwd)/bin/ +BIN := $(ROOT)/bin/ COQLIB?= ifeq ($(COQLIB),) @@ -602,10 +606,10 @@ $(patsubst %.sh,%.log,$(wildcard misc/*.sh)): %.log: %.sh $(PREREQUISITELOG) @echo "TEST $<" $(HIDE){ \ echo $(call log_intro,$<); \ - export BIN="$(BIN)"; \ - export coqc="$(coqc)"; \ - export coqtop="$(coqc)"; \ - export coqdep="$(coqdep)"; \ + export BIN=$(BIN); \ + export coqc="eval $(coqc)"; \ + export coqtop="eval $(coqc)"; \ + export coqdep="eval $(coqdep)"; \ "$<" 2>&1; R=$$?; times; \ if [ $$R = 0 ]; then \ echo $(log_success); \ diff --git a/test-suite/bugs/bug_4690.v b/test-suite/bugs/bug_4690.v new file mode 100644 
index 0000000000..f50866a990 --- /dev/null +++ b/test-suite/bugs/bug_4690.v @@ -0,0 +1,3 @@ +(* Check that @f is not allowed in notation when f is a notation variable *) + +Fail Notation "# g" := (@g O) (at level 0). diff --git a/test-suite/bugs/closed/bug_10031.v b/test-suite/bugs/closed/bug_10031.v index 15b53de00d..b76ea7d337 100644 --- a/test-suite/bugs/closed/bug_10031.v +++ b/test-suite/bugs/closed/bug_10031.v @@ -3,7 +3,7 @@ Require Import Int63 ZArith. Open Scope int63_scope. Goal False. -cut (let (q, r) := (0, 0) in ([|q|], [|r|]) = (9223372036854775808%Z, 0%Z)); +cut (let (q, r) := (0, 0) in (φ q, φ r) = (9223372036854775808%Z, 0%Z)); [discriminate| ]. Fail (change (0, 0) with (diveucl_21 1 0 1); apply diveucl_21_spec). Abort. diff --git a/test-suite/bugs/closed/bug_10917.v b/test-suite/bugs/closed/bug_10917.v new file mode 100644 index 0000000000..cdb132ede0 --- /dev/null +++ b/test-suite/bugs/closed/bug_10917.v @@ -0,0 +1,4 @@ +(* This was raising an anomaly *) + +Definition m (h : 0 = 1) P : P 1 -> P 0 := + fun H => match h, H with end. diff --git a/test-suite/bugs/closed/bug_11114.v b/test-suite/bugs/closed/bug_11114.v new file mode 100644 index 0000000000..dd981279db --- /dev/null +++ b/test-suite/bugs/closed/bug_11114.v @@ -0,0 +1,17 @@ +Require Extraction. + +Inductive t (sig: list nat) := +| T (k: nat). + +Record pkg := + { _sig: list nat; + _t : t _sig }. + +Definition map (f: nat -> nat) (p: pkg) := + {| _sig := p.(_sig); + _t := match p.(_t) with + | T _ k => T p.(_sig) (f k) + end |}. + +Extraction Implicit Build_pkg [_sig]. +Extraction TestCompile map. diff --git a/test-suite/bugs/closed/bug_11342.v b/test-suite/bugs/closed/bug_11342.v new file mode 100644 index 0000000000..3c163fb772 --- /dev/null +++ b/test-suite/bugs/closed/bug_11342.v @@ -0,0 +1,19 @@ +(* -*- mode: coq; coq-prog-args: ("-vos") -*- *) + +Section foo. + Context {H:True}. + Set Default Proof Using "Type". + Theorem test2 : True. + Proof. 
+ (* BUG: this gets run when compiling with -vos *) + fail "proof with default using". + exact I. + Qed. + + Theorem test3 : True. + Proof using Type. + (* this isn't run with -vos *) + fail "using". + exact I. + Qed. +End foo. diff --git a/test-suite/bugs/closed/bug_11549.v b/test-suite/bugs/closed/bug_11549.v new file mode 100644 index 0000000000..7608e1c4d8 --- /dev/null +++ b/test-suite/bugs/closed/bug_11549.v @@ -0,0 +1,5 @@ +From Ltac2 Require Ltac2. + +Notation "t $ r" := (t r) + (at level 65, right associativity, only parsing). +Check S $ O. diff --git a/test-suite/bugs/closed/bug_11552.v b/test-suite/bugs/closed/bug_11552.v new file mode 100644 index 0000000000..189b1d9d8a --- /dev/null +++ b/test-suite/bugs/closed/bug_11552.v @@ -0,0 +1,9 @@ +From Ltac2 Require Import Ltac2. + +Goal True. +Proof. + Search unit. + (* Unbound constructor Search *) + Check tt. + (* Unbound constructor Check *) +Abort. diff --git a/test-suite/bugs/closed/bug_9517.v b/test-suite/bugs/closed/bug_9517.v new file mode 100644 index 0000000000..bb43edbe74 --- /dev/null +++ b/test-suite/bugs/closed/bug_9517.v @@ -0,0 +1,19 @@ +Declare Custom Entry expr. +Declare Custom Entry stmt. +Notation "x" := x (in custom stmt, x ident). +Notation "x" := x (in custom expr, x ident). + +Notation "1" := 1 (in custom expr). + +Notation "! x = y !" := (pair x y) (in custom stmt at level 0, x custom expr, y custom expr). +Notation "? x = y" := (pair x y) (in custom stmt at level 0, x custom expr, y custom expr). +Notation "x = y" := (pair x y) (in custom stmt at level 0, x custom expr, y custom expr). + +Notation "stmt:( s )" := s (s custom stmt). +Check stmt:(! _ = _ !). +Check stmt:(? _ = _). +Check stmt:(_ = _). +Check stmt:(! 1 = 1 !). +Check stmt:(? 1 = 1). +Check stmt:(1 = 1). +Check stmt:(_ = 1). 
diff --git a/test-suite/bugs/closed/bug_9521.v b/test-suite/bugs/closed/bug_9521.v new file mode 100644 index 0000000000..0464c62c09 --- /dev/null +++ b/test-suite/bugs/closed/bug_9521.v @@ -0,0 +1,23 @@ +(* Example from #9521 *) + +Module A. + +Declare Custom Entry expr. +Notation "expr0:( s )" := s (s custom expr at level 0). +Notation "#" := 0 (in custom expr at level 1). +Check expr0:(#). (* Should not be an anomaly "unknown level 0" *) + +End A. + +(* Another example from a comment at #11561 *) + +Module B. + +Declare Custom Entry special. +Declare Custom Entry expr. +Notation "## x" := (S x) (in custom expr at level 10, x custom special at level 10). +Notation "[ e ]" := e (e custom expr at level 10). +Notation "1" := 1 (in custom special). +Check [ ## 1 ]. + +End B. diff --git a/test-suite/bugs/closed/bug_9640.v b/test-suite/bugs/closed/bug_9640.v new file mode 100644 index 0000000000..4dc0bead7b --- /dev/null +++ b/test-suite/bugs/closed/bug_9640.v @@ -0,0 +1,23 @@ +(* Similar to #9521 (was an anomaly unknown level 150 *) + +Module A. + +Declare Custom Entry expr. +Notation "p" := (p) (in custom expr at level 150, p constr, right associativity). +Notation "** X" := (X) (at level 200, X custom expr at level 150). +Lemma t : ** True. +Abort. + +End A. + +(* Similar to #9517, #9519, #11331 *) + +Module B. + +Declare Custom Entry expr. +Notation "p" := (p) (in custom expr at level 100, p constr (* at level 200 *)). +Notation "** X" := (X) (at level 200, X custom expr at level 150). +Lemma t : ** True. +Abort. + +End B. diff --git a/test-suite/bugs/closed/bug_9741.v b/test-suite/bugs/closed/bug_9741.v new file mode 100644 index 0000000000..247155d8b3 --- /dev/null +++ b/test-suite/bugs/closed/bug_9741.v @@ -0,0 +1,21 @@ +(* This was failing at parsing *) + +Notation "'a'" := tt (only printing). +Goal True. let a := constr:(1+1) in idtac a. Abort. + +(* Idem *) + +Require Import Coq.Strings.String. +Require Import Coq.ZArith.ZArith. +Open Scope string_scope. 
+ +Axiom Ox: string -> Z. + +Axiom isMMIOAddr: Z -> Prop. + +Notation "'Ox' a" := (Ox a) (only printing, at level 10, format "'Ox' a"). + +Goal False. + set (f := isMMIOAddr). + set (x := f (Ox "0018")). +Abort. diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/run.sh b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/run.sh index 4a50759bdb..a6f35db17c 100755 --- a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/run.sh +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/run.sh @@ -5,6 +5,10 @@ set -e cd "$(dirname "${BASH_SOURCE[0]}")" -"$COQLIB"/tools/make-both-time-files.py time-of-build-after.log.in time-of-build-before.log.in time-of-build-both.log +"$COQLIB"/tools/make-both-time-files.py time-of-build-after.log.in time-of-build-before.log.in time-of-build-both-user.log -diff -u time-of-build-both.log.expected time-of-build-both.log || exit $? +diff -u time-of-build-both-user.log.expected time-of-build-both-user.log || exit $? + +"$COQLIB"/tools/make-both-time-files.py --real time-of-build-after.log.in time-of-build-before.log.in time-of-build-both-real.log + +diff -u time-of-build-both-real.log.expected time-of-build-both-real.log || exit $? 
diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both-real.log.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both-real.log.expected new file mode 100644 index 0000000000..ea600b000e --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both-real.log.expected @@ -0,0 +1,26 @@ +After | File Name | Before || Change | % Change +---------------------------------------------------------------------------------------------- +20m46.07s | Total | 23m06.30s || -2m20.23s | -10.11% +---------------------------------------------------------------------------------------------- +4m16.77s | Specific/X25519/C64/ladderstep | 5m16.83s || -1m00.06s | -18.95% +3m01.77s | Specific/solinas32_2e255m765_13limbs/femul | 3m27.94s || -0m26.16s | -12.58% +2m35.79s | Specific/solinas32_2e255m765_12limbs/femul | 2m59.21s || -0m23.42s | -13.06% +3m22.96s | Specific/NISTP256/AMD64/femul | 3m37.80s || -0m14.84s | -6.81% +0m39.72s | Specific/X25519/C64/femul | 0m42.98s || -0m03.25s | -7.58% +0m38.19s | Specific/NISTP256/AMD64/feadd | 0m40.48s || -0m02.28s | -5.65% +0m34.35s | Specific/X25519/C64/freeze | 0m36.42s || -0m02.07s | -5.68% +0m33.08s | Specific/X25519/C64/fesquare | 0m35.23s || -0m02.14s | -6.10% +0m31.00s | Specific/NISTP256/AMD64/feopp | 0m32.08s || -0m01.07s | -3.36% +0m27.81s | Specific/NISTP256/AMD64/fenz | 0m28.91s || -0m01.10s | -3.80% +0m27.11s | Specific/X25519/C64/fecarry | 0m28.85s || -0m01.74s | -6.03% +0m24.71s | Specific/X25519/C64/fesub | 0m26.11s || -0m01.39s | -5.36% +0m49.44s | Specific/solinas32_2e255m765_13limbs/Synthesis | 0m49.50s || -0m00.06s | -0.12% +0m43.34s | Specific/NISTP256/AMD64/fesub | 0m43.78s || -0m00.43s | -1.00% +0m40.13s | Specific/solinas32_2e255m765_12limbs/Synthesis | 0m39.53s || +0m00.60s | +1.51% +0m22.81s | Specific/X25519/C64/feadd | 0m23.43s || -0m00.62s 
| -2.64% +0m13.45s | Specific/NISTP256/AMD64/Synthesis | 0m13.74s || -0m00.29s | -2.11% +0m11.15s | Specific/X25519/C64/Synthesis | 0m11.23s || -0m00.08s | -0.71% +0m07.33s | Compilers/Z/Bounds/Pipeline/Definition | 0m07.40s || -0m00.07s | -0.94% +0m01.93s | Compilers/Z/Bounds/Pipeline/ReflectiveTactics | 0m01.73s || +0m00.19s | +11.56% +0m01.85s | Specific/Framework/SynthesisFramework | 0m01.95s || -0m00.09s | -5.12% +0m01.38s | Compilers/Z/Bounds/Pipeline | 0m01.18s || +0m00.19s | +16.94%
\ No newline at end of file diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both-user.log.expected index 159e645512..159e645512 100644 --- a/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both.log.expected +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/001-correct-diff-sorting-order/time-of-build-both-user.log.expected diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/run.sh b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/run.sh index 4f39b3ce7e..d4614749e7 100755 --- a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/run.sh +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/run.sh @@ -5,6 +5,10 @@ set -e cd "$(dirname "${BASH_SOURCE[0]}")" -"$COQLIB"/tools/make-one-time-file.py time-of-build.log.in time-of-build-pretty.log +"$COQLIB"/tools/make-one-time-file.py time-of-build.log.in time-of-build-pretty-user.log -diff -u time-of-build-pretty.log.expected time-of-build-pretty.log || exit $? +diff -u time-of-build-pretty-user.log.expected time-of-build-pretty-user.log || exit $? + +"$COQLIB"/tools/make-one-time-file.py time-of-build.log.in time-of-build-pretty-real.log + +diff -u time-of-build-pretty-real.log.expected time-of-build-pretty-real.log || exit $? 
diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty-real.log.expected index b9739ddb1d..b9739ddb1d 100644 --- a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty.log.expected +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty-real.log.expected diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty-user.log.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty-user.log.expected new file mode 100644 index 0000000000..b9739ddb1d --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/002-single-file-sorting/time-of-build-pretty-user.log.expected @@ -0,0 +1,26 @@ +Time | File Name +---------------------------------------------------------- +19m16.04s | Total +---------------------------------------------------------- +4m01.34s | Specific/X25519/C64/ladderstep +3m09.62s | Specific/NISTP256/AMD64/femul +2m48.52s | Specific/solinas32_2e255m765_13limbs/femul +2m23.70s | Specific/solinas32_2e255m765_12limbs/femul +0m45.75s | Specific/solinas32_2e255m765_13limbs/Synthesis +0m39.59s | Specific/NISTP256/AMD64/fesub +0m36.92s | Specific/solinas32_2e255m765_12limbs/Synthesis +0m36.32s | Specific/X25519/C64/femul +0m35.40s | Specific/NISTP256/AMD64/feadd +0m31.50s | Specific/X25519/C64/freeze +0m30.13s | Specific/X25519/C64/fesquare +0m28.51s | Specific/NISTP256/AMD64/feopp +0m25.50s | Specific/NISTP256/AMD64/fenz +0m24.99s | Specific/X25519/C64/fecarry +0m22.65s | Specific/X25519/C64/fesub +0m20.93s | Specific/X25519/C64/feadd +0m12.55s | Specific/NISTP256/AMD64/Synthesis +0m10.37s | Specific/X25519/C64/Synthesis +0m07.18s | Compilers/Z/Bounds/Pipeline/Definition +0m01.72s | 
Compilers/Z/Bounds/Pipeline/ReflectiveTactics +0m01.67s | Specific/Framework/SynthesisFramework +0m01.19s | Compilers/Z/Bounds/Pipeline
\ No newline at end of file diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo-real.v.timing.diff.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo-real.v.timing.diff.expected new file mode 100644 index 0000000000..726c19a2e2 --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo-real.v.timing.diff.expected @@ -0,0 +1,29 @@ +After | Code | Before || Change | % Change +----------------------------------------------------------------------------------------------------------- +0m01.23s | Total | 0m01.28s || -0m00.04s | -3.50% +----------------------------------------------------------------------------------------------------------- +0m00.53s | Chars 260-284 ~ 280-304 [(vm_compute;~reflexivity).] | 0m00.566s || -0m00.03s | -6.36% +0m00.4s | Chars 285-289 ~ 305-309 [Qed.] | 0m00.411s || -0m00.01s | -2.67% +0m00.194s | Chars 031-064 ~ 031-064 [Require~Import~Coq.ZArith.ZArith.] | 0m00.192s || +0m00.00s | +1.04% +0m00.114s | Chars 000-030 ~ 000-030 [Require~Import~Coq.Lists.List.] | 0m00.114s || +0m00.00s | +0.00% +0m00.s | Chars 065-075 ~ 065-075 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 078-086 ~ 078-086 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 078-090 ~ 078-090 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 087-091 ~ 091-095 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 092-102 ~ 096-106 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 105-113 ~ 109-117 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 105-117 ~ 109-121 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 114-118 ~ 122-126 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 119-129 ~ 127-137 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 132-140 ~ 140-148 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 132-144 ~ 140-152 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 141-145 ~ 153-157 [Qed.] 
| 0m00.s || +0m00.00s | N/A +0m00.s | Chars 146-156 ~ 158-168 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 159-167 ~ 171-179 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 159-171 ~ 171-183 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 168-172 ~ 184-188 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 173-183 ~ 189-199 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 186-194 ~ 202-210 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 186-198 ~ 202-214 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 195-199 ~ 215-219 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 200-257 ~ 220-277 [Goal~_~List.repeat~Z.div_eucl~...] | 0m00.s || +0m00.00s | N/A
\ No newline at end of file diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo-user.v.timing.diff.expected b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo-user.v.timing.diff.expected new file mode 100644 index 0000000000..f6be1d936d --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo-user.v.timing.diff.expected @@ -0,0 +1,29 @@ +After | Code | Before || Change | % Change +----------------------------------------------------------------------------------------------------------- +0m01.14s | Total | 0m01.15s || -0m00.00s | -0.77% +----------------------------------------------------------------------------------------------------------- +0m00.504s | Chars 260-284 ~ 280-304 [(vm_compute;~reflexivity).] | 0m00.528s || -0m00.02s | -4.54% +0m00.384s | Chars 285-289 ~ 305-309 [Qed.] | 0m00.4s || -0m00.01s | -4.00% +0m00.172s | Chars 031-064 ~ 031-064 [Require~Import~Coq.ZArith.ZArith.] | 0m00.156s || +0m00.01s | +10.25% +0m00.083s | Chars 000-030 ~ 000-030 [Require~Import~Coq.Lists.List.] | 0m00.072s || +0m00.01s | +15.27% +0m00.004s | Chars 200-257 ~ 220-277 [Goal~_~List.repeat~Z.div_eucl~...] | 0m00.s || +0m00.00s | ∞ +0m00.s | Chars 065-075 ~ 065-075 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 078-086 ~ 078-086 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 078-090 ~ 078-090 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 087-091 ~ 091-095 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 092-102 ~ 096-106 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 105-113 ~ 109-117 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 105-117 ~ 109-121 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 114-118 ~ 122-126 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 119-129 ~ 127-137 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 132-140 ~ 140-148 [exact~I.] 
| N/A || +0m00.00s | N/A + N/A | Chars 132-144 ~ 140-152 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 141-145 ~ 153-157 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 146-156 ~ 158-168 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 159-167 ~ 171-179 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 159-171 ~ 171-183 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 168-172 ~ 184-188 [Qed.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 173-183 ~ 189-199 [Goal~_~True.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 186-194 ~ 202-210 [exact~I.] | N/A || +0m00.00s | N/A + N/A | Chars 186-198 ~ 202-214 [constructor.] | 0m00.s || +0m00.00s | N/A +0m00.s | Chars 195-199 ~ 215-219 [Qed.] | 0m00.s || +0m00.00s | N/A
\ No newline at end of file diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo.v.after-timing.in b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo.v.after-timing.in new file mode 100644 index 0000000000..c58e7d82d1 --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo.v.after-timing.in @@ -0,0 +1,20 @@ +Chars 0 - 30 [Require~Import~Coq.Lists.List.] 0.114 secs (0.083u,0.032s) +Chars 31 - 64 [Require~Import~Coq.ZArith.ZArith.] 0.194 secs (0.172u,0.023s) +Chars 65 - 75 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 78 - 86 [exact~I.] 0. secs (0.u,0.s) +Chars 87 - 91 [Qed.] 0. secs (0.u,0.s) +Chars 92 - 102 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 105 - 113 [exact~I.] 0. secs (0.u,0.s) +Chars 114 - 118 [Qed.] 0. secs (0.u,0.s) +Chars 119 - 129 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 132 - 140 [exact~I.] 0. secs (0.u,0.s) +Chars 141 - 145 [Qed.] 0. secs (0.u,0.s) +Chars 146 - 156 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 159 - 167 [exact~I.] 0. secs (0.u,0.s) +Chars 168 - 172 [Qed.] 0. secs (0.u,0.s) +Chars 173 - 183 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 186 - 194 [exact~I.] 0. secs (0.u,0.s) +Chars 195 - 199 [Qed.] 0. secs (0.u,0.s) +Chars 200 - 257 [Goal~_~List.repeat~Z.div_eucl~...] 0. secs (0.004u,0.s) +Chars 260 - 284 [(vm_compute;~reflexivity).] 0.53 secs (0.504u,0.024s) +Chars 285 - 289 [Qed.] 0.4 secs (0.384u,0.016s) diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo.v.before-timing.in b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo.v.before-timing.in new file mode 100644 index 0000000000..b49c1b1cb7 --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo.v.before-timing.in @@ -0,0 +1,20 @@ +Chars 0 - 30 [Require~Import~Coq.Lists.List.] 0.114 secs (0.072u,0.044s) +Chars 31 - 64 [Require~Import~Coq.ZArith.ZArith.] 
0.192 secs (0.156u,0.035s) +Chars 65 - 75 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 78 - 90 [constructor.] 0. secs (0.u,0.s) +Chars 91 - 95 [Qed.] 0. secs (0.u,0.s) +Chars 96 - 106 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 109 - 121 [constructor.] 0. secs (0.u,0.s) +Chars 122 - 126 [Qed.] 0. secs (0.u,0.s) +Chars 127 - 137 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 140 - 152 [constructor.] 0. secs (0.u,0.004s) +Chars 153 - 157 [Qed.] 0. secs (0.u,0.s) +Chars 158 - 168 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 171 - 183 [constructor.] 0. secs (0.u,0.s) +Chars 184 - 188 [Qed.] 0. secs (0.u,0.s) +Chars 189 - 199 [Goal~_~True.] 0. secs (0.u,0.s) +Chars 202 - 214 [constructor.] 0. secs (0.u,0.s) +Chars 215 - 219 [Qed.] 0. secs (0.u,0.s) +Chars 220 - 277 [Goal~_~List.repeat~Z.div_eucl~...] 0. secs (0.u,0.s) +Chars 280 - 304 [(vm_compute;~reflexivity).] 0.566 secs (0.528u,0.039s) +Chars 305 - 309 [Qed.] 0.411 secs (0.4u,0.008s) diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo_after.v b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo_after.v new file mode 100644 index 0000000000..7141065b52 --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo_after.v @@ -0,0 +1,20 @@ +Require Import Coq.Lists.List. +Require Import Coq.ZArith.ZArith. +Goal True. + exact I. +Qed. +Goal True. + exact I. +Qed. +Goal True. + exact I. +Qed. +Goal True. + exact I. +Qed. +Goal True. + exact I. +Qed. +Goal List.repeat Z.div_eucl 5 = List.repeat Z.div_eucl 5. + vm_compute; reflexivity. +Qed. diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo_before.v b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo_before.v new file mode 100644 index 0000000000..e152689ee4 --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/foo_before.v @@ -0,0 +1,20 @@ +Require Import Coq.Lists.List. 
+Require Import Coq.ZArith.ZArith. +Goal True. + constructor. +Qed. +Goal True. + constructor. +Qed. +Goal True. + constructor. +Qed. +Goal True. + constructor. +Qed. +Goal True. + constructor. +Qed. +Goal List.repeat Z.div_eucl 5 = List.repeat Z.div_eucl 5. + vm_compute; reflexivity. +Qed. diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/run.sh b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/run.sh new file mode 100755 index 0000000000..980bf9cf01 --- /dev/null +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/004-per-file-fuzz/run.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -x +set -e + +cd "$(dirname "${BASH_SOURCE[0]}")" + +"$COQLIB"/tools/make-both-single-timing-files.py --fuzz=20 foo.v.after-timing.in foo.v.before-timing.in foo-real.v.timing.diff || exit $? + +diff -u foo-real.v.timing.diff.expected foo-real.v.timing.diff || exit $? + +"$COQLIB"/tools/make-both-single-timing-files.py --fuzz=20 --user foo.v.after-timing.in foo.v.before-timing.in foo-user.v.timing.diff || exit $? + +diff -u foo-user.v.timing.diff.expected foo-user.v.timing.diff || exit $? diff --git a/test-suite/coq-makefile/timing/precomputed-time-tests/run.sh b/test-suite/coq-makefile/timing/precomputed-time-tests/run.sh index cfacf738a3..4b5acb9168 100755 --- a/test-suite/coq-makefile/timing/precomputed-time-tests/run.sh +++ b/test-suite/coq-makefile/timing/precomputed-time-tests/run.sh @@ -10,3 +10,4 @@ export COQLIB ./001-correct-diff-sorting-order/run.sh ./002-single-file-sorting/run.sh ./003-non-utf8/run.sh +./004-per-file-fuzz/run.sh diff --git a/test-suite/output-coqtop/ShowGoal.out b/test-suite/output-coqtop/ShowGoal.out index 2eadd22db8..42d9ff31e9 100644 --- a/test-suite/output-coqtop/ShowGoal.out +++ b/test-suite/output-coqtop/ShowGoal.out @@ -52,19 +52,19 @@ x < 1 subgoal ============================
i = i
-x < goal ID 16 at state 5
+x < goal ID 13 at state 5
i : nat
============================
i = ?j /\ ?j = ?k /\ i = ?k
-x < goal ID 16 at state 7
+x < goal ID 13 at state 7
i : nat
============================
i = i /\ i = ?k /\ i = ?k
-x < goal ID 16 at state 9
+x < goal ID 13 at state 9
i : nat
============================
diff --git a/test-suite/output-coqtop/ShowGoal.v b/test-suite/output-coqtop/ShowGoal.v index 9545254770..80996eb169 100644 --- a/test-suite/output-coqtop/ShowGoal.v +++ b/test-suite/output-coqtop/ShowGoal.v @@ -6,6 +6,6 @@ Proof using. trivial. split. trivial. -Show Goal 16 at 5. -Show Goal 16 at 7. -Show Goal 16 at 9. +Show Goal 13 at 5. +Show Goal 13 at 7. +Show Goal 13 at 9. diff --git a/test-suite/output/EqNotation.out b/test-suite/output/EqNotation.out new file mode 100644 index 0000000000..41500a75b9 --- /dev/null +++ b/test-suite/output/EqNotation.out @@ -0,0 +1,3 @@ +The command has indeed failed with message: +Cannot infer the implicit parameter A of eq whose type is +"Type". diff --git a/test-suite/output/EqNotation.v b/test-suite/output/EqNotation.v new file mode 100644 index 0000000000..21076472b8 --- /dev/null +++ b/test-suite/output/EqNotation.v @@ -0,0 +1,2 @@ +(* should mention "the implicit parameter A of eq" *) +Fail Type (forall x, x = x). diff --git a/test-suite/output/Implicit.out b/test-suite/output/Implicit.out index 5f22eb5d7c..ef7667936c 100644 --- a/test-suite/output/Implicit.out +++ b/test-suite/output/Implicit.out @@ -1,4 +1,4 @@ -compose (C:=nat) S +compose S : (nat -> nat) -> nat -> nat ex_intro (P:=fun _ : nat => True) (x:=0) I : ex (fun _ : nat => True) @@ -12,3 +12,8 @@ map id' (1 :: nil) : list nat map (id'' (A:=nat)) (1 :: nil) : list nat +fix f (x : nat) : option nat := match x with + | 0 => None + | S _ => x + end + : nat -> option nat diff --git a/test-suite/output/Implicit.v b/test-suite/output/Implicit.v index 306532c0df..a7c4399e38 100644 --- a/test-suite/output/Implicit.v +++ b/test-suite/output/Implicit.v @@ -51,3 +51,13 @@ Definition id'' (A:Type) (x:A) := x. Check map (@id'' nat) (1::nil). +Module MatchBranchesInContext. + +Set Implicit Arguments. +Set Contextual Implicit. + +Inductive option A := None | Some (a:A). +Coercion some_nat := @Some nat. +Check fix f x := match x with 0 => None | n => some_nat n end. 
+ +End MatchBranchesInContext. diff --git a/test-suite/output/ImplicitTypes.out b/test-suite/output/ImplicitTypes.out new file mode 100644 index 0000000000..824c260e92 --- /dev/null +++ b/test-suite/output/ImplicitTypes.out @@ -0,0 +1,26 @@ +forall b, b = b + : Prop +forall b : nat, b = b + : Prop +forall b : bool, @eq bool b b + : Prop +forall b : bool, b = b + : Prop +forall b c : bool, b = c + : Prop +forall c b : bool, b = c + : Prop +forall b1 b2, b1 = b2 + : Prop +fun b => b = b + : bool -> Prop +fix f b (n : nat) {struct n} : bool := + match n with + | 0 => b + | S p => f b p + end + : bool -> nat -> bool +∀ b c : bool, b = c + : Prop +∀ b1 b2, b1 = b2 + : Prop diff --git a/test-suite/output/ImplicitTypes.v b/test-suite/output/ImplicitTypes.v new file mode 100644 index 0000000000..dbc83f9229 --- /dev/null +++ b/test-suite/output/ImplicitTypes.v @@ -0,0 +1,37 @@ +Implicit Types b : bool. +Check forall b, b = b. + +(* Check the type is not used if not the reserved one *) +Check forall b:nat, b = b. + +(* Check full printing *) +Set Printing All. +Check forall b, b = b. +Unset Printing All. + +(* Check printing of type *) +Unset Printing Use Implicit Types. +Check forall b, b = b. +Set Printing Use Implicit Types. + +(* Check factorization: we give priority on factorization over implicit type *) +Check forall b c, b = c. +Check forall c b, b = c. + +(* Check factorization of implicit types *) +Check forall b1 b2, b1 = b2. + +(* Check in "fun" *) +Check fun b => b = b. + +(* Check in binders *) +Check fix f b n := match n with 0 => b | S p => f b p end. + +(* Check in notations *) +Module Notation. + Notation "∀ x .. y , P" := (forall x, .. (forall y, P) ..) + (at level 200, x binder, y binder, right associativity, + format "'[ ' '[ ' ∀ x .. y ']' , '/' P ']'") : type_scope. + Check forall b c, b = c. + Check forall b1 b2, b1 = b2. +End Notation. 
diff --git a/test-suite/output/Notations.out b/test-suite/output/Notations.out index b870fa6f6f..53ad8a9612 100644 --- a/test-suite/output/Notations.out +++ b/test-suite/output/Notations.out @@ -107,14 +107,15 @@ fun x : option Z => match x with end : option Z -> Z fun x : option Z => match x with - | SOME2 x0 => x0 - | NONE2 => 0 + | SOME3 _ x0 => x0 + | NONE3 _ => 0 end : option Z -> Z -fun x : list ?T => match x with - | NIL => NONE2 - | (_ :') t => SOME2 t - end +fun x : list ?T => +match x with +| NIL => NONE3 (list ?T) +| (_ :') t => SOME3 (list ?T) t +end : list ?T -> option (list ?T) where ?T : [x : list ?T x1 : list ?T x0 := x1 : list ?T |- Type] (x, x1, diff --git a/test-suite/output/Notations4.out b/test-suite/output/Notations4.out index f65696e464..e121b5e86c 100644 --- a/test-suite/output/Notations4.out +++ b/test-suite/output/Notations4.out @@ -71,3 +71,39 @@ The command has indeed failed with message: The format is not the same on the right- and left-hand sides of the special token "..". The command has indeed failed with message: The format is not the same on the right- and left-hand sides of the special token "..". +Entry constr:expr is +[ "201" RIGHTA + [ "{"; constr:operconstr LEVEL "200"; "}" ] ] + +fun x : nat => [ x ] + : nat -> nat +fun x : nat => [x] + : nat -> nat +∀ x : nat, x = x + : Prop +File "stdin", line 219, characters 0-160: +Warning: Notation "∀ _ .. _ , _" was already defined with a different format +in scope type_scope. [notation-incompatible-format,parsing] +∀x : nat,x = x + : Prop +File "stdin", line 232, characters 0-60: +Warning: Notation "_ %%% _" was already defined with a different format. +[notation-incompatible-format,parsing] +File "stdin", line 236, characters 0-64: +Warning: Notation "_ %%% _" was already defined with a different format. +[notation-incompatible-format,parsing] +File "stdin", line 241, characters 0-62: +Warning: Lonely notation "_ %%%% _" was already defined with a different +format. 
[notation-incompatible-format,parsing] +3 %% 4 + : nat +3 %% 4 + : nat +3 %% 4 + : nat +File "stdin", line 269, characters 0-61: +Warning: The format modifier is irrelevant for only parsing rules. +[irrelevant-format-only-parsing,parsing] +File "stdin", line 273, characters 0-63: +Warning: The only parsing modifier has no effect in Reserved Notation. +[irrelevant-reserved-notation-only-parsing,parsing] diff --git a/test-suite/output/Notations4.v b/test-suite/output/Notations4.v index 4de6ce19b4..1cf0d919b1 100644 --- a/test-suite/output/Notations4.v +++ b/test-suite/output/Notations4.v @@ -184,3 +184,92 @@ Fail Notation " {@ T1 ; T2 ; .. ; Tn } " := (format "'[v' {@ '[' T1 ']' ; '//' '[' T2 ']' ; '//' .. ; '//' '[' Tn ']' } ']'"). End M. + +Module Bug11331. + +Declare Custom Entry expr. +Notation "{ p }" := (p) (in custom expr at level 201, p constr). +Print Custom Grammar expr. + +End Bug11331. + +Module Bug_6082. + +Declare Scope foo. +Notation "[ x ]" := (S x) (format "[ x ]") : foo. +Open Scope foo. +Check fun x => S x. + +Declare Scope bar. +Notation "[ x ]" := (S x) (format "[ x ]") : bar. +Open Scope bar. + +Check fun x => S x. + +End Bug_6082. + +Module Bug_7766. + +Notation "∀ x .. y , P" := (forall x, .. (forall y, P) ..) + (at level 200, x binder, y binder, right associativity, + format "'[ ' ∀ x .. y ']' , P") : type_scope. + +Check forall (x : nat), x = x. + +Notation "∀ x .. y , P" := (forall x, .. (forall y, P) ..) + (at level 200, x binder, y binder, right associativity, + format "∀ x .. y , P") : type_scope. + +Check forall (x : nat), x = x. + +End Bug_7766. + +Module N. + +(* Other tests about generic and specific formats *) + +Reserved Notation "x %%% y" (format "x %%% y", at level 35). +Reserved Notation "x %%% y" (format "x %%% y", at level 35). + +(* Not using the reserved format, we warn *) + +Notation "x %%% y" := (x+y) (format "x %%% y", at level 35). 
+ +(* Same scope (here lonely): we warn *) + +Notation "x %%%% y" := (x+y) (format "x %%%% y", at level 35). +Notation "x %%%% y" := (x+y) (format "x %%%% y", at level 35). + +(* Test if the format for a specific notation becomes the default + generic format or if the generic format, in the absence of a + Reserved Notation, is the one canonically obtained from the + notation *) + +Declare Scope foo_scope. +Declare Scope bar_scope. +Declare Scope bar'_scope. +Notation "x %% y" := (x+y) (at level 47, format "x %% y") : foo_scope. +Open Scope foo_scope. +Check 3 %% 4. + +(* No scope, we inherit the initial format *) + +Notation "x %% y" := (x*y) : bar_scope. (* Inherit the format *) +Open Scope bar_scope. +Check 3 %% 4. + +(* Different scope and no reserved notation, we don't warn *) + +Notation "x %% y" := (x*y) (at level 47, format "x %% y") : bar'_scope. +Open Scope bar'_scope. +Check 3 %% 4. + +(* Warn for combination of "only parsing" and "format" *) + +Notation "###" := 0 (at level 0, only parsing, format "###"). + +(* In reserved notation, warn only for the "only parsing" *) + +Reserved Notation "##" (at level 0, only parsing, format "##"). + +End N. 
diff --git a/test-suite/output/Notations5.out b/test-suite/output/Notations5.out index 83dd2f40fb..f59306c454 100644 --- a/test-suite/output/Notations5.out +++ b/test-suite/output/Notations5.out @@ -6,13 +6,13 @@ where ?B : [ |- Type] p 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b -p 0 0 (B:=bool) +p 0 0 : forall b : bool, 0 = 0 /\ b = b -p 0 0 (B:=bool) +p 0 0 : forall b : bool, 0 = 0 /\ b = b -p (A:=nat) +p : forall (a1 a2 : nat) (B : Type) (b : B), a1 = a2 /\ b = b -p (A:=nat) +p : forall (a1 a2 : nat) (B : Type) (b : B), a1 = a2 /\ b = b @p nat 0 0 : forall (B : Type) (b : B), 0 = 0 /\ b = b @@ -44,16 +44,16 @@ p : forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b f x true : 0 = 0 /\ true = true -f x (B:=bool) +f x : forall b : bool, 0 = 0 /\ b = b -f x (B:=bool) +f x : forall b : bool, 0 = 0 /\ b = b @f nat : forall a1 a2 : nat, T a1 a2 -> forall (B : Type) (b : B), a1 = a2 /\ b = b -f (a1:=0) (a2:=0) +f : T 0 0 -> forall (B : Type) (b : B), 0 = 0 /\ b = b -f (a1:=0) (a2:=0) +f : T 0 0 -> forall (B : Type) (b : B), 0 = 0 /\ b = b @f : forall (A : Type) (a1 a2 : A), @@ -62,27 +62,27 @@ f : T 0 0 -> forall (B : Type) (b : B), 0 = 0 /\ b = b x.(f) true : 0 = 0 /\ true = true -x.(f) (B:=bool) +x.(f) : forall b : bool, 0 = 0 /\ b = b -x.(f) (B:=bool) +x.(f) : forall b : bool, 0 = 0 /\ b = b @f nat : forall a1 a2 : nat, T a1 a2 -> forall (B : Type) (b : B), a1 = a2 /\ b = b -f (a1:=0) (a2:=0) +f : T 0 0 -> forall (B : Type) (b : B), 0 = 0 /\ b = b -f (a1:=0) (a2:=0) +f : T 0 0 -> forall (B : Type) (b : B), 0 = 0 /\ b = b @f : forall (A : Type) (a1 a2 : A), T a1 a2 -> forall (B : Type) (b : B), a1 = a2 /\ b = b f : T 0 0 -> forall (B : Type) (b : B), 0 = 0 /\ b = b -p +u ?A : forall (a1 a2 : ?A) (B : Type) (b : B), a1 = a2 /\ b = b where ?A : [ |- Type] -p +u ?A : forall (a1 a2 : ?A) (B : Type) (b : B), a1 = a2 /\ b = b where ?A : [ |- Type] @@ -90,23 +90,23 @@ u : forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b u : 
forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b -p 0 0 +u nat 0 0 ?B : forall b : ?B, 0 = 0 /\ b = b where ?B : [ |- Type] -p 0 0 +u nat 0 0 bool : forall b : bool, 0 = 0 /\ b = b -@p nat 0 0 +u nat 0 0 : forall (B : Type) (b : B), 0 = 0 /\ b = b -@p nat 0 0 +u nat 0 0 : forall (B : Type) (b : B), 0 = 0 /\ b = b u : forall (a1 a2 : ?A) (B : Type) (b : B), a1 = a2 /\ b = b where ?A : [ |- Type] -u +@u : forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b -u +@u : forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b u : forall (a1 a2 : ?A) (B : Type) (b : B), a1 = a2 /\ b = b @@ -138,7 +138,7 @@ v 0 : forall b : ?B, 0 = 0 /\ b = b where ?B : [ |- Type] -v 0 (B:=bool) true +v 0 true : 0 = 0 /\ true = true v : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b @@ -158,7 +158,7 @@ v 0 : forall b : ?B, 0 = 0 /\ b = b where ?B : [ |- Type] -v 0 (B:=bool) true +v 0 true : 0 = 0 /\ true = true v : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b @@ -188,15 +188,15 @@ where : forall b : ?B, 0 = 0 /\ b = b where ?B : [ |- Type] -## 0 0 (B:=bool) true +## 0 0 true : 0 = 0 /\ true = true -## 0 0 (B:=bool) true +## 0 0 true : 0 = 0 /\ true = true ## 0 0 (B:=bool) : forall b : bool, 0 = 0 /\ b = b ## 0 0 (B:=bool) : forall b : bool, 0 = 0 /\ b = b -p +## ?A : forall (a1 a2 : ?A) (B : Type) (b : B), a1 = a2 /\ b = b where ?A : [ |- Type] @@ -204,45 +204,109 @@ where : forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b ## : forall (A : Type) (a1 a2 : A) (B : Type) (b : B), a1 = a2 /\ b = b -p 0 +## nat 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b -p 0 +## nat 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b -@p nat 0 0 +## nat 0 0 : forall (B : Type) (b : B), 0 = 0 /\ b = b -p 0 0 +## nat 0 0 ?B : forall b : ?B, 0 = 0 /\ b = b where ?B : [ |- Type] -p 0 0 +## nat 0 0 ?B : forall b : ?B, 0 = 0 /\ b = b where ?B : [ |- Type] -p 0 0 (B:=bool) +## nat 0 0 bool : forall b : bool, 0 = 0 /\ b = b 
-p 0 0 true +## nat 0 0 bool true : 0 = 0 /\ true = true ## 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b ## 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b +## 0 0 + : forall b : ?B, 0 = 0 /\ b = b +where +?B : [ |- Type] ## 0 0 (B:=bool) : forall b : bool, 0 = 0 /\ b = b ## 0 0 (B:=bool) : forall b : bool, 0 = 0 /\ b = b -## 0 0 (B:=bool) true +## 0 0 true : 0 = 0 /\ true = true -## 0 0 (B:=bool) true +## 0 0 true : 0 = 0 /\ true = true ## 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b ## 0 : forall (a2 : nat) (B : Type) (b : B), 0 = a2 /\ b = b +## 0 0 + : forall b : ?B, 0 = 0 /\ b = b +where +?B : [ |- Type] ## 0 0 (B:=bool) : forall b : bool, 0 = 0 /\ b = b ## 0 0 (B:=bool) : forall b : bool, 0 = 0 /\ b = b -## 0 0 (B:=bool) true +## 0 0 true : 0 = 0 /\ true = true -## 0 0 (B:=bool) true +## 0 0 true : 0 = 0 /\ true = true +# 0 0 bool 0%bool + : T +fun a : T => match a with + | # 0 0 _ _ => 1 + | _ => 2 + end + : T -> nat +#' 0 0 0%bool + : T +fun a : T => match a with + | #' 0 0 _ => 1 + | _ => 2 + end + : T -> nat +## 0 0 0%bool + : T +fun a : T => match a with + | ## 0 0 _ => 1 + | _ => 2 + end + : T -> nat +##' 0 0 0%bool + : T +fun a : T => match a with + | ##' 0 0 _ => 1 + | _ => 2 + end + : T -> nat +P 0 0 bool 0%bool + : T +fun a : T => match a with + | P 0 0 _ _ => 1 + | _ => 2 + end + : T -> nat +P' 0 0 0%bool + : T +fun a : T => match a with + | P' 0 0 _ => 1 + | _ => 2 + end + : T -> nat +Q 0 0 0%bool + : T +fun a : T => match a with + | Q 0 0 _ => 1 + | _ => 2 + end + : T -> nat +Q' 0 0 0%bool + : T +fun a : T => match a with + | Q' 0 0 _ => 1 + | _ => 2 + end + : T -> nat diff --git a/test-suite/output/Notations5.v b/test-suite/output/Notations5.v index b3bea929ba..09d5e31c48 100644 --- a/test-suite/output/Notations5.v +++ b/test-suite/output/Notations5.v @@ -115,21 +115,21 @@ Module AppliedTermsPrinting. Notation u := @p. Check u _. - (* p *) + (* u ?A *) Check p. - (* p *) + (* u ?A *) Check @p. (* u *) Check u. 
(* u *) Check p 0 0. - (* p 0 0 *) + (* u nat 0 0 ?B *) Check u nat 0 0 bool. - (* p 0 0 -- WEAKNESS should ideally be (B:=bool) *) + (* u nat 0 0 bool *) Check u nat 0 0. - (* @p nat 0 0 *) + (* u nat 0 0 *) Check @p nat 0 0. - (* @p nat 0 0 *) + (* u nat 0 0 *) End AtAbbreviationForApplicationHead. @@ -145,9 +145,9 @@ Module AppliedTermsPrinting. Check p. (* u *) Check @p. - (* u -- BUG *) + (* @u *) Check @u. - (* u -- BUG *) + (* @u *) Check u. (* u *) Check p 0 0. @@ -181,7 +181,7 @@ Module AppliedTermsPrinting. Check v 0. (* v 0 *) Check v 0 true. - (* v 0 (B:=bool) true -- BUG *) + (* v 0 true *) Check @p nat 0. (* v *) Check @p nat 0 0. @@ -209,7 +209,7 @@ Module AppliedTermsPrinting. Check v 0. (* v 0 *) Check v 0 true. - (* v 0 (B:=bool) true -- BUG *) + (* v 0 true *) Check @p nat 0. (* v *) Check @p nat 0 0. @@ -243,9 +243,9 @@ Module AppliedTermsPrinting. Check ## 0 0. (* ## 0 0 *) Check p 0 0 true. - (* ## 0 0 (B:=bool) true -- BUG B should not be displayed *) + (* ## 0 0 true *) Check ## 0 0 true. - (* ## 0 0 (B:=bool) true -- BUG B should not be displayed *) + (* ## 0 0 true *) Check p 0 0 (B:=bool). (* ## 0 0 (B:=bool) *) Check ## 0 0 (B:=bool). @@ -263,25 +263,25 @@ Module AppliedTermsPrinting. Notation "##" := @p (at level 0). Check p. - (* p *) + (* ## ?A *) Check @p. (* ## *) Check ##. (* ## *) Check p 0. - (* p 0 -- why not "## nat 0" *) + (* ## nat 0 *) Check ## nat 0. - (* p 0 *) + (* ## nat 0 *) Check ## nat 0 0. - (* @p nat 0 0 *) + (* ## nat 0 0 *) Check p 0 0. - (* p 0 0 *) + (* ## nat 0 0 ?B *) Check ## nat 0 0 _. - (* p 0 0 *) + (* ## nat 0 0 ?B *) Check ## nat 0 0 bool. - (* p 0 0 (B:=bool) *) + (* ## nat 0 0 bool *) Check ## nat 0 0 bool true. - (* p 0 0 true *) + (* ## nat 0 0 bool true *) End AtNotationForHeadApplication. @@ -298,16 +298,16 @@ Module AppliedTermsPrinting. (* ## 0 *) Check ## 0. (* ## 0 *) - (* Check ## 0 0. *) - (* Anomaly *) + Check ## 0 0. + (* ## 0 0 *) Check p 0 0 (B:=bool). 
(* ## 0 0 (B:=bool) *) - Check ## 0 0 bool. - (* ## 0 0 (B:=bool) -- INCONSISTENT parsing/printing *) + Check ## 0 0 (B:=bool). + (* ## 0 0 (B:=bool) *) Check p 0 0 true. - (* ## 0 0 (B:=bool) true -- BUG B should not be displayed *) - Check ## 0 0 bool true. - (* ## 0 0 (B:=bool) true -- INCONSISTENT parsing/printing + BUG B should not be displayed *) + (* ## 0 0 true *) + Check ## 0 0 true. + (* ## 0 0 true *) End NotationForPartialApplication. @@ -324,17 +324,75 @@ Module AppliedTermsPrinting. (* ## 0 *) Check ## 0. (* ## 0 *) - (* Check ## 0 0. *) - (* Anomaly *) + Check ## 0 0. + (* ## 0 0 *) Check p 0 0 (B:=bool). (* ## 0 0 (B:=bool) *) - Check ## 0 0 bool. - (* ## 0 0 (B:=bool) -- INCONSISTENT parsing/printing *) + Check ## 0 0 (B:=bool). + (* ## 0 0 (B:=bool) *) Check p 0 0 true. - (* ## 0 0 (B:=bool) true -- BUG B should not be displayed *) - Check ## 0 0 bool true. - (* ## 0 0 (B:=bool) true -- INCONSISTENCY parsing/printing + BUG B should not be displayed *) + (* ## 0 0 true *) + Check ## 0 0 true. + (* ## 0 0 true *) End AtNotationForPartialApplication. End AppliedTermsPrinting. + +Module AppliedPatternsPrinting. + + (* Other tests testing inheritance of scope and implicit in + term and pattern for parsing and printing *) + + Inductive T := p (a:nat) (b:bool) {B} (b:B) : T. + Notation "0" := true : bool_scope. + + Module A. + Notation "#" := @p (at level 0). + Check # 0 0 _ true. + Check fun a => match a with # 0 0 _ _ => 1 | _ => 2 end. (* !! *) + End A. + + Module B. + Notation "#'" := p (at level 0). + Check #' 0 0 true. + Check fun a => match a with #' 0 0 _ => 1 | _ => 2 end. + End B. + + Module C. + Notation "## q" := (@p q) (at level 0, q at level 0). + Check ## 0 0 true. + Check fun a => match a with ## 0 0 _ => 1 | _ => 2 end. + End C. + + Module D. + Notation "##' q" := (p q) (at level 0, q at level 0). + Check ##' 0 0 true. + Check fun a => match a with ##' 0 0 _ => 1 | _ => 2 end. + End D. + + Module E. + Notation P := @ p. 
+ Check P 0 0 _ true. + Check fun a => match a with P 0 0 _ _ => 1 | _ => 2 end. + End E. + + Module F. + Notation P' := p. + Check P' 0 0 true. + Check fun a => match a with P' 0 0 _ => 1 | _ => 2 end. + End F. + + Module G. + Notation Q q := (@p q). + Check Q 0 0 true. + Check fun a => match a with Q 0 0 _ => 1 | _ => 2 end. + End G. + + Module H. + Notation Q' q := (p q). + Check Q' 0 0 true. + Check fun a => match a with Q' 0 0 _ => 1 | _ => 2 end. + End H. + +End AppliedPatternsPrinting. diff --git a/test-suite/output/NumeralNotations.out b/test-suite/output/NumeralNotations.out index 505dc52ebe..113384e9cf 100644 --- a/test-suite/output/NumeralNotations.out +++ b/test-suite/output/NumeralNotations.out @@ -75,7 +75,7 @@ The command has indeed failed with message: In environment v := 0 : nat The term "v" has type "nat" while it is expected to have type "wuint". -File "stdin", line 202, characters 2-72: +File "stdin", line 203, characters 2-72: Warning: The 'abstract after' directive has no effect when the parsing function (of_uint) targets an option type. [abstract-large-number-no-op,numbers] diff --git a/test-suite/output/NumeralNotations.v b/test-suite/output/NumeralNotations.v index c306b15ef3..22aff36d67 100644 --- a/test-suite/output/NumeralNotations.v +++ b/test-suite/output/NumeralNotations.v @@ -123,6 +123,7 @@ Module Test6. Export Scopes. Numeral Notation wnat of_uint to_uint : wnat_scope (abstract after 5000). End Notations. + Set Printing Coercions. Check let v := 0%wnat in v : wnat. Check wrap O. Timeout 1 Check wrap (ack 4 4). 
(* should be instantaneous *) diff --git a/test-suite/output/PrintingParentheses.out b/test-suite/output/PrintingParentheses.out new file mode 100644 index 0000000000..a5874f09a7 --- /dev/null +++ b/test-suite/output/PrintingParentheses.out @@ -0,0 +1,28 @@ +((1 + (2 * 3), 4), 5) + : (nat * nat) * nat +mult_n_Sm = +fun n m : nat => +nat_ind (fun n0 : nat => ((n0 * m) + n0) = (n0 * (S m))) eq_refl + (fun (p : nat) (H : ((p * m) + p) = (p * (S m))) => + let n0 := p * (S m) in + match H in (_ = y) return (((m + (p * m)) + (S p)) = (S (m + y))) with + | eq_refl => + eq_ind (S ((m + (p * m)) + p)) + (fun n1 : nat => n1 = (S (m + ((p * m) + p)))) + (eq_S ((m + (p * m)) + p) (m + ((p * m) + p)) + (nat_ind + (fun n1 : nat => ((n1 + (p * m)) + p) = (n1 + ((p * m) + p))) + eq_refl + (fun (n1 : nat) + (H0 : ((n1 + (p * m)) + p) = (n1 + ((p * m) + p))) => + f_equal_nat nat S ((n1 + (p * m)) + p) + (n1 + ((p * m) + p)) H0) m)) ((m + (p * m)) + (S p)) + (plus_n_Sm (m + (p * m)) p) + end) n + : forall n m : nat, ((n * m) + n) = (n * (S m)) + +Arguments mult_n_Sm (_ _)%nat_scope +1 :: (2 :: [3; 4]) + : list nat +{0 = 1} + {2 <= (4 + 5)} + : Set diff --git a/test-suite/output/PrintingParentheses.v b/test-suite/output/PrintingParentheses.v new file mode 100644 index 0000000000..190e122e2f --- /dev/null +++ b/test-suite/output/PrintingParentheses.v @@ -0,0 +1,10 @@ +Set Printing Parentheses. + +Check (1+2*3,4,5). +Print mult_n_Sm. + +Require Import List. +Import ListNotations. +Check [1;2;3;4]. + +Check {0=1}+{2<=4+5}. diff --git a/test-suite/output/Record.v b/test-suite/output/Record.v index d9a649fadc..71a8afa131 100644 --- a/test-suite/output/Record.v +++ b/test-suite/output/Record.v @@ -20,6 +20,8 @@ Check {| field := 5 |}. Check build_r 5. Check build_c 5. +Set Printing Records. + Record N := C { T : Type; _ : True }. Check fun x:N => let 'C _ p := x in p. Check fun x:N => let 'C T _ := x in T. 
diff --git a/test-suite/output/Show.out b/test-suite/output/Show.out index ca56f032ff..f02e442be5 100644 --- a/test-suite/output/Show.out +++ b/test-suite/output/Show.out @@ -1,10 +1,10 @@ -3 subgoals (ID 31) +3 subgoals (ID 29) H : 0 = 0 ============================ 1 = 1 -subgoal 2 (ID 35) is: +subgoal 2 (ID 33) is: 1 = S (S m') -subgoal 3 (ID 22) is: +subgoal 3 (ID 20) is: S (S n') = S m diff --git a/test-suite/output/UnivBinders.out b/test-suite/output/UnivBinders.out index 525ca48bee..04514c15cb 100644 --- a/test-suite/output/UnivBinders.out +++ b/test-suite/output/UnivBinders.out @@ -67,9 +67,9 @@ mono The command has indeed failed with message: Universe u already exists. bobmorane = -let tt := Type@{UnivBinders.34} in -let ff := Type@{UnivBinders.36} in tt -> ff - : Type@{max(UnivBinders.33,UnivBinders.35)} +let tt := Type@{UnivBinders.33} in +let ff := Type@{UnivBinders.35} in tt -> ff + : Type@{max(UnivBinders.32,UnivBinders.34)} The command has indeed failed with message: Universe u already bound. 
foo@{E M N} = @@ -142,16 +142,16 @@ Applied.infunct@{u v} = inmod@{u} -> Type@{v} : Type@{max(u+1,v+1)} (* u v |= *) -axfoo@{i UnivBinders.59 UnivBinders.60} : -Type@{UnivBinders.59} -> Type@{i} -(* i UnivBinders.59 UnivBinders.60 |= *) +axfoo@{i UnivBinders.58 UnivBinders.59} : +Type@{UnivBinders.58} -> Type@{i} +(* i UnivBinders.58 UnivBinders.59 |= *) axfoo is universe polymorphic Arguments axfoo _%type_scope Expands to: Constant UnivBinders.axfoo -axbar@{i UnivBinders.59 UnivBinders.60} : -Type@{UnivBinders.60} -> Type@{i} -(* i UnivBinders.59 UnivBinders.60 |= *) +axbar@{i UnivBinders.58 UnivBinders.59} : +Type@{UnivBinders.59} -> Type@{i} +(* i UnivBinders.58 UnivBinders.59 |= *) axbar is universe polymorphic Arguments axbar _%type_scope diff --git a/test-suite/output/bug_11342.out b/test-suite/output/bug_11342.out new file mode 100644 index 0000000000..9aac16de0d --- /dev/null +++ b/test-suite/output/bug_11342.out @@ -0,0 +1 @@ +without using diff --git a/test-suite/output/bug_11342.v b/test-suite/output/bug_11342.v new file mode 100644 index 0000000000..73131a3190 --- /dev/null +++ b/test-suite/output/bug_11342.v @@ -0,0 +1,12 @@ +(* -*- mode: coq; coq-prog-args: ("-vos") -*- *) + +Section foo. + Context {H:True}. + Theorem test1 : True. + Proof. + (* this gets printed with -vos because there's no annotation (either [Set + Default Proof Using ...] or an explicit [Proof using ...]) *) + idtac "without using". + exact I. + Qed. +End foo. diff --git a/test-suite/output/bug_11608.out b/test-suite/output/bug_11608.out new file mode 100644 index 0000000000..793ff768d4 --- /dev/null +++ b/test-suite/output/bug_11608.out @@ -0,0 +1 @@ +creating x without [Proof.] diff --git a/test-suite/output/bug_11608.v b/test-suite/output/bug_11608.v new file mode 100644 index 0000000000..3929082913 --- /dev/null +++ b/test-suite/output/bug_11608.v @@ -0,0 +1,13 @@ +(* -*- mode: coq; coq-prog-args: ("-vos") -*- *) + +Set Default Proof Using "Type". + +Section foo. 
+ Context (A:Type). + Definition x : option A. + (* this can get printed with -vos since without "Proof." there's no Proof + using, even with a default annotation. *) + idtac "creating x without [Proof.]". + exact None. + Qed. +End foo. diff --git a/test-suite/success/Notations2.v b/test-suite/success/Notations2.v index aa439fae12..382c252727 100644 --- a/test-suite/success/Notations2.v +++ b/test-suite/success/Notations2.v @@ -7,21 +7,21 @@ The convention is: Constant foo with implicit arguments and scopes used in a term or a pattern: foo do not deactivate further arguments and scopes - @foo deactivates further arguments and scopes - (foo x) deactivates further arguments and scopes - (@foo x) deactivates further arguments and scopes + @foo deactivate further arguments and scopes + (foo x) deactivate further arguments and scopes + (@foo x) deactivate further arguments and scopes Notations binding to foo: # := foo do not deactivate further arguments and scopes -# := @foo deactivates further arguments and scopes -# x := foo x deactivates further arguments and scopes -# x := @foo x deactivates further arguments and scopes +# := @foo deactivate further arguments and scopes +# x := foo x do not deactivate further arguments and scopes +# x := @foo x do not deactivate further arguments and scopes Abbreviations binding to foo: f := foo do not deactivate further arguments and scopes -f := @foo deactivates further arguments and scopes +f := @foo deactivate further arguments and scopes f x := foo x do not deactivate further arguments and scopes f x := @foo x do not deactivate further arguments and scopes *) @@ -62,18 +62,18 @@ Check c4 _ 0%bool _ 0%bool 0%bool : prod' bool bool. Check fun A (x :prod' bool A) => match x with c4 _ 0%bool _ y 0%bool => 2 | _ => 1 end. Check fun A (x :prod' bool A) => match x with (@pair') _ 0%bool _ y 0%bool => 2 | _ => 1 end. -(* 5. Notations stop further implicit arguments to be inserted and scopes to be used *) +(* 5. 
Non-@id notations inherit implicit arguments to be inserted and scopes to be used *) Notation "# x" := (pair' x) (at level 0, x at level 1). Check pair' 0 0 0 : prod' bool bool. -Check # 0 _ 0%bool 0%bool : prod' bool bool. -Check fun A (x :prod' bool A) => match x with # 0 _ y 0%bool => 2 | _ => 1 end. +Check # 0 0 0 : prod' bool bool. +Check fun A (x :prod' bool A) => match x with # 0 y 0 => 2 | _ => 1 end. -(* 6. Notations stop further implicit arguments to be inserted and scopes to be used *) +(* 6. Non-@id notations inherit implicit arguments to be inserted and scopes to be used *) Notation "## x" := ((@pair') _ x) (at level 0, x at level 1). Check (@pair') _ 0%bool _ 0%bool 0%bool : prod' bool bool. Check ((@pair') _ 0%bool) _ 0%bool 0%bool : prod' bool bool. -Check ## 0%bool _ 0%bool 0%bool : prod' bool bool. -Check fun A (x :prod' bool A) => match x with ## 0%bool _ y 0%bool => 2 | _ => 1 end. +Check ## 0%bool 0 0 : prod' bool bool. +Check fun A (x :prod' bool A) => match x with ## 0%bool y 0 => 2 | _ => 1 end. (* 7. Notations stop further implicit arguments to be inserted and scopes to be used *) Notation "###" := (@pair') (at level 0). @@ -86,10 +86,10 @@ Notation "####" := pair' (at level 0). Check #### 0 0 0 : prod' bool bool. Check fun A (x :prod' bool A) => match x with #### 0 y 0 => 2 | _ => 1 end. -(* 9. Notations w/o @ but arguments do not preserve further implicit arguments and scopes *) +(* 9. Non-@id notations inherit implicit arguments and scopes *) Notation "##### x" := (pair' x) (at level 0, x at level 1). -Check ##### 0 _ 0%bool 0%bool : prod' bool bool. -Check fun A (x :prod' bool A) => match x with ##### 0 _ y 0%bool => 2 | _ => 1 end. +Check ##### 0 0 0 : prod' bool bool. +Check fun A (x :prod' bool A) => match x with ##### 0 y 0 => 2 | _ => 1 end. (* 10. 
Check computation of binding variable through other notations *) (* it should be detected as binding variable and the scopes not being checked *) @@ -172,3 +172,33 @@ Notation "#" := 0 (only printing). Print Visibility. End Bug10750. + +Module M18. + + Module A. + Module B. + Infix "+++" := Nat.add (at level 70). + End B. + End A. +Import A. +(* Check that the notation in module B is not visible *) +Infix "+++" := Nat.add (at level 80). + +End M18. + +Module InheritanceArgumentScopes. + +Axiom p : forall (A:Type) (b:nat), A = A /\ b = b. +Check fun A n => p (A * A) (n * n). (* safety check *) +Notation q := @p. +Check fun A n => q (A * A) (n * n). (* check that argument scopes are propagated *) + +End InheritanceArgumentScopes. + +Module InheritanceMaximalImplicitPureNotation. + +Definition id {A B:Type} (a:B) := a. +Notation "#" := (@id nat). +Check # = (fun a:nat => a). (* # should inherit its maximal implicit argument *) + +End InheritanceMaximalImplicitPureNotation. diff --git a/test-suite/success/NotationsAndLtac.v b/test-suite/success/NotationsAndLtac.v new file mode 100644 index 0000000000..f3ec1916dc --- /dev/null +++ b/test-suite/success/NotationsAndLtac.v @@ -0,0 +1,52 @@ +(* Test that adding notations that overlap with the tactic grammar does not +* interfere with Ltac parsing. *) + +Module test1. + Notation "x [ y ]" := (fst (id x, id y)) (at level 11). + + Goal True \/ (exists x : nat, True /\ True) -> True. + Proof. + intros [|[a [y z]]]; [idtac|idtac]; try solve [eauto | trivial; [trivial]]. + Qed. +End test1. + +Module test2. + Notation "x [ y ]" := (fst (id x, id y)) (at level 100). + Goal True \/ (exists x : nat, True /\ True) -> True. + Proof. + intros [|[a [y z]]]; [idtac|idtac]; try solve [eauto | trivial; [trivial]]. + Qed. +End test2. + +Module test3. + Notation "x [ y ]" := (fst (id x, id y)) (at level 1). + Goal True \/ (exists x : nat, True /\ True) -> True. + Proof. 
+ intros [|[a [y z]]]; [idtac|idtac]; try solve [eauto | trivial; [trivial]]. + Qed. +End test3. + +Module test1'. + Notation "x [ [ y ] ] " := (fst (id x, id y)) (at level 11). + + Goal True \/ (exists x : nat, True /\ True) -> True. + Proof. + intros [|[a [y z]]]; [idtac|idtac]; try solve [eauto | trivial; [trivial]]. + Qed. +End test1'. + +Module test2'. + Notation "x [ [ y ] ]" := (fst (id x, id y)) (at level 100). + Goal True \/ (exists x : nat, True /\ True) -> True. + Proof. + intros [|[a [y z]]]; [idtac|idtac]; try solve [eauto | trivial; [trivial]]. + Qed. +End test2'. + +Module test3'. + Notation "x [ [ y ] ]" := (fst (id x, id y)) (at level 1). + Goal True \/ (exists x : nat, True /\ True) -> True. + Proof. + intros [|[a [y z]]]; [idtac|idtac]; try solve [eauto | trivial; [trivial]]. + Qed. +End test3'. diff --git a/test-suite/success/uniform_inductive_parameters.v b/test-suite/success/uniform_inductive_parameters.v index 651247937d..e2b4694fff 100644 --- a/test-suite/success/uniform_inductive_parameters.v +++ b/test-suite/success/uniform_inductive_parameters.v @@ -1,23 +1,22 @@ -Module Att. - #[uniform] Inductive list (A : Type) := - | nil : list - | cons : A -> list -> list. - Check (list : Type -> Type). - Check (cons : forall A, A -> list A -> list A). -End Att. - Set Uniform Inductive Parameters. Inductive list (A : Type) := -| nil : list -| cons : A -> list -> list. + | nil : list + | cons : A -> list -> list. Check (list : Type -> Type). Check (cons : forall A, A -> list A -> list A). Inductive list2 (A : Type) (A' := prod A A) := -| nil2 : list2 -| cons2 : A' -> list2 -> list2. + | nil2 : list2 + | cons2 : A' -> list2 -> list2. Check (list2 : Type -> Type). Check (cons2 : forall A (A' := prod A A), A' -> list2 A -> list2 A). -#[nonuniform] Inductive bla (n:nat) := c (_ : bla (S n)). +Inductive list3 | A := nil3 | cons3 : A -> list3 (A * A)%type -> list3 A. + +Unset Uniform Inductive Parameters. 
+ +Inductive list4 A | := nil4 | cons4 : A -> list4 -> list4. + +Inductive Acc {A:Type} (R:A->A->Prop) | (x:A) : Prop + := Acc_in : (forall y, R y x -> Acc y) -> Acc x. diff --git a/theories/Floats/FloatLemmas.v b/theories/Floats/FloatLemmas.v index 81cb7120e0..5db501742f 100644 --- a/theories/Floats/FloatLemmas.v +++ b/theories/Floats/FloatLemmas.v @@ -24,7 +24,7 @@ Theorem ldexp_spec : forall f e, Prim2SF (ldexp f e) = SFldexp prec emax (Prim2S destruct (Prim2SF f); auto. unfold SFldexp. unfold binary_round. - assert (Hmod_elim : forall e, ([| of_Z (Z.max (Z.min e (emax - emin)) (emin - emax - 1) + shift)|]%int63 - shift = Z.max (Z.min e (emax - emin)) (emin - emax - 1))%Z). + assert (Hmod_elim : forall e, (φ (of_Z (Z.max (Z.min e (emax - emin)) (emin - emax - 1) + shift))%int63 - shift = Z.max (Z.min e (emax - emin)) (emin - emax - 1))%Z). { intro e1. rewrite of_Z_spec, shift_value. diff --git a/theories/Floats/FloatOps.v b/theories/Floats/FloatOps.v index f0d3bcced9..e74cb09c27 100644 --- a/theories/Floats/FloatOps.v +++ b/theories/Floats/FloatOps.v @@ -10,7 +10,7 @@ Definition shift := 2101%Z. (** [= 2*emax + prec] *) Definition frexp f := let (m, se) := frshiftexp f in - (m, ([| se |] - shift)%Z%int63). + (m, (φ se - shift)%Z%int63). Definition ldexp f e := let e' := Z.max (Z.min e (emax - emin)) (emin - emax - 1) in @@ -28,7 +28,7 @@ Definition Prim2SF f := else let (r, exp) := frexp f in let e := (exp - prec)%Z in - let (shr, e') := shr_fexp prec emax [| normfr_mantissa r |]%int63 e loc_Exact in + let (shr, e') := shr_fexp prec emax (φ (normfr_mantissa r))%int63 e loc_Exact in match shr_m shr with | Zpos p => S754_finite (get_sign f) p e' | Zneg _ | Z0 => S754_zero false (* must never occur *) diff --git a/theories/Init/Logic.v b/theories/Init/Logic.v index 8ba17e38c8..aa376b780a 100644 --- a/theories/Init/Logic.v +++ b/theories/Init/Logic.v @@ -352,10 +352,6 @@ Inductive eq (A:Type) (x:A) : A -> Prop := where "x = y :> A" := (@eq A x y) : type_scope. 
-Notation "x = y" := (x = y :>_) : type_scope. -Notation "x <> y :> T" := (~ x = y :>T) : type_scope. -Notation "x <> y" := (x <> y :>_) : type_scope. - Arguments eq {A} x _. Arguments eq_refl {A x} , [A] x. @@ -363,6 +359,10 @@ Arguments eq_ind [A] x P _ y _. Arguments eq_rec [A] x P _ y _. Arguments eq_rect [A] x P _ y _. +Notation "x = y" := (eq x y) : type_scope. +Notation "x <> y :> T" := (~ x = y :>T) : type_scope. +Notation "x <> y" := (~ (x = y)) : type_scope. + Hint Resolve I conj or_introl or_intror : core. Hint Resolve eq_refl: core. Hint Resolve ex_intro ex_intro2: core. @@ -465,7 +465,7 @@ Module EqNotations. | eq_refl => H' end) (at level 10, H' at level 10, - format "'[' 'rew' 'dependent' H in '/' H' ']'"). + format "'[' 'rew' 'dependent' '/ ' H in '/' H' ']'"). Notation "'rew' 'dependent' -> H 'in' H'" := (match H with | eq_refl => H' @@ -476,7 +476,7 @@ Module EqNotations. | eq_refl => H' end) (at level 10, H' at level 10, - format "'[' 'rew' 'dependent' <- H in '/' H' ']'"). + format "'[' 'rew' 'dependent' <- '/ ' H in '/' H' ']'"). Notation "'rew' 'dependent' [ 'fun' y p => P ] H 'in' H'" := (match H as p in (_ = y) return P with | eq_refl => H' diff --git a/theories/Init/Specif.v b/theories/Init/Specif.v index c00f8edcf7..d3e5ddcc8a 100644 --- a/theories/Init/Specif.v +++ b/theories/Init/Specif.v @@ -255,12 +255,6 @@ Qed. (** Equality of sigma types *) Import EqNotations. -Local Notation "'rew' 'dependent' H 'in' H'" - := (match H with - | eq_refl => H' - end) - (at level 10, H' at level 10, - format "'[' 'rew' 'dependent' '/ ' H in '/' H' ']'"). (** Equality for [sigT] *) Section sigT. diff --git a/theories/Numbers/Cyclic/Int63/Cyclic63.v b/theories/Numbers/Cyclic/Int63/Cyclic63.v index 74c91ac226..4125f6abb7 100644 --- a/theories/Numbers/Cyclic/Int63/Cyclic63.v +++ b/theories/Numbers/Cyclic/Int63/Cyclic63.v @@ -109,7 +109,7 @@ Instance int_ops : ZnZ.Ops int := Local Open Scope Z_scope. 
-Lemma is_zero_spec_aux : forall x : int, is_zero x = true -> [|x|] = 0%Z. +Lemma is_zero_spec_aux : forall x : int, is_zero x = true -> φ x = 0%Z. Proof. intros x;rewrite is_zero_spec;intros H;rewrite H;trivial. Qed. @@ -120,8 +120,8 @@ Lemma positive_to_int_spec : Z_of_N (fst (positive_to_int p)) * wB + to_Z (snd (positive_to_int p)). Proof. assert (H: (wB <= wB) -> forall p : positive, - Zpos p = Z_of_N (fst (positive_to_int p)) * wB + [|snd (positive_to_int p)|] /\ - [|snd (positive_to_int p)|] < wB). + Zpos p = Z_of_N (fst (positive_to_int p)) * wB + φ (snd (positive_to_int p)) /\ + φ (snd (positive_to_int p)) < wB). 2: intros p; case (H (Z.le_refl wB) p); auto. unfold positive_to_int, wB at 1 3 4. elim size. @@ -136,7 +136,7 @@ Proof. generalize (IH F1 p1); case positive_to_int_rec; simpl. intros n1 i (H1,H2). rewrite Zpos_xI, H1. - replace [|i << 1 + 1|] with ([|i|] * 2 + 1). + replace (φ (i << 1 + 1)) with (φ i * 2 + 1). split; auto with zarith; ring. rewrite add_spec, lsl_spec, Zplus_mod_idemp_l, to_Z_1, Z.pow_1_r, Zmod_small; auto. case (to_Z_bounded i); split; auto with zarith. @@ -144,7 +144,7 @@ Proof. generalize (IH F1 p1); case positive_to_int_rec; simpl. intros n1 i (H1,H2). rewrite Zpos_xO, H1. - replace [|i << 1|] with ([|i|] * 2). + replace (φ (i << 1)) with (φ i * 2). split; auto with zarith; ring. rewrite lsl_spec, to_Z_1, Z.pow_1_r, Zmod_small; auto. case (to_Z_bounded i); split; auto with zarith. @@ -152,7 +152,7 @@ Proof. Qed. Lemma mulc_WW_spec : - forall x y,[|| x *c y ||] = [|x|] * [|y|]. + forall x y, Φ ( x *c y ) = φ x * φ y. Proof. intros x y;unfold mulc_WW. generalize (mulc_spec x y);destruct (mulc x y);simpl;intros Heq;rewrite Heq. @@ -164,18 +164,18 @@ Qed. Lemma squarec_spec : forall x, - [||x *c x||] = [|x|] * [|x|]. + Φ(x *c x) = φ x * φ x. Proof (fun x => mulc_WW_spec x x). 
-Lemma diveucl_spec_aux : forall a b, 0 < [|b|] -> +Lemma diveucl_spec_aux : forall a b, 0 < φ b -> let (q,r) := diveucl a b in - [|a|] = [|q|] * [|b|] + [|r|] /\ - 0 <= [|r|] < [|b|]. + φ a = φ q * φ b + φ r /\ + 0 <= φ r < φ b. Proof. intros a b H;assert (W:= diveucl_spec a b). - assert ([|b|]>0) by (auto with zarith). - generalize (Z_div_mod [|a|] [|b|] H0). - destruct (diveucl a b);destruct (Z.div_eucl [|a|] [|b|]). + assert (φ b>0) by (auto with zarith). + generalize (Z_div_mod φ a φ b H0). + destruct (diveucl a b);destruct (Z.div_eucl φ a φ b). inversion W;rewrite Zmult_comm;trivial. Qed. @@ -252,10 +252,10 @@ Proof. case lebP; intros hle. 2: { symmetry; apply Zmod_small. - assert (2 ^ [|Int63.digits|] < 2 ^ [|p|]); [ apply Zpower_lt_monotone; auto with zarith | ]. - change wB with (2 ^ [|Int63.digits|]) in *; auto with zarith. } - rewrite <- (shift_unshift_mod_3 [|Int63.digits|] [|p|] [|w|]) by auto with zarith. - replace ([|Int63.digits|] - [|p|]) with [|Int63.digits - p|] by (rewrite sub_spec, Zmod_small; auto with zarith). + assert (2 ^ φ Int63.digits < 2 ^ φ p); [ apply Zpower_lt_monotone; auto with zarith | ]. + change wB with (2 ^ φ Int63.digits) in *; auto with zarith. } + rewrite <- (shift_unshift_mod_3 φ Int63.digits φ p φ w) by auto with zarith. + replace (φ Int63.digits - φ p) with (φ (Int63.digits - p)) by (rewrite sub_spec, Zmod_small; auto with zarith). rewrite lsr_spec, lsl_spec; reflexivity. Qed. diff --git a/theories/Numbers/Cyclic/Int63/Int63.v b/theories/Numbers/Cyclic/Int63/Int63.v index febf4fa1be..d490c28578 100644 --- a/theories/Numbers/Cyclic/Int63/Int63.v +++ b/theories/Numbers/Cyclic/Int63/Int63.v @@ -194,6 +194,8 @@ Fixpoint to_Z_rec (n:nat) (i:int) := Definition to_Z := to_Z_rec size. +Notation "'φ' x" := (to_Z x) (at level 0) : int63_scope. + Fixpoint of_pos_rec (n:nat) (p:positive) := match n, p with | O, _ => 0 @@ -211,10 +213,11 @@ Definition of_Z z := | Zneg p => - (of_pos p) end. 
-Notation "[| x |]" := (to_Z x) (at level 0, x at level 99) : int63_scope. - Definition wB := (2 ^ (Z.of_nat size))%Z. +Notation "'Φ' x" := + (zn2z_to_Z wB to_Z x) (at level 0) : int63_scope. + Lemma to_Z_rec_bounded size : forall x, (0 <= to_Z_rec size x < 2 ^ Z.of_nat size)%Z. Proof. elim size. simpl; auto with zarith. @@ -225,7 +228,7 @@ Proof. rewrite Zdouble_plus_one_mult; auto with zarith. Qed. -Corollary to_Z_bounded : forall x, (0 <= [| x |] < wB)%Z. +Corollary to_Z_bounded : forall x, (0 <= φ x < wB)%Z. Proof. apply to_Z_rec_bounded. Qed. (* =================================================== *) @@ -290,29 +293,24 @@ Proof. exact (fun x => let 'eq_refl := x in idProp). Qed. Lemma wB_pos : 0 < wB. Proof. reflexivity. Qed. -Lemma to_Z_0 : [|0|] = 0. +Lemma to_Z_0 : φ 0 = 0. Proof. reflexivity. Qed. -Lemma to_Z_1 : [|1|] = 1. +Lemma to_Z_1 : φ 1 = 1. Proof. reflexivity. Qed. (* Notations *) Local Open Scope Z_scope. -Notation "[+| c |]" := +Local Notation "[+| c |]" := (interp_carry 1 wB to_Z c) (at level 0, c at level 99) : int63_scope. -Notation "[-| c |]" := +Local Notation "[-| c |]" := (interp_carry (-1) wB to_Z c) (at level 0, c at level 99) : int63_scope. -Notation "[|| x ||]" := - (zn2z_to_Z wB to_Z x) (at level 0, x at level 99) : int63_scope. - (* Bijection : int63 <-> Bvector size *) -Axiom of_to_Z : forall x, of_Z [| x |] = x. - -Notation "'φ' x" := [| x |] (at level 0) : int63_scope. +Axiom of_to_Z : forall x, of_Z φ x = x. Lemma can_inj {rT aT} {f: aT -> rT} {g: rT -> aT} (K: forall a, g (f a) = a) {a a'} (e: f a = f a') : a = a'. Proof. generalize (K a) (K a'). congruence. Qed. @@ -322,9 +320,9 @@ Proof. exact (λ e, can_inj of_to_Z e). Qed. (** Specification of logical operations *) Local Open Scope Z_scope. -Axiom lsl_spec : forall x p, [| x << p |] = [| x |] * 2 ^ [| p |] mod wB. +Axiom lsl_spec : forall x p, φ (x << p) = φ x * 2 ^ φ p mod wB. -Axiom lsr_spec : forall x p, [|x >> p|] = [|x|] / 2 ^ [|p|]. 
+Axiom lsr_spec : forall x p, φ (x >> p) = φ x / 2 ^ φ p. Axiom land_spec: forall x y i , bit (x land y) i = bit x i && bit y i. @@ -339,26 +337,26 @@ Axiom lxor_spec: forall x y i, bit (x lxor y) i = xorb (bit x i) (bit y i). (* Remarque : les axiomes seraient plus simple si on utilise of_Z a la place : exemple : add_spec : forall x y, of_Z (x + y) = of_Z x + of_Z y. *) -Axiom add_spec : forall x y, [|x + y|] = ([|x|] + [|y|]) mod wB. +Axiom add_spec : forall x y, φ (x + y) = (φ x + φ y) mod wB. -Axiom sub_spec : forall x y, [|x - y|] = ([|x|] - [|y|]) mod wB. +Axiom sub_spec : forall x y, φ (x - y) = (φ x - φ y) mod wB. -Axiom mul_spec : forall x y, [| x * y |] = [|x|] * [|y|] mod wB. +Axiom mul_spec : forall x y, φ (x * y) = φ x * φ y mod wB. -Axiom mulc_spec : forall x y, [|x|] * [|y|] = [|fst (mulc x y)|] * wB + [|snd (mulc x y)|]. +Axiom mulc_spec : forall x y, φ x * φ y = φ (fst (mulc x y)) * wB + φ (snd (mulc x y)). -Axiom div_spec : forall x y, [|x / y|] = [|x|] / [|y|]. +Axiom div_spec : forall x y, φ (x / y) = φ x / φ y. -Axiom mod_spec : forall x y, [|x \% y|] = [|x|] mod [|y|]. +Axiom mod_spec : forall x y, φ (x \% y) = φ x mod φ y. (* Comparisons *) Axiom eqb_correct : forall i j, (i == j)%int63 = true -> i = j. Axiom eqb_refl : forall x, (x == x)%int63 = true. -Axiom ltb_spec : forall x y, (x < y)%int63 = true <-> [|x|] < [|y|]. +Axiom ltb_spec : forall x y, (x < y)%int63 = true <-> φ x < φ y. -Axiom leb_spec : forall x y, (x <= y)%int63 = true <-> [|x|] <= [|y|]. +Axiom leb_spec : forall x y, (x <= y)%int63 = true <-> φ x <= φ y. (** Exotic operations *) @@ -370,11 +368,11 @@ Primitive tail0 := #int63_tail0. Axiom compare_def_spec : forall x y, compare x y = compare_def x y. -Axiom head0_spec : forall x, 0 < [|x|] -> - wB/ 2 <= 2 ^ ([|head0 x|]) * [|x|] < wB. +Axiom head0_spec : forall x, 0 < φ x -> + wB/ 2 <= 2 ^ (φ (head0 x)) * φ x < wB. -Axiom tail0_spec : forall x, 0 < [|x|] -> - (exists y, 0 <= y /\ [|x|] = (2 * y + 1) * (2 ^ [|tail0 x|]))%Z. 
+Axiom tail0_spec : forall x, 0 < φ x -> + (exists y, 0 <= y /\ φ x = (2 * y + 1) * (2 ^ φ (tail0 x)))%Z. Axiom addc_def_spec : forall x y, (x +c y)%int63 = addc_def x y. @@ -388,8 +386,8 @@ Axiom diveucl_def_spec : forall x y, diveucl x y = diveucl_def x y. Axiom diveucl_21_spec : forall a1 a2 b, let (q,r) := diveucl_21 a1 a2 b in - let (q',r') := Z.div_eucl ([|a1|] * wB + [|a2|]) [|b|] in - [|a1|] < [|b|] -> [|q|] = q' /\ [|r|] = r'. + let (q',r') := Z.div_eucl (φ a1 * wB + φ a2) φ b in + φ a1 < φ b -> φ q = q' /\ φ r = r'. Axiom addmuldiv_def_spec : forall p x y, addmuldiv p x y = addmuldiv_def p x y. @@ -550,16 +548,16 @@ Qed. (** Comparison *) -Lemma eqbP x y : reflect ([| x |] = [| y |]) (x == y). +Lemma eqbP x y : reflect (φ x = φ y ) (x == y). Proof. apply iff_reflect; rewrite eqb_spec; split; [ apply to_Z_inj | apply f_equal ]. Qed. -Lemma ltbP x y : reflect ([| x |] < [| y |])%Z (x < y). +Lemma ltbP x y : reflect (φ x < φ y )%Z (x < y). Proof. apply iff_reflect; symmetry; apply ltb_spec. Qed. -Lemma lebP x y : reflect ([| x |] <= [| y |])%Z (x ≤ y). +Lemma lebP x y : reflect (φ x <= φ y )%Z (x ≤ y). Proof. apply iff_reflect; symmetry; apply leb_spec. Qed. -Lemma compare_spec x y : compare x y = ([|x|] ?= [|y|])%Z. +Lemma compare_spec x y : compare x y = (φ x ?= φ y)%Z. Proof. rewrite compare_def_spec; unfold compare_def. case ltbP; [ auto using Z.compare_lt_iff | intros hge ]. @@ -572,72 +570,72 @@ Proof. apply eqb_spec. Qed. Lemma diveucl_spec x y : let (q,r) := diveucl x y in - ([| q |], [| r |]) = Z.div_eucl [| x |] [| y |]. + (φ q , φ r ) = Z.div_eucl φ x φ y . Proof. rewrite diveucl_def_spec; unfold diveucl_def; rewrite div_spec, mod_spec; unfold Z.div, Zmod. - destruct (Z.div_eucl [| x |] [| y |]); trivial. + destruct (Z.div_eucl φ x φ y ); trivial. Qed. Local Open Scope Z_scope. (** Addition *) -Lemma addc_spec x y : [+| x +c y |] = [| x |] + [| y |]. +Lemma addc_spec x y : [+| x +c y |] = φ x + φ y . Proof. 
rewrite addc_def_spec; unfold addc_def, interp_carry. pose proof (to_Z_bounded x); pose proof (to_Z_bounded y). case ltbP; rewrite add_spec. - case (Z_lt_ge_dec ([| x |] + [| y |]) wB). + case (Z_lt_ge_dec (φ x + φ y ) wB). intros k; rewrite Zmod_small; lia. - intros hge; rewrite <- (Zmod_unique _ _ 1 ([| x |] + [| y |] - wB)); lia. - case (Z_lt_ge_dec ([| x |] + [| y |]) wB). + intros hge; rewrite <- (Zmod_unique _ _ 1 (φ x + φ y - wB)); lia. + case (Z_lt_ge_dec (φ x + φ y ) wB). intros k; rewrite Zmod_small; lia. - intros hge; rewrite <- (Zmod_unique _ _ 1 ([| x |] + [| y |] - wB)); lia. + intros hge; rewrite <- (Zmod_unique _ _ 1 (φ x + φ y - wB)); lia. Qed. -Lemma succ_spec x : [| succ x |] = ([| x |] + 1) mod wB. +Lemma succ_spec x : φ (succ x) = (φ x + 1) mod wB. Proof. apply add_spec. Qed. -Lemma succc_spec x : [+| succc x |] = [| x |] + 1. +Lemma succc_spec x : [+| succc x |] = φ x + 1. Proof. apply addc_spec. Qed. -Lemma addcarry_spec x y : [| addcarry x y |] = ([| x |] + [| y |] + 1) mod wB. +Lemma addcarry_spec x y : φ (addcarry x y) = (φ x + φ y + 1) mod wB. Proof. unfold addcarry; rewrite -> !add_spec, Zplus_mod_idemp_l; trivial. Qed. -Lemma addcarryc_spec x y : [+| addcarryc x y |] = [| x |] + [| y |] + 1. +Lemma addcarryc_spec x y : [+| addcarryc x y |] = φ x + φ y + 1. Proof. rewrite addcarryc_def_spec; unfold addcarryc_def, interp_carry. pose proof (to_Z_bounded x); pose proof (to_Z_bounded y). case lebP; rewrite addcarry_spec. - case (Z_lt_ge_dec ([| x |] + [| y |] + 1) wB). + case (Z_lt_ge_dec (φ x + φ y + 1) wB). intros hlt; rewrite Zmod_small; lia. - intros hge; rewrite <- (Zmod_unique _ _ 1 ([| x |] + [| y |] + 1 - wB)); lia. - case (Z_lt_ge_dec ([| x |] + [| y |] + 1) wB). + intros hge; rewrite <- (Zmod_unique _ _ 1 (φ x + φ y + 1 - wB)); lia. + case (Z_lt_ge_dec (φ x + φ y + 1) wB). intros hlt; rewrite Zmod_small; lia. - intros hge; rewrite <- (Zmod_unique _ _ 1 ([| x |] + [| y |] + 1 - wB)); lia. 
+ intros hge; rewrite <- (Zmod_unique _ _ 1 (φ x + φ y + 1 - wB)); lia. Qed. (** Subtraction *) -Lemma subc_spec x y : [-| x -c y |] = [| x |] - [| y |]. +Lemma subc_spec x y : [-| x -c y |] = φ x - φ y . Proof. rewrite subc_def_spec; unfold subc_def; unfold interp_carry. pose proof (to_Z_bounded x); pose proof (to_Z_bounded y). case lebP. intros hle; rewrite sub_spec, Z.mod_small; lia. - intros hgt; rewrite sub_spec, <- (Zmod_unique _ wB (-1) ([| x |] - [| y |] + wB)); lia. + intros hgt; rewrite sub_spec, <- (Zmod_unique _ wB (-1) (φ x - φ y + wB)); lia. Qed. -Lemma pred_spec x : [| pred x |] = ([| x |] - 1) mod wB. +Lemma pred_spec x : φ (pred x) = (φ x - 1) mod wB. Proof. apply sub_spec. Qed. -Lemma predc_spec x : [-| predc x |] = [| x |] - 1. +Lemma predc_spec x : [-| predc x |] = φ x - 1. Proof. apply subc_spec. Qed. -Lemma oppc_spec x : [-| oppc x |] = - [| x |]. +Lemma oppc_spec x : [-| oppc x |] = - φ x . Proof. unfold oppc; rewrite -> subc_spec, to_Z_0; trivial. Qed. -Lemma opp_spec x : [|- x |] = - [| x |] mod wB. +Lemma opp_spec x : φ (- x) = - φ x mod wB. Proof. unfold opp; rewrite -> sub_spec, to_Z_0; trivial. Qed. -Lemma oppcarry_spec x : [| oppcarry x |] = wB - [| x |] - 1. +Lemma oppcarry_spec x : φ (oppcarry x) = wB - φ x - 1. Proof. unfold oppcarry; rewrite sub_spec. rewrite <- Zminus_plus_distr, Zplus_comm, Zminus_plus_distr. @@ -645,20 +643,20 @@ Proof. generalize (to_Z_bounded x); auto with zarith. Qed. -Lemma subcarry_spec x y : [| subcarry x y |] = ([| x |] - [| y |] - 1) mod wB. +Lemma subcarry_spec x y : φ (subcarry x y) = (φ x - φ y - 1) mod wB. Proof. unfold subcarry; rewrite !sub_spec, Zminus_mod_idemp_l; trivial. Qed. -Lemma subcarryc_spec x y : [-| subcarryc x y |] = [| x |] - [| y |] - 1. +Lemma subcarryc_spec x y : [-| subcarryc x y |] = φ x - φ y - 1. Proof. rewrite subcarryc_def_spec; unfold subcarryc_def, interp_carry; fold (subcarry x y). pose proof (to_Z_bounded x); pose proof (to_Z_bounded y). case ltbP; rewrite subcarry_spec. 
intros hlt; rewrite Zmod_small; lia. - intros hge; rewrite <- (Zmod_unique _ _ (-1) ([| x |] - [| y |] - 1 + wB)); lia. + intros hge; rewrite <- (Zmod_unique _ _ (-1) (φ x - φ y - 1 + wB)); lia. Qed. (** GCD *) -Lemma to_Z_gcd : forall i j, [| gcd i j |] = Zgcdn (2 * size) [| j |] [| i |]. +Lemma to_Z_gcd : forall i j, φ (gcd i j) = Zgcdn (2 * size) (φ j) (φ i). Proof. unfold gcd. elim (2*size)%nat. reflexivity. @@ -668,17 +666,17 @@ Proof. intros ->; rewrite Z.abs_eq; lia. intros hne; rewrite ih; clear ih. rewrite <- mod_spec. - revert hj hne; case [| j |]; intros; lia. + revert hj hne; case φ j ; intros; lia. Qed. -Lemma gcd_spec a b : Zis_gcd [| a |] [| b |] [| gcd a b |]. +Lemma gcd_spec a b : Zis_gcd (φ a) (φ b) (φ (gcd a b)). Proof. rewrite to_Z_gcd. apply Zis_gcd_sym. apply Zgcdn_is_gcd. unfold Zgcd_bound. generalize (to_Z_bounded b). - destruct [|b|]. + destruct φ b. unfold size; auto with zarith. intros (_,H). cut (Psize p <= size)%nat; [ lia | rewrite <- Zpower2_Psize; auto]. @@ -686,10 +684,10 @@ Proof. Qed. (** Head0, Tail0 *) -Lemma head00_spec x : [| x |] = 0 -> [| head0 x |] = [| digits |]. +Lemma head00_spec x : φ x = 0 -> φ (head0 x) = φ digits . Proof. now intros h; rewrite (to_Z_inj _ 0 h). Qed. -Lemma tail00_spec x : [| x |] = 0 -> [|tail0 x|] = [|digits|]. +Lemma tail00_spec x : φ x = 0 -> φ (tail0 x) = φ digits. Proof. now intros h; rewrite (to_Z_inj _ 0 h). Qed. Infix "≡" := (eqm wB) (at level 80) : int63_scope. @@ -744,20 +742,20 @@ Proof. Qed. Lemma add_le_r m n: - if (n <= m + n)%int63 then ([|m|] + [|n|] < wB)%Z else (wB <= [|m|] + [|n|])%Z. + if (n <= m + n)%int63 then (φ m + φ n < wB)%Z else (wB <= φ m + φ n)%Z. Proof. case (to_Z_bounded m); intros H1m H2m. case (to_Z_bounded n); intros H1n H2n. - case (Zle_or_lt wB ([|m|] + [|n|])); intros H. - assert (H1: ([| m + n |] = [|m|] + [|n|] - wB)%Z). + case (Zle_or_lt wB (φ m + φ n)); intros H. + assert (H1: (φ (m + n) = φ m + φ n - wB)%Z). rewrite add_spec. 
- replace (([|m|] + [|n|]) mod wB)%Z with (((([|m|] + [|n|]) - wB) + wB) mod wB)%Z. + replace ((φ m + φ n) mod wB)%Z with ((((φ m + φ n) - wB) + wB) mod wB)%Z. rewrite -> Zplus_mod, Z_mod_same_full, Zplus_0_r, !Zmod_small; auto with zarith. rewrite !Zmod_small; auto with zarith. apply f_equal2 with (f := Zmod); auto with zarith. case_eq (n <= m + n)%int63; auto. rewrite leb_spec, H1; auto with zarith. - assert (H1: ([| m + n |] = [|m|] + [|n|])%Z). + assert (H1: (φ (m + n) = φ m + φ n)%Z). rewrite add_spec, Zmod_small; auto with zarith. replace (n <= m + n)%int63 with true; auto. apply sym_equal; rewrite leb_spec, H1; auto with zarith. @@ -844,40 +842,40 @@ Proof. rewrite -> leb_spec in H. apply Zdiv_small; split; [ auto | ]. apply (Z.lt_le_trans _ _ _ H2x). - unfold wB; change (Z_of_nat size) with [|digits|]. + unfold wB; change (Z_of_nat size) with φ digits. apply Zpower_le_monotone; auto with zarith. Qed. (* BIT *) -Lemma bit_0_spec i: [|bit i 0|] = [|i|] mod 2. +Lemma bit_0_spec i: φ (bit i 0) = φ i mod 2. Proof. unfold bit, is_zero. rewrite lsr_0_r. - assert (Hbi: ([|i|] mod 2 < 2)%Z). + assert (Hbi: (φ i mod 2 < 2)%Z). apply Z_mod_lt; auto with zarith. case (to_Z_bounded i); intros H1i H2i. - case (Zmod_le_first [|i|] 2); auto with zarith; intros H3i H4i. - assert (H2b: (0 < 2 ^ [|digits - 1|])%Z). + case (Zmod_le_first (φ i) 2); auto with zarith; intros H3i H4i. + assert (H2b: (0 < 2 ^ φ (digits - 1))%Z). apply Zpower_gt_0; auto with zarith. case (to_Z_bounded (digits -1)); auto with zarith. - assert (H: [|i << (digits -1)|] = ([|i|] mod 2 * 2^ [|digits -1|])%Z). + assert (H: φ (i << (digits -1)) = (φ i mod 2 * 2^ φ (digits -1))%Z). rewrite lsl_spec. - rewrite -> (Z_div_mod_eq [|i|] 2) at 1; auto with zarith. + rewrite -> (Z_div_mod_eq φ i 2) at 1; auto with zarith. rewrite -> Zmult_plus_distr_l, <-Zplus_mod_idemp_l. rewrite -> (Zmult_comm 2), <-Zmult_assoc. - replace (2 * 2 ^ [|digits - 1|])%Z with wB; auto. 
+ replace (2 * 2 ^ φ (digits - 1))%Z with wB; auto. rewrite Z_mod_mult, Zplus_0_l; apply Zmod_small. split; auto with zarith. - replace wB with (2 * 2 ^ [|digits -1|])%Z; auto. + replace wB with (2 * 2 ^ φ (digits -1))%Z; auto. apply Zmult_lt_compat_r; auto with zarith. - case (Zle_lt_or_eq 0 ([|i|] mod 2)); auto with zarith; intros Hi. + case (Zle_lt_or_eq 0 (φ i mod 2)); auto with zarith; intros Hi. 2: generalize H; rewrite <-Hi, Zmult_0_l. - 2: replace 0%Z with [|0|]; auto. + 2: replace 0%Z with φ 0; auto. 2: now case eqbP. - generalize H; replace ([|i|] mod 2) with 1%Z; auto with zarith. + generalize H; replace (φ i mod 2) with 1%Z; auto with zarith. rewrite Zmult_1_l. intros H1. - assert (H2: [|i << (digits - 1)|] <> [|0|]). - replace [|0|] with 0%Z; auto with zarith. + assert (H2: φ (i << (digits - 1)) <> φ 0). + replace φ 0 with 0%Z; auto with zarith. now case eqbP. Qed. @@ -885,7 +883,7 @@ Lemma bit_split i : ( i = (i >> 1 ) << 1 + bit i 0)%int63. Proof. apply to_Z_inj. rewrite -> add_spec, lsl_spec, lsr_spec, bit_0_spec, Zplus_mod_idemp_l. - replace (2 ^ [|1|]) with 2%Z; auto with zarith. + replace (2 ^ φ 1) with 2%Z; auto with zarith. rewrite -> Zmult_comm, <-Z_div_mod_eq; auto with zarith. rewrite Zmod_small; auto; case (to_Z_bounded i); auto. Qed. @@ -911,11 +909,11 @@ Qed. Local Hint Resolve Z.lt_gt Z.div_pos : zarith. -Lemma to_Z_split x : [|x|] = [|(x >> 1)|] * 2 + [|bit x 0|]. +Lemma to_Z_split x : φ x = φ (x >> 1) * 2 + φ (bit x 0). Proof. case (to_Z_bounded x); intros H1x H2x. case (to_Z_bounded (bit x 0)); intros H1b H2b. - assert (F1: 0 <= [|x >> 1|] < wB/2). + assert (F1: 0 <= φ (x >> 1) < wB/2). rewrite -> lsr_spec, to_Z_1, Z.pow_1_r. split; auto with zarith. apply Zdiv_lt_upper_bound; auto with zarith. rewrite -> (bit_split x) at 1. @@ -927,7 +925,7 @@ Proof. rewrite -> lsl_spec, to_Z_1, Z.pow_1_r, Zmod_small; auto with zarith. 2: change wB with ((wB/2)*2); auto with zarith. change wB with (((wB/2 - 1) * 2 + 1) + 1). 
- assert ([|bit x 0|] <= 1); auto with zarith. + assert (φ (bit x 0) <= 1); auto with zarith. case bit; discriminate. Qed. @@ -944,11 +942,11 @@ Proof. intros H1; assert (H2: n = max_int). 2: generalize H; rewrite H2; discriminate. case (to_Z_bounded n); intros H1n H2n. - case (Zle_lt_or_eq [|n|] (wB - 1)); auto with zarith; + case (Zle_lt_or_eq φ n (wB - 1)); auto with zarith; intros H2; apply to_Z_inj; auto. generalize (add_le_r 1 n); rewrite H1. - change [|max_int|] with (wB - 1)%Z. - replace [|1|] with 1%Z; auto with zarith. + change φ max_int with (wB - 1)%Z. + replace φ 1 with 1%Z; auto with zarith. Qed. Lemma bit_ext i j : (forall n, bit i n = bit j n) -> i = j. @@ -964,7 +962,7 @@ Proof. 1, 3: apply to_Z_bounded. 1, 2: rewrite lsr_spec; auto using Z_lt_div2. intros b. - case (Zle_or_lt [|digits|] [|b|]). + case (Zle_or_lt φ digits φ b). rewrite <- leb_spec; intros; rewrite !bit_M; auto. rewrite <- ltb_spec; intros; rewrite !bit_half; auto. Qed. @@ -975,58 +973,58 @@ Proof. assert (F1: 1 >= 0) by discriminate. case_eq (digits <= j)%int63; intros H. rewrite orb_true_r, bit_M; auto. - set (d := [|digits|]). - case (Zle_or_lt d [|j|]); intros H1. + set (d := φ digits). + case (Zle_or_lt d (φ j)); intros H1. case (leb_spec digits j); rewrite H; auto with zarith. intros _ HH; generalize (HH H1); discriminate. clear H. generalize (ltb_spec j i); case ltb; intros H2; unfold bit; simpl. - assert (F2: ([|j|] < [|i|])%Z) by (case H2; auto); clear H2. + assert (F2: (φ j < φ i)%Z) by (case H2; auto); clear H2. replace (is_zero (((x << i) >> j) << (digits - 1))) with true; auto. case (to_Z_bounded j); intros H1j H2j. apply sym_equal; rewrite is_zero_spec; apply to_Z_inj. rewrite lsl_spec, lsr_spec, lsl_spec. replace wB with (2^d); auto. - pattern d at 1; replace d with ((d - ([|j|] + 1)) + ([|j|] + 1))%Z by ring. + pattern d at 1; replace d with ((d - (φ j + 1)) + (φ j + 1))%Z by ring. rewrite Zpower_exp; auto with zarith. 
- replace [|i|] with (([|i|] - ([|j|] + 1)) + ([|j|] + 1))%Z by ring. + replace φ i with ((φ i - (φ j + 1)) + (φ j + 1))%Z by ring. rewrite -> Zpower_exp, Zmult_assoc; auto with zarith. rewrite Zmult_mod_distr_r. rewrite -> Zplus_comm, Zpower_exp, !Zmult_assoc; auto with zarith. rewrite -> Z_div_mult_full; auto with zarith. rewrite <-Zmult_assoc, <-Zpower_exp; auto with zarith. - replace (1 + [|digits - 1|])%Z with d; auto with zarith. + replace (1 + φ digits - 1)%Z with d; auto with zarith. rewrite Z_mod_mult; auto. - case H2; intros _ H3; case (Zle_or_lt [|i|] [|j|]); intros F2. + case H2; intros _ H3; case (Zle_or_lt φ i φ j); intros F2. 2: generalize (H3 F2); discriminate. clear H2 H3. apply f_equal with (f := negb). apply f_equal with (f := is_zero). apply to_Z_inj. rewrite -> !lsl_spec, !lsr_spec, !lsl_spec. - pattern wB at 2 3; replace wB with (2^(1+ [|digits - 1|])); auto. + pattern wB at 2 3; replace wB with (2^(1+ φ (digits - 1))); auto. rewrite -> Zpower_exp, Z.pow_1_r; auto with zarith. rewrite !Zmult_mod_distr_r. apply f_equal2 with (f := Zmult); auto. replace wB with (2^ d); auto with zarith. - replace d with ((d - [|i|]) + [|i|])%Z by ring. + replace d with ((d - φ i) + φ i)%Z by ring. case (to_Z_bounded i); intros H1i H2i. rewrite Zpower_exp; auto with zarith. rewrite Zmult_mod_distr_r. case (to_Z_bounded j); intros H1j H2j. - replace [|j - i|] with ([|j|] - [|i|])%Z. + replace φ (j - i) with (φ j - φ i)%Z. 2: rewrite sub_spec, Zmod_small; auto with zarith. - set (d1 := (d - [|i|])%Z). - set (d2 := ([|j|] - [|i|])%Z). - pattern [|j|] at 1; - replace [|j|] with (d2 + [|i|])%Z. + set (d1 := (d - φ i)%Z). + set (d2 := (φ j - φ i)%Z). + pattern φ j at 1; + replace φ j with (d2 + φ i)%Z. 2: unfold d2; ring. rewrite -> Zpower_exp; auto with zarith. rewrite -> Zdiv_mult_cancel_r. - 2: generalize (Zpower2_lt_lin [| i |] H1i); auto with zarith. - rewrite -> (Z_div_mod_eq [|x|] (2^d1)) at 2; auto with zarith. 
+ 2: generalize (Zpower2_lt_lin φ i H1i); auto with zarith. + rewrite -> (Z_div_mod_eq φ x (2^d1)) at 2; auto with zarith. pattern d1 at 2; - replace d1 with (d2 + (1+ (d - [|j|] - 1)))%Z + replace d1 with (d2 + (1+ (d - φ j - 1)))%Z by (unfold d1, d2; ring). rewrite Zpower_exp; auto with zarith. rewrite <-Zmult_assoc, Zmult_comm. @@ -1058,13 +1056,13 @@ Proof. intros Hx Hy. rewrite leb_spec. rewrite -> (to_Z_split y) at 1; rewrite (to_Z_split (x lor y)). - assert ([|y>>1|] <= [|(x lor y) >> 1|]). + assert (φ (y>>1) <= φ ((x lor y) >> 1)). rewrite -> lor_lsr, <-leb_spec; apply IH. rewrite -> lsr_spec, to_Z_1, Z.pow_1_r; split; auto with zarith. apply Zdiv_lt_upper_bound; auto with zarith. rewrite -> lsr_spec, to_Z_1, Z.pow_1_r; split; auto with zarith. apply Zdiv_lt_upper_bound; auto with zarith. - assert ([|bit y 0|] <= [|bit (x lor y) 0|]); auto with zarith. + assert (φ (bit y 0) <= φ (bit (x lor y) 0)); auto with zarith. rewrite lor_spec; do 2 case bit; try discriminate. Qed. @@ -1118,8 +1116,8 @@ Proof. assert (F: (bit x 0 + bit y 0)%int63 = (bit x 0 || bit y 0)). assert (F1: (2 | wB)) by (apply Zpower_divide; apply refl_equal). assert (F2: 0 < wB) by (apply refl_equal). - assert (F3: [|bit x 0 + bit y 0|] mod 2 = [|bit x 0 || bit y 0|] mod 2). - apply trans_equal with (([|(x>>1 + y>>1) << 1|] + [|bit x 0 + bit y 0|]) mod 2). + assert (F3: φ (bit x 0 + bit y 0) mod 2 = φ (bit x 0 || bit y 0) mod 2). + apply trans_equal with ((φ ((x>>1 + y>>1) << 1) + φ (bit x 0 + bit y 0)) mod 2). rewrite lsl_spec, Zplus_mod, <-Zmod_div_mod; auto with zarith. rewrite Z.pow_1_r, Z_mod_mult, Zplus_0_l, Zmod_mod; auto with zarith. rewrite (Zmod_div_mod 2 wB), <-add_spec, Heq; auto with zarith. @@ -1136,12 +1134,12 @@ Proof. case_eq (digits <= m)%int63. intros Hlm; rewrite bit_M; auto; discriminate. rewrite <- not_true_iff_false, leb_spec; intros Hlm. - case (Zle_lt_or_eq 0 [|m|]); auto; intros Hm. + case (Zle_lt_or_eq 0 φ m); auto; intros Hm. replace m with ((m -1) + 1)%int63. 
rewrite <-(bit_half x), <-(bit_half y); auto with zarith. apply HH. rewrite <-lor_lsr. - assert (0 <= [|bit (x lor y) 0|] <= 1) by (case bit; split; discriminate). + assert (0 <= φ (bit (x lor y) 0) <= 1) by (case bit; split; discriminate). rewrite F in Heq; generalize (add_cancel_r _ _ _ Heq). intros Heq1; apply to_Z_inj. generalize (f_equal to_Z Heq1); rewrite lsl_spec, to_Z_1, Z.pow_1_r, Zmod_small. @@ -1149,13 +1147,13 @@ Proof. case (to_Z_bounded (x lor y)); intros H1xy H2xy. rewrite lsr_spec, to_Z_1, Z.pow_1_r; auto with zarith. change wB with ((wB/2)*2); split; auto with zarith. - assert ([|x lor y|] / 2 < wB / 2); auto with zarith. + assert (φ (x lor y) / 2 < wB / 2); auto with zarith. apply Zdiv_lt_upper_bound; auto with zarith. split. case (to_Z_bounded (x >> 1 + y >> 1)); auto with zarith. rewrite add_spec. - apply Z.le_lt_trans with (([|x >> 1|] + [|y >> 1|]) * 2); auto with zarith. - case (Zmod_le_first ([|x >> 1|] + [|y >> 1|]) wB); auto with zarith. + apply Z.le_lt_trans with ((φ (x >> 1) + φ (y >> 1)) * 2); auto with zarith. + case (Zmod_le_first (φ (x >> 1) + φ (y >> 1)) wB); auto with zarith. case (to_Z_bounded (x >> 1)); case (to_Z_bounded (y >> 1)); auto with zarith. generalize Hb; rewrite (to_Z_split x) at 1; rewrite (to_Z_split y) at 1. case (to_Z_bounded (bit x 0)); case (to_Z_bounded (bit y 0)); auto with zarith. @@ -1168,8 +1166,8 @@ Proof. Qed. Lemma addmuldiv_spec x y p : - [| p |] <= [| digits |] -> - [| addmuldiv p x y |] = ([| x |] * (2 ^ [| p |]) + [| y |] / (2 ^ ([| digits |] - [| p |]))) mod wB. + φ p <= φ digits -> + φ (addmuldiv p x y) = (φ x * (2 ^ φ p) + φ y / (2 ^ (φ digits - φ p))) mod wB. Proof. intros H. assert (Fp := to_Z_bounded p); assert (Fd := to_Z_bounded digits). @@ -1203,7 +1201,7 @@ Proof. rewrite andb_false_r; auto. Qed. -Lemma is_even_spec x : if is_even x then [|x|] mod 2 = 0 else [|x|] mod 2 = 1. +Lemma is_even_spec x : if is_even x then φ x mod 2 = 0 else φ x mod 2 = 1. Proof. rewrite is_even_bit. 
generalize (bit_0_spec x); case bit; simpl; auto. @@ -1283,39 +1281,39 @@ Proof. Qed. Lemma sqrt_step_correct rec i j: - 0 < [|i|] -> 0 < [|j|] -> [|i|] < ([|j|] + 1) ^ 2 -> - 2 * [|j|] < wB -> + 0 < φ i -> 0 < φ j -> φ i < (φ j + 1) ^ 2 -> + 2 * φ j < wB -> (forall j1 : int, - 0 < [|j1|] < [|j|] -> [|i|] < ([|j1|] + 1) ^ 2 -> - [|rec i j1|] ^ 2 <= [|i|] < ([|rec i j1|] + 1) ^ 2) -> - [|sqrt_step rec i j|] ^ 2 <= [|i|] < ([|sqrt_step rec i j|] + 1) ^ 2. + 0 < φ j1 < φ j -> φ i < (φ j1 + 1) ^ 2 -> + φ (rec i j1) ^ 2 <= φ i < (φ (rec i j1) + 1) ^ 2) -> + φ (sqrt_step rec i j) ^ 2 <= φ i < (φ (sqrt_step rec i j) + 1) ^ 2. Proof. - assert (Hp2: 0 < [|2|]) by exact (refl_equal Lt). + assert (Hp2: 0 < φ 2) by exact (refl_equal Lt). intros Hi Hj Hij H31 Hrec. unfold sqrt_step. case ltbP; rewrite div_spec. - intros hlt. - assert ([| j + i / j|] = [|j|] + [|i|]/[|j|]) as hj. + assert (φ (j + i / j) = φ j + φ i/φ j) as hj. rewrite add_spec, Zmod_small;rewrite div_spec; auto with zarith. apply Hrec; rewrite lsr_spec, hj, to_Z_1; change (2 ^ 1) with 2. + split; [ | apply sqrt_test_false;auto with zarith]. - replace ([|j|] + [|i|]/[|j|]) with (1 * 2 + (([|j|] - 2) + [|i|] / [|j|])) by ring. + replace (φ j + φ i/φ j) with (1 * 2 + ((φ j - 2) + φ i / φ j)) by ring. rewrite Z_div_plus_full_l; auto with zarith. - assert (0 <= [|i|]/ [|j|]) by (apply Z_div_pos; auto with zarith). - assert (0 <= ([|j|] - 2 + [|i|] / [|j|]) / 2) ; auto with zarith. + assert (0 <= φ i/ φ j) by (apply Z_div_pos; auto with zarith). + assert (0 <= (φ j - 2 + φ i / φ j) / 2) ; auto with zarith. apply Z.div_pos; [ | lia ]. - case (Zle_lt_or_eq 1 [|j|]); auto with zarith; intros Hj1. + case (Zle_lt_or_eq 1 φ j); auto with zarith; intros Hj1. rewrite <- Hj1, Zdiv_1_r; lia. + apply sqrt_main;auto with zarith. - split;[apply sqrt_test_true | ];auto with zarith. Qed. 
-Lemma iter_sqrt_correct n rec i j: 0 < [|i|] -> 0 < [|j|] -> - [|i|] < ([|j|] + 1) ^ 2 -> 2 * [|j|] < wB -> - (forall j1, 0 < [|j1|] -> 2^(Z_of_nat n) + [|j1|] <= [|j|] -> - [|i|] < ([|j1|] + 1) ^ 2 -> 2 * [|j1|] < wB -> - [|rec i j1|] ^ 2 <= [|i|] < ([|rec i j1|] + 1) ^ 2) -> - [|iter_sqrt n rec i j|] ^ 2 <= [|i|] < ([|iter_sqrt n rec i j|] + 1) ^ 2. +Lemma iter_sqrt_correct n rec i j: 0 < φ i -> 0 < φ j -> + φ i < (φ j + 1) ^ 2 -> 2 * φ j < wB -> + (forall j1, 0 < φ j1 -> 2^(Z_of_nat n) + φ j1 <= φ j -> + φ i < (φ j1 + 1) ^ 2 -> 2 * φ j1 < wB -> + φ (rec i j1) ^ 2 <= φ i < (φ (rec i j1) + 1) ^ 2) -> + φ (iter_sqrt n rec i j) ^ 2 <= φ i < (φ (iter_sqrt n rec i j) + 1) ^ 2. Proof. revert rec i j; elim n; unfold iter_sqrt; fold iter_sqrt; clear n. intros rec i j Hi Hj Hij H31 Hrec; apply sqrt_step_correct; auto with zarith. @@ -1328,7 +1326,7 @@ Proof. intros j3 Hj3 Hpj3. apply HHrec; auto. rewrite -> inj_S, Z.pow_succ_r. - apply Z.le_trans with (2 ^Z_of_nat n + [|j2|]); auto with zarith. + apply Z.le_trans with (2 ^Z_of_nat n + φ j2); auto with zarith. apply Zle_0_nat. Qed. @@ -1351,7 +1349,7 @@ Proof. Qed. Lemma sqrt_spec : forall x, - [|sqrt x|] ^ 2 <= [|x|] < ([|sqrt x|] + 1) ^ 2. + φ (sqrt x) ^ 2 <= φ x < (φ (sqrt x) + 1) ^ 2. Proof. intros i; unfold sqrt. rewrite compare_spec. case Z.compare_spec; rewrite to_Z_1; @@ -1359,16 +1357,16 @@ Proof. lia. apply iter_sqrt_correct; auto with zarith; rewrite lsr_spec, to_Z_1; change (2^1) with 2; auto with zarith. - replace [|i|] with (1 * 2 + ([|i|] - 2))%Z; try ring. - assert (0 <= ([|i|] - 2)/2)%Z by (apply Z_div_pos; auto with zarith). + replace φ i with (1 * 2 + (φ i - 2))%Z; try ring. + assert (0 <= (φ i - 2)/2)%Z by (apply Z_div_pos; auto with zarith). rewrite Z_div_plus_full_l; auto with zarith. apply sqrt_init; auto. - assert (W:= Z_mult_div_ge [|i|] 2);assert (W':= to_Z_bounded i);auto with zarith. + assert (W:= Z_mult_div_ge φ i 2);assert (W':= to_Z_bounded i);auto with zarith. 
intros j2 H1 H2; contradict H2; apply Zlt_not_le. fold wB;assert (W:=to_Z_bounded i). - apply Z.le_lt_trans with ([|i|]); auto with zarith. - assert (0 <= [|i|]/2)%Z by (apply Z_div_pos; auto with zarith). - apply Z.le_trans with (2 * ([|i|]/2)); auto with zarith. + apply Z.le_lt_trans with (φ i); auto with zarith. + assert (0 <= φ i/2)%Z by (apply Z_div_pos; auto with zarith). + apply Z.le_trans with (2 * (φ i/2)); auto with zarith. apply Z_mult_div_ge; auto with zarith. case (to_Z_bounded i); repeat rewrite Z.pow_2_r; auto with zarith. Qed. @@ -1393,66 +1391,66 @@ Proof. Qed. Lemma sqrt2_lower_bound ih il j: - [|| WW ih il||] < ([|j|] + 1) ^ 2 -> [|ih|] <= [|j|]. + Φ (WW ih il) < (φ j + 1) ^ 2 -> φ ih <= φ j. Proof. intros H1. case (to_Z_bounded j); intros Hbj _. case (to_Z_bounded il); intros Hbil _. case (to_Z_bounded ih); intros Hbih Hbih1. - assert (([|ih|] < [|j|] + 1)%Z); auto with zarith. + assert ((φ ih < φ j + 1)%Z); auto with zarith. apply Zlt_square_simpl; auto with zarith. simpl zn2z_to_Z in H1. repeat rewrite <-Z.pow_2_r. refine (Z.le_lt_trans _ _ _ _ H1). - apply Z.le_trans with ([|ih|] * wB)%Z;try rewrite Z.pow_2_r; auto with zarith. + apply Z.le_trans with (φ ih * wB)%Z;try rewrite Z.pow_2_r; auto with zarith. Qed. Lemma diveucl_21_spec_aux : forall a1 a2 b, - wB/2 <= [|b|] -> - [|a1|] < [|b|] -> + wB/2 <= φ b -> + φ a1 < φ b -> let (q,r) := diveucl_21 a1 a2 b in - [|a1|] *wB+ [|a2|] = [|q|] * [|b|] + [|r|] /\ - 0 <= [|r|] < [|b|]. + φ a1 *wB+ φ a2 = φ q * φ b + φ r /\ + 0 <= φ r < φ b. Proof. intros a1 a2 b H1 H2;assert (W:= diveucl_21_spec a1 a2 b). assert (W1:= to_Z_bounded a1). assert (W2:= to_Z_bounded a2). assert (Wb:= to_Z_bounded b). - assert ([|b|]>0) by (auto with zarith). - generalize (Z_div_mod ([|a1|]*wB+[|a2|]) [|b|] H). + assert (φ b>0) by (auto with zarith). + generalize (Z_div_mod (φ a1*wB+φ a2) φ b H). revert W. - destruct (diveucl_21 a1 a2 b); destruct (Z.div_eucl ([|a1|]*wB+[|a2|]) [|b|]). 
+ destruct (diveucl_21 a1 a2 b); destruct (Z.div_eucl (φ a1*wB+φ a2) φ b). intros (H', H''); auto; rewrite H', H''; clear H' H''. intros (H', H''); split; [ |exact H'']. now rewrite H', Zmult_comm. Qed. -Lemma div2_phi ih il j: (2^62 <= [|j|] -> [|ih|] < [|j|] -> - [|fst (diveucl_21 ih il j)|] = [|| WW ih il||] /[|j|])%Z. +Lemma div2_phi ih il j: (2^62 <= φ j -> φ ih < φ j -> + φ (fst (diveucl_21 ih il j)) = Φ (WW ih il) / φ j)%Z. Proof. intros Hj Hj1. generalize (diveucl_21_spec_aux ih il j Hj Hj1). case diveucl_21; intros q r (Hq, Hr). - apply Zdiv_unique with [|r|]; auto with zarith. + apply Zdiv_unique with φ r; auto with zarith. simpl @fst; apply eq_trans with (1 := Hq); ring. Qed. Lemma sqrt2_step_correct rec ih il j: - 2 ^ (Z_of_nat (size - 2)) <= [|ih|] -> - 0 < [|j|] -> [|| WW ih il||] < ([|j|] + 1) ^ 2 -> - (forall j1, 0 < [|j1|] < [|j|] -> [|| WW ih il||] < ([|j1|] + 1) ^ 2 -> - [|rec ih il j1|] ^ 2 <= [||WW ih il||] < ([|rec ih il j1|] + 1) ^ 2) -> - [|sqrt2_step rec ih il j|] ^ 2 <= [||WW ih il ||] - < ([|sqrt2_step rec ih il j|] + 1) ^ 2. -Proof. - assert (Hp2: (0 < [|2|])%Z) by exact (refl_equal Lt). + 2 ^ (Z_of_nat (size - 2)) <= φ ih -> + 0 < φ j -> Φ (WW ih il) < (φ j + 1) ^ 2 -> + (forall j1, 0 < φ j1 < φ j -> Φ (WW ih il) < (φ j1 + 1) ^ 2 -> + φ (rec ih il j1) ^ 2 <= Φ (WW ih il) < (φ (rec ih il j1) + 1) ^ 2) -> + φ (sqrt2_step rec ih il j) ^ 2 <= Φ (WW ih il) + < (φ (sqrt2_step rec ih il j) + 1) ^ 2. +Proof. + assert (Hp2: (0 < φ 2)%Z) by exact (refl_equal Lt). intros Hih Hj Hij Hrec; rewrite sqrt2_step_def. - assert (H1: ([|ih|] <= [|j|])%Z) by (apply sqrt2_lower_bound with il; auto). + assert (H1: (φ ih <= φ j)%Z) by (apply sqrt2_lower_bound with il; auto). case (to_Z_bounded ih); intros Hih1 _. case (to_Z_bounded il); intros Hil1 _. case (to_Z_bounded j); intros _ Hj1. - assert (Hp3: (0 < [||WW ih il||])). - {simpl zn2z_to_Z;apply Z.lt_le_trans with ([|ih|] * wB)%Z; auto with zarith. + assert (Hp3: (0 < Φ (WW ih il))). 
+ {simpl zn2z_to_Z;apply Z.lt_le_trans with (φ ih * wB)%Z; auto with zarith. apply Zmult_lt_0_compat; auto with zarith. refine (Z.lt_le_trans _ _ _ _ Hih); auto with zarith. } cbv zeta. @@ -1461,10 +1459,10 @@ Proof. 2: rewrite <-not_true_iff_false, ltb_spec in Heq. 2: split; auto. 2: apply sqrt_test_true; auto with zarith. - 2: unfold zn2z_to_Z; replace [|ih|] with [|j|]; auto with zarith. - 2: assert (0 <= [|il|]/[|j|]) by (apply Z_div_pos; auto with zarith). + 2: unfold zn2z_to_Z; replace φ ih with φ j; auto with zarith. + 2: assert (0 <= φ il/φ j) by (apply Z_div_pos; auto with zarith). 2: rewrite Zmult_comm, Z_div_plus_full_l; unfold base; auto with zarith. - case (Zle_or_lt (2^(Z_of_nat size -1)) [|j|]); intros Hjj. + case (Zle_or_lt (2^(Z_of_nat size -1)) φ j); intros Hjj. case_eq (fst (diveucl_21 ih il j) < j)%int63;intros Heq0. 2: rewrite <-not_true_iff_false, ltb_spec, (div2_phi _ _ _ Hjj Heq) in Heq0. 2: split; auto; apply sqrt_test_true; auto with zarith. @@ -1472,50 +1470,50 @@ Proof. match goal with |- context[rec _ _ ?X] => set (u := X) end. - assert (H: [|u|] = ([|j|] + ([||WW ih il||])/([|j|]))/2). + assert (H: φ u = (φ j + (Φ (WW ih il))/(φ j))/2). { unfold u; generalize (addc_spec j (fst (diveucl_21 ih il j))); case addc;unfold interp_carry;rewrite (div2_phi _ _ _ Hjj Heq);simpl zn2z_to_Z. { intros i H;rewrite lsr_spec, H;trivial. } intros i H;rewrite <- H. case (to_Z_bounded i); intros H1i H2i. rewrite -> add_spec, Zmod_small, lsr_spec. - { change (1 * wB) with ([|(1 << (digits -1))|] * 2)%Z. + { change (1 * wB) with (φ (1 << (digits -1)) * 2)%Z. rewrite Z_div_plus_full_l; auto with zarith. } change wB with (2 * (wB/2))%Z; auto. - replace [|(1 << (digits - 1))|] with (wB/2); auto. + replace φ (1 << (digits - 1)) with (wB/2); auto. rewrite lsr_spec; auto. - replace (2^[|1|]) with 2%Z; auto. + replace (2^φ 1) with 2%Z; auto. split; auto with zarith. - assert ([|i|]/2 < wB/2); auto with zarith. + assert (φ i/2 < wB/2); auto with zarith. 
apply Zdiv_lt_upper_bound; auto with zarith. } apply Hrec; rewrite H; clear u H. - assert (Hf1: 0 <= [||WW ih il||]/ [|j|]) by (apply Z_div_pos; auto with zarith). - case (Zle_lt_or_eq 1 ([|j|])); auto with zarith; intros Hf2. + assert (Hf1: 0 <= Φ (WW ih il) / φ j) by (apply Z_div_pos; auto with zarith). + case (Zle_lt_or_eq 1 (φ j)); auto with zarith; intros Hf2. 2: contradict Heq0; apply Zle_not_lt; rewrite <- Hf2, Zdiv_1_r; auto with zarith. split. - replace ([|j|] + [||WW ih il||]/ [|j|])%Z with - (1 * 2 + (([|j|] - 2) + [||WW ih il||] / [|j|])) by lia. + replace (φ j + Φ (WW ih il) / φ j)%Z with + (1 * 2 + ((φ j - 2) + Φ (WW ih il) / φ j)) by lia. rewrite Z_div_plus_full_l; auto with zarith. - assert (0 <= ([|j|] - 2 + [||WW ih il||] / [|j|]) / 2) ; auto with zarith. + assert (0 <= (φ j - 2 + Φ (WW ih il) / φ j) / 2) ; auto with zarith. apply sqrt_test_false; auto with zarith. apply sqrt_main; auto with zarith. contradict Hij; apply Zle_not_lt. - assert ((1 + [|j|]) <= 2 ^ (Z_of_nat size - 1)); auto with zarith. + assert ((1 + φ j) <= 2 ^ (Z_of_nat size - 1)); auto with zarith. apply Z.le_trans with ((2 ^ (Z_of_nat size - 1)) ^2); auto with zarith. - assert (0 <= 1 + [|j|]); auto with zarith. + assert (0 <= 1 + φ j); auto with zarith. apply Zmult_le_compat; auto with zarith. change ((2 ^ (Z_of_nat size - 1))^2) with (2 ^ (Z_of_nat size - 2) * wB). - apply Z.le_trans with ([|ih|] * wB); auto with zarith. + apply Z.le_trans with (φ ih * wB); auto with zarith. unfold zn2z_to_Z, wB; auto with zarith. Qed. Lemma iter2_sqrt_correct n rec ih il j: - 2^(Z_of_nat (size - 2)) <= [|ih|] -> 0 < [|j|] -> [||WW ih il||] < ([|j|] + 1) ^ 2 -> - (forall j1, 0 < [|j1|] -> 2^(Z_of_nat n) + [|j1|] <= [|j|] -> - [||WW ih il||] < ([|j1|] + 1) ^ 2 -> - [|rec ih il j1|] ^ 2 <= [||WW ih il||] < ([|rec ih il j1|] + 1) ^ 2) -> - [|iter2_sqrt n rec ih il j|] ^ 2 <= [||WW ih il||] - < ([|iter2_sqrt n rec ih il j|] + 1) ^ 2. 
+ 2^(Z_of_nat (size - 2)) <= φ ih -> 0 < φ j -> Φ (WW ih il) < (φ j + 1) ^ 2 -> + (forall j1, 0 < φ j1 -> 2^(Z_of_nat n) + φ j1 <= φ j -> + Φ (WW ih il) < (φ j1 + 1) ^ 2 -> + φ (rec ih il j1) ^ 2 <= Φ (WW ih il) < (φ (rec ih il j1) + 1) ^ 2) -> + φ (iter2_sqrt n rec ih il j) ^ 2 <= Φ (WW ih il) + < (φ (iter2_sqrt n rec ih il j) + 1) ^ 2. Proof. revert rec ih il j; elim n; unfold iter2_sqrt; fold iter2_sqrt; clear n. intros rec ih il j Hi Hj Hij Hrec; apply sqrt2_step_correct; auto with zarith. @@ -1528,22 +1526,22 @@ Proof. intros j3 Hj3 Hpj3. apply HHrec; auto. rewrite -> inj_S, Z.pow_succ_r. - apply Z.le_trans with (2 ^Z_of_nat n + [|j2|])%Z; auto with zarith. + apply Z.le_trans with (2 ^Z_of_nat n + φ j2)%Z; auto with zarith. apply Zle_0_nat. Qed. Lemma sqrt2_spec : forall x y, - wB/ 4 <= [|x|] -> + wB/ 4 <= φ x -> let (s,r) := sqrt2 x y in - [||WW x y||] = [|s|] ^ 2 + [+|r|] /\ - [+|r|] <= 2 * [|s|]. + Φ (WW x y) = φ s ^ 2 + [+|r|] /\ + [+|r|] <= 2 * φ s. Proof. intros ih il Hih; unfold sqrt2. - change [||WW ih il||] with ([||WW ih il||]). + change Φ (WW ih il) with (Φ(WW ih il)). assert (Hbin: forall s, s * s + 2* s + 1 = (s + 1) ^ 2) by (intros s; ring). assert (Hb: 0 <= wB) by (red; intros HH; discriminate). - assert (Hi2: [||WW ih il ||] < ([|max_int|] + 1) ^ 2). + assert (Hi2: Φ(WW ih il ) < (φ max_int + 1) ^ 2). apply Z.le_lt_trans with ((wB - 1) * wB + (wB - 1)); auto with zarith. 2: apply refl_equal. case (to_Z_bounded ih); case (to_Z_bounded il); intros H1 H2 H3 H4. @@ -1553,7 +1551,7 @@ Lemma sqrt2_spec : forall x y, intros j1 _ HH; contradict HH. apply Zlt_not_le. case (to_Z_bounded j1); auto with zarith. - change (2 ^ Z_of_nat size) with ([|max_int|]+1)%Z; auto with zarith. + change (2 ^ Z_of_nat size) with (φ max_int+1)%Z; auto with zarith. set (s := iter2_sqrt size (fun _ _ j : int=> j) ih il max_int). intros Hs1 Hs2. generalize (mulc_spec s s); case mulc. @@ -1565,104 +1563,104 @@ Lemma sqrt2_spec : forall x y, rewrite ltb_spec; intros Heq. 
unfold interp_carry; rewrite Zmult_1_l. rewrite -> Z.pow_2_r, Hihl1, Hil2. - case (Zle_lt_or_eq ([|ih1|] + 1) ([|ih|])); auto with zarith. + case (Zle_lt_or_eq (φ ih1 + 1) (φ ih)); auto with zarith. intros H2; contradict Hs2; apply Zle_not_lt. - replace (([|s|] + 1) ^ 2) with ([||WW ih1 il1||] + 2 * [|s|] + 1). + replace ((φ s + 1) ^ 2) with (Φ(WW ih1 il1) + 2 * φ s + 1). unfold zn2z_to_Z. case (to_Z_bounded il); intros Hpil _. - assert (Hl1l: [|il1|] <= [|il|]). + assert (Hl1l: φ il1 <= φ il). case (to_Z_bounded il2); rewrite Hil2; auto with zarith. - enough ([|ih1|] * wB + 2 * [|s|] + 1 <= [|ih|] * wB) by lia. + enough (φ ih1 * wB + 2 * φ s + 1 <= φ ih * wB) by lia. case (to_Z_bounded s); intros _ Hps. case (to_Z_bounded ih1); intros Hpih1 _. - apply Z.le_trans with (([|ih1|] + 2) * wB). lia. + apply Z.le_trans with ((φ ih1 + 2) * wB). lia. auto with zarith. unfold zn2z_to_Z; rewrite <-Hihl1, Hbin; auto. intros H2; split. unfold zn2z_to_Z; rewrite <- H2; ring. - replace (wB + ([|il|] - [|il1|])) with ([||WW ih il||] - ([|s|] * [|s|])). + replace (wB + (φ il - φ il1)) with (Φ(WW ih il) - (φ s * φ s)). rewrite <-Hbin in Hs2; auto with zarith. rewrite Hihl1; unfold zn2z_to_Z; rewrite <- H2; ring. unfold interp_carry. - case (Zle_lt_or_eq [|ih|] [|ih1|]); auto with zarith; intros H. + case (Zle_lt_or_eq φ ih φ ih1); auto with zarith; intros H. contradict Hs1. apply Zlt_not_le; rewrite Z.pow_2_r, Hihl1. unfold zn2z_to_Z. case (to_Z_bounded il); intros _ H2. - apply Z.lt_le_trans with (([|ih|] + 1) * wB + 0). + apply Z.lt_le_trans with ((φ ih + 1) * wB + 0). rewrite Zmult_plus_distr_l, Zplus_0_r; auto with zarith. case (to_Z_bounded il1); intros H3 _. apply Zplus_le_compat; auto with zarith. split. rewrite Z.pow_2_r, Hihl1. unfold zn2z_to_Z; ring[Hil2 H]. - replace [|il2|] with ([||WW ih il||] - [||WW ih1 il1||]). + replace φ il2 with (Φ(WW ih il) - Φ(WW ih1 il1)). unfold zn2z_to_Z at 2; rewrite <-Hihl1. rewrite <-Hbin in Hs2; auto with zarith. 
unfold zn2z_to_Z; rewrite H, Hil2; ring. unfold interp_carry in Hil2 |- *. - assert (Hsih: [|ih - 1|] = [|ih|] - 1). - rewrite sub_spec, Zmod_small; auto; replace [|1|] with 1; auto. + assert (Hsih: φ (ih - 1) = φ ih - 1). + rewrite sub_spec, Zmod_small; auto; replace φ 1 with 1; auto. case (to_Z_bounded ih); intros H1 H2. split; auto with zarith. apply Z.le_trans with (wB/4 - 1); auto with zarith. case_eq (ih1 < ih - 1)%int63; [idtac | rewrite <- not_true_iff_false]; rewrite ltb_spec, Hsih; intros Heq. rewrite Z.pow_2_r, Hihl1. - case (Zle_lt_or_eq ([|ih1|] + 2) [|ih|]); auto with zarith. + case (Zle_lt_or_eq (φ ih1 + 2) φ ih); auto with zarith. intros H2; contradict Hs2; apply Zle_not_lt. - replace (([|s|] + 1) ^ 2) with ([||WW ih1 il1||] + 2 * [|s|] + 1). + replace ((φ s + 1) ^ 2) with (Φ(WW ih1 il1) + 2 * φ s + 1). unfold zn2z_to_Z. - assert ([|ih1|] * wB + 2 * [|s|] + 1 <= [|ih|] * wB + ([|il|] - [|il1|])); + assert (φ ih1 * wB + 2 * φ s + 1 <= φ ih * wB + (φ il - φ il1)); auto with zarith. rewrite <-Hil2. case (to_Z_bounded il2); intros Hpil2 _. - apply Z.le_trans with ([|ih|] * wB + - wB); auto with zarith. + apply Z.le_trans with (φ ih * wB + - wB); auto with zarith. case (to_Z_bounded s); intros _ Hps. - assert (2 * [|s|] + 1 <= 2 * wB); auto with zarith. - apply Z.le_trans with ([|ih1|] * wB + 2 * wB); auto with zarith. - assert (Hi: ([|ih1|] + 3) * wB <= [|ih|] * wB) by auto with zarith. + assert (2 * φ s + 1 <= 2 * wB); auto with zarith. + apply Z.le_trans with (φ ih1 * wB + 2 * wB); auto with zarith. + assert (Hi: (φ ih1 + 3) * wB <= φ ih * wB) by auto with zarith. lia. unfold zn2z_to_Z; rewrite <-Hihl1, Hbin; auto. intros H2; unfold zn2z_to_Z; rewrite <-H2. split. - replace [|il|] with (([|il|] - [|il1|]) + [|il1|]); try ring. + replace φ il with ((φ il - φ il1) + φ il1); try ring. rewrite <-Hil2; ring. - replace (1 * wB + [|il2|]) with ([||WW ih il||] - [||WW ih1 il1||]). + replace (1 * wB + φ il2) with (Φ(WW ih il) - Φ(WW ih1 il1)). 
unfold zn2z_to_Z at 2; rewrite <-Hihl1. rewrite <-Hbin in Hs2; auto with zarith. unfold zn2z_to_Z; rewrite <-H2. - replace [|il|] with (([|il|] - [|il1|]) + [|il1|]); try ring. + replace φ il with ((φ il - φ il1) + φ il1); try ring. rewrite <-Hil2; ring. - case (Zle_lt_or_eq ([|ih|] - 1) ([|ih1|])); auto with zarith; intros H1. - assert (He: [|ih|] = [|ih1|]). + case (Zle_lt_or_eq (φ ih - 1) (φ ih1)); auto with zarith; intros H1. + assert (He: φ ih = φ ih1). apply Zle_antisym; auto with zarith. - case (Zle_or_lt [|ih1|] [|ih|]); auto; intros H2. + case (Zle_or_lt φ ih1 φ ih); auto; intros H2. contradict Hs1; apply Zlt_not_le; rewrite Z.pow_2_r, Hihl1. unfold zn2z_to_Z. case (to_Z_bounded il); intros _ Hpil1. - apply Z.lt_le_trans with (([|ih|] + 1) * wB). + apply Z.lt_le_trans with ((φ ih + 1) * wB). rewrite Zmult_plus_distr_l, Zmult_1_l; auto with zarith. case (to_Z_bounded il1); intros Hpil2 _. - apply Z.le_trans with (([|ih1|]) * wB); auto with zarith. + apply Z.le_trans with ((φ ih1) * wB); auto with zarith. contradict Hs1; apply Zlt_not_le; rewrite Z.pow_2_r, Hihl1. unfold zn2z_to_Z; rewrite He. - assert ([|il|] - [|il1|] < 0); auto with zarith. + assert (φ il - φ il1 < 0); auto with zarith. rewrite <-Hil2. case (to_Z_bounded il2); auto with zarith. split. rewrite Z.pow_2_r, Hihl1. unfold zn2z_to_Z; rewrite <-H1. - apply trans_equal with ([|ih|] * wB + [|il1|] + ([|il|] - [|il1|])). + apply trans_equal with (φ ih * wB + φ il1 + (φ il - φ il1)). ring. rewrite <-Hil2; ring. - replace [|il2|] with ([||WW ih il||] - [||WW ih1 il1||]). + replace φ il2 with (Φ(WW ih il) - Φ(WW ih1 il1)). unfold zn2z_to_Z at 2; rewrite <- Hihl1. rewrite <-Hbin in Hs2; auto with zarith. unfold zn2z_to_Z. rewrite <-H1. ring_simplify. - apply trans_equal with (wB + ([|il|] - [|il1|])). + apply trans_equal with (wB + (φ il - φ il1)). ring. rewrite <-Hil2; ring. Qed. @@ -1738,7 +1736,7 @@ Proof. symmetry; apply Z.mod_small. split. lia. exact h. Qed. 
-Lemma of_Z_spec n : [| of_Z n |] = n mod wB. +Lemma of_Z_spec n : φ (of_Z n) = n mod wB. Proof. destruct n. reflexivity. { now simpl; unfold of_pos; rewrite of_pos_rec_spec by lia. } diff --git a/plugins/btauto/Algebra.v b/theories/btauto/Algebra.v index 4a603f2c52..4a603f2c52 100644 --- a/plugins/btauto/Algebra.v +++ b/theories/btauto/Algebra.v diff --git a/plugins/btauto/Btauto.v b/theories/btauto/Btauto.v index d3331ccf89..d3331ccf89 100644 --- a/plugins/btauto/Btauto.v +++ b/theories/btauto/Btauto.v diff --git a/plugins/btauto/Reflect.v b/theories/btauto/Reflect.v index 867fe69550..867fe69550 100644 --- a/plugins/btauto/Reflect.v +++ b/theories/btauto/Reflect.v diff --git a/plugins/derive/Derive.v b/theories/derive/Derive.v index d1046ae79b..d1046ae79b 100644 --- a/plugins/derive/Derive.v +++ b/theories/derive/Derive.v diff --git a/plugins/extraction/ExtrHaskellBasic.v b/theories/extraction/ExtrHaskellBasic.v index d08a81da64..d08a81da64 100644 --- a/plugins/extraction/ExtrHaskellBasic.v +++ b/theories/extraction/ExtrHaskellBasic.v diff --git a/plugins/extraction/ExtrHaskellNatInt.v b/theories/extraction/ExtrHaskellNatInt.v index 267322d9ed..267322d9ed 100644 --- a/plugins/extraction/ExtrHaskellNatInt.v +++ b/theories/extraction/ExtrHaskellNatInt.v diff --git a/plugins/extraction/ExtrHaskellNatInteger.v b/theories/extraction/ExtrHaskellNatInteger.v index 4c5c71f58a..4c5c71f58a 100644 --- a/plugins/extraction/ExtrHaskellNatInteger.v +++ b/theories/extraction/ExtrHaskellNatInteger.v diff --git a/plugins/extraction/ExtrHaskellNatNum.v b/theories/extraction/ExtrHaskellNatNum.v index 09b0444614..09b0444614 100644 --- a/plugins/extraction/ExtrHaskellNatNum.v +++ b/theories/extraction/ExtrHaskellNatNum.v diff --git a/plugins/extraction/ExtrHaskellString.v b/theories/extraction/ExtrHaskellString.v index 8c61f4e96b..8c61f4e96b 100644 --- a/plugins/extraction/ExtrHaskellString.v +++ b/theories/extraction/ExtrHaskellString.v diff --git 
a/plugins/extraction/ExtrHaskellZInt.v b/theories/extraction/ExtrHaskellZInt.v index 0345ffc4e8..0345ffc4e8 100644 --- a/plugins/extraction/ExtrHaskellZInt.v +++ b/theories/extraction/ExtrHaskellZInt.v diff --git a/plugins/extraction/ExtrHaskellZInteger.v b/theories/extraction/ExtrHaskellZInteger.v index f7f9e2f80d..f7f9e2f80d 100644 --- a/plugins/extraction/ExtrHaskellZInteger.v +++ b/theories/extraction/ExtrHaskellZInteger.v diff --git a/plugins/extraction/ExtrHaskellZNum.v b/theories/extraction/ExtrHaskellZNum.v index 4141bd203f..4141bd203f 100644 --- a/plugins/extraction/ExtrHaskellZNum.v +++ b/theories/extraction/ExtrHaskellZNum.v diff --git a/plugins/extraction/ExtrOCamlFloats.v b/theories/extraction/ExtrOCamlFloats.v index 1891772cc2..1891772cc2 100644 --- a/plugins/extraction/ExtrOCamlFloats.v +++ b/theories/extraction/ExtrOCamlFloats.v diff --git a/plugins/extraction/ExtrOCamlInt63.v b/theories/extraction/ExtrOCamlInt63.v index a2ee602313..a2ee602313 100644 --- a/plugins/extraction/ExtrOCamlInt63.v +++ b/theories/extraction/ExtrOCamlInt63.v diff --git a/plugins/extraction/ExtrOcamlBasic.v b/theories/extraction/ExtrOcamlBasic.v index 2f82b24862..2f82b24862 100644 --- a/plugins/extraction/ExtrOcamlBasic.v +++ b/theories/extraction/ExtrOcamlBasic.v diff --git a/plugins/extraction/ExtrOcamlBigIntConv.v b/theories/extraction/ExtrOcamlBigIntConv.v index f8bc86d087..f8bc86d087 100644 --- a/plugins/extraction/ExtrOcamlBigIntConv.v +++ b/theories/extraction/ExtrOcamlBigIntConv.v diff --git a/plugins/extraction/ExtrOcamlChar.v b/theories/extraction/ExtrOcamlChar.v index 1e68365dd3..1e68365dd3 100644 --- a/plugins/extraction/ExtrOcamlChar.v +++ b/theories/extraction/ExtrOcamlChar.v diff --git a/plugins/extraction/ExtrOcamlIntConv.v b/theories/extraction/ExtrOcamlIntConv.v index 2de1906323..2de1906323 100644 --- a/plugins/extraction/ExtrOcamlIntConv.v +++ b/theories/extraction/ExtrOcamlIntConv.v diff --git a/plugins/extraction/ExtrOcamlNatBigInt.v 
b/theories/extraction/ExtrOcamlNatBigInt.v index a66d6e41fd..a66d6e41fd 100644 --- a/plugins/extraction/ExtrOcamlNatBigInt.v +++ b/theories/extraction/ExtrOcamlNatBigInt.v diff --git a/plugins/extraction/ExtrOcamlNatInt.v b/theories/extraction/ExtrOcamlNatInt.v index 406a7f0d2b..406a7f0d2b 100644 --- a/plugins/extraction/ExtrOcamlNatInt.v +++ b/theories/extraction/ExtrOcamlNatInt.v diff --git a/plugins/extraction/ExtrOcamlNativeString.v b/theories/extraction/ExtrOcamlNativeString.v index ec3da1e444..ec3da1e444 100644 --- a/plugins/extraction/ExtrOcamlNativeString.v +++ b/theories/extraction/ExtrOcamlNativeString.v diff --git a/plugins/extraction/ExtrOcamlString.v b/theories/extraction/ExtrOcamlString.v index 18c5ed3fe4..18c5ed3fe4 100644 --- a/plugins/extraction/ExtrOcamlString.v +++ b/theories/extraction/ExtrOcamlString.v diff --git a/plugins/extraction/ExtrOcamlZBigInt.v b/theories/extraction/ExtrOcamlZBigInt.v index c36ea50755..c36ea50755 100644 --- a/plugins/extraction/ExtrOcamlZBigInt.v +++ b/theories/extraction/ExtrOcamlZBigInt.v diff --git a/plugins/extraction/ExtrOcamlZInt.v b/theories/extraction/ExtrOcamlZInt.v index c7343d2468..c7343d2468 100644 --- a/plugins/extraction/ExtrOcamlZInt.v +++ b/theories/extraction/ExtrOcamlZInt.v diff --git a/plugins/extraction/Extraction.v b/theories/extraction/Extraction.v index 207c95247e..207c95247e 100644 --- a/plugins/extraction/Extraction.v +++ b/theories/extraction/Extraction.v diff --git a/plugins/funind/FunInd.v b/theories/funind/FunInd.v index d58b169154..d58b169154 100644 --- a/plugins/funind/FunInd.v +++ b/theories/funind/FunInd.v diff --git a/plugins/funind/Recdef.v b/theories/funind/Recdef.v index cd3d69861f..cd3d69861f 100644 --- a/plugins/funind/Recdef.v +++ b/theories/funind/Recdef.v diff --git a/plugins/ltac/Ltac.v b/theories/ltac/Ltac.v index e69de29bb2..e69de29bb2 100644 --- a/plugins/ltac/Ltac.v +++ b/theories/ltac/Ltac.v diff --git a/plugins/micromega/DeclConstant.v b/theories/micromega/DeclConstant.v 
index 7ad5e313e3..7ad5e313e3 100644 --- a/plugins/micromega/DeclConstant.v +++ b/theories/micromega/DeclConstant.v diff --git a/plugins/micromega/Env.v b/theories/micromega/Env.v index 8f4d4726b6..8f4d4726b6 100644 --- a/plugins/micromega/Env.v +++ b/theories/micromega/Env.v diff --git a/plugins/micromega/EnvRing.v b/theories/micromega/EnvRing.v index 2762bb6b32..2762bb6b32 100644 --- a/plugins/micromega/EnvRing.v +++ b/theories/micromega/EnvRing.v diff --git a/plugins/micromega/Fourier.v b/theories/micromega/Fourier.v index 0153de1dab..0153de1dab 100644 --- a/plugins/micromega/Fourier.v +++ b/theories/micromega/Fourier.v diff --git a/plugins/micromega/Fourier_util.v b/theories/micromega/Fourier_util.v index 95fa5b88df..95fa5b88df 100644 --- a/plugins/micromega/Fourier_util.v +++ b/theories/micromega/Fourier_util.v diff --git a/plugins/micromega/Lia.v b/theories/micromega/Lia.v index e53800d07d..e53800d07d 100644 --- a/plugins/micromega/Lia.v +++ b/theories/micromega/Lia.v diff --git a/plugins/micromega/Lqa.v b/theories/micromega/Lqa.v index 25fb62cfad..25fb62cfad 100644 --- a/plugins/micromega/Lqa.v +++ b/theories/micromega/Lqa.v diff --git a/plugins/micromega/Lra.v b/theories/micromega/Lra.v index 2403696696..2403696696 100644 --- a/plugins/micromega/Lra.v +++ b/theories/micromega/Lra.v diff --git a/plugins/micromega/MExtraction.v b/theories/micromega/MExtraction.v index 0e8c09ef1b..0e8c09ef1b 100644 --- a/plugins/micromega/MExtraction.v +++ b/theories/micromega/MExtraction.v diff --git a/plugins/micromega/OrderedRing.v b/theories/micromega/OrderedRing.v index d5884d9c1c..d5884d9c1c 100644 --- a/plugins/micromega/OrderedRing.v +++ b/theories/micromega/OrderedRing.v diff --git a/plugins/micromega/Psatz.v b/theories/micromega/Psatz.v index 16ae24ba81..16ae24ba81 100644 --- a/plugins/micromega/Psatz.v +++ b/theories/micromega/Psatz.v diff --git a/plugins/micromega/QMicromega.v b/theories/micromega/QMicromega.v index 4a02d1d01e..4a02d1d01e 100644 --- 
a/plugins/micromega/QMicromega.v +++ b/theories/micromega/QMicromega.v diff --git a/plugins/micromega/RMicromega.v b/theories/micromega/RMicromega.v index 0f7a02c2c9..0f7a02c2c9 100644 --- a/plugins/micromega/RMicromega.v +++ b/theories/micromega/RMicromega.v diff --git a/plugins/micromega/Refl.v b/theories/micromega/Refl.v index cd759029fa..cd759029fa 100644 --- a/plugins/micromega/Refl.v +++ b/theories/micromega/Refl.v diff --git a/plugins/micromega/RingMicromega.v b/theories/micromega/RingMicromega.v index aa8876357a..aa8876357a 100644 --- a/plugins/micromega/RingMicromega.v +++ b/theories/micromega/RingMicromega.v diff --git a/plugins/micromega/Tauto.v b/theories/micromega/Tauto.v index a155207e2e..a155207e2e 100644 --- a/plugins/micromega/Tauto.v +++ b/theories/micromega/Tauto.v diff --git a/plugins/micromega/VarMap.v b/theories/micromega/VarMap.v index 6db62e8401..6db62e8401 100644 --- a/plugins/micromega/VarMap.v +++ b/theories/micromega/VarMap.v diff --git a/plugins/micromega/ZCoeff.v b/theories/micromega/ZCoeff.v index 08f3f39204..08f3f39204 100644 --- a/plugins/micromega/ZCoeff.v +++ b/theories/micromega/ZCoeff.v diff --git a/plugins/micromega/ZMicromega.v b/theories/micromega/ZMicromega.v index 9bedb47371..9bedb47371 100644 --- a/plugins/micromega/ZMicromega.v +++ b/theories/micromega/ZMicromega.v diff --git a/plugins/micromega/Zify.v b/theories/micromega/Zify.v index 18cd196148..18cd196148 100644 --- a/plugins/micromega/Zify.v +++ b/theories/micromega/Zify.v diff --git a/plugins/micromega/ZifyBool.v b/theories/micromega/ZifyBool.v index 4060478363..4060478363 100644 --- a/plugins/micromega/ZifyBool.v +++ b/theories/micromega/ZifyBool.v diff --git a/plugins/micromega/ZifyClasses.v b/theories/micromega/ZifyClasses.v index d3f7f91074..d3f7f91074 100644 --- a/plugins/micromega/ZifyClasses.v +++ b/theories/micromega/ZifyClasses.v diff --git a/plugins/micromega/ZifyComparison.v b/theories/micromega/ZifyComparison.v index df75cf2c05..df75cf2c05 100644 --- 
a/plugins/micromega/ZifyComparison.v +++ b/theories/micromega/ZifyComparison.v diff --git a/plugins/micromega/ZifyInst.v b/theories/micromega/ZifyInst.v index edfb5a2a94..edfb5a2a94 100644 --- a/plugins/micromega/ZifyInst.v +++ b/theories/micromega/ZifyInst.v diff --git a/plugins/micromega/Ztac.v b/theories/micromega/Ztac.v index 091f58a0ef..091f58a0ef 100644 --- a/plugins/micromega/Ztac.v +++ b/theories/micromega/Ztac.v diff --git a/plugins/nsatz/Nsatz.v b/theories/nsatz/Nsatz.v index 896ee303cc..896ee303cc 100644 --- a/plugins/nsatz/Nsatz.v +++ b/theories/nsatz/Nsatz.v diff --git a/plugins/omega/Omega.v b/theories/omega/Omega.v index 4ceb530827..4ceb530827 100644 --- a/plugins/omega/Omega.v +++ b/theories/omega/Omega.v diff --git a/plugins/omega/OmegaLemmas.v b/theories/omega/OmegaLemmas.v index d2378569fc..d2378569fc 100644 --- a/plugins/omega/OmegaLemmas.v +++ b/theories/omega/OmegaLemmas.v diff --git a/plugins/omega/OmegaPlugin.v b/theories/omega/OmegaPlugin.v index 303eb0527a..303eb0527a 100644 --- a/plugins/omega/OmegaPlugin.v +++ b/theories/omega/OmegaPlugin.v diff --git a/plugins/omega/OmegaTactic.v b/theories/omega/OmegaTactic.v index 303eb0527a..303eb0527a 100644 --- a/plugins/omega/OmegaTactic.v +++ b/theories/omega/OmegaTactic.v diff --git a/plugins/omega/PreOmega.v b/theories/omega/PreOmega.v index 34533670f8..34533670f8 100644 --- a/plugins/omega/PreOmega.v +++ b/theories/omega/PreOmega.v diff --git a/plugins/rtauto/Bintree.v b/theories/rtauto/Bintree.v index 6b92445326..6b92445326 100644 --- a/plugins/rtauto/Bintree.v +++ b/theories/rtauto/Bintree.v diff --git a/plugins/rtauto/Rtauto.v b/theories/rtauto/Rtauto.v index 2e9b4347b9..2e9b4347b9 100644 --- a/plugins/rtauto/Rtauto.v +++ b/theories/rtauto/Rtauto.v diff --git a/plugins/setoid_ring/Algebra_syntax.v b/theories/setoid_ring/Algebra_syntax.v index 5f594d29cd..5f594d29cd 100644 --- a/plugins/setoid_ring/Algebra_syntax.v +++ b/theories/setoid_ring/Algebra_syntax.v diff --git 
a/plugins/setoid_ring/ArithRing.v b/theories/setoid_ring/ArithRing.v index 727e99f0b4..727e99f0b4 100644 --- a/plugins/setoid_ring/ArithRing.v +++ b/theories/setoid_ring/ArithRing.v diff --git a/plugins/setoid_ring/BinList.v b/theories/setoid_ring/BinList.v index 958832274b..958832274b 100644 --- a/plugins/setoid_ring/BinList.v +++ b/theories/setoid_ring/BinList.v diff --git a/plugins/setoid_ring/Cring.v b/theories/setoid_ring/Cring.v index df0313a624..df0313a624 100644 --- a/plugins/setoid_ring/Cring.v +++ b/theories/setoid_ring/Cring.v diff --git a/plugins/setoid_ring/Field.v b/theories/setoid_ring/Field.v index 9ff07948df..9ff07948df 100644 --- a/plugins/setoid_ring/Field.v +++ b/theories/setoid_ring/Field.v diff --git a/plugins/setoid_ring/Field_tac.v b/theories/setoid_ring/Field_tac.v index a5390efc7f..a5390efc7f 100644 --- a/plugins/setoid_ring/Field_tac.v +++ b/theories/setoid_ring/Field_tac.v diff --git a/plugins/setoid_ring/Field_theory.v b/theories/setoid_ring/Field_theory.v index 3736bc47a5..3736bc47a5 100644 --- a/plugins/setoid_ring/Field_theory.v +++ b/theories/setoid_ring/Field_theory.v diff --git a/plugins/setoid_ring/InitialRing.v b/theories/setoid_ring/InitialRing.v index dc096554c8..dc096554c8 100644 --- a/plugins/setoid_ring/InitialRing.v +++ b/theories/setoid_ring/InitialRing.v diff --git a/plugins/setoid_ring/Integral_domain.v b/theories/setoid_ring/Integral_domain.v index f1394c51d5..f1394c51d5 100644 --- a/plugins/setoid_ring/Integral_domain.v +++ b/theories/setoid_ring/Integral_domain.v diff --git a/plugins/setoid_ring/NArithRing.v b/theories/setoid_ring/NArithRing.v index 8cda4ad714..8cda4ad714 100644 --- a/plugins/setoid_ring/NArithRing.v +++ b/theories/setoid_ring/NArithRing.v diff --git a/plugins/setoid_ring/Ncring.v b/theories/setoid_ring/Ncring.v index 8f3de26272..8f3de26272 100644 --- a/plugins/setoid_ring/Ncring.v +++ b/theories/setoid_ring/Ncring.v diff --git a/plugins/setoid_ring/Ncring_initial.v 
b/theories/setoid_ring/Ncring_initial.v index e40ef6056d..e40ef6056d 100644 --- a/plugins/setoid_ring/Ncring_initial.v +++ b/theories/setoid_ring/Ncring_initial.v diff --git a/plugins/setoid_ring/Ncring_polynom.v b/theories/setoid_ring/Ncring_polynom.v index 048c8eecf9..048c8eecf9 100644 --- a/plugins/setoid_ring/Ncring_polynom.v +++ b/theories/setoid_ring/Ncring_polynom.v diff --git a/plugins/setoid_ring/Ncring_tac.v b/theories/setoid_ring/Ncring_tac.v index 65233873b1..65233873b1 100644 --- a/plugins/setoid_ring/Ncring_tac.v +++ b/theories/setoid_ring/Ncring_tac.v diff --git a/plugins/setoid_ring/RealField.v b/theories/setoid_ring/RealField.v index d83fcf3781..d83fcf3781 100644 --- a/plugins/setoid_ring/RealField.v +++ b/theories/setoid_ring/RealField.v diff --git a/plugins/setoid_ring/Ring.v b/theories/setoid_ring/Ring.v index 35e308565f..35e308565f 100644 --- a/plugins/setoid_ring/Ring.v +++ b/theories/setoid_ring/Ring.v diff --git a/plugins/setoid_ring/Ring_base.v b/theories/setoid_ring/Ring_base.v index 36e7890fbb..36e7890fbb 100644 --- a/plugins/setoid_ring/Ring_base.v +++ b/theories/setoid_ring/Ring_base.v diff --git a/plugins/setoid_ring/Ring_polynom.v b/theories/setoid_ring/Ring_polynom.v index 092114ff0b..092114ff0b 100644 --- a/plugins/setoid_ring/Ring_polynom.v +++ b/theories/setoid_ring/Ring_polynom.v diff --git a/plugins/setoid_ring/Ring_tac.v b/theories/setoid_ring/Ring_tac.v index 0a14c0ee5c..0a14c0ee5c 100644 --- a/plugins/setoid_ring/Ring_tac.v +++ b/theories/setoid_ring/Ring_tac.v diff --git a/plugins/setoid_ring/Ring_theory.v b/theories/setoid_ring/Ring_theory.v index dc45853458..dc45853458 100644 --- a/plugins/setoid_ring/Ring_theory.v +++ b/theories/setoid_ring/Ring_theory.v diff --git a/plugins/setoid_ring/Rings_Q.v b/theories/setoid_ring/Rings_Q.v index b3ed0be916..b3ed0be916 100644 --- a/plugins/setoid_ring/Rings_Q.v +++ b/theories/setoid_ring/Rings_Q.v diff --git a/plugins/setoid_ring/Rings_R.v b/theories/setoid_ring/Rings_R.v index 
ec91fa9e97..ec91fa9e97 100644 --- a/plugins/setoid_ring/Rings_R.v +++ b/theories/setoid_ring/Rings_R.v diff --git a/plugins/setoid_ring/Rings_Z.v b/theories/setoid_ring/Rings_Z.v index 8a51bcea02..8a51bcea02 100644 --- a/plugins/setoid_ring/Rings_Z.v +++ b/theories/setoid_ring/Rings_Z.v diff --git a/plugins/setoid_ring/ZArithRing.v b/theories/setoid_ring/ZArithRing.v index 833e19a698..833e19a698 100644 --- a/plugins/setoid_ring/ZArithRing.v +++ b/theories/setoid_ring/ZArithRing.v diff --git a/plugins/ssr/ssrbool.v b/theories/ssr/ssrbool.v index 475859fcc2..e2ab812cce 100644 --- a/plugins/ssr/ssrbool.v +++ b/theories/ssr/ssrbool.v @@ -437,7 +437,7 @@ Reserved Notation "{ 'on' cd , 'bijective' f }" (at level 0, f at level 8, is | or => . It is important that in other notations a leading square bracket #[# is always followed by an operator symbol or a fixed identifier. **) -Reserved Notation "[ /\ P1 & P2 ]" (at level 0, only parsing). +Reserved Notation "[ /\ P1 & P2 ]" (at level 0). Reserved Notation "[ /\ P1 , P2 & P3 ]" (at level 0, format "'[hv' [ /\ '[' P1 , '/' P2 ']' '/ ' & P3 ] ']'"). Reserved Notation "[ /\ P1 , P2 , P3 & P4 ]" (at level 0, format @@ -445,21 +445,21 @@ Reserved Notation "[ /\ P1 , P2 , P3 & P4 ]" (at level 0, format Reserved Notation "[ /\ P1 , P2 , P3 , P4 & P5 ]" (at level 0, format "'[hv' [ /\ '[' P1 , '/' P2 , '/' P3 , '/' P4 ']' '/ ' & P5 ] ']'"). -Reserved Notation "[ \/ P1 | P2 ]" (at level 0, only parsing). +Reserved Notation "[ \/ P1 | P2 ]" (at level 0). Reserved Notation "[ \/ P1 , P2 | P3 ]" (at level 0, format "'[hv' [ \/ '[' P1 , '/' P2 ']' '/ ' | P3 ] ']'"). Reserved Notation "[ \/ P1 , P2 , P3 | P4 ]" (at level 0, format "'[hv' [ \/ '[' P1 , '/' P2 , '/' P3 ']' '/ ' | P4 ] ']'"). -Reserved Notation "[ && b1 & c ]" (at level 0, only parsing). +Reserved Notation "[ && b1 & c ]" (at level 0). Reserved Notation "[ && b1 , b2 , .. , bn & c ]" (at level 0, format "'[hv' [ && '[' b1 , '/' b2 , '/' .. , '/' bn ']' '/ ' & c ] ']'"). 
-Reserved Notation "[ || b1 | c ]" (at level 0, only parsing). +Reserved Notation "[ || b1 | c ]" (at level 0). Reserved Notation "[ || b1 , b2 , .. , bn | c ]" (at level 0, format "'[hv' [ || '[' b1 , '/' b2 , '/' .. , '/' bn ']' '/ ' | c ] ']'"). -Reserved Notation "[ ==> b1 => c ]" (at level 0, only parsing). +Reserved Notation "[ ==> b1 => c ]" (at level 0). Reserved Notation "[ ==> b1 , b2 , .. , bn => c ]" (at level 0, format "'[hv' [ ==> '[' b1 , '/' b2 , '/' .. , '/' bn ']' '/' => c ] ']'"). diff --git a/plugins/ssr/ssrclasses.v b/theories/ssr/ssrclasses.v index 0ae3f8c6a5..0ae3f8c6a5 100644 --- a/plugins/ssr/ssrclasses.v +++ b/theories/ssr/ssrclasses.v diff --git a/plugins/ssr/ssreflect.v b/theories/ssr/ssreflect.v index bc4a57dedd..701ebcad56 100644 --- a/plugins/ssr/ssreflect.v +++ b/theories/ssr/ssreflect.v @@ -97,11 +97,11 @@ Local Notation CoqCast x T := (x : T) (only parsing). (** Reserve notation that introduced in this file. **) Reserved Notation "'if' c 'then' vT 'else' vF" (at level 200, - c, vT, vF at level 200, only parsing). + c, vT, vF at level 200). Reserved Notation "'if' c 'return' R 'then' vT 'else' vF" (at level 200, - c, R, vT, vF at level 200, only parsing). + c, R, vT, vF at level 200). Reserved Notation "'if' c 'as' x 'return' R 'then' vT 'else' vF" (at level 200, - c, R, vT, vF at level 200, x ident, only parsing). + c, R, vT, vF at level 200, x ident). Reserved Notation "x : T" (at level 100, right associativity, format "'[hv' x '/ ' : T ']'"). 
diff --git a/plugins/ssr/ssrfun.v b/theories/ssr/ssrfun.v index dd847169b9..dd847169b9 100644 --- a/plugins/ssr/ssrfun.v +++ b/theories/ssr/ssrfun.v diff --git a/plugins/ssr/ssrsetoid.v b/theories/ssr/ssrsetoid.v index 7c5cd135fe..7c5cd135fe 100644 --- a/plugins/ssr/ssrsetoid.v +++ b/theories/ssr/ssrsetoid.v diff --git a/plugins/ssr/ssrunder.v b/theories/ssr/ssrunder.v index 7c529a6133..7c529a6133 100644 --- a/plugins/ssr/ssrunder.v +++ b/theories/ssr/ssrunder.v diff --git a/plugins/ssrmatching/ssrmatching.v b/theories/ssrmatching/ssrmatching.v index 23a16615f5..23a16615f5 100644 --- a/plugins/ssrmatching/ssrmatching.v +++ b/theories/ssrmatching/ssrmatching.v diff --git a/tools/CoqMakefile.in b/tools/CoqMakefile.in index 49fb88cd8c..1d682218b6 100644 --- a/tools/CoqMakefile.in +++ b/tools/CoqMakefile.in @@ -125,6 +125,10 @@ CAMLPKGS ?= TIMING?= # Option for changing sorting of timing output file TIMING_SORT_BY ?= auto +# Option for changing the fuzz parameter on the output file +TIMING_FUZZ ?= 0 +# Option for changing whether to use real or user time for timing tables +TIMING_REAL?= # Output file names for timed builds TIME_OF_BUILD_FILE ?= time-of-build.log TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log @@ -335,6 +339,19 @@ all.timing.diff: $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all .PHONY: all.timing.diff +ifeq (0,$(TIMING_REAL)) +TIMING_REAL_ARG := +TIMING_USER_ARG := --user +else +ifeq (1,$(TIMING_REAL)) +TIMING_REAL_ARG := --real +TIMING_USER_ARG := +else +TIMING_REAL_ARG := +TIMING_USER_ARG := +endif +endif + make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE) make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE) make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: @@ -342,9 +359,9 @@ make-pretty-timed make-pretty-timed-before make-pretty-timed-after:: $(HIDE)($(MAKE) --no-print-directory -f "$(PARENT)" $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a 
$(TIME_OF_BUILD_FILE) $(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed print-pretty-timed:: - $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) + $(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) print-pretty-timed-diff:: - $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) + $(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) ifeq (,$(BEFORE)) print-pretty-single-time-diff:: @echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing' @@ -356,7 +373,7 @@ print-pretty-single-time-diff:: $(HIDE)false else print-pretty-single-time-diff:: - $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --sort-by=$(TIMING_SORT_BY) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) + $(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES) endif endif pretty-timed: @@ -695,7 +712,7 @@ $(VFILES:.v=.vok): %.vok: %.v $(HIDE)$(TIMER) $(COQC) -vok $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $< $(addsuffix .timing.diff,$(VFILES)): %.timing.diff : %.before-timing %.after-timing - $(SHOW)PYTHON TIMING-DIFF $< + $(SHOW)PYTHON TIMING-DIFF $*.{before,after}-timing $(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-single-time-diff BEFORE=$*.before-timing AFTER=$*.after-timing TIME_OF_PRETTY_BUILD_FILE="$@" $(BEAUTYFILES): %.v.beautified: %.v diff --git a/tools/TimeFileMaker.py 
b/tools/TimeFileMaker.py index 3d07661d56..210901f8a7 100644 --- a/tools/TimeFileMaker.py +++ b/tools/TimeFileMaker.py @@ -4,6 +4,7 @@ from __future__ import unicode_literals from __future__ import print_function import sys import re +import argparse from io import open # This script parses the output of `make TIMED=1` into a dictionary @@ -14,18 +15,76 @@ STRIP_REG = re.compile('^(coq/|contrib/|)(?:theories/|src/)?') STRIP_REP = r'\1' INFINITY = '\u221e' -def parse_args(argv, USAGE, HELP_STRING): - sort_by = 'auto' - if any(arg.startswith('--sort-by=') for arg in argv[1:]): - sort_by = [arg for arg in argv[1:] if arg.startswith('--sort-by=')][-1][len('--sort-by='):] - args = [arg for arg in argv if not arg.startswith('--sort-by=')] - if len(args) < 3 or '--help' in args[1:] or '-h' in args[1:] or sort_by not in ('auto', 'absolute', 'diff'): - print(USAGE) - if '--help' in args[1:] or '-h' in args[1:]: - print(HELP_STRING) - if len(args) == 2: sys.exit(0) - sys.exit(1) - return sort_by, args +def nonnegative(arg): + v = int(arg) + if v < 0: raise argparse.ArgumentTypeError("%s is an invalid non-negative int value" % arg) + return v + +def add_sort_by(parser): + return parser.add_argument( + '--sort-by', type=str, dest='sort_by', choices=('auto', 'absolute', 'diff'), + default='auto', + help=('How to sort the table entries.\n' + + 'The "auto" method sorts by absolute time differences ' + + 'rounded towards zero to a whole-number of seconds, then ' + + 'by times in the "after" column, and finally ' + + 'lexicographically by file name. 
This will put the ' + + 'biggest changes in either direction first, and will ' + + 'prefer sorting by build-time over subsecond changes in ' + + 'build time (which are frequently noise); lexicographic ' + + 'sorting forces an order on files which take effectively ' + + 'no time to compile.\n' + + 'The "absolute" method sorts by the total time taken.\n' + + 'The "diff" method sorts by the signed difference in time.')) + +def add_fuzz(parser): + return parser.add_argument( + '--fuzz', dest='fuzz', metavar='N', type=nonnegative, default=0, + help=('By default, two lines are only considered the same if ' + + 'the character offsets and initial code strings match. ' + 'This option relaxes this constraint by allowing the ' + + 'character offsets to differ by up to N characters, as long ' + + 'as the total number of characters and initial code strings ' + + 'continue to match. This is useful when there are small changes ' + + 'to a file, and you want to match later lines that have not ' + + 'changed even though the character offsets have changed.')) + +def add_real(parser, single_timing=False): + return parser.add_argument( + '--real', action='store_true', + help=(r'''Use real times rather than user times. + +''' + ('''By default, the input is expected to contain lines in the format: +FILE_NAME (...user: NUMBER_IN_SECONDS...) +If --real is passed, then the lines are instead expected in the format: +FILE_NAME (...real: NUMBER_IN_SECONDS...)''' if not single_timing else +'''The input is expected to contain lines in the format: +Chars START - END COMMAND NUMBER secs (NUMBERu...)'''))) + +def add_user(parser, single_timing=False): + return parser.add_argument( + '--user', dest='real', action='store_false', + help=(r'''Use user times rather than real times. + +''' + ('''By default, the input is expected to contain lines in the format: +FILE_NAME (...real: NUMBER_IN_SECONDS...) 
+If --user is passed, then the lines are instead expected in the format: +FILE_NAME (...user: NUMBER_IN_SECONDS...)''' if not single_timing else +'''The input is expected to contain lines in the format: +Chars START - END COMMAND NUMBER secs (NUMBERu...)'''))) + +# N.B. We need to include default=None for nargs='*', c.f., https://bugs.python.org/issue28609#msg280180 +def add_file_name_gen(parser, prefix='', descr='file containing the build log', stddir='in', defaults=None, **kwargs): + extra = ('' if defaults is None else ' (defaults to %s if no argument is passed)' % defaults) + return parser.add_argument( + prefix + 'FILE_NAME', type=str, + help=('The name of the %s (use "-" for std%s)%s.' % (descr, stddir, extra)), + **kwargs) + +def add_file_name(parser): return add_file_name_gen(parser) +def add_after_file_name(parser): return add_file_name_gen(parser, 'AFTER_', 'file containing the "after" build log') +def add_before_file_name(parser): return add_file_name_gen(parser, 'BEFORE_', 'file containing the "before" build log') +def add_output_file_name(parser): return add_file_name_gen(parser, 'OUTPUT_', 'file to write the output table to', stddir='out', defaults='-', nargs='*', default=None) def reformat_time_string(time): @@ -45,14 +104,16 @@ def get_file_lines(file_name): lines = f.readlines() for line in lines: try: - yield line.decode('utf-8') + # Since we read the files in binary mode, we have to + # normalize Windows line endings from \r\n to \n + yield line.decode('utf-8').replace('\r\n', '\n') except UnicodeDecodeError: # invalid utf-8 pass def get_file(file_name): return ''.join(get_file_lines(file_name)) -def get_times(file_name): +def get_times(file_name, use_real=False): ''' Reads the contents of file_name, which should be the output of 'make TIMED=1', and parses it to construct a dict mapping file @@ -60,28 +121,96 @@ def get_times(file_name): using STRIP_REG and STRIP_REP. 
''' lines = get_file(file_name) - reg = re.compile(r'^([^\s]+) \([^\)]*?user: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE) + reg_user = re.compile(r'^([^\s]+) \([^\)]*?user: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE) + reg_real = re.compile(r'^([^\s]+) \([^\)]*?real: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE) + reg = reg_real if use_real else reg_user times = reg.findall(lines) if all(time in ('0.00', '0.01') for name, time in times): - reg = re.compile(r'^([^\s]*) \([^\)]*?real: ([0-9\.]+)[^\)]*?\)\s*$', re.MULTILINE) + reg = reg_real times = reg.findall(lines) if all(STRIP_REG.search(name.strip()) for name, time in times): times = tuple((STRIP_REG.sub(STRIP_REP, name.strip()), time) for name, time in times) return dict((name, reformat_time_string(time)) for name, time in times) -def get_single_file_times(file_name): +def get_single_file_times(file_name, use_real=False): ''' Reads the contents of file_name, which should be the output of 'coqc -time', and parses it to construct a dict mapping lines to to compile durations, as strings. 
''' lines = get_file(file_name) - reg = re.compile(r'^Chars ([0-9]+) - ([0-9]+) ([^ ]+) ([0-9\.]+) secs (.*)$', re.MULTILINE) + reg = re.compile(r'^Chars ([0-9]+) - ([0-9]+) ([^ ]+) ([0-9\.]+) secs \(([0-9\.]+)u(.*)\)$', re.MULTILINE) times = reg.findall(lines) if len(times) == 0: return dict() - longest = max(max((len(start), len(stop))) for start, stop, name, time, extra in times) + longest = max(max((len(start), len(stop))) for start, stop, name, real, user, extra in times) FORMAT = 'Chars %%0%dd - %%0%dd %%s' % (longest, longest) - return dict((FORMAT % (int(start), int(stop), name), reformat_time_string(time)) for start, stop, name, time, extra in times) + return dict((FORMAT % (int(start), int(stop), name), reformat_time_string(real if use_real else user)) for start, stop, name, real, user, extra in times) + +def fuzz_merge(l1, l2, fuzz): + '''Takes two iterables of ((start, end, code), times) and a fuzz + parameter, and yields a single iterable of ((start, stop, code), + times1, times2) + + We only give both left and right if (a) the codes are the same, + (b) the number of characters (stop - start) is the same, and (c) + the difference between left and right code locations is <= fuzz. + + We keep a current guess at the overall offset, and prefer drawing + from whichever list is earliest after correcting for current + offset. 
+ + ''' + assert(fuzz >= 0) + cur_fuzz = 0 + l1 = list(l1) + l2 = list(l2) + cur1, cur2 = None, None + while (len(l1) > 0 or cur1 is not None) and (len(l2) > 0 or cur2 is not None): + if cur1 is None: cur1 = l1.pop(0) + if cur2 is None: cur2 = l2.pop(0) + ((s1, e1, c1), t1), ((s2, e2, c2), t2) = cur1, cur2 + assert(t1 is not None) + assert(t2 is not None) + s2_adjusted, e2_adjusted = s2 + cur_fuzz, e2 + cur_fuzz + if cur1[0] == cur2[0]: + yield (cur1, cur2) + cur1, cur2 = None, None + cur_fuzz = 0 + elif c1 == c2 and e1-s1 == e2-s2 and abs(s1 - s2) <= fuzz: + yield (((s1, e1, c1), t1), ((s2, e2, c2), t2)) + cur1, cur2 = None, None + cur_fuzz = s1 - s2 + elif s1 < s2_adjusted or (s1 == s2_adjusted and e1 <= e2): + yield (((s1, e1, c1), t1), ((s1 - cur_fuzz, e1 - cur_fuzz, c1), None)) + cur1 = None + else: + yield (((s2 + cur_fuzz, e2 + cur_fuzz, c2), None), ((s2, e2, c2), t2)) + cur2 = None + if len(l1) > 0: + for i in l1: yield (i, (i[0], None)) + elif len(l2) > 0: + for i in l2: yield ((i[0], None), i) + +def adjust_fuzz(left_dict, right_dict, fuzz): + reg = re.compile(r'Chars ([0-9]+) - ([0-9]+) (.*)$') + left_dict_list = sorted(((int(s), int(e), c), v) for ((s, e, c), v) in ((reg.match(k).groups(), v) for k, v in left_dict.items())) + right_dict_list = sorted(((int(s), int(e), c), v) for ((s, e, c), v) in ((reg.match(k).groups(), v) for k, v in right_dict.items())) + merged = list(fuzz_merge(left_dict_list, right_dict_list, fuzz)) + if len(merged) == 0: + # assert that both left and right dicts are empty + assert(not left_dict) + assert(not right_dict) + return left_dict, right_dict + longest = max(max((len(str(start1)), len(str(stop1)), len(str(start2)), len(str(stop2)))) for ((start1, stop1, code1), t1), ((start2, stop2, code2), t2) in merged) + FORMAT1 = 'Chars %%0%dd - %%0%dd %%s' % (longest, longest) + FORMAT2 = 'Chars %%0%dd-%%0%dd ~ %%0%dd-%%0%dd %%s' % (longest, longest, longest, longest) + if fuzz == 0: + left_dict = dict((FORMAT1 % k, t1) for (k, t1), 
_ in merged if t1 is not None) + right_dict = dict((FORMAT1 % k, t2) for _, (k, t2) in merged if t2 is not None) + else: + left_dict = dict((FORMAT2 % (s1, e1, s2, e2, c1), t1) for ((s1, e1, c1), t1), ((s2, e2, c2), t2) in merged if t1 is not None) + right_dict = dict((FORMAT2 % (s1, e1, s2, e2, c1), t2) for ((s1, e1, c1), t1), ((s2, e2, c2), t2) in merged if t2 is not None) + return left_dict, right_dict def fix_sign_for_sorting(num, descending=True): return -num if descending else num diff --git a/tools/coq_dune.ml b/tools/coq_dune.ml index f62947ec67..96fb9710c7 100644 --- a/tools/coq_dune.ml +++ b/tools/coq_dune.ml @@ -213,7 +213,7 @@ let record_dune d ff = if Sys.file_exists sd && Sys.is_directory sd then let out = open_out (bpath [sd;"dune"]) in let fmt = formatter_of_out_channel out in - if List.nth d 0 = "plugins" || List.nth d 0 = "user-contrib" then + if Sys.file_exists (bpath [sd; "plugin_base.dune"]) then fprintf fmt "(include plugin_base.dune)@\n"; out_install fmt d ff; List.iter (pp_dep d fmt) ff; @@ -285,8 +285,11 @@ let exec_ifile f = begin try let ic = open_in in_file in (try f ic - with _ -> eprintf "Error: exec_ifile@\n%!"; close_in ic) - with _ -> eprintf "Error: cannot open input file %s@\n%!" in_file + with exn -> + eprintf "Error: exec_ifile @[%s@]@\n%!" (Printexc.to_string exn); + close_in ic) + with _ -> + eprintf "Error: cannot open input file %s@\n%!" 
in_file end | _ -> eprintf "Error: wrong number of arguments@\n%!"; exit 1 diff --git a/tools/make-both-single-timing-files.py b/tools/make-both-single-timing-files.py index fddf75f39f..a28da43043 100755 --- a/tools/make-both-single-timing-files.py +++ b/tools/make-both-single-timing-files.py @@ -1,12 +1,17 @@ #!/usr/bin/env python3 -import sys from TimeFileMaker import * if __name__ == '__main__': - USAGE = 'Usage: %s [--sort-by=auto|absolute|diff] AFTER_FILE_NAME BEFORE_FILE_NAME [OUTPUT_FILE_NAME ..]' % sys.argv[0] - HELP_STRING = r'''Formats timing information from the output of two invocations of `coqc -time` into a sorted table''' - sort_by, args = parse_args(sys.argv, USAGE, HELP_STRING) - left_dict = get_single_file_times(args[1]) - right_dict = get_single_file_times(args[2]) - table = make_diff_table_string(left_dict, right_dict, tag="Code", sort_by=sort_by) - print_or_write_table(table, args[3:]) + parser = argparse.ArgumentParser(description=r'''Formats timing information from the output of two invocations of `coqc -time` into a sorted table''') + add_sort_by(parser) + add_user(parser, single_timing=True) + add_fuzz(parser) + add_after_file_name(parser) + add_before_file_name(parser) + add_output_file_name(parser) + args = parser.parse_args() + left_dict = get_single_file_times(args.AFTER_FILE_NAME, use_real=args.real) + right_dict = get_single_file_times(args.BEFORE_FILE_NAME, use_real=args.real) + left_dict, right_dict = adjust_fuzz(left_dict, right_dict, fuzz=args.fuzz) + table = make_diff_table_string(left_dict, right_dict, tag="Code", sort_by=args.sort_by) + print_or_write_table(table, args.OUTPUT_FILE_NAME) diff --git a/tools/make-both-time-files.py b/tools/make-both-time-files.py index 8937d63c2f..5d88548bba 100755 --- a/tools/make-both-time-files.py +++ b/tools/make-both-time-files.py @@ -1,16 +1,15 @@ #!/usr/bin/env python3 -import sys from TimeFileMaker import * if __name__ == '__main__': - USAGE = 'Usage: %s [--sort-by=auto|absolute|diff] 
AFTER_FILE_NAME BEFORE_FILE_NAME [OUTPUT_FILE_NAME ..]' % sys.argv[0] - HELP_STRING = r'''Formats timing information from the output of two invocations of `make TIMED=1` into a sorted table. - -The input is expected to contain lines in the format: -FILE_NAME (...user: NUMBER_IN_SECONDS...) -''' - sort_by, args = parse_args(sys.argv, USAGE, HELP_STRING) - left_dict = get_times(args[1]) - right_dict = get_times(args[2]) - table = make_diff_table_string(left_dict, right_dict, sort_by=sort_by) - print_or_write_table(table, args[3:]) + parser = argparse.ArgumentParser(description=r'''Formats timing information from the output of two invocations of `make TIMED=1` into a sorted table.''') + add_sort_by(parser) + add_real(parser) + add_after_file_name(parser) + add_before_file_name(parser) + add_output_file_name(parser) + args = parser.parse_args() + left_dict = get_times(args.AFTER_FILE_NAME, use_real=args.real) + right_dict = get_times(args.BEFORE_FILE_NAME, use_real=args.real) + table = make_diff_table_string(left_dict, right_dict, sort_by=args.sort_by) + print_or_write_table(table, args.OUTPUT_FILE_NAME) diff --git a/tools/make-one-time-file.py b/tools/make-one-time-file.py index ad0a04ab07..3df7d7e584 100755 --- a/tools/make-one-time-file.py +++ b/tools/make-one-time-file.py @@ -3,19 +3,11 @@ import sys from TimeFileMaker import * if __name__ == '__main__': - USAGE = 'Usage: %s FILE_NAME [OUTPUT_FILE_NAME ..]' % sys.argv[0] - HELP_STRING = r'''Formats timing information from the output of `make TIMED=1` into a sorted table. - -The input is expected to contain lines in the format: -FILE_NAME (...user: NUMBER_IN_SECONDS...) 
-''' - if len(sys.argv) < 2 or '--help' in sys.argv[1:] or '-h' in sys.argv[1:]: - print(USAGE) - if '--help' in sys.argv[1:] or '-h' in sys.argv[1:]: - print(HELP_STRING) - if len(sys.argv) == 2: sys.exit(0) - sys.exit(1) - else: - times_dict = get_times(sys.argv[1]) - table = make_table_string(times_dict) - print_or_write_table(table, sys.argv[2:]) + parser = argparse.ArgumentParser(description=r'''Formats timing information from the output of `make TIMED=1` into a sorted table.''') + add_real(parser) + add_file_name(parser) + add_output_file_name(parser) + args = parser.parse_args() + times_dict = get_times(args.FILE_NAME, use_real=args.real) + table = make_table_string(times_dict) + print_or_write_table(table, args.OUTPUT_FILE_NAME) diff --git a/topbin/coqtop_byte_bin.ml b/topbin/coqtop_byte_bin.ml index 604c6e251a..7e977ca0f2 100644 --- a/topbin/coqtop_byte_bin.ml +++ b/topbin/coqtop_byte_bin.ml @@ -11,9 +11,9 @@ (* We register this handler for lower-level toplevel loading code *) let _ = CErrors.register_handler (function | Symtable.Error e -> - Pp.str (Format.asprintf "%a" Symtable.report_error e) + Some (Pp.str (Format.asprintf "%a" Symtable.report_error e)) | _ -> - raise CErrors.Unhandled + None ) let drop_setup () = diff --git a/toplevel/coqargs.ml b/toplevel/coqargs.ml index 506a8dc5b0..949a13974c 100644 --- a/toplevel/coqargs.ml +++ b/toplevel/coqargs.ml @@ -55,6 +55,8 @@ type coqargs_config = { color : color; enable_VM : bool; native_compiler : native_compiler; + native_output_dir : CUnix.physical_path; + native_include_dirs : CUnix.physical_path list; stm_flags : Stm.AsyncOpts.stm_opt; debug : bool; diffs_set : bool; @@ -64,6 +66,7 @@ type coqargs_config = { } type coqargs_pre = { + boot : bool; load_init : bool; load_rcfile : bool; @@ -120,6 +123,8 @@ let default_config = { color = `AUTO; enable_VM = true; native_compiler = default_native; + native_output_dir = ".coq-native"; + native_include_dirs = []; stm_flags = Stm.AsyncOpts.default_opts; debug 
= false; diffs_set = false; @@ -131,6 +136,7 @@ let default_config = { } let default_pre = { + boot = false; load_init = true; load_rcfile = true; ml_includes = []; @@ -259,8 +265,10 @@ let get_cache opt = function let get_native_name s = (* We ignore even critical errors because this mode has to be super silent *) try - String.concat "/" [Filename.dirname s; - Nativelib.output_dir; Library.native_name_from_filename s] + Filename.(List.fold_left concat (dirname s) + [ !Nativelib.output_dir + ; Library.native_name_from_filename s + ]) with _ -> "" let get_compat_file = function @@ -483,6 +491,14 @@ let parse_args ~help ~init arglist : t * string list = let opt = to_opt_key opt in { oval with config = { oval.config with set_options = (opt, OptionUnset) :: oval.config.set_options }} + |"-native-output-dir" -> + let native_output_dir = next () in + { oval with config = { oval.config with native_output_dir } } + + |"-nI" -> + let include_dir = next () in + { oval with config = {oval.config with native_include_dirs = include_dir :: oval.config.native_include_dirs } } + (* Options with zero arg *) |"-async-queries-always-delegate" |"-async-proofs-always-delegate" @@ -512,6 +528,7 @@ let parse_args ~help ~init arglist : t * string list = |"-indices-matter" -> set_logic (fun o -> { o with indices_matter = true }) oval |"-m"|"--memory" -> { oval with post = { oval.post with memory_stat = true }} |"-noinit"|"-nois" -> { oval with pre = { oval.pre with load_init = false }} + |"-boot" -> { oval with pre = { oval.pre with boot = true }} |"-output-context" -> { oval with post = { oval.post with output_context = true }} |"-profile-ltac" -> Flags.profile_ltac := true; oval |"-q" -> { oval with pre = { oval.pre with load_rcfile = false; }} @@ -569,5 +586,5 @@ let cmdline_load_path opts = opts.pre.ml_includes @ opts.pre.vo_includes let build_load_path opts = - Coqinit.libs_init_load_path ~load_init:opts.pre.load_init @ + (if opts.pre.boot then [] else Coqinit.libs_init_load_path ()) 
@ cmdline_load_path opts diff --git a/toplevel/coqargs.mli b/toplevel/coqargs.mli index 26f22386a0..aba6811f43 100644 --- a/toplevel/coqargs.mli +++ b/toplevel/coqargs.mli @@ -31,6 +31,8 @@ type coqargs_config = { color : color; enable_VM : bool; native_compiler : native_compiler; + native_output_dir : CUnix.physical_path; + native_include_dirs : CUnix.physical_path list; stm_flags : Stm.AsyncOpts.stm_opt; debug : bool; diffs_set : bool; @@ -40,6 +42,7 @@ type coqargs_config = { } type coqargs_pre = { + boot : bool; load_init : bool; load_rcfile : bool; diff --git a/toplevel/coqinit.ml b/toplevel/coqinit.ml index ac348b9646..7f3d4b570f 100644 --- a/toplevel/coqinit.ml +++ b/toplevel/coqinit.ml @@ -51,12 +51,17 @@ let load_rcfile ~rcfile ~state = let () = Feedback.msg_info (str"Load of rcfile failed.") in iraise reraise -(* Recursively puts dir in the LoadPath if -nois was not passed *) -let build_stdlib_path ~load_init ~unix_path ~coq_path ~with_ml = +(* Recursively puts `.v` files in the LoadPath if -nois was not passed *) +let build_stdlib_vo_path ~unix_path ~coq_path = let open Loadpath in - let add_ml = if with_ml then AddRecML else AddNoML in { recursive = true; - path_spec = VoPath { unix_path; coq_path ; has_ml = add_ml; implicit = load_init } + path_spec = VoPath { unix_path; coq_path ; has_ml = AddNoML; implicit = true } + } + +let build_stdlib_ml_path ~dir = + let open Loadpath in + { recursive = true + ; path_spec = MlPath dir } let build_userlib_path ~unix_path = @@ -83,7 +88,7 @@ let toplevel_init_load_path () = ml_path_if Coq_config.local [coqlib/"dev"] (* LoadPath for Coq user libraries *) -let libs_init_load_path ~load_init = +let libs_init_load_path () = let open Loadpath in let coqlib = Envars.coqlib () in @@ -100,9 +105,9 @@ let libs_init_load_path ~load_init = has_ml = AddTopML } } ] @ - (* then standard library and plugins *) - [build_stdlib_path ~load_init ~unix_path:(coqlib/"theories") ~coq_path ~with_ml:false; - build_stdlib_path ~load_init 
~unix_path:(coqlib/"plugins") ~coq_path ~with_ml:true ] @ + (* then standard library *) + [build_stdlib_ml_path ~dir:(coqlib/"plugins")] @ + [build_stdlib_vo_path ~unix_path:(coqlib/"theories") ~coq_path] @ (* then user-contrib *) (if Sys.file_exists user_contrib then diff --git a/toplevel/coqinit.mli b/toplevel/coqinit.mli index fc53c8b47c..f3a007d987 100644 --- a/toplevel/coqinit.mli +++ b/toplevel/coqinit.mli @@ -20,4 +20,4 @@ val init_ocaml_path : unit -> unit val toplevel_init_load_path : unit -> Loadpath.coq_path list (* LoadPath for Coq user libraries *) -val libs_init_load_path : load_init:bool -> Loadpath.coq_path list +val libs_init_load_path : unit -> Loadpath.coq_path list diff --git a/toplevel/coqtop.ml b/toplevel/coqtop.ml index 46dd693155..1ea48ee766 100644 --- a/toplevel/coqtop.ml +++ b/toplevel/coqtop.ml @@ -239,6 +239,10 @@ let init_execution opts custom_init = set_options opts.config.set_options; + (* Native output dir *) + Nativelib.output_dir := opts.config.native_output_dir; + Nativelib.include_dirs := opts.config.native_include_dirs; + (* Allow the user to load an arbitrary state here *) inputstate opts.pre; diff --git a/toplevel/usage.ml b/toplevel/usage.ml index 6848862603..c7e1d607f4 100644 --- a/toplevel/usage.ml +++ b/toplevel/usage.ml @@ -32,7 +32,8 @@ let print_usage_common co command = \n -coqlib dir set the coq standard library directory\ \n -exclude-dir f exclude subdirectories named f for option -R\ \n\ -\n -noinit start without loading the Init library\ +\n -boot don't bind the `Coq.` prefix to the default -coqlib dir\ +\n -noinit don't load Coq.Init.Prelude on start \ \n -nois (idem)\ \n -compat X.Y provides compatibility support for Coq version X.Y\ \n\ @@ -93,6 +94,8 @@ let print_usage_common co command = \n for full Gc stats dump)\ \n -bytecode-compiler (yes|no) enable the vm_compute reduction machine\ \n -native-compiler (yes|no|ondemand) enable the native_compute reduction machine\ +\n -native-output-dir <directory> set the 
output directory for native objects\ +\n -nI dir OCaml include directories for the native compiler (default if not set) \ \n -h, -help, --help print this list of options\ \n" diff --git a/user-contrib/Ltac2/g_ltac2.mlg b/user-contrib/Ltac2/g_ltac2.mlg index c1bd585f3f..e95ac3b02b 100644 --- a/user-contrib/Ltac2/g_ltac2.mlg +++ b/user-contrib/Ltac2/g_ltac2.mlg @@ -25,6 +25,10 @@ let err () = raise Stream.Failure type lookahead = Gramlib.Plexing.location_function -> int -> Tok.t Stream.t -> int option +let check_no_space tok m strm = + let n = Stream.count strm in + if G_prim.contiguous tok n (n+m-1) then Some m else None + let entry_of_lookahead s (lk : lookahead) = let run tok strm = match lk tok 0 strm with None -> err () | Some _ -> () in Pcoq.Entry.of_parser s run @@ -51,7 +55,7 @@ let lk_int tok n strm = match stream_nth n strm with | NUMERAL { NumTok.int = _; frac = ""; exp = "" } -> Some (n + 1) | _ -> None -let lk_ident_or_anti = lk_ident <+> (lk_kw "$" >> lk_ident) +let lk_ident_or_anti = lk_ident <+> (lk_kw "$" >> lk_ident >> check_no_space) let rec lk_ident_list n strm = ((lk_ident >> lk_ident_list) <+> lk_empty) n strm @@ -80,10 +84,6 @@ let test_lpar_id_rpar = lk_kw "(" >> lk_ident >> lk_kw ")" end -let check_no_space tok m strm = - let n = Stream.count strm in - if G_prim.contiguous tok n (n+m-1) then Some m else None - let test_ampersand_ident = entry_of_lookahead "test_ampersand_ident" begin lk_kw "&" >> lk_ident >> check_no_space @@ -91,7 +91,7 @@ let test_ampersand_ident = let test_dollar_ident = entry_of_lookahead "test_dollar_ident" begin - lk_kw "$" >> lk_ident + lk_kw "$" >> lk_ident >> check_no_space end let test_ltac1_env = @@ -889,7 +889,7 @@ let rules = [ ] in Hook.set Tac2entries.register_constr_quotations begin fun () -> - Pcoq.grammar_extend Pcoq.Constr.operconstr None (Some (Gramlib.Gramext.Level "0"), [(None, None, rules)]) + Pcoq.grammar_extend Pcoq.Constr.operconstr (Some (Gramlib.Gramext.Level "0"), [(None, None, rules)]) end } @@ 
-948,6 +948,12 @@ VERNAC { tac2mode } EXTEND VernacLtac2 fun ~pstate -> Tac2entries.call ~pstate ~default t } END +GRAMMAR EXTEND Gram + GLOBAL: tac2mode; + tac2mode: + [ [ tac = G_vernac.query_command -> { tac None } ] ]; +END + { open Stdarg diff --git a/user-contrib/Ltac2/tac2core.ml b/user-contrib/Ltac2/tac2core.ml index 431589aa30..196b28b274 100644 --- a/user-contrib/Ltac2/tac2core.ml +++ b/user-contrib/Ltac2/tac2core.ml @@ -161,7 +161,7 @@ let set_bt info = let throw ?(info = Exninfo.null) e = set_bt info >>= fun info -> let info = Exninfo.add info fatal_flag () in - Proofview.tclLIFT (Proofview.NonLogical.raise ~info e) + Proofview.tclLIFT (Proofview.NonLogical.raise (e, info)) let fail ?(info = Exninfo.null) e = set_bt info >>= fun info -> diff --git a/user-contrib/Ltac2/tac2entries.ml b/user-contrib/Ltac2/tac2entries.ml index d6db4a735c..2a0c109a42 100644 --- a/user-contrib/Ltac2/tac2entries.ml +++ b/user-contrib/Ltac2/tac2entries.ml @@ -643,7 +643,7 @@ let perform_notation syn st = | Some lev -> Some (string_of_int lev) in let rule = (lev, None, [rule]) in - ([Pcoq.ExtendRule (Pltac.tac2expr, None, (None, [rule]))], st) + ([Pcoq.ExtendRule (Pltac.tac2expr, (None, [rule]))], st) let ltac2_notation = Pcoq.create_grammar_command "ltac2-notation" perform_notation @@ -848,8 +848,8 @@ let () = register_handler begin function let v = Tac2ffi.of_open (kn, args) in let t = GTypRef (Other t_exn, []) in let c = Tac2print.pr_valexpr (Global.env ()) Evd.empty v t in - hov 0 (str "Uncaught Ltac2 exception:" ++ spc () ++ hov 0 c) -| _ -> raise Unhandled + Some (hov 0 (str "Uncaught Ltac2 exception:" ++ spc () ++ hov 0 c)) +| _ -> None end let () = CErrors.register_additional_error_info begin fun info -> diff --git a/vernac/attributes.ml b/vernac/attributes.ml index 194308b77f..7213ba4829 100644 --- a/vernac/attributes.ml +++ b/vernac/attributes.ml @@ -163,23 +163,7 @@ let program = let locality = bool_attribute ~name:"Locality" ~on:"local" ~off:"global" -let 
warn_unqualified_univ_attr = - CWarnings.create ~name:"unqualified-univ-attr" ~category:"deprecated" - (fun key -> Pp.(str "Attribute " ++ str key ++ - str " should be qualified as \"universes("++str key++str")\".")) - let ukey = "universes" -let universe_transform ~warn_unqualified : unit attribute = - fun atts -> - let atts = List.map (fun (key,_ as att) -> - match key with - | "polymorphic" | "monomorphic" - | "template" | "notemplate" -> - if warn_unqualified then warn_unqualified_univ_attr key; - ukey, VernacFlagList [att] - | _ -> att) atts - in - atts, () let universe_polymorphism_option_name = ["Universe"; "Polymorphism"] let is_universe_polymorphism = @@ -198,16 +182,10 @@ let polymorphic_base = | Some b -> return b | None -> return (is_universe_polymorphism()) -let polymorphic_nowarn = - universe_transform ~warn_unqualified:false >> - qualify_attribute ukey polymorphic_base - let template = - universe_transform ~warn_unqualified:true >> qualify_attribute ukey (bool_attribute ~name:"Template" ~on:"template" ~off:"notemplate") let polymorphic = - universe_transform ~warn_unqualified:true >> qualify_attribute ukey polymorphic_base let deprecation_parser : Deprecation.t key_parser = fun orig args -> diff --git a/vernac/attributes.mli b/vernac/attributes.mli index 0074db66d3..7ecb7e4fb0 100644 --- a/vernac/attributes.mli +++ b/vernac/attributes.mli @@ -114,10 +114,6 @@ val make_attribute : (vernac_flags -> vernac_flags * 'a) -> 'a attribute val vernac_polymorphic_flag : vernac_flag val vernac_monomorphic_flag : vernac_flag -(** For the stm, do not use! *) - -val polymorphic_nowarn : bool attribute - -(** For internal use, avoid warning if not qualified as eg [universes(polymorphic)]. *) +(** For internal use. 
*) val universe_polymorphism_option_name : string list val is_universe_polymorphism : unit -> bool diff --git a/vernac/comInductive.ml b/vernac/comInductive.ml index d711c9aea0..edb03a5c89 100644 --- a/vernac/comInductive.ml +++ b/vernac/comInductive.ml @@ -433,32 +433,33 @@ let interp_mutual_inductive_gen env0 ~template udecl (uparamsl,paramsl,indl) not then user_err (str "Inductives with uniform parameters may not have attached notations."); let indnames = List.map (fun ind -> ind.ind_name) indl in - let sigma, env_params, infos = + + (* In case of template polymorphism, we need to compute more constraints *) + let env0 = if poly then env0 else Environ.set_universes_lbound env0 Univ.Level.prop in + + let sigma, env_params, (ctx_params, env_uparams, ctx_uparams, params, userimpls, useruimpls, impls, udecl) = interp_params env0 udecl uparamsl paramsl in (* Interpret the arities *) let arities = List.map (intern_ind_arity env_params sigma) indl in - let sigma, env_params, (ctx_params, env_uparams, ctx_uparams, params, userimpls, useruimpls, impls, udecl), arities, is_template = - let is_template = List.exists (fun (_,_,_,pseudo_poly) -> not (Option.is_empty pseudo_poly)) arities in - if not poly && is_template then - (* In case of template polymorphism, we need to compute more constraints *) - let env0 = Environ.set_universes_lbound env0 Univ.Level.prop in - let sigma, env_params, infos = - interp_params env0 udecl uparamsl paramsl - in - let arities = List.map (intern_ind_arity env_params sigma) indl in - sigma, env_params, infos, arities, is_template - else sigma, env_params, infos, arities, is_template - in - let sigma, arities = List.fold_left_map (pretype_ind_arity env_params) sigma arities in let arities, relevances, arityconcl, indimpls = List.split4 arities in - let fullarities = List.map (fun c -> EConstr.it_mkProd_or_LetIn c ctx_params) arities in + let lift1_ctx ctx = + let t = EConstr.it_mkProd_or_LetIn EConstr.mkProp ctx in + let t = EConstr.Vars.lift 1 
t in + let ctx, _ = EConstr.decompose_prod_assum sigma t in + ctx + in + let ctx_params_lifted, fullarities = CList.fold_left_map + (fun ctx_params c -> lift1_ctx ctx_params, EConstr.it_mkProd_or_LetIn c ctx_params) + ctx_params + arities + in let env_ar = push_types env_uparams indnames relevances fullarities in - let env_ar_params = EConstr.push_rel_context ctx_params env_ar in + let env_ar_params = EConstr.push_rel_context ctx_params_lifted env_ar in (* Compute interpretation metadatas *) let indimpls = List.map (fun impls -> userimpls @ impls) indimpls in @@ -509,6 +510,9 @@ let interp_mutual_inductive_gen env0 ~template udecl (uparamsl,paramsl,indl) not let eq_local_binders bl1 bl2 = List.equal local_binder_eq bl1 bl2 +let eq_params (up1,p1) (up2,p2) = + eq_local_binders up1 up2 && Option.equal eq_local_binders p1 p2 + let extract_coercions indl = let mkqid (_,({CAst.v=id},_)) = qualid_of_ident id in let extract lc = List.filter (fun (iscoe,_) -> iscoe) lc in @@ -519,7 +523,7 @@ let extract_params indl = match paramsl with | [] -> anomaly (Pp.str "empty list of inductive types.") | params::paramsl -> - if not (List.for_all (eq_local_binders params) paramsl) then user_err Pp.(str + if not (List.for_all (eq_params params) paramsl) then user_err Pp.(str "Parameters should be syntactically the same for each inductive type."); params @@ -544,7 +548,12 @@ type uniform_inductive_flag = let do_mutual_inductive ~template udecl indl ~cumulative ~poly ~private_ind ~uniform finite = let (params,indl),coes,ntns = extract_mutual_inductive_declaration_components indl in (* Interpret the types *) - let indl = match uniform with UniformParameters -> (params, [], indl) | NonUniformParameters -> ([], params, indl) in + let indl = match params with + | uparams, Some params -> (uparams, params, indl) + | params, None -> match uniform with + | UniformParameters -> (params, [], indl) + | NonUniformParameters -> ([], params, indl) + in let mie,pl,impls = interp_mutual_inductive_gen 
(Global.env()) ~template udecl indl ntns ~cumulative ~poly ~private_ind finite in (* Declare the mutual inductive block with its associated schemes *) ignore (DeclareInd.declare_mutual_inductive_with_eliminations mie pl impls); diff --git a/vernac/egramcoq.ml b/vernac/egramcoq.ml index 07656f9715..ead86bd12f 100644 --- a/vernac/egramcoq.ml +++ b/vernac/egramcoq.ml @@ -96,38 +96,38 @@ let create_pos = function let find_position_gen current ensure assoc lev = match lev with | None -> - current, (None, None, None, None) + current, (None, None, None, None) | Some n -> - let after = ref None in - let init = ref None in - let rec add_level q = function - | (p,_,_ as pa)::l when p > n -> pa :: add_level (Some p) l - | (p,a,reinit)::l when Int.equal p n -> - if reinit then - let a' = create_assoc assoc in - (init := Some (a',create_pos q); (p,a',false)::l) - else if admissible_assoc (a,assoc) then - raise Exit - else - error_level_assoc p a (Option.get assoc) - | l -> after := q; (n,create_assoc assoc,ensure)::l - in - try - let updated = add_level None current in - let assoc = create_assoc assoc in - begin match !init with + let after = ref None in + let init = ref None in + let rec add_level q = function + | (p,_,_ as pa)::l when p > n -> pa :: add_level (Some p) l + | (p,a,reinit)::l when Int.equal p n -> + if reinit then + let a' = create_assoc assoc in + (init := Some (a',create_pos q); (p,a',false)::l) + else if admissible_assoc (a,assoc) then + raise Exit + else + error_level_assoc p a (Option.get assoc) + | l -> after := q; (n,create_assoc assoc,ensure)::l + in + try + let updated = add_level None current in + let assoc = create_assoc assoc in + begin match !init with | None -> (* Create the entry *) - updated, (Some (create_pos !after), Some assoc, Some (constr_level n), None) + updated, (Some (create_pos !after), Some assoc, Some (constr_level n), None) | _ -> (* The reinit flag has been updated *) - updated, (Some (Gramlib.Gramext.Level (constr_level n)), None, 
None, !init) - end - with - (* Nothing has changed *) - Exit -> - (* Just inherit the existing associativity and name (None) *) - current, (Some (Gramlib.Gramext.Level (constr_level n)), None, None, None) + updated, (Some (Gramlib.Gramext.Level (constr_level n)), None, None, !init) + end + with + (* Nothing has changed *) + Exit -> + (* Just inherit the existing associativity and name (None) *) + current, (Some (Gramlib.Gramext.Level (constr_level n)), None, None, None) let rec list_mem_assoc_triple x = function | [] -> false @@ -200,41 +200,44 @@ let assoc_eq al ar = | LeftA, LeftA -> true | _, _ -> false -(* [adjust_level assoc from prod] where [assoc] and [from] are the name +(** [adjust_level assoc from prod] where [assoc] and [from] are the name and associativity of the level where to add the rule; the meaning of the result is - None = SELF - Some None = NEXT - Some (Some (n,cur)) = constr LEVEL n - s.t. if [cur] is set then [n] is the same as the [from] level *) -let adjust_level assoc from = let open Gramlib.Gramext in function + DefaultLevel = entry name + NextLevel = NEXT + NumLevel n = constr LEVEL n *) +let adjust_level custom assoc (custom',from) p = let open Gramlib.Gramext in match p with +(* If a level in a different grammar, no other choice than denoting it by absolute level *) + | (NumLevel n,_) when not (Notation.notation_entry_eq custom custom') -> NumLevel n +(* If a default level in a different grammar, the entry name is ok *) + | (DefaultLevel,InternalProd) -> + if Notation.notation_entry_eq custom InConstrEntry then NumLevel 200 else DefaultLevel + | (DefaultLevel,BorderProd _) when not (Notation.notation_entry_eq custom custom') -> + if Notation.notation_entry_eq custom InConstrEntry then NumLevel 200 else DefaultLevel (* Associativity is None means force the level *) - | (NumLevel n,BorderProd (_,None)) -> Some (Some (n,true)) + | (NumLevel n,BorderProd (_,None)) -> NumLevel n + | (DefaultLevel,BorderProd (_,None)) -> assert false (* 
Compute production name on the right side *) (* If NonA or LeftA on the right-hand side, set to NEXT *) - | (NumLevel n,BorderProd (Right,Some (NonA|LeftA))) -> - Some None + | ((NumLevel _ | DefaultLevel),BorderProd (Right,Some (NonA|LeftA))) -> NextLevel (* If RightA on the right-hand side, set to the explicit (current) level *) - | (NumLevel n,BorderProd (Right,Some RightA)) -> - Some (Some (n,true)) + | (NumLevel n,BorderProd (Right,Some RightA)) -> NumLevel n + | (DefaultLevel,BorderProd (Right,Some RightA)) -> NumLevel from (* Compute production name on the left side *) (* If NonA on the left-hand side, adopt the current assoc ?? *) - | (NumLevel n,BorderProd (Left,Some NonA)) -> None + | ((NumLevel _ | DefaultLevel),BorderProd (Left,Some NonA)) -> DefaultLevel (* If the expected assoc is the current one, set to SELF *) - | (NumLevel n,BorderProd (Left,Some a)) when assoc_eq a (camlp5_assoc assoc) -> - None + | ((NumLevel _ | DefaultLevel),BorderProd (Left,Some a)) when assoc_eq a (camlp5_assoc assoc) -> + DefaultLevel (* Otherwise, force the level, n or n-1, according to expected assoc *) - | (NumLevel n,BorderProd (Left,Some a)) -> - begin match a with - | LeftA -> Some (Some (n, true)) - | _ -> Some None - end + | (NumLevel n,BorderProd (Left,Some LeftA)) -> NumLevel n + | ((NumLevel _ | DefaultLevel),BorderProd (Left,Some _)) -> NextLevel (* None means NEXT *) - | (NextLevel,_) -> Some None + | (NextLevel,_) -> assert (Notation.notation_entry_eq custom custom'); NextLevel (* Compute production name elsewhere *) | (NumLevel n,InternalProd) -> - if from = n + 1 then Some None else Some (Some (n, Int.equal n from)) + if from = n + 1 then NextLevel else NumLevel n type _ target = | ForConstr : constr_expr target @@ -311,13 +314,14 @@ let target_entry : type s. 
notation_entry -> s target -> s Entry.t = function | ForConstr -> entry_for_constr | ForPattern -> entry_for_patttern -let is_self from e = match e with +let is_self custom (custom',from) e = Notation.notation_entry_eq custom custom' && match e with | (NumLevel n, BorderProd (Right, _ (* Some(NonA|LeftA) *))) -> false | (NumLevel n, BorderProd (Left, _)) -> Int.equal from n | _ -> false -let is_binder_level from e = match e with -| (NumLevel 200, (BorderProd (Right, _) | InternalProd)) -> from = 200 +let is_binder_level custom (custom',from) e = match e with +| (NumLevel 200, (BorderProd (Right, _) | InternalProd)) -> + custom = InConstrEntry && custom' = InConstrEntry && from = 200 | _ -> false let make_sep_rules = function @@ -338,15 +342,15 @@ type ('s, 'a) mayrec_symbol = | MayRecMay : ('s, mayrec, 'a) symbol -> ('s, 'a) mayrec_symbol let symbol_of_target : type s. _ -> _ -> _ -> _ -> s target -> (s, s) mayrec_symbol = fun custom p assoc from forpat -> - if custom = InConstrEntry && is_binder_level from p then MayRecNo (Aentryl (target_entry InConstrEntry forpat, "200")) - else if is_self from p then MayRecMay Aself + if is_binder_level custom from p then (* Prevent self *) MayRecNo (Aentryl (target_entry custom forpat, "200")) + else if is_self custom from p then MayRecMay Aself else let g = target_entry custom forpat in - let lev = adjust_level assoc from p in + let lev = adjust_level custom assoc from p in begin match lev with - | None -> MayRecNo (Aentry g) - | Some None -> MayRecMay Anext - | Some (Some (lev, cur)) -> MayRecNo (Aentryl (g, string_of_int lev)) + | DefaultLevel -> MayRecNo (Aentry g) + | NextLevel -> MayRecMay Anext + | NumLevel lev -> MayRecNo (Aentryl (g, string_of_int lev)) end let symbol_of_entry : type s r. _ -> _ -> (s, r) entry -> (s, r) mayrec_symbol = fun assoc from typ -> match typ with @@ -501,37 +505,46 @@ let target_to_bool : type r. 
r target -> bool = function let prepare_empty_levels forpat (where,(pos,p4assoc,name,reinit)) = let empty = (pos, [(name, p4assoc, [])]) in - ExtendRule (target_entry where forpat, reinit, empty) + match reinit with + | None -> + ExtendRule (target_entry where forpat, empty) + | Some reinit -> + ExtendRuleReinit (target_entry where forpat, reinit, empty) -let rec pure_sublevels' custom assoc from forpat level = function +let different_levels (custom,opt_level) (custom',string_level) = + match opt_level with + | None -> true + | Some level -> not (Notation.notation_entry_eq custom custom') || level <> int_of_string string_level + +let rec pure_sublevels' assoc from forpat level = function | [] -> [] | GramConstrNonTerminal (e,_) :: rem -> - let rem = pure_sublevels' custom assoc from forpat level rem in + let rem = pure_sublevels' assoc from forpat level rem in let push where p rem = - match symbol_of_target custom p assoc from forpat with - | MayRecNo (Aentryl (_,i)) when level <> Some (int_of_string i) -> (where,int_of_string i) :: rem + match symbol_of_target where p assoc from forpat with + | MayRecNo (Aentryl (_,i)) when different_levels (fst from,level) (where,i) -> (where,int_of_string i) :: rem | _ -> rem in (match e with | ETProdPattern i -> push InConstrEntry (NumLevel i,InternalProd) rem | ETProdConstr (s,p) -> push s p rem | _ -> rem) -| (GramConstrTerminal _ | GramConstrListMark _) :: rem -> pure_sublevels' custom assoc from forpat level rem +| (GramConstrTerminal _ | GramConstrListMark _) :: rem -> pure_sublevels' assoc from forpat level rem let make_act : type r. 
r target -> _ -> r gen_eval = function | ForConstr -> fun notation loc env -> let env = (env.constrs, env.constrlists, env.binders, env.binderlists) in - CAst.make ~loc @@ CNotation (notation, env) + CAst.make ~loc @@ CNotation (None, notation, env) | ForPattern -> fun notation loc env -> let env = (env.constrs, env.constrlists) in - CAst.make ~loc @@ CPatNotation (notation, env, []) + CAst.make ~loc @@ CPatNotation (None, notation, env, []) let extend_constr state forpat ng = let custom,n,_,_ = ng.notgram_level in let assoc = ng.notgram_assoc in let (entry, level) = interp_constr_entry_key custom forpat n in let fold (accu, state) pt = - let AnyTyRule r = make_ty_rule assoc n forpat pt in - let pure_sublevels = pure_sublevels' custom assoc n forpat level pt in + let AnyTyRule r = make_ty_rule assoc (custom,n) forpat pt in + let pure_sublevels = pure_sublevels' assoc (custom,n) forpat level pt in let isforpat = target_to_bool forpat in let needed_levels, state = register_empty_levels state isforpat pure_sublevels in let (pos,p4assoc,name,reinit), state = find_position state custom isforpat assoc level in @@ -543,7 +556,12 @@ let extend_constr state forpat ng = | MayRecRNo symbs -> Rule (symbs, act) | MayRecRMay symbs -> Rule (symbs, act) in name, p4assoc, [r] in - let r = ExtendRule (entry, reinit, (pos, [rule])) in + let r = match reinit with + | None -> + ExtendRule (entry, (pos, [rule])) + | Some reinit -> + ExtendRuleReinit (entry, reinit, (pos, [rule])) + in (accu @ empty_rules @ [r], state) in List.fold_left fold ([], state) ng.notgram_prods diff --git a/vernac/egramml.ml b/vernac/egramml.ml index 62eb561f3c..2b1d99c7a9 100644 --- a/vernac/egramml.ml +++ b/vernac/egramml.ml @@ -90,4 +90,4 @@ let extend_vernac_command_grammar s nt gl = vernac_exts := (s,gl) :: !vernac_exts; let mkact loc l = VernacExtend (s, l) in let rules = [make_rule mkact gl] in - grammar_extend nt None (None, [None, None, rules]) + grammar_extend nt (None, [None, None, rules]) diff --git 
a/vernac/g_vernac.mlg b/vernac/g_vernac.mlg index 74249301d7..def4ed942a 100644 --- a/vernac/g_vernac.mlg +++ b/vernac/g_vernac.mlg @@ -395,13 +395,14 @@ GRAMMAR EXTEND Gram ; inductive_definition: [ [ oc = opt_coercion; id = ident_decl; indpar = binders; + extrapar = OPT [ "|"; p = binders -> { p } ]; c = OPT [ ":"; c = lconstr -> { c } ]; lc=opt_constructors_or_fields; ntn = decl_notation -> - { (((oc,id),indpar,c,lc),ntn) } ] ] + { (((oc,id),(indpar,extrapar),c,lc),ntn) } ] ] ; constructor_list_or_record_decl: [ [ "|"; l = LIST1 constructor SEP "|" -> { Constructors l } - | id = identref ; c = constructor_type; "|"; l = LIST0 constructor SEP "|" -> + | id = identref ; c = constructor_type; "|"; l = LIST1 constructor SEP "|" -> { Constructors ((c id)::l) } | id = identref ; c = constructor_type -> { Constructors [ c id ] } | cstr = identref; "{"; fs = record_fields; "}" -> @@ -1224,11 +1225,10 @@ GRAMMAR EXTEND Gram | { CAst.v = k }, Some s -> SetFormat(k,s) | s, None -> SetFormat ("text",s) end } | x = IDENT; ","; l = LIST1 [id = IDENT -> { id } ] SEP ","; "at"; - lev = level -> { SetItemLevel (x::l,None,Some lev) } - | x = IDENT; "at"; lev = level -> { SetItemLevel ([x],None,Some lev) } - | x = IDENT; "at"; lev = level; b = constr_as_binder_kind -> - { SetItemLevel ([x],Some b,Some lev) } - | x = IDENT; b = constr_as_binder_kind -> { SetItemLevel ([x],Some b,None) } + lev = level -> { SetItemLevel (x::l,None,lev) } + | x = IDENT; "at"; lev = level; b = OPT constr_as_binder_kind -> + { SetItemLevel ([x],b,lev) } + | x = IDENT; b = constr_as_binder_kind -> { SetItemLevel ([x],Some b,DefaultLevel) } | x = IDENT; typ = syntax_extension_type -> { SetEntryType (x,typ) } ] ] ; @@ -1236,19 +1236,20 @@ GRAMMAR EXTEND Gram [ [ IDENT "ident" -> { ETIdent } | IDENT "global" -> { ETGlobal } | IDENT "bigint" -> { ETBigint } | IDENT "binder" -> { ETBinder true } - | IDENT "constr" -> { ETConstr (InConstrEntry,None,None) } - | IDENT "constr"; n = OPT at_level; b = OPT 
constr_as_binder_kind -> { ETConstr (InConstrEntry,b,n) } + | IDENT "constr" -> { ETConstr (InConstrEntry,None,DefaultLevel) } + | IDENT "constr"; n = at_level_opt; b = OPT constr_as_binder_kind -> { ETConstr (InConstrEntry,b,n) } | IDENT "pattern" -> { ETPattern (false,None) } | IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (false,Some n) } | IDENT "strict"; IDENT "pattern" -> { ETPattern (true,None) } | IDENT "strict"; IDENT "pattern"; "at"; IDENT "level"; n = natural -> { ETPattern (true,Some n) } | IDENT "closed"; IDENT "binder" -> { ETBinder false } - | IDENT "custom"; x = IDENT; n = OPT at_level; b = OPT constr_as_binder_kind -> + | IDENT "custom"; x = IDENT; n = at_level_opt; b = OPT constr_as_binder_kind -> { ETConstr (InCustomEntry x,b,n) } ] ] ; - at_level: - [ [ "at"; n = level -> { n } ] ] + at_level_opt: + [ [ "at"; n = level -> { n } + | -> { DefaultLevel } ] ] ; constr_as_binder_kind: [ [ "as"; IDENT "ident" -> { Notation_term.AsIdent } diff --git a/vernac/himsg.ml b/vernac/himsg.ml index dfc4631572..07ec6ca1ba 100644 --- a/vernac/himsg.ml +++ b/vernac/himsg.ml @@ -324,11 +324,8 @@ let explain_unification_error env sigma p1 p2 = function strbrk ": cannot ensure that " ++ t ++ strbrk " is a subtype of " ++ u] | UnifUnivInconsistency p -> - if !Constrextern.print_universes then - [str "universe inconsistency: " ++ - Univ.explain_universe_inconsistency (Termops.pr_evd_level sigma) p] - else - [str "universe inconsistency"] + [str "universe inconsistency: " ++ + Univ.explain_universe_inconsistency (Termops.pr_evd_level sigma) p] | CannotSolveConstraint ((pb,env,t,u),e) -> let env = make_all_name_different env sigma in (strbrk "cannot satisfy constraint " ++ pr_leconstr_env env sigma t ++ @@ -1359,6 +1356,12 @@ let explain_prim_token_notation_error kind env sigma = function Nota: explain_exn does NOT end with a newline anymore! 
*) +exception Unhandled + +let wrap_unhandled f e = + try Some (f e) + with Unhandled -> None + let explain_exn_default = function (* Basic interaction exceptions *) | Stream.Error txt -> hov 0 (str "Syntax error: " ++ str txt ++ str ".") @@ -1369,19 +1372,14 @@ let explain_exn_default = function | CErrors.Timeout -> hov 0 (str "Timeout!") | Sys.Break -> hov 0 (fnl () ++ str "User interrupt.") (* Otherwise, not handled here *) - | _ -> raise CErrors.Unhandled + | _ -> raise Unhandled -let _ = CErrors.register_handler explain_exn_default +let _ = CErrors.register_handler (wrap_unhandled explain_exn_default) let rec vernac_interp_error_handler = function | Univ.UniverseInconsistency i -> - let msg = - if !Constrextern.print_universes then - str "." ++ spc() ++ - Univ.explain_universe_inconsistency UnivNames.pr_with_global_universes i - else - mt() in - str "Universe inconsistency" ++ msg ++ str "." + str "Universe inconsistency." ++ spc() ++ + Univ.explain_universe_inconsistency UnivNames.pr_with_global_universes i ++ str "." 
| TypeError(ctx,te) -> let te = map_ptype_error EConstr.of_constr te in explain_type_error ctx Evd.empty te @@ -1417,6 +1415,6 @@ let rec vernac_interp_error_handler = function | Logic_monad.TacticFailure e -> vernac_interp_error_handler e | _ -> - raise CErrors.Unhandled + raise Unhandled -let _ = CErrors.register_handler vernac_interp_error_handler +let _ = CErrors.register_handler (wrap_unhandled vernac_interp_error_handler) diff --git a/vernac/metasyntax.ml b/vernac/metasyntax.ml index 0c39aba70a..3937f887ad 100644 --- a/vernac/metasyntax.ml +++ b/vernac/metasyntax.ml @@ -286,28 +286,46 @@ let pr_notation_entry = function | InConstrEntry -> str "constr" | InCustomEntry s -> str "custom " ++ str s -let prec_assoc = let open Gramlib.Gramext in function - | RightA -> (L,E) - | LeftA -> (E,L) - | NonA -> (L,L) - let precedence_of_position_and_level from_level = function - | NumLevel n, BorderProd (_,None) -> n, Prec n | NumLevel n, BorderProd (b,Some a) -> - n, let (lp,rp) = prec_assoc a in if b == Left then lp else rp - | NumLevel n, InternalProd -> n, Prec n - | NextLevel, _ -> from_level, L - + (let open Gramlib.Gramext in + match a, b with + | RightA, Left -> LevelLt n + | RightA, Right -> LevelLe n + | LeftA, Left -> LevelLe n + | LeftA, Right -> LevelLt n + | NonA, _ -> LevelLt n), Some b + | NumLevel n, _ -> LevelLe n, None + | NextLevel, _ -> LevelLt from_level, None + | DefaultLevel, _ -> LevelSome, None + +(** Computing precedences of subentries for parsing *) let precedence_of_entry_type (from_custom,from_level) = function | ETConstr (custom,_,x) when notation_entry_eq custom from_custom -> - precedence_of_position_and_level from_level x - | ETConstr (custom,_,(NumLevel n,_)) -> n, Prec n + fst (precedence_of_position_and_level from_level x) + | ETConstr (custom,_,(NumLevel n,_)) -> LevelLe n | ETConstr (custom,_,(NextLevel,_)) -> user_err (strbrk "\"next level\" is only for sub-expressions in the same entry as where the notation is (" ++ quote 
(pr_notation_entry custom) ++ strbrk " is different from " ++ quote (pr_notation_entry from_custom) ++ str ").") - | ETPattern (_,n) -> let n = match n with None -> 0 | Some n -> n in n, Prec n - | _ -> 0, E (* should not matter *) + | ETPattern (_,n) -> let n = match n with None -> 0 | Some n -> n in LevelLe n + | _ -> LevelSome (* should not matter *) + +(** Computing precedences for future insertion of parentheses at + the time of printing using hard-wired constr levels *) +let unparsing_precedence_of_entry_type from_level = function + | ETConstr (InConstrEntry,_,x) -> + (* Possible insertion of parentheses at printing time to deal + with precedence in a constr entry is managed using [prec_less] + in [ppconstr.ml] *) + precedence_of_position_and_level from_level x + | ETConstr (custom,_,_) -> + (* Precedence of printing for a custom entry is managed using + explicit insertion of entry coercions at the time of building + a [constr_expr] *) + LevelSome, None + | ETPattern (_,n) -> (* in constr *) LevelLe (match n with Some n -> n | None -> 0), None + | _ -> LevelSome, None (* should not matter *) (* Some breaking examples *) (* "x = y" : "x /1 = y" (breaks before any symbol) *) @@ -374,14 +392,14 @@ let check_open_binder isopen sl m = let unparsing_metavar i from typs = let x = List.nth typs (i-1) in - let prec = snd (precedence_of_entry_type from x) in + let prec,side = unparsing_precedence_of_entry_type from x in match x with | ETConstr _ | ETGlobal | ETBigint -> - UnpMetaVar (i,prec) + UnpMetaVar (prec,side) | ETPattern _ -> - UnpBinderMetaVar (i,prec) + UnpBinderMetaVar prec | ETIdent -> - UnpBinderMetaVar (i,prec) + UnpBinderMetaVar prec | ETBinder isopen -> assert false @@ -389,12 +407,12 @@ let unparsing_metavar i from typs = let index_id id l = List.index Id.equal id l -let make_hunks etyps symbols from = +let make_hunks etyps symbols from_level = let vars,typs = List.split etyps in let rec make b = function | NonTerminal m :: prods -> let i = index_id m 
vars in - let u = unparsing_metavar i from typs in + let u = unparsing_metavar i from_level typs in if is_next_non_terminal b prods then (None, u) :: add_break_if_none 1 b (make b prods) else @@ -428,17 +446,17 @@ let make_hunks etyps symbols from = | SProdList (m,sl) :: prods -> let i = index_id m vars in let typ = List.nth typs (i-1) in - let _,prec = precedence_of_entry_type from typ in + let prec,side = unparsing_precedence_of_entry_type from_level typ in let sl' = (* If no separator: add a break *) if List.is_empty sl then add_break 1 [] (* We add NonTerminal for simulation but remove it afterwards *) else make true sl in let hunk = match typ with - | ETConstr _ -> UnpListMetaVar (i,prec,List.map snd sl') + | ETConstr _ -> UnpListMetaVar (prec,List.map snd sl',side) | ETBinder isopen -> check_open_binder isopen sl m; - UnpBinderListMetaVar (i,isopen,List.map snd sl') + UnpBinderListMetaVar (isopen,List.map snd sl') | _ -> assert false in (None, hunk) :: make_with_space b prods @@ -555,7 +573,7 @@ let read_recursive_format sl fmt = the names in the notation *) slfmt, res -let hunks_of_format (from,(vars,typs)) symfmt = +let hunks_of_format (from_level,(vars,typs)) symfmt = let rec aux = function | symbs, (_,(UnpTerminal s' as u)) :: fmt when String.equal s' (String.make (String.length s') ' ') -> @@ -565,22 +583,22 @@ let hunks_of_format (from,(vars,typs)) symfmt = let symbs, l = aux (symbs,fmt) in symbs, UnpTerminal s :: l | NonTerminal s :: symbs, (_,UnpTerminal s') :: fmt when Id.equal s (Id.of_string s') -> let i = index_id s vars in - let symbs, l = aux (symbs,fmt) in symbs, unparsing_metavar i from typs :: l + let symbs, l = aux (symbs,fmt) in symbs, unparsing_metavar i from_level typs :: l | symbs, (_,(UnpCut _ as u)) :: fmt -> let symbs, l = aux (symbs,fmt) in symbs, u :: l | SProdList (m,sl) :: symbs, fmt when has_ldots fmt -> let i = index_id m vars in let typ = List.nth typs (i-1) in - let _,prec = precedence_of_entry_type from typ in + let prec,side 
= unparsing_precedence_of_entry_type from_level typ in let loc_slfmt,rfmt = read_recursive_format sl fmt in let sl, slfmt = aux (sl,loc_slfmt) in if not (List.is_empty sl) then error_format ?loc:(find_prod_list_loc loc_slfmt fmt) (); let symbs, l = aux (symbs,rfmt) in let hunk = match typ with - | ETConstr _ -> UnpListMetaVar (i,prec,slfmt) + | ETConstr _ -> UnpListMetaVar (prec,slfmt,side) | ETBinder isopen -> check_open_binder isopen sl m; - UnpBinderListMetaVar (i,isopen,slfmt) + UnpBinderListMetaVar (isopen,slfmt) | _ -> assert false in symbs, hunk :: l | symbs, (_,UnpBox (a,b)) :: fmt -> @@ -725,15 +743,11 @@ let recompute_assoc typs = let open Gramlib.Gramext in let pr_arg_level from (lev,typ) = let pplev = function - | (n,L) when Int.equal n from -> str "at next level" - | (n,E) -> str "at level " ++ int n - | (n,L) -> str "at level below " ++ int n - | (n,Prec m) when Int.equal m n -> str "at level " ++ int n - | (n,_) -> str "Unknown level" in - Ppvernac.pr_set_entry_type (fun _ -> (*TO CHECK*) mt()) typ ++ - (match typ with - | ETConstr _ | ETPattern _ -> spc () ++ pplev lev - | _ -> mt ()) + | LevelLt n when Int.equal n from -> spc () ++ str "at next level" + | LevelLe n -> spc () ++ str "at level " ++ int n + | LevelLt n -> spc () ++ str "at level below " ++ int n + | LevelSome -> mt () in + Ppvernac.pr_set_entry_type (fun _ -> (*TO CHECK*) mt()) typ ++ pplev lev let pr_level ntn (from,fromlevel,args,typs) = (match from with InConstrEntry -> mt () | InCustomEntry s -> str "in " ++ str s ++ spc()) ++ @@ -755,43 +769,97 @@ let error_parsing_incompatible_level ntn ntn' oldprec prec = spc() ++ str "while it is now required to be" ++ spc() ++ pr_level ntn prec ++ str ".") -type syntax_extension = { +let warn_incompatible_format = + CWarnings.create ~name:"notation-incompatible-format" ~category:"parsing" + (fun (specific,ntn) -> + let head,scope = match specific with + | None -> str "Notation", mt () + | Some LastLonelyNotation -> str "Lonely notation", mt 
() + | Some (NotationInScope sc) -> str "Notation", strbrk (" in scope " ^ sc) in + head ++ spc () ++ pr_notation ntn ++ + strbrk " was already defined with a different format" ++ scope ++ str ".") + +type syntax_parsing_extension = { synext_level : Notation_gram.level; synext_notation : notation; - synext_notgram : notation_grammar; - synext_unparsing : unparsing list; + synext_notgram : notation_grammar option; +} + +type syntax_printing_extension = { + synext_reserved : bool; + synext_unparsing : unparsing_rule; synext_extra : (string * string) list; } -type syntax_extension_obj = locality_flag * syntax_extension +let generic_format_to_declare ntn {synext_unparsing = (rules,_); synext_extra = extra_rules } = + try + let (generic_rules,_),reserved,generic_extra_rules = + Ppextend.find_generic_notation_printing_rule ntn in + if reserved && + (not (List.for_all2eq unparsing_eq rules generic_rules) + || extra_rules <> generic_extra_rules) + then + (warn_incompatible_format (None,ntn); true) + else + false + with Not_found -> true + +let check_reserved_format ntn = function + | None -> () + | Some sy_pp_rules -> let _ = generic_format_to_declare ntn sy_pp_rules in () + +let specific_format_to_declare (specific,ntn as specific_ntn) + {synext_unparsing = (rules,_); synext_extra = extra_rules } = + try + let (specific_rules,_),specific_extra_rules = + Ppextend.find_specific_notation_printing_rule specific_ntn in + if not (List.for_all2eq unparsing_eq rules specific_rules) + || extra_rules <> specific_extra_rules then + (warn_incompatible_format (Some specific,ntn); true) + else false + with Not_found -> true + +type syntax_extension_obj = + locality_flag * (syntax_parsing_extension * syntax_printing_extension option) let check_and_extend_constr_grammar ntn rule = try let ntn_for_grammar = rule.notgram_notation in if notation_eq ntn ntn_for_grammar then raise Not_found; let prec = rule.notgram_level in - let oldprec = Notgram_ops.level_of_notation ntn_for_grammar in - if 
not (Notgram_ops.level_eq prec oldprec) then error_parsing_incompatible_level ntn ntn_for_grammar oldprec prec; + let oldparsing,oldprec = Notgram_ops.level_of_notation ntn_for_grammar in + if not (Notgram_ops.level_eq prec oldprec) && oldparsing <> None then error_parsing_incompatible_level ntn ntn_for_grammar oldprec prec; + if oldparsing = None then raise Not_found with Not_found -> Egramcoq.extend_constr_grammar rule -let cache_one_syntax_extension se = - let ntn = se.synext_notation in - let prec = se.synext_level in - let onlyprint = se.synext_notgram.notgram_onlyprinting in - try - let oldprec = Notgram_ops.level_of_notation ~onlyprint ntn in - if not (Notgram_ops.level_eq prec oldprec) then error_incompatible_level ntn oldprec prec; - with Not_found -> - begin - (* Reserve the notation level *) - Notgram_ops.declare_notation_level ntn prec ~onlyprint; - (* Declare the parsing rule *) - if not onlyprint then List.iter (check_and_extend_constr_grammar ntn) se.synext_notgram.notgram_rules; - (* Declare the notation rule *) - declare_notation_rule ntn - ~extra:se.synext_extra (se.synext_unparsing, let (_,lev,_,_) = prec in lev) se.synext_notgram - end +let cache_one_syntax_extension (pa_se,pp_se) = + let ntn = pa_se.synext_notation in + let prec = pa_se.synext_level in + (* Check and ensure that the level and the precomputed parsing rule is declared *) + let oldparsing = + try + let oldparsing,oldprec = Notgram_ops.level_of_notation ntn in + if not (Notgram_ops.level_eq prec oldprec) && (oldparsing <> None || pa_se.synext_notgram = None) then error_incompatible_level ntn oldprec prec; + oldparsing + with Not_found -> + (* Declare the level and the precomputed parsing rule *) + let _ = Notgram_ops.declare_notation_level ntn pa_se.synext_notgram prec in + None in + (* Declare the parsing rule *) + begin match oldparsing, pa_se.synext_notgram with + | None, Some grams -> List.iter (check_and_extend_constr_grammar ntn) grams + | _ -> (* The grammars rules are 
canonically derived from the string and the precedence*) () + end; + (* Printing *) + match pp_se with + | None -> () + | Some pp_se -> + (* Check compatibility of format in case of two Reserved Notation *) + (* and declare or redeclare printing rule *) + if generic_format_to_declare ntn pp_se then + declare_generic_notation_printing_rules ntn + ~extra:pp_se.synext_extra ~reserved:pp_se.synext_reserved pp_se.synext_unparsing let cache_syntax_extension (_, (_, sy)) = cache_one_syntax_extension sy @@ -800,11 +868,11 @@ let subst_parsing_rule subst x = x let subst_printing_rule subst x = x -let subst_syntax_extension (subst, (local, sy)) = - (local, { sy with - synext_notgram = { sy.synext_notgram with notgram_rules = List.map (subst_parsing_rule subst) sy.synext_notgram.notgram_rules }; - synext_unparsing = subst_printing_rule subst sy.synext_unparsing; - }) +let subst_syntax_extension (subst, (local, (pa_sy,pp_sy))) = + (local, ({ pa_sy with + synext_notgram = Option.map (List.map (subst_parsing_rule subst)) pa_sy.synext_notgram }, + Option.map (fun pp_sy -> {pp_sy with synext_unparsing = subst_printing_rule subst pp_sy.synext_unparsing}) pp_sy) + ) let classify_syntax_definition (local, _ as o) = if local then Dispose else Substitute o @@ -955,18 +1023,23 @@ let is_only_printing mods = (* Compute precedences from modifiers (or find default ones) *) -let set_entry_type from etyps (x,typ) = +let set_entry_type from n etyps (x,typ) = + let make_lev n s = match typ with + | BorderProd _ -> NumLevel n + | InternalProd -> DefaultLevel in let typ = try match List.assoc x etyps, typ with - | ETConstr (s,bko,Some n), (_,BorderProd (left,_)) -> + | ETConstr (s,bko,DefaultLevel), _ -> + if notation_entry_eq from s then ETConstr (s,bko,(make_lev n s,typ)) + else ETConstr (s,bko,(DefaultLevel,typ)) + | ETConstr (s,bko,n), BorderProd (left,_) -> ETConstr (s,bko,(n,BorderProd (left,None))) - | ETConstr (s,bko,Some n), (_,InternalProd) -> - ETConstr (s,bko,(n,InternalProd)) + | 
ETConstr (s,bko,n), InternalProd -> + ETConstr (s,bko,(n,InternalProd)) | ETPattern (b,n), _ -> ETPattern (b,n) | (ETIdent | ETBigint | ETGlobal | ETBinder _ as x), _ -> x - | ETConstr (s,bko,None), _ -> ETConstr (s,bko,typ) with Not_found -> - ETConstr (from,None,typ) + ETConstr (from,None,(make_lev n from,typ)) in (x,typ) let join_auxiliary_recursive_types recvars etyps = @@ -1123,11 +1196,11 @@ let find_precedence custom lev etyps symbols onlyprint = else user_err Pp.(str "The level of the leftmost non-terminal cannot be changed.") in (try match List.assoc x etyps, custom with - | ETConstr (s,_,Some _), s' when s = s' -> test () + | ETConstr (s,_,(NumLevel _ | NextLevel)), s' when s = s' -> test () | (ETIdent | ETBigint | ETGlobal), _ -> begin match lev with | None -> - ([Feedback.msg_info ?loc:None ,strbrk "Setting notation at level 0."],0) + ([fun () -> Flags.if_verbose (Feedback.msg_info ?loc:None) (strbrk "Setting notation at level 0.")],0) | Some 0 -> ([],0) | _ -> @@ -1144,7 +1217,7 @@ let find_precedence custom lev etyps symbols onlyprint = else [],Option.get lev) | Some (Terminal _) when last_is_terminal () -> if Option.is_empty lev then - ([Feedback.msg_info ?loc:None ,strbrk "Setting notation at level 0."], 0) + ([fun () -> Flags.if_verbose (Feedback.msg_info ?loc:None) (strbrk "Setting notation at level 0.")], 0) else [],Option.get lev | Some _ -> if Option.is_empty lev then user_err Pp.(str "Cannot determine the level."); @@ -1197,7 +1270,7 @@ module SynData = struct extra : (string * string) list; (* XXX: Callback to printing, must remove *) - msgs : ((Pp.t -> unit) * Pp.t) list; + msgs : (unit -> unit) list; (* Fields for internalization *) recvars : (Id.t * Id.t) list; @@ -1216,14 +1289,13 @@ module SynData = struct end let find_subentry_types from n assoc etyps symbols = - let innerlevel = NumLevel 200 in let typs = find_symbols - (NumLevel n,BorderProd(Left,assoc)) - (innerlevel,InternalProd) - (NumLevel n,BorderProd(Right,assoc)) + 
(BorderProd(Left,assoc)) + (InternalProd) + (BorderProd(Right,assoc)) symbols in - let sy_typs = List.map (set_entry_type from etyps) typs in + let sy_typs = List.map (set_entry_type from n etyps) typs in let prec = List.map (assoc_of_type from n) sy_typs in sy_typs, prec @@ -1296,15 +1368,19 @@ let compute_syntax_data ~local deprecation df modifiers = not_data = sy_fulldata; } +let warn_only_parsing_reserved_notation = + CWarnings.create ~name:"irrelevant-reserved-notation-only-parsing" ~category:"parsing" + (fun () -> strbrk "The only parsing modifier has no effect in Reserved Notation.") + let compute_pure_syntax_data ~local df mods = let open SynData in let sd = compute_syntax_data ~local None df mods in - let msgs = - if sd.only_parsing then - (Feedback.msg_warning ?loc:None, - strbrk "The only parsing modifier has no effect in Reserved Notation.")::sd.msgs - else sd.msgs in - { sd with msgs } + if sd.only_parsing + then + let msgs = (fun () -> warn_only_parsing_reserved_notation ?loc:None ())::sd.msgs in + { sd with msgs; only_parsing = false } + else + sd (**********************************************************************) (* Registration of notations interpretation *) @@ -1318,6 +1394,7 @@ type notation_obj = { notobj_onlyprint : bool; notobj_deprecation : Deprecation.t option; notobj_notation : notation * notation_location; + notobj_specific_pp_rules : syntax_printing_extension option; } let load_notation_common silently_define_scope_if_undefined _ (_, nobj) = @@ -1334,24 +1411,35 @@ let load_notation = load_notation_common true let open_notation i (_, nobj) = - let scope = nobj.notobj_scope in - let (ntn, df) = nobj.notobj_notation in - let pat = nobj.notobj_interp in - let onlyprint = nobj.notobj_onlyprint in - let deprecation = nobj.notobj_deprecation in - let fresh = not (Notation.exists_notation_in_scope scope ntn onlyprint pat) in - if Int.equal i 1 && fresh then begin - (* Declare the interpretation *) - let () = 
Notation.declare_notation_interpretation ntn scope pat df ~onlyprint deprecation in - (* Declare the uninterpretation *) - if not nobj.notobj_onlyparse then - Notation.declare_uninterpretation (NotationRule (scope, ntn)) pat; - (* Declare a possible coercion *) - (match nobj.notobj_coercion with - | Some (IsEntryCoercion entry) -> Notation.declare_entry_coercion ntn entry - | Some (IsEntryGlobal (entry,n)) -> Notation.declare_custom_entry_has_global entry n - | Some (IsEntryIdent (entry,n)) -> Notation.declare_custom_entry_has_ident entry n - | None -> ()) + if Int.equal i 1 then begin + let scope = nobj.notobj_scope in + let (ntn, df) = nobj.notobj_notation in + let pat = nobj.notobj_interp in + let onlyprint = nobj.notobj_onlyprint in + let deprecation = nobj.notobj_deprecation in + let specific = match scope with None -> LastLonelyNotation | Some sc -> NotationInScope sc in + let specific_ntn = (specific,ntn) in + let fresh = not (Notation.exists_notation_in_scope scope ntn onlyprint pat) in + if fresh then begin + (* Declare the interpretation *) + let () = Notation.declare_notation_interpretation ntn scope pat df ~onlyprint deprecation in + (* Declare the uninterpretation *) + if not nobj.notobj_onlyparse then + Notation.declare_uninterpretation (NotationRule specific_ntn) pat; + (* Declare a possible coercion *) + (match nobj.notobj_coercion with + | Some (IsEntryCoercion entry) -> Notation.declare_entry_coercion specific_ntn entry + | Some (IsEntryGlobal (entry,n)) -> Notation.declare_custom_entry_has_global entry n + | Some (IsEntryIdent (entry,n)) -> Notation.declare_custom_entry_has_ident entry n + | None -> ()) + end; + (* Declare specific format if any *) + match nobj.notobj_specific_pp_rules with + | Some pp_sy -> + if specific_format_to_declare specific_ntn pp_sy then + Ppextend.declare_specific_notation_printing_rules + specific_ntn ~extra:pp_sy.synext_extra pp_sy.synext_unparsing + | None -> () end let cache_notation o = @@ -1393,23 +1481,30 @@ let 
with_syntax_protection f x = exception NoSyntaxRule let recover_notation_syntax ntn = - try - let prec = Notgram_ops.level_of_notation ~onlyprint:true ntn (* Be as little restrictive as possible *) in - let pp_rule,_ = find_notation_printing_rule ntn in - let pp_extra_rules = find_notation_extra_printing_rules ntn in - let pa_rule = find_notation_parsing_rules ntn in - { synext_level = prec; - synext_notation = ntn; - synext_notgram = pa_rule; - synext_unparsing = pp_rule; - synext_extra = pp_extra_rules; - } - with Not_found -> - raise NoSyntaxRule + let pa = + try + let pa_rule,prec = Notgram_ops.level_of_notation ntn in + { synext_level = prec; + synext_notation = ntn; + synext_notgram = pa_rule } + with Not_found -> + raise NoSyntaxRule in + let pp = + try + let pp_rule,reserved,pp_extra_rules = find_generic_notation_printing_rule ntn in + Some { + synext_reserved = reserved; + synext_unparsing = pp_rule; + synext_extra = pp_extra_rules; + } + with Not_found -> None in + pa,pp let recover_squash_syntax sy = - let sq = recover_notation_syntax (InConstrEntrySomeLevel,"{ _ }") in - sy :: sq.synext_notgram.notgram_rules + let sq,_ = recover_notation_syntax (InConstrEntrySomeLevel,"{ _ }") in + match sq.synext_notgram with + | Some gram -> sy :: gram + | None -> raise NoSyntaxRule (**********************************************************************) (* Main entry point for building parsing and printing rules *) @@ -1440,16 +1535,28 @@ let make_pp_rule level (typs,symbols) fmt = | Some fmt -> hunks_of_format (level, List.split typs) (symbols, parse_format fmt) -(* let make_syntax_rules i_typs (ntn,prec,need_squash) sy_data fmt extra onlyprint compat = *) -let make_syntax_rules (sd : SynData.syn_data) = let open SynData in +let make_parsing_rules (sd : SynData.syn_data) = let open SynData in let ntn_for_grammar, prec_for_grammar, need_squash = sd.not_data in - let custom,level,_,_ = sd.level in - let pa_rule = make_pa_rule prec_for_grammar sd.pa_syntax_data 
ntn_for_grammar need_squash in - let pp_rule = make_pp_rule (custom,level) sd.pp_syntax_data sd.format in { + let pa_rule = + if sd.only_printing then None + else Some (make_pa_rule prec_for_grammar sd.pa_syntax_data ntn_for_grammar need_squash) + in { synext_level = sd.level; synext_notation = fst sd.info; - synext_notgram = { notgram_onlyprinting = sd.only_printing; notgram_rules = pa_rule }; - synext_unparsing = pp_rule; + synext_notgram = pa_rule; + } + +let warn_irrelevant_format = + CWarnings.create ~name:"irrelevant-format-only-parsing" ~category:"parsing" + (fun () -> str "The format modifier is irrelevant for only parsing rules.") + +let make_printing_rules reserved (sd : SynData.syn_data) = let open SynData in + let custom,level,_,_ = sd.level in + let pp_rule = make_pp_rule level sd.pp_syntax_data sd.format in + if sd.only_parsing then (if sd.format <> None then warn_irrelevant_format (); None) + else Some { + synext_reserved = reserved; + synext_unparsing = (pp_rule,level); synext_extra = sd.extra; } @@ -1463,9 +1570,10 @@ let to_map l = let add_notation_in_scope ~local deprecation df env c mods scope = let open SynData in let sd = compute_syntax_data ~local deprecation df mods in - (* Prepare the interpretation *) (* Prepare the parsing and printing rules *) - let sy_rules = make_syntax_rules sd in + let sy_pa_rules = make_parsing_rules sd in + let sy_pp_rules = make_printing_rules false sd in + (* Prepare the interpretation *) let i_vars = make_internalization_vars sd.recvars sd.mainvars sd.intern_typs in let nenv = { ninterp_var_type = to_map i_vars; @@ -1485,24 +1593,29 @@ let add_notation_in_scope ~local deprecation df env c mods scope = notobj_onlyprint = sd.only_printing; notobj_deprecation = sd.deprecation; notobj_notation = sd.info; + notobj_specific_pp_rules = sy_pp_rules; } in + let gen_sy_pp_rules = + if Ppextend.has_generic_notation_printing_rule (fst sd.info) then None + else sy_pp_rules (* We use the format of this notation as the default 
*) in + let _ = check_reserved_format (fst sd.info) sy_pp_rules in (* Ready to change the global state *) - Flags.if_verbose (List.iter (fun (f,x) -> f x)) sd.msgs; - Lib.add_anonymous_leaf (inSyntaxExtension (local, sy_rules)); + List.iter (fun f -> f ()) sd.msgs; + Lib.add_anonymous_leaf (inSyntaxExtension (local, (sy_pa_rules,gen_sy_pp_rules))); Lib.add_anonymous_leaf (inNotation notation); sd.info let add_notation_interpretation_core ~local df env ?(impls=empty_internalization_env) c scope onlyparse onlyprint deprecation = let (recvars,mainvars,symbs) = analyze_notation_tokens ~onlyprint df in (* Recover types of variables and pa/pp rules; redeclare them if needed *) - let level, i_typs, onlyprint = if not (is_numeral symbs) then begin - let sy = recover_notation_syntax (make_notation_key InConstrEntrySomeLevel symbs) in + let level, i_typs, onlyprint, pp_sy = if not (is_numeral symbs) then begin + let (pa_sy,pp_sy as sy) = recover_notation_syntax (make_notation_key InConstrEntrySomeLevel symbs) in let () = Lib.add_anonymous_leaf (inSyntaxExtension (local,sy)) in (* If the only printing flag has been explicitly requested, put it back *) - let onlyprint = onlyprint || sy.synext_notgram.notgram_onlyprinting in - let _,_,_,typs = sy.synext_level in - Some sy.synext_level, typs, onlyprint - end else None, [], false in + let onlyprint = onlyprint || pa_sy.synext_notgram = None in + let _,_,_,typs = pa_sy.synext_level in + Some pa_sy.synext_level, typs, onlyprint, pp_sy + end else None, [], false, None in (* Declare interpretation *) let path = (Lib.library_dp(), Lib.current_dirpath true) in let df' = (make_notation_key InConstrEntrySomeLevel symbs, (path,df)) in @@ -1525,6 +1638,7 @@ let add_notation_interpretation_core ~local df env ?(impls=empty_internalization notobj_onlyprint = onlyprint; notobj_deprecation = deprecation; notobj_notation = df'; + notobj_specific_pp_rules = pp_sy; } in Lib.add_anonymous_leaf (inNotation notation); df' @@ -1532,10 +1646,11 @@ let 
add_notation_interpretation_core ~local df env ?(impls=empty_internalization (* Notations without interpretation (Reserved Notation) *) let add_syntax_extension ~local ({CAst.loc;v=df},mods) = let open SynData in - let psd = compute_pure_syntax_data ~local df mods in - let sy_rules = make_syntax_rules {psd with deprecation = None} in - Flags.if_verbose (List.iter (fun (f,x) -> f x)) psd.msgs; - Lib.add_anonymous_leaf (inSyntaxExtension(local,sy_rules)) + let psd = {(compute_pure_syntax_data ~local df mods) with deprecation = None} in + let pa_rules = make_parsing_rules psd in + let pp_rules = make_printing_rules true psd in + List.iter (fun f -> f ()) psd.msgs; + Lib.add_anonymous_leaf (inSyntaxExtension(local,(pa_rules,pp_rules))) (* Notations with only interpretation *) diff --git a/vernac/mltop.ml b/vernac/mltop.ml index ab9d008659..5046248e11 100644 --- a/vernac/mltop.ml +++ b/vernac/mltop.ml @@ -99,9 +99,9 @@ let ocaml_toploop () = *) let _ = CErrors.register_handler (function | Dynlink.Error e -> - hov 0 (str "Dynlink error: " ++ str Dynlink.(error_message e)) + Some (hov 0 (str "Dynlink error: " ++ str Dynlink.(error_message e))) | _ -> - raise CErrors.Unhandled + None ) let ml_load s = diff --git a/vernac/ppvernac.ml b/vernac/ppvernac.ml index 6240120cb0..314c423f65 100644 --- a/vernac/ppvernac.ml +++ b/vernac/ppvernac.ml @@ -107,8 +107,11 @@ open Pputils | InCustomEntry s -> keyword "custom" ++ spc () ++ str s let pr_at_level = function - | NumLevel n -> keyword "at" ++ spc () ++ keyword "level" ++ spc () ++ int n - | NextLevel -> keyword "at" ++ spc () ++ keyword "next" ++ spc () ++ keyword "level" + | NumLevel n -> spc () ++ keyword "at" ++ spc () ++ keyword "level" ++ spc () ++ int n + | NextLevel -> spc () ++ keyword "at" ++ spc () ++ keyword "next" ++ spc () ++ keyword "level" + | DefaultLevel -> mt () + + let level_of_pattern_level = function None -> DefaultLevel | Some n -> NumLevel n let pr_constr_as_binder_kind = let open Notation_term in function 
| AsIdent -> spc () ++ keyword "as ident" @@ -120,19 +123,14 @@ open Pputils let pr_set_entry_type pr = function | ETIdent -> str"ident" | ETGlobal -> str"global" - | ETPattern (b,None) -> pr_strict b ++ str"pattern" - | ETPattern (b,Some n) -> pr_strict b ++ str"pattern" ++ spc () ++ pr_at_level (NumLevel n) + | ETPattern (b,n) -> pr_strict b ++ str"pattern" ++ pr_at_level (level_of_pattern_level n) | ETConstr (s,bko,lev) -> pr_notation_entry s ++ pr lev ++ pr_opt pr_constr_as_binder_kind bko | ETBigint -> str "bigint" | ETBinder true -> str "binder" | ETBinder false -> str "closed binder" - let pr_at_level_opt = function - | None -> mt () - | Some n -> spc () ++ pr_at_level n - let pr_set_simple_entry_type = - pr_set_entry_type pr_at_level_opt + pr_set_entry_type pr_at_level let pr_comment pr_c = function | CommentConstr c -> pr_c c @@ -402,7 +400,7 @@ let string_of_theorem_kind = let open Decls in function let pr_syntax_modifier = let open Gramlib.Gramext in function | SetItemLevel (l,bko,n) -> - prlist_with_sep sep_v2 str l ++ spc () ++ pr_at_level_opt n ++ + prlist_with_sep sep_v2 str l ++ spc () ++ pr_at_level n ++ pr_opt pr_constr_as_binder_kind bko | SetLevel n -> pr_at_level (NumLevel n) | SetCustomEntry (s,n) -> keyword "in" ++ spc() ++ keyword "custom" ++ spc() ++ str s ++ (match n with None -> mt () | Some n -> pr_at_level (NumLevel n)) @@ -812,11 +810,12 @@ let string_of_definition_object_kind = let open Decls in function | RecordDecl (c,fs) -> pr_record_decl c fs in - let pr_oneind key (((coe,iddecl),indpar,s,lc),ntn) = + let pr_oneind key (((coe,iddecl),(indupar,indpar),s,lc),ntn) = hov 0 ( str key ++ spc() ++ (if coe then str"> " else str"") ++ pr_ident_decl iddecl ++ - pr_and_type_binders_arg indpar ++ + pr_and_type_binders_arg indupar ++ + pr_opt (fun p -> str "|" ++ spc() ++ pr_and_type_binders_arg p) indpar ++ pr_opt (fun s -> str":" ++ spc() ++ pr_lconstr_expr env sigma s) s ++ str" :=") ++ pr_constructor_list lc ++ prlist (pr_decl_notation @@ 
pr_constr env sigma) ntn diff --git a/vernac/prettyp.ml b/vernac/prettyp.ml index 32c438c724..cdd93db884 100644 --- a/vernac/prettyp.ml +++ b/vernac/prettyp.ml @@ -547,7 +547,7 @@ let print_located_qualid ref = print_located_qualid "object" LocAny ref (**** Gallina layer *****) let gallina_print_typed_value_in_env env sigma (trm,typ) = - (pr_leconstr_env env sigma trm ++ fnl () ++ + (pr_leconstr_env ~inctx:true env sigma trm ++ fnl () ++ str " : " ++ pr_letype_env env sigma typ) (* To be improved; the type should be used to provide the types in the @@ -556,7 +556,7 @@ let gallina_print_typed_value_in_env env sigma (trm,typ) = synthesizes the type nat of the abstraction on u *) let print_named_def env sigma name body typ = - let pbody = pr_lconstr_env env sigma body in + let pbody = pr_lconstr_env ~inctx:true env sigma body in let ptyp = pr_ltype_env env sigma typ in let pbody = if Constr.isCast body then surround pbody else pbody in (str "*** [" ++ str name ++ str " " ++ @@ -598,7 +598,7 @@ let gallina_print_section_variable env sigma id = with_line_skip (print_name_infos (GlobRef.VarRef id)) let print_body env evd = function - | Some c -> pr_lconstr_env env evd c + | Some c -> pr_lconstr_env ~inctx:true env evd c | None -> (str"<no body>") let print_typed_body env evd (val_0,typ) = diff --git a/vernac/pvernac.ml b/vernac/pvernac.ml index 826e88cabf..2425f3d6c1 100644 --- a/vernac/pvernac.ml +++ b/vernac/pvernac.ml @@ -58,7 +58,7 @@ module Vernac_ = Rule (Next (Stop, Atoken Tok.PEOI), act_eoi); Rule (Next (Stop, Aentry vernac_control), act_vernac); ] in - Pcoq.grammar_extend main_entry None (None, [None, None, rule]) + Pcoq.grammar_extend main_entry (None, [None, None, rule]) let select_tactic_entry spec = match spec with diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml index e469323f50..2eb1aa39b0 100644 --- a/vernac/vernacentries.ml +++ b/vernac/vernacentries.ml @@ -623,18 +623,16 @@ let should_treat_as_cumulative cum poly = else user_err Pp.(str 
"The NonCumulative prefix can only be used in a polymorphic context.") | None -> poly && is_polymorphic_inductive_cumulativity () -let uniform_att = - let get_uniform_inductive_parameters = - Goptions.declare_bool_option_and_ref - ~depr:false - ~key:["Uniform"; "Inductive"; "Parameters"] - ~value:false - in - let open Attributes.Notations in - Attributes.bool_attribute ~name:"uniform" ~on:"uniform" ~off:"nonuniform" >>= fun u -> - let u = match u with Some u -> u | None -> get_uniform_inductive_parameters () in - let u = if u then ComInductive.UniformParameters else ComInductive.NonUniformParameters in - return u +let get_uniform_inductive_parameters = + Goptions.declare_bool_option_and_ref + ~depr:false + ~key:["Uniform"; "Inductive"; "Parameters"] + ~value:false + +let should_treat_as_uniform () = + if get_uniform_inductive_parameters () + then ComInductive.UniformParameters + else ComInductive.NonUniformParameters let vernac_record ~template udecl cum k poly finite records = let cumulative = should_treat_as_cumulative cum poly in @@ -682,6 +680,7 @@ let finite_of_kind = let open Declarations in function indicates whether the type is inductive, co-inductive or neither. 
*) let vernac_inductive ~atts cum lo kind indl = + let template, poly = Attributes.(parse Notations.(template ++ polymorphic) atts) in let open Pp in let udecl, indl = extract_inductive_udecl indl in if Dumpglob.dump () then @@ -710,12 +709,14 @@ let vernac_inductive ~atts cum lo kind indl = if Option.has_some is_defclass then (* Definitional class case *) let (id, bl, c, l) = Option.get is_defclass in + let bl = match bl with + | bl, None -> bl + | _ -> CErrors.user_err Pp.(str "Definitional classes do not support the \"|\" syntax.") + in let (coe, (lid, ce)) = l in let coe' = if coe then Some true else None in let f = AssumExpr ((make ?loc:lid.loc @@ Name lid.v), ce), - { rf_subclass = coe' ; rf_priority = None ; rf_notation = [] ; rf_canonical = true } - in - let template, poly = Attributes.(parse Notations.(template ++ polymorphic) atts) in + { rf_subclass = coe' ; rf_priority = None ; rf_notation = [] ; rf_canonical = true } in vernac_record ~template udecl cum (Class true) poly finite [id, bl, c, None, [f]] else if List.for_all is_record indl then (* Mutual record case *) @@ -732,12 +733,15 @@ let vernac_inductive ~atts cum lo kind indl = let () = List.iter check_where indl in let unpack ((id, bl, c, decl), _) = match decl with | RecordDecl (oc, fs) -> + let bl = match bl with + | bl, None -> bl + | _ -> CErrors.user_err Pp.(str "Records do not support the \"|\" syntax.") + in (id, bl, c, oc, fs) | Constructors _ -> assert false (* ruled out above *) in let kind = match kind with Class _ -> Class false | _ -> kind in let recordl = List.map unpack indl in - let template, poly = Attributes.(parse Notations.(template ++ polymorphic) atts) in vernac_record ~template udecl cum kind poly finite recordl else if List.for_all is_constructor indl then (* Mutual inductive case *) @@ -761,12 +765,9 @@ let vernac_inductive ~atts cum lo kind indl = | RecordDecl _ -> assert false (* ruled out above *) in let indl = List.map unpack indl in - let (template, poly), uniform = - 
Attributes.(parse Notations.(template ++ polymorphic ++ uniform_att) atts) - in let cumulative = should_treat_as_cumulative cum poly in - ComInductive.do_mutual_inductive ~template udecl indl ~cumulative ~poly - ~private_ind:lo ~uniform finite + let uniform = should_treat_as_uniform () in + ComInductive.do_mutual_inductive ~template udecl indl ~cumulative ~poly ~private_ind:lo ~uniform finite else user_err (str "Mixed record-inductive definitions are not allowed") @@ -1281,6 +1282,13 @@ let () = let () = declare_bool_option { optdepr = false; + optkey = ["Printing";"Parentheses"]; + optread = (fun () -> !Constrextern.print_parentheses); + optwrite = (fun b -> Constrextern.print_parentheses := b) } + +let () = + declare_bool_option + { optdepr = false; optkey = ["Printing";"Existential";"Instances"]; optread = (fun () -> !Detyping.print_evar_arguments); optwrite = (:=) Detyping.print_evar_arguments } diff --git a/vernac/vernacexpr.ml b/vernac/vernacexpr.ml index 8ead56dfdf..45018a246c 100644 --- a/vernac/vernacexpr.ml +++ b/vernac/vernacexpr.ml @@ -163,12 +163,15 @@ type constructor_expr = (lident * constr_expr) with_coercion type constructor_list_or_record_decl_expr = | Constructors of constructor_expr list | RecordDecl of lident option * (local_decl_expr * record_field_attr) list +type inductive_params_expr = local_binder_expr list * local_binder_expr list option +(** If the option is nonempty the "|" marker was used *) + type inductive_expr = - ident_decl with_coercion * local_binder_expr list * constr_expr option * + ident_decl with_coercion * inductive_params_expr * constr_expr option * constructor_list_or_record_decl_expr type one_inductive_expr = - lident * local_binder_expr list * constr_expr option * constructor_expr list + lident * inductive_params_expr * constr_expr option * constructor_expr list type typeclass_constraint = name_decl * Glob_term.binding_kind * constr_expr and typeclass_context = typeclass_constraint list @@ -177,7 +180,7 @@ type 
proof_expr = ident_decl * (local_binder_expr list * constr_expr) type syntax_modifier = - | SetItemLevel of string list * Notation_term.constr_as_binder_kind option * Extend.production_level option + | SetItemLevel of string list * Notation_term.constr_as_binder_kind option * Extend.production_level | SetLevel of int | SetCustomEntry of string * int option | SetAssoc of Gramlib.Gramext.g_assoc diff --git a/vernac/vernacextend.ml b/vernac/vernacextend.ml index e29086d726..f41df06f85 100644 --- a/vernac/vernacextend.ml +++ b/vernac/vernacextend.ml @@ -254,7 +254,7 @@ let vernac_argument_extend ~name arg = e | Arg_rules rules -> let e = Pcoq.create_generic_entry Pcoq.utactic name (Genarg.rawwit wit) in - let () = Pcoq.grammar_extend e None (None, [(None, None, rules)]) in + let () = Pcoq.grammar_extend e (None, [(None, None, rules)]) in e in let pr = arg.arg_printer in diff --git a/vernac/vernacstate.ml b/vernac/vernacstate.ml index 80b72225f0..3c70961e06 100644 --- a/vernac/vernacstate.ml +++ b/vernac/vernacstate.ml @@ -124,8 +124,8 @@ module Proof_global = struct let () = CErrors.register_handler begin function | NoCurrentProof -> - Pp.(str "No focused proof (No proof-editing in progress).") - | _ -> raise CErrors.Unhandled + Some (Pp.(str "No focused proof (No proof-editing in progress).")) + | _ -> None end open Lemmas |
