aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitattributes2
-rw-r--r--.gitignore3
-rw-r--r--CONTRIBUTING.md1274
-rw-r--r--Makefile.build33
-rw-r--r--Makefile.common3
-rw-r--r--azure-pipelines.yml3
-rw-r--r--coqpp/coqpp_lex.mll1
-rw-r--r--coqpp/coqpp_main.ml28
-rw-r--r--coqpp/coqpp_parse.mly60
-rw-r--r--coqpp/coqpp_parser.ml44
-rw-r--r--coqpp/coqpp_parser.mli15
-rw-r--r--coqpp/dune2
-rw-r--r--dev/README.md2
-rwxr-xr-xdev/build/windows/MakeCoq_MinGW.bat2
-rwxr-xr-xdev/build/windows/makecoq_mingw.sh114
-rw-r--r--dev/build/windows/patches_coq/VST.patch9
-rw-r--r--dev/build/windows/patches_coq/quickchick.patch41
-rwxr-xr-xdev/ci/ci-basic-overlay.sh27
-rwxr-xr-xdev/ci/gitlab.bat8
-rw-r--r--dev/ci/user-overlays/10441-ppedrot-static-poly-section.sh6
-rw-r--r--dev/doc/xml-protocol.md2
-rw-r--r--dev/shim/dune8
-rw-r--r--doc/changelog/02-specification-language/10441-static-poly-section.rst11
-rw-r--r--doc/changelog/08-tools/10430-extraction-int63.rst5
-rw-r--r--doc/changelog/10-standard-library/09811-remove-zlogarithm.rst4
-rw-r--r--doc/sphinx/addendum/ring.rst2
-rw-r--r--doc/sphinx/addendum/universe-polymorphism.rst2
-rw-r--r--doc/sphinx/changes.rst4
-rw-r--r--doc/sphinx/language/coq-library.rst36
-rw-r--r--doc/sphinx/language/gallina-extensions.rst11
-rw-r--r--doc/sphinx/language/gallina-specification-language.rst107
-rw-r--r--doc/sphinx/practical-tools/coqide.rst30
-rw-r--r--doc/sphinx/proof-engine/ltac.rst18
-rw-r--r--doc/sphinx/proof-engine/ltac2.rst8
-rw-r--r--doc/sphinx/proof-engine/ssreflect-proof-language.rst10
-rw-r--r--doc/sphinx/proof-engine/vernacular-commands.rst20
-rw-r--r--doc/stdlib/hidden-files1
-rw-r--r--doc/stdlib/index-list.html.template2
-rw-r--r--doc/tools/docgram/README.md208
-rw-r--r--doc/tools/docgram/common.edit_mlg220
-rw-r--r--doc/tools/docgram/doc_grammar.ml1606
-rw-r--r--doc/tools/docgram/fullGrammar3174
-rw-r--r--doc/tools/docgram/orderedGrammar4170
-rw-r--r--doc/tools/docgram/prodn.edit_mlg14
-rw-r--r--doc/tools/docgram/productionlist.edit_mlg25
-rw-r--r--ide/MacOS/default_accel_map10
-rw-r--r--ide/coqOps.ml43
-rw-r--r--ide/coqOps.mli1
-rw-r--r--ide/coqide.ml40
-rw-r--r--ide/coqide_ui.ml14
-rw-r--r--ide/preferences.ml14
-rw-r--r--kernel/byterun/coq_uint63_native.h55
-rw-r--r--kernel/cClosure.ml11
-rw-r--r--kernel/cClosure.mli4
-rw-r--r--kernel/uint63.mli12
-rw-r--r--kernel/uint63_amd64_63.ml90
-rw-r--r--kernel/uint63_i386_31.ml86
-rw-r--r--library/lib.ml86
-rw-r--r--library/lib.mli2
-rw-r--r--plugins/extraction/ExtrOCamlInt63.v56
-rw-r--r--plugins/funind/functional_principles_types.mli6
-rw-r--r--plugins/funind/g_indfun.mlg12
-rw-r--r--plugins/funind/indfun.ml325
-rw-r--r--plugins/funind/indfun.mli7
-rw-r--r--plugins/funind/invfun.ml6
-rw-r--r--plugins/funind/invfun.mli10
-rw-r--r--plugins/ltac/extratactics.mlg12
-rw-r--r--plugins/ssr/ssrparser.mlg92
-rw-r--r--pretyping/evarconv.ml18
-rw-r--r--pretyping/tacred.ml9
-rw-r--r--pretyping/unification.ml5
-rw-r--r--printing/printer.ml9
-rw-r--r--proofs/pfedit.ml4
-rw-r--r--proofs/proof.ml2
-rw-r--r--proofs/proof.mli2
-rw-r--r--proofs/proof_global.ml11
-rw-r--r--stm/stm.ml25
-rw-r--r--stm/vernac_classifier.ml10
-rw-r--r--test-suite/arithmetic/diveucl_21.v12
-rw-r--r--test-suite/bugs/closed/HoTT_coq_020.v2
-rw-r--r--test-suite/bugs/closed/HoTT_coq_098.v2
-rw-r--r--test-suite/bugs/closed/bug_10300.v14
-rw-r--r--test-suite/bugs/closed/bug_10533.v8
-rw-r--r--test-suite/bugs/closed/bug_10560.v9
-rw-r--r--test-suite/bugs/closed/bug_3314.v6
-rw-r--r--test-suite/bugs/closed/bug_4503.v6
-rw-r--r--test-suite/bugs/closed/bug_4816.v13
-rw-r--r--test-suite/misc/universes/dune3
-rw-r--r--test-suite/success/namedunivs.v1
-rw-r--r--test-suite/success/polymorphism.v9
-rw-r--r--theories/Numbers/Cyclic/Int63/Int63.v33
-rw-r--r--theories/Numbers/Cyclic/ZModulo/ZModulo.v58
-rw-r--r--theories/Reals/Rtrigo_calc.v8
-rw-r--r--theories/ZArith/ZArith.v1
-rw-r--r--theories/ZArith/Zlogarithm.v273
-rw-r--r--theories/ZArith/Zsqrt_compat.v234
-rw-r--r--toplevel/coqloop.ml2
-rw-r--r--toplevel/g_toplevel.mlg6
-rw-r--r--user-contrib/Ltac2/tac2entries.ml2
-rw-r--r--vernac/comFixpoint.ml85
-rw-r--r--vernac/comFixpoint.mli51
-rw-r--r--vernac/comInductive.ml6
-rw-r--r--vernac/comInductive.mli53
-rw-r--r--vernac/comProgramFixpoint.ml40
-rw-r--r--vernac/comProgramFixpoint.mli14
-rw-r--r--vernac/declareObl.ml5
-rw-r--r--vernac/declareObl.mli5
-rw-r--r--vernac/g_vernac.mlg18
-rw-r--r--vernac/lemmas.ml180
-rw-r--r--vernac/obligations.mli46
-rw-r--r--vernac/ppvernac.ml24
-rw-r--r--vernac/ppvernac.mli2
-rw-r--r--vernac/pvernac.mli2
-rw-r--r--vernac/vernacentries.ml18
-rw-r--r--vernac/vernacexpr.ml23
-rw-r--r--vernac/vernacprop.ml1
116 files changed, 11851 insertions, 1888 deletions
diff --git a/.gitattributes b/.gitattributes
index 260e3f96b6..415cabba3b 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -19,6 +19,7 @@
*.dtd whitespace=blank-at-eol,tab-in-indent
dune* whitespace=blank-at-eol,tab-in-indent
*.dune whitespace=blank-at-eol,tab-in-indent
+*.edit_mlg whitespace=blank-at-eol,tab-in-indent
*.el whitespace=blank-at-eol,tab-in-indent
*.fake whitespace=blank-at-eol,tab-in-indent
*.g whitespace=blank-at-eol,tab-in-indent
@@ -36,6 +37,7 @@ dune* whitespace=blank-at-eol,tab-in-indent
*.mll whitespace=blank-at-eol,tab-in-indent
*.mllib whitespace=blank-at-eol,tab-in-indent
*.mlp whitespace=blank-at-eol,tab-in-indent
+*.mly whitespace=blank-at-eol,tab-in-indent
*.mlpack whitespace=blank-at-eol,tab-in-indent
*.nix whitespace=blank-at-eol,tab-in-indent
*.nsh whitespace=blank-at-eol,tab-in-indent
diff --git a/.gitignore b/.gitignore
index 5339a0c44d..93b874eae3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -110,6 +110,9 @@ doc/stdlib/FullLibrary.coqdoc.tex
doc/stdlib/html/
doc/stdlib/index-body.html
doc/stdlib/index-list.html
+doc/tools/docgram/productionlistGrammar
+doc/tools/docgram/editedGrammar
+doc/tools/docgram/prodnGrammar
doc/tutorial/Tutorial.v.out
doc/RecTutorial/RecTutorial.html
doc/RecTutorial/RecTutorial.ps
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 0d11d092ba..529a912bb6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,37 +1,282 @@
-# Contributing to Coq
+# Guide to contributing to Coq #
-Thank you for your interest in contributing to Coq! There are many ways to
-contribute, and we appreciate all of them. Please make sure you read and
-abide by the [Code of Conduct](CODE_OF_CONDUCT.md).
+## Foreword ##
-## Bug Reports
+As with any documentation, this guide is most useful if it's promptly
+updated to reflect changes in processes, development tools, or the Coq
+ecosystem. If you notice anything inaccurate or outdated, please
+signal it in a new issue, or fix it in a new pull request. If you
+find some parts are not sufficiently clear, you may open an issue as
+well.
-Bug reports are enormously useful to identify issues with Coq; we can't fix
-what we don't know about. To report a bug, please open an issue in the
-[Coq issue tracker][] (you'll need a GitHub
-account). You can file a bug for any of the following:
+## Table of contents ##
-- An anomaly. These are always considered bugs, so Coq will even ask you to
- file a bug report!
+- [Introduction](#introduction)
+- [Contributing to the ecosystem](#contributing-to-the-ecosystem)
+ - [Asking and answering questions](#asking-and-answering-questions)
+ - [Writing tutorials and blog posts](#writing-tutorials-and-blog-posts)
+ - [Contributing to the wiki](#contributing-to-the-wiki)
+ - [Creating and maintaining Coq packages](#creating-and-maintaining-coq-packages)
+ - [Distribution](#distribution)
+ - [Support](#support)
+ - [Standard libraries](#standard-libraries)
+ - [Maintaining existing packages in coq-community](#maintaining-existing-packages-in-coq-community)
+ - [Contributing to the website or the package archive](#contributing-to-the-website-or-the-package-archive)
+ - [Other ways of creating content](#other-ways-of-creating-content)
+- [Issues](#issues)
+ - [Reporting a bug, requesting an enhancement](#reporting-a-bug-requesting-an-enhancement)
+ - [Beta testing](#beta-testing)
+ - [Helping triage existing issues](#helping-triage-existing-issues)
+- [Code changes](#code-changes)
+ - [Using GitHub pull requests](#using-github-pull-requests)
+ - [Taking feedback into account](#taking-feedback-into-account)
+ - [Understanding automatic feedback](#understanding-automatic-feedback)
+ - [Understanding reviewers' feedback](#understanding-reviewers-feedback)
+ - [Fixing your branch](#fixing-your-branch)
+ - [Improving the official documentation](#improving-the-official-documentation)
+ - [Contributing to the standard library](#contributing-to-the-standard-library)
+ - [Fixing bugs and performing small changes](#fixing-bugs-and-performing-small-changes)
+ - [Proposing large changes: Coq Enhancement Proposals](#proposing-large-changes-coq-enhancement-proposals)
+ - [Collaborating on a pull request](#collaborating-on-a-pull-request)
+- [Becoming a maintainer](#becoming-a-maintainer)
+ - [Reviewing pull requests](#reviewing-pull-requests)
+ - [Merging pull requests](#merging-pull-requests)
+ - [Core development team](#core-development-team)
+- [Release management](#release-management)
+ - [Packaging Coq](#packaging-coq)
+- [Additional resources](#additional-resources)
+ - [Developer documentation](#developer-documentation)
+ - [Where to find the resources](#where-to-find-the-resources)
+ - [Building Coq](#building-coq)
+ - [Continuous integration](#continuous-integration)
+ - [Code owners, issue and pull request templates](#code-owners-issue-and-pull-request-templates)
+ - [Style guide](#style-guide)
+ - [OCaml resources](#ocaml-resources)
+ - [Git documentation, tips and tricks](#git-documentation-tips-and-tricks)
+ - [GitHub documentation, tips and tricks](#github-documentation-tips-and-tricks)
+ - [GitLab documentation, tips and tricks](#gitlab-documentation-tips-and-tricks)
+ - [Coqbot](#coqbot)
+ - [Online forum and chat to talk to developers](#online-forum-and-chat-to-talk-to-developers)
+ - [Coq remote working groups](#coq-remote-working-groups)
+ - [Coq Users and Developers Workshops](#coq-users-and-developers-workshops)
+
+## Introduction ##
+
+Thank you for your interest in contributing to Coq! There are many
+ways to contribute, and we appreciate all of them.
+
+People often begin by making small contributions, and contributions to
+the ecosystem, before working their way up incrementally to the core
+parts of the system, and start to propose larger changes, or take an
+active role in maintaining the system. So this is the way this
+contributing guide is organized. However, it is by no means necessary
+that you go through these steps in this order. Feel free to use this
+guide as a reference and quickly jump to the part that is most
+relevant to you at the current time.
+
+We want to make sure that contributing to Coq is a fun and positive
+experience for everyone, so please make sure you read and abide by our
+[Code of Conduct][Code-of-conduct].
+
+## Contributing to the ecosystem ##
+
+In this section, we present all the ways to contribute to Coq outside
+of the Coq repository itself.
+
+### Asking and answering questions ###
+
+One very important way of contributing is by asking and answering
+questions, in order to create a body of easily-browsable,
+problem-oriented, additional documentation.
+
+There are two main platforms for this purpose:
+
+- [Stack Overflow][Stack-Overflow] (or more generally the [Stack
+ Exchange][Stack-Exchange] platforms, as some Coq questions may be
+ asked on other sites, such as TCS Stack Exchange);
+- Our [Discourse forum][Discourse].
+
+In particular, our Discourse forum has several non-English categories
+that have yet to find their public, so do not hesitate to advertise
+them to people you know who might not be at ease with English.
+
+Other active places to answer questions include the [Coq-Club][]
+mailing list, and the Coq IRC channel (`irc://irc.freenode.net/#coq`).
+
+### Writing tutorials and blog posts ###
+
+Writing about Coq, in the form of tutorials or blog posts, is also a
+very important contribution. In particular, it can help new users get
+interested in Coq, and learn about it, and existing users learn about
+advanced features. Our official resources, such as the [reference
+manual][refman] are not suited for learning Coq, but serve as
+reference documentation to which you can link from your tutorials.
+
+The Coq website has a page listing known
+[tutorials][Coq-documentation] and the [wiki][] home page contains a
+list too. You can expand the former through a pull request on the
+[Coq website repository][Coq-website-repository], while the latter can
+be edited directly by anyone with a GitHub account.
+
+At the current time, we do not have a way of aggregating blog posts on
+a single page (like [OCaml planet][OCaml-planet]), but this would
+probably be something useful to get, so do not hesitate if you want to
+create it. Some people use [Reddit][] for this purpose.
+
+### Contributing to the wiki ###
+
+Coq's [wiki][] is an informal source of additional documentation which
+anyone with a GitHub account can edit directly. In particular, it
+contains the Coq [FAQ][] which has not seen so many updates in the
+recent years. You should feel free to fix it, expand it, and even
+refactor it (if you are not sure if some changes would be welcome, you
+can open an issue to discuss them before performing them).
+
+People who watch the Coq repository will see recent wiki edits in
+their GitHub feed. It is recommended to review them *a posteriori* to
+check no mistake was introduced. The wiki is also a standard git
+repository, so people can follow the changes using any standard git
+tool.
+
+Coq's wiki is formatted using GitHub's flavored Markdown, with some
+wiki-specific extensions. See:
+
+- [GitHub's Markdown guide][GitHub-markdown]
+- [GitHub's wiki extensions][GitHub-wiki-extensions]
+
+### Creating and maintaining Coq packages ###
+
+*Note: this sub-section is about packages extending Coq, such as
+plugins or libraries. A different, but also very valuable,
+contribution is to package Coq for your preferred package manager (see
+[Packaging Coq](#packaging-coq)).*
+
+Sharing reusable assets in the form of new libraries, plugins, and
+tools is great so that others can start building new things on top.
+Having an extensive and healthy package ecosystem will be key to the
+success of Coq.
+
+#### Distribution ####
+
+You can distribute your library or plugin through the [Coq package
+index][Coq-package-index]. Tools can be advertised on the [tools
+page][tools-website] of the Coq website, or the [tools
+page][tools-wiki] of the wiki.
+
+#### Support ####
+
+You can find advice and best practices about maintaining a Coq project
+on the [coq-community wiki][coq-community-wiki].
+
+Learn how to write a Coq plugin, and about best practices, in the Coq
+[plugin tutorial][plugin-tutorial]. This tutorial is still a work in
+progress, so do not hesitate to expand it, or ask questions.
+
+If you want quick feedback on best practices, or how to talk to the
+Coq API, a good place to hang out is the Coq [Gitter channel][Gitter].
+
+Finally, we strongly encourage authors of plugins to submit their
+plugins to join Coq's continuous integration (CI) early on. Indeed,
+the Coq API gets continuously reworked, so this is the best way of
+ensuring your plugin stays compatible with new Coq versions, as this
+means Coq developers will fix your plugin for you. Learn more about
+this in the [CI README (user part)][CI-README-users].
+
+Pure Coq libraries are also welcome to join Coq's CI, especially if
+they test underused / undertested features.
+
+#### Standard libraries ####
+
+There are many general purpose Coq libraries, so before you publish
+yours, consider whether you could contribute to an existing one
+instead (either the official [standard
+library](#contributing-to-the-standard-library), or one of the many
+[alternative standard libraries][other-standard-libraries]).
+
+#### Maintaining existing packages in coq-community ####
+
+Some Coq packages are not maintained by their initial authors anymore
+(for instance if they've moved on to new jobs or new projects) even if
+they were useful, or interesting. The coq-community organization is a
+place for volunteers to take over the maintenance of such packages.
+
+If you want to contribute by becoming a maintainer, there is [a list
+of packages waiting for a
+maintainer][coq-community-maintainer-wanted]. You can also propose a
+package that is not listed. Find out more about coq-community in [the
+manifesto's README][coq-community-manifesto].
+
+### Contributing to the editor support packages ###
+
+Here are the URLs of the repositories of the various editor support
+packages:
+
+- Proof-General (Emacs major mode) <https://github.com/ProofGeneral/PG>
+- Company-coq (Emacs minor mode) <https://github.com/cpitclaudel/company-coq>
+- Coqtail (Vim) <https://github.com/whonore/Coqtail>
+- VsCoq Reloaded (VsCode) <https://github.com/coq-community/vscoq>
+
+Each of them has their own contribution process.
+
+### Contributing to the website or the package archive ###
+
+The website and the package archive have their own repositories:
+
+- <https://github.com/coq/www>
+- <https://github.com/coq/opam-coq-archive>
+
+You can contribute to them by using issues and pull requests on these
+repositories. These repositories should get their own contributing
+guides, but they don't have any at the time of writing this.
+
+### Other ways of creating content ###
+
+There are many other ways of creating content and making the Coq
+community thrive, including many which we might not have thought
+about. Feel free to add more references / ideas to this sub-section.
+
+You can tweet about Coq, you can give talks about Coq both in
+academic, and in non-academic venues (such as developer conferences).
+
+[Codewars][] is a platform where people can try to solve some
+programming challenges that were proposed by other community members.
+Coq is supported and the community is eager to get more challenges.
+
+## Issues ##
+
+### Reporting a bug, requesting an enhancement ###
+
+Bug reports are enormously useful to identify issues with Coq; we
+can't fix what we don't know about. To report a bug, please open an
+issue in the [Coq issue tracker][Coq-issue-tracker] (you'll need a
+GitHub account). You can file a bug for any of the following:
+
+- An anomaly. These are always considered bugs, so Coq will even ask
+ you to file a bug report!
- An error you didn't expect. If you're not sure whether it's a bug or
- intentional, feel free to file a bug anyway. We may want to improve the
- documentation or error message.
-- Missing documentation. It's helpful to track where the documentation should
- be improved, so please file a bug if you can't find or don't understand some
- bit of documentation.
-- An error message that wasn't as helpful as you'd like. Bonus points for
- suggesting what information would have helped you.
-- Bugs in CoqIDE should also be filed in the
- [Coq issue tracker][].
- Bugs in the Emacs plugin should be filed against
- [ProofGeneral](https://github.com/ProofGeneral/PG/issues), or against
- [company-coq](https://github.com/cpitclaudel/company-coq/issues) if they are
- specific to company-coq features.
-
-It would help if you search the existing issues before reporting a bug. This
-can be difficult, so consider it extra credit. We don't mind duplicate bug
-reports. If unsure, you are always very welcome to ask on our [Discourse forum][]
-or [Gitter chat][] before, after, or while writing a bug report
+ intentional, feel free to file a bug anyway. We may want to improve
+ the documentation or error message.
+- Missing or incorrect documentation. It's helpful to track where the
+ documentation should be improved, so please file a bug if you can't
+ find or don't understand some bit of documentation.
+- An error message that wasn't as helpful as you'd like. Bonus points
+ for suggesting what information would have helped you.
+- Bugs in CoqIDE should also be filed in the [Coq issue
+ tracker][Coq-issue-tracker]. Bugs in the Emacs plugin should be
+ filed against [ProofGeneral][ProofGeneral-issues], or against
+ [company-coq][company-coq-issues] if they are specific to
+ company-coq features.
+
+It would help if you search the existing issues before reporting a
+bug. This can be difficult, so consider it extra credit. We don't
+mind duplicate bug reports. If unsure, you are always very welcome to
+ask on our [Discourse forum][Discourse] or [Gitter chat][Gitter]
+before, after, or while writing a bug report.
+
+It is better if you can test that your bug is still present in the
+current testing or development version of Coq (see the [next
+sub-section](#beta-testing)) before reporting it, but if you can't, it
+should not discourage you from reporting it.
When it applies, it's extremely helpful for bug reports to include sample
code, and much better if the code is self-contained and complete. It's not
@@ -40,177 +285,838 @@ since someone else can often do this if you include a complete example. We
tend to include the code in the bug description itself, but if you have a
very large input file then you can add it as an attachment.
-If you want to minimize your bug (or help minimize someone else's) for more
-extra credit, then you can use the
-[Coq bug minimizer](https://github.com/JasonGross/coq-tools) (specifically,
-the bug minimizer is the `find-bug.py` script in that repo).
-
-### Triaging bug reports
-
-Triaging bug reports (adding labels, closing outdated / resolved bugs)
-requires you to be granted some permissions. You may request members of the
-**@coq/core** team to add you to the contributors team. They can do so using
-this link: <https://github.com/orgs/coq/teams/contributors/members?add=true>.
-
-## Pull requests
-
-**Beginner's guide to hacking Coq: [`dev/doc/README.md`](dev/doc/README.md)** \
-**Development information and tools: [`dev/README.md`](dev/README.md)**
-
-If you want to contribute a bug fix or feature yourself, pull requests on
-the [GitHub repository](https://github.com/coq/coq) are the way to contribute
-directly to the Coq implementation. We recommend you create a fork of the
-repository on GitHub and push your changes to a new "topic branch" in that
-fork. From there you can follow the
-[GitHub pull request documentation](https://help.github.com/articles/about-pull-requests/)
-to get your changes reviewed and pulled into the Coq source repository.
-
-Documentation for getting started with the Coq sources is located in various
-files in [`dev/doc`](dev/doc) (for example, [debugging.md](dev/doc/debugging.md)).
-
-Please make pull requests against the `master` branch.
-
-If it's your first significant contribution to Coq (significant means: more
-than fixing a typo), your pull request should include a commit adding your name
-to the [`CREDITS`](CREDITS) file (possibly with the name of your
-institution / employer if relevant to your contribution, an ORCID if you have
-one —you may log into https://orcid.org/ using your institutional account to
-get one—, and the year of your contribution).
-
-It's helpful to run the Coq test suite with `make test-suite` before submitting
-your change. Our CI runs this test suite and lots of other tests, including
-building external Coq projects, on every pull request, but these results
-take significantly longer to come back (on the order of a few hours). Running
-the test suite locally will take somewhere around 10-15 minutes. Refer to
-[`dev/ci/README-developers.md`](dev/ci/README-developers.md) for more
-information on CI tests, including how to run them on your private branches.
-
-If your pull request fixes a bug, please consider adding a regression test as
-well. See [`test-suite/README.md`](test-suite/README.md) for how to do so.
-
-If your pull request fixes a critical bug (a bug allowing a proof of `False`),
-please add an entry to [`dev/doc/critical-bugs`](/dev/doc/critical-bugs).
-
-Don't be alarmed if the pull request process takes some time. It can take a
-few days to get feedback, approval on the final changes, and then a merge.
-Do not hesitate to ping the reviewers if it takes longer than this.
-Coq doesn't release new versions very frequently so it can take a few months
-for your change to land in a released version. That said, you can start using
-the latest Coq `master` branch to take advantage of all the new features,
-improvements, and fixes.
-
-Whitespace discipline (do not indent using tabs, no trailing spaces, text
-files end with newlines) is checked by the `lint` job on GitLab CI (using
-`git diff --check`). We ship a [`dev/tools/pre-commit`](/dev/tools/pre-commit)
-git hook which fixes these errors at commit time. `configure` automatically
-sets you up to use it, unless you already have a hook at `.git/hooks/pre-commit`.
-
-Each commit in your pull request should compile (this makes bisecting
-easier). The `lint` job checks compilation of the OCaml files, please
-try to keep the rest of Coq in a functioning state as well.
+If you want to minimize your bug (or help minimize someone else's) for
+more extra credit, then you can use the [Coq bug
+minimizer][JasonGross-coq-tools] (specifically, the bug minimizer is
+the `find-bug.py` script in that repo).
+
+### Beta testing ###
+
+Coq gets a new major release about every six months. Before a new
+major version is released, there is a beta-testing period, which
+usually lasts one month (see the [release plan][release-plan]). You
+can help make the upcoming release better, by testing the beta
+version, and trying to port your projects to it. You should report
+any bug you notice, but also any change of behavior that is not
+documented in the changelog. Then Coq developers will be able to
+check if what you reported is a regression that needs to be fixed, or
+an expected change that needs to be mentioned in the changelog.
+
+You can go even further by using the development version (`master`
+branch) of Coq on a day-by-day basis, and report problems as soon as
+you notice them. If you wish to do so, the easiest way to install Coq
+is through opam (using the `dev` version of the Coq package, available
+in the <https://coq.inria.fr/opam/core-dev> repository) or through
+[Nix][]. The documentation of the development version is [available
+online][master-doc], including the [unreleased
+changelog][unreleased-changelog].
+
+### Helping triage existing issues ###
+
+Coq has too many bug reports for its core developers alone to manage.
+You can help a lot by:
+
+- confirming that reported bugs are still active with the current
+ version of Coq;
+- determining if the bug is a regression (new, and unexpected,
+ behavior from a recent Coq version);
+- more generally, by reproducing a bug on another system, another
+ configuration, or another version of Coq, and by documenting what
+ you did;
+- giving a judgement about whether the reported behavior is really a
+ bug, or is expected but just improperly documented, or expected and
+ already documented;
+- producing a trace if it is relevant and you know how to do it;
+- producing another example exhibiting the same bug, or minimizing the
+ initial example using the bug minimizer mentioned above;
+- using `git bisect` to find the commit that introduced a regression;
+- fixing the bug if you have an idea of how to do so (see the
+ [following section](#code-changes)).
+
+Once you have some experience with the Coq issue tracker, you can
+request to join the **@coq/contributors** team (any member of the
+**@coq/core** team can do so using [this link][add-contributor]).
+Being in this team will grant you the following access:
+
+- **Updating labels:** every open issue and pull request should
+ ideally get one or several `kind:` and `part:` labels. In
+ particular, valid issues should generally get either a `kind: bug`
+ (the reported behavior can indeed be considered a bug, this can be
+ complemented with the `kind: anomaly` and `kind: regression` labels),
+ `kind: documentation` (e.g. if a reported behavior is expected but
+ improperly documented), `kind: enhancement` (a request for
+ enhancement of an existing feature), or `kind: feature` label (an
+ idea for a new feature).
+- **Creating new labels:** if you feel a `part:` label is missing, do
+ not hesitate to create it. If you are not sure, you may discuss it
+ with other contributors and developers on [Gitter][] first.
+- **Closing issues:** if a bug cannot be reproduced anymore, is a
+ duplicate, or should not be considered a bug report in the first
+ place, you should close it. When doing so, try putting an
+ appropriate `resolved:` label to indicate the reason. If the bug
+ has been fixed already, and you know in which version, you can add a
+ milestone to it, even a milestone that's already closed, instead of
+ a `resolved:` label. When closing a duplicate issue, try to add all
+ the additional info that could be gathered to the original issue.
+- **Editing issue titles:** you may want to do so to better reflect
+ the current understanding of the underlying issue.
+- **Editing comments:** feel free to do so to fix typos and formatting
+ only (in particular, some old comments from the Bugzilla era or
+ before are not properly formatted). You may also want to edit the
+ OP's initial comment (a.k.a. body of the issue) to better reflect
+ the current understanding of the issue, especially if the discussion
+ is long. If you do so, only add to the original comment, and mark
+ it clearly with an `EDITED by @YourNickname:`.
+- **Hiding comments:** when the discussion has become too long, this
+ can be done to hide irrelevant comments (off-topic, outdated or
+ resolved sub-issues).
+- **Deleting things:** please don't delete any comment or issue, our
+ policy doesn't allow for comments to be deleted, unless done by the
+ community moderators. You should hide them instead. An audit log
+ is available to track deleted items if needed (but does not allow
+ recovering them).
+- **Pushing a branch or a tag to the main repository:** please push
+ changes to your own fork rather than the main repository. (Branches
+ pushed to the main repository will be removed promptly and without
+ notice.)
+
+Yet to be fully specified: use of priority, difficulty, `help wanted`,
+and `good first issue` labels, milestones, assignments, and GitHub
+projects.
+
+## Code changes ##
+
+### Using GitHub pull requests ###
+
+If you want to contribute a documentation update, bug fix or feature
+yourself, pull requests (PRs) on the [GitHub
+repository][coq-repository] are the way to contribute directly to the
+Coq implementation (all changes, even the smallest changes from core
+developers, go through PRs). You will need to create a fork of the
+repository on GitHub and push your changes to a new "topic branch" in
+that fork (instead of using an existing branch name like `master`).
+
+PRs should always target the `master` branch. Make sure that your
+copy of this branch is up-to-date before starting to do your changes,
+and that there are no conflicts before submitting your PR. If you
+need to fix conflicts, we generally prefer that you rebase your branch
+on top of `master`, instead of creating a merge commit.
+
+If you are not familiar with `git` or GitHub, Sections [Git
+documentation, tips and tricks](#git-documentation-tips-and-tricks),
+and [GitHub documentation, tips and
+tricks](#github-documentation-tips-and-tricks), should be helpful (and
+even if you are, you might learn a few tricks).
+
+Once you have submitted your PR, it may take some time to get
+feedback, in the form of reviews from maintainers, and test results
+from our continuous integration system. Our code owner system will
+automatically request reviews from relevant maintainers. Then, one
+maintainer should self-assign the PR (if that does not happen after a
+few days, feel free to ping the maintainers that were requested a
+review). The PR assignee will then become your main point of contact
+for handling the PR: they should ensure that everything is in order
+and merge when it is the case (you can ping them if the PR is ready
+from your side but nothing happens for a few days).
+
+After your PR is accepted and merged, it may get backported to a
+stable branch if appropriate, and will eventually make it to a
+release. You do not have to worry about this, it is the role of the
+assignee and the release manager to do so (see Section [Release
+management](#release-management)). The milestone should give you an
+indication of when to expect your change to be released (this could be
+several months after your PR is merged). That said, you can start
+using the latest Coq `master` branch to take advantage of all the new
+features, improvements, and fixes.
+
+### Taking feedback into account ###
+
+#### Understanding automatic feedback ####
+
+When you open or update a PR, you automatically get some feedback: we
+have a bot whose job will be to push a branch to our GitLab mirror to
+run some continuous integration (CI) tests. The tests will run on a
+commit merging your branch with the base branch, so if there is a
+conflict and this merge cannot be performed automatically, the bot
+will put a `needs: rebase` label, and the tests won't run.
+
+Otherwise, a large suite of tests will be run on GitLab, plus some
+additional tests on Azure for Windows and macOS compatibility.
+
+If a test fails on GitLab, you will see in the GitHub PR interface,
+both the failure of the whole pipeline, and of the specific failed
+job. Most of these failures indicate problems that should be
+addressed, but some can still be due to synchronization issues out of
+your control. In particular, if you get a failure in one of the
+tested plugins but you didn't change the Coq API, it is probably a
+transient issue and you shouldn't have to worry about it. In case of
+doubt, ask the reviewers.
+
+##### Test-suite failures #####
+
+If you broke the test-suite, you should get many failed jobs, because
+the test-suite is run multiple times in various settings. You should
+get the same failure locally by running `make test-suite` or `make -f
+Makefile.dune test-suite`. It's helpful to run this locally and
+ensure the test-suite is not broken before submitting a PR as this
+will spare a lot of runtime on distant machines.
+
+To learn more about the test-suite, you should refer to its
+[README][test-suite-README].
+
+##### Linter failures #####
+
+We have a linter that checks a few different things:
+
+- **Every commit can build.** This is an important requirement to
+ allow the use of `git bisect` in the future. It should be possible
+ to build every commit, and in principle even the test-suite should
+ pass on every commit (but this isn't tested in CI because it would
+ take too long). A good way to test this is to use `git rebase
+ master --exec "make -f Makefile.dune check"`.
+- **No tabs or end-of-line spaces on updated lines**. We are trying
+ to get rid of all tabs and all end-of-line spaces from the code base
+ (except in some very special files that need them). This checks not
+ only that you didn't introduce new ones, but also that updated lines
+ are clean (even if they were there before). You can avoid worrying
+ about tabs and end-of-line spaces by installing our [pre-commit git
+ hook][git-hook], which will fix these issues at commit time.
+ Running `./configure` once will install this hook automatically
+ unless you already have a pre-commit hook installed. If you are
+ encountering these issues nonetheless, you can fix them by rebasing
+ your branch with `git rebase --whitespace=fix`.
+- **All files should end with a single newline**. See the section
+ [Style guide](#style-guide) for additional style recommendations.
You may run the linter yourself with `dev/lint-repository.sh`.
-Here are a few tags Coq developers may add to your PR and what they mean. In
-general feedback and requests for you as the pull request author will be in
-the comments and tags are only used to organize pull requests.
-
-- [needs: rebase][rebase-label] indicates the PR should be rebased on top of
- the latest base branch (usually `master`). See the
- [GitHub documentation](https://help.github.com/articles/about-git-rebase/)
- for a brief introduction to using `git rebase`.
- We generally ask you to rebase only when there are merge conflicts or if
- the PR has been opened for a long time and we want a fresh CI run.
-- [needs: fixing][fixing-label] indicates the PR needs a fix, as discussed in the comments.
-- [needs: benchmarking][benchmarking-label] and [needs: testing][testing-label]
+##### Plugin failures #####
+
+If you did change the Coq API, then you may have broken a plugin.
+After ensuring that the failure comes from your change, you will have
+to provide a fix to the plugin, and the PR assignee will have to
+ensure that this fix is merged in the plugin simultaneously with your
+PR on the Coq repository.
+
+If your changes to the API are not straightforward, you should also
+document them in `dev/doc/changes.md`.
+
+The [CI README (developer part)][CI-README-developers] contains more
+information on how to fix plugins, test and submit your changes, and
+how you can anticipate the results of the CI before opening a PR.
+
+##### Library failures #####
+
+Such a failure can indicate either a bug in your branch, or a breaking
+change that you introduced voluntarily. All such breaking changes
+should be properly documented in the [user changelog][user-changelog].
+Furthermore, a backward-compatible fix should be found, and this fix
+should be merged in the broken projects *before* your PR to the Coq
+repository can be. You can use the same documentation as above to
+learn about testing and fixing locally the broken libraries.
+
+#### Understanding reviewers' feedback ####
+
+The reviews you get are highly dependent on the kind of changes you
+did. In any case, you should always remember that reviewers are
+friendly volunteers that do their best to help you get your changes in
+(and should abide by our [Code of Conduct][Code-of-Conduct]). But at
+the same time, they try to ensure that code that is introduced or
+updated is of the highest quality and will be easy to maintain in the
+future, and that's why they may ask you to perform small or even large
+changes. If you need a clarification, do not hesitate to ask.
+
+Here are a few labels that reviewers may add to your PR to track its
+status. In general, this will come in addition to comments from the
+reviewers, with specific requests.
+
+- [needs: rebase][needs-rebase] indicates the PR should be rebased on
+ top of the latest version of the base branch (usually `master`). We
+ generally ask you to rebase only when there are merge conflicts or
+ if the PR has been opened for a long time and we want a fresh CI
+ run.
+- [needs: fixing][needs-fixing] indicates the PR needs a fix, as
+ discussed in the comments.
+- [needs: documentation][needs-documentation] indicates the PR
+ introduces changes that should be documented before it can be merged.
+- [needs: changelog entry][needs-changelog] indicates the PR introduces
+ changes that should be documented in the [user
+ changelog][user-changelog].
+- [needs: benchmarking][needs-benchmarking] and [needs: testing][needs-testing]
indicate the PR needs testing beyond what the test suite can handle.
For example, performance benchmarking is currently performed with a different
infrastructure ([documented in the wiki][jenkins-doc]). Unless some followup
- is specifically requested you aren't expected to do this additional testing.
+ is specifically requested, you aren't expected to do this additional testing.
-To learn more about the merging process, you can read the
-[merging documentation for Coq maintainers](dev/doc/MERGING.md).
+More generally, such labels should come with a description that should
+allow you to understand what they mean.
-[rebase-label]: https://github.com/coq/coq/pulls?utf8=%E2%9C%93&q=is%3Aopen%20is%3Apr%20label%3A%22needs%3A%20rebase%22
-[fixing-label]: https://github.com/coq/coq/pulls?q=is%3Aopen+is%3Apr+label%3A%22needs%3A+fixing%22
-[benchmarking-label]: https://github.com/coq/coq/pulls?q=is%3Aopen+is%3Apr+label%3A%22needs%3A+benchmarking%22
-[testing-label]: https://github.com/coq/coq/pulls?q=is%3Aopen+is%3Apr+label%3A%22needs%3A+testing%22
+#### Fixing your branch ####
-[jenkins-doc]: https://github.com/coq/coq/wiki/Jenkins-(automated-benchmarking)
+If you have changes to perform before your PR can be merged, you might
+want to do them in separate commits at first to ease the reviewers'
+task, but we generally appreciate that they are squashed with the
+commits that they fix before merging. This is especially true of
+commits fixing previously introduced bugs or failures.
+
+### Improving the official documentation ###
+
+The documentation is usually a good place to start contributing,
+because you can get used to the pull request submitting and review
+process, without needing to learn about the code source of Coq at the
+same time.
+
+The official documentation is formed of two components:
+
+- the [reference manual][refman],
+- the [documentation of the standard library][stdlib-doc].
-## Documentation
+The sources of the reference manual are located in the
+[`doc/sphinx`][refman-sources] directory. They are written in rst
+(Sphinx) format with some Coq-specific extensions, which are
+documented in the [README][refman-README] in the above directory.
+This README was written to be read from beginning to end. As soon as your
+edits to the documentation are more than changing the textual content,
+we strongly encourage you to read this document.
-Currently the process for contributing to the documentation is the same as
-for changing anything else in Coq, so please submit a pull request as
-described above.
+The documentation of the standard library is generated with
+[coqdoc][coqdoc-documentation] from the comments in the sources of the
+standard library.
-Our issue tracker includes a flag to mark bugs related to documentation.
-You can view a list of documentation-related bugs using a
-[GitHub issue search](https://github.com/coq/coq/issues?q=is%3Aopen+is%3Aissue+label%3A%22kind%3A+documentation%22).
-Many of these bugs can be fixed by contributing writing, without knowledge
-of Coq's OCaml source code.
+The [README in the `doc` directory][doc-README] contains more
+information about the documentation's build dependencies, and the
+`make` targets.
-The sources for the [Coq reference manual](https://coq.inria.fr/distrib/current/refman/)
-are at [`doc/sphinx`](/doc/sphinx). These are written in reStructuredText
-and compiled to HTML and PDF with [Sphinx](http://www.sphinx-doc.org/).
+You can browse through the list of open documentation issues using the
+[kind: documentation][kind-documentation] label, or the [user
+documentation GitHub project][documentation-github-project] (you can
+look in particular at the "Writing" and "Fixing" columns).
-You will find information on how to build the documentation in
-[`doc/README.md`](doc/README.md) and information about the specificities of
-the Coq Sphinx format in [`doc/sphinx/README.rst`](doc/sphinx/README.rst).
+### Contributing to the standard library ###
-You may also contribute to the informal documentation available in
-[Cocorico](https://github.com/coq/coq/wiki) (the Coq wiki), and the
-[Coq FAQ](https://github.com/coq/coq/wiki/The-Coq-FAQ). Both of these are
-editable by anyone with a GitHub account.
+Contributing to the standard library is also made easier by not having
+to learn about Coq's internals, and its implementation language.
-## Where to get help (with the Coq source code, or anything else)
+Due to the compatibility constraints created by the many projects that
+depend on it, proposing breaking changes, such as changing a
+definition, may frequently be rejected, or at the very least might
+take a long time before getting approved and merged. This does not
+mean that you cannot try. On the other hand, contributing new lemmas
+on existing definitions and cleaning up existing proofs are likely to
+be accepted. Contributing new operations on existing types is also
+likely to be accepted in many cases. In case of doubt, ask in an
+issue before spending too much time preparing your PR.
-We have a [Discourse forum][] (see in particular the [Coq development category][])
-and a [Gitter chat][]. Feel free to join any of them and ask questions.
+If you create a new file, it needs to be listed in
+`doc/stdlib/index-list.html`.
+
+Add coqdoc comments to extend the [standard library
+documentation][stdlib-doc]. See the [coqdoc
+documentation][coqdoc-documentation] to learn more.
+
+### Fixing bugs and performing small changes ###
+
+Just open a PR with your fix. If it is not yet completed, do not
+hesitate to open a [*draft PR*][GitHub-draft-PR] to get early
+feedback, and talk to developers on [Gitter][].
+
+It is generally a good idea to add a regression test to the
+test-suite. See the test-suite [README][test-suite-README] for how to
+do so.
+
+Small fixes do not need any documentation, or changelog update. New,
+or updated, user-facing features, and major bug fixes do. See above
+on how to contribute to the documentation, and the README in
+[`doc/changelog`][user-changelog] for how to add a changelog entry.
+
+### Proposing large changes: Coq Enhancement Proposals ###
+
+You are always welcome to open a PR for a change of any size.
+However, you should be aware that the larger the change, the higher
+the chances it will take very long to review, and possibly never get
+merged.
+
+So it is recommended that before spending a lot of time coding, you
+seek feedback from maintainers to see if your change would be
+supported, and if they have recommendation about its implementation.
+You can do this informally by opening an issue, or more formally by
+producing a design document as a [Coq Enhancement Proposal][CEP].
+
+Another recommendation is that you do not put several unrelated
+changes (even if you produced them together) in the same PR. In
+particular, make sure you split bug fixes into separate PRs when this
+is possible. More generally, smaller-sized PRs, or PRs changing less
+components, are more likely to be reviewed and merged promptly.
+
+### Collaborating on a pull request ###
+
+Beyond making suggestions to a PR author during the review process,
+you may want to collaborate further by checking out the code, making
+changes, and pushing them. There are two main ways of doing this:
+
+- **Pull requests on pull requests:** You can checkout the PR branch
+ (GitHub provides the link to the remote to pull from and the branch
+ name on the top and the bottom of the PR discussion thread),
+ checkout a new personal branch from there, do some changes, commit
+ them, push to your fork, and open a new PR on the PR author's fork.
+- **Pushing to the PR branch:** If the PR author has not unchecked the
+ "Allow edit from maintainers" checkbox, and you have write-access to
+ the repository (i.e. you are in the **@coq/contributors** team),
+ then you can also push (and even force-push) directly to the PR
+ branch, on the main author's fork. Obviously, don't do it without
+ coordinating with the PR author first (in particular, in case you
+ need to force-push).
+
+When several people have co-authored a single commit (e.g. because
+someone fixed something in a commit initially authored by someone
+else), this should be reflected by adding ["Co-authored-by:"
+tags][GitHub-co-authored-by] at the end of the commit message. The
+line should contain the co-author name and committer e-mail address.
+
+## Becoming a maintainer ##
+
+### Reviewing pull requests ###
+
+You can start reviewing PRs as soon as you feel comfortable doing so
+(anyone can review anything, although some designated reviewers
+will have to give a final approval before a PR can be merged, as is
+explained in the next sub-section).
+
+Reviewers should ensure that the code that is changed or introduced is
+in good shape and will not be a burden to maintain, is unlikely to
+break anything, or the compatibility-breakage has been identified and
+validated, includes documentation, changelog entries, and test files
+when necessary. Reviewers can use labels, or change requests to
+further emphasize what remains to be changed before they can approve
+the PR. Once reviewers are satisfied (regarding the part they
+reviewed), they should formally approve the PR, possibly stating what
+they reviewed.
+
+That being said, reviewers should also make sure that they do not make
+the contributing process harder than necessary: they should make it
+clear which comments are really required to perform before approving,
+and which are just suggestions. They should strive to reduce the
+number of rounds of feedback that are needed by posting most of their
+comments at the same time. If they are opposed to the change, they
+should clearly say so from the beginning to avoid the contributor
+spending time in vain.
+
+### Merging pull requests ###
+
+Our [CODEOWNERS][] file associates a team of maintainers, or a
+principal and secondary maintainers, to each component. They will be
+responsible for self-assigning and merging PRs (they didn't co-author)
+that change this component. When several components are changed in
+significant ways, at least a maintainer (other than the PR author)
+must approve the PR for each affected component before it can be
+merged, and one of them has to assign the PR, and merge it when it is
+time. Before merging, the assignee must also select a milestone for
+the PR (see also Section [Release management](#release-management)).
+
+If you feel knowledgeable enough to maintain a component, and have a
+good track record of contributing to it, we would be happy to have you
+join one of the maintainer teams.
+
+The merging process is described in more details in [this
+document][MERGING].
+
+The people with merging powers (either because listed as a principal
+or secondary maintainer in [CODEOWNERS][], or because member of a
+maintainer team), are the members of the **@coq/pushers** team
+([member list][coq-pushers] only visible to the Coq organization
+members because of a limitation of GitHub).
+
+### Core development team ###
+
+The core developers are the active developers with a lengthy and
+significant contribution track record. They are the ones with admin
+powers over the Coq organization, and the ones who take part in votes
+in case of conflicts to take a decision (rare). One of them is
+designated as a development coordinator, and has to approve the
+changes in the core team membership (until we get a more formal
+joining and leaving process).
+
+The core developers are the members of the **@coq/core** team ([member
+list][coq-core] only visible to the Coq organization members because
+of a limitation of GitHub).
+
+## Release management ##
+
+Coq's major release cycles generally span about six months, with about
+4-5 months of development, and 1-2 months of stabilization /
+beta-releases. The release manager (RM) role is a rolling position
+among core developers. The [release plan][release-plan] is published
+on the wiki.
+
+Development of new features, refactorings, deprecations and clean-ups
+always happens on `master`. Stabilization starts by branching
+(creating a new stable `v...` branch from the current `master`), which
+marks the beginning of a feature freeze (new features will continue to
+be merged into `master` but won't make it for the upcoming major
+release, but only for the next one).
+
+After branching, most changes are introduced in the stable branch by a
+backporting process. PR authors and assignee can signal a desire to
+have a PR backported by selecting an appropriate milestone. Most of
+the time, the choice of milestone is between two options: the next
+major version that has yet to branch from `master`, or the next
+version (beta, final, or patch-level release) of the active stable
+branch. In the end, it is the RM who decides whether to follow or not
+the recommendation of the PR assignee, and who backports PRs to the
+stable branch.
+
+Very specific changes that are only relevant for the stable branch and
+not for the `master` branch can result in a PR targeting the stable
+branch instead of `master`. In this case, the RM is the only one who
+can merge the PR, and they may even do so if they are the author of
+the PR. Examples of such PRs include bug fixes to a feature that has
+been removed in `master`, and PRs from the RM changing the version
+number in preparation for the next release.
+
+Some automation is in place to help the RM in their task: a GitHub
+project is created at branching time to manage PRs to backport; when a
+PR is merged in a milestone corresponding to the stable branch, our
+bot will add this PR in a "Request inclusion" column in this project;
+the RM can browse through the list of PRs waiting to be backported in
+this column, possibly reject some of them by simply removing the PR
+from the column (in which case, the bot will update the PR milestone),
+and proceed to backport others; when a backported PR is pushed to the
+stable branch, the bot moves the PR from the "Request inclusion"
+column to a "Shipped" column.
+
+More information about the RM tasks can be found in the [release
+process checklist][RM-checklist].
+
+### Packaging Coq ###
+
+The RM role does not include the task of making Coq available through
+the various package managers out there: several contributors (most
+often external to the development team) take care of this, and we
+thank them for this. If your preferred package manager does not
+include Coq, it is a very worthy contribution to make it available
+there. But be careful not to let a package get outdated, as this
+could lead some users to install an outdated version of Coq without
+even being aware of it.
+
+This [Repology page][repology-coq] lists the versions of Coq which are
+packaged in many repositories, although it is missing information on
+some repositories, like opam.
+
+The Windows and macOS installers are auto-generated in our CI, and
+this infrastructure has dedicated maintainers within the development
+team.
+
+## Additional resources ##
+
+### Developer documentation ###
+
+#### Where to find the resources ####
+
+- You can find developer resources in the `dev` directory, and more
+ specifically developer documentation in `dev/doc`. The
+ [README][dev-README] in the `dev` directory lists what's available.
+
+ For example, [`dev/doc/README.md`][dev-doc-README] is a beginner's
+ guide to hacking Coq, and documentation on debugging Coq can be
+ found in [`dev/doc/debugging.md`][debugging-doc].
+
+- When it makes sense, the documentation is kept even closer to the
+ sources, in README files in various directories (e.g. the test-suite
+ [README][test-suite-README] or the refman [README][refman-README]).
+
+- Documentation of the Coq API is written directly in comments in
+ `.mli` files. You can browse it on [the Coq website][api-doc], or
+ rebuild it locally (`make -f Makefile.dune apidoc`, requires `odoc`
+ and `dune`).
+
+- A plugin tutorial is located in
+ [`doc/plugin_tutorial`][plugin-tutorial].
+
+- The Coq [wiki][] contains additional developer resources.
+
+#### Building Coq ####
+
+The list of dependencies can be found in the first section of the
+[`INSTALL`](INSTALL) file.
+
+Today, the recommended method for building Coq is to use `dune`. Run
+`make -f Makefile.dune` to get help on the various available targets.
+Additional documentation can be found in
+[`dev/doc/build-system.dune.md`][dev-doc-dune], and in [the official
+Dune documentation][dune-doc].
+
+The legacy make-based system is still available. If you wish to use
+it, you need to start by running `./configure -profile devel`. Most
+of the available targets are not documented, so you will need to ask.
+
+#### Continuous integration ####
+
+Continuous integration (CI) testing is key in ensuring that the
+`master` branch is kept in a well-functioning state at all times, and
+that no accidental compatibility breakages are introduced. Our CI is
+quite extensive since it includes testing many external projects, some
+of them taking more than an hour to compile. However, you can get
+partial results much more quickly (when our CI is not overloaded).
+
+The main documentation resources on our CI are:
+
+- the [README for users, i.e. plugin and library authors][CI-README-users];
+- the [README for developers, and contributors][CI-README-developers];
+- the README of the [user-overlays][] directory.
+
+Preparing an overlay (i.e. a patch to an external project that we test
+in our CI, to make it compile with the modified version of Coq in your
+branch) is a step that everyone goes through at some point. All you
+need to know to prepare an overlay manually is in the README in the
+[user-overlays][] directory. You might want to use some additional
+tooling such as the `make ci-*` targets of `Makefile.ci`, the Nix
+support for getting the dependencies of the external projects (see the
+README in [`dev/ci/nix`](dev/ci/nix)), and the (so far undocumented)
+`dev/tools/create_overlays.sh` script.
+
+More work is to be done on understanding how each developer proceeds
+to prepare overlays, and propose a simplified and documented
+procedure.
+
+We also have a benchmarking infrastructure, which is documented [on
+the wiki][jenkins-doc].
+
+#### Code owners, issue and pull request templates ####
+
+These files can be found in the [`.github`](.github) directory. The
+templates are particularly useful to remind contributors what
+information we need for them, and, in the case of PRs, to update the
+documentation, changelog, and test-suite when relevant.
+
+GitHub now supports setting up multiple issue templates, and we could
+use this to define distinct requirements for various kinds of bugs,
+enhancement and feature requests.
+
+#### Style guide ####
+
+There exists an [old style guide][old-style-guide] whose content is
+still mostly relevant. Yet to be done: extract the parts that are
+most relevant, and put them in this section instead.
+
+We don't use a code formatter at the current time, and we are
+reluctant to merge changes to parts of the code that are unchanged
+aside from formatting. However, it is still a good idea if you don't
+know how to format a block of code to use the formatting that
+[ocamlformat][] would give.
+
+#### OCaml resources ####
+
+You can find lots of OCaml resources on <http://ocaml.org/>, including
+documentation, a Discourse forum, the package archive, etc. You may
+also want to refer to the [Dune documentation][dune-doc].
+
+Another resource is <https://ocamlverse.github.io/>, especially its
+[community page][ocamlverse-community], which lists the various OCaml
+discussion platforms.
+
+#### Git documentation, tips and tricks ####
+
+Lots of resources about git, the version control system, are available
+on the web, starting with the [official website][git].
+
+We recommend a setup with two configured remotes, one for the official
+Coq repository, called `upstream`, and one for your fork, called
+`origin`. Here is a way to do this for a clean clone:
+
+``` shell
+git clone https://github.com/coq/coq -o upstream
+cd coq
+git remote add origin git@github.com:$YOURNAME/coq
+# Make sure you click the fork button on GitHub so that this repository exists
+cp dev/tools/pre-commit .git/hooks/ # Setup the pre-commit hook
+```
+
+Then, if you want to prepare a fix:
+
+``` shell
+# Make sure we start from an up-to-date master
+git checkout master
+git pull --ff-only # If this fails, then your master branch is messy
+git checkout -b my-topic-branch
+# Modify some files
+git add .
+# Every untracked or modified file will be included in the next commit
+# You can also replace the dot with an explicit list of files
+git commit -m "My commit summary.
+
+You can add more information on multiple lines,
+but you need to skip a line first."
+git push -u origin my-topic-branch
+# Next time you push to this branch, you can just do git push
+```
+
+When you push a new branch for the first time, GitHub gives you a link
+to open a PR.
+
+If you need to fix the last commit in your branch (typically, if your
+branch has a single commit on top of `master`), you can do so with
+
+```
+git add .
+git commit --amend --no-edit
+```
+
+If you need to fix another commit in your branch, or if you need to
+fix a conflict with `master`, you will need to learn about `git rebase`.
+GitHub provides [a short introduction][GitHub-rebase] to `git rebase`.
+
+#### GitHub documentation, tips and tricks ####
+
+GitHub has [extensive documentation][GitHub-doc] about everything you
+can do on the platform, and tips about using `git` as well. See in
+particular, [how to configure your commit e-mail
+address][GitHub-commit-email] and [how to open a PR from a
+fork][GitHub-PR-from-fork].
+
+##### Watching the repository #####
+
+["Watching" this repository][GitHub-watching] can result in a very
+large number of notifications. We recommend that you either [configure
+your mailbox][notification-email] to handle incoming notifications
+efficiently, or read your notifications within a web browser. You
+can configure how you receive notifications in [your GitHub
+settings][GitHub-notification-settings], and you can use the GitHub
+interface to mark as read, save for later, or mute threads. You can
+also manage your GitHub web notifications using a tool such as
+[Octobox][].
+
+#### GitLab documentation, tips and tricks ####
+
+We use GitLab mostly for its CI service. The [Coq organization on
+GitLab][GitLab-coq] hosts a number of CI/CD-only mirrors. If you are
+a regular contributor, you can request access to it from [the
+organization page][GitLab-coq]: this will grant you permission to
+restart failing CI jobs.
+
+GitLab too has [extensive documentation][GitLab-doc], in particular on
+configuring CI.
+
+#### Coqbot ####
+
+Our bot sources can be found at <https://github.com/coq/bot>. Its
+documentation is still a work-in-progress.
+
+### Online forum and chat to talk to developers ###
+
+We have a [Discourse forum][Discourse] (see in particular the [Coq
+development category][Discourse-development-category]) and a [Gitter
+chat][Gitter]. Feel free to join any of them and ask questions.
People are generally happy to help and very reactive.
-[Coq development category]: https://coq.discourse.group/c/coq-development
-
-## Watching the repository
-
-["Watching" this repository](https://github.com/coq/coq/subscription)
-can result in a very large number of notifications. We advise that if
-you do, either [configure your mailbox](https://blog.github.com/2017-07-18-managing-large-numbers-of-github-notifications/#prioritize-the-notifications-you-receive)
-to handle incoming notifications efficiently, or you read your
-notifications within a web browser. You can configure how you receive
-notifications in [your GitHub settings](https://github.com/settings/notifications),
-you can use the GitHub interface to mark as read, save for later or
-mute threads. You can also manage your GitHub web notifications using
-a tool such as [Octobox](http://octobox.io/).
-
-## Contributing outside this repository
-
-There are many useful ways to contribute to the Coq ecosystem that don't
-involve the Coq repository.
-
-Tutorials to teach Coq, and especially to teach particular advanced features,
-are much appreciated. Some tutorials are listed on the
-[Coq website](https://coq.inria.fr/documentation). If you would like to add
-a link to this list, please make a pull request against the Coq website
-repository at <https://github.com/coq/www>.
-
-External plugins / libraries contribute to create a successful ecosystem
-around Coq. If your external development is mature enough, you may consider
-submitting it for addition to our CI tests. Refer to
-[`dev/ci/README-users.md`](dev/ci/README-users.md) for more information.
-
-Some Coq packages are not maintained by their authors anymore even if they
-were useful (for instance because they changed jobs). The coq-community
-organization is a place for people to take over the maintenance of such
-useful packages. If you want to contribute by becoming a maintainer, you can
-find a list of packages waiting for a maintainer [here](https://github.com/coq-community/manifesto/issues?q=is%3Aissue+is%3Aopen+label%3Amaintainer-wanted).
-You can also propose a package that is not listed. Find out more about
-coq-community in [the manifesto's README](https://github.com/coq-community/manifesto).
-
-Ask and answer questions on our [Discourse forum][], on [Stack Exchange][],
-and on the Coq IRC channel (`irc://irc.freenode.net/#coq`).
-
-[Coq issue tracker]: https://github.com/coq/coq/issues
-[Discourse forum]: https://coq.discourse.group/
-[Gitter chat]: https://gitter.im/coq/coq
-[Stack Exchange]: https://stackexchange.com/filters/299857/questions-tagged-coq-on-stackexchange-sites
+Obviously, the issue tracker is also a good place to ask questions,
+especially if the development processes are unclear, or the developer
+documentation should be improved.
+
+### Coq remote working groups ###
+
+We semi-regularly (up to every month) organize remote working groups,
+which can be accessed through video-conference, and are most often
+live streamed on [YouTube][]. Summary notes and announcements of the
+next working group can be found [on the wiki][wiki-WG].
+
+These working groups are where important decisions are taken, most
+often by consensus, but also, if it is needed, by a vote of core
+developers.
+
+### Coq Users and Developers Workshops ###
+
+We have an annual gathering late Spring in France where most core
+developers are present, and whose objective is to help new
+contributors get started with the Coq codebase, provide help to plugin
+and library authors, and more generally have fun together.
+
+The list of past (and upcoming, when it's already planned) workshops
+can be found [on the wiki][wiki-CUDW].
+
+[add-contributor]: https://github.com/orgs/coq/teams/contributors/members?add=true
+[api-doc]: https://coq.github.io/doc/master/api/
+[CEP]: https://github.com/coq/ceps
+[CI-README-developers]: dev/ci/README-developers.md
+[CI-README-users]: dev/ci/README-users.md
+[Code-of-Conduct]: CODE_OF_CONDUCT.md
+[CODEOWNERS]: .github/CODEOWNERS
+[Codewars]: https://www.codewars.com/?language=coq
+[company-coq-issues]: https://github.com/cpitclaudel/company-coq/issues
+[Coq-Club]: https://sympa.inria.fr/sympa/arc/coq-club
+[coq-community-maintainer-wanted]: https://github.com/coq-community/manifesto/issues?q=is%3Aissue+is%3Aopen+label%3Amaintainer-wanted
+[coq-community-manifesto]: https://github.com/coq-community/manifesto
+[coq-community-wiki]: https://github.com/coq-community/manifesto/wiki
+[coq-core]: https://github.com/orgs/coq/teams/core/members
+[coqdoc-documentation]: https://coq.inria.fr/refman/practical-tools/utilities.html#documenting-coq-files-with-coqdoc
+[Coq-documentation]: https://coq.inria.fr/documentation
+[Coq-issue-tracker]: https://github.com/coq/coq/issues
+[Coq-package-index]: https://coq.inria.fr/packages
+[coq-pushers]: https://github.com/orgs/coq/teams/pushers/members
+[coq-repository]: https://github.com/coq/coq
+[Coq-website-repository]: https://github.com/coq/www
+[debugging-doc]: dev/doc/debugging.md
+[dev-doc-README]: dev/doc/README.md
+[dev-doc-dune]: dev/doc/build-system.dune.md
+[dev-README]: dev/README.md
+[Discourse]: https://coq.discourse.group/
+[Discourse-development-category]: https://coq.discourse.group/c/coq-development
+[doc-README]: doc/README.md
+[documentation-github-project]: https://github.com/coq/coq/projects/3
+[dune-doc]: https://dune.readthedocs.io/en/latest/
+[FAQ]: https://github.com/coq/coq/wiki/The-Coq-FAQ
+[git]: https://git-scm.com/
+[git-hook]: dev/tools/pre-commit
+[GitHub-co-authored-by]: https://github.blog/2018-01-29-commit-together-with-co-authors/
+[GitHub-commit-email]: https://help.github.com/en/articles/setting-your-commit-email-address-in-git
+[GitHub-doc]: https://help.github.com/
+[GitHub-draft-PR]: https://github.blog/2019-02-14-introducing-draft-pull-requests/
+[GitHub-markdown]: https://guides.github.com/features/mastering-markdown/
+[GitHub-notification-settings]: https://github.com/settings/notifications
+[GitHub-PR-from-fork]: https://help.github.com/en/articles/creating-a-pull-request-from-a-fork
+[GitHub-rebase]: https://help.github.com/articles/about-git-rebase/
+[GitHub-watching]: https://github.com/coq/coq/subscription
+[GitHub-wiki-extensions]: https://help.github.com/en/articles/editing-wiki-content
+[GitLab-coq]: https://gitlab.com/coq
+[GitLab-doc]: https://docs.gitlab.com/
+[Gitter]: https://gitter.im/coq/coq
+[JasonGross-coq-tools]: https://github.com/JasonGross/coq-tools
+[jenkins-doc]: https://github.com/coq/coq/wiki/Jenkins-(automated-benchmarking)
+[kind-documentation]: https://github.com/coq/coq/issues?q=is%3Aopen+is%3Aissue+label%3A%22kind%3A+documentation%22
+[master-doc]: https://coq.github.io/doc/master/refman/
+[MERGING]: dev/doc/MERGING.md
+[needs-benchmarking]: https://github.com/coq/coq/labels/needs%3A%20benchmarking
+[needs-changelog]: https://github.com/coq/coq/labels/needs%3A%20changelog%20entry
+[needs-documentation]: https://github.com/coq/coq/labels/needs%3A%20documentation
+[needs-fixing]: https://github.com/coq/coq/labels/needs%3A%20fixing
+[needs-rebase]: https://github.com/coq/coq/labels/needs%3A%20rebase
+[needs-testing]: https://github.com/coq/coq/labels/needs%3A%20testing
+[Nix]: https://github.com/coq/coq/wiki/Nix
+[notification-email]: https://blog.github.com/2017-07-18-managing-large-numbers-of-github-notifications/#prioritize-the-notifications-you-receive
+[OCaml-planet]: http://ocaml.org/community/planet/
+[ocamlformat]: https://github.com/ocaml-ppx/ocamlformat
+[ocamlverse-community]: https://ocamlverse.github.io/content/community.html
+[Octobox]: http://octobox.io/
+[old-style-guide]: dev/doc/style.txt
+[other-standard-libraries]: https://github.com/coq/stdlib2/wiki/Other-%22standard%22-libraries
+[plugin-tutorial]: doc/plugin_tutorial
+[ProofGeneral-issues]: https://github.com/ProofGeneral/PG/issues
+[Reddit]: https://www.reddit.com/r/Coq/
+[refman]: https://coq.inria.fr/refman
+[refman-sources]: doc/sphinx
+[refman-README]: doc/sphinx/README.rst
+[release-plan]: https://github.com/coq/coq/wiki/Release-Plan
+[repology-coq]: https://repology.org/project/coq/versions
+[RM-checklist]: dev/doc/release-process.md
+[Stack-Exchange]: https://stackexchange.com/filters/299857/questions-tagged-coq-on-stackexchange-sites
+[Stack-Overflow]: https://stackoverflow.com/questions/tagged/coq
+[stdlib-doc]: https://coq.inria.fr/stdlib/
+[test-suite-README]: test-suite/README.md
+[tools-website]: https://coq.inria.fr/related-tools.html
+[tools-wiki]: https://github.com/coq/coq/wiki/Tools
+[unreleased-changelog]: https://coq.github.io/doc/master/refman/changes.html#unreleased-changes
+[user-changelog]: doc/changelog
+[user-overlays]: dev/ci/user-overlays
+[wiki]: https://github.com/coq/coq/wiki
+[wiki-CUDW]: https://github.com/coq/coq/wiki/CoqImplementorsWorkshop
+[wiki-WG]: https://github.com/coq/coq/wiki/Coq-Working-Groups
+[YouTube]: https://www.youtube.com/channel/UCbJo6gYYr0OF18x01M4THdQ
diff --git a/Makefile.build b/Makefile.build
index c6223a6dbd..d1ed9a6f96 100644
--- a/Makefile.build
+++ b/Makefile.build
@@ -355,13 +355,44 @@ kernel/copcodes.ml: kernel/genOpcodeFiles.exe
COQPPCMO := $(addsuffix .cmo, $(addprefix coqpp/, coqpp_parse coqpp_lex))
coqpp/coqpp_parse.cmi: coqpp/coqpp_ast.cmi
+coqpp/coqpp_parser.cmo: coqpp_parser.cmi
coqpp/coqpp_parse.cmo: coqpp/coqpp_ast.cmi coqpp/coqpp_parse.cmi
coqpp/coqpp_lex.cmo: coqpp/coqpp_ast.cmi coqpp/coqpp_parse.cmo
-$(COQPP): $(COQPPCMO) coqpp/coqpp_main.ml
+$(COQPP): $(COQPPCMO) coqpp/coqpp_parser.mli coqpp/coqpp_parser.ml coqpp/coqpp_main.ml
$(SHOW)'OCAMLC -a $@'
$(HIDE)$(OCAMLC) -I coqpp $^ -linkall -o $@
+DOC_GRAMCMO := $(addsuffix .cmo, $(addprefix coqpp/, coqpp_parse coqpp_lex))
+
+$(DOC_GRAM): $(DOC_GRAMCMO) coqpp/coqpp_parser.mli coqpp/coqpp_parser.ml doc/tools/docgram/doc_grammar.ml
+ $(SHOW)'OCAMLC -a $@'
+ $(HIDE)$(OCAMLC) -I coqpp $^ -package str -linkall -linkpkg -o $@
+
+DOC_MLGS := */*.mlg plugins/*/*.mlg
+DOC_EDIT_MLGS := doc/tools/docgram/*_mlg
+DOC_RSTS := doc/sphinx/*.rst doc/sphinx/*/*.rst
+
+doc/tools/docgram/fullGrammar: $(DOC_GRAM) $(DOC_MLGS)
+ $(SHOW)'DOC_GRAM'
+ $(HIDE)$(DOC_GRAM) -short -no-warn $(DOC_MLGS)
+
+#todo: add a dependency of sphinx on orderedGrammar and of *.rst on orderedGrammar when we're ready
+doc/tools/docgram/orderedGrammar: $(DOC_GRAM) $(DOC_EDIT_MLGS)
+ $(SHOW)'DOC_GRAM_RSTS'
+ $(HIDE)$(DOC_GRAM) $(DOC_MLGS) $(DOC_RSTS)
+
+.PHONY: doc_gram doc_gram_rsts doc_gram_verify
+
+doc_gram: doc/tools/docgram/fullGrammar
+
+doc_gram_verify: doc/tools/docgram/fullGrammar
+ $(SHOW)'DOC_GRAM_VERIFY'
+ $(HIDE)$(DOC_GRAM) -short -no-warn -verify $(DOC_MLGS) $(DOC_RSTS)
+
+#todo: add dependency on $(DOC_RSTS) very soon
+doc_gram_rsts: doc/tools/docgram/orderedGrammar
+
###########################################################################
# Specific rules for Uint63
###########################################################################
diff --git a/Makefile.common b/Makefile.common
index b331484fb2..dd23d7dd2f 100644
--- a/Makefile.common
+++ b/Makefile.common
@@ -22,6 +22,7 @@ COQTOPBYTE:=bin/coqtop.byte$(EXE)
COQDEP:=bin/coqdep$(EXE)
COQPP:=bin/coqpp$(EXE)
+DOC_GRAM:=bin/doc_grammar$(EXE)
COQDEPBYTE:=bin/coqdep.byte$(EXE)
COQMAKEFILE:=bin/coq_makefile$(EXE)
COQMAKEFILEBYTE:=bin/coq_makefile.byte$(EXE)
@@ -42,7 +43,7 @@ COQMAKE_BOTH_TIME_FILES:=tools/make-both-time-files.py
COQMAKE_BOTH_SINGLE_TIMING_FILES:=tools/make-both-single-timing-files.py
TOOLS:=$(COQDEP) $(COQMAKEFILE) $(COQTEX) $(COQWC) $(COQDOC) $(COQC)\
- $(COQWORKMGR) $(COQPP)
+ $(COQWORKMGR) $(COQPP) $(DOC_GRAM)
TOOLS_HELPERS:=tools/CoqMakefile.in $(COQMAKE_ONE_TIME_FILE) $(COQTIME_FILE_MAKER)\
$(COQMAKE_BOTH_TIME_FILES) $(COQMAKE_BOTH_SINGLE_TIMING_FILES)
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index fd99dc6d18..862c54900f 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -58,7 +58,7 @@ jobs:
displayName: 'Install system dependencies'
env:
HOMEBREW_NO_AUTO_UPDATE: "1"
- HBCORE_DATE: "2019-06-18"
+ HBCORE_DATE: "2019-06-16"
HBCORE_REF: "944a5b7d83e4b81c749d93831b514607bdd2b6a1"
- script: |
@@ -81,6 +81,7 @@ jobs:
eval $(opam env)
./configure -prefix '$(Build.BinariesDirectory)' -warn-error yes -native-compiler no -coqide opt
+ make -j "$NJOBS" byte
make -j "$NJOBS"
displayName: 'Build Coq'
diff --git a/coqpp/coqpp_lex.mll b/coqpp/coqpp_lex.mll
index 040d5eee09..b1dc841a4c 100644
--- a/coqpp/coqpp_lex.mll
+++ b/coqpp/coqpp_lex.mll
@@ -100,6 +100,7 @@ rule extend = parse
| "COMMAND" { COMMAND }
| "TACTIC" { TACTIC }
| "EXTEND" { EXTEND }
+| "DOC_GRAMMAR" { DOC_GRAMMAR }
| "END" { END }
| "DECLARE" { DECLARE }
| "PLUGIN" { PLUGIN }
diff --git a/coqpp/coqpp_main.ml b/coqpp/coqpp_main.ml
index 7e869d6fe1..72b7cb2f84 100644
--- a/coqpp/coqpp_main.ml
+++ b/coqpp/coqpp_main.ml
@@ -11,6 +11,7 @@
open Lexing
open Coqpp_ast
open Format
+open Coqpp_parser
let fatal msg =
let () = Format.eprintf "Error: %s@\n%!" msg in
@@ -19,13 +20,6 @@ let fatal msg =
let dummy_loc = { loc_start = Lexing.dummy_pos; loc_end = Lexing.dummy_pos }
let mk_code s = { code = s; loc = dummy_loc }
-let pr_loc loc =
- let file = loc.loc_start.pos_fname in
- let line = loc.loc_start.pos_lnum in
- let bpos = loc.loc_start.pos_cnum - loc.loc_start.pos_bol in
- let epos = loc.loc_end.pos_cnum - loc.loc_start.pos_bol in
- Printf.sprintf "File \"%s\", line %d, characters %d-%d:" file line bpos epos
-
let print_code fmt c =
let loc = c.loc.loc_start in
(* Print the line location as a source annotation *)
@@ -33,26 +27,6 @@ let print_code fmt c =
let code_insert = asprintf "\n# %i \"%s\"\n%s%s" loc.pos_lnum loc.pos_fname padding c.code in
fprintf fmt "@[@<0>%s@]@\n" code_insert
-let parse_file f =
- let chan = open_in f in
- let lexbuf = Lexing.from_channel chan in
- let () = lexbuf.lex_curr_p <- { lexbuf.lex_curr_p with pos_fname = f } in
- let ans =
- try Coqpp_parse.file Coqpp_lex.token lexbuf
- with
- | Coqpp_lex.Lex_error (loc, msg) ->
- let () = close_in chan in
- let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
- fatal msg
- | Parsing.Parse_error ->
- let () = close_in chan in
- let loc = Coqpp_lex.loc lexbuf in
- let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
- fatal "syntax error"
- in
- let () = close_in chan in
- ans
-
module StringSet = Set.Make(String)
let string_split s =
diff --git a/coqpp/coqpp_parse.mly b/coqpp/coqpp_parse.mly
index d1f09c2d0b..5a0d54c60d 100644
--- a/coqpp/coqpp_parse.mly
+++ b/coqpp/coqpp_parse.mly
@@ -57,6 +57,8 @@ let parse_user_entry s sep =
in
parse s sep table
+let no_code = { code = ""; loc = { loc_start=Lexing.dummy_pos; loc_end=Lexing.dummy_pos} }
+
%}
%token <Coqpp_ast.code> CODE
@@ -64,7 +66,7 @@ let parse_user_entry s sep =
%token <string> IDENT QUALID
%token <string> STRING
%token <int> INT
-%token VERNAC TACTIC GRAMMAR EXTEND END DECLARE PLUGIN DEPRECATED ARGUMENT
+%token VERNAC TACTIC GRAMMAR DOC_GRAMMAR EXTEND END DECLARE PLUGIN DEPRECATED ARGUMENT
%token RAW_PRINTED GLOB_PRINTED
%token COMMAND CLASSIFIED STATE PRINTED TYPED INTERPRETED GLOBALIZED SUBSTITUTED BY AS
%token BANGBRACKET HASHBRACKET LBRACKET RBRACKET PIPE ARROW FUN COMMA EQUAL STAR
@@ -97,6 +99,7 @@ node:
| vernac_extend { $1 }
| tactic_extend { $1 }
| argument_extend { $1 }
+| doc_gram { $1 }
;
declare_plugin:
@@ -411,3 +414,58 @@ gram_tokens:
| gram_token { [$1] }
| gram_token gram_tokens { $1 :: $2 }
;
+
+doc_gram:
+| DOC_GRAMMAR doc_gram_entries
+ { GramExt { gramext_name = ""; gramext_globals=[]; gramext_entries = $2 } }
+
+doc_gram_entries:
+| { [] }
+| doc_gram_entry doc_gram_entries { $1 :: $2 }
+;
+
+doc_gram_entry:
+| qualid_or_ident COLON LBRACKET PIPE doc_gram_rules RBRACKET
+ { { gentry_name = $1; gentry_pos = None;
+ gentry_rules = [{ grule_label = None; grule_assoc = None; grule_prods = $5; }] } }
+| qualid_or_ident COLON LBRACKET RBRACKET
+ { { gentry_name = $1; gentry_pos = None;
+ gentry_rules = [{ grule_label = None; grule_assoc = None; grule_prods = []; }] } }
+;
+
+doc_gram_rules:
+| doc_gram_rule { [$1] }
+| doc_gram_rule PIPE doc_gram_rules { $1 :: $3 }
+;
+
+doc_gram_rule:
+| doc_gram_symbols_opt { { gprod_symbs = $1; gprod_body = no_code; } }
+;
+
+doc_gram_symbols_opt:
+| { [] }
+| doc_gram_symbols { $1 }
+| doc_gram_symbols SEMICOLON { $1 }
+;
+
+doc_gram_symbols:
+| doc_gram_symbol { [$1] }
+| doc_gram_symbols SEMICOLON doc_gram_symbol { $1 @ [$3] }
+;
+
+doc_gram_symbol:
+| IDENT EQUAL doc_gram_gram_tokens { (Some $1, $3) }
+| doc_gram_gram_tokens { (None, $1) }
+;
+
+doc_gram_gram_tokens:
+| doc_gram_gram_token { [$1] }
+| doc_gram_gram_token doc_gram_gram_tokens { $1 :: $2 }
+;
+
+doc_gram_gram_token:
+| qualid_or_ident { GSymbQualid ($1, None) }
+| LPAREN doc_gram_gram_tokens RPAREN { GSymbParen $2 }
+| LBRACKET doc_gram_rules RBRACKET { GSymbProd $2 }
+| STRING { GSymbString $1 }
+;
diff --git a/coqpp/coqpp_parser.ml b/coqpp/coqpp_parser.ml
new file mode 100644
index 0000000000..1fa47f554c
--- /dev/null
+++ b/coqpp/coqpp_parser.ml
@@ -0,0 +1,44 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Lexing
+open Coqpp_ast
+
+let pr_loc loc =
+ let file = loc.loc_start.pos_fname in
+ let line = loc.loc_start.pos_lnum in
+ let bpos = loc.loc_start.pos_cnum - loc.loc_start.pos_bol in
+ let epos = loc.loc_end.pos_cnum - loc.loc_start.pos_bol in
+ Printf.sprintf "File \"%s\", line %d, characters %d-%d:" file line bpos epos
+
+
+let fatal msg =
+ let () = Format.eprintf "Error: %s@\n%!" msg in
+ exit 1
+
+let parse_file f =
+ let chan = open_in f in
+ let lexbuf = Lexing.from_channel chan in
+ let () = lexbuf.lex_curr_p <- { lexbuf.lex_curr_p with pos_fname = f } in
+ let ans =
+ try Coqpp_parse.file Coqpp_lex.token lexbuf
+ with
+ | Coqpp_lex.Lex_error (loc, msg) ->
+ let () = close_in chan in
+ let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
+ fatal msg
+ | Parsing.Parse_error ->
+ let () = close_in chan in
+ let loc = Coqpp_lex.loc lexbuf in
+ let () = Printf.eprintf "%s\n%!" (pr_loc loc) in
+ fatal "syntax error"
+ in
+ let () = close_in chan in
+ ans
diff --git a/coqpp/coqpp_parser.mli b/coqpp/coqpp_parser.mli
new file mode 100644
index 0000000000..6e0a59687a
--- /dev/null
+++ b/coqpp/coqpp_parser.mli
@@ -0,0 +1,15 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+val pr_loc : Coqpp_ast.loc -> string
+
+val fatal : string -> unit
+
+val parse_file : string -> Coqpp_ast.t
diff --git a/coqpp/dune b/coqpp/dune
index a6edf4cf5b..12071c7c05 100644
--- a/coqpp/dune
+++ b/coqpp/dune
@@ -5,5 +5,5 @@
(name coqpp_main)
(public_name coqpp)
(package coq)
- (modules coqpp_ast coqpp_lex coqpp_parse coqpp_main)
+ (modules coqpp_ast coqpp_lex coqpp_parse coqpp_parser coqpp_main)
(modules_without_implementation coqpp_ast))
diff --git a/dev/README.md b/dev/README.md
index 9761f7b96f..4cda60a703 100644
--- a/dev/README.md
+++ b/dev/README.md
@@ -2,7 +2,7 @@
## Debugging and profiling (`dev/`)
-**More info on debugging: [`doc/debugging.md`](doc/debugging.md)**
+**More info on debugging: [`dev/doc/debugging.md`](doc/debugging.md)**
| File | Description |
| ---- | ----------- |
diff --git a/dev/build/windows/MakeCoq_MinGW.bat b/dev/build/windows/MakeCoq_MinGW.bat
index 78ca5e830a..c75acb0560 100755
--- a/dev/build/windows/MakeCoq_MinGW.bat
+++ b/dev/build/windows/MakeCoq_MinGW.bat
@@ -381,6 +381,7 @@ IF "%RUNSETUP%"=="Y" (
-P pkg-config ^
-P mingw64-%ARCH%-binutils,mingw64-%ARCH%-gcc-core,mingw64-%ARCH%-gcc-g++,mingw64-%ARCH%-windows_default_manifest ^
-P mingw64-%ARCH%-headers,mingw64-%ARCH%-runtime,mingw64-%ARCH%-pthreads,mingw64-%ARCH%-zlib ^
+ -P mingw64-%ARCH%-gmp,mingw64-%ARCH%-mpfr ^
-P adwaita-icon-theme ^
-P libiconv-devel,libunistring-devel,libncurses-devel ^
-P gettext-devel,libgettextpo-devel ^
@@ -389,6 +390,7 @@ IF "%RUNSETUP%"=="Y" (
-P gtk-update-icon-cache ^
-P libtool,automake ^
-P intltool ^
+ -P bison,flex ^
%EXTRAPACKAGES% ^
|| GOTO ErrorExit
diff --git a/dev/build/windows/makecoq_mingw.sh b/dev/build/windows/makecoq_mingw.sh
index 0699e2bd44..0c8213b8f5 100755
--- a/dev/build/windows/makecoq_mingw.sh
+++ b/dev/build/windows/makecoq_mingw.sh
@@ -315,7 +315,7 @@ function get_expand_source_tar {
find "$(ls)" -mindepth 1 -maxdepth 1 -exec mv -- "{}" . \;
else
echo "Unzip strip count not supported"
- return 1
+ exit 1
fi
else
logn untar tar xvaf "$TARBALLS/$name.$3" --strip $strip
@@ -323,10 +323,11 @@ function get_expand_source_tar {
# Patch if patch file exists
# First try specific patch file name then generic patch file name
+ # Note: set -o errexit does not work inside a function called in an if, so exit explicitly.
if [ -f "$PATCHES/$name.patch" ] ; then
- log1 patch -p1 -i "$PATCHES/$name.patch"
+ log1 patch -p1 -i "$PATCHES/$name.patch" || exit 1
elif [ -f "$PATCHES/$basename.patch" ] ; then
- log1 patch -p1 -i "$PATCHES/$basename.patch"
+ log1 patch -p1 -i "$PATCHES/$basename.patch" || exit 1
fi
# Go back to base folder
@@ -1146,7 +1147,8 @@ function make_menhir {
make_ocaml
make_findlib
make_ocamlbuild
- if build_prep http://gallium.inria.fr/~fpottier/menhir menhir-20180530 tar.gz 1 ; then
+ # This is the version required by latest CompCert
+ if build_prep https://gitlab.inria.fr/fpottier/menhir/-/archive/20190626 menhir-20190626 tar.gz 1 ; then
# Note: menhir doesn't support -j 8, so don't pass MAKE_OPT
log2 make all PREFIX="$PREFIXOCAML"
log2 make install PREFIX="$PREFIXOCAML"
@@ -1154,6 +1156,29 @@ function make_menhir {
fi
}
+##### CAMLP5 Ocaml Preprocessor #####
+
+function make_camlp5 {
+ make_ocaml
+ make_findlib
+
+ if build_prep https://github.com/camlp5/camlp5/archive rel707 tar.gz 1 camlp5-rel707; then
+ logn configure ./configure
+ # Somehow my virus scanner has the boot.new/SAVED directory locked after the move for a second => repeat until success
+ sed -i 's/mv boot.new boot/until mv boot.new boot; do sleep 1; done/' Makefile
+ # shellcheck disable=SC2086
+ log1 make world.opt $MAKE_OPT
+ log2 make install
+ log2 make clean
+ # For some reason META is not built / copied, but it is required
+ log2 make -C etc META
+ mkdir -p "$PREFIXOCAML/libocaml/site-lib/camlp5/"
+ cp etc/META "$PREFIXOCAML/libocaml/site-lib/camlp5/"
+ log2 make clean
+ build_post
+ fi
+}
+
##### LABLGTK Ocaml GTK binding #####
# Note: when rebuilding lablgtk by deleting the .finished file,
@@ -1805,8 +1830,9 @@ function make_addon_coquelicot {
installer_addon_dependency_beg coquelicot
make_addon_ssreflect
installer_addon_dependency_end
- if build_prep_overlay Coquelicot; then
+ if build_prep_overlay coquelicot; then
installer_addon_section coquelicot "Coquelicot" "Coq library for real analysis" ""
+ logn autogen ./autogen.sh
logn configure ./configure --libdir="$PREFIXCOQ/lib/coq/user-contrib/Coquelicot"
logn remake ./remake
logn remake-install ./remake install
@@ -1867,6 +1893,84 @@ function make_addon_quickchick {
fi
}
+# Flocq: Floating point library
+
+function make_addon_flocq {
+ if build_prep_overlay Flocq; then
+ installer_addon_section flocq "Flocq" "Coq library for floating point arithmetic" ""
+ logn autogen ./autogen.sh
+ logn configure ./configure
+ logn remake ./remake --jobs=$MAKE_THREADS
+ logn install ./remake install
+ build_post
+ fi
+}
+
+# Coq-Interval: interval arithmetic and inequality proofs
+
+function make_addon_interval {
+ installer_addon_dependency_beg interval
+ make_addon_mathcomp
+ make_addon_coquelicot
+ make_addon_bignums
+ make_addon_flocq
+ installer_addon_dependency_end
+ if build_prep_overlay interval; then
+ installer_addon_section interval "Interval" "Coq library and tactic for proving real inequalities" ""
+ logn autogen ./autogen.sh
+ logn configure ./configure
+ logn remake ./remake --jobs=$MAKE_THREADS
+ logn install ./remake install
+ build_post
+ fi
+}
+
+# Gappa: Automatic generation of arithmetic proofs (mostly on limited precision arithmetic)
+
+function install_boost {
+ # The extra tar parameter extracts only the boost headers, not the boost library source code (which is huge and takes a long time)
+ if build_prep https://dl.bintray.com/boostorg/release/1.69.0/source boost_1_69_0 tar.gz 1 boost_1_69_0 boost boost_1_69_0/boost; then
+ # Move extracted boost folder where mingw-gcc can find it
+ mv boost /usr/$TARGET_ARCH/sys-root/mingw/include
+ build_post
+ fi
+}
+
+function copy_gappa_dlls {
+ copy_coq_dll LIBGMP-10.DLL
+ copy_coq_dll LIBMPFR-6.DLL
+ copy_coq_dll LIBSTDC++-6.DLL
+}
+
+function make_addon_gappa_tool {
+ install_boost
+ if build_prep_overlay gappa_tool; then
+ installer_addon_section gappa_tool "Gappa tool" "Stand alone tool for automated generation of numerical arithmetic proofs" ""
+ logn autogen ./autogen.sh
+ logn configure ./configure --build="$HOST" --host="$HOST" --target="$TARGET" --prefix="$PREFIXCOQ"
+ logn remake ./remake --jobs=$MAKE_THREADS
+ logn install ./remake -d install
+ log1 copy_gappa_dlls
+ build_post
+ fi
+}
+
+function make_addon_gappa {
+ make_camlp5
+ installer_addon_dependency_beg gappa
+ make_addon_gappa_tool
+ make_addon_flocq
+ installer_addon_dependency_end
+ if build_prep_overlay gappa_plugin ; then
+ installer_addon_section gappa "Gappa plugin" "Coq plugin for the Gappa tool" ""
+ logn autogen ./autogen.sh
+ logn configure ./configure
+ logn remake ./remake
+ logn install ./remake install
+ build_post
+ fi
+}
+
# Main function for building addons
function make_addons {
diff --git a/dev/build/windows/patches_coq/VST.patch b/dev/build/windows/patches_coq/VST.patch
index 2c8c46373f..d047eb107f 100644
--- a/dev/build/windows/patches_coq/VST.patch
+++ b/dev/build/windows/patches_coq/VST.patch
@@ -1,8 +1,7 @@
diff --git a/Makefile b/Makefile
-index 4a119042..fdfac13e 100755
--- a/Makefile
+++ b/Makefile
-@@ -76,8 +76,8 @@ endif
+@@ -82,8 +82,8 @@ endif
COMPCERTDIRS=lib common $(ARCHDIRS) cfrontend flocq exportclight $(BACKEND)
@@ -10,6 +9,6 @@ index 4a119042..fdfac13e 100755
-EXTFLAGS= $(foreach d, $(COMPCERTDIRS), -Q $(COMPCERT)/$(d) compcert.$(d))
+COMPCERT_R_FLAGS= $(foreach d, $(COMPCERTDIRS), -R $(COMPCERT)/$(d) VST.compcert.$(d))
+EXTFLAGS= $(foreach d, $(COMPCERTDIRS), -Q $(COMPCERT)/$(d) VST.compcert.$(d))
-
- # for SSReflect
- ifdef MATHCOMP
+ # for ITrees
+ ifeq ($(wildcard InteractionTrees/the?ries),"InteractionTrees/theories")
+ EXTFLAGS:=$(EXTFLAGS) -Q InteractionTrees/theories ITree
diff --git a/dev/build/windows/patches_coq/quickchick.patch b/dev/build/windows/patches_coq/quickchick.patch
index 1afa6e7f95..4b7b86ff05 100644
--- a/dev/build/windows/patches_coq/quickchick.patch
+++ b/dev/build/windows/patches_coq/quickchick.patch
@@ -1,12 +1,12 @@
-diff/patch file created on Mon, Aug 27, 2018 9:21:52 AM with:
-difftar-folder.sh tarballs/quickchick-v1.0.2.tar.gz quickchick-v1.0.2 1
-TARFILE= tarballs/quickchick-v1.0.2.tar.gz
-FOLDER= quickchick-v1.0.2
+diff/patch file created on Wed, Jul 17, 2019 8:06:45 PM with:
+difftar-folder.sh tarballs/quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0.tar.gz quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0 1
+TARFILE= tarballs/quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0.tar.gz
+FOLDER= quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0
TARSTRIP= 1
-TARPREFIX= QuickChick-1.0.2/
-ORIGFOLDER= quickchick-v1.0.2.orig
---- quickchick-v1.0.2.orig/Makefile 2018-08-22 18:21:39.000000000 +0200
-+++ quickchick-v1.0.2/Makefile 2018-08-27 09:21:04.710461100 +0200
+TARPREFIX= QuickChick-741fb98eb865129a70c4ef7a64db2739c4a5eab0/
+ORIGFOLDER= quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0.orig
+--- quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0.orig/Makefile 2019-06-26 12:09:01.000000000 +0200
++++ quickchick-741fb98eb865129a70c4ef7a64db2739c4a5eab0/Makefile 2019-07-17 20:05:44.322251200 +0200
@@ -2,7 +2,7 @@
.PHONY: plugin install install-plugin clean quickChickTool
@@ -16,11 +16,32 @@ ORIGFOLDER= quickchick-v1.0.2.orig
QCTOOL_SRC=$(QCTOOL_DIR)/quickChickTool.ml \
$(QCTOOL_DIR)/quickChickToolTypes.ml \
$(QCTOOL_DIR)/quickChickToolLexer.mll \
+@@ -20,8 +20,8 @@
+
+ all: quickChickTool plugin documentation-check
+
+-plugin: Makefile.coq
+- $(MAKE) -f Makefile.coq
++plugin: Makefile.coq
++ $(MAKE) -f Makefile.coq
+
+ documentation-check: plugin
+ coqc -R src QuickChick -I src QuickChickInterface.v
@@ -32,7 +32,7 @@
install: all
$(V)$(MAKE) -f Makefile.coq install > $(TEMPFILE)
# Manually copying the remaining files
-- $(V)cp $(QCTOOL_EXE) $(shell opam config var bin)/quickChick
-+ $(V)cp $(QCTOOL_EXE) "$(COQBIN)/quickChick"
+- $(V)cp $(QCTOOL_DIR)/$(QCTOOL_EXE) $(shell opam config var bin)/quickChick
++ $(V)cp $(QCTOOL_DIR)/$(QCTOOL_EXE) "$(COQBIN)/quickChick"
# $(V)cp src/quickChickLib.cmx $(COQLIB)/user-contrib/QuickChick
# $(V)cp src/quickChickLib.o $(COQLIB)/user-contrib/QuickChick
+
+@@ -56,7 +56,7 @@
+ $(MAKE) -C examples/RedBlack test
+ # cd examples/stlc; make clean && make
+ $(MAKE) -C examples/multifile-mutation test
+-# This takes too long.
++# This takes too long.
+ # $(MAKE) -C examples/c-mutation test
+ # coqc examples/BSTTest.v
+ coqc examples/DependentTest.v
diff --git a/dev/ci/ci-basic-overlay.sh b/dev/ci/ci-basic-overlay.sh
index fa39b41565..ad22c394d8 100755
--- a/dev/ci/ci-basic-overlay.sh
+++ b/dev/ci/ci-basic-overlay.sh
@@ -102,6 +102,27 @@
: "${coquelicot_CI_ARCHIVEURL:=${coquelicot_CI_GITURL}/-/archive}"
########################################################################
+# Coq-interval
+########################################################################
+: "${interval_CI_REF:=master}"
+: "${interval_CI_GITURL:=https://gitlab.inria.fr/coqinterval/interval}"
+: "${interval_CI_ARCHIVEURL:=${interval_CI_GITURL}/-/archive}"
+
+########################################################################
+# Gappa stand alone tool
+########################################################################
+: "${gappa_tool_CI_REF:=master}"
+: "${gappa_tool_CI_GITURL:=https://gitlab.inria.fr/gappa/gappa}"
+: "${gappa_tool_CI_ARCHIVEURL:=${gappa_tool_CI_GITURL}/-/archive}"
+
+########################################################################
+# Gappa plugin
+########################################################################
+: "${gappa_plugin_CI_REF:=master}"
+: "${gappa_plugin_CI_GITURL:=https://gitlab.inria.fr/gappa/coq}"
+: "${gappa_plugin_CI_ARCHIVEURL:=${gappa_plugin_CI_GITURL}/-/archive}"
+
+########################################################################
# CompCert
########################################################################
: "${compcert_CI_REF:=master}"
@@ -160,9 +181,9 @@
########################################################################
# SF
########################################################################
-: "${sf_lf_CI_TARURL:=https://www.cis.upenn.edu/~bcpierce/sf/lf-current/lf.tgz}"
-: "${sf_plf_CI_TARURL:=https://www.cis.upenn.edu/~bcpierce/sf/plf-current/plf.tgz}"
-: "${sf_vfa_CI_TARURL:=https://www.cis.upenn.edu/~bcpierce/sf/vfa-current/vfa.tgz}"
+: "${sf_lf_CI_TARURL:=https://softwarefoundations.cis.upenn.edu/lf-current/lf.tgz}"
+: "${sf_plf_CI_TARURL:=https://softwarefoundations.cis.upenn.edu/plf-current/plf.tgz}"
+: "${sf_vfa_CI_TARURL:=https://softwarefoundations.cis.upenn.edu/vfa-current/vfa.tgz}"
########################################################################
# TLC
diff --git a/dev/ci/gitlab.bat b/dev/ci/gitlab.bat
index 6c4ccfc14d..3998fc6514 100755
--- a/dev/ci/gitlab.bat
+++ b/dev/ci/gitlab.bat
@@ -48,9 +48,13 @@ IF "%WINDOWS%" == "enabled_all_addons" (
-addon=compcert ^
-addon=extlib ^
-addon=quickchick ^
+ -addon=coquelicot ^
-addon=vst ^
- -addon=aactactics
-REM -addon=coquelicot ^
+ -addon=aactactics ^
+ -addon=flocq ^
+ -addon=interval ^
+ -addon=gappa_tool ^
+ -addon=gappa
) ELSE (
SET "EXTRA_ADDONS= "
)
diff --git a/dev/ci/user-overlays/10441-ppedrot-static-poly-section.sh b/dev/ci/user-overlays/10441-ppedrot-static-poly-section.sh
new file mode 100644
index 0000000000..00f544f894
--- /dev/null
+++ b/dev/ci/user-overlays/10441-ppedrot-static-poly-section.sh
@@ -0,0 +1,6 @@
+if [ "$CI_PULL_REQUEST" = "10441" ] || [ "$CI_BRANCH" = "static-poly-section" ]; then
+
+ ext_lib_CI_REF=static-poly-section
+ ext_lib_CI_GITURL=https://github.com/ppedrot/coq-ext-lib
+
+fi
diff --git a/dev/doc/xml-protocol.md b/dev/doc/xml-protocol.md
index e23d1234f7..a3e1a4e90b 100644
--- a/dev/doc/xml-protocol.md
+++ b/dev/doc/xml-protocol.md
@@ -174,7 +174,7 @@ Moves current tip to `${stateId}`, such that commands may be added to the new st
</union>
</value>
```
-* Failure: If `stateId` is in an error-state and cannot be jumped to, `errorFreeStateId` is the parent state of ``stateId` that shopuld be edited instead.
+* Failure: If `stateId` is in an error-state and cannot be jumped to, `errorFreeStateId` is the parent state of `stateId` that should be edited instead.
```html
<value val="fail" loc_s="${startOffsetOfError}" loc_e="${endOffsetOfError}">
<state_id val="${errorFreeStateId}"/>
diff --git a/dev/shim/dune b/dev/shim/dune
index e307848292..84b2e642e8 100644
--- a/dev/shim/dune
+++ b/dev/shim/dune
@@ -4,7 +4,7 @@
%{bin:coqtop}
%{project_root}/theories/Init/Prelude.vo)
(action
- (with-outputs-to coqtop-prelude
+ (with-stdout-to coqtop-prelude
(progn
(echo "#!/usr/bin/env bash\n")
(bash "echo \"$(pwd)/%{bin:coqtop} -coqlib $(pwd)/%{project_root}\" \\$@")
@@ -16,7 +16,7 @@
%{bin:coqc}
%{project_root}/theories/Init/Prelude.vo)
(action
- (with-outputs-to coqc-prelude
+ (with-stdout-to coqc-prelude
(progn
(echo "#!/usr/bin/env bash\n")
(bash "echo \"$(pwd)/%{bin:coqc} -coqlib $(pwd)/%{project_root}\" \\$@")
@@ -29,7 +29,7 @@
%{lib:coq.kernel:../../stublibs/dllbyterun_stubs.so}
%{project_root}/theories/Init/Prelude.vo)
(action
- (with-outputs-to %{targets}
+ (with-stdout-to %{targets}
(progn
(echo "#!/usr/bin/env bash\n")
(bash "echo \"$(pwd)/%{bin:coqtop.byte} -coqlib $(pwd)/%{project_root}\" \\$@")
@@ -45,7 +45,7 @@
%{project_root}/coqide-server.install
%{project_root}/coqide.install)
(action
- (with-outputs-to coqide-prelude
+ (with-stdout-to coqide-prelude
(progn
(echo "#!/usr/bin/env bash\n")
(bash "echo \"$(pwd)/%{bin:coqide} -coqlib $(pwd)/%{project_root}\" \\$@")
diff --git a/doc/changelog/02-specification-language/10441-static-poly-section.rst b/doc/changelog/02-specification-language/10441-static-poly-section.rst
new file mode 100644
index 0000000000..7f0345d946
--- /dev/null
+++ b/doc/changelog/02-specification-language/10441-static-poly-section.rst
@@ -0,0 +1,11 @@
+- The universe polymorphism setting now applies from the opening of a section.
+ In particular, it is not possible anymore to mix polymorphic and monomorphic
+ definitions in a section when there are no variables nor universe constraints
+ defined in this section. This makes the behaviour consistent with the
+ documentation. (`#10441 <https://github.com/coq/coq/pull/10441>`_,
+ by Pierre-Marie Pédrot)
+
+- The :cmd:`Section` vernacular command now accepts the "universes" attribute. In
+ addition to setting the section universe polymorphism, it also locally sets
+ the universe polymorphic option inside the section.
+ (`#10441 <https://github.com/coq/coq/pull/10441>`_, by Pierre-Marie Pédrot)
diff --git a/doc/changelog/08-tools/10430-extraction-int63.rst b/doc/changelog/08-tools/10430-extraction-int63.rst
new file mode 100644
index 0000000000..68ae4591a4
--- /dev/null
+++ b/doc/changelog/08-tools/10430-extraction-int63.rst
@@ -0,0 +1,5 @@
+- Fix extraction to OCaml of primitive machine integers;
+ see :ref:`primitive-integers`
+ (`#10430 <https://github.com/coq/coq/pull/10430>`_,
+ fixes `#10361 <https://github.com/coq/coq/issues/10361>`_,
+ by Vincent Laporte).
diff --git a/doc/changelog/10-standard-library/09811-remove-zlogarithm.rst b/doc/changelog/10-standard-library/09811-remove-zlogarithm.rst
new file mode 100644
index 0000000000..ab625b9e03
--- /dev/null
+++ b/doc/changelog/10-standard-library/09811-remove-zlogarithm.rst
@@ -0,0 +1,4 @@
+- Removes deprecated modules `Coq.ZArith.Zlogarithm`
+ and `Coq.ZArith.Zsqrt_compat`
+ (`#9811 <https://github.com/coq/coq/pull/9811>`_,
+ by Vincent Laporte).
diff --git a/doc/sphinx/addendum/ring.rst b/doc/sphinx/addendum/ring.rst
index 3f4d5cc784..1098aa75da 100644
--- a/doc/sphinx/addendum/ring.rst
+++ b/doc/sphinx/addendum/ring.rst
@@ -76,7 +76,7 @@ an arbitrary order:
===== =============== =========================
Then normalize the “abstract” polynomial
-:math:`((V_1 \otimes V_2 ) \oplus V_2) \oplus (V_0 \otimes 2)`
+:math:`((V_1 \oplus V_2 ) \otimes V_2) \oplus (V_0 \otimes 2)`
In our example the normal form is:
:math:`(2 \otimes V_0 ) \oplus (V_1 \otimes V_2) \oplus (V_2 \otimes V_2 )`.
Then substitute the variables by their values in the variables map to
diff --git a/doc/sphinx/addendum/universe-polymorphism.rst b/doc/sphinx/addendum/universe-polymorphism.rst
index 395b5ce2d3..7e698bfb66 100644
--- a/doc/sphinx/addendum/universe-polymorphism.rst
+++ b/doc/sphinx/addendum/universe-polymorphism.rst
@@ -144,6 +144,8 @@ Many other commands support the ``Polymorphic`` flag, including:
- ``Lemma``, ``Axiom``, and all the other “definition” keywords support
polymorphism.
+- :cmd:`Section` will locally set the polymorphism flag inside the section.
+
- ``Variables``, ``Context``, ``Universe`` and ``Constraint`` in a section support
polymorphism. This means that the universe variables (and associated
constraints) are discharged polymorphically over definitions that use
diff --git a/doc/sphinx/changes.rst b/doc/sphinx/changes.rst
index fd84868a1f..6ac55e7bf4 100644
--- a/doc/sphinx/changes.rst
+++ b/doc/sphinx/changes.rst
@@ -102,10 +102,10 @@ reference manual. Here are the most important user-visible changes:
extensionality lemma:
- interactive mode: :n:`under @term`, associated terminator: :tacn:`over`
- - one-liner mode: `under @term do [@tactic | ...]`
+ - one-liner mode: :n:`under @term do [@tactic | ...]`
It can take occurrence switches, contextual patterns, and intro patterns:
- :g:`under {2}[in RHS]eq_big => [i|i ?] do ...`
+ :g:`under {2}[in RHS]eq_big => [i|i ?]`
(`#9651 <https://github.com/coq/coq/pull/9651>`_,
by Erik Martin-Dorel and Enrico Tassi).
diff --git a/doc/sphinx/language/coq-library.rst b/doc/sphinx/language/coq-library.rst
index d1b95e6203..ac75240cfb 100644
--- a/doc/sphinx/language/coq-library.rst
+++ b/doc/sphinx/language/coq-library.rst
@@ -7,22 +7,20 @@ The |Coq| library
single: Theories
-The |Coq| library is structured into two parts:
+The |Coq| library has two parts:
- * **The initial library**: it contains elementary logical notions and
- data-types. It constitutes the basic state of the system directly
- available when running |Coq|;
+ * **The basic library**: definitions and theorems for
+ the most commonly used elementary logical notions and
+ data types. |Coq| normally loads these files automatically when it starts.
- * **The standard library**: general-purpose libraries containing various
- developments of |Coq| axiomatizations about sets, lists, sorting,
- arithmetic, etc. This library comes with the system and its modules
- are directly accessible through the ``Require`` command (see
- Section :ref:`compiled-files`);
+ * **The standard library**: general-purpose libraries with
+ definitions and theorems for sets, lists, sorting,
+ arithmetic, etc. To use these files, users must load them explicitly
+ with the ``Require`` command (see :ref:`compiled-files`)
-In addition, user-provided libraries or developments are provided by
-|Coq| users' community. These libraries and developments are available
-for download at http://coq.inria.fr (see
-Section :ref:`userscontributions`).
+There are also many libraries provided by |Coq| users' community.
+These libraries and developments are available
+for download at http://coq.inria.fr (see :ref:`userscontributions`).
This chapter briefly reviews the |Coq| libraries whose contents can
also be browsed at http://coq.inria.fr/stdlib.
@@ -514,8 +512,8 @@ realizability interpretation.
forall (A B:Prop) (P:Type), (A -> B -> P) -> A /\ B -> P.
-Basic Arithmetics
-~~~~~~~~~~~~~~~~~
+Basic Arithmetic
+~~~~~~~~~~~~~~~~
The basic library includes a few elementary properties of natural
numbers, together with the definitions of predecessor, addition and
@@ -804,8 +802,8 @@ Notation Interpretation
=============== ===================
-Notations for integer arithmetics
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Notations for integer arithmetic
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
single: Arithmetical notations
@@ -822,7 +820,7 @@ Notations for integer arithmetics
The following table describes the syntax of expressions
-for integer arithmetics. It is provided by requiring and opening the module ``ZArith`` and opening scope ``Z_scope``.
+for integer arithmetic. It is provided by requiring and opening the module ``ZArith`` and opening scope ``Z_scope``.
It specifies how notations are interpreted and, when not
already reserved, the precedence and associativity.
@@ -866,7 +864,7 @@ Notations for real numbers
This is provided by requiring and opening the module ``Reals`` and
opening scope ``R_scope``. This set of notations is very similar to
-the notation for integer arithmetics. The inverse function was added.
+the notation for integer arithmetic. The inverse function was added.
=============== ===================
Notation Interpretation
diff --git a/doc/sphinx/language/gallina-extensions.rst b/doc/sphinx/language/gallina-extensions.rst
index c93984661e..acf68e9fd2 100644
--- a/doc/sphinx/language/gallina-extensions.rst
+++ b/doc/sphinx/language/gallina-extensions.rst
@@ -2443,12 +2443,19 @@ The reduction machines (:tacn:`vm_compute`, :tacn:`native_compute`) implement
dedicated, efficient, rules to reduce the applications of these primitive
operations.
-These primitives, when extracted to OCaml (see :ref:`extraction`), are mapped to
-types and functions of a :g:`Uint63` module. Said module is not produced by
+The extraction of these primitives can be customized similarly to the extraction
+of regular axioms (see :ref:`extraction`). Nonetheless, the :g:`ExtrOCamlInt63`
+module can be used when extracting to OCaml: it maps the Coq primitives to types
+and functions of a :g:`Uint63` module. Said OCaml module is not produced by
extraction. Instead, it has to be provided by the user (if they want to compile
or execute the extracted code). For instance, an implementation of this module
can be taken from the kernel of Coq.
+Literal values (at type :g:`Int63.int`) are extracted to literal OCaml values
+wrapped into the :g:`Uint63.of_int` (resp. :g:`Uint63.of_int64`) constructor on
+64-bit (resp. 32-bit) platforms. Currently, this cannot be customized (see the
+function :g:`Uint63.compile` from the kernel).
+
Bidirectionality hints
----------------------
diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst
index 38f6714f46..91dfa34494 100644
--- a/doc/sphinx/language/gallina-specification-language.rst
+++ b/doc/sphinx/language/gallina-specification-language.rst
@@ -44,78 +44,91 @@ Lexical conventions
===================
Blanks
- Space, newline and horizontal tabulation are considered as blanks.
+ Space, newline and horizontal tab are considered blanks.
Blanks are ignored but they separate tokens.
Comments
- Comments in Coq are enclosed between ``(*`` and ``*)``, and can be nested.
- They can contain any character. However, :token:`string` literals must be
+ Comments are enclosed between ``(*`` and ``*)``. They can be nested.
+ They can contain any character. However, embedded :token:`string` literals must be
correctly closed. Comments are treated as blanks.
-Identifiers and access identifiers
+Identifiers and field identifiers
Identifiers, written :token:`ident`, are sequences of letters, digits, ``_`` and
- ``'``, that do not start with a digit or ``'``. That is, they are
- recognized by the following lexical class:
+ ``'``, that do not start with a digit or ``'``. That is, they are
+ recognized by the following grammar (except that the string ``_`` is reserved;
+ it is not a valid identifier):
.. productionlist:: coq
- first_letter : a..z ∣ A..Z ∣ _ ∣ unicode-letter
- subsequent_letter : a..z ∣ A..Z ∣ 0..9 ∣ _ ∣ ' ∣ unicode-letter ∣ unicode-id-part
ident : `first_letter`[`subsequent_letter`…`subsequent_letter`]
- access_ident : .`ident`
+ field : .`ident`
+ first_letter : a..z ∣ A..Z ∣ _ ∣ `unicode_letter`
+ subsequent_letter : `first_letter` ∣ 0..9 ∣ ' ∣ `unicode_id_part`
All characters are meaningful. In particular, identifiers are case-sensitive.
- The entry ``unicode-letter`` non-exhaustively includes Latin,
+ :production:`unicode_letter` non-exhaustively includes Latin,
Greek, Gothic, Cyrillic, Arabic, Hebrew, Georgian, Hangul, Hiragana
and Katakana characters, CJK ideographs, mathematical letter-like
- symbols, hyphens, non-breaking space, … The entry ``unicode-id-part``
+ symbols and non-breaking space. :production:`unicode_id_part`
non-exhaustively includes symbols for prime letters and subscripts.
- Access identifiers, written :token:`access_ident`, are identifiers prefixed by
- `.` (dot) without blank. They are used in the syntax of qualified
- identifiers.
+ Field identifiers, written :token:`field`, are identifiers prefixed by
+ `.` (dot) with no blank between the dot and the identifier. They are used in
+ the syntax of qualified identifiers.
Numerals
- Numerals are sequences of digits with a potential fractional part
- and exponent. Integers are numerals without fractional nor exponent
- part and optionally preceded by a minus sign. Underscores ``_`` can
- be used as comments in numerals.
+ Numerals are sequences of digits with an optional fractional part
+ and exponent, optionally preceded by a minus sign. :token:`int` is an integer;
+ a numeral without fractional or exponent parts. :token:`num` is a non-negative
+ integer. Underscores embedded in the digits are ignored, for example
+ ``1_000_000`` is the same as ``1000000``.
.. productionlist:: coq
- digit : 0..9
+ numeral : `num`[. `num`][`exp`[`sign`]`num`]
+ int : [-]`num`
num : `digit`…`digit`
- integer : [-]`num`
- dot : .
+ digit : 0..9
exp : e | E
sign : + | -
- numeral : `num`[`dot` `num`][`exp`[`sign`]`num`]
Strings
- Strings are delimited by ``"`` (double quote), and enclose a sequence of
- any characters different from ``"`` or the sequence ``""`` to denote the
- double quote character. In grammars, the entry for quoted strings is
- :production:`string`.
+ Strings begin and end with ``"`` (double quote). Use ``""`` to represent
+ a double quote character within a string. In the grammar, strings are
+ identified with :production:`string`.
Keywords
- The following identifiers are reserved keywords, and cannot be
- employed otherwise::
-
- _ as at cofix else end exists exists2 fix for
- forall fun if IF in let match mod return
- SProp Prop Set Type then using where with
-
-Special tokens
- The following sequences of characters are special tokens::
-
- ! % & && ( () ) * + ++ , - -> . .( ..
- / /\ : :: :< := :> ; < <- <-> <: <= <> =
- => =_D > >-> >= ? ?= @ [ \/ ] ^ { | |-
- || } ~ #[
-
- Lexical ambiguities are resolved according to the “longest match”
- rule: when a sequence of non alphanumerical characters can be
- decomposed into several different ways, then the first token is the
- longest possible one (among all tokens defined at this moment), and so
- on.
+ The following character sequences are reserved keywords that cannot be
+ used as identifiers::
+
+ _ Axiom CoFixpoint Definition Fixpoint Hypothesis IF Parameter Prop
+ SProp Set Theorem Type Variable as at by cofix discriminated else
+ end exists exists2 fix for forall fun if in lazymatch let match
+ multimatch return then using where with
+
+ Note that plugins may define additional keywords when they are loaded.
+
+Other tokens
+ The set of
+ tokens defined at any given time can vary because the :cmd:`Notation`
+ command can define new tokens. A :cmd:`Require` command may load more notation definitions,
+ while the end of a :cmd:`Section` may remove notations. Some notations
+ are defined in the basic library (see :ref:`thecoqlibrary`) and are normally
+ loaded automatically at startup time.
+
+ Here are the character sequences that Coq directly defines as tokens
+ without using :cmd:`Notation` (omitting 25 specialized tokens that begin with
+ ``#int63_``)::
+
+ ! #[ % & ' ( () (bfs) (dfs) ) * ** + , - ->
+ . .( .. ... / : ::= := :> :>> ; < <+ <- <:
+ <<: <= = => > >-> >= ? @ @{ [ [= ] _ _eqn
+ `( `{ { {| | |- || }
+
+ When multiple tokens match the beginning of a sequence of characters,
+ the longest matching token is used.
+ Occasionally you may need to insert spaces to separate tokens. For example,
+ if ``~`` and ``~~`` are both defined as tokens, the inputs ``~ ~`` and
+ ``~~`` generate different tokens, whereas if ``~~`` is not defined, then the
+ two inputs are equivalent.
.. _term:
@@ -164,7 +177,7 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`.
: ( `name` [: `term`] := `term` )
: ' `pattern`
name : `ident` | _
- qualid : `ident` | `qualid` `access_ident`
+ qualid : `ident` | `qualid` `field`
sort : SProp | Prop | Set | Type
fix_bodies : `fix_body`
: `fix_body` with `fix_body` with … with `fix_body` for `ident`
diff --git a/doc/sphinx/practical-tools/coqide.rst b/doc/sphinx/practical-tools/coqide.rst
index efb5df720a..7d6171285e 100644
--- a/doc/sphinx/practical-tools/coqide.rst
+++ b/doc/sphinx/practical-tools/coqide.rst
@@ -88,8 +88,6 @@ There are other buttons on the |CoqIDE| toolbar: a button to save the running
buffer; a button to close the current buffer (an "X"); buttons to switch among
buffers (left and right arrows); an "information" button; and a "gears" button.
-The "information" button is described in Section :ref:`try-tactics-automatically`.
-
The "gears" button submits proof terms to the |Coq| kernel for type checking.
When |Coq| uses asynchronous processing (see Chapter :ref:`asynchronousandparallelproofprocessing`),
proofs may have been completed without kernel-checking of generated proof terms.
@@ -100,27 +98,6 @@ processed color, though their preceding proofs have the processed color.
Notice that for all these buttons, except for the "gears" button, their operations
are also available in the menu, where their keyboard shortcuts are given.
-.. _try-tactics-automatically:
-
-Trying tactics automatically
-------------------------------
-
-The menu Try Tactics provides some features for automatically trying
-to solve the current goal using simple tactics. If such a tactic
-succeeds in solving the goal, then its text is automatically inserted
-into the script. There is finally a combination of these tactics,
-called the *proof wizard* which will try each of them in turn. This
-wizard is also available as a tool button (the "information" button). The set of
-tactics tried by the wizard is customizable in the preferences.
-
-These tactics are general ones, in particular they do not refer to
-particular hypotheses. You may also try specific tactics related to
-the goal or one of the hypotheses, by clicking with the right mouse
-button on the goal or the considered hypothesis. This is the
-“contextual menu on goals” feature, that may be disabled in the
-preferences if undesirable.
-
-
Proof folding
------------------
@@ -202,13 +179,6 @@ compilation, printing, web browsing. In the browser command, you may
use `%s` to denote the URL to open, for example:
`firefox -remote "OpenURL(%s)"`.
-The `Tactics Wizard` section allows defining the set of tactics that
-should be tried, in sequence, to solve the current goal.
-
-The last section is for miscellaneous boolean settings, such as the
-“contextual menu on goals” feature presented in the section
-:ref:`Try tactics automatically <try-tactics-automatically>`.
-
Notice that these settings are saved in the file `.coqiderc` of your
home directory.
diff --git a/doc/sphinx/proof-engine/ltac.rst b/doc/sphinx/proof-engine/ltac.rst
index 46f9826e41..362c3da6cb 100644
--- a/doc/sphinx/proof-engine/ltac.rst
+++ b/doc/sphinx/proof-engine/ltac.rst
@@ -31,10 +31,10 @@ Syntax
The syntax of the tactic language is given below. See Chapter
:ref:`gallinaspecificationlanguage` for a description of the BNF metasyntax used
in these grammar rules. Various already defined entries will be used in this
-chapter: entries :token:`natural`, :token:`integer`, :token:`ident`,
+chapter: entries :token:`num`, :token:`int`, :token:`ident`
:token:`qualid`, :token:`term`, :token:`cpattern` and :token:`tactic`
-represent respectively the natural and integer numbers, the authorized
-identificators and qualified names, Coq terms and patterns and all the atomic
+represent respectively natural and integer numbers,
+identifiers, qualified names, Coq terms, patterns and the atomic
tactics described in Chapter :ref:`tactics`.
The syntax of :production:`cpattern` is
@@ -141,10 +141,10 @@ mode but it can also be used in toplevel definitions as shown below.
: `atom`
atom : `qualid`
: ()
- : `integer`
+ : `int`
: ( `ltac_expr` )
component : `string` | `qualid`
- message_token : `string` | `ident` | `integer`
+ message_token : `string` | `ident` | `int`
tacarg : `qualid`
: ()
: ltac : `atom`
@@ -159,11 +159,11 @@ mode but it can also be used in toplevel definitions as shown below.
match_rule : `cpattern` => `ltac_expr`
: context [`ident`] [ `cpattern` ] => `ltac_expr`
: _ => `ltac_expr`
- test : `integer` = `integer`
- : `integer` (< | <= | > | >=) `integer`
+ test : `int` = `int`
+ : `int` (< | <= | > | >=) `int`
selector : [`ident`]
- : `integer`
- : (`integer` | `integer` - `integer`), ..., (`integer` | `integer` - `integer`)
+ : `int`
+ : (`int` | `int` - `int`), ..., (`int` | `int` - `int`)
toplevel_selector : `selector`
: all
: par
diff --git a/doc/sphinx/proof-engine/ltac2.rst b/doc/sphinx/proof-engine/ltac2.rst
index 3036648b08..ceaa2775bf 100644
--- a/doc/sphinx/proof-engine/ltac2.rst
+++ b/doc/sphinx/proof-engine/ltac2.rst
@@ -179,7 +179,7 @@ constructions from ML.
: let `ltac2_var` := `ltac2_term` in `ltac2_term`
: let rec `ltac2_var` := `ltac2_term` in `ltac2_term`
: match `ltac2_term` with `ltac2_branch` ... `ltac2_branch` end
- : `integer`
+ : `int`
: `string`
: `ltac2_term` ; `ltac2_term`
: [| `ltac2_term` ; ... ; `ltac2_term` |]
@@ -670,7 +670,7 @@ A scope is a name given to a grammar entry used to produce some Ltac2 expression
at parsing time. Scopes are described using a form of S-expression.
.. prodn::
- ltac2_scope ::= {| @string | @integer | @lident ({+, @ltac2_scope}) }
+ ltac2_scope ::= {| @string | @int | @lident ({+, @ltac2_scope}) }
A few scopes contain antiquotation features. For sake of uniformity, all
antiquotations are introduced by the syntax :n:`$@lident`.
@@ -719,7 +719,7 @@ The following scopes are built-in.
+ parses a Ltac2 expression at the next level and return it as is.
-- :n:`tactic(n = @integer)`:
+- :n:`tactic(n = @int)`:
+ parses a Ltac2 expression at the provided level :n:`n` and return it as is.
@@ -760,7 +760,7 @@ Notations
The Ltac2 parser can be extended by syntactic notations.
-.. cmd:: Ltac2 Notation {+ {| @lident (@ltac2_scope) | @string } } {? : @integer} := @ltac2_term
+.. cmd:: Ltac2 Notation {+ {| @lident (@ltac2_scope) | @string } } {? : @int} := @ltac2_term
:name: Ltac2 Notation
A Ltac2 notation adds a parsing rule to the Ltac2 grammar, which is expanded
diff --git a/doc/sphinx/proof-engine/ssreflect-proof-language.rst b/doc/sphinx/proof-engine/ssreflect-proof-language.rst
index 1b9e3ce0f3..ed980bd4de 100644
--- a/doc/sphinx/proof-engine/ssreflect-proof-language.rst
+++ b/doc/sphinx/proof-engine/ssreflect-proof-language.rst
@@ -3761,10 +3761,10 @@ involves the following steps:
5. If so :tacn:`under` protects these n goals against an
accidental instantiation of the evar.
- These protected goals are displayed using the ``Under[ … ]``
- notation (e.g. ``Under[ m - m ]`` in the running example).
+ These protected goals are displayed using the ``'Under[ … ]``
+ notation (e.g. ``'Under[ m - m ]`` in the running example).
-6. The expression inside the ``Under[ … ]`` notation can be
+6. The expression inside the ``'Under[ … ]`` notation can be
proved equivalent to the desired expression
by using a regular :tacn:`rewrite` tactic.
@@ -3782,7 +3782,7 @@ The over tactic
Two equivalent facilities (a terminator and a lemma) are provided to
close intermediate subgoals generated by :tacn:`under` (i.e. goals
-displayed as ``Under[ … ]``):
+displayed as ``'Under[ … ]``):
.. tacn:: over
:name: over
@@ -3792,7 +3792,7 @@ displayed as ``Under[ … ]``):
.. tacv:: by rewrite over
- This is a variant of :tacn:`over` in order to close ``Under[ … ]``
+ This is a variant of :tacn:`over` in order to close ``'Under[ … ]``
goals, relying on the ``over`` rewrite rule.
.. _under_one_liner:
diff --git a/doc/sphinx/proof-engine/vernacular-commands.rst b/doc/sphinx/proof-engine/vernacular-commands.rst
index 5f3e82938d..774732825a 100644
--- a/doc/sphinx/proof-engine/vernacular-commands.rst
+++ b/doc/sphinx/proof-engine/vernacular-commands.rst
@@ -870,26 +870,6 @@ interactively, they cannot be part of a vernacular file loaded via
have to undo some extra commands and end on a state :n:`@num′ ≤ @num` if
necessary.
- .. cmdv:: Backtrack @num @num @num
- :name: Backtrack
-
- .. deprecated:: 8.4
-
- :cmd:`Backtrack` is a *deprecated* form of
- :cmd:`BackTo` which allows explicitly manipulating the proof environment. The
- three numbers represent the following:
-
- + *first number* : State label to reach, as for :cmd:`BackTo`.
- + *second number* : *Proof state number* to unbury once aborts have been done.
- |Coq| will compute the number of :cmd:`Undo` to perform (see Chapter :ref:`proofhandling`).
- + *third number* : Number of :cmd:`Abort` to perform, i.e. the number of currently
- opened nested proofs that must be canceled (see Chapter :ref:`proofhandling`).
-
- .. exn:: Invalid backtrack.
-
- The destination state label is unknown.
-
-
.. _quitting-and-debugging:
Quitting and debugging
diff --git a/doc/stdlib/hidden-files b/doc/stdlib/hidden-files
index b25104ddb9..46175e37ed 100644
--- a/doc/stdlib/hidden-files
+++ b/doc/stdlib/hidden-files
@@ -12,6 +12,7 @@ plugins/extraction/ExtrHaskellZInteger.v
plugins/extraction/ExtrHaskellZNum.v
plugins/extraction/ExtrOcamlBasic.v
plugins/extraction/ExtrOcamlBigIntConv.v
+plugins/extraction/ExtrOCamlInt63.v
plugins/extraction/ExtrOcamlIntConv.v
plugins/extraction/ExtrOcamlNatBigInt.v
plugins/extraction/ExtrOcamlNatInt.v
diff --git a/doc/stdlib/index-list.html.template b/doc/stdlib/index-list.html.template
index 61d601be7d..dcfe4a08f3 100644
--- a/doc/stdlib/index-list.html.template
+++ b/doc/stdlib/index-list.html.template
@@ -181,14 +181,12 @@ through the <tt>Require Import</tt> command.</p>
theories/ZArith/Zhints.v
(theories/ZArith/ZArith_base.v)
theories/ZArith/Zcomplements.v
- theories/ZArith/Zsqrt_compat.v
theories/ZArith/Zpow_def.v
theories/ZArith/Zpow_alt.v
theories/ZArith/Zpower.v
theories/ZArith/Zdiv.v
theories/ZArith/Zquot.v
theories/ZArith/Zeuclid.v
- theories/ZArith/Zlogarithm.v
(theories/ZArith/ZArith.v)
theories/ZArith/Zgcd_alt.v
theories/ZArith/Zwf.v
diff --git a/doc/tools/docgram/README.md b/doc/tools/docgram/README.md
new file mode 100644
index 0000000000..98fdc38ca7
--- /dev/null
+++ b/doc/tools/docgram/README.md
@@ -0,0 +1,208 @@
+# Grammar extraction tool for documentation
+
+`doc_grammar` extracts Coq's grammar from `.mlg` files, edits it and inserts it in
+chunks into `.rst` files. The tool currently inserts Sphinx
+`productionlist` constructs. It also generates a file with `prodn` constructs
+for the entire grammar, but updates to `tacn` and `cmd` constructs must be done
+manually since the grammar doesn't have names for them as it does for
+nonterminals. There is an option to report which `tacn` and `cmd` were not
+found in the `.rst` files. `tacv` and `cmdv` constructs are not processed at all.
+
+The mlg grammars present several challenges to generating an accurate grammar
+for documentation purposes:
+
+* The 30+ mlg files don't define an overall order in which nonterminals should
+ appear in a complete grammar.
+
+* Even within a single mlg file, nonterminals and productions are often given
+ in an order that's much different from what a reader of the documentation would
+ expect. In a small number of cases, changing the order in the mlg would change
+ how some inputs are parsed, in particular when the order determines how to distinguish
+ otherwise ambiguous inputs.
+
+ Strictly speaking, that means our grammar is not a context free grammar even though
+ we gloss over that distinction in the documentation.
+
+* For a few nonterminals, some productions are only available if certain plugins
+ are activated (e.g. SSR). Readers should be informed about these.
+
+* Some limited parts of the grammar are defined in OCaml, including lookahead symbols
+ like `test_bracket_ident` and references to nonterminals in other files using
+ qualified names such as `Prim.ident`. A few symbols are defined multiple times,
+ such as `scope` and `orient`.
+
+## What the tool does
+
+1. The tool reads all the `mlg` files and generates `fullGrammar`, which includes
+all the grammar without the actions for each production or the OCaml code. This
+file is provided as a convenience to make it easier to examine the (mostly)
+unprocessed grammar of the mlg files with less clutter. Nonterminals that use
+levels (`"5" RIGHTA` below) are modified, for example:
+
+```
+tactic_expr:
+ [ "5" RIGHTA
+ [ te = binder_tactic -> { te } ]
+```
+
+becomes
+
+```
+tactic_expr5: [
+| binder_tactic
+| tactic_expr4
+]
+```
+
+2. The tool applies grammar editing operations specified by `common.edit_mlg` to
+generate `editedGrammar`.
+
+3. `orderedGrammar` gives the desired order for the nonterminals and productions
+in the documented grammar. Developers should edit this file to change the order.
+`doc_grammar` updates `orderedGrammar` so it has the same set of nonterminals and productions
+as `editedGrammar`. The update process removes manually-added comments from
+`orderedGrammar` while automatically-generated comments will be regenerated.
+
+4. The tool applies further edits to the grammar specified by `productionlist.edit_mlg`,
+then it updates the productionlists in the `.rst` files as specified by comments in the form
+`.. insertgram <first nt> <last nt>`. The edits are primarily to expand
+`.mlg` constructs such as `LIST1` and `OPT` into separate productions. The tool
+generates `productionlistGrammar`, which has the entire grammar in the form of `productionlists`.
+
+5. Using the grammar produced in step 3, the tool applies edits specified by
+`prodn.edit_mlg` and generates `prodnGrammar`, representing each production as
+a Sphinx `prodn` construct. Differently-edited grammars are used because `prodn`
+can naturally represent `LIST1 x SEP ','` whereas that is awkward for `productionlists`.
+
+## How to use the tool
+
+* `make doc_gram` updates `fullGrammar`.
+
+* `make doc_gram_verify` verifies that `fullGrammar` is consistent with the `.mlg` files.
+ This is for use by CI.
+
+* `make doc_gram_rsts` updates the `*Grammar` and `.rst` files.
+
+Changes to `fullGrammar`, `orderedGrammar` and the `.rsts` should be checked in to git.
+The other `*Grammar` files should not.
+
+### Command line arguments
+
+The executable takes a list of `.mlg` and `.rst` files as arguments. The tool
+inserts the grammar into the `.rsts` as specified by comments in those files.
+The order of the `.mlg` files affects the order of nonterminals and productions in
+`fullGrammar`. The order doesn't matter for the `.rst` files.
+
+Specifying the `-verify` command line argument avoids updating any of the files,
+but verifies that the current files are consistent. This setting is meant for
+use in CI; it will be up to each developer to include the changes to `*Grammar` and
+the `.rst` files in their PRs when they've changed the grammar.
+
+Other command line arguments:
+
+* `-check-tacs` reports on differences in tactics between the `rsts` and the grammar
+
+* `-check-cmds` reports on differences in commands between the `rsts` and the grammar
+
+* `-no-warn` suppresses printing of some warning messages
+
+* `-short` limits processing to updating/verifying only the `fullGrammar` file
+
+* `-verbose` prints more messages about the grammar
+
+* `-verify` described above
+
+### Grammar editing scripts
+
+The grammar editing scripts `*.edit_mlg` are similar in format to `.mlg` files stripped
+of all OCaml features. This is an easy way to include productions to match or add without
+writing another parser. The `DOC_GRAMMAR` token at the beginning of each file
+signals the use of streamlined syntax.
+
+Each edit file has a series of items in the form of productions. Items are applied
+in the order they appear. There are two types of editing operations:
+
+* Global edits - edit rules that apply to the entire grammar in a single operation.
+ These are identified by using specific reserved names as the non-terminal name.
+
+* Local edits - edit rules that apply to the productions of a single non-terminal.
+ The rule is a local edit if the non-terminal name isn't reserved. Individual
+ productions within a local edit that begin with a different set of reserved names
+ edit existing productions. For example `binders: [ | DELETE Pcoq.Constr.binders ]`
+ deletes the production `binders: [ | Pcoq.Constr.binders]`
+
+Productions that don't begin with a reserved name are added to the grammar,
+such as `empty: [ | ]`, which adds a new non-terminal `empty` with an
+empty production on the right-hand side.
+
+Another example: `LEFTQMARK: [ | "?" ]` is a local edit that treats `LEFTQMARK` as
+the name of a non-terminal and adds one production for it. (We know that LEFTQMARK
+is a token but doc_grammar does not.) `SPLICE: [ | LEFTQMARK ]` requests replacing all
+uses of `LEFTQMARK` anywhere in the grammar with its productions and removing the
+non-terminal. The combined effect of these two is to replace all uses of
+`LEFTQMARK` with `"?"`.
+
+Here are the current operations. They are likely to be refined as we learn
+what operations are most useful while we update the mlg files and documentation:
+
+### Global edits
+
+`DELETE` - deletes the specified non-terminals anywhere in the grammar. Each
+should appear as a separate production. Useful for removing non-terminals that
+only do lookahead that shouldn't be in the documentation.
+
+`RENAME` - each production specifies an (old name, new name) pair of
+non-terminals to rename.
+
+`SPLICE` - requests replacing all uses of the nonterminals anywhere in the
+grammar with its productions and removing the non-terminal. Each should appear
+as a separate production. (Doesn't work recursively; splicing for both
+`A: [ | B ]` and `B: [ | C ]` must be done in separate SPLICE operations.)
+
+`EXPAND` - expands LIST0, LIST1, LIST* ... SEP and OPT constructs into
+new non-terminals
+
+### Local edits
+
+`DELETE <production>` - removes the specified production from the grammar
+
+`EDIT <production>` - modifies the specified production using the following tags
+that appear in the specified production:
+
+* `USE_NT <name>` LIST* - extracts LIST* as new nonterminal with the specified
+ new non-terminal name
+
+* `ADD_OPT <grammar symbol>` - looks for a production that matches the specified
+ production **without** `<grammar_symbol>`. If found, both productions are
+ replaced with single production with `OPT <grammar_symbol>`
+
+ The current version handles a single USE_NT or ADD_OPT per EDIT.
+
+* `REPLACE` - (2 sequential productions) - removes `<oldprod>` and
+ inserts `<newprod>` in its place.
+
+```
+| REPLACE <oldprod>
+| WITH <newprod>
+```
+
+* (any other nonterminal name) - adds a new production (and possibly a new nonterminal)
+to the grammar.
+
+### `.rst` file updates
+
+`doc_grammar` updates `.rst` files when it sees the following 3 lines
+
+```
+.. insertgram <start> <end>
+.. productionlist:: XXX
+```
+
+The end of the existing `productionlist` is recognized by a blank line.
+
+### Other details
+
+The output identifies productions from plugins that aren't automatically loaded with
+`(* XXX plugin *)` in grammar files and with `(XXX plugin)` in productionlists.
+If desired, this mechanism could be extended to tag certain productions as deprecated,
+perhaps in conjunction with a coqpp change.
diff --git a/doc/tools/docgram/common.edit_mlg b/doc/tools/docgram/common.edit_mlg
new file mode 100644
index 0000000000..ea94e21ff3
--- /dev/null
+++ b/doc/tools/docgram/common.edit_mlg
@@ -0,0 +1,220 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+(* Defines additional productions and edits for use in documentation. Not compiled into Coq *)
+
+DOC_GRAMMAR
+
+(* additional nts to be spliced *)
+
+LEFTQMARK: [
+| "?"
+]
+
+SPLICE: [
+| LEFTQMARK
+]
+
+hyp: [
+| var
+]
+
+tactic_then_gen: [
+| EDIT ADD_OPT tactic_expr5 "|" tactic_then_gen
+| EDIT ADD_OPT tactic_expr5 ".." tactic_then_last
+]
+
+SPLICE: [
+| hyp
+| identref
+| pattern_ident (* depends on previous LEFTQMARK splice todo: improve *)
+| constr_eval (* splices as multiple prods *)
+| tactic_then_last (* todo: dependency on c.edit_mlg edit?? really useful? *)
+| Prim.name
+| ltac_selector
+| Constr.ident
+| tactic_then_locality (* todo: cleanup *)
+| attribute_list
+]
+
+RENAME: [
+ (* map missing names for rhs *)
+| _binders binders
+| Constr.constr term
+| Constr.constr_pattern constr_pattern
+| Constr.global global
+| Constr.lconstr lconstr
+| Constr.lconstr_pattern lconstr_pattern
+| G_vernac.query_command query_command
+| G_vernac.section_subset_expr section_subset_expr
+| nonsimple_intropattern intropattern
+| Pltac.tactic tactic
+| Pltac.tactic_expr ltac_expr
+| Prim.ident ident
+| Prim.reference reference
+| Pvernac.Vernac_.main_entry vernac_control
+| Tactic.tactic tactic
+| tactic3 ltac_expr3 (* todo: can't figure out how this gets mapped by coqpp *)
+| tactic1 ltac_expr1 (* todo: can't figure out how this gets mapped by coqpp *)
+| tactic0 ltac_expr0 (* todo: can't figure out how this gets mapped by coqpp *)
+| tactic_expr5 ltac_expr
+| tactic_expr4 ltac_expr4
+| tactic_expr3 ltac_expr3
+| tactic_expr2 ltac_expr2
+| tactic_expr1 ltac_expr1
+| tactic_expr0 ltac_expr0
+
+ (* elementary renaming/OCaml-defined productions *)
+| clause clause_dft_concl
+| in_clause' in_clause
+| l_constr lconstr (* todo: should delete the production *)
+
+ (* SSR *)
+| G_vernac.def_body def_body
+| Pcoq.Constr.constr term
+| Prim.by_notation by_notation
+| Prim.identref ident
+| Prim.natural natural
+| Vernac.rec_definition rec_definition
+
+ (* rename on lhs *)
+| intropatterns intropattern_list_opt
+| Constr.closed_binder closed_binder
+
+ (* historical name *)
+| constr term
+]
+
+DELETE: [
+| check_for_coloneq
+| impl_ident_head
+| local_test_lpar_id_colon
+| lookup_at_as_comma
+| only_starredidentrefs
+| test_bracket_ident
+| test_lpar_id_coloneq
+| test_lpar_id_rpar
+| test_lpar_idnum_coloneq
+| test_show_goal
+
+ (* SSR *)
+(* | ssr_null_entry *)
+| ssrtermkind (* todo: rename as "test..." *)
+| term_annotation (* todo: rename as "test..." *)
+| test_idcomma
+| test_nohidden
+| test_not_ssrslashnum
+| test_ssr_rw_syntax
+| test_ssreqid
+| test_ssrfwdid
+| test_ssrseqvar
+| test_ssrslashnum00
+| test_ssrslashnum01
+| test_ssrslashnum10
+| test_ssrslashnum11
+| test_ident_no_do
+| ssrdoarg (* todo: this and the next one should be removed from the grammar? *)
+| ssrseqdir
+]
+
+ident: [
+| DELETE IDENT ssr_null_entry
+]
+
+natural: [
+| DELETE _natural
+]
+
+
+ (* added productions *)
+
+empty: [ (* todo: (bug) this is getting converted to empty -> empty *)
+|
+]
+
+lpar_id_coloneq: [
+| "(" IDENT; ":="
+]
+
+name_colon: [
+| IDENT; ":"
+| "_" ":" (* todo: should "_" be a keyword or an identifier? *)
+]
+
+int: [ (* todo: probably should be NUMERAL *)
+| integer
+]
+
+command_entry: [
+| noedit_mode
+]
+
+binders: [
+| DELETE Pcoq.Constr.binders (* todo: not sure why there are 2 "binders:" *)
+]
+
+(* edits to simplify *)
+
+ltac_expr1: [
+| EDIT match_key ADD_OPT "reverse" "goal" "with" match_context_list "end"
+]
+
+match_context_list: [
+| EDIT ADD_OPT "|" LIST1 match_context_rule SEP "|"
+]
+
+match_hyps: [
+| REPLACE name ":=" "[" match_pattern "]" ":" match_pattern
+| WITH name ":=" OPT ("[" match_pattern "]" ":") match_pattern
+| DELETE name ":=" match_pattern
+]
+
+match_list: [
+| EDIT ADD_OPT "|" LIST1 match_rule SEP "|"
+]
+
+
+selector_body: [
+| REPLACE range_selector_or_nth (* depends on whether range_selector_or_nth is deleted first *)
+| WITH LIST1 range_selector SEP ","
+]
+
+range_selector_or_nth: [
+| DELETENT
+]
+
+simple_tactic: [
+| DELETE "intros"
+| REPLACE "intros" ne_intropatterns
+| WITH "intros" intropattern_list_opt
+| DELETE "eintros"
+| REPLACE "eintros" ne_intropatterns
+| WITH "eintros" intropattern_list_opt
+]
+
+intropattern_list_opt: [
+| DELETE LIST0 intropattern
+| intropattern_list_opt intropattern
+| empty
+]
+
+
+ne_intropatterns: [
+| DELETENT (* todo: don't use DELETENT for this *)
+]
+
+
+or_and_intropattern: [
+| DELETE "()"
+| DELETE "(" simple_intropattern ")"
+| REPLACE "(" simple_intropattern "," LIST1 simple_intropattern SEP "," ")"
+| WITH "(" LIST0 simple_intropattern SEP "," ")"
+| EDIT "[" USE_NT intropattern_or LIST1 intropattern_list_opt SEP "|" "]"
+]
diff --git a/doc/tools/docgram/doc_grammar.ml b/doc/tools/docgram/doc_grammar.ml
new file mode 100644
index 0000000000..eb86bab37e
--- /dev/null
+++ b/doc/tools/docgram/doc_grammar.ml
@@ -0,0 +1,1606 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+open Coqpp_parser
+open Coqpp_ast
+
+let exit_code = ref 0
+let show_warn = ref true
+
+let fprintf = Printf.fprintf
+
+let error s =
+ exit_code := 1;
+ Printf.eprintf "Error: ";
+ Printf.eprintf s
+
+(* todo: checking if !show_warn is true here gives a compilation error *)
+let warn s =
+ Printf.eprintf "Warning: ";
+ Printf.eprintf s
+
+type args = {
+ mlg_files : string list;
+ rst_files : string list;
+ fullGrammar : bool;
+ check_tacs : bool;
+ check_cmds : bool;
+ show_warn : bool;
+ verbose : bool;
+ verify : bool;
+}
+
+let default_args = {
+ mlg_files = [];
+ rst_files = [];
+ fullGrammar = false;
+ check_tacs = false;
+ check_cmds = false;
+ show_warn = true;
+ verbose = false;
+ verify = false;
+}
+
+(* translated symbols *)
+
+type doc_symbol =
+| Sterm of string
+| Snterm of string
+| Slist1 of doc_symbol
+| Slist1sep of doc_symbol * doc_symbol
+| Slist0 of doc_symbol
+| Slist0sep of doc_symbol * doc_symbol
+| Sopt of doc_symbol
+| Sparen of doc_symbol list (* for GRAMMAR EXTEND *)
+| Sprod of doc_symbol list list (* for GRAMMAR EXTEND *)
+ (* editing operations *)
+| Sedit of string
+| Sedit2 of string * string
+
+(* nonterminals to rename or delete *)
+module StringMap = Map.Make(String)
+module NTMap = StringMap
+module StringSet = Set.Make(String)
+
+type gram = {
+ (* map from nonterminal name to a list of prods *)
+ (* each production is a list of doc_symbol *)
+ map: doc_symbol list list NTMap.t;
+ (* specifies the order for nt's *)
+ order: string list;
+}
+
+module DocGram = struct
+ (* these guarantee that order and map have a 1-1 relationship
+ on the nt name. They don't guarantee that nts on rhs of a production
+ are defined, nor do they prohibit duplicate productions *)
+
+ exception Duplicate
+ exception Invalid
+
+ (* add an nt at the end (if not already present) then set its prods *)
+ let g_maybe_add g nt prods =
+ if not (NTMap.mem nt !g.map) then
+ g := { !g with order = !g.order @ [nt] };
+ g := { !g with map = NTMap.add nt prods !g.map }
+
+ (* add an nt at the beginning (if not already present) then set its prods *)
+ let g_maybe_add_begin g nt prods =
+ if not (NTMap.mem nt !g.map) then
+ g := { !g with order = nt :: !g.order };
+ g := { !g with map = NTMap.add nt prods !g.map }
+
+ (* reverse the order of the grammar *)
+ let g_reverse g =
+ g := { !g with order = List.rev !g.order }
+
+ (* update the productions of an existing nt *)
+ let g_update_prods g nt prods =
+ ignore (NTMap.find nt !g.map); (* don't add the nt if it's not present *)
+ g := { !g with map = NTMap.add nt prods !g.map }
+
+ (* remove a non-terminal *)
+ let g_remove g nt =
+ g := { map = NTMap.remove nt !g.map;
+ order = List.filter (fun elt -> elt <> nt) !g.order }
+
+ (* rename an nt and update its prods, keeping its original position.
+ If the new name already exists, include its prods *)
+ let g_rename_merge g nt nt' nprods =
+ let oprods =
+ try
+ let oprods = NTMap.find nt' !g.map in
+ g := { !g with order = List.filter (fun elt -> elt <> nt') !g.order };
+ oprods
+ with Not_found ->
+ g := { !g with map = NTMap.add nt' [] !g.map };
+ []
+ in
+ g := { map = NTMap.remove nt !g.map;
+ order = List.map (fun n -> if n = nt then nt' else n) !g.order };
+ g_update_prods g nt' (oprods @ nprods)
+
+ (* add a new nonterminal after "ins_after" None means insert at the beginning *)
+ let g_add_after g ins_after nt prods =
+ if NTMap.mem nt !g.map then raise Duplicate; (* don't update the nt if it's already present *)
+ let rec insert_nt order res =
+ match ins_after, order with
+ | None, _ -> nt :: order
+ | Some _, [] -> raise Not_found
+ | Some ins_after_nt, hd :: tl ->
+ if hd = ins_after_nt then
+ (List.rev res) @ (hd :: nt :: tl)
+ else
+ insert_nt tl (hd :: res)
+ in
+ g := { order = insert_nt !g.order [];
+ map = NTMap.add nt prods !g.map }
+
+ (* replace the map and order *)
+ let g_reorder g map order =
+ let order_nts = StringSet.of_list order in
+ let map_nts = List.fold_left (fun res b -> let (nt, _) = b in StringSet.add nt res)
+ StringSet.empty (NTMap.bindings map) in
+ if List.length order <> NTMap.cardinal map ||
+ not (StringSet.equal order_nts map_nts) then raise Invalid;
+ g := { order = order; map = map }
+
+end
+open DocGram
+
+(*** Print routines ***)
+
+let sprintf = Printf.sprintf
+
+let map_and_concat f ?(delim="") l =
+ String.concat delim (List.map f l)
+
+let rec db_output_prodn = function
+ | Sterm s -> sprintf "(Sterm %s) " s
+ | Snterm s -> sprintf "(Snterm %s) " s
+ | Slist1 sym -> sprintf "(Slist1 %s) " (db_output_prodn sym)
+ | Slist1sep (sym, sep) -> sprintf "(Slist1sep %s %s) " (db_output_prodn sep) (db_output_prodn sym)
+ | Slist0 sym -> sprintf "(Slist0 %s) " (db_output_prodn sym)
+ | Slist0sep (sym, sep) -> sprintf "(Slist0sep %s %s) " (db_output_prodn sep) (db_output_prodn sym)
+ | Sopt sym -> sprintf "(Sopt %s) " (db_output_prodn sym)
+ | Sparen prod -> sprintf "(Sparen %s) " (db_out_list prod)
+ | Sprod prods -> sprintf "(Sprod %s) " (db_out_prods prods)
+ | Sedit s -> sprintf "(Sedit %s) " s
+ | Sedit2 (s, s2) -> sprintf "(Sedit2 %s %s) " s s2
+and db_out_list prod = sprintf "(%s)" (map_and_concat db_output_prodn prod)
+and db_out_prods prods = sprintf "( %s )" (map_and_concat ~delim:" | " db_out_list prods)
+
+let rec output_prod plist need_semi = function
+ | Sterm s -> if plist then sprintf "%s" s else sprintf "\"%s\"" s
+ | Snterm s ->
+ if plist then sprintf "`%s`" s else
+ sprintf "%s%s" s (if s = "IDENT" && need_semi then ";" else "")
+ | Slist1 sym -> sprintf "LIST1 %s" (prod_to_str ~plist [sym])
+ | Slist1sep (sym, sep) -> sprintf "LIST1 %s SEP %s" (prod_to_str ~plist [sym]) (prod_to_str ~plist [sep])
+ | Slist0 sym -> sprintf "LIST0 %s" (prod_to_str ~plist [sym])
+ | Slist0sep (sym, sep) -> sprintf "LIST0 %s SEP %s" (prod_to_str ~plist [sym]) (prod_to_str ~plist [sep])
+ | Sopt sym -> sprintf "OPT %s" (prod_to_str ~plist [sym])
+ | Sparen sym_list -> sprintf "( %s )" (prod_to_str sym_list)
+ | Sprod sym_list ->
+ sprintf "[ %s ]" (String.concat " " (List.mapi (fun i r ->
+ let prod = (prod_to_str r) in
+ let sep = if i = 0 then "" else
+ if prod <> "" then "| " else "|" in
+ sprintf "%s%s" sep prod)
+ sym_list))
+ | Sedit s -> sprintf "%s" s
+ (* todo: make PLUGIN info output conditional on the set of prods? *)
+ | Sedit2 ("PLUGIN", plugin) ->
+ if plist then
+ sprintf " (%s plugin)" plugin
+ else
+ sprintf " (* %s plugin *)" plugin
+ | Sedit2 ("FILE", file) ->
+ let file_suffix_regex = Str.regexp ".*/\\([a-zA-Z0-9_\\.]+\\)" in
+ let suffix = if Str.string_match file_suffix_regex file 0 then Str.matched_group 1 file else file in
+ if plist then
+ sprintf " (%s)" suffix
+ else
+ sprintf " (* %s *)" suffix
+ | Sedit2 (s, s2) -> sprintf "%s \"%s\"" s s2
+
+and prod_to_str_r plist prod =
+ match prod with
+ | p :: tl ->
+ let need_semi =
+ match prod with
+ | Snterm "IDENT" :: Sterm _ :: _
+ | Snterm "IDENT" :: Sprod _ :: _ -> true
+ | _ -> false in
+ (output_prod plist need_semi p) :: (prod_to_str_r plist tl)
+ | [] -> []
+
+and prod_to_str ?(plist=false) prod =
+ String.concat " " (prod_to_str_r plist prod)
+
+
+let rec output_prodn = function
+ | Sterm s -> let s = if List.mem s ["{"; "{|"; "|"; "}"] then "%" ^ s else s in
+ sprintf "%s" s
+ | Snterm s -> sprintf "@%s" s
+ | Slist1 sym -> sprintf "{+ %s }" (output_prodn sym)
+ | Slist1sep (sym, sep) -> sprintf "{+%s %s }" (output_sep sep) (output_prodn sym)
+ | Slist0 sym -> sprintf "{* %s }" (output_prodn sym)
+ | Slist0sep (sym, sep) -> sprintf "{*%s %s }" (output_sep sep) (output_prodn sym)
+ | Sopt sym -> sprintf "{? %s }" (output_prodn sym)
+ | Sparen sym_list -> sprintf "%s" (prod_to_prodn sym_list)
+ | Sprod sym_list ->
+ let lcurly, rcurly = if List.length sym_list = 1 then "", "" else "{| ", " }" in
+ sprintf "%s%s%s"
+ lcurly
+ (String.concat " " (List.mapi (fun i r ->
+ let prod = (prod_to_prodn r) in
+ let sep = if i = 0 then "" else
+ if prod <> "" then "| " else "|" in
+ sprintf "%s%s" sep prod)
+ sym_list))
+ rcurly
+ | Sedit s -> sprintf "%s" s
+ | Sedit2 ("PLUGIN", s2) -> ""
+ | Sedit2 (s, s2) -> sprintf "%s \"%s\"" s s2
+
+and output_sep sep =
+ match sep with
+ | Sterm s -> sprintf "%s" s (* avoid escaping separator *)
+ | _ -> output_prodn sep
+
+and prod_to_prodn prod = String.concat " " (List.map output_prodn prod)
+
+type fmt = [`MLG | `PRODLIST | `PRODN ]
+
+(* print a subset of the grammar with nts in the specified order *)
+let print_in_order out g fmt nt_order hide =
+ List.iter (fun nt ->
+ if not (StringSet.mem nt hide) then
+ try
+ let prods = NTMap.find nt !g.map in
+ match fmt with
+ | `MLG ->
+ fprintf out "%s: [\n" nt;
+ List.iter (fun prod ->
+ let str = prod_to_str ~plist:false prod in
+ let pfx = if str = "" then "|" else "| " in
+ fprintf out "%s%s\n" pfx str)
+ prods;
+ fprintf out "]\n\n"
+ | `PRODLIST ->
+ fprintf out "%s :" nt;
+ List.iteri (fun i prod ->
+ if i > 0 then
+ fprintf out "%s :" (String.make (String.length nt) ' ');
+ let str = prod_to_str ~plist:true prod in
+ let pfx = if str = "" then "" else " " in
+ fprintf out "%s%s\n" pfx str)
+ prods;
+ | `PRODN ->
+ fprintf out "\n%s:\n" nt;
+ List.iter (fun prod ->
+ let str = prod_to_prodn prod in
+ let pfx = if str = "" then "" else " " in
+ fprintf out "%s%s\n" pfx str)
+ prods;
+ with Not_found -> error "Missing nt '%s' in print_in_order\n" nt)
+ nt_order
+
+
+(*** Read grammar routines ***)
+
+let cvt_ext prod =
+ let rec to_doc_sym = function
+ | Ulist1 sym -> Slist1 (to_doc_sym sym)
+ | Ulist1sep (sym, s) -> Slist1sep ((to_doc_sym sym), Sterm s)
+ | Ulist0 sym -> Slist0 (to_doc_sym sym)
+ | Ulist0sep (sym, s) -> Slist0sep ((to_doc_sym sym), Sterm s)
+ | Uopt sym -> Sopt (to_doc_sym sym)
+ | Uentry s -> Snterm s
+ | Uentryl (s, i) -> Snterm (s ^ (string_of_int i))
+ in
+ let from_ext = function
+ | ExtTerminal s -> Sterm s
+ | ExtNonTerminal (s, _) -> to_doc_sym s
+ in
+ List.map from_ext prod
+
(* Convert a parsed GRAMMAR EXTEND symbol into the doc symbol representation.
   Qualified names with a level suffix become a single nonterminal name
   (e.g. "constr" at "200" -> "constr200"). *)
let rec cvt_gram_sym = function
  | GSymbString s -> Sterm s
  | GSymbQualid (s, level) ->
    Snterm (match level with
        | Some str -> s ^ str
        | None -> s)
  | GSymbParen l -> Sparen (cvt_gram_sym_list l)
  | GSymbProd ll ->
    let cvt = List.map cvt_gram_prod ll in
    (* collapse a one-production, one-symbol alternative to the bare symbol *)
    (match cvt with
     | (Snterm x :: []) :: [] -> Snterm x
     | (Sterm x :: []) :: [] -> Sterm x
     | _ -> Sprod cvt)

(* Convert a list of grammar symbols, recognizing the multi-token
   LIST0/LIST1 [SEP]/OPT/IDENT combinators as well as the editing markers
   ADD_OPT/NOTE/USE_NT (the latter become Sedit/Sedit2 annotations). *)
and cvt_gram_sym_list l =
  let get_sym = function
    | GSymbQualid (s, level) -> s
    | _ -> ""
  in

  match l with
  | GSymbQualid ("LIST0", _) :: s :: GSymbQualid ("SEP", _) :: sep :: tl ->
    Slist0sep (cvt_gram_sym s, cvt_gram_sym sep) :: cvt_gram_sym_list tl
  | GSymbQualid ("LIST1", _) :: s :: GSymbQualid ("SEP", _) :: sep :: tl ->
    Slist1sep (cvt_gram_sym s, cvt_gram_sym sep) :: cvt_gram_sym_list tl
  | GSymbQualid ("LIST0", _) :: s :: tl ->
    Slist0 (cvt_gram_sym s) :: cvt_gram_sym_list tl
  | GSymbQualid ("LIST1", _) :: s :: tl ->
    Slist1 (cvt_gram_sym s) :: cvt_gram_sym_list tl
  | GSymbQualid ("OPT", _) :: s :: tl ->
    Sopt (cvt_gram_sym s) :: cvt_gram_sym_list tl
  (* IDENT applied to a non-qualid (get_sym = ""): drop the IDENT wrapper *)
  | GSymbQualid ("IDENT", _) :: s2 :: tl when get_sym s2 = "" ->
    cvt_gram_sym s2 :: cvt_gram_sym_list tl
  | GSymbQualid ("ADD_OPT", _) :: tl ->
    (Sedit "ADD_OPT") :: cvt_gram_sym_list tl
  | GSymbQualid ("NOTE", _) :: GSymbQualid (s2, l) :: tl ->
    (Sedit2 ("NOTE", s2)) :: cvt_gram_sym_list tl
  | GSymbQualid ("USE_NT", _) :: GSymbQualid (s2, l) :: tl ->
    (Sedit2 ("USE_NT", s2)) :: cvt_gram_sym_list tl
  | hd :: tl ->
    cvt_gram_sym hd :: cvt_gram_sym_list tl
  | [] -> []

(* Convert one grammar production: concatenate the conversions of its
   (possibly named) symbol groups. *)
and cvt_gram_prod p =
  List.concat (List.map (fun x -> let _, gs = x in cvt_gram_sym_list gs) p.gprod_symbs)
+
+
(* Record that nonterminal [nt] is defined in [file]: prepend the file's
   basename to the list of definition points kept in [symdef_map]. *)
let add_symdef nt file symdef_map =
  let ent =
    try
      StringMap.find nt !symdef_map
    with Not_found -> []
  in
  symdef_map := StringMap.add nt (Filename.basename file::ent) !symdef_map

(* Replace SELF/NEXT/self-name references in a production with explicit
   per-level nonterminal names, following camlp5 associativity rules:
   a leading SELF goes to the next level; a trailing SELF goes to the
   current level for right-associative rules, the next level otherwise;
   interior SELF references are left as-is; NEXT always becomes the next
   level. Recurses into Slist1/Slist0 elements. *)
let rec edit_SELF nt cur_level next_level right_assoc prod =
  let len = List.length prod in
  List.mapi (fun i sym ->
      match sym with
      | Snterm s -> begin match s with
          | s when s = nt || s = "SELF" ->
            if i = 0 then Snterm next_level
            else if i+1 < len then sym
            else if right_assoc then Snterm cur_level else Snterm next_level
          | "NEXT" -> Snterm next_level
          | _ -> sym
        end
      | Slist1 sym -> Slist1 (List.hd (edit_SELF nt cur_level next_level right_assoc [sym]))
      | Slist0 sym -> Slist0 (List.hd (edit_SELF nt cur_level next_level right_assoc [sym]))
      | x -> x)
    prod
+
+
(* .mlg files that Coq always loads; productions from any other .mlg get a
   "PLUGIN" annotation in read_mlg. *)
let autoloaded_mlgs = [ (* in the order they are loaded by Coq *)
  "parsing/g_constr.mlg";
  "parsing/g_prim.mlg";
  "vernac/g_vernac.mlg";
  "vernac/g_proofs.mlg";
  "toplevel/g_toplevel.mlg";
  "plugins/ltac/extraargs.mlg";
  "plugins/ltac/g_obligations.mlg";
  "plugins/ltac/coretactics.mlg";
  "plugins/ltac/extratactics.mlg";
  "plugins/ltac/profile_ltac_tactics.mlg";
  "plugins/ltac/g_auto.mlg";
  "plugins/ltac/g_class.mlg";
  "plugins/ltac/g_rewrite.mlg";
  "plugins/ltac/g_eqdecide.mlg";
  "plugins/ltac/g_tactic.mlg";
  "plugins/ltac/g_ltac.mlg";
  "plugins/syntax/g_string.mlg";
  "plugins/btauto/g_btauto.mlg";
  "plugins/rtauto/g_rtauto.mlg";
  "plugins/cc/g_congruence.mlg";
  "plugins/firstorder/g_ground.mlg";
  "plugins/syntax/g_numeral.mlg";
]
+
+
(* Compare production [prod] against edit production [edit] for structural
   equality, ignoring Sedit/Sedit2 annotation symbols on either side.
   List/option/paren/alternation symbols are compared recursively; all
   other symbols must be structurally equal. *)
let ematch prod edit =
  let rec ematchr prod edit =
    match (prod, edit) with
    (* annotation symbols never participate in matching *)
    | (_, Sedit _ :: tl)
    | (_, Sedit2 _ :: tl)
      -> ematchr prod tl
    | (Sedit _ :: tl, _)
    | (Sedit2 _ :: tl, _)
      -> ematchr tl edit
    | (phd :: ptl, hd :: tl) ->
      let m = match (phd, hd) with
        | (Slist1 psym, Slist1 sym)
        | (Slist0 psym, Slist0 sym)
        | (Sopt psym, Sopt sym)
          -> ematchr [psym] [sym]
        | (Slist1sep (psym, psep), Slist1sep (sym, sep))
        | (Slist0sep (psym, psep), Slist0sep (sym, sep))
          -> ematchr [psym] [sym] && ematchr [psep] [sep]
        | (Sparen psyml, Sparen syml)
          -> ematchr psyml syml
        | (Sprod psymll, Sprod symll)
          (* fix: alternation groups of different arity are simply unequal;
             the previous List.map2-based comparison raised Invalid_argument
             on them.  List.for_all2 also short-circuits on first mismatch. *)
          -> List.length psymll = List.length symll
             && List.for_all2 ematchr psymll symll
        | _, _ -> phd = hd
      in
      m && ematchr ptl tl
    | ([], _ :: _) -> false
    | (_ :: _, []) -> false
    | ([], []) -> true
  in
  ematchr prod edit

(* true if [p] matches (ematch) any production in [prods] *)
let has_match p prods = List.exists (fun p2 -> ematch p p2) prods
+
(* matches "plugins/<name>/..." paths; group 1 is the plugin name *)
let plugin_regex = Str.regexp "^plugins/\\([a-zA-Z0-9_]+\\)/"

(* Extract (nonterminal, productions) pairs from the parsed AST of one .mlg
   file.  [is_edit] is true when reading an edit file (no symdef recording,
   no PLUGIN tagging).  Records grammar-level renames in [level_renames]
   and definition points in [symdef_map].  Returns the rules in file order. *)
let read_mlg is_edit ast file level_renames symdef_map =
  let res = ref [] in
  let add_prods nt prods =
    if not is_edit then
      add_symdef nt file symdef_map;
    (* tag productions from optionally-loaded plugin files with the plugin name *)
    let prods = if not is_edit &&
                   not (List.mem file autoloaded_mlgs) &&
                   Str.string_match plugin_regex file 0 then
        let plugin = Str.matched_group 1 file in
        List.map (fun p -> p @ [Sedit2 ("PLUGIN", plugin)]) prods
      else
        prods
    in
    (* todo: doesn't yet work perfectly with SPLICE *)
(*  let prods = if not is_edit then List.map (fun p -> p @ [Sedit2 ("FILE", file)]) prods else prods in*)
    res := (nt, prods) :: !res
  in
  let prod_loop = function
    | GramExt grammar_ext ->
      let get_label = function
        | Some s -> s
        | None -> ""
      in
      List.iter (fun ent ->
          let len = List.length ent.gentry_rules in
          List.iteri (fun i rule ->
              let nt = ent.gentry_name in
              let level = (get_label rule.grule_label) in
              (* fall back to the entry's position directive for the level name *)
              let level = if level <> "" then level else
                  match ent.gentry_pos with
                  | Some Level lev
                  | Some Before lev
                  | Some After lev
                    -> lev
                  (* Looks like FIRST/LAST can be ignored for documenting the current grammar *)
                  | _ -> "" in
              let cur_level = nt ^ level in
              let next_level = nt ^
                if i+1 < len then (get_label (List.nth ent.gentry_rules (i+1)).grule_label) else "" in
              let right_assoc = (rule.grule_assoc = Some RightA) in

              (* remember that references to [nt] should go to its first level *)
              if i = 0 && cur_level <> nt && not (StringMap.mem nt !level_renames) then begin
                level_renames := StringMap.add nt cur_level !level_renames;
              end;
              let cvted = List.map cvt_gram_prod rule.grule_prods in
              (* edit names for levels *)
              (* See https://camlp5.github.io/doc/html/grammars.html#b:Associativity *)
              let edited = List.map (edit_SELF nt cur_level next_level right_assoc) cvted in
              (* each non-last level falls through to the next level *)
              let prods_to_add =
                if cur_level <> nt && i+1 < len then
                  edited @ [[Snterm next_level]]
                else
                  edited in
              add_prods cur_level prods_to_add)
            ent.gentry_rules
        ) grammar_ext.gramext_entries

    | VernacExt vernac_ext ->
      (* productions go under the declared entry, defaulting to "command" *)
      let node = match vernac_ext.vernacext_entry with
        | None -> "command"
        | Some c -> String.trim c.code
      in
      add_prods node
        (List.map (fun r -> cvt_ext r.vernac_toks) vernac_ext.vernacext_rules)
    | VernacArgumentExt vernac_argument_ext ->
      add_prods vernac_argument_ext.vernacargext_name
        (List.map (fun r -> cvt_ext r.tac_toks) vernac_argument_ext.vernacargext_rules)
    | TacticExt tactic_ext ->
      (* all TACTIC EXTEND rules are documented under "simple_tactic" *)
      add_prods "simple_tactic"
        (List.map (fun r -> cvt_ext r.tac_toks) tactic_ext.tacext_rules)
    | ArgumentExt argument_ext ->
      add_prods argument_ext.argext_name
        (List.map (fun r -> cvt_ext r.tac_toks) argument_ext.argext_rules)
    | _ -> ()
  in

  List.iter prod_loop ast;
  List.rev !res
+
+let dir s = "doc/tools/docgram/" ^ s
+
(* Parse an edit file located in the docgram directory and return its rules.
   Level renames and symbol definition points are collected but discarded. *)
let read_mlg_edit file =
  let fdir = dir file in
  let level_renames = ref StringMap.empty in (* ignored *)
  let symdef_map = ref StringMap.empty in (* ignored *)
  read_mlg true (parse_file fdir) fdir level_renames symdef_map
+
(* Append [prods] to nonterminal [nt] in grammar [g], skipping (and warning
   about) productions that duplicate an existing one for [nt]. *)
let add_rule g nt prods file =
  let ent = try NTMap.find nt !g.map with Not_found -> [] in
  let nodups = List.concat (List.map (fun prod ->
      if has_match prod ent then begin
        if !show_warn then
          warn "%s: Duplicate production '%s: %s'\n" file nt (prod_to_str prod);
        []
      end else
        [prod])
      prods) in
  g_maybe_add_begin g nt (ent @ nodups)
+
(* Read every .mlg file named in [args.mlg_files] into grammar [g],
   accumulating symbol definition points in [symdef_map].  Returns the map
   of entry-name -> first-level-name renames collected while reading. *)
let read_mlg_files g args symdef_map =
  let level_renames = ref StringMap.empty in
  let last_autoloaded = List.hd (List.rev autoloaded_mlgs) in
  List.iter (fun file ->
      (* does nt renaming, deletion and splicing *)
      let rules = read_mlg false (parse_file file) file level_renames symdef_map in
      let numprods = List.fold_left (fun num rule ->
          let nt, prods = rule in
          add_rule g nt prods file;
          num + List.length prods)
          0 rules
      in
      if args.verbose then begin
        Printf.eprintf "%s: %d nts, %d prods\n" file (List.length rules) numprods;
        (* after the last always-loaded file, subsequent files are plugins *)
        if file = last_autoloaded then
          Printf.eprintf " Optionally loaded plugins:\n"
      end
    ) args.mlg_files;
  g_reverse g;
  !level_renames
+
+
+(*** global editing ops ***)
+
(* Build a StringMap from an association list of (key, binding) edit pairs;
   a later pair for the same key overrides an earlier one. *)
let create_edit_map edits =
  List.fold_left
    (fun map (key, binding) -> StringMap.add key binding map)
    StringMap.empty edits
+
+(* edit a production: rename nonterminals, drop nonterminals, substitute nonterminals *)
(* edit a production: rename nonterminals, drop nonterminals, substitute nonterminals *)
(* [edit_map] binds a nonterminal name to "DELETE", "SPLICE" or a new name.
   Returns the list of edited productions (usually a singleton; a whole-
   production splice at top level can yield several). *)
let rec edit_prod g top edit_map prod =
  (* expand one nonterminal reference per its binding in [edit_map];
     [sym0] is returned unchanged when there is no binding *)
  let edit_nt edit_map sym0 nt =
    try
      let binding = StringMap.find nt edit_map in
      match binding with
      | "DELETE" -> []
      | "SPLICE" ->
        begin
          try let splice_prods = NTMap.find nt !g.map in
            match splice_prods with
            | [] -> assert false
            | [p] -> List.rev p   (* reversed here; undone by the final List.rev below *)
            | _ -> [Sprod splice_prods]
          with Not_found -> error "Missing nt '%s' for splice\n" nt; [Snterm nt]
        end
      | _ -> [Snterm binding]
    with Not_found -> [sym0]
  in

  let rec edit_symbol sym0 =
    match sym0 with
    | Sterm s -> [sym0]
    | Snterm s -> edit_nt edit_map sym0 s
    | Slist1 sym -> [Slist1 (List.hd (edit_symbol sym))]
    (* you'll get a run-time failure deleting a SEP symbol *)
    | Slist1sep (sym, sep) -> [Slist1sep (List.hd (edit_symbol sym), (List.hd (edit_symbol sep)))]
    | Slist0 sym -> [Slist0 (List.hd (edit_symbol sym))]
    | Slist0sep (sym, sep) -> [Slist0sep (List.hd (edit_symbol sym), (List.hd (edit_symbol sep)))]
    | Sopt sym -> [Sopt (List.hd (edit_symbol sym))]
    | Sparen slist -> [Sparen (List.hd (edit_prod g false edit_map slist))]
    | Sprod slistlist -> let (_, prods) = edit_rule g edit_map "" slistlist in
      [Sprod prods]
    | Sedit _
    | Sedit2 _ -> [sym0] (* these constructors not used here *)
  in
  let is_splice nt =
    try
      StringMap.find nt edit_map = "SPLICE"
    with Not_found -> false
  in
  let get_splice_prods nt =
    try NTMap.find nt !g.map
    with Not_found -> (error "Missing nt '%s' for splice\n" nt; [])
  in

  (* special case splice creating multiple new productions *)
  let splice_prods = match prod with
    | Snterm nt :: [] when is_splice nt ->
      get_splice_prods nt
    | _ -> []
  in
  if top && splice_prods <> [] then
    splice_prods
  else
    [List.rev (List.concat (List.rev (List.map (fun sym -> edit_symbol sym) prod)))]

(* Apply [edit_map] to a whole rule: possibly rename or delete the
   nonterminal itself (deleted rules get the name ""), then edit each of
   its productions.  Returns (new name, edited productions). *)
and edit_rule g edit_map nt rule =
  let nt =
    try let new_name = StringMap.find nt edit_map in
      match new_name with
      | "SPLICE" -> nt
      | "DELETE" -> ""
      | _ -> new_name
    with Not_found -> nt
  in
  (nt, (List.concat (List.map (edit_prod g true edit_map) rule)))
+
+(*** splice: replace a reference to a nonterminal with its definition ***)
+
+(* todo: create a better splice routine, handle recursive case *)
(* todo: create a better splice routine, handle recursive case *)
(* Apply a map of DELETE/SPLICE operations to every rule in [g]: first
   verify the named nonterminals exist, then edit every rule through
   [edit_rule], and finally remove the spliced/deleted nonterminals. *)
let apply_splice g splice_map =
  StringMap.iter (fun nt b ->
      if not (NTMap.mem nt !g.map) then
        error "Unknown nt '%s' for apply_splice\n" nt)
    splice_map;
  List.iter (fun b ->
      let (nt, prods) = b in
      let (nt', prods) = edit_rule g splice_map nt prods in
      g_update_prods g nt' prods)
    (NTMap.bindings !g.map);
  List.iter (fun b ->
      let (nt, op) = b in
      match op with
      | "DELETE"
      | "SPLICE" ->
        g_remove g nt;
      | _ -> ())
    (StringMap.bindings splice_map)
+
(* Index of the first production in [prods] matching [edit].
   Reports an error and raises Not_found when there is no match. *)
let find_first edit prods nt =
  let rec scan i = function
    | [] ->
      error "Can't find '%s' in REPLACE for '%s'\n" (prod_to_str edit) nt;
      raise Not_found
    | p :: rest -> if ematch p edit then i else scan (i + 1) rest
  in
  scan 0 prods
+
(* Remove the first production in [prods] matching [edit], preserving the
   order of the rest.  Reports an error (and returns the list unchanged)
   when no production matches. *)
let remove_prod edit prods nt =
  let rec drop acc = function
    | [] ->
      error "Can't find '%s' to DELETE for '%s'\n" (prod_to_str edit) nt;
      List.rev acc
    | p :: rest ->
      if ematch p edit then List.rev_append acc rest
      else drop (p :: acc) rest
  in
  drop [] prods
+
(* Insert the productions [insert] immediately after position [posn] in
   [prods]; an out-of-range [posn] leaves the list unchanged. *)
let insert_after posn insert prods =
  let rec go i = function
    | [] -> []
    | p :: tl ->
      if i = posn then p :: (insert @ go (i + 1) tl)
      else p :: go (i + 1) tl
  in
  go 0 prods
+
+(*** replace LIST*, OPT with new nonterminals ***)
+
+(* generate a non-terminal name for a replacement *)
(* generate a non-terminal name for a replacement *)
(* identifier-shaped names (letters/digits/underscore/dot) *)
let nt_regex = Str.regexp "^[a-zA-Z_][a-zA-Z0-9_\\.]*$"
(* [name] if it is identifier-shaped, otherwise "" *)
let good_name name = if Str.string_match nt_regex name 0 then name else ""
(* Map a handful of punctuation terminals to word names usable in a
   generated nonterminal; anything still not identifier-shaped maps to "". *)
let map_name s =
  let s = match s with
    | "|" -> "or"
    | "!" -> "exclam"
    | ">" -> "gt"
    | "<" -> "lt"
    | "+" -> "plus"
    | "?" -> "qmark"
    | "}" -> "rbrace"
    | "," -> "comma"
    | ";" -> "semi"
    | _ -> s
  in
  good_name s
+
(* Generate a candidate nonterminal name for [sym], used when replacing
   LIST*/OPT/alternatives with fresh nonterminals.  Prefers the first
   identifier-shaped name found; falls back to a mapped punctuation name.
   Returns "" when no usable name can be derived. *)
let rec gen_nt_name sym =
  (* derive a name from the symbols of a production: first good Sterm/Snterm
     name wins; [sterm_name] remembers the last mapped-punctuation fallback *)
  let name_from_prod prod =
    let rec aux name sterm_name prod =
      if name <> "" then name else
        match prod with
        | [] -> sterm_name
        | Sterm s :: tl
        | Snterm s :: tl ->
          if good_name s <> "" then
            aux (map_name s) sterm_name tl
          else
            aux name (map_name s) tl;
        | sym :: tl->
          aux (gen_nt_name sym) sterm_name tl
    in
    aux "" "" prod
  in

  let name = match sym with
    | Sterm s -> map_name s
    | Snterm s -> s
    | Slist1 sym
    | Slist1sep (sym, _)
    | Slist0 sym
    | Slist0sep (sym, _)
    | Sopt sym ->
      gen_nt_name sym
    | Sparen slist ->
      name_from_prod slist
    | Sprod slistlist ->
      (* name after the first alternative only *)
      name_from_prod (List.hd slistlist)
    | _ -> ""
  in
  good_name name
+
+(* create a new nt for LIST* or OPT with the specified name *)
(* create a new nt for LIST* or OPT with the specified name *)
(* Either reuse an existing nonterminal whose definition is identical, or
   create a fresh one (suffix _list/_opt/_alt etc., with a numeric suffix to
   avoid collisions).  New nonterminals are placed after [!insert_after] in
   the grammar order and queued for their own expansion pass.  Slist0sep
   gets two nts: a Slist1sep-style base plus an _opt wrapper allowing empty. *)
let rec maybe_add_nt g insert_after name sym queue =
  let empty = [Snterm "empty"] in
  (* unwrap a single-production Sprod or a Sparen to its production(s) *)
  let maybe_unwrap ?(multi=false) sym =
    match sym with
    | Sprod slist when List.length slist = 1 || multi
      -> slist
    | Sparen slist -> [ slist ]
    | _ -> [ [sym] ]
  in
  let unw sym = List.hd (maybe_unwrap sym) in
  (* the (left-recursive) productions defining the new nonterminal [nt] *)
  let get_prods nt =
    match sym with
    | Slist1 sym -> let sym' = unw sym in
      [ [Snterm nt] @ sym'; sym' ]
    | Slist1sep (sym, sep)
    | Slist0sep (sym, sep) -> let sym' = unw sym in
      [ [Snterm nt; sep] @ sym'; sym' ]
    | Slist0 sym -> [ [Snterm nt] @ (unw sym); empty ]
    | Sopt sym -> (maybe_unwrap ~multi:true sym) @ [ empty ]
    | Sprod slistlist -> slistlist
    | _ -> []
  in

  let is_slist0sep sym =
    match sym with
    | Slist0sep _ -> true
    | _ -> false
  in

  (* find an existing nt with an identical definition, or generate an unused nt name *)
  let rec find_name nt i =
    let trial_name = sprintf "%s%s" nt (if i = 1 then "" else string_of_int i) in
    try
      if NTMap.find trial_name !g.map = get_prods trial_name then
        trial_name
      else
        find_name nt (succ i)
    with Not_found -> trial_name
  in
  (* "_list" suffix, extended with the separator's word name when it has one *)
  let list_name sep =
    match sep with
    | Sterm s ->
      let name = map_name s in
      if name = s then "_list" else "_list_" ^ name
    | _ -> "_list"
  in
  let nt = name ^ match sym with
    | Slist1 sym -> "_list"
    | Slist1sep (sym, sep) -> list_name sep
    | Slist0 sym -> "_list_opt"
    | Slist0sep (sym, sep) -> list_name sep (* special handling *)
    | Sopt sym -> "_opt"
    | Sprod slistlist -> "_alt"
    | _ -> (error "Invalid symbol for USE_NT for nt '%s'\n" name; "ERROR")
  in
  let base_nt = find_name nt 1 in
  let new_nt = if is_slist0sep sym then base_nt ^ "_opt" else base_nt in
  if not (NTMap.mem new_nt !g.map) then begin
    let prods = if is_slist0sep sym then [ [Snterm base_nt]; empty ] else get_prods base_nt in
    g_add_after g (Some !insert_after) new_nt prods;
    insert_after := new_nt;
    Queue.add new_nt queue
  end;
  (* for Slist0sep, also define the non-empty base list if not present *)
  if is_slist0sep sym && not (NTMap.mem base_nt !g.map) then begin
    match sym with
    | Slist0sep (sym, sep) ->
      let prods = get_prods base_nt in
      g_add_after g (Some !insert_after) base_nt prods;
      insert_after := base_nt;
      Queue.add base_nt queue
    | _ -> ()
  end;
  new_nt

(* expand LIST*, OPT and add "empty" *)
(* todo: doesn't handle recursive expansions well, such as syntax_modifier_opt *)
(* Rewrite every LIST*/OPT/alternative symbol in the rule for [edited_nt]
   into a reference to a (possibly new) nonterminal; newly created nts are
   pushed on [queue] so they get expanded too. *)
and expand_rule g edited_nt queue =
  let rule = NTMap.find edited_nt !g.map in
  let insert_after = ref edited_nt in
  let rec expand rule =
    let rec aux syms res =
      match syms with
      | [] -> res
      | sym0 :: tl ->
        let new_sym = match sym0 with
          | Sterm _
          | Snterm _ ->
            sym0
          | Slist1 sym
          | Slist1sep (sym, _)
          | Slist0 sym
          | Slist0sep (sym, _)
          | Sopt sym ->
            let name = gen_nt_name sym in
            if name <> "" then begin
              let new_nt = maybe_add_nt g insert_after name sym0 queue in
              Snterm new_nt
            end else sym0
          | Sparen slist -> Sparen (expand slist)
          | Sprod slistlist ->
            (* an alternative ending in an empty production is really an OPT *)
            let has_empty = List.length (List.hd (List.rev slistlist)) = 0 in
            let name = gen_nt_name sym0 in
            if name <> "" then begin
              let new_nt = maybe_add_nt g insert_after name
                  (if has_empty then (Sopt (Sprod (List.rev (List.tl (List.rev slistlist))) ))
                   else sym0) queue
              in
              Snterm new_nt
            end else
              Sprod (List.map expand slistlist)
          | Sedit _
          | Sedit2 _ ->
            sym0 (* these constructors not used here *)
        in
        aux tl (new_sym :: res)
    in
    (* an empty production is replaced by an explicit "empty" symbol *)
    List.rev (aux rule (if edited_nt <> "empty" && ematch rule [] then [Snterm "empty"] else []))
  in
  let rule' = List.map expand rule in
  g_update_prods g edited_nt rule'
+
(* Expand LIST*/OPT symbols across the whole grammar: seed a work queue with
   every nonterminal, then expand until no work remains (expansions may
   enqueue newly created nonterminals). *)
let expand_lists g =
  (* todo: use Queue.of_seq w OCaml 4.07+ *)
  let queue = Queue.create () in
  List.iter (fun nt -> Queue.add nt queue) !g.order;
  let rec drain () =
    match Queue.pop queue with
    | nt -> expand_rule g nt queue; drain ()
    | exception Queue.Empty -> ()
  in
  drain ()
+
(* Handle a grammar-wide edit operation [op] (RENAME/DELETE/SPLICE/EXPAND)
   whose arguments are given as productions [eprods].  Returns true when
   [op] was one of these operations, false otherwise. *)
let edit_all_prods g op eprods =
  (* [num] is the required arity: 2 for RENAME (old new), 1 otherwise *)
  let do_it op eprods num =
    let rec aux eprods res =
      match eprods with
      | [] -> res
      | [Snterm old_nt; Snterm new_nt] :: tl when num = 2 ->
        aux tl ((old_nt, new_nt) :: res)
      | [Snterm old_nt] :: tl when num = 1 ->
        aux tl ((old_nt, op) :: res)
      | eprod :: tl ->
        error "Production '%s: %s' must have only %d nonterminal(s)\n"
          op (prod_to_str eprod) num;
        aux tl res
    in
    let map = create_edit_map (aux eprods []) in
    if op = "SPLICE" then
      apply_splice g map
    else (* RENAME/DELETE *)
      List.iter (fun b -> let (nt, _) = b in
                  let prods = try NTMap.find nt !g.map with Not_found -> [] in
                  let (nt', prods') = edit_rule g map nt prods in
                  if nt' = "" then
                    g_remove g nt
                  else if nt <> nt' then
                    g_rename_merge g nt nt' prods'
                  else
                    g_update_prods g nt prods')
        (NTMap.bindings !g.map);
  in
  match op with
  | "RENAME" -> do_it op eprods 2; true
  | "DELETE" -> do_it op eprods 1; true
  | "SPLICE" -> do_it op eprods 1; true
  | "EXPAND" ->
    if List.length eprods > 1 || List.length (List.hd eprods) <> 0 then
      error "'EXPAND:' expects a single empty production\n";
    expand_lists g; true
  | _ -> false
+
(* Apply the in-production edit markers of [edit0] (ADD_OPT, USE_NT) to the
   matching production in [prods]: ADD_OPT wraps the following symbol in
   Sopt; USE_NT replaces the following symbol with a new nonterminal.
   [seen] accumulates (reversed) the symbols already scanned. *)
let edit_single_prod g edit0 prods nt =
  let rec edit_single_prod_r edit prods nt seen =
    match edit with
    | [] -> prods
    | Sedit "ADD_OPT" :: sym :: tl ->
      (* replace both the with-sym and without-sym variants by one Sopt form *)
      let prods' = (try
                      let pfx = List.rev seen in
                      let posn = find_first edit0 prods nt in
                      let prods = insert_after posn [pfx @ (Sopt sym :: tl)] prods in
                      let prods = remove_prod (pfx @ (sym :: tl)) prods nt in
                      remove_prod (pfx @ tl) prods nt
                    with Not_found -> prods) in
      edit_single_prod_r tl prods' nt seen
    | Sedit "ADD_OPT" :: [] -> error "Bad position for ADD_OPT\n"; prods
    | Sedit2 ("USE_NT", name) :: sym :: tl ->
      let prods' = (try
                      let nt = maybe_add_nt g (ref nt) name sym (Queue.create ()) in
                      let pfx = List.rev seen in
                      let posn = find_first edit0 prods nt in
                      let prods = insert_after posn [pfx @ (Snterm nt :: tl)] prods in
                      remove_prod (pfx @ (sym :: tl)) prods nt
                    with Not_found -> prods) in
      edit_single_prod_r tl prods' nt seen
    | Sedit2 ("USE_NT", _) :: [] -> error "Bad position for USE_NT\n"; prods
    | sym :: tl ->
      edit_single_prod_r tl prods nt (sym :: seen)
  in
  edit_single_prod_r edit0 prods nt []
+
(* Apply the rules of an edit file to grammar [g].  Each (nt, eprod) is
   either a grammar-wide op (handled by edit_all_prods) or per-production
   commands: DELETE, DELETENT, EDIT, REPLACE...WITH, or a plain production
   to append.  [add_nt] tracks whether the (possibly new) nt should be kept. *)
let apply_edit_file g edits =
  List.iter (fun b ->
      let (nt, eprod) = b in
      if not (edit_all_prods g nt eprod) then begin
        let rec aux eprod prods add_nt =
          match eprod with
          | [] -> prods, add_nt
          | (Snterm "DELETE" :: oprod) :: tl ->
            aux tl (remove_prod oprod prods nt) add_nt
          | (Snterm "DELETENT" :: _) :: tl -> (* note this doesn't remove references *)
            g_remove g nt;
            aux tl prods false
          | (Snterm "EDIT" :: oprod) :: tl ->
            aux tl (edit_single_prod g oprod prods nt) add_nt
          | (Snterm "REPLACE" :: oprod) :: (Snterm "WITH" :: rprod) :: tl ->
            let prods' = (try
                            let posn = find_first oprod prods nt in
                            let prods = insert_after posn [rprod] prods in (* insert new prod *)
                            remove_prod oprod prods nt (* remove orig prod *)
                          with Not_found -> prods)
            in
            aux tl prods' add_nt
          | (Snterm "REPLACE" :: _ as eprod) :: tl ->
            error "Missing WITH after '%s' in '%s'\n" (prod_to_str eprod) nt;
            aux tl prods add_nt
          | prod :: tl ->
            (* add a production *)
            if has_match prod prods then
              error "Duplicate production '%s' for %s\n" (prod_to_str prod) nt;
            aux tl (prods @ [prod]) add_nt
        in
        let prods, add_nt =
          aux eprod (try NTMap.find nt !g.map with Not_found -> []) true in
        if add_nt then
          g_maybe_add g nt prods
      end)
    edits
+
+
+(*** main routines ***)
+
+ (* get the nt's in the production, preserving order, don't worry about dups *)
+ let nts_in_prod prod =
+ let rec traverse = function
+ | Sterm s -> []
+ | Snterm s -> [s]
+ | Slist1 sym
+ | Slist0 sym
+ | Sopt sym
+ -> traverse sym
+ | Slist1sep (sym, sep)
+ | Slist0sep (sym, sep)
+ -> traverse sym @ (traverse sep)
+ | Sparen sym_list -> List.concat (List.map traverse sym_list)
+ | Sprod sym_list_list -> List.concat (List.map (fun l -> List.concat (List.map traverse l)) sym_list_list)
+ | Sedit _
+ | Sedit2 _ -> []
+ in
+ List.rev (List.concat (List.map traverse prod))
+
+ (* get the special tokens in the grammar *)
(* get the special tokens in the grammar *)
(* Collect every terminal that does not start with an ASCII letter (i.e.
   punctuation/operator tokens) and print them on stdout. *)
let print_special_tokens g =
  let rec traverse set = function
    | Sterm s ->
      let c = s.[0] in
      if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') then set
      else StringSet.add s set
    | Snterm s -> set
    | Slist1 sym
    | Slist0 sym
    | Sopt sym
      -> traverse set sym
    | Slist1sep (sym, sep)
    | Slist0sep (sym, sep)
      -> traverse (traverse set sym) sep
    | Sparen sym_list -> traverse_prod set sym_list
    | Sprod sym_list_list -> traverse_prods set sym_list_list
    | Sedit _
    | Sedit2 _ -> set
  and traverse_prod set prod = List.fold_left traverse set prod
  and traverse_prods set prods = List.fold_left traverse_prod set prods
  in
  let spec_toks = List.fold_left (fun set b ->
      let nt, prods = b in
      traverse_prods set prods)
      StringSet.empty (NTMap.bindings !g.map)
  in
  Printf.printf "Special tokens:";
  StringSet.iter (fun t -> Printf.printf " %s" t) spec_toks;
  Printf.printf "\n\n"
+
+(* get the transitive closure of a non-terminal excluding "stops" symbols.
+ Preserve ordering to the extent possible *)
+(* todo: at the moment, the code doesn't use the ordering; consider switching to using
+sets instead of lists *)
(* get the transitive closure of a non-terminal excluding "stops" symbols.
   Preserve ordering to the extent possible *)
(* todo: at the moment, the code doesn't use the ordering; consider switching to using
sets instead of lists *)
let nt_closure g start stops =
  let stop_set = StringSet.of_list stops in
  let rec nt_closure_r res todo =
    match todo with
    | [] -> res
    | nt :: tl ->
      if List.mem nt res || StringSet.mem nt stop_set then
        nt_closure_r res tl
      else begin
        (* enqueue all nonterminals referenced by nt's productions *)
        let more_to_do =
          try
            let prods = NTMap.find nt !g.map in
            tl @ (List.concat (List.map nts_in_prod prods))
          with Not_found -> tl in
        nt_closure_r (nt :: res) more_to_do
      end
  in
  List.rev (nt_closure_r [] [start])

(* separator line used in the chunked grammar listings *)
let header = "--------------------------------------------"

(* Restrict [nts] to its members, in the grammar's original definition order. *)
let nt_subset_in_orig_order g nts =
  let subset = StringSet.of_list nts in
  List.filter (fun nt -> StringSet.mem nt subset) !g.order
+
(* Print one titled section: for each start symbol, print (in original
   grammar order) the closure of that symbol up to the [ends] stop symbols,
   skipping nonterminals already in [seen] and adding the new ones to it. *)
let print_chunk out g seen fmt title starts ends =
  fprintf out "\n\n%s:\n%s\n" title header;
  List.iter (fun start ->
      let nts = (nt_closure g start ends) in
      print_in_order out g fmt (nt_subset_in_orig_order g nts) !seen;
      seen := StringSet.union !seen (StringSet.of_list nts))
    starts

(* Print the whole grammar as a fixed sequence of documentation sections. *)
let print_chunks g out fmt () =
  let seen = ref StringSet.empty in
  print_chunk out g seen fmt "lconstr" ["lconstr"] ["binder_constr"; "tactic_expr5"];
  print_chunk out g seen fmt "Gallina syntax of terms" ["binder_constr"] ["tactic_expr5"];
  print_chunk out g seen fmt "Gallina The Vernacular" ["gallina"] ["tactic_expr5"];
  print_chunk out g seen fmt "intropattern_list_opt" ["intropattern_list"; "or_and_intropattern_loc"] ["operconstr"; "tactic_expr5"];
  print_chunk out g seen fmt "simple_tactic" ["simple_tactic"]
    ["tactic_expr5"; "tactic_expr3"; "tactic_expr2"; "tactic_expr1"; "tactic_expr0"];

  (*print_chunk out g seen fmt "Ltac" ["tactic_expr5"] [];*)
  print_chunk out g seen fmt "Ltac" ["tactic_expr5"] ["tactic_expr4"];
  print_chunk out g seen fmt "Ltac 4" ["tactic_expr4"] ["tactic_expr3"; "tactic_expr2"];
  print_chunk out g seen fmt "Ltac 3" ["tactic_expr3"] ["tactic_expr2"];
  print_chunk out g seen fmt "Ltac 2" ["tactic_expr2"] ["tactic_expr1"];
  print_chunk out g seen fmt "Ltac 1" ["tactic_expr1"] ["tactic_expr0"];
  print_chunk out g seen fmt "Ltac 0" ["tactic_expr0"] [];


  print_chunk out g seen fmt "command" ["command"] [];
  print_chunk out g seen fmt "vernac_toplevel" ["vernac_toplevel"] [];
  print_chunk out g seen fmt "vernac_control" ["vernac_control"] []

  (*
  let ssr_tops = ["ssr_dthen"; "ssr_else"; "ssr_mpat"; "ssr_rtype"] in
  seen := StringSet.union !seen (StringSet.of_list ssr_tops);

  print_chunk out g seen fmt "ssrindex" ["ssrindex"] [];
  print_chunk out g seen fmt "command" ["command"] [];
  print_chunk out g seen fmt "binder_constr" ["binder_constr"] [];
  (*print_chunk out g seen fmt "closed_binder" ["closed_binder"] [];*)
  print_chunk out g seen fmt "gallina_ext" ["gallina_ext"] [];
  (*print_chunk out g seen fmt "hloc" ["hloc"] [];*)
  (*print_chunk out g seen fmt "hypident" ["hypident"] [];*)
  print_chunk out g seen fmt "simple_tactic" ["simple_tactic"] [];
  print_chunk out g seen fmt "tactic_expr" ["tactic_expr4"; "tactic_expr1"; "tactic_expr0"] [];
  fprintf out "\n\nRemainder:\n";
  print_in_order g (List.filter (fun x -> not (StringSet.mem x !seen)) !g.order) StringSet.empty;
  *)


  (*seen := StringSet.diff !seen (StringSet.of_list ssr_tops);*)
  (*print_chunk out g seen fmt "vernac_toplevel" ["vernac_toplevel"] [];*)
+
(* grammar entry points used for the reachability check *)
let start_symbols = ["vernac_toplevel"]
(* don't report tokens as undefined *)
let tokens = [ "bullet"; "field"; "ident"; "int"; "num"; "numeral"; "string" ]

(* Warn about nonterminals that are referenced but never defined, and about
   defined nonterminals not reachable from [start_symbols]. *)
let report_bad_nts g file =
  (* gather the sets of referenced and defined nonterminals, seeding both
     with the token names so tokens are never flagged *)
  let rec get_nts refd defd bindings =
    match bindings with
    | [] -> refd, defd
    | (nt, prods) :: tl ->
      get_nts (List.fold_left (fun res prod ->
          StringSet.union res (StringSet.of_list (nts_in_prod prod)))
          refd prods)
        (StringSet.add nt defd) tl
  in
  let all_nts_ref, all_nts_def =
    get_nts (StringSet.of_list tokens) (StringSet.of_list tokens) (NTMap.bindings !g.map) in

  let undef = StringSet.diff all_nts_ref all_nts_def in
  List.iter (fun nt -> warn "%s: Undefined symbol '%s'\n" file nt) (StringSet.elements undef);

  let reachable =
    List.fold_left (fun res sym ->
        StringSet.union res (StringSet.of_list (nt_closure g sym [])))
      StringSet.empty start_symbols
  in
  let unreachable = List.filter (fun nt -> not (StringSet.mem nt reachable)) !g.order in
  List.iter (fun nt -> warn "%s: Unreachable symbol '%s'\n" file nt) unreachable
+
+
(* Print summary statistics about the grammar to stderr: counts of
   nonterminals/productions/terminals and symbols defined in more than
   one .mlg file. *)
let report_info g symdef_map =
  let num_prods = List.fold_left (fun sum nt -> let prods = NTMap.find nt !g.map in sum + (List.length prods))
      0 !g.order
  in

  Printf.eprintf "\nstart symbols: %s\n" (String.concat " " start_symbols);
  Printf.eprintf "%d nonterminals defined, %d productions\n" (NTMap.cardinal !g.map) num_prods;
  Printf.eprintf "%d terminals\n" (List.length tokens);

  Printf.eprintf "\nSymbols with multiple definition points in *.mlg:\n";
  let bindings = List.sort (fun a b -> let (ak, _) = a and (bk, _) = b in
                             String.compare ak bk) (StringMap.bindings symdef_map) in
  List.iter (fun b ->
      let (k, v) = b in
      if List.length v > 1 then begin
        Printf.eprintf " %s: " k;
        List.iter (fun f -> Printf.eprintf "%s " f) v;
        Printf.eprintf "\n"
      end)
    bindings;
  Printf.eprintf "\n"
+
+
+
[@@@ocaml.warning "-32"]
(* Debug helper: print the head constructor (and name, for Sterm/Snterm)
   of each symbol in [prod] on one stdout line. *)
let dump prod =
  let tag = function
    | Sterm s -> sprintf "Sterm %s" s
    | Snterm s -> sprintf "Snterm \"%s\"" s
    | Slist1 _ -> "Slist1"
    | Slist0 _ -> "Slist0"
    | Sopt _ -> "Sopt"
    | Slist1sep _ -> "Slist1sep"
    | Slist0sep _ -> "Slist0sep"
    | Sparen _ -> "Sparen"
    | Sprod _ -> "Sprod"
    | Sedit _ -> "Sedit"
    | Sedit2 _ -> "Sedit2"
  in
  List.iter (fun sym -> Printf.printf "%s " (tag sym)) prod;
  Printf.printf "\n"
[@@@ocaml.warning "+32"]
+
(* Reorder the edited grammar [eg] to follow the order given by
   [reordered_rules] (read from orderedGrammar): build a grammar [og]
   containing only the nts/prods both have, then merge back anything in
   [eg] that orderedGrammar lacks, and install the resulting order. *)
let reorder_grammar eg reordered_rules file =
  let og = ref { map = NTMap.empty; order = [] } in
  List.iter (fun rule ->
      let nt, prods = rule in
      try
        (* only keep nts and prods in common with editedGrammar *)
        let eg_prods = NTMap.find nt !eg.map in
        let prods = List.filter (fun prod -> (has_match prod eg_prods)) prods in
        if NTMap.mem nt !og.map then
          warn "%s: Duplicate nonterminal '%s'\n" file nt;
        add_rule og nt prods file
      with Not_found -> ())
    reordered_rules;
  g_reverse og;

  (* insert a prod in a list after prev_prod (None=at the beginning) *)
  let rec insert_prod prev_prod prod prods res =
    match prev_prod, prods with
    | None, _ -> prod :: prods
    | Some _, [] -> raise Not_found
    | Some ins_after_prod, hd :: tl ->
      if ematch hd ins_after_prod then
        (List.rev res) @ (hd :: prod :: tl)
      else
        insert_prod prev_prod prod tl (hd :: res)
  in

  (* insert prods that are not already in og_prods *)
  let rec upd_prods prev_prod eg_prods og_prods =
    match eg_prods with
    | [] -> og_prods
    | prod :: tl ->
      let og_prods =
        if has_match prod og_prods then
          (* keep eg's version of an already-present production *)
          List.map (fun p -> if ematch p prod then prod else p) og_prods
        else
          insert_prod prev_prod prod og_prods [] in
      upd_prods (Some prod) tl og_prods
  in

  (* add nts and prods not present in orderedGrammar *)
  let _ = List.fold_left (fun prev_nt nt ->
      let e_prods = NTMap.find nt !eg.map in
      if not (NTMap.mem nt !og.map) then
        g_add_after og prev_nt nt e_prods
      else
        g_update_prods og nt (upd_prods None e_prods (NTMap.find nt !og.map));
      Some nt)
      None !eg.order in
  g_reorder eg !og.map !og.order
+
+
(* Finalize output written to [old_file ^ "_temp"]: in verify mode, check
   that the temp file is identical to [old_file] (removing the temp file on
   success, reporting an error otherwise); in update mode, rename the temp
   file over [old_file]. *)
let finish_with_file old_file verify =
  (* byte-for-byte comparison of two files; false on any I/O error *)
  let files_eq f1 f2 =
    let chunksize = 8192 in
    (try
       let ofile = open_in_bin f1 in
       let nfile = open_in_bin f2 in
       let rv = if (in_channel_length ofile) <> (in_channel_length nfile) then false
         else begin
           let obuf = Bytes.create chunksize in
           Bytes.fill obuf 0 chunksize '\x00';
           let nbuf = Bytes.create chunksize in
           Bytes.fill nbuf 0 chunksize '\x00';
           let rec read () =
             (* NOTE(review): [input] may return a short read; if the two
                channels fill different prefixes of their buffers, equal
                files could compare unequal — confirm reads stay in lockstep
                for regular files of equal length *)
             let olen = input ofile obuf 0 chunksize in
             let _ = input nfile nbuf 0 chunksize in
             if obuf <> nbuf then
               false
             else if olen = 0 then
               true
             else
               read ()
           in
           read ()
         end
       in
       close_in ofile;
       close_in nfile;
       rv
     with Sys_error _ -> false)
  in

  let temp_file = (old_file ^ "_temp") in
  if verify then
    (* keep the old file when it is current or when errors already occurred *)
    if (files_eq old_file temp_file || !exit_code <> 0) then
      Sys.remove temp_file
    else
      error "%s is not current\n" old_file
  else
    Sys.rename temp_file old_file
+
(* Open "<file>_temp" for binary output; finish_with_file later compares or
   renames it onto [file]. *)
let open_temp_bin file = open_out_bin (file ^ "_temp")
+
(* Find the production in [prods] whose prodn rendering shares the longest
   common prefix with [str].  Returns (best rendering, ambiguous?, prefix
   length); [multi] is true when a second production tied for best length.
   An exact match returns immediately with multi = false. *)
let find_longest_match prods str =
  (* todo: require a minimum length? *)
  let common_prefix_len s1 s2 =
    let limit = min (String.length s1) (String.length s2) in
    let rec aux off =
      if off = limit then off
      else if s1.[off] = s2.[off] then aux (succ off)
      else off
    in
    aux 0
  in

  let slen = String.length str in
  let rec longest best multi best_len prods =
    match prods with
    | [] -> best, multi, best_len
    | prod :: tl ->
      let pstr = String.trim (prod_to_prodn prod) in
      let clen = common_prefix_len str pstr in
      if clen = slen && slen = String.length pstr then
        pstr, false, clen (* exact match *)
      else if clen > best_len then
        longest pstr false clen tl (* better match *)
      else if clen = best_len then
        longest best true best_len tl (* 2nd match with same length *)
      else
        longest best multi best_len tl (* worse match *)
  in
  longest "" false 0 prods
+
(* Items already emitted into the .rst documentation, each mapped to the
   (file, line) where it was first included so duplicates can be reported. *)
type seen = {
  nts: (string * int) NTMap.t;   (* nonterminals inserted via grammar directives *)
  tacs: (string * int) NTMap.t;  (* presumably documented tactics — usage not visible in this chunk *)
  cmds: (string * int) NTMap.t;  (* presumably documented commands — usage not visible in this chunk *)
}
+
+(* Copy the .rst file [file] to a temp file, expanding 'insertgram'
+   directives with productions from the grammar [g], and (optionally)
+   warning about unknown or duplicated 'tacn::'/'cmd::' directives.
+   [seen] accumulates, across files, which nonterminals/tactics/commands
+   have already appeared.  Finishes by comparing/renaming the temp file
+   through [finish_with_file]. *)
+let process_rst g file args seen tac_prods cmd_prods =
+  let old_rst = open_in file in
+  let new_rst = open_temp_bin file in
+  let linenum = ref 0 in
+  (* matches ".. <directive> <rest>", capturing indent, directive, rest *)
+  let dir_regex = Str.regexp "^\\([ \t]*\\)\\.\\.[ \t]*\\([a-zA-Z0-9:]*\\)\\(.*\\)" in
+  (* captures the two (start, end) arguments of an 'insertgram' directive *)
+  let ig_args_regex = Str.regexp "^[ \t]*\\([a-zA-Z0-9_\\.]*\\)[ \t]*\\([a-zA-Z0-9_\\.]*\\)" in
+  let blank_regex = Str.regexp "^[ \t]*$" in
+  (* same pattern as blank_regex: a blank line ends a productionlist *)
+  let end_prodlist_regex = Str.regexp "^[ \t]*$" in
+  (* position of [str] in [list] (0-based), or None if absent *)
+  let rec index_of_r str list index =
+    match list with
+    | [] -> None
+    | hd :: list ->
+      if hd = str then Some index
+      else index_of_r str list (index+1)
+  in
+  let index_of str list = index_of_r str list 0 in
+  (* read the next input line, keeping [linenum] in sync *)
+  let getline () =
+    let line = input_line old_rst in
+    incr linenum;
+    line
+  in
+  (* Write the productions of every nonterminal from [start_index] in
+     !g.order up to and including [end_], at the given [indent].  When
+     [is_coq_group], record each nonterminal in [seen] (warning on
+     duplicates). *)
+  let output_insertgram start_index end_ indent is_coq_group =
+    let rec nthcdr n list = if n = 0 then list else nthcdr (n-1) (List.tl list) in
+    let rec copy_prods list =
+      match list with
+      | [] -> ()
+      | nt :: tl ->
+        (try
+          let (prev_file, prev_linenum) = NTMap.find nt !seen.nts in
+          warn "%s line %d: '%s' already included at %s line %d\n"
+            file !linenum nt prev_file prev_linenum;
+        with Not_found ->
+          if is_coq_group then
+            seen := { !seen with nts = (NTMap.add nt (file, !linenum) !seen.nts)} );
+        let prods = NTMap.find nt !g.map in
+        (* first line carries the nonterminal name, continuation lines a
+           same-width blank so the productions line up *)
+        List.iteri (fun i prod ->
+            let rhs = String.trim (sprintf ": %s" (prod_to_str ~plist:true prod)) in
+            fprintf new_rst "%s %s %s\n" indent (if i = 0 then nt else String.make (String.length nt) ' ') rhs)
+          prods;
+        if nt <> end_ then copy_prods tl
+    in
+    copy_prods (nthcdr start_index !g.order)
+  in
+
+  (* Validate an 'insertgram <start> <end>' directive and, if well-formed,
+     replace the following productionlist block (blank line, then
+     '.. productionlist::', then lines up to the next blank line) with a
+     freshly generated one. *)
+  let process_insertgram line rhs =
+    if not (Str.string_match ig_args_regex rhs 0) then
+      error "%s line %d: bad arguments '%s' for 'insertgram'\n" file !linenum rhs
+    else begin
+      let start = Str.matched_group 1 rhs in
+      let end_ = Str.matched_group 2 rhs in
+      let start_index = index_of start !g.order in
+      let end_index = index_of end_ !g.order in
+      if start_index = None then
+        error "%s line %d: '%s' is undefined\n" file !linenum start;
+      if end_index = None then
+        error "%s line %d: '%s' is undefined\n" file !linenum end_;
+      match start_index, end_index with
+      | Some start_index, Some end_index ->
+        if start_index > end_index then
+          error "%s line %d: '%s' must appear before '%s' in .../orderedGrammar\n" file !linenum start end_
+        else begin
+          try
+            let line2 = getline() in
+            if not (Str.string_match blank_regex line2 0) then
+              error "%s line %d: expecting a blank line after 'insertgram'\n" file !linenum
+            else begin
+              let line3 = getline() in
+              if not (Str.string_match dir_regex line3 0) || (Str.matched_group 2 line3) <> "productionlist::" then
+                error "%s line %d: expecting 'productionlist' after 'insertgram'\n" file !linenum
+              else begin
+                let indent = Str.matched_group 1 line3 in
+                let is_coq_group = ("coq" = String.trim (Str.matched_group 3 line3)) in
+                (* discard the old productionlist body, then regenerate it *)
+                let rec skip_to_end () =
+                  let endline = getline() in
+                  if Str.string_match end_prodlist_regex endline 0 then begin
+                    fprintf new_rst "%s\n\n%s\n" line line3;
+                    output_insertgram start_index end_ indent is_coq_group;
+                    fprintf new_rst "%s\n" endline
+                  end else
+                    skip_to_end ()
+                in
+                skip_to_end ()
+              end
+            end
+          with End_of_file -> error "%s line %d: unexpected end of file\n" file !linenum;
+        end
+      | _ -> ()
+    end
+
+  in
+  (* Main loop: copy every line, dispatching on recognized directives. *)
+  try
+    while true do
+      let line = getline() in
+      if Str.string_match dir_regex line 0 then begin
+        let dir = Str.matched_group 2 line in
+        let rhs = String.trim (Str.matched_group 3 line) in
+        match dir with
+        | "productionlist::" ->
+          if rhs = "coq" then
+            warn "%s line %d: Missing 'insertgram' before 'productionlist:: coq'\n" file !linenum;
+          fprintf new_rst "%s\n" line;
+        | "tacn::" when args.check_tacs ->
+          if not (StringSet.mem rhs tac_prods) then
+            warn "%s line %d: Unknown tactic: '%s'\n" file !linenum rhs;
+          if NTMap.mem rhs !seen.tacs then
+            warn "%s line %d: Repeated tactic: '%s'\n" file !linenum rhs;
+          seen := { !seen with tacs = (NTMap.add rhs (file, !linenum) !seen.tacs)};
+          fprintf new_rst "%s\n" line
+        | "cmd::" when args.check_cmds ->
+          if not (StringSet.mem rhs cmd_prods) then
+            warn "%s line %d: Unknown command: '%s'\n" file !linenum rhs;
+          if NTMap.mem rhs !seen.cmds then
+            warn "%s line %d: Repeated command: '%s'\n" file !linenum rhs;
+          seen := { !seen with cmds = (NTMap.add rhs (file, !linenum) !seen.cmds)};
+          fprintf new_rst "%s\n" line
+        | "insertgram" ->
+          process_insertgram line rhs
+        | _ -> fprintf new_rst "%s\n" line
+      end else
+        fprintf new_rst "%s\n" line;
+    done
+  (* NB: by OCaml precedence the close/finish calls below belong to the
+     End_of_file handler; the loop only ever exits via End_of_file. *)
+  with End_of_file -> ();
+  close_in old_rst;
+  close_out new_rst;
+  finish_with_file file args.verify
+
+(* Warn about every maximal run of consecutive [entries] that are absent
+   from [seen].  [label] names the kind of entry ("Nonterminal",
+   "Tactic", ...); [split] is extra text inserted before the run's last
+   name in the range-style message. *)
+let report_omitted_prods entries seen label split =
+  (* Report one run [first..last] of [n] omissions; no-op when empty. *)
+  let maybe_warn first last n =
+    if first <> "" then begin
+      if first <> last then
+        warn "%ss '%s' to %s'%s' not included in .rst files (%d)\n" label first split last n
+      else
+        warn "%s %s not included in .rst files\n" label first;
+    end
+  in
+  let flush (first, last, n) = maybe_warn first last n in
+  (* Fold over the entries, carrying the current run of omissions. *)
+  let last_run =
+    List.fold_left (fun ((first, _, n) as run) nt ->
+        if NTMap.mem nt seen then begin
+          flush run;          (* a present entry ends the current run *)
+          ("", "", 0)
+        end else
+          ((if first = "" then nt else first), nt, n + 1))
+      ("", "", 0) entries
+  in
+  flush last_run
+
+(* Drive the whole pipeline: read the .mlg files, then successively
+   produce fullGrammar, editedGrammar, productionlistGrammar,
+   orderedGrammar and prodnGrammar, and cross-check the .rst files.
+   Each stage is skipped once the global [exit_code] becomes nonzero. *)
+let process_grammar args =
+  let symdef_map = ref StringMap.empty in
+  let g = ref { map = NTMap.empty; order = [] } in
+
+  let level_renames = read_mlg_files g args symdef_map in
+
+  (* rename nts with levels *)
+  List.iter (fun b -> let (nt, prod) = b in
+      let (_, prod) = edit_rule g level_renames nt prod in
+      g_update_prods g nt prod)
+    (NTMap.bindings !g.map);
+
+  (* print the full grammar with minimal editing *)
+  let out = open_temp_bin (dir "fullGrammar") in
+  fprintf out "%s\n%s\n\n"
+    "(* Coq grammar generated from .mlg files. Do not edit by hand. Not compiled into Coq *)"
+    "DOC_GRAMMAR";
+  print_in_order out g `MLG !g.order StringSet.empty;
+  close_out out;
+  finish_with_file (dir "fullGrammar") args.verify;
+  if args.verbose then
+    print_special_tokens g;
+
+  (* -short stops after fullGrammar *)
+  if not args.fullGrammar then begin
+    (* do shared edits *)
+    if !exit_code = 0 then begin
+      let common_edits = read_mlg_edit "common.edit_mlg" in
+      apply_edit_file g common_edits
+    end;
+    (* snapshot the grammar before productionlist-specific edits, for the
+       prodn output generated at the end *)
+    let prodn_gram = ref { map = !g.map; order = !g.order } in
+
+    (* productionlistGrammar: grammar after productionlist edits *)
+    if !exit_code = 0 && not args.verify then begin
+      let prodlist_edits = read_mlg_edit "productionlist.edit_mlg" in
+      apply_edit_file g prodlist_edits;
+      let out = open_temp_bin (dir "productionlistGrammar") in
+      if args.verbose then
+        report_info g !symdef_map;
+      print_in_order out g `PRODLIST !g.order StringSet.empty;
+      (*print_chunks g out `PRODLIST ();*)
+      close_out out;
+      finish_with_file (dir "productionlistGrammar") args.verify;
+    end;
+
+    (* editedGrammar: grammar after the shared edits, in mlg form *)
+    if !exit_code = 0 && not args.verify then begin
+      let out = open_temp_bin (dir "editedGrammar") in
+      fprintf out "%s\n%s\n\n"
+        "(* Edited Coq grammar generated from .mlg files. Do not edit by hand. Not compiled into Coq *)"
+        "DOC_GRAMMAR";
+      print_in_order out g `MLG !g.order StringSet.empty;
+      close_out out;
+      finish_with_file (dir "editedGrammar") args.verify;
+      report_bad_nts g "editedGrammar"
+    end;
+
+    (* orderedGrammar: rewrite the ordering file to match editedGrammar *)
+    if !exit_code = 0 then begin
+      let ordered_grammar = read_mlg_edit "orderedGrammar" in
+      let out = open_temp_bin (dir "orderedGrammar") in
+      fprintf out "%s\n%s\n\n"
+        ("(* Defines the order to apply to editedGrammar to get productionlistGrammar.\n" ^
+         "doc_grammar will modify this file to add/remove nonterminals and productions\n" ^
+         "to match editedGrammar, which will remove comments. Not compiled into Coq *)")
+        "DOC_GRAMMAR";
+      reorder_grammar g ordered_grammar "orderedGrammar";
+      print_in_order out g `MLG !g.order StringSet.empty;
+      close_out out;
+      finish_with_file (dir "orderedGrammar") args.verify;
+    end;
+
+    (* process the .rst files and report items never included in any *)
+    if !exit_code = 0 then begin
+      let plist nt =
+        let list = (List.map (fun t -> String.trim (prod_to_prodn t))
+                      (NTMap.find nt !g.map)) in
+        list, StringSet.of_list list in
+      let tac_list, tac_prods = plist "simple_tactic" in
+      let cmd_list, cmd_prods = plist "command" in
+      let seen = ref { nts=NTMap.empty; tacs=NTMap.empty; cmds=NTMap.empty } in
+      List.iter (fun file -> process_rst g file args seen tac_prods cmd_prods) args.rst_files;
+      report_omitted_prods !g.order !seen.nts "Nonterminal" "";
+      if args.check_tacs then
+        report_omitted_prods tac_list !seen.tacs "Tactic" "\n  ";
+      if args.check_cmds then
+        report_omitted_prods cmd_list !seen.cmds "Command" "\n  "
+    end;
+
+    (* generate output for prodn: simple_tactic, command, also for Ltac?? *)
+    if !exit_code = 0 && not args.verify then begin
+      let prodn_edits = read_mlg_edit "prodn.edit_mlg" in
+      apply_edit_file prodn_gram prodn_edits;
+      let out = open_temp_bin (dir "prodnGrammar") in
+      print_in_order out prodn_gram `PRODN !prodn_gram.order StringSet.empty;
+      close_out out;
+      finish_with_file (dir "prodnGrammar") args.verify
+    end
+  end
+
+(* Parse Sys.argv into an [args] record.  Flags toggle options; other
+   arguments are classified by file suffix (.mlg or .rst), and anything
+   else is reported as an error.  The accumulated file lists are
+   reversed at the end to restore command-line order. *)
+let parse_args () =
+  let suffix_regex = Str.regexp ".*\\.\\([a-z]+\\)$" in
+  let args =
+    List.fold_left (fun args arg ->
+      match arg with
+      | "-check-cmds" -> { args with check_cmds = true }
+      | "-check-tacs" -> { args with check_tacs = true }
+      (* NOTE(review): clears the global [show_warn] ref yet sets the
+         record field to true; presumably the field records "-no-warn was
+         given" -- confirm against uses of [args.show_warn]. *)
+      | "-no-warn" -> show_warn := false; { args with show_warn = true }
+      | "-short" -> { args with fullGrammar = true }
+      | "-verbose" -> { args with verbose = true }
+      | "-verify" -> { args with verify = true }
+      | arg when Str.string_match suffix_regex arg 0 ->
+        (match Str.matched_group 1 arg with
+        | "mlg" -> { args with mlg_files = (arg :: args.mlg_files) }
+        | "rst" -> { args with rst_files = (arg :: args.rst_files) }
+        | _ -> error "Unknown command line argument '%s'\n" arg; args)
+      | arg -> error "Unknown command line argument '%s'\n" arg; args)
+    default_args (List.tl (Array.to_list Sys.argv)) in
+  { args with mlg_files = (List.rev args.mlg_files); rst_files = (List.rev args.rst_files)}
+
+(* Entry point: parse the command line, run the pipeline unless argument
+   parsing already failed, and exit with the accumulated [exit_code]. *)
+let () =
+  (*try*)
+  Printexc.record_backtrace true;
+  let args = parse_args () in
+  if !exit_code = 0 then begin
+    process_grammar args
+  end;
+  exit !exit_code
+  (*with _ -> Printexc.print_backtrace stdout; exit 1*)
diff --git a/doc/tools/docgram/fullGrammar b/doc/tools/docgram/fullGrammar
new file mode 100644
index 0000000000..a83638dd73
--- /dev/null
+++ b/doc/tools/docgram/fullGrammar
@@ -0,0 +1,3174 @@
+(* Coq grammar generated from .mlg files. Do not edit by hand. Not compiled into Coq *)
+DOC_GRAMMAR
+
+Constr.ident: [
+| Prim.ident
+]
+
+Prim.name: [
+| "_"
+]
+
+global: [
+| Prim.reference
+]
+
+constr_pattern: [
+| constr
+]
+
+lconstr_pattern: [
+| lconstr
+]
+
+sort: [
+| "Set"
+| "Prop"
+| "SProp"
+| "Type"
+| "Type" "@{" "_" "}"
+| "Type" "@{" universe "}"
+]
+
+sort_family: [
+| "Set"
+| "Prop"
+| "SProp"
+| "Type"
+]
+
+universe_increment: [
+| "+" natural
+|
+]
+
+universe_name: [
+| global
+| "Set"
+| "Prop"
+]
+
+universe_expr: [
+| universe_name universe_increment
+]
+
+universe: [
+| "max" "(" LIST1 universe_expr SEP "," ")"
+| universe_expr
+]
+
+lconstr: [
+| operconstr200
+| l_constr
+]
+
+constr: [
+| operconstr8
+| "@" global instance
+]
+
+operconstr200: [
+| binder_constr
+| operconstr100
+]
+
+operconstr100: [
+| operconstr99 "<:" binder_constr
+| operconstr99 "<:" operconstr100
+| operconstr99 "<<:" binder_constr
+| operconstr99 "<<:" operconstr100
+| operconstr99 ":" binder_constr
+| operconstr99 ":" operconstr100
+| operconstr99 ":>"
+| operconstr99
+]
+
+operconstr99: [
+| operconstr90
+]
+
+operconstr90: [
+| operconstr10
+]
+
+operconstr10: [
+| operconstr9 LIST1 appl_arg
+| "@" global instance LIST0 operconstr9
+| "@" pattern_identref LIST1 identref
+| operconstr9
+]
+
+operconstr9: [
+| ".." operconstr0 ".."
+| operconstr8
+]
+
+operconstr8: [
+| operconstr1
+]
+
+operconstr1: [
+| operconstr0 ".(" global LIST0 appl_arg ")"
+| operconstr0 ".(" "@" global LIST0 ( operconstr9 ) ")"
+| operconstr0 "%" IDENT
+| operconstr0
+]
+
+operconstr0: [
+| atomic_constr
+| match_constr
+| "(" operconstr200 ")"
+| "{|" record_declaration bar_cbrace
+| "{" binder_constr "}"
+| "`{" operconstr200 "}"
+| "`(" operconstr200 ")"
+| "ltac" ":" "(" Pltac.tactic_expr ")"
+]
+
+record_declaration: [
+| record_fields
+]
+
+record_fields: [
+| record_field_declaration ";" record_fields
+| record_field_declaration
+|
+| record_field ";" record_fields
+| record_field ";"
+| record_field
+]
+
+record_field_declaration: [
+| global binders ":=" lconstr
+]
+
+binder_constr: [
+| "forall" open_binders "," operconstr200
+| "fun" open_binders "=>" operconstr200
+| "let" name binders type_cstr ":=" operconstr200 "in" operconstr200
+| "let" single_fix "in" operconstr200
+| "let" [ "(" LIST0 name SEP "," ")" | "()" ] return_type ":=" operconstr200 "in" operconstr200
+| "let" "'" pattern200 ":=" operconstr200 "in" operconstr200
+| "let" "'" pattern200 ":=" operconstr200 case_type "in" operconstr200
+| "let" "'" pattern200 "in" pattern200 ":=" operconstr200 case_type "in" operconstr200
+| "if" operconstr200 return_type "then" operconstr200 "else" operconstr200
+| fix_constr
+| "if" operconstr200 "is" ssr_dthen ssr_else (* ssr plugin *)
+| "if" operconstr200 "isn't" ssr_dthen ssr_else (* ssr plugin *)
+| "let" ":" ssr_mpat ":=" lconstr "in" lconstr (* ssr plugin *)
+| "let" ":" ssr_mpat ":=" lconstr ssr_rtype "in" lconstr (* ssr plugin *)
+| "let" ":" ssr_mpat "in" pattern200 ":=" lconstr ssr_rtype "in" lconstr (* ssr plugin *)
+]
+
+appl_arg: [
+| lpar_id_coloneq lconstr ")"
+| operconstr9
+]
+
+atomic_constr: [
+| global instance
+| sort
+| NUMERAL
+| string
+| "_"
+| "?" "[" ident "]"
+| "?" "[" pattern_ident "]"
+| pattern_ident evar_instance
+]
+
+inst: [
+| ident ":=" lconstr
+]
+
+evar_instance: [
+| "@{" LIST1 inst SEP ";" "}"
+|
+]
+
+instance: [
+| "@{" LIST0 universe_level "}"
+|
+]
+
+universe_level: [
+| "Set"
+| "Prop"
+| "Type"
+| "_"
+| global
+]
+
+fix_constr: [
+| single_fix
+| single_fix "with" LIST1 fix_decl SEP "with" "for" identref
+]
+
+single_fix: [
+| fix_kw fix_decl
+]
+
+fix_kw: [
+| "fix"
+| "cofix"
+]
+
+fix_decl: [
+| identref binders_fixannot type_cstr ":=" operconstr200
+]
+
+match_constr: [
+| "match" LIST1 case_item SEP "," OPT case_type "with" branches "end"
+]
+
+case_item: [
+| operconstr100 OPT [ "as" name ] OPT [ "in" pattern200 ]
+]
+
+case_type: [
+| "return" operconstr100
+]
+
+return_type: [
+| OPT [ OPT [ "as" name ] case_type ]
+]
+
+branches: [
+| OPT "|" LIST0 eqn SEP "|"
+]
+
+mult_pattern: [
+| LIST1 pattern200 SEP ","
+]
+
+eqn: [
+| LIST1 mult_pattern SEP "|" "=>" lconstr
+]
+
+record_pattern: [
+| global ":=" pattern200
+]
+
+record_patterns: [
+| record_pattern ";" record_patterns
+| record_pattern ";"
+| record_pattern
+|
+]
+
+pattern200: [
+| pattern100
+]
+
+pattern100: [
+| pattern99 ":" binder_constr
+| pattern99 ":" operconstr100
+| pattern99
+]
+
+pattern99: [
+| pattern90
+]
+
+pattern90: [
+| pattern10
+]
+
+pattern10: [
+| pattern1 "as" name
+| pattern1 LIST1 pattern1
+| "@" Prim.reference LIST0 pattern1
+| pattern1
+]
+
+pattern1: [
+| pattern0 "%" IDENT
+| pattern0
+]
+
+pattern0: [
+| Prim.reference
+| "{|" record_patterns bar_cbrace
+| "_"
+| "(" pattern200 ")"
+| "(" pattern200 "|" LIST1 pattern200 SEP "|" ")"
+| NUMERAL
+| string
+]
+
+impl_ident_tail: [
+| "}"
+| LIST1 name ":" lconstr "}"
+| LIST1 name "}"
+| ":" lconstr "}"
+]
+
+fixannot: [
+| "{" "struct" identref "}"
+| "{" "wf" constr identref "}"
+| "{" "measure" constr OPT identref OPT constr "}"
+| "{" "struct" name "}"
+|
+]
+
+impl_name_head: [
+| impl_ident_head
+]
+
+binders_fixannot: [
+| impl_name_head impl_ident_tail binders_fixannot
+| fixannot
+| binder binders_fixannot
+|
+]
+
+open_binders: [
+| name LIST0 name ":" lconstr
+| name LIST0 name binders
+| name ".." name
+| closed_binder binders
+]
+
+binders: [
+| LIST0 binder
+| Pcoq.Constr.binders
+]
+
+binder: [
+| name
+| closed_binder
+]
+
+closed_binder: [
+| "(" name LIST1 name ":" lconstr ")"
+| "(" name ":" lconstr ")"
+| "(" name ":=" lconstr ")"
+| "(" name ":" lconstr ":=" lconstr ")"
+| "{" name "}"
+| "{" name LIST1 name ":" lconstr "}"
+| "{" name ":" lconstr "}"
+| "{" name LIST1 name "}"
+| "`(" LIST1 typeclass_constraint SEP "," ")"
+| "`{" LIST1 typeclass_constraint SEP "," "}"
+| "'" pattern0
+| [ "of" | "&" ] operconstr99 (* ssr plugin *)
+]
+
+typeclass_constraint: [
+| "!" operconstr200
+| "{" name "}" ":" [ "!" | ] operconstr200
+| name_colon [ "!" | ] operconstr200
+| operconstr200
+]
+
+type_cstr: [
+| OPT [ ":" lconstr ]
+| ":" lconstr
+|
+]
+
+preident: [
+| IDENT
+]
+
+ident: [
+| IDENT
+]
+
+pattern_ident: [
+| LEFTQMARK ident
+]
+
+pattern_identref: [
+| pattern_ident
+]
+
+var: [
+| ident
+]
+
+identref: [
+| ident
+]
+
+field: [
+| FIELD
+]
+
+fields: [
+| field fields
+| field
+]
+
+fullyqualid: [
+| ident fields
+| ident
+]
+
+basequalid: [
+| ident fields
+| ident
+]
+
+name: [
+| "_"
+| ident
+]
+
+reference: [
+| ident fields
+| ident
+]
+
+by_notation: [
+| ne_string OPT [ "%" IDENT ]
+]
+
+smart_global: [
+| reference
+| by_notation
+]
+
+qualid: [
+| basequalid
+]
+
+ne_string: [
+| STRING
+]
+
+ne_lstring: [
+| ne_string
+]
+
+dirpath: [
+| ident LIST0 field
+]
+
+string: [
+| STRING
+]
+
+lstring: [
+| string
+]
+
+integer: [
+| NUMERAL
+| "-" NUMERAL
+]
+
+natural: [
+| NUMERAL
+| _natural
+]
+
+bigint: [
+| NUMERAL
+]
+
+bar_cbrace: [
+| "|" "}"
+]
+
+vernac_toplevel: [
+| "Drop" "."
+| "Quit" "."
+| "Backtrack" natural natural natural "."
+| test_show_goal "Show" "Goal" natural "at" natural "."
+| Pvernac.Vernac_.main_entry
+]
+
+opt_hintbases: [
+|
+| ":" LIST1 IDENT
+]
+
+command: [
+| "Goal" lconstr
+| "Proof"
+| "Proof" "Mode" string
+| "Proof" lconstr
+| "Abort"
+| "Abort" "All"
+| "Abort" identref
+| "Existential" natural constr_body
+| "Admitted"
+| "Qed"
+| "Save" identref
+| "Defined"
+| "Defined" identref
+| "Restart"
+| "Undo"
+| "Undo" natural
+| "Undo" "To" natural
+| "Focus"
+| "Focus" natural
+| "Unfocus"
+| "Unfocused"
+| "Show"
+| "Show" natural
+| "Show" ident
+| "Show" "Existentials"
+| "Show" "Universes"
+| "Show" "Conjectures"
+| "Show" "Proof"
+| "Show" "Intro"
+| "Show" "Intros"
+| "Show" "Match" reference
+| "Guarded"
+| "Create" "HintDb" IDENT; [ "discriminated" | ]
+| "Remove" "Hints" LIST1 global opt_hintbases
+| "Hint" hint opt_hintbases
+| "Comments" LIST0 comment
+| "Declare" "Instance" ident_decl binders ":" operconstr200 hint_info
+| "Declare" "Scope" IDENT
+| "Pwd"
+| "Cd"
+| "Cd" ne_string
+| "Load" [ "Verbose" | ] [ ne_string | IDENT ]
+| "Declare" "ML" "Module" LIST1 ne_string
+| "Locate" locatable
+| "Add" "LoadPath" ne_string as_dirpath
+| "Add" "Rec" "LoadPath" ne_string as_dirpath
+| "Remove" "LoadPath" ne_string
+| "AddPath" ne_string "as" as_dirpath
+| "AddRecPath" ne_string "as" as_dirpath
+| "DelPath" ne_string
+| "Type" lconstr
+| "Print" printable
+| "Print" smart_global OPT univ_name_list
+| "Print" "Module" "Type" global
+| "Print" "Module" global
+| "Print" "Namespace" dirpath
+| "Inspect" natural
+| "Add" "ML" "Path" ne_string
+| "Add" "Rec" "ML" "Path" ne_string
+| "Set" option_table option_setting
+| "Unset" option_table
+| "Print" "Table" option_table
+| "Add" IDENT IDENT LIST1 option_ref_value
+| "Add" IDENT LIST1 option_ref_value
+| "Test" option_table "for" LIST1 option_ref_value
+| "Test" option_table
+| "Remove" IDENT IDENT LIST1 option_ref_value
+| "Remove" IDENT LIST1 option_ref_value
+| "Write" "State" IDENT
+| "Write" "State" ne_string
+| "Restore" "State" IDENT
+| "Restore" "State" ne_string
+| "Reset" "Initial"
+| "Reset" identref
+| "Back"
+| "Back" natural
+| "BackTo" natural
+| "Debug" "On"
+| "Debug" "Off"
+| "Declare" "Reduction" IDENT; ":=" red_expr
+| "Declare" "Custom" "Entry" IDENT
+| "Derive" ident "SuchThat" constr "As" ident (* derive plugin *)
+| "Extraction" global (* extraction plugin *)
+| "Recursive" "Extraction" LIST1 global (* extraction plugin *)
+| "Extraction" string LIST1 global (* extraction plugin *)
+| "Extraction" "TestCompile" LIST1 global (* extraction plugin *)
+| "Separate" "Extraction" LIST1 global (* extraction plugin *)
+| "Extraction" "Library" ident (* extraction plugin *)
+| "Recursive" "Extraction" "Library" ident (* extraction plugin *)
+| "Extraction" "Language" language (* extraction plugin *)
+| "Extraction" "Inline" LIST1 global (* extraction plugin *)
+| "Extraction" "NoInline" LIST1 global (* extraction plugin *)
+| "Print" "Extraction" "Inline" (* extraction plugin *)
+| "Reset" "Extraction" "Inline" (* extraction plugin *)
+| "Extraction" "Implicit" global "[" LIST0 int_or_id "]" (* extraction plugin *)
+| "Extraction" "Blacklist" LIST1 ident (* extraction plugin *)
+| "Print" "Extraction" "Blacklist" (* extraction plugin *)
+| "Reset" "Extraction" "Blacklist" (* extraction plugin *)
+| "Extract" "Constant" global LIST0 string "=>" mlname (* extraction plugin *)
+| "Extract" "Inlined" "Constant" global "=>" mlname (* extraction plugin *)
+| "Extract" "Inductive" global "=>" mlname "[" LIST0 mlname "]" OPT string (* extraction plugin *)
+| "Show" "Extraction" (* extraction plugin *)
+| "Set" "Firstorder" "Solver" tactic
+| "Print" "Firstorder" "Solver"
+| "Function" LIST1 function_rec_definition_loc SEP "with" (* funind plugin *)
+| "Functional" "Scheme" LIST1 fun_scheme_arg SEP "with" (* funind plugin *)
+| "Functional" "Case" fun_scheme_arg (* funind plugin *)
+| "Generate" "graph" "for" reference (* funind plugin *)
+| "Hint" "Rewrite" orient LIST1 constr ":" LIST0 preident
+| "Hint" "Rewrite" orient LIST1 constr "using" tactic ":" LIST0 preident
+| "Hint" "Rewrite" orient LIST1 constr
+| "Hint" "Rewrite" orient LIST1 constr "using" tactic
+| "Derive" "Inversion_clear" ident "with" constr "Sort" sort_family
+| "Derive" "Inversion_clear" ident "with" constr
+| "Derive" "Inversion" ident "with" constr "Sort" sort_family
+| "Derive" "Inversion" ident "with" constr
+| "Derive" "Dependent" "Inversion" ident "with" constr "Sort" sort_family
+| "Derive" "Dependent" "Inversion_clear" ident "with" constr "Sort" sort_family
+| "Declare" "Left" "Step" constr
+| "Declare" "Right" "Step" constr
+| "Grab" "Existential" "Variables"
+| "Unshelve"
+| "Declare" "Equivalent" "Keys" constr constr
+| "Print" "Equivalent" "Keys"
+| "Optimize" "Proof"
+| "Optimize" "Heap"
+| "Hint" "Cut" "[" hints_path "]" opthints
+| "Typeclasses" "Transparent" LIST0 reference
+| "Typeclasses" "Opaque" LIST0 reference
+| "Typeclasses" "eauto" ":=" debug eauto_search_strategy OPT int
+| "Proof" "with" Pltac.tactic OPT [ "using" G_vernac.section_subset_expr ]
+| "Proof" "using" G_vernac.section_subset_expr OPT [ "with" Pltac.tactic ]
+| "Tactic" "Notation" OPT ltac_tactic_level LIST1 ltac_production_item ":=" tactic
+| "Print" "Ltac" reference
+| "Locate" "Ltac" reference
+| "Ltac" LIST1 ltac_tacdef_body SEP "with"
+| "Print" "Ltac" "Signatures"
+| "Obligation" integer "of" ident ":" lglob withtac
+| "Obligation" integer "of" ident withtac
+| "Obligation" integer ":" lglob withtac
+| "Obligation" integer withtac
+| "Next" "Obligation" "of" ident withtac
+| "Next" "Obligation" withtac
+| "Solve" "Obligation" integer "of" ident "with" tactic
+| "Solve" "Obligation" integer "with" tactic
+| "Solve" "Obligations" "of" ident "with" tactic
+| "Solve" "Obligations" "with" tactic
+| "Solve" "Obligations"
+| "Solve" "All" "Obligations" "with" tactic
+| "Solve" "All" "Obligations"
+| "Admit" "Obligations" "of" ident
+| "Admit" "Obligations"
+| "Obligation" "Tactic" ":=" tactic
+| "Show" "Obligation" "Tactic"
+| "Obligations" "of" ident
+| "Obligations"
+| "Preterm" "of" ident
+| "Preterm"
+| "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "as" ident
+| "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "as" ident
+| "Add" "Relation" constr constr "as" ident
+| "Add" "Relation" constr constr "symmetry" "proved" "by" constr "as" ident
+| "Add" "Relation" constr constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Relation" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Relation" constr constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "symmetry" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "reflexivity" "proved" "by" constr "symmetry" "proved" "by" constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Parametric" "Relation" binders ":" constr constr "transitivity" "proved" "by" constr "as" ident
+| "Add" "Setoid" constr constr constr "as" ident
+| "Add" "Parametric" "Setoid" binders ":" constr constr constr "as" ident
+| "Add" "Morphism" constr ":" ident
+| "Declare" "Morphism" constr ":" ident
+| "Add" "Morphism" constr "with" "signature" lconstr "as" ident
+| "Add" "Parametric" "Morphism" binders ":" constr "with" "signature" lconstr "as" ident
+| "Print" "Rewrite" "HintDb" preident
+| "Reset" "Ltac" "Profile"
+| "Show" "Ltac" "Profile"
+| "Show" "Ltac" "Profile" "CutOff" int
+| "Show" "Ltac" "Profile" string
+| "Add" "Ring" ident ":" constr OPT ring_mods (* setoid_ring plugin *)
+| "Print" "Rings" (* setoid_ring plugin *)
+| "Add" "Field" ident ":" constr OPT field_mods (* setoid_ring plugin *)
+| "Print" "Fields" (* setoid_ring plugin *)
+| "Prenex" "Implicits" LIST1 global (* ssr plugin *)
+| "Search" ssr_search_arg ssr_modlocs (* ssr plugin *)
+| "Print" "Hint" "View" ssrviewpos (* ssr plugin *)
+| "Hint" "View" ssrviewposspc LIST1 ssrhintref (* ssr plugin *)
+| "Numeral" "Notation" reference reference reference ":" ident numnotoption
+| "String" "Notation" reference reference reference ":" ident
+]
+
+reference_or_constr: [
+| global
+| constr
+]
+
+hint: [
+| "Resolve" LIST1 reference_or_constr hint_info
+| "Resolve" "->" LIST1 global OPT natural
+| "Resolve" "<-" LIST1 global OPT natural
+| "Immediate" LIST1 reference_or_constr
+| "Variables" "Transparent"
+| "Variables" "Opaque"
+| "Constants" "Transparent"
+| "Constants" "Opaque"
+| "Transparent" LIST1 global
+| "Opaque" LIST1 global
+| "Mode" global mode
+| "Unfold" LIST1 global
+| "Constructors" LIST1 global
+| "Extern" natural OPT Constr.constr_pattern "=>" Pltac.tactic
+]
+
+constr_body: [
+| ":=" lconstr
+| ":" lconstr ":=" lconstr
+]
+
+mode: [
+| LIST1 [ "+" | "!" | "-" ]
+]
+
+vernac_control: [
+| "Time" vernac_control
+| "Redirect" ne_string vernac_control
+| "Timeout" natural vernac_control
+| "Fail" vernac_control
+| decorated_vernac
+]
+
+decorated_vernac: [
+| LIST0 quoted_attributes vernac
+]
+
+quoted_attributes: [
+| "#[" attribute_list "]"
+]
+
+attribute_list: [
+| LIST0 attribute SEP ","
+]
+
+attribute: [
+| ident attribute_value
+]
+
+attribute_value: [
+| "=" string
+| "(" attribute_list ")"
+|
+]
+
+vernac: [
+| "Local" vernac_poly
+| "Global" vernac_poly
+| vernac_poly
+]
+
+vernac_poly: [
+| "Polymorphic" vernac_aux
+| "Monomorphic" vernac_aux
+| vernac_aux
+]
+
+vernac_aux: [
+| "Program" gallina "."
+| "Program" gallina_ext "."
+| gallina "."
+| gallina_ext "."
+| command "."
+| syntax "."
+| subprf
+| command_entry
+]
+
+noedit_mode: [
+| query_command
+]
+
+subprf: [
+| BULLET
+| "{"
+| "}"
+]
+
+gallina: [
+| thm_token ident_decl binders ":" lconstr LIST0 [ "with" ident_decl binders ":" lconstr ]
+| assumption_token inline assum_list
+| assumptions_token inline assum_list
+| def_token ident_decl def_body
+| "Let" identref def_body
+| OPT cumulativity_token private_token finite_token LIST1 inductive_definition SEP "with"
+| "Fixpoint" LIST1 rec_definition SEP "with"
+| "Let" "Fixpoint" LIST1 rec_definition SEP "with"
+| "CoFixpoint" LIST1 corec_definition SEP "with"
+| "Let" "CoFixpoint" LIST1 corec_definition SEP "with"
+| "Scheme" LIST1 scheme SEP "with"
+| "Combined" "Scheme" identref "from" LIST1 identref SEP ","
+| "Register" global "as" qualid
+| "Register" "Inline" global
+| "Primitive" identref OPT [ ":" lconstr ] ":=" register_token
+| "Universe" LIST1 identref
+| "Universes" LIST1 identref
+| "Constraint" LIST1 univ_constraint SEP ","
+]
+
+register_token: [
+| register_prim_token
+| register_type_token
+]
+
+register_type_token: [
+| "#int63_type"
+]
+
+register_prim_token: [
+| "#int63_head0"
+| "#int63_tail0"
+| "#int63_add"
+| "#int63_sub"
+| "#int63_mul"
+| "#int63_div"
+| "#int63_mod"
+| "#int63_lsr"
+| "#int63_lsl"
+| "#int63_land"
+| "#int63_lor"
+| "#int63_lxor"
+| "#int63_addc"
+| "#int63_subc"
+| "#int63_addcarryc"
+| "#int63_subcarryc"
+| "#int63_mulc"
+| "#int63_diveucl"
+| "#int63_div21"
+| "#int63_addmuldiv"
+| "#int63_eq"
+| "#int63_lt"
+| "#int63_le"
+| "#int63_compare"
+]
+
+thm_token: [
+| "Theorem"
+| "Lemma"
+| "Fact"
+| "Remark"
+| "Corollary"
+| "Proposition"
+| "Property"
+]
+
+def_token: [
+| "Definition"
+| "Example"
+| "SubClass"
+]
+
+assumption_token: [
+| "Hypothesis"
+| "Variable"
+| "Axiom"
+| "Parameter"
+| "Conjecture"
+]
+
+assumptions_token: [
+| "Hypotheses"
+| "Variables"
+| "Axioms"
+| "Parameters"
+| "Conjectures"
+]
+
+inline: [
+| "Inline" "(" natural ")"
+| "Inline"
+|
+]
+
+univ_constraint: [
+| universe_name [ "<" | "=" | "<=" ] universe_name
+]
+
+univ_decl: [
+| "@{" LIST0 identref [ "+" | ] [ "|" LIST0 univ_constraint SEP "," [ "+" | ] "}" | [ "}" | bar_cbrace ] ]
+]
+
+ident_decl: [
+| identref OPT univ_decl
+]
+
+finite_token: [
+| "Inductive"
+| "CoInductive"
+| "Variant"
+| "Record"
+| "Structure"
+| "Class"
+]
+
+cumulativity_token: [
+| "Cumulative"
+| "NonCumulative"
+]
+
+private_token: [
+| "Private"
+|
+]
+
+def_body: [
+| binders ":=" reduce lconstr
+| binders ":" lconstr ":=" reduce lconstr
+| binders ":" lconstr
+]
+
+reduce: [
+| "Eval" red_expr "in"
+|
+]
+
+one_decl_notation: [
+| ne_lstring ":=" constr OPT [ ":" IDENT ]
+]
+
+decl_sep: [
+| "and"
+]
+
+decl_notation: [
+| "where" LIST1 one_decl_notation SEP decl_sep
+|
+]
+
+opt_constructors_or_fields: [
+| ":=" constructor_list_or_record_decl
+|
+]
+
+inductive_definition: [
+| opt_coercion ident_decl binders OPT [ ":" lconstr ] opt_constructors_or_fields decl_notation
+]
+
+constructor_list_or_record_decl: [
+| "|" LIST1 constructor SEP "|"
+| identref constructor_type "|" LIST0 constructor SEP "|"
+| identref constructor_type
+| identref "{" record_fields "}"
+| "{" record_fields "}"
+|
+]
+
+opt_coercion: [
+| ">"
+|
+]
+
+rec_definition: [
+| ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation
+]
+
+corec_definition: [
+| ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation
+]
+
+scheme: [
+| scheme_kind
+| identref ":=" scheme_kind
+]
+
+scheme_kind: [
+| "Induction" "for" smart_global "Sort" sort_family
+| "Minimality" "for" smart_global "Sort" sort_family
+| "Elimination" "for" smart_global "Sort" sort_family
+| "Case" "for" smart_global "Sort" sort_family
+| "Equality" "for" smart_global
+]
+
+record_field: [
+| LIST0 quoted_attributes record_binder OPT [ "|" natural ] decl_notation
+]
+
+record_binder_body: [
+| binders of_type_with_opt_coercion lconstr
+| binders of_type_with_opt_coercion lconstr ":=" lconstr
+| binders ":=" lconstr
+]
+
+record_binder: [
+| name
+| name record_binder_body
+]
+
+assum_list: [
+| LIST1 assum_coe
+| simple_assum_coe
+]
+
+assum_coe: [
+| "(" simple_assum_coe ")"
+]
+
+simple_assum_coe: [
+| LIST1 ident_decl of_type_with_opt_coercion lconstr
+]
+
+constructor_type: [
+| binders [ of_type_with_opt_coercion lconstr | ]
+]
+
+constructor: [
+| identref constructor_type
+]
+
+of_type_with_opt_coercion: [
+| ":>>"
+| ":>" ">"
+| ":>"
+| ":" ">" ">"
+| ":" ">"
+| ":"
+]
+
+gallina_ext: [
+| "Module" export_token identref LIST0 module_binder of_module_type is_module_expr
+| "Module" "Type" identref LIST0 module_binder check_module_types is_module_type
+| "Declare" "Module" export_token identref LIST0 module_binder ":" module_type_inl
+| "Section" identref
+| "Chapter" identref
+| "End" identref
+| "Collection" identref ":=" section_subset_expr
+| "Require" export_token LIST1 global
+| "From" global "Require" export_token LIST1 global
+| "Import" LIST1 global
+| "Export" LIST1 global
+| "Include" module_type_inl LIST0 ext_module_expr
+| "Include" "Type" module_type_inl LIST0 ext_module_type
+| "Transparent" LIST1 smart_global
+| "Opaque" LIST1 smart_global
+| "Strategy" LIST1 [ strategy_level "[" LIST1 smart_global "]" ]
+| "Canonical" OPT "Structure" global OPT [ OPT univ_decl def_body ]
+| "Canonical" OPT "Structure" by_notation
+| "Coercion" global OPT univ_decl def_body
+| "Identity" "Coercion" identref ":" class_rawexpr ">->" class_rawexpr
+| "Coercion" global ":" class_rawexpr ">->" class_rawexpr
+| "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr
+| "Context" LIST1 binder
+| "Instance" instance_name ":" operconstr200 hint_info [ ":=" "{" record_declaration "}" | ":=" lconstr | ]
+| "Existing" "Instance" global hint_info
+| "Existing" "Instances" LIST1 global OPT [ "|" natural ]
+| "Existing" "Class" global
+| "Arguments" smart_global LIST0 argument_spec_block OPT [ "," LIST1 [ LIST0 more_implicits_block ] SEP "," ] OPT [ ":" LIST1 arguments_modifier SEP "," ]
+| "Implicit" "Type" reserv_list
+| "Implicit" "Types" reserv_list
+| "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 identref ]
+| "Export" "Set" option_table option_setting
+| "Export" "Unset" option_table
+| "Import" "Prenex" "Implicits" (* ssr plugin *)
+]
+
+export_token: [
+| "Import"
+| "Export"
+|
+]
+
+ext_module_type: [
+| "<+" module_type_inl
+]
+
+ext_module_expr: [
+| "<+" module_expr_inl
+]
+
+check_module_type: [
+| "<:" module_type_inl
+]
+
+check_module_types: [
+| LIST0 check_module_type
+]
+
+of_module_type: [
+| ":" module_type_inl
+| check_module_types
+]
+
+is_module_type: [
+| ":=" module_type_inl LIST0 ext_module_type
+|
+]
+
+is_module_expr: [
+| ":=" module_expr_inl LIST0 ext_module_expr
+|
+]
+
+functor_app_annot: [
+| "[" "inline" "at" "level" natural "]"
+| "[" "no" "inline" "]"
+|
+]
+
+module_expr_inl: [
+| "!" module_expr
+| module_expr functor_app_annot
+]
+
+module_type_inl: [
+| "!" module_type
+| module_type functor_app_annot
+]
+
+module_binder: [
+| "(" export_token LIST1 identref ":" module_type_inl ")"
+]
+
+module_expr: [
+| module_expr_atom
+| module_expr module_expr_atom
+]
+
+module_expr_atom: [
+| qualid
+| "(" module_expr ")"
+]
+
+with_declaration: [
+| "Definition" fullyqualid OPT univ_decl ":=" Constr.lconstr
+| "Module" fullyqualid ":=" qualid
+]
+
+module_type: [
+| qualid
+| "(" module_type ")"
+| module_type module_expr_atom
+| module_type "with" with_declaration
+]
+
+section_subset_expr: [
+| only_starredidentrefs LIST0 starredidentref
+| ssexpr35
+]
+
+starredidentref: [
+| identref
+| identref "*"
+| "Type"
+| "Type" "*"
+]
+
+ssexpr35: [
+| "-" ssexpr50
+| ssexpr50
+]
+
+ssexpr50: [
+| ssexpr0 "-" ssexpr0
+| ssexpr0 "+" ssexpr0
+| ssexpr0
+]
+
+ssexpr0: [
+| starredidentref
+| "(" only_starredidentrefs LIST0 starredidentref ")"
+| "(" only_starredidentrefs LIST0 starredidentref ")" "*"
+| "(" ssexpr35 ")"
+| "(" ssexpr35 ")" "*"
+]
+
+arguments_modifier: [
+| "simpl" "nomatch"
+| "simpl" "never"
+| "default" "implicits"
+| "clear" "implicits"
+| "clear" "scopes"
+| "clear" "bidirectionality" "hint"
+| "rename"
+| "assert"
+| "extra" "scopes"
+| "clear" "scopes" "and" "implicits"
+| "clear" "implicits" "and" "scopes"
+]
+
+scope: [
+| "%" IDENT
+]
+
+argument_spec: [
+| OPT "!" name OPT scope
+]
+
+argument_spec_block: [
+| argument_spec
+| "/"
+| "&"
+| "(" LIST1 argument_spec ")" OPT scope
+| "[" LIST1 argument_spec "]" OPT scope
+| "{" LIST1 argument_spec "}" OPT scope
+]
+
+more_implicits_block: [
+| name
+| "[" LIST1 name "]"
+| "{" LIST1 name "}"
+]
+
+strategy_level: [
+| "expand"
+| "opaque"
+| integer
+| "transparent"
+]
+
+instance_name: [
+| ident_decl binders
+|
+]
+
+hint_info: [
+| "|" OPT natural OPT constr_pattern
+|
+]
+
+reserv_list: [
+| LIST1 reserv_tuple
+| simple_reserv
+]
+
+reserv_tuple: [
+| "(" simple_reserv ")"
+]
+
+simple_reserv: [
+| LIST1 identref ":" lconstr
+]
+
+query_command: [
+| "Eval" red_expr "in" lconstr "."
+| "Compute" lconstr "."
+| "Check" lconstr "."
+| "About" smart_global OPT univ_name_list "."
+| "SearchHead" constr_pattern in_or_out_modules "."
+| "SearchPattern" constr_pattern in_or_out_modules "."
+| "SearchRewrite" constr_pattern in_or_out_modules "."
+| "Search" searchabout_query searchabout_queries "."
+| "SearchAbout" searchabout_query searchabout_queries "."
+| "SearchAbout" "[" LIST1 searchabout_query "]" in_or_out_modules "."
+]
+
+printable: [
+| "Term" smart_global OPT univ_name_list
+| "All"
+| "Section" global
+| "Grammar" IDENT
+| "Custom" "Grammar" IDENT
+| "LoadPath" OPT dirpath
+| "Modules"
+| "Libraries"
+| "ML" "Path"
+| "ML" "Modules"
+| "Debug" "GC"
+| "Graph"
+| "Classes"
+| "TypeClasses"
+| "Instances" smart_global
+| "Coercions"
+| "Coercion" "Paths" class_rawexpr class_rawexpr
+| "Canonical" "Projections"
+| "Tables"
+| "Options"
+| "Hint"
+| "Hint" smart_global
+| "Hint" "*"
+| "HintDb" IDENT
+| "Scopes"
+| "Scope" IDENT
+| "Visibility" OPT IDENT
+| "Implicit" smart_global
+| [ "Sorted" | ] "Universes" OPT printunivs_subgraph OPT ne_string
+| "Assumptions" smart_global
+| "Opaque" "Dependencies" smart_global
+| "Transparent" "Dependencies" smart_global
+| "All" "Dependencies" smart_global
+| "Strategy" smart_global
+| "Strategies"
+| "Registered"
+]
+
+printunivs_subgraph: [
+| "Subgraph" "(" LIST0 reference ")"
+]
+
+class_rawexpr: [
+| "Funclass"
+| "Sortclass"
+| smart_global
+]
+
+locatable: [
+| smart_global
+| "Term" smart_global
+| "File" ne_string
+| "Library" global
+| "Module" global
+]
+
+option_setting: [
+|
+| integer
+| STRING
+]
+
+option_ref_value: [
+| global
+| STRING
+]
+
+option_table: [
+| LIST1 IDENT
+]
+
+as_dirpath: [
+| OPT [ "as" dirpath ]
+]
+
+ne_in_or_out_modules: [
+| "inside" LIST1 global
+| "outside" LIST1 global
+]
+
+in_or_out_modules: [
+| ne_in_or_out_modules
+|
+]
+
+comment: [
+| constr
+| STRING
+| natural
+]
+
+positive_search_mark: [
+| "-"
+|
+]
+
+searchabout_query: [
+| positive_search_mark ne_string OPT scope
+| positive_search_mark constr_pattern
+]
+
+searchabout_queries: [
+| ne_in_or_out_modules
+| searchabout_query searchabout_queries
+|
+]
+
+univ_name_list: [
+| "@{" LIST0 name "}"
+]
+
+syntax: [
+| "Open" "Scope" IDENT
+| "Close" "Scope" IDENT
+| "Delimit" "Scope" IDENT; "with" IDENT
+| "Undelimit" "Scope" IDENT
+| "Bind" "Scope" IDENT; "with" LIST1 class_rawexpr
+| "Infix" ne_lstring ":=" constr [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" IDENT ]
+| "Notation" identref LIST0 ident ":=" constr only_parsing
+| "Notation" lstring ":=" constr [ "(" LIST1 syntax_modifier SEP "," ")" | ] OPT [ ":" IDENT ]
+| "Format" "Notation" STRING STRING STRING
+| "Reserved" "Infix" ne_lstring [ "(" LIST1 syntax_modifier SEP "," ")" | ]
+| "Reserved" "Notation" ne_lstring [ "(" LIST1 syntax_modifier SEP "," ")" | ]
+]
+
+only_parsing: [
+| "(" "only" "parsing" ")"
+| "(" "compat" STRING ")"
+|
+]
+
+level: [
+| "level" natural
+| "next" "level"
+]
+
+syntax_modifier: [
+| "at" "level" natural
+| "in" "custom" IDENT
+| "in" "custom" IDENT; "at" "level" natural
+| "left" "associativity"
+| "right" "associativity"
+| "no" "associativity"
+| "only" "printing"
+| "only" "parsing"
+| "compat" STRING
+| "format" STRING OPT STRING
+| IDENT; "," LIST1 IDENT SEP "," "at" level
+| IDENT; "at" level
+| IDENT; "at" level constr_as_binder_kind
+| IDENT constr_as_binder_kind
+| IDENT syntax_extension_type
+]
+
+syntax_extension_type: [
+| "ident"
+| "global"
+| "bigint"
+| "binder"
+| "constr"
+| "constr" OPT at_level OPT constr_as_binder_kind
+| "pattern"
+| "pattern" "at" "level" natural
+| "strict" "pattern"
+| "strict" "pattern" "at" "level" natural
+| "closed" "binder"
+| "custom" IDENT OPT at_level OPT constr_as_binder_kind
+]
+
+at_level: [
+| "at" level
+]
+
+constr_as_binder_kind: [
+| "as" "ident"
+| "as" "pattern"
+| "as" "strict" "pattern"
+]
+
+simple_tactic: [
+| "btauto"
+| "congruence"
+| "congruence" integer
+| "congruence" "with" LIST1 constr
+| "congruence" integer "with" LIST1 constr
+| "f_equal"
+| "firstorder" OPT tactic firstorder_using
+| "firstorder" OPT tactic "with" LIST1 preident
+| "firstorder" OPT tactic firstorder_using "with" LIST1 preident
+| "gintuition" OPT tactic
+| "functional" "inversion" quantified_hypothesis OPT reference (* funind plugin *)
+| "functional" "induction" LIST1 constr fun_ind_using with_names (* funind plugin *)
+| "soft" "functional" "induction" LIST1 constr fun_ind_using with_names (* funind plugin *)
+| "reflexivity"
+| "exact" casted_constr
+| "assumption"
+| "etransitivity"
+| "cut" constr
+| "exact_no_check" constr
+| "vm_cast_no_check" constr
+| "native_cast_no_check" constr
+| "casetype" constr
+| "elimtype" constr
+| "lapply" constr
+| "transitivity" constr
+| "left"
+| "eleft"
+| "left" "with" bindings
+| "eleft" "with" bindings
+| "right"
+| "eright"
+| "right" "with" bindings
+| "eright" "with" bindings
+| "constructor"
+| "constructor" int_or_var
+| "constructor" int_or_var "with" bindings
+| "econstructor"
+| "econstructor" int_or_var
+| "econstructor" int_or_var "with" bindings
+| "specialize" constr_with_bindings
+| "specialize" constr_with_bindings "as" simple_intropattern
+| "symmetry"
+| "symmetry" "in" in_clause
+| "split"
+| "esplit"
+| "split" "with" bindings
+| "esplit" "with" bindings
+| "exists"
+| "exists" LIST1 bindings SEP ","
+| "eexists"
+| "eexists" LIST1 bindings SEP ","
+| "intros" "until" quantified_hypothesis
+| "intro"
+| "intro" ident
+| "intro" ident "at" "top"
+| "intro" ident "at" "bottom"
+| "intro" ident "after" hyp
+| "intro" ident "before" hyp
+| "intro" "at" "top"
+| "intro" "at" "bottom"
+| "intro" "after" hyp
+| "intro" "before" hyp
+| "move" hyp "at" "top"
+| "move" hyp "at" "bottom"
+| "move" hyp "after" hyp
+| "move" hyp "before" hyp
+| "rename" LIST1 rename SEP ","
+| "revert" LIST1 hyp
+| "simple" "induction" quantified_hypothesis
+| "simple" "destruct" quantified_hypothesis
+| "double" "induction" quantified_hypothesis quantified_hypothesis
+| "admit"
+| "fix" ident natural
+| "cofix" ident
+| "clear" LIST0 hyp
+| "clear" "-" LIST1 hyp
+| "clearbody" LIST1 hyp
+| "generalize" "dependent" constr
+| "replace" uconstr "with" constr clause by_arg_tac
+| "replace" "->" uconstr clause
+| "replace" "<-" uconstr clause
+| "replace" uconstr clause
+| "simplify_eq"
+| "simplify_eq" destruction_arg
+| "esimplify_eq"
+| "esimplify_eq" destruction_arg
+| "discriminate"
+| "discriminate" destruction_arg
+| "ediscriminate"
+| "ediscriminate" destruction_arg
+| "injection"
+| "injection" destruction_arg
+| "einjection"
+| "einjection" destruction_arg
+| "injection" "as" LIST0 simple_intropattern
+| "injection" destruction_arg "as" LIST0 simple_intropattern
+| "einjection" "as" LIST0 simple_intropattern
+| "einjection" destruction_arg "as" LIST0 simple_intropattern
+| "simple" "injection"
+| "simple" "injection" destruction_arg
+| "dependent" "rewrite" orient constr
+| "dependent" "rewrite" orient constr "in" hyp
+| "cutrewrite" orient constr
+| "cutrewrite" orient constr "in" hyp
+| "decompose" "sum" constr
+| "decompose" "record" constr
+| "absurd" constr
+| "contradiction" OPT constr_with_bindings
+| "autorewrite" "with" LIST1 preident clause
+| "autorewrite" "with" LIST1 preident clause "using" tactic
+| "autorewrite" "*" "with" LIST1 preident clause
+| "autorewrite" "*" "with" LIST1 preident clause "using" tactic
+| "rewrite" "*" orient uconstr "in" hyp "at" occurrences by_arg_tac
+| "rewrite" "*" orient uconstr "at" occurrences "in" hyp by_arg_tac
+| "rewrite" "*" orient uconstr "in" hyp by_arg_tac
+| "rewrite" "*" orient uconstr "at" occurrences by_arg_tac
+| "rewrite" "*" orient uconstr by_arg_tac
+| "refine" uconstr
+| "simple" "refine" uconstr
+| "notypeclasses" "refine" uconstr
+| "simple" "notypeclasses" "refine" uconstr
+| "solve_constraints"
+| "subst" LIST1 var
+| "subst"
+| "simple" "subst"
+| "evar" test_lpar_id_colon "(" ident ":" lconstr ")"
+| "evar" constr
+| "instantiate" "(" ident ":=" lglob ")"
+| "instantiate" "(" integer ":=" lglob ")" hloc
+| "instantiate"
+| "stepl" constr "by" tactic
+| "stepl" constr
+| "stepr" constr "by" tactic
+| "stepr" constr
+| "generalize_eqs" hyp
+| "dependent" "generalize_eqs" hyp
+| "generalize_eqs_vars" hyp
+| "dependent" "generalize_eqs_vars" hyp
+| "specialize_eqs" hyp
+| "hresolve_core" "(" ident ":=" constr ")" "at" int_or_var "in" constr
+| "hresolve_core" "(" ident ":=" constr ")" "in" constr
+| "hget_evar" int_or_var
+| "destauto"
+| "destauto" "in" hyp
+| "transparent_abstract" tactic3
+| "transparent_abstract" tactic3 "using" ident
+| "constr_eq" constr constr
+| "constr_eq_strict" constr constr
+| "constr_eq_nounivs" constr constr
+| "is_evar" constr
+| "has_evar" constr
+| "is_var" constr
+| "is_fix" constr
+| "is_cofix" constr
+| "is_ind" constr
+| "is_constructor" constr
+| "is_proj" constr
+| "is_const" constr
+| "shelve"
+| "shelve_unifiable"
+| "unshelve" tactic1
+| "give_up"
+| "cycle" int_or_var
+| "swap" int_or_var int_or_var
+| "revgoals"
+| "guard" test
+| "decompose" "[" LIST1 constr "]" constr
+| "optimize_heap"
+| "eassumption"
+| "eexact" constr
+| "trivial" auto_using hintbases
+| "info_trivial" auto_using hintbases
+| "debug" "trivial" auto_using hintbases
+| "auto" OPT int_or_var auto_using hintbases
+| "info_auto" OPT int_or_var auto_using hintbases
+| "debug" "auto" OPT int_or_var auto_using hintbases
+| "prolog" "[" LIST0 uconstr "]" int_or_var
+| "eauto" OPT int_or_var OPT int_or_var auto_using hintbases
+| "new" "auto" OPT int_or_var auto_using hintbases
+| "debug" "eauto" OPT int_or_var OPT int_or_var auto_using hintbases
+| "info_eauto" OPT int_or_var OPT int_or_var auto_using hintbases
+| "dfs" "eauto" OPT int_or_var auto_using hintbases
+| "autounfold" hintbases clause_dft_concl
+| "autounfold_one" hintbases "in" hyp
+| "autounfold_one" hintbases
+| "unify" constr constr
+| "unify" constr constr "with" preident
+| "convert_concl_no_check" constr
+| "typeclasses" "eauto" "bfs" OPT int_or_var "with" LIST1 preident
+| "typeclasses" "eauto" OPT int_or_var "with" LIST1 preident
+| "typeclasses" "eauto" OPT int_or_var
+| "head_of_constr" ident constr
+| "not_evar" constr
+| "is_ground" constr
+| "autoapply" constr "using" preident
+| "autoapply" constr "with" preident
+| "progress_evars" tactic
+| "decide" "equality"
+| "compare" constr constr
+| "rewrite_strat" rewstrategy "in" hyp
+| "rewrite_strat" rewstrategy
+| "rewrite_db" preident "in" hyp
+| "rewrite_db" preident
+| "substitute" orient glob_constr_with_bindings
+| "setoid_rewrite" orient glob_constr_with_bindings
+| "setoid_rewrite" orient glob_constr_with_bindings "in" hyp
+| "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences
+| "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences "in" hyp
+| "setoid_rewrite" orient glob_constr_with_bindings "in" hyp "at" occurrences
+| "setoid_symmetry"
+| "setoid_symmetry" "in" hyp
+| "setoid_reflexivity"
+| "setoid_transitivity" constr
+| "setoid_etransitivity"
+| "intros" ne_intropatterns
+| "intros"
+| "eintros" ne_intropatterns
+| "eintros"
+| "apply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as
+| "eapply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as
+| "simple" "apply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as
+| "simple" "eapply" LIST1 constr_with_bindings_arg SEP "," in_hyp_as
+| "elim" constr_with_bindings_arg OPT eliminator
+| "eelim" constr_with_bindings_arg OPT eliminator
+| "case" induction_clause_list
+| "ecase" induction_clause_list
+| "fix" ident natural "with" LIST1 fixdecl
+| "cofix" ident "with" LIST1 cofixdecl
+| "pose" bindings_with_parameters
+| "pose" constr as_name
+| "epose" bindings_with_parameters
+| "epose" constr as_name
+| "set" bindings_with_parameters clause_dft_concl
+| "set" constr as_name clause_dft_concl
+| "eset" bindings_with_parameters clause_dft_concl
+| "eset" constr as_name clause_dft_concl
+| "remember" constr as_name eqn_ipat clause_dft_all
+| "eremember" constr as_name eqn_ipat clause_dft_all
+| "assert" test_lpar_id_coloneq "(" identref ":=" lconstr ")"
+| "eassert" test_lpar_id_coloneq "(" identref ":=" lconstr ")"
+| "assert" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic
+| "eassert" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic
+| "enough" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic
+| "eenough" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic
+| "assert" constr as_ipat by_tactic
+| "eassert" constr as_ipat by_tactic
+| "pose" "proof" lconstr as_ipat
+| "epose" "proof" lconstr as_ipat
+| "enough" constr as_ipat by_tactic
+| "eenough" constr as_ipat by_tactic
+| "generalize" constr
+| "generalize" constr LIST1 constr
+| "generalize" constr lookup_at_as_comma occs as_name LIST0 [ "," pattern_occ as_name ]
+| "induction" induction_clause_list
+| "einduction" induction_clause_list
+| "destruct" induction_clause_list
+| "edestruct" induction_clause_list
+| "rewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic
+| "erewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic
+| "dependent" [ "simple" "inversion" | "inversion" | "inversion_clear" ] quantified_hypothesis as_or_and_ipat OPT [ "with" constr ]
+| "simple" "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list
+| "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list
+| "inversion_clear" quantified_hypothesis as_or_and_ipat in_hyp_list
+| "inversion" quantified_hypothesis "using" constr in_hyp_list
+| "red" clause_dft_concl
+| "hnf" clause_dft_concl
+| "simpl" delta_flag OPT ref_or_pattern_occ clause_dft_concl
+| "cbv" strategy_flag clause_dft_concl
+| "cbn" strategy_flag clause_dft_concl
+| "lazy" strategy_flag clause_dft_concl
+| "compute" delta_flag clause_dft_concl
+| "vm_compute" OPT ref_or_pattern_occ clause_dft_concl
+| "native_compute" OPT ref_or_pattern_occ clause_dft_concl
+| "unfold" LIST1 unfold_occ SEP "," clause_dft_concl
+| "fold" LIST1 constr clause_dft_concl
+| "pattern" LIST1 pattern_occ SEP "," clause_dft_concl
+| "change" conversion clause_dft_concl
+| "change_no_check" conversion clause_dft_concl
+| "start" "ltac" "profiling"
+| "stop" "ltac" "profiling"
+| "reset" "ltac" "profile"
+| "show" "ltac" "profile"
+| "show" "ltac" "profile" "cutoff" int
+| "show" "ltac" "profile" string
+| "restart_timer" OPT string
+| "finish_timing" OPT string
+| "finish_timing" "(" string ")" OPT string
+| "myred" (* micromega plugin *)
+| "psatz_Z" int_or_var tactic (* micromega plugin *)
+| "psatz_Z" tactic (* micromega plugin *)
+| "xlia" tactic (* micromega plugin *)
+| "xnlia" tactic (* micromega plugin *)
+| "xnra" tactic (* micromega plugin *)
+| "xnqa" tactic (* micromega plugin *)
+| "sos_Z" tactic (* micromega plugin *)
+| "sos_Q" tactic (* micromega plugin *)
+| "sos_R" tactic (* micromega plugin *)
+| "lra_Q" tactic (* micromega plugin *)
+| "lra_R" tactic (* micromega plugin *)
+| "psatz_R" int_or_var tactic (* micromega plugin *)
+| "psatz_R" tactic (* micromega plugin *)
+| "psatz_Q" int_or_var tactic (* micromega plugin *)
+| "psatz_Q" tactic (* micromega plugin *)
+| "nsatz_compute" constr (* nsatz plugin *)
+| "omega" (* omega plugin *)
+| "omega" "with" LIST1 ident (* omega plugin *)
+| "omega" "with" "*" (* omega plugin *)
+| "rtauto"
+| "protect_fv" string "in" ident (* setoid_ring plugin *)
+| "protect_fv" string (* setoid_ring plugin *)
+| "ring_lookup" tactic0 "[" LIST0 constr "]" LIST1 constr (* setoid_ring plugin *)
+| "field_lookup" tactic "[" LIST0 constr "]" LIST1 constr (* setoid_ring plugin *)
+| "YouShouldNotTypeThis" ssrintrosarg (* ssr plugin *)
+| "by" ssrhintarg (* ssr plugin *)
+| "YouShouldNotTypeThis" "do" ssrdoarg (* ssr plugin *)
+| "YouShouldNotTypeThis" ssrtclarg ssrseqdir ssrseqarg (* ssr plugin *)
+| "clear" natural (* ssr plugin *)
+| "move" ssrmovearg ssrrpat (* ssr plugin *)
+| "move" ssrmovearg ssrclauses (* ssr plugin *)
+| "move" ssrrpat (* ssr plugin *)
+| "move" (* ssr plugin *)
+| "case" ssrcasearg ssrclauses (* ssr plugin *)
+| "case" (* ssr plugin *)
+| "elim" ssrarg ssrclauses (* ssr plugin *)
+| "elim" (* ssr plugin *)
+| "apply" ssrapplyarg (* ssr plugin *)
+| "apply" (* ssr plugin *)
+| "exact" ssrexactarg (* ssr plugin *)
+| "exact" (* ssr plugin *)
+| "exact" "<:" lconstr (* ssr plugin *)
+| "congr" ssrcongrarg (* ssr plugin *)
+| "ssrinstancesofruleL2R" ssrterm (* ssr plugin *)
+| "ssrinstancesofruleR2L" ssrterm (* ssr plugin *)
+| "rewrite" ssrrwargs ssrclauses (* ssr plugin *)
+| "unlock" ssrunlockargs ssrclauses (* ssr plugin *)
+| "pose" ssrfixfwd (* ssr plugin *)
+| "pose" ssrcofixfwd (* ssr plugin *)
+| "pose" ssrfwdid ssrposefwd (* ssr plugin *)
+| "set" ssrfwdid ssrsetfwd ssrclauses (* ssr plugin *)
+| "abstract" ssrdgens (* ssr plugin *)
+| "have" ssrhavefwdwbinders (* ssr plugin *)
+| "have" "suff" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "have" "suffices" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "suff" "have" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "suffices" "have" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "suff" ssrsufffwd (* ssr plugin *)
+| "suffices" ssrsufffwd (* ssr plugin *)
+| "wlog" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "wlog" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "wlog" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "without" "loss" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "without" "loss" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "without" "loss" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "gen" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "generally" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "under" ssrrwarg (* ssr plugin *)
+| "under" ssrrwarg ssrintros_ne (* ssr plugin *)
+| "under" ssrrwarg ssrintros_ne "do" ssrhint3arg (* ssr plugin *)
+| "under" ssrrwarg "do" ssrhint3arg (* ssr plugin *)
+| "ssrinstancesoftpat" cpattern (* ssrmatching plugin *)
+]
+
+mlname: [
+| preident (* extraction plugin *)
+| string (* extraction plugin *)
+]
+
+int_or_id: [
+| preident (* extraction plugin *)
+| integer (* extraction plugin *)
+]
+
+language: [
+| "Ocaml" (* extraction plugin *)
+| "OCaml" (* extraction plugin *)
+| "Haskell" (* extraction plugin *)
+| "Scheme" (* extraction plugin *)
+| "JSON" (* extraction plugin *)
+]
+
+firstorder_using: [
+| "using" reference
+| "using" reference "," LIST1 reference SEP ","
+| "using" reference reference LIST0 reference
+|
+]
+
+fun_ind_using: [
+| "using" constr_with_bindings (* funind plugin *)
+| (* funind plugin *)
+]
+
+with_names: [
+| "as" simple_intropattern (* funind plugin *)
+| (* funind plugin *)
+]
+
+constr_comma_sequence': [
+| constr "," constr_comma_sequence' (* funind plugin *)
+| constr (* funind plugin *)
+]
+
+auto_using': [
+| "using" constr_comma_sequence' (* funind plugin *)
+| (* funind plugin *)
+]
+
+function_rec_definition_loc: [
+| Vernac.rec_definition (* funind plugin *)
+]
+
+fun_scheme_arg: [
+| ident ":=" "Induction" "for" reference "Sort" sort_family (* funind plugin *)
+]
+
+orient: [
+| "->"
+| "<-"
+|
+]
+
+occurrences: [
+| LIST1 integer
+| var
+]
+
+glob: [
+| constr
+]
+
+lglob: [
+| lconstr
+]
+
+casted_constr: [
+| constr
+]
+
+hloc: [
+|
+| "in" "|-" "*"
+| "in" ident
+| "in" "(" "Type" "of" ident ")"
+| "in" "(" "Value" "of" ident ")"
+| "in" "(" "type" "of" ident ")"
+| "in" "(" "value" "of" ident ")"
+]
+
+rename: [
+| ident "into" ident
+]
+
+by_arg_tac: [
+| "by" tactic3
+|
+]
+
+in_clause: [
+| in_clause'
+| "*" occs
+| "*" "|-" concl_occ
+| LIST0 hypident_occ SEP "," "|-" concl_occ
+| LIST0 hypident_occ SEP ","
+]
+
+test_lpar_id_colon: [
+| local_test_lpar_id_colon
+]
+
+orient_string: [
+| orient preident
+]
+
+comparison: [
+| "="
+| "<"
+| "<="
+| ">"
+| ">="
+]
+
+test: [
+| int_or_var comparison int_or_var
+]
+
+hintbases: [
+| "with" "*"
+| "with" LIST1 preident
+|
+]
+
+auto_using: [
+| "using" LIST1 uconstr SEP ","
+|
+]
+
+hints_path_atom: [
+| LIST1 global
+| "_"
+]
+
+hints_path: [
+| "(" hints_path ")"
+| hints_path "*"
+| "emp"
+| "eps"
+| hints_path "|" hints_path
+| hints_path_atom
+| hints_path hints_path
+]
+
+opthints: [
+| ":" LIST1 preident
+|
+]
+
+debug: [
+| "debug"
+|
+]
+
+eauto_search_strategy: [
+| "(bfs)"
+| "(dfs)"
+|
+]
+
+tactic_then_last: [
+| "|" LIST0 ( OPT tactic_expr5 ) SEP "|"
+|
+]
+
+tactic_then_gen: [
+| tactic_expr5 "|" tactic_then_gen
+| tactic_expr5 ".." tactic_then_last
+| ".." tactic_then_last
+| tactic_expr5
+| "|" tactic_then_gen
+|
+]
+
+tactic_then_locality: [
+| "[" OPT ">"
+]
+
+tactic_expr5: [
+| binder_tactic
+| tactic_expr4
+]
+
+tactic_expr4: [
+| tactic_expr3 ";" binder_tactic
+| tactic_expr3 ";" tactic_expr3
+| tactic_expr3 ";" tactic_then_locality tactic_then_gen "]"
+| tactic_expr3
+| tactic_expr5 ";" "first" ssr_first_else (* ssr plugin *)
+| tactic_expr5 ";" "first" ssrseqarg (* ssr plugin *)
+| tactic_expr5 ";" "last" ssrseqarg (* ssr plugin *)
+]
+
+tactic_expr3: [
+| "try" tactic_expr3
+| "do" int_or_var tactic_expr3
+| "timeout" int_or_var tactic_expr3
+| "time" OPT string tactic_expr3
+| "repeat" tactic_expr3
+| "progress" tactic_expr3
+| "once" tactic_expr3
+| "exactly_once" tactic_expr3
+| "infoH" tactic_expr3
+| "abstract" tactic_expr2
+| "abstract" tactic_expr2 "using" ident
+| selector tactic_expr3
+| tactic_expr2
+| "do" ssrmmod ssrdotac ssrclauses (* ssr plugin *)
+| "do" ssrortacarg ssrclauses (* ssr plugin *)
+| "do" int_or_var ssrmmod ssrdotac ssrclauses (* ssr plugin *)
+| "abstract" ssrdgens (* ssr plugin *)
+]
+
+tactic_expr2: [
+| tactic_expr1 "+" binder_tactic
+| tactic_expr1 "+" tactic_expr2
+| "tryif" tactic_expr5 "then" tactic_expr5 "else" tactic_expr2
+| tactic_expr1 "||" binder_tactic
+| tactic_expr1 "||" tactic_expr2
+| tactic_expr1
+]
+
+tactic_expr1: [
+| match_key "goal" "with" match_context_list "end"
+| match_key "reverse" "goal" "with" match_context_list "end"
+| match_key tactic_expr5 "with" match_list "end"
+| "first" "[" LIST0 tactic_expr5 SEP "|" "]"
+| "solve" "[" LIST0 tactic_expr5 SEP "|" "]"
+| "idtac" LIST0 message_token
+| failkw [ int_or_var | ] LIST0 message_token
+| simple_tactic
+| tactic_arg
+| reference LIST0 tactic_arg_compat
+| tactic_expr0
+| tactic_expr5 ssrintros_ne (* ssr plugin *)
+]
+
+tactic_expr0: [
+| "(" tactic_expr5 ")"
+| "[" ">" tactic_then_gen "]"
+| tactic_atom
+| ssrparentacarg (* ssr plugin *)
+]
+
+failkw: [
+| "fail"
+| "gfail"
+]
+
+binder_tactic: [
+| "fun" LIST1 input_fun "=>" tactic_expr5
+| "let" [ "rec" | ] LIST1 let_clause SEP "with" "in" tactic_expr5
+| "info" tactic_expr5
+]
+
+tactic_arg_compat: [
+| tactic_arg
+| Constr.constr
+| "()"
+]
+
+tactic_arg: [
+| constr_eval
+| "fresh" LIST0 fresh_id
+| "type_term" uconstr
+| "numgoals"
+]
+
+fresh_id: [
+| STRING
+| qualid
+]
+
+constr_eval: [
+| "eval" red_expr "in" Constr.constr
+| "context" identref "[" Constr.lconstr "]"
+| "type" "of" Constr.constr
+]
+
+constr_may_eval: [
+| constr_eval
+| Constr.constr
+]
+
+tactic_atom: [
+| integer
+| reference
+| "()"
+]
+
+match_key: [
+| "match"
+| "lazymatch"
+| "multimatch"
+]
+
+input_fun: [
+| "_"
+| ident
+]
+
+let_clause: [
+| identref ":=" tactic_expr5
+| "_" ":=" tactic_expr5
+| identref LIST1 input_fun ":=" tactic_expr5
+]
+
+match_pattern: [
+| "context" OPT Constr.ident "[" Constr.lconstr_pattern "]"
+| Constr.lconstr_pattern
+]
+
+match_hyps: [
+| name ":" match_pattern
+| name ":=" "[" match_pattern "]" ":" match_pattern
+| name ":=" match_pattern
+]
+
+match_context_rule: [
+| LIST0 match_hyps SEP "," "|-" match_pattern "=>" tactic_expr5
+| "[" LIST0 match_hyps SEP "," "|-" match_pattern "]" "=>" tactic_expr5
+| "_" "=>" tactic_expr5
+]
+
+match_context_list: [
+| LIST1 match_context_rule SEP "|"
+| "|" LIST1 match_context_rule SEP "|"
+]
+
+match_rule: [
+| match_pattern "=>" tactic_expr5
+| "_" "=>" tactic_expr5
+]
+
+match_list: [
+| LIST1 match_rule SEP "|"
+| "|" LIST1 match_rule SEP "|"
+]
+
+message_token: [
+| identref
+| STRING
+| integer
+]
+
+ltac_def_kind: [
+| ":="
+| "::="
+]
+
+tacdef_body: [
+| Constr.global LIST1 input_fun ltac_def_kind tactic_expr5
+| Constr.global ltac_def_kind tactic_expr5
+]
+
+tactic: [
+| tactic_expr5
+]
+
+range_selector: [
+| natural "-" natural
+| natural
+]
+
+range_selector_or_nth: [
+| natural "-" natural OPT [ "," LIST1 range_selector SEP "," ]
+| natural OPT [ "," LIST1 range_selector SEP "," ]
+]
+
+selector_body: [
+| range_selector_or_nth
+| test_bracket_ident "[" ident "]"
+]
+
+selector: [
+| "only" selector_body ":"
+]
+
+toplevel_selector: [
+| selector_body ":"
+| "!" ":"
+| "all" ":"
+]
+
+tactic_mode: [
+| OPT toplevel_selector G_vernac.query_command
+| OPT toplevel_selector "{"
+| OPT ltac_selector OPT ltac_info tactic ltac_use_default
+| "par" ":" OPT ltac_info tactic ltac_use_default
+]
+
+ltac_selector: [
+| toplevel_selector
+]
+
+ltac_info: [
+| "Info" natural
+]
+
+ltac_use_default: [
+| "."
+| "..."
+]
+
+ltac_tactic_level: [
+| "(" "at" "level" natural ")"
+]
+
+ltac_production_sep: [
+| "," string
+]
+
+ltac_production_item: [
+| string
+| ident "(" ident OPT ltac_production_sep ")"
+| ident
+]
+
+ltac_tacdef_body: [
+| tacdef_body
+]
+
+withtac: [
+| "with" Tactic.tactic
+|
+]
+
+Constr.closed_binder: [
+| "(" Prim.name ":" Constr.lconstr "|" Constr.lconstr ")"
+]
+
+glob_constr_with_bindings: [
+| constr_with_bindings
+]
+
+rewstrategy: [
+| glob
+| "<-" constr
+| "subterms" rewstrategy
+| "subterm" rewstrategy
+| "innermost" rewstrategy
+| "outermost" rewstrategy
+| "bottomup" rewstrategy
+| "topdown" rewstrategy
+| "id"
+| "fail"
+| "refl"
+| "progress" rewstrategy
+| "try" rewstrategy
+| "any" rewstrategy
+| "repeat" rewstrategy
+| rewstrategy ";" rewstrategy
+| "(" rewstrategy ")"
+| "choice" rewstrategy rewstrategy
+| "old_hints" preident
+| "hints" preident
+| "terms" LIST0 constr
+| "eval" red_expr
+| "fold" constr
+]
+
+int_or_var: [
+| integer
+| identref
+]
+
+nat_or_var: [
+| natural
+| identref
+]
+
+id_or_meta: [
+| identref
+]
+
+open_constr: [
+| constr
+]
+
+uconstr: [
+| constr
+]
+
+destruction_arg: [
+| natural
+| test_lpar_id_rpar constr_with_bindings
+| constr_with_bindings_arg
+]
+
+constr_with_bindings_arg: [
+| ">" constr_with_bindings
+| constr_with_bindings
+]
+
+quantified_hypothesis: [
+| ident
+| natural
+]
+
+conversion: [
+| constr
+| constr "with" constr
+| constr "at" occs_nums "with" constr
+]
+
+occs_nums: [
+| LIST1 nat_or_var
+| "-" nat_or_var LIST0 int_or_var
+]
+
+occs: [
+| "at" occs_nums
+|
+]
+
+pattern_occ: [
+| constr occs
+]
+
+ref_or_pattern_occ: [
+| smart_global occs
+| constr occs
+]
+
+unfold_occ: [
+| smart_global occs
+]
+
+intropatterns: [
+| LIST0 intropattern
+]
+
+ne_intropatterns: [
+| LIST1 intropattern
+]
+
+or_and_intropattern: [
+| "[" LIST1 intropatterns SEP "|" "]"
+| "()"
+| "(" simple_intropattern ")"
+| "(" simple_intropattern "," LIST1 simple_intropattern SEP "," ")"
+| "(" simple_intropattern "&" LIST1 simple_intropattern SEP "&" ")"
+]
+
+equality_intropattern: [
+| "->"
+| "<-"
+| "[=" intropatterns "]"
+]
+
+naming_intropattern: [
+| pattern_ident
+| "?"
+| ident
+]
+
+intropattern: [
+| simple_intropattern
+| "*"
+| "**"
+]
+
+simple_intropattern: [
+| simple_intropattern_closed LIST0 [ "%" operconstr0 ]
+]
+
+simple_intropattern_closed: [
+| or_and_intropattern
+| equality_intropattern
+| "_"
+| naming_intropattern
+]
+
+simple_binding: [
+| "(" ident ":=" lconstr ")"
+| "(" natural ":=" lconstr ")"
+]
+
+bindings: [
+| test_lpar_idnum_coloneq LIST1 simple_binding
+| LIST1 constr
+]
+
+constr_with_bindings: [
+| constr with_bindings
+]
+
+with_bindings: [
+| "with" bindings
+|
+]
+
+red_flags: [
+| "beta"
+| "iota"
+| "match"
+| "fix"
+| "cofix"
+| "zeta"
+| "delta" delta_flag
+]
+
+delta_flag: [
+| "-" "[" LIST1 smart_global "]"
+| "[" LIST1 smart_global "]"
+|
+]
+
+strategy_flag: [
+| LIST1 red_flags
+| delta_flag
+]
+
+red_expr: [
+| "red"
+| "hnf"
+| "simpl" delta_flag OPT ref_or_pattern_occ
+| "cbv" strategy_flag
+| "cbn" strategy_flag
+| "lazy" strategy_flag
+| "compute" delta_flag
+| "vm_compute" OPT ref_or_pattern_occ
+| "native_compute" OPT ref_or_pattern_occ
+| "unfold" LIST1 unfold_occ SEP ","
+| "fold" LIST1 constr
+| "pattern" LIST1 pattern_occ SEP ","
+| IDENT
+]
+
+hypident: [
+| id_or_meta
+| "(" "type" "of" id_or_meta ")"
+| "(" "value" "of" id_or_meta ")"
+| "(" "type" "of" Prim.identref ")" (* ssr plugin *)
+| "(" "value" "of" Prim.identref ")" (* ssr plugin *)
+]
+
+hypident_occ: [
+| hypident occs
+]
+
+clause_dft_concl: [
+| "in" in_clause
+| occs
+|
+]
+
+clause_dft_all: [
+| "in" in_clause
+|
+]
+
+opt_clause: [
+| "in" in_clause
+| "at" occs_nums
+|
+]
+
+concl_occ: [
+| "*" occs
+|
+]
+
+in_hyp_list: [
+| "in" LIST1 id_or_meta
+|
+]
+
+in_hyp_as: [
+| "in" id_or_meta as_ipat
+|
+]
+
+simple_binder: [
+| name
+| "(" LIST1 name ":" lconstr ")"
+]
+
+fixdecl: [
+| "(" ident LIST0 simple_binder fixannot ":" lconstr ")"
+]
+
+cofixdecl: [
+| "(" ident LIST0 simple_binder ":" lconstr ")"
+]
+
+bindings_with_parameters: [
+| check_for_coloneq "(" ident LIST0 simple_binder ":=" lconstr ")"
+]
+
+eliminator: [
+| "using" constr_with_bindings
+]
+
+as_ipat: [
+| "as" simple_intropattern
+|
+]
+
+or_and_intropattern_loc: [
+| or_and_intropattern
+| identref
+]
+
+as_or_and_ipat: [
+| "as" or_and_intropattern_loc
+|
+]
+
+eqn_ipat: [
+| "eqn" ":" naming_intropattern
+| "_eqn" ":" naming_intropattern
+| "_eqn"
+|
+]
+
+as_name: [
+| "as" ident
+|
+]
+
+by_tactic: [
+| "by" tactic_expr3
+|
+]
+
+rewriter: [
+| "!" constr_with_bindings_arg
+| [ "?" | LEFTQMARK ] constr_with_bindings_arg
+| natural "!" constr_with_bindings_arg
+| natural [ "?" | LEFTQMARK ] constr_with_bindings_arg
+| natural constr_with_bindings_arg
+| constr_with_bindings_arg
+]
+
+oriented_rewriter: [
+| orient rewriter
+]
+
+induction_clause: [
+| destruction_arg as_or_and_ipat eqn_ipat opt_clause
+]
+
+induction_clause_list: [
+| LIST1 induction_clause SEP "," OPT eliminator opt_clause
+]
+
+ring_mod: [
+| "decidable" constr (* setoid_ring plugin *)
+| "abstract" (* setoid_ring plugin *)
+| "morphism" constr (* setoid_ring plugin *)
+| "constants" "[" tactic "]" (* setoid_ring plugin *)
+| "closed" "[" LIST1 global "]" (* setoid_ring plugin *)
+| "preprocess" "[" tactic "]" (* setoid_ring plugin *)
+| "postprocess" "[" tactic "]" (* setoid_ring plugin *)
+| "setoid" constr constr (* setoid_ring plugin *)
+| "sign" constr (* setoid_ring plugin *)
+| "power" constr "[" LIST1 global "]" (* setoid_ring plugin *)
+| "power_tac" constr "[" tactic "]" (* setoid_ring plugin *)
+| "div" constr (* setoid_ring plugin *)
+]
+
+ring_mods: [
+| "(" LIST1 ring_mod SEP "," ")" (* setoid_ring plugin *)
+]
+
+field_mod: [
+| ring_mod (* setoid_ring plugin *)
+| "completeness" constr (* setoid_ring plugin *)
+]
+
+field_mods: [
+| "(" LIST1 field_mod SEP "," ")" (* setoid_ring plugin *)
+]
+
+ssrtacarg: [
+| tactic_expr5 (* ssr plugin *)
+]
+
+ssrtac3arg: [
+| tactic_expr3 (* ssr plugin *)
+]
+
+ssrtclarg: [
+| ssrtacarg (* ssr plugin *)
+]
+
+ssrhyp: [
+| ident (* ssr plugin *)
+]
+
+ssrhoi_hyp: [
+| ident (* ssr plugin *)
+]
+
+ssrhoi_id: [
+| ident (* ssr plugin *)
+]
+
+ssrsimpl_ne: [
+| "//=" (* ssr plugin *)
+| "/=" (* ssr plugin *)
+| test_ssrslashnum11 "/" natural "/" natural "=" (* ssr plugin *)
+| test_ssrslashnum10 "/" natural "/" (* ssr plugin *)
+| test_ssrslashnum10 "/" natural "=" (* ssr plugin *)
+| test_ssrslashnum10 "/" natural "/=" (* ssr plugin *)
+| test_ssrslashnum10 "/" natural "/" "=" (* ssr plugin *)
+| test_ssrslashnum01 "//" natural "=" (* ssr plugin *)
+| test_ssrslashnum00 "//" (* ssr plugin *)
+]
+
+ssrclear_ne: [
+| "{" LIST1 ssrhyp "}" (* ssr plugin *)
+]
+
+ssrclear: [
+| ssrclear_ne (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrindex: [
+| int_or_var (* ssr plugin *)
+]
+
+ssrocc: [
+| natural LIST0 natural (* ssr plugin *)
+| "-" LIST0 natural (* ssr plugin *)
+| "+" LIST0 natural (* ssr plugin *)
+]
+
+ssrmmod: [
+| "!" (* ssr plugin *)
+| LEFTQMARK (* ssr plugin *)
+| "?" (* ssr plugin *)
+]
+
+ssrmult_ne: [
+| natural ssrmmod (* ssr plugin *)
+| ssrmmod (* ssr plugin *)
+]
+
+ssrmult: [
+| ssrmult_ne (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrdocc: [
+| "{" ssrocc "}" (* ssr plugin *)
+| "{" LIST0 ssrhyp "}" (* ssr plugin *)
+]
+
+ssrterm: [
+| "YouShouldNotTypeThis" constr (* ssr plugin *)
+| ssrtermkind Pcoq.Constr.constr (* ssr plugin *)
+]
+
+ast_closure_term: [
+| term_annotation constr (* ssr plugin *)
+]
+
+ast_closure_lterm: [
+| term_annotation lconstr (* ssr plugin *)
+]
+
+ssrbwdview: [
+| "YouShouldNotTypeThis" (* ssr plugin *)
+| test_not_ssrslashnum "/" Pcoq.Constr.constr (* ssr plugin *)
+| test_not_ssrslashnum "/" Pcoq.Constr.constr ssrbwdview (* ssr plugin *)
+]
+
+ssrfwdview: [
+| "YouShouldNotTypeThis" (* ssr plugin *)
+| test_not_ssrslashnum "/" ast_closure_term (* ssr plugin *)
+| test_not_ssrslashnum "/" ast_closure_term ssrfwdview (* ssr plugin *)
+]
+
+ident_no_do: [
+| "YouShouldNotTypeThis" ident (* ssr plugin *)
+| test_ident_no_do IDENT (* ssr plugin *)
+]
+
+ssripat: [
+| "_" (* ssr plugin *)
+| "*" (* ssr plugin *)
+| ">" (* ssr plugin *)
+| ident_no_do (* ssr plugin *)
+| "?" (* ssr plugin *)
+| "+" (* ssr plugin *)
+| "++" (* ssr plugin *)
+| ssrsimpl_ne (* ssr plugin *)
+| ssrdocc "->" (* ssr plugin *)
+| ssrdocc "<-" (* ssr plugin *)
+| ssrdocc (* ssr plugin *)
+| "->" (* ssr plugin *)
+| "<-" (* ssr plugin *)
+| "-" (* ssr plugin *)
+| "-/" "=" (* ssr plugin *)
+| "-/=" (* ssr plugin *)
+| "-/" "/" (* ssr plugin *)
+| "-//" (* ssr plugin *)
+| "-/" integer "/" (* ssr plugin *)
+| "-/" "/=" (* ssr plugin *)
+| "-//" "=" (* ssr plugin *)
+| "-//=" (* ssr plugin *)
+| "-/" integer "/=" (* ssr plugin *)
+| "-/" integer "/" integer "=" (* ssr plugin *)
+| ssrfwdview (* ssr plugin *)
+| "[" ":" LIST0 ident "]" (* ssr plugin *)
+| "[:" LIST0 ident "]" (* ssr plugin *)
+| ssrcpat (* ssr plugin *)
+]
+
+ssripats: [
+| ssripat ssripats (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssriorpat: [
+| ssripats "|" ssriorpat (* ssr plugin *)
+| ssripats "|-" ">" ssriorpat (* ssr plugin *)
+| ssripats "|-" ssriorpat (* ssr plugin *)
+| ssripats "|->" ssriorpat (* ssr plugin *)
+| ssripats "||" ssriorpat (* ssr plugin *)
+| ssripats "|||" ssriorpat (* ssr plugin *)
+| ssripats "||||" ssriorpat (* ssr plugin *)
+| ssripats (* ssr plugin *)
+]
+
+ssrcpat: [
+| "YouShouldNotTypeThis" ssriorpat (* ssr plugin *)
+| test_nohidden "[" hat "]" (* ssr plugin *)
+| test_nohidden "[" ssriorpat "]" (* ssr plugin *)
+| test_nohidden "[=" ssriorpat "]" (* ssr plugin *)
+]
+
+hat: [
+| "^" ident (* ssr plugin *)
+| "^" "~" ident (* ssr plugin *)
+| "^" "~" natural (* ssr plugin *)
+| "^~" ident (* ssr plugin *)
+| "^~" natural (* ssr plugin *)
+]
+
+ssripats_ne: [
+| ssripat ssripats (* ssr plugin *)
+]
+
+ssrhpats: [
+| ssripats (* ssr plugin *)
+]
+
+ssrhpats_wtransp: [
+| ssripats (* ssr plugin *)
+| ssripats "@" ssripats (* ssr plugin *)
+]
+
+ssrhpats_nobs: [
+| ssripats (* ssr plugin *)
+]
+
+ssrrpat: [
+| "->" (* ssr plugin *)
+| "<-" (* ssr plugin *)
+]
+
+ssrintros_ne: [
+| "=>" ssripats_ne (* ssr plugin *)
+]
+
+ssrintros: [
+| ssrintros_ne (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrintrosarg: [
+| "YouShouldNotTypeThis" ssrtacarg ssrintros_ne (* ssr plugin *)
+]
+
+ssrfwdid: [
+| test_ssrfwdid Prim.ident (* ssr plugin *)
+]
+
+ssrortacs: [
+| ssrtacarg "|" ssrortacs (* ssr plugin *)
+| ssrtacarg "|" (* ssr plugin *)
+| ssrtacarg (* ssr plugin *)
+| "|" ssrortacs (* ssr plugin *)
+| "|" (* ssr plugin *)
+]
+
+ssrhintarg: [
+| "[" "]" (* ssr plugin *)
+| "[" ssrortacs "]" (* ssr plugin *)
+| ssrtacarg (* ssr plugin *)
+]
+
+ssrhint3arg: [
+| "[" "]" (* ssr plugin *)
+| "[" ssrortacs "]" (* ssr plugin *)
+| ssrtac3arg (* ssr plugin *)
+]
+
+ssrortacarg: [
+| "[" ssrortacs "]" (* ssr plugin *)
+]
+
+ssrhint: [
+| (* ssr plugin *)
+| "by" ssrhintarg (* ssr plugin *)
+]
+
+ssrwgen: [
+| ssrclear_ne (* ssr plugin *)
+| ssrhoi_hyp (* ssr plugin *)
+| "@" ssrhoi_hyp (* ssr plugin *)
+| "(" ssrhoi_id ":=" lcpattern ")" (* ssr plugin *)
+| "(" ssrhoi_id ")" (* ssr plugin *)
+| "(@" ssrhoi_id ":=" lcpattern ")" (* ssr plugin *)
+| "(" "@" ssrhoi_id ":=" lcpattern ")" (* ssr plugin *)
+]
+
+ssrclausehyps: [
+| ssrwgen "," ssrclausehyps (* ssr plugin *)
+| ssrwgen ssrclausehyps (* ssr plugin *)
+| ssrwgen (* ssr plugin *)
+]
+
+ssrclauses: [
+| "in" ssrclausehyps "|-" "*" (* ssr plugin *)
+| "in" ssrclausehyps "|-" (* ssr plugin *)
+| "in" ssrclausehyps "*" (* ssr plugin *)
+| "in" ssrclausehyps (* ssr plugin *)
+| "in" "|-" "*" (* ssr plugin *)
+| "in" "*" (* ssr plugin *)
+| "in" "*" "|-" (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrfwd: [
+| ":=" ast_closure_lterm (* ssr plugin *)
+| ":" ast_closure_lterm ":=" ast_closure_lterm (* ssr plugin *)
+]
+
+ssrbvar: [
+| ident (* ssr plugin *)
+| "_" (* ssr plugin *)
+]
+
+ssrbinder: [
+| ssrbvar (* ssr plugin *)
+| "(" ssrbvar ")" (* ssr plugin *)
+| "(" ssrbvar ":" lconstr ")" (* ssr plugin *)
+| "(" ssrbvar LIST1 ssrbvar ":" lconstr ")" (* ssr plugin *)
+| "(" ssrbvar ":" lconstr ":=" lconstr ")" (* ssr plugin *)
+| "(" ssrbvar ":=" lconstr ")" (* ssr plugin *)
+| [ "of" | "&" ] operconstr99 (* ssr plugin *)
+]
+
+ssrstruct: [
+| "{" "struct" ident "}" (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrposefwd: [
+| LIST0 ssrbinder ssrfwd (* ssr plugin *)
+]
+
+ssrfixfwd: [
+| "fix" ssrbvar LIST0 ssrbinder ssrstruct ssrfwd (* ssr plugin *)
+]
+
+ssrcofixfwd: [
+| "cofix" ssrbvar LIST0 ssrbinder ssrfwd (* ssr plugin *)
+]
+
+ssrsetfwd: [
+| ":" ast_closure_lterm ":=" "{" ssrocc "}" cpattern (* ssr plugin *)
+| ":" ast_closure_lterm ":=" lcpattern (* ssr plugin *)
+| ":=" "{" ssrocc "}" cpattern (* ssr plugin *)
+| ":=" lcpattern (* ssr plugin *)
+]
+
+ssrhavefwd: [
+| ":" ast_closure_lterm ssrhint (* ssr plugin *)
+| ":" ast_closure_lterm ":=" ast_closure_lterm (* ssr plugin *)
+| ":" ast_closure_lterm ":=" (* ssr plugin *)
+| ":=" ast_closure_lterm (* ssr plugin *)
+]
+
+ssrhavefwdwbinders: [
+| ssrhpats_wtransp LIST0 ssrbinder ssrhavefwd (* ssr plugin *)
+]
+
+ssrdoarg: [
+]
+
+ssrseqarg: [
+| ssrswap (* ssr plugin *)
+| ssrseqidx ssrortacarg OPT ssrorelse (* ssr plugin *)
+| ssrseqidx ssrswap (* ssr plugin *)
+| tactic_expr3 (* ssr plugin *)
+]
+
+ssrseqidx: [
+| test_ssrseqvar Prim.ident (* ssr plugin *)
+| Prim.natural (* ssr plugin *)
+]
+
+ssrswap: [
+| "first" (* ssr plugin *)
+| "last" (* ssr plugin *)
+]
+
+ssrorelse: [
+| "||" tactic_expr2 (* ssr plugin *)
+]
+
+Prim.ident: [
+| IDENT ssr_null_entry (* ssr plugin *)
+]
+
+ssrparentacarg: [
+| "(" tactic_expr5 ")" (* ssr plugin *)
+]
+
+ssrdotac: [
+| tactic_expr3 (* ssr plugin *)
+| ssrortacarg (* ssr plugin *)
+]
+
+ssrseqdir: [
+]
+
+ssr_first: [
+| ssr_first ssrintros_ne (* ssr plugin *)
+| "[" LIST0 tactic_expr5 SEP "|" "]" (* ssr plugin *)
+]
+
+ssr_first_else: [
+| ssr_first ssrorelse (* ssr plugin *)
+| ssr_first (* ssr plugin *)
+]
+
+ssrgen: [
+| ssrdocc cpattern (* ssr plugin *)
+| cpattern (* ssr plugin *)
+]
+
+ssrdgens_tl: [
+| "{" LIST1 ssrhyp "}" cpattern ssrdgens_tl (* ssr plugin *)
+| "{" LIST1 ssrhyp "}" (* ssr plugin *)
+| "{" ssrocc "}" cpattern ssrdgens_tl (* ssr plugin *)
+| "/" ssrdgens_tl (* ssr plugin *)
+| cpattern ssrdgens_tl (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrdgens: [
+| ":" ssrgen ssrdgens_tl (* ssr plugin *)
+]
+
+ssreqid: [
+| test_ssreqid ssreqpat (* ssr plugin *)
+| test_ssreqid (* ssr plugin *)
+]
+
+ssreqpat: [
+| Prim.ident (* ssr plugin *)
+| "_" (* ssr plugin *)
+| "?" (* ssr plugin *)
+| "+" (* ssr plugin *)
+| ssrdocc "->" (* ssr plugin *)
+| ssrdocc "<-" (* ssr plugin *)
+| "->" (* ssr plugin *)
+| "<-" (* ssr plugin *)
+]
+
+ssrarg: [
+| ssrfwdview ssreqid ssrdgens ssrintros (* ssr plugin *)
+| ssrfwdview ssrclear ssrintros (* ssr plugin *)
+| ssreqid ssrdgens ssrintros (* ssr plugin *)
+| ssrclear_ne ssrintros (* ssr plugin *)
+| ssrintros_ne (* ssr plugin *)
+]
+
+ssrmovearg: [
+| ssrarg (* ssr plugin *)
+]
+
+ssrcasearg: [
+| ssrarg (* ssr plugin *)
+]
+
+ssragen: [
+| "{" LIST1 ssrhyp "}" ssrterm (* ssr plugin *)
+| ssrterm (* ssr plugin *)
+]
+
+ssragens: [
+| "{" LIST1 ssrhyp "}" ssrterm ssragens (* ssr plugin *)
+| "{" LIST1 ssrhyp "}" (* ssr plugin *)
+| ssrterm ssragens (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrapplyarg: [
+| ":" ssragen ssragens ssrintros (* ssr plugin *)
+| ssrclear_ne ssrintros (* ssr plugin *)
+| ssrintros_ne (* ssr plugin *)
+| ssrbwdview ":" ssragen ssragens ssrintros (* ssr plugin *)
+| ssrbwdview ssrclear ssrintros (* ssr plugin *)
+]
+
+ssrexactarg: [
+| ":" ssragen ssragens (* ssr plugin *)
+| ssrbwdview ssrclear (* ssr plugin *)
+| ssrclear_ne (* ssr plugin *)
+]
+
+ssrcongrarg: [
+| natural constr ssrdgens (* ssr plugin *)
+| natural constr (* ssr plugin *)
+| constr ssrdgens (* ssr plugin *)
+| constr (* ssr plugin *)
+]
+
+ssrrwocc: [
+| "{" LIST0 ssrhyp "}" (* ssr plugin *)
+| "{" ssrocc "}" (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrrule_ne: [
+| test_not_ssrslashnum [ "/" ssrterm | ssrterm | ssrsimpl_ne ] (* ssr plugin *)
+| ssrsimpl_ne (* ssr plugin *)
+]
+
+ssrrule: [
+| ssrrule_ne (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrpattern_squarep: [
+| "[" rpattern "]" (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrpattern_ne_squarep: [
+| "[" rpattern "]" (* ssr plugin *)
+]
+
+ssrrwarg: [
+| "-" ssrmult ssrrwocc ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| "-/" ssrterm (* ssr plugin *)
+| ssrmult_ne ssrrwocc ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| "{" LIST1 ssrhyp "}" ssrpattern_ne_squarep ssrrule_ne (* ssr plugin *)
+| "{" LIST1 ssrhyp "}" ssrrule (* ssr plugin *)
+| "{" ssrocc "}" ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| "{" "}" ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| ssrpattern_ne_squarep ssrrule_ne (* ssr plugin *)
+| ssrrule_ne (* ssr plugin *)
+]
+
+ssrrwargs: [
+| test_ssr_rw_syntax LIST1 ssrrwarg (* ssr plugin *)
+]
+
+ssrunlockarg: [
+| "{" ssrocc "}" ssrterm (* ssr plugin *)
+| ssrterm (* ssr plugin *)
+]
+
+ssrunlockargs: [
+| LIST0 ssrunlockarg (* ssr plugin *)
+]
+
+ssrsufffwd: [
+| ssrhpats LIST0 ssrbinder ":" ast_closure_lterm ssrhint (* ssr plugin *)
+]
+
+ssrwlogfwd: [
+| ":" LIST0 ssrwgen "/" ast_closure_lterm (* ssr plugin *)
+]
+
+ssr_idcomma: [
+| (* ssr plugin *)
+| test_idcomma [ IDENT | "_" ] "," (* ssr plugin *)
+]
+
+ssr_rtype: [
+| "return" operconstr100 (* ssr plugin *)
+]
+
+ssr_mpat: [
+| pattern200 (* ssr plugin *)
+]
+
+ssr_dpat: [
+| ssr_mpat "in" pattern200 ssr_rtype (* ssr plugin *)
+| ssr_mpat ssr_rtype (* ssr plugin *)
+| ssr_mpat (* ssr plugin *)
+]
+
+ssr_dthen: [
+| ssr_dpat "then" lconstr (* ssr plugin *)
+]
+
+ssr_elsepat: [
+| "else" (* ssr plugin *)
+]
+
+ssr_else: [
+| ssr_elsepat lconstr (* ssr plugin *)
+]
+
+ssr_search_item: [
+| string (* ssr plugin *)
+| string "%" preident (* ssr plugin *)
+| constr_pattern (* ssr plugin *)
+]
+
+ssr_search_arg: [
+| "-" ssr_search_item ssr_search_arg (* ssr plugin *)
+| ssr_search_item ssr_search_arg (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssr_modlocs: [
+| (* ssr plugin *)
+| "in" LIST1 modloc (* ssr plugin *)
+]
+
+modloc: [
+| "-" global (* ssr plugin *)
+| global (* ssr plugin *)
+]
+
+ssrhintref: [
+| constr (* ssr plugin *)
+| constr "|" natural (* ssr plugin *)
+]
+
+ssrviewpos: [
+| "for" "move" "/" (* ssr plugin *)
+| "for" "apply" "/" (* ssr plugin *)
+| "for" "apply" "/" "/" (* ssr plugin *)
+| "for" "apply" "//" (* ssr plugin *)
+| (* ssr plugin *)
+]
+
+ssrviewposspc: [
+| ssrviewpos (* ssr plugin *)
+]
+
+rpattern: [
+| lconstr (* ssrmatching plugin *)
+| "in" lconstr (* ssrmatching plugin *)
+| lconstr "in" lconstr (* ssrmatching plugin *)
+| "in" lconstr "in" lconstr (* ssrmatching plugin *)
+| lconstr "in" lconstr "in" lconstr (* ssrmatching plugin *)
+| lconstr "as" lconstr "in" lconstr (* ssrmatching plugin *)
+]
+
+cpattern: [
+| "Qed" constr (* ssrmatching plugin *)
+| ssrtermkind constr (* ssrmatching plugin *)
+]
+
+lcpattern: [
+| "Qed" lconstr (* ssrmatching plugin *)
+| ssrtermkind lconstr (* ssrmatching plugin *)
+]
+
+ssrpatternarg: [
+| rpattern (* ssrmatching plugin *)
+]
+
+numnotoption: [
+|
+| "(" "warning" "after" bigint ")"
+| "(" "abstract" "after" bigint ")"
+]
+
diff --git a/doc/tools/docgram/orderedGrammar b/doc/tools/docgram/orderedGrammar
new file mode 100644
index 0000000000..cd6e11505c
--- /dev/null
+++ b/doc/tools/docgram/orderedGrammar
@@ -0,0 +1,4170 @@
+(* Defines the order to apply to editedGrammar to get productionlistGrammar.
+doc_grammar will modify this file to add/remove nonterminals and productions
+to match editedGrammar; that rewriting step strips comments. Not compiled into Coq *)
+DOC_GRAMMAR
+
+global: [
+| reference
+]
+
+constr_pattern: [
+| term
+]
+
+sort: [
+| "Set"
+| "Prop"
+| "SProp"
+| "Type"
+| "Type" "@{" "_" "}"
+| "Type" "@{" universe "}"
+]
+
+sort_family: [
+| "Set"
+| "Prop"
+| "SProp"
+| "Type"
+]
+
+universe_increment: [
+| "+" natural
+| empty
+]
+
+universe_name: [
+| global
+| "Set"
+| "Prop"
+]
+
+universe_expr: [
+| universe_name universe_increment
+]
+
+universe: [
+| "max" "(" universe_expr_list_comma ")"
+| universe_expr
+]
+
+universe_expr_list_comma: [
+| universe_expr_list_comma "," universe_expr
+| universe_expr
+]
+
+lconstr: [
+| operconstr200
+| lconstr
+]
+
+term: [
+| operconstr8
+| "@" global instance
+]
+
+operconstr200: [
+| binder_constr
+| operconstr100
+]
+
+operconstr100: [
+| operconstr99 "<:" binder_constr
+| operconstr99 "<:" operconstr100
+| operconstr99 "<<:" binder_constr
+| operconstr99 "<<:" operconstr100
+| operconstr99 ":" binder_constr
+| operconstr99 ":" operconstr100
+| operconstr99 ":>"
+| operconstr99
+]
+
+operconstr99: [
+| operconstr90
+]
+
+operconstr90: [
+| operconstr10
+]
+
+operconstr10: [
+| operconstr9 appl_arg_list
+| "@" global instance operconstr9_list_opt
+| "@" pattern_identref ident_list
+| operconstr9
+]
+
+appl_arg_list: [
+| appl_arg_list appl_arg
+| appl_arg
+]
+
+operconstr9: [
+| ".." operconstr0 ".."
+| operconstr8
+]
+
+operconstr8: [
+| operconstr1
+]
+
+operconstr1: [
+| operconstr0 ".(" global appl_arg_list_opt ")"
+| operconstr0 ".(" "@" global operconstr9_list_opt ")"
+| operconstr0 "%" IDENT
+| operconstr0
+]
+
+appl_arg_list_opt: [
+| appl_arg_list_opt appl_arg
+| empty
+]
+
+operconstr9_list_opt: [
+| operconstr9_list_opt operconstr9
+| empty
+]
+
+operconstr0: [
+| atomic_constr
+| match_constr
+| "(" operconstr200 ")"
+| "{|" record_declaration bar_cbrace
+| "{" binder_constr "}"
+| "`{" operconstr200 "}"
+| "`(" operconstr200 ")"
+| "ltac" ":" "(" ltac_expr ")"
+]
+
+record_declaration: [
+| record_fields
+]
+
+record_fields: [
+| record_field_declaration ";" record_fields
+| record_field_declaration
+| empty
+| record_field ";" record_fields
+| record_field ";"
+| record_field
+]
+
+record_field_declaration: [
+| global binders ":=" lconstr
+]
+
+binder_constr: [
+| "forall" open_binders "," operconstr200
+| "fun" open_binders "=>" operconstr200
+| "let" name binders type_cstr ":=" operconstr200 "in" operconstr200
+| "let" single_fix "in" operconstr200
+| "let" name_alt return_type ":=" operconstr200 "in" operconstr200
+| "let" "'" pattern200 ":=" operconstr200 "in" operconstr200
+| "let" "'" pattern200 ":=" operconstr200 case_type "in" operconstr200
+| "let" "'" pattern200 "in" pattern200 ":=" operconstr200 case_type "in" operconstr200
+| "if" operconstr200 return_type "then" operconstr200 "else" operconstr200
+| fix_constr
+| "if" operconstr200 "is" ssr_dthen ssr_else (* ssr plugin *)
+| "if" operconstr200 "isn't" ssr_dthen ssr_else (* ssr plugin *)
+| "let" ":" ssr_mpat ":=" lconstr "in" lconstr (* ssr plugin *)
+| "let" ":" ssr_mpat ":=" lconstr ssr_rtype "in" lconstr (* ssr plugin *)
+| "let" ":" ssr_mpat "in" pattern200 ":=" lconstr ssr_rtype "in" lconstr (* ssr plugin *)
+]
+
+name_alt: [
+| "(" name_list_comma_opt ")"
+| "()"
+]
+
+name_list_comma_opt: [
+| name_list_comma
+| empty
+]
+
+name_list_comma: [
+| name_list_comma "," name
+| name
+]
+
+name_list_opt: [
+| name_list_opt name
+| empty
+]
+
+name_list: [
+| name_list name
+| name
+]
+
+appl_arg: [
+| lpar_id_coloneq lconstr ")"
+| operconstr9
+]
+
+atomic_constr: [
+| global instance
+| sort
+| NUMERAL
+| string
+| "_"
+| "?" "[" ident "]"
+| "?" "[" "?" ident "]"
+| "?" ident evar_instance
+]
+
+inst: [
+| ident ":=" lconstr
+]
+
+evar_instance: [
+| "@{" inst_list_semi "}"
+| empty
+]
+
+inst_list_semi: [
+| inst_list_semi ";" inst
+| inst
+]
+
+instance: [
+| "@{" universe_level_list_opt "}"
+| empty
+]
+
+universe_level_list_opt: [
+| universe_level_list_opt universe_level
+| empty
+]
+
+universe_level: [
+| "Set"
+| "Prop"
+| "Type"
+| "_"
+| global
+]
+
+fix_constr: [
+| single_fix
+| single_fix "with" fix_decl_list "for" ident
+]
+
+fix_decl_list: [
+| fix_decl_list "with" fix_decl
+| fix_decl
+]
+
+single_fix: [
+| fix_kw fix_decl
+]
+
+fix_kw: [
+| "fix"
+| "cofix"
+]
+
+fix_decl: [
+| ident binders_fixannot type_cstr ":=" operconstr200
+]
+
+match_constr: [
+| "match" case_item_list_comma case_type_opt "with" branches "end"
+]
+
+case_item_list_comma: [
+| case_item_list_comma "," case_item
+| case_item
+]
+
+case_type_opt: [
+| case_type
+| empty
+]
+
+case_item: [
+| operconstr100 as_opt in_opt
+]
+
+as_opt2: [
+| as_opt case_type
+| empty
+]
+
+in_opt: [
+| "in" pattern200
+| empty
+]
+
+case_type: [
+| "return" operconstr100
+]
+
+return_type: [
+| as_opt2
+]
+
+as_opt3: [
+| "as" dirpath
+| empty
+]
+
+branches: [
+| or_opt eqn_list_or_opt
+]
+
+mult_pattern: [
+| pattern200_list_comma
+]
+
+pattern200_list_comma: [
+| pattern200_list_comma "," pattern200
+| pattern200
+]
+
+eqn: [
+| mult_pattern_list_or "=>" lconstr
+]
+
+mult_pattern_list_or: [
+| mult_pattern_list_or "|" mult_pattern
+| mult_pattern
+]
+
+record_pattern: [
+| global ":=" pattern200
+]
+
+record_patterns: [
+| record_pattern ";" record_patterns
+| record_pattern ";"
+| record_pattern
+| empty
+]
+
+pattern200: [
+| pattern100
+]
+
+pattern100: [
+| pattern99 ":" binder_constr
+| pattern99 ":" operconstr100
+| pattern99
+]
+
+pattern99: [
+| pattern90
+]
+
+pattern90: [
+| pattern10
+]
+
+pattern10: [
+| pattern1 "as" name
+| pattern1 pattern1_list
+| "@" reference pattern1_list_opt
+| pattern1
+]
+
+pattern1_list: [
+| pattern1_list pattern1
+| pattern1
+]
+
+pattern1_list_opt: [
+| pattern1_list_opt pattern1
+| empty
+]
+
+pattern1: [
+| pattern0 "%" IDENT
+| pattern0
+]
+
+pattern0: [
+| reference
+| "{|" record_patterns bar_cbrace
+| "_"
+| "(" pattern200 ")"
+| "(" pattern200 "|" pattern200_list_or ")"
+| NUMERAL
+| string
+]
+
+pattern200_list_or: [
+| pattern200_list_or "|" pattern200
+| pattern200
+]
+
+impl_ident_tail: [
+| "}"
+| name_list ":" lconstr "}"
+| name_list "}"
+| ":" lconstr "}"
+]
+
+fixannot: [
+| "{" "struct" ident "}"
+| "{" "wf" term ident "}"
+| "{" "measure" term ident_opt term_opt "}"
+| "{" "struct" name "}"
+| empty
+]
+
+term_opt: [
+| term
+| empty
+]
+
+impl_name_head: [
+| empty
+]
+
+binders_fixannot: [
+| impl_name_head impl_ident_tail binders_fixannot
+| fixannot
+| binder binders_fixannot
+| empty
+]
+
+open_binders: [
+| name name_list_opt ":" lconstr
+| name name_list_opt binders
+| name ".." name
+| closed_binder binders
+]
+
+binders: [
+| binder_list_opt
+]
+
+binder_list_opt: [
+| binder_list_opt binder
+| empty
+]
+
+binder: [
+| name
+| closed_binder
+]
+
+typeclass_constraint: [
+| "!" operconstr200
+| "{" name "}" ":" exclam_opt operconstr200
+| name_colon exclam_opt operconstr200
+| operconstr200
+]
+
+type_cstr: [
+| lconstr_opt
+| ":" lconstr
+| empty
+]
+
+preident: [
+| IDENT
+]
+
+pattern_identref: [
+| "?" ident
+]
+
+var: [
+| ident
+]
+
+field: [
+| FIELD
+]
+
+fields: [
+| field fields
+| field
+]
+
+fullyqualid: [
+| ident fields
+| ident
+]
+
+basequalid: [
+| ident fields
+| ident
+]
+
+name: [
+| "_"
+| ident
+]
+
+reference: [
+| ident fields
+| ident
+]
+
+by_notation: [
+| ne_string IDENT_opt
+]
+
+IDENT_opt: [
+| "%" IDENT
+| empty
+]
+
+smart_global: [
+| reference
+| by_notation
+]
+
+qualid: [
+| basequalid
+]
+
+ne_string: [
+| STRING
+]
+
+ne_lstring: [
+| ne_string
+]
+
+dirpath: [
+| ident field_list_opt
+]
+
+field_list_opt: [
+| field_list_opt field
+| empty
+]
+
+string: [
+| STRING
+]
+
+lstring: [
+| string
+]
+
+integer: [
+| NUMERAL
+| "-" NUMERAL
+]
+
+natural: [
+| NUMERAL
+]
+
+bigint: [
+| NUMERAL
+]
+
+bar_cbrace: [
+| "|" "}"
+]
+
+vernac_control: [
+| "Time" vernac_control
+| "Redirect" ne_string vernac_control
+| "Timeout" natural vernac_control
+| "Fail" vernac_control
+| decorated_vernac
+]
+
+decorated_vernac: [
+| quoted_attributes_list_opt vernac
+]
+
+quoted_attributes_list_opt: [
+| quoted_attributes_list_opt quoted_attributes
+| empty
+]
+
+quoted_attributes: [
+| "#[" attribute_list_comma_opt "]"
+]
+
+attribute_list_comma_opt: [
+| attribute_list_comma
+| empty
+]
+
+attribute_list_comma: [
+| attribute_list_comma "," attribute
+| attribute
+]
+
+attribute: [
+| ident attribute_value
+]
+
+attribute_value: [
+| "=" string
+| "(" attribute_list_comma_opt ")"
+| empty
+]
+
+vernac: [
+| "Local" vernac_poly
+| "Global" vernac_poly
+| vernac_poly
+]
+
+vernac_poly: [
+| "Polymorphic" vernac_aux
+| "Monomorphic" vernac_aux
+| vernac_aux
+]
+
+vernac_aux: [
+| "Program" gallina "."
+| "Program" gallina_ext "."
+| gallina "."
+| gallina_ext "."
+| command "."
+| syntax "."
+| subprf
+| command_entry
+]
+
+noedit_mode: [
+| query_command
+]
+
+subprf: [
+| BULLET
+| "{"
+| "}"
+]
+
+gallina: [
+| thm_token ident_decl binders ":" lconstr with_list_opt
+| assumption_token inline assum_list
+| assumptions_token inline assum_list
+| def_token ident_decl def_body
+| "Let" ident def_body
+| cumulativity_token_opt private_token finite_token inductive_definition_list
+| "Fixpoint" rec_definition_list
+| "Let" "Fixpoint" rec_definition_list
+| "CoFixpoint" corec_definition_list
+| "Let" "CoFixpoint" corec_definition_list
+| "Scheme" scheme_list
+| "Combined" "Scheme" ident "from" ident_list_comma
+| "Register" global "as" qualid
+| "Register" "Inline" global
+| "Primitive" ident lconstr_opt ":=" register_token
+| "Universe" ident_list
+| "Universes" ident_list
+| "Constraint" univ_constraint_list_comma
+]
+
+with_list_opt: [
+| with_list_opt "with" ident_decl binders ":" lconstr
+| empty
+]
+
+cumulativity_token_opt: [
+| cumulativity_token
+| empty
+]
+
+inductive_definition_list: [
+| inductive_definition_list "with" inductive_definition
+| inductive_definition
+]
+
+rec_definition_list: [
+| rec_definition_list "with" rec_definition
+| rec_definition
+]
+
+corec_definition_list: [
+| corec_definition_list "with" corec_definition
+| corec_definition
+]
+
+scheme_list: [
+| scheme_list "with" scheme
+| scheme
+]
+
+ident_list_comma: [
+| ident_list_comma "," ident
+| ident
+]
+
+univ_constraint_list_comma: [
+| univ_constraint_list_comma "," univ_constraint
+| univ_constraint
+]
+
+lconstr_opt2: [
+| ":=" lconstr
+| empty
+]
+
+register_token: [
+| register_prim_token
+| register_type_token
+]
+
+register_type_token: [
+| "#int63_type"
+]
+
+register_prim_token: [
+| "#int63_head0"
+| "#int63_tail0"
+| "#int63_add"
+| "#int63_sub"
+| "#int63_mul"
+| "#int63_div"
+| "#int63_mod"
+| "#int63_lsr"
+| "#int63_lsl"
+| "#int63_land"
+| "#int63_lor"
+| "#int63_lxor"
+| "#int63_addc"
+| "#int63_subc"
+| "#int63_addcarryc"
+| "#int63_subcarryc"
+| "#int63_mulc"
+| "#int63_diveucl"
+| "#int63_div21"
+| "#int63_addmuldiv"
+| "#int63_eq"
+| "#int63_lt"
+| "#int63_le"
+| "#int63_compare"
+]
+
+thm_token: [
+| "Theorem"
+| "Lemma"
+| "Fact"
+| "Remark"
+| "Corollary"
+| "Proposition"
+| "Property"
+]
+
+def_token: [
+| "Definition"
+| "Example"
+| "SubClass"
+]
+
+assumption_token: [
+| "Hypothesis"
+| "Variable"
+| "Axiom"
+| "Parameter"
+| "Conjecture"
+]
+
+assumptions_token: [
+| "Hypotheses"
+| "Variables"
+| "Axioms"
+| "Parameters"
+| "Conjectures"
+]
+
+inline: [
+| "Inline" "(" natural ")"
+| "Inline"
+| empty
+]
+
+univ_constraint: [
+| universe_name lt_alt universe_name
+]
+
+lt_alt: [
+| "<"
+| "="
+| "<="
+]
+
+univ_decl: [
+| "@{" ident_list_opt plus_opt univ_constraint_alt
+]
+
+plus_opt: [
+| "+"
+| empty
+]
+
+univ_constraint_alt: [
+| "|" univ_constraint_list_comma_opt plus_opt "}"
+| rbrace_alt
+]
+
+univ_constraint_list_comma_opt: [
+| univ_constraint_list_comma
+| empty
+]
+
+rbrace_alt: [
+| "}"
+| bar_cbrace
+]
+
+ident_decl: [
+| ident univ_decl_opt
+]
+
+finite_token: [
+| "Inductive"
+| "CoInductive"
+| "Variant"
+| "Record"
+| "Structure"
+| "Class"
+]
+
+cumulativity_token: [
+| "Cumulative"
+| "NonCumulative"
+]
+
+private_token: [
+| "Private"
+| empty
+]
+
+def_body: [
+| binders ":=" reduce lconstr
+| binders ":" lconstr ":=" reduce lconstr
+| binders ":" lconstr
+]
+
+reduce: [
+| "Eval" red_expr "in"
+| empty
+]
+
+one_decl_notation: [
+| ne_lstring ":=" term IDENT_opt2
+]
+
+IDENT_opt2: [
+| ":" IDENT
+| empty
+]
+
+decl_sep: [
+| "and"
+]
+
+decl_notation: [
+| "where" one_decl_notation_list
+| empty
+]
+
+one_decl_notation_list: [
+| one_decl_notation_list decl_sep one_decl_notation
+| one_decl_notation
+]
+
+opt_constructors_or_fields: [
+| ":=" constructor_list_or_record_decl
+| empty
+]
+
+inductive_definition: [
+| opt_coercion ident_decl binders lconstr_opt opt_constructors_or_fields decl_notation
+]
+
+constructor_list_or_record_decl: [
+| "|" constructor_list_or
+| ident constructor_type "|" constructor_list_or_opt
+| ident constructor_type
+| ident "{" record_fields "}"
+| "{" record_fields "}"
+| empty
+]
+
+constructor_list_or: [
+| constructor_list_or "|" constructor
+| constructor
+]
+
+constructor_list_or_opt: [
+| constructor_list_or
+| empty
+]
+
+opt_coercion: [
+| ">"
+| empty
+]
+
+rec_definition: [
+| ident_decl binders_fixannot type_cstr lconstr_opt2 decl_notation
+]
+
+corec_definition: [
+| ident_decl binders type_cstr lconstr_opt2 decl_notation
+]
+
+lconstr_opt: [
+| ":" lconstr
+| empty
+]
+
+scheme: [
+| scheme_kind
+| ident ":=" scheme_kind
+]
+
+scheme_kind: [
+| "Induction" "for" smart_global "Sort" sort_family
+| "Minimality" "for" smart_global "Sort" sort_family
+| "Elimination" "for" smart_global "Sort" sort_family
+| "Case" "for" smart_global "Sort" sort_family
+| "Equality" "for" smart_global
+]
+
+record_field: [
+| quoted_attributes_list_opt record_binder natural_opt2 decl_notation
+]
+
+record_binder_body: [
+| binders of_type_with_opt_coercion lconstr
+| binders of_type_with_opt_coercion lconstr ":=" lconstr
+| binders ":=" lconstr
+]
+
+record_binder: [
+| name
+| name record_binder_body
+]
+
+assum_list: [
+| assum_coe_list
+| simple_assum_coe
+]
+
+assum_coe_list: [
+| assum_coe_list assum_coe
+| assum_coe
+]
+
+assum_coe: [
+| "(" simple_assum_coe ")"
+]
+
+simple_assum_coe: [
+| ident_decl_list of_type_with_opt_coercion lconstr
+]
+
+ident_decl_list: [
+| ident_decl_list ident_decl
+| ident_decl
+]
+
+constructor_type: [
+| binders of_type_with_opt_coercion_opt
+]
+
+of_type_with_opt_coercion_opt: [
+| of_type_with_opt_coercion lconstr
+| empty
+]
+
+constructor: [
+| ident constructor_type
+]
+
+of_type_with_opt_coercion: [
+| ":>>"
+| ":>" ">"
+| ":>"
+| ":" ">" ">"
+| ":" ">"
+| ":"
+]
+
+gallina_ext: [
+| "Module" export_token ident module_binder_list_opt of_module_type is_module_expr
+| "Module" "Type" ident module_binder_list_opt check_module_types is_module_type
+| "Declare" "Module" export_token ident module_binder_list_opt ":" module_type_inl
+| "Section" ident
+| "Chapter" ident
+| "End" ident
+| "Collection" ident ":=" section_subset_expr
+| "Require" export_token global_list
+| "From" global "Require" export_token global_list
+| "Import" global_list
+| "Export" global_list
+| "Include" module_type_inl ext_module_expr_list_opt
+| "Include" "Type" module_type_inl ext_module_type_list_opt
+| "Transparent" smart_global_list
+| "Opaque" smart_global_list
+| "Strategy" strategy_level_list
+| "Canonical" Structure_opt global univ_decl_opt2
+| "Canonical" Structure_opt by_notation
+| "Coercion" global univ_decl_opt def_body
+| "Identity" "Coercion" ident ":" class_rawexpr ">->" class_rawexpr
+| "Coercion" global ":" class_rawexpr ">->" class_rawexpr
+| "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr
+| "Context" binder_list
+| "Instance" instance_name ":" operconstr200 hint_info record_declaration_opt
+| "Existing" "Instance" global hint_info
+| "Existing" "Instances" global_list natural_opt2
+| "Existing" "Class" global
+| "Arguments" smart_global argument_spec_block_list_opt more_implicits_block_opt arguments_modifier_opt
+| "Implicit" "Type" reserv_list
+| "Implicit" "Types" reserv_list
+| "Generalizable" All_alt
+| "Export" "Set" option_table option_setting
+| "Export" "Unset" option_table
+| "Import" "Prenex" "Implicits" (* ssr plugin *)
+]
+
+module_binder_list_opt: [
+| module_binder_list_opt module_binder
+| empty
+]
+
+ext_module_expr_list_opt: [
+| ext_module_expr_list_opt ext_module_expr
+| empty
+]
+
+ext_module_type_list_opt: [
+| ext_module_type_list_opt ext_module_type
+| empty
+]
+
+strategy_level_list: [
+| strategy_level_list strategy_level "[" smart_global_list "]"
+| strategy_level "[" smart_global_list "]"
+]
+
+Structure_opt: [
+| "Structure"
+| empty
+]
+
+univ_decl_opt: [
+| univ_decl
+| empty
+]
+
+binder_list: [
+| binder_list binder
+| binder
+]
+
+record_declaration_opt: [
+| ":=" "{" record_declaration "}"
+| ":=" lconstr
+| empty
+]
+
+natural_opt: [
+| natural
+| empty
+]
+
+argument_spec_block_list_opt: [
+| argument_spec_block_list_opt argument_spec_block
+| empty
+]
+
+more_implicits_block_opt: [
+| "," more_implicits_block_list_comma
+| empty
+]
+
+more_implicits_block_list_comma: [
+| more_implicits_block_list_comma "," more_implicits_block_list_opt
+| more_implicits_block_list_opt
+]
+
+arguments_modifier_opt: [
+| ":" arguments_modifier_list_comma
+| empty
+]
+
+arguments_modifier_list_comma: [
+| arguments_modifier_list_comma "," arguments_modifier
+| arguments_modifier
+]
+
+All_alt: [
+| "All" "Variables"
+| "No" "Variables"
+| Variable_alt ident_list
+]
+
+Variable_alt: [
+| "Variable"
+| "Variables"
+]
+
+more_implicits_block_list_opt: [
+| more_implicits_block_list_opt more_implicits_block
+| empty
+]
+
+univ_decl_opt2: [
+| univ_decl_opt def_body
+| empty
+]
+
+export_token: [
+| "Import"
+| "Export"
+| empty
+]
+
+ext_module_type: [
+| "<+" module_type_inl
+]
+
+ext_module_expr: [
+| "<+" module_expr_inl
+]
+
+check_module_type: [
+| "<:" module_type_inl
+]
+
+check_module_types: [
+| check_module_type_list_opt
+]
+
+check_module_type_list_opt: [
+| check_module_type_list_opt check_module_type
+| empty
+]
+
+of_module_type: [
+| ":" module_type_inl
+| check_module_types
+]
+
+is_module_type: [
+| ":=" module_type_inl ext_module_type_list_opt
+| empty
+]
+
+is_module_expr: [
+| ":=" module_expr_inl ext_module_expr_list_opt
+| empty
+]
+
+functor_app_annot: [
+| "[" "inline" "at" "level" natural "]"
+| "[" "no" "inline" "]"
+| empty
+]
+
+module_expr_inl: [
+| "!" module_expr
+| module_expr functor_app_annot
+]
+
+module_type_inl: [
+| "!" module_type
+| module_type functor_app_annot
+]
+
+module_binder: [
+| "(" export_token ident_list ":" module_type_inl ")"
+]
+
+module_expr: [
+| module_expr_atom
+| module_expr module_expr_atom
+]
+
+module_expr_atom: [
+| qualid
+| "(" module_expr ")"
+]
+
+with_declaration: [
+| "Definition" fullyqualid univ_decl_opt ":=" lconstr
+| "Module" fullyqualid ":=" qualid
+]
+
+module_type: [
+| qualid
+| "(" module_type ")"
+| module_type module_expr_atom
+| module_type "with" with_declaration
+]
+
+section_subset_expr: [
+| starredidentref_list_opt
+| ssexpr35
+]
+
+starredidentref_list_opt: [
+| starredidentref_list_opt starredidentref
+| empty
+]
+
+starredidentref: [
+| ident
+| ident "*"
+| "Type"
+| "Type" "*"
+]
+
+ssexpr35: [
+| "-" ssexpr50
+| ssexpr50
+]
+
+ssexpr50: [
+| ssexpr0 "-" ssexpr0
+| ssexpr0 "+" ssexpr0
+| ssexpr0
+]
+
+ssexpr0: [
+| starredidentref
+| "(" starredidentref_list_opt ")"
+| "(" starredidentref_list_opt ")" "*"
+| "(" ssexpr35 ")"
+| "(" ssexpr35 ")" "*"
+]
+
+arguments_modifier: [
+| "simpl" "nomatch"
+| "simpl" "never"
+| "default" "implicits"
+| "clear" "implicits"
+| "clear" "scopes"
+| "clear" "bidirectionality" "hint"
+| "rename"
+| "assert"
+| "extra" "scopes"
+| "clear" "scopes" "and" "implicits"
+| "clear" "implicits" "and" "scopes"
+]
+
+scope: [
+| "%" IDENT
+]
+
+argument_spec: [
+| exclam_opt name scope_opt
+]
+
+exclam_opt: [
+| "!"
+| empty
+]
+
+scope_opt: [
+| scope
+| empty
+]
+
+argument_spec_block: [
+| argument_spec
+| "/"
+| "&"
+| "(" argument_spec_list ")" scope_opt
+| "[" argument_spec_list "]" scope_opt
+| "{" argument_spec_list "}" scope_opt
+]
+
+argument_spec_list: [
+| argument_spec_list argument_spec
+| argument_spec
+]
+
+more_implicits_block: [
+| name
+| "[" name_list "]"
+| "{" name_list "}"
+]
+
+strategy_level: [
+| "expand"
+| "opaque"
+| integer
+| "transparent"
+]
+
+instance_name: [
+| ident_decl binders
+| empty
+]
+
+hint_info: [
+| "|" natural_opt constr_pattern_opt
+| empty
+]
+
+reserv_list: [
+| reserv_tuple_list
+| simple_reserv
+]
+
+reserv_tuple_list: [
+| reserv_tuple_list reserv_tuple
+| reserv_tuple
+]
+
+reserv_tuple: [
+| "(" simple_reserv ")"
+]
+
+simple_reserv: [
+| ident_list ":" lconstr
+]
+
+command: [
+| "Comments" comment_list_opt
+| "Declare" "Instance" ident_decl binders ":" operconstr200 hint_info
+| "Declare" "Scope" IDENT
+| "Pwd"
+| "Cd"
+| "Cd" ne_string
+| "Load" Verbose_opt ne_string_alt
+| "Declare" "ML" "Module" ne_string_list
+| "Locate" locatable
+| "Add" "LoadPath" ne_string as_dirpath
+| "Add" "Rec" "LoadPath" ne_string as_dirpath
+| "Remove" "LoadPath" ne_string
+| "AddPath" ne_string "as" as_dirpath
+| "AddRecPath" ne_string "as" as_dirpath
+| "DelPath" ne_string
+| "Type" lconstr
+| "Print" printable
+| "Print" smart_global univ_name_list_opt
+| "Print" "Module" "Type" global
+| "Print" "Module" global
+| "Print" "Namespace" dirpath
+| "Inspect" natural
+| "Add" "ML" "Path" ne_string
+| "Add" "Rec" "ML" "Path" ne_string
+| "Set" option_table option_setting
+| "Unset" option_table
+| "Print" "Table" option_table
+| "Add" IDENT IDENT option_ref_value_list
+| "Add" IDENT option_ref_value_list
+| "Test" option_table "for" option_ref_value_list
+| "Test" option_table
+| "Remove" IDENT IDENT option_ref_value_list
+| "Remove" IDENT option_ref_value_list
+| "Write" "State" IDENT
+| "Write" "State" ne_string
+| "Restore" "State" IDENT
+| "Restore" "State" ne_string
+| "Reset" "Initial"
+| "Reset" ident
+| "Back"
+| "Back" natural
+| "BackTo" natural
+| "Debug" "On"
+| "Debug" "Off"
+| "Declare" "Reduction" IDENT; ":=" red_expr
+| "Declare" "Custom" "Entry" IDENT
+| "Goal" lconstr
+| "Proof"
+| "Proof" "Mode" string
+| "Proof" lconstr
+| "Abort"
+| "Abort" "All"
+| "Abort" ident
+| "Existential" natural constr_body
+| "Admitted"
+| "Qed"
+| "Save" ident
+| "Defined"
+| "Defined" ident
+| "Restart"
+| "Undo"
+| "Undo" natural
+| "Undo" "To" natural
+| "Focus"
+| "Focus" natural
+| "Unfocus"
+| "Unfocused"
+| "Show"
+| "Show" natural
+| "Show" ident
+| "Show" "Existentials"
+| "Show" "Universes"
+| "Show" "Conjectures"
+| "Show" "Proof"
+| "Show" "Intro"
+| "Show" "Intros"
+| "Show" "Match" reference
+| "Guarded"
+| "Create" "HintDb" IDENT discriminated_opt
+| "Remove" "Hints" global_list opt_hintbases
+| "Hint" hint opt_hintbases
+| "Obligation" integer "of" ident ":" lglob withtac
+| "Obligation" integer "of" ident withtac
+| "Obligation" integer ":" lglob withtac
+| "Obligation" integer withtac
+| "Next" "Obligation" "of" ident withtac
+| "Next" "Obligation" withtac
+| "Solve" "Obligation" integer "of" ident "with" tactic
+| "Solve" "Obligation" integer "with" tactic
+| "Solve" "Obligations" "of" ident "with" tactic
+| "Solve" "Obligations" "with" tactic
+| "Solve" "Obligations"
+| "Solve" "All" "Obligations" "with" tactic
+| "Solve" "All" "Obligations"
+| "Admit" "Obligations" "of" ident
+| "Admit" "Obligations"
+| "Obligation" "Tactic" ":=" tactic
+| "Show" "Obligation" "Tactic"
+| "Obligations" "of" ident
+| "Obligations"
+| "Preterm" "of" ident
+| "Preterm"
+| "Hint" "Rewrite" orient term_list ":" preident_list_opt
+| "Hint" "Rewrite" orient term_list "using" tactic ":" preident_list_opt
+| "Hint" "Rewrite" orient term_list
+| "Hint" "Rewrite" orient term_list "using" tactic
+| "Derive" "Inversion_clear" ident "with" term "Sort" sort_family
+| "Derive" "Inversion_clear" ident "with" term
+| "Derive" "Inversion" ident "with" term "Sort" sort_family
+| "Derive" "Inversion" ident "with" term
+| "Derive" "Dependent" "Inversion" ident "with" term "Sort" sort_family
+| "Derive" "Dependent" "Inversion_clear" ident "with" term "Sort" sort_family
+| "Declare" "Left" "Step" term
+| "Declare" "Right" "Step" term
+| "Grab" "Existential" "Variables"
+| "Unshelve"
+| "Declare" "Equivalent" "Keys" term term
+| "Print" "Equivalent" "Keys"
+| "Optimize" "Proof"
+| "Optimize" "Heap"
+| "Reset" "Ltac" "Profile"
+| "Show" "Ltac" "Profile"
+| "Show" "Ltac" "Profile" "CutOff" int
+| "Show" "Ltac" "Profile" string
+| "Hint" "Cut" "[" hints_path "]" opthints
+| "Typeclasses" "Transparent" reference_list_opt
+| "Typeclasses" "Opaque" reference_list_opt
+| "Typeclasses" "eauto" ":=" debug eauto_search_strategy int_opt
+| "Add" "Relation" term term "reflexivity" "proved" "by" term "symmetry" "proved" "by" term "as" ident
+| "Add" "Relation" term term "reflexivity" "proved" "by" term "as" ident
+| "Add" "Relation" term term "as" ident
+| "Add" "Relation" term term "symmetry" "proved" "by" term "as" ident
+| "Add" "Relation" term term "symmetry" "proved" "by" term "transitivity" "proved" "by" term "as" ident
+| "Add" "Relation" term term "reflexivity" "proved" "by" term "transitivity" "proved" "by" term "as" ident
+| "Add" "Relation" term term "reflexivity" "proved" "by" term "symmetry" "proved" "by" term "transitivity" "proved" "by" term "as" ident
+| "Add" "Relation" term term "transitivity" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "reflexivity" "proved" "by" term "symmetry" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "reflexivity" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "symmetry" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "symmetry" "proved" "by" term "transitivity" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "reflexivity" "proved" "by" term "transitivity" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "reflexivity" "proved" "by" term "symmetry" "proved" "by" term "transitivity" "proved" "by" term "as" ident
+| "Add" "Parametric" "Relation" binders ":" term term "transitivity" "proved" "by" term "as" ident
+| "Add" "Setoid" term term term "as" ident
+| "Add" "Parametric" "Setoid" binders ":" term term term "as" ident
+| "Add" "Morphism" term ":" ident
+| "Declare" "Morphism" term ":" ident
+| "Add" "Morphism" term "with" "signature" lconstr "as" ident
+| "Add" "Parametric" "Morphism" binders ":" term "with" "signature" lconstr "as" ident
+| "Print" "Rewrite" "HintDb" preident
+| "Proof" "with" tactic using_opt
+| "Proof" "using" section_subset_expr with_opt
+| "Tactic" "Notation" ltac_tactic_level_opt ltac_production_item_list ":=" tactic
+| "Print" "Ltac" reference
+| "Locate" "Ltac" reference
+| "Ltac" ltac_tacdef_body_list
+| "Print" "Ltac" "Signatures"
+| "String" "Notation" reference reference reference ":" ident
+| "Set" "Firstorder" "Solver" tactic
+| "Print" "Firstorder" "Solver"
+| "Numeral" "Notation" reference reference reference ":" ident numnotoption
+| "Derive" ident "SuchThat" term "As" ident (* derive plugin *)
+| "Extraction" global (* extraction plugin *)
+| "Recursive" "Extraction" global_list (* extraction plugin *)
+| "Extraction" string global_list (* extraction plugin *)
+| "Extraction" "TestCompile" global_list (* extraction plugin *)
+| "Separate" "Extraction" global_list (* extraction plugin *)
+| "Extraction" "Library" ident (* extraction plugin *)
+| "Recursive" "Extraction" "Library" ident (* extraction plugin *)
+| "Extraction" "Language" language (* extraction plugin *)
+| "Extraction" "Inline" global_list (* extraction plugin *)
+| "Extraction" "NoInline" global_list (* extraction plugin *)
+| "Print" "Extraction" "Inline" (* extraction plugin *)
+| "Reset" "Extraction" "Inline" (* extraction plugin *)
+| "Extraction" "Implicit" global "[" int_or_id_list_opt "]" (* extraction plugin *)
+| "Extraction" "Blacklist" ident_list (* extraction plugin *)
+| "Print" "Extraction" "Blacklist" (* extraction plugin *)
+| "Reset" "Extraction" "Blacklist" (* extraction plugin *)
+| "Extract" "Constant" global string_list_opt "=>" mlname (* extraction plugin *)
+| "Extract" "Inlined" "Constant" global "=>" mlname (* extraction plugin *)
+| "Extract" "Inductive" global "=>" mlname "[" mlname_list_opt "]" string_opt (* extraction plugin *)
+| "Show" "Extraction" (* extraction plugin *)
+| "Function" function_rec_definition_loc_list (* funind plugin *)
+| "Functional" "Scheme" fun_scheme_arg_list (* funind plugin *)
+| "Functional" "Case" fun_scheme_arg (* funind plugin *)
+| "Generate" "graph" "for" reference (* funind plugin *)
+| "Add" "Ring" ident ":" term ring_mods_opt (* setoid_ring plugin *)
+| "Print" "Rings" (* setoid_ring plugin *)
+| "Add" "Field" ident ":" term field_mods_opt (* setoid_ring plugin *)
+| "Print" "Fields" (* setoid_ring plugin *)
+| "Prenex" "Implicits" global_list (* ssr plugin *)
+| "Search" ssr_search_arg ssr_modlocs (* ssr plugin *)
+| "Print" "Hint" "View" ssrviewpos (* ssr plugin *)
+| "Hint" "View" ssrviewposspc ssrhintref_list (* ssr plugin *)
+]
+
+comment_list_opt: [
+| comment_list_opt comment
+| empty
+]
+
+Verbose_opt: [
+| "Verbose"
+| empty
+]
+
+ne_string_alt: [
+| ne_string
+| IDENT
+]
+
+ne_string_list: [
+| ne_string_list ne_string
+| ne_string
+]
+
+univ_name_list_opt: [
+| univ_name_list
+| empty
+]
+
+option_ref_value_list: [
+| option_ref_value_list option_ref_value
+| option_ref_value
+]
+
+discriminated_opt: [
+| "discriminated"
+| empty
+]
+
+global_list: [
+| global_list global
+| global
+]
+
+preident_list_opt: [
+| preident_list_opt preident
+| empty
+]
+
+reference_list_opt: [
+| reference_list_opt reference
+| empty
+]
+
+int_opt: [
+| int
+| empty
+]
+
+using_opt: [
+| "using" section_subset_expr
+| empty
+]
+
+with_opt: [
+| "with" tactic
+| empty
+]
+
+ltac_tactic_level_opt: [
+| ltac_tactic_level
+| empty
+]
+
+ltac_production_item_list: [
+| ltac_production_item_list ltac_production_item
+| ltac_production_item
+]
+
+ltac_tacdef_body_list: [
+| ltac_tacdef_body_list "with" ltac_tacdef_body
+| ltac_tacdef_body
+]
+
+int_or_id_list_opt: [
+| int_or_id_list_opt int_or_id
+| empty
+]
+
+ident_list: [
+| ident_list ident
+| ident
+]
+
+string_list_opt: [
+| string_list_opt string
+| empty
+]
+
+mlname_list_opt: [
+| mlname_list_opt mlname
+| empty
+]
+
+string_opt: [
+| string
+| empty
+]
+
+function_rec_definition_loc_list: [
+| function_rec_definition_loc_list "with" function_rec_definition_loc
+| function_rec_definition_loc
+]
+
+fun_scheme_arg_list: [
+| fun_scheme_arg_list "with" fun_scheme_arg
+| fun_scheme_arg
+]
+
+ring_mods_opt: [
+| ring_mods
+| empty
+]
+
+field_mods_opt: [
+| field_mods
+| empty
+]
+
+ssrhintref_list: [
+| ssrhintref_list ssrhintref
+| ssrhintref
+]
+
+query_command: [
+| "Eval" red_expr "in" lconstr "."
+| "Compute" lconstr "."
+| "Check" lconstr "."
+| "About" smart_global univ_name_list_opt "."
+| "SearchHead" constr_pattern in_or_out_modules "."
+| "SearchPattern" constr_pattern in_or_out_modules "."
+| "SearchRewrite" constr_pattern in_or_out_modules "."
+| "Search" searchabout_query searchabout_queries "."
+| "SearchAbout" searchabout_query searchabout_queries "."
+| "SearchAbout" "[" searchabout_query_list "]" in_or_out_modules "."
+]
+
+searchabout_query_list: [
+| searchabout_query_list searchabout_query
+| searchabout_query
+]
+
+printable: [
+| "Term" smart_global univ_name_list_opt
+| "All"
+| "Section" global
+| "Grammar" IDENT
+| "Custom" "Grammar" IDENT
+| "LoadPath" dirpath_opt
+| "Modules"
+| "Libraries"
+| "ML" "Path"
+| "ML" "Modules"
+| "Debug" "GC"
+| "Graph"
+| "Classes"
+| "TypeClasses"
+| "Instances" smart_global
+| "Coercions"
+| "Coercion" "Paths" class_rawexpr class_rawexpr
+| "Canonical" "Projections"
+| "Tables"
+| "Options"
+| "Hint"
+| "Hint" smart_global
+| "Hint" "*"
+| "HintDb" IDENT
+| "Scopes"
+| "Scope" IDENT
+| "Visibility" IDENT_opt3
+| "Implicit" smart_global
+| Sorted_opt "Universes" printunivs_subgraph_opt ne_string_opt
+| "Assumptions" smart_global
+| "Opaque" "Dependencies" smart_global
+| "Transparent" "Dependencies" smart_global
+| "All" "Dependencies" smart_global
+| "Strategy" smart_global
+| "Strategies"
+| "Registered"
+]
+
+dirpath_opt: [
+| dirpath
+| empty
+]
+
+IDENT_opt3: [
+| IDENT
+| empty
+]
+
+Sorted_opt: [
+| "Sorted"
+| empty
+]
+
+printunivs_subgraph_opt: [
+| printunivs_subgraph
+| empty
+]
+
+ne_string_opt: [
+| ne_string
+| empty
+]
+
+printunivs_subgraph: [
+| "Subgraph" "(" reference_list_opt ")"
+]
+
+class_rawexpr: [
+| "Funclass"
+| "Sortclass"
+| smart_global
+]
+
+locatable: [
+| smart_global
+| "Term" smart_global
+| "File" ne_string
+| "Library" global
+| "Module" global
+]
+
+option_setting: [
+| empty
+| integer
+| STRING
+]
+
+option_ref_value: [
+| global
+| STRING
+]
+
+option_table: [
+| IDENT_list
+]
+
+as_dirpath: [
+| as_opt3
+]
+
+as_opt: [
+| "as" name
+| empty
+]
+
+ne_in_or_out_modules: [
+| "inside" global_list
+| "outside" global_list
+]
+
+in_or_out_modules: [
+| ne_in_or_out_modules
+| empty
+]
+
+comment: [
+| term
+| STRING
+| natural
+]
+
+positive_search_mark: [
+| "-"
+| empty
+]
+
+searchabout_query: [
+| positive_search_mark ne_string scope_opt
+| positive_search_mark constr_pattern
+]
+
+searchabout_queries: [
+| ne_in_or_out_modules
+| searchabout_query searchabout_queries
+| empty
+]
+
+univ_name_list: [
+| "@{" name_list_opt "}"
+]
+
+syntax: [
+| "Open" "Scope" IDENT
+| "Close" "Scope" IDENT
+| "Delimit" "Scope" IDENT; "with" IDENT
+| "Undelimit" "Scope" IDENT
+| "Bind" "Scope" IDENT; "with" class_rawexpr_list
+| "Infix" ne_lstring ":=" term syntax_modifier_opt IDENT_opt2
+| "Notation" ident ident_list_opt ":=" term only_parsing
+| "Notation" lstring ":=" term syntax_modifier_opt IDENT_opt2
+| "Format" "Notation" STRING STRING STRING
+| "Reserved" "Infix" ne_lstring syntax_modifier_opt
+| "Reserved" "Notation" ne_lstring syntax_modifier_opt
+]
+
+class_rawexpr_list: [
+| class_rawexpr_list class_rawexpr
+| class_rawexpr
+]
+
+syntax_modifier_opt: [
+| "(" syntax_modifier_list_comma ")"
+| empty
+]
+
+syntax_modifier_list_comma: [
+| syntax_modifier_list_comma "," syntax_modifier
+| syntax_modifier
+]
+
+only_parsing: [
+| "(" "only" "parsing" ")"
+| "(" "compat" STRING ")"
+| empty
+]
+
+level: [
+| "level" natural
+| "next" "level"
+]
+
+syntax_modifier: [
+| "at" "level" natural
+| "in" "custom" IDENT
+| "in" "custom" IDENT; "at" "level" natural
+| "left" "associativity"
+| "right" "associativity"
+| "no" "associativity"
+| "only" "printing"
+| "only" "parsing"
+| "compat" STRING
+| "format" STRING STRING_opt
+| IDENT; "," IDENT_list_comma "at" level
+| IDENT; "at" level
+| IDENT; "at" level constr_as_binder_kind
+| IDENT constr_as_binder_kind
+| IDENT syntax_extension_type
+]
+
+STRING_opt: [
+| STRING
+| empty
+]
+
+IDENT_list_comma: [
+| IDENT_list_comma "," IDENT
+| IDENT
+]
+
+syntax_extension_type: [
+| "ident"
+| "global"
+| "bigint"
+| "binder"
+| "constr"
+| "constr" at_level_opt constr_as_binder_kind_opt
+| "pattern"
+| "pattern" "at" "level" natural
+| "strict" "pattern"
+| "strict" "pattern" "at" "level" natural
+| "closed" "binder"
+| "custom" IDENT at_level_opt constr_as_binder_kind_opt
+]
+
+at_level_opt: [
+| at_level
+| empty
+]
+
+constr_as_binder_kind_opt: [
+| constr_as_binder_kind
+| empty
+]
+
+at_level: [
+| "at" level
+]
+
+constr_as_binder_kind: [
+| "as" "ident"
+| "as" "pattern"
+| "as" "strict" "pattern"
+]
+
+opt_hintbases: [
+| empty
+| ":" IDENT_list
+]
+
+IDENT_list: [
+| IDENT_list IDENT
+| IDENT
+]
+
+reference_or_constr: [
+| global
+| term
+]
+
+hint: [
+| "Resolve" reference_or_constr_list hint_info
+| "Resolve" "->" global_list natural_opt
+| "Resolve" "<-" global_list natural_opt
+| "Immediate" reference_or_constr_list
+| "Variables" "Transparent"
+| "Variables" "Opaque"
+| "Constants" "Transparent"
+| "Constants" "Opaque"
+| "Transparent" global_list
+| "Opaque" global_list
+| "Mode" global mode
+| "Unfold" global_list
+| "Constructors" global_list
+| "Extern" natural constr_pattern_opt "=>" tactic
+]
+
+reference_or_constr_list: [
+| reference_or_constr_list reference_or_constr
+| reference_or_constr
+]
+
+natural_opt2: [
+| "|" natural
+| empty
+]
+
+constr_pattern_opt: [
+| constr_pattern
+| empty
+]
+
+constr_body: [
+| ":=" lconstr
+| ":" lconstr ":=" lconstr
+]
+
+mode: [
+| plus_list
+]
+
+plus_list: [
+| plus_list plus_alt
+| plus_alt
+]
+
+plus_alt: [
+| "+"
+| "!"
+| "-"
+]
+
+vernac_toplevel: [
+| "Drop" "."
+| "Quit" "."
+| "Backtrack" natural natural natural "."
+| "Show" "Goal" natural "at" natural "."
+| vernac_control
+]
+
+orient: [
+| "->"
+| "<-"
+| empty
+]
+
+occurrences: [
+| integer_list
+| var
+]
+
+integer_list: [
+| integer_list integer
+| integer
+]
+
+glob: [
+| term
+]
+
+lglob: [
+| lconstr
+]
+
+casted_constr: [
+| term
+]
+
+hloc: [
+| empty
+| "in" "|-" "*"
+| "in" ident
+| "in" "(" "Type" "of" ident ")"
+| "in" "(" "Value" "of" ident ")"
+| "in" "(" "type" "of" ident ")"
+| "in" "(" "value" "of" ident ")"
+]
+
+rename: [
+| ident "into" ident
+]
+
+by_arg_tac: [
+| "by" ltac_expr3
+| empty
+]
+
+in_clause: [
+| in_clause
+| "*" occs
+| "*" "|-" concl_occ
+| hypident_occ_list_comma_opt "|-" concl_occ
+| hypident_occ_list_comma_opt
+]
+
+hypident_occ_list_comma_opt: [
+| hypident_occ_list_comma
+| empty
+]
+
+hypident_occ_list_comma: [
+| hypident_occ_list_comma "," hypident_occ
+| hypident_occ
+]
+
+test_lpar_id_colon: [
+| empty
+]
+
+withtac: [
+| "with" tactic
+| empty
+]
+
+closed_binder: [
+| "(" name name_list ":" lconstr ")"
+| "(" name ":" lconstr ")"
+| "(" name ":=" lconstr ")"
+| "(" name ":" lconstr ":=" lconstr ")"
+| "{" name "}"
+| "{" name name_list ":" lconstr "}"
+| "{" name ":" lconstr "}"
+| "{" name name_list "}"
+| "`(" typeclass_constraint_list_comma ")"
+| "`{" typeclass_constraint_list_comma "}"
+| "'" pattern0
+| of_alt operconstr99 (* ssr plugin *)
+| "(" "_" ":" lconstr "|" lconstr ")"
+]
+
+typeclass_constraint_list_comma: [
+| typeclass_constraint_list_comma "," typeclass_constraint
+| typeclass_constraint
+]
+
+of_alt: [
+| "of"
+| "&"
+]
+
+simple_tactic: [
+| "reflexivity"
+| "exact" casted_constr
+| "assumption"
+| "etransitivity"
+| "cut" term
+| "exact_no_check" term
+| "vm_cast_no_check" term
+| "native_cast_no_check" term
+| "casetype" term
+| "elimtype" term
+| "lapply" term
+| "transitivity" term
+| "left"
+| "eleft"
+| "left" "with" bindings
+| "eleft" "with" bindings
+| "right"
+| "eright"
+| "right" "with" bindings
+| "eright" "with" bindings
+| "constructor"
+| "constructor" int_or_var
+| "constructor" int_or_var "with" bindings
+| "econstructor"
+| "econstructor" int_or_var
+| "econstructor" int_or_var "with" bindings
+| "specialize" constr_with_bindings
+| "specialize" constr_with_bindings "as" simple_intropattern
+| "symmetry"
+| "symmetry" "in" in_clause
+| "split"
+| "esplit"
+| "split" "with" bindings
+| "esplit" "with" bindings
+| "exists"
+| "exists" bindings_list_comma
+| "eexists"
+| "eexists" bindings_list_comma
+| "intros" "until" quantified_hypothesis
+| "intro"
+| "intro" ident
+| "intro" ident "at" "top"
+| "intro" ident "at" "bottom"
+| "intro" ident "after" var
+| "intro" ident "before" var
+| "intro" "at" "top"
+| "intro" "at" "bottom"
+| "intro" "after" var
+| "intro" "before" var
+| "move" var "at" "top"
+| "move" var "at" "bottom"
+| "move" var "after" var
+| "move" var "before" var
+| "rename" rename_list_comma
+| "revert" var_list
+| "simple" "induction" quantified_hypothesis
+| "simple" "destruct" quantified_hypothesis
+| "double" "induction" quantified_hypothesis quantified_hypothesis
+| "admit"
+| "fix" ident natural
+| "cofix" ident
+| "clear" var_list_opt
+| "clear" "-" var_list
+| "clearbody" var_list
+| "generalize" "dependent" term
+| "replace" uconstr "with" term clause_dft_concl by_arg_tac
+| "replace" "->" uconstr clause_dft_concl
+| "replace" "<-" uconstr clause_dft_concl
+| "replace" uconstr clause_dft_concl
+| "simplify_eq"
+| "simplify_eq" destruction_arg
+| "esimplify_eq"
+| "esimplify_eq" destruction_arg
+| "discriminate"
+| "discriminate" destruction_arg
+| "ediscriminate"
+| "ediscriminate" destruction_arg
+| "injection"
+| "injection" destruction_arg
+| "einjection"
+| "einjection" destruction_arg
+| "injection" "as" simple_intropattern_list_opt
+| "injection" destruction_arg "as" simple_intropattern_list_opt
+| "einjection" "as" simple_intropattern_list_opt
+| "einjection" destruction_arg "as" simple_intropattern_list_opt
+| "simple" "injection"
+| "simple" "injection" destruction_arg
+| "dependent" "rewrite" orient term
+| "dependent" "rewrite" orient term "in" var
+| "cutrewrite" orient term
+| "cutrewrite" orient term "in" var
+| "decompose" "sum" term
+| "decompose" "record" term
+| "absurd" term
+| "contradiction" constr_with_bindings_opt
+| "autorewrite" "with" preident_list clause_dft_concl
+| "autorewrite" "with" preident_list clause_dft_concl "using" tactic
+| "autorewrite" "*" "with" preident_list clause_dft_concl
+| "autorewrite" "*" "with" preident_list clause_dft_concl "using" tactic
+| "rewrite" "*" orient uconstr "in" var "at" occurrences by_arg_tac
+| "rewrite" "*" orient uconstr "at" occurrences "in" var by_arg_tac
+| "rewrite" "*" orient uconstr "in" var by_arg_tac
+| "rewrite" "*" orient uconstr "at" occurrences by_arg_tac
+| "rewrite" "*" orient uconstr by_arg_tac
+| "refine" uconstr
+| "simple" "refine" uconstr
+| "notypeclasses" "refine" uconstr
+| "simple" "notypeclasses" "refine" uconstr
+| "solve_constraints"
+| "subst" var_list
+| "subst"
+| "simple" "subst"
+| "evar" test_lpar_id_colon "(" ident ":" lconstr ")"
+| "evar" term
+| "instantiate" "(" ident ":=" lglob ")"
+| "instantiate" "(" integer ":=" lglob ")" hloc
+| "instantiate"
+| "stepl" term "by" tactic
+| "stepl" term
+| "stepr" term "by" tactic
+| "stepr" term
+| "generalize_eqs" var
+| "dependent" "generalize_eqs" var
+| "generalize_eqs_vars" var
+| "dependent" "generalize_eqs_vars" var
+| "specialize_eqs" var
+| "hresolve_core" "(" ident ":=" term ")" "at" int_or_var "in" term
+| "hresolve_core" "(" ident ":=" term ")" "in" term
+| "hget_evar" int_or_var
+| "destauto"
+| "destauto" "in" var
+| "transparent_abstract" ltac_expr3
+| "transparent_abstract" ltac_expr3 "using" ident
+| "constr_eq" term term
+| "constr_eq_strict" term term
+| "constr_eq_nounivs" term term
+| "is_evar" term
+| "has_evar" term
+| "is_var" term
+| "is_fix" term
+| "is_cofix" term
+| "is_ind" term
+| "is_constructor" term
+| "is_proj" term
+| "is_const" term
+| "shelve"
+| "shelve_unifiable"
+| "unshelve" ltac_expr1
+| "give_up"
+| "cycle" int_or_var
+| "swap" int_or_var int_or_var
+| "revgoals"
+| "guard" test
+| "decompose" "[" term_list "]" term
+| "optimize_heap"
+| "start" "ltac" "profiling"
+| "stop" "ltac" "profiling"
+| "reset" "ltac" "profile"
+| "show" "ltac" "profile"
+| "show" "ltac" "profile" "cutoff" int
+| "show" "ltac" "profile" string
+| "restart_timer" string_opt
+| "finish_timing" string_opt
+| "finish_timing" "(" string ")" string_opt
+| "eassumption"
+| "eexact" term
+| "trivial" auto_using hintbases
+| "info_trivial" auto_using hintbases
+| "debug" "trivial" auto_using hintbases
+| "auto" int_or_var_opt auto_using hintbases
+| "info_auto" int_or_var_opt auto_using hintbases
+| "debug" "auto" int_or_var_opt auto_using hintbases
+| "prolog" "[" uconstr_list_opt "]" int_or_var
+| "eauto" int_or_var_opt int_or_var_opt auto_using hintbases
+| "new" "auto" int_or_var_opt auto_using hintbases
+| "debug" "eauto" int_or_var_opt int_or_var_opt auto_using hintbases
+| "info_eauto" int_or_var_opt int_or_var_opt auto_using hintbases
+| "dfs" "eauto" int_or_var_opt auto_using hintbases
+| "autounfold" hintbases clause_dft_concl
+| "autounfold_one" hintbases "in" var
+| "autounfold_one" hintbases
+| "unify" term term
+| "unify" term term "with" preident
+| "convert_concl_no_check" term
+| "typeclasses" "eauto" "bfs" int_or_var_opt "with" preident_list
+| "typeclasses" "eauto" int_or_var_opt "with" preident_list
+| "typeclasses" "eauto" int_or_var_opt
+| "head_of_constr" ident term
+| "not_evar" term
+| "is_ground" term
+| "autoapply" term "using" preident
+| "autoapply" term "with" preident
+| "progress_evars" tactic
+| "rewrite_strat" rewstrategy "in" var
+| "rewrite_strat" rewstrategy
+| "rewrite_db" preident "in" var
+| "rewrite_db" preident
+| "substitute" orient glob_constr_with_bindings
+| "setoid_rewrite" orient glob_constr_with_bindings
+| "setoid_rewrite" orient glob_constr_with_bindings "in" var
+| "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences
+| "setoid_rewrite" orient glob_constr_with_bindings "at" occurrences "in" var
+| "setoid_rewrite" orient glob_constr_with_bindings "in" var "at" occurrences
+| "setoid_symmetry"
+| "setoid_symmetry" "in" var
+| "setoid_reflexivity"
+| "setoid_transitivity" term
+| "setoid_etransitivity"
+| "decide" "equality"
+| "compare" term term
+| "intros" intropattern_list_opt
+| "eintros" intropattern_list_opt
+| "apply" constr_with_bindings_arg_list_comma in_hyp_as
+| "eapply" constr_with_bindings_arg_list_comma in_hyp_as
+| "simple" "apply" constr_with_bindings_arg_list_comma in_hyp_as
+| "simple" "eapply" constr_with_bindings_arg_list_comma in_hyp_as
+| "elim" constr_with_bindings_arg eliminator_opt
+| "eelim" constr_with_bindings_arg eliminator_opt
+| "case" induction_clause_list
+| "ecase" induction_clause_list
+| "fix" ident natural "with" fixdecl_list
+| "cofix" ident "with" cofixdecl_list
+| "pose" bindings_with_parameters
+| "pose" term as_name
+| "epose" bindings_with_parameters
+| "epose" term as_name
+| "set" bindings_with_parameters clause_dft_concl
+| "set" term as_name clause_dft_concl
+| "eset" bindings_with_parameters clause_dft_concl
+| "eset" term as_name clause_dft_concl
+| "remember" term as_name eqn_ipat clause_dft_all
+| "eremember" term as_name eqn_ipat clause_dft_all
+| "assert" "(" ident ":=" lconstr ")"
+| "eassert" "(" ident ":=" lconstr ")"
+| "assert" test_lpar_id_colon "(" ident ":" lconstr ")" by_tactic
+| "eassert" test_lpar_id_colon "(" ident ":" lconstr ")" by_tactic
+| "enough" test_lpar_id_colon "(" ident ":" lconstr ")" by_tactic
+| "eenough" test_lpar_id_colon "(" ident ":" lconstr ")" by_tactic
+| "assert" term as_ipat by_tactic
+| "eassert" term as_ipat by_tactic
+| "pose" "proof" lconstr as_ipat
+| "epose" "proof" lconstr as_ipat
+| "enough" term as_ipat by_tactic
+| "eenough" term as_ipat by_tactic
+| "generalize" term
+| "generalize" term term_list
+| "generalize" term occs as_name pattern_occ_list_opt
+| "induction" induction_clause_list
+| "einduction" induction_clause_list
+| "destruct" induction_clause_list
+| "edestruct" induction_clause_list
+| "rewrite" oriented_rewriter_list_comma clause_dft_concl by_tactic
+| "erewrite" oriented_rewriter_list_comma clause_dft_concl by_tactic
+| "dependent" simple_alt quantified_hypothesis as_or_and_ipat with_opt2
+| "simple" "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list
+| "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list
+| "inversion_clear" quantified_hypothesis as_or_and_ipat in_hyp_list
+| "inversion" quantified_hypothesis "using" term in_hyp_list
+| "red" clause_dft_concl
+| "hnf" clause_dft_concl
+| "simpl" delta_flag ref_or_pattern_occ_opt clause_dft_concl
+| "cbv" strategy_flag clause_dft_concl
+| "cbn" strategy_flag clause_dft_concl
+| "lazy" strategy_flag clause_dft_concl
+| "compute" delta_flag clause_dft_concl
+| "vm_compute" ref_or_pattern_occ_opt clause_dft_concl
+| "native_compute" ref_or_pattern_occ_opt clause_dft_concl
+| "unfold" unfold_occ_list_comma clause_dft_concl
+| "fold" term_list clause_dft_concl
+| "pattern" pattern_occ_list_comma clause_dft_concl
+| "change" conversion clause_dft_concl
+| "change_no_check" conversion clause_dft_concl
+| "btauto"
+| "rtauto"
+| "congruence"
+| "congruence" integer
+| "congruence" "with" term_list
+| "congruence" integer "with" term_list
+| "f_equal"
+| "firstorder" tactic_opt firstorder_using
+| "firstorder" tactic_opt "with" preident_list
+| "firstorder" tactic_opt firstorder_using "with" preident_list
+| "gintuition" tactic_opt
+| "functional" "inversion" quantified_hypothesis reference_opt (* funind plugin *)
+| "functional" "induction" term_list fun_ind_using with_names (* funind plugin *)
+| "soft" "functional" "induction" term_list fun_ind_using with_names (* funind plugin *)
+| "myred" (* micromega plugin *)
+| "psatz_Z" int_or_var tactic (* micromega plugin *)
+| "psatz_Z" tactic (* micromega plugin *)
+| "xlia" tactic (* micromega plugin *)
+| "xnlia" tactic (* micromega plugin *)
+| "xnra" tactic (* micromega plugin *)
+| "xnqa" tactic (* micromega plugin *)
+| "sos_Z" tactic (* micromega plugin *)
+| "sos_Q" tactic (* micromega plugin *)
+| "sos_R" tactic (* micromega plugin *)
+| "lra_Q" tactic (* micromega plugin *)
+| "lra_R" tactic (* micromega plugin *)
+| "psatz_R" int_or_var tactic (* micromega plugin *)
+| "psatz_R" tactic (* micromega plugin *)
+| "psatz_Q" int_or_var tactic (* micromega plugin *)
+| "psatz_Q" tactic (* micromega plugin *)
+| "nsatz_compute" term (* nsatz plugin *)
+| "omega" (* omega plugin *)
+| "omega" "with" ident_list (* omega plugin *)
+| "omega" "with" "*" (* omega plugin *)
+| "protect_fv" string "in" ident (* setoid_ring plugin *)
+| "protect_fv" string (* setoid_ring plugin *)
+| "ring_lookup" ltac_expr0 "[" term_list_opt "]" term_list (* setoid_ring plugin *)
+| "field_lookup" tactic "[" term_list_opt "]" term_list (* setoid_ring plugin *)
+| "YouShouldNotTypeThis" ssrintrosarg (* ssr plugin *)
+| "by" ssrhintarg (* ssr plugin *)
+| "YouShouldNotTypeThis" "do" (* ssr plugin *)
+| "YouShouldNotTypeThis" ssrtclarg ssrseqarg (* ssr plugin *)
+| "clear" natural (* ssr plugin *)
+| "move" ssrmovearg ssrrpat (* ssr plugin *)
+| "move" ssrmovearg ssrclauses (* ssr plugin *)
+| "move" ssrrpat (* ssr plugin *)
+| "move" (* ssr plugin *)
+| "case" ssrcasearg ssrclauses (* ssr plugin *)
+| "case" (* ssr plugin *)
+| "elim" ssrarg ssrclauses (* ssr plugin *)
+| "elim" (* ssr plugin *)
+| "apply" ssrapplyarg (* ssr plugin *)
+| "apply" (* ssr plugin *)
+| "exact" ssrexactarg (* ssr plugin *)
+| "exact" (* ssr plugin *)
+| "exact" "<:" lconstr (* ssr plugin *)
+| "congr" ssrcongrarg (* ssr plugin *)
+| "ssrinstancesofruleL2R" ssrterm (* ssr plugin *)
+| "ssrinstancesofruleR2L" ssrterm (* ssr plugin *)
+| "rewrite" ssrrwargs ssrclauses (* ssr plugin *)
+| "unlock" ssrunlockargs ssrclauses (* ssr plugin *)
+| "pose" ssrfixfwd (* ssr plugin *)
+| "pose" ssrcofixfwd (* ssr plugin *)
+| "pose" ssrfwdid ssrposefwd (* ssr plugin *)
+| "set" ssrfwdid ssrsetfwd ssrclauses (* ssr plugin *)
+| "abstract" ssrdgens (* ssr plugin *)
+| "have" ssrhavefwdwbinders (* ssr plugin *)
+| "have" "suff" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "have" "suffices" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "suff" "have" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "suffices" "have" ssrhpats_nobs ssrhavefwd (* ssr plugin *)
+| "suff" ssrsufffwd (* ssr plugin *)
+| "suffices" ssrsufffwd (* ssr plugin *)
+| "wlog" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "wlog" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "wlog" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "without" "loss" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "without" "loss" "suff" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "without" "loss" "suffices" ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "gen" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "generally" "have" ssrclear ssr_idcomma ssrhpats_nobs ssrwlogfwd ssrhint (* ssr plugin *)
+| "under" ssrrwarg (* ssr plugin *)
+| "under" ssrrwarg ssrintros_ne (* ssr plugin *)
+| "under" ssrrwarg ssrintros_ne "do" ssrhint3arg (* ssr plugin *)
+| "under" ssrrwarg "do" ssrhint3arg (* ssr plugin *)
+| "ssrinstancesoftpat" cpattern (* ssrmatching plugin *)
+]
+
+var_list: [
+| var_list var
+| var
+]
+
+var_list_opt: [
+| var_list_opt var
+| empty
+]
+
+constr_with_bindings_opt: [
+| constr_with_bindings
+| empty
+]
+
+int_or_var_opt: [
+| int_or_var
+| empty
+]
+
+uconstr_list_opt: [
+| uconstr_list_opt uconstr
+| empty
+]
+
+constr_with_bindings_arg_list_comma: [
+| constr_with_bindings_arg_list_comma "," constr_with_bindings_arg
+| constr_with_bindings_arg
+]
+
+fixdecl_list: [
+| fixdecl_list fixdecl
+| fixdecl
+]
+
+cofixdecl_list: [
+| cofixdecl_list cofixdecl
+| cofixdecl
+]
+
+pattern_occ_list_opt: [
+| pattern_occ_list_opt "," pattern_occ as_name
+| empty
+]
+
+oriented_rewriter_list_comma: [
+| oriented_rewriter_list_comma "," oriented_rewriter
+| oriented_rewriter
+]
+
+simple_alt: [
+| "simple" "inversion"
+| "inversion"
+| "inversion_clear"
+]
+
+with_opt2: [
+| "with" term
+| empty
+]
+
+tactic_opt: [
+| tactic
+| empty
+]
+
+reference_opt: [
+| reference
+| empty
+]
+
+bindings_list_comma: [
+| bindings_list_comma "," bindings
+| bindings
+]
+
+rename_list_comma: [
+| rename_list_comma "," rename
+| rename
+]
+
+orient_string: [
+| orient preident
+]
+
+comparison: [
+| "="
+| "<"
+| "<="
+| ">"
+| ">="
+]
+
+test: [
+| int_or_var comparison int_or_var
+]
+
+hintbases: [
+| "with" "*"
+| "with" preident_list
+| empty
+]
+
+preident_list: [
+| preident_list preident
+| preident
+]
+
+auto_using: [
+| "using" uconstr_list_comma
+| empty
+]
+
+uconstr_list_comma: [
+| uconstr_list_comma "," uconstr
+| uconstr
+]
+
+hints_path_atom: [
+| global_list
+| "_"
+]
+
+hints_path: [
+| "(" hints_path ")"
+| hints_path "*"
+| "emp"
+| "eps"
+| hints_path "|" hints_path
+| hints_path_atom
+| hints_path hints_path
+]
+
+opthints: [
+| ":" preident_list
+| empty
+]
+
+debug: [
+| "debug"
+| empty
+]
+
+eauto_search_strategy: [
+| "(bfs)"
+| "(dfs)"
+| empty
+]
+
+glob_constr_with_bindings: [
+| constr_with_bindings
+]
+
+rewstrategy: [
+| glob
+| "<-" term
+| "subterms" rewstrategy
+| "subterm" rewstrategy
+| "innermost" rewstrategy
+| "outermost" rewstrategy
+| "bottomup" rewstrategy
+| "topdown" rewstrategy
+| "id"
+| "fail"
+| "refl"
+| "progress" rewstrategy
+| "try" rewstrategy
+| "any" rewstrategy
+| "repeat" rewstrategy
+| rewstrategy ";" rewstrategy
+| "(" rewstrategy ")"
+| "choice" rewstrategy rewstrategy
+| "old_hints" preident
+| "hints" preident
+| "terms" term_list_opt
+| "eval" red_expr
+| "fold" term
+]
+
+term_list_opt: [
+| term_list_opt term
+| empty
+]
+
+int_or_var: [
+| integer
+| ident
+]
+
+nat_or_var: [
+| natural
+| ident
+]
+
+id_or_meta: [
+| ident
+]
+
+open_constr: [
+| term
+]
+
+uconstr: [
+| term
+]
+
+destruction_arg: [
+| natural
+| constr_with_bindings
+| constr_with_bindings_arg
+]
+
+constr_with_bindings_arg: [
+| ">" constr_with_bindings
+| constr_with_bindings
+]
+
+quantified_hypothesis: [
+| ident
+| natural
+]
+
+conversion: [
+| term
+| term "with" term
+| term "at" occs_nums "with" term
+]
+
+occs_nums: [
+| nat_or_var_list
+| "-" nat_or_var int_or_var_list_opt
+]
+
+nat_or_var_list: [
+| nat_or_var_list nat_or_var
+| nat_or_var
+]
+
+int_or_var_list_opt: [
+| int_or_var_list_opt int_or_var
+| empty
+]
+
+occs: [
+| "at" occs_nums
+| empty
+]
+
+pattern_occ: [
+| term occs
+]
+
+ref_or_pattern_occ: [
+| smart_global occs
+| term occs
+]
+
+unfold_occ: [
+| smart_global occs
+]
+
+intropattern_list_opt: [
+| intropattern_list_opt intropattern
+| empty
+]
+
+or_and_intropattern: [
+| "[" intropattern_or_list_or "]"
+| "(" simple_intropattern_list_comma_opt ")"
+| "(" simple_intropattern "&" simple_intropattern_list_ ")"
+]
+
+simple_intropattern_list_comma_opt: [
+| simple_intropattern_list_comma
+| empty
+]
+
+simple_intropattern_list_comma: [
+| simple_intropattern_list_comma "," simple_intropattern
+| simple_intropattern
+]
+
+simple_intropattern_list_: [
+| simple_intropattern_list_ "&" simple_intropattern
+| simple_intropattern
+]
+
+intropattern_or_list_or: [
+| intropattern_or_list_or "|" intropattern_list_opt
+| intropattern_list_opt
+]
+
+simple_intropattern_list_opt: [
+| simple_intropattern_list_opt simple_intropattern
+| empty
+]
+
+equality_intropattern: [
+| "->"
+| "<-"
+| "[=" intropattern_list_opt "]"
+]
+
+naming_intropattern: [
+| "?" ident
+| "?"
+| ident
+]
+
+intropattern: [
+| simple_intropattern
+| "*"
+| "**"
+]
+
+simple_intropattern: [
+| simple_intropattern_closed operconstr0_list_opt
+]
+
+operconstr0_list_opt: [
+| operconstr0_list_opt "%" operconstr0
+| empty
+]
+
+simple_intropattern_closed: [
+| or_and_intropattern
+| equality_intropattern
+| "_"
+| naming_intropattern
+]
+
+simple_binding: [
+| "(" ident ":=" lconstr ")"
+| "(" natural ":=" lconstr ")"
+]
+
+bindings: [
+| simple_binding_list
+| term_list
+]
+
+simple_binding_list: [
+| simple_binding_list simple_binding
+| simple_binding
+]
+
+term_list: [
+| term_list term
+| term
+]
+
+constr_with_bindings: [
+| term with_bindings
+]
+
+with_bindings: [
+| "with" bindings
+| empty
+]
+
+red_flags: [
+| "beta"
+| "iota"
+| "match"
+| "fix"
+| "cofix"
+| "zeta"
+| "delta" delta_flag
+]
+
+delta_flag: [
+| "-" "[" smart_global_list "]"
+| "[" smart_global_list "]"
+| empty
+]
+
+smart_global_list: [
+| smart_global_list smart_global
+| smart_global
+]
+
+strategy_flag: [
+| red_flags_list
+| delta_flag
+]
+
+red_flags_list: [
+| red_flags_list red_flags
+| red_flags
+]
+
+red_expr: [
+| "red"
+| "hnf"
+| "simpl" delta_flag ref_or_pattern_occ_opt
+| "cbv" strategy_flag
+| "cbn" strategy_flag
+| "lazy" strategy_flag
+| "compute" delta_flag
+| "vm_compute" ref_or_pattern_occ_opt
+| "native_compute" ref_or_pattern_occ_opt
+| "unfold" unfold_occ_list_comma
+| "fold" term_list
+| "pattern" pattern_occ_list_comma
+| IDENT
+]
+
+ref_or_pattern_occ_opt: [
+| ref_or_pattern_occ
+| empty
+]
+
+unfold_occ_list_comma: [
+| unfold_occ_list_comma "," unfold_occ
+| unfold_occ
+]
+
+pattern_occ_list_comma: [
+| pattern_occ_list_comma "," pattern_occ
+| pattern_occ
+]
+
+hypident: [
+| id_or_meta
+| "(" "type" "of" id_or_meta ")"
+| "(" "value" "of" id_or_meta ")"
+| "(" "type" "of" ident ")" (* ssr plugin *)
+| "(" "value" "of" ident ")" (* ssr plugin *)
+]
+
+hypident_occ: [
+| hypident occs
+]
+
+clause_dft_concl: [
+| "in" in_clause
+| occs
+| empty
+]
+
+clause_dft_all: [
+| "in" in_clause
+| empty
+]
+
+opt_clause: [
+| "in" in_clause
+| "at" occs_nums
+| empty
+]
+
+concl_occ: [
+| "*" occs
+| empty
+]
+
+in_hyp_list: [
+| "in" id_or_meta_list
+| empty
+]
+
+id_or_meta_list: [
+| id_or_meta_list id_or_meta
+| id_or_meta
+]
+
+in_hyp_as: [
+| "in" id_or_meta as_ipat
+| empty
+]
+
+simple_binder: [
+| name
+| "(" name_list ":" lconstr ")"
+]
+
+fixdecl: [
+| "(" ident simple_binder_list_opt fixannot ":" lconstr ")"
+]
+
+cofixdecl: [
+| "(" ident simple_binder_list_opt ":" lconstr ")"
+]
+
+bindings_with_parameters: [
+| "(" ident simple_binder_list_opt ":=" lconstr ")"
+]
+
+simple_binder_list_opt: [
+| simple_binder_list_opt simple_binder
+| empty
+]
+
+eliminator: [
+| "using" constr_with_bindings
+]
+
+as_ipat: [
+| "as" simple_intropattern
+| empty
+]
+
+or_and_intropattern_loc: [
+| or_and_intropattern
+| ident
+]
+
+as_or_and_ipat: [
+| "as" or_and_intropattern_loc
+| empty
+]
+
+eqn_ipat: [
+| "eqn" ":" naming_intropattern
+| "_eqn" ":" naming_intropattern
+| "_eqn"
+| empty
+]
+
+as_name: [
+| "as" ident
+| empty
+]
+
+by_tactic: [
+| "by" ltac_expr3
+| empty
+]
+
+rewriter: [
+| "!" constr_with_bindings_arg
+| qmark_alt constr_with_bindings_arg
+| natural "!" constr_with_bindings_arg
+| natural qmark_alt constr_with_bindings_arg
+| natural constr_with_bindings_arg
+| constr_with_bindings_arg
+]
+
+qmark_alt: [
+| "?"
+| "?"
+]
+
+oriented_rewriter: [
+| orient rewriter
+]
+
+induction_clause: [
+| destruction_arg as_or_and_ipat eqn_ipat opt_clause
+]
+
+induction_clause_list: [
+| induction_clause_list_comma eliminator_opt opt_clause
+]
+
+induction_clause_list_comma: [
+| induction_clause_list_comma "," induction_clause
+| induction_clause
+]
+
+eliminator_opt: [
+| eliminator
+| empty
+]
+
+ltac_expr: [
+| binder_tactic
+| ltac_expr4
+]
+
+binder_tactic: [
+| "fun" input_fun_list "=>" ltac_expr
+| "let" rec_opt let_clause_list "in" ltac_expr
+| "info" ltac_expr
+]
+
+input_fun_list: [
+| input_fun_list input_fun
+| input_fun
+]
+
+input_fun: [
+| "_"
+| ident
+]
+
+rec_opt: [
+| "rec"
+| empty
+]
+
+let_clause_list: [
+| let_clause_list "with" let_clause
+| let_clause
+]
+
+let_clause: [
+| ident ":=" ltac_expr
+| "_" ":=" ltac_expr
+| ident input_fun_list ":=" ltac_expr
+]
+
+ltac_expr4: [
+| ltac_expr3 ";" binder_tactic
+| ltac_expr3 ";" ltac_expr3
+| ltac_expr3 ";" "[" gt_opt tactic_then_gen "]"
+| ltac_expr3
+| ltac_expr ";" "first" ssr_first_else (* ssr plugin *)
+| ltac_expr ";" "first" ssrseqarg (* ssr plugin *)
+| ltac_expr ";" "last" ssrseqarg (* ssr plugin *)
+]
+
+gt_opt: [
+| ">"
+| empty
+]
+
+tactic_then_gen: [
+| ltac_expr_opt "|" tactic_then_gen
+| ltac_expr_opt ".." or_opt ltac_expr_list2
+| ltac_expr
+| empty
+]
+
+ltac_expr_opt: [
+| ltac_expr
+| empty
+]
+
+ltac_expr_list_or2_opt: [
+| ltac_expr_list_or2
+| empty
+]
+
+ltac_expr_list_or2: [
+| ltac_expr_list_or2 "|" ltac_expr_opt
+| ltac_expr_opt
+]
+
+ltac_expr3: [
+| "try" ltac_expr3
+| "do" int_or_var ltac_expr3
+| "timeout" int_or_var ltac_expr3
+| "time" string_opt ltac_expr3
+| "repeat" ltac_expr3
+| "progress" ltac_expr3
+| "once" ltac_expr3
+| "exactly_once" ltac_expr3
+| "infoH" ltac_expr3
+| "abstract" ltac_expr2
+| "abstract" ltac_expr2 "using" ident
+| selector ltac_expr3
+| "do" ssrmmod ssrdotac ssrclauses (* ssr plugin *)
+| "do" ssrortacarg ssrclauses (* ssr plugin *)
+| "do" int_or_var ssrmmod ssrdotac ssrclauses (* ssr plugin *)
+| "abstract" ssrdgens (* ssr plugin *)
+| ltac_expr2
+]
+
+tactic_mode: [
+| toplevel_selector_opt query_command
+| toplevel_selector_opt "{"
+| toplevel_selector_opt ltac_info_opt tactic ltac_use_default
+| "par" ":" ltac_info_opt tactic ltac_use_default
+]
+
+toplevel_selector_opt: [
+| toplevel_selector
+| empty
+]
+
+toplevel_selector: [
+| selector_body ":"
+| "!" ":"
+| "all" ":"
+]
+
+selector: [
+| "only" selector_body ":"
+]
+
+selector_body: [
+| range_selector_list_comma
+| "[" ident "]"
+]
+
+range_selector_list_comma: [
+| range_selector_list_comma "," range_selector
+| range_selector
+]
+
+range_selector: [
+| natural "-" natural
+| natural
+]
+
+ltac_expr2: [
+| ltac_expr1 "+" binder_tactic
+| ltac_expr1 "+" ltac_expr2
+| "tryif" ltac_expr "then" ltac_expr "else" ltac_expr2
+| ltac_expr1 "||" binder_tactic
+| ltac_expr1 "||" ltac_expr2
+| ltac_expr1
+]
+
+ltac_expr1: [
+| match_key reverse_opt "goal" "with" match_context_list "end"
+| match_key ltac_expr "with" match_list "end"
+| "first" "[" ltac_expr_list_or_opt "]"
+| "solve" "[" ltac_expr_list_or_opt "]"
+| "idtac" message_token_list_opt
+| failkw int_or_var_opt message_token_list_opt
+| simple_tactic
+| tactic_arg
+| reference tactic_arg_compat_list_opt
+| ltac_expr ssrintros_ne (* ssr plugin *)
+| ltac_expr0
+]
+
+match_key: [
+| "match"
+| "lazymatch"
+| "multimatch"
+]
+
+reverse_opt: [
+| "reverse"
+| empty
+]
+
+ltac_expr_list_or_opt: [
+| ltac_expr_list_or
+| empty
+]
+
+ltac_expr_list_or: [
+| ltac_expr_list_or "|" ltac_expr
+| ltac_expr
+]
+
+match_context_list: [
+| or_opt match_context_rule_list_or
+]
+
+match_context_rule_list_or: [
+| match_context_rule_list_or "|" match_context_rule
+| match_context_rule
+]
+
+or_opt: [
+| "|"
+| empty
+]
+
+eqn_list_or_opt: [
+| eqn_list_or
+| empty
+]
+
+eqn_list_or: [
+| eqn_list_or "|" eqn
+| eqn
+]
+
+match_context_rule: [
+| match_hyps_list_comma_opt "|-" match_pattern "=>" ltac_expr
+| "[" match_hyps_list_comma_opt "|-" match_pattern "]" "=>" ltac_expr
+| "_" "=>" ltac_expr
+]
+
+match_hyps_list_comma_opt: [
+| match_hyps_list_comma
+| empty
+]
+
+match_hyps_list_comma: [
+| match_hyps_list_comma "," match_hyps
+| match_hyps
+]
+
+match_hyps: [
+| name ":" match_pattern
+| name ":=" match_pattern_opt match_pattern
+]
+
+match_pattern: [
+| "context" ident_opt "[" lconstr_pattern "]"
+| lconstr_pattern
+]
+
+ident_opt: [
+| ident
+| empty
+]
+
+lconstr_pattern: [
+| lconstr
+]
+
+match_pattern_opt: [
+| "[" match_pattern "]" ":"
+| empty
+]
+
+match_list: [
+| or_opt match_rule_list_or
+]
+
+match_rule_list_or: [
+| match_rule_list_or "|" match_rule
+| match_rule
+]
+
+match_rule: [
+| match_pattern "=>" ltac_expr
+| "_" "=>" ltac_expr
+]
+
+message_token_list_opt: [
+| message_token_list_opt message_token
+| empty
+]
+
+message_token: [
+| ident
+| STRING
+| integer
+]
+
+failkw: [
+| "fail"
+| "gfail"
+]
+
+tactic_arg: [
+| "eval" red_expr "in" term
+| "context" ident "[" lconstr "]"
+| "type" "of" term
+| "fresh" fresh_id_list_opt
+| "type_term" uconstr
+| "numgoals"
+]
+
+fresh_id_list_opt: [
+| fresh_id_list_opt fresh_id
+| empty
+]
+
+fresh_id: [
+| STRING
+| qualid
+]
+
+tactic_arg_compat_list_opt: [
+| tactic_arg_compat_list_opt tactic_arg_compat
+| empty
+]
+
+tactic_arg_compat: [
+| tactic_arg
+| term
+| "()"
+]
+
+ltac_expr0: [
+| "(" ltac_expr ")"
+| "[" ">" tactic_then_gen "]"
+| tactic_atom
+| ssrparentacarg (* ssr plugin *)
+]
+
+tactic_atom: [
+| integer
+| reference
+| "()"
+]
+
+constr_may_eval: [
+| "eval" red_expr "in" term
+| "context" ident "[" lconstr "]"
+| "type" "of" term
+| term
+]
+
+ltac_def_kind: [
+| ":="
+| "::="
+]
+
+tacdef_body: [
+| global input_fun_list ltac_def_kind ltac_expr
+| global ltac_def_kind ltac_expr
+]
+
+tactic: [
+| ltac_expr
+]
+
+ltac_info_opt: [
+| ltac_info
+| empty
+]
+
+ltac_info: [
+| "Info" natural
+]
+
+ltac_use_default: [
+| "."
+| "..."
+]
+
+ltac_tactic_level: [
+| "(" "at" "level" natural ")"
+]
+
+ltac_production_sep: [
+| "," string
+]
+
+ltac_production_item: [
+| string
+| ident "(" ident ltac_production_sep_opt ")"
+| ident
+]
+
+ltac_production_sep_opt: [
+| ltac_production_sep
+| empty
+]
+
+ltac_tacdef_body: [
+| tacdef_body
+]
+
+firstorder_using: [
+| "using" reference
+| "using" reference "," reference_list_comma
+| "using" reference reference reference_list_opt
+| empty
+]
+
+reference_list_comma: [
+| reference_list_comma "," reference
+| reference
+]
+
+numnotoption: [
+| empty
+| "(" "warning" "after" bigint ")"
+| "(" "abstract" "after" bigint ")"
+]
+
+mlname: [
+| preident (* extraction plugin *)
+| string (* extraction plugin *)
+]
+
+int_or_id: [
+| preident (* extraction plugin *)
+| integer (* extraction plugin *)
+]
+
+language: [
+| "Ocaml" (* extraction plugin *)
+| "OCaml" (* extraction plugin *)
+| "Haskell" (* extraction plugin *)
+| "Scheme" (* extraction plugin *)
+| "JSON" (* extraction plugin *)
+]
+
+fun_ind_using: [
+| "using" constr_with_bindings (* funind plugin *)
+| empty (* funind plugin *)
+]
+
+with_names: [
+| "as" simple_intropattern (* funind plugin *)
+| empty (* funind plugin *)
+]
+
+constr_comma_sequence': [
+| term "," constr_comma_sequence' (* funind plugin *)
+| term (* funind plugin *)
+]
+
+auto_using': [
+| "using" constr_comma_sequence' (* funind plugin *)
+| empty (* funind plugin *)
+]
+
+function_rec_definition_loc: [
+| rec_definition (* funind plugin *)
+]
+
+fun_scheme_arg: [
+| ident ":=" "Induction" "for" reference "Sort" sort_family (* funind plugin *)
+]
+
+ring_mod: [
+| "decidable" term (* setoid_ring plugin *)
+| "abstract" (* setoid_ring plugin *)
+| "morphism" term (* setoid_ring plugin *)
+| "constants" "[" tactic "]" (* setoid_ring plugin *)
+| "closed" "[" global_list "]" (* setoid_ring plugin *)
+| "preprocess" "[" tactic "]" (* setoid_ring plugin *)
+| "postprocess" "[" tactic "]" (* setoid_ring plugin *)
+| "setoid" term term (* setoid_ring plugin *)
+| "sign" term (* setoid_ring plugin *)
+| "power" term "[" global_list "]" (* setoid_ring plugin *)
+| "power_tac" term "[" tactic "]" (* setoid_ring plugin *)
+| "div" term (* setoid_ring plugin *)
+]
+
+ring_mods: [
+| "(" ring_mod_list_comma ")" (* setoid_ring plugin *)
+]
+
+ring_mod_list_comma: [
+| ring_mod_list_comma "," ring_mod
+| ring_mod
+]
+
+field_mod: [
+| ring_mod (* setoid_ring plugin *)
+| "completeness" term (* setoid_ring plugin *)
+]
+
+field_mods: [
+| "(" field_mod_list_comma ")" (* setoid_ring plugin *)
+]
+
+field_mod_list_comma: [
+| field_mod_list_comma "," field_mod
+| field_mod
+]
+
+ssrtacarg: [
+| ltac_expr (* ssr plugin *)
+]
+
+ssrtac3arg: [
+| ltac_expr3 (* ssr plugin *)
+]
+
+ssrtclarg: [
+| ssrtacarg (* ssr plugin *)
+]
+
+ssrhyp: [
+| ident (* ssr plugin *)
+]
+
+ssrhoi_hyp: [
+| ident (* ssr plugin *)
+]
+
+ssrhoi_id: [
+| ident (* ssr plugin *)
+]
+
+ssrsimpl_ne: [
+| "//=" (* ssr plugin *)
+| "/=" (* ssr plugin *)
+| "/" natural "/" natural "=" (* ssr plugin *)
+| "/" natural "/" (* ssr plugin *)
+| "/" natural "=" (* ssr plugin *)
+| "/" natural "/=" (* ssr plugin *)
+| "/" natural "/" "=" (* ssr plugin *)
+| "//" natural "=" (* ssr plugin *)
+| "//" (* ssr plugin *)
+]
+
+ssrclear_ne: [
+| "{" ssrhyp_list "}" (* ssr plugin *)
+]
+
+ssrclear: [
+| ssrclear_ne (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrindex: [
+| int_or_var (* ssr plugin *)
+]
+
+ssrocc: [
+| natural natural_list_opt (* ssr plugin *)
+| "-" natural_list_opt (* ssr plugin *)
+| "+" natural_list_opt (* ssr plugin *)
+]
+
+natural_list_opt: [
+| natural_list_opt natural
+| empty
+]
+
+ssrmmod: [
+| "!" (* ssr plugin *)
+| "?" (* ssr plugin *)
+| "?" (* ssr plugin *)
+]
+
+ssrmult_ne: [
+| natural ssrmmod (* ssr plugin *)
+| ssrmmod (* ssr plugin *)
+]
+
+ssrmult: [
+| ssrmult_ne (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrdocc: [
+| "{" ssrocc "}" (* ssr plugin *)
+| "{" ssrhyp_list_opt "}" (* ssr plugin *)
+]
+
+ssrhyp_list_opt: [
+| ssrhyp_list_opt ssrhyp
+| empty
+]
+
+ssrterm: [
+| "YouShouldNotTypeThis" term (* ssr plugin *)
+| term (* ssr plugin *)
+]
+
+ast_closure_term: [
+| term (* ssr plugin *)
+]
+
+ast_closure_lterm: [
+| lconstr (* ssr plugin *)
+]
+
+ssrbwdview: [
+| "YouShouldNotTypeThis" (* ssr plugin *)
+| "/" term (* ssr plugin *)
+| "/" term ssrbwdview (* ssr plugin *)
+]
+
+ssrfwdview: [
+| "YouShouldNotTypeThis" (* ssr plugin *)
+| "/" ast_closure_term (* ssr plugin *)
+| "/" ast_closure_term ssrfwdview (* ssr plugin *)
+]
+
+ident_no_do: [
+| "YouShouldNotTypeThis" ident (* ssr plugin *)
+| IDENT (* ssr plugin *)
+]
+
+ssripat: [
+| "_" (* ssr plugin *)
+| "*" (* ssr plugin *)
+| ">" (* ssr plugin *)
+| ident_no_do (* ssr plugin *)
+| "?" (* ssr plugin *)
+| "+" (* ssr plugin *)
+| "++" (* ssr plugin *)
+| ssrsimpl_ne (* ssr plugin *)
+| ssrdocc "->" (* ssr plugin *)
+| ssrdocc "<-" (* ssr plugin *)
+| ssrdocc (* ssr plugin *)
+| "->" (* ssr plugin *)
+| "<-" (* ssr plugin *)
+| "-" (* ssr plugin *)
+| "-/" "=" (* ssr plugin *)
+| "-/=" (* ssr plugin *)
+| "-/" "/" (* ssr plugin *)
+| "-//" (* ssr plugin *)
+| "-/" integer "/" (* ssr plugin *)
+| "-/" "/=" (* ssr plugin *)
+| "-//" "=" (* ssr plugin *)
+| "-//=" (* ssr plugin *)
+| "-/" integer "/=" (* ssr plugin *)
+| "-/" integer "/" integer "=" (* ssr plugin *)
+| ssrfwdview (* ssr plugin *)
+| "[" ":" ident_list_opt "]" (* ssr plugin *)
+| "[:" ident_list_opt "]" (* ssr plugin *)
+| ssrcpat (* ssr plugin *)
+]
+
+ident_list_opt: [
+| ident_list_opt ident
+| empty
+]
+
+ssripats: [
+| ssripat ssripats (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssriorpat: [
+| ssripats "|" ssriorpat (* ssr plugin *)
+| ssripats "|-" ">" ssriorpat (* ssr plugin *)
+| ssripats "|-" ssriorpat (* ssr plugin *)
+| ssripats "|->" ssriorpat (* ssr plugin *)
+| ssripats "||" ssriorpat (* ssr plugin *)
+| ssripats "|||" ssriorpat (* ssr plugin *)
+| ssripats "||||" ssriorpat (* ssr plugin *)
+| ssripats (* ssr plugin *)
+]
+
+ssrcpat: [
+| "YouShouldNotTypeThis" ssriorpat (* ssr plugin *)
+| "[" hat "]" (* ssr plugin *)
+| "[" ssriorpat "]" (* ssr plugin *)
+| "[=" ssriorpat "]" (* ssr plugin *)
+]
+
+hat: [
+| "^" ident (* ssr plugin *)
+| "^" "~" ident (* ssr plugin *)
+| "^" "~" natural (* ssr plugin *)
+| "^~" ident (* ssr plugin *)
+| "^~" natural (* ssr plugin *)
+]
+
+ssripats_ne: [
+| ssripat ssripats (* ssr plugin *)
+]
+
+ssrhpats: [
+| ssripats (* ssr plugin *)
+]
+
+ssrhpats_wtransp: [
+| ssripats (* ssr plugin *)
+| ssripats "@" ssripats (* ssr plugin *)
+]
+
+ssrhpats_nobs: [
+| ssripats (* ssr plugin *)
+]
+
+ssrrpat: [
+| "->" (* ssr plugin *)
+| "<-" (* ssr plugin *)
+]
+
+ssrintros_ne: [
+| "=>" ssripats_ne (* ssr plugin *)
+]
+
+ssrintros: [
+| ssrintros_ne (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrintrosarg: [
+| "YouShouldNotTypeThis" ssrtacarg ssrintros_ne (* ssr plugin *)
+]
+
+ssrfwdid: [
+| ident (* ssr plugin *)
+]
+
+ssrortacs: [
+| ssrtacarg "|" ssrortacs (* ssr plugin *)
+| ssrtacarg "|" (* ssr plugin *)
+| ssrtacarg (* ssr plugin *)
+| "|" ssrortacs (* ssr plugin *)
+| "|" (* ssr plugin *)
+]
+
+ssrhintarg: [
+| "[" "]" (* ssr plugin *)
+| "[" ssrortacs "]" (* ssr plugin *)
+| ssrtacarg (* ssr plugin *)
+]
+
+ssrhint3arg: [
+| "[" "]" (* ssr plugin *)
+| "[" ssrortacs "]" (* ssr plugin *)
+| ssrtac3arg (* ssr plugin *)
+]
+
+ssrortacarg: [
+| "[" ssrortacs "]" (* ssr plugin *)
+]
+
+ssrhint: [
+| empty (* ssr plugin *)
+| "by" ssrhintarg (* ssr plugin *)
+]
+
+ssrwgen: [
+| ssrclear_ne (* ssr plugin *)
+| ssrhoi_hyp (* ssr plugin *)
+| "@" ssrhoi_hyp (* ssr plugin *)
+| "(" ssrhoi_id ":=" lcpattern ")" (* ssr plugin *)
+| "(" ssrhoi_id ")" (* ssr plugin *)
+| "(@" ssrhoi_id ":=" lcpattern ")" (* ssr plugin *)
+| "(" "@" ssrhoi_id ":=" lcpattern ")" (* ssr plugin *)
+]
+
+ssrclausehyps: [
+| ssrwgen "," ssrclausehyps (* ssr plugin *)
+| ssrwgen ssrclausehyps (* ssr plugin *)
+| ssrwgen (* ssr plugin *)
+]
+
+ssrclauses: [
+| "in" ssrclausehyps "|-" "*" (* ssr plugin *)
+| "in" ssrclausehyps "|-" (* ssr plugin *)
+| "in" ssrclausehyps "*" (* ssr plugin *)
+| "in" ssrclausehyps (* ssr plugin *)
+| "in" "|-" "*" (* ssr plugin *)
+| "in" "*" (* ssr plugin *)
+| "in" "*" "|-" (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrfwd: [
+| ":=" ast_closure_lterm (* ssr plugin *)
+| ":" ast_closure_lterm ":=" ast_closure_lterm (* ssr plugin *)
+]
+
+ssrbvar: [
+| ident (* ssr plugin *)
+| "_" (* ssr plugin *)
+]
+
+ssrbinder: [
+| ssrbvar (* ssr plugin *)
+| "(" ssrbvar ")" (* ssr plugin *)
+| "(" ssrbvar ":" lconstr ")" (* ssr plugin *)
+| "(" ssrbvar ssrbvar_list ":" lconstr ")" (* ssr plugin *)
+| "(" ssrbvar ":" lconstr ":=" lconstr ")" (* ssr plugin *)
+| "(" ssrbvar ":=" lconstr ")" (* ssr plugin *)
+| of_alt operconstr99 (* ssr plugin *)
+]
+
+ssrbvar_list: [
+| ssrbvar_list ssrbvar
+| ssrbvar
+]
+
+ssrstruct: [
+| "{" "struct" ident "}" (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrposefwd: [
+| ssrbinder_list_opt ssrfwd (* ssr plugin *)
+]
+
+ssrfixfwd: [
+| "fix" ssrbvar ssrbinder_list_opt ssrstruct ssrfwd (* ssr plugin *)
+]
+
+ssrcofixfwd: [
+| "cofix" ssrbvar ssrbinder_list_opt ssrfwd (* ssr plugin *)
+]
+
+ssrbinder_list_opt: [
+| ssrbinder_list_opt ssrbinder
+| empty
+]
+
+ssrsetfwd: [
+| ":" ast_closure_lterm ":=" "{" ssrocc "}" cpattern (* ssr plugin *)
+| ":" ast_closure_lterm ":=" lcpattern (* ssr plugin *)
+| ":=" "{" ssrocc "}" cpattern (* ssr plugin *)
+| ":=" lcpattern (* ssr plugin *)
+]
+
+ssrhavefwd: [
+| ":" ast_closure_lterm ssrhint (* ssr plugin *)
+| ":" ast_closure_lterm ":=" ast_closure_lterm (* ssr plugin *)
+| ":" ast_closure_lterm ":=" (* ssr plugin *)
+| ":=" ast_closure_lterm (* ssr plugin *)
+]
+
+ssrhavefwdwbinders: [
+| ssrhpats_wtransp ssrbinder_list_opt ssrhavefwd (* ssr plugin *)
+]
+
+ssrseqarg: [
+| ssrswap (* ssr plugin *)
+| ssrseqidx ssrortacarg ssrorelse_opt (* ssr plugin *)
+| ssrseqidx ssrswap (* ssr plugin *)
+| ltac_expr3 (* ssr plugin *)
+]
+
+ssrorelse_opt: [
+| ssrorelse
+| empty
+]
+
+ssrseqidx: [
+| ident (* ssr plugin *)
+| natural (* ssr plugin *)
+]
+
+ssrswap: [
+| "first" (* ssr plugin *)
+| "last" (* ssr plugin *)
+]
+
+ssrorelse: [
+| "||" ltac_expr2 (* ssr plugin *)
+]
+
+ident: [
+| IDENT
+]
+
+ssrparentacarg: [
+| "(" ltac_expr ")" (* ssr plugin *)
+]
+
+ssrdotac: [
+| ltac_expr3 (* ssr plugin *)
+| ssrortacarg (* ssr plugin *)
+]
+
+ssr_first: [
+| ssr_first ssrintros_ne (* ssr plugin *)
+| "[" ltac_expr_list_or_opt "]" (* ssr plugin *)
+]
+
+ssr_first_else: [
+| ssr_first ssrorelse (* ssr plugin *)
+| ssr_first (* ssr plugin *)
+]
+
+ssrgen: [
+| ssrdocc cpattern (* ssr plugin *)
+| cpattern (* ssr plugin *)
+]
+
+ssrdgens_tl: [
+| "{" ssrhyp_list "}" cpattern ssrdgens_tl (* ssr plugin *)
+| "{" ssrhyp_list "}" (* ssr plugin *)
+| "{" ssrocc "}" cpattern ssrdgens_tl (* ssr plugin *)
+| "/" ssrdgens_tl (* ssr plugin *)
+| cpattern ssrdgens_tl (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrdgens: [
+| ":" ssrgen ssrdgens_tl (* ssr plugin *)
+]
+
+ssreqid: [
+| ssreqpat (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssreqpat: [
+| ident (* ssr plugin *)
+| "_" (* ssr plugin *)
+| "?" (* ssr plugin *)
+| "+" (* ssr plugin *)
+| ssrdocc "->" (* ssr plugin *)
+| ssrdocc "<-" (* ssr plugin *)
+| "->" (* ssr plugin *)
+| "<-" (* ssr plugin *)
+]
+
+ssrarg: [
+| ssrfwdview ssreqid ssrdgens ssrintros (* ssr plugin *)
+| ssrfwdview ssrclear ssrintros (* ssr plugin *)
+| ssreqid ssrdgens ssrintros (* ssr plugin *)
+| ssrclear_ne ssrintros (* ssr plugin *)
+| ssrintros_ne (* ssr plugin *)
+]
+
+ssrmovearg: [
+| ssrarg (* ssr plugin *)
+]
+
+ssrcasearg: [
+| ssrarg (* ssr plugin *)
+]
+
+ssragen: [
+| "{" ssrhyp_list "}" ssrterm (* ssr plugin *)
+| ssrterm (* ssr plugin *)
+]
+
+ssrhyp_list: [
+| ssrhyp_list ssrhyp
+| ssrhyp
+]
+
+ssragens: [
+| "{" ssrhyp_list "}" ssrterm ssragens (* ssr plugin *)
+| "{" ssrhyp_list "}" (* ssr plugin *)
+| ssrterm ssragens (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrapplyarg: [
+| ":" ssragen ssragens ssrintros (* ssr plugin *)
+| ssrclear_ne ssrintros (* ssr plugin *)
+| ssrintros_ne (* ssr plugin *)
+| ssrbwdview ":" ssragen ssragens ssrintros (* ssr plugin *)
+| ssrbwdview ssrclear ssrintros (* ssr plugin *)
+]
+
+ssrexactarg: [
+| ":" ssragen ssragens (* ssr plugin *)
+| ssrbwdview ssrclear (* ssr plugin *)
+| ssrclear_ne (* ssr plugin *)
+]
+
+ssrcongrarg: [
+| natural term ssrdgens (* ssr plugin *)
+| natural term (* ssr plugin *)
+| term ssrdgens (* ssr plugin *)
+| term (* ssr plugin *)
+]
+
+ssrrwocc: [
+| "{" ssrhyp_list_opt "}" (* ssr plugin *)
+| "{" ssrocc "}" (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrrule_ne: [
+| ssrterm_alt (* ssr plugin *)
+| ssrsimpl_ne (* ssr plugin *)
+]
+
+ssrterm_alt: [
+| "/" ssrterm
+| ssrterm
+| ssrsimpl_ne
+]
+
+ssrrule: [
+| ssrrule_ne (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrpattern_squarep: [
+| "[" rpattern "]" (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrpattern_ne_squarep: [
+| "[" rpattern "]" (* ssr plugin *)
+]
+
+ssrrwarg: [
+| "-" ssrmult ssrrwocc ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| "-/" ssrterm (* ssr plugin *)
+| ssrmult_ne ssrrwocc ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| "{" ssrhyp_list "}" ssrpattern_ne_squarep ssrrule_ne (* ssr plugin *)
+| "{" ssrhyp_list "}" ssrrule (* ssr plugin *)
+| "{" ssrocc "}" ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| "{" "}" ssrpattern_squarep ssrrule_ne (* ssr plugin *)
+| ssrpattern_ne_squarep ssrrule_ne (* ssr plugin *)
+| ssrrule_ne (* ssr plugin *)
+]
+
+ssrrwargs: [
+| ssrrwarg_list (* ssr plugin *)
+]
+
+ssrrwarg_list: [
+| ssrrwarg_list ssrrwarg
+| ssrrwarg
+]
+
+ssrunlockarg: [
+| "{" ssrocc "}" ssrterm (* ssr plugin *)
+| ssrterm (* ssr plugin *)
+]
+
+ssrunlockargs: [
+| ssrunlockarg_list_opt (* ssr plugin *)
+]
+
+ssrunlockarg_list_opt: [
+| ssrunlockarg_list_opt ssrunlockarg
+| empty
+]
+
+ssrsufffwd: [
+| ssrhpats ssrbinder_list_opt ":" ast_closure_lterm ssrhint (* ssr plugin *)
+]
+
+ssrwlogfwd: [
+| ":" ssrwgen_list_opt "/" ast_closure_lterm (* ssr plugin *)
+]
+
+ssrwgen_list_opt: [
+| ssrwgen_list_opt ssrwgen
+| empty
+]
+
+ssr_idcomma: [
+| empty (* ssr plugin *)
+| IDENT_alt "," (* ssr plugin *)
+]
+
+IDENT_alt: [
+| IDENT
+| "_"
+]
+
+ssr_rtype: [
+| "return" operconstr100 (* ssr plugin *)
+]
+
+ssr_mpat: [
+| pattern200 (* ssr plugin *)
+]
+
+ssr_dpat: [
+| ssr_mpat "in" pattern200 ssr_rtype (* ssr plugin *)
+| ssr_mpat ssr_rtype (* ssr plugin *)
+| ssr_mpat (* ssr plugin *)
+]
+
+ssr_dthen: [
+| ssr_dpat "then" lconstr (* ssr plugin *)
+]
+
+ssr_elsepat: [
+| "else" (* ssr plugin *)
+]
+
+ssr_else: [
+| ssr_elsepat lconstr (* ssr plugin *)
+]
+
+ssr_search_item: [
+| string (* ssr plugin *)
+| string "%" preident (* ssr plugin *)
+| constr_pattern (* ssr plugin *)
+]
+
+ssr_search_arg: [
+| "-" ssr_search_item ssr_search_arg (* ssr plugin *)
+| ssr_search_item ssr_search_arg (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssr_modlocs: [
+| empty (* ssr plugin *)
+| "in" modloc_list (* ssr plugin *)
+]
+
+modloc_list: [
+| modloc_list modloc
+| modloc
+]
+
+modloc: [
+| "-" global (* ssr plugin *)
+| global (* ssr plugin *)
+]
+
+ssrhintref: [
+| term (* ssr plugin *)
+| term "|" natural (* ssr plugin *)
+]
+
+ssrviewpos: [
+| "for" "move" "/" (* ssr plugin *)
+| "for" "apply" "/" (* ssr plugin *)
+| "for" "apply" "/" "/" (* ssr plugin *)
+| "for" "apply" "//" (* ssr plugin *)
+| empty (* ssr plugin *)
+]
+
+ssrviewposspc: [
+| ssrviewpos (* ssr plugin *)
+]
+
+rpattern: [
+| lconstr (* ssrmatching plugin *)
+| "in" lconstr (* ssrmatching plugin *)
+| lconstr "in" lconstr (* ssrmatching plugin *)
+| "in" lconstr "in" lconstr (* ssrmatching plugin *)
+| lconstr "in" lconstr "in" lconstr (* ssrmatching plugin *)
+| lconstr "as" lconstr "in" lconstr (* ssrmatching plugin *)
+]
+
+cpattern: [
+| "Qed" term (* ssrmatching plugin *)
+| term (* ssrmatching plugin *)
+]
+
+lcpattern: [
+| "Qed" lconstr (* ssrmatching plugin *)
+| lconstr (* ssrmatching plugin *)
+]
+
+ssrpatternarg: [
+| rpattern (* ssrmatching plugin *)
+]
+
+empty: [
+|
+]
+
+lpar_id_coloneq: [
+| "(" IDENT; ":="
+]
+
+name_colon: [
+| IDENT; ":"
+| "_" ":"
+]
+
+int: [
+| integer
+]
+
+command_entry: [
+| noedit_mode
+]
+
diff --git a/doc/tools/docgram/prodn.edit_mlg b/doc/tools/docgram/prodn.edit_mlg
new file mode 100644
index 0000000000..a28d07636a
--- /dev/null
+++ b/doc/tools/docgram/prodn.edit_mlg
@@ -0,0 +1,14 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+(* Defines additional productions and edits for use in documentation. Not compiled into Coq *)
+(* Contents used to generate prodn in doc *)
+
+DOC_GRAMMAR
diff --git a/doc/tools/docgram/productionlist.edit_mlg b/doc/tools/docgram/productionlist.edit_mlg
new file mode 100644
index 0000000000..84acd07075
--- /dev/null
+++ b/doc/tools/docgram/productionlist.edit_mlg
@@ -0,0 +1,25 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+(* Defines additional productions and edits for use in documentation. Not compiled into Coq *)
+(* Contents used to generate productionlists in doc *)
+
+DOC_GRAMMAR
+
+EXPAND: [ | ]
+
+(* ugh todo: try to handle before expansion *)
+tactic_then_gen : [
+| REPLACE ltac_expr_opt ".." ltac_expr_opt2
+| WITH ltac_expr_opt ".." or_opt ltac_expr_list2
+]
+
+ltac_expr_opt2 : [ | DELETENT ]
+ltac_expr_list2_opt : [ | DELETENT ]
diff --git a/ide/MacOS/default_accel_map b/ide/MacOS/default_accel_map
index 54a592a04d..6bcf3b438f 100644
--- a/ide/MacOS/default_accel_map
+++ b/ide/MacOS/default_accel_map
@@ -7,7 +7,6 @@
; (gtk_accel_path "<Actions>/Templates/Template Program Lemma" "")
(gtk_accel_path "<Actions>/Templates/Lemma" "<Shift><Primary>l")
; (gtk_accel_path "<Actions>/Templates/Template Fact" "")
-(gtk_accel_path "<Actions>/Tactics/auto" "<Primary><Control>a")
; (gtk_accel_path "<Actions>/Tactics/Tactic fold" "")
; (gtk_accel_path "<Actions>/Help/About Coq" "")
; (gtk_accel_path "<Actions>/Templates/Template Add Ring A Aplus Amult Aone Azero Ainv Aeq T [ c1 ... cn ]. " "")
@@ -19,7 +18,6 @@
; (gtk_accel_path "<Actions>/Tactics/Tactic inversion" "")
; (gtk_accel_path "<Actions>/Templates/Template Write State" "")
; (gtk_accel_path "<Actions>/Export/Export to" "")
-(gtk_accel_path "<Actions>/Tactics/auto with *" "<Primary><Control>asterisk")
; (gtk_accel_path "<Actions>/Tactics/Tactic inversion--clear" "")
; (gtk_accel_path "<Actions>/Templates/Template Implicit Arguments" "")
; (gtk_accel_path "<Actions>/Edit/Copy" "<Primary>c")
@@ -50,7 +48,6 @@
; (gtk_accel_path "<Actions>/Tactics/Tactic fail" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic left" "")
(gtk_accel_path "<Actions>/Edit/Undo" "<Primary>u")
-(gtk_accel_path "<Actions>/Tactics/eauto with *" "<Primary><Control>ampersand")
; (gtk_accel_path "<Actions>/Templates/Template Infix" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic functional induction" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic clear" "")
@@ -149,7 +146,6 @@
(gtk_accel_path "<Actions>/Templates/Theorem" "<Shift><Primary>t")
; (gtk_accel_path "<Actions>/Templates/Template Derive Dependent Inversion--clear" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic unfold" "")
-; (gtk_accel_path "<Actions>/Tactics/Try Tactics" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic red in" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic rewrite <- -- in" "")
; (gtk_accel_path "<Actions>/Templates/Template Hint Extern" "")
@@ -187,7 +183,6 @@
; (gtk_accel_path "<Actions>/Tactics/Tactic intro -- after" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic fold -- in" "")
; (gtk_accel_path "<Actions>/Templates/Template Program Definition" "")
-(gtk_accel_path "<Actions>/Tactics/Wizard" "<Primary><Control>dollar")
; (gtk_accel_path "<Actions>/Templates/Template Hint Resolve" "")
; (gtk_accel_path "<Actions>/Templates/Template Set Extraction Optimize" "")
; (gtk_accel_path "<Actions>/File/Revert all buffers" "")
@@ -228,7 +223,6 @@
; (gtk_accel_path "<Actions>/Export/Html" "")
; (gtk_accel_path "<Actions>/Templates/Template Extraction Inline" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic absurd" "")
-(gtk_accel_path "<Actions>/Tactics/intuition" "<Primary><Control>i")
; (gtk_accel_path "<Actions>/Tactics/Tactic simple induction" "")
; (gtk_accel_path "<Actions>/Queries/Queries" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic rewrite -- in" "")
@@ -289,7 +283,6 @@
; (gtk_accel_path "<Actions>/Templates/Template Add Field" "")
; (gtk_accel_path "<Actions>/Templates/Template Require Export" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic rewrite <-" "")
-(gtk_accel_path "<Actions>/Tactics/omega" "<Primary><Control>o")
; (gtk_accel_path "<Actions>/Tactics/Tactic split" "")
; (gtk_accel_path "<Actions>/File/Quit" "<Primary>q")
(gtk_accel_path "<Actions>/View/Display existential variable instances" "<Shift><Control>e")
@@ -328,7 +321,6 @@
; (gtk_accel_path "<Actions>/Edit/Edit" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic firstorder" "")
; (gtk_accel_path "<Actions>/Templates/Template C" "")
-(gtk_accel_path "<Actions>/Tactics/simpl" "<Primary><Control>s")
; (gtk_accel_path "<Actions>/Tactics/Tactic replace -- with" "")
; (gtk_accel_path "<Actions>/Templates/Template A" "")
; (gtk_accel_path "<Actions>/Templates/Template Remove Printing Record" "")
@@ -360,13 +352,11 @@
; (gtk_accel_path "<Actions>/File/File" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic setoid--replace" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic generalize dependent" "")
-(gtk_accel_path "<Actions>/Tactics/trivial" "<Primary><Control>v")
; (gtk_accel_path "<Actions>/Tactics/Tactic fix -- with" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic pose --:=--)" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic auto with" "")
; (gtk_accel_path "<Actions>/Templates/Template Add Printing Record" "")
; (gtk_accel_path "<Actions>/Tactics/Tactic inversion -- in" "")
-(gtk_accel_path "<Actions>/Tactics/eauto" "<Primary><Control>e")
; (gtk_accel_path "<Actions>/File/Open" "<Primary>o")
; (gtk_accel_path "<Actions>/Tactics/Tactic elim -- using" "")
; (gtk_accel_path "<Actions>/Templates/Template Hint" "")
diff --git a/ide/coqOps.ml b/ide/coqOps.ml
index 566654218d..d52f038f1f 100644
--- a/ide/coqOps.ml
+++ b/ide/coqOps.ml
@@ -137,7 +137,6 @@ class type ops =
object
method go_to_insert : unit task
method go_to_mark : GText.mark -> unit task
- method tactic_wizard : string list -> unit task
method process_next_phrase : unit task
method process_until_end_or_error : unit task
method handle_reset_initial : unit task
@@ -806,48 +805,6 @@ object(self)
else Coq.seq (self#backtrack_to_iter ~move_insert:false point)
(Coq.lift (fun () -> Sentence.tag_on_insert buffer)))
- method tactic_wizard l =
- let insert_phrase phrase tag =
- let stop = self#get_start_of_input in
- let phrase' = if stop#starts_line then phrase else "\n"^phrase in
- buffer#insert ~iter:stop phrase';
- Sentence.tag_on_insert buffer;
- let start = self#get_start_of_input in
- buffer#move_mark ~where:stop (`NAME "start_of_input");
- buffer#apply_tag tag ~start ~stop;
- if self#get_insert#compare stop <= 0 then
- buffer#place_cursor ~where:stop;
- let sentence =
- mk_sentence
- ~start:(`MARK (buffer#create_mark start))
- ~stop:(`MARK (buffer#create_mark stop))
- [] in
- Doc.push document sentence;
- messages#default_route#clear;
- self#show_goals
- in
- let display_error (loc, s) =
- messages#default_route#add (Ideutils.validate s) in
- let try_phrase phrase stop more =
- let action = log "Sending to coq now" in
- let route_id = 0 in
- let query = Coq.query (route_id,(phrase,Stateid.dummy)) in
- let next = function
- | Fail (_, l, str) -> (* FIXME: check *)
- display_error (l, str);
- messages#default_route#add (Pp.str ("Unsuccessfully tried: "^phrase));
- more
- | Good () -> stop Tags.Script.processed
- in
- Coq.bind (Coq.seq action query) next
- in
- let rec loop l = match l with
- | [] -> Coq.return ()
- | p :: l' ->
- try_phrase ("progress "^p^".") (insert_phrase (p^".")) (loop l')
- in
- loop l
-
method handle_reset_initial =
let action () =
(* clear the stack *)
diff --git a/ide/coqOps.mli b/ide/coqOps.mli
index 83ad8c15dc..1e8d87bb15 100644
--- a/ide/coqOps.mli
+++ b/ide/coqOps.mli
@@ -15,7 +15,6 @@ class type ops =
object
method go_to_insert : unit task
method go_to_mark : GText.mark -> unit task
- method tactic_wizard : string list -> unit task
method process_next_phrase : unit task
method process_until_end_or_error : unit task
method handle_reset_initial : unit task
diff --git a/ide/coqide.ml b/ide/coqide.ml
index 8d95dcee27..00168a06b1 100644
--- a/ide/coqide.ml
+++ b/ide/coqide.ml
@@ -603,9 +603,6 @@ module Nav = struct
let join_document _ = send_to_coq (fun sn -> sn.coqops#join_document)
end
-let tactic_wizard_callback l _ =
- send_to_coq (fun sn -> sn.coqops#tactic_wizard l)
-
let printopts_callback opts v =
let b = v#get_active in
let () = List.iter (fun o -> Coq.PrintOpt.set o b) opts in
@@ -881,10 +878,20 @@ let no_under = Util.String.map (fun x -> if x = '_' then '-' else x)
let alpha_items menu_name item_name l =
let mk_item text =
let text' =
- let last = String.length text - 1 in
- if text.[last] = '.'
- then text ^"\n"
- else text ^" "
+ let len = String.length text in
+ let buf = Buffer.create (len + 1) in
+ let escaped = ref false in
+ String.iter (fun c ->
+ if !escaped then
+ let () = Buffer.add_char buf c in
+ escaped := false
+ else if c = '_' then escaped := true
+ else Buffer.add_char buf c
+ ) text;
+ if text.[len - 1] = '.'
+ then Buffer.add_char buf '\n'
+ else Buffer.add_char buf ' ';
+ Buffer.contents buf
in
let callback _ =
on_current_term (fun sn -> sn.buffer#insert_interactive text')
@@ -1106,25 +1113,8 @@ let build_ui () =
("Force", "_Force", `EXECUTE, Nav.join_document, "Fully check the document", "f");
] end;
- let tacitem s sc =
- item s ~label:("_"^s)
- ~accel:(modifier_for_tactics#get^sc)
- ~callback:(tactic_wizard_callback [s])
- in
menu tactics_menu [
- item "Try Tactics" ~label:"_Try Tactics";
- item "Wizard" ~label:"<Proof Wizard>" ~stock:`DIALOG_INFO
- ~tooltip:"Proof Wizard" ~accel:(modifier_for_tactics#get^"dollar")
- ~callback:(tactic_wizard_callback automatic_tactics#get);
- tacitem "auto" "a";
- tacitem "auto with *" "asterisk";
- tacitem "eauto" "e";
- tacitem "eauto with *" "ampersand";
- tacitem "intuition" "i";
- tacitem "omega" "o";
- tacitem "simpl" "s";
- tacitem "tauto" "p";
- tacitem "trivial" "v";
+ item "Tactics" ~label:"_Tactics";
];
alpha_items tactics_menu "Tactic" Coq_commands.tactics;
diff --git a/ide/coqide_ui.ml b/ide/coqide_ui.ml
index d4a339f4f5..452808490d 100644
--- a/ide/coqide_ui.ml
+++ b/ide/coqide_ui.ml
@@ -100,18 +100,7 @@ let init () =
\n <menuitem action='Previous' />\
\n <menuitem action='Next' />\
\n </menu>\
-\n <menu action='Try Tactics'>\
-\n <menuitem action='auto' />\
-\n <menuitem action='auto with *' />\
-\n <menuitem action='eauto' />\
-\n <menuitem action='eauto with *' />\
-\n <menuitem action='intuition' />\
-\n <menuitem action='omega' />\
-\n <menuitem action='simpl' />\
-\n <menuitem action='tauto' />\
-\n <menuitem action='trivial' />\
-\n <menuitem action='Wizard' />\
-\n <separator />\
+\n <menu action='Tactics'>\
\n %s\
\n </menu>\
\n <menu action='Templates'>\
@@ -173,7 +162,6 @@ let init () =
\n <toolitem action='Interrupt' />\
\n <toolitem action='Previous' />\
\n <toolitem action='Next' />\
-\n <toolitem action='Wizard' />\
\n</toolbar>\
\n</ui>"
(if Coq_config.gtk_platform <> `QUARTZ then "<menuitem action='Quit' />" else "")
diff --git a/ide/preferences.ml b/ide/preferences.ml
index ea0495bb19..bf9fe8922a 100644
--- a/ide/preferences.ml
+++ b/ide/preferences.ml
@@ -938,16 +938,6 @@ let configure ?(apply=(fun () -> ())) parent =
else cmd_browse#get])
cmd_browse#get
in
-(*
- let automatic_tactics =
- strings
- ~f:automatic_tactics#set
- ~add:(fun () -> ["<edit me>"])
- "Wizard tactics to try in order"
- automatic_tactics#get
-
- in
-*)
let contextual_menus_on_goal = pbool "Contextual menus on goal" contextual_menus_on_goal in
@@ -1008,10 +998,6 @@ let configure ?(apply=(fun () -> ())) parent =
Section("Externals", None,
[cmd_coqtop;cmd_coqc;cmd_make;cmd_coqmakefile; cmd_coqdoc;
cmd_print;cmd_editor;cmd_browse]);
-(*
- Section("Tactics Wizard", None,
- [automatic_tactics]);
-*)
Section("Shortcuts", Some `PREFERENCES,
[modifiers_valid; modifier_for_tactics;
modifier_for_templates; modifier_for_display; modifier_for_navigation;
diff --git a/kernel/byterun/coq_uint63_native.h b/kernel/byterun/coq_uint63_native.h
index 1fdafc9d8f..9fbd3f83d8 100644
--- a/kernel/byterun/coq_uint63_native.h
+++ b/kernel/byterun/coq_uint63_native.h
@@ -111,51 +111,26 @@ value uint63_mulc(value x, value y, value* h) {
#define le128(xh,xl,yh,yl) (uint63_lt(xh,yh) || (uint63_eq(xh,yh) && uint63_leq(xl,yl)))
#define maxuint63 ((uint64_t)0x7FFFFFFFFFFFFFFF)
-/* precondition: y <> 0 */
-/* outputs r and sets ql to q % 2^63 s.t. x = q * y + r, r < y */
+/* precondition: xh < y */
+/* outputs r and sets ql to q s.t. x = q * y + r, r < y */
static value uint63_div21_aux(value xh, value xl, value y, value* ql) {
- xh = uint63_of_value(xh);
- xl = uint63_of_value(xl);
+ uint64_t nh = uint63_of_value(xh);
+ uint64_t nl = uint63_of_value(xl);
y = uint63_of_value(y);
- uint64_t maskh = 0;
- uint64_t maskl = 1;
- uint64_t dh = 0;
- uint64_t dl = y;
- int cmp = 1;
- /* int n = 0 */
- /* loop invariant: mask = 2^n, d = mask * y, (2 * d <= x -> cmp), n >= 0, d < 2^(2*63) */
- while (!(dh >> (63 - 1)) && cmp) {
- dh = (dh << 1) | (dl >> (63 - 1));
- dl = (dl << 1) & maxuint63;
- maskh = (maskh << 1) | (maskl >> (63 - 1));
- maskl = (maskl << 1) & maxuint63;
- /* ++n */
- cmp = lt128(dh,dl,xh,xl);
+ uint64_t q = 0;
+ for (int i = 0; i < 63; ++i) {
+ // invariants: 0 <= nh < y, nl = (xl*2^i) % 2^64,
+ // (q*y + nh) * 2^(63-i) + (xl % 2^(63-i)) = (xh%y) * 2^63 + xl
+ nl <<= 1;
+ nh = (nh << 1) | (nl >> 63);
+ q <<= 1;
+ if (nh >= y) { q |= 1; nh -= y; }
}
- uint64_t remh = xh;
- uint64_t reml = xl;
- /* uint64_t quotienth = 0; */
- uint64_t quotientl = 0;
- /* loop invariant: x = quotient * y + rem, y * 2^(n+1) > r,
- mask = floor(2^n), d = mask * y, n >= -1 */
- while (maskh | maskl) {
- if (le128(dh,dl,remh,reml)) { /* if rem >= d, add one bit and subtract d */
- /* quotienth = quotienth | maskh */
- quotientl = quotientl | maskl;
- remh = (uint63_lt(reml,dl)) ? (remh - dh - 1) : (remh - dh);
- reml = reml - dl;
- }
- maskl = (maskl >> 1) | ((maskh << (63 - 1)) & maxuint63);
- maskh = maskh >> 1;
- dl = (dl >> 1) | ((dh << (63 - 1)) & maxuint63);
- dh = dh >> 1;
- /* decr n */
- }
- *ql = Val_int(quotientl);
- return Val_int(reml);
+ *ql = Val_int(q);
+ return Val_int(nh);
}
value uint63_div21(value xh, value xl, value y, value* ql) {
- if (uint63_of_value(y) == 0) {
+ if (uint63_leq(y, xh)) {
*ql = Val_int(0);
return Val_int(0);
} else {
diff --git a/kernel/cClosure.ml b/kernel/cClosure.ml
index 6be8a59aeb..3fd613e905 100644
--- a/kernel/cClosure.ml
+++ b/kernel/cClosure.ml
@@ -211,15 +211,6 @@ let delta = mkflags [fDELTA]
let zeta = mkflags [fZETA]
let nored = no_red
-(* Removing fZETA for finer behaviour would break many developments *)
-let unfold_side_flags = [fBETA;fMATCH;fFIX;fCOFIX;fZETA]
-let unfold_side_red = mkflags [fBETA;fMATCH;fFIX;fCOFIX;fZETA]
-let unfold_red kn =
- let flag = match kn with
- | EvalVarRef id -> fVAR id
- | EvalConstRef kn -> fCONST kn in
- mkflags (flag::unfold_side_flags)
-
(* Flags of reduction and cache of constants: 'a is a type that may be
* mapped to constr. 'a infos implements a cache for constants and
* abstractions, storing a representation (of type 'a) of the body of
@@ -1084,7 +1075,7 @@ module FNativeEntries =
let mkInt env i =
check_int env;
- { mark = mark Norm KnownR; term = FInt i }
+ { mark = mark Cstr KnownR; term = FInt i }
let mkBool env b =
check_bool env;
diff --git a/kernel/cClosure.mli b/kernel/cClosure.mli
index 027d5245c9..cd1de4c834 100644
--- a/kernel/cClosure.mli
+++ b/kernel/cClosure.mli
@@ -84,10 +84,6 @@ val delta : reds
val zeta : reds
val nored : reds
-
-val unfold_side_red : reds
-val unfold_red : evaluable_global_reference -> reds
-
(***********************************************************************)
type table_key = Constant.t Univ.puniverses tableKey
diff --git a/kernel/uint63.mli b/kernel/uint63.mli
index 93632da110..5542716af2 100644
--- a/kernel/uint63.mli
+++ b/kernel/uint63.mli
@@ -37,6 +37,8 @@ val mul : t -> t -> t
val div : t -> t -> t
val rem : t -> t -> t
+val diveucl : t -> t -> t * t
+
(* Specific arithmetic operations *)
val mulc : t -> t -> t * t
val addmuldiv : t -> t -> t -> t
@@ -57,3 +59,13 @@ val head0 : t -> t
val tail0 : t -> t
val is_uint63 : Obj.t -> bool
+
+(* Arithmetic with explicit carries *)
+
+(* Analog of Numbers.Abstract.Cyclic.carry *)
+type 'a carry = C0 of 'a | C1 of 'a
+
+val addc : t -> t -> t carry
+val addcarryc : t -> t -> t carry
+val subc : t -> t -> t carry
+val subcarryc : t -> t -> t carry
diff --git a/kernel/uint63_amd64_63.ml b/kernel/uint63_amd64_63.ml
index 20b2f58496..5c4028e1c8 100644
--- a/kernel/uint63_amd64_63.ml
+++ b/kernel/uint63_amd64_63.ml
@@ -82,6 +82,8 @@ let div (x : int) (y : int) =
let rem (x : int) (y : int) =
if y = 0 then 0 else Int64.to_int (Int64.rem (to_uint64 x) (to_uint64 y))
+let diveucl x y = (div x y, rem x y)
+
let addmuldiv p x y =
l_or (l_sl x p) (l_sr y (uint_size - p))
@@ -94,55 +96,32 @@ let le (x : int) (y : int) =
(x lxor 0x4000000000000000) <= (y lxor 0x4000000000000000)
[@@ocaml.inline always]
-(* A few helper functions on 128 bits *)
-let lt128 xh xl yh yl =
- lt xh yh || (xh = yh && lt xl yl)
-
-let le128 xh xl yh yl =
- lt xh yh || (xh = yh && le xl yl)
-
(* division of two numbers by one *)
-(* precondition: y <> 0 *)
-(* outputs: q % 2^63, r s.t. x = q * y + r, r < y *)
+(* precondition: xh < y *)
+(* outputs: q, r s.t. x = q * y + r, r < y *)
let div21 xh xl y =
- let maskh = ref 0 in
- let maskl = ref 1 in
- let dh = ref 0 in
- let dl = ref y in
- let cmp = ref true in
- (* n = ref 0 *)
- (* loop invariant: mask = 2^n, d = mask * y, (2 * d <= x -> cmp), n >= 0 *)
- while !dh >= 0 && !cmp do (* dh >= 0 tests that dh highest bit is zero *)
- (* We don't use addmuldiv below to avoid checks on 1 *)
- dh := (!dh lsl 1) lor (!dl lsr (uint_size - 1));
- dl := !dl lsl 1;
- maskh := (!maskh lsl 1) lor (!maskl lsr (uint_size - 1));
- maskl := !maskl lsl 1;
- (* incr n *)
- cmp := lt128 !dh !dl xh xl;
- done; (* mask = 2^n, d = 2^n * y, 2 * d > x *)
- let remh = ref xh in
- let reml = ref xl in
- (* quotienth = ref 0 *)
- let quotientl = ref 0 in
- (* loop invariant: x = quotient * y + rem, y * 2^(n+1) > r,
- mask = floor(2^n), d = mask * y, n >= -1 *)
- while !maskh lor !maskl <> 0 do
- if le128 !dh !dl !remh !reml then begin (* if rem >= d, add one bit and subtract d *)
- (* quotienth := !quotienth lor !maskh *)
- quotientl := !quotientl lor !maskl;
- remh := if lt !reml !dl then !remh - !dh - 1 else !remh - !dh;
- reml := !reml - !dl;
- end;
- maskl := (!maskl lsr 1) lor (!maskh lsl (uint_size - 1));
- maskh := !maskh lsr 1;
- dl := (!dl lsr 1) lor (!dh lsl (uint_size - 1));
- dh := !dh lsr 1;
- (* decr n *)
+ (* nh might temporarily grow as large as 2*y - 1 in the loop body,
+ so we store it as a 64-bit unsigned integer *)
+ let nh = ref xh in
+ let nl = ref xl in
+ let q = ref 0 in
+ for _i = 0 to 62 do
+ (* invariants: 0 <= nh < y, nl = (xl*2^i) % 2^63,
+ (q*y + nh) * 2^(63-i) + (xl % 2^(63-i)) = (xh%y) * 2^63 + xl *)
+ nh := Int64.logor (Int64.shift_left !nh 1) (Int64.of_int (!nl lsr 62));
+ nl := !nl lsl 1;
+ q := !q lsl 1;
+ (* TODO: use "Int64.unsigned_compare !nh y >= 0",
+ once OCaml 4.08 becomes the minimal required version *)
+ if Int64.compare !nh 0L < 0 || Int64.compare !nh y >= 0 then
+ begin q := !q lor 1; nh := Int64.sub !nh y; end
done;
- !quotientl, !reml
+ !q, Int64.to_int !nh
-let div21 xh xl y = if y = 0 then 0, 0 else div21 xh xl y
+let div21 xh xl y =
+ let xh = to_uint64 xh in
+ let y = to_uint64 y in
+ if Int64.compare y xh <= 0 then 0, 0 else div21 xh xl y
(* exact multiplication *)
(* TODO: check that none of these additions could be a logical or *)
@@ -225,3 +204,24 @@ let tail0 x =
let is_uint63 t =
Obj.is_int t
[@@ocaml.inline always]
+
+(* Arithmetic with explicit carries *)
+
+(* Analog of Numbers.Abstract.Cyclic.carry *)
+type 'a carry = C0 of 'a | C1 of 'a
+
+let addc x y =
+ let r = x + y in
+ if lt r x then C1 r else C0 r
+
+let addcarryc x y =
+ let r = x + y + 1 in
+ if le r x then C1 r else C0 r
+
+let subc x y =
+ let r = x - y in
+ if le y x then C0 r else C1 r
+
+let subcarryc x y =
+ let r = x - y - 1 in
+ if lt y x then C0 r else C1 r
diff --git a/kernel/uint63_i386_31.ml b/kernel/uint63_i386_31.ml
index c3279779e1..b8eccd19fb 100644
--- a/kernel/uint63_i386_31.ml
+++ b/kernel/uint63_i386_31.ml
@@ -83,58 +83,33 @@ let div x y =
let rem x y =
if y = 0L then 0L else Int64.rem x y
+let diveucl x y = (div x y, rem x y)
+
let addmuldiv p x y =
l_or (l_sl x p) (l_sr y Int64.(sub (of_int uint_size) p))
-(* A few helper functions on 128 bits *)
-let lt128 xh xl yh yl =
- lt xh yh || (xh = yh && lt xl yl)
-
-let le128 xh xl yh yl =
- lt xh yh || (xh = yh && le xl yl)
-
(* division of two numbers by one *)
-(* precondition: y <> 0 *)
-(* outputs: q % 2^63, r s.t. x = q * y + r, r < y *)
+(* precondition: xh < y *)
+(* outputs: q, r s.t. x = q * y + r, r < y *)
let div21 xh xl y =
- let maskh = ref zero in
- let maskl = ref one in
- let dh = ref zero in
- let dl = ref y in
- let cmp = ref true in
- (* n = ref 0 *)
- (* loop invariant: mask = 2^n, d = mask * y, (2 * d <= x -> cmp), n >= 0 *)
- while Int64.equal (l_sr !dh (of_int (uint_size - 1))) zero && !cmp do
- (* We don't use addmuldiv below to avoid checks on 1 *)
- dh := l_or (l_sl !dh one) (l_sr !dl (of_int (uint_size - 1)));
- dl := l_sl !dl one;
- maskh := l_or (l_sl !maskh one) (l_sr !maskl (of_int (uint_size - 1)));
- maskl := l_sl !maskl one;
- (* incr n *)
- cmp := lt128 !dh !dl xh xl;
- done; (* mask = 2^n, d = 2^n * d, 2 * d > x *)
- let remh = ref xh in
- let reml = ref xl in
- (* quotienth = ref 0 *)
- let quotientl = ref zero in
- (* loop invariant: x = quotient * y + rem, y * 2^(n+1) > r,
- mask = floor(2^n), d = mask * y, n >= -1 *)
- while not (Int64.equal (l_or !maskh !maskl) zero) do
- if le128 !dh !dl !remh !reml then begin (* if rem >= d, add one bit and subtract d *)
- (* quotienth := !quotienth lor !maskh *)
- quotientl := l_or !quotientl !maskl;
- remh := if lt !reml !dl then sub (sub !remh !dh) one else sub !remh !dh;
- reml := sub !reml !dl
- end;
- maskl := l_or (l_sr !maskl one) (l_sl !maskh (of_int (uint_size - 1)));
- maskh := l_sr !maskh one;
- dl := l_or (l_sr !dl one) (l_sl !dh (of_int (uint_size - 1)));
- dh := l_sr !dh one
- (* decr n *)
+ let nh = ref xh in
+ let nl = ref xl in
+ let q = ref 0L in
+ for _i = 0 to 62 do
+ (* invariants: 0 <= nh < y, nl = (xl*2^i) % 2^64,
+ (q*y + nh) * 2^(63-i) + (xl % 2^(63-i)) = (xh%y) * 2^63 + xl *)
+ nl := Int64.shift_left !nl 1;
+ nh := Int64.logor (Int64.shift_left !nh 1) (Int64.shift_right_logical !nl 63);
+ q := Int64.shift_left !q 1;
+ (* TODO: use "Int64.unsigned_compare !nh y >= 0",
+ once OCaml 4.08 becomes the minimal required version *)
+ if Int64.compare !nh 0L < 0 || Int64.compare !nh y >= 0 then
+ begin q := Int64.logor !q 1L; nh := Int64.sub !nh y; end
done;
- !quotientl, !reml
+ !q, !nh
-let div21 xh xl y = if Int64.equal y zero then zero, zero else div21 xh xl y
+let div21 xh xl y =
+ if Int64.compare y xh <= 0 then zero, zero else div21 xh xl y
(* exact multiplication *)
let mulc x y =
@@ -191,6 +166,27 @@ let is_uint63 t =
Obj.is_block t && Int.equal (Obj.tag t) Obj.custom_tag
&& le (Obj.magic t) maxuint63
+(* Arithmetic with explicit carries *)
+
+(* Analog of Numbers.Abstract.Cyclic.carry *)
+type 'a carry = C0 of 'a | C1 of 'a
+
+let addc x y =
+ let r = add x y in
+ if lt r x then C1 r else C0 r
+
+let addcarryc x y =
+ let r = addcarry x y in
+ if le r x then C1 r else C0 r
+
+let subc x y =
+ let r = sub x y in
+ if le y x then C0 r else C1 r
+
+let subcarryc x y =
+ let r = subcarry x y in
+ if lt y x then C0 r else C1 r
+
(* Register all exported functions so that they can be called from C code *)
let () =
diff --git a/library/lib.ml b/library/lib.ml
index d461644d56..59b55cc16b 100644
--- a/library/lib.ml
+++ b/library/lib.ml
@@ -427,46 +427,60 @@ type secentry =
| Variable of {
id:Names.Id.t;
kind:Decl_kinds.binding_kind;
- poly:bool;
univs:Univ.ContextSet.t;
}
| Context of Univ.ContextSet.t
-let sectab =
- Summary.ref ([] : (secentry list * Opaqueproof.work_list * abstr_list) list)
- ~name:"section-context"
+type section_data = {
+ sec_entry : secentry list;
+ sec_workl : Opaqueproof.work_list;
+ sec_abstr : abstr_list;
+ sec_poly : bool;
+}
-let add_section () =
- sectab := ([],(Names.Cmap.empty,Names.Mindmap.empty),
- (Names.Cmap.empty,Names.Mindmap.empty)) :: !sectab
+let empty_section_data ~poly = {
+ sec_entry = [];
+ sec_workl = (Names.Cmap.empty,Names.Mindmap.empty);
+ sec_abstr = (Names.Cmap.empty,Names.Mindmap.empty);
+ sec_poly = poly;
+}
-let check_same_poly p vars =
- let pred = function Context _ -> p = false | Variable {poly} -> p != poly in
- if List.exists pred vars then
+let sectab =
+ Summary.ref ([] : section_data list) ~name:"section-context"
+
+let check_same_poly p sec =
+ if p != sec.sec_poly then
user_err Pp.(str "Cannot mix universe polymorphic and monomorphic declarations in sections.")
+let add_section ~poly () =
+ List.iter (fun tab -> check_same_poly poly tab) !sectab;
+ sectab := empty_section_data ~poly :: !sectab
+
let add_section_variable ~name ~kind ~poly univs =
match !sectab with
| [] -> () (* because (Co-)Fixpoint temporarily uses local vars *)
- | (vars,repl,abs)::sl ->
- List.iter (fun tab -> check_same_poly poly (pi1 tab)) !sectab;
- sectab := (Variable {id=name;kind;poly;univs}::vars,repl,abs)::sl
+ | s :: sl ->
+ List.iter (fun tab -> check_same_poly poly tab) !sectab;
+ let s = { s with sec_entry = Variable {id=name;kind;univs} :: s.sec_entry } in
+ sectab := s :: sl
let add_section_context ctx =
match !sectab with
| [] -> () (* because (Co-)Fixpoint temporarily uses local vars *)
- | (vars,repl,abs)::sl ->
- check_same_poly true vars;
- sectab := (Context ctx :: vars,repl,abs)::sl
+ | s :: sl ->
+ check_same_poly true s;
+ let s = { s with sec_entry = Context ctx :: s.sec_entry } in
+ sectab := s :: sl
exception PolyFound of bool (* make this a let exception once possible *)
let is_polymorphic_univ u =
try
let open Univ in
- List.iter (fun (vars,_,_) ->
+ List.iter (fun s ->
+ let vars = s.sec_entry in
List.iter (function
- | Variable {poly;univs=(univs,_)} ->
- if LSet.mem u univs then raise (PolyFound poly)
+ | Variable {univs=(univs,_)} ->
+ if LSet.mem u univs then raise (PolyFound s.sec_poly)
| Context (univs,_) ->
if LSet.mem u univs then raise (PolyFound true)
) vars
@@ -474,12 +488,12 @@ let is_polymorphic_univ u =
false
with PolyFound b -> b
-let extract_hyps (secs,ohyps) =
+let extract_hyps poly (secs,ohyps) =
let rec aux = function
- | (Variable {id;kind;poly;univs}::idl, decl::hyps) when Names.Id.equal id (NamedDecl.get_id decl) ->
+ | (Variable {id;kind;univs}::idl, decl::hyps) when Names.Id.equal id (NamedDecl.get_id decl) ->
let l, r = aux (idl,hyps) in
(decl,kind) :: l, if poly then Univ.ContextSet.union r univs else r
- | (Variable {poly;univs}::idl,hyps) ->
+ | (Variable {univs}::idl,hyps) ->
let l, r = aux (idl,hyps) in
l, if poly then Univ.ContextSet.union r univs else r
| (Context ctx :: idl, hyps) ->
@@ -511,9 +525,9 @@ let name_instance inst =
let add_section_replacement f g poly hyps =
match !sectab with
| [] -> ()
- | (vars,exps,abs)::sl ->
- let () = check_same_poly poly vars in
- let sechyps,ctx = extract_hyps (vars,hyps) in
+ | s :: sl ->
+ let () = check_same_poly poly s in
+ let sechyps,ctx = extract_hyps s.sec_poly (s.sec_entry, hyps) in
let ctx = Univ.ContextSet.to_context ctx in
let inst = Univ.UContext.instance ctx in
let nas = name_instance inst in
@@ -524,7 +538,11 @@ let add_section_replacement f g poly hyps =
abstr_subst = subst;
abstr_uctx = ctx;
} in
- sectab := (vars,f (inst,args) exps, g info abs) :: sl
+ let s = { s with
+ sec_workl = f (inst, args) s.sec_workl;
+ sec_abstr = g info s.sec_abstr;
+ } in
+ sectab := s :: sl
let add_section_kn ~poly kn =
let f x (l1,l2) = (l1,Names.Mindmap.add kn x l2) in
@@ -534,13 +552,13 @@ let add_section_constant ~poly kn =
let f x (l1,l2) = (Names.Cmap.add kn x l1,l2) in
add_section_replacement f f poly
-let replacement_context () = pi2 (List.hd !sectab)
+let replacement_context () = (List.hd !sectab).sec_workl
let section_segment_of_constant con =
- Names.Cmap.find con (fst (pi3 (List.hd !sectab)))
+ Names.Cmap.find con (fst (List.hd !sectab).sec_abstr)
let section_segment_of_mutual_inductive kn =
- Names.Mindmap.find kn (snd (pi3 (List.hd !sectab)))
+ Names.Mindmap.find kn (snd (List.hd !sectab).sec_abstr)
let empty_segment = {
abstr_ctx = [];
@@ -563,20 +581,20 @@ let section_instance = let open GlobRef in function
| Variable {id=id'} -> Names.Id.equal id id'
| Context _ -> false
in
- if List.exists eq (pi1 (List.hd !sectab))
+ if List.exists eq (List.hd !sectab).sec_entry
then Univ.Instance.empty, [||]
else raise Not_found
| ConstRef con ->
- Names.Cmap.find con (fst (pi2 (List.hd !sectab)))
+ Names.Cmap.find con (fst (List.hd !sectab).sec_workl)
| IndRef (kn,_) | ConstructRef ((kn,_),_) ->
- Names.Mindmap.find kn (snd (pi2 (List.hd !sectab)))
+ Names.Mindmap.find kn (snd (List.hd !sectab).sec_workl)
let is_in_section ref =
try ignore (section_instance ref); true with Not_found -> false
(*************)
(* Sections. *)
-let open_section id =
+let open_section ~poly id =
let opp = !lib_state.path_prefix in
let obj_dir = add_dirpath_suffix opp.Nametab.obj_dir id in
let prefix = Nametab.{ obj_dir; obj_mp = opp.obj_mp; obj_sec = add_dirpath_suffix opp.obj_sec id } in
@@ -587,7 +605,7 @@ let open_section id =
(*Pushed for the lifetime of the section: removed by unfrozing the summary*)
Nametab.(push_dir (Until 1) obj_dir (GlobDirRef.DirOpenSection prefix));
lib_state := { !lib_state with path_prefix = prefix };
- add_section ()
+ add_section ~poly ()
(* Restore lib_stk and summaries as before the section opening, and
diff --git a/library/lib.mli b/library/lib.mli
index 01366ddfd0..fe6bf69ec4 100644
--- a/library/lib.mli
+++ b/library/lib.mli
@@ -147,7 +147,7 @@ val library_part : GlobRef.t -> DirPath.t
(** {6 Sections } *)
-val open_section : Id.t -> unit
+val open_section : poly:bool -> Id.t -> unit
val close_section : unit -> unit
(** {6 We can get and set the state of the operations (used in [States]). } *)
diff --git a/plugins/extraction/ExtrOCamlInt63.v b/plugins/extraction/ExtrOCamlInt63.v
new file mode 100644
index 0000000000..a2ee602313
--- /dev/null
+++ b/plugins/extraction/ExtrOCamlInt63.v
@@ -0,0 +1,56 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+(** Extraction to OCaml of native 63-bit machine integers. *)
+
+From Coq Require Int63 Extraction.
+
+(** Basic data types used by some primitive operators. *)
+
+Extract Inductive bool => bool [ true false ].
+Extract Inductive prod => "( * )" [ "" ].
+Extract Inductive comparison => int [ "0" "(-1)" "1" ].
+Extract Inductive DoubleType.carry => "Uint63.carry" [ "Uint63.C0" "Uint63.C1" ].
+
+(** Primitive types and operators. *)
+Extract Constant Int63.int => "Uint63.t".
+Extraction Inline Int63.int.
+(* Otherwise, the name conflicts with the primitive OCaml type [int] *)
+
+Extract Constant Int63.lsl => "Uint63.l_sl".
+Extract Constant Int63.lsr => "Uint63.l_sr".
+Extract Constant Int63.land => "Uint63.l_and".
+Extract Constant Int63.lor => "Uint63.l_or".
+Extract Constant Int63.lxor => "Uint63.l_xor".
+
+Extract Constant Int63.add => "Uint63.add".
+Extract Constant Int63.sub => "Uint63.sub".
+Extract Constant Int63.mul => "Uint63.mul".
+Extract Constant Int63.mulc => "Uint63.mulc".
+Extract Constant Int63.div => "Uint63.div".
+Extract Constant Int63.mod => "Uint63.rem".
+
+Extract Constant Int63.eqb => "Uint63.equal".
+Extract Constant Int63.ltb => "Uint63.lt".
+Extract Constant Int63.leb => "Uint63.le".
+
+Extract Constant Int63.addc => "Uint63.addc".
+Extract Constant Int63.addcarryc => "Uint63.addcarryc".
+Extract Constant Int63.subc => "Uint63.subc".
+Extract Constant Int63.subcarryc => "Uint63.subcarryc".
+
+Extract Constant Int63.diveucl => "Uint63.diveucl".
+Extract Constant Int63.diveucl_21 => "Uint63.div21".
+Extract Constant Int63.addmuldiv => "Uint63.addmuldiv".
+
+Extract Constant Int63.compare => "Uint63.compare".
+
+Extract Constant Int63.head0 => "Uint63.head0".
+Extract Constant Int63.tail0 => "Uint63.tail0".
diff --git a/plugins/funind/functional_principles_types.mli b/plugins/funind/functional_principles_types.mli
index b4f6f92f9c..7cadd4396d 100644
--- a/plugins/funind/functional_principles_types.mli
+++ b/plugins/funind/functional_principles_types.mli
@@ -33,8 +33,10 @@ val generate_functional_principle :
exception No_graph_found
-val make_scheme : Evd.evar_map ref ->
- (pconstant*Sorts.family) list -> Evd.side_effects Proof_global.proof_entry list
+val make_scheme
+ : Evd.evar_map ref
+ -> (pconstant*Sorts.family) list
+ -> Evd.side_effects Proof_global.proof_entry list
val build_scheme : (Id.t*Libnames.qualid*Sorts.family) list -> unit
val build_case_scheme : (Id.t*Libnames.qualid*Sorts.family) -> unit
diff --git a/plugins/funind/g_indfun.mlg b/plugins/funind/g_indfun.mlg
index 5f859b3e4b..1b75d3d966 100644
--- a/plugins/funind/g_indfun.mlg
+++ b/plugins/funind/g_indfun.mlg
@@ -148,9 +148,7 @@ END
module Vernac = Pvernac.Vernac_
module Tactic = Pltac
-type function_rec_definition_loc_argtype = (Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) Loc.located
-
-let (wit_function_rec_definition_loc : function_rec_definition_loc_argtype Genarg.uniform_genarg_type) =
+let (wit_function_rec_definition_loc : Vernacexpr.fixpoint_expr Loc.located Genarg.uniform_genarg_type) =
Genarg.create_arg "function_rec_definition_loc"
let function_rec_definition_loc =
@@ -175,10 +173,10 @@ let () =
let is_proof_termination_interactively_checked recsl =
List.exists (function
- | _,((_,( Some { CAst.v = CMeasureRec _ }
- | Some { CAst.v = CWfRec _}),_,_,_),_) -> true
- | _,((_,Some { CAst.v = CStructRec _ },_,_,_),_)
- | _,((_,None,_,_,_),_) -> false) recsl
+ | _,( Vernacexpr.{ rec_order = Some { CAst.v = CMeasureRec _ } }
+ | Vernacexpr.{ rec_order = Some { CAst.v = CWfRec _} }) -> true
+ | _, Vernacexpr.{ rec_order = Some { CAst.v = CStructRec _ } }
+ | _, Vernacexpr.{ rec_order = None } -> false) recsl
let classify_as_Fixpoint recsl =
Vernac_classifier.classify_vernac
diff --git a/plugins/funind/indfun.ml b/plugins/funind/indfun.ml
index 99efe3e5e2..1987677d7d 100644
--- a/plugins/funind/indfun.ml
+++ b/plugins/funind/indfun.ml
@@ -1,3 +1,13 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
open CErrors
open Sorts
open Util
@@ -157,17 +167,16 @@ let interp_casted_constr_with_implicits env sigma impls c =
and not as a constr
*)
-let build_newrecursive
- lnameargsardef =
+let build_newrecursive lnameargsardef =
let env0 = Global.env() in
let sigma = Evd.from_env env0 in
let (rec_sign,rec_impls) =
List.fold_left
- (fun (env,impls) (({CAst.v=recname},_),bl,arityc,_) ->
- let arityc = Constrexpr_ops.mkCProdN bl arityc in
+ (fun (env,impls) { Vernacexpr.fname={CAst.v=recname}; binders; rtype } ->
+ let arityc = Constrexpr_ops.mkCProdN binders rtype in
let arity,ctx = Constrintern.interp_type env0 sigma arityc in
let evd = Evd.from_env env0 in
- let evd, (_, (_, impls')) = Constrintern.interp_context_evars ~program_mode:false env evd bl in
+ let evd, (_, (_, impls')) = Constrintern.interp_context_evars ~program_mode:false env evd binders in
let impl = Constrintern.compute_internalization_data env0 evd Constrintern.Recursive arity impls' in
let open Context.Named.Declaration in
let r = Sorts.Relevant in (* TODO relevance *)
@@ -175,26 +184,18 @@ let build_newrecursive
(env0,Constrintern.empty_internalization_env) lnameargsardef in
let recdef =
(* Declare local notations *)
- let f (_,bl,_,def) =
- let def = abstract_glob_constr def bl in
- interp_casted_constr_with_implicits
- rec_sign sigma rec_impls def
+ let f { Vernacexpr.binders; body_def } =
+ match body_def with
+ | Some body_def ->
+ let def = abstract_glob_constr body_def binders in
+ interp_casted_constr_with_implicits
+ rec_sign sigma rec_impls def
+ | None -> user_err ~hdr:"Function" (str "Body of Function must be given")
in
States.with_state_protection (List.map f) lnameargsardef
in
recdef,rec_impls
-let build_newrecursive l =
- let l' = List.map
- (fun ((fixna,_,bll,ar,body_opt),lnot) ->
- match body_opt with
- | Some body ->
- (fixna,bll,ar,body)
- | None -> user_err ~hdr:"Function" (str "Body of Function must be given")
- ) l
- in
- build_newrecursive l'
-
let error msg = user_err Pp.(str msg)
(* Checks whether or not the mutual bloc is recursive *)
@@ -237,8 +238,8 @@ let rec local_binders_length = function
| Constrexpr.CLocalAssum (idl,_,_)::bl -> List.length idl + local_binders_length bl
| Constrexpr.CLocalPattern _::bl -> assert false
-let prepare_body ((name,_,args,types,_),_) rt =
- let n = local_binders_length args in
+let prepare_body { Vernacexpr.binders; rtype } rt =
+ let n = local_binders_length binders in
(* Pp.msgnl (str "nb lambda to chop : " ++ str (string_of_int n) ++ fnl () ++Printer.pr_glob_constr rt); *)
let fun_args,rt' = chop_rlambda_n n rt in
(fun_args,rt')
@@ -285,7 +286,6 @@ let derive_inversion fix_names =
(evd',[])
in
Invfun.derive_correctness
- Functional_principles_types.make_scheme
fix_names_as_constant
lind;
with e when CErrors.noncritical e ->
@@ -337,13 +337,13 @@ let error_error names e =
| _ -> raise e
let generate_principle (evd:Evd.evar_map ref) pconstants on_error
- is_general do_built (fix_rec_l:(Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) list) recdefs interactive_proof
+ is_general do_built (fix_rec_l : Vernacexpr.fixpoint_expr list) recdefs interactive_proof
(continue_proof : int -> Names.Constant.t array -> EConstr.constr array -> int ->
Tacmach.tactic) : unit =
- let names = List.map (function (({CAst.v=name},_),_,_,_,_),_ -> name) fix_rec_l in
+ let names = List.map (function { Vernacexpr.fname = {CAst.v=name} } -> name) fix_rec_l in
let fun_bodies = List.map2 prepare_body fix_rec_l recdefs in
let funs_args = List.map fst fun_bodies in
- let funs_types = List.map (function ((_,_,_,types,_),_) -> types) fix_rec_l in
+ let funs_types = List.map (function { Vernacexpr.rtype } -> rtype) fix_rec_l in
try
(* We then register the Inductive graphs of the functions *)
Glob_term_to_relation.build_inductive !evd pconstants funs_args funs_types recdefs;
@@ -360,7 +360,7 @@ let generate_principle (evd:Evd.evar_map ref) pconstants on_error
locate_ind
f_R_mut)
in
- let fname_kn (((fname,_),_,_,_,_),_) =
+ let fname_kn { Vernacexpr.fname } =
let f_ref = qualid_of_ident ?loc:fname.CAst.loc fname.CAst.v in
locate_with_msg
(pr_qualid f_ref++str ": Not an inductive type!")
@@ -399,23 +399,25 @@ let generate_principle (evd:Evd.evar_map ref) pconstants on_error
with e when CErrors.noncritical e ->
on_error names e
-let register_struct is_rec (fixpoint_exprl:(Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) list) =
+let register_struct is_rec (fixpoint_exprl: Vernacexpr.fixpoint_expr list) =
match fixpoint_exprl with
- | [(({CAst.v=fname},pl),_,bl,ret_type,body),_] when not is_rec ->
- let body = match body with | Some body -> body | None -> user_err ~hdr:"Function" (str "Body of Function must be given") in
+ | [ { Vernacexpr.fname; univs; binders; rtype; body_def } ] when not is_rec ->
+ let body = match body_def with
+ | Some body -> body
+ | None -> user_err ~hdr:"Function" (str "Body of Function must be given") in
ComDefinition.do_definition
~program_mode:false
- ~name:fname
+ ~name:fname.CAst.v
~poly:false
~scope:(DeclareDef.Global Declare.ImportDefaultBehavior)
- ~kind:Decls.Definition pl
- bl None body (Some ret_type);
+ ~kind:Decls.Definition univs
+ binders None body (Some rtype);
let evd,rev_pconstants =
List.fold_left
- (fun (evd,l) ((({CAst.v=fname},_),_,_,_,_),_) ->
+ (fun (evd,l) { Vernacexpr.fname } ->
let evd,c =
Evd.fresh_global
- (Global.env ()) evd (Constrintern.locate_reference (Libnames.qualid_of_ident fname)) in
+ (Global.env ()) evd (Constrintern.locate_reference (Libnames.qualid_of_ident fname.CAst.v)) in
let (cst, u) = destConst evd c in
let u = EInstance.kind evd u in
evd,((cst, u) :: l)
@@ -428,10 +430,10 @@ let register_struct is_rec (fixpoint_exprl:(Vernacexpr.fixpoint_expr * Vernacexp
ComFixpoint.do_fixpoint ~scope:(DeclareDef.Global Declare.ImportDefaultBehavior) ~poly:false fixpoint_exprl;
let evd,rev_pconstants =
List.fold_left
- (fun (evd,l) ((({CAst.v=fname},_),_,_,_,_),_) ->
+ (fun (evd,l) { Vernacexpr.fname } ->
let evd,c =
Evd.fresh_global
- (Global.env ()) evd (Constrintern.locate_reference (Libnames.qualid_of_ident fname)) in
+ (Global.env ()) evd (Constrintern.locate_reference (Libnames.qualid_of_ident fname.CAst.v)) in
let (cst, u) = destConst evd c in
let u = EInstance.kind evd u in
evd,((cst, u) :: l)
@@ -465,7 +467,7 @@ let register_wf interactive_proof ?(is_mes=false) fname rec_impls wf_rel_expr wf
let unbounded_eq =
let f_app_args =
CAst.make @@ Constrexpr.CAppExpl(
- (None,qualid_of_ident fname,None) ,
+ (None,qualid_of_ident fname.CAst.v,None) ,
(List.map
(function
| {CAst.v=Anonymous} -> assert false
@@ -486,13 +488,13 @@ let register_wf interactive_proof ?(is_mes=false) fname rec_impls wf_rel_expr wf
(generate_correction_proof_wf f_ref tcc_lemma_ref is_mes
functional_ref eq_ref rec_arg_num rec_arg_type nb_args relation
);
- derive_inversion [fname]
+ derive_inversion [fname.CAst.v]
with e when CErrors.noncritical e ->
(* No proof done *)
()
in
Recdef.recursive_definition ~interactive_proof
- ~is_mes fname rec_impls
+ ~is_mes fname.CAst.v rec_impls
type_of_f
wf_rel_expr
rec_arg_num
@@ -608,88 +610,93 @@ and rebuild_nal aux bk bl' nal typ =
let rebuild_bl aux bl typ = rebuild_bl aux bl typ
-let recompute_binder_list (fixpoint_exprl : (Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) list) =
- let fixl,ntns = ComFixpoint.extract_fixpoint_components ~structonly:false fixpoint_exprl in
- let ((_,_,_,typel),_,ctx,_) = ComFixpoint.interp_fixpoint ~cofix:false fixl ntns in
+let recompute_binder_list fixpoint_exprl =
+ let fixl =
+ List.map (fun fix -> Vernacexpr.{
+ fix
+ with rec_order = ComFixpoint.adjust_rec_order ~structonly:false fix.binders fix.rec_order }) fixpoint_exprl in
+ let ((_,_,_,typel),_,ctx,_) = ComFixpoint.interp_fixpoint ~cofix:false fixl in
let constr_expr_typel =
with_full_print (List.map (fun c -> Constrextern.extern_constr false (Global.env ()) (Evd.from_ctx ctx) (EConstr.of_constr c))) typel in
let fixpoint_exprl_with_new_bl =
- List.map2 (fun ((lna,rec_order_opt,bl,ret_typ,opt_body),notation_list) fix_typ ->
-
- let new_bl',new_ret_type = rebuild_bl [] bl fix_typ in
- (((lna,rec_order_opt,new_bl',new_ret_type,opt_body),notation_list):(Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list))
- )
- fixpoint_exprl constr_expr_typel
+ List.map2 (fun ({ Vernacexpr.binders } as fp) fix_typ ->
+ let binders, rtype = rebuild_bl [] binders fix_typ in
+ { fp with Vernacexpr.binders; rtype }
+ ) fixpoint_exprl constr_expr_typel
in
fixpoint_exprl_with_new_bl
let do_generate_principle_aux pconstants on_error register_built interactive_proof
- (fixpoint_exprl:(Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) list) : Lemmas.t option =
- List.iter (fun (_,l) -> if not (List.is_empty l) then error "Function does not support notations for now") fixpoint_exprl;
+ (fixpoint_exprl : Vernacexpr.fixpoint_expr list) : Lemmas.t option =
+ List.iter (fun { Vernacexpr.notations } ->
+ if not (List.is_empty notations)
+ then error "Function does not support notations for now") fixpoint_exprl;
let lemma, _is_struct =
match fixpoint_exprl with
- | [((_,Some {CAst.v = Constrexpr.CWfRec (wf_x,wf_rel)},_,_,_),_) as fixpoint_expr] ->
- let (((({CAst.v=name},pl),_,args,types,body)),_) as fixpoint_expr =
- match recompute_binder_list [fixpoint_expr] with
- | [e] -> e
- | _ -> assert false
- in
- let fixpoint_exprl = [fixpoint_expr] in
- let body = match body with | Some body -> body | None -> user_err ~hdr:"Function" (str "Body of Function must be given") in
- let recdefs,rec_impls = build_newrecursive fixpoint_exprl in
- let using_lemmas = [] in
- let pre_hook pconstants =
- generate_principle
- (ref (Evd.from_env (Global.env ())))
- pconstants
- on_error
- true
- register_built
- fixpoint_exprl
- recdefs
- true
- in
- if register_built
- then register_wf interactive_proof name rec_impls wf_rel wf_x.CAst.v using_lemmas args types body pre_hook, false
- else None, false
- |[((_,Some {CAst.v = Constrexpr.CMeasureRec(wf_x,wf_mes,wf_rel_opt)},_,_,_),_) as fixpoint_expr] ->
- let (((({CAst.v=name},_),_,args,types,body)),_) as fixpoint_expr =
- match recompute_binder_list [fixpoint_expr] with
- | [e] -> e
- | _ -> assert false
- in
- let fixpoint_exprl = [fixpoint_expr] in
- let recdefs,rec_impls = build_newrecursive fixpoint_exprl in
- let using_lemmas = [] in
- let body = match body with | Some body -> body | None -> user_err ~hdr:"Function" (str "Body of Function must be given") in
- let pre_hook pconstants =
- generate_principle
- (ref (Evd.from_env (Global.env ())))
- pconstants
- on_error
- true
- register_built
- fixpoint_exprl
- recdefs
- true
- in
- if register_built
- then register_mes interactive_proof name rec_impls wf_mes wf_rel_opt (map_option (fun x -> x.CAst.v) wf_x) using_lemmas args types body pre_hook, true
- else None, true
+ | [{ Vernacexpr.rec_order = Some {CAst.v = Constrexpr.CWfRec (wf_x,wf_rel)} } as fixpoint_expr] ->
+ let { Vernacexpr.fname; univs; binders; rtype; body_def } as fixpoint_expr =
+ match recompute_binder_list [fixpoint_expr] with
+ | [e] -> e
+ | _ -> assert false
+ in
+ let fixpoint_exprl = [fixpoint_expr] in
+ let body = match body_def with
+ | Some body -> body
+ | None -> user_err ~hdr:"Function" (str "Body of Function must be given") in
+ let recdefs,rec_impls = build_newrecursive fixpoint_exprl in
+ let using_lemmas = [] in
+ let pre_hook pconstants =
+ generate_principle
+ (ref (Evd.from_env (Global.env ())))
+ pconstants
+ on_error
+ true
+ register_built
+ fixpoint_exprl
+ recdefs
+ true
+ in
+ if register_built
+ then register_wf interactive_proof fname rec_impls wf_rel wf_x.CAst.v using_lemmas binders rtype body pre_hook, false
+ else None, false
+ |[{ Vernacexpr.rec_order=Some {CAst.v = Constrexpr.CMeasureRec(wf_x,wf_mes,wf_rel_opt)} } as fixpoint_expr] ->
+ let { Vernacexpr.fname; univs; binders; rtype; body_def} as fixpoint_expr =
+ match recompute_binder_list [fixpoint_expr] with
+ | [e] -> e
+ | _ -> assert false
+ in
+ let fixpoint_exprl = [fixpoint_expr] in
+ let recdefs,rec_impls = build_newrecursive fixpoint_exprl in
+ let using_lemmas = [] in
+ let body = match body_def with
+ | Some body -> body
+ | None -> user_err ~hdr:"Function" (str "Body of Function must be given") in
+ let pre_hook pconstants =
+ generate_principle
+ (ref (Evd.from_env (Global.env ())))
+ pconstants
+ on_error
+ true
+ register_built
+ fixpoint_exprl
+ recdefs
+ true
+ in
+ if register_built
+ then register_mes interactive_proof fname rec_impls wf_mes wf_rel_opt (map_option (fun x -> x.CAst.v) wf_x) using_lemmas binders rtype body pre_hook, true
+ else None, true
| _ ->
- List.iter (function ((_na,ord,_args,_body,_type),_not) ->
- match ord with
- | Some { CAst.v = (Constrexpr.CMeasureRec _ | Constrexpr.CWfRec _) } ->
- error
- ("Cannot use mutual definition with well-founded recursion or measure")
- | _ -> ()
+ List.iter (function { Vernacexpr.rec_order } ->
+ match rec_order with
+ | Some { CAst.v = (Constrexpr.CMeasureRec _ | Constrexpr.CWfRec _) } ->
+ error
+ ("Cannot use mutual definition with well-founded recursion or measure")
+ | _ -> ()
)
fixpoint_exprl;
let fixpoint_exprl = recompute_binder_list fixpoint_exprl in
- let fix_names =
- List.map (function ((({CAst.v=name},_),_,_,_,_),_) -> name) fixpoint_exprl
- in
+ let fix_names = List.map (function { Vernacexpr.fname } -> fname.CAst.v) fixpoint_exprl in
(* ok all the expressions are structural *)
let recdefs,rec_impls = build_newrecursive fixpoint_exprl in
let is_rec = List.exists (is_rec fix_names) recdefs in
@@ -846,59 +853,59 @@ let make_graph (f_ref : GlobRef.t) =
| None -> error "Cannot build a graph over an axiom!"
| Some (body, _, _) ->
let env = Global.env () in
- let extern_body,extern_type =
- with_full_print (fun () ->
- (Constrextern.extern_constr false env sigma (EConstr.of_constr body),
- Constrextern.extern_type false env sigma
- (EConstr.of_constr (*FIXME*) c_body.const_type)
- )
+ let extern_body,extern_type =
+ with_full_print (fun () ->
+ (Constrextern.extern_constr false env sigma (EConstr.of_constr body),
+ Constrextern.extern_type false env sigma
+ (EConstr.of_constr (*FIXME*) c_body.const_type)
)
- ()
- in
- let (nal_tas,b,t) = get_args extern_body extern_type in
- let expr_list =
- match b.CAst.v with
- | Constrexpr.CFix(l_id,fixexprl) ->
- let l =
- List.map
- (fun (id,recexp,bl,t,b) ->
- let { CAst.loc; v=rec_id } = match Option.get recexp with
- | { CAst.v = CStructRec id } -> id
- | { CAst.v = CWfRec (id,_) } -> id
- | { CAst.v = CMeasureRec (oid,_,_) } -> Option.get oid
- in
- let new_args =
- List.flatten
- (List.map
- (function
- | Constrexpr.CLocalDef (na,_,_)-> []
- | Constrexpr.CLocalAssum (nal,_,_) ->
- List.map
- (fun {CAst.loc;v=n} -> CAst.make ?loc @@
- CRef(Libnames.qualid_of_ident ?loc @@ Nameops.Name.get_id n,None))
- nal
- | Constrexpr.CLocalPattern _ -> assert false
- )
- nal_tas
- )
- in
- let b' = add_args id.CAst.v new_args b in
- ((((id,None), ( Some (CAst.make (CStructRec (CAst.make rec_id)))),nal_tas@bl,t,Some b'),[]):(Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list))
- )
- fixexprl
+ ) ()
+ in
+ let (nal_tas,b,t) = get_args extern_body extern_type in
+ let expr_list =
+ match b.CAst.v with
+ | Constrexpr.CFix(l_id,fixexprl) ->
+ let l =
+ List.map
+ (fun (id,recexp,bl,t,b) ->
+ let { CAst.loc; v=rec_id } = match Option.get recexp with
+ | { CAst.v = CStructRec id } -> id
+ | { CAst.v = CWfRec (id,_) } -> id
+ | { CAst.v = CMeasureRec (oid,_,_) } -> Option.get oid
+ in
+ let new_args =
+ List.flatten
+ (List.map
+ (function
+ | Constrexpr.CLocalDef (na,_,_)-> []
+ | Constrexpr.CLocalAssum (nal,_,_) ->
+ List.map
+ (fun {CAst.loc;v=n} -> CAst.make ?loc @@
+ CRef(Libnames.qualid_of_ident ?loc @@ Nameops.Name.get_id n,None))
+ nal
+ | Constrexpr.CLocalPattern _ -> assert false
+ )
+ nal_tas
+ )
in
- l
- | _ ->
- let id = Label.to_id (Constant.label c) in
- [((CAst.make id,None),None,nal_tas,t,Some b),[]]
- in
- let mp = Constant.modpath c in
- let pstate = do_generate_principle_aux [c,Univ.Instance.empty] error_error false false expr_list in
- assert (Option.is_empty pstate);
- (* We register the infos *)
- List.iter
- (fun ((({CAst.v=id},_),_,_,_,_),_) -> add_Function false (Constant.make2 mp (Label.of_id id)))
- expr_list)
+ let b' = add_args id.CAst.v new_args b in
+ { Vernacexpr.fname=id; univs=None
+ ; rec_order = Some (CAst.make (CStructRec (CAst.make rec_id)))
+ ; binders = nal_tas@bl; rtype=t; body_def=Some b'; notations = []}
+ ) fixexprl in
+ l
+ | _ ->
+ let fname = CAst.make (Label.to_id (Constant.label c)) in
+ [{ Vernacexpr.fname; univs=None; rec_order = None; binders=nal_tas; rtype=t; body_def=Some b; notations=[]}]
+ in
+ let mp = Constant.modpath c in
+ let pstate = do_generate_principle_aux [c,Univ.Instance.empty] error_error false false expr_list in
+ assert (Option.is_empty pstate);
+ (* We register the infos *)
+ List.iter
+ (fun { Vernacexpr.fname= {CAst.v=id} } ->
+ add_Function false (Constant.make2 mp (Label.of_id id)))
+ expr_list)
(* *************** statically typed entrypoints ************************* *)
diff --git a/plugins/funind/indfun.mli b/plugins/funind/indfun.mli
index 3bc52272ac..bfc9686ae5 100644
--- a/plugins/funind/indfun.mli
+++ b/plugins/funind/indfun.mli
@@ -5,12 +5,9 @@ val warn_cannot_define_graph : ?loc:Loc.t -> Pp.t * Pp.t -> unit
val warn_cannot_define_principle : ?loc:Loc.t -> Pp.t * Pp.t -> unit
-val do_generate_principle :
- (Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) list -> unit
+val do_generate_principle : Vernacexpr.fixpoint_expr list -> unit
-val do_generate_principle_interactive :
- (Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) list ->
- Lemmas.t
+val do_generate_principle_interactive : Vernacexpr.fixpoint_expr list -> Lemmas.t
val functional_induction :
bool ->
diff --git a/plugins/funind/invfun.ml b/plugins/funind/invfun.ml
index d4cc31c0af..f6b5a06cac 100644
--- a/plugins/funind/invfun.ml
+++ b/plugins/funind/invfun.ml
@@ -736,11 +736,9 @@ let prove_fun_complete funcs graphs schemes lemmas_types_infos i : Tacmach.tacti
(* [derive_correctness make_scheme funs graphs] create correctness and completeness
lemmas for each function in [funs] w.r.t. [graphs]
-
- [make_scheme] is Functional_principle_types.make_scheme (dependency pb) and
*)
-let derive_correctness make_scheme (funs: pconstant list) (graphs:inductive list) =
+let derive_correctness (funs: pconstant list) (graphs:inductive list) =
assert (funs <> []);
assert (graphs <> []);
let funs = Array.of_list funs and graphs = Array.of_list graphs in
@@ -786,7 +784,7 @@ let derive_correctness make_scheme (funs: pconstant list) (graphs:inductive list
(fun entry ->
(EConstr.of_constr (fst (fst(Future.force entry.Proof_global.proof_entry_body))), EConstr.of_constr (Option.get entry.Proof_global.proof_entry_type ))
)
- (make_scheme evd (Array.map_to_list (fun const -> const,Sorts.InType) funs))
+ (Functional_principles_types.make_scheme evd (Array.map_to_list (fun const -> const,Sorts.InType) funs))
)
)
in
diff --git a/plugins/funind/invfun.mli b/plugins/funind/invfun.mli
index 96601785b6..c7538fae9a 100644
--- a/plugins/funind/invfun.mli
+++ b/plugins/funind/invfun.mli
@@ -12,8 +12,8 @@ val invfun :
Tactypes.quantified_hypothesis ->
Names.GlobRef.t option ->
Evar.t Evd.sigma -> Evar.t list Evd.sigma
-val derive_correctness :
- (Evd.evar_map ref ->
- (Constr.pconstant * Sorts.family) list ->
- 'a Proof_global.proof_entry list) ->
- Constr.pconstant list -> Names.inductive list -> unit
+
+val derive_correctness
+ : Constr.pconstant list
+ -> Names.inductive list
+ -> unit
diff --git a/plugins/ltac/extratactics.mlg b/plugins/ltac/extratactics.mlg
index 1e2b23bf96..21d61d1f97 100644
--- a/plugins/ltac/extratactics.mlg
+++ b/plugins/ltac/extratactics.mlg
@@ -17,7 +17,6 @@ open Genarg
open Stdarg
open Tacarg
open Extraargs
-open Pcoq.Prim
open Pltac
open Mod_subst
open Names
@@ -258,19 +257,8 @@ END
open Autorewrite
-let pr_orient _prc _prlc _prt = function
- | true -> Pp.mt ()
- | false -> Pp.str " <-"
-
-let pr_orient_string _prc _prlc _prt (orient, s) =
- pr_orient _prc _prlc _prt orient ++ Pp.spc () ++ Pp.str s
-
}
-ARGUMENT EXTEND orient_string TYPED AS (bool * string) PRINTED BY { pr_orient_string }
-| [ orient(r) preident(i) ] -> { r, i }
-END
-
TACTIC EXTEND autorewrite
| [ "autorewrite" "with" ne_preident_list(l) clause(cl) ] ->
{ auto_multi_rewrite l ( cl) }
diff --git a/plugins/ssr/ssrparser.mlg b/plugins/ssr/ssrparser.mlg
index 858a75698a..962730d8dc 100644
--- a/plugins/ssr/ssrparser.mlg
+++ b/plugins/ssr/ssrparser.mlg
@@ -68,6 +68,25 @@ DECLARE PLUGIN "ssreflect_plugin"
{
+let ssrtac_name name = {
+ mltac_plugin = "ssreflect_plugin";
+ mltac_tactic = "ssr" ^ name;
+}
+
+let ssrtac_entry name = {
+ mltac_name = ssrtac_name name;
+ mltac_index = 0;
+}
+
+let register_ssrtac name f =
+ Tacenv.register_ml_tactic (ssrtac_name name) [|f|]
+
+let cast_arg wit v = Taccoerce.Value.cast (Genarg.topwit wit) v
+
+}
+
+{
+
(* Defining grammar rules with "xx" in it automatically declares keywords too,
* we thus save the lexer to restore it at the end of the file *)
let frozen_lexer = CLexer.get_keyword_state () ;;
@@ -366,7 +385,6 @@ open Pltac
ARGUMENT EXTEND ssrindex PRINTED BY { pr_ssrindex }
INTERPRETED BY { interp_index }
-| [ int_or_var(i) ] -> { mk_index ~loc i }
END
@@ -504,7 +522,6 @@ ARGUMENT EXTEND ssrterm
GLOBALIZED BY { glob_ssrterm } SUBSTITUTED BY { subst_ssrterm }
RAW_PRINTED BY { pr_ssrterm }
GLOB_PRINTED BY { pr_ssrterm }
-| [ "YouShouldNotTypeThis" constr(c) ] -> { mk_lterm c }
END
GRAMMAR EXTEND Gram
@@ -551,7 +568,6 @@ let pr_ssrbwdview _ _ _ = pr_view
ARGUMENT EXTEND ssrbwdview TYPED AS ssrterm list
PRINTED BY { pr_ssrbwdview }
-| [ "YouShouldNotTypeThis" ] -> { [] }
END
(* Pcoq *)
@@ -575,7 +591,6 @@ let pr_ssrfwdview _ _ _ = pr_view2
ARGUMENT EXTEND ssrfwdview TYPED AS ast_closure_term list
PRINTED BY { pr_ssrfwdview }
-| [ "YouShouldNotTypeThis" ] -> { [] }
END
(* Pcoq *)
@@ -743,7 +758,6 @@ let test_ident_no_do =
}
ARGUMENT EXTEND ident_no_do PRINTED BY { fun _ _ _ -> Names.Id.print }
-| [ "YouShouldNotTypeThis" ident(id) ] -> { id }
END
@@ -838,7 +852,6 @@ let _test_nobinder = Pcoq.Entry.of_parser "test_nobinder" (reject_binder false 0
}
ARGUMENT EXTEND ssrcpat TYPED AS ssripatrep PRINTED BY { pr_ssripat }
- | [ "YouShouldNotTypeThis" ssriorpat(x) ] -> { IPatCase(Regular x) }
END
(* Pcoq *)
@@ -966,17 +979,19 @@ let pr_ssrintrosarg env sigma _ _ prt (tac, ipats) =
ARGUMENT EXTEND ssrintrosarg TYPED AS (tactic * ssrintros)
PRINTED BY { pr_ssrintrosarg env sigma }
-| [ "YouShouldNotTypeThis" ssrtacarg(arg) ssrintros_ne(ipats) ] -> { arg, ipats }
-END
-
-TACTIC EXTEND ssrtclintros
-| [ "YouShouldNotTypeThis" ssrintrosarg(arg) ] ->
- { let tac, intros = arg in
- ssrevaltac ist tac <*> tclIPATssr intros }
END
{
+let () = register_ssrtac "tclintros" begin fun args ist -> match args with
+| [arg] ->
+ let arg = cast_arg wit_ssrintrosarg arg in
+ let tac, intros = arg in
+ ssrevaltac ist tac <*> tclIPATssr intros
+| _ -> assert false
+end
+
+
(** Defined identifier *)
let pr_ssrfwdid id = pr_spc () ++ pr_id id
@@ -1689,28 +1704,10 @@ let _ = add_internal_name (is_tagged perm_tag)
(** Tactical extensions. *)
-(* The TACTIC EXTEND facility can't be used for defining new user *)
-(* tacticals, because: *)
-(* - the concrete syntax must start with a fixed string *)
-(* We use the following workaround: *)
-(* - We use the (unparsable) "YouShouldNotTypeThis" token for tacticals that *)
-(* don't start with a token, then redefine the grammar and *)
-(* printer using GEXTEND and set_pr_ssrtac, respectively. *)
-
{
type ssrargfmt = ArgSsr of string | ArgSep of string
-let ssrtac_name name = {
- mltac_plugin = "ssreflect_plugin";
- mltac_tactic = "ssr" ^ name;
-}
-
-let ssrtac_entry name n = {
- mltac_name = ssrtac_name name;
- mltac_index = n;
-}
-
let set_pr_ssrtac name prec afmt = (* FIXME *) () (*
let fmt = List.map (function
| ArgSep s -> Egramml.GramTerminal s
@@ -1718,8 +1715,7 @@ let set_pr_ssrtac name prec afmt = (* FIXME *) () (*
| ArgCoq at -> Egramml.GramTerminal "COQ_ARG") afmt in
let tacname = ssrtac_name name in () *)
-let ssrtac_atom ?loc name args = TacML (CAst.make ?loc (ssrtac_entry name 0, args))
-let ssrtac_expr ?loc name args = ssrtac_atom ?loc name args
+let ssrtac_expr ?loc name args = TacML (CAst.make ?loc (ssrtac_entry name, args))
let tclintros_expr ?loc tac ipats =
let args = [Tacexpr.TacGeneric (in_gen (rawwit wit_ssrintrosarg) (tac, ipats))] in
@@ -1802,15 +1798,15 @@ END
(** The "do" tactical. ********************************************************)
-(*
-type ssrdoarg = ((ssrindex * ssrmmod) * ssrhint) * ssrclauses
-*)
-TACTIC EXTEND ssrtcldo
-| [ "YouShouldNotTypeThis" "do" ssrdoarg(arg) ] -> { V82.tactic (ssrdotac ist arg) }
-END
-
{
+let () = register_ssrtac "tcldo" begin fun args ist -> match args with
+| [arg] ->
+ let arg = cast_arg wit_ssrdoarg arg in
+ V82.tactic (ssrdotac ist arg)
+| _ -> assert false
+end
+
let _ = set_pr_ssrtac "tcldo" 3 [ArgSep "do "; ArgSsr "doarg"]
let ssrdotac_expr ?loc n m tac clauses =
@@ -1849,13 +1845,17 @@ let pr_ssrseqdir _ _ _ = function
ARGUMENT EXTEND ssrseqdir TYPED AS ssrdir PRINTED BY { pr_ssrseqdir }
END
-TACTIC EXTEND ssrtclseq
-| [ "YouShouldNotTypeThis" ssrtclarg(tac) ssrseqdir(dir) ssrseqarg(arg) ] ->
- { V82.tactic (tclSEQAT ist tac dir arg) }
-END
-
{
+let () = register_ssrtac "tclseq" begin fun args ist -> match args with
+| [tac; dir; arg] ->
+ let tac = cast_arg wit_ssrtclarg tac in
+ let dir = cast_arg wit_ssrseqdir dir in
+ let arg = cast_arg wit_ssrseqarg arg in
+ V82.tactic (tclSEQAT ist tac dir arg)
+| _ -> assert false
+end
+
let _ = set_pr_ssrtac "tclseq" 5 [ArgSsr "tclarg"; ArgSsr "seqdir"; ArgSsr "seqarg"]
let tclseq_expr ?loc tac dir arg =
@@ -2228,8 +2228,6 @@ END
(** The "congr" tactic *)
-(* type ssrcongrarg = open_constr * (int * constr) *)
-
{
let pr_ssrcongrarg _ _ _ ((n, f), dgens) =
diff --git a/pretyping/evarconv.ml b/pretyping/evarconv.ml
index a82eff9cf0..be21a3a60d 100644
--- a/pretyping/evarconv.ml
+++ b/pretyping/evarconv.ml
@@ -1310,27 +1310,27 @@ let set_of_evctx l =
(** Weaken the existentials so that they can be typed in sign and raise
an error if the term otherwise mentions variables not bound in sign. *)
let thin_evars env sigma sign c =
- let evdref = ref sigma in
+ let sigma = ref sigma in
let ctx = set_of_evctx sign in
let rec applyrec (env,acc) t =
- match kind sigma t with
+ match kind !sigma t with
| Evar (ev, args) ->
- let evi = Evd.find_undefined sigma ev in
- let filter = Array.map (fun c -> Id.Set.subset (collect_vars sigma c) ctx) args in
+ let evi = Evd.find_undefined !sigma ev in
+ let filter = Array.map (fun c -> Id.Set.subset (collect_vars !sigma c) ctx) args in
let filter = Filter.make (Array.to_list filter) in
let candidates = Option.map (List.map EConstr.of_constr) (evar_candidates evi) in
- let evd, ev = restrict_evar !evdref ev filter candidates in
- evdref := evd; whd_evar !evdref t
+ let evd, ev = restrict_evar !sigma ev filter candidates in
+ sigma := evd; whd_evar !sigma t
| Var id ->
- if not (Id.Set.mem id ctx) then raise (TypingFailed sigma)
+ if not (Id.Set.mem id ctx) then raise (TypingFailed !sigma)
else t
| _ ->
- map_constr_with_binders_left_to_right !evdref
+ map_constr_with_binders_left_to_right !sigma
(fun d (env,acc) -> (push_rel d env, acc+1))
applyrec (env,acc) t
in
let c' = applyrec (env,0) c in
- (!evdref, c')
+ (!sigma, c')
let second_order_matching flags env_rhs evd (evk,args) (test,argoccs) rhs =
try
diff --git a/pretyping/tacred.ml b/pretyping/tacred.ml
index 6fdceb929a..866c0da555 100644
--- a/pretyping/tacred.ml
+++ b/pretyping/tacred.ml
@@ -1103,6 +1103,15 @@ let string_of_evaluable_ref env = function
string_of_qualid
(Nametab.shortest_qualid_of_global (vars_of_env env) (GlobRef.ConstRef kn))
+(* Removing fZETA for finer behaviour would break many developments *)
+let unfold_side_flags = RedFlags.[fBETA;fMATCH;fFIX;fCOFIX;fZETA]
+let unfold_side_red = RedFlags.(mkflags [fBETA;fMATCH;fFIX;fCOFIX;fZETA])
+let unfold_red kn =
+ let flag = match kn with
+ | EvalVarRef id -> RedFlags.fVAR id
+ | EvalConstRef kn -> RedFlags.fCONST kn in
+ RedFlags.mkflags (flag::unfold_side_flags)
+
let unfold env sigma name c =
if is_evaluable env name then
clos_norm_flags (unfold_red name) env sigma c
diff --git a/pretyping/unification.ml b/pretyping/unification.ml
index 00831b5962..a9eb43e573 100644
--- a/pretyping/unification.ml
+++ b/pretyping/unification.ml
@@ -839,8 +839,9 @@ let rec unify_0_with_initial_metas (sigma,ms,es as subst : subst0) conv_at_top e
unify_app_pattern true curenvnb pb opt substn cM f1 l1 cN f2 l2
| _ -> raise ex)
- | Case (_,p1,c1,cl1), Case (_,p2,c2,cl2) ->
- (try
+ | Case (ci1,p1,c1,cl1), Case (ci2,p2,c2,cl2) ->
+ (try
+ if not (eq_ind ci1.ci_ind ci2.ci_ind) then error_cannot_unify curenv sigma (cM,cN);
let opt' = {opt with at_top = true; with_types = false} in
Array.fold_left2 (unirec_rec curenvnb CONV {opt with at_top = true})
(unirec_rec curenvnb CONV opt'
diff --git a/printing/printer.ml b/printing/printer.ml
index 97b3233d12..ec1b9b8e49 100644
--- a/printing/printer.ml
+++ b/printing/printer.ml
@@ -825,9 +825,9 @@ let pr_nth_open_subgoal ~proof n =
let pr_goal_by_id ~proof id =
try
- Proof.in_proof proof (fun sigma ->
- let g = Evd.evar_key id sigma in
- pr_selected_subgoal (pr_id id) sigma g)
+ let { Proof.sigma } = Proof.data proof in
+ let g = Evd.evar_key id sigma in
+ pr_selected_subgoal (pr_id id) sigma g
with Not_found -> user_err Pp.(str "No such goal.")
(** print a goal identified by the goal id as it appears in -emacs mode.
@@ -843,7 +843,8 @@ let pr_goal_emacs ~proof gid sid =
++ pr_goal gs)
in
try
- Proof.in_proof proof (fun sigma -> pr {it=(Evar.unsafe_of_int gid);sigma=sigma;})
+ let { Proof.sigma } = Proof.data proof in
+ pr { it = Evar.unsafe_of_int gid ; sigma }
with Not_found -> user_err Pp.(str "No such goal.")
(* Printer function for sets of Assumptions.assumptions.
diff --git a/proofs/pfedit.ml b/proofs/pfedit.ml
index ed60b8274a..99a254652c 100644
--- a/proofs/pfedit.ml
+++ b/proofs/pfedit.ml
@@ -61,8 +61,8 @@ let get_current_context pf =
with
| NoSuchGoal ->
(* No more focused goals *)
- let evd = Proof.in_proof p (fun x -> x) in
- evd, Global.env ()
+ let { Proof.sigma } = Proof.data p in
+ sigma, Global.env ()
let solve ?with_end_tac gi info_lvl tac pr =
let tac = match with_end_tac with
diff --git a/proofs/proof.ml b/proofs/proof.ml
index 9f2c90c375..5f07cc1acc 100644
--- a/proofs/proof.ml
+++ b/proofs/proof.ml
@@ -398,8 +398,6 @@ let run_tactic env tac pr =
(*** Commands ***)
-let in_proof p k = k (Proofview.return p.proofview)
-
(* Remove all the goals from the shelf and adds them at the end of the
focused goals. *)
let unshelve p =
diff --git a/proofs/proof.mli b/proofs/proof.mli
index 7e535a258c..9973df492d 100644
--- a/proofs/proof.mli
+++ b/proofs/proof.mli
@@ -176,8 +176,6 @@ val maximal_unfocus : 'a focus_kind -> t -> t
(*** Commands ***)
-val in_proof : t -> (Evd.evar_map -> 'a) -> 'a
-
(* Remove all the goals from the shelf and adds them at the end of the
focused goals. *)
val unshelve : t -> t
diff --git a/proofs/proof_global.ml b/proofs/proof_global.ml
index ab8d87c100..851a3d1135 100644
--- a/proofs/proof_global.ml
+++ b/proofs/proof_global.ml
@@ -177,7 +177,8 @@ let close_proof ~opaque ~keep_body_ucst_separate ?feedback_id ~now
have existential variables in the initial types of goals, we need to
normalise them for the kernel. *)
let subst_evar k =
- Proof.in_proof proof (fun m -> Evd.existential_opt_value0 m k) in
+ let { Proof.sigma } = Proof.data proof in
+ Evd.existential_opt_value0 sigma k in
let nf = UnivSubst.nf_evars_and_universes_opt_subst subst_evar
(UState.subst universes) in
@@ -307,7 +308,7 @@ let close_proof ~opaque ~keep_body_ucst_separate fix_exn ps =
let update_global_env =
map_proof (fun p ->
- Proof.in_proof p (fun sigma ->
- let tac = Proofview.Unsafe.tclEVARS (Evd.update_sigma_env sigma (Global.env ())) in
- let p,(status,info),_ = Proof.run_tactic (Global.env ()) tac p in
- p))
+ let { Proof.sigma } = Proof.data p in
+ let tac = Proofview.Unsafe.tclEVARS (Evd.update_sigma_env sigma (Global.env ())) in
+ let p, (status,info), _ = Proof.run_tactic (Global.env ()) tac p in
+ p)
diff --git a/stm/stm.ml b/stm/stm.ml
index 9bbff476f8..69dbebbc57 100644
--- a/stm/stm.ml
+++ b/stm/stm.ml
@@ -118,15 +118,6 @@ include Hook
(* enables: Hooks.(call foo args) *)
let call = get
-let call_process_error_once =
- let processed : unit Exninfo.t = Exninfo.make () in
- fun (e, info) ->
- match Exninfo.get info processed with
- | Some _ -> e, info
- | None ->
- let info = Exninfo.add info processed () in
- e, info
-
end
let async_proofs_workers_extra_env = ref [||]
@@ -988,7 +979,6 @@ end = struct (* {{{ *)
| Some _ -> (e, info)
| None ->
let loc = Loc.get_loc info in
- let (e, info) = Hooks.(call_process_error_once (e, info)) in
execution_error ?loc id (iprint (e, info));
(e, Stateid.add info ~valid id)
@@ -1066,11 +1056,7 @@ end (* }}} *)
(* Wrapper for the proof-closing special path for Qed *)
let stm_qed_delay_proof ?route ~proof ~info ~id ~st ~loc pending : Vernacstate.t =
set_id_for_feedback ?route dummy_doc id;
- try
- Vernacentries.interp_qed_delayed_proof ~proof ~info ~st ?loc:loc pending
- with e ->
- let e = CErrors.push e in
- Exninfo.iraise Hooks.(call_process_error_once e)
+ Vernacentries.interp_qed_delayed_proof ~proof ~info ~st ?loc:loc pending
(* Wrapper for Vernacentries.interp to set the feedback id *)
(* It is currently called 19 times, this number should be certainly
@@ -1087,7 +1073,7 @@ let stm_vernac_interp ?route id st { verbose; expr } : Vernacstate.t =
*)
let is_filtered_command = function
| VernacResetName _ | VernacResetInitial | VernacBack _
- | VernacBackTo _ | VernacRestart | VernacUndo _ | VernacUndoTo _
+ | VernacRestart | VernacUndo _ | VernacUndoTo _
| VernacAbortAll | VernacAbort _ -> true
| _ -> false
in
@@ -1097,10 +1083,7 @@ let stm_vernac_interp ?route id st { verbose; expr } : Vernacstate.t =
(stm_pperr_endline Pp.(fun () -> str "ignoring " ++ Ppvernac.pr_vernac expr); st)
else begin
stm_pperr_endline Pp.(fun () -> str "interpreting " ++ Ppvernac.pr_vernac expr);
- try Vernacentries.interp ?verbosely:(Some verbose) ~st expr
- with e ->
- let e = CErrors.push e in
- Exninfo.iraise Hooks.(call_process_error_once e)
+ Vernacentries.interp ?verbosely:(Some verbose) ~st expr
end
(****************************** CRUFT *****************************************)
@@ -1233,8 +1216,6 @@ end = struct (* {{{ *)
match Vcs_.branches vcs with [_] -> `Stop id | _ -> `Cont ())
() id in
oid
- | VernacBackTo id ->
- Stateid.of_int id
| _ -> anomaly Pp.(str "incorrect VtMeta classification")
with
| Not_found ->
diff --git a/stm/vernac_classifier.ml b/stm/vernac_classifier.ml
index aaba36287a..5af576dad2 100644
--- a/stm/vernac_classifier.ml
+++ b/stm/vernac_classifier.ml
@@ -106,8 +106,8 @@ let classify_vernac e =
else GuaranteesOpacity
in
let ids, open_proof =
- List.fold_left (fun (l,b) ((({v=id},_),_,_,_,p),_) ->
- id::l, b || p = None) ([],false) l in
+ List.fold_left (fun (l,b) {Vernacexpr.fname={CAst.v=id}; body_def} ->
+ id::l, b || body_def = None) ([],false) l in
if open_proof
then VtStartProof (guarantee,ids)
else VtSideff (ids, VtLater)
@@ -118,8 +118,8 @@ let classify_vernac e =
else GuaranteesOpacity
in
let ids, open_proof =
- List.fold_left (fun (l,b) ((({v=id},_),_,_,p),_) ->
- id::l, b || p = None) ([],false) l in
+ List.fold_left (fun (l,b) { Vernacexpr.fname={CAst.v=id}; body_def } ->
+ id::l, b || body_def = None) ([],false) l in
if open_proof
then VtStartProof (guarantee,ids)
else VtSideff (ids, VtLater)
@@ -193,7 +193,7 @@ let classify_vernac e =
| VernacBack _ | VernacAbortAll
| VernacUndoTo _ | VernacUndo _
| VernacResetName _ | VernacResetInitial
- | VernacBackTo _ | VernacRestart -> VtMeta
+ | VernacRestart -> VtMeta
(* What are these? *)
| VernacRestoreState _
| VernacWriteState _ -> VtSideff ([], VtNow)
diff --git a/test-suite/arithmetic/diveucl_21.v b/test-suite/arithmetic/diveucl_21.v
index b888c97be3..b12dba429c 100644
--- a/test-suite/arithmetic/diveucl_21.v
+++ b/test-suite/arithmetic/diveucl_21.v
@@ -10,11 +10,11 @@ Check (eq_refl (4611686018427387904,1) <<: diveucl_21 1 1 2 = (46116860184273879
Definition compute1 := Eval compute in diveucl_21 1 1 2.
Check (eq_refl compute1 : (4611686018427387904,1) = (4611686018427387904,1)).
-Check (eq_refl : diveucl_21 3 1 2 = (4611686018427387904, 1)).
-Check (eq_refl (4611686018427387904, 1) <: diveucl_21 3 1 2 = (4611686018427387904, 1)).
-Check (eq_refl (4611686018427387904, 1) <<: diveucl_21 3 1 2 = (4611686018427387904, 1)).
+Check (eq_refl : diveucl_21 3 1 2 = (0, 0)).
+Check (eq_refl (0, 0) <: diveucl_21 3 1 2 = (0, 0)).
+Check (eq_refl (0, 0) <<: diveucl_21 3 1 2 = (0, 0)).
Definition compute2 := Eval compute in diveucl_21 3 1 2.
-Check (eq_refl compute2 : (4611686018427387904, 1) = (4611686018427387904, 1)).
+Check (eq_refl compute2 : (0, 0) = (0, 0)).
Check (eq_refl : diveucl_21 1 1 0 = (0,0)).
Check (eq_refl (0,0) <: diveucl_21 1 1 0 = (0,0)).
@@ -23,3 +23,7 @@ Check (eq_refl (0,0) <<: diveucl_21 1 1 0 = (0,0)).
Check (eq_refl : diveucl_21 9223372036854775807 0 1 = (0,0)).
Check (eq_refl (0,0) <: diveucl_21 9223372036854775807 0 1 = (0,0)).
Check (eq_refl (0,0) <<: diveucl_21 9223372036854775807 0 1 = (0,0)).
+
+Check (eq_refl : diveucl_21 9305446873517 1793572051078448654 4930380657631323783 = (17407905077428, 3068214991893055266)).
+Check (eq_refl (17407905077428, 3068214991893055266) <: diveucl_21 9305446873517 1793572051078448654 4930380657631323783 = (17407905077428, 3068214991893055266)).
+Check (eq_refl (17407905077428, 3068214991893055266) <<: diveucl_21 9305446873517 1793572051078448654 4930380657631323783 = (17407905077428, 3068214991893055266)).
diff --git a/test-suite/bugs/closed/HoTT_coq_020.v b/test-suite/bugs/closed/HoTT_coq_020.v
index 73da464bbe..babd180209 100644
--- a/test-suite/bugs/closed/HoTT_coq_020.v
+++ b/test-suite/bugs/closed/HoTT_coq_020.v
@@ -26,6 +26,7 @@ Ltac present_obj from to :=
| [ |- context[from ?obj ?C] ] => progress change (from obj C) with (to obj C) in *
end.
+#[universes(polymorphic)]
Section NaturalTransformationComposition.
Set Universe Polymorphism.
Context `(C : @Category objC).
@@ -58,6 +59,7 @@ Polymorphic Definition Cat0 : Category Empty_set
Polymorphic Definition FunctorFrom0 objC (C : Category objC) : Functor Cat0 C
:= Build_Functor Cat0 C (fun x => match x with end).
+#[universes(polymorphic)]
Section Law0.
Polymorphic Variable objC : Type.
Polymorphic Variable C : Category objC.
diff --git a/test-suite/bugs/closed/HoTT_coq_098.v b/test-suite/bugs/closed/HoTT_coq_098.v
index bdcd8ba97d..3b58605575 100644
--- a/test-suite/bugs/closed/HoTT_coq_098.v
+++ b/test-suite/bugs/closed/HoTT_coq_098.v
@@ -21,6 +21,7 @@ Polymorphic Definition GraphIndexingCategory : @SpecializedCategory GraphIndex.
Admitted.
Module success.
+ #[universes(polymorphic)]
Section SpecializedFunctor.
Set Universe Polymorphism.
Context `(C : @SpecializedCategory objC).
@@ -39,6 +40,7 @@ Module success.
End success.
Module success2.
+ #[universes(polymorphic)]
Section SpecializedFunctor.
Polymorphic Context `(C : @SpecializedCategory objC).
Polymorphic Context `(D : @SpecializedCategory objD).
diff --git a/test-suite/bugs/closed/bug_10300.v b/test-suite/bugs/closed/bug_10300.v
new file mode 100644
index 0000000000..374c2cf967
--- /dev/null
+++ b/test-suite/bugs/closed/bug_10300.v
@@ -0,0 +1,14 @@
+Set Implicit Arguments.
+
+Definition hprop := nat -> Prop.
+
+Definition himpl := fun H1 H2 : hprop => forall (h : nat), H1 h -> H2 h.
+
+Parameter himpl_refl : forall H : hprop, himpl H H.
+
+Parameter hstar : hprop -> hprop -> hprop.
+
+Parameter hpure : hprop.
+
+Lemma test : (forall (H:hprop), himpl (hstar H H) hpure -> True) -> True.
+Proof. intros M. eapply M. apply himpl_refl. Abort.
diff --git a/test-suite/bugs/closed/bug_10533.v b/test-suite/bugs/closed/bug_10533.v
new file mode 100644
index 0000000000..e72957bdee
--- /dev/null
+++ b/test-suite/bugs/closed/bug_10533.v
@@ -0,0 +1,8 @@
+
+Require Import Eqdep Setoid.
+Goal forall (t : unit) (pf : tt = t),
+ if (match pf with eq_refl => false end) then True else False.
+Proof.
+ intros.
+ try setoid_rewrite <-Eqdep.Eq_rect_eq.eq_rect_eq.
+Abort.
diff --git a/test-suite/bugs/closed/bug_10560.v b/test-suite/bugs/closed/bug_10560.v
new file mode 100644
index 0000000000..a9a0949d9a
--- /dev/null
+++ b/test-suite/bugs/closed/bug_10560.v
@@ -0,0 +1,9 @@
+From Coq Require Import Int63.
+Open Scope int63_scope.
+
+Lemma foo :
+ let n := opp 0 in add n 0 = n.
+Proof.
+cbv.
+apply eq_refl.
+Qed.
diff --git a/test-suite/bugs/closed/bug_3314.v b/test-suite/bugs/closed/bug_3314.v
index a5782298c3..794de93b37 100644
--- a/test-suite/bugs/closed/bug_3314.v
+++ b/test-suite/bugs/closed/bug_3314.v
@@ -24,10 +24,10 @@ Fail Eval compute in Lift nat : Prop.
(* = nat
: Prop *)
-Section Hurkens.
+Monomorphic Definition Type2 := Type.
+Monomorphic Definition Type1 := Type : Type2.
- Monomorphic Definition Type2 := Type.
- Monomorphic Definition Type1 := Type : Type2.
+Section Hurkens.
(** Assumption of a retract from Type into Prop *)
diff --git a/test-suite/bugs/closed/bug_4503.v b/test-suite/bugs/closed/bug_4503.v
index 26731e3292..c53d4cabc7 100644
--- a/test-suite/bugs/closed/bug_4503.v
+++ b/test-suite/bugs/closed/bug_4503.v
@@ -5,11 +5,12 @@ Class PreOrder (A : Type) (r : A -> A -> Type) : Type :=
(* FAILURE 1 *)
+#[universes(polymorphic)]
Section foo.
Polymorphic Universes A.
Polymorphic Context {A : Type@{A}} {rA : A -> A -> Prop} {PO : PreOrder A rA}.
- Fail Definition foo := PO.
+ Fail Monomorphic Definition foo := PO.
End foo.
@@ -30,8 +31,9 @@ End ILogic.
Set Printing Universes.
(* There is still a problem if the class is universe polymorphic *)
+#[universes(polymorphic)]
Section Embed_ILogic_Pre.
Polymorphic Universes A T.
- Fail Context {A : Type@{A}} {ILA: ILogic.ILogic@{A} A}.
+ Fail Monomorphic Context {A : Type@{A}} {ILA: ILogic.ILogic@{A} A}.
End Embed_ILogic_Pre.
diff --git a/test-suite/bugs/closed/bug_4816.v b/test-suite/bugs/closed/bug_4816.v
index 00a523842e..0bb05e77ce 100644
--- a/test-suite/bugs/closed/bug_4816.v
+++ b/test-suite/bugs/closed/bug_4816.v
@@ -1,18 +1,21 @@
+#[universes(polymorphic)]
Section foo.
Polymorphic Universes A B.
-Fail Constraint A <= B.
+Fail Monomorphic Constraint A <= B.
End foo.
(* gives an anomaly Universe undefined *)
Universes X Y.
+#[universes(polymorphic)]
Section Foo.
Polymorphic Universes Z W.
Polymorphic Constraint W < Z.
- Fail Definition bla := Type@{W}.
+ Fail Monomorphic Definition bla := Type@{W}.
Polymorphic Definition bla := Type@{W}.
+ #[universes(polymorphic)]
Section Bar.
- Fail Constraint X <= Z.
+ Fail Monomorphic Constraint X <= Z.
End Bar.
End Foo.
@@ -21,9 +24,11 @@ Require Coq.Classes.RelationClasses.
Class PreOrder (A : Type) (r : A -> A -> Type) : Type :=
{ refl : forall x, r x x }.
+#[universes(polymorphic)]
Section qux.
Polymorphic Universes A.
+ #[universes(polymorphic)]
Section bar.
- Fail Context {A : Type@{A}} {rA : A -> A -> Prop} {PO : PreOrder A rA}.
+ Fail Monomorphic Context {A : Type@{A}} {rA : A -> A -> Prop} {PO : PreOrder A rA}.
End bar.
End qux.
diff --git a/test-suite/misc/universes/dune b/test-suite/misc/universes/dune
index 0772f95604..c0d925deb5 100644
--- a/test-suite/misc/universes/dune
+++ b/test-suite/misc/universes/dune
@@ -1,8 +1,9 @@
(rule
(targets all_stdlib.v)
(deps
+ build_all_stdlib.sh
(source_tree ../../../theories)
(source_tree ../../../plugins))
(action
- (with-outputs-to all_stdlib.v
+ (with-stdout-to all_stdlib.v
(bash "./build_all_stdlib.sh"))))
diff --git a/test-suite/success/namedunivs.v b/test-suite/success/namedunivs.v
index f9154ef576..01a2136652 100644
--- a/test-suite/success/namedunivs.v
+++ b/test-suite/success/namedunivs.v
@@ -6,6 +6,7 @@
Unset Strict Universe Declaration.
+#[universes(polymorphic)]
Section lift_strict.
Polymorphic Definition liftlt :=
let t := Type@{i} : Type@{k} in
diff --git a/test-suite/success/polymorphism.v b/test-suite/success/polymorphism.v
index 339f798240..9ab8ace39e 100644
--- a/test-suite/success/polymorphism.v
+++ b/test-suite/success/polymorphism.v
@@ -122,8 +122,6 @@ Fail Definition id1impred := ((forall A : Type1, A) : Type1).
End Hierarchy.
-Section structures.
-
Record hypo : Type := mkhypo {
hypo_type : Type;
hypo_proof : hypo_type
@@ -154,9 +152,6 @@ Polymorphic Definition nest (d : dyn) := {| dyn_proof := d |}.
Polymorphic Definition twoprojs (d : dyn) := dyn_proof d = dyn_proof d.
-End structures.
-
-
Module binders.
Definition mynat@{|} := nat.
@@ -201,7 +196,8 @@ Module binders.
Definition with_mono@{u|u < M} : Type@{M} := Type@{u}.
End binders.
-
+
+#[universes(polymorphic)]
Section cats.
Local Set Universe Polymorphism.
Require Import Utf8.
@@ -307,6 +303,7 @@ Fail Check (let A := Set in fooS (id A)).
Fail Check (let A := Prop in fooS (id A)).
(* Some tests of sort-polymorphisme *)
+#[universes(polymorphic)]
Section S.
Polymorphic Variable A:Type.
(*
diff --git a/theories/Numbers/Cyclic/Int63/Int63.v b/theories/Numbers/Cyclic/Int63/Int63.v
index 9bb16b97e2..9e9481341f 100644
--- a/theories/Numbers/Cyclic/Int63/Int63.v
+++ b/theories/Numbers/Cyclic/Int63/Int63.v
@@ -388,7 +388,7 @@ Axiom diveucl_def_spec : forall x y, diveucl x y = diveucl_def x y.
Axiom diveucl_21_spec : forall a1 a2 b,
let (q,r) := diveucl_21 a1 a2 b in
let (q',r') := Z.div_eucl ([|a1|] * wB + [|a2|]) [|b|] in
- [|q|] = Z.modulo q' wB /\ [|r|] = r'.
+ [|a1|] < [|b|] -> [|q|] = q' /\ [|r|] = r'.
Axiom addmuldiv_def_spec : forall p x y,
addmuldiv p x y = addmuldiv_def p x y.
@@ -812,14 +812,6 @@ Proof.
eapply Z.lt_le_trans; [ | apply Zpower2_le_lin ]; auto with zarith.
Qed.
-Lemma lsr_add_distr x y n: (x + y) << n = ((x << n) + (y << n))%int63.
-Proof.
- apply to_Z_inj.
- rewrite -> add_spec, !lsl_spec, add_spec.
- rewrite -> Zmult_mod_idemp_l, <-Zplus_mod.
- apply f_equal2 with (f := Zmod); auto with zarith.
-Qed.
-
(* LSL *)
Lemma lsl0 i: 0 << i = 0%int63.
Proof.
@@ -1119,7 +1111,7 @@ Proof.
generalize (add_le_r x y); rewrite Heq, lor_le; intro Hb.
generalize Heq; rewrite (bit_split x) at 1; rewrite (bit_split y )at 1; clear Heq.
rewrite (fun y => add_comm y (bit x 0)), <-!add_assoc, add_comm,
- <-!add_assoc, (add_comm (bit y 0)), add_assoc, <-lsr_add_distr.
+ <-!add_assoc, (add_comm (bit y 0)), add_assoc, <-lsl_add_distr.
rewrite (bit_split (x lor y)), lor_spec.
intros Heq.
assert (F: (bit x 0 + bit y 0)%int63 = (bit x 0 || bit y 0)).
@@ -1429,26 +1421,9 @@ Proof.
generalize (Z_div_mod ([|a1|]*wB+[|a2|]) [|b|] H).
revert W.
destruct (diveucl_21 a1 a2 b); destruct (Z.div_eucl ([|a1|]*wB+[|a2|]) [|b|]).
- intros (H', H''); rewrite H', H''; clear H' H''.
+ intros (H', H''); auto; rewrite H', H''; clear H' H''.
intros (H', H''); split; [ |exact H''].
- rewrite H', Zmult_comm, Z.mod_small; [reflexivity| ].
- split.
- { revert H'; case z; [now simpl..|intros p H'].
- exfalso; apply (Z.lt_irrefl 0), (Z.le_lt_trans _ ([|a1|] * wB + [|a2|])).
- { now apply Z.add_nonneg_nonneg; [apply Z.mul_nonneg_nonneg| ]. }
- rewrite H'; apply (Zplus_lt_reg_r _ _ (- z0)); ring_simplify.
- apply (Z.le_lt_trans _ (- [|b|])); [ |now auto with zarith].
- rewrite Z.opp_eq_mul_m1; apply Zmult_le_compat_l; [ |now apply Wb].
- rewrite <-!Pos2Z.opp_pos, <-Z.opp_le_mono.
- now change 1 with (Z.succ 0); apply Zlt_le_succ. }
- rewrite <-Z.nle_gt; intro Hz; revert H2; apply Zle_not_lt.
- rewrite (Z.div_unique_pos (wB * [|a1|] + [|a2|]) wB [|a1|] [|a2|]);
- [ |now simpl..].
- rewrite Z.mul_comm, H'.
- rewrite (Z.div_unique_pos (wB * [|b|] + z0) wB [|b|] z0) at 1;
- [ |split; [ |apply (Z.lt_trans _ [|b|])]; now simpl|reflexivity].
- apply Z_div_le; [now simpl| ]; rewrite Z.mul_comm; apply Zplus_le_compat_r.
- now apply Zmult_le_compat_l.
+ now rewrite H', Zmult_comm.
Qed.
Lemma div2_phi ih il j: (2^62 <= [|j|] -> [|ih|] < [|j|] ->
diff --git a/theories/Numbers/Cyclic/ZModulo/ZModulo.v b/theories/Numbers/Cyclic/ZModulo/ZModulo.v
index 28565b2fe3..2785e89c5d 100644
--- a/theories/Numbers/Cyclic/ZModulo/ZModulo.v
+++ b/theories/Numbers/Cyclic/ZModulo/ZModulo.v
@@ -648,40 +648,15 @@ Section ZModulo.
apply two_power_pos_correct.
Qed.
- Definition head0 x := match [|x|] with
+ Definition head0 x :=
+ match [| x |] with
| Z0 => zdigits
- | Zpos p => zdigits - log_inf p - 1
- | _ => 0
- end.
+ | Zneg _ => 0
+ | (Zpos _) as p => zdigits - Z.log2 p - 1
+ end.
Lemma spec_head00: forall x, [|x|] = 0 -> [|head0 x|] = Zpos digits.
- Proof.
- unfold head0; intros.
- rewrite H; simpl.
- apply spec_zdigits.
- Qed.
-
- Lemma log_inf_bounded : forall x p, Zpos x < 2^p -> log_inf x < p.
- Proof.
- induction x; simpl; intros.
-
- assert (0 < p) by (destruct p; compute; auto with zarith; discriminate).
- cut (log_inf x < p - 1); [omega| ].
- apply IHx.
- change (Zpos x~1) with (2*(Zpos x)+1) in H.
- replace p with (Z.succ (p-1)) in H; auto with zarith.
- rewrite Z.pow_succ_r in H; auto with zarith.
-
- assert (0 < p) by (destruct p; compute; auto with zarith; discriminate).
- cut (log_inf x < p - 1); [omega| ].
- apply IHx.
- change (Zpos x~0) with (2*(Zpos x)) in H.
- replace p with (Z.succ (p-1)) in H; auto with zarith.
- rewrite Z.pow_succ_r in H; auto with zarith.
-
- simpl; intros; destruct p; compute; auto with zarith.
- Qed.
-
+ Proof. unfold head0; intros x ->; apply spec_zdigits. Qed.
Lemma spec_head0 : forall x, 0 < [|x|] ->
wB/ 2 <= 2 ^ ([|head0 x|]) * [|x|] < wB.
@@ -689,36 +664,35 @@ Section ZModulo.
intros; unfold head0.
generalize (spec_to_Z x).
destruct [|x|]; try discriminate.
+ pose proof (Z.log2_nonneg (Zpos p)).
+ destruct (Z.log2_spec (Zpos p)); auto.
intros.
- destruct (log_inf_correct p).
- rewrite 2 two_p_power2 in H2; auto with zarith.
- assert (0 <= zdigits - log_inf p - 1 < wB).
+ assert (0 <= zdigits - Z.log2 (Zpos p) - 1 < wB) as Hrange.
split.
- cut (log_inf p < zdigits); try omega.
+ cut (Z.log2 (Zpos p) < zdigits). omega.
unfold zdigits.
unfold wB, base in *.
- apply log_inf_bounded; auto with zarith.
+ apply Z.log2_lt_pow2; intuition.
apply Z.lt_trans with zdigits.
omega.
unfold zdigits, wB, base; apply Zpower2_lt_lin; auto with zarith.
- unfold to_Z; rewrite (Zmod_small _ _ H3).
- destruct H2.
+ unfold to_Z; rewrite (Zmod_small _ _ Hrange).
split.
- apply Z.le_trans with (2^(zdigits - log_inf p - 1)*(2^log_inf p)).
+ apply Z.le_trans with (2^(zdigits - Z.log2 (Zpos p) - 1)*(2^Z.log2 (Zpos p))).
apply Zdiv_le_upper_bound; auto with zarith.
rewrite <- Zpower_exp; auto with zarith.
rewrite Z.mul_comm; rewrite <- Z.pow_succ_r; auto with zarith.
- replace (Z.succ (zdigits - log_inf p -1 +log_inf p)) with zdigits
+ replace (Z.succ (zdigits - Z.log2 (Zpos p) -1 + Z.log2 (Zpos p))) with zdigits
by ring.
unfold wB, base, zdigits; auto with zarith.
apply Z.mul_le_mono_nonneg; auto with zarith.
apply Z.lt_le_trans
- with (2^(zdigits - log_inf p - 1)*(2^(Z.succ (log_inf p)))).
+ with (2^(zdigits - Z.log2 (Zpos p) - 1)*(2^(Z.succ (Z.log2 (Zpos p))))).
apply Z.mul_lt_mono_pos_l; auto with zarith.
rewrite <- Zpower_exp; auto with zarith.
- replace (zdigits - log_inf p -1 +Z.succ (log_inf p)) with zdigits
+ replace (zdigits - Z.log2 (Zpos p) -1 +Z.succ (Z.log2 (Zpos p))) with zdigits
by ring.
unfold wB, base, zdigits; auto with zarith.
Qed.
diff --git a/theories/Reals/Rtrigo_calc.v b/theories/Reals/Rtrigo_calc.v
index 5ed60b0a0f..2428fc495d 100644
--- a/theories/Reals/Rtrigo_calc.v
+++ b/theories/Reals/Rtrigo_calc.v
@@ -178,7 +178,7 @@ Proof.
change (cos (PI / 4) <> 0); rewrite cos_PI4; apply R1_sqrt2_neq_0.
Qed.
-Lemma cos3PI4 : cos (3 * (PI / 4)) = -1 / sqrt 2.
+Lemma cos_3PI4 : cos (3 * (PI / 4)) = -1 / sqrt 2.
Proof.
replace (3 * (PI / 4)) with (PI / 2 - - (PI / 4)) by field.
rewrite cos_shift; rewrite sin_neg; rewrite sin_PI4.
@@ -186,12 +186,16 @@ Proof.
ring.
Qed.
-Lemma sin3PI4 : sin (3 * (PI / 4)) = 1 / sqrt 2.
+#[deprecated(since="8.10",note="Use cos_3PI4 instead.")] Notation cos3PI4 := cos_3PI4.
+
+Lemma sin_3PI4 : sin (3 * (PI / 4)) = 1 / sqrt 2.
Proof.
replace (3 * (PI / 4)) with (PI / 2 - - (PI / 4)) by field.
now rewrite sin_shift, cos_neg, cos_PI4.
Qed.
+#[deprecated(since="8.10",note="Use sin_3PI4 instead.")] Notation sin3PI4 := sin_3PI4.
+
Lemma cos_PI6 : cos (PI / 6) = sqrt 3 / 2.
Proof with trivial.
apply Rsqr_inj...
diff --git a/theories/ZArith/ZArith.v b/theories/ZArith/ZArith.v
index c2c97fca4f..b0744caa7b 100644
--- a/theories/ZArith/ZArith.v
+++ b/theories/ZArith/ZArith.v
@@ -21,6 +21,5 @@ Require Export Zpow_def.
Require Export Zcomplements.
Require Export Zpower.
Require Export Zdiv.
-Require Export Zlogarithm.
Export ZArithRing.
diff --git a/theories/ZArith/Zlogarithm.v b/theories/ZArith/Zlogarithm.v
deleted file mode 100644
index edbd3a18fe..0000000000
--- a/theories/ZArith/Zlogarithm.v
+++ /dev/null
@@ -1,273 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-(**********************************************************************)
-
-(** The integer logarithms with base 2. *)
-
-(** THIS FILE IS DEPRECATED.
- Please rather use [Z.log2] (or [Z.log2_up]), which
- are defined in [BinIntDef], and whose properties can
- be found in [BinInt.Z]. *)
-
-(* There are three logarithms defined here,
- depending on the rounding of the real 2-based logarithm:
- - [Log_inf]: [y = (Log_inf x) iff 2^y <= x < 2^(y+1)]
- i.e. [Log_inf x] is the biggest integer that is smaller than [Log x]
- - [Log_sup]: [y = (Log_sup x) iff 2^(y-1) < x <= 2^y]
- i.e. [Log_inf x] is the smallest integer that is bigger than [Log x]
- - [Log_nearest]: [y= (Log_nearest x) iff 2^(y-1/2) < x <= 2^(y+1/2)]
- i.e. [Log_nearest x] is the integer nearest from [Log x] *)
-
-Require Import ZArith_base Omega Zcomplements Zpower.
-Local Open Scope Z_scope.
-
-Section Log_pos. (* Log of positive integers *)
-
- (** First we build [log_inf] and [log_sup] *)
-
- Fixpoint log_inf (p:positive) : Z :=
- match p with
- | xH => 0 (* 1 *)
- | xO q => Z.succ (log_inf q) (* 2n *)
- | xI q => Z.succ (log_inf q) (* 2n+1 *)
- end.
-
- Fixpoint log_sup (p:positive) : Z :=
- match p with
- | xH => 0 (* 1 *)
- | xO n => Z.succ (log_sup n) (* 2n *)
- | xI n => Z.succ (Z.succ (log_inf n)) (* 2n+1 *)
- end.
-
- Hint Unfold log_inf log_sup : core.
-
- Lemma Psize_log_inf : forall p, Zpos (Pos.size p) = Z.succ (log_inf p).
- Proof.
- induction p; simpl; now rewrite ?Pos2Z.inj_succ, ?IHp.
- Qed.
-
- Lemma Zlog2_log_inf : forall p, Z.log2 (Zpos p) = log_inf p.
- Proof.
- unfold Z.log2. destruct p; simpl; trivial; apply Psize_log_inf.
- Qed.
-
- Lemma Zlog2_up_log_sup : forall p, Z.log2_up (Zpos p) = log_sup p.
- Proof.
- induction p; simpl log_sup.
- - change (Zpos p~1) with (2*(Zpos p)+1).
- rewrite Z.log2_up_succ_double, Zlog2_log_inf; try easy.
- unfold Z.succ. now rewrite !(Z.add_comm _ 1), Z.add_assoc.
- - change (Zpos p~0) with (2*Zpos p).
- now rewrite Z.log2_up_double, IHp.
- - reflexivity.
- Qed.
-
- (** Then we give the specifications of [log_inf] and [log_sup]
- and prove their validity *)
-
- Hint Resolve Z.le_trans: zarith.
-
- Theorem log_inf_correct :
- forall x:positive,
- 0 <= log_inf x /\ two_p (log_inf x) <= Zpos x < two_p (Z.succ (log_inf x)).
- Proof.
- simple induction x; intros; simpl;
- [ elim H; intros Hp HR; clear H; split;
- [ auto with zarith
- | rewrite two_p_S with (x := Z.succ (log_inf p)) by (apply Z.le_le_succ_r; trivial);
- rewrite two_p_S by trivial;
- rewrite two_p_S in HR by trivial; rewrite (BinInt.Pos2Z.inj_xI p);
- omega ]
- | elim H; intros Hp HR; clear H; split;
- [ auto with zarith
- | rewrite two_p_S with (x := Z.succ (log_inf p)) by (apply Z.le_le_succ_r; trivial);
- rewrite two_p_S by trivial;
- rewrite two_p_S in HR by trivial; rewrite (BinInt.Pos2Z.inj_xO p);
- omega ]
- | unfold two_power_pos; unfold shift_pos; simpl;
- omega ].
- Qed.
-
- Definition log_inf_correct1 (p:positive) := proj1 (log_inf_correct p).
- Definition log_inf_correct2 (p:positive) := proj2 (log_inf_correct p).
-
- Opaque log_inf_correct1 log_inf_correct2.
-
- Hint Resolve log_inf_correct1 log_inf_correct2: zarith.
-
- Lemma log_sup_correct1 : forall p:positive, 0 <= log_sup p.
- Proof.
- simple induction p; intros; simpl; auto with zarith.
- Qed.
-
- (** For every [p], either [p] is a power of two and [(log_inf p)=(log_sup p)]
- either [(log_sup p)=(log_inf p)+1] *)
-
- Theorem log_sup_log_inf :
- forall p:positive,
- IF Zpos p = two_p (log_inf p) then Zpos p = two_p (log_sup p)
- else log_sup p = Z.succ (log_inf p).
- Proof.
- simple induction p; intros;
- [ elim H; right; simpl;
- rewrite (two_p_S (log_inf p0) (log_inf_correct1 p0));
- rewrite BinInt.Pos2Z.inj_xI; unfold Z.succ; omega
- | elim H; clear H; intro Hif;
- [ left; simpl;
- rewrite (two_p_S (log_inf p0) (log_inf_correct1 p0));
- rewrite (two_p_S (log_sup p0) (log_sup_correct1 p0));
- rewrite <- (proj1 Hif); rewrite <- (proj2 Hif);
- auto
- | right; simpl;
- rewrite (two_p_S (log_inf p0) (log_inf_correct1 p0));
- rewrite BinInt.Pos2Z.inj_xO; unfold Z.succ;
- omega ]
- | left; auto ].
- Qed.
-
- Theorem log_sup_correct2 :
- forall x:positive, two_p (Z.pred (log_sup x)) < Zpos x <= two_p (log_sup x).
- Proof.
- intro.
- elim (log_sup_log_inf x).
- (* x is a power of two and [log_sup = log_inf] *)
- intros [E1 E2]; rewrite E2.
- split; [ apply two_p_pred; apply log_sup_correct1 | apply Z.le_refl ].
- intros [E1 E2]; rewrite E2.
- rewrite (Z.pred_succ (log_inf x)).
- generalize (log_inf_correct2 x); omega.
- Qed.
-
- Lemma log_inf_le_log_sup : forall p:positive, log_inf p <= log_sup p.
- Proof.
- simple induction p; simpl; intros; omega.
- Qed.
-
- Lemma log_sup_le_Slog_inf : forall p:positive, log_sup p <= Z.succ (log_inf p).
- Proof.
- simple induction p; simpl; intros; omega.
- Qed.
-
- (** Now it's possible to specify and build the [Log] rounded to the nearest *)
-
- Fixpoint log_near (x:positive) : Z :=
- match x with
- | xH => 0
- | xO xH => 1
- | xI xH => 2
- | xO y => Z.succ (log_near y)
- | xI y => Z.succ (log_near y)
- end.
-
- Theorem log_near_correct1 : forall p:positive, 0 <= log_near p.
- Proof.
- simple induction p; simpl; intros;
- [ elim p0; auto with zarith
- | elim p0; auto with zarith
- | trivial with zarith ].
- intros; apply Z.le_le_succ_r.
- generalize H0; now elim p1.
- intros; apply Z.le_le_succ_r.
- generalize H0; now elim p1.
- Qed.
-
- Theorem log_near_correct2 :
- forall p:positive, log_near p = log_inf p \/ log_near p = log_sup p.
- Proof.
- simple induction p.
- intros p0 [Einf| Esup].
- simpl. rewrite Einf.
- case p0; [ left | left | right ]; reflexivity.
- simpl; rewrite Esup.
- elim (log_sup_log_inf p0).
- generalize (log_inf_le_log_sup p0).
- generalize (log_sup_le_Slog_inf p0).
- case p0; auto with zarith.
- intros; omega.
- case p0; intros; auto with zarith.
- intros p0 [Einf| Esup].
- simpl.
- repeat rewrite Einf.
- case p0; intros; auto with zarith.
- simpl.
- repeat rewrite Esup.
- case p0; intros; auto with zarith.
- auto.
- Qed.
-
-End Log_pos.
-
-Section divers.
-
- (** Number of significative digits. *)
-
- Definition N_digits (x:Z) :=
- match x with
- | Zpos p => log_inf p
- | Zneg p => log_inf p
- | Z0 => 0
- end.
-
- Lemma ZERO_le_N_digits : forall x:Z, 0 <= N_digits x.
- Proof.
- simple induction x; simpl;
- [ apply Z.le_refl | exact log_inf_correct1 | exact log_inf_correct1 ].
- Qed.
-
- Lemma log_inf_shift_nat : forall n:nat, log_inf (shift_nat n 1) = Z.of_nat n.
- Proof.
- simple induction n; intros;
- [ try trivial | rewrite Nat2Z.inj_succ; rewrite <- H; reflexivity ].
- Qed.
-
- Lemma log_sup_shift_nat : forall n:nat, log_sup (shift_nat n 1) = Z.of_nat n.
- Proof.
- simple induction n; intros;
- [ try trivial | rewrite Nat2Z.inj_succ; rewrite <- H; reflexivity ].
- Qed.
-
- (** [Is_power p] means that p is a power of two *)
- Fixpoint Is_power (p:positive) : Prop :=
- match p with
- | xH => True
- | xO q => Is_power q
- | xI q => False
- end.
-
- Lemma Is_power_correct :
- forall p:positive, Is_power p <-> (exists y : nat, p = shift_nat y 1).
- Proof.
- split;
- [ elim p;
- [ simpl; tauto
- | simpl; intros; generalize (H H0); intro H1; elim H1;
- intros y0 Hy0; exists (S y0); rewrite Hy0; reflexivity
- | intro; exists 0%nat; reflexivity ]
- | intros; elim H; intros; rewrite H0; elim x; intros; simpl; trivial ].
- Qed.
-
- Lemma Is_power_or : forall p:positive, Is_power p \/ ~ Is_power p.
- Proof.
- simple induction p;
- [ intros; right; simpl; tauto
- | intros; elim H;
- [ intros; left; simpl; exact H0
- | intros; right; simpl; exact H0 ]
- | left; simpl; trivial ].
- Qed.
-
-End divers.
-
-
-
-
-
-
diff --git a/theories/ZArith/Zsqrt_compat.v b/theories/ZArith/Zsqrt_compat.v
deleted file mode 100644
index 6873c737a7..0000000000
--- a/theories/ZArith/Zsqrt_compat.v
+++ /dev/null
@@ -1,234 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-Require Import ZArithRing.
-Require Import Omega.
-Require Export ZArith_base.
-Local Open Scope Z_scope.
-
-(** THIS FILE IS DEPRECATED
-
- Instead of the various [Zsqrt] defined here, please use rather
- [Z.sqrt] (or [Z.sqrtrem]). The latter are pure functions without
- proof parts, and more results are available about them.
- Some equivalence proofs between the old and the new versions
- can be found below. Importing ZArith will provides by default
- the new versions.
-
-*)
-
-(**********************************************************************)
-(** Definition and properties of square root on Z *)
-
-(** The following tactic replaces all instances of (POS (xI ...)) by
- `2*(POS ...)+1`, but only when ... is not made only with xO, XI, or xH. *)
-Ltac compute_POS :=
- match goal with
- | |- context [(Zpos (xI ?X1))] =>
- match constr:(X1) with
- | context [1%positive] => fail 1
- | _ => rewrite (Pos2Z.inj_xI X1)
- end
- | |- context [(Zpos (xO ?X1))] =>
- match constr:(X1) with
- | context [1%positive] => fail 1
- | _ => rewrite (Pos2Z.inj_xO X1)
- end
- end.
-
-Inductive sqrt_data (n:Z) : Set :=
- c_sqrt : forall s r:Z, n = s * s + r -> 0 <= r <= 2 * s -> sqrt_data n.
-
-Definition sqrtrempos : forall p:positive, sqrt_data (Zpos p).
- refine
- (fix sqrtrempos (p:positive) : sqrt_data (Zpos p) :=
- match p return sqrt_data (Zpos p) with
- | xH => c_sqrt 1 1 0 _ _
- | xO xH => c_sqrt 2 1 1 _ _
- | xI xH => c_sqrt 3 1 2 _ _
- | xO (xO p') =>
- match sqrtrempos p' with
- | c_sqrt _ s' r' Heq Hint =>
- match Z_le_gt_dec (4 * s' + 1) (4 * r') with
- | left Hle =>
- c_sqrt (Zpos (xO (xO p'))) (2 * s' + 1)
- (4 * r' - (4 * s' + 1)) _ _
- | right Hgt => c_sqrt (Zpos (xO (xO p'))) (2 * s') (4 * r') _ _
- end
- end
- | xO (xI p') =>
- match sqrtrempos p' with
- | c_sqrt _ s' r' Heq Hint =>
- match Z_le_gt_dec (4 * s' + 1) (4 * r' + 2) with
- | left Hle =>
- c_sqrt (Zpos (xO (xI p'))) (2 * s' + 1)
- (4 * r' + 2 - (4 * s' + 1)) _ _
- | right Hgt =>
- c_sqrt (Zpos (xO (xI p'))) (2 * s') (4 * r' + 2) _ _
- end
- end
- | xI (xO p') =>
- match sqrtrempos p' with
- | c_sqrt _ s' r' Heq Hint =>
- match Z_le_gt_dec (4 * s' + 1) (4 * r' + 1) with
- | left Hle =>
- c_sqrt (Zpos (xI (xO p'))) (2 * s' + 1)
- (4 * r' + 1 - (4 * s' + 1)) _ _
- | right Hgt =>
- c_sqrt (Zpos (xI (xO p'))) (2 * s') (4 * r' + 1) _ _
- end
- end
- | xI (xI p') =>
- match sqrtrempos p' with
- | c_sqrt _ s' r' Heq Hint =>
- match Z_le_gt_dec (4 * s' + 1) (4 * r' + 3) with
- | left Hle =>
- c_sqrt (Zpos (xI (xI p'))) (2 * s' + 1)
- (4 * r' + 3 - (4 * s' + 1)) _ _
- | right Hgt =>
- c_sqrt (Zpos (xI (xI p'))) (2 * s') (4 * r' + 3) _ _
- end
- end
- end); clear sqrtrempos; repeat compute_POS;
- try (try rewrite Heq; ring); try omega.
-Defined.
-
-(** Define with integer input, but with a strong (readable) specification. *)
-Definition Zsqrt :
- forall x:Z,
- 0 <= x ->
- {s : Z & {r : Z | x = s * s + r /\ s * s <= x < (s + 1) * (s + 1)}}.
- refine
- (fun x =>
- match
- x
- return
- 0 <= x ->
- {s : Z & {r : Z | x = s * s + r /\ s * s <= x < (s + 1) * (s + 1)}}
- with
- | Zpos p =>
- fun h =>
- match sqrtrempos p with
- | c_sqrt _ s r Heq Hint =>
- existT
- (fun s:Z =>
- {r : Z |
- Zpos p = s * s + r /\ s * s <= Zpos p < (s + 1) * (s + 1)})
- s
- (exist
- (fun r:Z =>
- Zpos p = s * s + r /\
- s * s <= Zpos p < (s + 1) * (s + 1)) r _)
- end
- | Zneg p =>
- fun h =>
- False_rec
- {s : Z &
- {r : Z |
- Zneg p = s * s + r /\ s * s <= Zneg p < (s + 1) * (s + 1)}}
- (h (eq_refl Datatypes.Gt))
- | Z0 =>
- fun h =>
- existT
- (fun s:Z =>
- {r : Z | 0 = s * s + r /\ s * s <= 0 < (s + 1) * (s + 1)}) 0
- (exist
- (fun r:Z => 0 = 0 * 0 + r /\ 0 * 0 <= 0 < (0 + 1) * (0 + 1)) 0
- _)
- end); try omega.
- split; [ omega | rewrite Heq; ring_simplify (s*s) ((s + 1) * (s + 1)); omega ].
-Defined.
-
-(** Define a function of type Z->Z that computes the integer square root,
- but only for positive numbers, and 0 for others. *)
-Definition Zsqrt_plain (x:Z) : Z :=
- match x with
- | Zpos p =>
- match Zsqrt (Zpos p) (Pos2Z.is_nonneg p) with
- | existT _ s _ => s
- end
- | Zneg p => 0
- | Z0 => 0
- end.
-
-(** A basic theorem about Zsqrt_plain *)
-
-Theorem Zsqrt_interval :
- forall n:Z,
- 0 <= n ->
- Zsqrt_plain n * Zsqrt_plain n <= n <
- (Zsqrt_plain n + 1) * (Zsqrt_plain n + 1).
-Proof.
- intros [|p|p] Hp.
- - now compute.
- - unfold Zsqrt_plain.
- now destruct Zsqrt as (s & r & Heq & Hint).
- - now elim Hp.
-Qed.
-
-(** Positivity *)
-
-Theorem Zsqrt_plain_is_pos: forall n, 0 <= n -> 0 <= Zsqrt_plain n.
-Proof.
- intros n m; case (Zsqrt_interval n); auto with zarith.
- intros H1 H2; case (Z.le_gt_cases 0 (Zsqrt_plain n)); auto.
- intros H3; contradict H2; auto; apply Z.le_ngt.
- apply Z.le_trans with ( 2 := H1 ).
- replace ((Zsqrt_plain n + 1) * (Zsqrt_plain n + 1))
- with (Zsqrt_plain n * Zsqrt_plain n + (2 * Zsqrt_plain n + 1));
- auto with zarith.
- ring.
-Qed.
-
-(** Direct correctness on squares. *)
-
-Theorem Zsqrt_square_id: forall a, 0 <= a -> Zsqrt_plain (a * a) = a.
-Proof.
- intros a H.
- generalize (Zsqrt_plain_is_pos (a * a)); auto with zarith; intros Haa.
- case (Zsqrt_interval (a * a)); auto with zarith.
- intros H1 H2.
- case (Z.le_gt_cases a (Zsqrt_plain (a * a))); intros H3.
- - Z.le_elim H3; auto.
- contradict H1; auto; apply Z.lt_nge; auto with zarith.
- apply Z.le_lt_trans with (a * Zsqrt_plain (a * a)); auto with zarith.
- apply Z.mul_lt_mono_pos_r; auto with zarith.
- - contradict H2; auto; apply Z.le_ngt; auto with zarith.
- apply Z.mul_le_mono_nonneg; auto with zarith.
-Qed.
-
-(** [Zsqrt_plain] is increasing *)
-
-Theorem Zsqrt_le:
- forall p q, 0 <= p <= q -> Zsqrt_plain p <= Zsqrt_plain q.
-Proof.
- intros p q [H1 H2].
- Z.le_elim H2; [ | subst q; auto with zarith].
- case (Z.le_gt_cases (Zsqrt_plain p) (Zsqrt_plain q)); auto; intros H3.
- assert (Hp: (0 <= Zsqrt_plain q)).
- { apply Zsqrt_plain_is_pos; auto with zarith. }
- absurd (q <= p); auto with zarith.
- apply Z.le_trans with ((Zsqrt_plain q + 1) * (Zsqrt_plain q + 1)).
- case (Zsqrt_interval q); auto with zarith.
- apply Z.le_trans with (Zsqrt_plain p * Zsqrt_plain p); auto with zarith.
- apply Z.mul_le_mono_nonneg; auto with zarith.
- case (Zsqrt_interval p); auto with zarith.
-Qed.
-
-
-(** Equivalence between Zsqrt_plain and [Z.sqrt] *)
-
-Lemma Zsqrt_equiv : forall n, Zsqrt_plain n = Z.sqrt n.
-Proof.
- intros. destruct (Z_le_gt_dec 0 n).
- symmetry. apply Z.sqrt_unique; trivial.
- now apply Zsqrt_interval.
- now destruct n.
-Qed.
diff --git a/toplevel/coqloop.ml b/toplevel/coqloop.ml
index e49b1c0c07..2673995a86 100644
--- a/toplevel/coqloop.ml
+++ b/toplevel/coqloop.ml
@@ -383,7 +383,7 @@ let rec vernac_loop ~state =
try
let input = top_buffer.tokens in
match read_sentence ~state input with
- | Some (VernacBacktrack(bid,_,_)) ->
+ | Some (VernacBackTo bid) ->
let bid = Stateid.of_int bid in
let doc, res = Stm.edit_at ~doc:state.doc bid in
assert (res = `NewTip);
diff --git a/toplevel/g_toplevel.mlg b/toplevel/g_toplevel.mlg
index fed337ab03..1a1537113e 100644
--- a/toplevel/g_toplevel.mlg
+++ b/toplevel/g_toplevel.mlg
@@ -17,7 +17,7 @@ open Vernacexpr
(* Vernaculars specific to the toplevel *)
type vernac_toplevel =
- | VernacBacktrack of int * int * int
+ | VernacBackTo of int
| VernacDrop
| VernacQuit
| VernacControl of vernac_control
@@ -54,8 +54,8 @@ GRAMMAR EXTEND Gram
vernac_toplevel: FIRST
[ [ IDENT "Drop"; "." -> { Some VernacDrop }
| IDENT "Quit"; "." -> { Some VernacQuit }
- | IDENT "Backtrack"; n = natural ; m = natural ; p = natural; "." ->
- { Some (VernacBacktrack (n,m,p)) }
+ | IDENT "BackTo"; n = natural; "." ->
+ { Some (VernacBackTo n) }
(* show a goal for the specified proof state *)
| test_show_goal; IDENT "Show"; IDENT "Goal"; gid = natural; IDENT "at"; sid = natural; "." ->
{ Some (VernacShowGoal {gid; sid}) }
diff --git a/user-contrib/Ltac2/tac2entries.ml b/user-contrib/Ltac2/tac2entries.ml
index 3b8fc58c6f..17004bb012 100644
--- a/user-contrib/Ltac2/tac2entries.ml
+++ b/user-contrib/Ltac2/tac2entries.ml
@@ -763,7 +763,7 @@ let perform_eval ~pstate e =
| Goal_select.SelectAlreadyFocused -> assert false (* TODO **)
in
let (proof, _, ans) = Proof.run_tactic (Global.env ()) v proof in
- let sigma = Proof.in_proof proof (fun sigma -> sigma) in
+ let { Proof.sigma } = Proof.data proof in
let name = int_name () in
Feedback.msg_notice (str "- : " ++ pr_glbtype name (snd ty)
++ spc () ++ str "=" ++ spc () ++
diff --git a/vernac/comFixpoint.ml b/vernac/comFixpoint.ml
index 3f13d772ab..74c9bc2886 100644
--- a/vernac/comFixpoint.ml
+++ b/vernac/comFixpoint.ml
@@ -107,26 +107,20 @@ let check_mutuality env evd isfix fixl =
warn_non_full_mutual (x,xge,y,yge,isfix,rest)
| _ -> ()
-type structured_fixpoint_expr = {
- fix_name : Id.t;
- fix_univs : universe_decl_expr option;
- fix_annot : lident option;
- fix_binders : local_binder_expr list;
- fix_body : constr_expr option;
- fix_type : constr_expr
-}
-
let interp_fix_context ~program_mode ~cofix env sigma fix =
- let before, after = if not cofix then split_at_annot fix.fix_binders fix.fix_annot else [], fix.fix_binders in
+ let before, after =
+ if not cofix
+ then split_at_annot fix.Vernacexpr.binders fix.Vernacexpr.rec_order
+ else [], fix.Vernacexpr.binders in
let sigma, (impl_env, ((env', ctx), imps)) = interp_context_evars ~program_mode env sigma before in
let sigma, (impl_env', ((env'', ctx'), imps')) =
interp_context_evars ~program_mode ~impl_env ~shift:(Context.Rel.nhyps ctx) env' sigma after
in
- let annot = Option.map (fun _ -> List.length (assums_of_rel_context ctx)) fix.fix_annot in
+ let annot = Option.map (fun _ -> List.length (assums_of_rel_context ctx)) fix.Vernacexpr.rec_order in
sigma, ((env'', ctx' @ ctx), (impl_env',imps @ imps'), annot)
let interp_fix_ccl ~program_mode sigma impls (env,_) fix =
- let sigma, (c, impl) = interp_type_evars_impls ~program_mode ~impls env sigma fix.fix_type in
+ let sigma, (c, impl) = interp_type_evars_impls ~program_mode ~impls env sigma fix.Vernacexpr.rtype in
let r = Retyping.relevance_of_type env sigma c in
sigma, (c, r, impl)
@@ -135,7 +129,7 @@ let interp_fix_body ~program_mode env_rec sigma impls (_,ctx) fix ccl =
Option.cata (fun body ->
let env = push_rel_context ctx env_rec in
let sigma, body = interp_casted_constr_evars ~program_mode env sigma ~impls body ccl in
- sigma, Some (it_mkLambda_or_LetIn body ctx)) (sigma, None) fix.fix_body
+ sigma, Some (it_mkLambda_or_LetIn body ctx)) (sigma, None) fix.Vernacexpr.body_def
let build_fix_type (_,ctx) ccl = EConstr.it_mkProd_or_LetIn ccl ctx
@@ -167,16 +161,16 @@ type recursive_preentry =
let fix_proto sigma =
Evarutil.new_global sigma (Coqlib.lib_ref "program.tactic.fix_proto")
-let interp_recursive ~program_mode ~cofix fixl notations =
+let interp_recursive ~program_mode ~cofix (fixl : 'a Vernacexpr.fix_expr_gen list) =
let open Context.Named.Declaration in
let open EConstr in
let env = Global.env() in
- let fixnames = List.map (fun fix -> fix.fix_name) fixl in
+ let fixnames = List.map (fun fix -> fix.Vernacexpr.fname.CAst.v) fixl in
(* Interp arities allowing for unresolved types *)
let all_universes =
List.fold_right (fun sfe acc ->
- match sfe.fix_univs , acc with
+ match sfe.Vernacexpr.univs , acc with
| None , acc -> acc
| x , None -> x
| Some ls , Some us ->
@@ -222,6 +216,7 @@ let interp_recursive ~program_mode ~cofix fixl notations =
(* Interp bodies with rollback because temp use of notations/implicit *)
let sigma, fixdefs =
Metasyntax.with_syntax_protection (fun () ->
+ let notations = List.map_append (fun { Vernacexpr.notations } -> notations) fixl in
List.iter (Metasyntax.set_notation_for_interpretation env_rec impls) notations;
List.fold_left4_map
(fun sigma fixctximpenv -> interp_fix_body ~program_mode env_rec sigma (Id.Map.fold Id.Map.add fixctximpenv impls))
@@ -248,8 +243,8 @@ let ground_fixpoint env evd (fixnames,fixrs,fixdefs,fixtypes) =
let fixtypes = List.map EConstr.(to_constr evd) fixtypes in
Evd.evar_universe_context evd, (fixnames,fixrs,fixdefs,fixtypes)
-let interp_fixpoint ~cofix l ntns =
- let (env,_,pl,evd),fix,info = interp_recursive ~program_mode:false ~cofix l ntns in
+let interp_fixpoint ~cofix l =
+ let (env,_,pl,evd),fix,info = interp_recursive ~program_mode:false ~cofix l in
check_recursive true env evd fix;
let uctx,fix = ground_fixpoint env evd fix in
(fix,pl,uctx,info)
@@ -316,38 +311,29 @@ let extract_decreasing_argument ~structonly = function { CAst.v = v } -> match v
| _ -> user_err Pp.(str
"Well-founded induction requires Program Fixpoint or Function.")
-let extract_fixpoint_components ~structonly l =
- let fixl, ntnl = List.split l in
- let fixl = List.map (fun (({CAst.v=id},pl),ann,bl,typ,def) ->
- (* This is a special case: if there's only one binder, we pick it as the
- recursive argument if none is provided. *)
- let ann = Option.map (fun ann -> match bl, ann with
- | [CLocalAssum([{ CAst.v = Name x }],_,_)], { CAst.v = CMeasureRec(None, mes, rel); CAst.loc } ->
- CAst.make ?loc @@ CMeasureRec(Some (CAst.make x), mes, rel)
- | [CLocalDef({ CAst.v = Name x },_,_)], { CAst.v = CMeasureRec(None, mes, rel); CAst.loc } ->
- CAst.make ?loc @@ CMeasureRec(Some (CAst.make x), mes, rel)
- | _, x -> x) ann
- in
- let ann = Option.map (extract_decreasing_argument ~structonly) ann in
- {fix_name = id; fix_annot = ann; fix_univs = pl;
- fix_binders = bl; fix_body = def; fix_type = typ}) fixl in
- fixl, List.flatten ntnl
-
-let extract_cofixpoint_components l =
- let fixl, ntnl = List.split l in
- List.map (fun (({CAst.v=id},pl),bl,typ,def) ->
- {fix_name = id; fix_annot = None; fix_univs = pl;
- fix_binders = bl; fix_body = def; fix_type = typ}) fixl,
- List.flatten ntnl
+(* This is a special case: if there's only one binder, we pick it as
+ the recursive argument if none is provided. *)
+let adjust_rec_order ~structonly binders rec_order =
+ let rec_order = Option.map (fun rec_order -> match binders, rec_order with
+ | [CLocalAssum([{ CAst.v = Name x }],_,_)], { CAst.v = CMeasureRec(None, mes, rel); CAst.loc } ->
+ CAst.make ?loc @@ CMeasureRec(Some (CAst.make x), mes, rel)
+ | [CLocalDef({ CAst.v = Name x },_,_)], { CAst.v = CMeasureRec(None, mes, rel); CAst.loc } ->
+ CAst.make ?loc @@ CMeasureRec(Some (CAst.make x), mes, rel)
+ | _, x -> x) rec_order
+ in
+ Option.map (extract_decreasing_argument ~structonly) rec_order
let check_safe () =
let open Declarations in
let flags = Environ.typing_flags (Global.env ()) in
flags.check_universes && flags.check_guarded
-let do_fixpoint_common l =
- let fixl, ntns = extract_fixpoint_components ~structonly:true l in
- let (_, _, _, info as fix) = interp_fixpoint ~cofix:false fixl ntns in
+let do_fixpoint_common (fixl : Vernacexpr.fixpoint_expr list) =
+ let fixl = List.map (fun fix ->
+ Vernacexpr.{ fix
+ with rec_order = adjust_rec_order ~structonly:true fix.binders fix.rec_order }) fixl in
+ let ntns = List.map_append (fun { Vernacexpr.notations } -> notations ) fixl in
+ let (_, _, _, info as fix) = interp_fixpoint ~cofix:false fixl in
fixl, ntns, fix, List.map compute_possible_guardness_evidences info
let do_fixpoint_interactive ~scope ~poly l : Lemmas.t =
@@ -361,17 +347,18 @@ let do_fixpoint ~scope ~poly l =
declare_fixpoint_generic ~indexes:possible_indexes ~scope ~poly fix ntns;
if not (check_safe ()) then Feedback.feedback Feedback.AddedAxiom else ()
-let do_cofixpoint_common l =
- let fixl,ntns = extract_cofixpoint_components l in
- ntns, interp_fixpoint ~cofix:true fixl ntns
+let do_cofixpoint_common (fixl : Vernacexpr.cofixpoint_expr list) =
+ let fixl = List.map (fun fix -> {fix with Vernacexpr.rec_order = None}) fixl in
+ let ntns = List.map_append (fun { Vernacexpr.notations } -> notations ) fixl in
+ interp_fixpoint ~cofix:true fixl, ntns
let do_cofixpoint_interactive ~scope ~poly l =
- let ntns, cofix = do_cofixpoint_common l in
+ let cofix, ntns = do_cofixpoint_common l in
let lemma = declare_fixpoint_interactive_generic ~scope ~poly cofix ntns in
if not (check_safe ()) then Feedback.feedback Feedback.AddedAxiom else ();
lemma
let do_cofixpoint ~scope ~poly l =
- let ntns, cofix = do_cofixpoint_common l in
+ let cofix, ntns = do_cofixpoint_common l in
declare_fixpoint_generic ~scope ~poly cofix ntns;
if not (check_safe ()) then Feedback.feedback Feedback.AddedAxiom else ()
diff --git a/vernac/comFixpoint.mli b/vernac/comFixpoint.mli
index 982d316605..4f8e9018de 100644
--- a/vernac/comFixpoint.mli
+++ b/vernac/comFixpoint.mli
@@ -10,7 +10,6 @@
open Names
open Constr
-open Constrexpr
open Vernacexpr
(** {6 Fixpoints and cofixpoints} *)
@@ -18,39 +17,35 @@ open Vernacexpr
(** Entry points for the vernacular commands Fixpoint and CoFixpoint *)
val do_fixpoint_interactive :
- scope:DeclareDef.locality -> poly:bool -> (fixpoint_expr * decl_notation list) list -> Lemmas.t
+ scope:DeclareDef.locality -> poly:bool -> fixpoint_expr list -> Lemmas.t
val do_fixpoint :
- scope:DeclareDef.locality -> poly:bool -> (fixpoint_expr * decl_notation list) list -> unit
+ scope:DeclareDef.locality -> poly:bool -> fixpoint_expr list -> unit
val do_cofixpoint_interactive :
- scope:DeclareDef.locality -> poly:bool -> (cofixpoint_expr * decl_notation list) list -> Lemmas.t
+ scope:DeclareDef.locality -> poly:bool -> cofixpoint_expr list -> Lemmas.t
val do_cofixpoint :
- scope:DeclareDef.locality -> poly:bool -> (cofixpoint_expr * decl_notation list) list -> unit
+ scope:DeclareDef.locality -> poly:bool -> cofixpoint_expr list -> unit
(************************************************************************)
(** Internal API *)
(************************************************************************)
-type structured_fixpoint_expr = {
- fix_name : Id.t;
- fix_univs : Constrexpr.universe_decl_expr option;
- fix_annot : lident option;
- fix_binders : local_binder_expr list;
- fix_body : constr_expr option;
- fix_type : constr_expr
-}
-
(** Typing global fixpoints and cofixpoint_expr *)
+val adjust_rec_order
+ : structonly:bool
+ -> Constrexpr.local_binder_expr list
+ -> Constrexpr.recursion_order_expr option
+ -> lident option
+
(** Exported for Program *)
val interp_recursive :
(* Misc arguments *)
program_mode:bool -> cofix:bool ->
(* Notations of the fixpoint / should that be folded in the previous argument? *)
- structured_fixpoint_expr list -> decl_notation list ->
-
+ lident option fix_expr_gen list ->
(* env / signature / univs / evar_map *)
(Environ.env * EConstr.named_context * UState.universe_decl * Evd.evar_map) *
(* names / defs / types *)
@@ -60,25 +55,13 @@ val interp_recursive :
(** Exported for Funind *)
-(** Extracting the semantical components out of the raw syntax of
- (co)fixpoints declarations *)
-
-val extract_fixpoint_components : structonly:bool ->
- (fixpoint_expr * decl_notation list) list ->
- structured_fixpoint_expr list * decl_notation list
+type recursive_preentry = Id.t list * Sorts.relevance list * constr option list * types list
-val extract_cofixpoint_components :
- (cofixpoint_expr * decl_notation list) list ->
- structured_fixpoint_expr list * decl_notation list
-
-type recursive_preentry =
- Id.t list * Sorts.relevance list * constr option list * types list
-
-val interp_fixpoint :
- cofix:bool ->
- structured_fixpoint_expr list -> decl_notation list ->
- recursive_preentry * UState.universe_decl * UState.t *
- (EConstr.rel_context * Impargs.manual_implicits * int option) list
+val interp_fixpoint
+ : cofix:bool
+ -> lident option fix_expr_gen list
+ -> recursive_preentry * UState.universe_decl * UState.t *
+ (EConstr.rel_context * Impargs.manual_implicits * int option) list
(** Very private function, do not use *)
val compute_possible_guardness_evidences :
diff --git a/vernac/comInductive.ml b/vernac/comInductive.ml
index 65db4401d9..664010c917 100644
--- a/vernac/comInductive.ml
+++ b/vernac/comInductive.ml
@@ -80,9 +80,6 @@ type structured_one_inductive_expr = {
ind_lc : (Id.t * constr_expr) list
}
-type structured_inductive_expr =
- local_binder_expr list * structured_one_inductive_expr list
-
let minductive_message = function
| [] -> user_err Pp.(str "No inductive definition.")
| [x] -> (Id.print x ++ str " is defined")
@@ -468,9 +465,6 @@ let interp_mutual_inductive_gen env0 ~template udecl (uparamsl,paramsl,indl) not
InferCumulativity.infer_inductive env_ar mind_ent
else mind_ent), Evd.universe_binders sigma, impls
-let interp_mutual_inductive ~template udecl (paramsl,indl) notations ~cumulative ~poly ~private_ind finite =
- interp_mutual_inductive_gen (Global.env()) ~template udecl ([],paramsl,indl) notations ~cumulative ~poly ~private_ind finite
-
(* Very syntactical equality *)
let eq_local_binders bl1 bl2 =
List.equal local_binder_eq bl1 bl2
diff --git a/vernac/comInductive.mli b/vernac/comInductive.mli
index 97f930c0a1..285be8cd51 100644
--- a/vernac/comInductive.mli
+++ b/vernac/comInductive.mli
@@ -10,7 +10,6 @@
open Names
open Entries
-open Libnames
open Vernacexpr
open Constrexpr
@@ -33,12 +32,20 @@ val do_mutual_inductive
-> Declarations.recursivity_kind
-> unit
+(** User-interface API *)
+
+(** Prepare a "match" template for a given inductive type.
+ For each branch of the match, we list the constructor name
+ followed by enough pattern variables.
+ [Not_found] is raised if the given string isn't the qualid of
+ a known inductive type. *)
+
+val make_cases : Names.inductive -> string list list
+
(************************************************************************)
-(** Internal API *)
+(** Internal API, exported for Record *)
(************************************************************************)
-(** Exported for Record and Funind *)
-
(** Registering a mutual inductive definition together with its
associated schemes *)
@@ -55,41 +62,3 @@ val should_auto_template : Id.t -> bool -> bool
(** [should_auto_template x b] is [true] when [b] is [true] and we
automatically use template polymorphism. [x] is the name of the
inductive under consideration. *)
-
-(** Exported for Funind *)
-
-(** Extracting the semantical components out of the raw syntax of mutual
- inductive declarations *)
-
-type structured_one_inductive_expr = {
- ind_name : Id.t;
- ind_arity : constr_expr;
- ind_lc : (Id.t * constr_expr) list
-}
-
-type structured_inductive_expr =
- local_binder_expr list * structured_one_inductive_expr list
-
-val extract_mutual_inductive_declaration_components :
- (one_inductive_expr * decl_notation list) list ->
- structured_inductive_expr * (*coercions:*) qualid list * decl_notation list
-
-(** Typing mutual inductive definitions *)
-val interp_mutual_inductive
- : template:bool option
- -> universe_decl_expr option
- -> structured_inductive_expr
- -> decl_notation list
- -> cumulative:bool
- -> poly:bool
- -> private_ind:bool
- -> Declarations.recursivity_kind
- -> mutual_inductive_entry * UnivNames.universe_binders * one_inductive_impls list
-
-(** Prepare a "match" template for a given inductive type.
- For each branch of the match, we list the constructor name
- followed by enough pattern variables.
- [Not_found] is raised if the given string isn't the qualid of
- a known inductive type. *)
-
-val make_cases : Names.inductive -> string list list
diff --git a/vernac/comProgramFixpoint.ml b/vernac/comProgramFixpoint.ml
index 0fd65ad9b4..c6e68effd7 100644
--- a/vernac/comProgramFixpoint.ml
+++ b/vernac/comProgramFixpoint.ml
@@ -244,10 +244,10 @@ let collect_evars_of_term evd c ty =
Evar.Set.fold (fun ev acc -> Evd.add acc ev (Evd.find_undefined evd ev))
evars (Evd.from_ctx (Evd.evar_universe_context evd))
-let do_program_recursive ~scope ~poly fixkind fixl ntns =
+let do_program_recursive ~scope ~poly fixkind fixl =
let cofix = fixkind = DeclareObl.IsCoFixpoint in
let (env, rec_sign, pl, evd), fix, info =
- interp_recursive ~cofix ~program_mode:true fixl ntns
+ interp_recursive ~cofix ~program_mode:true fixl
in
(* Program-specific code *)
(* Get the interesting evars, those that were not instantiated *)
@@ -289,16 +289,19 @@ let do_program_recursive ~scope ~poly fixkind fixl ntns =
| DeclareObl.IsFixpoint _ -> Decls.Fixpoint
| DeclareObl.IsCoFixpoint -> Decls.CoFixpoint
in
+ let ntns = List.map_append (fun { Vernacexpr.notations } -> notations ) fixl in
Obligations.add_mutual_definitions defs ~poly ~scope ~kind ~univdecl:pl ctx ntns fixkind
let do_program_fixpoint ~scope ~poly l =
- let g = List.map (fun ((_,wf,_,_,_),_) -> wf) l in
+ let g = List.map (fun { Vernacexpr.rec_order } -> rec_order) l in
match g, l with
- | [Some { CAst.v = CWfRec (n,r) }], [((({CAst.v=id},pl),_,bl,typ,def),ntn)] ->
+ | [Some { CAst.v = CWfRec (n,r) }],
+ [ Vernacexpr.{fname={CAst.v=id}; univs; binders; rtype; body_def; notations} ] ->
let recarg = mkIdentC n.CAst.v in
- build_wellfounded (id, pl, bl, typ, out_def def) poly r recarg ntn
+ build_wellfounded (id, univs, binders, rtype, out_def body_def) poly r recarg notations
- | [Some { CAst.v = CMeasureRec (n, m, r) }], [((({CAst.v=id},pl),_,bl,typ,def),ntn)] ->
+ | [Some { CAst.v = CMeasureRec (n, m, r) }],
+ [Vernacexpr.{fname={CAst.v=id}; univs; binders; rtype; body_def; notations }] ->
(* We resolve here a clash between the syntax of Program Fixpoint and the one of funind *)
let r = match n, r with
| Some id, None ->
@@ -308,25 +311,20 @@ let do_program_fixpoint ~scope ~poly l =
user_err Pp.(str"Measure takes only two arguments in Program Fixpoint.")
| _, _ -> r
in
- build_wellfounded (id, pl, bl, typ, out_def def) poly
- (Option.default (CAst.make @@ CRef (lt_ref,None)) r) m ntn
+ build_wellfounded (id, univs, binders, rtype, out_def body_def) poly
+ (Option.default (CAst.make @@ CRef (lt_ref,None)) r) m notations
| _, _ when List.for_all (fun ro -> match ro with None | Some { CAst.v = CStructRec _} -> true | _ -> false) g ->
- let fixl,ntns = extract_fixpoint_components ~structonly:true l in
- let fixkind = DeclareObl.IsFixpoint (List.map (fun d -> d.fix_annot) fixl) in
- do_program_recursive ~scope ~poly fixkind fixl ntns
+ let annots = List.map (fun fix ->
+ Vernacexpr.(ComFixpoint.adjust_rec_order ~structonly:true fix.binders fix.rec_order)) l in
+ let fixkind = DeclareObl.IsFixpoint annots in
+ let l = List.map2 (fun fix rec_order -> { fix with Vernacexpr.rec_order }) l annots in
+ do_program_recursive ~scope ~poly fixkind l
| _, _ ->
user_err ~hdr:"do_program_fixpoint"
(str "Well-founded fixpoints not allowed in mutually recursive blocks")
-let extract_cofixpoint_components l =
- let fixl, ntnl = List.split l in
- List.map (fun (({CAst.v=id},pl),bl,typ,def) ->
- {fix_name = id; fix_annot = None; fix_univs = pl;
- fix_binders = bl; fix_body = def; fix_type = typ}) fixl,
- List.flatten ntnl
-
let check_safe () =
let open Declarations in
let flags = Environ.typing_flags (Global.env ()) in
@@ -336,7 +334,7 @@ let do_fixpoint ~scope ~poly l =
do_program_fixpoint ~scope ~poly l;
if not (check_safe ()) then Feedback.feedback Feedback.AddedAxiom else ()
-let do_cofixpoint ~scope ~poly l =
- let fixl,ntns = extract_cofixpoint_components l in
- do_program_recursive ~scope ~poly DeclareObl.IsCoFixpoint fixl ntns;
+let do_cofixpoint ~scope ~poly fixl =
+ let fixl = List.map (fun fix -> { fix with Vernacexpr.rec_order = None }) fixl in
+ do_program_recursive ~scope ~poly DeclareObl.IsCoFixpoint fixl;
if not (check_safe ()) then Feedback.feedback Feedback.AddedAxiom else ()
diff --git a/vernac/comProgramFixpoint.mli b/vernac/comProgramFixpoint.mli
index f25abb95c3..a851e4dff5 100644
--- a/vernac/comProgramFixpoint.mli
+++ b/vernac/comProgramFixpoint.mli
@@ -1,11 +1,21 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
open Vernacexpr
(** Special Fixpoint handling when command is activated. *)
val do_fixpoint :
(* When [false], assume guarded. *)
- scope:DeclareDef.locality -> poly:bool -> (fixpoint_expr * decl_notation list) list -> unit
+ scope:DeclareDef.locality -> poly:bool -> fixpoint_expr list -> unit
val do_cofixpoint :
(* When [false], assume guarded. *)
- scope:DeclareDef.locality -> poly:bool -> (cofixpoint_expr * decl_notation list) list -> unit
+ scope:DeclareDef.locality -> poly:bool -> cofixpoint_expr list -> unit
diff --git a/vernac/declareObl.ml b/vernac/declareObl.ml
index 0c45ff11d7..c5cbb095ca 100644
--- a/vernac/declareObl.ml
+++ b/vernac/declareObl.ml
@@ -29,9 +29,6 @@ type obligation =
type obligations = obligation array * int
-type notations =
- (lstring * Constrexpr.constr_expr * Notation_term.scope_name option) list
-
type fixpoint_kind =
| IsFixpoint of lident option list
| IsCoFixpoint
@@ -46,7 +43,7 @@ type program_info =
; prg_deps : Id.t list
; prg_fixkind : fixpoint_kind option
; prg_implicits : Impargs.manual_implicits
- ; prg_notations : notations
+ ; prg_notations : Vernacexpr.decl_notation list
; prg_poly : bool
; prg_scope : DeclareDef.locality
; prg_kind : Decls.definition_object_kind
diff --git a/vernac/declareObl.mli b/vernac/declareObl.mli
index a8dd5040cb..2a8fa734b3 100644
--- a/vernac/declareObl.mli
+++ b/vernac/declareObl.mli
@@ -24,9 +24,6 @@ type obligation =
type obligations = obligation array * int
-type notations =
- (lstring * Constrexpr.constr_expr * Notation_term.scope_name option) list
-
type fixpoint_kind =
| IsFixpoint of lident option list
| IsCoFixpoint
@@ -41,7 +38,7 @@ type program_info =
; prg_deps : Id.t list
; prg_fixkind : fixpoint_kind option
; prg_implicits : Impargs.manual_implicits
- ; prg_notations : notations
+ ; prg_notations : Vernacexpr.decl_notation list
; prg_poly : bool
; prg_scope : DeclareDef.locality
; prg_kind : Decls.definition_object_kind
diff --git a/vernac/g_vernac.mlg b/vernac/g_vernac.mlg
index 2b475f1ef9..ad5d98669d 100644
--- a/vernac/g_vernac.mlg
+++ b/vernac/g_vernac.mlg
@@ -402,16 +402,19 @@ GRAMMAR EXTEND Gram
;
(* (co)-fixpoints *)
rec_definition:
- [ [ id = ident_decl;
+ [ [ id_decl = ident_decl;
bl = binders_fixannot;
- ty = type_cstr;
- def = OPT [":="; def = lconstr -> { def } ]; ntn = decl_notation ->
- { let bl, annot = bl in ((id,annot,bl,ty,def),ntn) } ] ]
+ rtype = type_cstr;
+ body_def = OPT [":="; def = lconstr -> { def } ]; notations = decl_notation ->
+ { let binders, rec_order = bl in
+ {fname = fst id_decl; univs = snd id_decl; rec_order; binders; rtype; body_def; notations}
+ } ] ]
;
corec_definition:
- [ [ id = ident_decl; bl = binders; ty = type_cstr;
- def = OPT [":="; def = lconstr -> { def }]; ntn = decl_notation ->
- { ((id,bl,ty,def),ntn) } ] ]
+ [ [ id_decl = ident_decl; binders = binders; rtype = type_cstr;
+ body_def = OPT [":="; def = lconstr -> { def }]; notations = decl_notation ->
+ { {fname = fst id_decl; univs = snd id_decl; rec_order = (); binders; rtype; body_def; notations}
+ } ]]
;
type_cstr:
[ [ ":"; c=lconstr -> { c }
@@ -1138,7 +1141,6 @@ GRAMMAR EXTEND Gram
| IDENT "Reset"; id = identref -> { VernacResetName id }
| IDENT "Back" -> { VernacBack 1 }
| IDENT "Back"; n = natural -> { VernacBack n }
- | IDENT "BackTo"; n = natural -> { VernacBackTo n }
(* Tactic Debugger *)
| IDENT "Debug"; IDENT "On" ->
diff --git a/vernac/lemmas.ml b/vernac/lemmas.ml
index ecea9ae4c9..6a754a0cde 100644
--- a/vernac/lemmas.ml
+++ b/vernac/lemmas.ml
@@ -113,46 +113,6 @@ let by tac pf =
(* Creating a lemma-like constant *)
(************************************************************************)
-(* Support for mutually proved theorems *)
-
-let retrieve_first_recthm uctx = function
- | GlobRef.VarRef id ->
- NamedDecl.get_value (Global.lookup_named id),
- Decls.variable_opacity id
- | GlobRef.ConstRef cst ->
- let cb = Global.lookup_constant cst in
- (* we get the right order somehow but surely it could be enforced in a better way *)
- let uctx = UState.context uctx in
- let inst = Univ.UContext.instance uctx in
- let map (c, _, _) = Vars.subst_instance_constr inst c in
- (Option.map map (Global.body_of_constant_body Library.indirect_accessor cb), is_opaque cb)
- | _ -> assert false
-
-let adjust_guardness_conditions const = function
- | [] -> const (* Not a recursive statement *)
- | possible_indexes ->
- (* Try all combinations... not optimal *)
- let env = Global.env() in
- let open Proof_global in
- { const with proof_entry_body =
- Future.chain const.proof_entry_body
- (fun ((body, ctx), eff) ->
- match Constr.kind body with
- | Fix ((nv,0),(_,_,fixdefs as fixdecls)) ->
-(* let possible_indexes =
- List.map2 (fun i c -> match i with Some i -> i | None ->
- List.interval 0 (List.length ((lam_assum c))))
- lemma_guard (Array.to_list fixdefs) in
-*)
- let env = Safe_typing.push_private_constants env eff.Evd.seff_private in
- let indexes =
- search_guard env
- possible_indexes fixdecls in
- (mkFix ((indexes,0),fixdecls), ctx), eff
- | _ -> (body, ctx), eff) }
-
-let default_thm_id = Id.of_string "Unnamed_thm"
-
let check_name_freshness locality {CAst.loc;v=id} : unit =
(* We check existence here: it's a bit late at Qed time *)
if Nametab.exists_cci (Lib.make_path id) || is_section_variable id ||
@@ -160,52 +120,6 @@ let check_name_freshness locality {CAst.loc;v=id} : unit =
then
user_err ?loc (Id.print id ++ str " already exists.")
-let save_remaining_recthms env sigma ~poly ~scope norm univs body opaq i { Recthm.name; typ; impargs } =
- let t_i = norm typ in
- let kind = Decls.(IsAssumption Conjectural) in
- match body with
- | None ->
- let open DeclareDef in
- (match scope with
- | Discharge ->
- let impl = false in (* copy values from Vernacentries *)
- let univs = match univs with
- | Polymorphic_entry (_, univs) ->
- (* What is going on here? *)
- Univ.ContextSet.of_context univs
- | Monomorphic_entry univs -> univs
- in
- let c = Declare.SectionLocalAssum {typ=t_i; univs; poly; impl} in
- let () = Declare.declare_variable ~name ~kind c in
- (GlobRef.VarRef name,impargs)
- | Global local ->
- let kind = Decls.(IsAssumption Conjectural) in
- let decl = Declare.ParameterEntry (None,(t_i,univs),None) in
- let kn = Declare.declare_constant ~name ~local ~kind decl in
- (GlobRef.ConstRef kn,impargs))
- | Some body ->
- let body = norm body in
- let rec body_i t = match Constr.kind t with
- | Fix ((nv,0),decls) -> mkFix ((nv,i),decls)
- | CoFix (0,decls) -> mkCoFix (i,decls)
- | LetIn(na,t1,ty,t2) -> mkLetIn (na,t1,ty, body_i t2)
- | Lambda(na,ty,t) -> mkLambda(na,ty,body_i t)
- | App (t, args) -> mkApp (body_i t, args)
- | _ ->
- anomaly Pp.(str "Not a proof by induction: " ++ Printer.pr_constr_env env sigma body ++ str ".") in
- let body_i = body_i body in
- let open DeclareDef in
- match scope with
- | Discharge ->
- let const = Declare.definition_entry ~types:t_i ~opaque:opaq ~univs body_i in
- let c = Declare.SectionLocalDef const in
- let () = Declare.declare_variable ~name ~kind c in
- (GlobRef.VarRef name,impargs)
- | Global local ->
- let const = Declare.definition_entry ~types:t_i ~univs ~opaque:opaq body_i in
- let kn = Declare.declare_constant ~name ~local ~kind (Declare.DefinitionEntry const) in
- (GlobRef.ConstRef kn,impargs)
-
let initialize_named_context_for_proof () =
let sign = Global.named_context () in
List.fold_right
@@ -315,9 +229,73 @@ let start_lemma_com ~program_mode ~poly ~scope ~kind ?inference_hook ?hook thms
start_lemma_with_initialization ?hook ~poly ~scope ~kind evd ~udecl recguard thms snl
(************************************************************************)
-(* Commom constant saving path *)
+(* Common constant saving path, for both Qed and Admitted *)
(************************************************************************)
+(* Helper for process_recthms *)
+let retrieve_first_recthm uctx = function
+ | GlobRef.VarRef id ->
+ NamedDecl.get_value (Global.lookup_named id),
+ Decls.variable_opacity id
+ | GlobRef.ConstRef cst ->
+ let cb = Global.lookup_constant cst in
+ (* we get the right order somehow but surely it could be enforced in a better way *)
+ let uctx = UState.context uctx in
+ let inst = Univ.UContext.instance uctx in
+ let map (c, _, _) = Vars.subst_instance_constr inst c in
+ (Option.map map (Global.body_of_constant_body Library.indirect_accessor cb), is_opaque cb)
+ | _ -> assert false
+
+(* Helper for process_recthms *)
+let save_remaining_recthms env sigma ~poly ~scope ~udecl uctx body opaq i { Recthm.name; typ; impargs } =
+ let norm c = EConstr.to_constr (Evd.from_ctx uctx) c in
+ let body = Option.map EConstr.of_constr body in
+ let univs = UState.check_univ_decl ~poly uctx udecl in
+ let t_i = norm typ in
+ let kind = Decls.(IsAssumption Conjectural) in
+ match body with
+ | None ->
+ let open DeclareDef in
+ (match scope with
+ | Discharge ->
+ let impl = false in (* copy values from Vernacentries *)
+ let univs = match univs with
+ | Polymorphic_entry (_, univs) ->
+ (* What is going on here? *)
+ Univ.ContextSet.of_context univs
+ | Monomorphic_entry univs -> univs
+ in
+ let c = Declare.SectionLocalAssum {typ=t_i; univs; poly; impl} in
+ let () = Declare.declare_variable ~name ~kind c in
+ GlobRef.VarRef name, impargs
+ | Global local ->
+ let kind = Decls.(IsAssumption Conjectural) in
+ let decl = Declare.ParameterEntry (None,(t_i,univs),None) in
+ let kn = Declare.declare_constant ~name ~local ~kind decl in
+ GlobRef.ConstRef kn, impargs)
+ | Some body ->
+ let body = norm body in
+ let rec body_i t = match Constr.kind t with
+ | Fix ((nv,0),decls) -> mkFix ((nv,i),decls)
+ | CoFix (0,decls) -> mkCoFix (i,decls)
+ | LetIn(na,t1,ty,t2) -> mkLetIn (na,t1,ty, body_i t2)
+ | Lambda(na,ty,t) -> mkLambda(na,ty,body_i t)
+ | App (t, args) -> mkApp (body_i t, args)
+ | _ ->
+ anomaly Pp.(str "Not a proof by induction: " ++ Printer.pr_constr_env env sigma body ++ str ".") in
+ let body_i = body_i body in
+ let open DeclareDef in
+ match scope with
+ | Discharge ->
+ let const = Declare.definition_entry ~types:t_i ~opaque:opaq ~univs body_i in
+ let c = Declare.SectionLocalDef const in
+ let () = Declare.declare_variable ~name ~kind c in
+ GlobRef.VarRef name, impargs
+ | Global local ->
+ let const = Declare.definition_entry ~types:t_i ~univs ~opaque:opaq body_i in
+ let kn = Declare.declare_constant ~name ~local ~kind (Declare.DefinitionEntry const) in
+ GlobRef.ConstRef kn, impargs
+
(* This declares implicits and calls the hooks for all the theorems,
including the main one *)
let process_recthms ?fix_exn ?hook env sigma uctx ~udecl ~poly ~scope dref imps other_thms =
@@ -325,10 +303,7 @@ let process_recthms ?fix_exn ?hook env sigma uctx ~udecl ~poly ~scope dref imps
if List.is_empty other_thms then [] else
(* there are several theorems defined mutually *)
let body,opaq = retrieve_first_recthm uctx dref in
- let norm c = EConstr.to_constr (Evd.from_ctx uctx) c in
- let body = Option.map EConstr.of_constr body in
- let uctx = UState.check_univ_decl ~poly uctx udecl in
- List.map_i (save_remaining_recthms env sigma ~poly ~scope norm uctx body opaq) 1 other_thms in
+ List.map_i (save_remaining_recthms env sigma ~poly ~scope ~udecl uctx body opaq) 1 other_thms in
let thms_data = (dref,imps)::other_thms_data in
List.iter (fun (dref,imps) ->
maybe_declare_manual_implicits false dref imps;
@@ -395,10 +370,33 @@ let save_lemma_admitted ~(lemma : t) : unit =
(* Saving a lemma-like constant *)
(************************************************************************)
+let default_thm_id = Id.of_string "Unnamed_thm"
+
let check_anonymity id save_ident =
if not (String.equal (atompart_of_id id) (Id.to_string (default_thm_id))) then
user_err Pp.(str "This command can only be used for unnamed theorem.")
+(* Support for mutually proved theorems *)
+
+(* Helper for finish_proved *)
+let adjust_guardness_conditions const = function
+ | [] -> const (* Not a recursive statement *)
+ | possible_indexes ->
+ (* Try all combinations... not optimal *)
+ let env = Global.env() in
+ let open Proof_global in
+ { const with
+ proof_entry_body =
+ Future.chain const.proof_entry_body
+ (fun ((body, ctx), eff) ->
+ match Constr.kind body with
+ | Fix ((nv,0),(_,_,fixdefs as fixdecls)) ->
+ let env = Safe_typing.push_private_constants env eff.Evd.seff_private in
+ let indexes = search_guard env possible_indexes fixdecls in
+ (mkFix ((indexes,0),fixdecls), ctx), eff
+ | _ -> (body, ctx), eff)
+ }
+
let finish_proved env sigma idopt po info =
let open Proof_global in
let { Info.hook; compute_guard; impargs; other_thms; scope; kind } = info in
diff --git a/vernac/obligations.mli b/vernac/obligations.mli
index f97bc784c3..2a0d0aba97 100644
--- a/vernac/obligations.mli
+++ b/vernac/obligations.mli
@@ -18,27 +18,33 @@ val check_evars : env -> evar_map -> unit
val evar_dependencies : evar_map -> Evar.t -> Evar.Set.t
val sort_dependencies : (Evar.t * evar_info * Evar.Set.t) list -> (Evar.t * evar_info * Evar.Set.t) list
-(* env, id, evars, number of function prototypes to try to clear from
- evars contexts, object and type *)
-val eterm_obligations : env -> Id.t -> evar_map -> int ->
- ?status:Evar_kinds.obligation_definition_status -> EConstr.constr -> EConstr.types ->
- (Id.t * types * Evar_kinds.t Loc.located *
- (bool * Evar_kinds.obligation_definition_status) * Int.Set.t *
- unit Proofview.tactic option) array
- (* Existential key, obl. name, type as product,
- location of the original evar, associated tactic,
- status and dependencies as indexes into the array *)
- * ((Evar.t * Id.t) list * ((Id.t -> EConstr.constr) -> EConstr.constr -> constr)) *
- constr * types
- (* Translations from existential identifiers to obligation identifiers
- and for terms with existentials to closed terms, given a
- translation from obligation identifiers to constrs, new term, new type *)
-
+(* ident, type, location, (opaque or transparent, expand or define), dependencies, tactic to solve it *)
type obligation_info =
(Id.t * types * Evar_kinds.t Loc.located *
- (bool * Evar_kinds.obligation_definition_status) * Int.Set.t * unit Proofview.tactic option) array
- (* ident, type, location, (opaque or transparent, expand or define),
- dependencies, tactic to solve it *)
+ (bool * Evar_kinds.obligation_definition_status) * Int.Set.t * unit Proofview.tactic option) array
+
+(* env, id, evars, number of function prototypes to try to clear from
+ evars contexts, object and type *)
+val eterm_obligations
+ : env
+ -> Id.t
+ -> evar_map
+ -> int
+ -> ?status:Evar_kinds.obligation_definition_status
+ -> EConstr.constr
+ -> EConstr.types
+ -> obligation_info *
+
+ (* Existential key, obl. name, type as product, location of the
+ original evar, associated tactic, status and dependencies as
+ indexes into the array *)
+ ((Evar.t * Id.t) list * ((Id.t -> EConstr.constr) -> EConstr.constr -> constr)) *
+
+ (* Translations from existential identifiers to obligation
+ identifiers and for terms with existentials to closed terms,
+ given a translation from obligation identifiers to constrs,
+ new term, new type *)
+ constr * types
val default_tactic : unit Proofview.tactic ref
@@ -69,7 +75,7 @@ val add_mutual_definitions
-> ?kind:Decls.definition_object_kind
-> ?reduce:(constr -> constr)
-> ?hook:DeclareDef.Hook.t -> ?opaque:bool
- -> DeclareObl.notations
+ -> Vernacexpr.decl_notation list
-> DeclareObl.fixpoint_kind -> unit
val obligation
diff --git a/vernac/ppvernac.ml b/vernac/ppvernac.ml
index e676fe94db..0eb0b1b6f6 100644
--- a/vernac/ppvernac.ml
+++ b/vernac/ppvernac.ml
@@ -419,15 +419,15 @@ let string_of_theorem_kind = let open Decls in function
| l -> spc() ++
hov 1 (str"(" ++ prlist_with_sep sep_v2 pr_syntax_modifier l ++ str")")
- let pr_rec_definition ((iddecl,ro,bl,type_,def),ntn) =
+ let pr_rec_definition { fname; univs; rec_order; binders; rtype; body_def; notations } =
let env = Global.env () in
let sigma = Evd.from_env env in
let pr_pure_lconstr c = Flags.without_option Flags.beautify pr_lconstr c in
- let annot = pr_guard_annot (pr_lconstr_expr env sigma) bl ro in
- pr_ident_decl iddecl ++ pr_binders_arg bl ++ annot
- ++ pr_type_option (fun c -> spc() ++ pr_lconstr_expr env sigma c) type_
- ++ pr_opt (fun def -> str":=" ++ brk(1,2) ++ pr_pure_lconstr env sigma def) def
- ++ prlist (pr_decl_notation @@ pr_constr env sigma) ntn
+ let annot = pr_guard_annot (pr_lconstr_expr env sigma) binders rec_order in
+ pr_ident_decl (fname,univs) ++ pr_binders_arg binders ++ annot
+ ++ pr_type_option (fun c -> spc() ++ pr_lconstr_expr env sigma c) rtype
+ ++ pr_opt (fun def -> str":=" ++ brk(1,2) ++ pr_pure_lconstr env sigma def) body_def
+ ++ prlist (pr_decl_notation @@ pr_constr env sigma) notations
let pr_statement head (idpl,(bl,c)) =
let env = Global.env () in
@@ -669,8 +669,6 @@ let string_of_definition_object_kind = let open Decls in function
return (
if Int.equal i 1 then keyword "Back" else keyword "Back" ++ pr_intarg i
)
- | VernacBackTo i ->
- return (keyword "BackTo" ++ pr_intarg i)
(* State management *)
| VernacWriteState s ->
@@ -858,11 +856,11 @@ let string_of_definition_object_kind = let open Decls in function
| DoDischarge -> keyword "Let" ++ spc ()
| NoDischarge -> str ""
in
- let pr_onecorec ((iddecl,bl,c,def),ntn) =
- pr_ident_decl iddecl ++ spc() ++ pr_binders env sigma bl ++ spc() ++ str":" ++
- spc() ++ pr_lconstr_expr env sigma c ++
- pr_opt (fun def -> str":=" ++ brk(1,2) ++ pr_lconstr env sigma def) def ++
- prlist (pr_decl_notation @@ pr_constr env sigma) ntn
+ let pr_onecorec {fname; univs; binders; rtype; body_def; notations } =
+ pr_ident_decl (fname,univs) ++ spc() ++ pr_binders env sigma binders ++ spc() ++ str":" ++
+ spc() ++ pr_lconstr_expr env sigma rtype ++
+ pr_opt (fun def -> str":=" ++ brk(1,2) ++ pr_lconstr env sigma def) body_def ++
+ prlist (pr_decl_notation @@ pr_constr env sigma) notations
in
return (
hov 0 (local ++ keyword "CoFixpoint" ++ spc() ++
diff --git a/vernac/ppvernac.mli b/vernac/ppvernac.mli
index d4d49a09a3..9ade5afb87 100644
--- a/vernac/ppvernac.mli
+++ b/vernac/ppvernac.mli
@@ -14,7 +14,7 @@
val pr_set_entry_type : ('a -> Pp.t) -> 'a Extend.constr_entry_key_gen -> Pp.t
(** Prints a fixpoint body *)
-val pr_rec_definition : (Vernacexpr.fixpoint_expr * Vernacexpr.decl_notation list) -> Pp.t
+val pr_rec_definition : Vernacexpr.fixpoint_expr -> Pp.t
(** Prints a vernac expression without dot *)
val pr_vernac_expr : Vernacexpr.vernac_expr -> Pp.t
diff --git a/vernac/pvernac.mli b/vernac/pvernac.mli
index c9eb979a90..3bd252ecef 100644
--- a/vernac/pvernac.mli
+++ b/vernac/pvernac.mli
@@ -23,7 +23,7 @@ module Vernac_ :
val command : vernac_expr Entry.t
val syntax : vernac_expr Entry.t
val vernac_control : vernac_control Entry.t
- val rec_definition : (fixpoint_expr * decl_notation list) Entry.t
+ val rec_definition : fixpoint_expr Entry.t
val noedit_mode : vernac_expr Entry.t
val command_entry : vernac_expr Entry.t
val main_entry : vernac_control option Entry.t
diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml
index 68b7462bde..9af8d8b67c 100644
--- a/vernac/vernacentries.ml
+++ b/vernac/vernacentries.ml
@@ -772,7 +772,7 @@ let vernac_inductive ~atts cum lo finite indl =
let vernac_fixpoint_common ~atts discharge l =
if Dumpglob.dump () then
- List.iter (fun (((lid,_), _, _, _, _), _) -> Dumpglob.dump_definition lid false "def") l;
+ List.iter (fun { fname } -> Dumpglob.dump_definition fname false "def") l;
enforce_locality_exp atts.DefAttributes.locality discharge
let vernac_fixpoint_interactive ~atts discharge l =
@@ -793,7 +793,7 @@ let vernac_fixpoint ~atts discharge l =
let vernac_cofixpoint_common ~atts discharge l =
if Dumpglob.dump () then
- List.iter (fun (((lid,_), _, _, _), _) -> Dumpglob.dump_definition lid false "def") l;
+ List.iter (fun { fname } -> Dumpglob.dump_definition fname false "def") l;
enforce_locality_exp atts.DefAttributes.locality discharge
let vernac_cofixpoint_interactive ~atts discharge l =
@@ -963,9 +963,10 @@ let vernac_include l =
(* Sections *)
-let vernac_begin_section ({v=id} as lid) =
+let vernac_begin_section ~poly ({v=id} as lid) =
Dumpglob.dump_definition lid true "sec";
- Lib.open_section id
+ Lib.open_section ~poly id;
+ set_bool_option_value_gen ~locality:OptLocal ["Universe"; "Polymorphism"] poly
let vernac_end_section {CAst.loc} =
Dumpglob.dump_reference ?loc
@@ -2297,7 +2298,6 @@ let rec translate_vernac ~atts v = let open Vernacextend in match v with
| VernacResetName _
| VernacResetInitial
| VernacBack _
- | VernacBackTo _
| VernacAbort _ ->
anomaly (str "type_vernac")
(* Syntax *)
@@ -2357,7 +2357,7 @@ let rec translate_vernac ~atts v = let open Vernacextend in match v with
| VernacInductive (cum, priv, finite, l) ->
VtDefault(fun () -> vernac_inductive ~atts cum priv finite l)
| VernacFixpoint (discharge, l) ->
- let opens = List.exists (fun ((_,_,_,_,p),_) -> Option.is_empty p) l in
+ let opens = List.exists (fun { body_def } -> Option.is_empty body_def) l in
if opens then
VtOpenProof (fun () ->
with_def_attributes ~atts vernac_fixpoint_interactive discharge l)
@@ -2365,7 +2365,7 @@ let rec translate_vernac ~atts v = let open Vernacextend in match v with
VtDefault (fun () ->
with_def_attributes ~atts vernac_fixpoint discharge l)
| VernacCoFixpoint (discharge, l) ->
- let opens = List.exists (fun ((_,_,_,p),_) -> Option.is_empty p) l in
+ let opens = List.exists (fun { body_def } -> Option.is_empty body_def) l in
if opens then
VtOpenProof(fun () -> with_def_attributes ~atts vernac_cofixpoint_interactive discharge l)
else
@@ -2396,8 +2396,7 @@ let rec translate_vernac ~atts v = let open Vernacextend in match v with
(* Gallina extensions *)
| VernacBeginSection lid ->
VtNoProof(fun () ->
- unsupported_attributes atts;
- vernac_begin_section lid)
+ vernac_begin_section ~poly:(only_polymorphism atts) lid)
| VernacEndSegment lid ->
VtNoProof(fun () ->
unsupported_attributes atts;
@@ -2630,7 +2629,6 @@ and interp_expr ?proof ~atts ~st c =
| VernacResetName _ -> anomaly (str "VernacResetName not handled by Stm.")
| VernacResetInitial -> anomaly (str "VernacResetInitial not handled by Stm.")
| VernacBack _ -> anomaly (str "VernacBack not handled by Stm.")
- | VernacBackTo _ -> anomaly (str "VernacBackTo not handled by Stm.")
(* This one is possible to handle here *)
| VernacAbort id -> CErrors.user_err (str "Abort cannot be used through the Load command")
diff --git a/vernac/vernacexpr.ml b/vernac/vernacexpr.ml
index ee1f839b8d..0968632c2d 100644
--- a/vernac/vernacexpr.ml
+++ b/vernac/vernacexpr.ml
@@ -128,18 +128,26 @@ type definition_expr =
| DefineBody of local_binder_expr list * Genredexpr.raw_red_expr option * constr_expr
* constr_expr option
-type fixpoint_expr =
- ident_decl * recursion_order_expr option * local_binder_expr list * constr_expr * constr_expr option
+type decl_notation = lstring * constr_expr * scope_name option
+
+type 'a fix_expr_gen =
+ { fname : lident
+ ; univs : universe_decl_expr option
+ ; rec_order : 'a
+ ; binders : local_binder_expr list
+ ; rtype : constr_expr
+ ; body_def : constr_expr option
+ ; notations : decl_notation list
+ }
-type cofixpoint_expr =
- ident_decl * local_binder_expr list * constr_expr * constr_expr option
+type fixpoint_expr = recursion_order_expr option fix_expr_gen
+type cofixpoint_expr = unit fix_expr_gen
type local_decl_expr =
| AssumExpr of lname * constr_expr
| DefExpr of lname * constr_expr * constr_expr option
type inductive_kind = Inductive_kw | CoInductive | Variant | Record | Structure | Class of bool (* true = definitional, false = inductive *)
-type decl_notation = lstring * constr_expr * scope_name option
type simple_binder = lident list * constr_expr
type class_binder = lident * constr_expr list
type 'a with_coercion = coercion_flag * 'a
@@ -283,8 +291,8 @@ type nonrec vernac_expr =
| VernacAssumption of (discharge * Decls.assumption_object_kind) *
Declaremods.inline * (ident_decl list * constr_expr) with_coercion list
| VernacInductive of vernac_cumulative option * bool (* private *) * inductive_flag * (inductive_expr * decl_notation list) list
- | VernacFixpoint of discharge * (fixpoint_expr * decl_notation list) list
- | VernacCoFixpoint of discharge * (cofixpoint_expr * decl_notation list) list
+ | VernacFixpoint of discharge * fixpoint_expr list
+ | VernacCoFixpoint of discharge * cofixpoint_expr list
| VernacScheme of (lident option * scheme) list
| VernacCombinedScheme of lident * lident list
| VernacUniverse of lident list
@@ -351,7 +359,6 @@ type nonrec vernac_expr =
| VernacResetName of lident
| VernacResetInitial
| VernacBack of int
- | VernacBackTo of int
(* Commands *)
| VernacCreateHintDb of string * bool
diff --git a/vernac/vernacprop.ml b/vernac/vernacprop.ml
index 1dd8164ebc..747998c6cc 100644
--- a/vernac/vernacprop.ml
+++ b/vernac/vernacprop.ml
@@ -32,7 +32,6 @@ let rec has_Fail v = v |> CAst.with_val (function
let is_navigation_vernac_expr = function
| VernacResetInitial
| VernacResetName _
- | VernacBackTo _
| VernacBack _ -> true
| _ -> false