diff options
87 files changed, 4488 insertions, 494 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 04b75bfdf6..03e001f4a9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -27,8 +27,9 @@ variables: #COQIDE_PACKAGES_32BIT: "libgtk2.0-dev:i386 libgtksourceview2.0-dev:i386" COQIDE_OPAM: "lablgtk-extras" COQIDE_OPAM_BE: "lablgtk.2.18.6 lablgtk-extras.1.6" - COQDOC_PACKAGES: "texlive-latex-base texlive-latex-recommended texlive-latex-extra texlive-math-extra texlive-fonts-recommended texlive-fonts-extra latex-xcolor ghostscript transfig imagemagick tipa" + COQDOC_PACKAGES: "texlive-latex-base texlive-latex-recommended texlive-latex-extra texlive-math-extra texlive-fonts-recommended texlive-fonts-extra latex-xcolor ghostscript transfig imagemagick tipa python3-pip" COQDOC_OPAM: "hevea" + SPHINX_PACKAGES: "bs4 sphinx sphinx_rtd_theme pexpect antlr4-python3-runtime sphinxcontrib-bibtex" before_script: @@ -36,6 +37,7 @@ before_script: - printenv # - if [ "$COMPILER" = "$COMPILER_32BIT" ]; then sudo dpkg --add-architecture i386; fi - if [ -n "${EXTRA_PACKAGES}" ]; then sudo apt-get update -qq && sudo apt-get install -y -qq ${EXTRA_PACKAGES}; fi + - if [ -n "${PIP_PACKAGES}" ]; then sudo pip3 install ${PIP_PACKAGES}; fi # setup cache - if [ ! "(" -d .opamcache ")" ]; then mv ~/.opam .opamcache; else mv ~/.opam ~/.opam-old; fi @@ -132,28 +134,6 @@ before_script: - for regexp in 's/.vo//' 's:lib/coq/plugins:Coq:' 's:lib/coq/theories:Coq:' 's:/:.:g'; do sed -z -i "$regexp" vofiles; done - xargs -0 --arg-file=vofiles bin/coqchk -boot -silent -o -m -coqlib lib/coq/ -.documentation-template: &documentation-template - stage: test - script: - - INSTALLDIR=$(readlink -f _install_ci) - - cp "$INSTALLDIR/lib/coq/tools/coqdoc/coqdoc.sty" . 
- - - LIB="$INSTALLDIR/lib/coq" - # WTF using a newline makes make sigsev - # see https://gitlab.com/SkySkimmer/coq/builds/17313312 - - DOCVFILES=$(find "$LIB/" -name '*.v' -printf "%p ") - - DOCLIGHTDIRS="$LIB/theories/Init/ $LIB/theories/Logic/ $LIB/theories/Unicode/ $LIB/theories/Arith/" - - DOCLIGHTVOFILES=$(find $DOCLIGHTDIRS -name '*.vo' -printf "%p ") - - - make doc QUICK=true COQDOC_NOBOOT=true COQTEX="$INSTALLDIR/bin/coq-tex" COQDOC="$INSTALLDIR/bin/coqdoc" VFILES="$DOCVFILES" THEORIESLIGHTVO="$DOCLIGHTVOFILES" - - - make install-doc - artifacts: - name: "$CI_JOB_NAME" - paths: - - _install_ci/share/doc - expire_in: 1 week - .ci-template: &ci-template stage: test script: @@ -170,6 +150,11 @@ before_script: build: <<: *build-template + variables: + EXTRA_CONF: "-with-doc yes" + EXTRA_PACKAGES: "$COQDOC_PACKAGES" + EXTRA_OPAM: "$COQDOC_OPAM" + PIP_PACKAGES: "$SPHINX_PACKAGES" # no coqide for 32bit: libgtk installation problems build:32bit: @@ -229,24 +214,6 @@ test-suite:bleeding-edge: CAMLP5_VER: "$CAMLP5_VER_BLEEDING_EDGE" EXTRA_PACKAGES: "$TIMING_PACKAGES" -documentation: - <<: *documentation-template - dependencies: - - build - variables: - EXTRA_PACKAGES: "$COQDOC_PACKAGES" - EXTRA_OPAM: "$COQDOC_OPAM" - -documentation:bleeding-edge: - <<: *documentation-template - dependencies: - - build:bleeding-edge - variables: - COMPILER: "$COMPILER_BLEEDING_EDGE" - CAMLP5_VER: "$CAMLP5_VER_BLEEDING_EDGE" - EXTRA_PACKAGES: "$COQDOC_PACKAGES" - EXTRA_OPAM: "$COQDOC_OPAM" - validate: <<: *validate-template dependencies: diff --git a/.travis.yml b/.travis.yml index 1699568ca0..d775824f0f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -138,6 +138,8 @@ matrix: - TEST_TARGET="test-suite" - EXTRA_CONF="-coqide opt -with-doc yes" - EXTRA_OPAM="hevea ${LABLGTK}" + before_install: &sphinx-install + - sudo pip3 install bs4 sphinx sphinx_rtd_theme pexpect antlr4-python3-runtime sphinxcontrib-bibtex addons: apt: sources: @@ -158,6 +160,9 @@ matrix: - transfig - imagemagick - 
tipa + - python3 + - python3-pip + - python3-setuptools - env: - TEST_TARGET="test-suite" @@ -166,6 +171,7 @@ matrix: - CAMLP5_VER="${CAMLP5_VER_BE}" - EXTRA_CONF="-coqide opt -with-doc yes" - EXTRA_OPAM="num hevea ${LABLGTK_BE}" + before_install: *sphinx-install addons: apt: sources: @@ -181,6 +187,7 @@ matrix: - NATIVE_COMP="no" - EXTRA_CONF="-coqide opt -with-doc yes -flambda-opts -O3" - EXTRA_OPAM="num hevea ${LABLGTK_BE}" + before_install: *sphinx-install addons: apt: sources: @@ -57,6 +57,9 @@ Tactics - A bug fixed in "rewrite H in *" and "rewrite H in * |-" may cause a few rare incompatibilities (it was unintendedly recursively rewriting in the side conditions generated by H). +- Added tactics "assert_succeeds tac" and "assert_fails tac" to ensure + properties of the executation of a tactic without keeping the effect + of the execution. Focusing @@ -77,6 +80,10 @@ Vernacular Commands - Using “Require” inside a section is deprecated. - An experimental command "Show Extraction" allows to extract the content of the current ongoing proof (grant wish #4129). +- Coercion now accepts the type of its argument to be "Prop" or "Type". +- The "Export" modifier can now be used when setting and unsetting options, and + will result in performing the same change when the module corresponding the + command is imported. Universes diff --git a/INSTALL.doc b/INSTALL.doc index 21b21163c0..b71115bfa1 100644 --- a/INSTALL.doc +++ b/INSTALL.doc @@ -26,6 +26,12 @@ To produce all the documents, the following tools are needed: - convert (ImageMagick) - hevea - hacha + - Python 3 + - Sphinx 1.6.5 (http://www.sphinx-doc.org/en/stable/) + - sphinx_rtd_theme + - pexpect + - beautifulsoup4 + - Antlr4 runtime for Python 3 Under Debian based operating systems (Debian, Ubuntu, ...) 
a @@ -34,6 +40,10 @@ working set of packages for compiling the documentation for Coq is: texlive texlive-latex-extra texlive-math-extra texlive-fonts-extra texlive-humanities texlive-pictures latex-xcolor hevea transfig imagemagick + python3 python-pip3 + +To install the Python packages required to build the user manual, run: + pip3 install sphinx sphinx_rtd_theme beautifulsoup4 antlr4-python3-runtime pexpect Compilation @@ -241,6 +241,7 @@ docclean: rm -f doc/common/version.tex rm -f doc/refman/styles.hva doc/refman/cover.html doc/refman/Reference-Manual.html rm -f doc/coq.tex + rm -rf doc/sphinx/_build archclean: clean-ide optclean voclean rm -rf _build diff --git a/Makefile.doc b/Makefile.doc index 9fd93651d1..a98f35a1c1 100644 --- a/Makefile.doc +++ b/Makefile.doc @@ -46,6 +46,14 @@ else COQTEXOPTS:=-boot -n 72 -sl -small endif +# Sphinx-related variables +SPHINXOPTS= -j4 +SPHINXBUILD= sphinx-build +SPHINXBUILDDIR= doc/sphinx/_build + +# Internal variables. +ALLSPHINXOPTS= -d $(SPHINXBUILDDIR)/doctrees $(SPHINXOPTS) + DOCCOMMON:=doc/common/version.tex doc/common/title.tex doc/common/macros.tex REFMANCOQTEXFILES:=$(addprefix doc/refman/, \ @@ -78,12 +86,18 @@ REFMANPNGFILES:=$(REFMANEPSFILES:.eps=.png) ### General rules ###################################################################### -.PHONY: doc doc-html doc-pdf doc-ps refman refman-quick tutorial +.PHONY: doc sphinxdoc-html doc-pdf doc-ps refman refman-quick tutorial .PHONY: stdlib full-stdlib rectutorial refman-html-dir INDEXURLS:=doc/refman/html/index_urls.txt -doc: refman tutorial rectutorial stdlib $(INDEXURLS) +doc: sphinx refman tutorial rectutorial stdlib $(INDEXURLS) + +sphinx: coq + $(SHOW)'SPHINXBUILD doc/sphinx' + $(HIDE)COQBIN="$(PWD)/bin" $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) doc/sphinx $(SPHINXBUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(SPHINXBUILDDIR)/html." 
doc-html:\ doc/tutorial/Tutorial.v.html doc/refman/html/index.html \ @@ -347,9 +361,11 @@ doc/RecTutorial/RecTutorial.html: doc/RecTutorial/RecTutorial.tex # Install all documentation files ###################################################################### -.PHONY: install-doc install-doc-meta install-doc-html install-doc-printable install-doc-index-urls +.PHONY: install-doc install-doc-meta install-doc-html install-doc-printable \ + install-doc-index-urls install-doc-sphinx -install-doc: install-doc-meta install-doc-html install-doc-printable install-doc-index-urls +install-doc: install-doc-meta install-doc-html install-doc-printable \ + install-doc-index-urls install-doc-sphinx install-doc-meta: $(MKDIR) $(FULLDOCDIR) @@ -380,6 +396,12 @@ install-doc-index-urls: $(MKDIR) $(FULLDATADIR) $(INSTALLLIB) $(INDEXURLS) $(FULLDATADIR) +install-doc-sphinx: + $(MKDIR) $(FULLDOCDIR)/sphinx + (for f in `cd doc/sphinx/_build; find . -type f`; do \ + $(MKDIR) $$(dirname $(FULLDOCDIR)/sphinx/$$f);\ + $(INSTALLLIB) doc/sphinx/_build/$$f $(FULLDOCDIR)/sphinx/$$f;\ + done) ########################################################################### # Documentation of the source code (using ocamldoc) diff --git a/configure.ml b/configure.ml index 1eae3bd931..4726831e44 100644 --- a/configure.ml +++ b/configure.ml @@ -957,23 +957,18 @@ let strip = (** * Documentation : do we have latex, hevea, ... *) +let check_sphinx_deps () = + ignore (run (which "python3") ["doc/tools/coqrst/checkdeps.py"]) + let check_doc () = let err s = - ceprintf "%s was not found; documentation will not be available" s; - raise Not_found + die (sprintf "A documentation build was requested, but %s was not found." 
s); in - try - if not !prefs.withdoc then raise Not_found; - if not (program_in_path "latex") then err "latex"; - if not (program_in_path "hevea") then err "hevea"; - if not (program_in_path "hacha") then err "hacha"; - if not (program_in_path "fig2dev") then err "fig2dev"; - if not (program_in_path "convert") then err "convert"; - true - with Not_found -> false - -let withdoc = check_doc () + if not (program_in_path "python3") then err "python3"; + if not (program_in_path "sphinx-build") then err "sphinx-build"; + check_sphinx_deps () +let _ = if !prefs.withdoc then check_doc () (** * Installation directories : bindir, libdir, mandir, docdir, etc *) @@ -1114,7 +1109,7 @@ let print_summary () = pr " Mac OS integration is on\n"; pr " CoqIde : %s\n" coqide; pr " Documentation : %s\n" - (if withdoc then "All" else "None"); + (if !prefs.withdoc then "All" else "None"); pr " Web browser : %s\n" browser; pr " Coq web site : %s\n" !prefs.coqwebsite; pr " Bytecode VM enabled : %B\n" !prefs.bytecodecompiler; @@ -1342,7 +1337,7 @@ let write_makefile f = pr "# Defining REVISION\n"; pr "CHECKEDOUT=%s\n\n" vcs; pr "# Option to control compilation and installation of the documentation\n"; - pr "WITHDOC=%s\n\n" (if withdoc then "all" else "no"); + pr "WITHDOC=%s\n\n" (if !prefs.withdoc then "all" else "no"); pr "# Option to produce precompiled files for native_compute\n"; pr "NATIVECOMPUTE=%s\n" (if !prefs.nativecompiler then "-native-compiler" else ""); close_out o; diff --git a/default.nix b/default.nix index af2a13a842..2011bf574c 100644 --- a/default.nix +++ b/default.nix @@ -22,7 +22,7 @@ # a symlink to where Coq was installed. { pkgs ? (import <nixpkgs> {}), ocamlPackages ? pkgs.ocamlPackages, - buildIde ? true, doCheck ? true }: + buildIde ? true, buildDoc ? true, doCheck ? 
true }: with pkgs; @@ -43,6 +43,15 @@ stdenv.mkDerivation rec { # CoqIDE dependencies ocamlPackages.lablgtk + ] else []) ++ (if buildDoc then [ + + # Sphinx doc dependencies + ocamlPackages.lablgtk + pkgconfig (python3.withPackages + (ps: [ ps.sphinx ps.sphinx_rtd_theme ps.pexpect ps.beautifulsoup4 + ps.antlr4-python3-runtime ps.sphinxcontrib-bibtex ])) + antlr4 + ] else []) ++ (if doCheck then # Test-suite dependencies diff --git a/dev/ci/user-overlays/06923-ppedrot-export-options.sh b/dev/ci/user-overlays/06923-ppedrot-export-options.sh new file mode 100644 index 0000000000..333a9e84bd --- /dev/null +++ b/dev/ci/user-overlays/06923-ppedrot-export-options.sh @@ -0,0 +1,7 @@ +if [ "$CI_PULL_REQUEST" = "6923" ] || [ "$CI_BRANCH" = "export-options" ]; then + ltac2_CI_BRANCH=export-options + ltac2_CI_GITURL=https://github.com/ppedrot/ltac2 + + Equations_CI_BRANCH=export-options + Equations_CI_GITURL=https://github.com/ppedrot/Coq-Equations +fi diff --git a/doc/LICENSE b/doc/LICENSE index 0aa0d629e8..7ae31b089c 100644 --- a/doc/LICENSE +++ b/doc/LICENSE @@ -2,10 +2,14 @@ The Coq Reference Manual is a collective work from the Coq Development Team whose members are listed in the file CREDITS of the Coq source package. All related documents (the LaTeX and BibTeX sources, the embedded png files, and the PostScript, PDF and html outputs) are -copyright (c) INRIA 1999-2006. The material connected to the Reference -Manual may be distributed only subject to the terms and conditions set -forth in the Open Publication License, v1.0 or later (the latest -version is presently available at http://www.opencontent.org/openpub/). +copyright (c) INRIA 1999-2006, with the exception of the Ubuntu font files +(UbuntuMono-Square.ttf and UbuntuMono-B.ttf), derived from UbuntuMono-Regular, +which is Copyright 2010,2011 Canonical Ltd and licensed under the Ubuntu font +license, version 1.0 +(https://www.ubuntu.com/legal/terms-and-policies/font-licence). 
The material +connected to the Reference Manual may be distributed only subject to the terms +and conditions set forth in the Open Publication License, v1.0 or later (the +latest version is presently available at http://www.opencontent.org/openpub/). Options A and B are *not* elected. The Coq Tutorial is a work by Gérard Huet, Gilles Kahn and Christine diff --git a/doc/refman/Coercion.tex b/doc/refman/Coercion.tex index ec46e1eb58..53b6b7827a 100644 --- a/doc/refman/Coercion.tex +++ b/doc/refman/Coercion.tex @@ -33,7 +33,7 @@ classes: \begin{itemize} \item {\tt Sortclass}, the class of sorts; - its objects are the terms whose type is a sort. + its objects are the terms whose type is a sort (e.g., \ssrC{Prop} or \ssrC{Type}). \item {\tt Funclass}, the class of functions; its objects are all the terms with a functional type, i.e. of form $forall~ x:A, B$. @@ -73,8 +73,8 @@ conditions holds: We then write $f:C \mbox{\texttt{>->}} D$. The restriction on the type of coercions is called {\em the uniform inheritance condition}. -Remark that the abstract classes {\tt Funclass} and {\tt Sortclass} -cannot be source classes. +Remark: the abstract class {\tt Sortclass} can be used as source class, +but the abstract class {\tt Funclass} cannot. 
To coerce an object $t:C~t_1..t_n$ of $C$ towards $D$, we have to apply the coercion $f$ to it; the obtained term $f~t_1..t_n~t$ is @@ -160,7 +160,6 @@ Declares the construction denoted by {\qualid} as a coercion between \item {\qualid} \errindex{not declared} \item {\qualid} \errindex{is already a coercion} \item \errindex{Funclass cannot be a source class} -\item \errindex{Sortclass cannot be a source class} \item {\qualid} \errindex{is not a function} \item \errindex{Cannot find the source class of {\qualid}} \item \errindex{Cannot recognize {\class$_1$} as a source class of {\qualid}} diff --git a/doc/refman/RefMan-ltac.tex b/doc/refman/RefMan-ltac.tex index 0a4d0ef9aa..3ed697d8be 100644 --- a/doc/refman/RefMan-ltac.tex +++ b/doc/refman/RefMan-ltac.tex @@ -156,6 +156,8 @@ is understood as & | & {\tt type\_term} {\term}\\ & | & {\tt numgoals} \\ & | & {\tt guard} {\it test}\\ +& | & {\tt assert\_fails} {\tacexprpref}\\ +& | & {\tt assert\_succeeds} {\tacexprpref}\\ & | & \atomictac\\ & | & {\qualid} \nelist{\tacarg}{}\\ & | & {\atom} @@ -598,6 +600,22 @@ The experimental status of this tactic pertains to the fact if $v$ performs side \ErrMsg \errindex{This tactic has more than one success} +\subsubsection[Checking the failure]{Checking the failure\tacindex{assert\_fails}\index{Tacticals!assert\_fails@{\tt assert\_fails}}} + +Coq provides a derived tactic to check that a tactic \emph{fails}: +\begin{quote} +{\tt assert\_fails} {\tacexpr} +\end{quote} +This behaves like {\tt tryif {\tacexpr} then fail 0 tac "succeeds" else idtac}. + +\subsubsection[Checking the success]{Checking the success\tacindex{assert\_succeeds}\index{Tacticals!assert\_succeeds@{\tt assert\_succeeds}}} + +Coq provides a derived tactic to check that a tactic has \emph{at least one} success: +\begin{quote} +{\tt assert\_succeeds} {\tacexpr} +\end{quote} +This behaves like {\tt tryif (assert\_fails tac) then fail 0 tac "fails" else idtac}. 
+ \subsubsection[Solving]{Solving\tacindex{solve} \index{Tacticals!solve@{\tt solve}}} diff --git a/doc/refman/RefMan-tac.tex b/doc/refman/RefMan-tac.tex index 40ba43b6cd..2597e3c37d 100644 --- a/doc/refman/RefMan-tac.tex +++ b/doc/refman/RefMan-tac.tex @@ -3505,17 +3505,6 @@ reduced to \texttt{S t}. \end{Variants} \begin{quote} -\optindex{Refolding Reduction} -{\tt Refolding Reduction} -\end{quote} -\emph{Deprecated since 8.7} - -This option (off by default) controls the use of the refolding strategy -of {\tt cbn} while doing reductions in unification, type inference and -tactic applications. It can result in expensive unifications, as -refolding currently uses a potentially exponential heuristic. - -\begin{quote} \optindex{Debug RAKAM} {\tt Set Debug RAKAM} \end{quote} diff --git a/doc/sphinx/MIGRATING b/doc/sphinx/MIGRATING new file mode 100644 index 0000000000..fa6fe15379 --- /dev/null +++ b/doc/sphinx/MIGRATING @@ -0,0 +1,238 @@ +How to migrate the Coq Reference Manual to Sphinx +================================================= + +# Install Python3 packages (requires Python 3, python3-pip, python3-setuptools) + + * pip3 install bs4 sphinx sphinx_rtd_theme pexpect antlr4-python3-runtime sphinxcontrib-bibtex + +# You may want to do this under a virtualenv, particularly if you end up with issues finding sphinxcontrib.bibtex. http://docs.python-guide.org/en/latest/dev/virtualenvs/ + + * pip3 install virtualenv + * virtualenv coqsphinxing # you may want to use -p to specify the python version + * source coqsphinxing/bin/activate # activate the virtual environment + +# After activating the virtual environment you can run the above pip3 command to install sphinx. You will have to activate the virtual environment before building the docs in your session. 
+ +# Add this Elisp code to .emacs, if you're using emacs (recommended): + + (defun sphinx/quote-coq-refman-region (left right &optional beg end count) + (unless beg + (if (region-active-p) + (setq beg (region-beginning) end (region-end)) + (setq beg (point) end nil))) + (unless count + (setq count 1)) + (save-excursion + (goto-char (or end beg)) + (dotimes (_ count) (insert right))) + (save-excursion + (goto-char beg) + (dotimes (_ count) (insert left))) + (if (and end (characterp left)) ;; Second test handles the ::`` case + (goto-char (+ (* 2 count) end)) + (goto-char (+ count beg)))) + + (defun sphinx/coqtop (beg end) + (interactive (list (region-beginning) (region-end))) + (replace-regexp "^Coq < " " " nil beg end) + (indent-rigidly beg end -3) + (goto-char beg) + (insert ".. coqtop:: all\n\n")) + + (defun sphinx/rst-coq-action () + (interactive) + (pcase (read-char "Command?") + (?g (sphinx/quote-coq-refman-region ":g:`" "`")) + (?n (sphinx/quote-coq-refman-region ":n:`" "`")) + (?t (sphinx/quote-coq-refman-region ":token:`" "`")) + (?m (sphinx/quote-coq-refman-region ":math:`" "`")) + (?: (sphinx/quote-coq-refman-region "::`" "`")) + (?` (sphinx/quote-coq-refman-region "``" "``")) + (?c (sphinx/coqtop (region-beginning) (region-end))))) + + (global-set-key (kbd "<f12>") #'sphinx/rst-coq-action) + + With this code installed, you can hit "F12" followed by an appropriate key to do quick markup of text + (this will make more sense once you've started editing the text). 
+ +# Fork the Coq repo, if needed: + + https://github.com/coq/coq + +# Clone the repo to your work machine + +# Add Maxime Dénès's repo as a remote: + + git remote add sphinx https://github.com/maximedenes/coq.git + + (or choose a name other than "sphinx") + + Verify with: + + git remote -v + +# Fetch from the remote + + git fetch sphinx + +# Checkout the sphinx-doc branch + + git checkout sphinx-doc + + You should pull from the repo from time to time to keep your local copy up-to-date: + + git pull sphinx sphinx-doc + + You may want to create a new branch to do your work in. + +# Choose a Reference Manual chapter to work on at + + https://docs.google.com/document/d/1Yo7dV4OI0AY9Di-lsEQ3UTmn5ygGLlhxjym7cTCMCWU + +# For each chapter, raw ReStructuredText (the Sphinx format), created by the "html2rest" utility, + is available in the directory porting/raw-rst/ + + Elsewhere, depending on the chapter, there should be an almost-empty template file already created, + which is in the location where the final version should go + +# Manually edit the .rst file, place it in the correct location + + There are small examples in sphinx/porting/, a larger example in language/gallina-extensions.rst + + (N.B.: the migration is a work-in-progress, your suggestions are welcome) + + Find the chapter you're working on from the online manual at https://coq.inria.fr/distrib/current/refman/. + At the top of the file, after the chapter heading, add: + + :Source: https://coq.inria.fr/distrib/current/refman/the-chapter-file.html + :Converted by: Your Name + + N.B.: These source and converted-by annotations should help for the migration phase. Later on, + those annotations will be removed, and contributors will be mentioned in the Coq credits. + + Remove chapter numbers + + Replace section, subsection numbers with reference labels: + + .. _some-reference-label: + + Place the label before the section or subsection, followed by a blank line. + + Note the leading underscore. 
Use :ref:`some-reference-label` to refer to such a label; note the leading underscore is omitted. Many cross-references may be to other chapters. If the required label exists, use it. Otherwise, use a dummy reference of the form + `TODO-n.n.n-mnemonic` we can fixup later. Example: + + :ref:`TODO-1.3.2-definitions` + + We can grep for those TODOs, and the existing subsection number makes it easy to find in the existing manual. + + For the particular case of references to chapters, we can use a +convention for the cross-reference name, so no TODO is needed. + + :ref:`thegallinaspecificationlanguage` + +That is, the chapter label is the chapter title, all in lower-case, +with no spaces or punctuation. For chapters with subtitles marked with +a ":", like those for Omega and Nsatz, use just the chapter part +preceding the ":". These labels should already be in the +placeholder .rst files for each chapter. + + + You can also label other items, like grammars, with the same syntax. To refer to such labels, not involving a + section or subsection, use the syntax + + :ref:`Some link text <label-name>` + + Yes, the angle-brackets are needed here! + + For bibliographic references (those in biblio.bib), use :cite:`thecitation`. + + Grammars will get mangled by the translation. Look for "productionlist" in the examples, also see + http://www.sphinx-doc.org/en/stable/markup/para.html. + + For Coq examples that appear, look at the "coqtop" syntax in porting/tricky-bits.rst. The Sphinx + script will run coqtop on those examples, and can show the output (or not). + + The file replaces.rst contains replacement definitions for some items that are clumsy to write out otherwise. + Use + + .. include:: replaces.rst + + to gain access to those definitions in your file (you might need a path prefix). Some especially-important + replacements are |Cic|, |Coq|, |CoqIDE|, and |Gallina|, which display those names in small-caps. Please use them, + so that they're rendered consistently. 
+ + Similarly, there are some LaTeX macros in preamble.rst that can be useful. + + Conventions: + + - Keywords and other literal text is double-backquoted (e.g. ``Module``, ``Section``, ``(``, ``,``). + + - Metavariables are single-backquotes (e.g. `term`, `ident`) + + - Use the cmd directive for Vernacular commands, like: + + .. cmd:: Set Printing All. + + Within this directive, prefix metavariables (ident, term) with @: + + .. cmd:: Add Printing Let @ident. + + There's also the "cmdv" directive for variants of a command. + + - Use the "exn" and "warn" directives for errors and warnings: + + .. exn:: Something's not right. + .. warn:: You shouldn't do that. + + - Use the "example" directive for examples + + - Use the "g" role for inline Gallina, like :g:`fun x => x` + + - Use code blocks for blocks of Gallina. You can use a double-colon at the end of a line:: + + your code here + + which prints a single colon, or put the double-colon on a newline. + +:: + + your other code here + +# Making changes to the text + + The goal of the migration is simply to change the storage format from LaTeX to ReStructuredText. The goal is not + to make any organizational or other substantive changes to the text. If you do notice nits (misspellings, wrong + verb tense, and so on), please do change them. For example, the programming language that Coq is written in is these days + called "OCaml", and there are mentions of the older name "Objective Caml" in the reference manual that should be changed. + +# Build, view the manual + + In the root directory of your local repo, run "make sphinx". You can view the result in a browser by loading the HTML file + associated with your chapter, which will be contained in the directory doc/sphinx/_build/html/ beneath the repo root directory. + Make any changes you need until there are no build warnings and the output is perfect. 
:-) + +# Creating pull requests + + When your changes are done, commit them, push to your fork: + + git commit -m "useful commit message" file + git push origin sphinx-doc + + (or push to another branch, if you've created one). Then go to your GitHub + fork and create a pull request against Maxime's sphinx-doc + branch. If your commit is recent, you should see a link on your + fork's code page to do that. Otherwise, you may need to go to your + branch on GitHub to do that. + +# Issues/Questions/Suggestions + + As the migration proceeds, if you have technical issues, have a more general question, or want to suggest something, please contact: + + Paul Steckler <steck@stecksoft.com> + Maxime Dénès <maxime.denes@inria.fr> + +# Issues + + Should the stuff in replaces.rst go in preamble.rst? + In LaTeX, some of the grammars add productions to existing nonterminals, like term ++= ... . How to indicate that? diff --git a/doc/sphinx/_static/UbuntuMono-Square.ttf b/doc/sphinx/_static/UbuntuMono-Square.ttf Binary files differnew file mode 100644 index 0000000000..12b7c6d51a --- /dev/null +++ b/doc/sphinx/_static/UbuntuMono-Square.ttf diff --git a/doc/sphinx/_static/ansi-dark.css b/doc/sphinx/_static/ansi-dark.css new file mode 100644 index 0000000000..a564fd70bb --- /dev/null +++ b/doc/sphinx/_static/ansi-dark.css @@ -0,0 +1,144 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ **************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +.ansi-bold { + font-weight: bold; +} + +.ansi-italic { + font-style: italic; 
+} + +.ansi-negative { + filter: invert(100%); +} + +.ansi-underline { + text-decoration: underline; +} + +.ansi-no-bold { + font-weight: normal; +} + +.ansi-no-italic { + font-style: normal; +} + +.ansi-no-negative { + filter: invert(0%); +} + +.ansi-no-underline { + text-decoration: none; +} + +.ansi-black { + color: #000000; +} + +.ansi-fg-red { + color: #b21717; +} + +.ansi-fg-green { + color: #17b217; +} + +.ansi-fg-yellow { + color: #b26717; +} + +.ansi-fg-blue { + color: #1717b2; +} + +.ansi-fg-magenta { + color: #b217b2; +} + +.ansi-fg-cyan { + color: #17b2b2; +} + +.ansi-fg-white { + color: #b2b2b2; +} + +.ansi-fg-default { + color: #404040; +} + +.ansi-fg-light-black { + color: #686868; +} + +.ansi-fg-light-red { + color: #ff5454; +} + +.ansi-fg-light-green { + color: #54ff54; +} + +.ansi-fg-light-yellow { + color: #ffff54; +} + +.ansi-fg-light-blue { + color: #5454ff; +} + +.ansi-fg-light-magenta { + color: #ff54ff; +} + +.ansi-fg-light-cyan { + color: #54ffff; +} + +.ansi-fg-light-white { + color: #ffffff; +} + +.ansi-bg-black { + background-color: #000000; +} + +.ansi-bg-red { + background-color: #b21717; +} + +.ansi-bg-green { + background-color: #17b217; +} + +.ansi-bg-yellow { + background-color: #b26717; +} + +.ansi-bg-blue { + background-color: #1717b2; +} + +.ansi-bg-magenta { + background-color: #b217b2; +} + +.ansi-bg-cyan { + background-color: #17b2b2; +} + +.ansi-bg-white { + background-color: #b2b2b2; +} + +.ansi-bg-default { + background-color: transparent; +} diff --git a/doc/sphinx/_static/ansi.css b/doc/sphinx/_static/ansi.css new file mode 100644 index 0000000000..26bd797709 --- /dev/null +++ b/doc/sphinx/_static/ansi.css @@ -0,0 +1,145 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ 
**************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +.ansi-bold { + font-weight: bold; +} + +.ansi-italic { + font-style: italic; +} + +.ansi-negative { + filter: invert(100%); +} + +.ansi-underline { + text-decoration: underline; +} + +.ansi-no-bold { + font-weight: normal; +} + +.ansi-no-italic { + font-style: normal; +} + +.ansi-no-negative { + filter: invert(0%); +} + +.ansi-no-underline { + text-decoration: none; +} + + +.ansi-fg-black { + color: #babdb6; +} + +.ansi-fg-red { + color: #a40000; +} + +.ansi-fg-green { + color: #4e9a06; +} + +.ansi-fg-yellow { + color: #ce5c00; +} + +.ansi-fg-blue { + color: #204a87; +} + +.ansi-fg-magenta { + color: #5c3566; +} + +.ansi-fg-cyan { + color: #8f5902; +} + +.ansi-fg-white { + color: #2e3436; +} + +.ansi-fg-light-black { + color: #d3d7cf; +} + +.ansi-fg-light-red { + color: #cc0000; +} + +.ansi-fg-light-green { + color: #346604; /* From tango.el */ +} + +.ansi-fg-light-yellow { + color: #f57900; +} + +.ansi-fg-light-blue { + color: #3465a4; +} + +.ansi-fg-light-magenta { + color: #75507b; +} + +.ansi-fg-light-cyan { + color: #c14d11; +} + +.ansi-fg-light-white { + color: #555753; +} + +.ansi-fg-default { + color: #eeeeec; +} + +.ansi-bg-black { + background-color: #babdb6; +} + +.ansi-bg-red { + background-color: #a40000; +} + +.ansi-bg-green { + background-color: #4e9a06; +} + +.ansi-bg-yellow { + background-color: #ce5c00; +} + +.ansi-bg-blue { + background-color: #204a87; +} + +.ansi-bg-magenta { + background-color: #5c3566; +} + +.ansi-bg-cyan { + background-color: #8f5902; +} + +.ansi-bg-white { + background-color: #2e3436; +} + +.ansi-bg-default { + background-color: transparent; +} diff --git a/doc/sphinx/_static/coqdoc.css b/doc/sphinx/_static/coqdoc.css 
new file mode 100644 index 0000000000..bbcc044a20 --- /dev/null +++ b/doc/sphinx/_static/coqdoc.css @@ -0,0 +1,68 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ **************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +/* Taken from CoqDoc's default stylesheet */ + +.coqdoc-constructor { + color: rgb(60%,0%,0%); +} + +.coqdoc-var { + color: rgb(40%,0%,40%); +} + +.coqdoc-variable { + color: rgb(40%,0%,40%); +} + +.coqdoc-definition { + color: rgb(0%,40%,0%); +} + +.coqdoc-abbreviation { + color: rgb(0%,40%,0%); +} + +.coqdoc-lemma { + color: rgb(0%,40%,0%); +} + +.coqdoc-instance { + color: rgb(0%,40%,0%); +} + +.coqdoc-projection { + color: rgb(0%,40%,0%); +} + +.coqdoc-method { + color: rgb(0%,40%,0%); +} + +.coqdoc-inductive { + color: rgb(0%,0%,80%); +} + +.coqdoc-record { + color: rgb(0%,0%,80%); +} + +.coqdoc-class { + color: rgb(0%,0%,80%); +} + +.coqdoc-keyword { + color : #cf1d1d; +} + +/* Custom additions */ + +.coqdoc-tactic { + font-weight: bold; +} diff --git a/doc/sphinx/_static/coqnotations.sty b/doc/sphinx/_static/coqnotations.sty new file mode 100644 index 0000000000..75eac1f724 --- /dev/null +++ b/doc/sphinx/_static/coqnotations.sty @@ -0,0 +1,50 @@ +% The LaTeX generator wraps all custom spans in \DUrole{class}{contents}. That +% command then checks for another command called \DUroleclass. 
+ +% Most of our CSS class names have dashes, so we need ‘\csname … \endcsname’ + +% <magic> +% \def\newcssclass#1#2{\expandafter\def\csname DUrole#1\endcsname ##1{#2}} +% </magic> + +\RequirePackage{adjustbox} +\RequirePackage{xcolor} +\RequirePackage{amsmath} + +\definecolor{nbordercolor}{HTML}{AAAAAA} +\definecolor{nbgcolor}{HTML}{EAEAEA} +\definecolor{nholecolor}{HTML}{4E9A06} + +\newlength{\nscriptsize} +\setlength{\nscriptsize}{0.8em} + +\newcommand*{\scriptsmallsquarebox}[1]{% + % Force width + \makebox[\nscriptsize]{% + % Force height and center vertically + \raisebox{\dimexpr .5\nscriptsize - .5\height \relax}[\nscriptsize][0pt]{% + % Cancel depth + \raisebox{\depth}{#1}}}} +\newcommand*{\nscriptdecoratedbox}[2][]{\adjustbox{cfbox=nbordercolor 0.5pt 0pt,bgcolor=nbgcolor}{#2}} +\newcommand*{\nscriptbox}[1]{\nscriptdecoratedbox{\scriptsmallsquarebox{\textbf{#1}}}} +\newcommand*{\nscript}[2]{\text{\hspace{-.5\nscriptsize}\raisebox{-#1\nscriptsize}{\nscriptbox{\small#2}}}} +\newcommand*{\nsup}[1]{^{\nscript{0.15}{#1}}} +\newcommand*{\nsub}[1]{_{\nscript{0.35}{#1}}} +\newcommand*{\nnotation}[1]{#1} +\newcommand*{\nrepeat}[1]{\text{\adjustbox{cfbox=nbordercolor 0.5pt 2pt,bgcolor=nbgcolor}{#1\hspace{.5\nscriptsize}}}} +\newcommand*{\nwrapper}[1]{\ensuremath{\displaystyle#1}} % https://tex.stackexchange.com/questions/310877/ +\newcommand*{\nhole}[1]{\textit{\color{nholecolor}#1}} + +% <magic> +% Make it easier to define new commands matching CSS classes +\newcommand{\newcssclass}[2]{% + \expandafter\def\csname DUrole#1\endcsname##1{#2} +} +% </magic> + +\newcssclass{notation-sup}{\nsup{#1}} +\newcssclass{notation-sub}{\nsub{#1}} +\newcssclass{notation}{\nnotation{#1}} +\newcssclass{repeat}{\nrepeat{#1}} +\newcssclass{repeat-wrapper}{\nwrapper{#1}} +\newcssclass{hole}{\nhole{#1}} diff --git a/doc/sphinx/_static/notations.css b/doc/sphinx/_static/notations.css new file mode 100644 index 0000000000..1ae7a7cd7f --- /dev/null +++ b/doc/sphinx/_static/notations.css @@ 
-0,0 +1,177 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ **************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +.notation { + /* font-family: "Ubuntu Mono", "Consolas", monospace; */ + white-space: pre-wrap; +} + +.notation .notation-sup { + top: -0.4em; +} + +.notation .notation-sub { + bottom: -0.4em; + border-radius: 1rem; +} + +@font-face { /* This font has been edited to center all characters */ + font-family: 'UbuntuMono-Square'; + font-style: normal; + font-weight: 800; + src: local('UbuntuMono-Square'), url(./UbuntuMono-Square.ttf) format('truetype'); +} + +.notation .notation-sup, .notation .notation-sub { + background: #EAEAEA; + border: 1px solid #AAA; + color: black; + /* cursor: help; */ + display: inline-block; + font-size: 0.5em; + font-weight: bolder; + font-family: UbuntuMono-Square, monospace; + height: 2em; + line-height: 1.6em; + position: absolute; + right: -1em; /* half of the width */ + text-align: center; + width: 2em; +} + +.notation .repeat { + background: #EAEAEA; + border: 1px solid #AAA; + display: inline-block; + padding-right: 0.6em; /* Space for the left half of the sub- and sup-scripts */ + padding-left: 0.2em; + margin: 0.25em 0; +} + +.notation .repeat-wrapper { + display: inline-block; + position: relative; + margin-right: 0.4em; /* Space for the right half of the sub- and sup-scripts */ +} + +.notation .hole { + color: #4e9a06; + font-style: italic; +} + +/***********************/ +/* Small extra classes */ +/***********************/ + +.math-preamble { + 
display: none; +} + +.inline-grammar-production { + font-weight: bold; +} + +/************************/ +/* Coqtop IO and Coqdoc */ +/************************/ + +.coqtop dd, .ansi-bg-default { + background: #eeeeee !important; +} + +.coqtop dd, .ansi-fg-default { + color: #2e3436 !important; +} + +.coqtop dt { /* Questions */ + background: none !important; + color: #333 !important; + font-weight: normal !important; + padding: 0 !important; + margin: 0 !important; +} + +.coqtop dd { /* Responses */ + padding: 0.5em; + margin-left: 0 !important; + margin-top: 0.5em !important; +} + +.coqdoc, .coqtop dl { + margin: 12px; /* Copied from RTD theme */ +} + +.coqdoc { + display: block; + white-space: pre; +} + +.coqtop dt, .coqtop dd { + border: none !important; + display: block !important; +} + +.coqtop.coqtop-hidden, dd.coqtop-hidden, dt.coqtop-hidden { /* Overqualifying for precedence */ + display: none !important; +} + +/* FIXME: Specific to the RTD theme */ +.coqdoc, .coqtop dt, .coqtop dd, pre { /* pre added because of production lists */ + font-family: Consolas,"Andale Mono WT","Andale Mono","Lucida Console","Lucida Sans Typewriter","DejaVu Sans Mono","Bitstream Vera Sans Mono","Liberation Mono","Nimbus Mono L",Monaco,"Courier New",Courier,monospace !important; /* Copied from RTD theme */ + font-size: 12px !important; /* Copied from RTD theme */ + line-height: 1.5 !important; /* Copied from RTD theme */ + white-space: pre; +} + +/*************/ +/* Overrides */ +/*************/ + +.rst-content table.docutils td, .rst-content table.docutils th { + padding: 8px; /* Reduce horizontal padding */ + border-left: none; + border-right: none; +} + +/* We can't display nested blocks otherwise */ +code, .rst-content tt, .rst-content code { + background: transparent !important; + border: none !important; + font-size: inherit !important; +} + +code { + padding: 0 !important; /* This padding doesn't make sense without a border */ +} + +dt > .property { + margin-right: 0.25em; 
+} + +.icon-home:visited { + color: #FFFFFF; +} + +/* FIXME: Specific to the RTD theme */ +a:visited { + color: #2980B9; +} + +/* Pygments for Coq is confused by ‘…’ */ +code span.error { + background: inherit !important; + line-height: inherit !important; + margin-bottom: 0 !important; + padding: 0 !important; +} + +/* Red is too aggressive */ +.rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal { + color: inherit !important; +} diff --git a/doc/sphinx/_static/notations.js b/doc/sphinx/_static/notations.js new file mode 100644 index 0000000000..eb7f211e8b --- /dev/null +++ b/doc/sphinx/_static/notations.js @@ -0,0 +1,43 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ **************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +function annotateSup(marker) { + switch (marker) { + case "?": + return "This block is optional."; + case "*": + return "This block is optional, and may be repeated."; + case "+": + return "This block may be repeated."; + } +} + +function annotateSub(separator) { + return "Use “" + separator + "” to separate repetitions of this block."; +} + +// function translatePunctuation(original) { +// var mappings = { ",": "⸴" }; // , +// return mappings[original] || original; +// } + +function annotateNotations () { + $(".repeat-wrapper > sup") + .attr("data-hint", function() { + return annotateSup($(this).text()); + }).addClass("hint--top hint--rounded"); + + $(".repeat-wrapper > sub") + .attr("data-hint", function() { + return annotateSub($(this).text()); + 
}).addClass("hint--bottom hint--rounded"); + //.text(function(i, text) { return translatePunctuation(text); }); +} + +$(annotateNotations); diff --git a/doc/sphinx/_static/pre-text.css b/doc/sphinx/_static/pre-text.css new file mode 100644 index 0000000000..38d81abefe --- /dev/null +++ b/doc/sphinx/_static/pre-text.css @@ -0,0 +1,29 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ **************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +/* Formatting for PRE (literal) text in .rst files */ + +.line-block { + background-color: rgb(80%,90%,80%); + margin: 0px; + margin-top: 0px; + margin-right: 16px; + margin-bottom: 20px; + padding-left: 4px; + padding-top: 4px; + padding-bottom: 4px; +} + +.line-block cite { + font-size: 90%; +} + +.pre { + font-size: 90%; +} diff --git a/doc/sphinx/conf.py b/doc/sphinx/conf.py new file mode 100755 index 0000000000..0bff41a259 --- /dev/null +++ b/doc/sphinx/conf.py @@ -0,0 +1,400 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## 
+########################################################################## +# +# Coq 8.5 documentation build configuration file, created by +# sphinx-quickstart on Wed May 11 11:23:13 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# Increase recursion limit for sphinx +sys.setrecursionlimit(1500) + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.append(os.path.abspath('../tools/')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.mathjax', + 'sphinx.ext.todo', + 'sphinxcontrib.bibtex', + 'coqrst.coqdomain' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'Coq' +copyright = '2016, Inria' +author = 'The Coq Development Team' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. 
+# +# The short X.Y version. +version = '8.7' +# The full version, including alpha/beta/rc tags. +release = '8.7.dev' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# Use the Coq domain +primary_domain = 'coq' + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' +highlight_language = 'text' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = False + +# Extra warnings, including undefined references +nitpicky = False + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' +# html_theme = 'agogo' +# html_theme = 'alabaster' +# html_theme = 'haiku' +# html_theme = 'bizstyle' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +import sphinx_rtd_theme +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# The name for this set of Sphinx documents. +# "<project> v<release> documentation" by default. +#html_title = 'Coq 8.5 v8.5pl1' + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. 
+#html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +html_use_smartypants = False # FIXME wrap code in <code> tags, otherwise quotesget converted in there + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. 
+#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Coq85doc' + +# -- Options for LaTeX output --------------------------------------------- + +########################### +# Set things up for XeTeX # +########################### +latex_elements = { + 'babel': '', + 'fontenc': '', + 'inputenc': '', + 'utf8extra': '', + 'cmappkg': '', + # https://www.topbug.net/blog/2015/12/10/a-collection-of-issues-about-the-latex-output-in-sphinx-and-the-solutions/ + 'papersize': 'letterpaper', + 'classoptions': ',openany', # No blank pages + 'polyglossia' : '\\usepackage{polyglossia}', + 'unicode-math' : '\\usepackage{unicode-math}', + 'microtype' : '\\usepackage{microtype}', + "preamble": r"\usepackage{coqnotations}" +} + +from sphinx.builders.latex import LaTeXBuilder + +######## +# done # +######## + +latex_additional_files = ["_static/coqnotations.sty"] + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'Coq85.tex', 'Coq 8.5 Documentation', + 'The Coq Development Team (edited by C. Pit-Claudel)', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + (master_doc, 'coq85', 'Coq 8.5 Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'Coq85', 'Coq 8.5 Documentation', + author, 'Coq85', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + + +# -- Options for Epub output ---------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project +epub_author = author +epub_publisher = author +epub_copyright = copyright + +# The basename for the epub file. It defaults to the project name. +#epub_basename = project + +# The HTML theme for the epub output. Since the default themes are not +# optimized for small screen space, using the same theme for HTML and epub +# output is usually not wise. This defaults to 'epub', a theme designed to save +# visual space. +#epub_theme = 'epub' + +# The language of the text. It defaults to the language option +# or 'en' if the language is not set. +#epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +#epub_scheme = '' + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +#epub_identifier = '' + +# A unique identification for the text. +#epub_uid = '' + +# A tuple containing the cover image and cover page html template filenames. 
+#epub_cover = () + +# A sequence of (type, uri, title) tuples for the guide element of content.opf. +#epub_guide = () + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_pre_files = [] + +# HTML files that should be inserted after the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_post_files = [] + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + +# The depth of the table of contents in toc.ncx. +#epub_tocdepth = 3 + +# Allow duplicate toc entries. +#epub_tocdup = True + +# Choose between 'default' and 'includehidden'. +#epub_tocscope = 'default' + +# Fix unsupported image types using the Pillow. +#epub_fix_images = False + +# Scale large images. +#epub_max_image_width = 0 + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#epub_show_urls = 'inline' + +# If false, no index is generated. 
+#epub_use_index = True + +# navtree options +navtree_shift = True diff --git a/doc/sphinx/coqdoc.css b/doc/sphinx/coqdoc.css new file mode 100644 index 0000000000..a34bb81ebd --- /dev/null +++ b/doc/sphinx/coqdoc.css @@ -0,0 +1,338 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ **************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +body { padding: 0px 0px; + margin: 0px 0px; + background-color: white } + +#page { display: block; + padding: 0px; + margin: 0px; + padding-bottom: 10px; } + +#header { display: block; + position: relative; + padding: 0; + margin: 0; + vertical-align: middle; + border-bottom-style: solid; + border-width: thin } + +#header h1 { padding: 0; + margin: 0;} + + +/* Contents */ + +#main{ display: block; + padding: 10px; + font-family: sans-serif; + font-size: 100%; + line-height: 100% } + +#main h1 { line-height: 95% } /* allow for multi-line headers */ + +#main a.idref:visited {color : #416DFF; text-decoration : none; } +#main a.idref:link {color : #416DFF; text-decoration : none; } +#main a.idref:hover {text-decoration : none; } +#main a.idref:active {text-decoration : none; } + +#main a.modref:visited {color : #416DFF; text-decoration : none; } +#main a.modref:link {color : #416DFF; text-decoration : none; } +#main a.modref:hover {text-decoration : none; } +#main a.modref:active {text-decoration : none; } + +#main .keyword { color : #cf1d1d } +#main { color: black } + +.section { background-color: rgb(60%,60%,100%); + padding-top: 13px; + padding-bottom: 13px; + 
padding-left: 3px; + margin-top: 5px; + margin-bottom: 5px; + font-size : 175% } + +h2.section { background-color: rgb(80%,80%,100%); + padding-left: 3px; + padding-top: 12px; + padding-bottom: 10px; + font-size : 130% } + +h3.section { background-color: rgb(90%,90%,100%); + padding-left: 3px; + padding-top: 7px; + padding-bottom: 7px; + font-size : 115% } + +h4.section { +/* + background-color: rgb(80%,80%,80%); + max-width: 20em; + padding-left: 5px; + padding-top: 5px; + padding-bottom: 5px; +*/ + background-color: white; + padding-left: 0px; + padding-top: 0px; + padding-bottom: 0px; + font-size : 100%; + font-weight : bold; + text-decoration : underline; + } + +#main .doc { margin: 0px; + font-family: sans-serif; + font-size: 100%; + line-height: 125%; + max-width: 40em; + color: black; + padding: 10px; + background-color: #90bdff } + +.inlinecode { + display: inline; +/* font-size: 125%; */ + color: #666666; + font-family: monospace } + +.doc .inlinecode { + display: inline; + font-size: 120%; + color: rgb(30%,30%,70%); + font-family: monospace } + +.doc .inlinecode .id { + color: rgb(30%,30%,70%); +} + +.inlinecodenm { + display: inline; + color: #444444; +} + +.doc .code { + display: inline; + font-size: 120%; + color: rgb(30%,30%,70%); + font-family: monospace } + +.comment { + display: inline; + font-family: monospace; + color: rgb(50%,50%,80%); +} + +.code { + display: block; +/* padding-left: 15px; */ + font-size: 110%; + font-family: monospace; + } + +table.infrule { + border: 0px; + margin-left: 50px; + margin-top: 10px; + margin-bottom: 10px; +} + +td.infrule { + font-family: monospace; + text-align: center; +/* color: rgb(35%,35%,70%); */ + padding: 0px; + line-height: 100%; +} + +tr.infrulemiddle hr { + margin: 1px 0 1px 0; +} + +.infrulenamecol { + color: rgb(60%,60%,60%); + font-size: 80%; + padding-left: 1em; + padding-bottom: 0.1em +} + +/* Pied de page */ + +#footer { font-size: 65%; + font-family: sans-serif; } + +/* Identifiers: <span 
class="id" title="...">) */ + +.id { display: inline; } + +.id[title="constructor"] { + color: rgb(60%,0%,0%); +} + +.id[title="var"] { + color: rgb(40%,0%,40%); +} + +.id[title="variable"] { + color: rgb(40%,0%,40%); +} + +.id[title="definition"] { + color: rgb(0%,40%,0%); +} + +.id[title="abbreviation"] { + color: rgb(0%,40%,0%); +} + +.id[title="lemma"] { + color: rgb(0%,40%,0%); +} + +.id[title="instance"] { + color: rgb(0%,40%,0%); +} + +.id[title="projection"] { + color: rgb(0%,40%,0%); +} + +.id[title="method"] { + color: rgb(0%,40%,0%); +} + +.id[title="inductive"] { + color: rgb(0%,0%,80%); +} + +.id[title="record"] { + color: rgb(0%,0%,80%); +} + +.id[title="class"] { + color: rgb(0%,0%,80%); +} + +.id[title="keyword"] { + color : #cf1d1d; +/* color: black; */ +} + +/* Deprecated rules using the 'type' attribute of <span> (not xhtml valid) */ + +.id[type="constructor"] { + color: rgb(60%,0%,0%); +} + +.id[type="var"] { + color: rgb(40%,0%,40%); +} + +.id[type="variable"] { + color: rgb(40%,0%,40%); +} + +.id[type="definition"] { + color: rgb(0%,40%,0%); +} + +.id[type="abbreviation"] { + color: rgb(0%,40%,0%); +} + +.id[type="lemma"] { + color: rgb(0%,40%,0%); +} + +.id[type="instance"] { + color: rgb(0%,40%,0%); +} + +.id[type="projection"] { + color: rgb(0%,40%,0%); +} + +.id[type="method"] { + color: rgb(0%,40%,0%); +} + +.id[type="inductive"] { + color: rgb(0%,0%,80%); +} + +.id[type="record"] { + color: rgb(0%,0%,80%); +} + +.id[type="class"] { + color: rgb(0%,0%,80%); +} + +.id[type="keyword"] { + color : #cf1d1d; +/* color: black; */ +} + +.inlinecode .id { + color: rgb(0%,0%,0%); +} + + +/* TOC */ + +#toc h2 { + padding: 10px; + background-color: rgb(60%,60%,100%); +} + +#toc li { + padding-bottom: 8px; +} + +/* Index */ + +#index { + margin: 0; + padding: 0; + width: 100%; +} + +#index #frontispiece { + margin: 1em auto; + padding: 1em; + width: 60%; +} + +.booktitle { font-size : 140% } +.authors { font-size : 90%; + line-height: 115%; } 
+.moreauthors { font-size : 60% } + +#index #entrance { + text-align: center; +} + +#index #entrance .spacer { + margin: 0 30px 0 30px; +} + +#index #footer { + position: absolute; + bottom: 0; +} + +.paragraph { + height: 0.75em; +} + +ul.doclist { + margin-top: 0em; + margin-bottom: 0em; +} diff --git a/doc/sphinx/index.rst b/doc/sphinx/index.rst new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/doc/sphinx/index.rst diff --git a/doc/tools/coqrst/__init__.py b/doc/tools/coqrst/__init__.py new file mode 100644 index 0000000000..2dda7d9216 --- /dev/null +++ b/doc/tools/coqrst/__init__.py @@ -0,0 +1,10 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""Utilities for documenting Coq in Sphinx""" diff --git a/doc/tools/coqrst/checkdeps.py b/doc/tools/coqrst/checkdeps.py new file mode 100644 index 0000000000..11f95c4e94 --- /dev/null +++ b/doc/tools/coqrst/checkdeps.py @@ -0,0 +1,39 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## 
+########################################################################## +from __future__ import print_function +import sys + +def eprint(*args, **kwargs): + print(*args, file=sys.stderr, **kwargs) + +def missing_dep(dep): + eprint('Cannot find %s (needed to build documentation)' % dep) + eprint('You can run `pip3 install %s` to install it.' % dep) + sys.exit(1) + +try: + import sphinx_rtd_theme +except: + missing_dep('sphinx_rtd_theme') + +try: + import pexpect +except: + missing_dep('pexpect') + +try: + import antlr4 +except: + missing_dep('antlr4-python3-runtime') + +try: + import bs4 +except: + missing_dep('beautifulsoup4') diff --git a/doc/tools/coqrst/coqdoc/__init__.py b/doc/tools/coqrst/coqdoc/__init__.py new file mode 100644 index 0000000000..a89a548e2c --- /dev/null +++ b/doc/tools/coqrst/coqdoc/__init__.py @@ -0,0 +1,10 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +from .main import * diff --git a/doc/tools/coqrst/coqdoc/main.py b/doc/tools/coqrst/coqdoc/main.py new file mode 100644 index 0000000000..d464f75bb2 --- /dev/null +++ b/doc/tools/coqrst/coqdoc/main.py @@ -0,0 +1,86 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is 
distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +""" +Use CoqDoc to highlight Coq snippets +==================================== + +Pygment's Coq parser isn't the best; instead, use coqdoc to highlight snippets. +Only works for full, syntactically valid sentences; on shorter snippets, coqdoc +swallows parts of the input. + +Works by reparsing coqdoc's output into the output that Sphinx expects from a +lexer. +""" + +import os +from tempfile import mkstemp +from subprocess import check_output + +from bs4 import BeautifulSoup +from bs4.element import NavigableString + +COQDOC_OPTIONS = ['--body-only', '--no-glob', '--no-index', '--no-externals', + '-s', '--html', '--stdout', '--utf8'] + +COQDOC_SYMBOLS = ["->", "<-", "<->", "=>", "<=", ">=", "<>", "~", "/\\", "\\/", "|-", "*", "forall", "exists"] +COQDOC_HEADER = "".join("(** remove printing {} *)".format(s) for s in COQDOC_SYMBOLS) + +def coqdoc(coq_code, coqdoc_bin = os.path.join(os.getenv("COQBIN"),"coqdoc")): + """Get the output of coqdoc on coq_code.""" + fd, filename = mkstemp(prefix="coqdoc-", suffix=".v") + try: + os.write(fd, COQDOC_HEADER.encode("utf-8")) + os.write(fd, coq_code.encode("utf-8")) + os.close(fd) + return check_output([coqdoc_bin] + COQDOC_OPTIONS + [filename], timeout = 2).decode("utf-8") + finally: + os.remove(filename) + +def is_whitespace_string(elem): + return isinstance(elem, NavigableString) and elem.strip() == "" + +def strip_soup(soup, pred): + """Strip elements maching pred from front and tail of soup.""" + while soup.contents and pred(soup.contents[-1]): + soup.contents.pop() + + skip = 0 + for elem in soup.contents: + if not pred(elem): + break + skip += 1 + + soup.contents[:] = soup.contents[skip:] + +def lex(source): + """Convert source into a stream of (css_classes, token_string).""" + coqdoc_output = 
coqdoc(source) + soup = BeautifulSoup(coqdoc_output, "html.parser") + root = soup.find(class_='code') + strip_soup(root, is_whitespace_string) + for elem in root.children: + if isinstance(elem, NavigableString): + yield [], elem + elif elem.name == "span": + cls = "coqdoc-{}".format(elem['title']) + yield [cls], elem.string + elif elem.name == 'br': + pass + else: + raise ValueError(elem) + +def main(): + """Lex stdin (for testing purposes)""" + import sys + for classes, text in lex(sys.stdin.read()): + print(repr(text) + "\t" ' '.join(classes)) + +if __name__ == '__main__': + main() diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py new file mode 100644 index 0000000000..18f32d7a8a --- /dev/null +++ b/doc/tools/coqrst/coqdomain.py @@ -0,0 +1,816 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""A Coq domain for Sphinx. + +Currently geared towards Coq's manual, rather than Coq source files, but one +could imagine extending it. 
+""" + +# pylint: disable=too-few-public-methods + +import re +from itertools import chain +from collections import defaultdict + +from docutils import nodes, utils +from docutils.transforms import Transform +from docutils.parsers.rst import Directive, directives +from docutils.parsers.rst.roles import code_role #, set_classes +from docutils.parsers.rst.directives.admonitions import BaseAdmonition + +from sphinx import addnodes +from sphinx.roles import XRefRole +from sphinx.util.nodes import set_source_info, set_role_source_info, make_refnode +from sphinx.directives import ObjectDescription +from sphinx.domains import Domain, ObjType, Index +from sphinx.ext.mathbase import MathDirective, displaymath + +from . import coqdoc +from .repl import ansicolors +from .repl.coqtop import CoqTop +from .notations.sphinx import sphinxify +from .notations.plain import stringify_with_ellipses + +def parse_notation(notation, source, line, rawtext=None): + """Parse notation and wrap it in an inline node""" + node = nodes.inline(rawtext or notation, '', *sphinxify(notation), classes=['notation']) + node.source, node.line = source, line + return node + +def highlight_using_coqdoc(sentence): + """Lex sentence using coqdoc, and yield inline nodes for each token""" + tokens = coqdoc.lex(utils.unescape(sentence, 1)) + for classes, value in tokens: + yield nodes.inline(value, value, classes=classes) + +def make_target(objtype, targetid): + """Create a target to an object of type objtype and id targetid""" + return "coq:{}.{}".format(objtype, targetid) + +class CoqObject(ObjectDescription): + """A generic Coq object; all Coq objects are subclasses of this. + + The fields and methods to override are listed at the top of this class' + implementation. Each object supports the :name: option, which gives an + explicit name to link to. + + See the documentation of CoqDomain for high-level information. + """ + + # The semantic domain in which this object lives. 
+ # It matches exactly one of the roles used for cross-referencing. + subdomain = None + + # The suffix to use in indices for objects of this type + index_suffix = None + + # The annotation to add to headers of objects of this type + annotation = None + + def _name_from_signature(self, signature): # pylint: disable=no-self-use, unused-argument + """Convert a signature into a name to link to. + + Returns None by default, in which case no name will be automatically + generated. + """ + return None + + def _render_signature(self, signature, signode): + """Render a signature, placing resulting nodes into signode.""" + raise NotImplementedError(self) + + option_spec = { + # One can give an explicit name to each documented object + 'name': directives.unchanged + } + + def _subdomain(self): + if self.subdomain is None: + raise ValueError() + return self.subdomain + + def handle_signature(self, signature, signode): + """Prefix signature with the proper annotation, then render it using + _render_signature. + + :returns: the name given to the resulting node, if any + """ + if self.annotation: + annotation = self.annotation + ' ' + signode += addnodes.desc_annotation(annotation, annotation) + self._render_signature(signature, signode) + return self._name_from_signature(signature) + + @property + def _index_suffix(self): + if self.index_suffix: + return " " + self.index_suffix + + def _record_name(self, name, target_id): + """Record a name, mapping it to target_id + + Warns if another object of the same name already exists. 
+ """ + names_in_subdomain = self.env.domaindata['coq']['objects'][self._subdomain()] + # Check that two objects in the same domain don't have the same name + if name in names_in_subdomain: + self.state_machine.reporter.warning( + 'Duplicate Coq object: {}; other is at {}'.format( + name, self.env.doc2path(names_in_subdomain[name][0])), + line=self.lineno) + names_in_subdomain[name] = (self.env.docname, self.objtype, target_id) + + def _add_target(self, signode, name): + """Register a link target ‘name’, pointing to signode.""" + targetid = make_target(self.objtype, nodes.make_id(name)) + if targetid not in self.state.document.ids: + signode['ids'].append(targetid) + signode['names'].append(name) + signode['first'] = (not self.names) + self.state.document.note_explicit_target(signode) + self._record_name(name, targetid) + return targetid + + def _add_index_entry(self, name, target): + """Add name (with target) to the main index.""" + index_text = name + self._index_suffix + self.indexnode['entries'].append(('single', index_text, target, '', None)) + + def run(self): + """Small extension of the parent's run method, handling user-provided names.""" + [idx, node] = super().run() + custom_name = self.options.get("name") + if custom_name: + self.add_target_and_index(custom_name, "", node.children[0]) + return [idx, node] + + def add_target_and_index(self, name, _, signode): + """Create a target and an index entry for name""" + if name: + target = self._add_target(signode, name) + self._add_index_entry(name, target) + return target + +class PlainObject(CoqObject): + """A base class for objects whose signatures should be rendered literaly.""" + def _render_signature(self, signature, signode): + signode += addnodes.desc_name(signature, signature) + +class NotationObject(CoqObject): + """A base class for objects whose signatures should be rendered as nested boxes.""" + def _render_signature(self, signature, signode): + position = 
self.state_machine.get_source_and_line(self.lineno) + tacn_node = parse_notation(signature, *position) + signode += addnodes.desc_name(signature, '', tacn_node) + +class TacticObject(PlainObject): + """An object to represent Coq tactics""" + subdomain = "tac" + index_suffix = "(tactic)" + annotation = None + +class GallinaObject(PlainObject): + """An object to represent Coq theorems""" + subdomain = "thm" + index_suffix = "(theorem)" + annotation = "Theorem" + +class VernacObject(NotationObject): + """An object to represent Coq commands""" + subdomain = "cmd" + index_suffix = "(command)" + annotation = "Command" + + def _name_from_signature(self, signature): + return stringify_with_ellipses(signature) + +class VernacVariantObject(VernacObject): + """An object to represent variants of Coq commands""" + index_suffix = "(command variant)" + annotation = "Variant" + +class TacticNotationObject(NotationObject): + """An object to represent Coq tactic notations""" + subdomain = "tacn" + index_suffix = "(tactic notation)" + annotation = None + +class TacticNotationVariantObject(TacticNotationObject): + """An object to represent variants of Coq tactic notations""" + index_suffix = "(tactic variant)" + annotation = "Variant" + +class OptionObject(NotationObject): + """An object to represent variants of Coq options""" + subdomain = "opt" + index_suffix = "(option)" + annotation = "Option" + + def _name_from_signature(self, signature): + return stringify_with_ellipses(signature) + +class ExceptionObject(NotationObject): + """An object to represent Coq errors.""" + subdomain = "exn" + index_suffix = "(error)" + annotation = "Error" + # Uses “exn” since “err” already is a CSS class added by “writer_aux”. 
+ + # Generate names automatically + def _name_from_signature(self, signature): + return stringify_with_ellipses(signature) + +class WarningObject(NotationObject): + """An object to represent Coq warnings.""" + subdomain = "warn" + index_suffix = "(warn)" + annotation = "Warning" + + # Generate names automatically + def _name_from_signature(self, signature): + return stringify_with_ellipses(signature) + +def NotationRole(role, rawtext, text, lineno, inliner, options={}, content=[]): + #pylint: disable=unused-argument, dangerous-default-value + """And inline role for notations""" + notation = utils.unescape(text, 1) + position = inliner.reporter.get_source_and_line(lineno) + return [nodes.literal(rawtext, '', parse_notation(notation, *position, rawtext=rawtext))], [] + +def coq_code_role(role, rawtext, text, lineno, inliner, options={}, content=[]): + #pylint: disable=dangerous-default-value + """And inline role for Coq source code""" + options['language'] = 'Coq' + return code_role(role, rawtext, text, lineno, inliner, options, content) + ## Too heavy: + ## Forked from code_role to use our custom tokenizer; this doesn't work for + ## snippets though: for example CoqDoc swallows the parentheses around this: + ## “(a: A) (b: B)” + # set_classes(options) + # classes = ['code', 'coq'] + # code = utils.unescape(text, 1) + # node = nodes.literal(rawtext, '', *highlight_using_coqdoc(code), classes=classes) + # return [node], [] + +# TODO pass different languages? +LtacRole = GallinaRole = VernacRole = coq_code_role + +class CoqtopDirective(Directive): + """A reST directive to describe interactions with Coqtop. + + Usage:: + + .. coqtop:: (options)+ + + Coq code to send to coqtop + + Example:: + + .. coqtop:: in reset undo + + Print nat. + Definition a := 1. 
+ + Here is a list of permissible options: + + Display + - ‘all’: Display input and output + - ‘in’: Display only input + - ‘out’: Display only output + - ‘none’: Display neither (useful for setup commands) + Behaviour + - ‘reset’: Send a `Reset Initial` command before running this block + - ‘undo’: Send an `Undo n` (n=number of sentences) command after running + all the commands in this block + """ + has_content = True + required_arguments = 0 + optional_arguments = 1 + final_argument_whitespace = True + + def run(self): + # Uses a ‘container’ instead of a ‘literal_block’ to disable + # Pygments-based post-processing (we could also set rawsource to '') + content = '\n'.join(self.content) + options = self.arguments[0].split() if self.arguments else ['in'] + if 'all' in options: + options.extend(['in', 'out']) + node = nodes.container(content, coqtop_options = list(set(options)), + classes=['coqtop', 'literal-block']) + self.add_name(node) + return [node] + +class CoqdocDirective(Directive): + """A reST directive to display Coqtop-formatted source code""" + # TODO implement this as a Pygments highlighter? + has_content = True + required_arguments = 0 + optional_arguments = 0 + final_argument_whitespace = True + + def run(self): + # Uses a ‘container’ instead of a ‘literal_block’ to disable + # Pygments-based post-processing (we could also set rawsource to '') + content = '\n'.join(self.content) + node = nodes.inline(content, '', *highlight_using_coqdoc(content)) + wrapper = nodes.container(content, node, classes=['coqdoc', 'literal-block']) + return [wrapper] + +class ExampleDirective(BaseAdmonition): + """A reST directive for examples""" + node_class = nodes.admonition + + def run(self): + # ‘BaseAdmonition’ checks whether ‘node_class’ is ‘nodes.admonition’, + # and uses arguments[0] as the title in that case (in other cases, the + # title is unset, and it is instead set in the HTML visitor). 
+ assert not self.arguments # Arguments have been parsed as content + self.arguments = ['Example'] + self.options['classes'] = ['admonition', 'note'] + return super().run() + +class PreambleDirective(MathDirective): + r"""A reST directive for hidden math. + + Mostly useful to let MathJax know about `\def`s and `\newcommand`s + """ + def run(self): + self.options['nowrap'] = True + [node] = super().run() + node['classes'] = ["math-preamble"] + return [node] + +class InferenceDirective(Directive): + r"""A small example of what directives let you do in Sphinx. + + Usage:: + + .. inference:: name + + \n-separated premisses + ---------------------- + conclusion + + Example:: + + .. inference:: Prod-Pro + + \WTEG{T}{s} + s \in \Sort + \WTE{\Gamma::(x:T)}{U}{\Prop} + ----------------------------- + \WTEG{\forall~x:T,U}{\Prop} + """ + required_arguments = 1 + optional_arguments = 0 + has_content = True + + def make_math_node(self, latex): + node = displaymath() + node['latex'] = latex + node['label'] = None # Otherwise equations are numbered + node['nowrap'] = False + node['docname'] = self.state.document.settings.env.docname + node['number'] = None + return node + + @staticmethod + def prepare_latex_operand(op): + # TODO: Could use a fancier inference class in LaTeX + return '%\n\\hspace{3em}%\n'.join(op.strip().splitlines()) + + def prepare_latex(self, content): + parts = re.split('^ *----+ *$', content, flags=re.MULTILINE) + if len(parts) != 2: + raise self.error('Expected two parts in ‘inference’ directive, separated by a rule (----).') + + top, bottom = tuple(InferenceDirective.prepare_latex_operand(p) for p in parts) + return "%\n".join(("\\frac{", top, "}{", bottom, "}")) + + def run(self): + self.assert_has_content() + + title = self.arguments[0] + content = '\n'.join(self.content) + latex = self.prepare_latex(content) + math_node = self.make_math_node(latex) + + tid = nodes.make_id(title) + target = nodes.target('', '', ids=['inference-' + tid]) + 
self.state.document.note_explicit_target(target) + + term, desc = nodes.term('', title), nodes.description('', math_node) + dli = nodes.definition_list_item('', term, desc) + dl = nodes.definition_list(content, target, dli) + set_source_info(self, dl) + return [dl] + +class AnsiColorsParser(): + """Parse ANSI-colored output from Coqtop into Sphinx nodes.""" + + # Coqtop's output crashes ansi.py, because it contains a bunch of extended codes + # This class is a fork of the original ansi.py, released under a BSD license in sphinx-contribs + + COLOR_PATTERN = re.compile('\x1b\\[([^m]+)m') + + def __init__(self): + self.new_nodes, self.pending_nodes = [], [] + + def _finalize_pending_nodes(self): + self.new_nodes.extend(self.pending_nodes) + self.pending_nodes = [] + + def _add_text(self, raw, beg, end): + if beg < end: + text = raw[beg:end] + if self.pending_nodes: + self.pending_nodes[-1].append(nodes.Text(text)) + else: + self.new_nodes.append(nodes.inline('', text)) + + def colorize_str(self, raw): + """Parse raw (an ANSI-colored output string from Coqtop) into Sphinx nodes.""" + last_end = 0 + for match in AnsiColorsParser.COLOR_PATTERN.finditer(raw): + self._add_text(raw, last_end, match.start()) + last_end = match.end() + classes = ansicolors.parse_ansi(match.group(1)) + if 'ansi-reset' in classes: + self._finalize_pending_nodes() + else: + node = nodes.inline() + self.pending_nodes.append(node) + node['classes'].extend(classes) + self._add_text(raw, last_end, len(raw)) + self._finalize_pending_nodes() + return self.new_nodes + +class CoqtopBlocksTransform(Transform): + """Filter handling the actual work for the coqtop directive + + Adds coqtop's responses, colorizes input and output, and merges consecutive + coqtop directives for better visual rendition. 
+ """ + default_priority = 10 + + @staticmethod + def is_coqtop_block(node): + return isinstance(node, nodes.Element) and 'coqtop_options' in node + + @staticmethod + def split_sentences(source): + """Split Coq sentences in source. Could be improved.""" + return re.split(r"(?<=(?<!\.)\.)\s+", source) + + @staticmethod + def parse_options(options): + """Parse options according to the description in CoqtopDirective.""" + opt_undo = 'undo' in options + opt_reset = 'reset' in options + opt_all, opt_none = 'all' in options, 'none' in options + opt_input, opt_output = opt_all or 'in' in options, opt_all or 'out' in options + + unexpected_options = list(set(options) - set(('reset', 'undo', 'all', 'none', 'in', 'out'))) + if unexpected_options: + raise ValueError("Unexpected options for .. coqtop:: {}".format(unexpected_options)) + elif (opt_input or opt_output) and opt_none: + raise ValueError("Inconsistent options for .. coqtop:: ‘none’ with ‘in’, ‘out’, or ‘all’") + elif opt_reset and opt_undo: + raise ValueError("Inconsistent options for .. coqtop:: ‘undo’ with ‘reset’") + + return opt_undo, opt_reset, opt_input and not opt_none, opt_output and not opt_none + + @staticmethod + def block_classes(should_show, contents=None): + """Compute classes to add to a node containing contents. 
+ + :param should_show: Whether this node should be displayed""" + is_empty = contents is not None and re.match(r"^\s*$", contents) + if is_empty or not should_show: + return ['coqtop-hidden'] + else: + return [] + + @staticmethod + def make_rawsource(pairs, opt_input, opt_output): + blocks = [] + for sentence, output in pairs: + output = AnsiColorsParser.COLOR_PATTERN.sub("", output).strip() + if opt_input: + blocks.append(sentence) + if output and opt_output: + blocks.append(re.sub("^", " ", output, flags=re.MULTILINE) + "\n") + return '\n'.join(blocks) + + def add_coqtop_output(self): + """Add coqtop's responses to a Sphinx AST + + Finds nodes to process using is_coqtop_block.""" + with CoqTop(color=True) as repl: + for node in self.document.traverse(CoqtopBlocksTransform.is_coqtop_block): + options = node['coqtop_options'] + opt_undo, opt_reset, opt_input, opt_output = self.parse_options(options) + + if opt_reset: + repl.sendone("Reset Initial.") + pairs = [] + for sentence in self.split_sentences(node.rawsource): + pairs.append((sentence, repl.sendone(sentence))) + if opt_undo: + repl.sendone("Undo {}.".format(len(pairs))) + + dli = nodes.definition_list_item() + for sentence, output in pairs: + # Use Coqdoq to highlight input + in_chunks = highlight_using_coqdoc(sentence) + dli += nodes.term(sentence, '', *in_chunks, classes=self.block_classes(opt_input)) + # Parse ANSI sequences to highlight output + out_chunks = AnsiColorsParser().colorize_str(output) + dli += nodes.definition(output, *out_chunks, classes=self.block_classes(opt_output, output)) + node.clear() + node.rawsource = self.make_rawsource(pairs, opt_input, opt_output) + node['classes'].extend(self.block_classes(opt_input or opt_output)) + node += nodes.inline('', '', classes=['coqtop-reset'] * opt_reset) + node += nodes.definition_list(node.rawsource, dli) + + @staticmethod + def merge_coqtop_classes(kept_node, discarded_node): + discarded_classes = discarded_node['classes'] + if not 
'coqtop-hidden' in discarded_classes: + kept_node['classes'] = [c for c in kept_node['classes'] + if c != 'coqtop-hidden'] + + def merge_consecutive_coqtop_blocks(self): + """Merge consecutive divs wrapping lists of Coq sentences; keep ‘dl’s separate.""" + for node in self.document.traverse(CoqtopBlocksTransform.is_coqtop_block): + if node.parent: + for sibling in node.traverse(include_self=False, descend=False, + siblings=True, ascend=False): + if CoqtopBlocksTransform.is_coqtop_block(sibling): + self.merge_coqtop_classes(node, sibling) + node.extend(sibling.children) + node.parent.remove(sibling) + sibling.parent = None + else: + break + + def apply(self): + self.add_coqtop_output() + self.merge_consecutive_coqtop_blocks() + +class CoqSubdomainsIndex(Index): + """Index subclass to provide subdomain-specific indices. + + Just as in the original manual, we want to have separate indices for each + Coq subdomain (tactics, commands, options, etc)""" + + name, localname, shortname, subdomains = None, None, None, None # Must be overwritten + + def generate(self, docnames=None): + content = defaultdict(list) + items = chain(*(self.domain.data['objects'][subdomain].items() + for subdomain in self.subdomains)) + + for itemname, (docname, _, anchor) in sorted(items, key=lambda x: x[0].lower()): + if docnames and docname not in docnames: + continue + + entries = content[itemname[0].lower()] + entries.append([itemname, 0, docname, anchor, '', '', '']) + + collapse = False + content = sorted(content.items()) + return content, collapse + +class CoqVernacIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "cmdindex", "Command Index", "commands", ["cmd"] + +class CoqTacticIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "tacindex", "Tactic Index", "tactics", ["tac", "tacn"] + +class CoqOptionIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "optindex", "Option Index", "options", ["opt"] + +class 
CoqGallinaIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "thmindex", "Gallina Index", "theorems", ["thm"] + +class CoqExceptionIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "exnindex", "Error Index", "errors", ["exn"] + +class CoqWarningIndex(CoqSubdomainsIndex): + name, localname, shortname, subdomains = "warnindex", "Warning Index", "warnings", ["warn"] + +class IndexXRefRole(XRefRole): + """A link to one of our domain-specific indices.""" + lowercase = True, + innernodeclass = nodes.inline + warn_dangling = True + + def process_link(self, env, refnode, has_explicit_title, title, target): + if not has_explicit_title: + index = CoqDomain.find_index_by_name(target) + if index: + title = index.localname + return title, target + +def GrammarProductionRole(typ, rawtext, text, lineno, inliner, options={}, content=[]): + """An inline role to declare grammar productions that are not in fact included + in a `productionlist` directive. + + Useful to informally introduce a production, as part of running text + """ + #pylint: disable=dangerous-default-value, unused-argument + env = inliner.document.settings.env + targetid = 'grammar-token-{}'.format(text) + target = nodes.target('', '', ids=[targetid]) + inliner.document.note_explicit_target(target) + code = nodes.literal(rawtext, text, role=typ.lower()) + node = nodes.inline(rawtext, '', target, code, classes=['inline-grammar-production']) + set_role_source_info(inliner, lineno, node) + env.domaindata['std']['objects']['token', text] = env.docname, targetid + return [node], [] + +class CoqDomain(Domain): + """A domain to document Coq code. + + Sphinx has a notion of “domains”, used to tailor it to a specific language. + Domains mostly consist in descriptions of the objects that we wish to + describe (for Coq, this includes tactics, tactic notations, options, + exceptions, etc.), as well as domain-specific roles and directives. 
+ + Each domain is responsible for tracking its objects, and resolving + references to them. In the case of Coq, this leads us to define Coq + “subdomains”, which classify objects into categories in which names must be + unique. For example, a tactic and a theorem may share a name, but two + tactics cannot be named the same. + """ + + name = 'coq' + label = 'Coq' + + object_types = { + # ObjType (= directive type) → (Local name, *xref-roles) + 'cmd': ObjType('cmd', 'cmd'), + 'cmdv': ObjType('cmdv', 'cmd'), + 'tac': ObjType('tac', 'tac'), + 'tacn': ObjType('tacn', 'tacn'), + 'tacv': ObjType('tacv', 'tacn'), + 'opt': ObjType('opt', 'opt'), + 'thm': ObjType('thm', 'thm'), + 'exn': ObjType('exn', 'exn'), + 'warn': ObjType('warn', 'warn'), + 'index': ObjType('index', 'index', searchprio=-1) + } + + directives = { + # Note that some directives live in the same semantic subdomain; ie + # there's one directive per object type, but some object types map to + # the same role. + 'cmd': VernacObject, + 'cmdv': VernacVariantObject, + 'tac': TacticObject, + 'tacn': TacticNotationObject, + 'tacv': TacticNotationVariantObject, + 'opt': OptionObject, + 'thm': GallinaObject, + 'exn': ExceptionObject, + 'warn': WarningObject, + } + + roles = { + # Each of these roles lives in a different semantic “subdomain” + 'cmd': XRefRole(), + 'tac': XRefRole(), + 'tacn': XRefRole(), + 'opt': XRefRole(), + 'thm': XRefRole(), + 'exn': XRefRole(), + 'warn': XRefRole(), + # This one is special + 'index': IndexXRefRole(), + # These are used for highlighting + 'notation': NotationRole, + 'gallina': GallinaRole, + 'ltac': LtacRole, + 'n': NotationRole, + 'g': GallinaRole, + 'l': LtacRole, #FIXME unused? 
+ } + + indices = [CoqVernacIndex, CoqTacticIndex, CoqOptionIndex, CoqGallinaIndex, CoqExceptionIndex, CoqWarningIndex] + + data_version = 1 + initial_data = { + # Collect everything under a key that we control, since Sphinx adds + # others, such as “version” + 'objects' : { # subdomain → name → docname, objtype, targetid + 'cmd': {}, + 'tac': {}, + 'tacn': {}, + 'opt': {}, + 'thm': {}, + 'exn': {}, + 'warn': {}, + } + } + + @staticmethod + def find_index_by_name(targetid): + for index in CoqDomain.indices: + if index.name == targetid: + return index + + def get_objects(self): + # Used for searching and object inventories (intersphinx) + for _, objects in self.data['objects'].items(): + for name, (docname, objtype, targetid) in objects.items(): + yield (name, name, objtype, docname, targetid, self.object_types[objtype].attrs['searchprio']) + for index in self.indices: + yield (index.name, index.localname, 'index', "coq-" + index.name, '', -1) + + def merge_domaindata(self, docnames, otherdata): + DUP = "Duplicate declaration: '{}' also defined in '{}'.\n" + for subdomain, their_objects in otherdata['objects'].items(): + our_objects = self.data['objects'][subdomain] + for name, (docname, objtype, targetid) in their_objects.items(): + if docname in docnames: + if name in our_objects: + self.env.warn(docname, DUP.format(name, our_objects[name][0])) + our_objects[name] = (docname, objtype, targetid) + + def resolve_xref(self, env, fromdocname, builder, role, targetname, node, contnode): + # ‘target’ is the name that was written in the document + # ‘role’ is where this xref comes from; it's exactly one of our subdomains + if role == 'index': + index = CoqDomain.find_index_by_name(targetname) + if index: + return make_refnode(builder, fromdocname, "coq-" + index.name, '', contnode, index.localname) + else: + resolved = self.data['objects'][role].get(targetname) + if resolved: + (todocname, _, targetid) = resolved + return make_refnode(builder, fromdocname, todocname, 
targetid, contnode, targetname) + + def clear_doc(self, docname_to_clear): + for subdomain_objects in self.data['objects'].values(): + for name, (docname, _, _) in list(subdomain_objects.items()): + if docname == docname_to_clear: + del subdomain_objects[name] + +def is_coqtop_or_coqdoc_block(node): + return (isinstance(node, nodes.Element) and + ('coqtop' in node['classes'] or 'coqdoc' in node['classes'])) + +def simplify_source_code_blocks_for_latex(app, doctree, fromdocname): # pylint: disable=unused-argument + """Simplify coqdoc and coqtop blocks. + + In HTML mode, this does nothing; in other formats, such as LaTeX, it + replaces coqdoc and coqtop blocks by plain text sources, which will use + pygments if available. This prevents the LaTeX builder from getting + confused. + """ + + is_html = app.builder.tags.has("html") + for node in doctree.traverse(is_coqtop_or_coqdoc_block): + if is_html: + node.rawsource = '' # Prevent pygments from kicking in + else: + if 'coqtop-hidden' in node['classes']: + node.parent.remove(node) + else: + node.replace_self(nodes.literal_block(node.rawsource, node.rawsource, language="Coq")) + +def setup(app): + """Register the Coq domain""" + + # A few sanity checks: + subdomains = set(obj.subdomain for obj in CoqDomain.directives.values()) + assert subdomains == set(chain(*(idx.subdomains for idx in CoqDomain.indices))) + assert subdomains.issubset(CoqDomain.roles.keys()) + + # Add domain, directives, and roles + app.add_domain(CoqDomain) + app.add_role("production", GrammarProductionRole) + app.add_directive("coqtop", CoqtopDirective) + app.add_directive("coqdoc", CoqdocDirective) + app.add_directive("example", ExampleDirective) + app.add_directive("inference", InferenceDirective) + app.add_directive("preamble", PreambleDirective) + app.add_transform(CoqtopBlocksTransform) + app.connect('doctree-resolved', simplify_source_code_blocks_for_latex) + + # Add extra styles + app.add_stylesheet("ansi.css") + 
app.add_stylesheet("coqdoc.css") + app.add_javascript("notations.js") + app.add_stylesheet("notations.css") + app.add_stylesheet("pre-text.css") + + return {'version': '0.1', "parallel_read_safe": True} diff --git a/doc/tools/coqrst/notations/Makefile b/doc/tools/coqrst/notations/Makefile new file mode 100644 index 0000000000..c017aed951 --- /dev/null +++ b/doc/tools/coqrst/notations/Makefile @@ -0,0 +1,27 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +# Parsing compact tactic notation syntax in + +TEST_INPUT="unfold {+, @qualid|@string at {+, num}}" + +python: + antlr4 -Dlanguage=Python3 -visitor -no-listener TacticNotations.g + +java: + antlr4 -Dlanguage=Java TacticNotations.g && javac TacticNotations*.java + +test: java + grun TacticNotations top -tree <<< "$(TEST_INPUT)" + +gui: java + grun TacticNotations top -gui <<< "$(TEST_INPUT)" + +sample: + cd ..; python3 -m coqnotations.driver < ../tests/tactics > ../tests/antlr-notations.html diff --git a/doc/tools/coqrst/notations/TacticNotations.g b/doc/tools/coqrst/notations/TacticNotations.g new file mode 100644 index 0000000000..72ae8eb6be --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotations.g @@ -0,0 +1,30 @@ +/************************************************************************/ +/* * The Coq Proof Assistant / The Coq Development Team */ +/* v * INRIA, CNRS and contributors - Copyright 1999-2018 */ +/* <O___,, * (see CREDITS file for the list of authors) */ +/* \VV/ 
**************************************************************/ +/* // * This file is distributed under the terms of the */ +/* * GNU Lesser General Public License Version 2.1 */ +/* * (see LICENSE file for the text of the license) */ +/************************************************************************/ +grammar TacticNotations; + +// Terminals are not visited, so we add non-terminals for each terminal that +// needs rendering (in particular whitespace (kept in output) vs. WHITESPACE +// (discarded)). + +top: blocks EOF; +blocks: block ((whitespace)? block)*; +block: atomic | hole | repeat | curlies; +repeat: LGROUP (ATOM)? WHITESPACE blocks (WHITESPACE)? RBRACE; +curlies: LBRACE (whitespace)? blocks (whitespace)? RBRACE; +whitespace: WHITESPACE; +atomic: ATOM; +hole: ID; + +LGROUP: '{' [+*?]; +LBRACE: '{'; +RBRACE: '}'; +ATOM: ~[@{} ]+; +ID: '@' [a-zA-Z0-9_]+; +WHITESPACE: ' '+; diff --git a/doc/tools/coqrst/notations/TacticNotations.tokens b/doc/tools/coqrst/notations/TacticNotations.tokens new file mode 100644 index 0000000000..4d41a38837 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotations.tokens @@ -0,0 +1,8 @@ +LGROUP=1 +LBRACE=2 +RBRACE=3 +ATOM=4 +ID=5 +WHITESPACE=6 +'{'=2 +'}'=3 diff --git a/doc/tools/coqrst/notations/TacticNotationsLexer.py b/doc/tools/coqrst/notations/TacticNotationsLexer.py new file mode 100644 index 0000000000..4cac071ac3 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsLexer.py @@ -0,0 +1,60 @@ +# Generated from TacticNotations.g by ANTLR 4.7 +from antlr4 import * +from io import StringIO +from typing.io import TextIO +import sys + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\b") + buf.write("&\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\5\6\5\30\n\5\r\5\16\5\31") + buf.write("\3\6\3\6\6\6\36\n\6\r\6\16\6\37\3\7\6\7#\n\7\r\7\16\7") + 
buf.write("$\2\2\b\3\3\5\4\7\5\t\6\13\7\r\b\3\2\5\4\2,-AA\6\2\"\"") + buf.write("BB}}\177\177\6\2\62;C\\aac|\2(\2\3\3\2\2\2\2\5\3\2\2\2") + buf.write("\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\3\17") + buf.write("\3\2\2\2\5\22\3\2\2\2\7\24\3\2\2\2\t\27\3\2\2\2\13\33") + buf.write("\3\2\2\2\r\"\3\2\2\2\17\20\7}\2\2\20\21\t\2\2\2\21\4\3") + buf.write("\2\2\2\22\23\7}\2\2\23\6\3\2\2\2\24\25\7\177\2\2\25\b") + buf.write("\3\2\2\2\26\30\n\3\2\2\27\26\3\2\2\2\30\31\3\2\2\2\31") + buf.write("\27\3\2\2\2\31\32\3\2\2\2\32\n\3\2\2\2\33\35\7B\2\2\34") + buf.write("\36\t\4\2\2\35\34\3\2\2\2\36\37\3\2\2\2\37\35\3\2\2\2") + buf.write("\37 \3\2\2\2 \f\3\2\2\2!#\7\"\2\2\"!\3\2\2\2#$\3\2\2\2") + buf.write("$\"\3\2\2\2$%\3\2\2\2%\16\3\2\2\2\6\2\31\37$\2") + return buf.getvalue() + + +class TacticNotationsLexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + LGROUP = 1 + LBRACE = 2 + RBRACE = 3 + ATOM = 4 + ID = 5 + WHITESPACE = 6 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "<INVALID>", + "'{'", "'}'" ] + + symbolicNames = [ "<INVALID>", + "LGROUP", "LBRACE", "RBRACE", "ATOM", "ID", "WHITESPACE" ] + + ruleNames = [ "LGROUP", "LBRACE", "RBRACE", "ATOM", "ID", "WHITESPACE" ] + + grammarFileName = "TacticNotations.g" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.7") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None diff --git a/doc/tools/coqrst/notations/TacticNotationsLexer.tokens b/doc/tools/coqrst/notations/TacticNotationsLexer.tokens new file mode 100644 index 0000000000..4d41a38837 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsLexer.tokens @@ -0,0 +1,8 @@ +LGROUP=1 +LBRACE=2 +RBRACE=3 +ATOM=4 +ID=5 +WHITESPACE=6 +'{'=2 
+'}'=3 diff --git a/doc/tools/coqrst/notations/TacticNotationsParser.py b/doc/tools/coqrst/notations/TacticNotationsParser.py new file mode 100644 index 0000000000..357902ddb5 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsParser.py @@ -0,0 +1,519 @@ +# Generated from TacticNotations.g by ANTLR 4.7 +# encoding: utf-8 +from antlr4 import * +from io import StringIO +from typing.io import TextIO +import sys + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\b") + buf.write("A\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b") + buf.write("\t\b\4\t\t\t\3\2\3\2\3\2\3\3\3\3\5\3\30\n\3\3\3\7\3\33") + buf.write("\n\3\f\3\16\3\36\13\3\3\4\3\4\3\4\3\4\5\4$\n\4\3\5\3\5") + buf.write("\5\5(\n\5\3\5\3\5\3\5\5\5-\n\5\3\5\3\5\3\6\3\6\5\6\63") + buf.write("\n\6\3\6\3\6\5\6\67\n\6\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3") + buf.write("\t\3\t\2\2\n\2\4\6\b\n\f\16\20\2\2\2A\2\22\3\2\2\2\4\25") + buf.write("\3\2\2\2\6#\3\2\2\2\b%\3\2\2\2\n\60\3\2\2\2\f:\3\2\2\2") + buf.write("\16<\3\2\2\2\20>\3\2\2\2\22\23\5\4\3\2\23\24\7\2\2\3\24") + buf.write("\3\3\2\2\2\25\34\5\6\4\2\26\30\5\f\7\2\27\26\3\2\2\2\27") + buf.write("\30\3\2\2\2\30\31\3\2\2\2\31\33\5\6\4\2\32\27\3\2\2\2") + buf.write("\33\36\3\2\2\2\34\32\3\2\2\2\34\35\3\2\2\2\35\5\3\2\2") + buf.write("\2\36\34\3\2\2\2\37$\5\16\b\2 $\5\20\t\2!$\5\b\5\2\"$") + buf.write("\5\n\6\2#\37\3\2\2\2# \3\2\2\2#!\3\2\2\2#\"\3\2\2\2$\7") + buf.write("\3\2\2\2%\'\7\3\2\2&(\7\6\2\2\'&\3\2\2\2\'(\3\2\2\2()") + buf.write("\3\2\2\2)*\7\b\2\2*,\5\4\3\2+-\7\b\2\2,+\3\2\2\2,-\3\2") + buf.write("\2\2-.\3\2\2\2./\7\5\2\2/\t\3\2\2\2\60\62\7\4\2\2\61\63") + buf.write("\5\f\7\2\62\61\3\2\2\2\62\63\3\2\2\2\63\64\3\2\2\2\64") + buf.write("\66\5\4\3\2\65\67\5\f\7\2\66\65\3\2\2\2\66\67\3\2\2\2") + buf.write("\678\3\2\2\289\7\5\2\29\13\3\2\2\2:;\7\b\2\2;\r\3\2\2") + buf.write("\2<=\7\6\2\2=\17\3\2\2\2>?\7\7\2\2?\21\3\2\2\2\t\27\34") + buf.write("#\',\62\66") + return buf.getvalue() + + +class 
TacticNotationsParser ( Parser ): + + grammarFileName = "TacticNotations.g" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "<INVALID>", "<INVALID>", "'{'", "'}'" ] + + symbolicNames = [ "<INVALID>", "LGROUP", "LBRACE", "RBRACE", "ATOM", + "ID", "WHITESPACE" ] + + RULE_top = 0 + RULE_blocks = 1 + RULE_block = 2 + RULE_repeat = 3 + RULE_curlies = 4 + RULE_whitespace = 5 + RULE_atomic = 6 + RULE_hole = 7 + + ruleNames = [ "top", "blocks", "block", "repeat", "curlies", "whitespace", + "atomic", "hole" ] + + EOF = Token.EOF + LGROUP=1 + LBRACE=2 + RBRACE=3 + ATOM=4 + ID=5 + WHITESPACE=6 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.7") + self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + class TopContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def blocks(self): + return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) + + + def EOF(self): + return self.getToken(TacticNotationsParser.EOF, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_top + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTop" ): + return visitor.visitTop(self) + else: + return visitor.visitChildren(self) + + + + + def top(self): + + localctx = TacticNotationsParser.TopContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_top) + try: + self.enterOuterAlt(localctx, 1) + self.state = 16 + self.blocks() + self.state = 17 + self.match(TacticNotationsParser.EOF) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + 
self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class BlocksContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def block(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TacticNotationsParser.BlockContext) + else: + return self.getTypedRuleContext(TacticNotationsParser.BlockContext,i) + + + def whitespace(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) + else: + return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) + + + def getRuleIndex(self): + return TacticNotationsParser.RULE_blocks + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitBlocks" ): + return visitor.visitBlocks(self) + else: + return visitor.visitChildren(self) + + + + + def blocks(self): + + localctx = TacticNotationsParser.BlocksContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_blocks) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 19 + self.block() + self.state = 26 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 21 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 20 + self.whitespace() + + + self.state = 23 + self.block() + self.state = 28 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,1,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class BlockContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + 
super().__init__(parent, invokingState) + self.parser = parser + + def atomic(self): + return self.getTypedRuleContext(TacticNotationsParser.AtomicContext,0) + + + def hole(self): + return self.getTypedRuleContext(TacticNotationsParser.HoleContext,0) + + + def repeat(self): + return self.getTypedRuleContext(TacticNotationsParser.RepeatContext,0) + + + def curlies(self): + return self.getTypedRuleContext(TacticNotationsParser.CurliesContext,0) + + + def getRuleIndex(self): + return TacticNotationsParser.RULE_block + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitBlock" ): + return visitor.visitBlock(self) + else: + return visitor.visitChildren(self) + + + + + def block(self): + + localctx = TacticNotationsParser.BlockContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_block) + try: + self.state = 33 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [TacticNotationsParser.ATOM]: + self.enterOuterAlt(localctx, 1) + self.state = 29 + self.atomic() + pass + elif token in [TacticNotationsParser.ID]: + self.enterOuterAlt(localctx, 2) + self.state = 30 + self.hole() + pass + elif token in [TacticNotationsParser.LGROUP]: + self.enterOuterAlt(localctx, 3) + self.state = 31 + self.repeat() + pass + elif token in [TacticNotationsParser.LBRACE]: + self.enterOuterAlt(localctx, 4) + self.state = 32 + self.curlies() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class RepeatContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LGROUP(self): + return self.getToken(TacticNotationsParser.LGROUP, 0) + + def WHITESPACE(self, i:int=None): + if i is None: + return 
self.getTokens(TacticNotationsParser.WHITESPACE) + else: + return self.getToken(TacticNotationsParser.WHITESPACE, i) + + def blocks(self): + return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) + + + def RBRACE(self): + return self.getToken(TacticNotationsParser.RBRACE, 0) + + def ATOM(self): + return self.getToken(TacticNotationsParser.ATOM, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_repeat + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRepeat" ): + return visitor.visitRepeat(self) + else: + return visitor.visitChildren(self) + + + + + def repeat(self): + + localctx = TacticNotationsParser.RepeatContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_repeat) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 35 + self.match(TacticNotationsParser.LGROUP) + self.state = 37 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.ATOM: + self.state = 36 + self.match(TacticNotationsParser.ATOM) + + + self.state = 39 + self.match(TacticNotationsParser.WHITESPACE) + self.state = 40 + self.blocks() + self.state = 42 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 41 + self.match(TacticNotationsParser.WHITESPACE) + + + self.state = 44 + self.match(TacticNotationsParser.RBRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class CurliesContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def LBRACE(self): + return self.getToken(TacticNotationsParser.LBRACE, 0) + + def blocks(self): + return self.getTypedRuleContext(TacticNotationsParser.BlocksContext,0) + + + def RBRACE(self): + 
return self.getToken(TacticNotationsParser.RBRACE, 0) + + def whitespace(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TacticNotationsParser.WhitespaceContext) + else: + return self.getTypedRuleContext(TacticNotationsParser.WhitespaceContext,i) + + + def getRuleIndex(self): + return TacticNotationsParser.RULE_curlies + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitCurlies" ): + return visitor.visitCurlies(self) + else: + return visitor.visitChildren(self) + + + + + def curlies(self): + + localctx = TacticNotationsParser.CurliesContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_curlies) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 46 + self.match(TacticNotationsParser.LBRACE) + self.state = 48 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 47 + self.whitespace() + + + self.state = 50 + self.blocks() + self.state = 52 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TacticNotationsParser.WHITESPACE: + self.state = 51 + self.whitespace() + + + self.state = 54 + self.match(TacticNotationsParser.RBRACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class WhitespaceContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def WHITESPACE(self): + return self.getToken(TacticNotationsParser.WHITESPACE, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_whitespace + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitWhitespace" ): + return visitor.visitWhitespace(self) + else: + return visitor.visitChildren(self) + + + + + def whitespace(self): + + localctx = 
TacticNotationsParser.WhitespaceContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_whitespace) + try: + self.enterOuterAlt(localctx, 1) + self.state = 56 + self.match(TacticNotationsParser.WHITESPACE) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class AtomicContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ATOM(self): + return self.getToken(TacticNotationsParser.ATOM, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_atomic + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAtomic" ): + return visitor.visitAtomic(self) + else: + return visitor.visitChildren(self) + + + + + def atomic(self): + + localctx = TacticNotationsParser.AtomicContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_atomic) + try: + self.enterOuterAlt(localctx, 1) + self.state = 58 + self.match(TacticNotationsParser.ATOM) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class HoleContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def ID(self): + return self.getToken(TacticNotationsParser.ID, 0) + + def getRuleIndex(self): + return TacticNotationsParser.RULE_hole + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitHole" ): + return visitor.visitHole(self) + else: + return visitor.visitChildren(self) + + + + + def hole(self): + + localctx = TacticNotationsParser.HoleContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, 
self.RULE_hole) + try: + self.enterOuterAlt(localctx, 1) + self.state = 60 + self.match(TacticNotationsParser.ID) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx diff --git a/doc/tools/coqrst/notations/TacticNotationsVisitor.py b/doc/tools/coqrst/notations/TacticNotationsVisitor.py new file mode 100644 index 0000000000..80e69d4335 --- /dev/null +++ b/doc/tools/coqrst/notations/TacticNotationsVisitor.py @@ -0,0 +1,53 @@ +# Generated from TacticNotations.g by ANTLR 4.7 +from antlr4 import * +if __name__ is not None and "." in __name__: + from .TacticNotationsParser import TacticNotationsParser +else: + from TacticNotationsParser import TacticNotationsParser + +# This class defines a complete generic visitor for a parse tree produced by TacticNotationsParser. + +class TacticNotationsVisitor(ParseTreeVisitor): + + # Visit a parse tree produced by TacticNotationsParser#top. + def visitTop(self, ctx:TacticNotationsParser.TopContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#blocks. + def visitBlocks(self, ctx:TacticNotationsParser.BlocksContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#block. + def visitBlock(self, ctx:TacticNotationsParser.BlockContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#repeat. + def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#curlies. + def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#whitespace. 
+ def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#atomic. + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TacticNotationsParser#hole. + def visitHole(self, ctx:TacticNotationsParser.HoleContext): + return self.visitChildren(ctx) + + + +del TacticNotationsParser
\ No newline at end of file diff --git a/doc/tools/coqrst/notations/UbuntuMono-B.ttf b/doc/tools/coqrst/notations/UbuntuMono-B.ttf Binary files differnew file mode 100644 index 0000000000..7bd6665765 --- /dev/null +++ b/doc/tools/coqrst/notations/UbuntuMono-B.ttf diff --git a/doc/tools/coqrst/notations/UbuntuMono-Square.ttf b/doc/tools/coqrst/notations/UbuntuMono-Square.ttf Binary files differnew file mode 100644 index 0000000000..a53a9a0f03 --- /dev/null +++ b/doc/tools/coqrst/notations/UbuntuMono-Square.ttf diff --git a/doc/tools/coqrst/notations/__init__.py b/doc/tools/coqrst/notations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/doc/tools/coqrst/notations/__init__.py diff --git a/doc/tools/coqrst/notations/fontsupport.py b/doc/tools/coqrst/notations/fontsupport.py new file mode 100755 index 0000000000..3402ea2aaf --- /dev/null +++ b/doc/tools/coqrst/notations/fontsupport.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""Transform a font to center each of its characters in square bounding boxes. + +See https://stackoverflow.com/questions/37377476/ for background information. 
+""" + +from collections import Counter + +try: + import fontforge + import psMat +except ImportError: + print("This program requires FontForge's python bindings:") + print(" git clone https://github.com/fontforge/fontforge") + print(" cd fontforge") + print(" ./bootstrap") + print(" ./configure") + print(" make -j8") + print(" sudo make install") + raise + +def glyph_height(glyph): + _, ylo, _, yhi = glyph.boundingBox() + return yhi - ylo + +def scale_single_glyph(glyph, width, height): + """Center glyph in a box of size width*height""" + # Some glyphs (such as ‘;’) contain references (‘.’ + ‘,’), and moving the + # referenced glyphs moves them in all glyphs that reference them. + # Unlinking copies references into this glyph + glyph.unlinkThisGlyph() + # Width + deltaw = width - glyph.width + glyph.left_side_bearing += deltaw / 2 + glyph.right_side_bearing += deltaw - glyph.left_side_bearing + glyph.width = width + # Height + ylo = glyph.boundingBox()[1] + deltah = height - glyph_height(glyph) + glyph.transform(psMat.translate(0, deltah / 2.0 - ylo)) + +def avg(lst): + lst = list(lst) + return sum(lst) / float(len(lst)) + +def trim_font(fnt): + """Remove characters codes beyond 191 front fnt""" + for g in fnt.glyphs(): + if g.unicode >= 191: + fnt.removeGlyph(g) + return fnt + +def center_glyphs(src_font_path, dst_font_path, dst_name): + fnt = trim_font(fontforge.open(src_font_path)) + + size = max(max(g.width for g in fnt.glyphs()), + max(glyph_height(g) for g in fnt.glyphs())) + fnt.ascent, fnt.descent = size, 0 + for glyph in fnt.glyphs(): + scale_single_glyph(glyph, size, size) + + fnt.sfnt_names = [] + fnt.fontname = fnt.familyname = fnt.fullname = dst_name + fnt.generate(dst_font_path) + +if __name__ == '__main__': + from os.path import dirname, join, abspath + curdir = dirname(abspath(__file__)) + ubuntumono_path = join(curdir, "UbuntuMono-B.ttf") + ubuntumono_mod_path = join(curdir, "UbuntuMono-Square.ttf") + center_glyphs(ubuntumono_path, 
ubuntumono_mod_path, "UbuntuMono-Square") diff --git a/doc/tools/coqrst/notations/html.py b/doc/tools/coqrst/notations/html.py new file mode 100644 index 0000000000..d91bbb64c4 --- /dev/null +++ b/doc/tools/coqrst/notations/html.py @@ -0,0 +1,65 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""A visitor for ANTLR notation ASTs, producing raw HTML. + +Uses the dominate package. +""" + +from dominate import tags + +from .parsing import parse +from .TacticNotationsParser import TacticNotationsParser +from .TacticNotationsVisitor import TacticNotationsVisitor + +class TacticNotationsToHTMLVisitor(TacticNotationsVisitor): + def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): + with tags.span(_class="repeat-wrapper"): + with tags.span(_class="repeat"): + self.visitChildren(ctx) + repeat_marker = ctx.LGROUP().getText()[1] + separator = ctx.ATOM() + tags.sup(repeat_marker) + if separator: + tags.sub(separator.getText()) + + def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): + sp = tags.span(_class="curlies") + sp.add("{") + with sp: + self.visitChildren(ctx) + sp.add("}") + + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): + tags.span(ctx.ATOM().getText()) + + def visitHole(self, ctx:TacticNotationsParser.HoleContext): + tags.span(ctx.ID().getText()[1:], _class="hole") + + def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): + tags.span(" ") # TODO: no need for a <span> here + +def 
htmlize(notation): + """Translate notation to a dominate HTML tree""" + top = tags.span(_class='notation') + with top: + TacticNotationsToHTMLVisitor().visit(parse(notation)) + return top + +def htmlize_str(notation): + """Translate notation to a raw HTML document""" + # ‘pretty=True’ introduces spurious spaces + return htmlize(notation).render(pretty=False) + +def htmlize_p(notation): + """Like `htmlize`, wrapped in a ‘p’. + Does not return: instead, must be run in a dominate context. + """ + with tags.p(): + htmlize(notation) diff --git a/doc/tools/coqrst/notations/parsing.py b/doc/tools/coqrst/notations/parsing.py new file mode 100644 index 0000000000..73be6f26ed --- /dev/null +++ b/doc/tools/coqrst/notations/parsing.py @@ -0,0 +1,37 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +from .TacticNotationsLexer import TacticNotationsLexer +from .TacticNotationsParser import TacticNotationsParser + +from antlr4 import CommonTokenStream, InputStream + +SUBSTITUTIONS = [("@bindings_list", "{+ (@id := @val) }"), + ("@qualid_or_string", "@id|@string")] + +def substitute(notation): + """Perform common substitutions in the notation string. + + Nested notations quickly became unwieldy in the original ‘…’-based format, + so they were avoided and replaced by pointers to grammar rules. With the + new format, it's usually nicer to remove the indirection. 
+ """ + for (src, dst) in SUBSTITUTIONS: + notation = notation.replace(src, dst) + return notation + +def parse(notation): + """Parse a notation string. + + :return: An ANTLR AST. Use one of the supplied visitors (or write your own) + to turn it into useful output. + """ + substituted = substitute(notation) + lexer = TacticNotationsLexer(InputStream(substituted)) + return TacticNotationsParser(CommonTokenStream(lexer)).top() diff --git a/doc/tools/coqrst/notations/plain.py b/doc/tools/coqrst/notations/plain.py new file mode 100644 index 0000000000..5d4501892f --- /dev/null +++ b/doc/tools/coqrst/notations/plain.py @@ -0,0 +1,50 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""A visitor for ANTLR notation ASTs, producing plain text with ellipses. + +Somewhat-closely approximates the rendering of the original manual. 
+""" + +from io import StringIO + +from .parsing import parse +from .TacticNotationsParser import TacticNotationsParser +from .TacticNotationsVisitor import TacticNotationsVisitor + +class TacticNotationsToDotsVisitor(TacticNotationsVisitor): + def __init__(self): + self.buffer = StringIO() + + def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): + separator = ctx.ATOM() + self.visitChildren(ctx) + if ctx.LGROUP().getText()[1] == "+": + spacer = (separator.getText() + " " if separator else "") + self.buffer.write(spacer + "…" + spacer) + self.visitChildren(ctx) + + def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): + self.buffer.write("{") + self.visitChildren(ctx) + self.buffer.write("}") + + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): + self.buffer.write(ctx.ATOM().getText()) + + def visitHole(self, ctx:TacticNotationsParser.HoleContext): + self.buffer.write("‘{}’".format(ctx.ID().getText()[1:])) + + def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): + self.buffer.write(" ") + +def stringify_with_ellipses(notation): + vs = TacticNotationsToDotsVisitor() + vs.visit(parse(notation)) + return vs.buffer.getvalue() diff --git a/doc/tools/coqrst/notations/regexp.py b/doc/tools/coqrst/notations/regexp.py new file mode 100644 index 0000000000..cac6aaecbb --- /dev/null +++ b/doc/tools/coqrst/notations/regexp.py @@ -0,0 +1,57 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""An 
experimental visitor for ANTLR notation ASTs, producing regular expressions.""" + +import re +from io import StringIO + +from .parsing import parse +from .TacticNotationsParser import TacticNotationsParser +from .TacticNotationsVisitor import TacticNotationsVisitor + +class TacticNotationsToRegexpVisitor(TacticNotationsVisitor): + def __init__(self): + self.buffer = StringIO() + + def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): + separator = ctx.ATOM() + repeat_marker = ctx.LGROUP().getText()[1] + + self.buffer.write("(") + self.visitChildren(ctx) + self.buffer.write(")") + + if repeat_marker in ["?", "*"]: + self.buffer.write("?") + elif repeat_marker in ["+", "*"]: + self.buffer.write("(") + self.buffer.write(r"\s*" + re.escape(separator.getText() if separator else " ") + r"\s*") + self.visitChildren(ctx) + self.buffer.write(")*") + + def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): + self.buffer.write(r"\{") + self.visitChildren(ctx) + self.buffer.write(r"\}") + + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): + self.buffer.write(re.escape(ctx.ATOM().getText())) + + def visitHole(self, ctx:TacticNotationsParser.HoleContext): + self.buffer.write("([^();. 
\n]+)") # FIXME could allow more things + + def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): + self.buffer.write(r"\s+") + +def regexpify(notation): + """Translate notation to a Python regular expression matching it""" + vs = TacticNotationsToRegexpVisitor() + vs.visit(parse(notation)) + return vs.buffer.getvalue() diff --git a/doc/tools/coqrst/notations/sphinx.py b/doc/tools/coqrst/notations/sphinx.py new file mode 100644 index 0000000000..889bf70a46 --- /dev/null +++ b/doc/tools/coqrst/notations/sphinx.py @@ -0,0 +1,77 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +"""A visitor for ANTLR notation ASTs, producing Sphinx nodes. + +Unlike the HTML visitor, this produces Sphinx-friendly nodes that can be used by +all backends. If you just want HTML output, use the HTML visitor. 
+""" + +from .parsing import parse +from .TacticNotationsParser import TacticNotationsParser +from .TacticNotationsVisitor import TacticNotationsVisitor + +from docutils import nodes +from sphinx import addnodes + +class TacticNotationsToSphinxVisitor(TacticNotationsVisitor): + def defaultResult(self): + return [] + + def aggregateResult(self, aggregate, nextResult): + if nextResult: + aggregate.extend(nextResult) + return aggregate + + def visitRepeat(self, ctx:TacticNotationsParser.RepeatContext): + # Uses inline nodes instead of subscript and superscript to ensure that + # we get the right customization hooks at the LaTeX level + wrapper = nodes.inline('', '', classes=['repeat-wrapper']) + wrapper += nodes.inline('', '', *self.visitChildren(ctx), classes=["repeat"]) + + repeat_marker = ctx.LGROUP().getText()[1] + wrapper += nodes.inline(repeat_marker, repeat_marker, classes=['notation-sup']) + + separator = ctx.ATOM() + if separator: + sep = separator.getText() + wrapper += nodes.inline(sep, sep, classes=['notation-sub']) + + return [wrapper] + + def visitCurlies(self, ctx:TacticNotationsParser.CurliesContext): + sp = nodes.inline('', '', classes=["curlies"]) + sp += nodes.Text("{") + sp.extend(self.visitChildren(ctx)) + sp += nodes.Text("}") + return [sp] + + def visitAtomic(self, ctx:TacticNotationsParser.AtomicContext): + atom = ctx.ATOM().getText() + return [nodes.inline(atom, atom)] + + def visitHole(self, ctx:TacticNotationsParser.HoleContext): + hole = ctx.ID().getText() + token_name = hole[1:] + node = nodes.inline(hole, token_name, classes=["hole"]) + return [addnodes.pending_xref(token_name, node, reftype='token', refdomain='std', reftarget=token_name)] + + def visitWhitespace(self, ctx:TacticNotationsParser.WhitespaceContext): + return [nodes.Text(" ")] + +def sphinxify(notation): + """Translate notation into a Sphinx document tree""" + vs = TacticNotationsToSphinxVisitor() + return vs.visit(parse(notation)) + +def main(): + print(sphinxify("a := {+, 
b {+ c}}")) + +if __name__ == '__main__': + main() diff --git a/doc/tools/coqrst/repl/__init__.py b/doc/tools/coqrst/repl/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/doc/tools/coqrst/repl/__init__.py diff --git a/doc/tools/coqrst/repl/ansicolors.py b/doc/tools/coqrst/repl/ansicolors.py new file mode 100644 index 0000000000..495eea9107 --- /dev/null +++ b/doc/tools/coqrst/repl/ansicolors.py @@ -0,0 +1,99 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +""" +Parse Coq's ANSI output. +======================== + +Translated to Python from Coq's terminal.ml. 
+""" + +# pylint: disable=too-many-return-statements, too-many-branches + +def parse_color(style, offset): + color = style[offset] % 10 + if color == 0: + return ("black", 1) + elif color == 1: + return ("red", 1) + elif color == 2: + return ("green", 1) + elif color == 3: + return ("yellow", 1) + elif color == 4: + return ("blue", 1) + elif color == 5: + return ("magenta", 1) + elif color == 6: + return ("cyan", 1) + elif color == 7: + return ("white", 1) + elif color == 9: + return ("default", 1) + elif color == 8: + nxt = style[offset + 1] + if nxt == 5: + return ("index-{}".format(style[offset + 1]), 2) + elif nxt == 2: + return ("rgb-{}-{}-{}".format(*style[offset+1:offset+4]), 4) + else: + raise ValueError("{}, {}".format(style, offset)) + else: + raise ValueError() + +def parse_style(style, offset, acc): + offset = 0 + while offset < len(style): + head = style[offset] + + if head == 0: + acc.append("reset") + elif head == 1: + acc.append("bold") + elif head == 3: + acc.append("italic") + elif head == 4: + acc.append("underline") + elif head == 7: + acc.append("negative") + elif head == 22: + acc.append("no-bold") + elif head == 23: + acc.append("no-italic") + elif head == 24: + acc.append("no-underline") + elif head == 27: + acc.append("no-negative") + else: + color, suboffset = parse_color(style, offset) + offset += suboffset - 1 + if 30 <= head < 40: + acc.append("fg-{}".format(color)) + elif 40 <= head < 50: + acc.append("bg-{}".format(color)) + elif 90 <= head < 100: + acc.append("fg-light-{}".format(color)) + elif 100 <= head < 110: + acc.append("bg-light-{}".format(color)) + + offset += 1 + +def parse_ansi(code): + """Parse an ansi code into a collection of CSS classes. + + :param code: A sequence of ‘;’-separated ANSI codes. 
Do not include the + leading ‘^[[’ or the final ‘m’ + """ + classes = [] + parse_style([int(c) for c in code.split(';')], 0, classes) + return ["ansi-" + cls for cls in classes] + +if __name__ == '__main__': + # As produced by Coq with ‘Check nat.’ + print(parse_ansi("92;49;22;23;24;27")) diff --git a/doc/tools/coqrst/repl/coqtop.py b/doc/tools/coqrst/repl/coqtop.py new file mode 100644 index 0000000000..2df97d3dc5 --- /dev/null +++ b/doc/tools/coqrst/repl/coqtop.py @@ -0,0 +1,98 @@ +########################################################################## +## # The Coq Proof Assistant / The Coq Development Team ## +## v # INRIA, CNRS and contributors - Copyright 1999-2018 ## +## <O___,, # (see CREDITS file for the list of authors) ## +## \VV/ ############################################################### +## // # This file is distributed under the terms of the ## +## # GNU Lesser General Public License Version 2.1 ## +## # (see LICENSE file for the text of the license) ## +########################################################################## +""" +Drive coqtop with Python! +========================= + +This module is a simple pexpect-based driver for coqtop, based on the old +REPL interface. +""" + +import os +import re + +import pexpect + +class CoqTop: + """Create an instance of coqtop. + + Use this as a context manager: no instance of coqtop is created until + you call `__enter__`. coqtop is terminated when you `__exit__` the + context manager. + + Sentence parsing is very basic for now (a "." in a quoted string will + confuse it). + """ + + COQTOP_PROMPT = re.compile("\r\n[^< ]+ < ") + + def __init__(self, coqtop_bin=None, color=False, args=None) -> str: + """Configure a coqtop instance (but don't start it yet). + + :param coqtop_bin: The path to coqtop; uses $COQBIN by default, falling back to "coqtop" + :param color: When True, tell coqtop to produce ANSI color codes (see + the ansicolors module) + :param args: Additional arugments to coqtop. 
+ """ + self.coqtop_bin = coqtop_bin or os.path.join(os.getenv('COQBIN'),"coqtop") + self.args = (args or []) + ["-boot", "-color", "on"] * color + self.coqtop = None + + def __enter__(self): + if self.coqtop: + raise ValueError("This module isn't re-entrant") + self.coqtop = pexpect.spawn(self.coqtop_bin, args=self.args, echo=False, encoding="utf-8") + # Disable delays (http://pexpect.readthedocs.io/en/stable/commonissues.html?highlight=delaybeforesend) + self.coqtop.delaybeforesend = 0 + self.next_prompt() + return self + + def __exit__(self, type, value, traceback): + self.coqtop.kill(9) + + def next_prompt(self): + "Wait for the next coqtop prompt, and return the output preceeding it." + self.coqtop.expect(CoqTop.COQTOP_PROMPT, timeout = 1) + return self.coqtop.before + + def sendone(self, sentence): + """Send a single sentence to coqtop. + + :sentence: One Coq sentence (otherwise, Coqtop will produce multiple + prompts and we'll get confused) + """ + # Suppress newlines, but not spaces: they are significant in notations + sentence = re.sub(r"[\r\n]+", " ", sentence).strip() + # print("Sending {}".format(sentence)) + self.coqtop.sendline(sentence) + output = self.next_prompt() + # print("Got {}".format(repr(output))) + return output + +def sendmany(*sentences): + """A small demo: send each sentence in sentences and print the output""" + with CoqTop() as coqtop: + for sentence in sentences: + print("=====================================") + print(sentence) + print("-------------------------------------") + response = coqtop.sendone(sentence) + print(response) + +def main(): + """Run a simple performance test and demo `sendmany`""" + with CoqTop() as coqtop: + for _ in range(200): + print(repr(coqtop.sendone("Check nat."))) + sendmany("Goal False -> True.", "Proof.", "intros H.", + "Check H.", "Chchc.", "apply I.", "Qed.") + +if __name__ == '__main__': + main() diff --git a/ide/coqOps.ml b/ide/coqOps.ml index b45a87b1f6..78fbce5c81 100644 --- a/ide/coqOps.ml +++ 
b/ide/coqOps.ml @@ -141,7 +141,8 @@ object method process_next_phrase : unit task method process_until_end_or_error : unit task method handle_reset_initial : unit task - method raw_coq_query : string -> unit task + method raw_coq_query : + route_id:int -> next:(query_rty value -> unit task) -> string -> unit task method show_goals : unit task method backtrack_last_phrase : unit task method initialize : unit task @@ -164,17 +165,6 @@ let flags_to_color f = else if List.mem `INCOMPLETE f then `NAME "gray" else `NAME Preferences.processed_color#get -(* Move to utils? *) -let rec validate (s : Pp.t) = match Pp.repr s with - | Pp.Ppcmd_empty - | Pp.Ppcmd_print_break _ - | Pp.Ppcmd_force_newline -> true - | Pp.Ppcmd_glue l -> List.for_all validate l - | Pp.Ppcmd_string s -> Glib.Utf8.validate s - | Pp.Ppcmd_box (_,s) - | Pp.Ppcmd_tag (_,s) -> validate s - | Pp.Ppcmd_comment s -> List.for_all Glib.Utf8.validate s - module Doc = Document let segment_model (doc : sentence Doc.document) : Wg_Segment.model = @@ -229,7 +219,7 @@ end class coqops (_script:Wg_ScriptView.script_view) (_pv:Wg_ProofView.proof_view) - (_mv:Wg_MessageView.message_view) + (_mv:Wg_RoutedMessageViews.message_views_router) (_sg:Wg_Segment.segment) (_ct:Coq.coqtop) get_filename = @@ -364,23 +354,12 @@ object(self) method show_goals = self#show_goals_aux () (* This method is intended to perform stateless commands *) - method raw_coq_query phrase = + method raw_coq_query ~route_id ~next phrase : unit Coq.task = let sid = try Document.tip document with Document.Empty -> Stateid.initial in let action = log "raw_coq_query starting now" in - let display_error s = - if not (validate s) then - flash_info "This error is so nasty that I can't even display it." 
- else messages#add s; - in - let query = - let route_id = 0 in - Coq.query (route_id,(phrase,sid)) in - let next = function - | Fail (_, _, err) -> display_error err; Coq.return () - | Good msg -> Coq.return () - in + let query = Coq.query (route_id,(phrase,sid)) in Coq.bind (Coq.seq action query) next method private mark_as_needed sentence = @@ -472,22 +451,22 @@ object(self) self#mark_as_needed sentence; self#attach_tooltip ?loc sentence rmsg; self#position_tag_at_sentence ?loc Tags.Script.error sentence - | Message(Warning, loc, msg), Some (id,sentence) -> - log_pp ?id Pp.(str "WarningMsg " ++ msg); - let rmsg = Pp.string_of_ppcmds msg in + | Message(Warning, loc, message), Some (id,sentence) -> + log_pp ?id Pp.(str "WarningMsg " ++ message); + let rmsg = Pp.string_of_ppcmds message in add_flag sentence (`WARNING (loc, rmsg)); self#attach_tooltip ?loc sentence rmsg; self#position_tag_at_sentence ?loc Tags.Script.warning sentence; - messages#push Warning msg - | Message(lvl, loc, msg), Some (id,sentence) -> - log_pp ?id Pp.(str "Msg " ++ msg); - messages#push lvl msg + (messages#route msg.route)#push Warning message + | Message(lvl, loc, message), Some (id,sentence) -> + log_pp ?id Pp.(str "Msg " ++ message); + (messages#route msg.route)#push lvl message (* We do nothing here as for BZ#5583 *) | Message(Error, loc, msg), None -> log_pp Pp.(str "Error Msg without a sentence" ++ msg) - | Message(lvl, loc, msg), None -> - log_pp Pp.(str "Msg without a sentence " ++ msg); - messages#push lvl msg + | Message(lvl, loc, message), None -> + log_pp Pp.(str "Msg without a sentence " ++ message); + (messages#route msg.route)#push lvl message | InProgress n, _ -> if n < 0 then processed <- processed + abs n else to_process <- to_process + n @@ -538,8 +517,8 @@ object(self) if Stateid.equal id tip || Stateid.equal id Stateid.dummy then begin self#position_tag_at_iter ?loc start stop Tags.Script.error phrase; buffer#place_cursor ~where:stop; - messages#clear; - messages#push 
Feedback.Error msg; + messages#default_route#clear; + messages#default_route#push Feedback.Error msg; self#show_goals end else self#show_goals_aux ~move_insert:true () @@ -597,12 +576,12 @@ object(self) (** Compute the phrases until [until] returns [true]. *) method private process_until ?move_insert until verbose = - let logger lvl msg = if verbose then messages#push lvl msg in + let logger lvl msg = if verbose then messages#default_route#push lvl msg in let fill_queue = Coq.lift (fun () -> let queue = Queue.create () in (* Lock everything and fill the waiting queue *) push_info "Coq is computing"; - messages#clear; + messages#default_route#clear; script#set_editable false; self#fill_command_queue until queue; (* Now unlock and process asynchronously. Since [until] @@ -664,8 +643,9 @@ object(self) method join_document = let next = function | Good _ -> - messages#clear; - messages#push Feedback.Info (Pp.str "All proof terms checked by the kernel"); + messages#default_route#clear; + messages#default_route#push + Feedback.Info (Pp.str "All proof terms checked by the kernel"); Coq.return () | Fail x -> self#handle_failure x in Coq.bind (Coq.status true) next @@ -767,7 +747,7 @@ object(self) conclusion () | Fail (safe_id, loc, msg) -> (* if loc <> None then messages#push Feedback.Error (Richpp.richpp_of_string "Fixme LOC"); *) - messages#push Feedback.Error msg; + messages#default_route#push Feedback.Error msg; if Stateid.equal safe_id Stateid.dummy then self#show_goals else undo safe_id (Doc.focused document && Doc.is_in_focus document safe_id)) @@ -784,7 +764,7 @@ object(self) method private handle_failure_aux ?(move_insert=false) (safe_id, (loc : (int * int) option), msg) = - messages#push Feedback.Error msg; + messages#default_route#push Feedback.Error msg; ignore(self#process_feedback ()); if Stateid.equal safe_id Stateid.dummy then Coq.lift (fun () -> ()) else @@ -796,7 +776,7 @@ object(self) method handle_failure f = self#handle_failure_aux f method 
backtrack_last_phrase = - messages#clear; + messages#default_route#clear; try let tgt = Doc.before_tip document in self#backtrack_to_id tgt @@ -804,7 +784,7 @@ object(self) method go_to_insert = Coq.bind (Coq.return ()) (fun () -> - messages#clear; + messages#default_route#clear; let point = self#get_insert in if point#compare self#get_start_of_input >= 0 then self#process_until_iter point @@ -812,7 +792,7 @@ object(self) method go_to_mark m = Coq.bind (Coq.return ()) (fun () -> - messages#clear; + messages#default_route#clear; let point = buffer#get_iter_at_mark m in if point#compare self#get_start_of_input >= 0 then Coq.seq (self#process_until_iter point) @@ -837,14 +817,11 @@ object(self) ~stop:(`MARK (buffer#create_mark stop)) [] in Doc.push document sentence; - messages#clear; + messages#default_route#clear; self#show_goals in let display_error (loc, s) = - if not (validate s) then - flash_info "This error is so nasty that I can't even display it." - else messages#add s - in + messages#default_route#add (Ideutils.validate s) in let try_phrase phrase stop more = let action = log "Sending to coq now" in let route_id = 0 in @@ -852,7 +829,7 @@ object(self) let next = function | Fail (_, l, str) -> (* FIXME: check *) display_error (l, str); - messages#add (Pp.str ("Unsuccessfully tried: "^phrase)); + messages#default_route#add (Pp.str ("Unsuccessfully tried: "^phrase)); more | Good () -> stop Tags.Script.processed in @@ -882,7 +859,7 @@ object(self) buffer#move_mark ~where:buffer#end_iter (`NAME "stop_of_input"); Sentence.tag_all buffer; (* clear the views *) - messages#clear; + messages#default_route#clear; proof#clear (); clear_info (); processed <- 0; diff --git a/ide/coqOps.mli b/ide/coqOps.mli index ce983c882b..3685fea92e 100644 --- a/ide/coqOps.mli +++ b/ide/coqOps.mli @@ -9,6 +9,7 @@ (************************************************************************) open Coq +open Interface class type ops = object @@ -18,7 +19,8 @@ object method process_next_phrase 
: unit task method process_until_end_or_error : unit task method handle_reset_initial : unit task - method raw_coq_query : string -> unit task + method raw_coq_query : + route_id:int -> next:(query_rty value -> unit task) -> string -> unit task method show_goals : unit task method backtrack_last_phrase : unit task method initialize : unit task @@ -30,7 +32,7 @@ object method get_slaves_status : int * int * string CString.Map.t - method handle_failure : Interface.handle_exn_rty -> unit task + method handle_failure : handle_exn_rty -> unit task method destroy : unit -> unit end @@ -38,7 +40,7 @@ end class coqops : Wg_ScriptView.script_view -> Wg_ProofView.proof_view -> - Wg_MessageView.message_view -> + Wg_RoutedMessageViews.message_views_router -> Wg_Segment.segment -> coqtop -> (unit -> string option) -> diff --git a/ide/coqide.ml b/ide/coqide.ml index 82b7ba32c0..aa816f2b8b 100644 --- a/ide/coqide.ml +++ b/ide/coqide.ml @@ -332,10 +332,10 @@ let export kind sn = local_cd f ^ cmd_coqdoc#get ^ " --" ^ kind ^ " -o " ^ (Filename.quote output) ^ " " ^ (Filename.quote basef) ^ " 2>&1" in - sn.messages#set (Pp.str ("Running: "^cmd)); + sn.messages#default_route#set (Pp.str ("Running: "^cmd)); let finally st = flash_info (cmd ^ pr_exit_status st) in - run_command (fun msg -> sn.messages#add_string msg) finally cmd + run_command (fun msg -> sn.messages#default_route#add_string msg) finally cmd let export kind = cb_on_current_term (export kind) @@ -447,9 +447,9 @@ let compile sn = ^ " " ^ (Filename.quote f) ^ " 2>&1" in let buf = Buffer.create 1024 in - sn.messages#set (Pp.str ("Running: "^cmd)); + sn.messages#default_route#set (Pp.str ("Running: "^cmd)); let display s = - sn.messages#add_string s; + sn.messages#default_route#add_string s; Buffer.add_string buf s in let finally st = @@ -457,8 +457,8 @@ let compile sn = flash_info (f ^ " successfully compiled") else begin flash_info (f ^ " failed to compile"); - sn.messages#set (Pp.str "Compilation output:\n"); - 
sn.messages#add (Pp.str (Buffer.contents buf)); + sn.messages#default_route#set (Pp.str "Compilation output:\n"); + sn.messages#default_route#add (Pp.str (Buffer.contents buf)); end in run_command display finally cmd @@ -480,13 +480,13 @@ let make sn = |Some f -> File.saveall (); let cmd = local_cd f ^ cmd_make#get ^ " 2>&1" in - sn.messages#set (Pp.str "Compilation output:\n"); + sn.messages#default_route#set (Pp.str "Compilation output:\n"); Buffer.reset last_make_buf; last_make := ""; last_make_index := 0; last_make_dir := Filename.dirname f; let display s = - sn.messages#add_string s; + sn.messages#default_route#add_string s; Buffer.add_string last_make_buf s in let finally st = flash_info (cmd_make#get ^ pr_exit_status st) @@ -524,11 +524,11 @@ let next_error sn = let stopi = b#get_iter_at_byte ~line:(line-1) stop in b#apply_tag Tags.Script.error ~start:starti ~stop:stopi; b#place_cursor ~where:starti; - sn.messages#set (Pp.str error_msg); + sn.messages#default_route#set (Pp.str error_msg); sn.script#misc#grab_focus () with Not_found -> last_make_index := 0; - sn.messages#set (Pp.str "No more errors.\n") + sn.messages#default_route#set (Pp.str "No more errors.\n") let next_error = cb_on_current_term next_error @@ -609,16 +609,14 @@ let get_current_word term = (** Then look at the current selected word *) let buf1 = term.script#buffer in let buf2 = term.proof#buffer in - let buf3 = term.messages#buffer in if buf1#has_selection then let (start, stop) = buf1#selection_bounds in buf1#get_text ~slice:true ~start ~stop () else if buf2#has_selection then let (start, stop) = buf2#selection_bounds in buf2#get_text ~slice:true ~start ~stop () - else if buf3#has_selection then - let (start, stop) = buf3#selection_bounds in - buf3#get_text ~slice:true ~start ~stop () + else if term.messages#has_selection then + term.messages#get_selected_text (** Otherwise try to find the word around the cursor *) else let it = term.script#buffer#get_iter_at_mark `INSERT in @@ -668,36 
+666,18 @@ let match_callback = cb_on_current_term match_callback module Query = struct -let searchabout sn = - let word = get_current_word sn in - let buf = sn.messages#buffer in - let insert result = - let qualid = result.Interface.coq_object_qualid in - let name = String.concat "." qualid in - let tpe = result.Interface.coq_object_object in - buf#insert ~tags:[Tags.Message.item] name; - buf#insert "\n"; - buf#insert tpe; - buf#insert "\n"; - in - let display_results r = - sn.messages#clear; - List.iter insert (match r with Interface.Good l -> l | _ -> []); - Coq.return () - in - let launch_query = - let search = Coq.search [Interface.SubType_Pattern word, true] in - Coq.bind search display_results - in - Coq.try_grab sn.coqtop launch_query ignore - -let searchabout () = on_current_term searchabout - let doquery query sn = - sn.messages#clear; - Coq.try_grab sn.coqtop (sn.coqops#raw_coq_query query) ignore - -let otherquery command sn = + sn.messages#default_route#clear; + Coq.try_grab sn.coqtop (sn.coqops#raw_coq_query query ~route_id:0 + ~next:(function + | Interface.Fail (_, _, err) -> + let err = Ideutils.validate err in + sn.messages#default_route#add err; + Coq.return () + | Interface.Good () -> Coq.return ())) + ignore + +let queryif command sn = Option.iter (fun query -> doquery (query ^ ".") sn) begin try let i = CString.string_index_from command 0 "..." 
in @@ -706,12 +686,7 @@ let otherquery command sn = else Some (CString.sub command 0 i ^ " " ^ word) with Not_found -> Some command end -let otherquery command = cb_on_current_term (otherquery command) - -let query command _ = - if command = "Search" || command = "SearchAbout" - then searchabout () - else otherquery command () +let query command _ = cb_on_current_term (queryif command) () end @@ -740,7 +715,7 @@ let initial_about () = else "" in let msg = initial_string ^ version_info ^ log_file_message () in - on_current_term (fun term -> term.messages#add_string msg) + on_current_term (fun term -> term.messages#default_route#add_string msg) let coq_icon () = (* May raise Nof_found *) @@ -804,8 +779,8 @@ let coqtop_arguments sn = | args -> let args = String.concat " " args in let msg = Printf.sprintf "Invalid arguments: %s" args in - let () = sn.messages#clear in - sn.messages#push Feedback.Error (Pp.str msg) + let () = sn.messages#default_route#clear in + sn.messages#default_route#push Feedback.Error (Pp.str msg) else dialog#destroy () in let _ = entry#connect#activate ~callback:ok_cb in @@ -1177,17 +1152,17 @@ let build_ui () = item "Help" ~label:"_Help"; item "Browse Coq Manual" ~label:"Browse Coq _Manual" ~callback:(fun _ -> - browse notebook#current_term.messages#add_string (doc_url ())); + browse notebook#current_term.messages#default_route#add_string (doc_url ())); item "Browse Coq Library" ~label:"Browse Coq _Library" ~callback:(fun _ -> - browse notebook#current_term.messages#add_string library_url#get); + browse notebook#current_term.messages#default_route#add_string library_url#get); item "Help for keyword" ~label:"Help for _keyword" ~stock:`HELP ~callback:(fun _ -> on_current_term (fun sn -> - browse_keyword sn.messages#add_string (get_current_word sn))); + browse_keyword sn.messages#default_route#add_string (get_current_word sn))); item "Help for μPG mode" ~label:"Help for μPG mode" ~callback:(fun _ -> on_current_term (fun sn -> - sn.messages#clear; - 
sn.messages#add_string (NanoPG.get_documentation ()))); + sn.messages#default_route#clear; + sn.messages#default_route#add_string (NanoPG.get_documentation ()))); item "About Coq" ~label:"_About" ~stock:`ABOUT ~callback:MiscMenu.about ]; diff --git a/ide/ide.mllib b/ide/ide.mllib index 57e9fe5adc..96ea8c410e 100644 --- a/ide/ide.mllib +++ b/ide/ide.mllib @@ -26,15 +26,16 @@ Gtk_parsing Wg_Segment Wg_ProofView Wg_MessageView +Wg_RoutedMessageViews Wg_Detachable Wg_Find Wg_Completion Wg_ScriptView Coq_commands -Wg_Command FileOps Document CoqOps +Wg_Command Session Coqide_ui NanoPG diff --git a/ide/ide_slave.ml b/ide/ide_slave.ml index 0ba1b3a4fb..55b4fa87e6 100644 --- a/ide/ide_slave.ml +++ b/ide/ide_slave.ml @@ -57,8 +57,8 @@ let coqide_known_option table = List.mem table [ ["Printing";"Unfocused"]] let is_known_option cmd = match Vernacprop.under_control cmd with - | VernacSetOption (o,BoolValue true) - | VernacUnsetOption o -> coqide_known_option o + | VernacSetOption (_, o, BoolValue true) + | VernacUnsetOption (_, o) -> coqide_known_option o | _ -> false (** Check whether a command is forbidden in the IDE *) @@ -340,7 +340,7 @@ let set_options options = | IntValue i -> Goptions.set_int_option_value name i | StringValue s -> Goptions.set_string_option_value name s | StringOptValue (Some s) -> Goptions.set_string_option_value name s - | StringOptValue None -> Goptions.unset_option_value_gen None name + | StringOptValue None -> Goptions.unset_option_value_gen name in List.iter iter options diff --git a/ide/ideutils.ml b/ide/ideutils.ml index 1786957597..bdb39e94a1 100644 --- a/ide/ideutils.ml +++ b/ide/ideutils.ml @@ -473,3 +473,15 @@ let browse_keyword prerr text = let u = Lazy.force url_for_keyword text in browse prerr (doc_url() ^ u) with Not_found -> prerr ("No documentation found for \""^text^"\".\n") + +let rec is_valid (s : Pp.t) = match Pp.repr s with + | Pp.Ppcmd_empty + | Pp.Ppcmd_print_break _ + | Pp.Ppcmd_force_newline -> true + | Pp.Ppcmd_glue l -> 
List.for_all is_valid l + | Pp.Ppcmd_string s -> Glib.Utf8.validate s + | Pp.Ppcmd_box (_,s) + | Pp.Ppcmd_tag (_,s) -> is_valid s + | Pp.Ppcmd_comment s -> List.for_all Glib.Utf8.validate s +let validate s = + if is_valid s then s else Pp.str "This error massage can't be printed." diff --git a/ide/ideutils.mli b/ide/ideutils.mli index babbfe2f24..0031c59c17 100644 --- a/ide/ideutils.mli +++ b/ide/ideutils.mli @@ -98,3 +98,7 @@ val io_read_all : Glib.Io.channel -> string val run_command : (string -> unit) -> (Unix.process_status -> unit) -> string -> unit + +(* Checks if an error message is printable, it not replaces it with + * a printable error *) +val validate : Pp.t -> Pp.t diff --git a/ide/session.ml b/ide/session.ml index 210fbdec4d..be2bfe060c 100644 --- a/ide/session.ml +++ b/ide/session.ml @@ -33,7 +33,7 @@ type session = { buffer : GText.buffer; script : Wg_ScriptView.script_view; proof : Wg_ProofView.proof_view; - messages : Wg_MessageView.message_view; + messages : Wg_RoutedMessageViews.message_views_router; segment : Wg_Segment.segment; fileops : FileOps.ops; coqops : CoqOps.ops; @@ -366,7 +366,7 @@ let create_proof () = let create_messages () = let messages = Wg_MessageView.message_view () in let _ = messages#misc#set_can_focus true in - messages + Wg_RoutedMessageViews.message_views ~route_0:messages let dummy_control : control = object @@ -390,7 +390,7 @@ let create file coqtop_args = let _ = fops#update_stats in let cops = new CoqOps.coqops script proof messages segment coqtop (fun () -> fops#filename) in - let command = new Wg_Command.command_window basename coqtop cops in + let command = new Wg_Command.command_window basename coqtop cops messages in let errpage = create_errpage script in let jobpage = create_jobpage coqtop cops in let _ = set_buffer_handlers (buffer :> GText.buffer) script cops coqtop in @@ -511,12 +511,12 @@ let build_layout (sn:session) = sn.command#pack_in (session_paned#pack2 ~shrink:false ~resize:false); script_scroll#add 
sn.script#coerce; proof_scroll#add sn.proof#coerce; - let detach, _ = add_msg_page 0 sn.tab_label#text "Messages" sn.messages#coerce in + let detach, _ = add_msg_page 0 sn.tab_label#text "Messages" sn.messages#default_route#coerce in let _, label = add_msg_page 1 sn.tab_label#text "Errors" sn.errpage#coerce in let _, _ = add_msg_page 2 sn.tab_label#text "Jobs" sn.jobpage#coerce in (** When a message is received, focus on the message pane *) let _ = - sn.messages#connect#pushed ~callback:(fun _ _ -> + sn.messages#default_route#connect#pushed ~callback:(fun _ _ -> let num = message_frame#page_num detach#coerce in if 0 <= num then message_frame#goto_page num ) diff --git a/ide/session.mli b/ide/session.mli index e99f080245..bb38169001 100644 --- a/ide/session.mli +++ b/ide/session.mli @@ -31,7 +31,7 @@ type session = { buffer : GText.buffer; script : Wg_ScriptView.script_view; proof : Wg_ProofView.proof_view; - messages : Wg_MessageView.message_view; + messages : Wg_RoutedMessageViews.message_views_router; segment : Wg_Segment.segment; fileops : FileOps.ops; coqops : CoqOps.ops; diff --git a/ide/wg_Command.ml b/ide/wg_Command.ml index 3ce2c484f6..8eddfb3149 100644 --- a/ide/wg_Command.ml +++ b/ide/wg_Command.ml @@ -10,7 +10,7 @@ open Preferences -class command_window name coqtop coqops = +class command_window name coqtop coqops router = let frame = Wg_Detachable.detachable ~title:(Printf.sprintf "Query pane (%s)" name) () in let _ = frame#hide in @@ -23,6 +23,10 @@ class command_window name coqtop coqops = notebook#misc#set_size_request ~width:600 ~height:500 (); notebook#misc#grab_focus ()) in + let route_id = + let r = ref 0 in + fun () -> incr r; !r in + object(self) val frame = frame @@ -54,11 +58,13 @@ object(self) method private new_query_aux ?command ?term ?(grab_now=true) () = let frame = GBin.frame ~shadow_type:`NONE () in ignore(notebook#insert_page ~pos:(notebook#page_num new_page) frame#coerce); + let route_id = route_id () in let new_tab_lbl text = let 
hbox = GPack.hbox ~homogeneous:false () in ignore(GMisc.label ~width:100 ~ellipsize:`END ~text ~packing:hbox#pack()); let b = GButton.button ~packing:hbox#pack () in ignore(b#connect#clicked ~callback:(fun () -> + router#delete_route route_id; views <- List.filter (fun (f,_,_) -> f#get_oid <> frame#coerce#get_oid) views; notebook#remove_page (notebook#page_num frame#coerce))); @@ -90,7 +96,9 @@ object(self) ~vpolicy:`AUTOMATIC ~hpolicy:`AUTOMATIC ~packing:(vbox#pack ~fill:true ~expand:true) () in - let result = GText.view ~packing:r_bin#add () in + let result = Wg_MessageView.message_view () in + router#register_route route_id result; + r_bin#add (result :> GObj.widget); views <- (frame#coerce, result, combo#entry) :: views; let cb clr = result#misc#modify_base [`NORMAL, `NAME clr] in let _ = background_color#connect#changed ~callback:cb in @@ -98,7 +106,6 @@ object(self) let cb ft = result#misc#modify_font (Pango.Font.from_string ft) in stick text_font result cb; result#misc#set_can_focus true; (* false causes problems for selection *) - result#set_editable false; let callback () = let com = combo#entry#text in let arg = entry#text in @@ -108,22 +115,19 @@ object(self) else com ^ " " ^ arg ^" . 
" in let process = - (* We need to adapt this to route_id and redirect to the result buffer below *) - coqops#raw_coq_query phrase - (* - Coq.bind (Coq.query (phrase,sid)) (function - | Interface.Fail (_,l,str) -> - let width = Ideutils.textview_width result in - Ideutils.insert_xml result#buffer (Richpp.richpp_of_pp width str); + let next = function + | Interface.Fail (_, _, err) -> + let err = Ideutils.validate err in + result#set err; notebook#set_page ~tab_label:(new_tab_lbl "Error") frame#coerce; - Coq.return () - | Interface.Good res -> - result#buffer#insert res; + Coq.return () + | Interface.Good () -> notebook#set_page ~tab_label:(new_tab_lbl arg) frame#coerce; - Coq.return ()) - *) + Coq.return () + in + coqops#raw_coq_query ~route_id ~next phrase in - result#buffer#set_text ("Result for command " ^ phrase ^ ":\n"); + result#set (Pp.str ("Result for command " ^ phrase ^ ":\n")); Coq.try_grab coqtop process ignore in ignore (combo#entry#connect#activate ~callback); diff --git a/ide/wg_Command.mli b/ide/wg_Command.mli index c70a95761c..1e0eb675c6 100644 --- a/ide/wg_Command.mli +++ b/ide/wg_Command.mli @@ -8,7 +8,7 @@ (* * (see LICENSE file for the text of the license) *) (************************************************************************) -class command_window : string -> Coq.coqtop -> CoqOps.coqops -> +class command_window : string -> Coq.coqtop -> CoqOps.coqops -> Wg_RoutedMessageViews.message_views_router -> object method new_query : ?command:string -> ?term:string -> unit -> unit method pack_in : (GObj.widget -> unit) -> unit diff --git a/ide/wg_MessageView.ml b/ide/wg_MessageView.ml index 74f687ef76..a79a093e32 100644 --- a/ide/wg_MessageView.ml +++ b/ide/wg_MessageView.ml @@ -36,8 +36,8 @@ class type message_view = method refresh : bool -> unit method push : Ideutils.logger (** same as [add], but with an explicit level instead of [Notice] *) - method buffer : GText.buffer - (** for more advanced text edition *) + method has_selection : bool + 
method get_selected_text : string end let message_view () : message_view = @@ -45,7 +45,6 @@ let message_view () : message_view = ~highlight_matching_brackets:true ~tag_table:Tags.Message.table () in - let text_buffer = new GText.buffer buffer#as_buffer in let mark = buffer#create_mark ~left_gravity:false buffer#start_iter in let box = GPack.vbox () in let scroll = GBin.scrolled_window @@ -120,7 +119,12 @@ let message_view () : message_view = method set msg = self#clear; self#add msg - method buffer = text_buffer + method has_selection = buffer#has_selection + method get_selected_text = + if buffer#has_selection then + let start, stop = buffer#selection_bounds in + buffer#get_text ~slice:true ~start ~stop () + else "" end in diff --git a/ide/wg_MessageView.mli b/ide/wg_MessageView.mli index e7ec3c5788..472aaf5ed4 100644 --- a/ide/wg_MessageView.mli +++ b/ide/wg_MessageView.mli @@ -26,8 +26,8 @@ class type message_view = method refresh : bool -> unit method push : Ideutils.logger (** same as [add], but with an explicit level instead of [Notice] *) - method buffer : GText.buffer - (** for more advanced text edition *) + method has_selection : bool + method get_selected_text : string end val message_view : unit -> message_view diff --git a/ide/wg_RoutedMessageViews.ml b/ide/wg_RoutedMessageViews.ml new file mode 100644 index 0000000000..4bd3035244 --- /dev/null +++ b/ide/wg_RoutedMessageViews.ml @@ -0,0 +1,47 @@ +(************************************************************************) +(* * The Coq Proof Assistant / The Coq Development Team *) +(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) +(* <O___,, * (see CREDITS file for the list of authors) *) +(* \VV/ **************************************************************) +(* // * This file is distributed under the terms of the *) +(* * GNU Lesser General Public License Version 2.1 *) +(* * (see LICENSE file for the text of the license) *) 
+(************************************************************************) + +class type message_views_router = object + method route : int -> Wg_MessageView.message_view + method default_route : Wg_MessageView.message_view + + method has_selection : bool + method get_selected_text : string + + method register_route : int -> Wg_MessageView.message_view -> unit + method delete_route : int -> unit +end + +let message_views ~route_0 : message_views_router = + let route_table = Hashtbl.create 17 in + let () = Hashtbl.add route_table 0 route_0 in +object + method route i = + try Hashtbl.find route_table i + with Not_found -> + (* at least the message will be printed somewhere*) + Hashtbl.find route_table 0 + + method default_route = route_0 + + method register_route i mv = Hashtbl.add route_table i mv + + method delete_route i = Hashtbl.remove route_table i + + method has_selection = + Hashtbl.fold (fun _ v -> (||) v#has_selection) route_table false + + method get_selected_text = + Option.default "" + (Hashtbl.fold (fun _ v acc -> + if v#has_selection then Some v#get_selected_text else acc) + route_table None) + +end diff --git a/ide/wg_RoutedMessageViews.mli b/ide/wg_RoutedMessageViews.mli new file mode 100644 index 0000000000..cca43d55ba --- /dev/null +++ b/ide/wg_RoutedMessageViews.mli @@ -0,0 +1,23 @@ +(************************************************************************) +(* * The Coq Proof Assistant / The Coq Development Team *) +(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) +(* <O___,, * (see CREDITS file for the list of authors) *) +(* \VV/ **************************************************************) +(* // * This file is distributed under the terms of the *) +(* * GNU Lesser General Public License Version 2.1 *) +(* * (see LICENSE file for the text of the license) *) +(************************************************************************) + +class type message_views_router = object + method route : int -> Wg_MessageView.message_view + 
method default_route : Wg_MessageView.message_view + + method has_selection : bool + method get_selected_text : string + + method register_route : int -> Wg_MessageView.message_view -> unit + method delete_route : int -> unit +end + +val message_views : + route_0:Wg_MessageView.message_view -> message_views_router diff --git a/intf/vernacexpr.ml b/intf/vernacexpr.ml index 0a6e5b3b31..dca4910574 100644 --- a/intf/vernacexpr.ml +++ b/intf/vernacexpr.ml @@ -425,9 +425,8 @@ type nonrec vernac_expr = | VernacSetOpacity of (Conv_oracle.level * reference or_by_notation list) | VernacSetStrategy of (Conv_oracle.level * reference or_by_notation list) list - | VernacUnsetOption of Goptions.option_name - | VernacSetOption of Goptions.option_name * option_value - | VernacSetAppendOption of Goptions.option_name * string + | VernacUnsetOption of export_flag * Goptions.option_name + | VernacSetOption of export_flag * Goptions.option_name * option_value | VernacAddOption of Goptions.option_name * option_ref_value list | VernacRemoveOption of Goptions.option_name * option_ref_value list | VernacMemOption of Goptions.option_name * option_ref_value list diff --git a/kernel/cClosure.ml b/kernel/cClosure.ml index 11faef02cb..5f683790c1 100644 --- a/kernel/cClosure.ml +++ b/kernel/cClosure.ml @@ -259,12 +259,14 @@ module KeyTable = Hashtbl.Make(IdKeyHash) let eq_table_key = IdKeyHash.equal +type 'a infos_tab = 'a KeyTable.t + type 'a infos_cache = { - i_repr : 'a infos -> constr -> 'a; + i_repr : 'a infos -> 'a infos_tab -> constr -> 'a; i_env : env; i_sigma : existential -> constr option; i_rels : (Context.Rel.Declaration.t * Pre_env.lazy_val) Range.t; - i_tab : 'a KeyTable.t } +} and 'a infos = { i_flags : reds; @@ -279,9 +281,9 @@ let assoc_defined id env = match Environ.lookup_named id env with | LocalDef (_, c, _) -> c | _ -> raise Not_found -let ref_value_cache ({i_cache = cache} as infos) ref = +let ref_value_cache ({i_cache = cache} as infos) tab ref = try - Some (KeyTable.find 
cache.i_tab ref) + Some (KeyTable.find tab ref) with Not_found -> try let body = @@ -300,8 +302,8 @@ let ref_value_cache ({i_cache = cache} as infos) ref = | VarKey id -> assoc_defined id cache.i_env | ConstKey cst -> constant_value_in cache.i_env cst in - let v = cache.i_repr infos body in - KeyTable.add cache.i_tab ref v; + let v = cache.i_repr infos tab body in + KeyTable.add tab ref v; Some v with | Not_found (* List.assoc *) @@ -318,7 +320,7 @@ let create mk_cl flgs env evars = i_env = env; i_sigma = evars; i_rels = (Environ.pre_env env).env_rel_context.env_rel_map; - i_tab = KeyTable.create 17 } + } in { i_flags = flgs; i_cache = cache } @@ -906,23 +908,23 @@ and knht info e t stk = (************************************************************************) (* Computes a weak head normal form from the result of knh. *) -let rec knr info m stk = +let rec knr info tab m stk = match m.term with | FLambda(n,tys,f,e) when red_set info.i_flags fBETA -> (match get_args n tys f e stk with - Inl e', s -> knit info e' f s + Inl e', s -> knit info tab e' f s | Inr lam, s -> (lam,s)) | FFlex(ConstKey (kn,_ as c)) when red_set info.i_flags (fCONST kn) -> - (match ref_value_cache info (ConstKey c) with - Some v -> kni info v stk + (match ref_value_cache info tab (ConstKey c) with + Some v -> kni info tab v stk | None -> (set_norm m; (m,stk))) | FFlex(VarKey id) when red_set info.i_flags (fVAR id) -> - (match ref_value_cache info (VarKey id) with - Some v -> kni info v stk + (match ref_value_cache info tab (VarKey id) with + Some v -> kni info tab v stk | None -> (set_norm m; (m,stk))) | FFlex(RelKey k) when red_set info.i_flags fDELTA -> - (match ref_value_cache info (RelKey k) with - Some v -> kni info v stk + (match ref_value_cache info tab (RelKey k) with + Some v -> kni info tab v stk | None -> (set_norm m; (m,stk))) | FConstruct((ind,c),u) -> let use_match = red_set info.i_flags fMATCH in @@ -932,29 +934,29 @@ let rec knr info m stk = | (depth, args, 
ZcaseT(ci,_,br,e)::s) when use_match -> assert (ci.ci_npar>=0); let rargs = drop_parameters depth ci.ci_npar args in - knit info e br.(c-1) (rargs@s) + knit info tab e br.(c-1) (rargs@s) | (_, cargs, Zfix(fx,par)::s) when use_fix -> let rarg = fapp_stack(m,cargs) in let stk' = par @ append_stack [|rarg|] s in let (fxe,fxbd) = contract_fix_vect fx.term in - knit info fxe fxbd stk' + knit info tab fxe fxbd stk' | (depth, args, Zproj (n, m, cst)::s) when use_match -> let rargs = drop_parameters depth n args in let rarg = project_nth_arg m rargs in - kni info rarg s + kni info tab rarg s | (_,args,s) -> (m,args@s)) else (m,stk) | FCoFix _ when red_set info.i_flags fCOFIX -> (match strip_update_shift_app m stk with (_, args, (((ZcaseT _|Zproj _)::_) as stk')) -> let (fxe,fxbd) = contract_fix_vect m.term in - knit info fxe fxbd (args@stk') + knit info tab fxe fxbd (args@stk') | (_,args,s) -> (m,args@s)) | FLetIn (_,v,_,bd,e) when red_set info.i_flags fZETA -> - knit info (subs_cons([|v|],e)) bd stk + knit info tab (subs_cons([|v|],e)) bd stk | FEvar(ev,env) -> (match evar_value info.i_cache ev with - Some c -> knit info env c stk + Some c -> knit info tab env c stk | None -> (m,stk)) | FLOCKED | FRel _ | FAtom _ | FCast _ | FFlex _ | FInd _ | FApp _ | FProj _ | FFix _ | FCoFix _ | FCaseT _ | FLambda _ | FProd _ | FLetIn _ | FLIFT _ @@ -962,14 +964,14 @@ let rec knr info m stk = (* Computes the weak head normal form of a term *) -and kni info m stk = +and kni info tab m stk = let (hm,s) = knh info m stk in - knr info hm s -and knit info e t stk = + knr info tab hm s +and knit info tab e t stk = let (ht,s) = knht info e t stk in - knr info ht s + knr info tab ht s -let kh info v stk = fapp_stack(kni info v stk) +let kh info tab v stk = fapp_stack(kni info tab v stk) (************************************************************************) @@ -997,61 +999,61 @@ let rec zip_term zfun m stk = 1- Calls kni 2- tries to rebuild the term. 
If a closure still has to be computed, calls itself recursively. *) -let rec kl info m = +let rec kl info tab m = if is_val m then (incr prune; term_of_fconstr m) else - let (nm,s) = kni info m [] in + let (nm,s) = kni info tab m [] in let () = if !share then ignore (fapp_stack (nm, s)) in (* to unlock Zupdates! *) - zip_term (kl info) (norm_head info nm) s + zip_term (kl info tab) (norm_head info tab nm) s (* no redex: go up for atoms and already normalized terms, go down otherwise. *) -and norm_head info m = +and norm_head info tab m = if is_val m then (incr prune; term_of_fconstr m) else match m.term with | FLambda(n,tys,f,e) -> let (e',rvtys) = List.fold_left (fun (e,ctxt) (na,ty) -> - (subs_lift e, (na,kl info (mk_clos e ty))::ctxt)) + (subs_lift e, (na,kl info tab (mk_clos e ty))::ctxt)) (e,[]) tys in - let bd = kl info (mk_clos e' f) in + let bd = kl info tab (mk_clos e' f) in List.fold_left (fun b (na,ty) -> mkLambda(na,ty,b)) bd rvtys | FLetIn(na,a,b,f,e) -> let c = mk_clos (subs_lift e) f in - mkLetIn(na, kl info a, kl info b, kl info c) + mkLetIn(na, kl info tab a, kl info tab b, kl info tab c) | FProd(na,dom,rng) -> - mkProd(na, kl info dom, kl info rng) + mkProd(na, kl info tab dom, kl info tab rng) | FCoFix((n,(na,tys,bds)),e) -> let ftys = CArray.Fun1.map mk_clos e tys in let fbds = CArray.Fun1.map mk_clos (subs_liftn (Array.length na) e) bds in - mkCoFix(n,(na, CArray.Fun1.map kl info ftys, CArray.Fun1.map kl info fbds)) + mkCoFix(n,(na, CArray.map (kl info tab) ftys, CArray.map (kl info tab) fbds)) | FFix((n,(na,tys,bds)),e) -> let ftys = CArray.Fun1.map mk_clos e tys in let fbds = CArray.Fun1.map mk_clos (subs_liftn (Array.length na) e) bds in - mkFix(n,(na, CArray.Fun1.map kl info ftys, CArray.Fun1.map kl info fbds)) + mkFix(n,(na, CArray.map (kl info tab) ftys, CArray.map (kl info tab) fbds)) | FEvar((i,args),env) -> - mkEvar(i, Array.map (fun a -> kl info (mk_clos env a)) args) + mkEvar(i, Array.map (fun a -> kl info tab (mk_clos env a)) args) 
| FProj (p,c) -> - mkProj (p, kl info c) + mkProj (p, kl info tab c) | FLOCKED | FRel _ | FAtom _ | FCast _ | FFlex _ | FInd _ | FConstruct _ | FApp _ | FCaseT _ | FLIFT _ | FCLOS _ -> term_of_fconstr m (* Initialization and then normalization *) (* weak reduction *) -let whd_val info v = - with_stats (lazy (term_of_fconstr (kh info v []))) +let whd_val info tab v = + with_stats (lazy (term_of_fconstr (kh info tab v []))) (* strong reduction *) -let norm_val info v = - with_stats (lazy (kl info v)) +let norm_val info tab v = + with_stats (lazy (kl info tab v)) let inject c = mk_clos (subs_id 0) c -let whd_stack infos m stk = - let k = kni infos m stk in +let whd_stack infos tab m stk = + let k = kni infos tab m stk in let () = if !share then ignore (fapp_stack k) in (* to unlock Zupdates! *) k @@ -1059,7 +1061,10 @@ let whd_stack infos m stk = type clos_infos = fconstr infos let create_clos_infos ?(evars=fun _ -> None) flgs env = - create (fun _ -> inject) flgs env evars + create (fun _ _ c -> inject c) flgs env evars + +let create_tab () = KeyTable.create 17 + let oracle_of_infos infos = Environ.oracle infos.i_cache.i_env let env_of_infos infos = infos.i_cache.i_env @@ -1067,14 +1072,14 @@ let env_of_infos infos = infos.i_cache.i_env let infos_with_reds infos reds = { infos with i_flags = reds } -let unfold_reference info key = +let unfold_reference info tab key = match key with | ConstKey (kn,_) -> if red_set info.i_flags (fCONST kn) then - ref_value_cache info key + ref_value_cache info tab key else None | VarKey i -> if red_set info.i_flags (fVAR i) then - ref_value_cache info key + ref_value_cache info tab key else None - | _ -> ref_value_cache info key + | _ -> ref_value_cache info tab key diff --git a/kernel/cClosure.mli b/kernel/cClosure.mli index b9c71d72af..3a7f77d521 100644 --- a/kernel/cClosure.mli +++ b/kernel/cClosure.mli @@ -100,13 +100,15 @@ val unfold_red : evaluable_global_reference -> reds type table_key = Constant.t Univ.puniverses tableKey type 
'a infos_cache +type 'a infos_tab type 'a infos = { i_flags : reds; i_cache : 'a infos_cache } -val ref_value_cache: 'a infos -> table_key -> 'a option -val create: ('a infos -> constr -> 'a) -> reds -> env -> +val ref_value_cache: 'a infos -> 'a infos_tab -> table_key -> 'a option +val create: ('a infos -> 'a infos_tab -> constr -> 'a) -> reds -> env -> (existential -> constr option) -> 'a infos +val create_tab : unit -> 'a infos_tab val evar_value : 'a infos_cache -> existential -> constr option val info_env : 'a infos -> env @@ -200,15 +202,15 @@ val infos_with_reds : clos_infos -> reds -> clos_infos (** Reduction function *) (** [norm_val] is for strong normalization *) -val norm_val : clos_infos -> fconstr -> constr +val norm_val : clos_infos -> fconstr infos_tab -> fconstr -> constr (** [whd_val] is for weak head normalization *) -val whd_val : clos_infos -> fconstr -> constr +val whd_val : clos_infos -> fconstr infos_tab -> fconstr -> constr (** [whd_stack] performs weak head normalization in a given stack. It stops whenever a reduction is blocked. 
*) val whd_stack : - clos_infos -> fconstr -> stack -> fconstr * stack + clos_infos -> fconstr infos_tab -> fconstr -> stack -> fconstr * stack (** [eta_expand_ind_stack env ind c s t] computes stacks correspoding to the conversion of the eta expansion of t, considered as an inhabitant @@ -225,7 +227,7 @@ val eta_expand_ind_stack : env -> inductive -> fconstr -> stack -> (** Conversion auxiliary functions to do step by step normalisation *) (** [unfold_reference] unfolds references in a [fconstr] *) -val unfold_reference : clos_infos -> table_key -> fconstr option +val unfold_reference : clos_infos -> fconstr infos_tab -> table_key -> fconstr option val eq_table_key : table_key -> table_key -> bool @@ -241,9 +243,9 @@ val mk_clos_deep : (fconstr subs -> constr -> fconstr) -> fconstr subs -> constr -> fconstr -val kni: clos_infos -> fconstr -> stack -> fconstr * stack -val knr: clos_infos -> fconstr -> stack -> fconstr * stack -val kl : clos_infos -> fconstr -> constr +val kni: clos_infos -> fconstr infos_tab -> fconstr -> stack -> fconstr * stack +val knr: clos_infos -> fconstr infos_tab -> fconstr -> stack -> fconstr * stack +val kl : clos_infos -> fconstr infos_tab -> fconstr -> constr val to_constr : (lift -> fconstr -> constr) -> lift -> fconstr -> constr diff --git a/kernel/reduction.ml b/kernel/reduction.ml index b3e6894143..2e59fcc18f 100644 --- a/kernel/reduction.ml +++ b/kernel/reduction.ml @@ -117,12 +117,12 @@ let whd_betaiota env t = | App (c, _) -> begin match kind c with | Ind _ | Construct _ | Evar _ | Meta _ | Const _ | LetIn _ -> t - | _ -> whd_val (create_clos_infos betaiota env) (inject t) + | _ -> whd_val (create_clos_infos betaiota env) (create_tab ()) (inject t) end - | _ -> whd_val (create_clos_infos betaiota env) (inject t) + | _ -> whd_val (create_clos_infos betaiota env) (create_tab ()) (inject t) let nf_betaiota env t = - norm_val (create_clos_infos betaiota env) (inject t) + norm_val (create_clos_infos betaiota env) (create_tab ()) 
(inject t) let whd_betaiotazeta env x = match kind x with @@ -133,10 +133,10 @@ let whd_betaiotazeta env x = | Ind _ | Construct _ | Evar _ | Meta _ | Const _ -> x | Sort _ | Rel _ | Var _ | Cast _ | Prod _ | Lambda _ | LetIn _ | App _ | Case _ | Fix _ | CoFix _ | Proj _ -> - whd_val (create_clos_infos betaiotazeta env) (inject x) + whd_val (create_clos_infos betaiotazeta env) (create_tab ()) (inject x) end | Rel _ | Cast _ | LetIn _ | Case _ | Proj _ -> - whd_val (create_clos_infos betaiotazeta env) (inject x) + whd_val (create_clos_infos betaiotazeta env) (create_tab ()) (inject x) let whd_all env t = match kind t with @@ -147,10 +147,10 @@ let whd_all env t = | Ind _ | Construct _ | Evar _ | Meta _ -> t | Sort _ | Rel _ | Var _ | Cast _ | Prod _ | Lambda _ | LetIn _ | App _ | Const _ |Case _ | Fix _ | CoFix _ | Proj _ -> - whd_val (create_clos_infos all env) (inject t) + whd_val (create_clos_infos all env) (create_tab ()) (inject t) end | Rel _ | Cast _ | LetIn _ | Case _ | Proj _ | Const _ | Var _ -> - whd_val (create_clos_infos all env) (inject t) + whd_val (create_clos_infos all env) (create_tab ()) (inject t) let whd_allnolet env t = match kind t with @@ -161,10 +161,10 @@ let whd_allnolet env t = | Ind _ | Construct _ | Evar _ | Meta _ | LetIn _ -> t | Sort _ | Rel _ | Var _ | Cast _ | Prod _ | Lambda _ | App _ | Const _ | Case _ | Fix _ | CoFix _ | Proj _ -> - whd_val (create_clos_infos allnolet env) (inject t) + whd_val (create_clos_infos allnolet env) (create_tab ()) (inject t) end | Rel _ | Cast _ | Case _ | Proj _ | Const _ | Var _ -> - whd_val (create_clos_infos allnolet env) (inject t) + whd_val (create_clos_infos allnolet env) (create_tab ()) (inject t) (********************************************************************) (* Conversion *) @@ -316,46 +316,16 @@ let compare_stacks f fmind lft1 stk1 lft2 stk2 cuniv = cmp_rec (pure_stack lft1 stk1) (pure_stack lft2 stk2) cuniv else raise NotConvertible -let rec no_arg_available = function - | [] -> 
true - | Zupdate _ :: stk -> no_arg_available stk - | Zshift _ :: stk -> no_arg_available stk - | Zapp v :: stk -> Int.equal (Array.length v) 0 && no_arg_available stk - | Zproj _ :: _ -> true - | ZcaseT _ :: _ -> true - | Zfix _ :: _ -> true - -let rec no_nth_arg_available n = function - | [] -> true - | Zupdate _ :: stk -> no_nth_arg_available n stk - | Zshift _ :: stk -> no_nth_arg_available n stk - | Zapp v :: stk -> - let k = Array.length v in - if n >= k then no_nth_arg_available (n-k) stk - else false - | Zproj _ :: _ -> true - | ZcaseT _ :: _ -> true - | Zfix _ :: _ -> true - -let rec no_case_available = function - | [] -> true - | Zupdate _ :: stk -> no_case_available stk - | Zshift _ :: stk -> no_case_available stk - | Zapp _ :: stk -> no_case_available stk - | Zproj (_,_,p) :: _ -> false - | ZcaseT _ :: _ -> false - | Zfix _ :: _ -> true - -let in_whnf (t,stk) = - match fterm_of t with - | (FLetIn _ | FCaseT _ | FApp _ - | FCLOS _ | FLIFT _ | FCast _) -> false - | FLambda _ -> no_arg_available stk - | FConstruct _ -> no_case_available stk - | FCoFix _ -> no_case_available stk - | FFix(((ri,n),(_,_,_)),_) -> no_nth_arg_available ri.(n) stk - | (FFlex _ | FProd _ | FEvar _ | FInd _ | FAtom _ | FRel _ | FProj _) -> true - | FLOCKED -> assert false +type conv_tab = { + cnv_inf : clos_infos; + lft_tab : fconstr infos_tab; + rgt_tab : fconstr infos_tab; +} +(** Invariant: for any tl ∈ lft_tab and tr ∈ rgt_tab, there is no mutable memory + location contained both in tl and in tr. *) + +(** The same heap separation invariant must hold for the fconstr arguments + passed to each respective side of the conversion function below. 
*) (* Conversion between [lft1]term1 and [lft2]term2 *) let rec ccnv cv_pb l2r infos lft1 lft2 term1 term2 cuniv = @@ -365,15 +335,10 @@ let rec ccnv cv_pb l2r infos lft1 lft2 term1 term2 cuniv = and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = Control.check_for_interrupt (); (* First head reduce both terms *) - let whd = whd_stack (infos_with_reds infos betaiotazeta) in - let rec whd_both (t1,stk1) (t2,stk2) = - let st1' = whd t1 stk1 in - let st2' = whd t2 stk2 in - (* Now, whd_stack on term2 might have modified st1 (due to sharing), - and st1 might not be in whnf anymore. If so, we iterate ccnv. *) - if in_whnf st1' then (st1',st2') else whd_both st1' st2' in - let ((hd1,v1),(hd2,v2)) = whd_both st1 st2 in - let appr1 = (lft1,(hd1,v1)) and appr2 = (lft2,(hd2,v2)) in + let ninfos = infos_with_reds infos.cnv_inf betaiotazeta in + let (hd1, v1 as appr1) = whd_stack ninfos infos.lft_tab (fst st1) (snd st1) in + let (hd2, v2 as appr2) = whd_stack ninfos infos.rgt_tab (fst st2) (snd st2) in + let appr1 = (lft1, appr1) and appr2 = (lft2, appr2) in (** We delay the computation of the lifts that apply to the head of the term with [el_stack] inside the branches where they are actually used. 
*) match (fterm_of hd1, fterm_of hd2) with @@ -383,7 +348,7 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = | (Sort s1, Sort s2) -> if not (is_empty_stack v1 && is_empty_stack v2) then anomaly (Pp.str "conversion was given ill-typed terms (Sort)."); - sort_cmp_universes (env_of_infos infos) cv_pb s1 s2 cuniv + sort_cmp_universes (env_of_infos infos.cnv_inf) cv_pb s1 s2 cuniv | (Meta n, Meta m) -> if Int.equal n m then convert_stacks l2r infos lft1 lft2 v1 v2 cuniv @@ -410,24 +375,24 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = (* 2 constants, 2 local defined vars or 2 defined rels *) | (FFlex fl1, FFlex fl2) -> (try - let cuniv = conv_table_key infos fl1 fl2 cuniv in + let cuniv = conv_table_key infos.cnv_inf fl1 fl2 cuniv in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv with NotConvertible | Univ.UniverseInconsistency _ -> (* else the oracle tells which constant is to be expanded *) - let oracle = CClosure.oracle_of_infos infos in + let oracle = CClosure.oracle_of_infos infos.cnv_inf in let (app1,app2) = if Conv_oracle.oracle_order Univ.out_punivs oracle l2r fl1 fl2 then - match unfold_reference infos fl1 with + match unfold_reference infos.cnv_inf infos.lft_tab fl1 with | Some def1 -> ((lft1, (def1, v1)), appr2) | None -> - (match unfold_reference infos fl2 with + (match unfold_reference infos.cnv_inf infos.rgt_tab fl2 with | Some def2 -> (appr1, (lft2, (def2, v2))) | None -> raise NotConvertible) else - match unfold_reference infos fl2 with + match unfold_reference infos.cnv_inf infos.rgt_tab fl2 with | Some def2 -> (appr1, (lft2, (def2, v2))) | None -> - (match unfold_reference infos fl1 with + (match unfold_reference infos.cnv_inf infos.lft_tab fl1 with | Some def1 -> ((lft1, (def1, v1)), appr2) | None -> raise NotConvertible) in @@ -437,11 +402,11 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = (* Projections: prefer unfolding to first-order unification, which will happen naturally if the terms c1, c2 are not in 
constructor form *) - (match unfold_projection infos p1 with + (match unfold_projection infos.cnv_inf p1 with | Some s1 -> eqappr cv_pb l2r infos (lft1, (c1, (s1 :: v1))) appr2 cuniv | None -> - match unfold_projection infos p2 with + match unfold_projection infos.cnv_inf p2 with | Some s2 -> eqappr cv_pb l2r infos appr1 (lft2, (c2, (s2 :: v2))) cuniv | None -> @@ -455,26 +420,26 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = raise NotConvertible) | (FProj (p1,c1), t2) -> - (match unfold_projection infos p1 with + (match unfold_projection infos.cnv_inf p1 with | Some s1 -> eqappr cv_pb l2r infos (lft1, (c1, (s1 :: v1))) appr2 cuniv | None -> (match t2 with | FFlex fl2 -> - (match unfold_reference infos fl2 with + (match unfold_reference infos.cnv_inf infos.rgt_tab fl2 with | Some def2 -> eqappr cv_pb l2r infos appr1 (lft2, (def2, v2)) cuniv | None -> raise NotConvertible) | _ -> raise NotConvertible)) | (t1, FProj (p2,c2)) -> - (match unfold_projection infos p2 with + (match unfold_projection infos.cnv_inf p2 with | Some s2 -> eqappr cv_pb l2r infos appr1 (lft2, (c2, (s2 :: v2))) cuniv | None -> (match t1 with | FFlex fl1 -> - (match unfold_reference infos fl1 with + (match unfold_reference infos.cnv_inf infos.lft_tab fl1 with | Some def1 -> eqappr cv_pb l2r infos (lft1, (def1, v1)) appr2 cuniv | None -> raise NotConvertible) @@ -524,37 +489,37 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = (* only one constant, defined var or defined rel *) | (FFlex fl1, c2) -> - (match unfold_reference infos fl1 with + (match unfold_reference infos.cnv_inf infos.lft_tab fl1 with | Some def1 -> (** By virtue of the previous case analyses, we know [c2] is rigid. Conversion check to rigid terms eventually implies full weak-head reduction, so instead of repeatedly performing small-step unfoldings, we perform reduction with all flags on. 
*) - let all = RedFlags.red_add_transparent all (RedFlags.red_transparent (info_flags infos)) in - let r1 = whd_stack (infos_with_reds infos all) def1 v1 in + let all = RedFlags.red_add_transparent all (RedFlags.red_transparent (info_flags infos.cnv_inf)) in + let r1 = whd_stack (infos_with_reds infos.cnv_inf all) infos.lft_tab def1 v1 in eqappr cv_pb l2r infos (lft1, r1) appr2 cuniv | None -> match c2 with | FConstruct ((ind2,j2),u2) -> (try let v2, v1 = - eta_expand_ind_stack (info_env infos) ind2 hd2 v2 (snd appr1) + eta_expand_ind_stack (info_env infos.cnv_inf) ind2 hd2 v2 (snd appr1) in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv with Not_found -> raise NotConvertible) | _ -> raise NotConvertible) | (c1, FFlex fl2) -> - (match unfold_reference infos fl2 with + (match unfold_reference infos.cnv_inf infos.rgt_tab fl2 with | Some def2 -> (** Symmetrical case of above. *) - let all = RedFlags.red_add_transparent all (RedFlags.red_transparent (info_flags infos)) in - let r2 = whd_stack (infos_with_reds infos all) def2 v2 in + let all = RedFlags.red_add_transparent all (RedFlags.red_transparent (info_flags infos.cnv_inf)) in + let r2 = whd_stack (infos_with_reds infos.cnv_inf all) infos.rgt_tab def2 v2 in eqappr cv_pb l2r infos appr1 (lft2, r2) cuniv | None -> match c1 with | FConstruct ((ind1,j1),u1) -> (try let v1, v2 = - eta_expand_ind_stack (info_env infos) ind1 hd1 v1 (snd appr2) + eta_expand_ind_stack (info_env infos.cnv_inf) ind1 hd1 v1 (snd appr2) in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv with Not_found -> raise NotConvertible) | _ -> raise NotConvertible) @@ -566,7 +531,7 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = let cuniv = convert_instances ~flex:false u1 u2 cuniv in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv else - let mind = Environ.lookup_mind (fst ind1) (info_env infos) in + let mind = Environ.lookup_mind (fst ind1) (info_env infos.cnv_inf) in let nargs = CClosure.stack_args_size v1 in if not (Int.equal nargs 
(CClosure.stack_args_size v2)) then raise NotConvertible @@ -581,7 +546,7 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = let cuniv = convert_instances ~flex:false u1 u2 cuniv in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv else - let mind = Environ.lookup_mind (fst ind1) (info_env infos) in + let mind = Environ.lookup_mind (fst ind1) (info_env infos.cnv_inf) in let nargs = CClosure.stack_args_size v1 in if not (Int.equal nargs (CClosure.stack_args_size v2)) then raise NotConvertible @@ -594,14 +559,14 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv = | (FConstruct ((ind1,j1),u1), _) -> (try let v1, v2 = - eta_expand_ind_stack (info_env infos) ind1 hd1 v1 (snd appr2) + eta_expand_ind_stack (info_env infos.cnv_inf) ind1 hd1 v1 (snd appr2) in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv with Not_found -> raise NotConvertible) | (_, FConstruct ((ind2,j2),u2)) -> (try let v2, v1 = - eta_expand_ind_stack (info_env infos) ind2 hd2 v2 (snd appr1) + eta_expand_ind_stack (info_env infos.cnv_inf) ind2 hd2 v2 (snd appr1) in convert_stacks l2r infos lft1 lft2 v1 v2 cuniv with Not_found -> raise NotConvertible) @@ -669,6 +634,11 @@ and convert_vect l2r infos lft1 lft2 v1 v2 cuniv = let clos_gen_conv trans cv_pb l2r evars env univs t1 t2 = let reds = CClosure.RedFlags.red_add_transparent betaiotazeta trans in let infos = create_clos_infos ~evars reds env in + let infos = { + cnv_inf = infos; + lft_tab = create_tab (); + rgt_tab = create_tab (); + } in ccnv cv_pb l2r infos el_id el_id (inject t1) (inject t2) univs diff --git a/library/goptions.ml b/library/goptions.ml index 5681421caa..76071ebcc2 100644 --- a/library/goptions.ml +++ b/library/goptions.ml @@ -196,7 +196,7 @@ type 'a option_sig = { optread : unit -> 'a; optwrite : 'a -> unit } -type option_locality = OptLocal | OptDefault | OptGlobal +type option_locality = OptDefault | OptLocal | OptExport | OptGlobal type option_mod = OptSet | OptAppend @@ -229,11 +229,6 @@ let warn_deprecated_option = 
(fun key -> str "Option" ++ spc () ++ str (nickname key) ++ strbrk " is deprecated") -let get_locality = function - | Some true -> OptLocal - | Some false -> OptGlobal - | None -> OptDefault - let declare_option cast uncast append ?(preprocess = fun x -> x) { optdepr=depr; optname=name; optkey=key; optread=read; optwrite=write } = check_key key; @@ -247,16 +242,30 @@ let declare_option cast uncast append ?(preprocess = fun x -> x) match m with | OptSet -> write v | OptAppend -> write (append (read ()) v) in - let load_options i o = cache_options o in + let load_options i (_, (l, _, _) as o) = match l with + | OptGlobal -> cache_options o + | OptExport -> () + | OptLocal | OptDefault -> + (** Ruled out by classify_function *) + assert false + in + let open_options i (_, (l, _, _) as o) = match l with + | OptExport -> if Int.equal i 1 then cache_options o + | OptGlobal -> () + | OptLocal | OptDefault -> + (** Ruled out by classify_function *) + assert false + in let subst_options (subst,obj) = obj in let discharge_options (_,(l,_,_ as o)) = - match l with OptLocal -> None | _ -> Some o in + match l with OptLocal -> None | (OptExport | OptGlobal | OptDefault) -> Some o in let classify_options (l,_,_ as o) = - match l with OptGlobal -> Substitute o | _ -> Dispose in + match l with (OptExport | OptGlobal) -> Substitute o | (OptLocal | OptDefault) -> Dispose in let options : option_locality * option_mod * _ -> obj = declare_object { (default_object (nickname key)) with load_function = load_options; + open_function = open_options; cache_function = cache_options; subst_function = subst_options; discharge_function = discharge_options; @@ -302,12 +311,12 @@ let warn_unknown_option = (fun key -> strbrk "There is no option " ++ str (nickname key) ++ str ".") -let set_option_value locality check_and_cast key v = +let set_option_value ?(locality = OptDefault) check_and_cast key v = let opt = try Some (get_option key) with Not_found -> None in match opt with | None -> 
warn_unknown_option key | Some (name, depr, (read,write,append)) -> - write (get_locality locality) (check_and_cast v (read ())) + write locality (check_and_cast v (read ())) let bad_type_error () = user_err Pp.(str "Bad type of value for this option.") @@ -334,25 +343,25 @@ let check_unset_value v = function warning. This allows a script to refer to an option that doesn't exist anymore *) -let set_int_option_value_gen locality = - set_option_value locality check_int_value -let set_bool_option_value_gen locality key v = - set_option_value locality check_bool_value key v -let set_string_option_value_gen locality = - set_option_value locality check_string_value -let unset_option_value_gen locality key = - set_option_value locality check_unset_value key () +let set_int_option_value_gen ?locality = + set_option_value ?locality check_int_value +let set_bool_option_value_gen ?locality key v = + set_option_value ?locality check_bool_value key v +let set_string_option_value_gen ?locality = + set_option_value ?locality check_string_value +let unset_option_value_gen ?locality key = + set_option_value ?locality check_unset_value key () -let set_string_option_append_value_gen locality key v = +let set_string_option_append_value_gen ?(locality = OptDefault) key v = let opt = try Some (get_option key) with Not_found -> None in match opt with | None -> warn_unknown_option key | Some (name, depr, (read,write,append)) -> - append (get_locality locality) (check_string_value v (read ())) + append locality (check_string_value v (read ())) -let set_int_option_value = set_int_option_value_gen None -let set_bool_option_value = set_bool_option_value_gen None -let set_string_option_value = set_string_option_value_gen None +let set_int_option_value opt v = set_int_option_value_gen opt v +let set_bool_option_value opt v = set_bool_option_value_gen opt v +let set_string_option_value opt v = set_string_option_value_gen opt v (* Printing options/tables *) diff --git a/library/goptions.mli 
b/library/goptions.mli index 31920b832a..6c14d19ee9 100644 --- a/library/goptions.mli +++ b/library/goptions.mli @@ -50,6 +50,8 @@ open Mod_subst type option_name = string list +type option_locality = OptDefault | OptLocal | OptExport | OptGlobal + (** {6 Tables. } *) (** The functor [MakeStringTable] declares a table containing objects @@ -150,13 +152,12 @@ val get_ref_table : mem : reference -> unit; print : unit > -(** The first argument is a locality flag. - [Some true] = "Local", [Some false]="Global". *) -val set_int_option_value_gen : bool option -> option_name -> int option -> unit -val set_bool_option_value_gen : bool option -> option_name -> bool -> unit -val set_string_option_value_gen : bool option -> option_name -> string -> unit -val set_string_option_append_value_gen : bool option -> option_name -> string -> unit -val unset_option_value_gen : bool option -> option_name -> unit +(** The first argument is a locality flag. *) +val set_int_option_value_gen : ?locality:option_locality -> option_name -> int option -> unit +val set_bool_option_value_gen : ?locality:option_locality -> option_name -> bool -> unit +val set_string_option_value_gen : ?locality:option_locality -> option_name -> string -> unit +val set_string_option_append_value_gen : ?locality:option_locality -> option_name -> string -> unit +val unset_option_value_gen : ?locality:option_locality -> option_name -> unit val set_int_option_value : option_name -> int option -> unit val set_bool_option_value : option_name -> bool -> unit diff --git a/parsing/g_vernac.ml4 b/parsing/g_vernac.ml4 index feaef31619..f1ec496231 100644 --- a/parsing/g_vernac.ml4 +++ b/parsing/g_vernac.ml4 @@ -822,7 +822,14 @@ GEXTEND Gram END GEXTEND Gram - GLOBAL: command query_command class_rawexpr; + GLOBAL: command query_command class_rawexpr gallina_ext; + + gallina_ext: + [ [ IDENT "Export"; "Set"; table = option_table; v = option_value -> + VernacSetOption (true, table, v) + | IDENT "Export"; IDENT "Unset"; table = 
option_table -> + VernacUnsetOption (true, table) + ] ]; command: [ [ IDENT "Comments"; l = LIST0 comment -> VernacComments l @@ -887,24 +894,9 @@ GEXTEND Gram (* For acting on parameter tables *) | "Set"; table = option_table; v = option_value -> - begin match v with - | StringValue s -> - (* We make a special case for warnings because appending is their - natural semantics *) - if CString.List.equal table ["Warnings"] then - VernacSetAppendOption (table, s) - else - let (last, prefix) = List.sep_last table in - if String.equal last "Append" && not (List.is_empty prefix) then - VernacSetAppendOption (prefix, s) - else - VernacSetOption (table, v) - | _ -> VernacSetOption (table, v) - end - | "Set"; table = option_table -> - VernacSetOption (table,BoolValue true) + VernacSetOption (false, table, v) | IDENT "Unset"; table = option_table -> - VernacUnsetOption table + VernacUnsetOption (false, table) | IDENT "Print"; IDENT "Table"; table = option_table -> VernacPrintOption table @@ -1010,7 +1002,8 @@ GEXTEND Gram | IDENT "Module"; qid = global -> LocateModule qid ] ] ; option_value: - [ [ n = integer -> IntValue (Some n) + [ [ -> BoolValue true + | n = integer -> IntValue (Some n) | s = STRING -> StringValue s ] ] ; option_ref_value: @@ -1081,10 +1074,10 @@ GEXTEND Gram (* Tactic Debugger *) | IDENT "Debug"; IDENT "On" -> - VernacSetOption (["Ltac";"Debug"], BoolValue true) + VernacSetOption (false, ["Ltac";"Debug"], BoolValue true) | IDENT "Debug"; IDENT "Off" -> - VernacSetOption (["Ltac";"Debug"], BoolValue false) + VernacSetOption (false, ["Ltac";"Debug"], BoolValue false) (* registration of a custom reduction *) diff --git a/plugins/setoid_ring/newring.ml b/plugins/setoid_ring/newring.ml index b6bac1a14e..99bb8440c6 100644 --- a/plugins/setoid_ring/newring.ml +++ b/plugins/setoid_ring/newring.ml @@ -90,7 +90,7 @@ let lookup_map map = let protect_red map env sigma c0 = let evars ev = Evarutil.safe_evar_value sigma ev in let c = EConstr.Unsafe.to_constr c0 in - 
EConstr.of_constr (kl (create_clos_infos ~evars all env) + EConstr.of_constr (kl (create_clos_infos ~evars all env) (create_tab ()) (mk_clos_but (lookup_map map sigma c0) (Esubst.subs_id 0) c));; let protect_tac map = diff --git a/plugins/ssr/ssrvernac.ml4 b/plugins/ssr/ssrvernac.ml4 index e3941c1c5d..c3b6a7c59f 100644 --- a/plugins/ssr/ssrvernac.ml4 +++ b/plugins/ssr/ssrvernac.ml4 @@ -178,7 +178,7 @@ GEXTEND Gram GLOBAL: gallina_ext; gallina_ext: [ [ IDENT "Import"; IDENT "Prenex"; IDENT "Implicits" -> - Vernacexpr.VernacUnsetOption (["Printing"; "Implicit"; "Defensive"]) + Vernacexpr.VernacUnsetOption (false, ["Printing"; "Implicit"; "Defensive"]) ] ] ; END diff --git a/pretyping/cbv.ml b/pretyping/cbv.ml index 7cfb30f4c1..a2155697ec 100644 --- a/pretyping/cbv.ml +++ b/pretyping/cbv.ml @@ -134,7 +134,7 @@ let mkSTACK = function | STACK(0,v0,stk0), stk -> STACK(0,v0,stack_concat stk0 stk) | v,stk -> STACK(0,v,stk) -type cbv_infos = { infos : cbv_value infos; sigma : Evd.evar_map } +type cbv_infos = { tab : cbv_value infos_tab; infos : cbv_value infos; sigma : Evd.evar_map } (* Change: zeta reduction cannot be avoided in CBV *) @@ -318,7 +318,7 @@ let rec norm_head info env t stack = and norm_head_ref k info env stack normt = if red_set_ref (info_flags info.infos) normt then - match ref_value_cache info.infos normt with + match ref_value_cache info.infos info.tab normt with | Some body -> if !debug_cbv then Feedback.msg_debug Pp.(str "Unfolding " ++ pr_key normt); strip_appl (shift_value k body) stack @@ -455,8 +455,8 @@ let cbv_norm infos constr = (* constant bodies are normalized at the first expansion *) let create_cbv_infos flgs env sigma = let infos = create - (fun old_info c -> cbv_stack_term { infos = old_info; sigma } TOP (subs_id 0) c) + (fun old_info tab c -> cbv_stack_term { tab; infos = old_info; sigma } TOP (subs_id 0) c) flgs env (Reductionops.safe_evar_value sigma) in - { infos; sigma } + { tab = CClosure.create_tab (); infos; sigma } diff --git 
a/pretyping/inferCumulativity.ml b/pretyping/inferCumulativity.ml index 20883f6f6b..eb283a0220 100644 --- a/pretyping/inferCumulativity.ml +++ b/pretyping/inferCumulativity.ml @@ -83,10 +83,12 @@ let infer_table_key infos variances c = infer_generic_instance_eq variances u | VarKey _ | RelKey _ -> variances +let whd_stack (infos, tab) hd stk = CClosure.whd_stack infos tab hd stk + let rec infer_fterm cv_pb infos variances hd stk = Control.check_for_interrupt (); - let open CClosure in let hd,stk = whd_stack infos hd stk in + let open CClosure in match fterm_of hd with | FAtom a -> begin match kind a with @@ -116,7 +118,7 @@ let rec infer_fterm cv_pb infos variances hd stk = if Instance.is_empty u then variances else let nargs = stack_args_size stk in - infer_inductive_instance cv_pb (info_env infos) variances ind nargs u + infer_inductive_instance cv_pb (info_env (fst infos)) variances ind nargs u in infer_stack infos variances stk | FConstruct (ctor,u) -> @@ -124,7 +126,7 @@ let rec infer_fterm cv_pb infos variances hd stk = if Instance.is_empty u then variances else let nargs = stack_args_size stk in - infer_constructor_instance_eq (info_env infos) variances ctor nargs u + infer_constructor_instance_eq (info_env (fst infos)) variances ctor nargs u in infer_stack infos variances stk | FFix ((_,(_,tys,cl)),e) | FCoFix ((_,(_,tys,cl)),e) -> @@ -161,7 +163,7 @@ and infer_vect infos variances v = let infer_term cv_pb env variances c = let open CClosure in - let infos = create_clos_infos all env in + let infos = (create_clos_infos all env, create_tab ()) in infer_fterm cv_pb infos variances (CClosure.inject c) [] let infer_arity_constructor is_arity env variances arcn = diff --git a/pretyping/reductionops.ml b/pretyping/reductionops.ml index e8b19f6bc4..0e66ff0b6f 100644 --- a/pretyping/reductionops.ml +++ b/pretyping/reductionops.ml @@ -29,19 +29,6 @@ exception Elimconst their parameters in its stack. 
*) -let refolding_in_reduction = ref false -let _ = Goptions.declare_bool_option { - Goptions.optdepr = true; (* remove in 8.8 *) - Goptions.optname = - "Perform refolding of fixpoints/constants like cbn during reductions"; - Goptions.optkey = ["Refolding";"Reduction"]; - Goptions.optread = (fun () -> !refolding_in_reduction); - Goptions.optwrite = (fun a -> refolding_in_reduction:=a); -} - -let get_refolding_in_reduction () = !refolding_in_reduction -let set_refolding_in_reduction = (:=) refolding_in_reduction - (** Support for reduction effects *) open Mod_subst @@ -1135,7 +1122,7 @@ let local_whd_state_gen flags sigma = whrec let raw_whd_state_gen flags env = - let f sigma s = fst (whd_state_gen ~refold:(get_refolding_in_reduction ()) + let f sigma s = fst (whd_state_gen ~refold:false ~tactic_mode:false flags env sigma s) in f @@ -1232,6 +1219,7 @@ let clos_norm_flags flgs env sigma t = let evars ev = safe_evar_value sigma ev in EConstr.of_constr (CClosure.norm_val (CClosure.create_clos_infos ~evars flgs env) + (CClosure.create_tab ()) (CClosure.inject (EConstr.Unsafe.to_constr t))) with e when is_anomaly e -> user_err Pp.(str "Tried to normalize ill-typed term") @@ -1240,6 +1228,7 @@ let clos_whd_flags flgs env sigma t = let evars ev = safe_evar_value sigma ev in EConstr.of_constr (CClosure.whd_val (CClosure.create_clos_infos ~evars flgs env) + (CClosure.create_tab ()) (CClosure.inject (EConstr.Unsafe.to_constr t))) with e when is_anomaly e -> user_err Pp.(str "Tried to normalize ill-typed term") @@ -1561,7 +1550,7 @@ let is_sort env sigma t = of case/fix (heuristic used by evar_conv) *) let whd_betaiota_deltazeta_for_iota_state ts env sigma csts s = - let refold = get_refolding_in_reduction () in + let refold = false in let tactic_mode = false in let rec whrec csts s = let (t, stack as s),csts' = whd_state_gen ~csts ~refold ~tactic_mode CClosure.betaiota env sigma s in diff --git a/pretyping/reductionops.mli b/pretyping/reductionops.mli index 
3b56513f5e..29dc3ed0f2 100644 --- a/pretyping/reductionops.mli +++ b/pretyping/reductionops.mli @@ -31,11 +31,6 @@ module ReductionBehaviour : sig val print : Globnames.global_reference -> Pp.t end -(** Option telling if reduction should use the refolding machinery of cbn - (off by default) *) -val get_refolding_in_reduction : unit -> bool -val set_refolding_in_reduction : bool -> unit - (** {6 Support for reduction effects } *) type effect_name = string diff --git a/printing/ppvernac.ml b/printing/ppvernac.ml index 2b7d643d6f..ea1ca26fbe 100644 --- a/printing/ppvernac.ml +++ b/printing/ppvernac.ml @@ -1118,18 +1118,15 @@ open Decl_kinds hov 1 (keyword "Strategy" ++ spc() ++ hv 0 (prlist_with_sep sep pr_line l)) ) - | VernacUnsetOption (na) -> + | VernacUnsetOption (export, na) -> + let export = if export then keyword "Export" ++ spc () else mt () in return ( - hov 1 (keyword "Unset" ++ spc() ++ pr_printoption na None) + hov 1 (export ++ keyword "Unset" ++ spc() ++ pr_printoption na None) ) - | VernacSetOption (na,v) -> + | VernacSetOption (export, na,v) -> + let export = if export then keyword "Export" ++ spc () else mt () in return ( - hov 2 (keyword "Set" ++ spc() ++ pr_set_option na v) - ) - | VernacSetAppendOption (na,v) -> - return ( - hov 2 (keyword "Set" ++ spc() ++ pr_printoption na None ++ - spc() ++ keyword "Append" ++ spc() ++ qs v) + hov 2 (export ++ keyword "Set" ++ spc() ++ pr_set_option na v) ) | VernacAddOption (na,l) -> return ( diff --git a/stm/vernac_classifier.ml b/stm/vernac_classifier.ml index 48ccb8f4ce..f68c8b326b 100644 --- a/stm/vernac_classifier.ml +++ b/stm/vernac_classifier.ml @@ -61,7 +61,7 @@ let classify_vernac e = (* Univ poly compatibility: we run it now, so that we can just * look at Flags in stm.ml. Would be nicer to have the stm * look at the entire dag to detect this option. 
*) - | ( VernacSetOption (l,_) | VernacUnsetOption l) + | ( VernacSetOption (_, l,_) | VernacUnsetOption (_, l)) when CList.equal String.equal l Vernacentries.universe_polymorphism_option_name -> VtSideff [], VtNow (* Qed *) @@ -87,8 +87,8 @@ let classify_vernac e = proof_block_detection = Some "curly" }, VtLater (* Options changing parser *) - | VernacUnsetOption (["Default";"Proof";"Using"]) - | VernacSetOption (["Default";"Proof";"Using"],_) -> VtSideff [], VtNow + | VernacUnsetOption (_, ["Default";"Proof";"Using"]) + | VernacSetOption (_, ["Default";"Proof";"Using"],_) -> VtSideff [], VtNow (* StartProof *) | VernacDefinition ((Decl_kinds.DoDischarge,_),({v=i},_),ProveBody _) -> VtStartProof(default_proof_mode (),Doesn'tGuaranteeOpacity, idents_of_name i), VtLater @@ -149,7 +149,7 @@ let classify_vernac e = | VernacReserve _ | VernacGeneralizable _ | VernacSetOpacity _ | VernacSetStrategy _ - | VernacUnsetOption _ | VernacSetOption _ | VernacSetAppendOption _ + | VernacUnsetOption _ | VernacSetOption _ | VernacAddOption _ | VernacRemoveOption _ | VernacMemOption _ | VernacPrintOption _ | VernacGlobalCheck _ diff --git a/test-suite/bugs/closed/3424.v b/test-suite/bugs/opened/3424.v index ee8cabf171..d1c5bb68f9 100644 --- a/test-suite/bugs/closed/3424.v +++ b/test-suite/bugs/opened/3424.v @@ -13,12 +13,12 @@ Notation "0" := (trunc_S minus_one) : trunc_scope. Class IsTrunc (n : trunc_index) (A : Type) : Type := Trunc_is_trunc : IsTrunc_internal n A. Notation IsHProp := (IsTrunc minus_one). Notation IsHSet := (IsTrunc 0). -Set Refolding Reduction. Goal forall (A : Type) (a b : A) (H' : IsHSet A), { x : Type & IsHProp x }. Proof. intros. eexists. (* exact (H' a b). *) (* Undo. *) -apply (H' a b). +Fail apply (H' a b). +exact (H' a b). Qed. 
diff --git a/test-suite/output/inference.out b/test-suite/output/inference.out index 5e9eff0481..f7ffd1959a 100644 --- a/test-suite/output/inference.out +++ b/test-suite/output/inference.out @@ -4,8 +4,6 @@ fun e : option L => match e with | None => None end : option L -> option L -fun (m n p : nat) (H : S m <= S n + p) => le_S_n m (n + p) H - : forall m n p : nat, S m <= S n + p -> m <= n + p fun n : nat => let y : T n := A n in ?t ?x : T n : forall n : nat, T n where diff --git a/test-suite/output/inference.v b/test-suite/output/inference.v index 73169dae65..57a4739e9f 100644 --- a/test-suite/output/inference.v +++ b/test-suite/output/inference.v @@ -13,12 +13,6 @@ Definition P (e:option L) := Print P. -(* Check that plus is folded even if reduction is involved *) -Set Warnings Append "-deprecated-option". -Set Refolding Reduction. -Check (fun m n p (H : S m <= (S n) + p) => le_S_n _ _ H). - - (* Check that the heuristic to solve constraints is not artificially dependent on the presence of a let-in, and in particular that the second [_] below is not inferred to be n, as if obtained by diff --git a/test-suite/success/coercions.v b/test-suite/success/coercions.v index 9b11bc011c..9389c9d32e 100644 --- a/test-suite/success/coercions.v +++ b/test-suite/success/coercions.v @@ -130,4 +130,59 @@ Local Coercion l2v2 : list >-> vect. of coercions *) Fail Check (fun l : list (T1 * T1) => (l : vect _ _)). Check (fun l : list (T1 * T1) => (l2v2 l : vect _ _)). -Section what_we_could_do. +End what_we_could_do. + + +(** Unit test for Prop as source class *) + +Module TestPropAsSourceCoercion. + + Parameter heap : Prop. + + Parameter heap_empty : heap. + + Definition hprop := heap -> Prop. + + Coercion hpure (P:Prop) : hprop := fun h => h = heap_empty /\ P. + + Parameter heap_single : nat -> nat -> hprop. + + Parameter hstar : hprop -> hprop -> hprop. + + Notation "H1 \* H2" := (hstar H1 H2) (at level 69). 
+ + Definition test := heap_single 4 5 \* (5 <> 4) \* heap_single 2 4 \* (True). + + (* Print test. -- reveals [hpure] coercions *) + +End TestPropAsSourceCoercion. + + +(** Unit test for Type as source class *) + +Module TestTypeAsSourceCoercion. + + Require Import Coq.Setoids.Setoid. + + Record setoid := { A : Type ; R : relation A ; eqv : Equivalence R }. + + Definition default_setoid (T : Type) : setoid + := {| A := T ; R := eq ; eqv := _ |}. + + Coercion default_setoid : Sortclass >-> setoid. + + Definition foo := Type : setoid. + + Inductive type := U | Nat. + Inductive term : type -> Type := + | ty (_ : Type) : term U + | nv (_ : nat) : term Nat. + + Coercion ty : Sortclass >-> term. + + Definition ty1 := Type : term _. + Definition ty2 := Prop : term _. + Definition ty3 := Set : term _. + Definition ty4 := (Type : Type) : term _. + +End TestTypeAsSourceCoercion. diff --git a/theories/Init/Tactics.v b/theories/Init/Tactics.v index 9e6c26b10c..8df533e743 100644 --- a/theories/Init/Tactics.v +++ b/theories/Init/Tactics.v @@ -255,6 +255,7 @@ Tactic Notation "dependent" "induction" ident(H) := writing a version of [inversion] / [dependent destruction] which does not lose information, i.e., does not turn a goal which is provable into one which requires axiom K / UIP. *) + Ltac simpl_proj_exist_in H := repeat match type of H with | context G[proj1_sig (exist _ ?x ?p)] @@ -310,8 +311,20 @@ Ltac inversion_sigma_step := Ltac inversion_sigma := repeat inversion_sigma_step. (** A version of [time] that works for constrs *) + Ltac time_constr tac := let eval_early := match goal with _ => restart_timer end in let ret := tac () in let eval_early := match goal with _ => finish_timing ( "Tactic evaluation" ) end in ret. + +(** Useful combinators *) + +Ltac assert_fails tac := + tryif tac then fail 0 tac "succeeds" else idtac. +Ltac assert_succeeds tac := + tryif (assert_fails tac) then fail 0 tac "fails" else idtac. 
+Tactic Notation "assert_succeeds" tactic3(tac) := + assert_succeeds tac. +Tactic Notation "assert_fails" tactic3(tac) := + assert_fails tac. diff --git a/vernac/class.ml b/vernac/class.ml index cc676af1b4..59d9331087 100644 --- a/vernac/class.ml +++ b/vernac/class.ml @@ -225,7 +225,7 @@ let build_id_coercion idf_opt source poly = ConstRef kn let check_source = function -| Some (CL_FUN|CL_SORT as s) -> raise (CoercionError (ForbiddenSourceClass s)) +| Some (CL_FUN as s) -> raise (CoercionError (ForbiddenSourceClass s)) | _ -> () (* diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml index 4c9b41b216..7764920d9f 100644 --- a/vernac/vernacentries.ml +++ b/vernac/vernacentries.ml @@ -1539,18 +1539,45 @@ let vernac_set_opacity ~atts (v,l) = let l = List.map glob_ref l in Redexpr.set_strategy local [v,l] -let vernac_set_option ~atts key = function - | StringValue s -> set_string_option_value_gen atts.locality key s - | StringOptValue (Some s) -> set_string_option_value_gen atts.locality key s - | StringOptValue None -> unset_option_value_gen atts.locality key - | IntValue n -> set_int_option_value_gen atts.locality key n - | BoolValue b -> set_bool_option_value_gen atts.locality key b - -let vernac_set_append_option ~atts key s = - set_string_option_append_value_gen atts.locality key s +let get_option_locality export local = + if export then + if Option.is_empty local then OptExport + else user_err Pp.(str "Locality modifiers forbidden with Export") + else match local with + | Some true -> OptLocal + | Some false -> OptGlobal + | None -> OptDefault + +let vernac_set_option0 ~atts export key opt = + let locality = get_option_locality export atts.locality in + match opt with + | StringValue s -> set_string_option_value_gen ~locality key s + | StringOptValue (Some s) -> set_string_option_value_gen ~locality key s + | StringOptValue None -> unset_option_value_gen ~locality key + | IntValue n -> set_int_option_value_gen ~locality key n + | BoolValue b -> 
set_bool_option_value_gen ~locality key b + +let vernac_set_append_option ~atts export key s = + let locality = get_option_locality export atts.locality in + set_string_option_append_value_gen ~locality key s + +let vernac_set_option ~atts export table v = match v with +| StringValue s -> + (* We make a special case for warnings because appending is their + natural semantics *) + if CString.List.equal table ["Warnings"] then + vernac_set_append_option ~atts export table s + else + let (last, prefix) = List.sep_last table in + if String.equal last "Append" && not (List.is_empty prefix) then + vernac_set_append_option ~atts export prefix s + else + vernac_set_option0 ~atts export table v +| _ -> vernac_set_option0 ~atts export table v -let vernac_unset_option ~atts key = - unset_option_value_gen atts.locality key +let vernac_unset_option ~atts export key = + let locality = get_option_locality export atts.locality in + unset_option_value_gen ~locality key let vernac_add_option key lv = let f = function @@ -2083,9 +2110,8 @@ let interp ?proof ~atts ~st c = | VernacGeneralizable gen -> vernac_generalizable ~atts gen | VernacSetOpacity qidl -> vernac_set_opacity ~atts qidl | VernacSetStrategy l -> vernac_set_strategy ~atts l - | VernacSetOption (key,v) -> vernac_set_option ~atts key v - | VernacSetAppendOption (key,v) -> vernac_set_append_option ~atts key v - | VernacUnsetOption key -> vernac_unset_option ~atts key + | VernacSetOption (export, key,v) -> vernac_set_option ~atts export key v + | VernacUnsetOption (export, key) -> vernac_unset_option ~atts export key | VernacRemoveOption (key,v) -> vernac_remove_option key v | VernacAddOption (key,v) -> vernac_add_option key v | VernacMemOption (key,v) -> vernac_mem_option key v @@ -2148,7 +2174,7 @@ let check_vernac_supports_locality c l = | VernacArgumentsScope _ | VernacDeclareImplicits _ | VernacArguments _ | VernacGeneralizable _ | VernacSetOpacity _ | VernacSetStrategy _ - | VernacSetOption _ | VernacSetAppendOption _ 
| VernacUnsetOption _ + | VernacSetOption _ | VernacUnsetOption _ | VernacDeclareReduction _ | VernacExtend _ | VernacInductive _) -> () diff --git a/vernac/vernacprop.ml b/vernac/vernacprop.ml index 44a7a9b15e..a837b77a33 100644 --- a/vernac/vernacprop.ml +++ b/vernac/vernacprop.ml @@ -52,7 +52,7 @@ let is_reset = function | _ -> false let is_debug cmd = match under_control cmd with - | VernacSetOption (["Ltac";"Debug"], _) -> true + | VernacSetOption (_, ["Ltac";"Debug"], _) -> true | _ -> false let is_undo cmd = match under_control cmd with |
