aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Makefile.doc5
-rw-r--r--checker/checkInductive.ml47
-rw-r--r--checker/checkTypes.mli2
-rw-r--r--checker/values.ml10
-rw-r--r--clib/cStack.ml44
-rw-r--r--clib/cStack.mli58
-rw-r--r--clib/clib.mllib2
-rw-r--r--default.nix2
-rw-r--r--dev/ci/ci-common.sh2
-rw-r--r--dev/ci/user-overlays/11764-ppedrot-simplify-template.sh18
-rw-r--r--dev/nixpkgs.nix4
-rw-r--r--doc/sphinx/README.rst6
-rw-r--r--doc/sphinx/README.template.rst4
-rw-r--r--doc/sphinx/addendum/generalized-rewriting.rst155
-rw-r--r--doc/sphinx/addendum/implicit-coercions.rst48
-rw-r--r--doc/sphinx/addendum/ring.rst105
-rw-r--r--doc/sphinx/addendum/universe-polymorphism.rst9
-rwxr-xr-xdoc/sphinx/conf.py3
-rw-r--r--doc/sphinx/language/gallina-specification-language.rst31
-rw-r--r--doc/sphinx/practical-tools/utilities.rst37
-rw-r--r--doc/sphinx/proof-engine/tactics.rst2
-rw-r--r--doc/sphinx/user-extensions/syntax-extensions.rst4
-rw-r--r--doc/tools/coqrst/coqdomain.py2
-rw-r--r--doc/tools/docgram/common.edit_mlg77
-rw-r--r--doc/tools/docgram/doc_grammar.ml2
-rw-r--r--doc/tools/docgram/fullGrammar43
-rw-r--r--doc/tools/docgram/orderedGrammar500
-rw-r--r--ide/coqide.ml9
-rw-r--r--kernel/cooking.ml24
-rw-r--r--kernel/declarations.ml7
-rw-r--r--kernel/declareops.ml7
-rw-r--r--kernel/entries.ml2
-rw-r--r--kernel/environ.ml8
-rw-r--r--kernel/indTyping.ml126
-rw-r--r--kernel/indTyping.mli3
-rw-r--r--kernel/indtypes.ml7
-rw-r--r--kernel/inductive.ml10
-rw-r--r--kernel/inductive.mli3
-rw-r--r--kernel/safe_typing.ml7
-rw-r--r--lib/control.ml2
-rw-r--r--lib/pp.ml6
-rw-r--r--lib/util.ml4
-rw-r--r--lib/util.mli4
-rw-r--r--plugins/cc/ccalgo.mli97
-rw-r--r--plugins/funind/indfun_common.ml27
-rw-r--r--plugins/funind/recdef.ml46
-rw-r--r--plugins/syntax/r_syntax.ml2
-rw-r--r--pretyping/evarsolve.ml6
-rw-r--r--pretyping/inductiveops.ml6
-rw-r--r--tactics/pfedit.ml20
-rw-r--r--test-suite/output/RealSyntax.out2
-rw-r--r--test-suite/output/RealSyntax.v2
-rw-r--r--theories/Arith/Lt.v8
-rw-r--r--theories/Arith/PeanoNat.v3
-rw-r--r--theories/Arith/Wf_nat.v2
-rw-r--r--theories/Init/Peano.v2
-rw-r--r--theories/Init/Wf.v3
-rw-r--r--vernac/comInductive.ml25
-rw-r--r--vernac/record.ml17
-rw-r--r--vernac/vernacextend.ml24
-rw-r--r--vernac/vernacstate.ml9
61 files changed, 842 insertions, 910 deletions
diff --git a/Makefile.doc b/Makefile.doc
index 1249555cd7..5aa1ae9850 100644
--- a/Makefile.doc
+++ b/Makefile.doc
@@ -38,10 +38,11 @@ SPHINXENV:=COQBIN="$(CURDIR)/bin/" COQLIB="$(WIN_CURDIR)"
else
SPHINXENV:=COQBIN="$(CURDIR)/bin/" COQLIB="$(CURDIR)"
endif
-SPHINXOPTS= -j4
SPHINXWARNERROR ?= 1
ifeq ($(SPHINXWARNERROR),1)
-SPHINXOPTS += -W
+SPHINXOPTS= -W
+else
+SPHINXOPTS=
endif
SPHINXBUILD= sphinx-build
SPHINXBUILDDIR= doc/sphinx/_build
diff --git a/checker/checkInductive.ml b/checker/checkInductive.ml
index 62e732ce69..c4c6d9bb4f 100644
--- a/checker/checkInductive.ml
+++ b/checker/checkInductive.ml
@@ -20,7 +20,7 @@ exception InductiveMismatch of MutInd.t * string
let check mind field b = if not b then raise (InductiveMismatch (mind,field))
-let to_entry mind (mb:mutual_inductive_body) : Entries.mutual_inductive_entry =
+let to_entry (mb:mutual_inductive_body) : Entries.mutual_inductive_entry =
let open Entries in
let nparams = List.length mb.mind_params_ctxt in (* include letins *)
let mind_entry_record = match mb.mind_record with
@@ -33,39 +33,27 @@ let to_entry mind (mb:mutual_inductive_body) : Entries.mutual_inductive_entry =
inductive types. The set of monomorphic constraints is already part of
the graph at that point, but we need to emulate a broken bound variable
mechanism for template inductive types. *)
- let fold accu ind = match ind.mind_arity with
- | RegularArity _ -> accu
- | TemplateArity ar ->
- match accu with
- | None -> Some ar.template_context
- | Some ctx ->
- (* Ensure that all template contexts agree. This is enforced by the
- kernel. *)
- let () = check mind "mind_arity" (ContextSet.equal ctx ar.template_context) in
- Some ctx
- in
- let univs = match Array.fold_left fold None mb.mind_packets with
+ let univs = match mb.mind_template with
| None -> ContextSet.empty
- | Some ctx -> ctx
+ | Some ctx -> ctx.template_context
in
Monomorphic_entry univs
| Polymorphic auctx -> Polymorphic_entry (AUContext.names auctx, AUContext.repr auctx)
in
let mind_entry_inds = Array.map_to_list (fun ind ->
- let mind_entry_arity, mind_entry_template = match ind.mind_arity with
+ let mind_entry_arity = match ind.mind_arity with
| RegularArity ar ->
let ctx, arity = Term.decompose_prod_n_assum nparams ar.mind_user_arity in
ignore ctx; (* we will check that the produced user_arity is equal to the input *)
- arity, false
+ arity
| TemplateArity ar ->
let ctx = ind.mind_arity_ctxt in
let ctx = List.firstn (List.length ctx - nparams) ctx in
- Term.mkArity (ctx, Sorts.sort_of_univ ar.template_level), true
+ Term.mkArity (ctx, Sorts.sort_of_univ ar.template_level)
in
{
mind_entry_typename = ind.mind_typename;
mind_entry_arity;
- mind_entry_template;
mind_entry_consnames = Array.to_list ind.mind_consnames;
mind_entry_lc = Array.map_to_list (fun c ->
let ctx, c = Term.decompose_prod_n_assum nparams c in
@@ -75,12 +63,19 @@ let to_entry mind (mb:mutual_inductive_body) : Entries.mutual_inductive_entry =
})
mb.mind_packets
in
+ let check_template ind = match ind.mind_arity with
+ | RegularArity _ -> false
+ | TemplateArity _ -> true
+ in
+ let mind_entry_template = Array.exists check_template mb.mind_packets in
+ let () = if mind_entry_template then assert (Array.for_all check_template mb.mind_packets) in
{
mind_entry_record;
mind_entry_finite = mb.mind_finite;
mind_entry_params = mb.mind_params_ctxt;
mind_entry_inds;
mind_entry_universes;
+ mind_entry_template;
mind_entry_cumulative= Option.has_some mb.mind_variance;
mind_entry_private = mb.mind_private;
}
@@ -89,13 +84,18 @@ let check_arity env ar1 ar2 = match ar1, ar2 with
| RegularArity ar, RegularArity {mind_user_arity;mind_sort} ->
Constr.equal ar.mind_user_arity mind_user_arity &&
Sorts.equal ar.mind_sort mind_sort
- | TemplateArity ar, TemplateArity {template_param_levels;template_level;template_context} ->
- List.equal (Option.equal Univ.Level.equal) ar.template_param_levels template_param_levels &&
- ContextSet.equal template_context ar.template_context &&
+ | TemplateArity ar, TemplateArity {template_level} ->
UGraph.check_leq (universes env) template_level ar.template_level
(* template_level is inferred by indtypes, so functor application can produce a smaller one *)
| (RegularArity _ | TemplateArity _), _ -> assert false
+let check_template ar1 ar2 = match ar1, ar2 with
+| None, None -> true
+| Some ar, Some {template_context; template_param_levels} ->
+ List.equal (Option.equal Univ.Level.equal) ar.template_param_levels template_param_levels &&
+ ContextSet.equal template_context ar.template_context
+| None, Some _ | Some _, None -> false
+
let check_kelim k1 k2 = Sorts.family_leq k1 k2
(* Use [eq_ind_chk] because when we rebuild the recargs we have lost
@@ -157,10 +157,10 @@ let check_same_record r1 r2 = match r1, r2 with
| (NotRecord | FakeRecord | PrimRecord _), _ -> false
let check_inductive env mind mb =
- let entry = to_entry mind mb in
+ let entry = to_entry mb in
let { mind_packets; mind_record; mind_finite; mind_ntypes; mind_hyps;
mind_nparams; mind_nparams_rec; mind_params_ctxt;
- mind_universes; mind_variance; mind_sec_variance;
+ mind_universes; mind_template; mind_variance; mind_sec_variance;
mind_private; mind_typing_flags; }
=
(* Locally set typing flags for further typechecking *)
@@ -191,6 +191,7 @@ let check_inductive env mind mb =
check "mind_params_ctxt" (Context.Rel.equal Constr.equal mb.mind_params_ctxt mind_params_ctxt);
ignore mind_universes; (* Indtypes did the necessary checking *)
+ check "mind_template" (check_template mb.mind_template mind_template);
check "mind_variance" (Option.equal (Array.equal Univ.Variance.equal)
mb.mind_variance mind_variance);
check "mind_sec_variance" (Option.is_empty mind_sec_variance);
diff --git a/checker/checkTypes.mli b/checker/checkTypes.mli
index ac9ea2fb31..9ef6ff017c 100644
--- a/checker/checkTypes.mli
+++ b/checker/checkTypes.mli
@@ -17,4 +17,4 @@ open Environ
(*s Typing functions (not yet tagged as safe) *)
val check_polymorphic_arity :
- env -> rel_context -> template_arity -> unit
+ env -> rel_context -> template_universes -> unit
diff --git a/checker/values.ml b/checker/values.ml
index ed730cff8e..cba96e6636 100644
--- a/checker/values.ml
+++ b/checker/values.ml
@@ -227,8 +227,11 @@ let v_oracle =
v_pred v_cst;
|]
-let v_pol_arity =
- v_tuple "polymorphic_arity" [|List(Opt v_level);v_univ;v_context_set|]
+let v_template_arity =
+ v_tuple "template_arity" [|v_univ|]
+
+let v_template_universes =
+ v_tuple "template_universes" [|List(Opt v_level);v_context_set|]
let v_primitive =
v_enum "primitive" 44 (* Number of "Primitive" in Int63.v and PrimFloat.v *)
@@ -265,7 +268,7 @@ let v_mono_ind_arity =
v_tuple "monomorphic_inductive_arity" [|v_constr;v_sort|]
let v_ind_arity = v_sum "inductive_arity" 0
- [|[|v_mono_ind_arity|];[|v_pol_arity|]|]
+ [|[|v_mono_ind_arity|];[|v_template_arity|]|]
let v_one_ind = v_tuple "one_inductive_body"
[|v_id;
@@ -301,6 +304,7 @@ let v_ind_pack = v_tuple "mutual_inductive_body"
Int;
v_rctxt;
v_univs; (* universes *)
+ Opt v_template_universes;
Opt (Array v_variance);
Opt (Array v_variance);
Opt v_bool;
diff --git a/clib/cStack.ml b/clib/cStack.ml
deleted file mode 100644
index 0432e29fad..0000000000
--- a/clib/cStack.ml
+++ /dev/null
@@ -1,44 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-exception Empty = Stack.Empty
-
-type 'a t = {
- mutable stack : 'a list;
-}
-
-let create () = { stack = [] }
-
-let push x s = s.stack <- x :: s.stack
-
-let pop = function
- | { stack = [] } -> raise Stack.Empty
- | { stack = x::xs } as s -> s.stack <- xs; x
-
-let top = function
- | { stack = [] } -> raise Stack.Empty
- | { stack = x::_ } -> x
-
-let to_list { stack = s } = s
-
-let find f s = List.find f (to_list s)
-
-let find_map f s = CList.find_map f s.stack
-
-let fold_until f accu s = CList.fold_left_until f accu s.stack
-
-let is_empty { stack = s } = s = []
-
-let iter f { stack = s } = List.iter f s
-
-let clear s = s.stack <- []
-
-let length { stack = s } = List.length s
-
diff --git a/clib/cStack.mli b/clib/cStack.mli
deleted file mode 100644
index de802160e7..0000000000
--- a/clib/cStack.mli
+++ /dev/null
@@ -1,58 +0,0 @@
-(************************************************************************)
-(* * The Coq Proof Assistant / The Coq Development Team *)
-(* v * INRIA, CNRS and contributors - Copyright 1999-2019 *)
-(* <O___,, * (see CREDITS file for the list of authors) *)
-(* \VV/ **************************************************************)
-(* // * This file is distributed under the terms of the *)
-(* * GNU Lesser General Public License Version 2.1 *)
-(* * (see LICENSE file for the text of the license) *)
-(************************************************************************)
-
-(** Extended interface for OCaml stacks. *)
-
-type 'a t
-
-exception Empty
-(** Alias for Stack.Empty. *)
-
-val create : unit -> 'a t
-(** Create an empty stack. *)
-
-val push : 'a -> 'a t -> unit
-(** Add an element to a stack. *)
-
-val find : ('a -> bool) -> 'a t -> 'a
-(** Find the first element satisfying the predicate.
- @raise Not_found it there is none. *)
-
-val is_empty : 'a t -> bool
-(** Whether a stack is empty. *)
-
-val iter : ('a -> unit) -> 'a t -> unit
-(** Iterate a function over elements, from the last added one. *)
-
-val clear : 'a t -> unit
-(** Empty a stack. *)
-
-val length : 'a t -> int
-(** Length of a stack. *)
-
-val pop : 'a t -> 'a
-(** Remove and returns the first element of the stack.
- @raise Empty if empty. *)
-
-val top : 'a t -> 'a
-(** Remove the first element of the stack without modifying it.
- @raise Empty if empty. *)
-
-val to_list : 'a t -> 'a list
-(** Convert to a list. *)
-
-val find_map : ('a -> 'b option) -> 'a t -> 'b
-(** Find the first element that returns [Some _].
- @raise Not_found it there is none. *)
-
-val fold_until : ('c -> 'a -> 'c CSig.until) -> 'c -> 'a t -> 'c
-(** Like CList.fold_left_until.
- The stack is traversed from the top and is not altered. *)
-
diff --git a/clib/clib.mllib b/clib/clib.mllib
index 5a2c9a9ce9..be3b5971be 100644
--- a/clib/clib.mllib
+++ b/clib/clib.mllib
@@ -9,7 +9,6 @@ CSet
CMap
CList
CString
-CStack
Int
Range
@@ -33,7 +32,6 @@ Unionfind
Dyn
Store
Exninfo
-Backtrace
IStream
Terminal
Monad
diff --git a/default.nix b/default.nix
index ae6a8d06e5..841bccb129 100644
--- a/default.nix
+++ b/default.nix
@@ -22,7 +22,7 @@
# a symlink to where Coq was installed.
{ pkgs ? import ./dev/nixpkgs.nix {}
-, ocamlPackages ? pkgs.ocamlPackages
+, ocamlPackages ? pkgs.ocaml-ng.ocamlPackages_4_09
, buildIde ? true
, buildDoc ? true
, doInstallCheck ? true
diff --git a/dev/ci/ci-common.sh b/dev/ci/ci-common.sh
index 7aa265cf90..f0dbe485f7 100644
--- a/dev/ci/ci-common.sh
+++ b/dev/ci/ci-common.sh
@@ -19,7 +19,7 @@ then
elif [ -d "$PWD/_build/install/default/" ];
then
# Dune build
- export OCAMLPATH="$PWD/_build/install/default/lib/"
+ export OCAMLPATH="$PWD/_build/install/default/lib/:$OCAMLPATH"
export COQBIN="$PWD/_build/install/default/bin"
export COQLIB="$PWD/_build/install/default/lib/coq"
CI_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
diff --git a/dev/ci/user-overlays/11764-ppedrot-simplify-template.sh b/dev/ci/user-overlays/11764-ppedrot-simplify-template.sh
new file mode 100644
index 0000000000..f8871ae158
--- /dev/null
+++ b/dev/ci/user-overlays/11764-ppedrot-simplify-template.sh
@@ -0,0 +1,18 @@
+if [ "$CI_PULL_REQUEST" = "11764" ] || [ "$CI_BRANCH" = "simplify-template" ]; then
+
+ elpi_CI_REF="simplify-template"
+ elpi_CI_GITURL=https://github.com/ppedrot/coq-elpi
+
+ equations_CI_REF="simplify-template"
+ equations_CI_GITURL=https://github.com/ppedrot/Coq-Equations
+
+ paramcoq_CI_REF="simplify-template"
+ paramcoq_CI_GITURL=https://github.com/ppedrot/paramcoq
+
+ mtac2_CI_REF="simplify-template"
+ mtac2_CI_GITURL=https://github.com/ppedrot/Mtac2
+
+ rewriter_CI_REF="simplify-template"
+ rewriter_CI_GITURL=https://github.com/ppedrot/rewriter
+
+fi
diff --git a/dev/nixpkgs.nix b/dev/nixpkgs.nix
index 54baaee1fe..b8a696ef21 100644
--- a/dev/nixpkgs.nix
+++ b/dev/nixpkgs.nix
@@ -1,4 +1,4 @@
import (fetchTarball {
- url = "https://github.com/NixOS/nixpkgs/archive/8da81465c19fca393a3b17004c743e4d82a98e4f.tar.gz";
- sha256 = "1f3s27nrssfk413pszjhbs70wpap43bbjx2pf4zq5x2c1kd72l6y";
+ url = "https://github.com/NixOS/nixpkgs/archive/34e41a91547e342f6fbc901929134b34000297eb.tar.gz";
+ sha256 = "0mlqxim36xg8aj4r35mpcgqg27wy1dbbim9l1cpjl24hcy96v48w";
})
diff --git a/doc/sphinx/README.rst b/doc/sphinx/README.rst
index 89b4bda71a..0802b5d0b4 100644
--- a/doc/sphinx/README.rst
+++ b/doc/sphinx/README.rst
@@ -3,8 +3,8 @@
=============================
..
- README.rst is auto-generated from README.template.rst and the coqrst docs;
- use ``doc/tools/coqrst/regen_readme.py`` to rebuild it.
+ README.rst is auto-generated from README.template.rst and the coqrst/*.py files
+ (in particular coqdomain.py). Use ``doc/tools/coqrst/regen_readme.py`` to rebuild it.
Coq's reference manual is written in `reStructuredText <http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html>`_ (“reST”), and compiled with `Sphinx <http://www.sphinx-doc.org/en/master/>`_.
@@ -97,7 +97,7 @@ Here is the list of all objects of the Coq domain (The symbol :black_nib: indica
``.. cmd::`` :black_nib: A Coq command.
Example::
- .. cmd:: Infix @string := @term1_extended {? ( {+, @syntax_modifier } ) } {? : @ident }
+ .. cmd:: Infix @string := @one_term {? ( {+, @syntax_modifier } ) } {? : @ident }
This command is equivalent to :n:`…`.
diff --git a/doc/sphinx/README.template.rst b/doc/sphinx/README.template.rst
index c5e0007e78..5762967c36 100644
--- a/doc/sphinx/README.template.rst
+++ b/doc/sphinx/README.template.rst
@@ -3,8 +3,8 @@
=============================
..
- README.rst is auto-generated from README.template.rst and the coqrst docs;
- use ``doc/tools/coqrst/regen_readme.py`` to rebuild it.
+ README.rst is auto-generated from README.template.rst and the coqrst/*.py files
+ (in particular coqdomain.py). Use ``doc/tools/coqrst/regen_readme.py`` to rebuild it.
Coq's reference manual is written in `reStructuredText <http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html>`_ (“reST”), and compiled with `Sphinx <http://www.sphinx-doc.org/en/master/>`_.
diff --git a/doc/sphinx/addendum/generalized-rewriting.rst b/doc/sphinx/addendum/generalized-rewriting.rst
index 94ab6e789c..315c9d4a80 100644
--- a/doc/sphinx/addendum/generalized-rewriting.rst
+++ b/doc/sphinx/addendum/generalized-rewriting.rst
@@ -713,48 +713,119 @@ Definitions
~~~~~~~~~~~
The generalized rewriting tactic is based on a set of strategies that can be
-combined to obtain custom rewriting procedures. Its set of strategies is based
+combined to create custom rewriting procedures. Its set of strategies is based
on the programmable rewriting strategies with generic traversals by Visser et al.
:cite:`Luttik97specificationof` :cite:`Visser98`, which formed the core of
the Stratego transformation language :cite:`Visser01`. Rewriting strategies
-are applied using the tactic :n:`rewrite_strat @strategy` where :token:`strategy` is a
-strategy expression. Strategies are defined inductively as described by the
-following grammar:
-
-.. productionlist:: coq
- strategy : `qualid` (lemma, left to right)
- : <- `qualid` (lemma, right to left)
- : fail (failure)
- : id (identity)
- : refl (reflexivity)
- : progress `strategy` (progress)
- : try `strategy` (try catch)
- : `strategy` ; `strategy` (composition)
- : choice `strategy` `strategy` (left_biased_choice)
- : repeat `strategy` (one or more)
- : any `strategy` (zero or more)
- : subterm `strategy` (one subterm)
- : subterms `strategy` (all subterms)
- : innermost `strategy` (innermost first)
- : outermost `strategy` (outermost first)
- : bottomup `strategy` (bottom-up)
- : topdown `strategy` (top-down)
- : hints `ident` (apply hints from hint database)
- : terms `term` ... `term` (any of the terms)
- : eval `red_expr` (apply reduction)
- : fold `term` (unify)
- : ( `strategy` )
-
-Actually a few of these are defined in term of the others using a
+are applied using the tactic :n:`rewrite_strat @rewstrategy`.
+
+.. insertprodn rewstrategy rewstrategy
+
+.. prodn::
+ rewstrategy ::= @one_term
+ | <- @one_term
+ | fail
+ | id
+ | refl
+ | progress @rewstrategy
+ | try @rewstrategy
+ | @rewstrategy ; @rewstrategy
+ | choice @rewstrategy @rewstrategy
+ | repeat @rewstrategy
+ | any @rewstrategy
+ | subterm @rewstrategy
+ | subterms @rewstrategy
+ | innermost @rewstrategy
+ | outermost @rewstrategy
+ | bottomup @rewstrategy
+ | topdown @rewstrategy
+ | hints @ident
+ | terms {* @one_term }
+ | eval @red_expr
+ | fold @one_term
+ | ( @rewstrategy )
+ | old_hints @ident
+
+:n:`@one_term`
+ lemma, left to right
+
+:n:`<- @one_term`
+ lemma, right to left
+
+:n:`fail`
+ failure
+
+:n:`id`
+ identity
+
+:n:`refl`
+ reflexivity
+
+:n:`progress @rewstrategy`
+ progress
+
+:n:`try @rewstrategy`
+ try catch
+
+:n:`@rewstrategy ; @rewstrategy`
+ composition
+
+:n:`choice @rewstrategy @rewstrategy`
+ left_biased_choice
+
+:n:`repeat @rewstrategy`
+ one or more
+
+:n:`any @rewstrategy`
+ zero or more
+
+:n:`subterm @rewstrategy`
+ one subterm
+
+:n:`subterms @rewstrategy`
+ all subterms
+
+:n:`innermost @rewstrategy`
+ innermost first
+
+:n:`outermost @rewstrategy`
+ outermost first
+
+:n:`bottomup @rewstrategy`
+ bottom-up
+
+:n:`topdown @rewstrategy`
+ top-down
+
+:n:`hints @ident`
+ apply hints from hint database
+
+:n:`terms {* @one_term }`
+ any of the terms
+
+:n:`eval @red_expr`
+ apply reduction
+
+:n:`fold @one_term`
+ unify
+
+:n:`( @rewstrategy )`
+ to be documented
+
+:n:`old_hints @ident`
+ to be documented
+
+
+A few of these are defined in terms of the others using a
primitive fixpoint operator:
-- :n:`try @strategy := choice @strategy id`
-- :n:`any @strategy := fix @ident. try (@strategy ; @ident)`
-- :n:`repeat @strategy := @strategy; any @strategy`
-- :n:`bottomup @strategy := fix @ident. (choice (progress (subterms @ident)) @strategy) ; try @ident`
-- :n:`topdown @strategy := fix @ident. (choice @strategy (progress (subterms @ident))) ; try @ident`
-- :n:`innermost @strategy := fix @ident. (choice (subterm @ident) @strategy)`
-- :n:`outermost @strategy := fix @ident. (choice @strategy (subterm @ident))`
+- :n:`try @rewstrategy := choice @rewstrategy id`
+- :n:`any @rewstrategy := fix @ident. try (@rewstrategy ; @ident)`
+- :n:`repeat @rewstrategy := @rewstrategy; any @rewstrategy`
+- :n:`bottomup @rewstrategy := fix @ident. (choice (progress (subterms @ident)) @rewstrategy) ; try @ident`
+- :n:`topdown @rewstrategy := fix @ident. (choice @rewstrategy (progress (subterms @ident))) ; try @ident`
+- :n:`innermost @rewstrategy := fix @ident. (choice (subterm @ident) @rewstrategy)`
+- :n:`outermost @rewstrategy := fix @ident. (choice @rewstrategy (subterm @ident))`
The basic control strategy semantics are straightforward: strategies
are applied to subterms of the term to rewrite, starting from the root
@@ -764,18 +835,18 @@ hand-side. Composition can be used to continue rewriting on the
current subterm. The ``fail`` strategy always fails while the identity
strategy succeeds without making progress. The reflexivity strategy
succeeds, making progress using a reflexivity proof of rewriting.
-``progress`` tests progress of the argument :token:`strategy` and fails if no
+``progress`` tests progress of the argument :n:`@rewstrategy` and fails if no
progress was made, while ``try`` always succeeds, catching failures.
``choice`` is left-biased: it will launch the first strategy and fall back
on the second one in case of failure. One can iterate a strategy at
least 1 time using ``repeat`` and at least 0 times using ``any``.
-The ``subterm`` and ``subterms`` strategies apply their argument :token:`strategy` to
+The ``subterm`` and ``subterms`` strategies apply their argument :n:`@rewstrategy` to
respectively one or all subterms of the current term under
consideration, left-to-right. ``subterm`` stops at the first subterm for
-which :token:`strategy` made progress. The composite strategies ``innermost`` and ``outermost``
+which :n:`@rewstrategy` made progress. The composite strategies ``innermost`` and ``outermost``
perform a single innermost or outermost rewrite using their argument
-:token:`strategy`. Their counterparts ``bottomup`` and ``topdown`` perform as many
+:n:`@rewstrategy`. Their counterparts ``bottomup`` and ``topdown`` perform as many
rewritings as possible, starting from the bottom or the top of the
term.
@@ -793,7 +864,7 @@ Usage
~~~~~
-.. tacn:: rewrite_strat @strategy {? in @ident }
+.. tacn:: rewrite_strat @rewstrategy {? in @ident }
:name: rewrite_strat
Rewrite using the strategy s in hypothesis ident or the conclusion.
diff --git a/doc/sphinx/addendum/implicit-coercions.rst b/doc/sphinx/addendum/implicit-coercions.rst
index b007509b2e..1f33775a01 100644
--- a/doc/sphinx/addendum/implicit-coercions.rst
+++ b/doc/sphinx/addendum/implicit-coercions.rst
@@ -37,12 +37,13 @@ In addition to these user-defined classes, we have two built-in classes:
* ``Funclass``, the class of functions; its objects are all the terms with a functional
type, i.e. of form :g:`forall x:A,B`.
-Formally, the syntax of classes is defined as:
+ .. insertprodn class class
+
+ .. prodn::
+ class ::= Funclass
+ | Sortclass
+ | @smart_qualid
-.. productionlist::
- class: `qualid`
- : Sortclass
- : Funclass
Coercions
@@ -186,37 +187,12 @@ Declaring Coercions
This defines :token:`ident` just like :n:`Let @ident := @term {? @type }`,
and then declares :token:`ident` as a coercion between it source and its target.
-Assumptions can be declared as coercions at declaration time.
-This extends the grammar of assumptions from
-Figure :ref:`vernacular` as follows:
-
-..
- FIXME:
- \comindex{Variable \mbox{\rm (and coercions)}}
- \comindex{Axiom \mbox{\rm (and coercions)}}
- \comindex{Parameter \mbox{\rm (and coercions)}}
- \comindex{Hypothesis \mbox{\rm (and coercions)}}
-
-.. productionlist::
- assumption : `assumption_token` `assums` .
- assums : `simple_assums`
- : (`simple_assums`) ... (`simple_assums`)
- simple_assums : `ident` ... `ident` :[>] `term`
-
-If the extra ``>`` is present before the type of some assumptions, these
-assumptions are declared as coercions.
-
-Similarly, constructors of inductive types can be declared as coercions at
-definition time of the inductive type. This extends and modifies the
-grammar of inductive types from Figure :ref:`vernacular` as follows:
-
-..
- FIXME:
- \comindex{Inductive \mbox{\rm (and coercions)}}
- \comindex{CoInductive \mbox{\rm (and coercions)}}
-
-Especially, if the extra ``>`` is present in a constructor
-declaration, this constructor is declared as a coercion.
+Some objects can be declared as coercions when they are defined.
+This applies to :ref:`assumptions<gallina-assumptions>` and
+constructors of :ref:`inductive types and record fields<gallina-inductive-definitions>`.
+Use :n:`:>` instead of :n:`:` before the
+:n:`@type` of the assumption to do so. See :n:`@of_type`.
+
.. cmd:: Identity Coercion @ident : @class >-> @class
diff --git a/doc/sphinx/addendum/ring.rst b/doc/sphinx/addendum/ring.rst
index 1098aa75da..76174e32b5 100644
--- a/doc/sphinx/addendum/ring.rst
+++ b/doc/sphinx/addendum/ring.rst
@@ -300,70 +300,79 @@ following property:
The syntax for adding a new ring is
-.. cmd:: Add Ring @ident : @term {? ( @ring_mod {* , @ring_mod } )}
-
- The :token:`ident` is not relevant. It is used just for error messages. The
- :token:`term` is a proof that the ring signature satisfies the (semi-)ring
+.. cmd:: Add Ring @ident : @one_term {? ( {+, @ring_mod } ) }
+
+ .. insertprodn ring_mod ring_mod
+
+ .. prodn::
+ ring_mod ::= decidable @one_term
+ | abstract
+ | morphism @one_term
+ | constants [ @ltac_expr ]
+ | preprocess [ @ltac_expr ]
+ | postprocess [ @ltac_expr ]
+ | setoid @one_term @one_term
+ | sign @one_term
+ | power @one_term [ {+ @qualid } ]
+ | power_tac @one_term [ @ltac_expr ]
+ | div @one_term
+ | closed [ {+ @qualid } ]
+
+ The :n:`@ident` is used only for error messages. The
+ :n:`@one_term` is a proof that the ring signature satisfies the (semi-)ring
axioms. The optional list of modifiers is used to tailor the behavior
- of the tactic. The following list describes their syntax and effects:
-
- .. productionlist:: coq
- ring_mod : abstract | decidable `term` | morphism `term`
- : setoid `term` `term`
- : constants [ `tactic` ]
- : preprocess [ `tactic` ]
- : postprocess [ `tactic` ]
- : power_tac `term` [ `tactic` ]
- : sign `term`
- : div `term`
-
- abstract
+ of the tactic. Here are their effects:
+
+ :n:`abstract`
declares the ring as abstract. This is the default.
- decidable :n:`@term`
+ :n:`decidable @one_term`
declares the ring as computational. The expression
- :n:`@term` is the correctness proof of an equality test ``?=!``
+ :n:`@one_term` is the correctness proof of an equality test ``?=!``
(which should be evaluable). Its type should be of the form
``forall x y, x ?=! y = true → x == y``.
- morphism :n:`@term`
+ :n:`morphism @one_term`
declares the ring as a customized one. The expression
- :n:`@term` is a proof that there exists a morphism between a set of
+ :n:`@one_term` is a proof that there exists a morphism between a set of
coefficient and the ring carrier (see ``Ring_theory.ring_morph`` and
``Ring_theory.semi_morph``).
- setoid :n:`@term` :n:`@term`
+ :n:`setoid @one_term @one_term`
forces the use of given setoid. The first
- :n:`@term` is a proof that the equality is indeed a setoid (see
- ``Setoid.Setoid_Theory``), and the second :n:`@term` a proof that the
+ :n:`@one_term` is a proof that the equality is indeed a setoid (see
+ ``Setoid.Setoid_Theory``), and the second a proof that the
ring operations are morphisms (see ``Ring_theory.ring_eq_ext`` and
``Ring_theory.sring_eq_ext``).
This modifier needs not be used if the setoid and morphisms have been
declared.
- constants [ :n:`@tactic` ]
- specifies a tactic expression :n:`@tactic` that, given a
+ :n:`constants [ @ltac_expr ]`
+ specifies a tactic expression :n:`@ltac_expr` that, given a
term, returns either an object of the coefficient set that is mapped
to the expression via the morphism, or returns
``InitialRing.NotConstant``. The default behavior is to map only 0 and 1
to their counterpart in the coefficient set. This is generally not
desirable for non trivial computational rings.
- preprocess [ :n:`@tactic` ]
- specifies a tactic :n:`@tactic` that is applied as a
+ :n:`preprocess [ @ltac_expr ]`
+ specifies a tactic :n:`@ltac_expr` that is applied as a
preliminary step for :tacn:`ring` and :tacn:`ring_simplify`. It can be used to
transform a goal so that it is better recognized. For instance, ``S n``
can be changed to ``plus 1 n``.
- postprocess [ :n:`@tactic` ]
- specifies a tactic :n:`@tactic` that is applied as a final
+ :n:`postprocess [ @ltac_expr ]`
+ specifies a tactic :n:`@ltac_expr` that is applied as a final
step for :tacn:`ring_simplify`. For instance, it can be used to undo
modifications of the preprocessor.
- power_tac :n:`@term` [ :n:`@tactic` ]
+ :n:`power @one_term [ {+ @qualid } ]`
+ to be documented
+
+ :n:`power_tac @one_term [ @ltac_expr ]`
allows :tacn:`ring` and :tacn:`ring_simplify` to recognize
power expressions with a constant positive integer exponent (example:
- :math:`x^2` ). The term :n:`@term` is a proof that a given power function satisfies
+ :math:`x^2` ). The term :n:`@one_term` is a proof that a given power function satisfies
the specification of a power function (term has to be a proof of
``Ring_theory.power_theory``) and :n:`@tactic` specifies a tactic expression
that, given a term, “abstracts” it into an object of type |N| whose
@@ -374,22 +383,25 @@ The syntax for adding a new ring is
and ``plugins/setoid_ring/RealField.v`` for examples. By default the tactic
does not recognize power expressions as ring expressions.
- sign :n:`@term`
+ :n:`sign @one_term`
allows :tacn:`ring_simplify` to use a minus operation when
outputting its normal form, i.e writing ``x − y`` instead of ``x + (− y)``. The
term :token:`term` is a proof that a given sign function indicates expressions
that are signed (:token:`term` has to be a proof of ``Ring_theory.get_sign``). See
``plugins/setoid_ring/InitialRing.v`` for examples of sign function.
- div :n:`@term`
+ :n:`div @one_term`
allows :tacn:`ring` and :tacn:`ring_simplify` to use monomials with
- coefficients other than 1 in the rewriting. The term :n:`@term` is a proof
+ coefficients other than 1 in the rewriting. The term :n:`@one_term` is a proof
that a given division function satisfies the specification of an
- euclidean division function (:n:`@term` has to be a proof of
+ euclidean division function (:n:`@one_term` has to be a proof of
``Ring_theory.div_theory``). For example, this function is called when
trying to rewrite :math:`7x` by :math:`2x = z` to tell that :math:`7 = 3 \times 2 + 1`. See
``plugins/setoid_ring/InitialRing.v`` for examples of div function.
+ :n:`closed [ {+ @qualid } ]`
+ to be documented
+
Error messages:
.. exn:: Bad ring structure.
@@ -653,24 +665,27 @@ zero for the correctness of the algorithm.
The syntax for adding a new field is
-.. cmd:: Add Field @ident : @term {? ( @field_mod {* , @field_mod } )}
+.. cmd:: Add Field @ident : @one_term {? ( {+, @field_mod } ) }
- The :n:`@ident` is not relevant. It is used just for error
- messages. :n:`@term` is a proof that the field signature satisfies the
+ .. insertprodn field_mod field_mod
+
+ .. prodn::
+ field_mod ::= @ring_mod
+ | completeness @one_term
+
+ The :n:`@ident` is used only for error
+ messages. :n:`@one_term` is a proof that the field signature satisfies the
(semi-)field axioms. The optional list of modifiers is used to tailor
the behavior of the tactic.
- .. productionlist:: coq
- field_mod : `ring_mod` | completeness `term`
-
Since field tactics are built upon ``ring``
- tactics, all modifiers of the ``Add Ring`` apply. There is only one
+ tactics, all modifiers of :cmd:`Add Ring` apply. There is only one
specific modifier:
- completeness :n:`@term`
+ completeness :n:`@one_term`
allows the field tactic to prove automatically
that the image of nonzero coefficients are mapped to nonzero
- elements of the field. :n:`@term` is a proof of
+ elements of the field. :n:`@one_term` is a proof of
:g:`forall x y, [x] == [y] -> x ?=! y = true`,
which is the completeness of equality on coefficients
w.r.t. the field equality.
diff --git a/doc/sphinx/addendum/universe-polymorphism.rst b/doc/sphinx/addendum/universe-polymorphism.rst
index c069782add..0e326f45d2 100644
--- a/doc/sphinx/addendum/universe-polymorphism.rst
+++ b/doc/sphinx/addendum/universe-polymorphism.rst
@@ -372,16 +372,11 @@ to universes and explicitly instantiate polymorphic definitions.
universe quantification will be discharged on each section definition
independently.
-.. cmd:: Constraint @universe_constraint
- Polymorphic Constraint @universe_constraint
+.. cmd:: Constraint @univ_constraint
+ Polymorphic Constraint @univ_constraint
This command declares a new constraint between named universes.
- .. productionlist:: coq
- universe_constraint : `qualid` < `qualid`
- : `qualid` <= `qualid`
- : `qualid` = `qualid`
-
If consistent, the constraint is then enforced in the global
environment. Like :cmd:`Universe`, it can be used with the
``Polymorphic`` prefix in sections only to declare constraints
diff --git a/doc/sphinx/conf.py b/doc/sphinx/conf.py
index 22102aa3ab..d864f8549d 100755
--- a/doc/sphinx/conf.py
+++ b/doc/sphinx/conf.py
@@ -183,9 +183,9 @@ todo_include_todos = False
nitpicky = True
nitpick_ignore = [ ('token', token) for token in [
+ 'assums',
'binders',
'collection',
- 'command',
'definition',
'dirpath',
'inductive',
@@ -194,7 +194,6 @@ nitpick_ignore = [ ('token', token) for token in [
'module',
'simple_tactic',
'symbol',
- 'tactic',
'term_pattern',
'term_pattern_string',
'toplevel_selector',
diff --git a/doc/sphinx/language/gallina-specification-language.rst b/doc/sphinx/language/gallina-specification-language.rst
index e12ff1ba98..4f0cf5f815 100644
--- a/doc/sphinx/language/gallina-specification-language.rst
+++ b/doc/sphinx/language/gallina-specification-language.rst
@@ -158,6 +158,8 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`.
| @term1
arg ::= ( @ident := @term )
| @term1
+ one_term ::= @term1
+ | @ @qualid {? @univ_annot }
term1 ::= @term_projection
| @term0 % @ident
| @term0
@@ -175,6 +177,13 @@ is described in Chapter :ref:`syntaxextensionsandinterpretationscopes`.
| ltac : ( @ltac_expr )
field_def ::= @qualid {* @binder } := @term
+.. note::
+
+ Many commands and tactics use :n:`@one_term` rather than :n:`@term`.
+ The former need to be enclosed in parentheses unless they're very
+ simple, such as a single identifier. This avoids confusing a space-separated
+ list of terms with a :n:`@term1` applied to a list of arguments.
+
.. _types:
Types
@@ -591,17 +600,15 @@ Sections :ref:`if-then-else` and :ref:`irrefutable-patterns`).
Recursive and co-recursive functions: fix and cofix
---------------------------------------------------
-.. insertprodn term_fix term1_extended
+.. insertprodn term_fix fixannot
.. prodn::
term_fix ::= let fix @fix_body in @term
| fix @fix_body {? {+ with @fix_body } for @ident }
fix_body ::= @ident {* @binder } {? @fixannot } {? : @type } := @term
fixannot ::= %{ struct @ident %}
- | %{ wf @term1_extended @ident %}
- | %{ measure @term1_extended {? @ident } {? @term1_extended } %}
- term1_extended ::= @term1
- | @ @qualid {? @univ_annot }
+ | %{ wf @one_term @ident %}
+ | %{ measure @one_term {? @ident } {? @one_term } %}
The expression ":n:`fix @ident__1 @binder__1 : @type__1 := @term__1 with … with @ident__n @binder__n : @type__n := @term__n for @ident__i`" denotes the
@@ -1472,11 +1479,11 @@ Computations
| vm_compute {? @ref_or_pattern_occ }
| native_compute {? @ref_or_pattern_occ }
| unfold {+, @unfold_occ }
- | fold {+ @term1_extended }
+ | fold {+ @one_term }
| pattern {+, @pattern_occ }
| @ident
- delta_flag ::= {? - } [ {+ @smart_global } ]
- smart_global ::= @qualid
+ delta_flag ::= {? - } [ {+ @smart_qualid } ]
+ smart_qualid ::= @qualid
| @by_notation
by_notation ::= @string {? % @ident }
strategy_flag ::= {+ @red_flags }
@@ -1488,16 +1495,16 @@ Computations
| cofix
| zeta
| delta {? @delta_flag }
- ref_or_pattern_occ ::= @smart_global {? at @occs_nums }
- | @term1_extended {? at @occs_nums }
+ ref_or_pattern_occ ::= @smart_qualid {? at @occs_nums }
+ | @one_term {? at @occs_nums }
occs_nums ::= {+ @num_or_var }
| - @num_or_var {* @int_or_var }
num_or_var ::= @num
| @ident
int_or_var ::= @int
| @ident
- unfold_occ ::= @smart_global {? at @occs_nums }
- pattern_occ ::= @term1_extended {? at @occs_nums }
+ unfold_occ ::= @smart_qualid {? at @occs_nums }
+ pattern_occ ::= @one_term {? at @occs_nums }
See :ref:`Conversion-rules`.
diff --git a/doc/sphinx/practical-tools/utilities.rst b/doc/sphinx/practical-tools/utilities.rst
index 179dff9959..514353e39b 100644
--- a/doc/sphinx/practical-tools/utilities.rst
+++ b/doc/sphinx/practical-tools/utilities.rst
@@ -505,34 +505,41 @@ Building a |Coq| project with Dune
.. note::
+ Dune's Coq support is still experimental; we strongly recommend
+ using Dune 2.3 or later.
+
+.. note::
+
The canonical documentation for the Coq Dune extension is
maintained upstream; please refer to the `Dune manual
- <https://dune.readthedocs.io/>`_ for up-to-date information.
+ <https://dune.readthedocs.io/>`_ for up-to-date information. This
+ documentation is up to date for Dune 2.3.
Building a Coq project with Dune requires setting up a Dune project
for your files. This involves adding a ``dune-project`` and
-``pkg.opam`` file to the root (``pkg.opam`` can be empty), and then
-providing ``dune`` files in the directories your ``.v`` files are
-placed. For the experimental version "0.1" of the Coq Dune language,
-|Coq| library stanzas look like:
+``pkg.opam`` file to the root (``pkg.opam`` can be empty or generated
+by Dune itself), and then providing ``dune`` files in the directories
+your ``.v`` files are placed. For the experimental version "0.1" of
+the Coq Dune language, |Coq| library stanzas look like:
.. code:: scheme
- (coqlib
+ (coq.theory
(name <module_prefix>)
- (public_name <package.lib_name>)
+ (package <opam_package>)
(synopsis <text>)
(modules <ordered_set_lang>)
(libraries <ocaml_libraries>)
(flags <coq_flags>))
This stanza will build all `.v` files in the given directory, wrapping
-the library under ``<module_prefix>``. If you declare a
-``<package.lib_name>`` a ``.install`` file for the library will be
-generated; the optional ``<modules>`` field allows you to filter
-the list of modules, and ``<libraries>`` allows to depend on ML
-plugins. For the moment, Dune relies on Coq's standard mechanisms
-(such as ``COQPATH``) to locate installed Coq libraries.
+the library under ``<module_prefix>``. If you declare an
+``<opam_package>``, an ``.install`` file for the library will be
+generated; the optional ``(modules <ordered_set_lang>)`` field allows
+you to filter the list of modules, and ``(libraries
+<ocaml_libraries>)`` allows the Coq theory to depend on ML plugins. For
+the moment, Dune relies on Coq's standard mechanisms (such as
+``COQPATH``) to locate installed Coq libraries.
By default Dune will skip ``.v`` files present in subdirectories. In
order to enable the usual recursive organization of Coq projects add
@@ -565,9 +572,9 @@ of your project.
.. code:: scheme
- (coqlib
+ (coq.theory
(name Equations) ; -R flag
- (public_name equations.Equations)
+ (package equations)
(synopsis "Equations Plugin")
(libraries coq.plugins.extraction equations.plugin)
(modules :standard \ IdDec NoCycle)) ; exclude some modules that don't build
diff --git a/doc/sphinx/proof-engine/tactics.rst b/doc/sphinx/proof-engine/tactics.rst
index 2bfd810ea1..4f2f74aae4 100644
--- a/doc/sphinx/proof-engine/tactics.rst
+++ b/doc/sphinx/proof-engine/tactics.rst
@@ -72,7 +72,7 @@ specified, the default selector is used.
.. _bindingslist:
Bindings list
-~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~
Tactics that take a term as an argument may also support a bindings list
to instantiate some parameters of the term by name or position.
diff --git a/doc/sphinx/user-extensions/syntax-extensions.rst b/doc/sphinx/user-extensions/syntax-extensions.rst
index fd95a5cef4..669975ba7e 100644
--- a/doc/sphinx/user-extensions/syntax-extensions.rst
+++ b/doc/sphinx/user-extensions/syntax-extensions.rst
@@ -947,7 +947,7 @@ notations are given below. The optional :production:`scope` is described in
.. prodn::
decl_notations ::= where @decl_notation {* and @decl_notation }
- decl_notation ::= @string := @term1_extended [(only parsing)] {? : @ident }
+ decl_notation ::= @string := @one_term {? ( only parsing ) } {? : @ident }
.. note:: No typing of the denoted expression is performed at definition
time. Type checking is done only at the time of use of the notation.
@@ -1194,7 +1194,7 @@ Binding arguments of a constant to an interpretation scope
Binding types of arguments to an interpretation scope
+++++++++++++++++++++++++++++++++++++++++++++++++++++
-.. cmd:: Bind Scope @scope with @qualid
+.. cmd:: Bind Scope @ident with {+ @class }
When an interpretation scope is naturally associated to a type (e.g. the
scope of operations on the natural numbers), it may be convenient to bind it
diff --git a/doc/tools/coqrst/coqdomain.py b/doc/tools/coqrst/coqdomain.py
index d6ecf311f1..4d5c837e5c 100644
--- a/doc/tools/coqrst/coqdomain.py
+++ b/doc/tools/coqrst/coqdomain.py
@@ -294,7 +294,7 @@ class VernacObject(NotationObject):
Example::
- .. cmd:: Infix @string := @term1_extended {? ( {+, @syntax_modifier } ) } {? : @ident }
+ .. cmd:: Infix @string := @one_term {? ( {+, @syntax_modifier } ) } {? : @ident }
This command is equivalent to :n:`…`.
"""
diff --git a/doc/tools/docgram/common.edit_mlg b/doc/tools/docgram/common.edit_mlg
index 3524d77380..7a165988a6 100644
--- a/doc/tools/docgram/common.edit_mlg
+++ b/doc/tools/docgram/common.edit_mlg
@@ -57,14 +57,15 @@ DELETE: [
| check_for_coloneq
| local_test_lpar_id_colon
| lookup_at_as_comma
-| only_starredidentrefs
+| test_only_starredidentrefs
| test_bracket_ident
| test_lpar_id_colon
| test_lpar_id_coloneq (* todo: grammar seems incorrect, repeats the "(" IDENT ":=" *)
| test_lpar_id_rpar
| test_lpar_idnum_coloneq
-| test_nospace_pipe_closedcurly
| test_show_goal
+| test_name_colon
+| test_pipe_closedcurly
| ensure_fixannot
(* SSR *)
@@ -332,8 +333,8 @@ typeclass_constraint: [
| EDIT ADD_OPT "!" operconstr200
| REPLACE "{" name "}" ":" [ "!" | ] operconstr200
| WITH "{" name "}" ":" OPT "!" operconstr200
-| REPLACE name_colon [ "!" | ] operconstr200
-| WITH name_colon OPT "!" operconstr200
+| REPLACE name ":" [ "!" | ] operconstr200
+| WITH name ":" OPT "!" operconstr200
]
(* ?? From the grammar, Prim.name seems to be only "_" but ident is also accepted "*)
@@ -409,19 +410,6 @@ DELETE: [
| cumulativity_token
]
-opt_coercion: [
-| OPTINREF
-]
-
-opt_constructors_or_fields: [
-| OPTINREF
-]
-
-SPLICE: [
-| opt_coercion
-| opt_constructors_or_fields
-]
-
constructor_list_or_record_decl: [
| OPTINREF
]
@@ -433,11 +421,6 @@ record_fields: [
| DELETE (* empty *)
]
-decl_notation: [
-| REPLACE "where" LIST1 one_decl_notation SEP decl_sep
-| WITH "where" one_decl_notation LIST0 ( decl_sep one_decl_notation )
-]
-
assumptions_token: [
| DELETENT
]
@@ -767,13 +750,13 @@ vernacular: [
]
rec_definition: [
-| REPLACE ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation
-| WITH ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation
+| REPLACE ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notations
+| WITH ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notations
]
corec_definition: [
-| REPLACE ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation
-| WITH ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation
+| REPLACE ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notations
+| WITH ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notations
]
type_cstr: [
@@ -782,13 +765,9 @@ type_cstr: [
| OPTINREF
]
-decl_notation: [
-| OPTINREF
-]
-
inductive_definition: [
-| REPLACE OPT ">" ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] OPT ( ":=" OPT constructor_list_or_record_decl ) OPT decl_notation
-| WITH OPT ">" ident_decl binders OPT [ "|" binders ] OPT [ ":" type ] OPT ( ":=" OPT constructor_list_or_record_decl ) OPT decl_notation
+| REPLACE opt_coercion ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notations
+| WITH opt_coercion ident_decl binders OPT [ "|" binders ] OPT [ ":" type ] opt_constructors_or_fields decl_notations
]
constructor_list_or_record_decl: [
@@ -807,6 +786,31 @@ record_binder: [
| DELETE name
]
+in_clause: [
+| DELETE in_clause'
+| REPLACE LIST0 hypident_occ SEP "," "|-" concl_occ
+| WITH LIST0 hypident_occ SEP "," OPT ( "|-" concl_occ )
+| DELETE LIST0 hypident_occ SEP ","
+]
+
+concl_occ: [
+| OPTINREF
+]
+
+opt_coercion: [
+| OPTINREF
+]
+
+opt_constructors_or_fields: [
+| OPTINREF
+]
+
+decl_notations: [
+| REPLACE "where" LIST1 decl_notation SEP decl_sep
+| WITH "where" decl_notation LIST0 ( decl_sep decl_notation )
+| OPTINREF
+]
+
SPLICE: [
| noedit_mode
| command_entry
@@ -941,11 +945,12 @@ SPLICE: [
| record_fields
| constructor_type
| record_binder
+| opt_coercion
+| opt_constructors_or_fields
] (* end SPLICE *)
RENAME: [
| clause clause_dft_concl
-| in_clause' in_clause
| tactic3 ltac_expr3 (* todo: can't figure out how this gets mapped by coqpp *)
| tactic1 ltac_expr1 (* todo: can't figure out how this gets mapped by coqpp *)
@@ -980,7 +985,7 @@ RENAME: [
| nat_or_var num_or_var
| fix_decl fix_body
| cofix_decl cofix_body
-| constr term1_extended
+| constr one_term
| appl_arg arg
| rec_definition fix_definition
| corec_definition cofix_definition
@@ -988,12 +993,12 @@ RENAME: [
| univ_instance univ_annot
| simple_assum_coe assumpt
| of_type_with_opt_coercion of_type
-| decl_notation decl_notations
-| one_decl_notation decl_notation
| attribute attr
| attribute_value attr_value
| constructor_list_or_record_decl constructors_or_record
| record_binder_body field_body
+| class_rawexpr class
+| smart_global smart_qualid
]
diff --git a/doc/tools/docgram/doc_grammar.ml b/doc/tools/docgram/doc_grammar.ml
index 5fcb56f5f2..366b70a1f7 100644
--- a/doc/tools/docgram/doc_grammar.ml
+++ b/doc/tools/docgram/doc_grammar.ml
@@ -1717,7 +1717,7 @@ let process_rst g file args seen tac_prods cmd_prods =
else begin
let line3 = getline() in
if not (Str.string_match dir_regex line3 0) || (Str.matched_group 2 line3) <> "prodn::" then
- error "%s line %d: expecting 'prodn' after 'insertprodn'\n" file !linenum
+ error "%s line %d: expecting '.. prodn::' after 'insertprodn'\n" file !linenum
else begin
let indent = Str.matched_group 1 line3 in
let rec skip_to_end () =
diff --git a/doc/tools/docgram/fullGrammar b/doc/tools/docgram/fullGrammar
index 529d81e424..6897437457 100644
--- a/doc/tools/docgram/fullGrammar
+++ b/doc/tools/docgram/fullGrammar
@@ -152,7 +152,7 @@ binder_constr: [
]
appl_arg: [
-| lpar_id_coloneq lconstr ")"
+| test_lpar_id_coloneq "(" ident ":=" lconstr ")"
| operconstr9
]
@@ -335,7 +335,7 @@ closed_binder: [
typeclass_constraint: [
| "!" operconstr200
| "{" name "}" ":" [ "!" | ] operconstr200
-| name_colon [ "!" | ] operconstr200
+| test_name_colon name ":" [ "!" | ] operconstr200
| operconstr200
]
@@ -449,7 +449,7 @@ bigint: [
]
bar_cbrace: [
-| test_nospace_pipe_closedcurly "|" "}"
+| test_pipe_closedcurly "|" "}"
]
vernac_toplevel: [
@@ -511,8 +511,8 @@ command: [
| "Load" [ "Verbose" | ] [ ne_string | IDENT ]
| "Declare" "ML" "Module" LIST1 ne_string
| "Locate" locatable
-| "Add" "LoadPath" ne_string as_dirpath
-| "Add" "Rec" "LoadPath" ne_string as_dirpath
+| "Add" "LoadPath" ne_string "as" dirpath
+| "Add" "Rec" "LoadPath" ne_string "as" dirpath
| "Remove" "LoadPath" ne_string
| "Type" lconstr
| "Print" printable
@@ -522,7 +522,6 @@ command: [
| "Print" "Namespace" dirpath
| "Inspect" natural
| "Add" "ML" "Path" ne_string
-| "Add" "Rec" "ML" "Path" ne_string
| "Set" option_table option_setting
| "Unset" option_table
| "Print" "Table" option_table
@@ -655,6 +654,7 @@ command: [
| "Add" "CstOp" constr (* micromega plugin *)
| "Add" "BinRel" constr (* micromega plugin *)
| "Add" "PropOp" constr (* micromega plugin *)
+| "Add" "PropBinOp" constr (* micromega plugin *)
| "Add" "PropUOp" constr (* micromega plugin *)
| "Add" "Spec" constr (* micromega plugin *)
| "Add" "BinOpSpec" constr (* micromega plugin *)
@@ -924,16 +924,16 @@ reduce: [
|
]
-one_decl_notation: [
-| ne_lstring ":=" constr OPT [ ":" IDENT ]
+decl_notation: [
+| ne_lstring ":=" constr only_parsing OPT [ ":" IDENT ]
]
decl_sep: [
| "and"
]
-decl_notation: [
-| "where" LIST1 one_decl_notation SEP decl_sep
+decl_notations: [
+| "where" LIST1 decl_notation SEP decl_sep
|
]
@@ -943,7 +943,7 @@ opt_constructors_or_fields: [
]
inductive_definition: [
-| opt_coercion ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notation
+| opt_coercion ident_decl binders OPT [ "|" binders ] OPT [ ":" lconstr ] opt_constructors_or_fields decl_notations
]
constructor_list_or_record_decl: [
@@ -961,11 +961,11 @@ opt_coercion: [
]
rec_definition: [
-| ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notation
+| ident_decl binders_fixannot type_cstr OPT [ ":=" lconstr ] decl_notations
]
corec_definition: [
-| ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notation
+| ident_decl binders type_cstr OPT [ ":=" lconstr ] decl_notations
]
scheme: [
@@ -982,7 +982,7 @@ scheme_kind: [
]
record_field: [
-| LIST0 quoted_attributes record_binder OPT [ "|" natural ] decl_notation
+| LIST0 quoted_attributes record_binder OPT [ "|" natural ] decl_notations
]
record_fields: [
@@ -1148,7 +1148,7 @@ module_type: [
]
section_subset_expr: [
-| only_starredidentrefs LIST0 starredidentref
+| test_only_starredidentrefs LIST0 starredidentref
| ssexpr35
]
@@ -1172,8 +1172,8 @@ ssexpr50: [
ssexpr0: [
| starredidentref
-| "(" only_starredidentrefs LIST0 starredidentref ")"
-| "(" only_starredidentrefs LIST0 starredidentref ")" "*"
+| "(" test_only_starredidentrefs LIST0 starredidentref ")"
+| "(" test_only_starredidentrefs LIST0 starredidentref ")" "*"
| "(" ssexpr35 ")"
| "(" ssexpr35 ")" "*"
]
@@ -1331,10 +1331,6 @@ option_table: [
| LIST1 IDENT
]
-as_dirpath: [
-| OPT [ "as" dirpath ]
-]
-
ne_in_or_out_modules: [
| "inside" LIST1 global
| "outside" LIST1 global
@@ -1684,6 +1680,8 @@ simple_tactic: [
| "eenough" test_lpar_id_colon "(" identref ":" lconstr ")" by_tactic
| "assert" constr as_ipat by_tactic
| "eassert" constr as_ipat by_tactic
+| "pose" "proof" test_lpar_id_coloneq "(" identref ":=" lconstr ")"
+| "epose" "proof" test_lpar_id_coloneq "(" identref ":=" lconstr ")"
| "pose" "proof" lconstr as_ipat
| "epose" "proof" lconstr as_ipat
| "enough" constr as_ipat by_tactic
@@ -1740,10 +1738,11 @@ simple_tactic: [
| "psatz_R" tactic (* micromega plugin *)
| "psatz_Q" int_or_var tactic (* micromega plugin *)
| "psatz_Q" tactic (* micromega plugin *)
-| "zify_iter_specs" tactic (* micromega plugin *)
+| "zify_iter_specs" (* micromega plugin *)
| "zify_op" (* micromega plugin *)
| "zify_saturate" (* micromega plugin *)
| "zify_iter_let" tactic (* micromega plugin *)
+| "zify_elim_let" (* micromega plugin *)
| "nsatz_compute" constr (* nsatz plugin *)
| "omega" (* omega plugin *)
| "rtauto"
diff --git a/doc/tools/docgram/orderedGrammar b/doc/tools/docgram/orderedGrammar
index 908e3ccd51..f26a174722 100644
--- a/doc/tools/docgram/orderedGrammar
+++ b/doc/tools/docgram/orderedGrammar
@@ -58,6 +58,11 @@ arg: [
| term1
]
+one_term: [
+| term1
+| "@" qualid OPT univ_annot
+]
+
term1: [
| term_projection
| term0 "%" ident
@@ -238,13 +243,8 @@ fix_body: [
fixannot: [
| "{" "struct" ident "}"
-| "{" "wf" term1_extended ident "}"
-| "{" "measure" term1_extended OPT ident OPT term1_extended "}"
-]
-
-term1_extended: [
-| term1
-| "@" qualid OPT univ_annot
+| "{" "wf" one_term ident "}"
+| "{" "measure" one_term OPT ident OPT one_term "}"
]
term_cofix: [
@@ -400,7 +400,7 @@ decl_notations: [
]
decl_notation: [
-| string ":=" term1_extended OPT [ ":" ident ]
+| string ":=" one_term OPT ( "(" "only" "parsing" ")" ) OPT [ ":" ident ]
]
register_token: [
@@ -484,16 +484,16 @@ red_expr: [
| "vm_compute" OPT ref_or_pattern_occ
| "native_compute" OPT ref_or_pattern_occ
| "unfold" LIST1 unfold_occ SEP ","
-| "fold" LIST1 term1_extended
+| "fold" LIST1 one_term
| "pattern" LIST1 pattern_occ SEP ","
| ident
]
delta_flag: [
-| OPT "-" "[" LIST1 smart_global "]"
+| OPT "-" "[" LIST1 smart_qualid "]"
]
-smart_global: [
+smart_qualid: [
| qualid
| by_notation
]
@@ -518,8 +518,8 @@ red_flags: [
]
ref_or_pattern_occ: [
-| smart_global OPT ( "at" occs_nums )
-| term1_extended OPT ( "at" occs_nums )
+| smart_qualid OPT ( "at" occs_nums )
+| one_term OPT ( "at" occs_nums )
]
occs_nums: [
@@ -538,11 +538,11 @@ int_or_var: [
]
unfold_occ: [
-| smart_global OPT ( "at" occs_nums )
+| smart_qualid OPT ( "at" occs_nums )
]
pattern_occ: [
-| term1_extended OPT ( "at" occs_nums )
+| one_term OPT ( "at" occs_nums )
]
finite_token: [
@@ -587,11 +587,11 @@ scheme: [
]
scheme_kind: [
-| "Induction" "for" smart_global "Sort" sort_family
-| "Minimality" "for" smart_global "Sort" sort_family
-| "Elimination" "for" smart_global "Sort" sort_family
-| "Case" "for" smart_global "Sort" sort_family
-| "Equality" "for" smart_global
+| "Induction" "for" smart_qualid "Sort" sort_family
+| "Minimality" "for" smart_qualid "Sort" sort_family
+| "Elimination" "for" smart_qualid "Sort" sort_family
+| "Case" "for" smart_qualid "Sort" sort_family
+| "Equality" "for" smart_qualid
]
sort_family: [
@@ -615,21 +615,21 @@ gallina_ext: [
| "Export" LIST1 qualid
| "Include" module_type_inl LIST0 ( "<+" module_expr_inl )
| "Include" "Type" module_type_inl LIST0 ( "<+" module_type_inl )
-| "Transparent" LIST1 smart_global
-| "Opaque" LIST1 smart_global
-| "Strategy" LIST1 [ strategy_level "[" LIST1 smart_global "]" ]
+| "Transparent" LIST1 smart_qualid
+| "Opaque" LIST1 smart_qualid
+| "Strategy" LIST1 [ strategy_level "[" LIST1 smart_qualid "]" ]
| "Canonical" OPT "Structure" qualid OPT [ OPT univ_decl def_body ]
| "Canonical" OPT "Structure" by_notation
| "Coercion" qualid OPT univ_decl def_body
-| "Identity" "Coercion" ident ":" class_rawexpr ">->" class_rawexpr
-| "Coercion" qualid ":" class_rawexpr ">->" class_rawexpr
-| "Coercion" by_notation ":" class_rawexpr ">->" class_rawexpr
+| "Identity" "Coercion" ident ":" class ">->" class
+| "Coercion" qualid ":" class ">->" class
+| "Coercion" by_notation ":" class ">->" class
| "Context" LIST1 binder
| "Instance" instance_name ":" term hint_info [ ":=" "{" [ LIST1 field_def SEP ";" | ] "}" | ":=" term | ]
| "Existing" "Instance" qualid hint_info
| "Existing" "Instances" LIST1 qualid OPT [ "|" num ]
| "Existing" "Class" qualid
-| "Arguments" smart_global LIST0 argument_spec_block LIST0 [ "," LIST0 more_implicits_block ] OPT [ ":" LIST1 arguments_modifier SEP "," ]
+| "Arguments" smart_qualid LIST0 argument_spec_block LIST0 [ "," LIST0 more_implicits_block ] OPT [ ":" LIST1 arguments_modifier SEP "," ]
| "Implicit" "Type" reserv_list
| "Implicit" "Types" reserv_list
| "Generalizable" [ "All" "Variables" | "No" "Variables" | [ "Variable" | "Variables" ] LIST1 ident ]
@@ -643,14 +643,8 @@ option_setting: [
| string
]
-class_rawexpr: [
-| "Funclass"
-| "Sortclass"
-| smart_global
-]
-
hint_info: [
-| "|" OPT num OPT term1_extended
+| "|" OPT num OPT one_term
|
]
@@ -780,11 +774,11 @@ command: [
| "Load" [ "Verbose" | ] [ string | ident ]
| "Declare" "ML" "Module" LIST1 string
| "Locate" locatable
-| "Add" "LoadPath" string as_dirpath
-| "Add" "Rec" "LoadPath" string as_dirpath
+| "Add" "LoadPath" string "as" dirpath
+| "Add" "Rec" "LoadPath" string "as" dirpath
| "Remove" "LoadPath" string
| "Type" term
-| "Print" "Term" smart_global OPT ( "@{" LIST0 name "}" )
+| "Print" "Term" smart_qualid OPT ( "@{" LIST0 name "}" )
| "Print" "All"
| "Print" "Section" qualid
| "Print" "Grammar" ident
@@ -798,36 +792,35 @@ command: [
| "Print" "Graph"
| "Print" "Classes"
| "Print" "TypeClasses"
-| "Print" "Instances" smart_global
+| "Print" "Instances" smart_qualid
| "Print" "Coercions"
-| "Print" "Coercion" "Paths" class_rawexpr class_rawexpr
-| "Print" "Canonical" "Projections" LIST0 smart_global
+| "Print" "Coercion" "Paths" class class
+| "Print" "Canonical" "Projections" LIST0 smart_qualid
| "Print" "Typing" "Flags"
| "Print" "Tables"
| "Print" "Options"
| "Print" "Hint"
-| "Print" "Hint" smart_global
+| "Print" "Hint" smart_qualid
| "Print" "Hint" "*"
| "Print" "HintDb" ident
| "Print" "Scopes"
| "Print" "Scope" ident
| "Print" "Visibility" OPT ident
-| "Print" "Implicit" smart_global
+| "Print" "Implicit" smart_qualid
| "Print" OPT "Sorted" "Universes" OPT ( "Subgraph" "(" LIST0 qualid ")" ) OPT string
-| "Print" "Assumptions" smart_global
-| "Print" "Opaque" "Dependencies" smart_global
-| "Print" "Transparent" "Dependencies" smart_global
-| "Print" "All" "Dependencies" smart_global
-| "Print" "Strategy" smart_global
+| "Print" "Assumptions" smart_qualid
+| "Print" "Opaque" "Dependencies" smart_qualid
+| "Print" "Transparent" "Dependencies" smart_qualid
+| "Print" "All" "Dependencies" smart_qualid
+| "Print" "Strategy" smart_qualid
| "Print" "Strategies"
| "Print" "Registered"
-| "Print" smart_global OPT ( "@{" LIST0 name "}" )
+| "Print" smart_qualid OPT ( "@{" LIST0 name "}" )
| "Print" "Module" "Type" qualid
| "Print" "Module" qualid
| "Print" "Namespace" dirpath
| "Inspect" num
| "Add" "ML" "Path" string
-| "Add" "Rec" "ML" "Path" string
| "Set" LIST1 ident option_setting
| "Unset" LIST1 ident
| "Print" "Table" LIST1 ident
@@ -849,7 +842,7 @@ command: [
| "Debug" "Off"
| "Declare" "Reduction" ident ":=" red_expr
| "Declare" "Custom" "Entry" ident
-| "Derive" ident "SuchThat" term1_extended "As" ident (* derive plugin *)
+| "Derive" ident "SuchThat" one_term "As" ident (* derive plugin *)
| "Proof"
| "Proof" "Mode" string
| "Proof" term
@@ -907,31 +900,31 @@ command: [
| "Obligations"
| "Preterm" "of" ident
| "Preterm"
-| "Add" "Relation" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Relation" term1_extended term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "reflexivity" "proved" "by" term1_extended "symmetry" "proved" "by" term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Parametric" "Relation" LIST0 binder ":" term1_extended term1_extended "transitivity" "proved" "by" term1_extended "as" ident
-| "Add" "Setoid" term1_extended term1_extended term1_extended "as" ident
-| "Add" "Parametric" "Setoid" LIST0 binder ":" term1_extended term1_extended term1_extended "as" ident
-| "Add" "Morphism" term1_extended ":" ident
-| "Declare" "Morphism" term1_extended ":" ident
-| "Add" "Morphism" term1_extended "with" "signature" term "as" ident
-| "Add" "Parametric" "Morphism" LIST0 binder ":" term1_extended "with" "signature" term "as" ident
+| "Add" "Relation" one_term one_term "reflexivity" "proved" "by" one_term "symmetry" "proved" "by" one_term "as" ident
+| "Add" "Relation" one_term one_term "reflexivity" "proved" "by" one_term "as" ident
+| "Add" "Relation" one_term one_term "as" ident
+| "Add" "Relation" one_term one_term "symmetry" "proved" "by" one_term "as" ident
+| "Add" "Relation" one_term one_term "symmetry" "proved" "by" one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Relation" one_term one_term "reflexivity" "proved" "by" one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Relation" one_term one_term "reflexivity" "proved" "by" one_term "symmetry" "proved" "by" one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Relation" one_term one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "reflexivity" "proved" "by" one_term "symmetry" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "reflexivity" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "symmetry" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "symmetry" "proved" "by" one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "reflexivity" "proved" "by" one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "reflexivity" "proved" "by" one_term "symmetry" "proved" "by" one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Parametric" "Relation" LIST0 binder ":" one_term one_term "transitivity" "proved" "by" one_term "as" ident
+| "Add" "Setoid" one_term one_term one_term "as" ident
+| "Add" "Parametric" "Setoid" LIST0 binder ":" one_term one_term one_term "as" ident
+| "Add" "Morphism" one_term ":" ident
+| "Declare" "Morphism" one_term ":" ident
+| "Add" "Morphism" one_term "with" "signature" term "as" ident
+| "Add" "Parametric" "Morphism" LIST0 binder ":" one_term "with" "signature" term "as" ident
| "Grab" "Existential" "Variables"
| "Unshelve"
-| "Declare" "Equivalent" "Keys" term1_extended term1_extended
+| "Declare" "Equivalent" "Keys" one_term one_term
| "Print" "Equivalent" "Keys"
| "Optimize" "Proof"
| "Optimize" "Heap"
@@ -940,24 +933,25 @@ command: [
| "Show" "Ltac" "Profile" "CutOff" int
| "Show" "Ltac" "Profile" string
| "Show" "Lia" "Profile" (* micromega plugin *)
-| "Add" "InjTyp" term1_extended (* micromega plugin *)
-| "Add" "BinOp" term1_extended (* micromega plugin *)
-| "Add" "UnOp" term1_extended (* micromega plugin *)
-| "Add" "CstOp" term1_extended (* micromega plugin *)
-| "Add" "BinRel" term1_extended (* micromega plugin *)
-| "Add" "PropOp" term1_extended (* micromega plugin *)
-| "Add" "PropUOp" term1_extended (* micromega plugin *)
-| "Add" "Spec" term1_extended (* micromega plugin *)
-| "Add" "BinOpSpec" term1_extended (* micromega plugin *)
-| "Add" "UnOpSpec" term1_extended (* micromega plugin *)
-| "Add" "Saturate" term1_extended (* micromega plugin *)
+| "Add" "InjTyp" one_term (* micromega plugin *)
+| "Add" "BinOp" one_term (* micromega plugin *)
+| "Add" "UnOp" one_term (* micromega plugin *)
+| "Add" "CstOp" one_term (* micromega plugin *)
+| "Add" "BinRel" one_term (* micromega plugin *)
+| "Add" "PropOp" one_term (* micromega plugin *)
+| "Add" "PropBinOp" one_term (* micromega plugin *)
+| "Add" "PropUOp" one_term (* micromega plugin *)
+| "Add" "Spec" one_term (* micromega plugin *)
+| "Add" "BinOpSpec" one_term (* micromega plugin *)
+| "Add" "UnOpSpec" one_term (* micromega plugin *)
+| "Add" "Saturate" one_term (* micromega plugin *)
| "Show" "Zify" "InjTyp" (* micromega plugin *)
| "Show" "Zify" "BinOp" (* micromega plugin *)
| "Show" "Zify" "UnOp" (* micromega plugin *)
| "Show" "Zify" "CstOp" (* micromega plugin *)
| "Show" "Zify" "BinRel" (* micromega plugin *)
| "Show" "Zify" "Spec" (* micromega plugin *)
-| "Add" "Ring" ident ":" term1_extended OPT ( "(" LIST1 ring_mod SEP "," ")" ) (* setoid_ring plugin *)
+| "Add" "Ring" ident ":" one_term OPT ( "(" LIST1 ring_mod SEP "," ")" ) (* setoid_ring plugin *)
| "Hint" "Cut" "[" hints_path "]" opthints
| "Typeclasses" "Transparent" LIST0 qualid
| "Typeclasses" "Opaque" LIST0 qualid
@@ -996,20 +990,20 @@ command: [
| "Show" "Extraction" (* extraction plugin *)
| "Functional" "Case" fun_scheme_arg (* funind plugin *)
| "Generate" "graph" "for" qualid (* funind plugin *)
-| "Hint" "Rewrite" orient LIST1 term1_extended ":" LIST0 ident
-| "Hint" "Rewrite" orient LIST1 term1_extended "using" ltac_expr ":" LIST0 ident
-| "Hint" "Rewrite" orient LIST1 term1_extended
-| "Hint" "Rewrite" orient LIST1 term1_extended "using" ltac_expr
-| "Derive" "Inversion_clear" ident "with" term1_extended "Sort" sort_family
-| "Derive" "Inversion_clear" ident "with" term1_extended
-| "Derive" "Inversion" ident "with" term1_extended "Sort" sort_family
-| "Derive" "Inversion" ident "with" term1_extended
-| "Derive" "Dependent" "Inversion" ident "with" term1_extended "Sort" sort_family
-| "Derive" "Dependent" "Inversion_clear" ident "with" term1_extended "Sort" sort_family
-| "Declare" "Left" "Step" term1_extended
-| "Declare" "Right" "Step" term1_extended
+| "Hint" "Rewrite" orient LIST1 one_term ":" LIST0 ident
+| "Hint" "Rewrite" orient LIST1 one_term "using" ltac_expr ":" LIST0 ident
+| "Hint" "Rewrite" orient LIST1 one_term
+| "Hint" "Rewrite" orient LIST1 one_term "using" ltac_expr
+| "Derive" "Inversion_clear" ident "with" one_term "Sort" sort_family
+| "Derive" "Inversion_clear" ident "with" one_term
+| "Derive" "Inversion" ident "with" one_term "Sort" sort_family
+| "Derive" "Inversion" ident "with" one_term
+| "Derive" "Dependent" "Inversion" ident "with" one_term "Sort" sort_family
+| "Derive" "Dependent" "Inversion_clear" ident "with" one_term "Sort" sort_family
+| "Declare" "Left" "Step" one_term
+| "Declare" "Right" "Step" one_term
| "Print" "Rings" (* setoid_ring plugin *)
-| "Add" "Field" ident ":" term1_extended OPT ( "(" LIST1 field_mod SEP "," ")" ) (* setoid_ring plugin *)
+| "Add" "Field" ident ":" one_term OPT ( "(" LIST1 field_mod SEP "," ")" ) (* setoid_ring plugin *)
| "Print" "Fields" (* setoid_ring plugin *)
| "Numeral" "Notation" qualid qualid qualid ":" ident OPT numnotoption
| "String" "Notation" qualid qualid qualid ":" ident
@@ -1059,8 +1053,8 @@ dirpath: [
]
locatable: [
-| smart_global
-| "Term" smart_global
+| smart_qualid
+| "Term" smart_qualid
| "File" string
| "Library" qualid
| "Module" qualid
@@ -1071,19 +1065,15 @@ option_ref_value: [
| string
]
-as_dirpath: [
-| OPT [ "as" dirpath ]
-]
-
comment: [
-| term1_extended
+| one_term
| string
| num
]
reference_or_constr: [
| qualid
-| term1_extended
+| one_term
]
hint: [
@@ -1100,7 +1090,7 @@ hint: [
| "Mode" qualid LIST1 [ "+" | "!" | "-" ]
| "Unfold" LIST1 qualid
| "Constructors" LIST1 qualid
-| "Extern" num OPT term1_extended "=>" ltac_expr
+| "Extern" num OPT one_term "=>" ltac_expr
]
constr_body: [
@@ -1157,23 +1147,23 @@ fun_scheme_arg: [
]
ring_mod: [
-| "decidable" term1_extended (* setoid_ring plugin *)
+| "decidable" one_term (* setoid_ring plugin *)
| "abstract" (* setoid_ring plugin *)
-| "morphism" term1_extended (* setoid_ring plugin *)
+| "morphism" one_term (* setoid_ring plugin *)
| "constants" "[" ltac_expr "]" (* setoid_ring plugin *)
-| "closed" "[" LIST1 qualid "]" (* setoid_ring plugin *)
| "preprocess" "[" ltac_expr "]" (* setoid_ring plugin *)
| "postprocess" "[" ltac_expr "]" (* setoid_ring plugin *)
-| "setoid" term1_extended term1_extended (* setoid_ring plugin *)
-| "sign" term1_extended (* setoid_ring plugin *)
-| "power" term1_extended "[" LIST1 qualid "]" (* setoid_ring plugin *)
-| "power_tac" term1_extended "[" ltac_expr "]" (* setoid_ring plugin *)
-| "div" term1_extended (* setoid_ring plugin *)
+| "setoid" one_term one_term (* setoid_ring plugin *)
+| "sign" one_term (* setoid_ring plugin *)
+| "power" one_term "[" LIST1 qualid "]" (* setoid_ring plugin *)
+| "power_tac" one_term "[" ltac_expr "]" (* setoid_ring plugin *)
+| "div" one_term (* setoid_ring plugin *)
+| "closed" "[" LIST1 qualid "]" (* setoid_ring plugin *)
]
field_mod: [
| ring_mod (* setoid_ring plugin *)
-| "completeness" term1_extended (* setoid_ring plugin *)
+| "completeness" one_term (* setoid_ring plugin *)
]
debug: [
@@ -1216,15 +1206,21 @@ query_command: [
| "Eval" red_expr "in" term "."
| "Compute" term "."
| "Check" term "."
-| "About" smart_global OPT ( "@{" LIST0 name "}" ) "."
-| "SearchHead" term1_extended in_or_out_modules "."
-| "SearchPattern" term1_extended in_or_out_modules "."
-| "SearchRewrite" term1_extended in_or_out_modules "."
+| "About" smart_qualid OPT ( "@{" LIST0 name "}" ) "."
+| "SearchHead" one_term in_or_out_modules "."
+| "SearchPattern" one_term in_or_out_modules "."
+| "SearchRewrite" one_term in_or_out_modules "."
| "Search" searchabout_query searchabout_queries "."
| "SearchAbout" searchabout_query searchabout_queries "."
| "SearchAbout" "[" LIST1 searchabout_query "]" in_or_out_modules "."
]
+class: [
+| "Funclass"
+| "Sortclass"
+| smart_qualid
+]
+
ne_in_or_out_modules: [
| "inside" LIST1 qualid
| "outside" LIST1 qualid
@@ -1242,7 +1238,7 @@ positive_search_mark: [
searchabout_query: [
| positive_search_mark string OPT ( "%" ident )
-| positive_search_mark term1_extended
+| positive_search_mark one_term
]
searchabout_queries: [
@@ -1256,10 +1252,10 @@ syntax: [
| "Close" "Scope" ident
| "Delimit" "Scope" ident "with" ident
| "Undelimit" "Scope" ident
-| "Bind" "Scope" ident "with" LIST1 class_rawexpr
-| "Infix" string ":=" term1_extended OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ]
-| "Notation" ident LIST0 ident ":=" term1_extended OPT ( "(" "only" "parsing" ")" )
-| "Notation" string ":=" term1_extended OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ]
+| "Bind" "Scope" ident "with" LIST1 class
+| "Infix" string ":=" one_term OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ]
+| "Notation" ident LIST0 ident ":=" one_term OPT ( "(" "only" "parsing" ")" )
+| "Notation" string ":=" one_term OPT [ "(" LIST1 syntax_modifier SEP "," ")" ] OPT [ ":" ident ]
| "Format" "Notation" string string string
| "Reserved" "Infix" string OPT [ "(" LIST1 syntax_modifier SEP "," ")" ]
| "Reserved" "Notation" string OPT [ "(" LIST1 syntax_modifier SEP "," ")" ]
@@ -1314,17 +1310,17 @@ at_level_opt: [
simple_tactic: [
| "reflexivity"
-| "exact" term1_extended
+| "exact" one_term
| "assumption"
| "etransitivity"
-| "cut" term1_extended
-| "exact_no_check" term1_extended
-| "vm_cast_no_check" term1_extended
-| "native_cast_no_check" term1_extended
-| "casetype" term1_extended
-| "elimtype" term1_extended
-| "lapply" term1_extended
-| "transitivity" term1_extended
+| "cut" one_term
+| "exact_no_check" one_term
+| "vm_cast_no_check" one_term
+| "native_cast_no_check" one_term
+| "casetype" one_term
+| "elimtype" one_term
+| "lapply" one_term
+| "transitivity" one_term
| "left"
| "eleft"
| "left" "with" bindings
@@ -1377,11 +1373,11 @@ simple_tactic: [
| "clear" LIST0 ident
| "clear" "-" LIST1 ident
| "clearbody" LIST1 ident
-| "generalize" "dependent" term1_extended
-| "replace" term1_extended "with" term1_extended clause_dft_concl by_arg_tac
-| "replace" "->" term1_extended clause_dft_concl
-| "replace" "<-" term1_extended clause_dft_concl
-| "replace" term1_extended clause_dft_concl
+| "generalize" "dependent" one_term
+| "replace" one_term "with" one_term clause_dft_concl by_arg_tac
+| "replace" "->" one_term clause_dft_concl
+| "replace" "<-" one_term clause_dft_concl
+| "replace" one_term clause_dft_concl
| "simplify_eq"
| "simplify_eq" destruction_arg
| "esimplify_eq"
@@ -1400,64 +1396,64 @@ simple_tactic: [
| "einjection" destruction_arg "as" LIST0 simple_intropattern
| "simple" "injection"
| "simple" "injection" destruction_arg
-| "dependent" "rewrite" orient term1_extended
-| "dependent" "rewrite" orient term1_extended "in" ident
-| "cutrewrite" orient term1_extended
-| "cutrewrite" orient term1_extended "in" ident
-| "decompose" "sum" term1_extended
-| "decompose" "record" term1_extended
-| "absurd" term1_extended
+| "dependent" "rewrite" orient one_term
+| "dependent" "rewrite" orient one_term "in" ident
+| "cutrewrite" orient one_term
+| "cutrewrite" orient one_term "in" ident
+| "decompose" "sum" one_term
+| "decompose" "record" one_term
+| "absurd" one_term
| "contradiction" OPT constr_with_bindings
| "autorewrite" "with" LIST1 ident clause_dft_concl
| "autorewrite" "with" LIST1 ident clause_dft_concl "using" ltac_expr
| "autorewrite" "*" "with" LIST1 ident clause_dft_concl
| "autorewrite" "*" "with" LIST1 ident clause_dft_concl "using" ltac_expr
-| "rewrite" "*" orient term1_extended "in" ident "at" occurrences by_arg_tac
-| "rewrite" "*" orient term1_extended "at" occurrences "in" ident by_arg_tac
-| "rewrite" "*" orient term1_extended "in" ident by_arg_tac
-| "rewrite" "*" orient term1_extended "at" occurrences by_arg_tac
-| "rewrite" "*" orient term1_extended by_arg_tac
-| "refine" term1_extended
-| "simple" "refine" term1_extended
-| "notypeclasses" "refine" term1_extended
-| "simple" "notypeclasses" "refine" term1_extended
+| "rewrite" "*" orient one_term "in" ident "at" occurrences by_arg_tac
+| "rewrite" "*" orient one_term "at" occurrences "in" ident by_arg_tac
+| "rewrite" "*" orient one_term "in" ident by_arg_tac
+| "rewrite" "*" orient one_term "at" occurrences by_arg_tac
+| "rewrite" "*" orient one_term by_arg_tac
+| "refine" one_term
+| "simple" "refine" one_term
+| "notypeclasses" "refine" one_term
+| "simple" "notypeclasses" "refine" one_term
| "solve_constraints"
| "subst" LIST1 ident
| "subst"
| "simple" "subst"
| "evar" "(" ident ":" term ")"
-| "evar" term1_extended
+| "evar" one_term
| "instantiate" "(" ident ":=" term ")"
| "instantiate" "(" int ":=" term ")" hloc
| "instantiate"
-| "stepl" term1_extended "by" ltac_expr
-| "stepl" term1_extended
-| "stepr" term1_extended "by" ltac_expr
-| "stepr" term1_extended
+| "stepl" one_term "by" ltac_expr
+| "stepl" one_term
+| "stepr" one_term "by" ltac_expr
+| "stepr" one_term
| "generalize_eqs" ident
| "dependent" "generalize_eqs" ident
| "generalize_eqs_vars" ident
| "dependent" "generalize_eqs_vars" ident
| "specialize_eqs" ident
-| "hresolve_core" "(" ident ":=" term1_extended ")" "at" int_or_var "in" term1_extended
-| "hresolve_core" "(" ident ":=" term1_extended ")" "in" term1_extended
+| "hresolve_core" "(" ident ":=" one_term ")" "at" int_or_var "in" one_term
+| "hresolve_core" "(" ident ":=" one_term ")" "in" one_term
| "hget_evar" int_or_var
| "destauto"
| "destauto" "in" ident
| "transparent_abstract" ltac_expr3
| "transparent_abstract" ltac_expr3 "using" ident
-| "constr_eq" term1_extended term1_extended
-| "constr_eq_strict" term1_extended term1_extended
-| "constr_eq_nounivs" term1_extended term1_extended
-| "is_evar" term1_extended
-| "has_evar" term1_extended
-| "is_var" term1_extended
-| "is_fix" term1_extended
-| "is_cofix" term1_extended
-| "is_ind" term1_extended
-| "is_constructor" term1_extended
-| "is_proj" term1_extended
-| "is_const" term1_extended
+| "constr_eq" one_term one_term
+| "constr_eq_strict" one_term one_term
+| "constr_eq_nounivs" one_term one_term
+| "is_evar" one_term
+| "has_evar" one_term
+| "is_var" one_term
+| "is_fix" one_term
+| "is_cofix" one_term
+| "is_ind" one_term
+| "is_constructor" one_term
+| "is_proj" one_term
+| "is_const" one_term
| "shelve"
| "shelve_unifiable"
| "unshelve" ltac_expr1
@@ -1466,7 +1462,7 @@ simple_tactic: [
| "swap" int_or_var int_or_var
| "revgoals"
| "guard" int_or_var comparison int_or_var
-| "decompose" "[" LIST1 term1_extended "]" term1_extended
+| "decompose" "[" LIST1 one_term "]" one_term
| "optimize_heap"
| "start" "ltac" "profiling"
| "stop" "ltac" "profiling"
@@ -1478,14 +1474,14 @@ simple_tactic: [
| "finish_timing" OPT string
| "finish_timing" "(" string ")" OPT string
| "eassumption"
-| "eexact" term1_extended
+| "eexact" one_term
| "trivial" auto_using hintbases
| "info_trivial" auto_using hintbases
| "debug" "trivial" auto_using hintbases
| "auto" OPT int_or_var auto_using hintbases
| "info_auto" OPT int_or_var auto_using hintbases
| "debug" "auto" OPT int_or_var auto_using hintbases
-| "prolog" "[" LIST0 term1_extended "]" int_or_var
+| "prolog" "[" LIST0 one_term "]" int_or_var
| "eauto" OPT int_or_var OPT int_or_var auto_using hintbases
| "new" "auto" OPT int_or_var auto_using hintbases
| "debug" "eauto" OPT int_or_var OPT int_or_var auto_using hintbases
@@ -1494,17 +1490,17 @@ simple_tactic: [
| "autounfold" hintbases clause_dft_concl
| "autounfold_one" hintbases "in" ident
| "autounfold_one" hintbases
-| "unify" term1_extended term1_extended
-| "unify" term1_extended term1_extended "with" ident
-| "convert_concl_no_check" term1_extended
+| "unify" one_term one_term
+| "unify" one_term one_term "with" ident
+| "convert_concl_no_check" one_term
| "typeclasses" "eauto" "bfs" OPT int_or_var "with" LIST1 ident
| "typeclasses" "eauto" OPT int_or_var "with" LIST1 ident
| "typeclasses" "eauto" OPT int_or_var
-| "head_of_constr" ident term1_extended
-| "not_evar" term1_extended
-| "is_ground" term1_extended
-| "autoapply" term1_extended "using" ident
-| "autoapply" term1_extended "with" ident
+| "head_of_constr" ident one_term
+| "not_evar" one_term
+| "is_ground" one_term
+| "autoapply" one_term "using" ident
+| "autoapply" one_term "with" ident
| "progress_evars" ltac_expr
| "rewrite_strat" rewstrategy
| "rewrite_db" ident "in" ident
@@ -1518,10 +1514,10 @@ simple_tactic: [
| "setoid_symmetry"
| "setoid_symmetry" "in" ident
| "setoid_reflexivity"
-| "setoid_transitivity" term1_extended
+| "setoid_transitivity" one_term
| "setoid_etransitivity"
| "decide" "equality"
-| "compare" term1_extended term1_extended
+| "compare" one_term one_term
| "rewrite_strat" rewstrategy "in" ident
| "intros" intropattern_list_opt
| "eintros" intropattern_list_opt
@@ -1536,41 +1532,43 @@ simple_tactic: [
| "fix" ident num "with" LIST1 fixdecl
| "cofix" ident "with" LIST1 cofixdecl
| "pose" bindings_with_parameters
-| "pose" term1_extended as_name
+| "pose" one_term as_name
| "epose" bindings_with_parameters
-| "epose" term1_extended as_name
+| "epose" one_term as_name
| "set" bindings_with_parameters clause_dft_concl
-| "set" term1_extended as_name clause_dft_concl
+| "set" one_term as_name clause_dft_concl
| "eset" bindings_with_parameters clause_dft_concl
-| "eset" term1_extended as_name clause_dft_concl
-| "remember" term1_extended as_name eqn_ipat clause_dft_all
-| "eremember" term1_extended as_name eqn_ipat clause_dft_all
+| "eset" one_term as_name clause_dft_concl
+| "remember" one_term as_name eqn_ipat clause_dft_all
+| "eremember" one_term as_name eqn_ipat clause_dft_all
| "assert" "(" ident ":=" term ")"
| "eassert" "(" ident ":=" term ")"
| "assert" "(" ident ":" term ")" by_tactic
| "eassert" "(" ident ":" term ")" by_tactic
| "enough" "(" ident ":" term ")" by_tactic
| "eenough" "(" ident ":" term ")" by_tactic
-| "assert" term1_extended as_ipat by_tactic
-| "eassert" term1_extended as_ipat by_tactic
+| "assert" one_term as_ipat by_tactic
+| "eassert" one_term as_ipat by_tactic
+| "pose" "proof" "(" ident ":=" term ")"
+| "epose" "proof" "(" ident ":=" term ")"
| "pose" "proof" term as_ipat
| "epose" "proof" term as_ipat
-| "enough" term1_extended as_ipat by_tactic
-| "eenough" term1_extended as_ipat by_tactic
-| "generalize" term1_extended
-| "generalize" term1_extended LIST1 term1_extended
-| "generalize" term1_extended OPT ( "at" occs_nums ) as_name LIST0 [ "," pattern_occ as_name ]
+| "enough" one_term as_ipat by_tactic
+| "eenough" one_term as_ipat by_tactic
+| "generalize" one_term
+| "generalize" one_term LIST1 one_term
+| "generalize" one_term OPT ( "at" occs_nums ) as_name LIST0 [ "," pattern_occ as_name ]
| "induction" induction_clause_list
| "einduction" induction_clause_list
| "destruct" induction_clause_list
| "edestruct" induction_clause_list
| "rewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic
| "erewrite" LIST1 oriented_rewriter SEP "," clause_dft_concl by_tactic
-| "dependent" [ "simple" "inversion" | "inversion" | "inversion_clear" ] quantified_hypothesis as_or_and_ipat OPT [ "with" term1_extended ]
+| "dependent" [ "simple" "inversion" | "inversion" | "inversion_clear" ] quantified_hypothesis as_or_and_ipat OPT [ "with" one_term ]
| "simple" "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list
| "inversion" quantified_hypothesis as_or_and_ipat in_hyp_list
| "inversion_clear" quantified_hypothesis as_or_and_ipat in_hyp_list
-| "inversion" quantified_hypothesis "using" term1_extended in_hyp_list
+| "inversion" quantified_hypothesis "using" one_term in_hyp_list
| "red" clause_dft_concl
| "hnf" clause_dft_concl
| "simpl" OPT delta_flag OPT ref_or_pattern_occ clause_dft_concl
@@ -1581,7 +1579,7 @@ simple_tactic: [
| "vm_compute" OPT ref_or_pattern_occ clause_dft_concl
| "native_compute" OPT ref_or_pattern_occ clause_dft_concl
| "unfold" LIST1 unfold_occ SEP "," clause_dft_concl
-| "fold" LIST1 term1_extended clause_dft_concl
+| "fold" LIST1 one_term clause_dft_concl
| "pattern" LIST1 pattern_occ SEP "," clause_dft_concl
| "change" conversion clause_dft_concl
| "change_no_check" conversion clause_dft_concl
@@ -1589,16 +1587,16 @@ simple_tactic: [
| "rtauto"
| "congruence"
| "congruence" int
-| "congruence" "with" LIST1 term1_extended
-| "congruence" int "with" LIST1 term1_extended
+| "congruence" "with" LIST1 one_term
+| "congruence" int "with" LIST1 one_term
| "f_equal"
| "firstorder" OPT ltac_expr firstorder_using
| "firstorder" OPT ltac_expr "with" LIST1 ident
| "firstorder" OPT ltac_expr firstorder_using "with" LIST1 ident
| "gintuition" OPT ltac_expr
| "functional" "inversion" quantified_hypothesis OPT qualid (* funind plugin *)
-| "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *)
-| "soft" "functional" "induction" LIST1 term1_extended fun_ind_using with_names (* funind plugin *)
+| "functional" "induction" LIST1 one_term fun_ind_using with_names (* funind plugin *)
+| "soft" "functional" "induction" LIST1 one_term fun_ind_using with_names (* funind plugin *)
| "psatz_Z" int_or_var ltac_expr (* micromega plugin *)
| "psatz_Z" ltac_expr (* micromega plugin *)
| "xlia" ltac_expr (* micromega plugin *)
@@ -1614,16 +1612,17 @@ simple_tactic: [
| "psatz_R" ltac_expr (* micromega plugin *)
| "psatz_Q" int_or_var ltac_expr (* micromega plugin *)
| "psatz_Q" ltac_expr (* micromega plugin *)
-| "zify_iter_specs" ltac_expr (* micromega plugin *)
+| "zify_iter_specs" (* micromega plugin *)
| "zify_op" (* micromega plugin *)
| "zify_saturate" (* micromega plugin *)
| "zify_iter_let" ltac_expr (* micromega plugin *)
-| "nsatz_compute" term1_extended (* nsatz plugin *)
+| "zify_elim_let" (* micromega plugin *)
+| "nsatz_compute" one_term (* nsatz plugin *)
| "omega" (* omega plugin *)
| "protect_fv" string "in" ident (* setoid_ring plugin *)
| "protect_fv" string (* setoid_ring plugin *)
-| "ring_lookup" ltac_expr0 "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *)
-| "field_lookup" ltac_expr "[" LIST0 term1_extended "]" LIST1 term1_extended (* setoid_ring plugin *)
+| "ring_lookup" ltac_expr0 "[" LIST0 one_term "]" LIST1 one_term (* setoid_ring plugin *)
+| "field_lookup" ltac_expr "[" LIST0 one_term "]" LIST1 one_term (* setoid_ring plugin *)
]
hloc: [
@@ -1646,11 +1645,23 @@ by_arg_tac: [
]
in_clause: [
-| in_clause
+| LIST0 hypident_occ SEP "," OPT ( "|-" OPT concl_occ )
+| "*" "|-" OPT concl_occ
| "*" OPT ( "at" occs_nums )
-| "*" "|-" concl_occ
-| LIST0 hypident_occ SEP "," "|-" concl_occ
-| LIST0 hypident_occ SEP ","
+]
+
+concl_occ: [
+| "*" OPT ( "at" occs_nums )
+]
+
+hypident_occ: [
+| hypident OPT ( "at" occs_nums )
+]
+
+hypident: [
+| ident
+| "(" "type" "of" ident ")"
+| "(" "value" "of" ident ")"
]
as_ipat: [
@@ -1707,7 +1718,7 @@ induction_clause_list: [
]
auto_using: [
-| "using" LIST1 term1_extended SEP ","
+| "using" LIST1 one_term SEP ","
|
]
@@ -1762,7 +1773,7 @@ simple_binding: [
bindings: [
| LIST1 simple_binding
-| LIST1 term1_extended
+| LIST1 one_term
]
comparison: [
@@ -1783,16 +1794,6 @@ bindings_with_parameters: [
| "(" ident LIST0 simple_binder ":=" term ")"
]
-hypident: [
-| ident
-| "(" "type" "of" ident ")"
-| "(" "value" "of" ident ")"
-]
-
-hypident_occ: [
-| hypident OPT ( "at" occs_nums )
-]
-
clause_dft_concl: [
| "in" in_clause
| OPT ( "at" occs_nums )
@@ -1810,11 +1811,6 @@ opt_clause: [
|
]
-concl_occ: [
-| "*" OPT ( "at" occs_nums )
-|
-]
-
in_hyp_list: [
| "in" LIST1 ident
|
@@ -1844,7 +1840,7 @@ cofixdecl: [
]
constr_with_bindings: [
-| term1_extended with_bindings
+| one_term with_bindings
]
with_bindings: [
@@ -1869,9 +1865,9 @@ quantified_hypothesis: [
]
conversion: [
-| term1_extended
-| term1_extended "with" term1_extended
-| term1_extended "at" occs_nums "with" term1_extended
+| one_term
+| one_term "with" one_term
+| one_term "at" occs_nums "with" one_term
]
firstorder_using: [
@@ -1897,29 +1893,29 @@ occurrences: [
]
rewstrategy: [
-| term1_extended
-| "<-" term1_extended
-| "subterms" rewstrategy
-| "subterm" rewstrategy
-| "innermost" rewstrategy
-| "outermost" rewstrategy
-| "bottomup" rewstrategy
-| "topdown" rewstrategy
-| "id"
+| one_term
+| "<-" one_term
| "fail"
+| "id"
| "refl"
| "progress" rewstrategy
| "try" rewstrategy
-| "any" rewstrategy
-| "repeat" rewstrategy
| rewstrategy ";" rewstrategy
-| "(" rewstrategy ")"
| "choice" rewstrategy rewstrategy
-| "old_hints" ident
+| "repeat" rewstrategy
+| "any" rewstrategy
+| "subterm" rewstrategy
+| "subterms" rewstrategy
+| "innermost" rewstrategy
+| "outermost" rewstrategy
+| "bottomup" rewstrategy
+| "topdown" rewstrategy
| "hints" ident
-| "terms" LIST0 term1_extended
+| "terms" LIST0 one_term
| "eval" red_expr
-| "fold" term1_extended
+| "fold" one_term
+| "(" rewstrategy ")"
+| "old_hints" ident
]
ltac_expr: [
@@ -2037,7 +2033,7 @@ tactic_arg: [
| "context" ident "[" term "]"
| "type" "of" term
| "fresh" LIST0 fresh_id
-| "type_term" term1_extended
+| "type_term" one_term
| "numgoals"
]
diff --git a/ide/coqide.ml b/ide/coqide.ml
index 61e95c21b1..553b834a37 100644
--- a/ide/coqide.ml
+++ b/ide/coqide.ml
@@ -244,6 +244,13 @@ let close_and_quit () =
List.iter (fun sn -> Coq.close_coqtop sn.coqtop) notebook#pages;
exit 0
+(* Work around a deadlock due to OCaml exit cleanup. The standard [exit]
+ function calls [flush_all], which can block if one of the opened channels is
+ not valid anymore. We do not register [at_exit] functions in CoqIDE, so
+ instead of flushing we simply die as gracefully as possible in the function
+ below. *)
+external sys_exit : int -> 'a = "caml_sys_exit"
+
let crash_save exitcode =
Minilib.log "Starting emergency save of buffers in .crashcoqide files";
let idx =
@@ -263,7 +270,7 @@ let crash_save exitcode =
in
List.iter save_session notebook#pages;
Minilib.log "End emergency save";
- exit exitcode
+ sys_exit exitcode
end
diff --git a/kernel/cooking.ml b/kernel/cooking.ml
index 31dd26d2ba..13ee353c6b 100644
--- a/kernel/cooking.ml
+++ b/kernel/cooking.ml
@@ -295,20 +295,14 @@ let abstract_projection ~params expmod hyps t =
t
let cook_one_ind ~ntypes
- (section_decls,_ as hyps) expmod mip =
+ hyps expmod mip =
let mind_arity = match mip.mind_arity with
| RegularArity {mind_user_arity=arity;mind_sort=sort} ->
let arity = abstract_as_type (expmod arity) hyps in
let sort = destSort (expmod (mkSort sort)) in
RegularArity {mind_user_arity=arity; mind_sort=sort}
- | TemplateArity {template_param_levels=levels;template_level;template_context} ->
- let sec_levels = CList.map_filter (fun d ->
- if RelDecl.is_local_assum d then Some None
- else None)
- section_decls
- in
- let levels = List.rev_append sec_levels levels in
- TemplateArity {template_param_levels=levels;template_level;template_context}
+ | TemplateArity {template_level} ->
+ TemplateArity {template_level}
in
let mind_arity_ctxt =
let ctx = Context.Rel.map expmod mip.mind_arity_ctxt in
@@ -386,6 +380,17 @@ let cook_inductive { Opaqueproof.modlist; abstract } mib =
in
Some (Array.append newvariance variance), Some sec_variance
in
+ let mind_template = match mib.mind_template with
+ | None -> None
+ | Some {template_param_levels=levels; template_context} ->
+ let sec_levels = CList.map_filter (fun d ->
+ if RelDecl.is_local_assum d then Some None
+ else None)
+ section_decls
+ in
+ let levels = List.rev_append sec_levels levels in
+ Some {template_param_levels=levels; template_context}
+ in
{
mind_packets;
mind_record;
@@ -396,6 +401,7 @@ let cook_inductive { Opaqueproof.modlist; abstract } mib =
mind_nparams_rec = mib.mind_nparams_rec + nnewparams;
mind_params_ctxt;
mind_universes;
+ mind_template;
mind_variance;
mind_sec_variance;
mind_private = mib.mind_private;
diff --git a/kernel/declarations.ml b/kernel/declarations.ml
index ac130d018d..11a07ee5cf 100644
--- a/kernel/declarations.ml
+++ b/kernel/declarations.ml
@@ -30,8 +30,11 @@ type engagement = set_predicativity
*)
type template_arity = {
- template_param_levels : Univ.Level.t option list;
template_level : Univ.Universe.t;
+}
+
+type template_universes = {
+ template_param_levels : Univ.Level.t option list;
template_context : Univ.ContextSet.t;
}
@@ -218,6 +221,8 @@ type mutual_inductive_body = {
mind_universes : universes; (** Information about monomorphic/polymorphic/cumulative inductives and their universes *)
+ mind_template : template_universes option;
+
mind_variance : Univ.Variance.t array option; (** Variance info, [None] when non-cumulative. *)
mind_sec_variance : Univ.Variance.t array option;
diff --git a/kernel/declareops.ml b/kernel/declareops.ml
index a3adac7a11..a1122d1279 100644
--- a/kernel/declareops.ml
+++ b/kernel/declareops.ml
@@ -46,9 +46,10 @@ let map_decl_arity f g = function
| TemplateArity a -> TemplateArity (g a)
let hcons_template_arity ar =
+ { template_level = Univ.hcons_univ ar.template_level; }
+
+let hcons_template_universe ar =
{ template_param_levels = ar.template_param_levels;
- (* List.Smart.map (Option.Smart.map Univ.hcons_univ_level) ar.template_param_levels; *)
- template_level = Univ.hcons_univ ar.template_level;
template_context = Univ.hcons_universe_context_set ar.template_context }
let universes_context = function
@@ -247,6 +248,7 @@ let subst_mind_body sub mib =
Context.Rel.map (subst_mps sub) mib.mind_params_ctxt;
mind_packets = Array.Smart.map (subst_mind_packet sub) mib.mind_packets ;
mind_universes = mib.mind_universes;
+ mind_template = mib.mind_template;
mind_variance = mib.mind_variance;
mind_sec_variance = mib.mind_sec_variance;
mind_private = mib.mind_private;
@@ -323,6 +325,7 @@ let hcons_mind mib =
{ mib with
mind_packets = Array.Smart.map hcons_mind_packet mib.mind_packets;
mind_params_ctxt = hcons_rel_context mib.mind_params_ctxt;
+ mind_template = Option.Smart.map hcons_template_universe mib.mind_template;
mind_universes = hcons_universes mib.mind_universes }
(** Hashconsing of modules *)
diff --git a/kernel/entries.ml b/kernel/entries.ml
index 8d930b521c..983fa822e9 100644
--- a/kernel/entries.ml
+++ b/kernel/entries.ml
@@ -37,7 +37,6 @@ then, in i{^ th} block, [mind_entry_params] is [xn:Xn;...;x1:X1];
type one_inductive_entry = {
mind_entry_typename : Id.t;
mind_entry_arity : constr;
- mind_entry_template : bool; (* Use template polymorphism *)
mind_entry_consnames : Id.t list;
mind_entry_lc : constr list }
@@ -50,6 +49,7 @@ type mutual_inductive_entry = {
mind_entry_params : Constr.rel_context;
mind_entry_inds : one_inductive_entry list;
mind_entry_universes : universes_entry;
+ mind_entry_template : bool; (* Use template polymorphism *)
mind_entry_cumulative : bool;
(* universe constraints and the constraints for subtyping of
inductive types in the block. *)
diff --git a/kernel/environ.ml b/kernel/environ.ml
index 501ac99ff3..1b5a77cc96 100644
--- a/kernel/environ.ml
+++ b/kernel/environ.ml
@@ -590,11 +590,11 @@ let template_polymorphic_ind (mind,i) env =
| TemplateArity _ -> true
| RegularArity _ -> false
-let template_polymorphic_variables (mind,i) env =
- match (lookup_mind mind env).mind_packets.(i).mind_arity with
- | TemplateArity { Declarations.template_param_levels = l; _ } ->
+let template_polymorphic_variables (mind, _) env =
+ match (lookup_mind mind env).mind_template with
+ | Some { Declarations.template_param_levels = l; _ } ->
List.map_filter (fun level -> level) l
- | RegularArity _ -> []
+ | None -> []
let template_polymorphic_pind (ind,u) env =
if not (Univ.Instance.is_empty u) then false
diff --git a/kernel/indTyping.ml b/kernel/indTyping.ml
index cc15109f06..d5aadd0c02 100644
--- a/kernel/indTyping.ml
+++ b/kernel/indTyping.ml
@@ -101,10 +101,10 @@ let check_indices_matter env_params info indices =
else check_context_univs ~ctor:false env_params info indices
(* env_ar contains the inductives before the current ones in the block, and no parameters *)
-let check_arity env_params env_ar ind =
+let check_arity ~template env_params env_ar ind =
let {utj_val=arity;utj_type=_} = Typeops.infer_type env_params ind.mind_entry_arity in
let indices, ind_sort = Reduction.dest_arity env_params arity in
- let ind_min_univ = if ind.mind_entry_template then Some Universe.type0m else None in
+ let ind_min_univ = if template then Some Universe.type0m else None in
let univ_info = {
ind_squashed=false;
ind_has_relevant_arg=false;
@@ -200,28 +200,88 @@ let unbounded_from_below u cstrs =
let template_polymorphic_univs ~ctor_levels uctx paramsctxt concl =
let check_level l =
Univ.LSet.mem l (Univ.ContextSet.levels uctx) &&
+ (let () = assert (not @@ Univ.Level.is_small l) in true) &&
unbounded_from_below l (Univ.ContextSet.constraints uctx) &&
not (Univ.LSet.mem l ctor_levels)
in
let univs = Univ.Universe.levels concl in
- let univs =
- Univ.LSet.filter (fun l -> check_level l || Univ.Level.is_prop l) univs
- in
+ let univs = Univ.LSet.filter (fun l -> check_level l) univs in
let fold acc = function
| (LocalAssum (_, p)) ->
(let c = Term.strip_prod_assum p in
match kind c with
| Sort (Type u) ->
(match Univ.Universe.level u with
- | Some l -> if Univ.LSet.mem l univs && not (Univ.Level.is_prop l) then Some l else None
+ | Some l -> if Univ.LSet.mem l univs then Some l else None
| None -> None)
| _ -> None) :: acc
| LocalDef _ -> acc
in
let params = List.fold_left fold [] paramsctxt in
- params, univs
+ if Universe.is_type0m concl then Some (univs, params)
+ else if not @@ Univ.LSet.is_empty univs then Some (univs, params)
+ else None
+
+let get_param_levels ctx params arity splayed_lc =
+ let min_univ = match arity with
+ | RegularArity _ ->
+ CErrors.user_err
+ Pp.(strbrk "Ill-formed template mutual inductive declaration: all types must be template.")
+ | TemplateArity ar -> ar.template_level
+ in
+ let ctor_levels =
+ let add_levels c levels = Univ.LSet.union levels (Vars.universes_of_constr c) in
+ let param_levels =
+ List.fold_left (fun levels d -> match d with
+ | LocalAssum _ -> levels
+ | LocalDef (_,b,t) -> add_levels b (add_levels t levels))
+ Univ.LSet.empty params
+ in
+ Array.fold_left
+ (fun levels (d,c) ->
+ let levels =
+ List.fold_left (fun levels d ->
+ Context.Rel.Declaration.fold_constr add_levels d levels)
+ levels d
+ in
+ add_levels c levels)
+ param_levels
+ splayed_lc
+ in
+ match template_polymorphic_univs ~ctor_levels ctx params min_univ with
+ | None ->
+ CErrors.user_err
+ Pp.(strbrk "Ill-formed template inductive declaration: not polymorphic on any universe.")
+ | Some (_, param_levels) ->
+ param_levels
+
+let get_template univs params data =
+ let ctx = match univs with
+ | Monomorphic ctx -> ctx
+ | Polymorphic _ ->
+ CErrors.anomaly ~label:"polymorphic_template_ind"
+ Pp.(strbrk "Template polymorphism and full polymorphism are incompatible.") in
+ (* For each type in the block, compute potential template parameters *)
+ let params = List.map (fun ((arity, _), (_, splayed_lc), _) -> get_param_levels ctx params arity splayed_lc) data in
+ (* Pick the lower bound of template parameters. Note that in particular, if
+ one of the inductive types from the block is Prop-valued, then no
+ parameters are template. *)
+ let fold min params =
+ let map u v = match u, v with
+ | (None, _) | (_, None) -> None
+ | Some u, Some v ->
+ let () = assert (Univ.Level.equal u v) in
+ Some u
+ in
+ List.map2 map min params
+ in
+ let params = match params with
+ | [] -> assert false
+ | hd :: rem -> List.fold_left fold hd rem
+ in
+ { template_param_levels = params; template_context = ctx }
-let abstract_packets univs usubst params ((arity,lc),(indices,splayed_lc),univ_info) =
+let abstract_packets usubst ((arity,lc),(indices,splayed_lc),univ_info) =
if not (Universe.Set.is_empty univ_info.missing)
then raise (InductiveError (MissingConstraints (univ_info.missing,univ_info.ind_univ)));
let arity = Vars.subst_univs_level_constr usubst arity in
@@ -237,40 +297,7 @@ let abstract_packets univs usubst params ((arity,lc),(indices,splayed_lc),univ_i
let arity = match univ_info.ind_min_univ with
| None -> RegularArity {mind_user_arity = arity; mind_sort = Sorts.sort_of_univ ind_univ}
- | Some min_univ ->
- let ctx = match univs with
- | Monomorphic ctx -> ctx
- | Polymorphic _ ->
- CErrors.anomaly ~label:"polymorphic_template_ind"
- Pp.(strbrk "Template polymorphism and full polymorphism are incompatible.") in
- let ctor_levels =
- let add_levels c levels = Univ.LSet.union levels (Vars.universes_of_constr c) in
- let param_levels =
- List.fold_left (fun levels d -> match d with
- | LocalAssum _ -> levels
- | LocalDef (_,b,t) -> add_levels b (add_levels t levels))
- Univ.LSet.empty params
- in
- Array.fold_left
- (fun levels (d,c) ->
- let levels =
- List.fold_left (fun levels d ->
- Context.Rel.Declaration.fold_constr add_levels d levels)
- levels d
- in
- add_levels c levels)
- param_levels
- splayed_lc
- in
- let param_levels, concl_levels =
- template_polymorphic_univs ~ctor_levels ctx params min_univ
- in
- if List.for_all (fun x -> Option.is_empty x) param_levels
- && Univ.LSet.is_empty concl_levels then
- CErrors.user_err
- Pp.(strbrk "Ill-formed template inductive declaration: not polymorphic on any universe.")
- else
- TemplateArity {template_param_levels = param_levels; template_level = min_univ; template_context = ctx }
+ | Some min_univ -> TemplateArity { template_level = min_univ; }
in
let kelim = allowed_sorts univ_info in
@@ -285,7 +312,7 @@ let typecheck_inductive env ~sec_univs (mie:mutual_inductive_entry) =
mind_check_names mie;
assert (List.is_empty (Environ.rel_context env));
- let has_template_poly = List.exists (fun oie -> oie.mind_entry_template) mie.mind_entry_inds in
+ let has_template_poly = mie.mind_entry_template in
(* universes *)
let env_univs =
@@ -306,7 +333,7 @@ let typecheck_inductive env ~sec_univs (mie:mutual_inductive_entry) =
let env_params, params = Typeops.check_context env_univs mie.mind_entry_params in
(* Arities *)
- let env_ar, data = List.fold_left_map (check_arity env_params) env_univs mie.mind_entry_inds in
+ let env_ar, data = List.fold_left_map (check_arity ~template:has_template_poly env_params) env_univs mie.mind_entry_inds in
let env_ar_par = push_rel_context params env_ar in
(* Constructors *)
@@ -352,7 +379,14 @@ let typecheck_inductive env ~sec_univs (mie:mutual_inductive_entry) =
(* Abstract universes *)
let usubst, univs = Declareops.abstract_universes mie.mind_entry_universes in
let params = Vars.subst_univs_level_context usubst params in
- let data = List.map (abstract_packets univs usubst params) data in
+ let data = List.map (abstract_packets usubst) data in
+ let template =
+ let check ((arity, _), _, _) = match arity with
+ | TemplateArity _ -> true
+ | RegularArity _ -> false
+ in
+ if List.exists check data then Some (get_template univs params data) else None
+ in
let env_ar_par =
let ctx = Environ.rel_context env_ar_par in
@@ -361,4 +395,4 @@ let typecheck_inductive env ~sec_univs (mie:mutual_inductive_entry) =
Environ.push_rel_context ctx env
in
- env_ar_par, univs, variance, record, params, Array.of_list data
+ env_ar_par, univs, template, variance, record, params, Array.of_list data
diff --git a/kernel/indTyping.mli b/kernel/indTyping.mli
index 723ba5459e..babb82c39e 100644
--- a/kernel/indTyping.mli
+++ b/kernel/indTyping.mli
@@ -29,6 +29,7 @@ val typecheck_inductive : env -> sec_univs:Univ.Level.t array option
-> mutual_inductive_entry
-> env
* universes
+ * template_universes option
* Univ.Variance.t array option
* Names.Id.t array option option
* Constr.rel_context
@@ -44,4 +45,4 @@ val template_polymorphic_univs :
Univ.ContextSet.t ->
Constr.rel_context ->
Univ.Universe.t ->
- Univ.Level.t option list * Univ.LSet.t
+ (Univ.LSet.t * Univ.Level.t option list) option
diff --git a/kernel/indtypes.ml b/kernel/indtypes.ml
index b6b8e5265c..58e5e76b61 100644
--- a/kernel/indtypes.ml
+++ b/kernel/indtypes.ml
@@ -466,7 +466,7 @@ let compute_projections (kn, i as ind) mib =
Array.of_list (List.rev rs),
Array.of_list (List.rev pbs)
-let build_inductive env ~sec_univs names prv univs variance
+let build_inductive env ~sec_univs names prv univs template variance
paramsctxt kn isrecord isfinite inds nmr recargs =
let ntypes = Array.length inds in
(* Compute the set of used section variables *)
@@ -538,6 +538,7 @@ let build_inductive env ~sec_univs names prv univs variance
mind_params_ctxt = paramsctxt;
mind_packets = packets;
mind_universes = univs;
+ mind_template = template;
mind_variance = variance;
mind_sec_variance = sec_variance;
mind_private = prv;
@@ -562,7 +563,7 @@ let build_inductive env ~sec_univs names prv univs variance
let check_inductive env ~sec_univs kn mie =
(* First type-check the inductive definition *)
- let (env_ar_par, univs, variance, record, paramsctxt, inds) =
+ let (env_ar_par, univs, template, variance, record, paramsctxt, inds) =
IndTyping.typecheck_inductive env ~sec_univs mie
in
(* Then check positivity conditions *)
@@ -575,6 +576,6 @@ let check_inductive env ~sec_univs kn mie =
(Array.map (fun ((_,lc),(indices,_),_) -> Context.Rel.nhyps indices,lc) inds)
in
(* Build the inductive packets *)
- build_inductive env ~sec_univs names mie.mind_entry_private univs variance
+ build_inductive env ~sec_univs names mie.mind_entry_private univs template variance
paramsctxt kn record mie.mind_entry_finite
inds nmr recargs
diff --git a/kernel/inductive.ml b/kernel/inductive.ml
index 1be86f2bf8..6325779675 100644
--- a/kernel/inductive.ml
+++ b/kernel/inductive.ml
@@ -185,8 +185,8 @@ let make_subst =
exception SingletonInductiveBecomesProp of Id.t
-let instantiate_universes ctx ar args =
- let subst = make_subst (ctx,ar.template_param_levels,args) in
+let instantiate_universes ctx (templ, ar) args =
+ let subst = make_subst (ctx,templ.template_param_levels,args) in
let level = Univ.subst_univs_universe (Univ.make_subst subst) ar.template_level in
let ty =
(* Singleton type not containing types are interpretable in Prop *)
@@ -215,8 +215,12 @@ let type_of_inductive_gen ?(polyprop=true) ((mib,mip),u) paramtyps =
match mip.mind_arity with
| RegularArity a -> subst_instance_constr u a.mind_user_arity
| TemplateArity ar ->
+ let templ = match mib.mind_template with
+ | None -> assert false
+ | Some t -> t
+ in
let ctx = List.rev mip.mind_arity_ctxt in
- let ctx,s = instantiate_universes ctx ar paramtyps in
+ let ctx,s = instantiate_universes ctx (templ, ar) paramtyps in
(* The Ocaml extraction cannot handle (yet?) "Prop-polymorphism", i.e.
the situation where a non-Prop singleton inductive becomes Prop
when applied to Prop params *)
diff --git a/kernel/inductive.mli b/kernel/inductive.mli
index b690fe1157..90571844b9 100644
--- a/kernel/inductive.mli
+++ b/kernel/inductive.mli
@@ -123,9 +123,6 @@ exception SingletonInductiveBecomesProp of Id.t
val max_inductive_sort : Sorts.t array -> Universe.t
-val instantiate_universes : Constr.rel_context ->
- template_arity -> param_univs -> Constr.rel_context * Sorts.t
-
(** {6 Debug} *)
type size = Large | Strict
diff --git a/kernel/safe_typing.ml b/kernel/safe_typing.ml
index 535b7de121..a37d04d82c 100644
--- a/kernel/safe_typing.ml
+++ b/kernel/safe_typing.ml
@@ -1303,12 +1303,7 @@ let start_library dir senv =
required = senv.required }
let export ?except ~output_native_objects senv dir =
- let senv =
- try join_safe_environment ?except senv
- with e ->
- let e = Exninfo.capture e in
- CErrors.user_err ~hdr:"export" (CErrors.iprint e)
- in
+ let senv = join_safe_environment ?except senv in
assert(senv.future_cst = []);
let () = check_current_library dir senv in
let mp = senv.modpath in
diff --git a/lib/control.ml b/lib/control.ml
index e67e88ee95..1898eab89e 100644
--- a/lib/control.ml
+++ b/lib/control.ml
@@ -75,8 +75,8 @@ let windows_timeout n f x e =
if not !exited then begin killed := true; raise Sys.Break end
else raise e
| e ->
- let () = killed := true in
let e = Exninfo.capture e in
+ let () = killed := true in
Exninfo.iraise e
type timeout = { timeout : 'a 'b. int -> ('a -> 'b) -> 'a -> exn -> 'b }
diff --git a/lib/pp.ml b/lib/pp.ml
index 1bd160dcda..f9b6ef20bf 100644
--- a/lib/pp.ml
+++ b/lib/pp.ml
@@ -201,11 +201,7 @@ let pp_with ft pp =
pp_cmd s;
pp_close_tag ft () [@warning "-3"]
in
- try pp_cmd pp
- with reraise ->
- let reraise = Exninfo.capture reraise in
- let () = Format.pp_print_flush ft () in
- Exninfo.iraise reraise
+ pp_cmd pp
(* If mixing some output and a goal display, please use msg_warning,
so that interfaces (proofgeneral for example) can easily dispatch
diff --git a/lib/util.ml b/lib/util.ml
index e2447b005e..ae8119ced0 100644
--- a/lib/util.ml
+++ b/lib/util.ml
@@ -82,10 +82,6 @@ module Set = CSet
module Map = CMap
-(* Stacks *)
-
-module Stack = CStack
-
(* Matrices *)
let matrix_transpose mat =
diff --git a/lib/util.mli b/lib/util.mli
index 1417d6dfcb..be0cc11763 100644
--- a/lib/util.mli
+++ b/lib/util.mli
@@ -76,10 +76,6 @@ module Set : module type of CSet
module Map : module type of CMap
-(** {6 Stacks.} *)
-
-module Stack : module type of CStack
-
(** {6 Streams. } *)
val stream_nth : int -> 'a Stream.t -> 'a
diff --git a/plugins/cc/ccalgo.mli b/plugins/cc/ccalgo.mli
index 3dc934b426..b0b74f4558 100644
--- a/plugins/cc/ccalgo.mli
+++ b/plugins/cc/ccalgo.mli
@@ -8,7 +8,6 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Util
open Constr
open Names
@@ -156,102 +155,6 @@ val subterms : forest -> int -> int * int
val join_path : forest -> int -> int ->
((int * int) * equality) list * ((int * int) * equality) list
-val make_fun_table : state -> Int.Set.t PafMap.t
-
-val do_match : state ->
- (quant_eq * int array) list ref -> matching_problem Stack.t -> unit
-
-val init_pb_stack : state -> matching_problem Stack.t
-
-val paf_of_patt : int Termhash.t -> ccpattern -> pa_fun
-
-val find_instances : state -> (quant_eq * int array) list
-
val execute : bool -> state -> explanation option
val pr_idx_term : Environ.env -> Evd.evar_map -> forest -> int -> Pp.t
-
-val empty_forest: unit -> forest
-
-
-
-
-
-
-
-
-
-
-(*type pa_constructor
-
-
-module PacMap:CSig.MapS with type key=pa_constructor
-
-type term =
- Symb of Term.constr
- | Eps
- | Appli of term * term
- | Constructor of Names.constructor*int*int
-
-type rule =
- Congruence
- | Axiom of Names.Id.t
- | Injection of int*int*int*int
-
-type equality =
- {lhs : int;
- rhs : int;
- rule : rule}
-
-module ST :
-sig
- type t
- val empty : unit -> t
- val enter : int -> int * int -> t -> unit
- val query : int * int -> t -> int
- val delete : int -> t -> unit
- val delete_list : int list -> t -> unit
-end
-
-module UF :
-sig
- type t
- exception Discriminable of int * int * int * int * t
- val empty : unit -> t
- val find : t -> int -> int
- val size : t -> int -> int
- val get_constructor : t -> int -> Names.constructor
- val pac_arity : t -> int -> int * int -> int
- val mem_node_pac : t -> int -> int * int -> int
- val add_pacs : t -> int -> pa_constructor PacMap.t ->
- int list * equality list
- val term : t -> int -> term
- val subterms : t -> int -> int * int
- val add : t -> term -> int
- val union : t -> int -> int -> equality -> int list * equality list
- val join_path : t -> int -> int ->
- ((int*int)*equality) list*
- ((int*int)*equality) list
-end
-
-
-val combine_rec : UF.t -> int list -> equality list
-val process_rec : UF.t -> equality list -> int list
-
-val cc : UF.t -> unit
-
-val make_uf :
- (Names.Id.t * (term * term)) list -> UF.t
-
-val add_one_diseq : UF.t -> (term * term) -> int * int
-
-val add_disaxioms :
- UF.t -> (Names.Id.t * (term * term)) list ->
- (Names.Id.t * (int * int)) list
-
-val check_equal : UF.t -> int * int -> bool
-
-val find_contradiction : UF.t ->
- (Names.Id.t * (int * int)) list ->
- (Names.Id.t * (int * int))
-*)
diff --git a/plugins/funind/indfun_common.ml b/plugins/funind/indfun_common.ml
index 45fafd2872..7d87fc0220 100644
--- a/plugins/funind/indfun_common.ml
+++ b/plugins/funind/indfun_common.ml
@@ -92,18 +92,14 @@ let list_union_eq eq_fun l1 l2 =
let list_add_set_eq eq_fun x l =
if List.exists (eq_fun x) l then l else x::l
-[@@@ocaml.warning "-3"]
-let coq_constant s =
- UnivGen.constr_of_monomorphic_global @@
- Coqlib.gen_reference_in_modules "RecursiveDefinition"
- Coqlib.init_modules s;;
+let coq_constant s = UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref s;;
let find_reference sl s =
let dp = Names.DirPath.make (List.rev_map Id.of_string sl) in
Nametab.locate (make_qualid dp (Id.of_string s))
-let eq = lazy(EConstr.of_constr (coq_constant "eq"))
-let refl_equal = lazy(EConstr.of_constr (coq_constant "eq_refl"))
+let eq = lazy(EConstr.of_constr (coq_constant "core.eq.type"))
+let refl_equal = lazy(EConstr.of_constr (coq_constant "core.eq.refl"))
let with_full_print f a =
let old_implicit_args = Impargs.is_implicit_args ()
@@ -369,10 +365,10 @@ let do_observe_tac s tac g =
ignore(Stack.pop debug_queue);
v
with reraise ->
- let reraise = CErrors.push reraise in
+ let reraise = Exninfo.capture reraise in
if not (Stack.is_empty debug_queue)
then print_debug_queue true (fst reraise);
- Util.iraise reraise
+ Exninfo.iraise reraise
let observe_tac s tac g =
if do_observe ()
@@ -447,14 +443,11 @@ let h_intros l =
let h_id = Id.of_string "h"
let hrec_id = Id.of_string "hrec"
-let well_founded = function () -> EConstr.of_constr (coq_constant "well_founded")
-let acc_rel = function () -> EConstr.of_constr (coq_constant "Acc")
-let acc_inv_id = function () -> EConstr.of_constr (coq_constant "Acc_inv")
-
-[@@@ocaml.warning "-3"]
-let well_founded_ltof () = EConstr.of_constr @@ UnivGen.constr_of_monomorphic_global @@
- Coqlib.find_reference "IndFun" ["Coq"; "Arith";"Wf_nat"] "well_founded_ltof"
-[@@@ocaml.warning "+3"]
+let well_founded = function () -> EConstr.of_constr (coq_constant "core.wf.well_founded")
+let acc_rel = function () -> EConstr.of_constr (coq_constant "core.wf.acc")
+let acc_inv_id = function () -> EConstr.of_constr (coq_constant "core.wf.acc_inv")
+
+let well_founded_ltof () = EConstr.of_constr (coq_constant "num.nat.well_founded_ltof")
let ltof_ref = function () -> (find_reference ["Coq";"Arith";"Wf_nat"] "ltof")
diff --git a/plugins/funind/recdef.ml b/plugins/funind/recdef.ml
index f7f8004998..9fa0ec8c08 100644
--- a/plugins/funind/recdef.ml
+++ b/plugins/funind/recdef.ml
@@ -47,18 +47,12 @@ open Context.Rel.Declaration
(* Ugly things which should not be here *)
-[@@@ocaml.warning "-3"]
-let coq_constant m s = EConstr.of_constr @@ UnivGen.constr_of_monomorphic_global @@
- Coqlib.find_reference "RecursiveDefinition" m s
-
-let arith_Nat = ["Coq"; "Arith";"PeanoNat";"Nat"]
-let arith_Lt = ["Coq"; "Arith";"Lt"]
+let coq_constant s = EConstr.of_constr @@ UnivGen.constr_of_monomorphic_global @@
+ Coqlib.lib_ref s
let coq_init_constant s =
- EConstr.of_constr (
- UnivGen.constr_of_monomorphic_global @@
- Coqlib.gen_reference_in_modules "RecursiveDefinition" Coqlib.init_modules s)
-[@@@ocaml.warning "+3"]
+ EConstr.of_constr(UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref s)
+;;
let find_reference sl s =
let dp = Names.DirPath.make (List.rev_map Id.of_string sl) in
@@ -122,26 +116,26 @@ let v_id = Id.of_string "v"
let def_id = Id.of_string "def"
let p_id = Id.of_string "p"
let rec_res_id = Id.of_string "rec_res";;
-let lt = function () -> (coq_init_constant "lt")
-[@@@ocaml.warning "-3"]
-let le = function () -> (Coqlib.gen_reference_in_modules "RecursiveDefinition" Coqlib.init_modules "le")
-let ex = function () -> (coq_init_constant "ex")
-let nat = function () -> (coq_init_constant "nat")
+let lt = function () -> (coq_init_constant "num.nat.lt")
+let le = function () -> Coqlib.lib_ref "num.nat.le"
+
+let ex = function () -> (coq_init_constant "core.ex.type")
+let nat = function () -> (coq_init_constant "num.nat.type")
let iter_ref () =
try find_reference ["Recdef"] "iter"
with Not_found -> user_err Pp.(str "module Recdef not loaded")
let iter_rd = function () -> (constr_of_monomorphic_global (delayed_force iter_ref))
-let eq = function () -> (coq_init_constant "eq")
+let eq = function () -> (coq_init_constant "core.eq.type")
let le_lt_SS = function () -> (constant ["Recdef"] "le_lt_SS")
-let le_lt_n_Sm = function () -> (coq_constant arith_Lt "le_lt_n_Sm")
-let le_trans = function () -> (coq_constant arith_Nat "le_trans")
-let le_lt_trans = function () -> (coq_constant arith_Nat "le_lt_trans")
-let lt_S_n = function () -> (coq_constant arith_Lt "lt_S_n")
-let le_n = function () -> (coq_init_constant "le_n")
+let le_lt_n_Sm = function () -> (coq_constant "num.nat.le_lt_n_Sm")
+let le_trans = function () -> (coq_constant "num.nat.le_trans")
+let le_lt_trans = function () -> (coq_constant "num.nat.le_lt_trans")
+let lt_S_n = function () -> (coq_constant "num.nat.lt_S_n")
+let le_n = function () -> (coq_init_constant "num.nat.le_n")
let coq_sig_ref = function () -> (find_reference ["Coq";"Init";"Specif"] "sig")
-let coq_O = function () -> (coq_init_constant "O")
-let coq_S = function () -> (coq_init_constant "S")
-let lt_n_O = function () -> (coq_constant arith_Nat "nlt_0_r")
+let coq_O = function () -> (coq_init_constant "num.nat.O")
+let coq_S = function () -> (coq_init_constant"num.nat.S")
+let lt_n_O = function () -> (coq_constant "num.nat.nlt_0_r")
let max_ref = function () -> (find_reference ["Recdef"] "max")
let max_constr = function () -> EConstr.of_constr (constr_of_monomorphic_global (delayed_force max_ref))
@@ -817,7 +811,7 @@ let rec prove_le g =
| App (c, [| x0 ; _ |]) ->
EConstr.isVar sigma x0 &&
Id.equal (destVar sigma x0) (destVar sigma x) &&
- EConstr.is_global sigma (le ()) c
+ EConstr.isRefX sigma (le ()) c
| _ -> false
in
let (h,t) = List.find (fun (_,t) -> matching_fun t) (pf_hyps_types g) in
@@ -1194,7 +1188,7 @@ let get_current_subgoals_types pstate =
exception EmptySubgoals
let build_and_l sigma l =
let and_constr = UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.and.type" in
- let conj_constr = Coqlib.build_coq_conj () in
+ let conj_constr = Coqlib.lib_ref "core.and.conj" in
let mk_and p1 p2 =
mkApp(EConstr.of_constr and_constr,[|p1;p2|]) in
let rec is_well_founded t =
diff --git a/plugins/syntax/r_syntax.ml b/plugins/syntax/r_syntax.ml
index f6fbdaa958..fa824a88ee 100644
--- a/plugins/syntax/r_syntax.ml
+++ b/plugins/syntax/r_syntax.ml
@@ -134,7 +134,7 @@ let r_of_rawnum ?loc (sign,n) =
| '+' -> Bigint.of_string (String.sub e 2 (String.length e - 2))
| '-' -> Bigint.(neg (of_string (String.sub e 2 (String.length e - 2))))
| _ -> Bigint.of_string (String.sub e 1 (String.length e - 1)) in
- Bigint.(sub e (of_int (String.length f))) in
+ Bigint.(sub e (of_int (String.length (String.concat "" (String.split_on_char '_' f))))) in
if Bigint.is_strictly_pos e then rmult n (izr (pow10 e))
else if Bigint.is_strictly_neg e then rdiv n (izr (pow10 (neg e)))
else n (* e = 0 *)
diff --git a/pretyping/evarsolve.ml b/pretyping/evarsolve.ml
index aafd662f7d..c9ccd668ca 100644
--- a/pretyping/evarsolve.ml
+++ b/pretyping/evarsolve.ml
@@ -78,9 +78,9 @@ let get_polymorphic_positions env sigma f =
match EConstr.kind sigma f with
| Ind (ind, u) | Construct ((ind, _), u) ->
let mib,oib = Inductive.lookup_mind_specif env ind in
- (match oib.mind_arity with
- | RegularArity _ -> assert false
- | TemplateArity templ -> templ.template_param_levels)
+ (match mib.mind_template with
+ | None -> assert false
+ | Some templ -> templ.template_param_levels)
| _ -> assert false
let refresh_universes ?(status=univ_rigid) ?(onlyalg=false) ?(refreshset=false)
diff --git a/pretyping/inductiveops.ml b/pretyping/inductiveops.ml
index a4406aeba1..01994a35c7 100644
--- a/pretyping/inductiveops.ml
+++ b/pretyping/inductiveops.ml
@@ -681,13 +681,17 @@ let type_of_inductive_knowing_conclusion env sigma ((mib,mip),u) conclty =
match mip.mind_arity with
| RegularArity s -> sigma, EConstr.of_constr (subst_instance_constr u s.mind_user_arity)
| TemplateArity ar ->
+ let templ = match mib.mind_template with
+ | None -> assert false
+ | Some t -> t
+ in
let _,scl = splay_arity env sigma conclty in
let scl = EConstr.ESorts.kind sigma scl in
let ctx = List.rev mip.mind_arity_ctxt in
let evdref = ref sigma in
let ctx =
instantiate_universes
- env evdref scl ar.template_level (ctx,ar.template_param_levels) in
+ env evdref scl ar.template_level (ctx,templ.template_param_levels) in
!evdref, EConstr.of_constr (mkArity (List.rev ctx,scl))
let type_of_projection_constant env (p,u) =
diff --git a/tactics/pfedit.ml b/tactics/pfedit.ml
index 7cdfd0637a..a7ba12bb1f 100644
--- a/tactics/pfedit.ml
+++ b/tactics/pfedit.ml
@@ -120,18 +120,14 @@ let build_constant_by_tactic ~name ?(opaque=Proof_global.Transparent) ctx sign ~
let evd = Evd.from_ctx ctx in
let goals = [ (Global.env_of_context sign , typ) ] in
let pf = Proof_global.start_proof ~name ~poly ~udecl:UState.default_univ_decl evd goals in
- try
- let pf, status = by tac pf in
- let open Proof_global in
- let { entries; universes } = close_proof ~opaque ~keep_body_ucst_separate:false (fun x -> x) pf in
- match entries with
- | [entry] ->
- entry, status, universes
- | _ ->
- CErrors.anomaly Pp.(str "[build_constant_by_tactic] close_proof returned more than one proof term")
- with reraise ->
- let reraise = Exninfo.capture reraise in
- Exninfo.iraise reraise
+ let pf, status = by tac pf in
+ let open Proof_global in
+ let { entries; universes } = close_proof ~opaque ~keep_body_ucst_separate:false (fun x -> x) pf in
+ match entries with
+ | [entry] ->
+ entry, status, universes
+ | _ ->
+ CErrors.anomaly Pp.(str "[build_constant_by_tactic] close_proof returned more than one proof term")
let build_by_tactic ?(side_eff=true) env sigma ~poly typ tac =
let name = Id.of_string ("temporary_proof"^string_of_int (next())) in
diff --git a/test-suite/output/RealSyntax.out b/test-suite/output/RealSyntax.out
index 2d877bd813..2b14ca7069 100644
--- a/test-suite/output/RealSyntax.out
+++ b/test-suite/output/RealSyntax.out
@@ -2,6 +2,8 @@
: R
(-31)%R
: R
+15e-1%R
+ : R
eq_refl : 102e-2 = 102e-2
: 102e-2 = 102e-2
eq_refl : 102e-1 = 102e-1
diff --git a/test-suite/output/RealSyntax.v b/test-suite/output/RealSyntax.v
index cb3bce70d4..7be8b18ac8 100644
--- a/test-suite/output/RealSyntax.v
+++ b/test-suite/output/RealSyntax.v
@@ -2,6 +2,8 @@ Require Import Reals.Rdefinitions.
Check 32%R.
Check (-31)%R.
+Check 1.5_%R.
+
Open Scope R_scope.
Check (eq_refl : 1.02 = IZR 102 / IZR (Z.pow_pos 10 2)).
diff --git a/theories/Arith/Lt.v b/theories/Arith/Lt.v
index 918b0efc5a..8904f3f936 100644
--- a/theories/Arith/Lt.v
+++ b/theories/Arith/Lt.v
@@ -41,11 +41,15 @@ Proof.
apply Nat.lt_succ_r.
Qed.
+Register lt_n_Sm_le as num.nat.lt_n_Sm_le.
+
Theorem le_lt_n_Sm n m : n <= m -> n < S m.
Proof.
apply Nat.lt_succ_r.
Qed.
+Register le_lt_n_Sm as num.nat.le_lt_n_Sm.
+
Hint Immediate lt_le_S: arith.
Hint Immediate lt_n_Sm_le: arith.
Hint Immediate le_lt_n_Sm: arith.
@@ -99,6 +103,8 @@ Proof.
apply Nat.succ_lt_mono.
Qed.
+Register lt_S_n as num.nat.lt_S_n.
+
Hint Resolve lt_n_Sn lt_S lt_n_S : arith.
Hint Immediate lt_S_n : arith.
@@ -133,6 +139,8 @@ Notation lt_trans := Nat.lt_trans (only parsing).
Notation lt_le_trans := Nat.lt_le_trans (only parsing).
Notation le_lt_trans := Nat.le_lt_trans (only parsing).
+Register le_lt_trans as num.nat.le_lt_trans.
+
Hint Resolve lt_trans lt_le_trans le_lt_trans: arith.
(** * Large = strict or equal *)
diff --git a/theories/Arith/PeanoNat.v b/theories/Arith/PeanoNat.v
index f12785029a..4657b7f46d 100644
--- a/theories/Arith/PeanoNat.v
+++ b/theories/Arith/PeanoNat.v
@@ -764,6 +764,9 @@ Infix "mod" := Nat.modulo (at level 40, no associativity) : nat_scope.
Hint Unfold Nat.le : core.
Hint Unfold Nat.lt : core.
+Register Nat.le_trans as num.nat.le_trans.
+Register Nat.nlt_0_r as num.nat.nlt_0_r.
+
(** [Nat] contains an [order] tactic for natural numbers *)
(** Note that [Nat.order] is domain-agnostic: it will not prove
diff --git a/theories/Arith/Wf_nat.v b/theories/Arith/Wf_nat.v
index 1c183930f9..c5a6651c05 100644
--- a/theories/Arith/Wf_nat.v
+++ b/theories/Arith/Wf_nat.v
@@ -34,6 +34,8 @@ Proof.
intros a. apply (H (S (f a))). auto with arith.
Defined.
+Register well_founded_ltof as num.nat.well_founded_ltof.
+
Theorem well_founded_gtof : well_founded gtof.
Proof.
exact well_founded_ltof.
diff --git a/theories/Init/Peano.v b/theories/Init/Peano.v
index 149a7a0cc5..beb06ea912 100644
--- a/theories/Init/Peano.v
+++ b/theories/Init/Peano.v
@@ -159,6 +159,8 @@ Inductive le (n:nat) : nat -> Prop :=
where "n <= m" := (le n m) : nat_scope.
+Register le_n as num.nat.le_n.
+
Hint Constructors le: core.
(*i equivalent to : "Hints Resolve le_n le_S : core." i*)
diff --git a/theories/Init/Wf.v b/theories/Init/Wf.v
index 998bbc7047..bd5185fdb0 100644
--- a/theories/Init/Wf.v
+++ b/theories/Init/Wf.v
@@ -32,11 +32,14 @@ Section Well_founded.
Inductive Acc (x: A) : Prop :=
Acc_intro : (forall y:A, R y x -> Acc y) -> Acc x.
+ Register Acc as core.wf.acc.
+
Lemma Acc_inv : forall x:A, Acc x -> forall y:A, R y x -> Acc y.
destruct 1; trivial.
Defined.
Global Arguments Acc_inv [x] _ [y] _, [x] _ y _.
+ Register Acc_inv as core.wf.acc_inv.
(** A relation is well-founded if every element is accessible *)
diff --git a/vernac/comInductive.ml b/vernac/comInductive.ml
index edb03a5c89..718e62b9b7 100644
--- a/vernac/comInductive.ml
+++ b/vernac/comInductive.ml
@@ -329,10 +329,7 @@ let template_polymorphism_candidate ~ctor_levels uctx params concl =
if not concltemplate then false
else
let conclu = Option.cata Sorts.univ_of_sort Univ.type0m_univ concl in
- let params, conclunivs =
- IndTyping.template_polymorphic_univs ~ctor_levels uctx params conclu
- in
- not (Univ.LSet.is_empty conclunivs)
+ Option.has_some @@ IndTyping.template_polymorphic_univs ~ctor_levels uctx params conclu
| Entries.Polymorphic_entry _ -> false
let check_param = function
@@ -370,6 +367,14 @@ let interp_mutual_inductive_constr ~sigma ~template ~udecl ~ctx_params ~indnames
(* Build the inductive entries *)
let entries = List.map4 (fun indname (templatearity, arity) concl (cnames,ctypes) ->
+ { mind_entry_typename = indname;
+ mind_entry_arity = arity;
+ mind_entry_consnames = cnames;
+ mind_entry_lc = ctypes
+ })
+ indnames arities arityconcl constructors
+ in
+ let template = List.map4 (fun indname (templatearity, _) concl (_, ctypes) ->
let template_candidate () =
templatearity ||
let ctor_levels =
@@ -385,22 +390,17 @@ let interp_mutual_inductive_constr ~sigma ~template ~udecl ~ctx_params ~indnames
in
template_polymorphism_candidate ~ctor_levels uctx ctx_params concl
in
- let template = match template with
+ match template with
| Some template ->
if poly && template then user_err
Pp.(strbrk "Template-polymorphism and universe polymorphism are not compatible.");
template
| None ->
should_auto_template indname (template_candidate ())
- in
- { mind_entry_typename = indname;
- mind_entry_arity = arity;
- mind_entry_template = template;
- mind_entry_consnames = cnames;
- mind_entry_lc = ctypes
- })
+ )
indnames arities arityconcl constructors
in
+ let is_template = List.for_all (fun t -> t) template in
(* Build the mutual inductive entry *)
let mind_ent =
{ mind_entry_params = ctx_params;
@@ -409,6 +409,7 @@ let interp_mutual_inductive_constr ~sigma ~template ~udecl ~ctx_params ~indnames
mind_entry_inds = entries;
mind_entry_private = if private_ind then Some false else None;
mind_entry_universes = uctx;
+ mind_entry_template = is_template;
mind_entry_cumulative = poly && cumulative;
}
in
diff --git a/vernac/record.ml b/vernac/record.ml
index 3e44cd85cc..065641989d 100644
--- a/vernac/record.ml
+++ b/vernac/record.ml
@@ -423,7 +423,13 @@ let declare_structure ~cumulative finite ubinders univs paramimpls params templa
let args = Context.Rel.to_extended_list mkRel nfields params in
let ind = applist (mkRel (ntypes - i + nparams + nfields), args) in
let type_constructor = it_mkProd_or_LetIn ind fields in
- let template =
+ { mind_entry_typename = id;
+ mind_entry_arity = arity;
+ mind_entry_consnames = [idbuild];
+ mind_entry_lc = [type_constructor] }
+ in
+ let blocks = List.mapi mk_block record_data in
+ let check_template (id, _, min_univ, _, _, fields, _, _) =
let template_candidate () =
(* we use some dummy values for the arities in the rel_context
as univs_of_constr doesn't care about localassums and
@@ -454,14 +460,8 @@ let declare_structure ~cumulative finite ubinders univs paramimpls params templa
| None, template ->
(* auto detect template *)
ComInductive.should_auto_template id (template && template_candidate ())
- in
- { mind_entry_typename = id;
- mind_entry_arity = arity;
- mind_entry_template = template;
- mind_entry_consnames = [idbuild];
- mind_entry_lc = [type_constructor] }
in
- let blocks = List.mapi mk_block record_data in
+ let template = List.for_all check_template record_data in
let primitive =
!primitive_flag &&
List.for_all (fun (_,_,_,_,_,fields,_,_) -> List.exists is_local_assum fields) record_data
@@ -473,6 +473,7 @@ let declare_structure ~cumulative finite ubinders univs paramimpls params templa
mind_entry_inds = blocks;
mind_entry_private = None;
mind_entry_universes = univs;
+ mind_entry_template = template;
mind_entry_cumulative = poly && cumulative;
}
in
diff --git a/vernac/vernacextend.ml b/vernac/vernacextend.ml
index e0afb7f483..5d38ea32be 100644
--- a/vernac/vernacextend.ml
+++ b/vernac/vernacextend.ml
@@ -92,28 +92,18 @@ let warn_deprecated_command =
(* Interpretation of a vernac command *)
let type_vernac opn converted_args ~atts =
- let phase = ref "Looking up command" in
- try
- let depr, callback = vinterp_map opn in
- let () = if depr then
+ let depr, callback = vinterp_map opn in
+ let () = if depr then
let rules = Egramml.get_extend_vernac_rule opn in
let pr_gram = function
- | Egramml.GramTerminal s -> str s
- | Egramml.GramNonTerminal _ -> str "_"
+ | Egramml.GramTerminal s -> str s
+ | Egramml.GramNonTerminal _ -> str "_"
in
let pr = pr_sequence pr_gram rules in
warn_deprecated_command pr;
- in
- phase := "Checking arguments";
- let hunk = callback converted_args in
- phase := "Executing command";
- hunk ~atts
- with
- | reraise ->
- let reraise = Exninfo.capture reraise in
- if !Flags.debug then
- Feedback.msg_debug (str"Vernac Interpreter " ++ str !phase);
- Exninfo.iraise reraise
+ in
+ let hunk = callback converted_args in
+ hunk ~atts
(** VERNAC EXTEND registering *)
diff --git a/vernac/vernacstate.ml b/vernac/vernacstate.ml
index 59557a60a6..280343f315 100644
--- a/vernac/vernacstate.ml
+++ b/vernac/vernacstate.ml
@@ -18,12 +18,9 @@ module Parser = struct
let parse ps entry pa =
Pcoq.unfreeze ps;
- Flags.with_option Flags.we_are_parsing (fun () ->
- try Pcoq.Entry.parse entry pa
- with e when CErrors.noncritical e ->
- let (e, info) = Exninfo.capture e in
- Exninfo.iraise (e, info))
- ()
+ Flags.with_option Flags.we_are_parsing
+ (fun () -> Pcoq.Entry.parse entry pa)
+ ()
end