aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore5
-rw-r--r--.gitlab-ci.yml2
-rw-r--r--CHANGES.md2
-rw-r--r--META.coq.in2
-rw-r--r--Makefile3
-rw-r--r--Makefile.build14
-rw-r--r--Makefile.checker4
-rw-r--r--Makefile.common2
-rw-r--r--Makefile.dev5
-rw-r--r--Makefile.ide2
-rw-r--r--appveyor.yml4
-rw-r--r--checker/check.mllib2
-rw-r--r--checker/closure.ml80
-rw-r--r--checker/closure.mli4
-rw-r--r--checker/reduction.ml15
-rw-r--r--config/config.mllib1
-rw-r--r--coq.opam24
-rw-r--r--coqide-server.opam20
-rw-r--r--coqide.opam19
-rw-r--r--default.nix6
-rw-r--r--dev/base_db1
-rw-r--r--dev/checker.dbg1
-rw-r--r--dev/checker_db36
-rw-r--r--dev/checker_dune_db5
-rw-r--r--dev/checker_printers.dbg35
-rw-r--r--dev/ci/appveyor.sh2
-rwxr-xr-xdev/ci/ci-basic-overlay.sh2
-rw-r--r--dev/ci/docker/bionic_coq/Dockerfile6
-rw-r--r--dev/ci/user-overlays/08671-mattam-plugin-tutorials.sh7
-rw-r--r--dev/ci/user-overlays/08684-maximedenes-cleanup-kernel-entries.sh9
-rw-r--r--dev/core.dbg4
-rw-r--r--dev/core_dune.dbg20
-rw-r--r--dev/db88
-rw-r--r--dev/doc/build-system.dune.md21
-rw-r--r--dev/dune25
-rwxr-xr-xdev/dune-dbg.in11
-rw-r--r--dev/dune-workspace.all2
-rw-r--r--dev/dune_db6
-rw-r--r--dev/top_printers.dbg85
-rw-r--r--doc/sphinx/credits.rst2
-rw-r--r--doc/sphinx/language/cic.rst4
-rw-r--r--doc/sphinx/proof-engine/proof-handling.rst5
-rw-r--r--doc/sphinx/proof-engine/ssreflect-proof-language.rst39
-rw-r--r--doc/stdlib/index-list.html.template1
-rw-r--r--dune1
-rw-r--r--dune-project2
-rw-r--r--engine/evarutil.ml29
-rw-r--r--engine/evarutil.mli14
-rw-r--r--engine/evd.ml137
-rw-r--r--engine/evd.mli39
-rw-r--r--engine/namegen.ml13
-rw-r--r--engine/proofview.ml58
-rw-r--r--engine/proofview.mli9
-rw-r--r--engine/termops.ml33
-rw-r--r--engine/univGen.ml19
-rw-r--r--engine/univGen.mli10
-rw-r--r--gramlib/LICENSE29
-rw-r--r--gramlib/dune3
-rw-r--r--gramlib/gramext.ml580
-rw-r--r--gramlib/gramext.mli73
-rw-r--r--gramlib/grammar.ml1223
-rw-r--r--gramlib/grammar.mli170
-rw-r--r--gramlib/plexing.ml217
-rw-r--r--gramlib/plexing.mli108
-rw-r--r--gramlib/ploc.ml176
-rw-r--r--gramlib/ploc.mli122
-rw-r--r--gramlib/token.ml37
-rw-r--r--gramlib/token.mli56
-rw-r--r--ide/wg_Command.ml8
-rw-r--r--interp/constrextern.ml9
-rw-r--r--interp/constrintern.ml52
-rw-r--r--interp/declare.ml9
-rw-r--r--interp/discharge.ml11
-rw-r--r--interp/impargs.ml6
-rw-r--r--interp/notation.ml6
-rw-r--r--interp/notation_ops.ml4
-rw-r--r--interp/syntax_def.ml7
-rw-r--r--kernel/cClosure.ml131
-rw-r--r--kernel/cClosure.mli46
-rw-r--r--kernel/constr.ml21
-rw-r--r--kernel/constr.mli4
-rw-r--r--kernel/conv_oracle.ml15
-rw-r--r--kernel/entries.ml10
-rw-r--r--kernel/environ.mli3
-rw-r--r--kernel/indtypes.ml3
-rw-r--r--kernel/names.ml2
-rw-r--r--kernel/names.mli3
-rw-r--r--kernel/reduction.ml6
-rw-r--r--kernel/safe_typing.ml277
-rw-r--r--kernel/term_typing.ml272
-rw-r--r--kernel/term_typing.mli42
-rw-r--r--kernel/typeops.ml78
-rw-r--r--kernel/typeops.mli30
-rw-r--r--lib/coqProject_file.ml7
-rw-r--r--lib/coqProject_file.mli1
-rw-r--r--lib/lib.mllib2
-rw-r--r--library/coqlib.ml12
-rw-r--r--library/global.ml44
-rw-r--r--library/global.mli10
-rw-r--r--man/coq-interface.137
-rw-r--r--man/coq-parser.130
-rw-r--r--man/dune10
-rw-r--r--parsing/cLexer.ml (renamed from parsing/cLexer.ml4)213
-rw-r--r--parsing/dune8
-rw-r--r--plugins/btauto/refl_btauto.ml2
-rw-r--r--plugins/extraction/table.ml4
-rw-r--r--plugins/firstorder/rules.ml7
-rw-r--r--plugins/funind/functional_principles_proofs.ml8
-rw-r--r--plugins/funind/indfun_common.ml10
-rw-r--r--plugins/funind/invfun.ml4
-rw-r--r--plugins/funind/plugin_base.dune1
-rw-r--r--plugins/funind/recdef.ml8
-rw-r--r--plugins/ltac/plugin_base.dune1
-rw-r--r--plugins/ltac/rewrite.ml16
-rw-r--r--plugins/ltac/tacenv.ml7
-rw-r--r--plugins/ltac/tacintern.ml13
-rw-r--r--plugins/ltac/tacinterp.ml9
-rw-r--r--plugins/nsatz/nsatz.ml4
-rw-r--r--plugins/omega/coq_omega.ml3
-rw-r--r--plugins/rtauto/refl_tauto.ml8
-rw-r--r--plugins/setoid_ring/newring.ml8
-rw-r--r--plugins/ssr/plugin_base.dune1
-rw-r--r--plugins/ssr/ssrcommon.ml19
-rw-r--r--plugins/ssr/ssrcommon.mli3
-rw-r--r--plugins/ssr/ssreflect.v18
-rw-r--r--plugins/ssr/ssrelim.ml11
-rw-r--r--plugins/ssr/ssrvernac.mlg2
-rw-r--r--plugins/ssrmatching/plugin_base.dune1
-rw-r--r--plugins/ssrmatching/ssrmatching.ml4
-rw-r--r--pretyping/cases.ml7
-rw-r--r--pretyping/cbv.ml68
-rw-r--r--pretyping/classops.ml13
-rw-r--r--pretyping/detyping.ml5
-rw-r--r--pretyping/dune2
-rw-r--r--pretyping/evarconv.ml3
-rw-r--r--pretyping/evarsolve.ml32
-rw-r--r--pretyping/indrec.ml3
-rw-r--r--pretyping/inferCumulativity.ml2
-rw-r--r--pretyping/pretyping.ml9
-rw-r--r--pretyping/recordops.ml3
-rw-r--r--pretyping/reductionops.ml3
-rw-r--r--pretyping/typeclasses.ml87
-rw-r--r--pretyping/typeclasses.mli12
-rw-r--r--printing/dune1
-rw-r--r--printing/prettyp.ml6
-rw-r--r--printing/printer.ml3
-rw-r--r--proofs/clenv.ml13
-rw-r--r--proofs/clenv.mli2
-rw-r--r--proofs/clenvtac.ml27
-rw-r--r--proofs/goal.ml13
-rw-r--r--proofs/goal.mli6
-rw-r--r--proofs/logic.ml4
-rw-r--r--proofs/proof.ml2
-rw-r--r--proofs/refine.ml2
-rw-r--r--proofs/tacmach.ml5
-rw-r--r--proofs/tacmach.mli2
-rw-r--r--tactics/autorewrite.ml2
-rw-r--r--tactics/class_tactics.ml111
-rw-r--r--tactics/eqdecide.ml3
-rw-r--r--tactics/hints.ml2
-rw-r--r--tactics/tactics.ml43
-rw-r--r--test-suite/bugs/closed/bug_8785.v44
-rw-r--r--test-suite/bugs/closed/bug_8794.v11
-rw-r--r--test-suite/coq-makefile/arg/_CoqProject2
-rw-r--r--test-suite/coq-makefile/compat-subdirs/_CoqProject2
-rw-r--r--test-suite/coq-makefile/coqdoc1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/coqdoc2/_CoqProject2
-rw-r--r--test-suite/coq-makefile/emptyprefix/_CoqProject2
-rw-r--r--test-suite/coq-makefile/extend-subdirs/_CoqProject2
-rw-r--r--test-suite/coq-makefile/findlib-package/_CoqProject2
-rw-r--r--test-suite/coq-makefile/latex1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/merlin1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/mlpack1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/mlpack2/_CoqProject2
-rw-r--r--test-suite/coq-makefile/multiroot/_CoqProject2
-rw-r--r--test-suite/coq-makefile/native1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/only/_CoqProject2
-rw-r--r--test-suite/coq-makefile/plugin1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/plugin2/_CoqProject2
-rw-r--r--test-suite/coq-makefile/plugin3/_CoqProject2
-rw-r--r--test-suite/coq-makefile/quick2vo/_CoqProject2
-rwxr-xr-xtest-suite/coq-makefile/template/init.sh2
-rw-r--r--test-suite/coq-makefile/template/src/test.mlg (renamed from test-suite/coq-makefile/template/src/test.ml4)8
-rw-r--r--test-suite/coq-makefile/uninstall1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/uninstall2/_CoqProject2
-rw-r--r--test-suite/coq-makefile/validate1/_CoqProject2
-rw-r--r--test-suite/coq-makefile/vio2vo/_CoqProject2
-rw-r--r--test-suite/misc/poly-capture-global-univs/_CoqProject2
-rw-r--r--test-suite/misc/poly-capture-global-univs/src/evil.ml49
-rw-r--r--test-suite/misc/poly-capture-global-univs/src/evil.mlg10
-rw-r--r--test-suite/output/RecordFieldErrors.out14
-rw-r--r--test-suite/output/RecordFieldErrors.v38
-rw-r--r--theories/Strings/ByteVector.v56
-rw-r--r--theories/Vectors/VectorDef.v3
-rw-r--r--tools/CoqMakefile.in23
-rw-r--r--tools/coq_makefile.ml9
-rw-r--r--tools/coqdep_common.ml8
-rw-r--r--tools/ocamllibdep.mll4
-rw-r--r--toplevel/dune1
-rw-r--r--vernac/assumptions.ml25
-rw-r--r--vernac/auto_ind_decl.ml20
-rw-r--r--vernac/class.ml7
-rw-r--r--vernac/classes.ml5
-rw-r--r--vernac/comInductive.ml9
-rw-r--r--vernac/dune1
-rw-r--r--vernac/himsg.ml7
-rw-r--r--vernac/indschemes.ml5
-rw-r--r--vernac/obligations.ml2
-rw-r--r--vernac/record.ml25
-rw-r--r--vernac/search.ml9
-rw-r--r--vernac/vernacentries.ml6
211 files changed, 4723 insertions, 1572 deletions
diff --git a/.gitignore b/.gitignore
index 63da6b4d0e..709e87cc9c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -55,7 +55,6 @@ config/coq_config.ml
config/coq_config.py
config/Info-*.plist
dev/ocamldebug-coq
-dev/camlp5.dbg
plugins/micromega/csdpcert
plugins/micromega/.micromega.ml.generated
kernel/byterun/dllcoqrun.so
@@ -68,6 +67,7 @@ time-of-build-after.log
.csdp.cache
test-suite/.lia.cache
test-suite/.nra.cache
+test-suite/.nia.cache
test-suite/trace
test-suite/misc/universes/all_stdlib.v
test-suite/misc/universes/universes.txt
@@ -135,7 +135,6 @@ coqpp/coqpp_parse.mli
g_*.ml
lib/coqProject_file.ml
-parsing/cLexer.ml
plugins/ltac/coretactics.ml
plugins/ltac/extratactics.ml
plugins/ltac/extraargs.ml
@@ -166,8 +165,6 @@ checker/esubst.mli
user-contrib
.*.sw*
.#*
-test-suite/.lia.cache
-test-suite/.nra.cache
plugins/ssr/ssrparser.ml
plugins/ssr/ssrvernac.ml
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1669145d9b..01931fd7ef 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,7 +9,7 @@ stages:
variables:
# Format: $IMAGE-V$DATE [Cache is not used as of today but kept here
# for reference]
- CACHEKEY: "bionic_coq-V2018-10-04-V1"
+ CACHEKEY: "bionic_coq-V2018-10-23-V1"
IMAGE: "$CI_REGISTRY_IMAGE:$CACHEKEY"
# By default, jobs run in the base switch; override to select another switch
OPAM_SWITCH: "base"
diff --git a/CHANGES.md b/CHANGES.md
index 865e1eeb95..ada68f97d5 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -241,6 +241,8 @@ Standard Library
- There are now conversions between `string` and `positive`, `Z`,
`nat`, and `N` in binary, octal, and hex.
+- Added `ByteVector` type that can convert to and from [string].
+
Display diffs between proof steps
- `coqtop` and `coqide` can now highlight the differences between proof steps
diff --git a/META.coq.in b/META.coq.in
index 1ccde1338f..16928587cb 100644
--- a/META.coq.in
+++ b/META.coq.in
@@ -25,6 +25,8 @@ package "config" (
directory = "config"
+ archive(byte) = "config.cma"
+ archive(native) = "config.cmxa"
)
package "clib" (
diff --git a/Makefile b/Makefile
index b74e4e5d4f..f2dc6d7750 100644
--- a/Makefile
+++ b/Makefile
@@ -57,6 +57,7 @@ FIND_SKIP_DIRS:='(' \
-name '_build' -o \
-name '_build_ci' -o \
-name '_install_ci' -o \
+ -name 'gramlib' -o \
-name 'user-contrib' -o \
-name 'test-suite' -o \
-name '.opamcache' -o \
@@ -261,7 +262,7 @@ cacheclean:
find theories plugins test-suite -name '.*.aux' -exec rm -f {} +
cleanconfig:
- rm -f config/Makefile config/coq_config.ml dev/ocamldebug-coq dev/camlp5.dbg config/Info-*.plist
+ rm -f config/Makefile config/coq_config.ml dev/ocamldebug-coq config/Info-*.plist
distclean: clean cleanconfig cacheclean timingclean
diff --git a/Makefile.build b/Makefile.build
index 4d19f9a2e1..08863014ea 100644
--- a/Makefile.build
+++ b/Makefile.build
@@ -406,7 +406,7 @@ grammar/%.cmi: grammar/%.mli
.PHONY: coqbinaries coqbyte
-coqbinaries: $(TOPBINOPT) $(COQTOPEXE) $(CHICKEN) $(CSDPCERT) $(FAKEIDE)
+coqbinaries: $(TOPBINOPT) $(COQTOPEXE) $(CHICKEN) $(CSDPCERT) $(FAKEIDE) $(GRAMMARCMA)
coqbyte: $(TOPBYTE) $(CHICKENBYTE)
# Special rule for coqtop, we imitate `ocamlopt` can delete the target
@@ -441,7 +441,7 @@ $(COQTOPBYTE): $(LINKCMO) $(LIBCOQRUN) $(COQTOP_BYTE)
$(LINKCMO) $(BYTEFLAGS) $(COQTOP_BYTE) -o $@
# For coqc
-COQCCMO:=clib/clib.cma lib/lib.cma toplevel/usage.cmo tools/coqc.cmo
+COQCCMO:=config/config.cma clib/clib.cma lib/lib.cma toplevel/usage.cmo tools/coqc.cmo
$(COQC): $(call bestobj, $(COQCCMO))
$(SHOW)'OCAMLBEST -o $@'
@@ -502,7 +502,7 @@ $(OCAMLLIBDEPBYTE): tools/ocamllibdep.cmo
# The full coqdep (unused by this build, but distributed by make install)
-COQDEPCMO:=clib/clib.cma lib/lib.cma tools/coqdep_lexer.cmo \
+COQDEPCMO:=config/config.cma clib/clib.cma lib/lib.cma tools/coqdep_lexer.cmo \
tools/coqdep_common.cmo tools/coqdep.cmo
$(COQDEP): $(call bestobj, $(COQDEPCMO))
@@ -513,7 +513,7 @@ $(COQDEPBYTE): $(COQDEPCMO)
$(SHOW)'OCAMLC -o $@'
$(HIDE)$(call ocamlbyte, $(SYSMOD))
-COQMAKEFILECMO:=clib/clib.cma lib/lib.cma tools/coq_makefile.cmo
+COQMAKEFILECMO:=config/config.cma clib/clib.cma lib/lib.cma tools/coq_makefile.cmo
$(COQMAKEFILE): $(call bestobj,$(COQMAKEFILECMO))
$(SHOW)'OCAMLBEST -o $@'
@@ -539,7 +539,7 @@ $(COQWCBYTE): tools/coqwc.cmo
$(SHOW)'OCAMLC -o $@'
$(HIDE)$(call ocamlbyte, -package str)
-COQDOCCMO:=clib/clib.cma lib/lib.cma $(addprefix tools/coqdoc/, \
+COQDOCCMO:=config/config.cma clib/clib.cma lib/lib.cma $(addprefix tools/coqdoc/, \
cdglobals.cmo alpha.cmo index.cmo tokens.cmo output.cmo cpretty.cmo main.cmo )
$(COQDOC): $(call bestobj, $(COQDOCCMO))
@@ -550,7 +550,7 @@ $(COQDOCBYTE): $(COQDOCCMO)
$(SHOW)'OCAMLC -o $@'
$(HIDE)$(call ocamlbyte, -package str,unix)
-COQWORKMGRCMO:=clib/clib.cma lib/lib.cma stm/spawned.cmo stm/coqworkmgrApi.cmo tools/coqworkmgr.cmo
+COQWORKMGRCMO:=config/config.cma clib/clib.cma lib/lib.cma stm/spawned.cmo stm/coqworkmgrApi.cmo tools/coqworkmgr.cmo
$(COQWORKMGR): $(call bestobj, $(COQWORKMGRCMO))
$(SHOW)'OCAMLBEST -o $@'
@@ -563,7 +563,7 @@ $(COQWORKMGRBYTE): $(COQWORKMGRCMO)
# fake_ide : for debugging or test-suite purpose, a fake ide simulating
# a connection to coqidetop
-FAKEIDECMO:=clib/clib.cma lib/lib.cma ide/protocol/ideprotocol.cma ide/document.cmo ide/fake_ide.cmo
+FAKEIDECMO:=config/config.cma clib/clib.cma lib/lib.cma ide/protocol/ideprotocol.cma ide/document.cmo ide/fake_ide.cmo
$(FAKEIDE): $(call bestobj, $(FAKEIDECMO)) | $(IDETOP)
$(SHOW)'OCAMLBEST -o $@'
diff --git a/Makefile.checker b/Makefile.checker
index 6c19a1a42b..e6b1541efa 100644
--- a/Makefile.checker
+++ b/Makefile.checker
@@ -49,7 +49,7 @@ checker/names.ml: kernel/names.ml
sed -i.bak '1i(* AUTOGENERATED FILE: DO NOT EDIT *)\n\n\n\n\n\n\n\n' $@ && rm $@.bak
ifeq ($(BEST),opt)
-$(CHICKEN): checker/check.cmxa checker/main.mli checker/main.ml
+$(CHICKEN): config/config.cmxa checker/check.cmxa checker/main.mli checker/main.ml
$(SHOW)'OCAMLOPT -o $@'
$(HIDE)$(OCAMLOPT) -linkpkg $(SYSMOD) $(CHKLIBS) $(OPTFLAGS) $(LINKMETADATA) -o $@ $^
$(STRIP_HIDE) $@
@@ -59,7 +59,7 @@ $(CHICKEN): $(CHICKENBYTE)
cp $< $@
endif
-$(CHICKENBYTE): checker/check.cma checker/main.mli checker/main.ml
+$(CHICKENBYTE): config/config.cma checker/check.cma checker/main.mli checker/main.ml
$(SHOW)'OCAMLC -o $@'
$(HIDE)$(OCAMLC) -linkpkg $(SYSMOD) $(CHKLIBS) $(BYTEFLAGS) $(CUSTOM) -o $@ $^
diff --git a/Makefile.common b/Makefile.common
index f90919a4bc..f2a11ee4b4 100644
--- a/Makefile.common
+++ b/Makefile.common
@@ -117,7 +117,7 @@ BYTERUN:=$(addprefix kernel/byterun/, \
# respecting this order is useful for developers that want to load or link
# the libraries directly
-CORECMA:=clib/clib.cma lib/lib.cma kernel/kernel.cma library/library.cma \
+CORECMA:=config/config.cma clib/clib.cma lib/lib.cma kernel/kernel.cma library/library.cma \
engine/engine.cma pretyping/pretyping.cma interp/interp.cma proofs/proofs.cma \
parsing/parsing.cma printing/printing.cma tactics/tactics.cma vernac/vernac.cma \
stm/stm.cma toplevel/toplevel.cma
diff --git a/Makefile.dev b/Makefile.dev
index 6a2a1b2101..54710b6690 100644
--- a/Makefile.dev
+++ b/Makefile.dev
@@ -20,10 +20,7 @@
DEBUGPRINTERS:=dev/top_printers.cmo dev/vm_printers.cmo dev/checker_printers.cmo
devel: printers
-printers: $(CORECMA) $(DEBUGPRINTERS) dev/camlp5.dbg
-
-dev/camlp5.dbg:
- echo "load_printer gramlib.cma" > $@
+printers: $(CORECMA) $(DEBUGPRINTERS)
############
# revision
diff --git a/Makefile.ide b/Makefile.ide
index cb55960203..6c069a1e50 100644
--- a/Makefile.ide
+++ b/Makefile.ide
@@ -43,7 +43,7 @@ IDESRCDIRS:= $(CORESRCDIRS) ide ide/protocol
COQIDEFLAGS=$(addprefix -I , $(IDESRCDIRS)) $(COQIDEINCLUDES)
-IDEDEPS:=clib/clib.cma lib/lib.cma ide/protocol/ideprotocol.cma
+IDEDEPS:=config/config.cma clib/clib.cma lib/lib.cma ide/protocol/ideprotocol.cma
IDECMA:=ide/ide.cma
IDETOPEXE=bin/coqidetop$(EXE)
IDETOP=bin/coqidetop.opt$(EXE)
diff --git a/appveyor.yml b/appveyor.yml
index cd3b88d007..c9c6bc0684 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,10 @@
version: '{branch}~{build}'
clone_depth: 10
+cache:
+ - C:\cygwin64 -> dev\ci\appveyor.bat
+ - C:\cygwin64\home\appveyor\.opam -> dev\ci\appveyor.sh
+
platform:
- x64
diff --git a/checker/check.mllib b/checker/check.mllib
index 139fa765b4..173ad1e325 100644
--- a/checker/check.mllib
+++ b/checker/check.mllib
@@ -1,5 +1,3 @@
-Coq_config
-
Analyze
Hook
Terminal
diff --git a/checker/closure.ml b/checker/closure.ml
index 5706011607..138499b0e6 100644
--- a/checker/closure.ml
+++ b/checker/closure.ml
@@ -121,9 +121,6 @@ let betaiotazeta = mkflags [fBETA;fIOTA;fZETA]
* abstractions, storing a representation (of type 'a) of the body of
* this constant or abstraction.
* * i_tab is the cache table of the results
- * * i_repr is the function to get the representation from the current
- * state of the cache and the body of the constant. The result
- * is stored in the table.
* * i_rels = (4,[(1,c);(3,d)]) means there are 4 free rel variables
* and only those with index 1 and 3 have bodies which are c and d resp.
*
@@ -156,33 +153,6 @@ end
module KeyTable = Hashtbl.Make(KeyHash)
-type 'a infos = {
- i_flags : reds;
- i_repr : 'a infos -> constr -> 'a;
- i_env : env;
- i_rels : int * (int * constr) list;
- i_tab : 'a KeyTable.t }
-
-let ref_value_cache info ref =
- try
- Some (KeyTable.find info.i_tab ref)
- with Not_found ->
- try
- let body =
- match ref with
- | RelKey n ->
- let (s,l) = info.i_rels in lift n (Int.List.assoc (s-n) l)
- | VarKey id -> raise Not_found
- | ConstKey cst -> constant_value info.i_env cst
- in
- let v = info.i_repr info body in
- KeyTable.add info.i_tab ref v;
- Some v
- with
- | Not_found (* List.assoc *)
- | NotEvaluableConst _ (* Const *)
- -> None
-
let defined_rels flags env =
(* if red_local_const (snd flags) then*)
fold_rel_context
@@ -193,16 +163,6 @@ let defined_rels flags env =
(rel_context env) ~init:(0,[])
(* else (0,[])*)
-let mind_equiv_infos info = mind_equiv info.i_env
-
-let create mk_cl flgs env =
- { i_flags = flgs;
- i_repr = mk_cl;
- i_env = env;
- i_rels = defined_rels flgs env;
- i_tab = KeyTable.create 17 }
-
-
(**********************************************************************)
(* Lazy reduction: the one used in kernel operations *)
@@ -255,6 +215,12 @@ and fterm =
| FCLOS of constr * fconstr subs
| FLOCKED
+type clos_infos = {
+ i_flags : reds;
+ i_env : env;
+ i_rels : int * (int * constr) list;
+ i_tab : fconstr KeyTable.t }
+
let fterm_of v = v.term
let set_norm v = v.norm <- Norm
@@ -372,6 +338,30 @@ let mk_clos e t =
let mk_clos_vect env v = Array.map (mk_clos env) v
+let inject = mk_clos (subs_id 0)
+
+let ref_value_cache info ref =
+ try
+ Some (KeyTable.find info.i_tab ref)
+ with Not_found ->
+ try
+ let body =
+ match ref with
+ | RelKey n ->
+ let (s,l) = info.i_rels in lift n (Int.List.assoc (s-n) l)
+ | VarKey id -> raise Not_found
+ | ConstKey cst -> constant_value info.i_env cst
+ in
+ let v = inject body in
+ KeyTable.add info.i_tab ref v;
+ Some v
+ with
+ | Not_found (* List.assoc *)
+ | NotEvaluableConst _ (* Const *)
+ -> None
+
+let mind_equiv_infos info = mind_equiv info.i_env
+
(* Translate the head constructor of t from constr to fconstr. This
function is parameterized by the function to apply on the direct
subterms.
@@ -783,21 +773,19 @@ let kh info v stk = fapp_stack(kni info v stk)
let whd_val info v =
with_stats (lazy (term_of_fconstr (kh info v [])))
-let inject = mk_clos (subs_id 0)
-
let whd_stack infos m stk =
let k = kni infos m stk in
let _ = fapp_stack k in (* to unlock Zupdates! *)
k
-(* cache of constants: the body is computed only when needed. *)
-type clos_infos = fconstr infos
-
let infos_env x = x.i_env
let infos_flags x = x.i_flags
let oracle_of_infos x = x.i_env.env_conv_oracle
let create_clos_infos flgs env =
- create (fun _ -> inject) flgs env
+ { i_flags = flgs;
+ i_env = env;
+ i_rels = defined_rels flgs env;
+ i_tab = KeyTable.create 17 }
let unfold_reference = ref_value_cache
diff --git a/checker/closure.mli b/checker/closure.mli
index cec785699d..4c6643754b 100644
--- a/checker/closure.mli
+++ b/checker/closure.mli
@@ -61,10 +61,6 @@ val betadeltaiotanolet : reds
type table_key = Constant.t puniverses tableKey
-type 'a infos
-val ref_value_cache: 'a infos -> table_key -> 'a option
-val create: ('a infos -> constr -> 'a) -> reds -> env -> 'a infos
-
(************************************************************************)
(*s Lazy reduction. *)
diff --git a/checker/reduction.ml b/checker/reduction.ml
index d36c0ef2c9..58a3f4e410 100644
--- a/checker/reduction.ml
+++ b/checker/reduction.ml
@@ -280,17 +280,26 @@ let get_strategy { var_opacity; cst_opacity } = function
with Not_found -> default_level)
| RelKey _ -> Expand
+let dep_order l2r k1 k2 = match k1, k2 with
+| RelKey _, RelKey _ -> l2r
+| RelKey _, (VarKey _ | ConstKey _) -> true
+| VarKey _, RelKey _ -> false
+| VarKey _, VarKey _ -> l2r
+| VarKey _, ConstKey _ -> true
+| ConstKey _, (RelKey _ | VarKey _) -> false
+| ConstKey _, ConstKey _ -> l2r
+
let oracle_order infos l2r k1 k2 =
let o = Closure.oracle_of_infos infos in
match get_strategy o k1, get_strategy o k2 with
- | Expand, Expand -> l2r
+ | Expand, Expand -> dep_order l2r k1 k2
| Expand, (Opaque | Level _) -> true
| (Opaque | Level _), Expand -> false
- | Opaque, Opaque -> l2r
+ | Opaque, Opaque -> dep_order l2r k1 k2
| Level _, Opaque -> true
| Opaque, Level _ -> false
| Level n1, Level n2 ->
- if Int.equal n1 n2 then l2r
+ if Int.equal n1 n2 then dep_order l2r k1 k2
else n1 < n2
let eq_table_key univ =
diff --git a/config/config.mllib b/config/config.mllib
new file mode 100644
index 0000000000..ce3ddfca69
--- /dev/null
+++ b/config/config.mllib
@@ -0,0 +1 @@
+Coq_config
diff --git a/coq.opam b/coq.opam
index f5f553af2c..ab18119ac4 100644
--- a/coq.opam
+++ b/coq.opam
@@ -1,18 +1,28 @@
-opam-version: "1.2"
+synopsis: "The Coq Proof Assistant"
+description: """
+Coq is a formal proof management system. It provides
+a formal language to write mathematical definitions, executable
+algorithms and theorems together with an environment for
+semi-interactive development of machine-checked proofs. Typical
+applications include the certification of properties of programming
+languages (e.g. the CompCert compiler certification project, or the
+Bedrock verified low-level programming library), the formalization of
+mathematics (e.g. the full formalization of the Feit-Thompson theorem
+or homotopy type theory) and teaching.
+"""
+opam-version: "2.0"
maintainer: "The Coq development team <coqdev@inria.fr>"
authors: "The Coq development team, INRIA, CNRS, and contributors."
homepage: "https://coq.inria.fr/"
bug-reports: "https://github.com/coq/coq/issues"
-dev-repo: "https://github.com/coq/coq.git"
+dev-repo: "git+https://github.com/coq/coq.git"
license: "LGPL-2.1"
-available: [ ocaml-version >= "4.05.0" ]
-
depends: [
- "dune" { build & >= "1.2.0" }
- "ocamlfind" { build }
+ "ocaml" { >= "4.05.0" }
+ "dune" { build & >= "1.4.0" }
"num"
- "camlp5" { >= "7.03" }
+ "camlp5" { >= "7.03" }
]
build-env: [
diff --git a/coqide-server.opam b/coqide-server.opam
index 546ce75dbd..ed6f3d98d8 100644
--- a/coqide-server.opam
+++ b/coqide-server.opam
@@ -1,15 +1,25 @@
-opam-version: "1.2"
+synopsis: "The Coq Proof Assistant"
+description: """
+Coq is a formal proof management system. It provides
+a formal language to write mathematical definitions, executable
+algorithms and theorems together with an environment for
+semi-interactive development of machine-checked proofs.
+
+This package provides the `coqidetop` language server, an
+implementation of Coq's [XML protocol](https://github.com/coq/coq/blob/master/dev/doc/xml-protocol.md)
+which allows clients, such as CoqIDE, to interact with Coq in a
+structured way.
+"""
+opam-version: "2.0"
maintainer: "The Coq development team <coqdev@inria.fr>"
authors: "The Coq development team, INRIA, CNRS, and contributors."
homepage: "https://coq.inria.fr/"
bug-reports: "https://github.com/coq/coq/issues"
-dev-repo: "https://github.com/coq/coq.git"
+dev-repo: "git+https://github.com/coq/coq.git"
license: "LGPL-2.1"
-available: [ocaml-version >= "4.05.0"]
-
depends: [
- "dune" { build & >= "1.2.0" }
+ "dune" { build & >= "1.4.0" }
"coq"
]
diff --git a/coqide.opam b/coqide.opam
index 17fb5dbbe2..314943a881 100644
--- a/coqide.opam
+++ b/coqide.opam
@@ -1,16 +1,23 @@
-opam-version: "1.2"
+synopsis: "The Coq Proof Assistant"
+description: """
+Coq is a formal proof management system. It provides
+a formal language to write mathematical definitions, executable
+algorithms and theorems together with an environment for
+semi-interactive development of machine-checked proofs.
+
+This package provides the CoqIDE, a graphical user interface for the
+development of interactive proofs.
+"""
+opam-version: "2.0"
maintainer: "The Coq development team <coqdev@inria.fr>"
authors: "The Coq development team, INRIA, CNRS, and contributors."
homepage: "https://coq.inria.fr/"
bug-reports: "https://github.com/coq/coq/issues"
-dev-repo: "https://github.com/coq/coq.git"
+dev-repo: "git+https://github.com/coq/coq.git"
license: "LGPL-2.1"
-available: [ocaml-version >= "4.05.0"]
-
depends: [
- "dune" { build & >= "1.2.0" }
- "coq"
+ "dune" { build & >= "1.4.0" }
"coqide-server"
"conf-gtksourceview"
"lablgtk" { >= "2.18.5" }
diff --git a/default.nix b/default.nix
index 61f434efe6..9a7afbe89e 100644
--- a/default.nix
+++ b/default.nix
@@ -23,8 +23,8 @@
{ pkgs ?
(import (fetchTarball {
- url = "https://github.com/NixOS/nixpkgs/archive/4c95508641fe780efe41885366e03339b95d04fb.tar.gz";
- sha256 = "1wjspwhzdb6d1kz4khd9l0fivxdk2nq3qvj93pql235sb7909ygx";
+ url = "https://github.com/NixOS/nixpkgs/archive/06613c189eebf4d6167d2d010a59cf38b43b6ff4.tar.gz";
+ sha256 = "13grhy3cvdwr7wql1rm5d7zsfpvp44cyjhiain4zs70r90q3swdg";
}) {})
, ocamlPackages ? pkgs.ocaml-ng.ocamlPackages_4_06
, buildIde ? true
@@ -47,7 +47,7 @@ stdenv.mkDerivation rec {
python2 time # coq-makefile timing tools
dune
]
- ++ (with ocamlPackages; [ ocaml findlib camlp5_strict num ])
+ ++ (with ocamlPackages; [ ocaml findlib camlp5 num ])
++ optional buildIde ocamlPackages.lablgtk
++ optionals buildDoc [
# Sphinx doc dependencies
diff --git a/dev/base_db b/dev/base_db
index e18ac534ac..155e9591e0 100644
--- a/dev/base_db
+++ b/dev/base_db
@@ -1,4 +1,5 @@
source core.dbg
+load_printer ltac_plugin.cmo
load_printer top_printers.cmo
install_printer Top_printers.ppid
install_printer Top_printers.ppsp
diff --git a/dev/checker.dbg b/dev/checker.dbg
index b2323b6175..b5b7f0e6d3 100644
--- a/dev/checker.dbg
+++ b/dev/checker.dbg
@@ -2,5 +2,6 @@ load_printer threads.cma
load_printer str.cma
load_printer clib.cma
load_printer dynlink.cma
+load_printer config.cma
load_printer lib.cma
load_printer check.cma
diff --git a/dev/checker_db b/dev/checker_db
index 327e636c57..fcb6f679ed 100644
--- a/dev/checker_db
+++ b/dev/checker_db
@@ -2,38 +2,4 @@ source checker.dbg
load_printer checker_printers.cmo
-install_printer Checker_printers.pP
-
-install_printer Checker_printers.ppfuture
-
-install_printer Checker_printers.ppid
-install_printer Checker_printers.pplab
-install_printer Checker_printers.ppmbid
-install_printer Checker_printers.ppdir
-install_printer Checker_printers.ppmp
-install_printer Checker_printers.ppcon
-install_printer Checker_printers.ppproj
-install_printer Checker_printers.ppkn
-install_printer Checker_printers.ppmind
-install_printer Checker_printers.ppind
-
-install_printer Checker_printers.ppbigint
-
-install_printer Checker_printers.ppintset
-install_printer Checker_printers.ppidset
-
-install_printer Checker_printers.ppidmapgen
-
-install_printer Checker_printers.ppididmap
-
-install_printer Checker_printers.ppuni
-install_printer Checker_printers.ppuni_level
-install_printer Checker_printers.ppuniverse_set
-install_printer Checker_printers.ppuniverse_instance
-install_printer Checker_printers.ppauniverse_context
-install_printer Checker_printers.ppuniverse_context
-install_printer Checker_printers.ppconstraints
-install_printer Checker_printers.ppuniverse_context_future
-install_printer Checker_printers.ppuniverses
-
-install_printer Checker_printers.pploc
+source checker_printers.dbg
diff --git a/dev/checker_dune_db b/dev/checker_dune_db
new file mode 100644
index 0000000000..cdb6a4b809
--- /dev/null
+++ b/dev/checker_dune_db
@@ -0,0 +1,5 @@
+source checker_dune.dbg
+
+load_printer checker_printers.cma
+
+source checker_printers.dbg
diff --git a/dev/checker_printers.dbg b/dev/checker_printers.dbg
new file mode 100644
index 0000000000..9ebbd74834
--- /dev/null
+++ b/dev/checker_printers.dbg
@@ -0,0 +1,35 @@
+install_printer Checker_printers.pP
+
+install_printer Checker_printers.ppfuture
+
+install_printer Checker_printers.ppid
+install_printer Checker_printers.pplab
+install_printer Checker_printers.ppmbid
+install_printer Checker_printers.ppdir
+install_printer Checker_printers.ppmp
+install_printer Checker_printers.ppcon
+install_printer Checker_printers.ppproj
+install_printer Checker_printers.ppkn
+install_printer Checker_printers.ppmind
+install_printer Checker_printers.ppind
+
+install_printer Checker_printers.ppbigint
+
+install_printer Checker_printers.ppintset
+install_printer Checker_printers.ppidset
+
+install_printer Checker_printers.ppidmapgen
+
+install_printer Checker_printers.ppididmap
+
+install_printer Checker_printers.ppuni
+install_printer Checker_printers.ppuni_level
+install_printer Checker_printers.ppuniverse_set
+install_printer Checker_printers.ppuniverse_instance
+install_printer Checker_printers.ppauniverse_context
+install_printer Checker_printers.ppuniverse_context
+install_printer Checker_printers.ppconstraints
+install_printer Checker_printers.ppuniverse_context_future
+install_printer Checker_printers.ppuniverses
+
+install_printer Checker_printers.pploc
diff --git a/dev/ci/appveyor.sh b/dev/ci/appveyor.sh
index d2176e326c..84fec71f7a 100644
--- a/dev/ci/appveyor.sh
+++ b/dev/ci/appveyor.sh
@@ -12,4 +12,4 @@ opam init -a mingw https://github.com/fdopen/opam-repository-mingw.git --comp $A
eval "$(opam config env)"
opam install -y num ocamlfind camlp5 ounit
-cd "$APPVEYOR_BUILD_FOLDER" && ./configure -local && make && make byte && make -C test-suite all INTERACTIVE= && make validate
+cd "$APPVEYOR_BUILD_FOLDER" && ./configure -local && make && make byte && make -C test-suite all INTERACTIVE= # && make validate
diff --git a/dev/ci/ci-basic-overlay.sh b/dev/ci/ci-basic-overlay.sh
index f422030b53..50d4d21637 100755
--- a/dev/ci/ci-basic-overlay.sh
+++ b/dev/ci/ci-basic-overlay.sh
@@ -238,7 +238,7 @@
########################################################################
# plugin_tutorial
########################################################################
-: "${plugin_tutorial_CI_REF:=14b2976cdf67db788b79d9421ce1e89bd15c7313}"
+: "${plugin_tutorial_CI_REF:=master}"
: "${plugin_tutorial_CI_GITURL:=https://github.com/ybertot/plugin_tutorials}"
: "${plugin_tutorial_CI_ARCHIVEURL:=${plugin_tutorial_CI_GITURL}/archive}"
diff --git a/dev/ci/docker/bionic_coq/Dockerfile b/dev/ci/docker/bionic_coq/Dockerfile
index f257c62dd3..098c950b32 100644
--- a/dev/ci/docker/bionic_coq/Dockerfile
+++ b/dev/ci/docker/bionic_coq/Dockerfile
@@ -1,4 +1,4 @@
-# CACHEKEY: "bionic_coq-V2018-10-04-V2"
+# CACHEKEY: "bionic_coq-V2018-10-23-V1"
# ^^ Update when modifying this file.
FROM ubuntu:bionic
@@ -37,7 +37,7 @@ ENV COMPILER="4.05.0"
# Common OPAM packages.
# `num` does not have a version number as the right version to install varies
# with the compiler version.
-ENV BASE_OPAM="num ocamlfind.1.8.0 dune.1.2.1 ounit.2.0.8 odoc.1.2.0" \
+ENV BASE_OPAM="num ocamlfind.1.8.0 dune.1.4.0 ounit.2.0.8 odoc.1.3.0" \
CI_OPAM="menhir.20180530 elpi.1.1.0 ocamlgraph.1.8.8"
# BASE switch; CI_OPAM contains Coq's CI dependencies.
@@ -56,7 +56,7 @@ RUN opam switch create "${COMPILER}+32bit" && eval $(opam env) && \
ENV COMPILER_EDGE="4.07.0" \
CAMLP5_VER_EDGE="7.06" \
COQIDE_OPAM_EDGE="lablgtk.2.18.6 conf-gtksourceview.2" \
- BASE_OPAM_EDGE="dune-release.0.3.0"
+ BASE_OPAM_EDGE="dune-release.1.1.0"
RUN opam switch create $COMPILER_EDGE && eval $(opam env) && \
opam install $BASE_OPAM $BASE_OPAM_EDGE camlp5.$CAMLP5_VER_EDGE $COQIDE_OPAM_EDGE
diff --git a/dev/ci/user-overlays/08671-mattam-plugin-tutorials.sh b/dev/ci/user-overlays/08671-mattam-plugin-tutorials.sh
new file mode 100644
index 0000000000..bd3e1bf7ff
--- /dev/null
+++ b/dev/ci/user-overlays/08671-mattam-plugin-tutorials.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+if [ "$CI_PULL_REQUEST" = "8741" ] || [ "$CI_BRANCH" = "typeclasses-functional-evar_map" ]; then
+ plugin_tutorial_CI_REF=pr8671-fix
+ plugin_tutorial_CI_GITURL=https://github.com/mattam82/plugin_tutorials
+
+fi
diff --git a/dev/ci/user-overlays/08684-maximedenes-cleanup-kernel-entries.sh b/dev/ci/user-overlays/08684-maximedenes-cleanup-kernel-entries.sh
new file mode 100644
index 0000000000..98530c825a
--- /dev/null
+++ b/dev/ci/user-overlays/08684-maximedenes-cleanup-kernel-entries.sh
@@ -0,0 +1,9 @@
+if [ "$CI_PULL_REQUEST" = "8684" ] || [ "$CI_BRANCH" = "kernel-entries-cleanup" ]; then
+
+ Elpi_CI_REF=kernel-entries-cleanup
+ Elpi_CI_GITURL=https://github.com/maximedenes/coq-elpi
+
+ Equations_CI_REF=kernel-entries-cleanup
+ Equations_CI_GITURL=https://github.com/maximedenes/Coq-Equations
+
+fi
diff --git a/dev/core.dbg b/dev/core.dbg
index 972ba701e4..f676b643e4 100644
--- a/dev/core.dbg
+++ b/dev/core.dbg
@@ -1,6 +1,7 @@
-source camlp5.dbg
load_printer threads.cma
load_printer str.cma
+load_printer gramlib.cma
+load_printer config.cma
load_printer clib.cma
load_printer dynlink.cma
load_printer lib.cma
@@ -16,4 +17,3 @@ load_printer tactics.cma
load_printer vernac.cma
load_printer stm.cma
load_printer toplevel.cma
-load_printer ltac_plugin.cmo
diff --git a/dev/core_dune.dbg b/dev/core_dune.dbg
new file mode 100644
index 0000000000..cf9c5bd39a
--- /dev/null
+++ b/dev/core_dune.dbg
@@ -0,0 +1,20 @@
+load_printer threads.cma
+load_printer str.cma
+load_printer gramlib.cma
+load_printer config.cma
+load_printer clib.cma
+load_printer dynlink.cma
+load_printer lib.cma
+load_printer byterun.cma
+load_printer kernel.cma
+load_printer library.cma
+load_printer engine.cma
+load_printer pretyping.cma
+load_printer interp.cma
+load_printer proofs.cma
+load_printer parsing.cma
+load_printer printing.cma
+load_printer tactics.cma
+load_printer vernac.cma
+load_printer stm.cma
+load_printer toplevel.cma
diff --git a/dev/db b/dev/db
index 2f8c13485a..8733c684af 100644
--- a/dev/db
+++ b/dev/db
@@ -1,88 +1,6 @@
source core.dbg
+
+load_printer ltac_plugin.cmo
load_printer top_printers.cmo
-install_printer Top_printers.pP
-install_printer Top_printers.ppfuture
-install_printer Top_printers.ppid
-install_printer Top_printers.pplab
-install_printer Top_printers.ppmbid
-install_printer Top_printers.ppdir
-install_printer Top_printers.ppmp
-install_printer Top_printers.ppcon
-install_printer Top_printers.ppproj
-install_printer Top_printers.ppkn
-install_printer Top_printers.ppmind
-install_printer Top_printers.ppind
-install_printer Top_printers.ppsp
-install_printer Top_printers.ppqualid
-install_printer Top_printers.ppclindex
-install_printer Top_printers.ppscheme
-install_printer Top_printers.ppwf_paths
-install_printer Top_printers.ppevar
-install_printer Top_printers.ppconstr
-install_printer Top_printers.ppsconstr
-install_printer Top_printers.ppeconstr
-install_printer Top_printers.ppconstr_expr
-install_printer Top_printers.ppglob_constr
-install_printer Top_printers.pppattern
-install_printer Top_printers.ppfconstr
-install_printer Top_printers.ppbigint
-install_printer Top_printers.ppintset
-install_printer Top_printers.ppidset
-install_printer Top_printers.ppidmapgen
-install_printer Top_printers.ppididmap
-install_printer Top_printers.ppconstrunderbindersidmap
-install_printer Top_printers.ppevarsubst
-install_printer Top_printers.ppunbound_ltac_var_map
-install_printer Top_printers.ppclosure
-install_printer Top_printers.ppclosedglobconstr
-install_printer Top_printers.ppclosedglobconstridmap
-install_printer Top_printers.ppglobal
-install_printer Top_printers.ppconst
-install_printer Top_printers.ppvar
-install_printer Top_printers.ppj
-install_printer Top_printers.ppsubst
-install_printer Top_printers.ppdelta
-install_printer Top_printers.pp_idpred
-install_printer Top_printers.pp_cpred
-install_printer Top_printers.pp_transparent_state
-install_printer Top_printers.pp_stack_t
-install_printer Top_printers.pp_cst_stack_t
-install_printer Top_printers.pp_state_t
-install_printer Top_printers.ppmetas
-install_printer Top_printers.ppevm
-install_printer Top_printers.ppexistentialset
-install_printer Top_printers.ppexistentialfilter
-install_printer Top_printers.ppclenv
-install_printer Top_printers.ppgoalgoal
-install_printer Top_printers.ppgoal
-install_printer Top_printers.pphintdb
-install_printer Top_printers.ppproofview
-install_printer Top_printers.ppopenconstr
-install_printer Top_printers.pproof
-install_printer Top_printers.ppuni
-install_printer Top_printers.ppuni_level
-install_printer Top_printers.ppuniverse_set
-install_printer Top_printers.ppuniverse_instance
-install_printer Top_printers.ppuniverse_context
-install_printer Top_printers.ppuniverse_context_set
-install_printer Top_printers.ppuniverse_subst
-install_printer Top_printers.ppuniverse_opt_subst
-install_printer Top_printers.ppuniverse_level_subst
-install_printer Top_printers.ppevar_universe_context
-install_printer Top_printers.ppconstraints
-install_printer Top_printers.ppuniverseconstraints
-install_printer Top_printers.ppuniverse_context_future
-install_printer Top_printers.ppcumulativity_info
-install_printer Top_printers.ppabstract_cumulativity_info
-install_printer Top_printers.ppuniverses
-install_printer Top_printers.ppnamedcontextval
-install_printer Top_printers.ppenv
-install_printer Top_printers.pptac
-install_printer Top_printers.ppobj
-install_printer Top_printers.pploc
-install_printer Top_printers.pp_argument_type
-install_printer Top_printers.pp_generic_argument
-install_printer Top_printers.ppgenarginfo
-install_printer Top_printers.ppgenargargt
-install_printer Top_printers.ppist
+source top_printers.dbg
diff --git a/dev/doc/build-system.dune.md b/dev/doc/build-system.dune.md
index 91ab57f1e9..0aeb30c4e8 100644
--- a/dev/doc/build-system.dune.md
+++ b/dev/doc/build-system.dune.md
@@ -68,6 +68,27 @@ Note that you must invoke the `#rectypes;;` toplevel flag in order to
use Coq libraries. The provided `.ocamlinit` file does this
automatically.
+## ocamldebug
+
+You can use `ocamldebug` with Dune; after a build, do:
+
+```
+dune exec dev/dune-dbg
+(ocd) source dune_db
+```
+
+or
+
+```
+dune exec dev/dune-dbg checker
+(ocd) source checker_dune_db
+```
+
+for the checker. Unfortunately, dependency handling here is not fully
+refined, so you need to build enough of Coq once to use this target
+[it will then correctly compute the deps and rebuild if you call the
+script again]. This will be fixed in the future.
+
## Compositionality, developer and release modes.
By default [in "developer mode"], Dune will compose all the packages
diff --git a/dev/dune b/dev/dune
new file mode 100644
index 0000000000..fd6c8cf32c
--- /dev/null
+++ b/dev/dune
@@ -0,0 +1,25 @@
+(library
+ (name top_printers)
+ (public_name coq.top_printers)
+ (synopsis "Coq's Debug Printers")
+ (wrapped false)
+ (modules :standard \ checker_printers)
+ (libraries coq.toplevel coq.plugins.ltac))
+
+(library
+ (name checker_printers)
+ (public_name coq.checker_printers)
+ (synopsis "Coq's Debug Printers [for the Checker]")
+ (wrapped false)
+ (flags :standard -open Checklib)
+ (modules checker_printers)
+ (libraries coq.checklib))
+
+(rule
+ (targets dune-dbg)
+ (deps dune-dbg.in
+ ../checker/main.bc
+ ../topbin/coqtop_byte_bin.bc
+ ; This is not enough as the call to `ocamlfind` will fail :/
+ top_printers.cma)
+ (action (copy dune-dbg.in dune-dbg)))
diff --git a/dev/dune-dbg.in b/dev/dune-dbg.in
new file mode 100755
index 0000000000..464e026400
--- /dev/null
+++ b/dev/dune-dbg.in
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+# Run in a proper install dune env.
+case $1 in
+checker)
+ ocamldebug `ocamlfind query -recursive -i-format coq.checker_printers` -I +threads -I dev _build/default/checker/main.bc
+ ;;
+*)
+ ocamldebug `ocamlfind query -recursive -i-format coq.top_printers` -I +threads -I dev _build/default/topbin/coqtop_byte_bin.bc
+ ;;
+esac
diff --git a/dev/dune-workspace.all b/dev/dune-workspace.all
index 93b807d5e3..f45f6de529 100644
--- a/dev/dune-workspace.all
+++ b/dev/dune-workspace.all
@@ -1,4 +1,4 @@
-(lang dune 1.2)
+(lang dune 1.4)
; Add custom flags here. Default developer profile is `dev`
(context (opam (switch 4.05.0)))
diff --git a/dev/dune_db b/dev/dune_db
new file mode 100644
index 0000000000..f920f7c75c
--- /dev/null
+++ b/dev/dune_db
@@ -0,0 +1,6 @@
+source core_dune.dbg
+
+load_printer ltac_plugin.cma
+load_printer top_printers.cma
+
+source top_printers.dbg
diff --git a/dev/top_printers.dbg b/dev/top_printers.dbg
new file mode 100644
index 0000000000..eab88c7290
--- /dev/null
+++ b/dev/top_printers.dbg
@@ -0,0 +1,85 @@
+install_printer Top_printers.pP
+install_printer Top_printers.ppfuture
+install_printer Top_printers.ppid
+install_printer Top_printers.pplab
+install_printer Top_printers.ppmbid
+install_printer Top_printers.ppdir
+install_printer Top_printers.ppmp
+install_printer Top_printers.ppcon
+install_printer Top_printers.ppproj
+install_printer Top_printers.ppkn
+install_printer Top_printers.ppmind
+install_printer Top_printers.ppind
+install_printer Top_printers.ppsp
+install_printer Top_printers.ppqualid
+install_printer Top_printers.ppclindex
+install_printer Top_printers.ppscheme
+install_printer Top_printers.ppwf_paths
+install_printer Top_printers.ppevar
+install_printer Top_printers.ppconstr
+install_printer Top_printers.ppsconstr
+install_printer Top_printers.ppeconstr
+install_printer Top_printers.ppconstr_expr
+install_printer Top_printers.ppglob_constr
+install_printer Top_printers.pppattern
+install_printer Top_printers.ppfconstr
+install_printer Top_printers.ppbigint
+install_printer Top_printers.ppintset
+install_printer Top_printers.ppidset
+install_printer Top_printers.ppidmapgen
+install_printer Top_printers.ppididmap
+install_printer Top_printers.ppconstrunderbindersidmap
+install_printer Top_printers.ppevarsubst
+install_printer Top_printers.ppunbound_ltac_var_map
+install_printer Top_printers.ppclosure
+install_printer Top_printers.ppclosedglobconstr
+install_printer Top_printers.ppclosedglobconstridmap
+install_printer Top_printers.ppglobal
+install_printer Top_printers.ppconst
+install_printer Top_printers.ppvar
+install_printer Top_printers.ppj
+install_printer Top_printers.ppsubst
+install_printer Top_printers.ppdelta
+install_printer Top_printers.pp_idpred
+install_printer Top_printers.pp_cpred
+install_printer Top_printers.pp_transparent_state
+install_printer Top_printers.pp_stack_t
+install_printer Top_printers.pp_cst_stack_t
+install_printer Top_printers.pp_state_t
+install_printer Top_printers.ppmetas
+install_printer Top_printers.ppevm
+install_printer Top_printers.ppexistentialset
+install_printer Top_printers.ppexistentialfilter
+install_printer Top_printers.ppclenv
+install_printer Top_printers.ppgoalgoal
+install_printer Top_printers.ppgoal
+install_printer Top_printers.pphintdb
+install_printer Top_printers.ppproofview
+install_printer Top_printers.ppopenconstr
+install_printer Top_printers.pproof
+install_printer Top_printers.ppuni
+install_printer Top_printers.ppuni_level
+install_printer Top_printers.ppuniverse_set
+install_printer Top_printers.ppuniverse_instance
+install_printer Top_printers.ppuniverse_context
+install_printer Top_printers.ppuniverse_context_set
+install_printer Top_printers.ppuniverse_subst
+install_printer Top_printers.ppuniverse_opt_subst
+install_printer Top_printers.ppuniverse_level_subst
+install_printer Top_printers.ppevar_universe_context
+install_printer Top_printers.ppconstraints
+install_printer Top_printers.ppuniverseconstraints
+install_printer Top_printers.ppuniverse_context_future
+install_printer Top_printers.ppcumulativity_info
+install_printer Top_printers.ppabstract_cumulativity_info
+install_printer Top_printers.ppuniverses
+install_printer Top_printers.ppnamedcontextval
+install_printer Top_printers.ppenv
+install_printer Top_printers.pptac
+install_printer Top_printers.ppobj
+install_printer Top_printers.pploc
+install_printer Top_printers.pp_argument_type
+install_printer Top_printers.pp_generic_argument
+install_printer Top_printers.ppgenarginfo
+install_printer Top_printers.ppgenargargt
+install_printer Top_printers.ppist
diff --git a/doc/sphinx/credits.rst b/doc/sphinx/credits.rst
index ffdc4f3ec6..57f1174d59 100644
--- a/doc/sphinx/credits.rst
+++ b/doc/sphinx/credits.rst
@@ -120,7 +120,7 @@ G. Dowek, allowed hierarchical developments of mathematical theories.
This high-level language was called the *Mathematical Vernacular*.
Furthermore, an interactive *Theorem Prover* permitted the incremental
construction of proof trees in a top-down manner, subgoaling recursively
-and backtracking from dead-alleys. The theorem prover executed tactics
+and backtracking from dead-ends. The theorem prover executed tactics
written in CAML, in the LCF fashion. A basic set of tactics was
predefined, which the user could extend by his own specific tactics.
This system (Version 4.10) was released in 1989. Then, the system was
diff --git a/doc/sphinx/language/cic.rst b/doc/sphinx/language/cic.rst
index 381f8bb661..835d6dcaa6 100644
--- a/doc/sphinx/language/cic.rst
+++ b/doc/sphinx/language/cic.rst
@@ -533,10 +533,10 @@ Convertibility
Let us write :math:`E[Γ] ⊢ t \triangleright u` for the contextual closure of the
relation :math:`t` reduces to :math:`u` in the global environment
:math:`E` and local context :math:`Γ` with one of the previous
-reductions β, ι, δ or ζ.
+reductions β, δ, ι or ζ.
We say that two terms :math:`t_1` and :math:`t_2` are
-*βιδζη-convertible*, or simply *convertible*, or *equivalent*, in the
+*βδιζη-convertible*, or simply *convertible*, or *equivalent*, in the
global environment :math:`E` and local context :math:`Γ` iff there
exist terms :math:`u_1` and :math:`u_2` such that :math:`E[Γ] ⊢ t_1 \triangleright
… \triangleright u_1` and :math:`E[Γ] ⊢ t_2 \triangleright … \triangleright u_2` and either :math:`u_1` and
diff --git a/doc/sphinx/proof-engine/proof-handling.rst b/doc/sphinx/proof-engine/proof-handling.rst
index c802f44ac1..741f9fe5b0 100644
--- a/doc/sphinx/proof-engine/proof-handling.rst
+++ b/doc/sphinx/proof-engine/proof-handling.rst
@@ -144,8 +144,9 @@ list of assertion commands is given in :ref:`Assertions`. The command
the proof is a subset of the declared one.
The set of declared variables is closed under type dependency. For
- example if ``T`` is variable and a is a variable of type ``T``, the commands
- ``Proof using a`` and ``Proof using T a`` are actually equivalent.
+ example, if ``T`` is a variable and ``a`` is a variable of type
+ ``T``, then the commands ``Proof using a`` and ``Proof using T a``
+ are equivalent.
.. cmdv:: Proof using {+ @ident } with @tactic
diff --git a/doc/sphinx/proof-engine/ssreflect-proof-language.rst b/doc/sphinx/proof-engine/ssreflect-proof-language.rst
index 52609546d5..3ca0ffe678 100644
--- a/doc/sphinx/proof-engine/ssreflect-proof-language.rst
+++ b/doc/sphinx/proof-engine/ssreflect-proof-language.rst
@@ -916,11 +916,8 @@ but also folds ``x`` in the goal.
.. coqtop:: reset
From Coq Require Import ssreflect.
- Set Implicit Arguments.
- Unset Strict Implicit.
- Unset Printing Implicit Defensive.
- .. coqtop:: all undo
+ .. coqtop:: all
Lemma test x t (Hx : x = 3) : x + t = 4.
set z := 3 in Hx.
@@ -929,6 +926,10 @@ If the localization also mentions the goal, then the result is the following one
.. example::
+ .. coqtop:: reset
+
+ From Coq Require Import ssreflect.
+
.. coqtop:: all
Lemma test x t (Hx : x = 3) : x + t = 4.
@@ -2485,8 +2486,7 @@ destruction of existential assumptions like in the tactic:
.. coqtop:: all
Lemma test : True.
- have [x Px]: exists x : nat, x > 0.
- Focus 2.
+ have [x Px]: exists x : nat, x > 0; last first.
An alternative use of the ``have`` tactic is to provide the explicit proof
term for the intermediate lemma, using tactics of the form:
@@ -2564,8 +2564,7 @@ copying the goal itself.
.. coqtop:: all
Lemma test : True.
- have suff H : 2 + 2 = 3.
- Focus 2.
+ have suff H : 2 + 2 = 3; last first.
Note that H is introduced in the second goal.
@@ -2852,8 +2851,7 @@ pattern will be used to process its instance.
.. coqtop:: all
Lemma simple n (ngt0 : 0 < n ) : P n.
- gen have ltnV, /andP[nge0 neq0] : n ngt0 / (0 <= n) && (n != 0).
- Focus 2.
+ gen have ltnV, /andP[nge0 neq0] : n ngt0 / (0 <= n) && (n != 0); last first.
.. _advanced_generalization_ssr:
@@ -3556,6 +3554,7 @@ corresponding new goals will be generated.
Set Implicit Arguments.
Unset Strict Implicit.
Unset Printing Implicit Defensive.
+ Set Warnings "-notation-overridden".
.. coqtop:: all
@@ -3756,9 +3755,10 @@ which the function is supplied:
:name: congr
This tactic:
-+ checks that the goal is a Leibniz equality
-+ matches both sides of this equality with “term applied to some arguments”, inferring the right number of arguments from the goal and the type of term. This may expand some definitions or fixpoints.
-+ generates the subgoals corresponding to pairwise equalities of the arguments present in the goal.
+
+ + checks that the goal is a Leibniz equality;
+ + matches both sides of this equality with “term applied to some arguments”, inferring the right number of arguments from the goal and the type of term. This may expand some definitions or fixpoints;
+ + generates the subgoals corresponding to pairwise equalities of the arguments present in the goal.
The goal can be a non dependent product ``P -> Q``. In that case, the
system asserts the equation ``P = Q``, uses it to solve the goal, and
@@ -4918,7 +4918,7 @@ which produces the converse implication. In both cases, the two
first Prop arguments are implicit.
If ``term`` is an instance of the ``reflect`` predicate, then ``A`` will be one
-of the defined view hints for the ``reflec``t predicate, which are by
+of the defined view hints for the ``reflect`` predicate, which are by
default the ones present in the file ``ssrbool.v``. These hints are not
only used for choosing the appropriate direction of the translation,
but they also allow complex transformation, involving negations.
@@ -4933,9 +4933,9 @@ but they also allow complex transformation, involving negations.
Unset Printing Implicit Defensive.
Section Test.
- .. coqtop:: in
+ .. coqtop:: all
- Lemma introN : forall (P : Prop) (b : bool), reflect P b -> ~ P -> ~~b.
+ Check introN.
.. coqtop:: all
@@ -4945,12 +4945,11 @@ but they also allow complex transformation, involving negations.
In fact this last script does not
exactly use the hint ``introN``, but the more general hint:
- .. coqtop:: in
+ .. coqtop:: all
- Lemma introNTF : forall (P : Prop) (b c : bool),
- reflect P b -> (if c then ~ P else P) -> ~~ b = c.
+ Check introNTF.
- The lemma ` `introN`` is an instantiation of introNF using c := true.
+ The lemma ``introN`` is an instantiation of ``introNF`` using ``c := true``.
Note that views, being part of :token:`i_pattern`, can be used to interpret
assertions too. For example the following script asserts ``a && b`` but
diff --git a/doc/stdlib/index-list.html.template b/doc/stdlib/index-list.html.template
index 4cbf75b715..e8f6decfbf 100644
--- a/doc/stdlib/index-list.html.template
+++ b/doc/stdlib/index-list.html.template
@@ -502,6 +502,7 @@ through the <tt>Require Import</tt> command.</p>
theories/Strings/BinaryString.v
theories/Strings/HexString.v
theories/Strings/OctalString.v
+ theories/Strings/ByteVector.v
</dd>
<dt> <b>Reals</b>:
diff --git a/dune b/dune
index b4a5266125..aad60d6d46 100644
--- a/dune
+++ b/dune
@@ -38,4 +38,5 @@
; Use summary.log as the target
(alias
(name runtest)
+ (package coqide-server)
(deps test-suite/summary.log))
diff --git a/dune-project b/dune-project
index 607e5a68a5..85238c70c5 100644
--- a/dune-project
+++ b/dune-project
@@ -1,3 +1,3 @@
-(lang dune 1.2)
+(lang dune 1.4)
(name coq)
diff --git a/engine/evarutil.ml b/engine/evarutil.ml
index f9d9ce3569..4e1636e321 100644
--- a/engine/evarutil.ml
+++ b/engine/evarutil.ml
@@ -405,12 +405,13 @@ let push_rel_context_to_named_context ?hypnaming env sigma typ =
let default_source = Loc.tag @@ Evar_kinds.InternalHole
-let new_pure_evar_full evd evi =
- let (evd, evk) = Evd.new_evar evd evi in
+let new_pure_evar_full evd ?typeclass_candidate evi =
+ let (evd, evk) = Evd.new_evar evd ?typeclass_candidate evi in
let evd = Evd.declare_future_goal evk evd in
(evd, evk)
-let new_pure_evar?(src=default_source) ?(filter = Filter.identity) ?candidates ?(store = Store.empty) ?naming ?(principal=false) sign evd typ =
+let new_pure_evar?(src=default_source) ?(filter = Filter.identity) ?candidates ?naming ?typeclass_candidate
+ ?(principal=false) sign evd typ =
let default_naming = IntroAnonymous in
let naming = Option.default default_naming naming in
let name = match naming with
@@ -427,34 +428,34 @@ let new_pure_evar?(src=default_source) ?(filter = Filter.identity) ?candidates ?
evar_body = Evar_empty;
evar_filter = filter;
evar_source = src;
- evar_candidates = candidates;
- evar_extra = store; }
+ evar_candidates = candidates }
in
- let (evd, newevk) = Evd.new_evar evd ?name evi in
+ let typeclass_candidate = if principal then Some false else typeclass_candidate in
+ let (evd, newevk) = Evd.new_evar evd ?name ?typeclass_candidate evi in
let evd =
if principal then Evd.declare_principal_goal newevk evd
else Evd.declare_future_goal newevk evd
in
(evd, newevk)
-let new_evar_instance ?src ?filter ?candidates ?store ?naming ?principal sign evd typ instance =
+let new_evar_instance ?src ?filter ?candidates ?naming ?typeclass_candidate ?principal sign evd typ instance =
let open EConstr in
assert (not !Flags.debug ||
List.distinct (ids_of_named_context (named_context_of_val sign)));
- let (evd, newevk) = new_pure_evar sign evd ?src ?filter ?candidates ?store ?naming ?principal typ in
+ let (evd, newevk) = new_pure_evar sign evd ?src ?filter ?candidates ?naming ?typeclass_candidate ?principal typ in
evd, mkEvar (newevk,Array.of_list instance)
-let new_evar_from_context ?src ?filter ?candidates ?store ?naming ?principal sign evd typ =
+let new_evar_from_context ?src ?filter ?candidates ?naming ?typeclass_candidate ?principal sign evd typ =
let instance = List.map (NamedDecl.get_id %> EConstr.mkVar) (named_context_of_val sign) in
let instance =
match filter with
| None -> instance
| Some filter -> Filter.filter_list filter instance in
- new_evar_instance sign evd typ ?src ?filter ?candidates ?store ?naming ?principal instance
+ new_evar_instance sign evd typ ?src ?filter ?candidates ?naming ?principal instance
(* [new_evar] declares a new existential in an env env with type typ *)
(* Converting the env into the sign of the evar to define *)
-let new_evar ?src ?filter ?candidates ?store ?naming ?principal ?hypnaming env evd typ =
+let new_evar ?src ?filter ?candidates ?naming ?typeclass_candidate ?principal ?hypnaming env evd typ =
let sign,typ',instance,subst = push_rel_context_to_named_context ?hypnaming env evd typ in
let map c = csubst_subst subst c in
let candidates = Option.map (fun l -> List.map map l) candidates in
@@ -462,11 +463,11 @@ let new_evar ?src ?filter ?candidates ?store ?naming ?principal ?hypnaming env e
match filter with
| None -> instance
| Some filter -> Filter.filter_list filter instance in
- new_evar_instance sign evd typ' ?src ?filter ?candidates ?store ?naming ?principal instance
+ new_evar_instance sign evd typ' ?src ?filter ?candidates ?naming ?typeclass_candidate ?principal instance
let new_type_evar ?src ?filter ?naming ?principal ?hypnaming env evd rigid =
let (evd', s) = new_sort_variable rigid evd in
- let (evd', e) = new_evar env evd' ?src ?filter ?naming ?principal ?hypnaming (EConstr.mkSort s) in
+ let (evd', e) = new_evar env evd' ?src ?filter ?naming ~typeclass_candidate:false ?principal ?hypnaming (EConstr.mkSort s) in
evd', (e, s)
let new_Type ?(rigid=Evd.univ_flexible) evd =
@@ -714,7 +715,7 @@ let rec advance sigma evk =
match evi.evar_body with
| Evar_empty -> Some evk
| Evar_defined v ->
- match is_restricted_evar evi with
+ match is_restricted_evar sigma evk with
| Some evk -> advance sigma evk
| None -> None
diff --git a/engine/evarutil.mli b/engine/evarutil.mli
index 11e07175e3..0c8d8c9b8a 100644
--- a/engine/evarutil.mli
+++ b/engine/evarutil.mli
@@ -27,8 +27,9 @@ val mk_new_meta : unit -> constr
val new_evar_from_context :
?src:Evar_kinds.t Loc.located -> ?filter:Filter.t ->
- ?candidates:constr list -> ?store:Store.t ->
+ ?candidates:constr list ->
?naming:intro_pattern_naming_expr ->
+ ?typeclass_candidate:bool ->
?principal:bool ->
named_context_val -> evar_map -> types -> evar_map * EConstr.t
@@ -40,19 +41,21 @@ type naming_mode =
val new_evar :
?src:Evar_kinds.t Loc.located -> ?filter:Filter.t ->
- ?candidates:constr list -> ?store:Store.t ->
+ ?candidates:constr list ->
?naming:intro_pattern_naming_expr ->
+ ?typeclass_candidate:bool ->
?principal:bool -> ?hypnaming:naming_mode ->
env -> evar_map -> types -> evar_map * EConstr.t
val new_pure_evar :
?src:Evar_kinds.t Loc.located -> ?filter:Filter.t ->
- ?candidates:constr list -> ?store:Store.t ->
+ ?candidates:constr list ->
?naming:intro_pattern_naming_expr ->
+ ?typeclass_candidate:bool ->
?principal:bool ->
named_context_val -> evar_map -> types -> evar_map * Evar.t
-val new_pure_evar_full : evar_map -> evar_info -> evar_map * Evar.t
+val new_pure_evar_full : evar_map -> ?typeclass_candidate:bool -> evar_info -> evar_map * Evar.t
(** Create a new Type existential variable, as we keep track of
them during type-checking and unification. *)
@@ -77,7 +80,8 @@ val new_global : evar_map -> GlobRef.t -> evar_map * constr
as a telescope) is [sign] *)
val new_evar_instance :
?src:Evar_kinds.t Loc.located -> ?filter:Filter.t -> ?candidates:constr list ->
- ?store:Store.t -> ?naming:intro_pattern_naming_expr ->
+ ?naming:intro_pattern_naming_expr ->
+ ?typeclass_candidate:bool ->
?principal:bool ->
named_context_val -> evar_map -> types ->
constr list -> evar_map * constr
diff --git a/engine/evd.ml b/engine/evd.ml
index eafdc4cb46..3a77a2b440 100644
--- a/engine/evd.ml
+++ b/engine/evd.ml
@@ -144,8 +144,7 @@ type evar_info = {
evar_body : evar_body;
evar_filter : Filter.t;
evar_source : Evar_kinds.t Loc.located;
- evar_candidates : constr list option; (* if not None, list of allowed instances *)
- evar_extra : Store.t }
+ evar_candidates : constr list option; (* if not None, list of allowed instances *)}
let make_evar hyps ccl = {
evar_concl = ccl;
@@ -153,9 +152,7 @@ let make_evar hyps ccl = {
evar_body = Evar_empty;
evar_filter = Filter.identity;
evar_source = Loc.tag @@ Evar_kinds.InternalHole;
- evar_candidates = None;
- evar_extra = Store.empty
-}
+ evar_candidates = None; }
let instance_mismatch () =
anomaly (Pp.str "Signature and its instance do not match.")
@@ -413,6 +410,11 @@ end
type goal_kind = ToShelve | ToGiveUp
+type evar_flags =
+ { obligation_evars : Evar.Set.t;
+ restricted_evars : Evar.t Evar.Map.t;
+ typeclass_evars : Evar.Set.t }
+
type evar_map = {
(** Existential variables *)
defn_evars : evar_info EvMap.t;
@@ -425,6 +427,7 @@ type evar_map = {
last_mods : Evar.Set.t;
(** Metas *)
metas : clbinding Metamap.t;
+ evar_flags : evar_flags;
(** Interactive proofs *)
effects : Safe_typing.private_constants;
future_goals : Evar.t list; (** list of newly created evars, to be
@@ -441,20 +444,82 @@ type evar_map = {
extras : Store.t;
}
+let get_is_maybe_typeclass, (is_maybe_typeclass_hook : (evar_map -> constr -> bool) Hook.t) = Hook.make ~default:(fun evd c -> false) ()
+
+let is_maybe_typeclass sigma c = Hook.get get_is_maybe_typeclass sigma c
+
(*** Lifting primitive from Evar.Map. ***)
let rename evk id evd =
{ evd with evar_names = EvNames.rename evk id evd.evar_names }
-let add_with_name ?name d e i = match i.evar_body with
+let add_with_name ?name ?(typeclass_candidate = true) d e i = match i.evar_body with
| Evar_empty ->
let evar_names = EvNames.add_name_undefined name e i d.evar_names in
- { d with undf_evars = EvMap.add e i d.undf_evars; evar_names }
+ let evar_flags =
+ if typeclass_candidate && is_maybe_typeclass d i.evar_concl then
+ let flags = d.evar_flags in
+ { flags with typeclass_evars = Evar.Set.add e flags.typeclass_evars }
+ else d.evar_flags
+ in
+ { d with undf_evars = EvMap.add e i d.undf_evars; evar_names; evar_flags }
| Evar_defined _ ->
let evar_names = EvNames.remove_name_defined e d.evar_names in
{ d with defn_evars = EvMap.add e i d.defn_evars; evar_names }
-let add d e i = add_with_name d e i
+(** Evd.add is a low-level function mainly used to update the evar_info
+ associated to an evar, so we prevent registering its typeclass status. *)
+let add d e i = add_with_name ~typeclass_candidate:false d e i
+
+(*** Evar flags: typeclasses, restricted or obligation flag *)
+
+let get_typeclass_evars evd = evd.evar_flags.typeclass_evars
+
+let set_typeclass_evars evd tcs =
+ let flags = evd.evar_flags in
+ { evd with evar_flags = { flags with typeclass_evars = tcs } }
+
+let is_typeclass_evar evd evk =
+ let flags = evd.evar_flags in
+ Evar.Set.mem evk flags.typeclass_evars
+
+let set_obligation_evar evd evk =
+ let flags = evd.evar_flags in
+ let evar_flags = { flags with obligation_evars = Evar.Set.add evk flags.obligation_evars } in
+ { evd with evar_flags }
+
+let is_obligation_evar evd evk =
+ let flags = evd.evar_flags in
+ Evar.Set.mem evk flags.obligation_evars
+
+(** Inheritance of flags: for evar-evar and restriction cases *)
+
+let inherit_evar_flags evar_flags evk evk' =
+ let evk_typeclass = Evar.Set.mem evk evar_flags.typeclass_evars in
+ let evk_obligation = Evar.Set.mem evk evar_flags.obligation_evars in
+ if not (evk_obligation || evk_typeclass) then evar_flags
+ else
+ let typeclass_evars =
+ if evk_typeclass then
+ let typeclass_evars = Evar.Set.remove evk evar_flags.typeclass_evars in
+ Evar.Set.add evk' typeclass_evars
+ else evar_flags.typeclass_evars
+ in
+ let obligation_evars =
+ if evk_obligation then
+ let obligation_evars = Evar.Set.remove evk evar_flags.obligation_evars in
+ Evar.Set.add evk' obligation_evars
+ else evar_flags.obligation_evars
+ in
+ { evar_flags with obligation_evars; typeclass_evars }
+
+(** Removal: in all other cases of definition *)
+
+let remove_evar_flags evk evar_flags =
+ { typeclass_evars = Evar.Set.remove evk evar_flags.typeclass_evars;
+ obligation_evars = Evar.Set.remove evk evar_flags.obligation_evars;
+ (** Restriction information is kept. *)
+ restricted_evars = evar_flags.restricted_evars }
(** New evars *)
@@ -464,9 +529,9 @@ let evar_counter_summary_name = "evar counter"
let evar_ctr, evar_counter_summary_tag = Summary.ref_tag 0 ~name:evar_counter_summary_name
let new_untyped_evar () = incr evar_ctr; Evar.unsafe_of_int !evar_ctr
-let new_evar evd ?name evi =
+let new_evar evd ?name ?typeclass_candidate evi =
let evk = new_untyped_evar () in
- let evd = add_with_name evd ?name evk evi in
+ let evd = add_with_name evd ?name ?typeclass_candidate evk evi in
(evd, evk)
let remove d e =
@@ -478,7 +543,9 @@ let remove d e =
in
let future_goals = List.filter (fun e' -> not (Evar.equal e e')) d.future_goals in
let future_goals_status = EvMap.remove e d.future_goals_status in
- { d with undf_evars; defn_evars; principal_future_goal; future_goals; future_goals_status }
+ let evar_flags = remove_evar_flags e d.evar_flags in
+ { d with undf_evars; defn_evars; principal_future_goal; future_goals; future_goals_status;
+ evar_flags }
let find d e =
try EvMap.find e d.undf_evars
@@ -583,12 +650,18 @@ let cmap f evd =
let create_evar_defs sigma = { sigma with
conv_pbs=[]; last_mods=Evar.Set.empty; metas=Metamap.empty }
+let empty_evar_flags =
+ { obligation_evars = Evar.Set.empty;
+ restricted_evars = Evar.Map.empty;
+ typeclass_evars = Evar.Set.empty }
+
let empty = {
defn_evars = EvMap.empty;
undf_evars = EvMap.empty;
universes = UState.empty;
conv_pbs = [];
last_mods = Evar.Set.empty;
+ evar_flags = empty_evar_flags;
metas = Metamap.empty;
effects = Safe_typing.empty_private_constants;
evar_names = EvNames.empty; (* id<->key for undefined evars *)
@@ -634,9 +707,7 @@ let evar_source evk d = (find d evk).evar_source
let evar_ident evk evd = EvNames.ident evk evd.evar_names
let evar_key id evd = EvNames.key id evd.evar_names
-let restricted = Store.field ()
-
-let define_aux ?dorestrict def undef evk body =
+let define_aux def undef evk body =
let oldinfo =
try EvMap.find evk undef
with Not_found ->
@@ -646,24 +717,39 @@ let define_aux ?dorestrict def undef evk body =
anomaly ~label:"Evd.define" (Pp.str "cannot define undeclared evar.")
in
let () = assert (oldinfo.evar_body == Evar_empty) in
- let evar_extra = match dorestrict with
- | Some evk' -> Store.set oldinfo.evar_extra restricted evk'
- | None -> oldinfo.evar_extra in
- let newinfo = { oldinfo with evar_body = Evar_defined body; evar_extra } in
+ let newinfo = { oldinfo with evar_body = Evar_defined body } in
EvMap.add evk newinfo def, EvMap.remove evk undef
(* define the existential of section path sp as the constr body *)
-let define evk body evd =
+let define_gen evk body evd evar_flags =
let (defn_evars, undf_evars) = define_aux evd.defn_evars evd.undf_evars evk body in
let last_mods = match evd.conv_pbs with
| [] -> evd.last_mods
| _ -> Evar.Set.add evk evd.last_mods
in
let evar_names = EvNames.remove_name_defined evk evd.evar_names in
- { evd with defn_evars; undf_evars; last_mods; evar_names }
+ { evd with defn_evars; undf_evars; last_mods; evar_names; evar_flags }
-let is_restricted_evar evi =
- Store.get evi.evar_extra restricted
+(** By default, the obligation and typeclass flags of the evar are removed *)
+let define evk body evd =
+ let evar_flags = remove_evar_flags evk evd.evar_flags in
+ define_gen evk body evd evar_flags
+
+(** In case of an evar-evar solution, the flags are inherited *)
+let define_with_evar evk body evd =
+ let evk' = fst (destEvar body) in
+ let evar_flags = inherit_evar_flags evd.evar_flags evk evk' in
+ define_gen evk body evd evar_flags
+
+let is_restricted_evar evd evk =
+ try Some (Evar.Map.find evk evd.evar_flags.restricted_evars)
+ with Not_found -> None
+
+let declare_restricted_evar evar_flags evk evk' =
+ { evar_flags with restricted_evars = Evar.Map.add evk evk' evar_flags.restricted_evars }
+
+(* In case of restriction, we declare the restriction and inherit the obligation
+ and typeclass flags. *)
let restrict evk filter ?candidates ?src evd =
let evk' = new_untyped_evar () in
@@ -679,9 +765,11 @@ let restrict evk filter ?candidates ?src evd =
let ctxt = Filter.filter_list filter (evar_context evar_info) in
let id_inst = Array.map_of_list (NamedDecl.get_id %> mkVar) ctxt in
let body = mkEvar(evk',id_inst) in
- let (defn_evars, undf_evars) = define_aux ~dorestrict:evk' evd.defn_evars evd.undf_evars evk body in
+ let (defn_evars, undf_evars) = define_aux evd.defn_evars evd.undf_evars evk body in
+ let evar_flags = declare_restricted_evar evd.evar_flags evk evk' in
+ let evar_flags = inherit_evar_flags evar_flags evk evk' in
{ evd with undf_evars = EvMap.add evk' evar_info' undf_evars;
- defn_evars; last_mods; evar_names }, evk'
+ defn_evars; last_mods; evar_names; evar_flags }, evk'
let downcast evk ccl evd =
let evar_info = EvMap.find evk evd.undf_evars in
@@ -1019,6 +1107,7 @@ let set_metas evd metas = {
universes = evd.universes;
conv_pbs = evd.conv_pbs;
last_mods = evd.last_mods;
+ evar_flags = evd.evar_flags;
metas;
effects = evd.effects;
evar_names = evd.evar_names;
diff --git a/engine/evd.mli b/engine/evd.mli
index 1a5614988d..b0e3c2b869 100644
--- a/engine/evd.mli
+++ b/engine/evd.mli
@@ -83,10 +83,6 @@ type evar_body =
| Evar_empty
| Evar_defined of econstr
-
-module Store : Store.S
-(** Datatype used to store additional information in evar maps. *)
-
type evar_info = {
evar_concl : econstr;
(** Type of the evar. *)
@@ -102,8 +98,6 @@ type evar_info = {
(** Information about the evar. *)
evar_candidates : econstr list option;
(** List of possible solutions when known that it is a finite list *)
- evar_extra : Store.t
- (** Extra store, used for clever hacks. *)
}
val make_evar : named_context_val -> etypes -> evar_info
@@ -145,7 +139,7 @@ val has_undefined : evar_map -> bool
there are uninstantiated evars in [sigma]. *)
val new_evar : evar_map ->
- ?name:Id.t -> evar_info -> evar_map * Evar.t
+ ?name:Id.t -> ?typeclass_candidate:bool -> evar_info -> evar_map * Evar.t
(** Creates a fresh evar mapping to the given information. *)
val add : evar_map -> Evar.t -> evar_info -> evar_map
@@ -182,7 +176,7 @@ val raw_map_undefined : (Evar.t -> evar_info -> evar_info) -> evar_map -> evar_m
(** Same as {!raw_map}, but restricted to undefined evars. For efficiency
reasons. *)
-val define : Evar.t-> econstr -> evar_map -> evar_map
+val define : Evar.t -> econstr -> evar_map -> evar_map
(** Set the body of an evar to the given constr. It is expected that:
{ul
{- The evar is already present in the evarmap.}
@@ -190,6 +184,10 @@ val define : Evar.t-> econstr -> evar_map -> evar_map
{- All the evars present in the constr should be present in the evar map.}
} *)
+val define_with_evar : Evar.t -> econstr -> evar_map -> evar_map
+(** Same as [define ev body evd], except the body must be an existential variable [ev'].
+ This additionally makes [ev'] inherit the [obligation] and [typeclass] flags of [ev]. *)
+
val cmap : (econstr -> econstr) -> evar_map -> evar_map
(** Map the function on all terms in the evar map. *)
@@ -210,6 +208,8 @@ val undefined_map : evar_map -> evar_info Evar.Map.t
val drop_all_defined : evar_map -> evar_map
+val is_maybe_typeclass_hook : (evar_map -> constr -> bool) Hook.t
+
(** {6 Instantiating partial terms} *)
exception NotInstantiatedEvar
@@ -247,9 +247,27 @@ val restrict : Evar.t-> Filter.t -> ?candidates:econstr list ->
possibly limiting the instances to a set of candidates (candidates
are filtered according to the filter) *)
-val is_restricted_evar : evar_info -> Evar.t option
+val is_restricted_evar : evar_map -> Evar.t -> Evar.t option
(** Tell if an evar comes from restriction of another evar, and if yes, which *)
+val set_typeclass_evars : evar_map -> Evar.Set.t -> evar_map
+(** Mark the given set of evars as available for resolution.
+
+ Precondition: they should indeed refer to undefined typeclass evars.
+ *)
+
+val get_typeclass_evars : evar_map -> Evar.Set.t
+(** The set of undefined typeclass evars *)
+
+val is_typeclass_evar : evar_map -> Evar.t -> bool
+(** Is the evar declared resolvable for typeclass resolution *)
+
+val set_obligation_evar : evar_map -> Evar.t -> evar_map
+(** Declare an evar as an obligation *)
+
+val is_obligation_evar : evar_map -> Evar.t -> bool
+(** Is the evar declared as an obligation *)
+
val downcast : Evar.t-> etypes -> evar_map -> evar_map
(** Change the type of an undefined evar to a new type assumed to be a
subtype of its current type; subtyping must be ensured by caller *)
@@ -357,6 +375,9 @@ val add_universe_constraints : evar_map -> UnivProblem.Set.t -> evar_map
*)
+module Store : Store.S
+(** Datatype used to store additional information in evar maps. *)
+
val get_extra_data : evar_map -> Store.t
val set_extra_data : Store.t -> evar_map -> evar_map
diff --git a/engine/namegen.ml b/engine/namegen.ml
index 7ce759a3fb..db72dc8ec3 100644
--- a/engine/namegen.ml
+++ b/engine/namegen.ml
@@ -21,7 +21,6 @@ open Constr
open Environ
open EConstr
open Vars
-open Nametab
open Nameops
open Libnames
open Globnames
@@ -82,14 +81,14 @@ let is_imported_ref = function
let is_global id =
try
- let ref = locate (qualid_of_ident id) in
+ let ref = Nametab.locate (qualid_of_ident id) in
not (is_imported_ref ref)
with Not_found ->
false
let is_constructor id =
try
- match locate (qualid_of_ident id) with
+ match Nametab.locate (qualid_of_ident id) with
| ConstructRef _ -> true
| _ -> false
with Not_found ->
@@ -116,7 +115,7 @@ let head_name sigma c = (* Find the head constant of a constr if any *)
| Cast (c,_,_) | App (c,_) -> hdrec c
| Proj (kn,_) -> Some (Label.to_id (Constant.label (Projection.constant kn)))
| Const _ | Ind _ | Construct _ | Var _ as c ->
- Some (basename_of_global (global_of_constr c))
+ Some (Nametab.basename_of_global (global_of_constr c))
| Fix ((_,i),(lna,_,_)) | CoFix (i,(lna,_,_)) ->
Some (match lna.(i) with Name id -> id | _ -> assert false)
| Sort _ | Rel _ | Meta _|Evar _|Case (_, _, _, _) -> None
@@ -148,8 +147,8 @@ let hdchar env sigma c =
| Cast (c,_,_) | App (c,_) -> hdrec k c
| Proj (kn,_) -> lowercase_first_char (Label.to_id (Constant.label (Projection.constant kn)))
| Const (kn,_) -> lowercase_first_char (Label.to_id (Constant.label kn))
- | Ind (x,_) -> (try lowercase_first_char (basename_of_global (IndRef x)) with Not_found when !Flags.in_debugger -> "zz")
- | Construct (x,_) -> (try lowercase_first_char (basename_of_global (ConstructRef x)) with Not_found when !Flags.in_debugger -> "zz")
+ | Ind (x,_) -> (try lowercase_first_char (Nametab.basename_of_global (IndRef x)) with Not_found when !Flags.in_debugger -> "zz")
+ | Construct (x,_) -> (try lowercase_first_char (Nametab.basename_of_global (ConstructRef x)) with Not_found when !Flags.in_debugger -> "zz")
| Var id -> lowercase_first_char id
| Sort s -> sort_hdchar (ESorts.kind sigma s)
| Rel n ->
@@ -267,7 +266,7 @@ let visible_ids sigma (nenv, c) =
begin
try
let gseen = GlobRef.Set_env.add g gseen in
- let short = shortest_qualid_of_global Id.Set.empty g in
+ let short = Nametab.shortest_qualid_of_global Id.Set.empty g in
let dir, id = repr_qualid short in
let ids = if DirPath.is_empty dir then Id.Set.add id ids else ids in
accu := (gseen, vseen, ids)
diff --git a/engine/proofview.ml b/engine/proofview.ml
index 12d31e5f46..304b2dff84 100644
--- a/engine/proofview.ml
+++ b/engine/proofview.ml
@@ -60,19 +60,14 @@ type telescope =
| TNil of Evd.evar_map
| TCons of Environ.env * Evd.evar_map * EConstr.types * (Evd.evar_map -> EConstr.constr -> telescope)
-let typeclass_resolvable = Evd.Store.field ()
-
let dependent_init =
- (* Goals are created with a store which marks them as unresolvable
- for type classes. *)
- let store = Evd.Store.set Evd.Store.empty typeclass_resolvable () in
(* Goals don't have a source location. *)
let src = Loc.tag @@ Evar_kinds.GoalEvar in
(* Main routine *)
let rec aux = function
| TNil sigma -> [], { solution = sigma; comb = []; shelf = [] }
| TCons (env, sigma, typ, t) ->
- let (sigma, econstr) = Evarutil.new_evar env sigma ~src ~store typ in
+ let (sigma, econstr) = Evarutil.new_evar env sigma ~src ~typeclass_candidate:false typ in
let (gl, _) = EConstr.destEvar sigma econstr in
let ret, { solution = sol; comb = comb } = aux (t sigma econstr) in
let entry = (econstr, typ) :: ret in
@@ -745,26 +740,28 @@ let unshelve l p =
let l = undefined p.solution l in
{ p with comb = p.comb@l }
-let mark_in_evm ~goal evd content =
- let info = Evd.find evd content in
- let info =
+let mark_in_evm ~goal evd evars =
+ let evd =
if goal then
- { info with Evd.evar_source = match info.Evd.evar_source with
- (* Two kinds for goal evars:
- - GoalEvar (morally not dependent)
- - VarInstance (morally dependent of some name).
- This is a heuristic for naming these evars. *)
- | loc, (Evar_kinds.QuestionMark { Evar_kinds.qm_name=Names.Name id} |
- Evar_kinds.ImplicitArg (_,(_,Some id),_)) -> loc, Evar_kinds.VarInstance id
- | _, (Evar_kinds.VarInstance _ | Evar_kinds.GoalEvar) as x -> x
- | loc,_ -> loc,Evar_kinds.GoalEvar }
- else info
- in
- let info = match Evd.Store.get info.Evd.evar_extra typeclass_resolvable with
- | None -> { info with Evd.evar_extra = Evd.Store.set info.Evd.evar_extra typeclass_resolvable () }
- | Some () -> info
+ let mark evd content =
+ let info = Evd.find evd content in
+ let info =
+ { info with Evd.evar_source = match info.Evd.evar_source with
+ (* Two kinds for goal evars:
+ - GoalEvar (morally not dependent)
+ - VarInstance (morally dependent of some name).
+ This is a heuristic for naming these evars. *)
+ | loc, (Evar_kinds.QuestionMark { Evar_kinds.qm_name=Names.Name id} |
+ Evar_kinds.ImplicitArg (_,(_,Some id),_)) -> loc, Evar_kinds.VarInstance id
+ | _, (Evar_kinds.VarInstance _ | Evar_kinds.GoalEvar) as x -> x
+ | loc,_ -> loc,Evar_kinds.GoalEvar }
+ in Evd.add evd content info
+ in CList.fold_left mark evd evars
+ else evd
in
- Evd.add evd content info
+ let tcs = Evd.get_typeclass_evars evd in
+ let evset = Evar.Set.of_list evars in
+ Evd.set_typeclass_evars evd (Evar.Set.diff tcs evset)
let with_shelf tac =
let open Proof in
@@ -781,7 +778,7 @@ let with_shelf tac =
let sigma = Evd.restore_future_goals sigma fgoals in
(* Ensure we mark and return only unsolved goals *)
let gls' = undefined_evars sigma (CList.rev_append gls' gls) in
- let sigma = CList.fold_left (mark_in_evm ~goal:false) sigma gls' in
+ let sigma = mark_in_evm ~goal:false sigma gls' in
let npv = { npv with shelf; solution = sigma } in
Pv.set npv >> tclUNIT (gls', ans)
@@ -1035,7 +1032,7 @@ module Unsafe = struct
let reset_future_goals p =
{ p with solution = Evd.reset_future_goals p.solution }
- let mark_as_goal evd content =
+ let mark_as_goals evd content =
mark_in_evm ~goal:true evd content
let advance = Evarutil.advance
@@ -1043,9 +1040,7 @@ module Unsafe = struct
let undefined = undefined
let mark_as_unresolvable p gl =
- { p with solution = mark_in_evm ~goal:false p.solution gl }
-
- let typeclass_resolvable = typeclass_resolvable
+ { p with solution = mark_in_evm ~goal:false p.solution [gl] }
end
@@ -1065,10 +1060,6 @@ let goal_nf_evar sigma gl =
let sigma = Evd.add sigma gl evi in
(gl, sigma)
-let goal_extra evars gl =
- let evi = Evd.find evars gl in
- evi.Evd.evar_extra
-
let catchable_exception = function
| Logic_monad.Exception _ -> false
@@ -1093,7 +1084,6 @@ module Goal = struct
let sigma {sigma} = sigma
let hyps {env} = EConstr.named_context env
let concl {concl} = concl
- let extra {sigma; self} = goal_extra sigma self
let gmake_with info env sigma goal state =
{ env = Environ.reset_with_named_context (Evd.evar_filtered_hyps info) env ;
diff --git a/engine/proofview.mli b/engine/proofview.mli
index 0bb3229a9b..cda4808a23 100644
--- a/engine/proofview.mli
+++ b/engine/proofview.mli
@@ -456,9 +456,9 @@ module Unsafe : sig
(** Clears the future goals store in the proof view. *)
val reset_future_goals : proofview -> proofview
- (** Give an evar the status of a goal (changes its source location
- and makes it unresolvable for type classes. *)
- val mark_as_goal : Evd.evar_map -> Evar.t -> Evd.evar_map
+ (** Give the evars the status of a goal (changes their source location
+ and makes them unresolvable for type classes). *)
+ val mark_as_goals : Evd.evar_map -> Evar.t list -> Evd.evar_map
(** Make an evar unresolvable for type classes. *)
val mark_as_unresolvable : proofview -> Evar.t -> proofview
@@ -475,8 +475,6 @@ module Unsafe : sig
val undefined : Evd.evar_map -> Proofview_monad.goal_with_state list ->
Proofview_monad.goal_with_state list
- val typeclass_resolvable : unit Evd.Store.field
-
end
(** This module gives access to the innards of the monad. Its use is
@@ -507,7 +505,6 @@ module Goal : sig
val hyps : t -> named_context
val env : t -> Environ.env
val sigma : t -> Evd.evar_map
- val extra : t -> Evd.Store.t
val state : t -> Proofview_monad.StateStore.t
(** [nf_enter t] applies the goal-dependent tactic [t] in each goal
diff --git a/engine/termops.ml b/engine/termops.ml
index ee0c3d210e..5e220fd8f1 100644
--- a/engine/termops.ml
+++ b/engine/termops.ml
@@ -365,12 +365,18 @@ let pr_evar_map_gen with_univs pr_evars sigma =
else
str "CONSTRAINTS:" ++ brk (0, 1) ++
pr_evar_constraints sigma conv_pbs ++ fnl ()
+ and typeclasses =
+ let evars = Evd.get_typeclass_evars sigma in
+ if Evar.Set.is_empty evars then mt ()
+ else
+ str "TYPECLASSES:" ++ brk (0, 1) ++
+ prlist_with_sep spc Evar.print (Evar.Set.elements evars) ++ fnl ()
and metas =
if List.is_empty (Evd.meta_list sigma) then mt ()
else
str "METAS:" ++ brk (0, 1) ++ pr_meta_map sigma
in
- evs ++ svs ++ cstrs ++ metas
+ evs ++ svs ++ cstrs ++ typeclasses ++ metas
let pr_evar_list sigma l =
let open Evd in
@@ -816,26 +822,11 @@ let map_constr_with_full_binders_user_view sigma g f =
each binder traversal; it is not recursive *)
let fold_constr_with_full_binders sigma g f n acc c =
- let open RelDecl in
- match EConstr.kind sigma c with
- | (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _
- | Construct _) -> acc
- | Cast (c,_, t) -> f n (f n acc c) t
- | Prod (na,t,c) -> f (g (LocalAssum (na, t)) n) (f n acc t) c
- | Lambda (na,t,c) -> f (g (LocalAssum (na, t)) n) (f n acc t) c
- | LetIn (na,b,t,c) -> f (g (LocalDef (na, b, t)) n) (f n (f n acc b) t) c
- | App (c,l) -> Array.fold_left (f n) (f n acc c) l
- | Proj (p,c) -> f n acc c
- | Evar (_,l) -> Array.fold_left (f n) acc l
- | Case (_,p,c,bl) -> Array.fold_left (f n) (f n (f n acc p) c) bl
- | Fix (_,(lna,tl,bl)) ->
- let n' = CArray.fold_left2 (fun c n t -> g (LocalAssum (n, t)) c) n lna tl in
- let fd = Array.map2 (fun t b -> (t,b)) tl bl in
- Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd
- | CoFix (_,(lna,tl,bl)) ->
- let n' = CArray.fold_left2 (fun c n t -> g (LocalAssum (n, t)) c) n lna tl in
- let fd = Array.map2 (fun t b -> (t,b)) tl bl in
- Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd
+ let open EConstr in
+ let f l acc c = f l acc (of_constr c) in
+ let g d l = g (of_rel_decl d) l in
+ let c = Unsafe.to_constr (whd_evar sigma c) in
+ Constr.fold_with_full_binders g f n acc c
let fold_constr_with_binders sigma g f n acc c =
fold_constr_with_full_binders sigma (fun _ x -> g x) f n acc c
diff --git a/engine/univGen.ml b/engine/univGen.ml
index 23ab30eb75..130aa06f53 100644
--- a/engine/univGen.ml
+++ b/engine/univGen.ml
@@ -77,17 +77,14 @@ let fresh_global_instance ?loc ?names env gr =
let u, ctx = fresh_global_instance ?loc ?names env gr in
mkRef (gr, u), ctx
-let constr_of_global gr =
- let c, ctx = fresh_global_instance (Global.env ()) gr in
- if not (Univ.ContextSet.is_empty ctx) then
- if Univ.LSet.is_empty (Univ.ContextSet.levels ctx) then
- (* Should be an error as we might forget constraints, allow for now
- to make firstorder work with "using" clauses *)
- c
- else CErrors.user_err ~hdr:"constr_of_global"
- Pp.(str "globalization of polymorphic reference " ++ Nametab.pr_global_env Id.Set.empty gr ++
- str " would forget universes.")
- else c
+let constr_of_monomorphic_global gr =
+ if not (Global.is_polymorphic gr) then
+ fst (fresh_global_instance (Global.env ()) gr)
+ else CErrors.user_err ~hdr:"constr_of_global"
+ Pp.(str "globalization of polymorphic reference " ++ Nametab.pr_global_env Id.Set.empty gr ++
+ str " would forget universes.")
+
+let constr_of_global gr = constr_of_monomorphic_global gr
let constr_of_global_univ = mkRef
diff --git a/engine/univGen.mli b/engine/univGen.mli
index c2e9d0c696..8af5f8fdb0 100644
--- a/engine/univGen.mli
+++ b/engine/univGen.mli
@@ -74,13 +74,19 @@ val extend_context : 'a in_universe_context_set -> ContextSet.t ->
[@@ocaml.deprecated "Use [Univ.extend_in_context_set]"]
(** Create a fresh global in the global environment, without side effects.
- BEWARE: this raises an ANOMALY on polymorphic constants/inductives:
+ BEWARE: this raises an error on polymorphic constants/inductives:
the constraints should be properly added to an evd.
See Evd.fresh_global, Evarutil.new_global, and pf_constr_of_global for
- the proper way to get a fresh copy of a global reference. *)
+ the proper way to get a fresh copy of a polymorphic global reference. *)
+val constr_of_monomorphic_global : GlobRef.t -> constr
+
val constr_of_global : GlobRef.t -> constr
+[@@ocaml.deprecated "constr_of_global will crash on polymorphic constants,\
+ use [constr_of_monomorphic_global] if the reference is guaranteed to\
+ be monomorphic, [Evarutil.new_global] or [Tacmach.New.pf_constr_of_global] otherwise"]
(** Returns the type of the global reference, by creating a fresh instance of polymorphic
references and computing their instantiated universe context. (side-effect on the
universe counter, use with care). *)
val type_of_global : GlobRef.t -> types in_universe_context_set
+[@@ocaml.deprecated "use [Typeops.type_of_global]"]
diff --git a/gramlib/LICENSE b/gramlib/LICENSE
new file mode 100644
index 0000000000..b696affde7
--- /dev/null
+++ b/gramlib/LICENSE
@@ -0,0 +1,29 @@
+gramlib was derived from Daniel de Rauglaudre's camlp5
+(https://github.com/camlp5/camlp5) whose licence follows:
+
+* Copyright (c) 2007-2017, INRIA (Institut National de Recherches en
+* Informatique et Automatique). All rights reserved.
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above copyright
+* notice, this list of conditions and the following disclaimer in the
+* documentation and/or other materials provided with the distribution.
+* * Neither the name of INRIA, nor the names of its contributors may be
+* used to endorse or promote products derived from this software without
+* specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY INRIA AND CONTRIBUTORS ``AS IS'' AND
+* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INRIA AND
+* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+* SUCH DAMAGE.
diff --git a/gramlib/dune b/gramlib/dune
new file mode 100644
index 0000000000..6a9e622b4c
--- /dev/null
+++ b/gramlib/dune
@@ -0,0 +1,3 @@
+(library
+ (name gramlib)
+ (public_name coq.gramlib))
diff --git a/gramlib/gramext.ml b/gramlib/gramext.ml
new file mode 100644
index 0000000000..8960d4f257
--- /dev/null
+++ b/gramlib/gramext.ml
@@ -0,0 +1,580 @@
+(* camlp5r *)
+(* gramext.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+open Printf
+
+type 'a parser_t = 'a Stream.t -> Obj.t
+
+type 'te grammar =
+ { gtokens : (Plexing.pattern, int ref) Hashtbl.t;
+ mutable glexer : 'te Plexing.lexer }
+
+type 'te g_entry =
+ { egram : 'te grammar;
+ ename : string;
+ elocal : bool;
+ mutable estart : int -> 'te parser_t;
+ mutable econtinue : int -> int -> Obj.t -> 'te parser_t;
+ mutable edesc : 'te g_desc }
+and 'te g_desc =
+ Dlevels of 'te g_level list
+ | Dparser of 'te parser_t
+and 'te g_level =
+ { assoc : g_assoc;
+ lname : string option;
+ lsuffix : 'te g_tree;
+ lprefix : 'te g_tree }
+and g_assoc = NonA | RightA | LeftA
+and 'te g_symbol =
+ Sfacto of 'te g_symbol
+ | Smeta of string * 'te g_symbol list * Obj.t
+ | Snterm of 'te g_entry
+ | Snterml of 'te g_entry * string
+ | Slist0 of 'te g_symbol
+ | Slist0sep of 'te g_symbol * 'te g_symbol * bool
+ | Slist1 of 'te g_symbol
+ | Slist1sep of 'te g_symbol * 'te g_symbol * bool
+ | Sopt of 'te g_symbol
+ | Sflag of 'te g_symbol
+ | Sself
+ | Snext
+ | Scut
+ | Stoken of Plexing.pattern
+ | Stree of 'te g_tree
+ | Svala of string list * 'te g_symbol
+and g_action = Obj.t
+and 'te g_tree =
+ Node of 'te g_node
+ | LocAct of g_action * g_action list
+ | DeadEnd
+and 'te g_node =
+ { node : 'te g_symbol; son : 'te g_tree; brother : 'te g_tree }
+and err_fun = unit -> string
+
+type position =
+ First
+ | Last
+ | Before of string
+ | After of string
+ | Like of string
+ | Level of string
+
+let warning_verbose = ref true
+
+let rec derive_eps =
+ function
+ Slist0 _ -> true
+ | Slist0sep (_, _, _) -> true
+ | Sopt _ | Sflag _ -> true
+ | Sfacto s -> derive_eps s
+ | Stree t -> tree_derive_eps t
+ | Svala (_, s) -> derive_eps s
+ | Smeta (_, _, _) | Slist1 _ | Slist1sep (_, _, _) | Snterm _ |
+ Snterml (_, _) | Snext | Sself | Scut | Stoken _ ->
+ false
+and tree_derive_eps =
+ function
+ LocAct (_, _) -> true
+ | Node {node = s; brother = bro; son = son} ->
+ derive_eps s && tree_derive_eps son || tree_derive_eps bro
+ | DeadEnd -> false
+
+let rec eq_symbol s1 s2 =
+ match s1, s2 with
+ Snterm e1, Snterm e2 -> e1 == e2
+ | Snterml (e1, l1), Snterml (e2, l2) -> e1 == e2 && l1 = l2
+ | Slist0 s1, Slist0 s2 -> eq_symbol s1 s2
+ | Slist0sep (s1, sep1, b1), Slist0sep (s2, sep2, b2) ->
+ eq_symbol s1 s2 && eq_symbol sep1 sep2 && b1 = b2
+ | Slist1 s1, Slist1 s2 -> eq_symbol s1 s2
+ | Slist1sep (s1, sep1, b1), Slist1sep (s2, sep2, b2) ->
+ eq_symbol s1 s2 && eq_symbol sep1 sep2 && b1 = b2
+ | Sflag s1, Sflag s2 -> eq_symbol s1 s2
+ | Sopt s1, Sopt s2 -> eq_symbol s1 s2
+ | Svala (ls1, s1), Svala (ls2, s2) -> ls1 = ls2 && eq_symbol s1 s2
+ | Stree _, Stree _ -> false
+ | Sfacto (Stree t1), Sfacto (Stree t2) ->
+ (* The only goal of the node 'Sfacto' is to allow tree comparison
+ (therefore factorization) without looking at the semantic
+ actions; allow factorization of rules like "SV foo" which are
+ actually expanded into a tree. *)
+ let rec eq_tree t1 t2 =
+ match t1, t2 with
+ Node n1, Node n2 ->
+ eq_symbol n1.node n2.node && eq_tree n1.son n2.son &&
+ eq_tree n1.brother n2.brother
+ | LocAct (_, _), LocAct (_, _) -> true
+ | DeadEnd, DeadEnd -> true
+ | _ -> false
+ in
+ eq_tree t1 t2
+ | _ -> s1 = s2
+
+let is_before s1 s2 =
+ let s1 =
+ match s1 with
+ Svala (_, s) -> s
+ | _ -> s1
+ in
+ let s2 =
+ match s2 with
+ Svala (_, s) -> s
+ | _ -> s2
+ in
+ match s1, s2 with
+ Stoken ("ANY", _), _ -> false
+ | _, Stoken ("ANY", _) -> true
+ | Stoken (_, s), Stoken (_, "") when s <> "" -> true
+ | Stoken _, Stoken _ -> false
+ | Stoken _, _ -> true
+ | _ -> false
+
+let insert_tree entry_name gsymbols action tree =
+ let rec insert symbols tree =
+ match symbols with
+ s :: sl -> insert_in_tree s sl tree
+ | [] ->
+ match tree with
+ Node {node = s; son = son; brother = bro} ->
+ Node {node = s; son = son; brother = insert [] bro}
+ | LocAct (old_action, action_list) ->
+ if !warning_verbose then
+ begin
+ eprintf "<W> Grammar extension: ";
+ if entry_name <> "" then eprintf "in [%s], " entry_name;
+ eprintf "some rule has been masked\n";
+ flush stderr
+ end;
+ LocAct (action, old_action :: action_list)
+ | DeadEnd -> LocAct (action, [])
+ and insert_in_tree s sl tree =
+ match try_insert s sl tree with
+ Some t -> t
+ | None -> Node {node = s; son = insert sl DeadEnd; brother = tree}
+ and try_insert s sl tree =
+ match tree with
+ Node {node = s1; son = son; brother = bro} ->
+ if eq_symbol s s1 then
+ let t = Node {node = s1; son = insert sl son; brother = bro} in
+ Some t
+ else if s = Scut then
+ try_insert s sl (Node {node = s; son = tree; brother = DeadEnd})
+ else if s1 = Scut then try_insert s1 (s :: sl) tree
+ else if is_before s1 s || derive_eps s && not (derive_eps s1) then
+ let bro =
+ match try_insert s sl bro with
+ Some bro -> bro
+ | None -> Node {node = s; son = insert sl DeadEnd; brother = bro}
+ in
+ let t = Node {node = s1; son = son; brother = bro} in Some t
+ else
+ begin match try_insert s sl bro with
+ Some bro ->
+ let t = Node {node = s1; son = son; brother = bro} in Some t
+ | None -> None
+ end
+ | LocAct (_, _) | DeadEnd -> None
+ in
+ insert gsymbols tree
+
+let srules rl =
+ let t =
+ List.fold_left
+ (fun tree (symbols, action) -> insert_tree "" symbols action tree)
+ DeadEnd rl
+ in
+ Stree t
+
+external action : 'a -> g_action = "%identity"
+
+let is_level_labelled n lev =
+ match lev.lname with
+ Some n1 -> n = n1
+ | None -> false
+
+let rec token_exists_in_level f lev =
+ token_exists_in_tree f lev.lprefix || token_exists_in_tree f lev.lsuffix
+and token_exists_in_tree f =
+ function
+ Node n ->
+ token_exists_in_symbol f n.node || token_exists_in_tree f n.brother ||
+ token_exists_in_tree f n.son
+ | LocAct (_, _) | DeadEnd -> false
+and token_exists_in_symbol f =
+ function
+ Sfacto sy -> token_exists_in_symbol f sy
+ | Smeta (_, syl, _) -> List.exists (token_exists_in_symbol f) syl
+ | Slist0 sy -> token_exists_in_symbol f sy
+ | Slist0sep (sy, sep, _) ->
+ token_exists_in_symbol f sy || token_exists_in_symbol f sep
+ | Slist1 sy -> token_exists_in_symbol f sy
+ | Slist1sep (sy, sep, _) ->
+ token_exists_in_symbol f sy || token_exists_in_symbol f sep
+ | Sopt sy -> token_exists_in_symbol f sy
+ | Sflag sy -> token_exists_in_symbol f sy
+ | Stoken tok -> f tok
+ | Stree t -> token_exists_in_tree f t
+ | Svala (_, sy) -> token_exists_in_symbol f sy
+ | Snterm _ | Snterml (_, _) | Snext | Sself | Scut -> false
+
+let insert_level entry_name e1 symbols action slev =
+ match e1 with
+ true ->
+ {assoc = slev.assoc; lname = slev.lname;
+ lsuffix = insert_tree entry_name symbols action slev.lsuffix;
+ lprefix = slev.lprefix}
+ | false ->
+ {assoc = slev.assoc; lname = slev.lname; lsuffix = slev.lsuffix;
+ lprefix = insert_tree entry_name symbols action slev.lprefix}
+
+let empty_lev lname assoc =
+ let assoc =
+ match assoc with
+ Some a -> a
+ | None -> LeftA
+ in
+ {assoc = assoc; lname = lname; lsuffix = DeadEnd; lprefix = DeadEnd}
+
+let change_lev lev n lname assoc =
+ let a =
+ match assoc with
+ None -> lev.assoc
+ | Some a ->
+ if a <> lev.assoc && !warning_verbose then
+ begin
+ eprintf "<W> Changing associativity of level \"%s\"\n" n;
+ flush stderr
+ end;
+ a
+ in
+ begin match lname with
+ Some n ->
+ if lname <> lev.lname && !warning_verbose then
+ begin eprintf "<W> Level label \"%s\" ignored\n" n; flush stderr end
+ | None -> ()
+ end;
+ {assoc = a; lname = lev.lname; lsuffix = lev.lsuffix; lprefix = lev.lprefix}
+
+let get_level entry position levs =
+ match position with
+ Some First -> [], empty_lev, levs
+ | Some Last -> levs, empty_lev, []
+ | Some (Level n) ->
+ let rec get =
+ function
+ [] ->
+ eprintf "No level labelled \"%s\" in entry \"%s\"\n" n
+ entry.ename;
+ flush stderr;
+ failwith "Grammar.extend"
+ | lev :: levs ->
+ if is_level_labelled n lev then [], change_lev lev n, levs
+ else
+ let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+ in
+ get levs
+ | Some (Before n) ->
+ let rec get =
+ function
+ [] ->
+ eprintf "No level labelled \"%s\" in entry \"%s\"\n" n
+ entry.ename;
+ flush stderr;
+ failwith "Grammar.extend"
+ | lev :: levs ->
+ if is_level_labelled n lev then [], empty_lev, lev :: levs
+ else
+ let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+ in
+ get levs
+ | Some (After n) ->
+ let rec get =
+ function
+ [] ->
+ eprintf "No level labelled \"%s\" in entry \"%s\"\n" n
+ entry.ename;
+ flush stderr;
+ failwith "Grammar.extend"
+ | lev :: levs ->
+ if is_level_labelled n lev then [lev], empty_lev, levs
+ else
+ let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+ in
+ get levs
+ | Some (Like n) ->
+ let f (tok, prm) = n = tok || n = prm in
+ let rec get =
+ function
+ [] ->
+ eprintf "No level with \"%s\" in entry \"%s\"\n" n entry.ename;
+ flush stderr;
+ failwith "Grammar.extend"
+ | lev :: levs ->
+ if token_exists_in_level f lev then [], change_lev lev n, levs
+ else
+ let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+ in
+ get levs
+ | None ->
+ match levs with
+ lev :: levs -> [], change_lev lev "<top>", levs
+ | [] -> [], empty_lev, []
+
+(* [check_gram entry symb] checks that every entry referenced by [symb]
+   (directly, or nested inside lists, options, trees, ...) belongs to the
+   same grammar as [entry]; prints an error and fails otherwise.
+   Grammars are compared by physical equality ([!=]) on purpose: two
+   grammar records are "the same grammar" only if they are the same value. *)
+let rec check_gram entry =
+  function
+    Snterm e ->
+      if e.egram != entry.egram then
+        begin
+          eprintf "\
+Error: entries \"%s\" and \"%s\" do not belong to the same grammar.\n"
+            entry.ename e.ename;
+          flush stderr;
+          failwith "Grammar.extend error"
+        end
+  | Snterml (e, _) ->
+      if e.egram != entry.egram then
+        begin
+          eprintf "\
+Error: entries \"%s\" and \"%s\" do not belong to the same grammar.\n"
+            entry.ename e.ename;
+          flush stderr;
+          failwith "Grammar.extend error"
+        end
+  | Sfacto s -> check_gram entry s
+  | Smeta (_, sl, _) -> List.iter (check_gram entry) sl
+  | Slist0sep (s, t, _) -> check_gram entry t; check_gram entry s
+  | Slist1sep (s, t, _) -> check_gram entry t; check_gram entry s
+  | Slist0 s -> check_gram entry s
+  | Slist1 s -> check_gram entry s
+  | Sopt s -> check_gram entry s
+  | Sflag s -> check_gram entry s
+  | Stree t -> tree_check_gram entry t
+  | Svala (_, s) -> check_gram entry s
+  | Snext | Sself | Scut | Stoken _ -> ()
+(* Apply [check_gram] to every node symbol of a parse tree. *)
+and tree_check_gram entry =
+  function
+    Node {node = n; brother = bro; son = son} ->
+      check_gram entry n; tree_check_gram entry bro; tree_check_gram entry son
+  | LocAct (_, _) | DeadEnd -> ()
+
+(* Normalize a symbol: a non-terminal that is (physically) the entry
+   being extended becomes [Sself]. *)
+let change_to_self entry =
+  function
+    Snterm e when e == entry -> Sself
+  | x -> x
+
+(* Split off a leading [Sself] from a rule's symbol list; returns
+   [(true, rest)] if present, [(false, symbols)] otherwise.  (The [entry]
+   argument is unused here.) *)
+let get_initial entry =
+  function
+    Sself :: symbols -> true, symbols
+  | symbols -> false, symbols
+
+(* Register every token occurring in [symbols] with the grammar:
+   notifies the lexer via [tok_using] and increments the per-token use
+   counter in [gram.gtokens] (creating it at 0 first if absent).  The
+   special "ANY" token pattern is not registered. *)
+let insert_tokens gram symbols =
+  let rec insert =
+    function
+      Sfacto s -> insert s
+    | Smeta (_, sl, _) -> List.iter insert sl
+    | Slist0 s -> insert s
+    | Slist1 s -> insert s
+    | Slist0sep (s, t, _) -> insert s; insert t
+    | Slist1sep (s, t, _) -> insert s; insert t
+    | Sopt s -> insert s
+    | Sflag s -> insert s
+    | Stree t -> tinsert t
+    | Svala (_, s) -> insert s
+    | Stoken ("ANY", _) -> ()
+    | Stoken tok ->
+        gram.glexer.Plexing.tok_using tok;
+        let r =
+          try Hashtbl.find gram.gtokens tok with
+            Not_found -> let r = ref 0 in Hashtbl.add gram.gtokens tok r; r
+        in
+        incr r
+    | Snterm _ | Snterml (_, _) | Snext | Sself | Scut -> ()
+  and tinsert =
+    function
+      Node {node = s; brother = bro; son = son} ->
+        insert s; tinsert bro; tinsert son
+    | LocAct (_, _) | DeadEnd -> ()
+  in
+  List.iter insert symbols
+
+let levels_of_rules entry position rules =
+ let elev =
+ match entry.edesc with
+ Dlevels elev -> elev
+ | Dparser _ ->
+ eprintf "Error: entry not extensible: \"%s\"\n" entry.ename;
+ flush stderr;
+ failwith "Grammar.extend"
+ in
+ if rules = [] then elev
+ else
+ let (levs1, make_lev, levs2) = get_level entry position elev in
+ let (levs, _) =
+ List.fold_left
+ (fun (levs, make_lev) (lname, assoc, level) ->
+ let lev = make_lev lname assoc in
+ let lev =
+ List.fold_left
+ (fun lev (symbols, action) ->
+ let symbols = List.map (change_to_self entry) symbols in
+ List.iter (check_gram entry) symbols;
+ let (e1, symbols) = get_initial entry symbols in
+ insert_tokens entry.egram symbols;
+ insert_level entry.ename e1 symbols action lev)
+ lev level
+ in
+ lev :: levs, empty_lev)
+ ([], make_lev) rules
+ in
+ levs1 @ List.rev levs @ levs2
+
+(* "Logical" equality of symbols: like structural equality, except that
+   non-terminals are compared by entry *name* and [Sself] is identified
+   with a non-terminal naming [entry] itself.  Used when deleting rules,
+   so a rule written with [Snterm e] matches one stored with [Sself]. *)
+let logically_eq_symbols entry =
+  let rec eq_symbols s1 s2 =
+    match s1, s2 with
+      Snterm e1, Snterm e2 -> e1.ename = e2.ename
+    | Snterm e1, Sself -> e1.ename = entry.ename
+    | Sself, Snterm e2 -> entry.ename = e2.ename
+    | Snterml (e1, l1), Snterml (e2, l2) -> e1.ename = e2.ename && l1 = l2
+    | Slist0 s1, Slist0 s2 -> eq_symbols s1 s2
+    | Slist0sep (s1, sep1, b1), Slist0sep (s2, sep2, b2) ->
+        eq_symbols s1 s2 && eq_symbols sep1 sep2 && b1 = b2
+    | Slist1 s1, Slist1 s2 -> eq_symbols s1 s2
+    | Slist1sep (s1, sep1, b1), Slist1sep (s2, sep2, b2) ->
+        eq_symbols s1 s2 && eq_symbols sep1 sep2 && b1 = b2
+    | Sopt s1, Sopt s2 -> eq_symbols s1 s2
+    | Stree t1, Stree t2 -> eq_trees t1 t2
+    | _ -> s1 = s2
+  and eq_trees t1 t2 =
+    match t1, t2 with
+      Node n1, Node n2 ->
+        eq_symbols n1.node n2.node && eq_trees n1.son n2.son &&
+        eq_trees n1.brother n2.brother
+    | (LocAct (_, _) | DeadEnd), (LocAct (_, _) | DeadEnd) -> true
+    | _ -> false
+  in
+  eq_symbols
+
+(* [delete_rule_in_tree] returns
+ [Some (dsl, t)] if success
+ [dsl] =
+ Some (list of deleted nodes) if branch deleted
+ None if action replaced by previous version of action
+ [t] = remaining tree
+ [None] if failure *)
+
+(* See the contract in the comment just above.  Matching of the rule's
+   symbols against tree nodes uses [logically_eq_symbols entry]. *)
+let delete_rule_in_tree entry =
+  let rec delete_in_tree symbols tree =
+    match symbols, tree with
+      s :: sl, Node n ->
+        if logically_eq_symbols entry s n.node then delete_son sl n
+        else
+          begin match delete_in_tree symbols n.brother with
+            Some (dsl, t) ->
+              Some (dsl, Node {node = n.node; son = n.son; brother = t})
+          | None -> None
+          end
+    | s :: sl, _ -> None
+    | [], Node n ->
+        begin match delete_in_tree [] n.brother with
+          Some (dsl, t) ->
+            Some (dsl, Node {node = n.node; son = n.son; brother = t})
+        | None -> None
+        end
+    | [], DeadEnd -> None
+    | [], LocAct (_, []) -> Some (Some [], DeadEnd)
+    | [], LocAct (_, action :: list) -> Some (None, LocAct (action, list))
+  (* Delete the remaining symbols [sl] inside the son of node [n]; when
+     the whole son branch disappears, [n] itself is dropped and its
+     deleted symbol recorded in the result list. *)
+  and delete_son sl n =
+    match delete_in_tree sl n.son with
+      Some (Some dsl, DeadEnd) -> Some (Some (n.node :: dsl), n.brother)
+    | Some (Some dsl, t) ->
+        let t = Node {node = n.node; son = t; brother = n.brother} in
+        Some (Some (n.node :: dsl), t)
+    | Some (None, t) ->
+        let t = Node {node = n.node; son = t; brother = n.brother} in
+        Some (None, t)
+    | None -> None
+  in
+  delete_in_tree
+
+(* Decrement the use counter of every token occurring in a symbol (the
+   inverse of [insert_tokens]); when a counter reaches 0, the token is
+   removed from [gram.gtokens] and the lexer is notified via
+   [tok_removing].  [Hashtbl.find] may raise [Not_found] if the token was
+   never registered. *)
+let rec decr_keyw_use gram =
+  function
+    Stoken tok ->
+      let r = Hashtbl.find gram.gtokens tok in
+      decr r;
+      if !r == 0 then
+        begin
+          Hashtbl.remove gram.gtokens tok;
+          gram.glexer.Plexing.tok_removing tok
+        end
+  | Sfacto s -> decr_keyw_use gram s
+  | Smeta (_, sl, _) -> List.iter (decr_keyw_use gram) sl
+  | Slist0 s -> decr_keyw_use gram s
+  | Slist1 s -> decr_keyw_use gram s
+  | Slist0sep (s1, s2, _) -> decr_keyw_use gram s1; decr_keyw_use gram s2
+  | Slist1sep (s1, s2, _) -> decr_keyw_use gram s1; decr_keyw_use gram s2
+  | Sopt s -> decr_keyw_use gram s
+  | Sflag s -> decr_keyw_use gram s
+  | Stree t -> decr_keyw_use_in_tree gram t
+  | Svala (_, s) -> decr_keyw_use gram s
+  | Sself | Snext | Scut | Snterm _ | Snterml (_, _) -> ()
+(* Same, over every node of a parse tree. *)
+and decr_keyw_use_in_tree gram =
+  function
+    DeadEnd | LocAct (_, _) -> ()
+  | Node n ->
+      decr_keyw_use gram n.node;
+      decr_keyw_use_in_tree gram n.son;
+      decr_keyw_use_in_tree gram n.brother
+
+(* Delete the rule [symbols] from the first level whose [lsuffix] tree
+   contains it; token use counters of deleted nodes are decremented, and
+   a level that becomes completely empty (both trees [DeadEnd]) is
+   dropped.  Raises [Not_found] when no level contains the rule. *)
+let rec delete_rule_in_suffix entry symbols =
+  function
+    lev :: levs ->
+      begin match delete_rule_in_tree entry symbols lev.lsuffix with
+        Some (dsl, t) ->
+          begin match dsl with
+            Some dsl -> List.iter (decr_keyw_use entry.egram) dsl
+          | None -> ()
+          end;
+          begin match t with
+            DeadEnd when lev.lprefix == DeadEnd -> levs
+          | _ ->
+              let lev =
+                {assoc = lev.assoc; lname = lev.lname; lsuffix = t;
+                 lprefix = lev.lprefix}
+              in
+              lev :: levs
+          end
+      | None ->
+          let levs = delete_rule_in_suffix entry symbols levs in lev :: levs
+      end
+  | [] -> raise Not_found
+
+(* Mirror of [delete_rule_in_suffix] operating on the [lprefix] trees.
+   Raises [Not_found] when no level contains the rule. *)
+let rec delete_rule_in_prefix entry symbols =
+  function
+    lev :: levs ->
+      begin match delete_rule_in_tree entry symbols lev.lprefix with
+        Some (dsl, t) ->
+          begin match dsl with
+            Some dsl -> List.iter (decr_keyw_use entry.egram) dsl
+          | None -> ()
+          end;
+          begin match t with
+            DeadEnd when lev.lsuffix == DeadEnd -> levs
+          | _ ->
+              let lev =
+                {assoc = lev.assoc; lname = lev.lname; lsuffix = lev.lsuffix;
+                 lprefix = t}
+              in
+              lev :: levs
+          end
+      | None ->
+          let levs = delete_rule_in_prefix entry symbols levs in lev :: levs
+      end
+  | [] -> raise Not_found
+
+(* Delete a rule from a level list.  Rules beginning with the entry
+   itself ([Sself] or a physically-equal [Snterm]) are stored in the
+   [lsuffix] trees (minus that first symbol); all others in [lprefix]. *)
+let delete_rule_in_level_list entry symbols levs =
+  match symbols with
+    Sself :: symbols -> delete_rule_in_suffix entry symbols levs
+  | Snterm e :: symbols when e == entry ->
+      delete_rule_in_suffix entry symbols levs
+  | _ -> delete_rule_in_prefix entry symbols levs
diff --git a/gramlib/gramext.mli b/gramlib/gramext.mli
new file mode 100644
index 0000000000..a76b7da9a2
--- /dev/null
+++ b/gramlib/gramext.mli
@@ -0,0 +1,73 @@
+(* camlp5r *)
+(* gramext.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Type of the parsers produced for grammar entries.  The result is an
+    untyped [Obj.t]; the typed front end casts it back. *)
+type 'a parser_t = 'a Stream.t -> Obj.t
+
+(** A grammar: a table counting the uses of each token pattern, plus the
+    lexer the tokens are read with. *)
+type 'te grammar =
+  { gtokens : (Plexing.pattern, int ref) Hashtbl.t;
+    mutable glexer : 'te Plexing.lexer }
+
+(** A grammar entry together with its mutually recursive description:
+    either precedence levels of parse trees ([Dlevels]) or an opaque
+    hand-written parser ([Dparser]).  Each level keeps two trees:
+    [lsuffix] for rules starting with the entry itself (SELF), [lprefix]
+    for the others. *)
+type 'te g_entry =
+  { egram : 'te grammar;
+    ename : string;
+    elocal : bool;
+    mutable estart : int -> 'te parser_t;
+    mutable econtinue : int -> int -> Obj.t -> 'te parser_t;
+    mutable edesc : 'te g_desc }
+and 'te g_desc =
+    Dlevels of 'te g_level list
+  | Dparser of 'te parser_t
+and 'te g_level =
+  { assoc : g_assoc;
+    lname : string option;
+    lsuffix : 'te g_tree;
+    lprefix : 'te g_tree }
+and g_assoc = NonA | RightA | LeftA
+and 'te g_symbol =
+    Sfacto of 'te g_symbol
+  | Smeta of string * 'te g_symbol list * Obj.t
+  | Snterm of 'te g_entry
+  | Snterml of 'te g_entry * string
+  | Slist0 of 'te g_symbol
+  | Slist0sep of 'te g_symbol * 'te g_symbol * bool
+  | Slist1 of 'te g_symbol
+  | Slist1sep of 'te g_symbol * 'te g_symbol * bool
+  | Sopt of 'te g_symbol
+  | Sflag of 'te g_symbol
+  | Sself
+  | Snext
+  | Scut
+  | Stoken of Plexing.pattern
+  | Stree of 'te g_tree
+  | Svala of string list * 'te g_symbol
+and g_action = Obj.t
+and 'te g_tree =
+    Node of 'te g_node
+  | LocAct of g_action * g_action list
+  | DeadEnd
+and 'te g_node =
+  { node : 'te g_symbol; son : 'te g_tree; brother : 'te g_tree }
+and err_fun = unit -> string
+
+(** Where new levels are inserted when extending an entry. *)
+type position =
+    First
+  | Last
+  | Before of string
+  | After of string
+  | Like of string
+  | Level of string
+
+(** [levels_of_rules e pos rules] returns the new level list of [e] after
+    inserting [rules] at [pos]; fails if [e] is a [Dparser]. *)
+val levels_of_rules :
+  'te g_entry -> position option ->
+    (string option * g_assoc option * ('te g_symbol list * g_action) list)
+      list ->
+    'te g_level list
+(** Symbol grouping a list of rules (an anonymous rule tree). *)
+val srules : ('te g_symbol list * g_action) list -> 'te g_symbol
+(** Cast any value into an (untyped) semantic action. *)
+external action : 'a -> g_action = "%identity"
+(** Equality of symbols. *)
+val eq_symbol : 'a g_symbol -> 'a g_symbol -> bool
+
+(** Delete a rule, given by its symbol list, from a level list; raises
+    [Not_found] if no level contains it. *)
+val delete_rule_in_level_list :
+  'te g_entry -> 'te g_symbol list -> 'te g_level list -> 'te g_level list
+
+(** Flag enabling warning output. *)
+val warning_verbose : bool ref
diff --git a/gramlib/grammar.ml b/gramlib/grammar.ml
new file mode 100644
index 0000000000..04ec1049ed
--- /dev/null
+++ b/gramlib/grammar.ml
@@ -0,0 +1,1223 @@
+(* camlp5r *)
+(* grammar.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+open Gramext
+open Format
+
+(* All root-to-[LocAct] paths of a parse tree, as lists of symbols:
+   i.e. the individual rules the tree encodes ([DeadEnd] contributes
+   none, a bare [LocAct] the empty rule). *)
+let rec flatten_tree =
+  function
+    DeadEnd -> []
+  | LocAct (_, _) -> [[]]
+  | Node {node = n; brother = b; son = s} ->
+      List.map (fun l -> n :: l) (flatten_tree s) @ flatten_tree b
+
+(* When true (the default), [string_escaped] only escapes ASCII control
+   and quote characters, leaving other bytes (e.g. UTF-8 sequences)
+   untouched; when false it uses [String.escaped]. *)
+let utf8_print = ref true
+
+(* Escape a string for display between double quotes without mangling
+   non-ASCII bytes: only ["], [\], and the usual control characters are
+   rewritten. *)
+let utf8_string_escaped s =
+  let b = Buffer.create (String.length s) in
+  let rec loop i =
+    if i = String.length s then Buffer.contents b
+    else
+      begin
+        begin match s.[i] with
+          '"' -> Buffer.add_string b "\\\""
+        | '\\' -> Buffer.add_string b "\\\\"
+        | '\n' -> Buffer.add_string b "\\n"
+        | '\t' -> Buffer.add_string b "\\t"
+        | '\r' -> Buffer.add_string b "\\r"
+        | '\b' -> Buffer.add_string b "\\b"
+        | c -> Buffer.add_char b c
+        end;
+        loop (i + 1)
+      end
+  in
+  loop 0
+
+(* Dispatch on [utf8_print] (see above). *)
+let string_escaped s =
+  if !utf8_print then utf8_string_escaped s else String.escaped s
+
+(* Print [s] between double quotes, escaped. *)
+let print_str ppf s = fprintf ppf "\"%s\"" (string_escaped s)
+
+(* Pretty-printers (over a [Format] formatter) for symbols, rules and
+   rule lists, rendered in the EBNF-like concrete syntax of grammar
+   extensions (LIST0/LIST1/SEP/OPT/FLAG/SELF/NEXT...). *)
+let rec print_symbol ppf =
+  function
+    Sfacto s -> print_symbol ppf s
+  | Smeta (n, sl, _) -> print_meta ppf n sl
+  | Slist0 s -> fprintf ppf "LIST0 %a" print_symbol1 s
+  | Slist0sep (s, t, osep) ->
+      fprintf ppf "LIST0 %a SEP %a%s" print_symbol1 s print_symbol1 t
+        (if osep then " OPT_SEP" else "")
+  | Slist1 s -> fprintf ppf "LIST1 %a" print_symbol1 s
+  | Slist1sep (s, t, osep) ->
+      fprintf ppf "LIST1 %a SEP %a%s" print_symbol1 s print_symbol1 t
+        (if osep then " OPT_SEP" else "")
+  | Sopt s -> fprintf ppf "OPT %a" print_symbol1 s
+  | Sflag s -> fprintf ppf "FLAG %a" print_symbol1 s
+  | Stoken (con, prm) when con <> "" && prm <> "" ->
+      fprintf ppf "%s@ %a" con print_str prm
+  | Svala (_, s) -> fprintf ppf "V %a" print_symbol s
+  | Snterml (e, l) ->
+      fprintf ppf "%s%s@ LEVEL@ %a" e.ename (if e.elocal then "*" else "")
+        print_str l
+  | Snterm _ | Snext | Sself | Scut | Stoken _ | Stree _ as s ->
+      print_symbol1 ppf s
+(* Print a meta symbol: the space-separated words of [n] interleaved
+   with its argument symbols. *)
+and print_meta ppf n sl =
+  let rec loop i =
+    function
+      [] -> ()
+    | s :: sl ->
+        let j =
+          try String.index_from n i ' ' with Not_found -> String.length n
+        in
+        fprintf ppf "%s %a" (String.sub n i (j - i)) print_symbol1 s;
+        if sl = [] then ()
+        else
+          begin fprintf ppf " "; loop (min (j + 1) (String.length n)) sl end
+  in
+  loop 0 sl
+(* Print a symbol at atomic precedence; compound symbols are
+   parenthesized. *)
+and print_symbol1 ppf =
+  function
+    Sfacto s -> print_symbol1 ppf s
+  | Snterm e -> fprintf ppf "%s%s" e.ename (if e.elocal then "*" else "")
+  | Sself -> pp_print_string ppf "SELF"
+  | Snext -> pp_print_string ppf "NEXT"
+  | Scut -> pp_print_string ppf "/"
+  | Stoken ("", s) -> print_str ppf s
+  | Stoken (con, "") -> pp_print_string ppf con
+  | Stree t -> print_level ppf pp_print_space (flatten_tree t)
+  | Smeta (_, _, _) | Snterml (_, _) | Slist0 _ | Slist0sep (_, _, _) |
+    Slist1 _ | Slist1sep (_, _, _) | Sopt _ | Sflag _ | Stoken _ |
+    Svala (_, _) as s ->
+      fprintf ppf "(%a)" print_symbol s
+(* Print a rule as its symbols separated by ";". *)
+and print_rule ppf symbols =
+  fprintf ppf "@[<hov 0>";
+  let _ =
+    List.fold_left
+      (fun sep symbol ->
+         fprintf ppf "%t%a" sep print_symbol symbol;
+         fun ppf -> fprintf ppf ";@ ")
+      (fun ppf -> ()) symbols
+  in
+  fprintf ppf "@]"
+(* Print a list of rules between "[ " and " ]", separated by "|". *)
+and print_level ppf pp_print_space rules =
+  fprintf ppf "@[<hov 0>[ ";
+  let _ =
+    List.fold_left
+      (fun sep rule ->
+         fprintf ppf "%t%a" sep print_rule rule;
+         fun ppf -> fprintf ppf "%a| " pp_print_space ())
+      (fun ppf -> ()) rules
+  in
+  fprintf ppf " ]@]"
+
+(* Print every level of an entry: optional label, associativity keyword,
+   then the level's rules.  Rules coming from [lsuffix] are shown with a
+   leading SELF, reconstructing their original form. *)
+let print_levels ppf elev =
+  let _ =
+    List.fold_left
+      (fun sep lev ->
+         let rules =
+           List.map (fun t -> Sself :: t) (flatten_tree lev.lsuffix) @
+           flatten_tree lev.lprefix
+         in
+         fprintf ppf "%t@[<hov 2>" sep;
+         begin match lev.lname with
+           Some n -> fprintf ppf "%a@;<1 2>" print_str n
+         | None -> ()
+         end;
+         begin match lev.assoc with
+           LeftA -> fprintf ppf "LEFTA"
+         | RightA -> fprintf ppf "RIGHTA"
+         | NonA -> fprintf ppf "NONA"
+         end;
+         fprintf ppf "@]@;<1 2>";
+         print_level ppf pp_force_newline rules;
+         fun ppf -> fprintf ppf "@,| ")
+      (fun ppf -> ()) elev
+  in
+  ()
+
+(* Print an entry's description: its levels, or "<parser>" for an opaque
+   [Dparser] entry. *)
+let print_entry ppf e =
+  fprintf ppf "@[<v 0>[ ";
+  begin match e.edesc with
+    Dlevels elev -> print_levels ppf elev
+  | Dparser _ -> fprintf ppf "<parser>"
+  end;
+  fprintf ppf " ]@]"
+
+(* [floc] is installed by the parsing engine: it maps a token index in
+   the current stream to its source location.  The initial value fails,
+   catching uses outside a parse. *)
+let floc = ref (fun _ -> failwith "internal error when computing location")
+
+(* Location covering tokens [bp .. ep-1].  For an empty interval, a
+   zero-width location just after token [bp-1] (or [Ploc.dummy] when at
+   the very beginning of the stream). *)
+let loc_of_token_interval bp ep =
+  if bp == ep then
+    if bp == 0 then Ploc.dummy else Ploc.after (!floc (bp - 1)) 0 1
+  else
+    let loc1 = !floc bp in let loc2 = !floc (pred ep) in Ploc.encl loc1 loc2
+
+(* Short display name of a symbol for error messages; tokens are
+   rendered by the lexer's [tok_text], anything unnamed becomes "???". *)
+let name_of_symbol entry =
+  function
+    Snterm e -> "[" ^ e.ename ^ "]"
+  | Snterml (e, l) -> "[" ^ e.ename ^ " level " ^ l ^ "]"
+  | Sself | Snext -> "[" ^ entry.ename ^ "]"
+  | Stoken tok -> entry.egram.glexer.Plexing.tok_text tok
+  | _ -> "???"
+
+(* Collect the maximal chain of brother-less token (or [Svala] token)
+   nodes starting from [tree], accumulating previous tokens in
+   [rev_tokl].  Returns [None] when the chain consists of the initial
+   token only (nothing accumulated); otherwise the reversed token list,
+   the last token, and the remaining subtree. *)
+let rec get_token_list entry rev_tokl last_tok tree =
+  match tree with
+    Node {node = Stoken tok; son = son; brother = DeadEnd} ->
+      get_token_list entry (last_tok :: rev_tokl) (tok, None) son
+  | Node {node = Svala (ls, Stoken tok); son = son; brother = DeadEnd} ->
+      get_token_list entry (last_tok :: rev_tokl) (tok, Some ls) son
+  | _ -> if rev_tokl = [] then None else Some (rev_tokl, last_tok, tree)
+
+(* Human-readable description of what a failing symbol was expecting,
+   descending into composite symbols down to their first element. *)
+let rec name_of_symbol_failed entry =
+  function
+    Sfacto s -> name_of_symbol_failed entry s
+  | Slist0 s -> name_of_symbol_failed entry s
+  | Slist0sep (s, _, _) -> name_of_symbol_failed entry s
+  | Slist1 s -> name_of_symbol_failed entry s
+  | Slist1sep (s, _, _) -> name_of_symbol_failed entry s
+  | Sopt s -> name_of_symbol_failed entry s
+  | Sflag s -> name_of_symbol_failed entry s
+  | Stree t -> name_of_tree_failed entry t
+  | Svala (_, s) -> name_of_symbol_failed entry s
+  | Smeta (_, s :: _, _) -> name_of_symbol_failed entry s
+  | s -> name_of_symbol entry s
+(* Same for a failing tree: names the alternatives at the root ("a or
+   b"); a run of chained tokens (see [get_token_list]) is rendered as the
+   space-separated sequence of its token texts. *)
+and name_of_tree_failed entry =
+  function
+    Node {node = s; brother = bro; son = son} ->
+      let tokl =
+        match s with
+          Stoken tok -> get_token_list entry [] (tok, None) son
+        | Svala (ls, Stoken tok) -> get_token_list entry [] (tok, Some ls) son
+        | _ -> None
+      in
+      begin match tokl with
+        None ->
+          let txt = name_of_symbol_failed entry s in
+          let txt =
+            match s, son with
+              Sopt _, Node _ -> txt ^ " or " ^ name_of_tree_failed entry son
+            | _ -> txt
+          in
+          let txt =
+            match bro with
+              DeadEnd | LocAct (_, _) -> txt
+            | Node _ -> txt ^ " or " ^ name_of_tree_failed entry bro
+          in
+          txt
+      | Some (rev_tokl, last_tok, son) ->
+          List.fold_left
+            (fun s (tok, _) ->
+               (if s = "" then "" else s ^ " ") ^
+               entry.egram.glexer.Plexing.tok_text tok)
+            "" (List.rev (last_tok :: rev_tokl))
+      end
+  | DeadEnd | LocAct (_, _) -> "???"
+
+(* For verbose error reporting: look for the (physically) same [tree] or
+   [prev_symb] inside the levels of an entry description, rebuilding a
+   single-branch tree (brothers cut to [DeadEnd]) that shows the rule the
+   failure occurred in.  Falls back to [tree] itself when nothing is
+   found or the entry is a [Dparser]. *)
+let search_tree_in_entry prev_symb tree =
+  function
+    Dlevels levels ->
+      let rec search_levels =
+        function
+          [] -> tree
+        | level :: levels ->
+            match search_level level with
+              Some tree -> tree
+            | None -> search_levels levels
+      and search_level level =
+        match search_tree level.lsuffix with
+          Some t -> Some (Node {node = Sself; son = t; brother = DeadEnd})
+        | None -> search_tree level.lprefix
+      and search_tree t =
+        if tree <> DeadEnd && t == tree then Some t
+        else
+          match t with
+            Node n ->
+              begin match search_symbol n.node with
+                Some symb ->
+                  Some (Node {node = symb; son = n.son; brother = DeadEnd})
+              | None ->
+                  match search_tree n.son with
+                    Some t ->
+                      Some (Node {node = n.node; son = t; brother = DeadEnd})
+                  | None -> search_tree n.brother
+              end
+          | LocAct (_, _) | DeadEnd -> None
+      and search_symbol symb =
+        match symb with
+          Snterm _ | Snterml (_, _) | Slist0 _ | Slist0sep (_, _, _) |
+          Slist1 _ | Slist1sep (_, _, _) | Sopt _ | Stoken _ | Stree _
+          when symb == prev_symb ->
+            Some symb
+        | Slist0 symb ->
+            begin match search_symbol symb with
+              Some symb -> Some (Slist0 symb)
+            | None -> None
+            end
+        | Slist0sep (symb, sep, b) ->
+            begin match search_symbol symb with
+              Some symb -> Some (Slist0sep (symb, sep, b))
+            | None ->
+                match search_symbol sep with
+                  Some sep -> Some (Slist0sep (symb, sep, b))
+                | None -> None
+            end
+        | Slist1 symb ->
+            begin match search_symbol symb with
+              Some symb -> Some (Slist1 symb)
+            | None -> None
+            end
+        | Slist1sep (symb, sep, b) ->
+            begin match search_symbol symb with
+              Some symb -> Some (Slist1sep (symb, sep, b))
+            | None ->
+                match search_symbol sep with
+                  Some sep -> Some (Slist1sep (symb, sep, b))
+                | None -> None
+            end
+        | Sopt symb ->
+            begin match search_symbol symb with
+              Some symb -> Some (Sopt symb)
+            | None -> None
+            end
+        | Stree t ->
+            begin match search_tree t with
+              Some t -> Some (Stree t)
+            | None -> None
+            end
+        | _ -> None
+      in
+      search_levels levels
+  | Dparser _ -> tree
+
+(* When set, parse errors additionally print the offending rule on the
+   error formatter. *)
+let error_verbose = ref false
+
+(* Build the error message for a failure at [tree] after having parsed
+   [prev_symb] (whose semantic result is [prev_symb_result]).  For
+   separated lists the result is inspected (via [Obj.magic], assuming a
+   list) to decide whether an element or a separator was expected. *)
+let tree_failed entry prev_symb_result prev_symb tree =
+  let txt = name_of_tree_failed entry tree in
+  let txt =
+    match prev_symb with
+      Slist0 s ->
+        let txt1 = name_of_symbol_failed entry s in
+        txt1 ^ " or " ^ txt ^ " expected"
+    | Slist1 s ->
+        let txt1 = name_of_symbol_failed entry s in
+        txt1 ^ " or " ^ txt ^ " expected"
+    | Slist0sep (s, sep, _) ->
+        begin match Obj.magic prev_symb_result with
+          [] ->
+            let txt1 = name_of_symbol_failed entry s in
+            txt1 ^ " or " ^ txt ^ " expected"
+        | _ ->
+            let txt1 = name_of_symbol_failed entry sep in
+            txt1 ^ " or " ^ txt ^ " expected"
+        end
+    | Slist1sep (s, sep, _) ->
+        begin match Obj.magic prev_symb_result with
+          [] ->
+            let txt1 = name_of_symbol_failed entry s in
+            txt1 ^ " or " ^ txt ^ " expected"
+        | _ ->
+            let txt1 = name_of_symbol_failed entry sep in
+            txt1 ^ " or " ^ txt ^ " expected"
+        end
+    | Sopt _ | Sflag _ | Stree _ | Svala (_, _) -> txt ^ " expected"
+    | _ -> txt ^ " expected after " ^ name_of_symbol_failed entry prev_symb
+  in
+  if !error_verbose then
+    begin let tree = search_tree_in_entry prev_symb tree entry.edesc in
+      let ppf = err_formatter in
+      fprintf ppf "@[<v 0>@,";
+      fprintf ppf "----------------------------------@,";
+      fprintf ppf "Parse error in entry [%s], rule:@;<0 2>" entry.ename;
+      fprintf ppf "@[";
+      print_level ppf pp_force_newline (flatten_tree tree);
+      fprintf ppf "@]@,";
+      fprintf ppf "----------------------------------@,";
+      fprintf ppf "@]@."
+    end;
+  txt ^ " (in [" ^ entry.ename ^ "])"
+
+(* Like [tree_failed] for a single expected symbol. *)
+let symb_failed entry prev_symb_result prev_symb symb =
+  let tree = Node {node = symb; brother = DeadEnd; son = DeadEnd} in
+  tree_failed entry prev_symb_result prev_symb tree
+
+(* Identity cast used to apply an untyped semantic action to a value. *)
+external app : Obj.t -> 'a = "%identity"
+
+(* Does level [lev] carry the label [n]? *)
+let is_level_labelled n lev =
+  match lev.lname with
+    Some n1 -> n = n1
+  | None -> false
+
+(* 0-based index (outermost level first) of the level labelled [lab] in
+   [entry].  Fails with [Failure "unknown level ..."] when absent;
+   raises [Not_found] for a [Dparser] entry. *)
+let level_number entry lab =
+  let rec lookup levn =
+    function
+      [] -> failwith ("unknown level " ^ lab)
+    | lev :: levs ->
+        if is_level_labelled lab lev then levn else lookup (succ levn) levs
+  in
+  match entry.edesc with
+    Dlevels elev -> lookup 0 elev
+  | Dparser _ -> raise Not_found
+
+(* Lift a symbol to its "top" form, used during error recovery:
+   SELF/NEXT and leveled calls become a plain call to the whole entry.
+   Raises [Stream.Failure] for symbols with no such form. *)
+let rec top_symb entry =
+  function
+    Sself | Snext -> Snterm entry
+  | Snterml (e, _) -> Snterm e
+  | Slist1sep (s, sep, b) -> Slist1sep (top_symb entry s, sep, b)
+  | _ -> raise Stream.Failure
+
+(* Entry a symbol refers to (SELF/NEXT refer to [entry] itself); raises
+   [Stream.Failure] for non-entry symbols. *)
+let entry_of_symb entry =
+  function
+    Sself | Snext -> entry
+  | Snterm e -> e
+  | Snterml (e, _) -> e
+  | _ -> raise Stream.Failure
+
+(* Lift the root symbol of a tree with [top_symb]; raises
+   [Stream.Failure] when the tree has no node. *)
+let top_tree entry =
+  function
+    Node {node = s; brother = bro; son = son} ->
+      Node {node = top_symb entry s; brother = bro; son = son}
+  | LocAct (_, _) | DeadEnd -> raise Stream.Failure
+
+(* Succeed (yielding action [p]) only when nothing has been consumed
+   since stream position [bp]; otherwise raise [Stream.Failure]. *)
+let skip_if_empty bp p strm =
+  if Stream.count strm == bp then Gramext.action (fun a -> p strm)
+  else raise Stream.Failure
+
+(* Recovery continuation: reparse via the [econtinue] of the entry
+   behind symbol [s], then run the rest-of-rule parser [p1]; a failure of
+   [p1] becomes a hard [Stream.Error]. *)
+let continue entry bp a s son p1 (strm__ : _ Stream.t) =
+  let a = (entry_of_symb entry s).econtinue 0 bp a strm__ in
+  let act =
+    try p1 strm__ with
+      Stream.Failure -> raise (Stream.Error (tree_failed entry a s son))
+  in
+  Gramext.action (fun _ -> app act a)
+
+(* Error recovery after a symbol succeeded but the rest of the rule
+   failed: (1) retry with the son tree lifted by [top_tree]; (2) allow an
+   empty match if no input was consumed since [bp]; (3) [continue] via
+   the symbol's entry. *)
+let do_recover parser_of_tree entry nlevn alevn bp a s son
+    (strm__ : _ Stream.t) =
+  try parser_of_tree entry nlevn alevn (top_tree entry son) strm__ with
+    Stream.Failure ->
+      try
+        skip_if_empty bp (fun (strm__ : _ Stream.t) -> raise Stream.Failure)
+          strm__
+      with Stream.Failure ->
+        continue entry bp a s son (parser_of_tree entry nlevn alevn son)
+          strm__
+
+(* When set, disable recovery: fail immediately with a parse error. *)
+let strict_parsing = ref false
+
+(* Recovery entry point used by [parser_cont]. *)
+let recover parser_of_tree entry nlevn alevn bp a s son strm =
+  if !strict_parsing then raise (Stream.Error (tree_failed entry a s son))
+  else do_recover parser_of_tree entry nlevn alevn bp a s son strm
+
+(* Index just past the last token inspected by [peek_nth] (stream count
+   plus lookahead depth). *)
+let token_count = ref 0
+
+(* Peek the [n]-th token ahead (1-based) without consuming anything;
+   [None] when the stream has fewer than [n] tokens.  Updates
+   [token_count] as a side effect. *)
+let peek_nth n strm =
+  let list = Stream.npeek n strm in
+  token_count := Stream.count strm + n;
+  let rec loop list n =
+    match list, n with
+      x :: _, 1 -> Some x
+    | _ :: l, n -> loop l (n - 1)
+    | [], _ -> None
+  in
+  loop list n
+
+(* Flag read by [call_and_push]: when a sub-parser sets it, its result is
+   not pushed onto the accumulator.  NOTE(review): presumably set by
+   special "skip item" parsers defined elsewhere — confirm. *)
+let item_skipped = ref false
+
+(* Run parser [ps] and cons its result onto [al], unless the parser
+   signalled [item_skipped]; the flag is cleared before and after. *)
+let call_and_push ps al strm =
+  item_skipped := false;
+  let a = ps strm in
+  let al = if !item_skipped then al else a :: al in item_skipped := false; al
+
+(* Build a token matcher for pattern [tok].  With [vala = None] the plain
+   lexer matcher is used.  With [vala = Some al] antiquotation patterns
+   ("V <con>" for [al = []], else ("V", a) for each [a]) are tried first:
+   a match yields [Ploc.VaAnt], a plain match yields [Ploc.VaVal]. *)
+let token_ematch gram (tok, vala) =
+  let tematch = gram.glexer.Plexing.tok_match tok in
+  match vala with
+    Some al ->
+      let pa =
+        match al with
+          [] ->
+            let t = "V " ^ fst tok in gram.glexer.Plexing.tok_match (t, "")
+        | al ->
+            let rec loop =
+              function
+                a :: al ->
+                  let pa = gram.glexer.Plexing.tok_match ("V", a) in
+                  let pal = loop al in
+                  (fun tok -> try pa tok with Stream.Failure -> pal tok)
+              | [] -> fun tok -> raise Stream.Failure
+            in
+            loop al
+      in
+      (fun tok ->
+         try Obj.repr (Ploc.VaAnt (Obj.magic (pa tok : string))) with
+           Stream.Failure -> Obj.repr (Ploc.VaVal (tematch tok)))
+  | None -> fun tok -> Obj.repr (tematch tok : string)
+
+(* Core compilation of grammar trees and symbols into stream parsers.
+   [parser_of_tree entry nlevn alevn tree] compiles [tree]; [nlevn] is
+   the level used for [Snext], [alevn] the level used for a leading
+   [Sself].  Runs of consecutive brother-less token nodes (detected with
+   [get_token_list]) are matched by lookahead ([peek_nth]) and consumed
+   atomically.  All semantic results are untyped ([Obj.t], applied with
+   [app]). *)
+let rec parser_of_tree entry nlevn alevn =
+  function
+    DeadEnd -> (fun (strm__ : _ Stream.t) -> raise Stream.Failure)
+  | LocAct (act, _) -> (fun (strm__ : _ Stream.t) -> act)
+  | Node {node = Sself; son = LocAct (act, _); brother = DeadEnd} ->
+      (fun (strm__ : _ Stream.t) ->
+         let a = entry.estart alevn strm__ in app act a)
+  | Node {node = Scut; son = son; brother = _} ->
+      parser_of_tree entry nlevn alevn son
+  | Node {node = Sself; son = LocAct (act, _); brother = bro} ->
+      let p2 = parser_of_tree entry nlevn alevn bro in
+      (fun (strm__ : _ Stream.t) ->
+         match
+           try Some (entry.estart alevn strm__) with Stream.Failure -> None
+         with
+           Some a -> app act a
+         | _ -> p2 strm__)
+  | Node {node = s; son = son; brother = DeadEnd} ->
+      let tokl =
+        match s with
+          Stoken tok -> get_token_list entry [] (tok, None) son
+        | Svala (ls, Stoken tok) -> get_token_list entry [] (tok, Some ls) son
+        | _ -> None
+      in
+      begin match tokl with
+        None ->
+          let ps = parser_of_symbol entry nlevn s in
+          let p1 = parser_of_tree entry nlevn alevn son in
+          let p1 = parser_cont p1 entry nlevn alevn s son in
+          (fun (strm__ : _ Stream.t) ->
+             let bp = Stream.count strm__ in
+             let a = ps strm__ in
+             let act =
+               try p1 bp a strm__ with
+                 Stream.Failure ->
+                   raise (Stream.Error (tree_failed entry a s son))
+             in
+             app act a)
+      | Some (rev_tokl, (last_tok, svala), son) ->
+          let lt =
+            let t = Stoken last_tok in
+            match svala with
+              Some l -> Svala (l, t)
+            | None -> t
+          in
+          let p1 = parser_of_tree entry nlevn alevn son in
+          let p1 = parser_cont p1 entry nlevn alevn lt son in
+          parser_of_token_list entry s son p1
+            (fun (strm__ : _ Stream.t) -> raise Stream.Failure) rev_tokl
+            (last_tok, svala)
+      end
+  | Node {node = s; son = son; brother = bro} ->
+      let tokl =
+        match s with
+          Stoken tok -> get_token_list entry [] (tok, None) son
+        | Svala (ls, Stoken tok) -> get_token_list entry [] (tok, Some ls) son
+        | _ -> None
+      in
+      match tokl with
+        None ->
+          let ps = parser_of_symbol entry nlevn s in
+          let p1 = parser_of_tree entry nlevn alevn son in
+          let p1 = parser_cont p1 entry nlevn alevn s son in
+          let p2 = parser_of_tree entry nlevn alevn bro in
+          (fun (strm : _ Stream.t) ->
+             let bp = Stream.count strm in
+             match try Some (ps strm) with Stream.Failure -> None with
+               Some a ->
+                 begin match
+                   (try Some (p1 bp a strm) with Stream.Failure -> None)
+                 with
+                   Some act -> app act a
+                 | None -> raise (Stream.Error (tree_failed entry a s son))
+                 end
+             | None -> p2 strm)
+      | Some (rev_tokl, (last_tok, vala), son) ->
+          let lt =
+            let t = Stoken last_tok in
+            match vala with
+              Some ls -> Svala (ls, t)
+            | None -> t
+          in
+          let p2 = parser_of_tree entry nlevn alevn bro in
+          let p1 = parser_of_tree entry nlevn alevn son in
+          let p1 = parser_cont p1 entry nlevn alevn lt son in
+          let p1 =
+            parser_of_token_list entry lt son p1 p2 rev_tokl (last_tok, vala)
+          in
+          fun (strm__ : _ Stream.t) ->
+            try p1 strm__ with Stream.Failure -> p2 strm__
+(* Continuation after symbol [s] succeeded: try the son parser, falling
+   back to [recover] on failure. *)
+and parser_cont p1 entry nlevn alevn s son bp a (strm__ : _ Stream.t) =
+  try p1 strm__ with
+    Stream.Failure ->
+      recover parser_of_tree entry nlevn alevn bp a s son strm__
+(* Match a run of chained tokens by lookahead: each earlier token is only
+   peeked; the whole run is consumed ([Stream.junk] n times) when the
+   last token matches, then [p1] parses the rest. *)
+and parser_of_token_list entry s son p1 p2 rev_tokl last_tok =
+  let plast =
+    let n = List.length rev_tokl + 1 in
+    let tematch = token_ematch entry.egram last_tok in
+    let ps strm =
+      match peek_nth n strm with
+        Some tok ->
+          let r = tematch tok in
+          for _i = 1 to n do Stream.junk strm done; Obj.repr r
+      | None -> raise Stream.Failure
+    in
+    fun (strm : _ Stream.t) ->
+      let bp = Stream.count strm in
+      let a = ps strm in
+      match try Some (p1 bp a strm) with Stream.Failure -> None with
+        Some act -> app act a
+      | None -> raise (Stream.Error (tree_failed entry a s son))
+  in
+  match List.rev rev_tokl with
+    [] -> (fun (strm__ : _ Stream.t) -> plast strm__)
+  | tok :: tokl ->
+      let tematch = token_ematch entry.egram tok in
+      let ps strm =
+        match peek_nth 1 strm with
+          Some tok -> tematch tok
+        | None -> raise Stream.Failure
+      in
+      let p1 =
+        let rec loop n =
+          function
+            [] -> plast
+          | tok :: tokl ->
+              let tematch = token_ematch entry.egram tok in
+              let ps strm =
+                match peek_nth n strm with
+                  Some tok -> tematch tok
+                | None -> raise Stream.Failure
+              in
+              let p1 = loop (n + 1) tokl in
+              fun (strm__ : _ Stream.t) ->
+                let a = ps strm__ in let act = p1 strm__ in app act a
+        in
+        loop 2 tokl
+      in
+      fun (strm__ : _ Stream.t) ->
+        let a = ps strm__ in let act = p1 strm__ in app act a
+(* Compile a single symbol into a stream parser. *)
+and parser_of_symbol entry nlevn =
+  function
+    Sfacto s -> parser_of_symbol entry nlevn s
+  | Smeta (_, symbl, act) ->
+      let act = Obj.magic act entry symbl in
+      Obj.magic
+        (List.fold_left
+           (fun act symb -> Obj.magic act (parser_of_symbol entry nlevn symb))
+           act symbl)
+  | Slist0 s ->
+      let ps = call_and_push (parser_of_symbol entry nlevn s) in
+      let rec loop al (strm__ : _ Stream.t) =
+        match try Some (ps al strm__) with Stream.Failure -> None with
+          Some al -> loop al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let a = loop [] strm__ in Obj.repr (List.rev a))
+  | Slist0sep (symb, sep, false) ->
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            let al =
+              try ps al strm__ with
+                Stream.Failure ->
+                  raise (Stream.Error (symb_failed entry v sep symb))
+            in
+            kont al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps [] strm__) with Stream.Failure -> None with
+           Some al -> let a = kont al strm__ in Obj.repr (List.rev a)
+         | _ -> Obj.repr [])
+  | Slist0sep (symb, sep, true) ->
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            begin match
+              (try Some (ps al strm__) with Stream.Failure -> None)
+            with
+              Some al -> kont al strm__
+            | _ -> al
+            end
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps [] strm__) with Stream.Failure -> None with
+           Some al -> let a = kont al strm__ in Obj.repr (List.rev a)
+         | _ -> Obj.repr [])
+  | Slist1 s ->
+      let ps = call_and_push (parser_of_symbol entry nlevn s) in
+      let rec loop al (strm__ : _ Stream.t) =
+        match try Some (ps al strm__) with Stream.Failure -> None with
+          Some al -> loop al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let al = ps [] strm__ in
+         let a = loop al strm__ in Obj.repr (List.rev a))
+  | Slist1sep (symb, sep, false) ->
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            let al =
+              try ps al strm__ with
+                Stream.Failure ->
+                  let a =
+                    try parse_top_symb entry symb strm__ with
+                      Stream.Failure ->
+                        raise (Stream.Error (symb_failed entry v sep symb))
+                  in
+                  a :: al
+            in
+            kont al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let al = ps [] strm__ in
+         let a = kont al strm__ in Obj.repr (List.rev a))
+  | Slist1sep (symb, sep, true) ->
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            begin match
+              (try Some (ps al strm__) with Stream.Failure -> None)
+            with
+              Some al -> kont al strm__
+            | _ ->
+                match
+                  try Some (parse_top_symb entry symb strm__) with
+                    Stream.Failure -> None
+                with
+                  Some a -> kont (a :: al) strm__
+                | _ -> al
+            end
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let al = ps [] strm__ in
+         let a = kont al strm__ in Obj.repr (List.rev a))
+  | Sopt s ->
+      let ps = parser_of_symbol entry nlevn s in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps strm__) with Stream.Failure -> None with
+           Some a -> Obj.repr (Some a)
+         | _ -> Obj.repr None)
+  | Sflag s ->
+      let ps = parser_of_symbol entry nlevn s in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps strm__) with Stream.Failure -> None with
+           Some _ -> Obj.repr true
+         | _ -> Obj.repr false)
+  | Stree t ->
+      let pt = parser_of_tree entry 1 0 t in
+      (fun (strm__ : _ Stream.t) ->
+         let bp = Stream.count strm__ in
+         let a = pt strm__ in
+         let ep = Stream.count strm__ in
+         let loc = loc_of_token_interval bp ep in app a loc)
+  | Svala (al, s) ->
+      let pa =
+        match al with
+          [] ->
+            let t =
+              match s with
+                Sflag _ -> Some "V FLAG"
+              | Sopt _ -> Some "V OPT"
+              | Slist0 _ | Slist0sep (_, _, _) -> Some "V LIST"
+              | Slist1 _ | Slist1sep (_, _, _) -> Some "V LIST"
+              | Stoken (con, "") -> Some ("V " ^ con)
+              | _ -> None
+            in
+            begin match t with
+              Some t -> parser_of_token entry (t, "")
+            | None -> fun (strm__ : _ Stream.t) -> raise Stream.Failure
+            end
+        | al ->
+            let rec loop =
+              function
+                a :: al ->
+                  let pa = parser_of_token entry ("V", a) in
+                  let pal = loop al in
+                  (fun (strm__ : _ Stream.t) ->
+                     try pa strm__ with Stream.Failure -> pal strm__)
+              | [] -> fun (strm__ : _ Stream.t) -> raise Stream.Failure
+            in
+            loop al
+      in
+      let ps = parser_of_symbol entry nlevn s in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (pa strm__) with Stream.Failure -> None with
+           Some a -> Obj.repr (Ploc.VaAnt (Obj.magic a : string))
+         | _ -> let a = ps strm__ in Obj.repr (Ploc.VaVal a))
+  | Snterm e -> (fun (strm__ : _ Stream.t) -> e.estart 0 strm__)
+  | Snterml (e, l) ->
+      (fun (strm__ : _ Stream.t) -> e.estart (level_number e l) strm__)
+  | Sself -> (fun (strm__ : _ Stream.t) -> entry.estart 0 strm__)
+  | Snext -> (fun (strm__ : _ Stream.t) -> entry.estart nlevn strm__)
+  | Scut -> (fun (strm__ : _ Stream.t) -> Obj.repr ())
+  | Stoken tok -> parser_of_token entry tok
+(* Match one token against pattern [tok], consuming it on success. *)
+and parser_of_token entry tok =
+  let f = entry.egram.glexer.Plexing.tok_match tok in
+  fun strm ->
+    match Stream.peek strm with
+      Some tok -> let r = f tok in Stream.junk strm; Obj.repr r
+    | None -> raise Stream.Failure
+(* Parse [symb] lifted to its "top" form (used during list recovery). *)
+and parse_top_symb entry symb = parser_of_symbol entry 0 (top_symb entry symb)
+
+let rec start_parser_of_levels entry clevn =
+ function
+ [] -> (fun levn (strm__ : _ Stream.t) -> raise Stream.Failure)
+ | lev :: levs ->
+ let p1 = start_parser_of_levels entry (succ clevn) levs in
+ match lev.lprefix with
+ DeadEnd -> p1
+ | tree ->
+ let alevn =
+ match lev.assoc with
+ LeftA | NonA -> succ clevn
+ | RightA -> clevn
+ in
+ let p2 = parser_of_tree entry (succ clevn) alevn tree in
+ match levs with
+ [] ->
+ (fun levn strm ->
+ (* this code should be there but is commented to preserve
+ compatibility with previous versions... with this code,
+ the grammar entry e: [[ "x"; a = e | "y" ]] should fail
+ because it should be: e: [RIGHTA[ "x"; a = e | "y" ]]...
+ if levn > clevn then match strm with parser []
+ else
+ *)
+ let (strm__ : _ Stream.t) = strm in
+ let bp = Stream.count strm__ in
+ let act = p2 strm__ in
+ let ep = Stream.count strm__ in
+ let a = app act (loc_of_token_interval bp ep) in
+ entry.econtinue levn bp a strm)
+ | _ ->
+ fun levn strm ->
+ if levn > clevn then p1 levn strm
+ else
+ let (strm__ : _ Stream.t) = strm in
+ let bp = Stream.count strm__ in
+ match try Some (p2 strm__) with Stream.Failure -> None with
+ Some act ->
+ let ep = Stream.count strm__ in
+ let a = app act (loc_of_token_interval bp ep) in
+ entry.econtinue levn bp a strm
+ | _ -> p1 levn strm__
+
+let rec continue_parser_of_levels entry clevn =
+ function
+ [] -> (fun levn bp a (strm__ : _ Stream.t) -> raise Stream.Failure)
+ | lev :: levs ->
+ let p1 = continue_parser_of_levels entry (succ clevn) levs in
+ match lev.lsuffix with
+ DeadEnd -> p1
+ | tree ->
+ let alevn =
+ match lev.assoc with
+ LeftA | NonA -> succ clevn
+ | RightA -> clevn
+ in
+ let p2 = parser_of_tree entry (succ clevn) alevn tree in
+ fun levn bp a strm ->
+ if levn > clevn then p1 levn bp a strm
+ else
+ let (strm__ : _ Stream.t) = strm in
+ try p1 levn bp a strm__ with
+ Stream.Failure ->
+ let act = p2 strm__ in
+ let ep = Stream.count strm__ in
+ let a = app act a (loc_of_token_interval bp ep) in
+ entry.econtinue levn bp a strm
+
+let continue_parser_of_entry entry =
+ match entry.edesc with
+ Dlevels elev ->
+ let p = continue_parser_of_levels entry 0 elev in
+ (fun levn bp a (strm__ : _ Stream.t) ->
+ try p levn bp a strm__ with Stream.Failure -> a)
+ | Dparser p -> fun levn bp a (strm__ : _ Stream.t) -> raise Stream.Failure
+
+let empty_entry ename levn strm =
+ raise (Stream.Error ("entry [" ^ ename ^ "] is empty"))
+
+let start_parser_of_entry entry =
+ match entry.edesc with
+ Dlevels [] -> empty_entry entry.ename
+ | Dlevels elev -> start_parser_of_levels entry 0 elev
+ | Dparser p -> fun levn strm -> p strm
+
+(* Extend syntax *)
+
+let init_entry_functions entry =
+ entry.estart <-
+ (fun lev strm ->
+ let f = start_parser_of_entry entry in entry.estart <- f; f lev strm);
+ entry.econtinue <-
+ (fun lev bp a strm ->
+ let f = continue_parser_of_entry entry in
+ entry.econtinue <- f; f lev bp a strm)
+
+let extend_entry entry position rules =
+ try
+ let elev = Gramext.levels_of_rules entry position rules in
+ entry.edesc <- Dlevels elev; init_entry_functions entry
+ with Plexing.Error s ->
+ Printf.eprintf "Lexer initialization error:\n- %s\n" s;
+ flush stderr;
+ failwith "Grammar.extend"
+
+(* Deleting a rule *)
+
+let delete_rule entry sl =
+ match entry.edesc with
+ Dlevels levs ->
+ let levs = Gramext.delete_rule_in_level_list entry sl levs in
+ entry.edesc <- Dlevels levs;
+ entry.estart <-
+ (fun lev strm ->
+ let f = start_parser_of_entry entry in
+ entry.estart <- f; f lev strm);
+ entry.econtinue <-
+ (fun lev bp a strm ->
+ let f = continue_parser_of_entry entry in
+ entry.econtinue <- f; f lev bp a strm)
+ | Dparser _ -> ()
+
+(* Normal interface *)
+
+type token = string * string
+type g = token Gramext.grammar
+
+type ('self, 'a) ty_symbol = token Gramext.g_symbol
+type ('self, 'f, 'r) ty_rule = ('self, Obj.t) ty_symbol list
+type 'a ty_production = ('a, Obj.t, Obj.t) ty_rule * Gramext.g_action
+
+let create_toktab () = Hashtbl.create 301
+let gcreate glexer =
+ {gtokens = create_toktab (); glexer = glexer }
+
+let tokens g con =
+ let list = ref [] in
+ Hashtbl.iter
+ (fun (p_con, p_prm) c -> if p_con = con then list := (p_prm, !c) :: !list)
+ g.gtokens;
+ !list
+
+let glexer g = g.glexer
+
+type 'te gen_parsable =
+ { pa_chr_strm : char Stream.t;
+ pa_tok_strm : 'te Stream.t;
+ pa_loc_func : Plexing.location_function }
+
+type parsable = token gen_parsable
+
+let parsable g cs =
+ let (ts, lf) = g.glexer.Plexing.tok_func cs in
+ {pa_chr_strm = cs; pa_tok_strm = ts; pa_loc_func = lf}
+
+let parse_parsable entry p =
+ let efun = entry.estart 0 in
+ let ts = p.pa_tok_strm in
+ let cs = p.pa_chr_strm in
+ let fun_loc = p.pa_loc_func in
+ let restore =
+ let old_floc = !floc in
+ let old_tc = !token_count in
+ fun () -> floc := old_floc; token_count := old_tc
+ in
+ let get_loc () =
+ try
+ let cnt = Stream.count ts in
+ let loc = fun_loc cnt in
+ if !token_count - 1 <= cnt then loc
+ else Ploc.encl loc (fun_loc (!token_count - 1))
+ with Failure _ -> Ploc.make_unlined (Stream.count cs, Stream.count cs + 1)
+ in
+ floc := fun_loc;
+ token_count := 0;
+ try let r = efun ts in restore (); r with
+ Stream.Failure ->
+ let loc = get_loc () in
+ restore ();
+ Ploc.raise loc (Stream.Error ("illegal begin of " ^ entry.ename))
+ | Stream.Error _ as exc ->
+ let loc = get_loc () in restore (); Ploc.raise loc exc
+ | exc ->
+ let loc = Stream.count cs, Stream.count cs + 1 in
+ restore (); Ploc.raise (Ploc.make_unlined loc) exc
+
+let find_entry e s =
+ let rec find_levels =
+ function
+ [] -> None
+ | lev :: levs ->
+ match find_tree lev.lsuffix with
+ None ->
+ begin match find_tree lev.lprefix with
+ None -> find_levels levs
+ | x -> x
+ end
+ | x -> x
+ and find_symbol =
+ function
+ Sfacto s -> find_symbol s
+ | Snterm e -> if e.ename = s then Some e else None
+ | Snterml (e, _) -> if e.ename = s then Some e else None
+ | Smeta (_, sl, _) -> find_symbol_list sl
+ | Slist0 s -> find_symbol s
+ | Slist0sep (s, _, _) -> find_symbol s
+ | Slist1 s -> find_symbol s
+ | Slist1sep (s, _, _) -> find_symbol s
+ | Sopt s -> find_symbol s
+ | Sflag s -> find_symbol s
+ | Stree t -> find_tree t
+ | Svala (_, s) -> find_symbol s
+ | Sself | Snext | Scut | Stoken _ -> None
+ and find_symbol_list =
+ function
+ s :: sl ->
+ begin match find_symbol s with
+ None -> find_symbol_list sl
+ | x -> x
+ end
+ | [] -> None
+ and find_tree =
+ function
+ Node {node = s; brother = bro; son = son} ->
+ begin match find_symbol s with
+ None ->
+ begin match find_tree bro with
+ None -> find_tree son
+ | x -> x
+ end
+ | x -> x
+ end
+ | LocAct (_, _) | DeadEnd -> None
+ in
+ match e.edesc with
+ Dlevels levs ->
+ begin match find_levels levs with
+ Some e -> e
+ | None -> raise Not_found
+ end
+ | Dparser _ -> raise Not_found
+module Entry =
+ struct
+ type te = token
+ type 'a e = te g_entry
+ let create g n =
+ {egram = g; ename = n; elocal = false; estart = empty_entry n;
+ econtinue = (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ edesc = Dlevels []}
+ let parse_parsable (entry : 'a e) p : 'a =
+ Obj.magic (parse_parsable entry p : Obj.t)
+ let parse (entry : 'a e) cs : 'a =
+ let parsable = parsable entry.egram cs in parse_parsable entry parsable
+ let parse_parsable_all (entry : 'a e) p : 'a =
+ begin try Obj.magic [(parse_parsable entry p : Obj.t)] with
+ Stream.Failure | Stream.Error _ -> []
+ end
+ let parse_all (entry : 'a e) cs : 'a =
+ let parsable = parsable entry.egram cs in
+ parse_parsable_all entry parsable
+ let parse_token_stream (entry : 'a e) ts : 'a =
+ Obj.magic (entry.estart 0 ts : Obj.t)
+ let _warned_using_parse_token = ref false
+ let parse_token (entry : 'a e) ts : 'a =
+ (* commented: too often warned in Coq...
+ if not warned_using_parse_token.val then do {
+ eprintf "<W> use of Grammar.Entry.parse_token ";
+ eprintf "deprecated since 2017-06-16\n%!";
+ eprintf "use Grammar.Entry.parse_token_stream instead\n%! ";
+ warned_using_parse_token.val := True
+ }
+ else ();
+ *)
+ parse_token_stream entry ts
+ let name e = e.ename
+ let of_parser g n (p : te Stream.t -> 'a) : 'a e =
+ {egram = g; ename = n; elocal = false;
+ estart = (fun _ -> (Obj.magic p : te Stream.t -> Obj.t));
+ econtinue = (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ edesc = Dparser (Obj.magic p : te Stream.t -> Obj.t)}
+ external obj : 'a e -> te Gramext.g_entry = "%identity"
+ let print ppf e = fprintf ppf "%a@." print_entry (obj e)
+ let find e s = find_entry (obj e) s
+ end
+
+(* Unsafe *)
+
+let clear_entry e =
+ e.estart <- (fun _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ e.econtinue <- (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ match e.edesc with
+ Dlevels _ -> e.edesc <- Dlevels []
+ | Dparser _ -> ()
+
+let gram_reinit g glexer = Hashtbl.clear g.gtokens; g.glexer <- glexer
+
+module Unsafe =
+ struct
+ let gram_reinit = gram_reinit
+ let clear_entry = clear_entry
+ end
+
+(* Functorial interface *)
+
+module type GLexerType = sig type te val lexer : te Plexing.lexer end
+
+module type S =
+ sig
+ type te
+ type parsable
+ val parsable : char Stream.t -> parsable
+ val tokens : string -> (string * int) list
+ val glexer : te Plexing.lexer
+ module Entry :
+ sig
+ type 'a e
+ val create : string -> 'a e
+ val parse : 'a e -> parsable -> 'a
+ val name : 'a e -> string
+ val of_parser : string -> (te Stream.t -> 'a) -> 'a e
+ val parse_token_stream : 'a e -> te Stream.t -> 'a
+ val print : Format.formatter -> 'a e -> unit
+ external obj : 'a e -> te Gramext.g_entry = "%identity"
+ val parse_token : 'a e -> te Stream.t -> 'a
+ end
+ type ('self, 'a) ty_symbol
+ type ('self, 'f, 'r) ty_rule
+ type 'a ty_production
+ val s_facto : ('self, 'a) ty_symbol -> ('self, 'a) ty_symbol
+ val s_nterm : 'a Entry.e -> ('self, 'a) ty_symbol
+ val s_nterml : 'a Entry.e -> string -> ('self, 'a) ty_symbol
+ val s_list0 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+ val s_list0sep :
+ ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+ ('self, 'a list) ty_symbol
+ val s_list1 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+ val s_list1sep :
+ ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+ ('self, 'a list) ty_symbol
+ val s_opt : ('self, 'a) ty_symbol -> ('self, 'a option) ty_symbol
+ val s_flag : ('self, 'a) ty_symbol -> ('self, bool) ty_symbol
+ val s_self : ('self, 'self) ty_symbol
+ val s_next : ('self, 'self) ty_symbol
+ val s_token : Plexing.pattern -> ('self, string) ty_symbol
+ val s_rules : 'a ty_production list -> ('self, 'a) ty_symbol
+ val s_vala :
+ string list -> ('self, 'a) ty_symbol -> ('self, 'a Ploc.vala) ty_symbol
+ val r_stop : ('self, 'r, 'r) ty_rule
+ val r_next :
+ ('self, 'a, 'r) ty_rule -> ('self, 'b) ty_symbol ->
+ ('self, 'b -> 'a, 'r) ty_rule
+ val r_cut : ('self, 'a, 'r) ty_rule -> ('self, 'a, 'r) ty_rule
+ val production : ('a, 'f, Ploc.t -> 'a) ty_rule * 'f -> 'a ty_production
+ module Unsafe :
+ sig
+ val gram_reinit : te Plexing.lexer -> unit
+ val clear_entry : 'a Entry.e -> unit
+ end
+ val extend :
+ 'a Entry.e -> Gramext.position option ->
+ (string option * Gramext.g_assoc option *
+ (te Gramext.g_symbol list * Gramext.g_action) list)
+ list ->
+ unit
+ val safe_extend :
+ 'a Entry.e -> Gramext.position option ->
+ (string option * Gramext.g_assoc option * 'a ty_production list)
+ list ->
+ unit
+ val delete_rule : 'a Entry.e -> te Gramext.g_symbol list -> unit
+ val safe_delete_rule : 'a Entry.e -> ('a, 'r, 'f) ty_rule -> unit
+ end
+
+module GMake (L : GLexerType) =
+ struct
+ type te = L.te
+ type parsable = te gen_parsable
+ let gram = gcreate L.lexer
+ let parsable cs =
+ let (ts, lf) = L.lexer.Plexing.tok_func cs in
+ {pa_chr_strm = cs; pa_tok_strm = ts; pa_loc_func = lf}
+ let tokens = tokens gram
+ let glexer = glexer gram
+ module Entry =
+ struct
+ type 'a e = te g_entry
+ let create n =
+ {egram = gram; ename = n; elocal = false; estart = empty_entry n;
+ econtinue =
+ (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ edesc = Dlevels []}
+ external obj : 'a e -> te Gramext.g_entry = "%identity"
+ let parse (e : 'a e) p : 'a =
+ Obj.magic (parse_parsable e p : Obj.t)
+ let parse_token_stream (e : 'a e) ts : 'a =
+ Obj.magic (e.estart 0 ts : Obj.t)
+ let _warned_using_parse_token = ref false
+ let parse_token (entry : 'a e) ts : 'a =
+ (* commented: too often warned in Coq...
+ if not warned_using_parse_token.val then do {
+ eprintf "<W> use of Entry.parse_token ";
+ eprintf "deprecated since 2017-06-16\n%!";
+ eprintf "use Entry.parse_token_stream instead\n%! ";
+ warned_using_parse_token.val := True
+ }
+ else ();
+ *)
+ parse_token_stream entry ts
+ let name e = e.ename
+ let of_parser n (p : te Stream.t -> 'a) : 'a e =
+ {egram = gram; ename = n; elocal = false;
+ estart = (fun _ -> (Obj.magic p : te Stream.t -> Obj.t));
+ econtinue =
+ (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ edesc = Dparser (Obj.magic p : te Stream.t -> Obj.t)}
+ let print ppf e = fprintf ppf "%a@." print_entry (obj e)
+ end
+ type ('self, 'a) ty_symbol = te Gramext.g_symbol
+ type ('self, 'f, 'r) ty_rule = ('self, Obj.t) ty_symbol list
+ type 'a ty_production = ('a, Obj.t, Obj.t) ty_rule * Gramext.g_action
+ let s_facto s = Sfacto s
+ let s_nterm e = Snterm e
+ let s_nterml e l = Snterml (e, l)
+ let s_list0 s = Slist0 s
+ let s_list0sep s sep b = Slist0sep (s, sep, b)
+ let s_list1 s = Slist1 s
+ let s_list1sep s sep b = Slist1sep (s, sep, b)
+ let s_opt s = Sopt s
+ let s_flag s = Sflag s
+ let s_self = Sself
+ let s_next = Snext
+ let s_token tok = Stoken tok
+ let s_rules (t : Obj.t ty_production list) = Gramext.srules (Obj.magic t)
+ let s_vala sl s = Svala (sl, s)
+ let r_stop = []
+ let r_next r s = r @ [s]
+ let r_cut r = r @ [Scut]
+ let production
+ (p : ('a, 'f, Ploc.t -> 'a) ty_rule * 'f) : 'a ty_production =
+ Obj.magic p
+ module Unsafe =
+ struct
+ let gram_reinit = gram_reinit gram
+ let clear_entry = clear_entry
+ end
+ let extend = extend_entry
+ let safe_extend e pos
+ (r :
+ (string option * Gramext.g_assoc option * Obj.t ty_production list)
+ list) =
+ extend e pos (Obj.magic r)
+ let delete_rule e r = delete_rule (Entry.obj e) r
+ let safe_delete_rule = delete_rule
+ end
diff --git a/gramlib/grammar.mli b/gramlib/grammar.mli
new file mode 100644
index 0000000000..54b7eb5539
--- /dev/null
+++ b/gramlib/grammar.mli
@@ -0,0 +1,170 @@
+(* camlp5r *)
+(* grammar.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Extensible grammars.
+
+ This module implements the Camlp5 extensible grammars system.
+ Grammars entries can be extended using the [EXTEND] statement,
+ added by loading the Camlp5 [pa_extend.cmo] file. *)
+
+type g
+ (** The type for grammars, holding entries. *)
+type token = string * string
+
+type parsable
+val parsable : g -> char Stream.t -> parsable
+ (** Type and value allowing to keep the same token stream between
+ several calls of entries of the same grammar, to prevent possible
+ loss of tokens. To be used with [Entry.parse_parsable] below *)
+
+module Entry :
+ sig
+ type 'a e
+ val create : g -> string -> 'a e
+ val parse : 'a e -> char Stream.t -> 'a
+ val parse_all : 'a e -> char Stream.t -> 'a list
+ val parse_parsable : 'a e -> parsable -> 'a
+ val name : 'a e -> string
+ val of_parser : g -> string -> (token Stream.t -> 'a) -> 'a e
+ val parse_token_stream : 'a e -> token Stream.t -> 'a
+ val print : Format.formatter -> 'a e -> unit
+ val find : 'a e -> string -> Obj.t e
+ external obj : 'a e -> token Gramext.g_entry = "%identity"
+ val parse_token : 'a e -> token Stream.t -> 'a
+ end
+ (** Module to handle entries.
+- [Entry.e] is the type for entries returning values of type ['a].
+- [Entry.create g n] creates a new entry named [n] in the grammar [g].
+- [Entry.parse e] returns the stream parser of the entry [e].
+- [Entry.parse_all e] returns the stream parser returning all possible
+ values while parsing with the entry [e]: may return more than one
+ value when the parsing algorithm is [Backtracking]
+- [Entry.parse_all e] returns the parser returning all possible values.
+- [Entry.parse_parsable e] returns the parsable parser of the entry [e].
+- [Entry.name e] returns the name of the entry [e].
+- [Entry.of_parser g n p] makes an entry from a token stream parser.
+- [Entry.parse_token_stream e] returns the token stream parser of the
+ entry [e].
+- [Entry.print e] displays the entry [e] using [Format].
+- [Entry.find e s] finds the entry named [s] in the rules of [e].
+- [Entry.obj e] converts an entry into a [Gramext.g_entry] allowing
+ to see what it holds.
+- [Entry.parse_token]: deprecated since 2017-06-16; old name for
+ [Entry.parse_token_stream] *)
+
+type ('self, 'a) ty_symbol
+(** Type of grammar symbols. A type-safe wrapper around Gramext.symbol. The
+ first type argument is the type of the ambient entry, the second one is the
+ type of the produced value. *)
+
+type ('self, 'f, 'r) ty_rule
+
+type 'a ty_production
+
+(** {6 Clearing grammars and entries} *)
+
+module Unsafe :
+ sig
+ val gram_reinit : g -> token Plexing.lexer -> unit
+ val clear_entry : 'a Entry.e -> unit
+ end
+ (** Module for clearing grammars and entries. To be manipulated with
+ care, because: 1) reinitializing a grammar destroys all tokens
+ and there may be problems with the associated lexer if there
+ are keywords; 2) clearing an entry does not destroy the tokens
+ used only by itself.
+- [Unsafe.gram_reinit g lex] removes the tokens of the grammar
+- and sets [lex] as a new lexer for [g]. Warning: the lexer
+- itself is not reinitialized.
+- [Unsafe.clear_entry e] removes all rules of the entry [e]. *)
+
+(** {6 Functorial interface} *)
+
+ (** Alternative for grammars use. Grammars are no more Ocaml values:
+ there is no type for them. Modules generated preserve the
+ rule "an entry cannot call an entry of another grammar" by
+ normal OCaml typing. *)
+
+module type GLexerType = sig type te val lexer : te Plexing.lexer end
+ (** The input signature for the functor [Grammar.GMake]: [te] is the
+ type of the tokens. *)
+
+module type S =
+ sig
+ type te
+ type parsable
+ val parsable : char Stream.t -> parsable
+ val tokens : string -> (string * int) list
+ val glexer : te Plexing.lexer
+ module Entry :
+ sig
+ type 'a e
+ val create : string -> 'a e
+ val parse : 'a e -> parsable -> 'a
+ val name : 'a e -> string
+ val of_parser : string -> (te Stream.t -> 'a) -> 'a e
+ val parse_token_stream : 'a e -> te Stream.t -> 'a
+ val print : Format.formatter -> 'a e -> unit
+ external obj : 'a e -> te Gramext.g_entry = "%identity"
+ val parse_token : 'a e -> te Stream.t -> 'a
+ end
+ type ('self, 'a) ty_symbol
+ type ('self, 'f, 'r) ty_rule
+ type 'a ty_production
+ val s_facto : ('self, 'a) ty_symbol -> ('self, 'a) ty_symbol
+ val s_nterm : 'a Entry.e -> ('self, 'a) ty_symbol
+ val s_nterml : 'a Entry.e -> string -> ('self, 'a) ty_symbol
+ val s_list0 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+ val s_list0sep :
+ ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+ ('self, 'a list) ty_symbol
+ val s_list1 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+ val s_list1sep :
+ ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+ ('self, 'a list) ty_symbol
+ val s_opt : ('self, 'a) ty_symbol -> ('self, 'a option) ty_symbol
+ val s_flag : ('self, 'a) ty_symbol -> ('self, bool) ty_symbol
+ val s_self : ('self, 'self) ty_symbol
+ val s_next : ('self, 'self) ty_symbol
+ val s_token : Plexing.pattern -> ('self, string) ty_symbol
+ val s_rules : 'a ty_production list -> ('self, 'a) ty_symbol
+ val s_vala :
+ string list -> ('self, 'a) ty_symbol -> ('self, 'a Ploc.vala) ty_symbol
+ val r_stop : ('self, 'r, 'r) ty_rule
+ val r_next :
+ ('self, 'a, 'r) ty_rule -> ('self, 'b) ty_symbol ->
+ ('self, 'b -> 'a, 'r) ty_rule
+ val r_cut : ('self, 'a, 'r) ty_rule -> ('self, 'a, 'r) ty_rule
+ val production : ('a, 'f, Ploc.t -> 'a) ty_rule * 'f -> 'a ty_production
+
+ module Unsafe :
+ sig
+ val gram_reinit : te Plexing.lexer -> unit
+ val clear_entry : 'a Entry.e -> unit
+ end
+ val extend :
+ 'a Entry.e -> Gramext.position option ->
+ (string option * Gramext.g_assoc option *
+ (te Gramext.g_symbol list * Gramext.g_action) list)
+ list ->
+ unit
+ val safe_extend :
+ 'a Entry.e -> Gramext.position option ->
+ (string option * Gramext.g_assoc option * 'a ty_production list)
+ list ->
+ unit
+ val delete_rule : 'a Entry.e -> te Gramext.g_symbol list -> unit
+ val safe_delete_rule : 'a Entry.e -> ('a, 'f, 'r) ty_rule -> unit
+ end
+ (** Signature type of the functor [Grammar.GMake]. The types and
+ functions are almost the same as in the generic interface, but:
+- Grammars are not values. Functions holding a grammar as parameter
+ do not have this parameter yet.
+- The type [parsable] is used in function [parse] instead of
+ the char stream, avoiding the possible loss of tokens.
+- The type of tokens (expressions and patterns) can be any
+ type (instead of (string * string)); the module parameter
+ must specify a way to show them as (string * string) *)
+
+module GMake (L : GLexerType) : S with type te = L.te
diff --git a/gramlib/plexing.ml b/gramlib/plexing.ml
new file mode 100644
index 0000000000..beebcd016e
--- /dev/null
+++ b/gramlib/plexing.ml
@@ -0,0 +1,217 @@
+(* camlp5r *)
+(* plexing.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+type pattern = string * string
+
+exception Error of string
+
+type location = Ploc.t
+type location_function = int -> location
+type 'te lexer_func = char Stream.t -> 'te Stream.t * location_function
+
+type 'te lexer =
+ { tok_func : 'te lexer_func;
+ tok_using : pattern -> unit;
+ tok_removing : pattern -> unit;
+ mutable tok_match : pattern -> 'te -> string;
+ tok_text : pattern -> string;
+ mutable tok_comm : location list option }
+
+let make_loc = Ploc.make_unlined
+let dummy_loc = Ploc.dummy
+
+let lexer_text (con, prm) =
+ if con = "" then "'" ^ prm ^ "'"
+ else if prm = "" then con
+ else con ^ " '" ^ prm ^ "'"
+
+let locerr () = failwith "Lexer: location function"
+let loct_create () = ref (Array.make 1024 None), ref false
+let loct_func (loct, ov) i =
+ match
+ if i < 0 || i >= Array.length !loct then
+ if !ov then Some dummy_loc else None
+ else Array.unsafe_get !loct i
+ with
+ Some loc -> loc
+ | None -> locerr ()
+let loct_add (loct, ov) i loc =
+ if i >= Array.length !loct then
+ let new_tmax = Array.length !loct * 2 in
+ if new_tmax < Sys.max_array_length then
+ let new_loct = Array.make new_tmax None in
+ Array.blit !loct 0 new_loct 0 (Array.length !loct);
+ loct := new_loct;
+ !loct.(i) <- Some loc
+ else ov := true
+ else !loct.(i) <- Some loc
+
+let make_stream_and_location next_token_loc =
+ let loct = loct_create () in
+ let ts =
+ Stream.from
+ (fun i ->
+ let (tok, loc) = next_token_loc () in loct_add loct i loc; Some tok)
+ in
+ ts, loct_func loct
+
+let lexer_func_of_parser next_token_loc cs =
+ let line_nb = ref 1 in
+ let bolpos = ref 0 in
+ make_stream_and_location (fun () -> next_token_loc (cs, line_nb, bolpos))
+
+let lexer_func_of_ocamllex lexfun cs =
+ let lb =
+ Lexing.from_function
+ (fun s n ->
+ try Bytes.set s 0 (Stream.next cs); 1 with Stream.Failure -> 0)
+ in
+ let next_token_loc _ =
+ let tok = lexfun lb in
+ let loc = make_loc (Lexing.lexeme_start lb, Lexing.lexeme_end lb) in
+ tok, loc
+ in
+ make_stream_and_location next_token_loc
+
+(* Char and string tokens to real chars and string *)
+
+let buff = ref (Bytes.create 80)
+let store len x =
+ if len >= Bytes.length !buff then
+ buff := Bytes.(cat !buff (create (length !buff)));
+ Bytes.set !buff len x;
+ succ len
+let get_buff len = Bytes.sub !buff 0 len
+
+let valch x = Char.code x - Char.code '0'
+let valch_a x = Char.code x - Char.code 'a' + 10
+let valch_A x = Char.code x - Char.code 'A' + 10
+
+let rec backslash s i =
+ if i = String.length s then raise Not_found
+ else
+ match s.[i] with
+ 'n' -> '\n', i + 1
+ | 'r' -> '\r', i + 1
+ | 't' -> '\t', i + 1
+ | 'b' -> '\b', i + 1
+ | '\\' -> '\\', i + 1
+ | '"' -> '"', i + 1
+ | '\'' -> '\'', i + 1
+ | '0'..'9' as c -> backslash1 (valch c) s (i + 1)
+ | 'x' -> backslash1h s (i + 1)
+ | _ -> raise Not_found
+and backslash1 cod s i =
+ if i = String.length s then '\\', i - 1
+ else
+ match s.[i] with
+ '0'..'9' as c -> backslash2 (10 * cod + valch c) s (i + 1)
+ | _ -> '\\', i - 1
+and backslash2 cod s i =
+ if i = String.length s then '\\', i - 2
+ else
+ match s.[i] with
+ '0'..'9' as c -> Char.chr (10 * cod + valch c), i + 1
+ | _ -> '\\', i - 2
+and backslash1h s i =
+ if i = String.length s then '\\', i - 1
+ else
+ match s.[i] with
+ '0'..'9' as c -> backslash2h (valch c) s (i + 1)
+ | 'a'..'f' as c -> backslash2h (valch_a c) s (i + 1)
+ | 'A'..'F' as c -> backslash2h (valch_A c) s (i + 1)
+ | _ -> '\\', i - 1
+and backslash2h cod s i =
+ if i = String.length s then '\\', i - 2
+ else
+ match s.[i] with
+ '0'..'9' as c -> Char.chr (16 * cod + valch c), i + 1
+ | 'a'..'f' as c -> Char.chr (16 * cod + valch_a c), i + 1
+ | 'A'..'F' as c -> Char.chr (16 * cod + valch_A c), i + 1
+ | _ -> '\\', i - 2
+
+let rec skip_indent s i =
+ if i = String.length s then i
+ else
+ match s.[i] with
+ ' ' | '\t' -> skip_indent s (i + 1)
+ | _ -> i
+
+let skip_opt_linefeed s i =
+ if i = String.length s then i else if s.[i] = '\010' then i + 1 else i
+
+let eval_char s =
+ if String.length s = 1 then s.[0]
+ else if String.length s = 0 then failwith "invalid char token"
+ else if s.[0] = '\\' then
+ if String.length s = 2 && s.[1] = '\'' then '\''
+ else
+ try
+ let (c, i) = backslash s 1 in
+ if i = String.length s then c else raise Not_found
+ with Not_found -> failwith "invalid char token"
+ else failwith "invalid char token"
+
+let eval_string loc s =
+ let rec loop len i =
+ if i = String.length s then get_buff len
+ else
+ let (len, i) =
+ if s.[i] = '\\' then
+ let i = i + 1 in
+ if i = String.length s then failwith "invalid string token"
+ else if s.[i] = '"' then store len '"', i + 1
+ else
+ match s.[i] with
+ '\010' -> len, skip_indent s (i + 1)
+ | '\013' -> len, skip_indent s (skip_opt_linefeed s (i + 1))
+ | c ->
+ try let (c, i) = backslash s i in store len c, i with
+ Not_found -> store (store len '\\') c, i + 1
+ else store len s.[i], i + 1
+ in
+ loop len i
+ in
+ Bytes.to_string (loop 0 0)
+
+let default_match =
+ function
+ "ANY", "" -> (fun (con, prm) -> prm)
+ | "ANY", v ->
+ (fun (con, prm) -> if v = prm then v else raise Stream.Failure)
+ | p_con, "" ->
+ (fun (con, prm) -> if con = p_con then prm else raise Stream.Failure)
+ | p_con, p_prm ->
+ fun (con, prm) ->
+ if con = p_con && prm = p_prm then prm else raise Stream.Failure
+
+let input_file = ref ""
+let line_nb = ref (ref 0)
+let bol_pos = ref (ref 0)
+let restore_lexing_info = ref None
+
+(* The lexing buffer used by pa_lexer.cmo *)
+
+let rev_implode l =
+ let s = Bytes.create (List.length l) in
+ let rec loop i =
+ function
+ c :: l -> Bytes.unsafe_set s i c; loop (i - 1) l
+ | [] -> s
+ in
+ Bytes.to_string (loop (Bytes.length s - 1) l)
+
+module Lexbuf :
+ sig
+ type t
+ val empty : t
+ val add : char -> t -> t
+ val get : t -> string
+ end =
+ struct
+ type t = char list
+ let empty = []
+ let add c l = c :: l
+ let get = rev_implode
+ end
diff --git a/gramlib/plexing.mli b/gramlib/plexing.mli
new file mode 100644
index 0000000000..6b5f718bc3
--- /dev/null
+++ b/gramlib/plexing.mli
@@ -0,0 +1,108 @@
+(* camlp5r *)
+(* plexing.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Lexing for Camlp5 grammars.
+
+ This module defines the Camlp5 lexer type to be used in extensible
+ grammars (see module [Grammar]). It also provides some useful functions
+ to create lexers. *)
+
+type pattern = string * string
+ (* Type for values used by the generated code of the EXTEND
+ statement to represent terminals in entry rules.
+- The first string is the constructor name (must start with
+ an uppercase character). When it is empty, the second string
+ is supposed to be a keyword.
+- The second string is the constructor parameter. Empty if it
+ has no parameter (corresponding to the 'wildcard' pattern).
+- The way tokens patterns are interpreted to parse tokens is done
+ by the lexer, function [tok_match] below. *)
+
+exception Error of string
+ (** A lexing error exception to be used by lexers. *)
+
+(** Lexer type *)
+
+type 'te lexer =
+ { tok_func : 'te lexer_func;
+ tok_using : pattern -> unit;
+ tok_removing : pattern -> unit;
+ mutable tok_match : pattern -> 'te -> string;
+ tok_text : pattern -> string;
+ mutable tok_comm : Ploc.t list option }
+and 'te lexer_func = char Stream.t -> 'te Stream.t * location_function
+and location_function = int -> Ploc.t
+ (** The type of a function giving the location of a token in the
+ source from the token number in the stream (starting from zero). *)
+
+val lexer_text : pattern -> string
+ (** A simple [tok_text] function. *)
+
+val default_match : pattern -> string * string -> string
+ (** A simple [tok_match] function, applying to the token type
+ [(string * string)] *)
+
+(** Lexers from parsers or ocamllex
+
+ The functions below create lexer functions either from a [char stream]
+ parser or for an [ocamllex] function. With the returned function [f],
+ it is possible to get a simple lexer (of the type [Plexing.glexer] above):
+ {[
+ { Plexing.tok_func = f;
+ Plexing.tok_using = (fun _ -> ());
+ Plexing.tok_removing = (fun _ -> ());
+ Plexing.tok_match = Plexing.default_match;
+ Plexing.tok_text = Plexing.lexer_text;
+ Plexing.tok_comm = None }
+ ]}
+ Note that a better [tok_using] function should check the used tokens
+ and raise [Plexing.Error] for incorrect ones. The other functions
+ [tok_removing], [tok_match] and [tok_text] may have other implementations
+ as well. *)
+
+val lexer_func_of_parser :
+ (char Stream.t * int ref * int ref -> 'te * Ploc.t) -> 'te lexer_func
+ (** A lexer function from a lexer written as a char stream parser
+ returning the next token and its location. The two references
+ with the char stream contain the current line number and the
+ position of the beginning of the current line. *)
+val lexer_func_of_ocamllex : (Lexing.lexbuf -> 'te) -> 'te lexer_func
+ (** A lexer function from a lexer created by [ocamllex] *)
+
+(** Function to build a stream and a location function *)
+
+val make_stream_and_location :
+ (unit -> 'te * Ploc.t) -> 'te Stream.t * location_function
+ (** General function *)
+
+(** Useful functions and values *)
+
+val eval_char : string -> char
+val eval_string : Ploc.t -> string -> string
+ (** Convert a char or a string token, where the backslashes had not
+ been interpreted into a real char or string; raise [Failure] if
+ bad backslash sequence found; [Plexing.eval_char (Char.escaped c)]
+ would return [c] and [Plexing.eval_string (String.escaped s)] would
+ return [s] *)
+
+val restore_lexing_info : (int * int) option ref
+val input_file : string ref
+val line_nb : int ref ref
+val bol_pos : int ref ref
+ (** Special variables used to reinitialize line numbers and position
+ of beginning of line with their correct current values when a parser
+ is called several times with the same character stream. Necessary
+ for directives (e.g. #load or #use) which interrupt the parsing.
+ Without usage of these variables, locations after the directives
+ can be wrong. *)
+
+(** The lexing buffer used by streams lexers *)
+
+module Lexbuf :
+ sig
+ type t
+ val empty : t
+ val add : char -> t -> t
+ val get : t -> string
+ end
diff --git a/gramlib/ploc.ml b/gramlib/ploc.ml
new file mode 100644
index 0000000000..cb71f72678
--- /dev/null
+++ b/gramlib/ploc.ml
@@ -0,0 +1,176 @@
+(* camlp5r *)
+(* ploc.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+type t =
+ { fname : string;
+ line_nb : int;
+ bol_pos : int;
+ line_nb_last : int;
+ bol_pos_last : int;
+ bp : int;
+ ep : int;
+ comm : string;
+ ecomm : string }
+
+let make_loc fname line_nb bol_pos (bp, ep) comm =
+ {fname = fname; line_nb = line_nb; bol_pos = bol_pos;
+ line_nb_last = line_nb; bol_pos_last = bol_pos; bp = bp; ep = ep;
+ comm = comm; ecomm = ""}
+
+let make_unlined (bp, ep) =
+ {fname = ""; line_nb = 1; bol_pos = 0; line_nb_last = -1; bol_pos_last = 0;
+ bp = bp; ep = ep; comm = ""; ecomm = ""}
+
+let dummy =
+ {fname = ""; line_nb = 1; bol_pos = 0; line_nb_last = -1; bol_pos_last = 0;
+ bp = 0; ep = 0; comm = ""; ecomm = ""}
+
+let file_name loc = loc.fname
+let first_pos loc = loc.bp
+let last_pos loc = loc.ep
+let line_nb loc = loc.line_nb
+let bol_pos loc = loc.bol_pos
+let line_nb_last loc = loc.line_nb_last
+let bol_pos_last loc = loc.bol_pos_last
+let comment loc = loc.comm
+let comment_last loc = loc.ecomm
+
+(* *)
+
+let encl loc1 loc2 =
+ if loc1.bp < loc2.bp then
+ if loc1.ep < loc2.ep then
+ {fname = loc1.fname; line_nb = loc1.line_nb; bol_pos = loc1.bol_pos;
+ line_nb_last = loc2.line_nb_last; bol_pos_last = loc2.bol_pos_last;
+ bp = loc1.bp; ep = loc2.ep; comm = loc1.comm; ecomm = loc2.comm}
+ else loc1
+ else if loc2.ep < loc1.ep then
+ {fname = loc2.fname; line_nb = loc2.line_nb; bol_pos = loc2.bol_pos;
+ line_nb_last = loc1.line_nb_last; bol_pos_last = loc1.bol_pos_last;
+ bp = loc2.bp; ep = loc1.ep; comm = loc2.comm; ecomm = loc1.comm}
+ else loc2
+let shift sh loc = {loc with bp = sh + loc.bp; ep = sh + loc.ep}
+let sub loc sh len = {loc with bp = loc.bp + sh; ep = loc.bp + sh + len}
+let after loc sh len = {loc with bp = loc.ep + sh; ep = loc.ep + sh + len}
+let with_comment loc comm = {loc with comm = comm}
+
+let name = ref "loc"
+
+let from_file fname loc =
+ let (bp, ep) = first_pos loc, last_pos loc in
+ try
+ let ic = open_in_bin fname in
+ let strm = Stream.of_channel ic in
+ let rec loop fname lin =
+ let rec not_a_line_dir col (strm__ : _ Stream.t) =
+ let cnt = Stream.count strm__ in
+ match Stream.peek strm__ with
+ Some c ->
+ Stream.junk strm__;
+ let s = strm__ in
+ if cnt < bp then
+ if c = '\n' then loop fname (lin + 1)
+ else not_a_line_dir (col + 1) s
+ else let col = col - (cnt - bp) in fname, lin, col, col + ep - bp
+ | _ -> fname, lin, col, col + 1
+ in
+ let rec a_line_dir str n col (strm__ : _ Stream.t) =
+ match Stream.peek strm__ with
+ Some '\n' -> Stream.junk strm__; loop str n
+ | Some _ -> Stream.junk strm__; a_line_dir str n (col + 1) strm__
+ | _ -> raise Stream.Failure
+ in
+ let rec spaces col (strm__ : _ Stream.t) =
+ match Stream.peek strm__ with
+ Some ' ' -> Stream.junk strm__; spaces (col + 1) strm__
+ | _ -> col
+ in
+ let rec check_string str n col (strm__ : _ Stream.t) =
+ match Stream.peek strm__ with
+ Some '"' ->
+ Stream.junk strm__;
+ let col =
+ try spaces (col + 1) strm__ with
+ Stream.Failure -> raise (Stream.Error "")
+ in
+ a_line_dir str n col strm__
+ | Some c when c <> '\n' ->
+ Stream.junk strm__;
+ check_string (str ^ String.make 1 c) n (col + 1) strm__
+ | _ -> not_a_line_dir col strm__
+ in
+ let check_quote n col (strm__ : _ Stream.t) =
+ match Stream.peek strm__ with
+ Some '"' -> Stream.junk strm__; check_string "" n (col + 1) strm__
+ | _ -> not_a_line_dir col strm__
+ in
+ let rec check_num n col (strm__ : _ Stream.t) =
+ match Stream.peek strm__ with
+ Some ('0'..'9' as c) ->
+ Stream.junk strm__;
+ check_num (10 * n + Char.code c - Char.code '0') (col + 1) strm__
+ | _ -> let col = spaces col strm__ in check_quote n col strm__
+ in
+ let begin_line (strm__ : _ Stream.t) =
+ match Stream.peek strm__ with
+ Some '#' ->
+ Stream.junk strm__;
+ let col =
+ try spaces 1 strm__ with
+ Stream.Failure -> raise (Stream.Error "")
+ in
+ check_num 0 col strm__
+ | _ -> not_a_line_dir 0 strm__
+ in
+ begin_line strm
+ in
+ let r =
+ try loop fname 1 with
+ Stream.Failure ->
+ let bol = bol_pos loc in fname, line_nb loc, bp - bol, ep - bol
+ in
+ close_in ic; r
+ with Sys_error _ -> fname, 1, bp, ep
+
+let second_line fname ep0 (line, bp) ep =
+ let ic = open_in fname in
+ seek_in ic bp;
+ let rec loop line bol p =
+ if p = ep then
+ begin close_in ic; if bol = bp then line, ep0 else line, ep - bol end
+ else
+ let (line, bol) =
+ match input_char ic with
+ '\n' -> line + 1, p + 1
+ | _ -> line, bol
+ in
+ loop line bol (p + 1)
+ in
+ loop line bp bp
+
+let get loc =
+ if loc.fname = "" || loc.fname = "-" then
+ loc.line_nb, loc.bp - loc.bol_pos, loc.line_nb, loc.ep - loc.bol_pos,
+ loc.ep - loc.bp
+ else
+ let (bl, bc, ec) =
+ loc.line_nb, loc.bp - loc.bol_pos, loc.ep - loc.bol_pos
+ in
+ let (el, eep) = second_line loc.fname ec (bl, loc.bp) loc.ep in
+ bl, bc, el, eep, ec - bc
+
+let call_with r v f a =
+ let saved = !r in
+ try r := v; let b = f a in r := saved; b with e -> r := saved; raise e
+
+exception Exc of t * exn
+
+let raise loc exc =
+ match exc with
+ Exc (_, _) -> raise exc
+ | _ -> raise (Exc (loc, exc))
+
+type 'a vala =
+ VaAnt of string
+ | VaVal of 'a
diff --git a/gramlib/ploc.mli b/gramlib/ploc.mli
new file mode 100644
index 0000000000..d2ab62db06
--- /dev/null
+++ b/gramlib/ploc.mli
@@ -0,0 +1,122 @@
+(* camlp5r *)
+(* ploc.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Locations and some pervasive type and value. *)
+
+type t
+
+(* located exceptions *)
+
+exception Exc of t * exn
+ (** [Ploc.Exc loc e] is an encapsulation of the exception [e] with
+ the input location [loc]. To be used to specify a location
+ for an error. This exception must not be raised by [raise] but
+ rather by [Ploc.raise] (see below), to prevent the risk of several
+ encapsulations of [Ploc.Exc]. *)
+val raise : t -> exn -> 'a
+ (** [Ploc.raise loc e], if [e] is already the exception [Ploc.Exc],
+ re-raise it (ignoring the new location [loc]), else raise the
+ exception [Ploc.Exc loc e]. *)
+
+(* making locations *)
+
+val make_loc : string -> int -> int -> int * int -> string -> t
+ (** [Ploc.make_loc fname line_nb bol_pos (bp, ep) comm] creates a location
+ starting at line number [line_nb], where the position of the beginning
+ of the line is [bol_pos] and between the positions [bp] (included) and
+ [ep] excluded. And [comm] is the comment before the location. The
+ positions are in number of characters since the begin of the stream. *)
+val make_unlined : int * int -> t
+ (** [Ploc.make_unlined] is like [Ploc.make_loc] except that the line number
+ is not provided (to be used e.g. when the line number is unknown). *)
+
+val dummy : t
+ (** [Ploc.dummy] is a dummy location, used in situations when location
+ has no meaning. *)
+
+(* getting location info *)
+
+val file_name : t -> string
+ (** [Ploc.file_name loc] returns the file name of the location. *)
+val first_pos : t -> int
+ (** [Ploc.first_pos loc] returns the position of the begin of the location
+ in number of characters since the beginning of the stream. *)
+val last_pos : t -> int
+ (** [Ploc.last_pos loc] returns the position of the first character not
+ in the location in number of characters since the beginning of the
+ stream. *)
+val line_nb : t -> int
+ (** [Ploc.line_nb loc] returns the line number of the location or [-1] if
+ the location does not contain a line number (i.e. built with
+ [Ploc.make_unlined]). *)
+val bol_pos : t -> int
+ (** [Ploc.bol_pos loc] returns the position of the beginning of the line
+ of the location in number of characters since the beginning of
+ the stream, or [0] if the location does not contain a line number
+ (i.e. built with [Ploc.make_unlined]). *)
+val line_nb_last : t -> int
+val bol_pos_last : t -> int
+ (** Return the line number and the position of the beginning of the line
+ of the last position. *)
+val comment : t -> string
+ (** [Ploc.comment loc] returns the comment before the location. *)
+val comment_last : t -> string
+ (** [Ploc.comment_last loc] returns the last comment of the location. *)
+
+(* combining locations *)
+
+val encl : t -> t -> t
+ (** [Ploc.encl loc1 loc2] returns the location starting at the
+ smallest start of [loc1] and [loc2] and ending at the greatest end
+ of them. In other words, it is the location enclosing [loc1] and
+ [loc2]. *)
+val shift : int -> t -> t
+ (** [Ploc.shift sh loc] returns the location [loc] shifted with [sh]
+ characters. The line number is not recomputed. *)
+val sub : t -> int -> int -> t
+ (** [Ploc.sub loc sh len] is the location [loc] shifted with [sh]
+ characters and with length [len]. The previous ending position
+ of the location is lost. *)
+val after : t -> int -> int -> t
+ (** [Ploc.after loc sh len] is the location just after loc (starting at
+ the end position of [loc]) shifted with [sh] characters and of length
+ [len]. *)
+val with_comment : t -> string -> t
+ (** Change the comment part of the given location *)
+
+(* miscellaneous *)
+
+val name : string ref
+ (** [Ploc.name.val] is the name of the location variable used in grammars
+ and in the predefined quotations for OCaml syntax trees. Default:
+ ["loc"] *)
+
+val get : t -> int * int * int * int * int
+ (** [Ploc.get loc] returns in order: 1/ the line number of the begin
+ of the location, 2/ its column, 3/ the line number of the first
+ character not in the location, 4/ its column and 5/ the length
+ of the location. The file where the location occurs (if any) may
+ be read during this operation. *)
+
+val from_file : string -> t -> string * int * int * int
+ (** [Ploc.from_file fname loc] reads the file [fname] up to the
+ location [loc] and returns the real input file, the line number
+ and the characters location in the line; the real input file
+ can be different from [fname] because of possibility of line
+ directives typically generated by /lib/cpp. *)
+
+(* pervasives *)
+
+type 'a vala =
+ VaAnt of string
+ | VaVal of 'a
+ (** Encloser of many abstract syntax tree nodes types, in "strict" mode.
+ This allows the system of antiquotations of abstract syntax tree
+ quotations to work when using the quotation kit [q_ast.cmo]. *)
+
+val call_with : 'a ref -> 'a -> ('b -> 'c) -> 'b -> 'c
+ (** [Ploc.call_with r v f a] sets the reference [r] to the value [v],
+ then call [f a], and resets [r] to its initial value. If [f a] raises
+ an exception, its initial value is also reset and the exception is
+ re-raised. The result is the result of [f a]. *)
diff --git a/gramlib/token.ml b/gramlib/token.ml
new file mode 100644
index 0000000000..77c737b880
--- /dev/null
+++ b/gramlib/token.ml
@@ -0,0 +1,37 @@
+(* camlp5r *)
+(* token.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+type pattern = Plexing.pattern
+
+exception Error of string
+
+type location = Ploc.t
+type location_function = int -> location
+type 'te lexer_func = char Stream.t -> 'te Stream.t * location_function
+
+type 'te glexer =
+ 'te Plexing.lexer =
+ { tok_func : 'te lexer_func;
+ tok_using : pattern -> unit;
+ tok_removing : pattern -> unit;
+ mutable tok_match : pattern -> 'te -> string;
+ tok_text : pattern -> string;
+ mutable tok_comm : location list option }
+
+let make_loc = Ploc.make_unlined
+let dummy_loc = Ploc.dummy
+
+let make_stream_and_location = Plexing.make_stream_and_location
+let lexer_func_of_parser = Plexing.lexer_func_of_parser
+let lexer_func_of_ocamllex = Plexing.lexer_func_of_ocamllex
+
+let eval_char = Plexing.eval_char
+let eval_string = Plexing.eval_string
+
+let lexer_text = Plexing.lexer_text
+let default_match = Plexing.default_match
+
+let line_nb = Plexing.line_nb
+let bol_pos = Plexing.bol_pos
+let restore_lexing_info = Plexing.restore_lexing_info
diff --git a/gramlib/token.mli b/gramlib/token.mli
new file mode 100644
index 0000000000..c1de5cefff
--- /dev/null
+++ b/gramlib/token.mli
@@ -0,0 +1,56 @@
+(* camlp5r *)
+(* token.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Module deprecated since Camlp5 version 5.00. Use now module Plexing.
+ Compatibility assumed. *)
+
+type pattern = Plexing.pattern
+
+exception Error of string
+ (** Use now [Plexing.Error] *)
+
+type 'te glexer =
+ 'te Plexing.lexer =
+ { tok_func : 'te Plexing.lexer_func;
+ tok_using : pattern -> unit;
+ tok_removing : pattern -> unit;
+ mutable tok_match : pattern -> 'te -> string;
+ tok_text : pattern -> string;
+ mutable tok_comm : Ploc.t list option }
+
+type 'te lexer_func = char Stream.t -> 'te Stream.t * location_function
+and location_function = int -> Ploc.t
+
+val lexer_text : pattern -> string
+ (** Use now [Plexing.lexer_text] *)
+val default_match : pattern -> string * string -> string
+ (** Use now [Plexing.default_match] *)
+
+val lexer_func_of_parser :
+ (char Stream.t * int ref * int ref -> 'te * Ploc.t) -> 'te lexer_func
+ (** Use now [Plexing.lexer_func_of_parser] *)
+val lexer_func_of_ocamllex : (Lexing.lexbuf -> 'te) -> 'te lexer_func
+ (** Use now [Plexing.lexer_func_of_ocamllex] *)
+
+val make_stream_and_location :
+ (unit -> 'te * Ploc.t) -> 'te Stream.t * location_function
+ (** Use now [Plexing.make_stream_and_location] *)
+
+val eval_char : string -> char
+ (** Use now [Plexing.eval_char] *)
+val eval_string : Ploc.t -> string -> string
+ (** Use now [Plexing.eval_string] *)
+
+val restore_lexing_info : (int * int) option ref
+ (** Use now [Plexing.restore_lexing_info] *)
+val line_nb : int ref ref
+ (** Use now [Plexing.line_nb] *)
+val bol_pos : int ref ref
+ (** Use now [Plexing.bol_pos] *)
+
+(* deprecated since version 4.08 *)
+
+type location = Ploc.t
+val make_loc : int * int -> Ploc.t
+val dummy_loc : Ploc.t
diff --git a/ide/wg_Command.ml b/ide/wg_Command.ml
index 8eddfb3149..06281d6287 100644
--- a/ide/wg_Command.ml
+++ b/ide/wg_Command.ml
@@ -98,7 +98,7 @@ object(self)
~packing:(vbox#pack ~fill:true ~expand:true) () in
let result = Wg_MessageView.message_view () in
router#register_route route_id result;
- r_bin#add (result :> GObj.widget);
+ r_bin#add_with_viewport (result :> GObj.widget);
views <- (frame#coerce, result, combo#entry) :: views;
let cb clr = result#misc#modify_base [`NORMAL, `NAME clr] in
let _ = background_color#connect#changed ~callback:cb in
@@ -152,9 +152,9 @@ object(self)
method show =
frame#show;
let cur_page = notebook#get_nth_page notebook#current_page in
- let _, _, e =
- List.find (fun (p,_,_) -> p#get_oid == cur_page#get_oid) views in
- e#misc#grab_focus ()
+ match List.find (fun (p,_,_) -> p#get_oid == cur_page#get_oid) views with
+ | (_, _, e) -> e#misc#grab_focus ()
+ | exception Not_found -> ()
method hide =
frame#hide
diff --git a/interp/constrextern.ml b/interp/constrextern.ml
index 98e1f6dd36..601099c6ff 100644
--- a/interp/constrextern.ml
+++ b/interp/constrextern.ml
@@ -26,7 +26,6 @@ open Notation_ops
open Glob_term
open Glob_ops
open Pattern
-open Nametab
open Notation
open Detyping
open Decl_kinds
@@ -213,7 +212,7 @@ let is_record indsp =
with Not_found -> false
let encode_record r =
- let indsp = global_inductive r in
+ let indsp = Nametab.global_inductive r in
if not (is_record indsp) then
user_err ?loc:r.CAst.loc ~hdr:"encode_record"
(str "This type is not a structure type.");
@@ -279,7 +278,7 @@ let extern_evar n l = CEvar (n,l)
may be inaccurate *)
let default_extern_reference ?loc vars r =
- shortest_qualid_of_global ?loc vars r
+ Nametab.shortest_qualid_of_global ?loc vars r
let my_extern_reference = ref default_extern_reference
@@ -481,7 +480,7 @@ and apply_notation_to_pattern ?loc gr ((subst,substlist),(nb_to_drop,more_args))
(make_pat_notation ?loc ntn (l,ll) l2') key)
end
| SynDefRule kn ->
- let qid = shortest_qualid_of_syndef ?loc vars kn in
+ let qid = Nametab.shortest_qualid_of_syndef ?loc vars kn in
let l1 =
List.rev_map (fun (c,(subentry,(scopt,scl))) ->
extern_cases_pattern_in_scope (subentry,(scopt,scl@scopes)) vars c)
@@ -1136,7 +1135,7 @@ and extern_notation (custom,scopes as allscopes) vars t = function
List.map (fun (c,(subentry,(scopt,scl))) ->
extern true (subentry,(scopt,scl@snd scopes)) vars c, None)
terms in
- let a = CRef (shortest_qualid_of_syndef ?loc vars kn,None) in
+ let a = CRef (Nametab.shortest_qualid_of_syndef ?loc vars kn,None) in
CAst.make ?loc @@ if List.is_empty l then a else CApp ((None, CAst.make a),l) in
if List.is_empty args then e
else
diff --git a/interp/constrintern.ml b/interp/constrintern.ml
index d7497d4e8e..c03a5fee90 100644
--- a/interp/constrintern.ml
+++ b/interp/constrintern.ml
@@ -28,7 +28,6 @@ open Constrexpr
open Constrexpr_ops
open Notation_term
open Notation_ops
-open Nametab
open Notation
open Inductiveops
open Decl_kinds
@@ -121,6 +120,9 @@ type internalization_error =
| UnboundFixName of bool * Id.t
| NonLinearPattern of Id.t
| BadPatternsNumber of int * int
+ | NotAProjection of qualid
+ | NotAProjectionOf of qualid * qualid
+ | ProjectionsOfDifferentRecords of qualid * qualid
exception InternalizationError of internalization_error Loc.located
@@ -146,6 +148,16 @@ let explain_bad_patterns_number n1 n2 =
str "Expecting " ++ int n1 ++ str (String.plural n1 " pattern") ++
str " but found " ++ int n2
+let explain_field_not_a_projection field_id =
+ pr_qualid field_id ++ str ": Not a projection"
+
+let explain_field_not_a_projection_of field_id inductive_id =
+ pr_qualid field_id ++ str ": Not a projection of inductive " ++ pr_qualid inductive_id
+
+let explain_projections_of_diff_records inductive1_id inductive2_id =
+ str "This record contains fields of both " ++ pr_qualid inductive1_id ++
+ str " and " ++ pr_qualid inductive2_id
+
let explain_internalization_error e =
let pp = match e with
| VariableCapture (id,id') -> explain_variable_capture id id'
@@ -154,6 +166,11 @@ let explain_internalization_error e =
| UnboundFixName (iscofix,id) -> explain_unbound_fix_name iscofix id
| NonLinearPattern id -> explain_non_linear_pattern id
| BadPatternsNumber (n1,n2) -> explain_bad_patterns_number n1 n2
+ | NotAProjection field_id -> explain_field_not_a_projection field_id
+ | NotAProjectionOf (field_id, inductive_id) ->
+ explain_field_not_a_projection_of field_id inductive_id
+ | ProjectionsOfDifferentRecords (inductive1_id, inductive2_id) ->
+ explain_projections_of_diff_records inductive1_id inductive2_id
in pp ++ str "."
let error_bad_inductive_type ?loc =
@@ -633,7 +650,7 @@ let terms_of_binders bl =
| PatVar (Name id) -> CRef (qualid_of_ident id, None)
| PatVar (Anonymous) -> error_cannot_coerce_wildcard_term ?loc ()
| PatCstr (c,l,_) ->
- let qid = qualid_of_path ?loc (path_of_global (ConstructRef c)) in
+ let qid = qualid_of_path ?loc (Nametab.path_of_global (ConstructRef c)) in
let hole = CAst.make ?loc @@ CHole (None,IntroAnonymous,None) in
let params = List.make (Inductiveops.inductive_nparams (fst c)) hole in
CAppExpl ((None,qid,None),params @ List.map term_of_pat l)) pt in
@@ -721,7 +738,7 @@ let instantiate_notation_constr loc intern intern_pat ntnvars subst infos c =
try
let gc = intern nenv c in
Id.Map.add id (gc, Some c) map
- with GlobalizationError _ -> map
+ with Nametab.GlobalizationError _ -> map
in
let mk_env' (c, (onlyident,(tmp_scope,subscopes))) =
let nenv = {env with tmp_scope; scopes = subscopes @ env.scopes} in
@@ -986,7 +1003,7 @@ let intern_extended_global_of_qualid qid =
let intern_reference qid =
let r =
try intern_extended_global_of_qualid qid
- with Not_found -> error_global_not_found qid
+ with Not_found -> Nametab.error_global_not_found qid
in
Smartlocate.global_of_extended_global r
@@ -1058,11 +1075,11 @@ let intern_applied_reference intern env namedctx (_, ntnvars as lvar) us args qi
(* Extra allowance for non globalizing functions *)
if !interning_grammar || env.unb then
(gvar (loc,qualid_basename qid) us, [], [], []), args
- else error_global_not_found qid
+ else Nametab.error_global_not_found qid
else
let r,projapp,args2 =
try intern_qualid qid intern env ntnvars us args
- with Not_found -> error_global_not_found qid
+ with Not_found -> Nametab.error_global_not_found qid
in
let x, imp, scopes, l = find_appl_head_data r in
(x,imp,scopes,l), args2
@@ -1282,6 +1299,10 @@ let check_duplicate loc fields =
user_err ?loc (str "This record defines several times the field " ++
pr_qualid r ++ str ".")
+let inductive_of_record loc record =
+ let inductive = IndRef (inductive_of_constructor record.Recordops.s_CONST) in
+ Nametab.shortest_qualid_of_global ?loc Id.Set.empty inductive
+
(** [sort_fields ~complete loc fields completer] expects a list
[fields] of field assignments [f = e1; g = e2; ...], where [f, g]
are fields of a record and [e1] are "values" (either terms, when
@@ -1304,15 +1325,14 @@ let sort_fields ~complete loc fields completer =
let gr = global_reference_of_reference first_field_ref in
(gr, Recordops.find_projection gr)
with Not_found ->
- user_err ?loc ~hdr:"intern"
- (pr_qualid first_field_ref ++ str": Not a projection")
+ raise (InternalizationError(loc, NotAProjection first_field_ref))
in
(* the number of parameters *)
let nparams = record.Recordops.s_EXPECTEDPARAM in
(* the reference constructor of the record *)
let base_constructor =
let global_record_id = ConstructRef record.Recordops.s_CONST in
- try shortest_qualid_of_global ?loc Id.Set.empty global_record_id
+ try Nametab.shortest_qualid_of_global ?loc Id.Set.empty global_record_id
with Not_found ->
anomaly (str "Environment corruption for records.") in
let () = check_duplicate loc fields in
@@ -1364,12 +1384,18 @@ let sort_fields ~complete loc fields completer =
with Not_found ->
user_err ?loc ~hdr:"intern"
(str "The field \"" ++ pr_qualid field_ref ++ str "\" does not exist.") in
+ let this_field_record = try Recordops.find_projection field_glob_ref
+ with Not_found ->
+ let inductive_ref = inductive_of_record loc record in
+ raise (InternalizationError(loc, NotAProjectionOf (field_ref, inductive_ref)))
+ in
let remaining_projs, (field_index, _) =
let the_proj (idx, glob_id) = GlobRef.equal field_glob_ref (ConstRef glob_id) in
try CList.extract_first the_proj remaining_projs
with Not_found ->
- user_err ?loc
- (str "This record contains fields of different records.")
+ let ind1 = inductive_of_record loc record in
+ let ind2 = inductive_of_record loc this_field_record in
+ raise (InternalizationError(loc, ProjectionsOfDifferentRecords (ind1, ind2)))
in
index_fields fields remaining_projs ((field_index, field_value) :: acc)
| [] ->
@@ -1493,7 +1519,7 @@ let drop_notations_pattern looked_for genv =
in
let rec drop_syndef top scopes qid pats =
try
- match locate_extended qid with
+ match Nametab.locate_extended qid with
| SynDef sp ->
let (vars,a) = Syntax_def.search_syntactic_definition sp in
(match a with
@@ -1550,7 +1576,7 @@ let drop_notations_pattern looked_for genv =
| None -> raise (InternalizationError (loc,NotAConstructor head))
end
| CPatCstr (qid, Some expl_pl, pl) ->
- let g = try locate qid
+ let g = try Nametab.locate qid
with Not_found ->
raise (InternalizationError (loc,NotAConstructor qid)) in
if expl_pl == [] then
diff --git a/interp/declare.ml b/interp/declare.ml
index 07a0066ea8..7a32018c0e 100644
--- a/interp/declare.ml
+++ b/interp/declare.ml
@@ -60,14 +60,7 @@ let open_constant i ((sp,kn), obj) =
if obj.cst_locl then ()
else
let con = Global.constant_of_delta_kn kn in
- Nametab.push (Nametab.Exactly i) sp (ConstRef con);
- match (Global.lookup_constant con).const_body with
- | (Def _ | Undef _) -> ()
- | OpaqueDef lc ->
- match Opaqueproof.get_constraints (Global.opaque_tables ()) lc with
- | Some f when Future.is_val f ->
- Global.push_context_set false (Future.force f)
- | _ -> ()
+ Nametab.push (Nametab.Exactly i) sp (ConstRef con)
let exists_name id =
variable_exists id || Global.exists_objlabel (Label.of_id id)
diff --git a/interp/discharge.ml b/interp/discharge.ml
index 0e44a8b467..21b2e85e8f 100644
--- a/interp/discharge.ml
+++ b/interp/discharge.ml
@@ -8,8 +8,6 @@
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
-open Names
-open CErrors
open Util
open Term
open Constr
@@ -17,17 +15,10 @@ open Vars
open Declarations
open Cooking
open Entries
-open Context.Rel.Declaration
(********************************)
(* Discharging mutual inductive *)
-let detype_param =
- function
- | LocalAssum (Name id, p) -> id, LocalAssumEntry p
- | LocalDef (Name id, p,_) -> id, LocalDefEntry p
- | _ -> anomaly (Pp.str "Unnamed inductive local variable.")
-
(* Replace
Var(y1)..Var(yq):C1..Cq |- Ij:Bj
@@ -57,7 +48,7 @@ let abstract_inductive decls nparamdecls inds =
(* To be sure to be the same as before, should probably be moved to process_inductive *)
let params' = let (_,arity,_,_,_) = List.hd inds' in
let (params,_) = decompose_prod_n_assum nparamdecls' arity in
- List.map detype_param params
+ params
in
let ind'' =
List.map
diff --git a/interp/impargs.ml b/interp/impargs.ml
index ce33cb8731..d8582d856e 100644
--- a/interp/impargs.ml
+++ b/interp/impargs.ml
@@ -450,13 +450,13 @@ let compute_mib_implicits flags kn =
(Array.mapi (* No need to lift, arities contain no de Bruijn *)
(fun i mip ->
(** No need to care about constraints here *)
- let ty, _ = Global.type_of_global_in_context env (IndRef (kn,i)) in
+ let ty, _ = Typeops.type_of_global_in_context env (IndRef (kn,i)) in
Context.Rel.Declaration.LocalAssum (Name mip.mind_typename, ty))
mib.mind_packets) in
let env_ar = Environ.push_rel_context ar env in
let imps_one_inductive i mip =
let ind = (kn,i) in
- let ar, _ = Global.type_of_global_in_context env (IndRef ind) in
+ let ar, _ = Typeops.type_of_global_in_context env (IndRef ind) in
((IndRef ind,compute_semi_auto_implicits env sigma flags (of_constr ar)),
Array.mapi (fun j c ->
(ConstructRef (ind,j+1),compute_semi_auto_implicits env_ar sigma flags c))
@@ -694,7 +694,7 @@ let declare_manual_implicits local ref ?enriching l =
let flags = !implicit_args in
let env = Global.env () in
let sigma = Evd.from_env env in
- let t, _ = Global.type_of_global_in_context env ref in
+ let t, _ = Typeops.type_of_global_in_context env ref in
let t = of_constr t in
let enriching = Option.default flags.auto enriching in
let autoimpls = compute_auto_implicits env sigma flags enriching t in
diff --git a/interp/notation.ml b/interp/notation.ml
index 6104ab16c7..db8ee5bc18 100644
--- a/interp/notation.ml
+++ b/interp/notation.ml
@@ -1314,7 +1314,7 @@ let rebuild_arguments_scope sigma (req,r,n,l,_) =
| ArgsScopeNoDischarge -> assert false
| ArgsScopeAuto ->
let env = Global.env () in (*FIXME?*)
- let typ = EConstr.of_constr @@ fst (Global.type_of_global_in_context env r) in
+ let typ = EConstr.of_constr @@ fst (Typeops.type_of_global_in_context env r) in
let scs,cls = compute_arguments_scope_full sigma typ in
(req,r,List.length scs,scs,cls)
| ArgsScopeManual ->
@@ -1322,7 +1322,7 @@ let rebuild_arguments_scope sigma (req,r,n,l,_) =
for the extra parameters of the section. Discard the classes
of the manually given scopes to avoid further re-computations. *)
let env = Global.env () in (*FIXME?*)
- let typ = EConstr.of_constr @@ fst (Global.type_of_global_in_context env r) in
+ let typ = EConstr.of_constr @@ fst (Typeops.type_of_global_in_context env r) in
let l',cls = compute_arguments_scope_full sigma typ in
let l1 = List.firstn n l' in
let cls1 = List.firstn n cls in
@@ -1369,7 +1369,7 @@ let find_arguments_scope r =
let declare_ref_arguments_scope sigma ref =
let env = Global.env () in (* FIXME? *)
- let typ = EConstr.of_constr @@ fst @@ Global.type_of_global_in_context env ref in
+ let typ = EConstr.of_constr @@ fst @@ Typeops.type_of_global_in_context env ref in
let (scs,cls as o) = compute_arguments_scope_full sigma typ in
declare_arguments_scope_gen ArgsScopeAuto ref (List.length scs) o
diff --git a/interp/notation_ops.ml b/interp/notation_ops.ml
index ab57176643..7a525f84a5 100644
--- a/interp/notation_ops.ml
+++ b/interp/notation_ops.ml
@@ -892,7 +892,9 @@ let bind_term_as_binding_env alp (terms,termlists,binders,binderlists as sigma)
| GVar id' ->
(if not (Id.equal id id') then (fst alp,(id,id')::snd alp) else alp),
sigma
- | _ -> anomaly (str "A term which can be a binder has to be a variable.")
+ | t ->
+ (* The term is a non-variable pattern *)
+ raise No_match
with Not_found ->
(* The matching against a term allowing to find the instance has not been found yet *)
(* If it will be a different name, we shall unfortunately fail *)
diff --git a/interp/syntax_def.ml b/interp/syntax_def.ml
index e3d490a1ad..b73d238c22 100644
--- a/interp/syntax_def.ml
+++ b/interp/syntax_def.ml
@@ -15,7 +15,6 @@ open Names
open Libnames
open Libobject
open Lib
-open Nametab
open Notation_term
(* Syntactic definitions. *)
@@ -38,7 +37,7 @@ let load_syntax_constant i ((sp,kn),(_,pat,onlyparse)) =
let is_alias_of_already_visible_name sp = function
| _,NRef ref ->
- let (dir,id) = repr_qualid (shortest_qualid_of_global Id.Set.empty ref) in
+ let (dir,id) = repr_qualid (Nametab.shortest_qualid_of_global Id.Set.empty ref) in
DirPath.is_empty dir && Id.equal id (basename sp)
| _ ->
false
@@ -83,11 +82,11 @@ let out_pat (ids,ac) = (List.map (fun (id,((_,sc),typ)) -> (id,sc)) ids,ac)
let declare_syntactic_definition local id onlyparse pat =
let _ = add_leaf id (in_syntax_constant (local,in_pat pat,onlyparse)) in ()
-let pr_syndef kn = pr_qualid (shortest_qualid_of_syndef Id.Set.empty kn)
+let pr_syndef kn = pr_qualid (Nametab.shortest_qualid_of_syndef Id.Set.empty kn)
let pr_compat_warning (kn, def, v) =
let pp_def = match def with
- | [], NRef r -> spc () ++ str "is" ++ spc () ++ pr_global_env Id.Set.empty r
+ | [], NRef r -> spc () ++ str "is" ++ spc () ++ Nametab.pr_global_env Id.Set.empty r
| _ -> strbrk " is a compatibility notation"
in
pr_syndef kn ++ pp_def
diff --git a/kernel/cClosure.ml b/kernel/cClosure.ml
index 819a66c190..c558689595 100644
--- a/kernel/cClosure.ml
+++ b/kernel/cClosure.ml
@@ -224,11 +224,6 @@ let unfold_red kn =
* abstractions, storing a representation (of type 'a) of the body of
* this constant or abstraction.
* * i_tab is the cache table of the results
- * * i_repr is the function to get the representation from the current
- * state of the cache and the body of the constant. The result
- * is stored in the table.
- * * i_rels is the array of free rel variables together with their optional
- * body
*
* ref_value_cache searchs in the tab, otherwise uses i_repr to
* compute the result and store it in the table. If the constant can't
@@ -256,74 +251,12 @@ end
module KeyTable = Hashtbl.Make(IdKeyHash)
-let eq_table_key = IdKeyHash.equal
-
-type 'a infos_tab = 'a KeyTable.t
-
-type 'a infos_cache = {
- i_repr : 'a infos -> 'a infos_tab -> constr -> 'a;
- i_env : env;
- i_sigma : existential -> constr option;
- i_rels : (Constr.rel_declaration * lazy_val) Range.t;
- i_share : bool;
-}
-
-and 'a infos = {
- i_flags : reds;
- i_cache : 'a infos_cache }
-
-let info_flags info = info.i_flags
-let info_env info = info.i_cache.i_env
-
open Context.Named.Declaration
let assoc_defined id env = match Environ.lookup_named id env with
| LocalDef (_, c, _) -> c
| _ -> raise Not_found
-let ref_value_cache ({i_cache = cache;_} as infos) tab ref =
- try
- Some (KeyTable.find tab ref)
- with Not_found ->
- try
- let body =
- match ref with
- | RelKey n ->
- let open! Context.Rel.Declaration in
- let i = n - 1 in
- let (d, _) =
- try Range.get cache.i_rels i
- with Invalid_argument _ -> raise Not_found
- in
- begin match d with
- | LocalAssum _ -> raise Not_found
- | LocalDef (_, t, _) -> lift n t
- end
- | VarKey id -> assoc_defined id cache.i_env
- | ConstKey cst -> constant_value_in cache.i_env cst
- in
- let v = cache.i_repr infos tab body in
- KeyTable.add tab ref v;
- Some v
- with
- | Not_found (* List.assoc *)
- | NotEvaluableConst _ (* Const *)
- -> None
-
-let evar_value cache ev =
- cache.i_sigma ev
-
-let create ~repr ~share flgs env evars =
- let cache =
- { i_repr = repr;
- i_env = env;
- i_sigma = evars;
- i_rels = env.env_rel_context.env_rel_map;
- i_share = share;
- }
- in { i_flags = flgs; i_cache = cache }
-
-
(**********************************************************************)
(* Lazy reduction: the one used in kernel operations *)
@@ -391,6 +324,23 @@ let update ~share v1 no t =
v1)
else {norm=no;term=t}
+(** Reduction cache *)
+
+type infos_cache = {
+ i_env : env;
+ i_sigma : existential -> constr option;
+ i_share : bool;
+}
+
+type clos_infos = {
+ i_flags : reds;
+ i_cache : infos_cache }
+
+type clos_tab = fconstr KeyTable.t
+
+let info_flags info = info.i_flags
+let info_env info = info.i_cache.i_env
+
(**********************************************************************)
(* The type of (machine) stacks (= lambda-bar-calculus' contexts) *)
@@ -539,6 +489,8 @@ let mk_clos e t =
| (CoFix _|Lambda _|Fix _|Prod _|Evar _|App _|Case _|Cast _|LetIn _|Proj _) ->
{norm = Red; term = FCLOS(t,e)}
+let inject c = mk_clos (subs_id 0) c
+
(** Hand-unrolling of the map function to bypass the call to the generic array
allocation *)
let mk_clos_vect env v = match v with
@@ -550,6 +502,35 @@ let mk_clos_vect env v = match v with
[|mk_clos env v0; mk_clos env v1; mk_clos env v2; mk_clos env v3|]
| v -> Array.Fun1.map mk_clos env v
+let ref_value_cache ({ i_cache = cache; _ }) tab ref =
+ try
+ Some (KeyTable.find tab ref)
+ with Not_found ->
+ try
+ let body =
+ match ref with
+ | RelKey n ->
+ let open! Context.Rel.Declaration in
+ let i = n - 1 in
+ let (d, _) =
+ try Range.get cache.i_env.env_rel_context.env_rel_map i
+ with Invalid_argument _ -> raise Not_found
+ in
+ begin match d with
+ | LocalAssum _ -> raise Not_found
+ | LocalDef (_, t, _) -> lift n t
+ end
+ | VarKey id -> assoc_defined id cache.i_env
+ | ConstKey cst -> constant_value_in cache.i_env cst
+ in
+ let v = inject body in
+ KeyTable.add tab ref v;
+ Some v
+ with
+ | Not_found (* List.assoc *)
+ | NotEvaluableConst _ (* Const *)
+ -> None
+
(* The inverse of mk_clos: move back to constr *)
let rec to_constr lfts v =
match v.term with
@@ -944,7 +925,7 @@ let rec knr info tab m stk =
| FLetIn (_,v,_,bd,e) when red_set info.i_flags fZETA ->
knit info tab (subs_cons([|v|],e)) bd stk
| FEvar(ev,env) ->
- (match evar_value info.i_cache ev with
+ (match info.i_cache.i_sigma ev with
Some c -> knit info tab env c stk
| None -> (m,stk))
| FLOCKED | FRel _ | FAtom _ | FFlex _ | FInd _ | FApp _ | FProj _
@@ -1040,8 +1021,6 @@ let whd_val info tab v =
let norm_val info tab v =
with_stats (lazy (kl info tab v))
-let inject c = mk_clos (subs_id 0) c
-
let whd_stack infos tab m stk = match m.norm with
| Whnf | Norm ->
(** No need to perform [kni] nor to unlock updates because
@@ -1052,19 +1031,19 @@ let whd_stack infos tab m stk = match m.norm with
let () = if infos.i_cache.i_share then ignore (fapp_stack k) in (* to unlock Zupdates! *)
k
-(* cache of constants: the body is computed only when needed. *)
-type clos_infos = fconstr infos
-
let create_clos_infos ?(evars=fun _ -> None) flgs env =
let share = (Environ.typing_flags env).Declarations.share_reduction in
- create ~share ~repr:(fun _ _ c -> inject c) flgs env evars
+ let cache = {
+ i_env = env;
+ i_sigma = evars;
+ i_share = share;
+ } in
+ { i_flags = flgs; i_cache = cache }
let create_tab () = KeyTable.create 17
let oracle_of_infos infos = Environ.oracle infos.i_cache.i_env
-let env_of_infos infos = infos.i_cache.i_env
-
let infos_with_reds infos reds =
{ infos with i_flags = reds }
diff --git a/kernel/cClosure.mli b/kernel/cClosure.mli
index 2a018d172a..1ee4bccc25 100644
--- a/kernel/cClosure.mli
+++ b/kernel/cClosure.mli
@@ -98,25 +98,7 @@ val unfold_red : evaluable_global_reference -> reds
(***********************************************************************)
type table_key = Constant.t Univ.puniverses tableKey
-type 'a infos_cache
-type 'a infos_tab
-type 'a infos = {
- i_flags : reds;
- i_cache : 'a infos_cache }
-
-val ref_value_cache: 'a infos -> 'a infos_tab -> table_key -> 'a option
-val create:
- repr:('a infos -> 'a infos_tab -> constr -> 'a) ->
- share:bool ->
- reds ->
- env ->
- (existential -> constr option) ->
- 'a infos
-val create_tab : unit -> 'a infos_tab
-val evar_value : 'a infos_cache -> existential -> constr option
-
-val info_env : 'a infos -> env
-val info_flags: 'a infos -> reds
+module KeyTable : Hashtbl.S with type key = table_key
(***********************************************************************
s Lazy reduction. *)
@@ -173,7 +155,6 @@ val stack_tail : int -> stack -> stack
val stack_nth : stack -> int -> fconstr
val zip_term : (fconstr -> constr) -> constr -> stack -> constr
val eta_expand_stack : stack -> stack
-val unfold_projection : 'a infos -> Projection.t -> stack_member option
(** To lazy reduce a constr, create a [clos_infos] with
[create_clos_infos], inject the term to reduce with [inject]; then use
@@ -193,27 +174,32 @@ val destFLambda :
(fconstr subs -> constr -> fconstr) -> fconstr -> Name.t * fconstr * fconstr
(** Global and local constant cache *)
-type clos_infos = fconstr infos
+type clos_infos
+type clos_tab
val create_clos_infos :
?evars:(existential->constr option) -> reds -> env -> clos_infos
val oracle_of_infos : clos_infos -> Conv_oracle.oracle
-val env_of_infos : 'a infos -> env
+val create_tab : unit -> clos_tab
+
+val info_env : clos_infos -> env
+val info_flags: clos_infos -> reds
+val unfold_projection : clos_infos -> Projection.t -> stack_member option
val infos_with_reds : clos_infos -> reds -> clos_infos
(** Reduction function *)
(** [norm_val] is for strong normalization *)
-val norm_val : clos_infos -> fconstr infos_tab -> fconstr -> constr
+val norm_val : clos_infos -> clos_tab -> fconstr -> constr
(** [whd_val] is for weak head normalization *)
-val whd_val : clos_infos -> fconstr infos_tab -> fconstr -> constr
+val whd_val : clos_infos -> clos_tab -> fconstr -> constr
(** [whd_stack] performs weak head normalization in a given stack. It
stops whenever a reduction is blocked. *)
val whd_stack :
- clos_infos -> fconstr infos_tab -> fconstr -> stack -> fconstr * stack
+ clos_infos -> clos_tab -> fconstr -> stack -> fconstr * stack
(** [eta_expand_ind_stack env ind c s t] computes stacks correspoding
to the conversion of the eta expansion of t, considered as an inhabitant
@@ -230,9 +216,7 @@ val eta_expand_ind_stack : env -> inductive -> fconstr -> stack ->
(** Conversion auxiliary functions to do step by step normalisation *)
(** [unfold_reference] unfolds references in a [fconstr] *)
-val unfold_reference : clos_infos -> fconstr infos_tab -> table_key -> fconstr option
-
-val eq_table_key : table_key -> table_key -> bool
+val unfold_reference : clos_infos -> clos_tab -> table_key -> fconstr option
(***********************************************************************
i This is for lazy debug *)
@@ -243,9 +227,9 @@ val lift_fconstr_vect : int -> fconstr array -> fconstr array
val mk_clos : fconstr subs -> constr -> fconstr
val mk_clos_vect : fconstr subs -> constr array -> fconstr array
-val kni: clos_infos -> fconstr infos_tab -> fconstr -> stack -> fconstr * stack
-val knr: clos_infos -> fconstr infos_tab -> fconstr -> stack -> fconstr * stack
-val kl : clos_infos -> fconstr infos_tab -> fconstr -> constr
+val kni: clos_infos -> clos_tab -> fconstr -> stack -> fconstr * stack
+val knr: clos_infos -> clos_tab -> fconstr -> stack -> fconstr * stack
+val kl : clos_infos -> clos_tab -> fconstr -> constr
val to_constr : lift -> fconstr -> constr
diff --git a/kernel/constr.ml b/kernel/constr.ml
index b490aa5092..d7f35da10d 100644
--- a/kernel/constr.ml
+++ b/kernel/constr.ml
@@ -452,6 +452,27 @@ let fold f acc c = match kind c with
| CoFix (_,(_lna,tl,bl)) ->
Array.fold_left2 (fun acc t b -> f (f acc t) b) acc tl bl
+let fold_with_full_binders g f n acc c =
+ let open Context.Rel.Declaration in
+ match kind c with
+ | Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ -> acc
+ | Cast (c,_, t) -> f n (f n acc c) t
+ | Prod (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c
+ | Lambda (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c
+ | LetIn (na,b,t,c) -> f (g (LocalDef (na,b,t)) n) (f n (f n acc b) t) c
+ | App (c,l) -> Array.fold_left (f n) (f n acc c) l
+ | Proj (_,c) -> f n acc c
+ | Evar (_,l) -> Array.fold_left (f n) acc l
+ | Case (_,p,c,bl) -> Array.fold_left (f n) (f n (f n acc p) c) bl
+ | Fix (_,(lna,tl,bl)) ->
+ let n' = CArray.fold_left2 (fun c n t -> g (LocalAssum (n,t)) c) n lna tl in
+ let fd = Array.map2 (fun t b -> (t,b)) tl bl in
+ Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd
+ | CoFix (_,(lna,tl,bl)) ->
+ let n' = CArray.fold_left2 (fun c n t -> g (LocalAssum (n,t)) c) n lna tl in
+ let fd = Array.map2 (fun t b -> (t,b)) tl bl in
+ Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd
+
(* [iter f c] iters [f] on the immediate subterms of [c]; it is
not recursive and the order with which subterms are processed is
not specified *)
diff --git a/kernel/constr.mli b/kernel/constr.mli
index c012f04260..8753c20eac 100644
--- a/kernel/constr.mli
+++ b/kernel/constr.mli
@@ -470,6 +470,10 @@ val map_return_predicate_with_full_binders : ((constr, constr) Context.Rel.Decla
val fold : ('a -> constr -> 'a) -> 'a -> constr -> 'a
+val fold_with_full_binders :
+ (rel_declaration -> 'a -> 'a) -> ('a -> 'b -> constr -> 'b) ->
+ 'a -> 'b -> constr -> 'b
+
(** [map f c] maps [f] on the immediate subterms of [c]; it is
not recursive and the order with which subterms are processed is
not specified *)
diff --git a/kernel/conv_oracle.ml b/kernel/conv_oracle.ml
index c74f2ab318..ac78064235 100644
--- a/kernel/conv_oracle.ml
+++ b/kernel/conv_oracle.ml
@@ -83,18 +83,27 @@ let fold_strategy f { var_opacity; cst_opacity; _ } accu =
let get_transp_state { var_trstate; cst_trstate; _ } = (var_trstate, cst_trstate)
+let dep_order l2r k1 k2 = match k1, k2 with
+| RelKey _, RelKey _ -> l2r
+| RelKey _, (VarKey _ | ConstKey _) -> true
+| VarKey _, RelKey _ -> false
+| VarKey _, VarKey _ -> l2r
+| VarKey _, ConstKey _ -> true
+| ConstKey _, (RelKey _ | VarKey _) -> false
+| ConstKey _, ConstKey _ -> l2r
+
(* Unfold the first constant only if it is "more transparent" than the
second one. In case of tie, use the recommended default. *)
let oracle_order f o l2r k1 k2 =
match get_strategy o f k1, get_strategy o f k2 with
- | Expand, Expand -> l2r
+ | Expand, Expand -> dep_order l2r k1 k2
| Expand, (Opaque | Level _) -> true
| (Opaque | Level _), Expand -> false
- | Opaque, Opaque -> l2r
+ | Opaque, Opaque -> dep_order l2r k1 k2
| Level _, Opaque -> true
| Opaque, Level _ -> false
| Level n1, Level n2 ->
- if Int.equal n1 n2 then l2r
+ if Int.equal n1 n2 then dep_order l2r k1 k2
else n1 < n2
let get_strategy o = get_strategy o (fun x -> x)
diff --git a/kernel/entries.ml b/kernel/entries.ml
index 94248ad26b..c5bcd74072 100644
--- a/kernel/entries.ml
+++ b/kernel/entries.ml
@@ -16,14 +16,6 @@ open Constr
constants/axioms, mutual inductive definitions, modules and module
types *)
-
-(** {6 Local entries } *)
-
-type local_entry =
- | LocalDefEntry of constr
- | LocalAssumEntry of constr
-
-
(** {6 Declaration of inductive types. } *)
(** Assume the following definition in concrete syntax:
@@ -54,7 +46,7 @@ type mutual_inductive_entry = {
record in their respective projections. Not used by the kernel.
Some None: non-primitive record *)
mind_entry_finite : Declarations.recursivity_kind;
- mind_entry_params : (Id.t * local_entry) list;
+ mind_entry_params : Constr.rel_context;
mind_entry_inds : one_inductive_entry list;
mind_entry_universes : inductive_universes;
(* universe constraints and the constraints for subtyping of
diff --git a/kernel/environ.mli b/kernel/environ.mli
index 3d653bcfe0..43bfe7c2fb 100644
--- a/kernel/environ.mli
+++ b/kernel/environ.mli
@@ -156,6 +156,9 @@ val fold_named_context :
(env -> Constr.named_declaration -> 'a -> 'a) -> env -> init:'a -> 'a
val set_universes : env -> UGraph.t -> env
+(** This is used to update universes during a proof for the sake of
+ evar map-unaware functions, eg [Typing] calling
+ [Typeops.check_hyps_inclusion]. *)
(** Recurrence on [named_context] starting from younger decl *)
val fold_named_context_reverse :
diff --git a/kernel/indtypes.ml b/kernel/indtypes.ml
index b976469ff7..0346026aa4 100644
--- a/kernel/indtypes.ml
+++ b/kernel/indtypes.ml
@@ -271,7 +271,8 @@ let typecheck_inductive env mie =
| Polymorphic_ind_entry ctx -> push_context ctx env
| Cumulative_ind_entry cumi -> push_context (Univ.CumulativityInfo.univ_context cumi) env
in
- let (env_params,paramsctxt) = infer_local_decls env' mie.mind_entry_params in
+ let env_params = check_context env' mie.mind_entry_params in
+ let paramsctxt = mie.mind_entry_params in
(* We first type arity of each inductive definition *)
(* This allows building the environment of arities and to share *)
(* the set of constraints *)
diff --git a/kernel/names.ml b/kernel/names.ml
index 7cd749de1d..18560d5f8d 100644
--- a/kernel/names.ml
+++ b/kernel/names.ml
@@ -872,6 +872,8 @@ struct
let equal (c, b) (c', b') = Repr.equal c c' && b == b'
+ let repr_equal p p' = Repr.equal (repr p) (repr p')
+
let hash (c, b) = (if b then 0 else 1) + Repr.hash c
module SyntacticOrd = struct
diff --git a/kernel/names.mli b/kernel/names.mli
index 37930c12e2..98995752a2 100644
--- a/kernel/names.mli
+++ b/kernel/names.mli
@@ -608,6 +608,9 @@ module Projection : sig
val hcons : t -> t
(** Hashconsing of projections. *)
+ val repr_equal : t -> t -> bool
+ (** Ignoring the unfolding boolean. *)
+
val compare : t -> t -> int
val map : (MutInd.t -> MutInd.t) -> t -> t
diff --git a/kernel/reduction.ml b/kernel/reduction.ml
index 00576476ab..18697d07e5 100644
--- a/kernel/reduction.ml
+++ b/kernel/reduction.ml
@@ -316,8 +316,8 @@ let compare_stacks f fmind lft1 stk1 lft2 stk2 cuniv =
type conv_tab = {
cnv_inf : clos_infos;
- lft_tab : fconstr infos_tab;
- rgt_tab : fconstr infos_tab;
+ lft_tab : clos_tab;
+ rgt_tab : clos_tab;
}
(** Invariant: for any tl ∈ lft_tab and tr ∈ rgt_tab, there is no mutable memory
location contained both in tl and in tr. *)
@@ -346,7 +346,7 @@ and eqappr cv_pb l2r infos (lft1,st1) (lft2,st2) cuniv =
| (Sort s1, Sort s2) ->
if not (is_empty_stack v1 && is_empty_stack v2) then
anomaly (Pp.str "conversion was given ill-typed terms (Sort).");
- sort_cmp_universes (env_of_infos infos.cnv_inf) cv_pb s1 s2 cuniv
+ sort_cmp_universes (info_env infos.cnv_inf) cv_pb s1 s2 cuniv
| (Meta n, Meta m) ->
if Int.equal n m
then convert_stacks l2r infos lft1 lft2 v1 v2 cuniv
diff --git a/kernel/safe_typing.ml b/kernel/safe_typing.ml
index 625b7e5073..12f9592ab7 100644
--- a/kernel/safe_typing.ml
+++ b/kernel/safe_typing.ml
@@ -216,15 +216,55 @@ let get_opaque_body env cbo =
(Opaqueproof.force_proof (Environ.opaque_tables env) opaque,
Opaqueproof.force_constraints (Environ.opaque_tables env) opaque)
-type private_constants = Term_typing.side_effects
+type side_effect = {
+ from_env : Declarations.structure_body CEphemeron.key;
+ eff : Entries.side_eff list;
+}
-let empty_private_constants = Term_typing.empty_seff
-let add_private = Term_typing.add_seff
-let concat_private = Term_typing.concat_seff
-let mk_pure_proof = Term_typing.mk_pure_proof
-let inline_private_constants_in_constr = Term_typing.inline_side_effects
-let inline_private_constants_in_definition_entry = Term_typing.inline_entry_side_effects
-let side_effects_of_private_constants = Term_typing.uniq_seff
+module SideEffects :
+sig
+ type t
+ val repr : t -> side_effect list
+ val empty : t
+ val add : side_effect -> t -> t
+ val concat : t -> t -> t
+end =
+struct
+
+module SeffOrd = struct
+open Entries
+type t = side_effect
+let compare e1 e2 =
+ let cmp e1 e2 = Constant.CanOrd.compare e1.seff_constant e2.seff_constant in
+ List.compare cmp e1.eff e2.eff
+end
+
+module SeffSet = Set.Make(SeffOrd)
+
+type t = { seff : side_effect list; elts : SeffSet.t }
+(** Invariant: [seff] is a permutation of the elements of [elts] *)
+
+let repr eff = eff.seff
+let empty = { seff = []; elts = SeffSet.empty }
+let add x es =
+ if SeffSet.mem x es.elts then es
+ else { seff = x :: es.seff; elts = SeffSet.add x es.elts }
+let concat xes yes =
+ List.fold_right add xes.seff yes
+
+end
+
+type private_constants = SideEffects.t
+
+let side_effects_of_private_constants l =
+ let ans = List.rev (SideEffects.repr l) in
+ List.map_append (fun { eff; _ } -> eff) ans
+
+let empty_private_constants = SideEffects.empty
+let add_private mb eff effs =
+ let from_env = CEphemeron.create mb in
+ SideEffects.add { eff; from_env } effs
+let concat_private = SideEffects.concat
let make_eff env cst r =
let open Entries in
@@ -257,7 +297,7 @@ let universes_of_private eff =
| Monomorphic_const ctx -> ctx :: acc
| Polymorphic_const _ -> acc
in
- List.fold_left fold [] (Term_typing.uniq_seff eff)
+ List.fold_left fold [] (side_effects_of_private_constants eff)
let env_of_safe_env senv = senv.env
let env_of_senv = env_of_safe_env
@@ -507,8 +547,218 @@ let add_constant_aux ~in_section senv (kn, cb) =
in
senv''
+let mk_pure_proof c = (c, Univ.ContextSet.empty), SideEffects.empty
+
+let inline_side_effects env body side_eff =
+ let open Entries in
+ let open Constr in
+ (** First step: remove the constants that are still in the environment *)
+ let filter { eff = se; from_env = mb } =
+ let map e = (e.seff_constant, e.seff_body, e.seff_env) in
+ let cbl = List.map map se in
+ let not_exists (c,_,_) =
+ try ignore(Environ.lookup_constant c env); false
+ with Not_found -> true in
+ let cbl = List.filter not_exists cbl in
+ (cbl, mb)
+ in
+ (* CAVEAT: we assure that most recent effects come first *)
+ let side_eff = List.map filter (SideEffects.repr side_eff) in
+ let sigs = List.rev_map (fun (cbl, mb) -> mb, List.length cbl) side_eff in
+ let side_eff = List.fold_left (fun accu (cbl, _) -> cbl @ accu) [] side_eff in
+ let side_eff = List.rev side_eff in
+ (** Most recent side-effects first in side_eff *)
+ if List.is_empty side_eff then (body, Univ.ContextSet.empty, sigs)
+ else
+ (** Second step: compute the lifts and substitutions to apply *)
+ let cname c = Name (Label.to_id (Constant.label c)) in
+ let fold (subst, var, ctx, args) (c, cb, b) =
+ let (b, opaque) = match cb.const_body, b with
+ | Def b, _ -> (Mod_subst.force_constr b, false)
+ | OpaqueDef _, `Opaque (b,_) -> (b, true)
+ | _ -> assert false
+ in
+ match cb.const_universes with
+ | Monomorphic_const univs ->
+ (** Abstract over the term at the top of the proof *)
+ let ty = cb.const_type in
+ let subst = Cmap_env.add c (Inr var) subst in
+ let ctx = Univ.ContextSet.union ctx univs in
+ (subst, var + 1, ctx, (cname c, b, ty, opaque) :: args)
+ | Polymorphic_const _auctx ->
+ (** Inline the term to emulate universe polymorphism *)
+ let subst = Cmap_env.add c (Inl b) subst in
+ (subst, var, ctx, args)
+ in
+ let (subst, len, ctx, args) = List.fold_left fold (Cmap_env.empty, 1, Univ.ContextSet.empty, []) side_eff in
+ (** Third step: inline the definitions *)
+ let rec subst_const i k t = match Constr.kind t with
+ | Const (c, u) ->
+ let data = try Some (Cmap_env.find c subst) with Not_found -> None in
+ begin match data with
+ | None -> t
+ | Some (Inl b) ->
+ (** [b] is closed but may refer to other constants *)
+ subst_const i k (Vars.subst_instance_constr u b)
+ | Some (Inr n) ->
+ mkRel (k + n - i)
+ end
+ | Rel n ->
+ (** Lift free rel variables *)
+ if n <= k then t
+ else mkRel (n + len - i - 1)
+ | _ -> Constr.map_with_binders ((+) 1) (fun k t -> subst_const i k t) k t
+ in
+ let map_args i (na, b, ty, opaque) =
+ (** Both the type and the body may mention other constants *)
+ let ty = subst_const (len - i - 1) 0 ty in
+ let b = subst_const (len - i - 1) 0 b in
+ (na, b, ty, opaque)
+ in
+ let args = List.mapi map_args args in
+ let body = subst_const 0 0 body in
+ let fold_arg (na, b, ty, opaque) accu =
+ if opaque then mkApp (mkLambda (na, ty, accu), [|b|])
+ else mkLetIn (na, b, ty, accu)
+ in
+ let body = List.fold_right fold_arg args body in
+ (body, ctx, sigs)
+
+let inline_private_constants_in_definition_entry env ce =
+ let open Entries in
+ { ce with
+ const_entry_body = Future.chain
+ ce.const_entry_body (fun ((body, ctx), side_eff) ->
+ let body, ctx',_ = inline_side_effects env body side_eff in
+ let ctx' = Univ.ContextSet.union ctx ctx' in
+ (body, ctx'), ());
+ }
+
+let inline_private_constants_in_constr env body side_eff =
+ pi1 (inline_side_effects env body side_eff)
+
+let rec is_nth_suffix n l suf =
+ if Int.equal n 0 then l == suf
+ else match l with
+ | [] -> false
+ | _ :: l -> is_nth_suffix (pred n) l suf
+
+(* Given the list of signatures of side effects, checks if they match.
+ * I.e. if they are ordered descendants of the current revstruct.
+ Returns the number of effects that can be trusted. *)
+let check_signatures curmb sl =
+ let is_direct_ancestor accu (mb, how_many) =
+ match accu with
+ | None -> None
+ | Some (n, curmb) ->
+ try
+ let mb = CEphemeron.get mb in
+ if is_nth_suffix how_many mb curmb
+ then Some (n + how_many, mb)
+ else None
+ with CEphemeron.InvalidKey -> None in
+ let sl = List.fold_left is_direct_ancestor (Some (0, curmb)) sl in
+ match sl with
+ | None -> 0
+ | Some (n, _) -> n
+
+
+let constant_entry_of_side_effect cb u =
+ let open Entries in
+ let univs =
+ match cb.const_universes with
+ | Monomorphic_const uctx ->
+ Monomorphic_const_entry uctx
+ | Polymorphic_const auctx ->
+ Polymorphic_const_entry (Univ.AUContext.repr auctx)
+ in
+ let pt =
+ match cb.const_body, u with
+ | OpaqueDef _, `Opaque (b, c) -> b, c
+ | Def b, `Nothing -> Mod_subst.force_constr b, Univ.ContextSet.empty
+ | _ -> assert false in
+ DefinitionEntry {
+ const_entry_body = Future.from_val (pt, ());
+ const_entry_secctx = None;
+ const_entry_feedback = None;
+ const_entry_type = Some cb.const_type;
+ const_entry_universes = univs;
+ const_entry_opaque = Declareops.is_opaque cb;
+ const_entry_inline_code = cb.const_inline_code }
+
+let turn_direct orig =
+ let open Entries in
+ let cb = orig.seff_body in
+ if Declareops.is_opaque cb then
+ let p = match orig.seff_env with
+ | `Opaque (b, c) -> (b, c)
+ | _ -> assert false
+ in
+ let const_body = OpaqueDef (Opaqueproof.create (Future.from_val p)) in
+ let cb = { cb with const_body } in
+ { orig with seff_body = cb }
+ else orig
+
+let export_eff eff =
+ let open Entries in
+ (eff.seff_constant, eff.seff_body, eff.seff_role)
+
+let export_side_effects mb env c =
+ let open Entries in
+ let body = c.const_entry_body in
+ let _, eff = Future.force body in
+ let ce = { c with
+ Entries.const_entry_body = Future.chain body
+ (fun (b_ctx, _) -> b_ctx, ()) } in
+ let not_exists e =
+ try ignore(Environ.lookup_constant e.seff_constant env); false
+ with Not_found -> true in
+ let aux (acc,sl) { eff = se; from_env = mb } =
+ let cbl = List.filter not_exists se in
+ if List.is_empty cbl then acc, sl
+ else cbl :: acc, (mb,List.length cbl) :: sl in
+ let seff, signatures = List.fold_left aux ([],[]) (SideEffects.repr eff) in
+ let trusted = check_signatures mb signatures in
+ let push_seff env eff =
+ let { seff_constant = kn; seff_body = cb ; _ } = eff in
+ let env = Environ.add_constant kn cb env in
+ match cb.const_universes with
+ | Polymorphic_const _ -> env
+ | Monomorphic_const ctx ->
+ let ctx = match eff.seff_env with
+ | `Nothing -> ctx
+ | `Opaque(_, ctx') -> Univ.ContextSet.union ctx' ctx
+ in
+ Environ.push_context_set ~strict:true ctx env
+ in
+ let rec translate_seff sl seff acc env =
+ match seff with
+ | [] -> List.rev acc, ce
+ | cbs :: rest ->
+ if Int.equal sl 0 then
+ let env, cbs =
+ List.fold_left (fun (env,cbs) eff ->
+ let { seff_constant = kn; seff_body = ocb; seff_env = u ; _ } = eff in
+ let ce = constant_entry_of_side_effect ocb u in
+ let cb = Term_typing.translate_constant Term_typing.Pure env kn ce in
+ let eff = { eff with
+ seff_body = cb;
+ seff_env = `Nothing;
+ } in
+ (push_seff env eff, export_eff eff :: cbs))
+ (env,[]) cbs in
+ translate_seff 0 rest (cbs @ acc) env
+ else
+ let cbs_len = List.length cbs in
+ let cbs = List.map turn_direct cbs in
+ let env = List.fold_left push_seff env cbs in
+ let ecbs = List.map export_eff cbs in
+ translate_seff (sl - cbs_len) rest (ecbs @ acc) env
+ in
+ translate_seff trusted seff [] env
+
let export_private_constants ~in_section ce senv =
- let exported, ce = Term_typing.export_side_effects senv.revstruct senv.env ce in
+ let exported, ce = export_side_effects senv.revstruct senv.env ce in
let bodies = List.map (fun (kn, cb, _) -> (kn, cb)) exported in
let exported = List.map (fun (kn, _, r) -> (kn, r)) exported in
let senv = List.fold_left (add_constant_aux ~in_section) senv bodies in
@@ -520,7 +770,12 @@ let add_constant ~in_section l decl senv =
let cb =
match decl with
| ConstantEntry (EffectEntry, ce) ->
- Term_typing.translate_constant (Term_typing.SideEffects senv.revstruct) senv.env kn ce
+ let handle env body eff =
+ let body, uctx, signatures = inline_side_effects env body eff in
+ let trusted = check_signatures senv.revstruct signatures in
+ body, uctx, trusted
+ in
+ Term_typing.translate_constant (Term_typing.SideEffects handle) senv.env kn ce
| ConstantEntry (PureEntry, ce) ->
Term_typing.translate_constant Term_typing.Pure senv.env kn ce
| GlobalRecipe r ->
diff --git a/kernel/term_typing.ml b/kernel/term_typing.ml
index 5ccc23eefc..fb1b3e236c 100644
--- a/kernel/term_typing.ml
+++ b/kernel/term_typing.ml
@@ -27,159 +27,12 @@ module NamedDecl = Context.Named.Declaration
(* Insertion of constants and parameters in environment. *)
-type side_effect = {
- from_env : Declarations.structure_body CEphemeron.key;
- eff : side_eff list;
-}
-
-module SideEffects :
-sig
- type t
- val repr : t -> side_effect list
- val empty : t
- val add : side_effect -> t -> t
- val concat : t -> t -> t
-end =
-struct
-
-module SeffOrd = struct
-type t = side_effect
-let compare e1 e2 =
- let cmp e1 e2 = Constant.CanOrd.compare e1.seff_constant e2.seff_constant in
- List.compare cmp e1.eff e2.eff
-end
-
-module SeffSet = Set.Make(SeffOrd)
-
-type t = { seff : side_effect list; elts : SeffSet.t }
-(** Invariant: [seff] is a permutation of the elements of [elts] *)
-
-let repr eff = eff.seff
-let empty = { seff = []; elts = SeffSet.empty }
-let add x es =
- if SeffSet.mem x es.elts then es
- else { seff = x :: es.seff; elts = SeffSet.add x es.elts }
-let concat xes yes =
- List.fold_right add xes.seff yes
-
-end
-
-type side_effects = SideEffects.t
+type 'a effect_handler =
+ env -> Constr.t -> 'a -> (Constr.t * Univ.ContextSet.t * int)
type _ trust =
| Pure : unit trust
-| SideEffects : structure_body -> side_effects trust
-
-let uniq_seff_rev = SideEffects.repr
-let uniq_seff l =
- let ans = List.rev (SideEffects.repr l) in
- List.map_append (fun { eff ; _ } -> eff) ans
-
-let empty_seff = SideEffects.empty
-let add_seff mb eff effs =
- let from_env = CEphemeron.create mb in
- SideEffects.add { eff; from_env } effs
-let concat_seff = SideEffects.concat
-
-let mk_pure_proof c = (c, Univ.ContextSet.empty), empty_seff
-
-let inline_side_effects env body ctx side_eff =
- (** First step: remove the constants that are still in the environment *)
- let filter { eff = se; from_env = mb } =
- let map e = (e.seff_constant, e.seff_body, e.seff_env) in
- let cbl = List.map map se in
- let not_exists (c,_,_) =
- try ignore(Environ.lookup_constant c env); false
- with Not_found -> true in
- let cbl = List.filter not_exists cbl in
- (cbl, mb)
- in
- (* CAVEAT: we assure that most recent effects come first *)
- let side_eff = List.map filter (uniq_seff_rev side_eff) in
- let sigs = List.rev_map (fun (cbl, mb) -> mb, List.length cbl) side_eff in
- let side_eff = List.fold_left (fun accu (cbl, _) -> cbl @ accu) [] side_eff in
- let side_eff = List.rev side_eff in
- (** Most recent side-effects first in side_eff *)
- if List.is_empty side_eff then (body, ctx, sigs)
- else
- (** Second step: compute the lifts and substitutions to apply *)
- let cname c = Name (Label.to_id (Constant.label c)) in
- let fold (subst, var, ctx, args) (c, cb, b) =
- let (b, opaque) = match cb.const_body, b with
- | Def b, _ -> (Mod_subst.force_constr b, false)
- | OpaqueDef _, `Opaque (b,_) -> (b, true)
- | _ -> assert false
- in
- match cb.const_universes with
- | Monomorphic_const univs ->
- (** Abstract over the term at the top of the proof *)
- let ty = cb.const_type in
- let subst = Cmap_env.add c (Inr var) subst in
- let ctx = Univ.ContextSet.union ctx univs in
- (subst, var + 1, ctx, (cname c, b, ty, opaque) :: args)
- | Polymorphic_const _auctx ->
- (** Inline the term to emulate universe polymorphism *)
- let subst = Cmap_env.add c (Inl b) subst in
- (subst, var, ctx, args)
- in
- let (subst, len, ctx, args) = List.fold_left fold (Cmap_env.empty, 1, ctx, []) side_eff in
- (** Third step: inline the definitions *)
- let rec subst_const i k t = match Constr.kind t with
- | Const (c, u) ->
- let data = try Some (Cmap_env.find c subst) with Not_found -> None in
- begin match data with
- | None -> t
- | Some (Inl b) ->
- (** [b] is closed but may refer to other constants *)
- subst_const i k (Vars.subst_instance_constr u b)
- | Some (Inr n) ->
- mkRel (k + n - i)
- end
- | Rel n ->
- (** Lift free rel variables *)
- if n <= k then t
- else mkRel (n + len - i - 1)
- | _ -> Constr.map_with_binders ((+) 1) (fun k t -> subst_const i k t) k t
- in
- let map_args i (na, b, ty, opaque) =
- (** Both the type and the body may mention other constants *)
- let ty = subst_const (len - i - 1) 0 ty in
- let b = subst_const (len - i - 1) 0 b in
- (na, b, ty, opaque)
- in
- let args = List.mapi map_args args in
- let body = subst_const 0 0 body in
- let fold_arg (na, b, ty, opaque) accu =
- if opaque then mkApp (mkLambda (na, ty, accu), [|b|])
- else mkLetIn (na, b, ty, accu)
- in
- let body = List.fold_right fold_arg args body in
- (body, ctx, sigs)
-
-let rec is_nth_suffix n l suf =
- if Int.equal n 0 then l == suf
- else match l with
- | [] -> false
- | _ :: l -> is_nth_suffix (pred n) l suf
-
-(* Given the list of signatures of side effects, checks if they match.
- * I.e. if they are ordered descendants of the current revstruct.
- Returns the number of effects that can be trusted. *)
-let check_signatures curmb sl =
- let is_direct_ancestor accu (mb, how_many) =
- match accu with
- | None -> None
- | Some (n, curmb) ->
- try
- let mb = CEphemeron.get mb in
- if is_nth_suffix how_many mb curmb
- then Some (n + how_many, mb)
- else None
- with CEphemeron.InvalidKey -> None in
- let sl = List.fold_left is_direct_ancestor (Some (0, curmb)) sl in
- match sl with
- | None -> 0
- | Some (n, _) -> n
+| SideEffects : 'a effect_handler -> 'a trust
let skip_trusted_seff sl b e =
let rec aux sl b e acc =
@@ -259,9 +112,9 @@ let infer_declaration (type a) ~(trust : a trust) env (dcl : a constant_entry) =
let j = infer env body in
let _ = judge_of_cast env j DEFAULTcast tyj in
j, uctx
- | SideEffects mb ->
- let (body, uctx, signatures) = inline_side_effects env body uctx side_eff in
- let valid_signatures = check_signatures mb signatures in
+ | SideEffects handle ->
+ let (body, uctx', valid_signatures) = handle env body side_eff in
+ let uctx = Univ.ContextSet.union uctx uctx' in
let env = push_context_set uctx env in
let body,env,ectx = skip_trusted_seff valid_signatures body env in
let j = infer env body in
@@ -286,9 +139,11 @@ let infer_declaration (type a) ~(trust : a trust) env (dcl : a constant_entry) =
let { const_entry_type = typ; const_entry_opaque = opaque ; _ } = c in
let { const_entry_body = body; const_entry_feedback = feedback_id; _ } = c in
let (body, ctx), side_eff = Future.join body in
- let body, ctx, _ = match trust with
- | Pure -> body, ctx, []
- | SideEffects _ -> inline_side_effects env body ctx side_eff
+ let body, ctx = match trust with
+ | Pure -> body, ctx
+ | SideEffects handle ->
+ let body, ctx', _ = handle env body side_eff in
+ body, Univ.ContextSet.union ctx ctx'
in
let env, usubst, univs = match c.const_entry_universes with
| Monomorphic_const_entry univs ->
@@ -431,101 +286,6 @@ let translate_constant mb env kn ce =
build_constant_declaration kn env
(infer_declaration ~trust:mb env ce)
-let constant_entry_of_side_effect cb u =
- let univs =
- match cb.const_universes with
- | Monomorphic_const uctx ->
- Monomorphic_const_entry uctx
- | Polymorphic_const auctx ->
- Polymorphic_const_entry (Univ.AUContext.repr auctx)
- in
- let pt =
- match cb.const_body, u with
- | OpaqueDef _, `Opaque (b, c) -> b, c
- | Def b, `Nothing -> Mod_subst.force_constr b, Univ.ContextSet.empty
- | _ -> assert false in
- DefinitionEntry {
- const_entry_body = Future.from_val (pt, ());
- const_entry_secctx = None;
- const_entry_feedback = None;
- const_entry_type = Some cb.const_type;
- const_entry_universes = univs;
- const_entry_opaque = Declareops.is_opaque cb;
- const_entry_inline_code = cb.const_inline_code }
-;;
-
-let turn_direct orig =
- let cb = orig.seff_body in
- if Declareops.is_opaque cb then
- let p = match orig.seff_env with
- | `Opaque (b, c) -> (b, c)
- | _ -> assert false
- in
- let const_body = OpaqueDef (Opaqueproof.create (Future.from_val p)) in
- let cb = { cb with const_body } in
- { orig with seff_body = cb }
- else orig
-
-type exported_side_effect =
- Constant.t * constant_body * side_effect_role
-
-let export_eff eff =
- (eff.seff_constant, eff.seff_body, eff.seff_role)
-
-let export_side_effects mb env c =
- let { const_entry_body = body; _ } = c in
- let _, eff = Future.force body in
- let ce = { c with
- const_entry_body = Future.chain body
- (fun (b_ctx, _) -> b_ctx, ()) } in
- let not_exists e =
- try ignore(Environ.lookup_constant e.seff_constant env); false
- with Not_found -> true in
- let aux (acc,sl) { eff = se; from_env = mb } =
- let cbl = List.filter not_exists se in
- if List.is_empty cbl then acc, sl
- else cbl :: acc, (mb,List.length cbl) :: sl in
- let seff, signatures = List.fold_left aux ([],[]) (uniq_seff_rev eff) in
- let trusted = check_signatures mb signatures in
- let push_seff env eff =
- let { seff_constant = kn; seff_body = cb ; _ } = eff in
- let env = Environ.add_constant kn cb env in
- match cb.const_universes with
- | Polymorphic_const _ -> env
- | Monomorphic_const ctx ->
- let ctx = match eff.seff_env with
- | `Nothing -> ctx
- | `Opaque(_, ctx') -> Univ.ContextSet.union ctx' ctx
- in
- Environ.push_context_set ~strict:true ctx env
- in
- let rec translate_seff sl seff acc env =
- match seff with
- | [] -> List.rev acc, ce
- | cbs :: rest ->
- if Int.equal sl 0 then
- let env, cbs =
- List.fold_left (fun (env,cbs) eff ->
- let { seff_constant = kn; seff_body = ocb; seff_env = u ; _ } = eff in
- let ce = constant_entry_of_side_effect ocb u in
- let cb = translate_constant Pure env kn ce in
- let eff = { eff with
- seff_body = cb;
- seff_env = `Nothing;
- } in
- (push_seff env eff, export_eff eff :: cbs))
- (env,[]) cbs in
- translate_seff 0 rest (cbs @ acc) env
- else
- let cbs_len = List.length cbs in
- let cbs = List.map turn_direct cbs in
- let env = List.fold_left push_seff env cbs in
- let ecbs = List.map export_eff cbs in
- translate_seff (sl - cbs_len) rest (ecbs @ acc) env
- in
- translate_seff trusted seff [] env
-;;
-
let translate_local_assum env t =
let j = infer env t in
let t = Typeops.assumption_of_judgment env j in
@@ -578,13 +338,3 @@ let translate_local_def env _id centry =
(* Insertion of inductive types. *)
let translate_mind env kn mie = Indtypes.check_inductive env kn mie
-
-let inline_entry_side_effects env ce = { ce with
- const_entry_body = Future.chain
- ce.const_entry_body (fun ((body, ctx), side_eff) ->
- let body, ctx',_ = inline_side_effects env body ctx side_eff in
- (body, ctx'), ());
-}
-
-let inline_side_effects env body side_eff =
- pi1 (inline_side_effects env body Univ.ContextSet.empty side_eff)
diff --git a/kernel/term_typing.mli b/kernel/term_typing.mli
index ab25090b00..faf434c142 100644
--- a/kernel/term_typing.mli
+++ b/kernel/term_typing.mli
@@ -14,53 +14,27 @@ open Environ
open Declarations
open Entries
-type side_effects
+(** Handlers are used to manage side-effects. The ['a] type stands for the type
+ of side-effects, and it is parametric because they are only defined later
+ on. Handlers inline the provided side-effects into the term, and return
+ the set of additional global constraints that need to be added for the term
+ to be well typed. *)
+type 'a effect_handler =
+ env -> Constr.t -> 'a -> (Constr.t * Univ.ContextSet.t * int)
type _ trust =
| Pure : unit trust
-| SideEffects : structure_body -> side_effects trust
+| SideEffects : 'a effect_handler -> 'a trust
val translate_local_def : env -> Id.t -> section_def_entry ->
constr * types
val translate_local_assum : env -> types -> types
-val mk_pure_proof : constr -> side_effects proof_output
-
-val inline_side_effects : env -> constr -> side_effects -> constr
-(** Returns the term where side effects have been turned into let-ins or beta
- redexes. *)
-
-val inline_entry_side_effects :
- env -> side_effects definition_entry -> unit definition_entry
-(** Same as {!inline_side_effects} but applied to entries. Only modifies the
- {!Entries.const_entry_body} field. It is meant to get a term out of a not
- yet type checked proof. *)
-
-val empty_seff : side_effects
-val add_seff : Declarations.structure_body -> Entries.side_eff list -> side_effects -> side_effects
-val concat_seff : side_effects -> side_effects -> side_effects
-(** [concat_seff e1 e2] adds the side-effects of [e1] to [e2], i.e. effects in
- [e1] must be more recent than those of [e2]. *)
-val uniq_seff : side_effects -> side_eff list
-(** Return the list of individual side-effects in the order of their
- creation. *)
-
val translate_constant :
'a trust -> env -> Constant.t -> 'a constant_entry ->
constant_body
-type exported_side_effect =
- Constant.t * constant_body * side_effect_role
-
-(* Given a constant entry containing side effects it exports them (either
- * by re-checking them or trusting them). Returns the constant bodies to
- * be pushed in the safe_env by safe typing. The main constant entry
- * needs to be translated as usual after this step. *)
-val export_side_effects :
- structure_body -> env -> side_effects definition_entry ->
- exported_side_effect list * unit definition_entry
-
val translate_mind :
env -> MutInd.t -> mutual_inductive_entry -> mutual_inductive_body
diff --git a/kernel/typeops.ml b/kernel/typeops.ml
index 164a47dd9a..1bb2d3c79c 100644
--- a/kernel/typeops.ml
+++ b/kernel/typeops.ml
@@ -319,6 +319,52 @@ let check_fixpoint env lna lar vdef vdeft =
with NotConvertibleVect i ->
error_ill_typed_rec_body env i lna (make_judgev vdef vdeft) lar
+(* Global references *)
+
+let type_of_global_in_context env r =
+ let open Names.GlobRef in
+ match r with
+ | VarRef id -> Environ.named_type id env, Univ.AUContext.empty
+ | ConstRef c ->
+ let cb = Environ.lookup_constant c env in
+ let univs = Declareops.constant_polymorphic_context cb in
+ cb.Declarations.const_type, univs
+ | IndRef ind ->
+ let (mib,_ as specif) = Inductive.lookup_mind_specif env ind in
+ let univs = Declareops.inductive_polymorphic_context mib in
+ let inst = Univ.make_abstract_instance univs in
+ let env = Environ.push_context ~strict:false (Univ.AUContext.repr univs) env in
+ Inductive.type_of_inductive env (specif, inst), univs
+ | ConstructRef cstr ->
+ let (mib,_ as specif) =
+ Inductive.lookup_mind_specif env (inductive_of_constructor cstr)
+ in
+ let univs = Declareops.inductive_polymorphic_context mib in
+ let inst = Univ.make_abstract_instance univs in
+ Inductive.type_of_constructor (cstr,inst) specif, univs
+
+(* Build a fresh instance for a given context, its associated substitution and
+ the instantiated constraints. *)
+
+let constr_of_global_in_context env r =
+ let open GlobRef in
+ match r with
+ | VarRef id -> mkVar id, Univ.AUContext.empty
+ | ConstRef c ->
+ let cb = lookup_constant c env in
+ let univs = Declareops.constant_polymorphic_context cb in
+ mkConstU (c, Univ.make_abstract_instance univs), univs
+ | IndRef ind ->
+ let (mib,_) = Inductive.lookup_mind_specif env ind in
+ let univs = Declareops.inductive_polymorphic_context mib in
+ mkIndU (ind, Univ.make_abstract_instance univs), univs
+ | ConstructRef cstr ->
+ let (mib,_) =
+ Inductive.lookup_mind_specif env (inductive_of_constructor cstr)
+ in
+ let univs = Declareops.inductive_polymorphic_context mib in
+ mkConstructU (cstr, Univ.make_abstract_instance univs), univs
+
(************************************************************************)
(************************************************************************)
@@ -469,25 +515,19 @@ let infer_v env cv =
(* Typing of several terms. *)
-let infer_local_decl env id = function
- | Entries.LocalDefEntry c ->
- let () = check_wellformed_universes env c in
- let t = execute env c in
- RelDecl.LocalDef (Name id, c, t)
- | Entries.LocalAssumEntry c ->
- let () = check_wellformed_universes env c in
- let t = execute env c in
- RelDecl.LocalAssum (Name id, check_assumption env c t)
-
-let infer_local_decls env decls =
- let rec inferec env = function
- | (id, d) :: l ->
- let (env, l) = inferec env l in
- let d = infer_local_decl env id d in
- (push_rel d env, Context.Rel.add d l)
- | [] -> (env, Context.Rel.empty)
- in
- inferec env decls
+let check_context env rels =
+ let open Context.Rel.Declaration in
+ Context.Rel.fold_outside (fun d env ->
+ match d with
+ | LocalAssum (_,ty) ->
+ let _ = infer_type env ty in
+ push_rel d env
+ | LocalDef (_,bd,ty) ->
+ let j1 = infer env bd in
+ let _ = infer_type env ty in
+ conv_leq false env j1.uj_type ty;
+ push_rel d env)
+ rels ~init:env
let judge_of_prop = make_judge mkProp type1
let judge_of_set = make_judge mkSet type1
diff --git a/kernel/typeops.mli b/kernel/typeops.mli
index 57acdfe4b5..d24002065b 100644
--- a/kernel/typeops.mli
+++ b/kernel/typeops.mli
@@ -12,7 +12,6 @@ open Names
open Constr
open Univ
open Environ
-open Entries
(** {6 Typing functions (not yet tagged as safe) }
@@ -27,8 +26,8 @@ val infer : env -> constr -> unsafe_judgment
val infer_v : env -> constr array -> unsafe_judgment array
val infer_type : env -> types -> unsafe_type_judgment
-val infer_local_decls :
- env -> (Id.t * local_entry) list -> (env * Constr.rel_context)
+val check_context :
+ env -> Constr.rel_context -> env
(** {6 Basic operations of the typing machine. } *)
@@ -54,11 +53,10 @@ val type_of_variable : env -> variable -> types
val judge_of_variable : env -> variable -> unsafe_judgment
(** {6 type of a constant } *)
-
+val type_of_constant_in : env -> pconstant -> types
val judge_of_constant : env -> pconstant -> unsafe_judgment
(** {6 type of an applied projection } *)
-
val judge_of_projection : env -> Projection.t -> unsafe_judgment -> unsafe_judgment
(** {6 Type of application. } *)
@@ -89,9 +87,7 @@ val judge_of_cast :
unsafe_judgment
(** {6 Inductive types. } *)
-
val judge_of_inductive : env -> inductive puniverses -> unsafe_judgment
-
val judge_of_constructor : env -> constructor puniverses -> unsafe_judgment
(** {6 Type of Cases. } *)
@@ -99,7 +95,25 @@ val judge_of_case : env -> case_info
-> unsafe_judgment -> unsafe_judgment -> unsafe_judgment array
-> unsafe_judgment
-val type_of_constant_in : env -> pconstant -> types
+(** {6 Type of global references. } *)
+
+val type_of_global_in_context : env -> GlobRef.t -> types * Univ.AUContext.t
+(** Returns the type of the global reference, by creating a fresh
+ instance of polymorphic references and computing their
+ instantiated universe context. The type should not be used
+ without pushing it's universe context in the environmnent of
+ usage. For non-universe-polymorphic constants, it does not
+ matter. *)
+
+(** {6 Building a term from a global reference *)
+
+(** Map a global reference to a term in its local universe
+ context. The term should not be used without pushing it's universe
+ context in the environmnent of usage. For non-universe-polymorphic
+ constants, it does not matter. *)
+val constr_of_global_in_context : env -> GlobRef.t -> types * Univ.AUContext.t
+
+(** {6 Miscellaneous. } *)
(** Check that hyps are included in env and fails with error otherwise *)
val check_hyps_inclusion : env -> ('a -> constr) -> 'a -> Constr.named_context -> unit
diff --git a/lib/coqProject_file.ml b/lib/coqProject_file.ml
index c2bcd73fff..d0b01453a0 100644
--- a/lib/coqProject_file.ml
+++ b/lib/coqProject_file.ml
@@ -29,6 +29,7 @@ type project = {
v_files : string sourced list;
mli_files : string sourced list;
ml4_files : string sourced list;
+ mlg_files : string sourced list;
ml_files : string sourced list;
mllib_files : string sourced list;
mlpack_files : string sourced list;
@@ -66,6 +67,7 @@ let mk_project project_file makefile install_kind use_ocamlopt = {
v_files = [];
mli_files = [];
ml4_files = [];
+ mlg_files = [];
ml_files = [];
mllib_files = [];
mlpack_files = [];
@@ -223,6 +225,7 @@ let process_cmd_line orig_dir proj args =
{ proj with v_files = proj.v_files @ [sourced f] }
| ".ml" -> { proj with ml_files = proj.ml_files @ [sourced f] }
| ".ml4" -> { proj with ml4_files = proj.ml4_files @ [sourced f] }
+ | ".mlg" -> { proj with mlg_files = proj.mlg_files @ [sourced f] }
| ".mli" -> { proj with mli_files = proj.mli_files @ [sourced f] }
| ".mllib" -> { proj with mllib_files = proj.mllib_files @ [sourced f] }
| ".mlpack" -> { proj with mlpack_files = proj.mlpack_files @ [sourced f] }
@@ -249,9 +252,9 @@ let rec find_project_file ~from ~projfile_name =
else find_project_file ~from:newdir ~projfile_name
;;
-let all_files { v_files; ml_files; mli_files; ml4_files;
+let all_files { v_files; ml_files; mli_files; ml4_files; mlg_files;
mllib_files; mlpack_files } =
- v_files @ mli_files @ ml4_files @ ml_files @ mllib_files @ mlpack_files
+ v_files @ mli_files @ ml4_files @ mlg_files @ ml_files @ mllib_files @ mlpack_files
let map_sourced_list f l = List.map (fun x -> f x.thing) l
;;
diff --git a/lib/coqProject_file.mli b/lib/coqProject_file.mli
index 5780bb5d78..2a6a09a9a0 100644
--- a/lib/coqProject_file.mli
+++ b/lib/coqProject_file.mli
@@ -23,6 +23,7 @@ type project = {
v_files : string sourced list;
mli_files : string sourced list;
ml4_files : string sourced list;
+ mlg_files : string sourced list;
ml_files : string sourced list;
mllib_files : string sourced list;
mlpack_files : string sourced list;
diff --git a/lib/lib.mllib b/lib/lib.mllib
index 41b3622a99..206b2504db 100644
--- a/lib/lib.mllib
+++ b/lib/lib.mllib
@@ -1,5 +1,3 @@
-Coq_config
-
Hook
Flags
Control
diff --git a/library/coqlib.ml b/library/coqlib.ml
index 677515981a..784360dc8a 100644
--- a/library/coqlib.ml
+++ b/library/coqlib.ml
@@ -14,7 +14,6 @@ open Pp
open Names
open Libnames
open Globnames
-open Nametab
let make_dir l = DirPath.make (List.rev_map Id.of_string l)
@@ -79,7 +78,7 @@ let register_ref s c =
(* Generic functions to find Coq objects *)
let has_suffix_in_dirs dirs ref =
- let dir = dirpath (path_of_global ref) in
+ let dir = dirpath (Nametab.path_of_global ref) in
List.exists (fun d -> is_dirpath_prefix_of d dir) dirs
let gen_reference_in_modules locstr dirs s =
@@ -228,8 +227,7 @@ type coq_eq_data = {
(* Leibniz equality on Type *)
-let build_eqdata_gen lib str =
- let _ = check_required_library lib in {
+let build_eqdata_gen str = {
eq = lib_ref ("core." ^ str ^ ".type");
ind = lib_ref ("core." ^ str ^ ".ind");
refl = lib_ref ("core." ^ str ^ ".refl");
@@ -238,9 +236,9 @@ let build_eqdata_gen lib str =
congr = lib_ref ("core." ^ str ^ ".congr");
}
-let build_coq_eq_data () = build_eqdata_gen logic_module_name "eq"
-let build_coq_jmeq_data () = build_eqdata_gen jmeq_module_name "JMeq"
-let build_coq_identity_data () = build_eqdata_gen datatypes_module_name "identity"
+let build_coq_eq_data () = build_eqdata_gen "eq"
+let build_coq_jmeq_data () = build_eqdata_gen "JMeq"
+let build_coq_identity_data () = build_eqdata_gen "identity"
(* Inversion data... *)
diff --git a/library/global.ml b/library/global.ml
index 1ad72afea1..3781ff3230 100644
--- a/library/global.ml
+++ b/library/global.ml
@@ -167,48 +167,8 @@ let env_of_context hyps =
open Globnames
-(** Build a fresh instance for a given context, its associated substitution and
- the instantiated constraints. *)
-
-let constr_of_global_in_context env r =
- let open Constr in
- match r with
- | VarRef id -> mkVar id, Univ.AUContext.empty
- | ConstRef c ->
- let cb = Environ.lookup_constant c env in
- let univs = Declareops.constant_polymorphic_context cb in
- mkConstU (c, Univ.make_abstract_instance univs), univs
- | IndRef ind ->
- let (mib, oib as specif) = Inductive.lookup_mind_specif env ind in
- let univs = Declareops.inductive_polymorphic_context mib in
- mkIndU (ind, Univ.make_abstract_instance univs), univs
- | ConstructRef cstr ->
- let (mib,oib as specif) =
- Inductive.lookup_mind_specif env (inductive_of_constructor cstr)
- in
- let univs = Declareops.inductive_polymorphic_context mib in
- mkConstructU (cstr, Univ.make_abstract_instance univs), univs
-
-let type_of_global_in_context env r =
- match r with
- | VarRef id -> Environ.named_type id env, Univ.AUContext.empty
- | ConstRef c ->
- let cb = Environ.lookup_constant c env in
- let univs = Declareops.constant_polymorphic_context cb in
- cb.Declarations.const_type, univs
- | IndRef ind ->
- let (mib, oib as specif) = Inductive.lookup_mind_specif env ind in
- let univs = Declareops.inductive_polymorphic_context mib in
- let inst = Univ.make_abstract_instance univs in
- let env = Environ.push_context ~strict:false (Univ.AUContext.repr univs) env in
- Inductive.type_of_inductive env (specif, inst), univs
- | ConstructRef cstr ->
- let (mib,oib as specif) =
- Inductive.lookup_mind_specif env (inductive_of_constructor cstr)
- in
- let univs = Declareops.inductive_polymorphic_context mib in
- let inst = Univ.make_abstract_instance univs in
- Inductive.type_of_constructor (cstr,inst) specif, univs
+let constr_of_global_in_context = Typeops.constr_of_global_in_context
+let type_of_global_in_context = Typeops.type_of_global_in_context
let universes_of_global gr =
universes_of_global (env ()) gr
diff --git a/library/global.mli b/library/global.mli
index 29255a70ff..42a8005a4f 100644
--- a/library/global.mli
+++ b/library/global.mli
@@ -129,17 +129,11 @@ val is_type_in_type : GlobRef.t -> bool
val constr_of_global_in_context : Environ.env ->
GlobRef.t -> Constr.types * Univ.AUContext.t
-(** Returns the type of the constant in its local universe
- context. The type should not be used without pushing it's universe
- context in the environmnent of usage. For non-universe-polymorphic
- constants, it does not matter. *)
+ [@@ocaml.deprecated "alias of [Typeops.constr_of_global_in_context]"]
val type_of_global_in_context : Environ.env ->
GlobRef.t -> Constr.types * Univ.AUContext.t
-(** Returns the type of the constant in its local universe
- context. The type should not be used without pushing it's universe
- context in the environmnent of usage. For non-universe-polymorphic
- constants, it does not matter. *)
+ [@@ocaml.deprecated "alias of [Typeops.type_of_global]"]
(** Returns the universe context of the global reference (whatever its polymorphic status is). *)
val universes_of_global : GlobRef.t -> Univ.AUContext.t
diff --git a/man/coq-interface.1 b/man/coq-interface.1
deleted file mode 100644
index ee013d952e..0000000000
--- a/man/coq-interface.1
+++ /dev/null
@@ -1,37 +0,0 @@
-.TH COQ 1 "April 25, 2001"
-
-.SH NAME
-coq\-interface \- Customized Coq toplevel to make user interfaces
-
-
-.SH SYNOPSIS
-.B coq-interface
-[
-.B options
-]
-
-.SH DESCRIPTION
-
-.B coq-interface
-is a Coq customized toplevel system for Coq containing some modules
-useful for the graphical interface. This program is not for the casual
-user.
-
-.SH OPTIONS
-
-.TP
-.B \-h
-Help. Will give you the complete list of options accepted by
-coq-interface (the same as coqtop).
-
-.SH SEE ALSO
-
-.BR coqc (1),
-.BR coqdep (1),
-.BR coqtop (1),
-.BR coq\-parser (1).
-.br
-.I
-The Coq Reference Manual.
-.I
-The Coq web site: http://coq.inria.fr
diff --git a/man/coq-parser.1 b/man/coq-parser.1
deleted file mode 100644
index 23dc820193..0000000000
--- a/man/coq-parser.1
+++ /dev/null
@@ -1,30 +0,0 @@
-.TH COQ 1 "April 25, 2001"
-
-.SH NAME
-coq\-parser \- Coq parser
-
-
-.SH SYNOPSIS
-.B coq\-parser
-[
-.B options
-]
-
-.SH DESCRIPTION
-
-.B parser
-is a program reading Coq proof developments and outputing them in the
-structured format given in the INRIA technical report RT154. This
-program is not for the casual user.
-
-.SH SEE ALSO
-
-.BR coq\-interface (1),
-.BR coqc (1),
-.BR coqtop (1),
-.BR coqdep (1).
-.br
-.I
-The Coq Reference Manual.
-.I
-The Coq web site: http://coq.inria.fr
diff --git a/man/dune b/man/dune
new file mode 100644
index 0000000000..359e780545
--- /dev/null
+++ b/man/dune
@@ -0,0 +1,10 @@
+(install
+ (section man)
+ (package coq)
+ (files coqc.1 coqtop.1 coqtop.byte.1 coqtop.opt.1 coqchk.1 coqdep.1 coqdoc.1 coq_makefile.1 coq-tex.1 coqwc.1))
+
+(install
+ (section man)
+ (package coqide)
+ (files coqide.1))
+
diff --git a/parsing/cLexer.ml4 b/parsing/cLexer.ml
index 9c421f5b76..2230dfc47c 100644
--- a/parsing/cLexer.ml4
+++ b/parsing/cLexer.ml
@@ -229,9 +229,11 @@ let unlocated f x = f x
(* try f x with Loc.Exc_located (_, exc) -> raise exc *)
let check_keyword str =
- let rec loop_symb = parser
- | [< ' (' ' | '\n' | '\r' | '\t' | '"') >] -> bad_token str
- | [< s >] ->
+ let rec loop_symb s = match Stream.peek s with
+ | Some (' ' | '\n' | '\r' | '\t' | '"') ->
+ Stream.junk s;
+ bad_token str
+ | _ ->
match unlocated lookup_utf8 Ploc.dummy s with
| Utf8Token (_,n) -> njunk n s; loop_symb s
| AsciiChar -> Stream.junk s; loop_symb s
@@ -240,12 +242,14 @@ let check_keyword str =
loop_symb (Stream.of_string str)
let check_ident str =
- let rec loop_id intail = parser
- | [< ' ('a'..'z' | 'A'..'Z' | '_'); s >] ->
+ let rec loop_id intail s = match Stream.peek s with
+ | Some ('a'..'z' | 'A'..'Z' | '_') ->
+ Stream.junk s;
loop_id true s
- | [< ' ('0'..'9' | ''') when intail; s >] ->
+ | Some ('0'..'9' | '\'') when intail ->
+ Stream.junk s;
loop_id true s
- | [< s >] ->
+ | _ ->
match unlocated lookup_utf8 Ploc.dummy s with
| Utf8Token (st, n) when not intail && Unicode.is_valid_ident_initial st -> njunk n s; loop_id true s
| Utf8Token (st, n) when intail && Unicode.is_valid_ident_trailing st ->
@@ -308,10 +312,11 @@ let warn_unrecognized_unicode =
strbrk (Printf.sprintf "Not considering unicode character \"%s\" of unknown \
lexical status as part of identifier \"%s\"." u id))
-let rec ident_tail loc len = parser
- | [< ' ('a'..'z' | 'A'..'Z' | '0'..'9' | ''' | '_' as c); s >] ->
+let rec ident_tail loc len s = match Stream.peek s with
+ | Some ('a'..'z' | 'A'..'Z' | '0'..'9' | '\'' | '_' as c) ->
+ Stream.junk s;
ident_tail loc (store len c) s
- | [< s >] ->
+ | _ ->
match lookup_utf8 loc s with
| Utf8Token (st, n) when Unicode.is_valid_ident_trailing st ->
ident_tail loc (nstore n len s) s
@@ -321,9 +326,9 @@ let rec ident_tail loc len = parser
warn_unrecognized_unicode ~loc:!@loc (u,id); len
| _ -> len
-let rec number len = parser
- | [< ' ('0'..'9' as c); s >] -> number (store len c) s
- | [< >] -> len
+let rec number len s = match Stream.peek s with
+ | Some ('0'..'9' as c) -> Stream.junk s; number (store len c) s
+ | _ -> len
let warn_comment_terminator_in_string =
CWarnings.create ~name:"comment-terminator-in-string" ~category:"parsing"
@@ -335,31 +340,43 @@ let warn_comment_terminator_in_string =
(* If the string being lexed is in a comment, [comm_level] is Some i with i the
current level of comments nesting. Otherwise, [comm_level] is None. *)
-let rec string loc ~comm_level bp len = parser
- | [< ''"'; esc=(parser [<''"' >] -> true | [< >] -> false); s >] ->
+let rec string loc ~comm_level bp len s = match Stream.peek s with
+ | Some '"' ->
+ Stream.junk s;
+ let esc =
+ match Stream.peek s with
+ Some '"' -> Stream.junk s; true
+ | _ -> false
+ in
if esc then string loc ~comm_level bp (store len '"') s else (loc, len)
- | [< ''('; s >] ->
- (parser
- | [< ''*'; s >] ->
+ | Some '(' ->
+ Stream.junk s;
+ (fun s -> match Stream.peek s with
+ | Some '*' ->
+ Stream.junk s;
let comm_level = Option.map succ comm_level in
string loc ~comm_level
bp (store (store len '(') '*')
s
- | [< >] ->
+ | _ ->
string loc ~comm_level bp (store len '(') s) s
- | [< ''*'; s >] ->
- (parser
- | [< '')'; s >] ->
+ | Some '*' ->
+ Stream.junk s;
+ (fun s -> match Stream.peek s with
+ | Some ')' ->
+ Stream.junk s;
let () = match comm_level with
| Some 0 ->
- warn_comment_terminator_in_string ~loc:!@loc ()
+ warn_comment_terminator_in_string ~loc:!@loc ()
| _ -> ()
in
let comm_level = Option.map pred comm_level in
string loc ~comm_level bp (store (store len '*') ')') s
- | [< >] ->
+ | _ ->
string loc ~comm_level bp (store len '*') s) s
- | [< ''\n' as c; s >] ep ->
+ | Some ('\n' as c) ->
+ Stream.junk s;
+ let ep = Stream.count s in
(* If we are parsing a comment, the string if not part of a token so we
update the first line of the location. Otherwise, we update the last
line. *)
@@ -368,8 +385,12 @@ let rec string loc ~comm_level bp len = parser
else bump_loc_line_last loc ep
in
string loc ~comm_level bp (store len c) s
- | [< 'c; s >] -> string loc ~comm_level bp (store len c) s
- | [< _ = Stream.empty >] ep ->
+ | Some c ->
+ Stream.junk s;
+ string loc ~comm_level bp (store len c) s
+ | _ ->
+ let _ = Stream.empty s in
+ let ep = Stream.count s in
let loc = set_loc_pos loc bp ep in
err loc Unterminated_string
@@ -441,25 +462,50 @@ let comment_stop ep =
comment_begin := None;
between_commands := false
-let rec comment loc bp = parser bp2
- | [< ''(';
- loc = (parser
- | [< ''*'; s >] -> push_string "(*"; comment loc bp s
- | [< >] -> push_string "("; loc );
- s >] -> comment loc bp s
- | [< ''*';
- loc = parser
- | [< '')' >] -> push_string "*)"; loc
- | [< s >] -> real_push_char '*'; comment loc bp s >] -> loc
- | [< ''"'; s >] ->
+let rec comment loc bp s =
+ let bp2 = Stream.count s in
+ match Stream.peek s with
+ Some '(' ->
+ Stream.junk s;
+ let loc =
+ try
+ match Stream.peek s with
+ Some '*' ->
+ Stream.junk s;
+ push_string "(*"; comment loc bp s
+ | _ -> push_string "("; loc
+ with Stream.Failure -> raise (Stream.Error "")
+ in
+ comment loc bp s
+ | Some '*' ->
+ Stream.junk s;
+ begin try
+ match Stream.peek s with
+ Some ')' -> Stream.junk s; push_string "*)"; loc
+ | _ -> real_push_char '*'; comment loc bp s
+ with Stream.Failure -> raise (Stream.Error "")
+ end
+ | Some '"' ->
+ Stream.junk s;
let loc, len = string loc ~comm_level:(Some 0) bp2 0 s in
push_string "\""; push_string (get_buff len); push_string "\"";
comment loc bp s
- | [< _ = Stream.empty >] ep ->
+ | _ ->
+ match try Some (Stream.empty s) with Stream.Failure -> None with
+ | Some _ ->
+ let ep = Stream.count s in
let loc = set_loc_pos loc bp ep in
err loc Unterminated_comment
- | [< ''\n' as z; s >] ep -> real_push_char z; comment (bump_loc_line loc ep) bp s
- | [< 'z; s >] -> real_push_char z; comment loc bp s
+ | _ ->
+ match Stream.peek s with
+ Some ('\n' as z) ->
+ Stream.junk s;
+ let ep = Stream.count s in
+ real_push_char z; comment (bump_loc_line loc ep) bp s
+ | Some z ->
+ Stream.junk s;
+ real_push_char z; comment loc bp s
+ | _ -> raise Stream.Failure
(* Parse a special token, using the [token_tree] *)
@@ -526,12 +572,16 @@ let process_chars loc bp c cs =
(* Parse what follows a dot *)
-let parse_after_dot loc c bp =
- parser
- | [< ' ('a'..'z' | 'A'..'Z' | '_' as d); len = ident_tail loc (store 0 d); s >] ->
+let parse_after_dot loc c bp s = match Stream.peek s with
+ | Some ('a'..'z' | 'A'..'Z' | '_' as d) ->
+ Stream.junk s;
+ let len =
+ try ident_tail loc (store 0 d) s with
+ Stream.Failure -> raise (Stream.Error "")
+ in
let field = get_buff len in
(try find_keyword loc ("."^field) s with Not_found -> FIELD field)
- | [< s >] ->
+ | _ ->
match lookup_utf8 loc s with
| Utf8Token (st, n) when Unicode.is_valid_ident_initial st ->
let len = ident_tail loc (nstore n 0 s) s in
@@ -559,12 +609,23 @@ let blank_or_eof cs =
(* Parse a token in a char stream *)
-let rec next_token loc = parser bp
- | [< ''\n' as c; s >] ep ->
+let rec next_token loc s =
+ let bp = Stream.count s in
+ match Stream.peek s with
+ | Some ('\n' as c) ->
+ Stream.junk s;
+ let ep = Stream.count s in
comm_loc bp; push_char c; next_token (bump_loc_line loc ep) s
- | [< '' ' | '\t' | '\r' as c; s >] ->
+ | Some (' ' | '\t' | '\r' as c) ->
+ Stream.junk s;
comm_loc bp; push_char c; next_token loc s
- | [< ''.' as c; t = parse_after_dot loc c bp; s >] ep ->
+ | Some ('.' as c) ->
+ Stream.junk s;
+ let t =
+ try parse_after_dot loc c bp s with
+ Stream.Failure -> raise (Stream.Error "")
+ in
+ let ep = Stream.count s in
comment_stop bp;
(* We enforce that "." should either be part of a larger keyword,
for instance ".(", or followed by a blank or eof. *)
@@ -575,42 +636,68 @@ let rec next_token loc = parser bp
between_commands := true;
| _ -> ()
in
- (t, set_loc_pos loc bp ep)
- | [< ' ('-'|'+'|'*' as c); s >] ->
+ t, set_loc_pos loc bp ep
+ | Some ('-' | '+' | '*' as c) ->
+ Stream.junk s;
let t,new_between_commands =
if !between_commands then process_sequence loc bp c s, true
else process_chars loc bp c s,false
in
comment_stop bp; between_commands := new_between_commands; t
- | [< ''?'; s >] ep ->
+ | Some '?' ->
+ Stream.junk s;
+ let ep = Stream.count s in
let t = parse_after_qmark loc bp s in
comment_stop bp; (t, set_loc_pos loc bp ep)
- | [< ' ('a'..'z' | 'A'..'Z' | '_' as c);
- len = ident_tail loc (store 0 c); s >] ep ->
+ | Some ('a'..'z' | 'A'..'Z' | '_' as c) ->
+ Stream.junk s;
+ let len =
+ try ident_tail loc (store 0 c) s with
+ Stream.Failure -> raise (Stream.Error "")
+ in
+ let ep = Stream.count s in
let id = get_buff len in
comment_stop bp;
(try find_keyword loc id s with Not_found -> IDENT id), set_loc_pos loc bp ep
- | [< ' ('0'..'9' as c); len = number (store 0 c) >] ep ->
+ | Some ('0'..'9' as c) ->
+ Stream.junk s;
+ let len =
+ try number (store 0 c) s with
+ Stream.Failure -> raise (Stream.Error "")
+ in
+ let ep = Stream.count s in
comment_stop bp;
(INT (get_buff len), set_loc_pos loc bp ep)
- | [< ''\"'; (loc,len) = string loc ~comm_level:None bp 0 >] ep ->
+ | Some '\"' ->
+ Stream.junk s;
+ let (loc, len) =
+ try string loc ~comm_level:None bp 0 s with
+ Stream.Failure -> raise (Stream.Error "")
+ in
+ let ep = Stream.count s in
comment_stop bp;
(STRING (get_buff len), set_loc_pos loc bp ep)
- | [< ' ('(' as c);
- t = parser
- | [< ''*'; s >] ->
+ | Some ('(' as c) ->
+ Stream.junk s;
+ begin try
+ match Stream.peek s with
+ | Some '*' ->
+ Stream.junk s;
comm_loc bp;
push_string "(*";
let loc = comment loc bp s in next_token loc s
- | [< t = process_chars loc bp c >] -> comment_stop bp; t >] ->
- t
- | [< ' ('{' | '}' as c); s >] ep ->
+ | _ -> let t = process_chars loc bp c s in comment_stop bp; t
+ with Stream.Failure -> raise (Stream.Error "")
+ end
+ | Some ('{' | '}' as c) ->
+ Stream.junk s;
+ let ep = Stream.count s in
let t,new_between_commands =
if !between_commands then (KEYWORD (String.make 1 c), set_loc_pos loc bp ep), true
else process_chars loc bp c s, false
in
comment_stop bp; between_commands := new_between_commands; t
- | [< s >] ->
+ | _ ->
match lookup_utf8 loc s with
| Utf8Token (st, n) when Unicode.is_valid_ident_initial st ->
let len = ident_tail loc (nstore n 0 s) s in
diff --git a/parsing/dune b/parsing/dune
index b70612a52b..0669e3a3c2 100644
--- a/parsing/dune
+++ b/parsing/dune
@@ -2,12 +2,8 @@
(name parsing)
(public_name coq.parsing)
(wrapped false)
- (libraries proofs))
-
-(rule
- (targets cLexer.ml)
- (deps (:ml4-file cLexer.ml4))
- (action (run camlp5o -loc loc -impl %{ml4-file} -o %{targets})))
+ (flags :standard -open Gramlib)
+ (libraries coq.gramlib proofs))
(rule
(targets g_prim.ml)
diff --git a/plugins/btauto/refl_btauto.ml b/plugins/btauto/refl_btauto.ml
index b9ad1ff6d8..07f50f6cd5 100644
--- a/plugins/btauto/refl_btauto.ml
+++ b/plugins/btauto/refl_btauto.ml
@@ -10,7 +10,7 @@
open Constr
-let bt_lib_constr n = lazy (UnivGen.constr_of_global @@ Coqlib.lib_ref n)
+let bt_lib_constr n = lazy (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref n)
let decomp_term sigma (c : Constr.t) =
Constr.kind (EConstr.Unsafe.to_constr (Termops.strip_outer_cast sigma (EConstr.of_constr c)))
diff --git a/plugins/extraction/table.ml b/plugins/extraction/table.ml
index 7b4fd280bd..f6eea3c5c4 100644
--- a/plugins/extraction/table.ml
+++ b/plugins/extraction/table.ml
@@ -446,7 +446,7 @@ let error_MPfile_as_mod mp b =
let argnames_of_global r =
let env = Global.env () in
- let typ, _ = Global.type_of_global_in_context env r in
+ let typ, _ = Typeops.type_of_global_in_context env r in
let rels,_ =
decompose_prod (Reduction.whd_all env typ) in
List.rev_map fst rels
@@ -878,7 +878,7 @@ let extract_constant_inline inline r ids s =
match g with
| ConstRef kn ->
let env = Global.env () in
- let typ, _ = Global.type_of_global_in_context env (ConstRef kn) in
+ let typ, _ = Typeops.type_of_global_in_context env (ConstRef kn) in
let typ = Reduction.whd_all env typ in
if Reduction.is_arity env typ
then begin
diff --git a/plugins/firstorder/rules.ml b/plugins/firstorder/rules.ml
index 8fa676de44..b0c4785d7a 100644
--- a/plugins/firstorder/rules.ml
+++ b/plugins/firstorder/rules.ml
@@ -233,12 +233,11 @@ let ll_forall_tac prod backtrack id continue seq=
(* special for compatibility with old Intuition *)
-let constant str = UnivGen.constr_of_global
- @@ Coqlib.lib_ref str
+let constant str = Coqlib.lib_ref str
let defined_connectives = lazy
- [AllOccurrences, EvalConstRef (fst (Constr.destConst (constant "core.not.type")));
- AllOccurrences, EvalConstRef (fst (Constr.destConst (constant "core.iff.type")))]
+ [AllOccurrences, EvalConstRef (destConstRef (constant "core.not.type"));
+ AllOccurrences, EvalConstRef (destConstRef (constant "core.iff.type"))]
let normalize_evaluables=
Proofview.Goal.enter begin fun gl ->
diff --git a/plugins/funind/functional_principles_proofs.ml b/plugins/funind/functional_principles_proofs.ml
index 98d68d3db7..ad1114b733 100644
--- a/plugins/funind/functional_principles_proofs.ml
+++ b/plugins/funind/functional_principles_proofs.ml
@@ -414,9 +414,9 @@ let rewrite_until_var arg_num eq_ids : tactic =
let rec_pte_id = Id.of_string "Hrec"
let clean_hyp_with_heq ptes_infos eq_hyps hyp_id env sigma =
- let coq_False = EConstr.of_constr (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.False.type") in
- let coq_True = EConstr.of_constr (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.True.type") in
- let coq_I = EConstr.of_constr (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.True.I") in
+ let coq_False = EConstr.of_constr (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.False.type") in
+ let coq_True = EConstr.of_constr (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.True.type") in
+ let coq_I = EConstr.of_constr (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.True.I") in
let rec scan_type context type_of_hyp : tactic =
if isLetIn sigma type_of_hyp then
let real_type_of_hyp = it_mkProd_or_LetIn type_of_hyp context in
@@ -1605,7 +1605,7 @@ let prove_principle_for_gen
match !tcc_lemma_ref with
| Undefined -> user_err Pp.(str "No tcc proof !!")
| Value lemma -> EConstr.of_constr lemma
- | Not_needed -> EConstr.of_constr (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.True.I")
+ | Not_needed -> EConstr.of_constr (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.True.I")
in
(* let rec list_diff del_list check_list = *)
(* match del_list with *)
diff --git a/plugins/funind/indfun_common.ml b/plugins/funind/indfun_common.ml
index 03a64988e4..28a9542167 100644
--- a/plugins/funind/indfun_common.ml
+++ b/plugins/funind/indfun_common.ml
@@ -116,7 +116,7 @@ let def_of_const t =
[@@@ocaml.warning "-3"]
let coq_constant s =
- UnivGen.constr_of_global @@
+ UnivGen.constr_of_monomorphic_global @@
Coqlib.gen_reference_in_modules "RecursiveDefinition"
Coqlib.init_modules s;;
@@ -311,7 +311,7 @@ let pr_info f_info =
str "function_constant_type := " ++
(try
Printer.pr_lconstr_env env sigma
- (fst (Global.type_of_global_in_context env (ConstRef f_info.function_constant)))
+ (fst (Typeops.type_of_global_in_context env (ConstRef f_info.function_constant)))
with e when CErrors.noncritical e -> mt ()) ++ fnl () ++
str "equation_lemma := " ++ pr_ocst f_info.equation_lemma ++ fnl () ++
str "completeness_lemma :=" ++ pr_ocst f_info.completeness_lemma ++ fnl () ++
@@ -441,7 +441,7 @@ let jmeq () =
try
Coqlib.check_required_library Coqlib.jmeq_module_name;
EConstr.of_constr @@
- UnivGen.constr_of_global @@
+ UnivGen.constr_of_monomorphic_global @@
Coqlib.lib_ref "core.JMeq.type"
with e when CErrors.noncritical e -> raise (ToShow e)
@@ -449,7 +449,7 @@ let jmeq_refl () =
try
Coqlib.check_required_library Coqlib.jmeq_module_name;
EConstr.of_constr @@
- UnivGen.constr_of_global @@
+ UnivGen.constr_of_monomorphic_global @@
Coqlib.lib_ref "core.JMeq.refl"
with e when CErrors.noncritical e -> raise (ToShow e)
@@ -463,7 +463,7 @@ let acc_rel = function () -> EConstr.of_constr (coq_constant "Acc")
let acc_inv_id = function () -> EConstr.of_constr (coq_constant "Acc_inv")
[@@@ocaml.warning "-3"]
-let well_founded_ltof () = EConstr.of_constr @@ UnivGen.constr_of_global @@
+let well_founded_ltof () = EConstr.of_constr @@ UnivGen.constr_of_monomorphic_global @@
Coqlib.find_reference "IndFun" ["Coq"; "Arith";"Wf_nat"] "well_founded_ltof"
[@@@ocaml.warning "+3"]
diff --git a/plugins/funind/invfun.ml b/plugins/funind/invfun.ml
index b8973a18dc..b0842c3721 100644
--- a/plugins/funind/invfun.ml
+++ b/plugins/funind/invfun.ml
@@ -81,7 +81,7 @@ let thin ids gl = Proofview.V82.of_tactic (Tactics.clear ids) gl
let make_eq () =
try
- EConstr.of_constr (UnivGen.constr_of_global (Coqlib.lib_ref "core.eq.type"))
+ EConstr.of_constr (UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.eq.type"))
with _ -> assert false
(* [generate_type g_to_f f graph i] build the completeness (resp. correctness) lemma type if [g_to_f = true]
@@ -511,7 +511,7 @@ and intros_with_rewrite_aux : Tacmach.tactic =
intros_with_rewrite
] g
end
- | Ind _ when EConstr.eq_constr sigma t (EConstr.of_constr (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.False.type")) ->
+ | Ind _ when EConstr.eq_constr sigma t (EConstr.of_constr (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.False.type")) ->
Proofview.V82.of_tactic tauto g
| Case(_,_,v,_) ->
tclTHENLIST[
diff --git a/plugins/funind/plugin_base.dune b/plugins/funind/plugin_base.dune
index 002eb28eea..9f583234d8 100644
--- a/plugins/funind/plugin_base.dune
+++ b/plugins/funind/plugin_base.dune
@@ -2,4 +2,5 @@
(name recdef_plugin)
(public_name coq.plugins.recdef)
(synopsis "Coq's functional induction plugin")
+ (flags :standard -open Gramlib)
(libraries coq.plugins.extraction))
diff --git a/plugins/funind/recdef.ml b/plugins/funind/recdef.ml
index ca3160f4c4..f9df3aed45 100644
--- a/plugins/funind/recdef.ml
+++ b/plugins/funind/recdef.ml
@@ -51,7 +51,7 @@ open Context.Rel.Declaration
(* Ugly things which should not be here *)
[@@@ocaml.warning "-3"]
-let coq_constant m s = EConstr.of_constr @@ constr_of_global @@
+let coq_constant m s = EConstr.of_constr @@ UnivGen.constr_of_monomorphic_global @@
Coqlib.find_reference "RecursiveDefinition" m s
let arith_Nat = ["Coq"; "Arith";"PeanoNat";"Nat"]
@@ -63,7 +63,7 @@ let pr_leconstr_rd =
let coq_init_constant s =
EConstr.of_constr (
- constr_of_global @@
+ UnivGen.constr_of_monomorphic_global @@
Coqlib.gen_reference_in_modules "RecursiveDefinition" Coqlib.init_modules s)
[@@@ocaml.warning "+3"]
@@ -97,7 +97,7 @@ let type_of_const sigma t =
Typeops.type_of_constant_in (Global.env()) (sp, u)
|_ -> assert false
-let constant sl s = constr_of_global (find_reference sl s)
+let constant sl s = UnivGen.constr_of_monomorphic_global (find_reference sl s)
let const_of_ref = function
ConstRef kn -> kn
@@ -1241,7 +1241,7 @@ let get_current_subgoals_types () =
exception EmptySubgoals
let build_and_l sigma l =
- let and_constr = UnivGen.constr_of_global @@ Coqlib.lib_ref "core.and.type" in
+ let and_constr = UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.and.type" in
let conj_constr = Coqlib.build_coq_conj () in
let mk_and p1 p2 =
mkApp(EConstr.of_constr and_constr,[|p1;p2|]) in
diff --git a/plugins/ltac/plugin_base.dune b/plugins/ltac/plugin_base.dune
index 5611f5ba16..1b31655310 100644
--- a/plugins/ltac/plugin_base.dune
+++ b/plugins/ltac/plugin_base.dune
@@ -3,6 +3,7 @@
(public_name coq.plugins.ltac)
(synopsis "Coq's LTAC tactic language")
(modules :standard \ tauto)
+ (flags :standard -open Gramlib)
(libraries coq.stm))
(library
diff --git a/plugins/ltac/rewrite.ml b/plugins/ltac/rewrite.ml
index 9dd98a4ab7..9f7669f1d5 100644
--- a/plugins/ltac/rewrite.ml
+++ b/plugins/ltac/rewrite.ml
@@ -89,8 +89,8 @@ let goalevars evars = fst evars
let cstrevars evars = snd evars
let new_cstr_evar (evd,cstrs) env t =
- let s = Typeclasses.set_resolvable Evd.Store.empty false in
- let (evd', t) = Evarutil.new_evar ~store:s env evd t in
+ (** We handle the typeclass resolution of constraints ourselves *)
+ let (evd', t) = Evarutil.new_evar env evd ~typeclass_candidate:false t in
let ev, _ = destEvar evd' t in
(evd', Evar.Set.add ev cstrs), t
@@ -632,9 +632,6 @@ let solve_remaining_by env sigma holes by =
let no_constraints cstrs =
fun ev _ -> not (Evar.Set.mem ev cstrs)
-let all_constraints cstrs =
- fun ev _ -> Evar.Set.mem ev cstrs
-
let poly_inverse sort =
if sort then PropGlobal.inverse else TypeGlobal.inverse
@@ -1453,10 +1450,11 @@ let apply_strategy (s : strategy) env unfresh concl (prop, cstr) evars =
res
let solve_constraints env (evars,cstrs) =
- let filter = all_constraints cstrs in
- Typeclasses.resolve_typeclasses env ~filter ~split:false ~fail:true
- (Typeclasses.mark_resolvables ~filter evars)
-
+ let oldtcs = Evd.get_typeclass_evars evars in
+ let evars' = Evd.set_typeclass_evars evars cstrs in
+ let evars' = Typeclasses.resolve_typeclasses env ~filter:all_evars ~split:false ~fail:true evars' in
+ Evd.set_typeclass_evars evars' oldtcs
+
let nf_zeta =
Reductionops.clos_norm_flags (CClosure.RedFlags.mkflags [CClosure.RedFlags.fZETA])
diff --git a/plugins/ltac/tacenv.ml b/plugins/ltac/tacenv.ml
index 1f2c722b34..a88285c9ee 100644
--- a/plugins/ltac/tacenv.ml
+++ b/plugins/ltac/tacenv.ml
@@ -115,7 +115,6 @@ let interp_ml_tactic { mltac_name = s; mltac_index = i } =
(* Summary and Object declaration *)
-open Nametab
open Libobject
type ltac_entry = {
@@ -153,19 +152,19 @@ let tac_deprecation kn =
let load_md i ((sp, kn), (local, id, b, t, deprecation)) = match id with
| None ->
- let () = if not local then push_tactic (Until i) sp kn in
+ let () = if not local then push_tactic (Nametab.Until i) sp kn in
add ~deprecation kn b t
| Some kn0 -> replace kn0 kn t
let open_md i ((sp, kn), (local, id, b, t, deprecation)) = match id with
| None ->
- let () = if not local then push_tactic (Exactly i) sp kn in
+ let () = if not local then push_tactic (Nametab.Exactly i) sp kn in
add ~deprecation kn b t
| Some kn0 -> replace kn0 kn t
let cache_md ((sp, kn), (local, id ,b, t, deprecation)) = match id with
| None ->
- let () = push_tactic (Until 1) sp kn in
+ let () = push_tactic (Nametab.Until 1) sp kn in
add ~deprecation kn b t
| Some kn0 -> replace kn0 kn t
diff --git a/plugins/ltac/tacintern.ml b/plugins/ltac/tacintern.ml
index 5501cf92a5..55412c74bb 100644
--- a/plugins/ltac/tacintern.ml
+++ b/plugins/ltac/tacintern.ml
@@ -19,7 +19,6 @@ open Util
open Names
open Libnames
open Globnames
-open Nametab
open Smartlocate
open Constrexpr
open Termops
@@ -98,7 +97,7 @@ let intern_global_reference ist qid =
ArgVar (make ?loc:qid.CAst.loc @@ qualid_basename qid)
else
try ArgArg (qid.CAst.loc,locate_global_with_alias qid)
- with Not_found -> error_global_not_found qid
+ with Not_found -> Nametab.error_global_not_found qid
let intern_ltac_variable ist qid =
if qualid_is_ident qid && find_var (qualid_basename qid) ist then
@@ -150,7 +149,7 @@ let intern_isolated_tactic_reference strict ist qid =
try ConstrMayEval (ConstrTerm (intern_constr_reference strict ist qid))
with Not_found ->
(* Reference not found *)
- error_global_not_found qid
+ Nametab.error_global_not_found qid
(* Internalize an applied tactic reference *)
@@ -169,7 +168,7 @@ let intern_applied_tactic_reference ist qid =
try intern_applied_global_tactic_reference qid
with Not_found ->
(* Reference not found *)
- error_global_not_found qid
+ Nametab.error_global_not_found qid
(* Intern a reference parsed in a non-tactic entry *)
@@ -190,7 +189,7 @@ let intern_non_tactic_reference strict ist qid =
TacGeneric ipat
else
(* Reference not found *)
- error_global_not_found qid
+ Nametab.error_global_not_found qid
let intern_message_token ist = function
| (MsgString _ | MsgInt _ as x) -> x
@@ -302,7 +301,7 @@ let intern_evaluable_global_reference ist qid =
try evaluable_of_global_reference ist.genv (locate_global_with_alias ~head:true qid)
with Not_found ->
if qualid_is_ident qid && not !strict_check then EvalVarRef (qualid_basename qid)
- else error_global_not_found qid
+ else Nametab.error_global_not_found qid
let intern_evaluable_reference_or_by_notation ist = function
| {v=AN r} -> intern_evaluable_global_reference ist r
@@ -377,7 +376,7 @@ let intern_typed_pattern_or_ref_with_occurrences ist (l,p) =
subterm matched when a pattern *)
let r = match r with
| {v=AN r} -> r
- | {loc} -> (qualid_of_path ?loc (path_of_global (smart_global r))) in
+ | {loc} -> (qualid_of_path ?loc (Nametab.path_of_global (smart_global r))) in
let sign = {
Constrintern.ltac_vars = ist.ltacvars;
ltac_bound = Id.Set.empty;
diff --git a/plugins/ltac/tacinterp.ml b/plugins/ltac/tacinterp.ml
index f90e889678..b60b77595b 100644
--- a/plugins/ltac/tacinterp.ml
+++ b/plugins/ltac/tacinterp.ml
@@ -23,7 +23,6 @@ open Names
open Nameops
open Libnames
open Globnames
-open Nametab
open Refiner
open Tacmach.New
open Tactic_debug
@@ -358,7 +357,7 @@ let interp_reference ist env sigma = function
with Not_found ->
try
VarRef (get_id (Environ.lookup_named id env))
- with Not_found -> error_global_not_found (qualid_of_ident ?loc id)
+ with Not_found -> Nametab.error_global_not_found (qualid_of_ident ?loc id)
let try_interp_evaluable env (loc, id) =
let v = Environ.lookup_named id env in
@@ -374,14 +373,14 @@ let interp_evaluable ist env sigma = function
with Not_found ->
match r with
| EvalConstRef _ -> r
- | _ -> error_global_not_found (qualid_of_ident ?loc id)
+ | _ -> Nametab.error_global_not_found (qualid_of_ident ?loc id)
end
| ArgArg (r,None) -> r
| ArgVar {loc;v=id} ->
try try_interp_ltac_var (coerce_to_evaluable_ref env sigma) ist (Some (env,sigma)) (make ?loc id)
with Not_found ->
try try_interp_evaluable env (loc, id)
- with Not_found -> error_global_not_found (qualid_of_ident ?loc id)
+ with Not_found -> Nametab.error_global_not_found (qualid_of_ident ?loc id)
(* Interprets an hypothesis name *)
let interp_occurrences ist occs =
@@ -640,7 +639,7 @@ let interp_closed_typed_pattern_with_occurrences ist env sigma (occs, a) =
Inr (pattern_of_constr env sigma (EConstr.to_constr sigma c)) in
(try try_interp_ltac_var coerce_eval_ref_or_constr ist (Some (env,sigma)) (make ?loc id)
with Not_found ->
- error_global_not_found (qualid_of_ident ?loc id))
+ Nametab.error_global_not_found (qualid_of_ident ?loc id))
| Inl (ArgArg _ as b) -> Inl (interp_evaluable ist env sigma b)
| Inr c -> Inr (interp_typed_pattern ist env sigma c) in
interp_occurrences ist occs, p
diff --git a/plugins/nsatz/nsatz.ml b/plugins/nsatz/nsatz.ml
index 11d0a4a44d..ef60a23e80 100644
--- a/plugins/nsatz/nsatz.ml
+++ b/plugins/nsatz/nsatz.ml
@@ -135,7 +135,7 @@ let mul = function
| (Const n,q) when eq_num n num_1 -> q
| (p,q) -> Mul(p,q)
-let gen_constant n = lazy (UnivGen.constr_of_global (Coqlib.lib_ref n))
+let gen_constant n = lazy (UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref n))
let tpexpr = gen_constant "plugins.setoid_ring.pexpr"
let ttconst = gen_constant "plugins.setoid_ring.const"
@@ -540,7 +540,7 @@ let nsatz lpol =
let return_term t =
let a =
- mkApp (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.eq.refl",[|tllp ();t|]) in
+ mkApp (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.eq.refl",[|tllp ();t|]) in
let a = EConstr.of_constr a in
generalize [a]
diff --git a/plugins/omega/coq_omega.ml b/plugins/omega/coq_omega.ml
index 09bd4cd352..d8adb17710 100644
--- a/plugins/omega/coq_omega.ml
+++ b/plugins/omega/coq_omega.ml
@@ -186,7 +186,8 @@ let reset_all () =
To use the constant Zplus, one must type "Lazy.force coq_Zplus"
This is the right way to access to Coq constants in tactics ML code *)
-let gen_constant k = lazy (k |> Coqlib.lib_ref |> UnivGen.constr_of_global |> EConstr.of_constr)
+let gen_constant k = lazy (k |> Coqlib.lib_ref |> UnivGen.constr_of_monomorphic_global
+ |> EConstr.of_constr)
(* Zarith *)
diff --git a/plugins/rtauto/refl_tauto.ml b/plugins/rtauto/refl_tauto.ml
index 79418da27c..840a05e02b 100644
--- a/plugins/rtauto/refl_tauto.ml
+++ b/plugins/rtauto/refl_tauto.ml
@@ -26,11 +26,11 @@ let step_count = ref 0
let node_count = ref 0
-let li_False = lazy (destInd (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.False.type"))
-let li_and = lazy (destInd (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.and.type"))
-let li_or = lazy (destInd (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.or.type"))
+let li_False = lazy (destInd (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.False.type"))
+let li_and = lazy (destInd (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.and.type"))
+let li_or = lazy (destInd (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.or.type"))
-let gen_constant n = lazy (UnivGen.constr_of_global (Coqlib.lib_ref n))
+let gen_constant n = lazy (UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref n))
let l_xI = gen_constant "num.pos.xI"
let l_xO = gen_constant "num.pos.xO"
diff --git a/plugins/setoid_ring/newring.ml b/plugins/setoid_ring/newring.ml
index 9585826109..a2dce621d9 100644
--- a/plugins/setoid_ring/newring.ml
+++ b/plugins/setoid_ring/newring.ml
@@ -164,7 +164,7 @@ let ltac_call tac (args:glob_tactic_arg list) =
let dummy_goal env sigma =
let (gl,_,sigma) =
- Goal.V82.mk_goal sigma (named_context_val env) EConstr.mkProp Evd.Store.empty in
+ Goal.V82.mk_goal sigma (named_context_val env) EConstr.mkProp in
{Evd.it = gl; Evd.sigma = sigma}
let constr_of evd v = match Value.to_constr v with
@@ -206,7 +206,7 @@ let exec_tactic env evd n f args =
let nf c = constr_of evd c in
Array.map nf !tactic_res, Evd.universe_context_set evd
-let gen_constant n = lazy (EConstr.of_constr (UnivGen.constr_of_global (Coqlib.lib_ref n)))
+let gen_constant n = lazy (EConstr.of_constr (UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref n)))
let gen_reference n = lazy (Coqlib.lib_ref n)
let coq_mk_Setoid = gen_constant "plugins.setoid_ring.Build_Setoid_Theory"
@@ -251,7 +251,7 @@ let plugin_modules =
]
let my_constant c =
- lazy (EConstr.of_constr (UnivGen.constr_of_global @@ Coqlib.gen_reference_in_modules "Ring" plugin_modules c))
+ lazy (EConstr.of_constr (UnivGen.constr_of_monomorphic_global @@ Coqlib.gen_reference_in_modules "Ring" plugin_modules c))
[@@ocaml.warning "-3"]
let my_reference c =
lazy (Coqlib.gen_reference_in_modules "Ring" plugin_modules c)
@@ -901,7 +901,7 @@ let ftheory_to_obj : field_info -> obj =
let field_equality evd r inv req =
match EConstr.kind !evd req with
| App (f, [| _ |]) when eq_constr_nounivs !evd f (Lazy.force coq_eq) ->
- let c = UnivGen.constr_of_global Coqlib.(lib_ref "core.eq.congr") in
+ let c = UnivGen.constr_of_monomorphic_global Coqlib.(lib_ref "core.eq.congr") in
let c = EConstr.of_constr c in
mkApp(c,[|r;r;inv|])
| _ ->
diff --git a/plugins/ssr/plugin_base.dune b/plugins/ssr/plugin_base.dune
index de9053f1a0..a13524bb52 100644
--- a/plugins/ssr/plugin_base.dune
+++ b/plugins/ssr/plugin_base.dune
@@ -3,4 +3,5 @@
(public_name coq.plugins.ssreflect)
(synopsis "Coq's ssreflect plugin")
(modules_without_implementation ssrast)
+ (flags :standard -open Gramlib)
(libraries coq.plugins.ssrmatching))
diff --git a/plugins/ssr/ssrcommon.ml b/plugins/ssr/ssrcommon.ml
index 1492cfb4e4..a284c3bfc7 100644
--- a/plugins/ssr/ssrcommon.ml
+++ b/plugins/ssr/ssrcommon.ml
@@ -730,13 +730,10 @@ let pf_abs_prod name gl c cl = pf_mkprod gl c ~name (Termops.subst_term (project
(** look up a name in the ssreflect internals module *)
let ssrdirpath = DirPath.make [Id.of_string "ssreflect"]
let ssrqid name = Libnames.make_qualid ssrdirpath (Id.of_string name)
-let ssrtopqid name = Libnames.qualid_of_ident (Id.of_string name)
-let locate_reference qid =
- Smartlocate.global_of_extended_global (Nametab.locate_extended qid)
let mkSsrRef name =
- try locate_reference (ssrqid name) with Not_found ->
- try locate_reference (ssrtopqid name) with Not_found ->
- CErrors.user_err (Pp.str "Small scale reflection library not loaded")
+ let qn = Format.sprintf "plugins.ssreflect.%s" name in
+ if Coqlib.has_ref qn then Coqlib.lib_ref qn else
+ CErrors.user_err Pp.(str "Small scale reflection library not loaded (" ++ str name ++ str ")")
let mkSsrRRef name = (DAst.make @@ GRef (mkSsrRef name,None)), None
let mkSsrConst name env sigma =
EConstr.fresh_global env sigma (mkSsrRef name)
@@ -1220,7 +1217,7 @@ let genclrtac cl cs clr =
(fun type_err gl ->
tclTHEN
(tclTHEN (Proofview.V82.of_tactic (Tactics.elim_type (EConstr.of_constr
- (UnivGen.constr_of_global @@ Coqlib.(lib_ref "core.False.type"))))) (old_cleartac clr))
+ (UnivGen.constr_of_monomorphic_global @@ Coqlib.(lib_ref "core.False.type"))))) (old_cleartac clr))
(fun gl -> raise type_err)
gl))
(old_cleartac clr)
@@ -1365,7 +1362,7 @@ let tacTYPEOF c = Goal.enter_one ~__LOC__ (fun g ->
(** This tactic creates a partial proof realizing the introduction rule, but
does not check anything. *)
-let unsafe_intro env store decl b =
+let unsafe_intro env decl b =
let open Context.Named.Declaration in
Refine.refine ~typecheck:false begin fun sigma ->
let ctx = Environ.named_context_val env in
@@ -1374,7 +1371,7 @@ let unsafe_intro env store decl b =
let ninst = EConstr.mkRel 1 :: inst in
let nb = EConstr.Vars.subst1 (EConstr.mkVar (get_id decl)) b in
let sigma, ev =
- Evarutil.new_evar_instance nctx sigma nb ~principal:true ~store ninst in
+ Evarutil.new_evar_instance nctx sigma nb ~principal:true ninst in
sigma, EConstr.mkNamedLambda_or_LetIn decl ev
end
@@ -1418,7 +1415,7 @@ let-in even after reduction, it fails. In case of success, the original name
and final id are passed to the continuation [k] which gets evaluated. *)
let tclINTRO ~id ~conclusion:k = Goal.enter begin fun gl ->
let open Context in
- let env, sigma, extra, g = Goal.(env gl, sigma gl, extra gl, concl gl) in
+ let env, sigma, g = Goal.(env gl, sigma gl, concl gl) in
let decl, t, no_red = decompose_assum env sigma g in
let original_name = Rel.Declaration.get_name decl in
let already_used = Tacmach.New.pf_ids_of_hyps gl in
@@ -1433,7 +1430,7 @@ let tclINTRO ~id ~conclusion:k = Goal.enter begin fun gl ->
in
if List.mem id already_used then
errorstrm Pp.(Id.print id ++ str" already used");
- unsafe_intro env extra (set_decl_id id decl) t <*>
+ unsafe_intro env (set_decl_id id decl) t <*>
(if no_red then tclUNIT () else tclFULL_BETAIOTA) <*>
k ~orig_name:original_name ~new_name:id
end
diff --git a/plugins/ssr/ssrcommon.mli b/plugins/ssr/ssrcommon.mli
index 9ba23467e7..566a933522 100644
--- a/plugins/ssr/ssrcommon.mli
+++ b/plugins/ssr/ssrcommon.mli
@@ -212,8 +212,7 @@ val pf_abs_prod :
EConstr.t -> Goal.goal Evd.sigma * EConstr.types
val mkSsrRRef : string -> Glob_term.glob_constr * 'a option
-val mkSsrRef : string -> GlobRef.t
-val mkSsrConst :
+val mkSsrConst :
string ->
env -> evar_map -> evar_map * EConstr.t
val pf_mkSsrConst :
diff --git a/plugins/ssr/ssreflect.v b/plugins/ssr/ssreflect.v
index 460bdc6d23..e43cab094b 100644
--- a/plugins/ssr/ssreflect.v
+++ b/plugins/ssr/ssreflect.v
@@ -159,6 +159,10 @@ Definition abstract (statement : Type) (id : nat) (lock : abstract_lock) :=
Notation "<hidden n >" := (abstract _ n _).
Notation "T (* n *)" := (abstract T n abstract_key).
+Register abstract_lock as plugins.ssreflect.abstract_lock.
+Register abstract_key as plugins.ssreflect.abstract_key.
+Register abstract as plugins.ssreflect.abstract.
+
(* Constants for tactic-views *)
Inductive external_view : Type := tactic_view of Type.
@@ -287,6 +291,8 @@ Variant phant (p : Type) := Phant.
Definition protect_term (A : Type) (x : A) : A := x.
+Register protect_term as plugins.ssreflect.protect_term.
+
(* The ssreflect idiom for a non-keyed pattern: *)
(* - unkeyed t wiil match any subterm that unifies with t, regardless of *)
(* whether it displays the same head symbol as t. *)
@@ -336,6 +342,9 @@ Notation nosimpl t := (let: tt := tt in t).
Lemma master_key : unit. Proof. exact tt. Qed.
Definition locked A := let: tt := master_key in fun x : A => x.
+Register master_key as plugins.ssreflect.master_key.
+Register locked as plugins.ssreflect.locked.
+
Lemma lock A x : x = locked x :> A. Proof. unlock; reflexivity. Qed.
(* Needed for locked predicates, in particular for eqType's. *)
@@ -395,12 +404,18 @@ Definition ssr_have_let Pgoal Plemma step
(rest : let x : Plemma := step in Pgoal) : Pgoal := rest.
Arguments ssr_have_let [Pgoal].
+Register ssr_have as plugins.ssreflect.ssr_have.
+Register ssr_have_let as plugins.ssreflect.ssr_have_let.
+
Definition ssr_suff Plemma Pgoal step (rest : Plemma) : Pgoal := step rest.
Arguments ssr_suff Plemma [Pgoal].
Definition ssr_wlog := ssr_suff.
Arguments ssr_wlog Plemma [Pgoal].
+Register ssr_suff as plugins.ssreflect.ssr_suff.
+Register ssr_wlog as plugins.ssreflect.ssr_wlog.
+
(* Internal N-ary congruence lemmas for the congr tactic. *)
Fixpoint nary_congruence_statement (n : nat)
@@ -425,6 +440,9 @@ Lemma ssr_congr_arrow Plemma Pgoal : Plemma = Pgoal -> Plemma -> Pgoal.
Proof. by move->. Qed.
Arguments ssr_congr_arrow : clear implicits.
+Register nary_congruence as plugins.ssreflect.nary_congruence.
+Register ssr_congr_arrow as plugins.ssreflect.ssr_congr_arrow.
+
(* View lemmas that don't use reflection. *)
Section ApplyIff.
diff --git a/plugins/ssr/ssrelim.ml b/plugins/ssr/ssrelim.ml
index 7f9a9e125e..5067d8af31 100644
--- a/plugins/ssr/ssrelim.ml
+++ b/plugins/ssr/ssrelim.ml
@@ -16,7 +16,6 @@ open Printer
open Term
open Constr
open Termops
-open Globnames
open Tactypes
open Tacmach
@@ -98,6 +97,11 @@ let subgoals_tys sigma (relctx, concl) =
* generalize the equality in case eqid is not None
* 4. build the tactic handle intructions and clears as required in ipats and
* by eqid *)
+
+let get_eq_type gl =
+ let eq, gl = pf_fresh_global Coqlib.(lib_ref "core.eq.type") gl in
+ gl, EConstr.of_constr eq
+
let ssrelim ?(ind=ref None) ?(is_case=false) deps what ?elim eqid elim_intro_tac gl =
(* some sanity checks *)
let oc, orig_clr, occ, c_gen, gl = match what with
@@ -115,8 +119,6 @@ let ssrelim ?(ind=ref None) ?(is_case=false) deps what ?elim eqid elim_intro_tac
let orig_gl, concl, env = gl, pf_concl gl, pf_env gl in
ppdebug(lazy(Pp.str(if is_case then "==CASE==" else "==ELIM==")));
let fire_subst gl t = Reductionops.nf_evar (project gl) t in
- let eq, gl = pf_fresh_global Coqlib.(lib_ref "core.eq.type") gl in
- let eq = EConstr.of_constr eq in
let is_undef_pat = function
| sigma, T t -> EConstr.isEvar sigma (EConstr.of_constr t)
| _ -> false in
@@ -322,6 +324,7 @@ let ssrelim ?(ind=ref None) ?(is_case=false) deps what ?elim eqid elim_intro_tac
let k = List.length deps in
let c = fire_subst gl (List.assoc (n_elim_args - k - 1) elim_args) in
let gl, t = pfe_type_of gl c in
+ let gl, eq = get_eq_type gl in
let gen_eq_tac, gl =
let refl = EConstr.mkApp (eq, [|t; c; c|]) in
let new_concl = EConstr.mkArrow refl (EConstr.Vars.lift 1 (pf_concl orig_gl)) in
@@ -421,7 +424,7 @@ let injectl2rtac sigma c = match EConstr.kind sigma c with
let is_injection_case c gl =
let gl, cty = pfe_type_of gl c in
let (mind,_), _ = pf_reduce_to_quantified_ind gl cty in
- GlobRef.equal (IndRef mind) Coqlib.(lib_ref "core.eq.type")
+ Coqlib.check_ind_ref "core.eq.type" mind
let perform_injection c gl =
let gl, cty = pfe_type_of gl c in
diff --git a/plugins/ssr/ssrvernac.mlg b/plugins/ssr/ssrvernac.mlg
index 876751911b..940defb743 100644
--- a/plugins/ssr/ssrvernac.mlg
+++ b/plugins/ssr/ssrvernac.mlg
@@ -360,7 +360,7 @@ let coerce_search_pattern_to_sort hpat =
Pattern.PApp (fp, args') in
let hr, na = splay_search_pattern 0 hpat in
let dc, ht =
- let hr, _ = Global.type_of_global_in_context (Global.env ()) hr (** FIXME *) in
+ let hr, _ = Typeops.type_of_global_in_context env hr (** FIXME *) in
Reductionops.splay_prod env sigma (EConstr.of_constr hr) in
let np = List.length dc in
if np < na then CErrors.user_err (Pp.str "too many arguments in head search pattern") else
diff --git a/plugins/ssrmatching/plugin_base.dune b/plugins/ssrmatching/plugin_base.dune
index 06f67c3774..1450a94de1 100644
--- a/plugins/ssrmatching/plugin_base.dune
+++ b/plugins/ssrmatching/plugin_base.dune
@@ -2,4 +2,5 @@
(name ssrmatching_plugin)
(public_name coq.plugins.ssrmatching)
(synopsis "Coq ssrmatching plugin")
+ (flags :standard -open Gramlib)
(libraries coq.plugins.ltac))
diff --git a/plugins/ssrmatching/ssrmatching.ml b/plugins/ssrmatching/ssrmatching.ml
index 4a63dd4708..7f67487f5d 100644
--- a/plugins/ssrmatching/ssrmatching.ml
+++ b/plugins/ssrmatching/ssrmatching.ml
@@ -356,8 +356,10 @@ let nf_open_term sigma0 ise c =
let unif_end env sigma0 ise0 pt ok =
let ise = Evarconv.solve_unif_constraints_with_heuristics env ise0 in
+ let tcs = Evd.get_typeclass_evars ise in
let s, uc, t = nf_open_term sigma0 ise pt in
let ise1 = create_evar_defs s in
+ let ise1 = Evd.set_typeclass_evars ise1 (Evar.Set.filter (fun ev -> Evd.is_undefined ise1 ev) tcs) in
let ise1 = Evd.set_universe_context ise1 uc in
let ise2 = Typeclasses.resolve_typeclasses ~fail:true env ise1 in
if not (ok ise) then raise NoProgress else
@@ -1045,7 +1047,7 @@ let thin id sigma goal =
match ans with
| None -> sigma
| Some (sigma, hyps, concl) ->
- let (gl,ev,sigma) = Goal.V82.mk_goal sigma hyps concl (Goal.V82.extra sigma goal) in
+ let (gl,ev,sigma) = Goal.V82.mk_goal sigma hyps concl in
let sigma = Goal.V82.partial_solution_to sigma goal gl ev in
sigma
diff --git a/pretyping/cases.ml b/pretyping/cases.ml
index 54e847988b..164f5ab96d 100644
--- a/pretyping/cases.ml
+++ b/pretyping/cases.ml
@@ -296,8 +296,7 @@ let inductive_template env sigma tmloc ind =
let ty = EConstr.of_constr ty in
let ty' = substl subst ty in
let sigma, e =
- Evarutil.new_evar env ~src:(hole_source n)
- sigma ty'
+ Evarutil.new_evar env ~src:(hole_source n) ~typeclass_candidate:false sigma ty'
in
(sigma, e::subst,e::evarl,n+1)
| LocalDef (na,b,ty) ->
@@ -1698,7 +1697,7 @@ let abstract_tycon ?loc env sigma subst tycon extenv t =
(fun i _ ->
try list_assoc_in_triple i subst0 with Not_found -> mkRel i)
1 (rel_context !!env) in
- let sigma, ev' = Evarutil.new_evar ~src !!env sigma ty in
+ let sigma, ev' = Evarutil.new_evar ~src ~typeclass_candidate:false !!env sigma ty in
begin match solve_simple_eqn (evar_conv_x full_transparent_state) !!env sigma (None,ev,substl inst ev') with
| Success evd -> evdref := evd
| UnifFailure _ -> assert false
@@ -1734,7 +1733,7 @@ let abstract_tycon ?loc env sigma subst tycon extenv t =
(named_context !!extenv) in
let filter = Filter.make (rel_filter @ named_filter) in
let candidates = List.rev (u :: List.map mkRel vl) in
- let sigma, ev = Evarutil.new_evar !!extenv ~src ~filter ~candidates sigma ty in
+ let sigma, ev = Evarutil.new_evar !!extenv ~src ~filter ~candidates ~typeclass_candidate:false sigma ty in
let () = evdref := sigma in
lift k ev
in
diff --git a/pretyping/cbv.ml b/pretyping/cbv.ml
index 265909980b..5061aeff88 100644
--- a/pretyping/cbv.ml
+++ b/pretyping/cbv.ml
@@ -134,7 +134,12 @@ let mkSTACK = function
| STACK(0,v0,stk0), stk -> STACK(0,v0,stack_concat stk0 stk)
| v,stk -> STACK(0,v,stk)
-type cbv_infos = { tab : cbv_value infos_tab; infos : cbv_value infos; sigma : Evd.evar_map }
+type cbv_infos = {
+ env : Environ.env;
+ tab : cbv_value KeyTable.t;
+ reds : RedFlags.reds;
+ sigma : Evd.evar_map
+}
(* Change: zeta reduction cannot be avoided in CBV *)
@@ -260,8 +265,8 @@ let rec norm_head info env t stack =
| Proj (p, c) ->
let p' =
- if red_set (info_flags info.infos) (fCONST (Projection.constant p))
- && red_set (info_flags info.infos) fBETA
+ if red_set info.reds (fCONST (Projection.constant p))
+ && red_set info.reds fBETA
then Projection.unfold p
else p
in
@@ -280,16 +285,16 @@ let rec norm_head info env t stack =
| Var id -> norm_head_ref 0 info env stack (VarKey id)
| Const sp ->
- Reductionops.reduction_effect_hook (env_of_infos info.infos) info.sigma
+ Reductionops.reduction_effect_hook info.env info.sigma
(fst sp) (lazy (reify_stack t stack));
norm_head_ref 0 info env stack (ConstKey sp)
| LetIn (_, b, _, c) ->
(* zeta means letin are contracted; delta without zeta means we *)
(* allow bindings but leave let's in place *)
- if red_set (info_flags info.infos) fZETA then
+ if red_set info.reds fZETA then
(* New rule: for Cbv, Delta does not apply to locally bound variables
- or red_set (info_flags info.infos) fDELTA
+ or red_set info.reds fDELTA
*)
let env' = subs_cons ([|cbv_stack_term info TOP env b|],env) in
norm_head info env' c stack
@@ -297,7 +302,7 @@ let rec norm_head info env t stack =
(CBN(t,env), stack) (* Should we consider a commutative cut ? *)
| Evar ev ->
- (match evar_value info.infos.i_cache ev with
+ (match Reductionops.safe_evar_value info.sigma ev with
Some c -> norm_head info env c stack
| None ->
let e, xs = ev in
@@ -317,8 +322,8 @@ let rec norm_head info env t stack =
| Prod _ -> (CBN(t,env), stack)
and norm_head_ref k info env stack normt =
- if red_set_ref (info_flags info.infos) normt then
- match ref_value_cache info.infos info.tab normt with
+ if red_set_ref info.reds normt then
+ match cbv_value_cache info normt with
| Some body ->
if !debug_cbv then Feedback.msg_debug Pp.(str "Unfolding " ++ debug_pr_key normt);
strip_appl (shift_value k body) stack
@@ -343,7 +348,7 @@ and cbv_stack_term info stack env t =
and cbv_stack_value info env = function
(* a lambda meets an application -> BETA *)
| (LAM (nlams,ctxt,b,env), APP (args, stk))
- when red_set (info_flags info.infos) fBETA ->
+ when red_set info.reds fBETA ->
let nargs = Array.length args in
if nargs == nlams then
cbv_stack_term info stk (subs_cons(args,env)) b
@@ -357,31 +362,31 @@ and cbv_stack_value info env = function
(* a Fix applied enough -> IOTA *)
| (FIXP(fix,env,[||]), stk)
- when fixp_reducible (info_flags info.infos) fix stk ->
+ when fixp_reducible info.reds fix stk ->
let (envf,redfix) = contract_fixp env fix in
cbv_stack_term info stk envf redfix
(* constructor guard satisfied or Cofix in a Case -> IOTA *)
| (COFIXP(cofix,env,[||]), stk)
- when cofixp_reducible (info_flags info.infos) cofix stk->
+ when cofixp_reducible info.reds cofix stk->
let (envf,redfix) = contract_cofixp env cofix in
cbv_stack_term info stk envf redfix
(* constructor in a Case -> IOTA *)
| (CONSTR(((sp,n),u),[||]), APP(args,CASE(_,br,ci,env,stk)))
- when red_set (info_flags info.infos) fMATCH ->
+ when red_set info.reds fMATCH ->
let cargs =
Array.sub args ci.ci_npar (Array.length args - ci.ci_npar) in
cbv_stack_term info (stack_app cargs stk) env br.(n-1)
(* constructor of arity 0 in a Case -> IOTA *)
| (CONSTR(((_,n),u),[||]), CASE(_,br,_,env,stk))
- when red_set (info_flags info.infos) fMATCH ->
+ when red_set info.reds fMATCH ->
cbv_stack_term info stk env br.(n-1)
(* constructor in a Projection -> IOTA *)
| (CONSTR(((sp,n),u),[||]), APP(args,PROJ(p,stk)))
- when red_set (info_flags info.infos) fMATCH && Projection.unfolded p ->
+ when red_set info.reds fMATCH && Projection.unfolded p ->
let arg = args.(Projection.npars p + Projection.arg p) in
cbv_stack_value info env (strip_appl arg stk)
@@ -393,6 +398,29 @@ and cbv_stack_value info env = function
(* definitely a value *)
| (head,stk) -> mkSTACK(head, stk)
+and cbv_value_cache info ref = match KeyTable.find info.tab ref with
+| v -> Some v
+| exception Not_found ->
+ try
+ let body = match ref with
+ | RelKey n ->
+ let open Context.Rel.Declaration in
+ begin match Environ.lookup_rel n info.env with
+ | LocalDef (_, c, _) -> lift n c
+ | LocalAssum _ -> raise Not_found
+ end
+ | VarKey id ->
+ let open Context.Named.Declaration in
+ begin match Environ.lookup_named id info.env with
+ | LocalDef (_, c, _) -> c
+ | LocalAssum _ -> raise Not_found
+ end
+ | ConstKey cst -> Environ.constant_value_in info.env cst
+ in
+ let v = cbv_stack_term info TOP (subs_id 0) body in
+ let () = KeyTable.add info.tab ref v in
+ Some v
+ with Not_found | Environ.NotEvaluableConst _ -> None
(* When we are sure t will never produce a redex with its stack, we
* normalize (even under binders) the applied terms and we build the
@@ -453,11 +481,5 @@ let cbv_norm infos constr =
EConstr.of_constr (with_stats (lazy (cbv_norm_term infos (subs_id 0) constr)))
(* constant bodies are normalized at the first expansion *)
-let create_cbv_infos flgs env sigma =
- let infos = create
- ~share:true (** Not used by cbv *)
- ~repr:(fun old_info tab c -> cbv_stack_term { tab; infos = old_info; sigma } TOP (subs_id 0) c)
- flgs
- env
- (Reductionops.safe_evar_value sigma) in
- { tab = CClosure.create_tab (); infos; sigma }
+let create_cbv_infos reds env sigma =
+ { tab = KeyTable.create 91; reds; env; sigma }
diff --git a/pretyping/classops.ml b/pretyping/classops.ml
index b026397abf..2c2a8fe49e 100644
--- a/pretyping/classops.ml
+++ b/pretyping/classops.ml
@@ -15,7 +15,6 @@ open Names
open Constr
open Libnames
open Globnames
-open Nametab
open Libobject
open Mod_subst
@@ -228,14 +227,14 @@ let string_of_class = function
| CL_FUN -> "Funclass"
| CL_SORT -> "Sortclass"
| CL_CONST sp ->
- string_of_qualid (shortest_qualid_of_global Id.Set.empty (ConstRef sp))
+ string_of_qualid (Nametab.shortest_qualid_of_global Id.Set.empty (ConstRef sp))
| CL_PROJ sp ->
let sp = Projection.Repr.constant sp in
- string_of_qualid (shortest_qualid_of_global Id.Set.empty (ConstRef sp))
+ string_of_qualid (Nametab.shortest_qualid_of_global Id.Set.empty (ConstRef sp))
| CL_IND sp ->
- string_of_qualid (shortest_qualid_of_global Id.Set.empty (IndRef sp))
+ string_of_qualid (Nametab.shortest_qualid_of_global Id.Set.empty (IndRef sp))
| CL_SECVAR sp ->
- string_of_qualid (shortest_qualid_of_global Id.Set.empty (VarRef sp))
+ string_of_qualid (Nametab.shortest_qualid_of_global Id.Set.empty (VarRef sp))
let pr_class x = str (string_of_class x)
@@ -380,7 +379,7 @@ type coercion = {
(* Computation of the class arity *)
let reference_arity_length ref =
- let t, _ = Global.type_of_global_in_context (Global.env ()) ref in
+ let t, _ = Typeops.type_of_global_in_context (Global.env ()) ref in
List.length (fst (Reductionops.splay_arity (Global.env()) Evd.empty (EConstr.of_constr t))) (** FIXME *)
let projection_arity_length p =
@@ -520,7 +519,7 @@ module CoercionPrinting =
let compare = GlobRef.Ordered.compare
let encode = coercion_of_reference
let subst = subst_coe_typ
- let printer x = pr_global_env Id.Set.empty x
+ let printer x = Nametab.pr_global_env Id.Set.empty x
let key = ["Printing";"Coercion"]
let title = "Explicitly printed coercions: "
let member_message x b =
diff --git a/pretyping/detyping.ml b/pretyping/detyping.ml
index 592057ab41..072ac9deed 100644
--- a/pretyping/detyping.ml
+++ b/pretyping/detyping.ml
@@ -25,7 +25,6 @@ open Termops
open Namegen
open Libnames
open Globnames
-open Nametab
open Mod_subst
open Decl_kinds
open Context.Named.Declaration
@@ -58,7 +57,7 @@ let add_name_opt na b t (nenv, env) =
(* Tools for printing of Cases *)
let encode_inductive r =
- let indsp = global_inductive r in
+ let indsp = Nametab.global_inductive r in
let constr_lengths = constructors_nrealargs indsp in
(indsp,constr_lengths)
@@ -97,7 +96,7 @@ module PrintingInductiveMake =
let compare = ind_ord
let encode = Test.encode
let subst subst obj = subst_ind subst obj
- let printer ind = pr_global_env Id.Set.empty (IndRef ind)
+ let printer ind = Nametab.pr_global_env Id.Set.empty (IndRef ind)
let key = ["Printing";Test.field]
let title = Test.title
let member_message x = Test.member_message (printer x)
diff --git a/pretyping/dune b/pretyping/dune
index 6609b4e328..14bce92de1 100644
--- a/pretyping/dune
+++ b/pretyping/dune
@@ -3,4 +3,4 @@
(synopsis "Coq's Type Inference Component (Pretyper)")
(public_name coq.pretyping)
(wrapped false)
- (libraries camlp5.gramlib engine))
+ (libraries engine))
diff --git a/pretyping/evarconv.ml b/pretyping/evarconv.ml
index f0ff1aa93b..6a75be352b 100644
--- a/pretyping/evarconv.ml
+++ b/pretyping/evarconv.ml
@@ -711,8 +711,7 @@ and evar_eqappr_x ?(rhs_is_already_stuck = false) ts env evd pbty
in
ise_try evd [f1; f2]
- | Proj (p, c), Proj (p', c')
- when Constant.equal (Projection.constant p) (Projection.constant p') ->
+ | Proj (p, c), Proj (p', c') when Projection.repr_equal p p' ->
let f1 i =
ise_and i
[(fun i -> evar_conv_x ts env i CONV c c');
diff --git a/pretyping/evarsolve.ml b/pretyping/evarsolve.ml
index 22f438c00c..674f6846ae 100644
--- a/pretyping/evarsolve.ml
+++ b/pretyping/evarsolve.ml
@@ -1238,33 +1238,31 @@ let check_evar_instance evd evk1 body conv_algo =
| Success evd -> evd
| UnifFailure _ -> raise (IllTypedInstance (evenv,ty, evi.evar_concl))
-let update_evar_source ev1 ev2 evd =
+let update_evar_info ev1 ev2 evd =
+ (* We update the source of obligation evars during evar-evar unifications. *)
let loc, evs2 = evar_source ev2 evd in
- match evs2 with
- | (Evar_kinds.QuestionMark _ | Evar_kinds.ImplicitArg (_, _, false)) ->
- let evi = Evd.find evd ev1 in
- Evd.add evd ev1 {evi with evar_source = loc, evs2}
- | _ -> evd
-
+ let evi = Evd.find evd ev1 in
+ Evd.add evd ev1 {evi with evar_source = loc, evs2}
+
let solve_evar_evar_l2r force f g env evd aliases pbty ev1 (evk2,_ as ev2) =
try
let evd,body = project_evar_on_evar force g env evd aliases 0 pbty ev1 ev2 in
- let evd' = Evd.define evk2 body evd in
- let evd' = update_evar_source (fst (destEvar evd body)) evk2 evd' in
- check_evar_instance evd' evk2 body g
+ let evd' = Evd.define_with_evar evk2 body evd in
+ let evd' =
+ if is_obligation_evar evd evk2 then
+ update_evar_info evk2 (fst (destEvar evd' body)) evd'
+ else evd'
+ in
+ check_evar_instance evd' evk2 body g
with EvarSolvedOnTheFly (evd,c) ->
f env evd pbty ev2 c
let opp_problem = function None -> None | Some b -> Some (not b)
let preferred_orientation evd evk1 evk2 =
- let _,src1 = (Evd.find_undefined evd evk1).evar_source in
- let _,src2 = (Evd.find_undefined evd evk2).evar_source in
- (* This is a heuristic useful for program to work *)
- match src1,src2 with
- | (Evar_kinds.QuestionMark _ | Evar_kinds.ImplicitArg (_, _, false)) , _ -> true
- | _, (Evar_kinds.QuestionMark _ | Evar_kinds.ImplicitArg (_, _, false)) -> false
- | _ -> true
+ if is_obligation_evar evd evk1 then true
+ else if is_obligation_evar evd evk2 then false
+ else true
let solve_evar_evar_aux force f g env evd pbty (evk1,args1 as ev1) (evk2,args2 as ev2) =
let aliases = make_alias_map env evd in
diff --git a/pretyping/indrec.ml b/pretyping/indrec.ml
index e49ba75b3f..89f64d328a 100644
--- a/pretyping/indrec.ml
+++ b/pretyping/indrec.ml
@@ -29,7 +29,6 @@ open Inductive
open Inductiveops
open Environ
open Reductionops
-open Nametab
open Context.Rel.Declaration
type dep_flag = bool
@@ -618,6 +617,6 @@ let lookup_eliminator ind_sp s =
user_err ~hdr:"default_elim"
(strbrk "Cannot find the elimination combinator " ++
Id.print id ++ strbrk ", the elimination of the inductive definition " ++
- pr_global_env Id.Set.empty (IndRef ind_sp) ++
+ Nametab.pr_global_env Id.Set.empty (IndRef ind_sp) ++
strbrk " on sort " ++ Termops.pr_sort_family s ++
strbrk " is probably not allowed.")
diff --git a/pretyping/inferCumulativity.ml b/pretyping/inferCumulativity.ml
index a56a8314e6..422a05c19a 100644
--- a/pretyping/inferCumulativity.ml
+++ b/pretyping/inferCumulativity.ml
@@ -196,7 +196,7 @@ let infer_inductive env mie =
Array.fold_left (fun variances u -> LMap.add u Variance.Irrelevant variances)
LMap.empty uarray
in
- let env, _ = Typeops.infer_local_decls env params in
+ let env = Typeops.check_context env params in
let variances = List.fold_left (fun variances entry ->
let variances = infer_arity_constructor true
env variances entry.mind_entry_arity
diff --git a/pretyping/pretyping.ml b/pretyping/pretyping.ml
index f2881e4a19..37afcf75e1 100644
--- a/pretyping/pretyping.ml
+++ b/pretyping/pretyping.ml
@@ -510,6 +510,15 @@ let rec pretype k0 resolve_tc (tycon : type_constraint) (env : GlobEnv.t) (sigma
| Some ty -> sigma, ty
| None -> new_type_evar env sigma loc in
let sigma, uj_val = new_evar env sigma ~src:(loc,k) ~naming ty in
+ let sigma =
+ if Flags.is_program_mode () then
+ match k with
+ | Evar_kinds.QuestionMark _
+ | Evar_kinds.ImplicitArg (_, _, false) ->
+ Evd.set_obligation_evar sigma (fst (destEvar sigma uj_val))
+ | _ -> sigma
+ else sigma
+ in
sigma, { uj_val; uj_type = ty }
| GHole (k, _naming, Some arg) ->
diff --git a/pretyping/recordops.ml b/pretyping/recordops.ml
index f8dc5ba4d6..5d74b59b27 100644
--- a/pretyping/recordops.ml
+++ b/pretyping/recordops.ml
@@ -20,7 +20,6 @@ open Util
open Pp
open Names
open Globnames
-open Nametab
open Constr
open Libobject
open Mod_subst
@@ -330,7 +329,7 @@ let add_canonical_structure x = Lib.add_anonymous_leaf (inCanonStruc x)
let error_not_structure ref description =
user_err ~hdr:"object_declare"
(str"Could not declare a canonical structure " ++
- (Id.print (basename_of_global ref) ++ str"." ++ spc() ++
+ (Id.print (Nametab.basename_of_global ref) ++ str"." ++ spc() ++
description))
let check_and_decompose_canonical_structure ref =
diff --git a/pretyping/reductionops.ml b/pretyping/reductionops.ml
index 5dbe95a471..367a48cb5e 100644
--- a/pretyping/reductionops.ml
+++ b/pretyping/reductionops.ml
@@ -398,8 +398,7 @@ struct
match x, y with
| Cst_const (c1,u1), Cst_const (c2, u2) ->
Constant.equal c1 c2 && Univ.Instance.equal u1 u2
- | Cst_proj p1, Cst_proj p2 ->
- Constant.equal (Projection.constant p1) (Projection.constant p2)
+ | Cst_proj p1, Cst_proj p2 -> Projection.repr_equal p1 p2
| _, _ -> false
in
let rec equal_rec sk1 sk2 =
diff --git a/pretyping/typeclasses.ml b/pretyping/typeclasses.ml
index ce12aaeeb0..4aea2c3db9 100644
--- a/pretyping/typeclasses.ml
+++ b/pretyping/typeclasses.ml
@@ -166,6 +166,21 @@ let rec is_class_type evd c =
let is_class_evar evd evi =
is_class_type evd evi.Evd.evar_concl
+let is_class_constr sigma c =
+ try let gr, u = Termops.global_of_constr sigma c in
+ GlobRef.Map.mem gr !classes
+ with Not_found -> false
+
+let rec is_maybe_class_type evd c =
+ let c, _ = Termops.decompose_app_vect evd c in
+ match EConstr.kind evd c with
+ | Prod (_, _, t) -> is_maybe_class_type evd t
+ | Cast (t, _, _) -> is_maybe_class_type evd t
+ | Evar _ -> true
+ | _ -> is_class_constr evd c
+
+let () = Hook.set Evd.is_maybe_typeclass_hook (fun evd c -> is_maybe_class_type evd (EConstr.of_constr c))
+
(*
* classes persistent object
*)
@@ -279,7 +294,7 @@ let build_subclasses ~check env sigma glob { hint_priority = pri } =
(fun () -> incr i;
Nameops.add_suffix _id ("_subinstance_" ^ string_of_int !i))
in
- let ty, ctx = Global.type_of_global_in_context env glob in
+ let ty, ctx = Typeops.type_of_global_in_context env glob in
let inst, ctx = UnivGen.fresh_instance_from ctx None in
let ty = Vars.subst_instance_constr inst ty in
let ty = EConstr.of_constr ty in
@@ -420,7 +435,7 @@ let remove_instance i =
remove_instance_hint i.is_impl
let declare_instance info local glob =
- let ty, _ = Global.type_of_global_in_context (Global.env ()) glob in
+ let ty, _ = Typeops.type_of_global_in_context (Global.env ()) glob in
let info = Option.default {hint_priority = None; hint_pattern = None} info in
match class_of_constr Evd.empty (EConstr.of_constr ty) with
| Some (rels, ((tc,_), args) as _cl) ->
@@ -481,63 +496,29 @@ let instances r =
let is_class gr =
GlobRef.Map.exists (fun _ v -> GlobRef.equal v.cl_impl gr) !classes
-(* To embed a boolean for resolvability status.
- This is essentially a hack to mark which evars correspond to
- goals and do not need to be resolved when we have nested [resolve_all_evars]
- calls (e.g. when doing apply in an External hint in typeclass_instances).
- Would be solved by having real evars-as-goals.
-
- Nota: we will only check the resolvability status of undefined evars.
- *)
-
-let resolvable = Proofview.Unsafe.typeclass_resolvable
-
-let set_resolvable s b =
- if b then Store.remove s resolvable
- else Store.set s resolvable ()
-
-let is_resolvable evi =
- assert (match evi.evar_body with Evar_empty -> true | _ -> false);
- Option.is_empty (Store.get evi.evar_extra resolvable)
-
-let mark_resolvability_undef b evi =
- if is_resolvable evi == (b : bool) then evi
- else
- let t = set_resolvable evi.evar_extra b in
- { evi with evar_extra = t }
-
-let mark_resolvability b evi =
- assert (match evi.evar_body with Evar_empty -> true | _ -> false);
- mark_resolvability_undef b evi
-
-let mark_unresolvable evi = mark_resolvability false evi
-let mark_resolvable evi = mark_resolvability true evi
-
open Evar_kinds
-type evar_filter = Evar.t -> Evar_kinds.t -> bool
+type evar_filter = Evar.t -> Evar_kinds.t Lazy.t -> bool
+
+let make_unresolvables filter evd =
+ let tcs = Evd.get_typeclass_evars evd in
+ Evd.set_typeclass_evars evd (Evar.Set.filter (fun x -> not (filter x)) tcs)
let all_evars _ _ = true
-let all_goals _ = function VarInstance _ | GoalEvar -> true | _ -> false
+let all_goals _ source =
+ match Lazy.force source with
+ | VarInstance _ | GoalEvar -> true
+ | _ -> false
+
let no_goals ev evi = not (all_goals ev evi)
-let no_goals_or_obligations _ = function
+let no_goals_or_obligations _ source =
+ match Lazy.force source with
| VarInstance _ | GoalEvar | QuestionMark _ -> false
| _ -> true
-let mark_resolvability filter b sigma =
- let map ev evi =
- if filter ev (snd evi.evar_source) then mark_resolvability_undef b evi
- else evi
- in
- Evd.raw_map_undefined map sigma
-
-let mark_unresolvables ?(filter=all_evars) sigma = mark_resolvability filter false sigma
-let mark_resolvables ?(filter=all_evars) sigma = mark_resolvability filter true sigma
-
let has_typeclasses filter evd =
- let check ev evi =
- filter ev (snd evi.evar_source) && is_resolvable evi && is_class_evar evd evi
- in
- Evar.Map.exists check (Evd.undefined_map evd)
+ let tcs = get_typeclass_evars evd in
+ let check ev = filter ev (lazy (snd (Evd.find evd ev).evar_source)) in
+ Evar.Set.exists check tcs
let get_solve_all_instances, solve_all_instances_hook = Hook.make ()
@@ -548,7 +529,7 @@ let solve_all_instances env evd filter unique split fail =
(* let solve_classeskey = CProfile.declare_profile "solve_typeclasses" *)
(* let solve_problem = CProfile.profile5 solve_classeskey solve_problem *)
-let resolve_typeclasses ?(fast_path = true) ?(filter=no_goals) ?(unique=get_typeclasses_unique_solutions ())
+let resolve_typeclasses ?(filter=no_goals) ?(unique=get_typeclasses_unique_solutions ())
?(split=true) ?(fail=true) env evd =
- if fast_path && not (has_typeclasses filter evd) then evd
+ if not (has_typeclasses filter evd) then evd
else solve_all_instances env evd filter unique split fail
diff --git a/pretyping/typeclasses.mli b/pretyping/typeclasses.mli
index f0437be4ed..ee9c83dad3 100644
--- a/pretyping/typeclasses.mli
+++ b/pretyping/typeclasses.mli
@@ -93,7 +93,7 @@ val instance_constructor : typeclass EConstr.puniverses -> EConstr.t list ->
EConstr.t option * EConstr.t
(** Filter which evars to consider for resolution. *)
-type evar_filter = Evar.t -> Evar_kinds.t -> bool
+type evar_filter = Evar.t -> Evar_kinds.t Lazy.t -> bool
val all_evars : evar_filter
val all_goals : evar_filter
val no_goals : evar_filter
@@ -107,16 +107,12 @@ val no_goals_or_obligations : evar_filter
An unresolvable evar is an evar the type-class engine will NOT try to solve
*)
-val set_resolvable : Evd.Store.t -> bool -> Evd.Store.t
-val is_resolvable : evar_info -> bool
-val mark_unresolvable : evar_info -> evar_info
-val mark_unresolvables : ?filter:evar_filter -> evar_map -> evar_map
-val mark_resolvables : ?filter:evar_filter -> evar_map -> evar_map
-val mark_resolvable : evar_info -> evar_info
+val make_unresolvables : (Evar.t -> bool) -> evar_map -> evar_map
+
val is_class_evar : evar_map -> evar_info -> bool
val is_class_type : evar_map -> EConstr.types -> bool
-val resolve_typeclasses : ?fast_path:bool -> ?filter:evar_filter -> ?unique:bool ->
+val resolve_typeclasses : ?filter:evar_filter -> ?unique:bool ->
?split:bool -> ?fail:bool -> env -> evar_map -> evar_map
val resolve_one_typeclass : ?unique:bool -> env -> evar_map -> EConstr.types -> evar_map * EConstr.constr
diff --git a/printing/dune b/printing/dune
index 3392342165..837ac48009 100644
--- a/printing/dune
+++ b/printing/dune
@@ -2,5 +2,6 @@
(name printing)
(synopsis "Coq's Term Pretty Printing Library")
(public_name coq.printing)
+ (flags :standard -open Gramlib)
(wrapped false)
(libraries parsing proofs))
diff --git a/printing/prettyp.ml b/printing/prettyp.ml
index e6f82c60ee..4619e049e0 100644
--- a/printing/prettyp.ml
+++ b/printing/prettyp.ml
@@ -71,7 +71,7 @@ let int_or_no n = if Int.equal n 0 then str "no" else int n
let print_basename sp = pr_global (ConstRef sp)
let print_ref reduce ref udecl =
- let typ, univs = Global.type_of_global_in_context (Global.env ()) ref in
+ let typ, univs = Typeops.type_of_global_in_context (Global.env ()) ref in
let inst = Univ.make_abstract_instance univs in
let bl = UnivNames.universe_binders_with_opt_names ref udecl in
let sigma = Evd.from_ctx (UState.of_binders bl) in
@@ -147,7 +147,7 @@ let print_renames_list prefix l =
hv 2 (prlist_with_sep pr_comma (fun x -> x) (List.map Name.print l))]
let need_expansion impl ref =
- let typ, _ = Global.type_of_global_in_context (Global.env ()) ref in
+ let typ, _ = Typeops.type_of_global_in_context (Global.env ()) ref in
let ctx = Term.prod_assum typ in
let nprods = List.count is_local_assum ctx in
not (List.is_empty impl) && List.length impl >= nprods &&
@@ -823,7 +823,7 @@ let print_opaque_name env sigma qid =
| IndRef (sp,_) ->
print_inductive sp None
| ConstructRef cstr as gr ->
- let ty, ctx = Global.type_of_global_in_context env gr in
+ let ty, ctx = Typeops.type_of_global_in_context env gr in
let ty = EConstr.of_constr ty in
let open EConstr in
print_typed_value_in_env env sigma (mkConstruct cstr, ty)
diff --git a/printing/printer.ml b/printing/printer.ml
index 990bdaad7d..3cf995a005 100644
--- a/printing/printer.ml
+++ b/printing/printer.ml
@@ -15,7 +15,6 @@ open Names
open Constr
open Environ
open Globnames
-open Nametab
open Evd
open Refiner
open Constrextern
@@ -242,7 +241,7 @@ let pr_abstract_cumulativity_info sigma cumi =
(**********************************************************************)
(* Global references *)
-let pr_global_env = pr_global_env
+let pr_global_env = Nametab.pr_global_env
let pr_global = pr_global_env Id.Set.empty
let pr_universe_instance evd inst =
diff --git a/proofs/clenv.ml b/proofs/clenv.ml
index 95e908c4dd..d25ae38c53 100644
--- a/proofs/clenv.ml
+++ b/proofs/clenv.ml
@@ -324,21 +324,21 @@ let adjust_meta_source evd mv = function
*)
let clenv_pose_metas_as_evars clenv dep_mvs =
- let rec fold clenv = function
- | [] -> clenv
+ let rec fold clenv evs = function
+ | [] -> clenv, evs
| mv::mvs ->
let ty = clenv_meta_type clenv mv in
(* Postpone the evar-ization if dependent on another meta *)
(* This assumes no cycle in the dependencies - is it correct ? *)
- if occur_meta clenv.evd ty then fold clenv (mvs@[mv])
+ if occur_meta clenv.evd ty then fold clenv evs (mvs@[mv])
else
let src = evar_source_of_meta mv clenv.evd in
let src = adjust_meta_source clenv.evd mv src in
let evd = clenv.evd in
let (evd, evar) = new_evar (cl_env clenv) evd ~src ty in
let clenv = clenv_assign mv evar {clenv with evd=evd} in
- fold clenv mvs in
- fold clenv dep_mvs
+ fold clenv (fst (destEvar evd evar) :: evs) mvs in
+ fold clenv [] dep_mvs
(******************************************************************)
@@ -608,8 +608,7 @@ let make_evar_clause env sigma ?len t =
else match EConstr.kind sigma t with
| Cast (t, _, _) -> clrec (sigma, holes) n t
| Prod (na, t1, t2) ->
- let store = Typeclasses.set_resolvable Evd.Store.empty false in
- let (sigma, ev) = new_evar ~store env sigma t1 in
+ let (sigma, ev) = new_evar env sigma ~typeclass_candidate:false t1 in
let dep = not (noccurn sigma 1 t2) in
let hole = {
hole_evar = ev;
diff --git a/proofs/clenv.mli b/proofs/clenv.mli
index f9506290a0..03acb9e46e 100644
--- a/proofs/clenv.mli
+++ b/proofs/clenv.mli
@@ -72,7 +72,7 @@ val clenv_unique_resolver :
val clenv_dependent : clausenv -> metavariable list
-val clenv_pose_metas_as_evars : clausenv -> metavariable list -> clausenv
+val clenv_pose_metas_as_evars : clausenv -> metavariable list -> clausenv * Evar.t list
(** {6 Bindings } *)
diff --git a/proofs/clenvtac.ml b/proofs/clenvtac.ml
index ba4cde6d67..77f5804665 100644
--- a/proofs/clenvtac.ml
+++ b/proofs/clenvtac.ml
@@ -62,37 +62,19 @@ let clenv_pose_dependent_evars ?(with_evars=false) clenv =
(RefinerError (env, sigma, UnresolvedBindings (List.map (meta_name clenv.evd) dep_mvs)));
clenv_pose_metas_as_evars clenv dep_mvs
-(** Use our own fast path, more informative than from Typeclasses *)
-let check_tc evd =
- let has_resolvable = ref false in
- let check _ evi =
- let res = Typeclasses.is_resolvable evi in
- if res then
- let () = has_resolvable := true in
- Typeclasses.is_class_evar evd evi
- else false
- in
- let has_typeclass = Evar.Map.exists check (Evd.undefined_map evd) in
- (has_typeclass, !has_resolvable)
-
let clenv_refine ?(with_evars=false) ?(with_classes=true) clenv =
(** ppedrot: a Goal.enter here breaks things, because the tactic below may
solve goals by side effects, while the compatibility layer keeps those
useless goals. That deserves a FIXME. *)
Proofview.V82.tactic begin fun gl ->
- let clenv = clenv_pose_dependent_evars ~with_evars clenv in
+ let clenv, evars = clenv_pose_dependent_evars ~with_evars clenv in
let evd' =
if with_classes then
- let (has_typeclass, has_resolvable) = check_tc clenv.evd in
let evd' =
- if has_typeclass then
- Typeclasses.resolve_typeclasses ~fast_path:false ~filter:Typeclasses.all_evars
+ Typeclasses.resolve_typeclasses ~filter:Typeclasses.all_evars
~fail:(not with_evars) ~split:false clenv.env clenv.evd
- else clenv.evd
in
- if has_resolvable then
- Typeclasses.mark_unresolvables ~filter:Typeclasses.all_goals evd'
- else evd'
+ Typeclasses.make_unresolvables (fun x -> List.mem_f Evar.equal x evars) evd'
else clenv.evd
in
let clenv = { clenv with evd = evd' } in
@@ -101,6 +83,9 @@ let clenv_refine ?(with_evars=false) ?(with_classes=true) clenv =
(refine_no_check (clenv_cast_meta clenv (clenv_value clenv))) gl
end
+let clenv_pose_dependent_evars ?(with_evars=false) clenv =
+ fst (clenv_pose_dependent_evars ~with_evars clenv)
+
open Unification
let dft = default_unify_flags
diff --git a/proofs/goal.ml b/proofs/goal.ml
index c14c0a8a77..4e540de538 100644
--- a/proofs/goal.ml
+++ b/proofs/goal.ml
@@ -50,13 +50,8 @@ module V82 = struct
let evi = Evd.find evars gl in
evi.Evd.evar_concl
- (* Access to ".evar_extra" *)
- let extra evars gl =
- let evi = Evd.find evars gl in
- evi.Evd.evar_extra
-
(* Old style mk_goal primitive *)
- let mk_goal evars hyps concl extra =
+ let mk_goal evars hyps concl =
(* A goal created that way will not be used by refine and will not
be shelved. It must not appear as a future_goal, so the future
goals are restored to their initial value after the evar is
@@ -67,11 +62,9 @@ module V82 = struct
Evd.evar_filter = Evd.Filter.identity;
Evd.evar_body = Evd.Evar_empty;
Evd.evar_source = (Loc.tag Evar_kinds.GoalEvar);
- Evd.evar_candidates = None;
- Evd.evar_extra = extra }
+ Evd.evar_candidates = None }
in
- let evi = Typeclasses.mark_unresolvable evi in
- let (evars, evk) = Evarutil.new_pure_evar_full evars evi in
+ let (evars, evk) = Evarutil.new_pure_evar_full evars ~typeclass_candidate:false evi in
let evars = Evd.restore_future_goals evars prev_future_goals in
let ctxt = Environ.named_context_of_val hyps in
let inst = Array.map_of_list (NamedDecl.get_id %> EConstr.mkVar) ctxt in
diff --git a/proofs/goal.mli b/proofs/goal.mli
index a033d6daab..3b31cff8d7 100644
--- a/proofs/goal.mli
+++ b/proofs/goal.mli
@@ -39,16 +39,12 @@ module V82 : sig
(* Access to ".evar_concl" *)
val concl : Evd.evar_map -> goal -> EConstr.constr
- (* Access to ".evar_extra" *)
- val extra : Evd.evar_map -> goal -> Evd.Store.t
-
- (* Old style mk_goal primitive, returns a new goal with corresponding
+ (* Old style mk_goal primitive, returns a new goal with corresponding
hypotheses and conclusion, together with a term which is precisely
the evar corresponding to the goal, and an updated evar_map. *)
val mk_goal : Evd.evar_map ->
Environ.named_context_val ->
EConstr.constr ->
- Evd.Store.t ->
goal * EConstr.constr * Evd.evar_map
(* Instantiates a goal with an open term *)
diff --git a/proofs/logic.ml b/proofs/logic.ml
index 285240872e..254c93d0a2 100644
--- a/proofs/logic.ml
+++ b/proofs/logic.ml
@@ -350,7 +350,7 @@ let rec mk_refgoals sigma goal goalacc conclty trm =
let env = Goal.V82.env sigma goal in
let hyps = Goal.V82.hyps sigma goal in
let mk_goal hyps concl =
- Goal.V82.mk_goal sigma hyps concl (Goal.V82.extra sigma goal)
+ Goal.V82.mk_goal sigma hyps concl
in
if (not !check) && not (occur_meta sigma (EConstr.of_constr trm)) then
let t'ty = Retyping.get_type_of env sigma (EConstr.of_constr trm) in
@@ -433,7 +433,7 @@ and mk_hdgoals sigma goal goalacc trm =
let env = Goal.V82.env sigma goal in
let hyps = Goal.V82.hyps sigma goal in
let mk_goal hyps concl =
- Goal.V82.mk_goal sigma hyps concl (Goal.V82.extra sigma goal) in
+ Goal.V82.mk_goal sigma hyps concl in
match kind trm with
| Cast (c,_, ty) when isMeta c ->
check_typability env sigma ty;
diff --git a/proofs/proof.ml b/proofs/proof.ml
index 70a08e4966..8220949856 100644
--- a/proofs/proof.ml
+++ b/proofs/proof.ml
@@ -386,7 +386,7 @@ let run_tactic env tac pr =
(* Check that retrieved given up is empty *)
if not (List.is_empty retrieved_given_up) then
CErrors.anomaly Pp.(str "Evars generated outside of proof engine (e.g. V82, clear, ...) are not supposed to be explicitly given up.");
- let sigma = List.fold_left Proofview.Unsafe.mark_as_goal sigma retrieved in
+ let sigma = Proofview.Unsafe.mark_as_goals sigma retrieved in
Proofview.Unsafe.tclEVARS sigma >>= fun () ->
Proofview.tclUNIT retrieved
in
diff --git a/proofs/refine.ml b/proofs/refine.ml
index 05474d5f84..540a8bb420 100644
--- a/proofs/refine.ml
+++ b/proofs/refine.ml
@@ -105,7 +105,7 @@ let generic_refine ~typecheck f gl =
| Some id -> Evd.rename evk id sigma
in
(** Mark goals *)
- let sigma = CList.fold_left Proofview.Unsafe.mark_as_goal sigma comb in
+ let sigma = Proofview.Unsafe.mark_as_goals sigma comb in
let comb = CList.map (fun x -> Proofview.goal_with_state x state) comb in
let trace () = Pp.(hov 2 (str"simple refine"++spc()++
Termops.Internal.print_constr_env env sigma c)) in
diff --git a/proofs/tacmach.ml b/proofs/tacmach.ml
index 5d1faf1465..388bf8efb5 100644
--- a/proofs/tacmach.ml
+++ b/proofs/tacmach.ml
@@ -68,7 +68,10 @@ let pf_ids_set_of_hyps gls =
let pf_get_new_id id gls =
next_ident_away id (pf_ids_set_of_hyps gls)
-let pf_global gls id = EConstr.of_constr (UnivGen.constr_of_global (Constrintern.construct_reference (pf_hyps gls) id))
+let pf_global gls id =
+ let env = pf_env gls in
+ let sigma = project gls in
+ Evd.fresh_global env sigma (Constrintern.construct_reference (pf_hyps gls) id)
let pf_reduction_of_red_expr gls re c =
let (redfun, _) = reduction_of_red_expr (pf_env gls) re in
diff --git a/proofs/tacmach.mli b/proofs/tacmach.mli
index 3432ad4afa..f302960870 100644
--- a/proofs/tacmach.mli
+++ b/proofs/tacmach.mli
@@ -34,7 +34,7 @@ val pf_hyps_types : goal sigma -> (Id.t * types) list
val pf_nth_hyp_id : goal sigma -> int -> Id.t
val pf_last_hyp : goal sigma -> named_declaration
val pf_ids_of_hyps : goal sigma -> Id.t list
-val pf_global : goal sigma -> Id.t -> constr
+val pf_global : goal sigma -> Id.t -> evar_map * constr
val pf_unsafe_type_of : goal sigma -> constr -> types
val pf_type_of : goal sigma -> constr -> evar_map * types
val pf_hnf_type_of : goal sigma -> constr -> types
diff --git a/tactics/autorewrite.ml b/tactics/autorewrite.ml
index 8e296de617..76cbdee0d5 100644
--- a/tactics/autorewrite.ml
+++ b/tactics/autorewrite.ml
@@ -226,7 +226,7 @@ let decompose_applied_relation metas env sigma c ctype left2right =
let eqclause = Clenv.mk_clenv_from_env env sigma None (EConstr.of_constr c,ty) in
let eqclause =
if metas then eqclause
- else clenv_pose_metas_as_evars eqclause (Evd.undefined_metas eqclause.evd)
+ else fst (clenv_pose_metas_as_evars eqclause (Evd.undefined_metas eqclause.evd))
in
let (equiv, args) = EConstr.decompose_app sigma (Clenv.clenv_type eqclause) in
let rec split_last_two = function
diff --git a/tactics/class_tactics.ml b/tactics/class_tactics.ml
index 9bd406e14d..81cf9289d1 100644
--- a/tactics/class_tactics.ml
+++ b/tactics/class_tactics.ml
@@ -494,15 +494,15 @@ let top_sort evm undefs =
let tosee = ref undefs in
let rec visit ev evi =
let evs = Evarutil.undefined_evars_of_evar_info evm evi in
- tosee := Evar.Map.remove ev !tosee;
+ tosee := Evar.Set.remove ev !tosee;
Evar.Set.iter (fun ev ->
- if Evar.Map.mem ev !tosee then
- visit ev (Evar.Map.find ev !tosee)) evs;
+ if Evar.Set.mem ev !tosee then
+ visit ev (Evd.find evm ev)) evs;
l' := ev :: !l';
in
- while not (Evar.Map.is_empty !tosee) do
- let ev, evi = Evar.Map.min_binding !tosee in
- visit ev evi
+ while not (Evar.Set.is_empty !tosee) do
+ let ev = Evar.Set.choose !tosee in
+ visit ev (Evd.find evm ev)
done;
List.rev !l'
@@ -512,15 +512,9 @@ let top_sort evm undefs =
*)
let evars_to_goals p evm =
- let goals = ref Evar.Map.empty in
- let map ev evi =
- let evi, goal = p evm ev evi in
- let () = if goal then goals := Evar.Map.add ev evi !goals in
- evi
- in
- let evm = Evd.raw_map_undefined map evm in
- if Evar.Map.is_empty !goals then None
- else Some (!goals, evm)
+ let goals, nongoals = Evar.Set.partition (p evm) (Evd.get_typeclass_evars evm) in
+ if Evar.Set.is_empty goals then None
+ else Some (goals, nongoals)
(** Making local hints *)
let make_resolve_hyp env sigma st flags only_classes pri decl =
@@ -641,14 +635,6 @@ module Search = struct
occur_existential evd concl
else true
- let mark_unresolvables sigma goals =
- List.fold_left
- (fun sigma gl ->
- let evi = Evd.find_undefined sigma gl in
- let evi' = Typeclasses.mark_unresolvable evi in
- Evd.add sigma gl evi')
- sigma goals
-
(** The general hint application tactic.
tac1 + tac2 .... The choice of OR or ORELSE is determined
depending on the dependencies of the goal and the unique/Prop
@@ -779,7 +765,7 @@ module Search = struct
shelve_goals shelved <*>
(if List.is_empty goals then tclUNIT ()
else
- let sigma' = mark_unresolvables sigma goals in
+ let sigma' = make_unresolvables (fun x -> List.mem_f Evar.equal x goals) sigma in
with_shelf (Unsafe.tclEVARS sigma' <*> Unsafe.tclNEWGOALS (CList.map Proofview.with_empty_state goals)) >>=
fun s -> result s i (Some (Option.default 0 k + j)))
end
@@ -941,14 +927,15 @@ module Search = struct
let run_on_evars env evm p tac =
match evars_to_goals p evm with
| None -> None (* This happens only because there's no evar having p *)
- | Some (goals, evm') ->
+ | Some (goals, nongoals) ->
let goals =
if !typeclasses_dependency_order then
- top_sort evm' goals
- else List.map (fun (ev, _) -> ev) (Evar.Map.bindings goals)
+ top_sort evm goals
+ else Evar.Set.elements goals
in
+ let evm = Evd.set_typeclass_evars evm Evar.Set.empty in
let fgoals = Evd.save_future_goals evm in
- let _, pv = Proofview.init evm' [] in
+ let _, pv = Proofview.init evm [] in
let pv = Proofview.unshelve goals pv in
try
let (), pv', (unsafe, shelved, gaveup), _ =
@@ -967,7 +954,13 @@ module Search = struct
acc && okev) evm' true);
let fgoals = Evd.shelve_on_future_goals shelved fgoals in
let evm' = Evd.restore_future_goals evm' fgoals in
+ let nongoals' =
+ Evar.Set.fold (fun ev acc -> match Evarutil.advance evm' ev with
+ | Some ev' -> Evar.Set.add ev acc
+ | None -> acc) nongoals (Evd.get_typeclass_evars evm')
+ in
let evm' = evars_reset_evd ~with_conv_pbs:true ~with_univs:false evm' evm in
+ let evm' = Evd.set_typeclass_evars evm' nongoals' in
Some evm'
else raise Not_found
with Logic_monad.TacticFailure _ -> raise Not_found
@@ -1019,7 +1012,7 @@ let deps_of_constraints cstrs evm p =
let evar_dependencies pred evm p =
Evd.fold_undefined
(fun ev evi _ ->
- if Typeclasses.is_resolvable evi && pred evm ev evi then
+ if Evd.is_typeclass_evar evm ev && pred evm ev evi then
let evars = Evar.Set.add ev (Evarutil.undefined_evars_of_evar_info evm evi)
in Intpart.union_set evars p
else ())
@@ -1035,8 +1028,7 @@ let split_evars pred evm =
let is_inference_forced p evd ev =
try
- let evi = Evd.find_undefined evd ev in
- if Typeclasses.is_resolvable evi && snd (p ev evi)
+ if Evar.Set.mem ev (Evd.get_typeclass_evars evd) && p ev
then
let (loc, k) = evar_source ev evd in
match k with
@@ -1068,55 +1060,32 @@ let error_unresolvable env comp evd =
Pretype_errors.unsatisfiable_constraints env evd ev comp
(** Check if an evar is concerned by the current resolution attempt,
- (and in particular is in the current component), and also update
- its evar_info.
- Invariant : this should only be applied to undefined evars,
- and return undefined evar_info *)
+ (and in particular is in the current component).
+ Invariant : this should only be applied to undefined evars. *)
-let select_and_update_evars p oevd in_comp evd ev evi =
- assert (evi.evar_body == Evar_empty);
+let select_and_update_evars p oevd in_comp evd ev =
try
- let oevi = Evd.find_undefined oevd ev in
- if Typeclasses.is_resolvable oevi then
- Typeclasses.mark_unresolvable evi,
- (in_comp ev && p evd ev evi)
- else evi, false
- with Not_found ->
- Typeclasses.mark_unresolvable evi, p evd ev evi
+ if Evd.is_typeclass_evar oevd ev then
+ (in_comp ev && p evd ev (Evd.find evd ev))
+ else false
+ with Not_found -> false
(** Do we still have unresolved evars that should be resolved ? *)
let has_undefined p oevd evd =
- let check ev evi = snd (p oevd ev evi) in
+ let check ev evi = p oevd ev in
Evar.Map.exists check (Evd.undefined_map evd)
-(** Revert the resolvability status of evars after resolution,
- potentially unprotecting some evars that were set unresolvable
- just for this call to resolution. *)
-
-let revert_resolvability oevd evd =
- let map ev evi =
- try
- if not (Typeclasses.is_resolvable evi) then
- let evi' = Evd.find_undefined oevd ev in
- if Typeclasses.is_resolvable evi' then
- Typeclasses.mark_resolvable evi
- else evi
- else evi
- with Not_found -> evi
- in
- Evd.raw_map_undefined map evd
-
exception Unresolved
(** If [do_split] is [true], we try to separate the problem in
several components and then solve them separately *)
let resolve_all_evars debug depth unique env p oevd do_split fail =
- let split = if do_split then split_evars p oevd else [Evar.Set.empty] in
- let in_comp comp ev = if do_split then Evar.Set.mem ev comp else true
- in
+ let tcs = Evd.get_typeclass_evars oevd in
+ let split = if do_split then split_evars p oevd else [tcs] in
+ let in_comp comp ev = if do_split then Evar.Set.mem ev comp else true in
let rec docomp evd = function
- | [] -> revert_resolvability oevd evd
+ | [] -> evd
| comp :: comps ->
let p = select_and_update_evars p oevd (in_comp comp) in
try
@@ -1134,7 +1103,9 @@ let resolve_all_evars debug depth unique env p oevd do_split fail =
let initial_select_evars filter =
fun evd ev evi ->
- filter ev (snd evi.Evd.evar_source) &&
+ filter ev (Lazy.from_val (snd evi.Evd.evar_source)) &&
+ (** Typeclass evars can contain evars whose conclusion is not
+ yet determined to be a class or not. *)
Typeclasses.is_class_evar evd evi
let resolve_typeclass_evars debug depth unique env evd filter split fail =
@@ -1161,8 +1132,7 @@ let _ =
let resolve_one_typeclass env ?(sigma=Evd.from_env env) gl unique =
let (term, sigma) = Hints.wrap_hint_warning_fun env sigma begin fun sigma ->
let nc, gl, subst, _ = Evarutil.push_rel_context_to_named_context env sigma gl in
- let (gl,t,sigma) =
- Goal.V82.mk_goal sigma nc gl Store.empty in
+ let (gl,t,sigma) = Goal.V82.mk_goal sigma nc gl in
let (ev, _) = destEvar sigma t in
let gls = { it = gl ; sigma = sigma; } in
let hints = searchtable_map typeclasses_db in
@@ -1227,5 +1197,6 @@ let autoapply c i =
unify_e_resolve false flags gl
((c,cty,Univ.ContextSet.empty),0,ce) <*>
Proofview.tclEVARMAP >>= (fun sigma ->
- let sigma = Typeclasses.mark_unresolvables ~filter:Typeclasses.all_goals sigma in
+ let sigma = Typeclasses.make_unresolvables
+ (fun ev -> Typeclasses.all_goals ev (Lazy.from_val (snd (Evd.find sigma ev).evar_source))) sigma in
Proofview.Unsafe.tclEVARS sigma) end
diff --git a/tactics/eqdecide.ml b/tactics/eqdecide.ml
index f2bc679aac..6388aa2c33 100644
--- a/tactics/eqdecide.ml
+++ b/tactics/eqdecide.ml
@@ -72,11 +72,10 @@ let choose_noteq eqonleft =
let generalize_right mk typ c1 c2 =
Proofview.Goal.enter begin fun gl ->
let env = Proofview.Goal.env gl in
- let store = Proofview.Goal.extra gl in
Refine.refine ~typecheck:false begin fun sigma ->
let na = Name (next_name_away_with_default "x" Anonymous (Termops.vars_of_env env)) in
let newconcl = mkProd (na, typ, mk typ c1 (mkRel 1)) in
- let (sigma, x) = Evarutil.new_evar env sigma ~principal:true ~store newconcl in
+ let (sigma, x) = Evarutil.new_evar env sigma ~principal:true newconcl in
(sigma, mkApp (x, [|c2|]))
end
end
diff --git a/tactics/hints.ml b/tactics/hints.ml
index 0c10f32c86..2f2d32e887 100644
--- a/tactics/hints.ml
+++ b/tactics/hints.ml
@@ -942,7 +942,7 @@ let make_extern pri pat tacast =
let make_mode ref m =
let open Term in
- let ty, _ = Global.type_of_global_in_context (Global.env ()) ref in
+ let ty, _ = Typeops.type_of_global_in_context (Global.env ()) ref in
let ctx, t = decompose_prod ty in
let n = List.length ctx in
let m' = Array.of_list m in
diff --git a/tactics/tactics.ml b/tactics/tactics.ml
index 18ddc9318d..a6a104ccca 100644
--- a/tactics/tactics.ml
+++ b/tactics/tactics.ml
@@ -117,14 +117,14 @@ let _ =
(** This tactic creates a partial proof realizing the introduction rule, but
does not check anything. *)
-let unsafe_intro env store decl b =
+let unsafe_intro env decl b =
Refine.refine ~typecheck:false begin fun sigma ->
let ctx = named_context_val env in
let nctx = push_named_context_val decl ctx in
let inst = List.map (NamedDecl.get_id %> mkVar) (named_context env) in
let ninst = mkRel 1 :: inst in
let nb = subst1 (mkVar (NamedDecl.get_id decl)) b in
- let (sigma, ev) = new_evar_instance nctx sigma nb ~principal:true ~store ninst in
+ let (sigma, ev) = new_evar_instance nctx sigma nb ~principal:true ninst in
(sigma, mkLambda_or_LetIn (NamedDecl.to_rel_decl decl) ev)
end
@@ -133,7 +133,6 @@ let introduction id =
let concl = Proofview.Goal.concl gl in
let sigma = Tacmach.New.project gl in
let hyps = named_context_val (Proofview.Goal.env gl) in
- let store = Proofview.Goal.extra gl in
let env = Proofview.Goal.env gl in
let () = if mem_named_context_val id hyps then
user_err ~hdr:"Tactics.introduction"
@@ -141,8 +140,8 @@ let introduction id =
in
let open Context.Named.Declaration in
match EConstr.kind sigma concl with
- | Prod (_, t, b) -> unsafe_intro env store (LocalAssum (id, t)) b
- | LetIn (_, c, t, b) -> unsafe_intro env store (LocalDef (id, c, t)) b
+ | Prod (_, t, b) -> unsafe_intro env (LocalAssum (id, t)) b
+ | LetIn (_, c, t, b) -> unsafe_intro env (LocalDef (id, c, t)) b
| _ -> raise (RefinerError (env, sigma, IntroNeedsProduct))
end
@@ -152,7 +151,6 @@ let error msg = CErrors.user_err Pp.(str msg)
let convert_concl ?(check=true) ty k =
Proofview.Goal.enter begin fun gl ->
let env = Proofview.Goal.env gl in
- let store = Proofview.Goal.extra gl in
let conclty = Proofview.Goal.concl gl in
Refine.refine ~typecheck:false begin fun sigma ->
let sigma =
@@ -162,7 +160,7 @@ let convert_concl ?(check=true) ty k =
| None -> error "Not convertible."
| Some sigma -> sigma
end else sigma in
- let (sigma, x) = Evarutil.new_evar env sigma ~principal:true ~store ty in
+ let (sigma, x) = Evarutil.new_evar env sigma ~principal:true ty in
let ans = if k == DEFAULTcast then x else mkCast(x,k,conclty) in
(sigma, ans)
end
@@ -173,11 +171,10 @@ let convert_hyp ?(check=true) d =
let env = Proofview.Goal.env gl in
let sigma = Tacmach.New.project gl in
let ty = Proofview.Goal.concl gl in
- let store = Proofview.Goal.extra gl in
let sign = convert_hyp check (named_context_val env) sigma d in
let env = reset_with_named_context sign env in
Refine.refine ~typecheck:false begin fun sigma ->
- Evarutil.new_evar env sigma ~principal:true ~store ty
+ Evarutil.new_evar env sigma ~principal:true ty
end
end
@@ -284,12 +281,11 @@ let move_hyp id dest =
let env = Proofview.Goal.env gl in
let sigma = Tacmach.New.project gl in
let ty = Proofview.Goal.concl gl in
- let store = Proofview.Goal.extra gl in
let sign = named_context_val env in
let sign' = move_hyp_in_named_context env sigma id dest sign in
let env = reset_with_named_context sign' env in
Refine.refine ~typecheck:false begin fun sigma ->
- Evarutil.new_evar env sigma ~principal:true ~store ty
+ Evarutil.new_evar env sigma ~principal:true ty
end
end
@@ -313,7 +309,6 @@ let rename_hyp repl =
Proofview.Goal.enter begin fun gl ->
let hyps = Proofview.Goal.hyps gl in
let concl = Proofview.Goal.concl gl in
- let store = Proofview.Goal.extra gl in
let env = Proofview.Goal.env gl in
let sigma = Proofview.Goal.sigma gl in
(** Check that we do not mess variables *)
@@ -344,7 +339,7 @@ let rename_hyp repl =
let nctx = val_of_named_context nhyps in
let instance = List.map (NamedDecl.get_id %> mkVar) hyps in
Refine.refine ~typecheck:false begin fun sigma ->
- Evarutil.new_evar_instance nctx sigma nconcl ~principal:true ~store instance
+ Evarutil.new_evar_instance nctx sigma nconcl ~principal:true instance
end
end
@@ -445,7 +440,6 @@ let internal_cut_gen ?(check=true) dir replace id t =
let env = Proofview.Goal.env gl in
let sigma = Tacmach.New.project gl in
let concl = Proofview.Goal.concl gl in
- let store = Proofview.Goal.extra gl in
let sign = named_context_val env in
let sign',t,concl,sigma =
if replace then
@@ -464,10 +458,10 @@ let internal_cut_gen ?(check=true) dir replace id t =
let (sigma,ev,ev') =
if dir then
let (sigma, ev) = Evarutil.new_evar_from_context sign sigma nf_t in
- let (sigma, ev') = Evarutil.new_evar_from_context sign' sigma ~principal:true ~store concl in
+ let (sigma, ev') = Evarutil.new_evar_from_context sign' sigma ~principal:true concl in
(sigma,ev,ev')
else
- let (sigma, ev') = Evarutil.new_evar_from_context sign' sigma ~principal:true ~store concl in
+ let (sigma, ev') = Evarutil.new_evar_from_context sign' sigma ~principal:true concl in
let (sigma, ev) = Evarutil.new_evar_from_context sign sigma nf_t in
(sigma,ev,ev') in
let term = mkLetIn (Name id, ev, t, EConstr.Vars.subst_var id ev') in
@@ -2102,11 +2096,10 @@ let keep hyps =
let apply_type ~typecheck newcl args =
Proofview.Goal.enter begin fun gl ->
let env = Proofview.Goal.env gl in
- let store = Proofview.Goal.extra gl in
Refine.refine ~typecheck begin fun sigma ->
let newcl = nf_betaiota env sigma newcl (* As in former Logic.refine *) in
let (sigma, ev) =
- Evarutil.new_evar env sigma ~principal:true ~store newcl in
+ Evarutil.new_evar env sigma ~principal:true newcl in
(sigma, applist (ev, args))
end
end
@@ -2120,13 +2113,12 @@ let bring_hyps hyps =
else
Proofview.Goal.enter begin fun gl ->
let env = Proofview.Goal.env gl in
- let store = Proofview.Goal.extra gl in
let concl = Tacmach.New.pf_concl gl in
let newcl = List.fold_right mkNamedProd_or_LetIn hyps concl in
let args = Array.of_list (Context.Named.to_instance mkVar hyps) in
Refine.refine ~typecheck:false begin fun sigma ->
let (sigma, ev) =
- Evarutil.new_evar env sigma ~principal:true ~store newcl in
+ Evarutil.new_evar env sigma ~principal:true newcl in
(sigma, mkApp (ev, args))
end
end
@@ -2668,7 +2660,7 @@ let mk_eq_name env id {CAst.loc;v=ido} =
(* unsafe *)
-let mkletin_goal env sigma store with_eq dep (id,lastlhyp,ccl,c) ty =
+let mkletin_goal env sigma with_eq dep (id,lastlhyp,ccl,c) ty =
let open Context.Named.Declaration in
let t = match ty with Some t -> t | _ -> typ_of env sigma c in
let decl = if dep then LocalDef (id,c,t)
@@ -2683,11 +2675,11 @@ let mkletin_goal env sigma store with_eq dep (id,lastlhyp,ccl,c) ty =
let eq = applist (eq,args) in
let refl = applist (refl, [t;mkVar id]) in
let newenv = insert_before [LocalAssum (heq,eq); decl] lastlhyp env in
- let (sigma, x) = new_evar newenv sigma ~principal:true ~store ccl in
+ let (sigma, x) = new_evar newenv sigma ~principal:true ccl in
(sigma, mkNamedLetIn id c t (mkNamedLetIn heq refl eq x))
| None ->
let newenv = insert_before [decl] lastlhyp env in
- let (sigma, x) = new_evar newenv sigma ~principal:true ~store ccl in
+ let (sigma, x) = new_evar newenv sigma ~principal:true ccl in
(sigma, mkNamedLetIn id c t x)
let pose_tac na c =
@@ -4431,7 +4423,6 @@ let pose_induction_arg_then isrec with_evars (is_arg_pure_hyp,from_prefix) elim
let sigma = Proofview.Goal.sigma gl in
let env = Proofview.Goal.env gl in
let ccl = Proofview.Goal.concl gl in
- let store = Proofview.Goal.extra gl in
let check = check_enough_applied env sigma elim in
let (sigma', c) = use_bindings env sigma elim false (c0,lbind) t0 in
let abs = AbstractPattern (from_prefix,check,Name id,(pending,c),cls,false) in
@@ -4457,7 +4448,7 @@ let pose_induction_arg_then isrec with_evars (is_arg_pure_hyp,from_prefix) elim
let b = not with_evars && with_eq != None in
let (sigma, c) = use_bindings env sigma elim b (c0,lbind) t0 in
let t = Retyping.get_type_of env sigma c in
- mkletin_goal env sigma store with_eq false (id,lastlhyp,ccl,c) (Some t)
+ mkletin_goal env sigma with_eq false (id,lastlhyp,ccl,c) (Some t)
end;
if with_evars then Proofview.shelve_unifiable else guard_no_unifiable;
if is_arg_pure_hyp
@@ -4478,7 +4469,7 @@ let pose_induction_arg_then isrec with_evars (is_arg_pure_hyp,from_prefix) elim
let tac =
Tacticals.New.tclTHENLIST [
Refine.refine ~typecheck:false begin fun sigma ->
- mkletin_goal env sigma store with_eq true (id,lastlhyp,ccl,c) None
+ mkletin_goal env sigma with_eq true (id,lastlhyp,ccl,c) None
end;
(tac inhyps)
]
diff --git a/test-suite/bugs/closed/bug_8785.v b/test-suite/bugs/closed/bug_8785.v
new file mode 100644
index 0000000000..b10569499e
--- /dev/null
+++ b/test-suite/bugs/closed/bug_8785.v
@@ -0,0 +1,44 @@
+Universe u v w.
+Inductive invertible {X:Type@{u}} {Y:Type} (f:X->Y) : Prop := .
+
+Inductive FiniteT : Type -> Prop :=
+ | add_finite: forall T:Type@{v}, FiniteT T -> FiniteT (option T)
+ | bij_finite: forall (X:Type@{w}) (Y:Type) (f:X->Y), FiniteT X ->
+ invertible f -> FiniteT Y.
+
+Set Printing Universes.
+
+Axiom a : False.
+(*
+Constraint v <= u.
+Constraint v <= w.
+*)
+Lemma finite_subtype: forall (X:Type) (P:X->Prop),
+ FiniteT X -> (forall x:X, P x \/ ~ P x) ->
+ FiniteT {x:X | P x}.
+Proof.
+intros.
+induction H.
+
+destruct (H0 None).
+elim a.
+
+pose (g := fun (x:{x:T | P (Some x)}) =>
+ match x return {x:option T | P x} with
+ | exist _ x0 i => exist (fun x:option T => P x) (Some x0) i
+ end).
+apply bij_finite with _ g.
+apply IHFiniteT.
+intro; apply H0.
+elim a.
+
+pose (g := fun (x:{x:X | P (f x)}) =>
+ match x with
+ | exist _ x0 i => exist (fun x:Y => P x) (f x0) i
+ end).
+apply bij_finite with _ g.
+apply IHFiniteT.
+intro; apply H0.
+elim a.
+
+Qed.
diff --git a/test-suite/bugs/closed/bug_8794.v b/test-suite/bugs/closed/bug_8794.v
new file mode 100644
index 0000000000..5ff0b30260
--- /dev/null
+++ b/test-suite/bugs/closed/bug_8794.v
@@ -0,0 +1,11 @@
+(* This used to raise an anomaly in 8.8 *)
+
+Inductive T := Tau (t : T).
+
+Notation idT t := (match t with Tau t => Tau t end).
+
+Lemma match_itree : forall (t : T), t = idT t.
+Proof. destruct t; auto. Qed.
+
+Lemma what (k : unit -> T) : k tt = k tt.
+Proof. rewrite match_itree. Abort.
diff --git a/test-suite/coq-makefile/arg/_CoqProject b/test-suite/coq-makefile/arg/_CoqProject
index 53dc963997..ed31a58247 100644
--- a/test-suite/coq-makefile/arg/_CoqProject
+++ b/test-suite/coq-makefile/arg/_CoqProject
@@ -4,7 +4,7 @@
-arg "-w default"
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/compat-subdirs/_CoqProject b/test-suite/coq-makefile/compat-subdirs/_CoqProject
index 4f44bde22a..1f914a71b0 100644
--- a/test-suite/coq-makefile/compat-subdirs/_CoqProject
+++ b/test-suite/coq-makefile/compat-subdirs/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/coqdoc1/_CoqProject b/test-suite/coq-makefile/coqdoc1/_CoqProject
index 35792066bb..aa9473eaf0 100644
--- a/test-suite/coq-makefile/coqdoc1/_CoqProject
+++ b/test-suite/coq-makefile/coqdoc1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/coqdoc2/_CoqProject b/test-suite/coq-makefile/coqdoc2/_CoqProject
index d2a547d47b..0068554d72 100644
--- a/test-suite/coq-makefile/coqdoc2/_CoqProject
+++ b/test-suite/coq-makefile/coqdoc2/_CoqProject
@@ -3,7 +3,7 @@
-I src/
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/emptyprefix/_CoqProject b/test-suite/coq-makefile/emptyprefix/_CoqProject
index 5678a8edbb..3133342f6c 100644
--- a/test-suite/coq-makefile/emptyprefix/_CoqProject
+++ b/test-suite/coq-makefile/emptyprefix/_CoqProject
@@ -4,7 +4,7 @@
-arg "-w default"
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/extend-subdirs/_CoqProject b/test-suite/coq-makefile/extend-subdirs/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/extend-subdirs/_CoqProject
+++ b/test-suite/coq-makefile/extend-subdirs/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/findlib-package/_CoqProject b/test-suite/coq-makefile/findlib-package/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/findlib-package/_CoqProject
+++ b/test-suite/coq-makefile/findlib-package/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/latex1/_CoqProject b/test-suite/coq-makefile/latex1/_CoqProject
index 35792066bb..aa9473eaf0 100644
--- a/test-suite/coq-makefile/latex1/_CoqProject
+++ b/test-suite/coq-makefile/latex1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/merlin1/_CoqProject b/test-suite/coq-makefile/merlin1/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/merlin1/_CoqProject
+++ b/test-suite/coq-makefile/merlin1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/mlpack1/_CoqProject b/test-suite/coq-makefile/mlpack1/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/mlpack1/_CoqProject
+++ b/test-suite/coq-makefile/mlpack1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/mlpack2/_CoqProject b/test-suite/coq-makefile/mlpack2/_CoqProject
index 51864a87ae..3db54e0a0b 100644
--- a/test-suite/coq-makefile/mlpack2/_CoqProject
+++ b/test-suite/coq-makefile/mlpack2/_CoqProject
@@ -3,7 +3,7 @@
-I src/
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/multiroot/_CoqProject b/test-suite/coq-makefile/multiroot/_CoqProject
index b384bb6d97..f53eef99a8 100644
--- a/test-suite/coq-makefile/multiroot/_CoqProject
+++ b/test-suite/coq-makefile/multiroot/_CoqProject
@@ -4,7 +4,7 @@
-I src/
./src/test_plugin.mllib
-./src/test.ml4
+./src/test.mlg
./src/test.mli
./src/test_aux.ml
./src/test_aux.mli
diff --git a/test-suite/coq-makefile/native1/_CoqProject b/test-suite/coq-makefile/native1/_CoqProject
index a6fa17348c..847b2c00a9 100644
--- a/test-suite/coq-makefile/native1/_CoqProject
+++ b/test-suite/coq-makefile/native1/_CoqProject
@@ -4,7 +4,7 @@
-arg -native-compiler
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/only/_CoqProject b/test-suite/coq-makefile/only/_CoqProject
index 357384fddf..619a8fa459 100644
--- a/test-suite/coq-makefile/only/_CoqProject
+++ b/test-suite/coq-makefile/only/_CoqProject
@@ -3,7 +3,7 @@
-I src/
./src/test_plugin.mlpack
-./src/test.ml4
+./src/test.mlg
./src/test.mli
./src/test_aux.ml
./src/test_aux.mli
diff --git a/test-suite/coq-makefile/plugin1/_CoqProject b/test-suite/coq-makefile/plugin1/_CoqProject
index 4eddc9d708..ab7876d868 100644
--- a/test-suite/coq-makefile/plugin1/_CoqProject
+++ b/test-suite/coq-makefile/plugin1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mllib
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/plugin2/_CoqProject b/test-suite/coq-makefile/plugin2/_CoqProject
index 0bf1e07f25..94eed53130 100644
--- a/test-suite/coq-makefile/plugin2/_CoqProject
+++ b/test-suite/coq-makefile/plugin2/_CoqProject
@@ -3,7 +3,7 @@
-I src/
src/test_plugin.mllib
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/plugin3/_CoqProject b/test-suite/coq-makefile/plugin3/_CoqProject
index 2028d49a8b..8e8a7ab074 100644
--- a/test-suite/coq-makefile/plugin3/_CoqProject
+++ b/test-suite/coq-makefile/plugin3/_CoqProject
@@ -3,7 +3,7 @@
-I src/
./src/test_plugin.mllib
-./src/test.ml4
+./src/test.mlg
./src/test.mli
./src/test_aux.ml
./src/test_aux.mli
diff --git a/test-suite/coq-makefile/quick2vo/_CoqProject b/test-suite/coq-makefile/quick2vo/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/quick2vo/_CoqProject
+++ b/test-suite/coq-makefile/quick2vo/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/template/init.sh b/test-suite/coq-makefile/template/init.sh
index 2e066d30d9..30be5e6456 100755
--- a/test-suite/coq-makefile/template/init.sh
+++ b/test-suite/coq-makefile/template/init.sh
@@ -11,7 +11,7 @@ mkdir -p theories/sub
cp ../../template/theories/sub/testsub.v theories/sub
cp ../../template/theories/test.v theories
-cp ../../template/src/test.ml4 src
+cp ../../template/src/test.mlg src
cp ../../template/src/test_aux.mli src
cp ../../template/src/test.mli src
cp ../../template/src/test_plugin.mlpack src
diff --git a/test-suite/coq-makefile/template/src/test.ml4 b/test-suite/coq-makefile/template/src/test.mlg
index 72765abe04..7a166f3b98 100644
--- a/test-suite/coq-makefile/template/src/test.ml4
+++ b/test-suite/coq-makefile/template/src/test.mlg
@@ -1,13 +1,17 @@
+{
open Ltac_plugin
+}
DECLARE PLUGIN "test_plugin"
+{
let () = Mltop.add_known_plugin (fun () -> ()) "test_plugin";;
+}
VERNAC COMMAND EXTEND Test CLASSIFIED AS SIDEFF
- | [ "TestCommand" ] -> [ () ]
+ | [ "TestCommand" ] -> { () }
END
TACTIC EXTEND test
-| [ "test_tactic" ] -> [ Test_aux.tac ]
+| [ "test_tactic" ] -> { Test_aux.tac }
END
diff --git a/test-suite/coq-makefile/uninstall1/_CoqProject b/test-suite/coq-makefile/uninstall1/_CoqProject
index 35792066bb..aa9473eaf0 100644
--- a/test-suite/coq-makefile/uninstall1/_CoqProject
+++ b/test-suite/coq-makefile/uninstall1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/uninstall2/_CoqProject b/test-suite/coq-makefile/uninstall2/_CoqProject
index d2a547d47b..0068554d72 100644
--- a/test-suite/coq-makefile/uninstall2/_CoqProject
+++ b/test-suite/coq-makefile/uninstall2/_CoqProject
@@ -3,7 +3,7 @@
-I src/
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/validate1/_CoqProject b/test-suite/coq-makefile/validate1/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/validate1/_CoqProject
+++ b/test-suite/coq-makefile/validate1/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/coq-makefile/vio2vo/_CoqProject b/test-suite/coq-makefile/vio2vo/_CoqProject
index 69f47302e1..61136e82f0 100644
--- a/test-suite/coq-makefile/vio2vo/_CoqProject
+++ b/test-suite/coq-makefile/vio2vo/_CoqProject
@@ -3,7 +3,7 @@
-I src
src/test_plugin.mlpack
-src/test.ml4
+src/test.mlg
src/test.mli
src/test_aux.ml
src/test_aux.mli
diff --git a/test-suite/misc/poly-capture-global-univs/_CoqProject b/test-suite/misc/poly-capture-global-univs/_CoqProject
index 70ec246062..e5dc3cff56 100644
--- a/test-suite/misc/poly-capture-global-univs/_CoqProject
+++ b/test-suite/misc/poly-capture-global-univs/_CoqProject
@@ -1,7 +1,7 @@
-Q theories Evil
-I src
-src/evil.ml4
+src/evil.mlg
src/evilImpl.ml
src/evilImpl.mli
src/evil_plugin.mlpack
diff --git a/test-suite/misc/poly-capture-global-univs/src/evil.ml4 b/test-suite/misc/poly-capture-global-univs/src/evil.ml4
deleted file mode 100644
index 565e979aaa..0000000000
--- a/test-suite/misc/poly-capture-global-univs/src/evil.ml4
+++ /dev/null
@@ -1,9 +0,0 @@
-
-open Stdarg
-open EvilImpl
-
-DECLARE PLUGIN "evil_plugin"
-
-VERNAC COMMAND FUNCTIONAL EXTEND VernacEvil CLASSIFIED AS SIDEFF
-| [ "Evil" ident(x) ident(y) ] -> [ fun ~atts ~st -> evil x y; st ]
-END
diff --git a/test-suite/misc/poly-capture-global-univs/src/evil.mlg b/test-suite/misc/poly-capture-global-univs/src/evil.mlg
new file mode 100644
index 0000000000..edd22b1d29
--- /dev/null
+++ b/test-suite/misc/poly-capture-global-univs/src/evil.mlg
@@ -0,0 +1,10 @@
+{
+open Stdarg
+open EvilImpl
+}
+
+DECLARE PLUGIN "evil_plugin"
+
+VERNAC COMMAND EXTEND VernacEvil CLASSIFIED AS SIDEFF
+| [ "Evil" ident(x) ident(y) ] -> { evil x y }
+END
diff --git a/test-suite/output/RecordFieldErrors.out b/test-suite/output/RecordFieldErrors.out
new file mode 100644
index 0000000000..5b67f632c9
--- /dev/null
+++ b/test-suite/output/RecordFieldErrors.out
@@ -0,0 +1,14 @@
+The command has indeed failed with message:
+unit: Not a projection.
+The command has indeed failed with message:
+unit: Not a projection.
+The command has indeed failed with message:
+This record contains fields of both t and t'.
+The command has indeed failed with message:
+unit: Not a projection.
+The command has indeed failed with message:
+This record defines several times the field foo.
+The command has indeed failed with message:
+This record defines several times the field unit.
+The command has indeed failed with message:
+unit: Not a projection of inductive t.
diff --git a/test-suite/output/RecordFieldErrors.v b/test-suite/output/RecordFieldErrors.v
new file mode 100644
index 0000000000..27aa07822b
--- /dev/null
+++ b/test-suite/output/RecordFieldErrors.v
@@ -0,0 +1,38 @@
+(** Check that various errors in record fields are reported with the correct
+underlying issue. *)
+
+Record t :=
+ { foo: unit }.
+
+Record t' :=
+ { bar: unit }.
+
+Fail Check {| unit := tt |}.
+(* unit: Not a projection. *)
+
+Fail Check {| unit := tt;
+ foo := tt |}.
+(* unit: Not a projection. *)
+
+Fail Check {| foo := tt;
+ bar := tt |}.
+(* This record contains fields of both t and t'. *)
+
+Fail Check {| unit := tt;
+ unit := tt |}.
+(* unit: Not a projection. *)
+
+Fail Check {| foo := tt;
+ foo := tt |}.
+(* This record defines several times the field foo. *)
+
+Fail Check {| foo := tt;
+ unit := tt;
+ unit := tt |}.
+(* This is slightly wrong (would prefer "unit: Not a projection."), but it's
+acceptable and seems an unlikely mistake. *)
+(* This record defines several times the field unit. *)
+
+Fail Check {| foo := tt;
+ unit := tt |}.
+(* unit: Not a projection of inductive t. *)
diff --git a/theories/Strings/ByteVector.v b/theories/Strings/ByteVector.v
new file mode 100644
index 0000000000..16f26002d2
--- /dev/null
+++ b/theories/Strings/ByteVector.v
@@ -0,0 +1,56 @@
+(************************************************************************)
+(* * The Coq Proof Assistant / The Coq Development Team *)
+(* v * INRIA, CNRS and contributors - Copyright 1999-2018 *)
+(* <O___,, * (see CREDITS file for the list of authors) *)
+(* \VV/ **************************************************************)
+(* // * This file is distributed under the terms of the *)
+(* * GNU Lesser General Public License Version 2.1 *)
+(* * (see LICENSE file for the text of the license) *)
+(************************************************************************)
+
+Require Import Ascii Basics Bvector Psatz String Vector.
+Export VectorNotations.
+Open Scope program_scope.
+Open Scope string_scope.
+
+Definition ByteVector := Vector.t ascii.
+
+Definition ByteNil : ByteVector 0 := Vector.nil ascii.
+
+Definition little_endian_to_string {n : nat} (v : ByteVector n) : string :=
+ fold_right String v "".
+
+Definition to_string {n : nat} : ByteVector n -> string :=
+ little_endian_to_string ∘ rev.
+
+Fixpoint little_endian_of_string (s : string) : ByteVector (length s) :=
+ match s with
+ | "" => ByteNil
+ | String b s' => b :: little_endian_of_string s'
+ end.
+
+Definition of_string (s : string) : ByteVector (length s) :=
+ rev (little_endian_of_string s).
+
+Fixpoint to_Bvector {n : nat} (v : ByteVector n) : Bvector (n * 8) :=
+ match v with
+ | [] => []
+ | Ascii b0 b1 b2 b3 b4 b5 b6 b7::v' =>
+ b0::b1::b2::b3::b4::b5::b6::b7::to_Bvector v'
+ end.
+
+Fixpoint of_Bvector {n : nat} : Bvector (n * 8) -> ByteVector n :=
+ match n with
+ | 0 => fun _ => []
+ | S n' =>
+ fun v =>
+ let (b0, v1) := uncons v in
+ let (b1, v2) := uncons v1 in
+ let (b2, v3) := uncons v2 in
+ let (b3, v4) := uncons v3 in
+ let (b4, v5) := uncons v4 in
+ let (b5, v6) := uncons v5 in
+ let (b6, v7) := uncons v6 in
+ let (b7, v8) := uncons v7 in
+ Ascii b0 b1 b2 b3 b4 b5 b6 b7::of_Bvector v8
+ end.
diff --git a/theories/Vectors/VectorDef.v b/theories/Vectors/VectorDef.v
index 390ca78c0e..4a2bddf35c 100644
--- a/theories/Vectors/VectorDef.v
+++ b/theories/Vectors/VectorDef.v
@@ -132,6 +132,9 @@ replace v (Fin.of_nat_lt H).
Definition tl {A} := @caseS _ (fun n v => t A n) (fun h n t => t).
Global Arguments tl {A} {n} v.
+(** Destruct a non empty vector *)
+Definition uncons {A} {n : nat} (v : t A (S n)) : A * t A n := (hd v, tl v).
+
(** Remove last element of a non-empty vector *)
Definition shiftout {A} := @rectS _ (fun n _ => t A n) (fun a => [])
(fun h _ _ H => h :: H).
diff --git a/tools/CoqMakefile.in b/tools/CoqMakefile.in
index 403ad61798..e3fa0c24fe 100644
--- a/tools/CoqMakefile.in
+++ b/tools/CoqMakefile.in
@@ -21,6 +21,7 @@ VFILES := $(COQMF_VFILES)
MLIFILES := $(COQMF_MLIFILES)
MLFILES := $(COQMF_MLFILES)
ML4FILES := $(COQMF_ML4FILES)
+MLGFILES := $(COQMF_MLGFILES)
MLPACKFILES := $(COQMF_MLPACKFILES)
MLLIBFILES := $(COQMF_MLLIBFILES)
CMDLINE_VFILES := $(COQMF_CMDLINE_VFILES)
@@ -87,6 +88,7 @@ COQTOP ?= "$(COQBIN)coqtop"
COQCHK ?= "$(COQBIN)coqchk"
COQDEP ?= "$(COQBIN)coqdep"
COQDOC ?= "$(COQBIN)coqdoc"
+COQPP ?= "$(COQBIN)coqpp"
COQMKFILE ?= "$(COQBIN)coq_makefile"
# Timing scripts
@@ -241,6 +243,7 @@ VDFILE := .coqdeps
ALLSRCFILES := \
$(ML4FILES) \
+ $(MLGFILES) \
$(MLFILES) \
$(MLPACKFILES) \
$(MLLIBFILES) \
@@ -262,6 +265,7 @@ TEXFILES = $(VFILES:.v=.tex)
GTEXFILES = $(VFILES:.v=.g.tex)
CMOFILES = \
$(ML4FILES:.ml4=.cmo) \
+ $(MLGFILES:.mlg=.cmo) \
$(MLFILES:.ml=.cmo) \
$(MLPACKFILES:.mlpack=.cmo)
CMXFILES = $(CMOFILES:.cmo=.cmx)
@@ -277,7 +281,7 @@ CMXSFILES = \
$(MLPACKFILES:.mlpack=.cmxs) \
$(CMXAFILES:.cmxa=.cmxs) \
$(if $(MLPACKFILES)$(CMXAFILES),,\
- $(ML4FILES:.ml4=.cmxs) $(MLFILES:.ml=.cmxs))
+ $(ML4FILES:.ml4=.cmxs) $(MLGFILES:.mlg=.cmxs) $(MLFILES:.ml=.cmxs))
# files that are packed into a plugin (no extension)
PACKEDFILES = \
@@ -555,6 +559,7 @@ clean::
$(HIDE)rm -f $(CMXSFILES)
$(HIDE)rm -f $(CMOFILES:.cmo=.o)
$(HIDE)rm -f $(CMXAFILES:.cmxa=.a)
+ $(HIDE)rm -f $(MLGFILES:.mlg=.ml)
$(HIDE)rm -f $(ALLDFILES)
$(HIDE)rm -f $(NATIVEFILES)
$(HIDE)find . -name .coq-native -type d -empty -delete
@@ -602,11 +607,17 @@ $(ML4FILES:.ml4=.cmx): %.cmx: %.ml4
$(SHOW)'CAMLOPT -pp -c $(FOR_PACK) $<'
$(HIDE)$(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $(PP) $(FOR_PACK) -impl $<
-$(MLFILES:.ml=.cmo): %.cmo: %.ml
+$(MLGFILES:.mlg=.ml): %.ml: %.mlg
+ $(SHOW)'COQPP $<'
+ $(HIDE)$(COQPP) $<
+
+# Stupid hack around a deficient syntax: we cannot concatenate two expansions
+$(filter %.cmo, $(MLFILES:.ml=.cmo) $(MLGFILES:.mlg=.cmo)): %.cmo: %.ml
$(SHOW)'CAMLC -c $<'
$(HIDE)$(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $<
-$(MLFILES:.ml=.cmx): %.cmx: %.ml
+# Same hack
+$(filter %.cmx, $(MLFILES:.ml=.cmx) $(MLGFILES:.mlg=.cmx)): %.cmx: %.ml
$(SHOW)'CAMLOPT -c $(FOR_PACK) $<'
$(HIDE)$(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $(FOR_PACK) $<
@@ -647,7 +658,7 @@ $(MLPACKFILES:.mlpack=.cmx): %.cmx: | %.mlpack
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) -pack -o $@ $^
# This rule is for _CoqProject with no .mllib nor .mlpack
-$(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(ML4FILES:.ml4=.cmxs)): %.cmxs: %.cmx
+$(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(ML4FILES:.ml4=.cmxs) $(MLGFILES:.mlg=.cmxs)): %.cmxs: %.cmx
$(SHOW)'[deprecated,use-mllib-or-mlpack] CAMLOPT -shared -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \
-shared -o $@ $<
@@ -716,6 +727,10 @@ $(addsuffix .d,$(ML4FILES)): %.ml4.d: %.ml4
$(SHOW)'CAMLDEP -pp $<'
$(HIDE)$(CAMLDEP) $(OCAMLLIBS) $(PP) -impl "$<" $(redir_if_ok)
+$(addsuffix .d,$(MLGFILES)): %.mlg.d: %.ml
+ $(SHOW)'CAMLDEP $<'
+ $(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok)
+
$(addsuffix .d,$(MLFILES)): %.ml.d: %.ml
$(SHOW)'CAMLDEP $<'
$(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok)
diff --git a/tools/coq_makefile.ml b/tools/coq_makefile.ml
index 7bb1390cad..ca5a232edb 100644
--- a/tools/coq_makefile.ml
+++ b/tools/coq_makefile.ml
@@ -218,7 +218,7 @@ let generate_conf_coq_config oc =
;;
let generate_conf_files oc
- { v_files; mli_files; ml4_files; ml_files; mllib_files; mlpack_files; }
+ { v_files; mli_files; ml4_files; mlg_files; ml_files; mllib_files; mlpack_files; }
=
let module S = String in
let map = map_sourced_list in
@@ -227,6 +227,7 @@ let generate_conf_files oc
fprintf oc "COQMF_MLIFILES = %s\n" (S.concat " " (map quote mli_files));
fprintf oc "COQMF_MLFILES = %s\n" (S.concat " " (map quote ml_files));
fprintf oc "COQMF_ML4FILES = %s\n" (S.concat " " (map quote ml4_files));
+ fprintf oc "COQMF_MLGFILES = %s\n" (S.concat " " (map quote mlg_files));
fprintf oc "COQMF_MLPACKFILES = %s\n" (S.concat " " (map quote mlpack_files));
fprintf oc "COQMF_MLLIBFILES = %s\n" (S.concat " " (map quote mllib_files));
let cmdline_vfiles = filter_cmdline v_files in
@@ -283,7 +284,7 @@ let generate_conf oc project args =
let ensure_root_dir
({ ml_includes; r_includes; q_includes;
- v_files; ml_files; mli_files; ml4_files;
+ v_files; ml_files; mli_files; ml4_files; mlg_files;
mllib_files; mlpack_files } as project)
=
let exists f = List.exists (forget_source > f) in
@@ -293,8 +294,8 @@ let ensure_root_dir
|| exists (fun ({ canonical_path = x },_) -> is_prefix x here) r_includes
|| exists (fun ({ canonical_path = x },_) -> is_prefix x here) q_includes
|| (not_tops v_files &&
- not_tops mli_files && not_tops ml4_files && not_tops ml_files &&
- not_tops mllib_files && not_tops mlpack_files)
+ not_tops mli_files && not_tops ml4_files && not_tops mlg_files &&
+ not_tops ml_files && not_tops mllib_files && not_tops mlpack_files)
then
project
else
diff --git a/tools/coqdep_common.ml b/tools/coqdep_common.ml
index 6a913ea894..713b2ad2b6 100644
--- a/tools/coqdep_common.ml
+++ b/tools/coqdep_common.ml
@@ -496,9 +496,9 @@ let rec suffixes = function
let add_caml_known phys_dir _ f =
let basename,suff =
- get_extension f [".ml";".mli";".ml4";".mllib";".mlpack"] in
+ get_extension f [".ml";".mli";".ml4";".mlg";".mllib";".mlpack"] in
match suff with
- | ".ml"|".ml4" -> add_ml_known basename (Some phys_dir) suff
+ | ".ml"|".ml4"|".mlg" -> add_ml_known basename (Some phys_dir) suff
| ".mli" -> add_mli_known basename (Some phys_dir) suff
| ".mllib" -> add_mllib_known basename (Some phys_dir) suff
| ".mlpack" -> add_mlpack_known basename (Some phys_dir) suff
@@ -584,12 +584,12 @@ let rec treat_file old_dirname old_name =
in
Array.iter (treat_file (Some newdirname)) (Sys.readdir complete_name))
| S_REG ->
- (match get_extension name [".v";".ml";".mli";".ml4";".mllib";".mlpack"] with
+ (match get_extension name [".v";".ml";".mli";".ml4";".mlg";".mllib";".mlpack"] with
| (base,".v") ->
let name = file_name base dirname
and absname = absolute_file_name base dirname in
addQueue vAccu (name, absname)
- | (base,(".ml"|".ml4" as ext)) -> addQueue mlAccu (base,ext,dirname)
+ | (base,(".ml"|".ml4"|".mlg" as ext)) -> addQueue mlAccu (base,ext,dirname)
| (base,".mli") -> addQueue mliAccu (base,dirname)
| (base,".mllib") -> addQueue mllibAccu (base,dirname)
| (base,".mlpack") -> addQueue mlpackAccu (base,dirname)
diff --git a/tools/ocamllibdep.mll b/tools/ocamllibdep.mll
index 053a0435ce..155296362f 100644
--- a/tools/ocamllibdep.mll
+++ b/tools/ocamllibdep.mll
@@ -145,9 +145,9 @@ let mllibAccu = ref ([] : (string * dir) list)
let mlpackAccu = ref ([] : (string * dir) list)
let add_caml_known phys_dir f =
- let basename,suff = get_extension f [".ml";".ml4";".mlpack"] in
+ let basename,suff = get_extension f [".ml";".ml4";".mlg";".mlpack"] in
match suff with
- | ".ml"|".ml4" -> add_ml_known basename (Some phys_dir) suff
+ | ".ml"|".ml4"|".mlg" -> add_ml_known basename (Some phys_dir) suff
| ".mlpack" -> add_mlpack_known basename (Some phys_dir) suff
| _ -> ()
diff --git a/toplevel/dune b/toplevel/dune
index f51e50aaa3..c2f9cd662e 100644
--- a/toplevel/dune
+++ b/toplevel/dune
@@ -3,6 +3,7 @@
(public_name coq.toplevel)
(synopsis "Coq's Interactive Shell [terminal-based]")
(wrapped false)
+ (flags :standard -open Gramlib)
(libraries num coq.stm))
; Coqlevel provides the `Num` library to plugins, we could also use
; -linkall in the plugins file, to be discussed.
diff --git a/vernac/assumptions.ml b/vernac/assumptions.ml
index 15c0278f47..6beac2032d 100644
--- a/vernac/assumptions.ml
+++ b/vernac/assumptions.ml
@@ -162,27 +162,6 @@ let label_of = function
| ConstructRef ((kn,_),_) -> MutInd.label kn
| VarRef id -> Label.of_id id
-let fold_constr_with_full_binders g f n acc c =
- let open Context.Rel.Declaration in
- match Constr.kind c with
- | Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _ | Construct _ -> acc
- | Cast (c,_, t) -> f n (f n acc c) t
- | Prod (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c
- | Lambda (na,t,c) -> f (g (LocalAssum (na,t)) n) (f n acc t) c
- | LetIn (na,b,t,c) -> f (g (LocalDef (na,b,t)) n) (f n (f n acc b) t) c
- | App (c,l) -> Array.fold_left (f n) (f n acc c) l
- | Proj (p,c) -> f n acc c
- | Evar (_,l) -> Array.fold_left (f n) acc l
- | Case (_,p,c,bl) -> Array.fold_left (f n) (f n (f n acc p) c) bl
- | Fix (_,(lna,tl,bl)) ->
- let n' = CArray.fold_left2 (fun c n t -> g (LocalAssum (n,t)) c) n lna tl in
- let fd = Array.map2 (fun t b -> (t,b)) tl bl in
- Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd
- | CoFix (_,(lna,tl,bl)) ->
- let n' = CArray.fold_left2 (fun c n t -> g (LocalAssum (n,t)) c) n lna tl in
- let fd = Array.map2 (fun t b -> (t,b)) tl bl in
- Array.fold_left (fun acc (t,b) -> f n' (f n acc t) b) acc fd
-
let rec traverse current ctx accu t = match Constr.kind t with
| Var id ->
let body () = id |> Global.lookup_named |> NamedDecl.get_value in
@@ -205,10 +184,10 @@ let rec traverse current ctx accu t = match Constr.kind t with
traverse_object
~inhabits:(current,ctx,Vars.subst1 mkProp oty) accu body (ConstRef kn)
| _ ->
- fold_constr_with_full_binders
+ Constr.fold_with_full_binders
Context.Rel.add (traverse current) ctx accu t
end
-| _ -> fold_constr_with_full_binders
+| _ -> Constr.fold_with_full_binders
Context.Rel.add (traverse current) ctx accu t
and traverse_object ?inhabits (curr, data, ax2ty) body obj =
diff --git a/vernac/auto_ind_decl.ml b/vernac/auto_ind_decl.ml
index 148d4437fa..fa1b8eeb3e 100644
--- a/vernac/auto_ind_decl.ml
+++ b/vernac/auto_ind_decl.ml
@@ -63,20 +63,20 @@ exception ConstructorWithNonParametricInductiveType of inductive
exception DecidabilityIndicesNotSupported
(* Some pre declaration of constant we are going to use *)
-let andb_prop = fun _ -> UnivGen.constr_of_global (Coqlib.lib_ref "core.bool.andb_prop")
+let andb_prop = fun _ -> UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.bool.andb_prop")
let andb_true_intro = fun _ ->
- UnivGen.constr_of_global
+ UnivGen.constr_of_monomorphic_global
(Coqlib.lib_ref "core.bool.andb_true_intro")
(* We avoid to use lazy as the binding of constants can change *)
-let bb () = UnivGen.constr_of_global (Coqlib.lib_ref "core.bool.type")
-let tt () = UnivGen.constr_of_global (Coqlib.lib_ref "core.bool.true")
-let ff () = UnivGen.constr_of_global (Coqlib.lib_ref "core.bool.false")
-let eq () = UnivGen.constr_of_global (Coqlib.lib_ref "core.eq.type")
+let bb () = UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.bool.type")
+let tt () = UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.bool.true")
+let ff () = UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.bool.false")
+let eq () = UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.eq.type")
-let sumbool () = UnivGen.constr_of_global (Coqlib.lib_ref "core.sumbool.type")
-let andb = fun _ -> UnivGen.constr_of_global (Coqlib.lib_ref "core.bool.andb")
+let sumbool () = UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.sumbool.type")
+let andb = fun _ -> UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "core.bool.andb")
let induct_on c = induction false None c None None
let destruct_on c = destruct false None c None None
@@ -113,7 +113,7 @@ let mkFullInd (ind,u) n =
else mkIndU (ind,u)
let check_bool_is_defined () =
- try let _ = Global.type_of_global_in_context (Global.env ()) Coqlib.(lib_ref "core.bool.type") in ()
+ try let _ = Typeops.type_of_global_in_context (Global.env ()) Coqlib.(lib_ref "core.bool.type") in ()
with e when CErrors.noncritical e -> raise (UndefinedCst "bool")
let check_no_indices mib =
@@ -873,7 +873,7 @@ let compute_dec_goal ind lnamesparrec nparrec =
create_input (
mkNamedProd n (mkFullInd ind (2*nparrec)) (
mkNamedProd m (mkFullInd ind (2*nparrec+1)) (
- mkApp(sumbool(),[|eqnm;mkApp (UnivGen.constr_of_global @@ Coqlib.lib_ref "core.not.type",[|eqnm|])|])
+ mkApp(sumbool(),[|eqnm;mkApp (UnivGen.constr_of_monomorphic_global @@ Coqlib.lib_ref "core.not.type",[|eqnm|])|])
)
)
)
diff --git a/vernac/class.ml b/vernac/class.ml
index 614b2181d9..ab43d5c8ff 100644
--- a/vernac/class.ml
+++ b/vernac/class.ml
@@ -21,7 +21,6 @@ open Environ
open Classops
open Declare
open Globnames
-open Nametab
open Decl_kinds
let strength_min l = if List.mem `LOCAL l then `LOCAL else `GLOBAL
@@ -66,7 +65,7 @@ let explain_coercion_error g = function
let check_reference_arity ref =
let env = Global.env () in
- let c, _ = Global.type_of_global_in_context env ref in
+ let c, _ = Typeops.type_of_global_in_context env ref in
if not (Reductionops.is_arity env (Evd.from_env env) (EConstr.of_constr c)) (** FIXME *) then
raise (CoercionError (NotAClass ref))
@@ -249,7 +248,7 @@ let warn_uniform_inheritance =
let add_new_coercion_core coef stre poly source target isid =
check_source source;
- let t, _ = Global.type_of_global_in_context (Global.env ()) coef in
+ let t, _ = Typeops.type_of_global_in_context (Global.env ()) coef in
if coercion_exists coef then raise (CoercionError AlreadyExists);
let lp,tg = decompose_prod_assum t in
let llp = List.length lp in
@@ -310,7 +309,7 @@ let add_coercion_hook poly local ref =
| Global -> false
in
let () = try_add_new_coercion ref ~local poly in
- let msg = pr_global_env Id.Set.empty ref ++ str " is now a coercion" in
+ let msg = Nametab.pr_global_env Id.Set.empty ref ++ str " is now a coercion" in
Flags.if_verbose Feedback.msg_info msg
let add_coercion_hook poly = Lemmas.mk_hook (add_coercion_hook poly)
diff --git a/vernac/classes.ml b/vernac/classes.ml
index 3cf5e9bfdf..84ffb84206 100644
--- a/vernac/classes.ml
+++ b/vernac/classes.ml
@@ -12,7 +12,6 @@
module CVars = Vars
open Names
open EConstr
-open Nametab
open CErrors
open Util
open Typeclasses_errors
@@ -67,10 +66,10 @@ let intern_info {hint_priority;hint_pattern} =
(** TODO: add subinstances *)
let existing_instance glob g info =
- let c = global g in
+ let c = Nametab.global g in
let info = Option.default Hints.empty_hint_info info in
let info = intern_info info in
- let instance, _ = Global.type_of_global_in_context (Global.env ()) c in
+ let instance, _ = Typeops.type_of_global_in_context (Global.env ()) c in
let _, r = Term.decompose_prod_assum instance in
match class_of_constr Evd.empty (EConstr.of_constr r) with
| Some (_, ((tc,u), _)) -> add_instance (new_instance tc info glob c)
diff --git a/vernac/comInductive.ml b/vernac/comInductive.ml
index 7b28895814..5ff3032ec4 100644
--- a/vernac/comInductive.ml
+++ b/vernac/comInductive.ml
@@ -22,7 +22,6 @@ open Nameops
open Constrexpr
open Constrexpr_ops
open Constrintern
-open Nametab
open Impargs
open Reductionops
open Indtypes
@@ -103,10 +102,6 @@ let mk_mltype_data sigma env assums arity indname =
let is_ml_type = is_sort env sigma arity in
(is_ml_type,indname,assums)
-let prepare_param = function
- | LocalAssum (na,t) -> Name.get_id na, LocalAssumEntry t
- | LocalDef (na,b,_) -> Name.get_id na, LocalDefEntry b
-
(** Make the arity conclusion flexible to avoid generating an upper bound universe now,
only if the universe does not appear anywhere else.
This is really a hack to stay compatible with the semantics of template polymorphic
@@ -464,7 +459,7 @@ let interp_mutual_inductive_gen env0 ~template udecl (uparamsl,paramsl,indl) not
in
(* Build the mutual inductive entry *)
let mind_ent =
- { mind_entry_params = List.map prepare_param ctx_params;
+ { mind_entry_params = ctx_params;
mind_entry_record = None;
mind_entry_finite = finite;
mind_entry_inds = entries;
@@ -575,6 +570,6 @@ let do_mutual_inductive ~template udecl indl cum poly prv ~uniform finite =
(* Declare the possible notations of inductive types *)
List.iter (Metasyntax.add_notation_interpretation (Global.env ())) ntns;
(* Declare the coercions *)
- List.iter (fun qid -> Class.try_add_new_coercion (locate qid) ~local:false poly) coes;
+ List.iter (fun qid -> Class.try_add_new_coercion (Nametab.locate qid) ~local:false poly) coes;
(* If positivity is assumed declares itself as unsafe. *)
if Environ.deactivated_guard (Global.env ()) then Feedback.feedback Feedback.AddedAxiom else ()
diff --git a/vernac/dune b/vernac/dune
index 45b567d631..4673251002 100644
--- a/vernac/dune
+++ b/vernac/dune
@@ -3,6 +3,7 @@
(synopsis "Coq's Vernacular Language")
(public_name coq.vernac)
(wrapped false)
+ (flags :standard -open Gramlib)
(libraries tactics parsing))
(rule
diff --git a/vernac/himsg.ml b/vernac/himsg.ml
index a4b3a75c9f..ca77e03707 100644
--- a/vernac/himsg.ml
+++ b/vernac/himsg.ml
@@ -773,12 +773,13 @@ let pr_constraints printenv env sigma evars cstrs =
let explain_unsatisfiable_constraints env sigma constr comp =
let (_, constraints) = Evd.extract_all_conv_pbs sigma in
+ let tcs = Evd.get_typeclass_evars sigma in
let undef = Evd.undefined_map sigma in
(** Only keep evars that are subject to resolution and members of the given
component. *)
- let is_kept evk evi = match comp with
- | None -> Typeclasses.is_resolvable evi
- | Some comp -> Typeclasses.is_resolvable evi && Evar.Set.mem evk comp
+ let is_kept evk _ = match comp with
+ | None -> Evar.Set.mem evk tcs
+ | Some comp -> Evar.Set.mem evk tcs && Evar.Set.mem evk comp
in
let undef =
let m = Evar.Map.filter is_kept undef in
diff --git a/vernac/indschemes.ml b/vernac/indschemes.ml
index 5f2818c12b..d8cd429e6e 100644
--- a/vernac/indschemes.ml
+++ b/vernac/indschemes.ml
@@ -33,7 +33,6 @@ open Globnames
open Goptions
open Nameops
open Termops
-open Nametab
open Smartlocate
open Vernacexpr
open Ind_tables
@@ -369,7 +368,7 @@ requested
| InSet -> recs ^ "_nodep"
| InType -> recs ^ "t_nodep")
) in
- let newid = add_suffix (basename_of_global (IndRef ind)) suffix in
+ let newid = add_suffix (Nametab.basename_of_global (IndRef ind)) suffix in
let newref = CAst.make newid in
((newref,isdep,ind,z)::l1),l2
in
@@ -387,7 +386,7 @@ let do_mutual_induction_scheme ?(force_mutual=false) lnamedepindsort =
let evd, indu, inst =
match inst with
| None ->
- let _, ctx = Global.type_of_global_in_context env0 (IndRef ind) in
+ let _, ctx = Typeops.type_of_global_in_context env0 (IndRef ind) in
let u, ctx = UnivGen.fresh_instance_from ctx None in
let evd = Evd.from_ctx (UState.of_context_set ctx) in
evd, (ind,u), Some u
diff --git a/vernac/obligations.ml b/vernac/obligations.ml
index 0ac97a74e4..fbf552e649 100644
--- a/vernac/obligations.ml
+++ b/vernac/obligations.ml
@@ -266,7 +266,7 @@ let eterm_obligations env name evm fs ?status t ty =
let hide_obligation () =
Coqlib.check_required_library ["Coq";"Program";"Tactics"];
- UnivGen.constr_of_global (Coqlib.lib_ref "program.tactics.obligation")
+ UnivGen.constr_of_monomorphic_global (Coqlib.lib_ref "program.tactics.obligation")
let pperror cmd = CErrors.user_err ~hdr:"Program" cmd
let error s = pperror (str s)
diff --git a/vernac/record.ml b/vernac/record.ml
index 724b6e62fe..7a4c38e972 100644
--- a/vernac/record.ml
+++ b/vernac/record.ml
@@ -191,14 +191,6 @@ let typecheck_params_and_fields finite def poly pl ps records =
let ans = List.map2 map data typs in
ubinders, univs, template, newps, imps, ans
-let degenerate_decl decl =
- let id = match RelDecl.get_name decl with
- | Name id -> id
- | Anonymous -> anomaly (Pp.str "Unnamed record variable.") in
- match decl with
- | LocalAssum (_,t) -> (id, LocalAssumEntry t)
- | LocalDef (_,b,_) -> (id, LocalDefEntry b)
-
type record_error =
| MissingProj of Id.t * Id.t list
| BadTypedProj of Id.t * env * Type_errors.type_error
@@ -437,7 +429,7 @@ let declare_structure finite ubinders univs paramimpls params template ?(kind=St
in
let blocks = List.mapi mk_block record_data in
let mie =
- { mind_entry_params = List.map degenerate_decl params;
+ { mind_entry_params = params;
mind_entry_record = Some (if !primitive_flag then Some binder_name else None);
mind_entry_finite = finite;
mind_entry_inds = blocks;
@@ -574,8 +566,8 @@ let declare_class finite def cum ubinders univs id idbuild paramimpls params ari
List.map map data
-let add_constant_class cst =
- let ty, univs = Global.type_of_global_in_context (Global.env ()) (ConstRef cst) in
+let add_constant_class env cst =
+ let ty, univs = Typeops.type_of_global_in_context env (ConstRef cst) in
let ctx, arity = decompose_prod_assum ty in
let tc =
{ cl_univs = univs;
@@ -589,12 +581,12 @@ let add_constant_class cst =
in add_class tc;
set_typeclass_transparency (EvalConstRef cst) false false
-let add_inductive_class ind =
- let mind, oneind = Global.lookup_inductive ind in
+let add_inductive_class env ind =
+ let mind, oneind = Inductive.lookup_mind_specif env ind in
let k =
let ctx = oneind.mind_arity_ctxt in
let univs = Declareops.inductive_polymorphic_context mind in
- let env = push_context ~strict:false (Univ.AUContext.repr univs) (Global.env ()) in
+ let env = push_context ~strict:false (Univ.AUContext.repr univs) env in
let env = push_rel_context ctx env in
let inst = Univ.make_abstract_instance univs in
let ty = Inductive.type_of_inductive env ((mind, oneind), inst) in
@@ -612,11 +604,12 @@ let warn_already_existing_class =
Printer.pr_global g ++ str " is already declared as a typeclass.")
let declare_existing_class g =
+ let env = Global.env () in
if Typeclasses.is_class g then warn_already_existing_class g
else
match g with
- | ConstRef x -> add_constant_class x
- | IndRef x -> add_inductive_class x
+ | ConstRef x -> add_constant_class env x
+ | IndRef x -> add_inductive_class env x
| _ -> user_err ~hdr:"declare_existing_class"
(Pp.str"Unsupported class type, only constants and inductives are allowed")
diff --git a/vernac/search.ml b/vernac/search.ml
index 04dcb7d565..1fac28358a 100644
--- a/vernac/search.ml
+++ b/vernac/search.ml
@@ -18,7 +18,6 @@ open Environ
open Pattern
open Libnames
open Globnames
-open Nametab
module NamedDecl = Context.Named.Declaration
@@ -80,7 +79,7 @@ let iter_declarations (fn : GlobRef.t -> env -> constr -> unit) =
| "CONSTANT" ->
let cst = Global.constant_of_delta_kn kn in
let gr = ConstRef cst in
- let (typ, _) = Global.type_of_global_in_context (Global.env ()) gr in
+ let (typ, _) = Typeops.type_of_global_in_context (Global.env ()) gr in
fn gr env typ
| "INDUCTIVE" ->
let mind = Global.mind_of_delta_kn kn in
@@ -192,7 +191,7 @@ let rec head_filter pat ref env sigma typ =
| _ -> false
let full_name_of_reference ref =
- let (dir,id) = repr_path (path_of_global ref) in
+ let (dir,id) = repr_path (Nametab.path_of_global ref) in
DirPath.to_string dir ^ "." ^ Id.to_string id
(** Whether a reference is blacklisted *)
@@ -204,14 +203,14 @@ let blacklist_filter_aux () =
List.for_all is_not_bl l
let module_filter (mods, outside) ref env typ =
- let sp = path_of_global ref in
+ let sp = Nametab.path_of_global ref in
let sl = dirpath sp in
let is_outside md = not (is_dirpath_prefix_of md sl) in
let is_inside md = is_dirpath_prefix_of md sl in
if outside then List.for_all is_outside mods
else List.is_empty mods || List.exists is_inside mods
-let name_of_reference ref = Id.to_string (basename_of_global ref)
+let name_of_reference ref = Id.to_string (Nametab.basename_of_global ref)
let search_about_filter query gr env typ = match query with
| GlobSearchSubPattern pat ->
diff --git a/vernac/vernacentries.ml b/vernac/vernacentries.ml
index 48d4165830..1190d73258 100644
--- a/vernac/vernacentries.ml
+++ b/vernac/vernacentries.ml
@@ -1101,6 +1101,8 @@ let warn_arguments_assert =
[args] is the main list of arguments statuses,
[more_implicits] is a list of extra lists of implicit statuses *)
let vernac_arguments ~atts reference args more_implicits nargs_for_red flags =
+ let env = Global.env () in
+ let sigma = Evd.from_env env in
let assert_flag = List.mem `Assert flags in
let rename_flag = List.mem `Rename flags in
let clear_scopes_flag = List.mem `ClearScopes flags in
@@ -1125,9 +1127,7 @@ let vernac_arguments ~atts reference args more_implicits nargs_for_red flags =
let sr = smart_global reference in
let inf_names =
- let ty, _ = Global.type_of_global_in_context (Global.env ()) sr in
- let env = Global.env () in
- let sigma = Evd.from_env env in
+ let ty, _ = Typeops.type_of_global_in_context env sr in
Impargs.compute_implicits_names env sigma (EConstr.of_constr ty)
in
let prev_names =