-rw-r--r--  Makefile.build                          1
-rw-r--r--  Makefile.make                           2
-rw-r--r--  dev/doc/changes.md                      6
-rw-r--r--  gramlib/gramlib.mllib                   1
-rw-r--r--  gramlib/grammar.ml                    212
-rw-r--r--  gramlib/grammar.mli                     7
-rw-r--r--  gramlib/plexing.ml                      3
-rw-r--r--  gramlib/plexing.mli                     6
-rw-r--r--  gramlib/ploc.ml                        13
-rw-r--r--  gramlib/ploc.mli                       11
-rw-r--r--  parsing/cLexer.ml                      43
-rw-r--r--  parsing/pcoq.ml                        44
-rw-r--r--  parsing/pcoq.mli                        2
-rw-r--r--  plugins/ltac/g_tactic.mlg              14
-rw-r--r--  plugins/ssr/ssrparser.mlg              70
-rw-r--r--  plugins/ssrmatching/g_ssrmatching.mlg   4
-rw-r--r--  printing/proof_diffs.ml                 4
-rw-r--r--  toplevel/coqloop.ml                     6
-rw-r--r--  toplevel/vernac.ml                      2
-rw-r--r--  vernac/pvernac.ml                       4
-rw-r--r--  vernac/vernacinterp.ml                  2
21 files changed, 186 insertions, 271 deletions
diff --git a/Makefile.build b/Makefile.build
index 2b626506a3..d0948baf07 100644
--- a/Makefile.build
+++ b/Makefile.build
@@ -621,7 +621,6 @@ gramlib/.pack:
# gramlib.ml contents
gramlib/.pack/gramlib.ml: | gramlib/.pack
echo " \
-module Ploc = Gramlib__Ploc \
module Plexing = Gramlib__Plexing \
module Gramext = Gramlib__Gramext \
module Grammar = Gramlib__Grammar" > $@
diff --git a/Makefile.make b/Makefile.make
index dc123820ee..c68a71a832 100644
--- a/Makefile.make
+++ b/Makefile.make
@@ -101,7 +101,7 @@ EXISTINGMLI := $(call find, '*.mli')
## Files that will be generated
# GRAMFILES must be in linking order
-GRAMFILES=$(addprefix gramlib/.pack/gramlib__,Ploc Plexing Gramext Grammar)
+GRAMFILES=$(addprefix gramlib/.pack/gramlib__,Plexing Gramext Grammar)
GRAMMLFILES := $(addsuffix .ml, $(GRAMFILES))
GRAMMLIFILES := $(addsuffix .mli, $(GRAMFILES))
GENGRAMMLFILES := $(GRAMMLFILES) gramlib/.pack/gramlib.ml # why is gramlib.ml not in GRAMMLFILES?
diff --git a/dev/doc/changes.md b/dev/doc/changes.md
index 26c4b01c9f..b61c7d3423 100644
--- a/dev/doc/changes.md
+++ b/dev/doc/changes.md
@@ -1,3 +1,9 @@
+## Changes between Coq 8.13 and Coq 8.14
+
+### Gramlib
+
+- A few functions have changed their interfaces to take advantage of the new abstraction `LStream` for streams equipped with a location function.
+
## Changes between Coq 8.12 and Coq 8.13
### Code formatting
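The LStream module itself is not included in this diff, so the sketch below reconstructs, from the call sites changed in this patch, the approximate interface the rest of the changes rely on; every name and signature here is inferred from usage rather than taken from the actual gramlib sources.

(* Inferred shape of the new location-aware token stream. *)
module type LSTREAM = sig
  type 'te t

  (* Build a stream from a producer of (token, location) pairs;
     [source] identifies the input (file, toplevel, ...), as in the
     CLexer.func change below. *)
  val from : source:Loc.source -> (int -> ('te * Loc.t) option) -> 'te t

  val count : 'te t -> int            (* number of tokens consumed so far *)
  val peek : 'te t -> 'te option      (* next token, without consuming it *)
  val peek_nth : int -> 'te t -> 'te  (* n-th lookahead; raises Stream.Failure past end of input *)
  val npeek : int -> 'te t -> 'te list
  val next : 'te t -> 'te
  val junk : 'te t -> unit

  (* Location queries replacing the old Plexing.location_function and
     the Ploc helpers removed by this patch. *)
  val get_loc : int -> 'te t -> Loc.t
  val interval_loc : int -> int -> 'te t -> Loc.t
  val max_peek_loc : 'te t -> Loc.t
end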
diff --git a/gramlib/gramlib.mllib b/gramlib/gramlib.mllib
index 4c915b2b05..e7e08f8ae1 100644
--- a/gramlib/gramlib.mllib
+++ b/gramlib/gramlib.mllib
@@ -1,4 +1,3 @@
-Ploc
Plexing
Gramext
Grammar
diff --git a/gramlib/grammar.ml b/gramlib/grammar.ml
index 3807b27272..77444b991a 100644
--- a/gramlib/grammar.ml
+++ b/gramlib/grammar.ml
@@ -17,7 +17,7 @@ module type S = sig
module Parsable : sig
type t
- val make : ?loc:Loc.t -> char Stream.t -> t
+ val make : ?source:Loc.source -> char Stream.t -> t
val comments : t -> ((int * int) * string) list
end
@@ -29,8 +29,9 @@ module type S = sig
val create : string -> 'a t
val parse : 'a t -> Parsable.t -> 'a
val name : 'a t -> string
- val of_parser : string -> (Plexing.location_function -> te Stream.t -> 'a) -> 'a t
- val parse_token_stream : 'a t -> te Stream.t -> 'a
+ type 'a parser_fun = { parser_fun : te LStream.t -> 'a }
+ val of_parser : string -> 'a parser_fun -> 'a t
+ val parse_token_stream : 'a t -> te LStream.t -> 'a
val print : Format.formatter -> 'a t -> unit
end
@@ -114,7 +115,7 @@ module GMake (L : Plexing.S) = struct
type te = L.te
type 'c pattern = 'c L.pattern
-type 'a parser_t = L.te Stream.t -> 'a
+type 'a parser_t = L.te LStream.t -> 'a
type grammar =
{ gtokens : (string * string option, int ref) Hashtbl.t }
@@ -148,7 +149,7 @@ type 'a ty_entry = {
and 'a ty_desc =
| Dlevels of 'a ty_level list
-| Dparser of (Plexing.location_function -> 'a parser_t)
+| Dparser of 'a parser_t
and 'a ty_level = Level : (_, _, 'a) ty_rec_level -> 'a ty_level
@@ -956,14 +957,6 @@ let print_entry ppf e =
end;
fprintf ppf " ]@]"
-let floc = ref (fun _ -> failwith "internal error when computing location")
-
-let loc_of_token_interval bp ep =
- if bp == ep then
- if bp == 0 then Ploc.dummy else Ploc.after (!floc (bp - 1)) 0 1
- else
- let loc1 = !floc bp in let loc2 = !floc (pred ep) in Loc.merge loc1 loc2
-
let name_of_symbol : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> string =
fun entry ->
function
@@ -1115,10 +1108,10 @@ let top_tree : type s tr a. s ty_entry -> (s, tr, a) ty_tree -> (s, tr, a) ty_tr
| LocAct (_, _) -> raise Stream.Failure | DeadEnd -> raise Stream.Failure
let skip_if_empty bp p strm =
- if Stream.count strm == bp then fun a -> p strm
+ if LStream.count strm == bp then fun a -> p strm
else raise Stream.Failure
-let continue entry bp a symb son p1 (strm__ : _ Stream.t) =
+let continue entry bp a symb son p1 (strm__ : _ LStream.t) =
let a = (entry_of_symb entry symb).econtinue 0 bp a strm__ in
let act =
try p1 strm__ with
@@ -1129,7 +1122,7 @@ let continue entry bp a symb son p1 (strm__ : _ Stream.t) =
(** Recover from a success on [symb] with result [a] followed by a
failure on [son] in a rule of the form [a = symb; son] *)
let do_recover parser_of_tree entry nlevn alevn bp a symb son
- (strm__ : _ Stream.t) =
+ (strm__ : _ LStream.t) =
try
(* Try to replay the son with the top occurrence of NEXT (by
default at level nlevn) and trailing SELF (by default at alevn)
@@ -1148,7 +1141,7 @@ let do_recover parser_of_tree entry nlevn alevn bp a symb son
« OPT "!"; ident » fails to see an ident and the OPT was resolved
into the empty sequence, with application e.g. to being able to
safely write « LIST1 [ OPT "!"; id = ident -> id] ». *)
- skip_if_empty bp (fun (strm__ : _ Stream.t) -> raise Stream.Failure)
+ skip_if_empty bp (fun (strm__ : _ LStream.t) -> raise Stream.Failure)
strm__
with Stream.Failure ->
(* In case of success on just SELF, NEXT or an explicit call to
@@ -1164,19 +1157,6 @@ let do_recover parser_of_tree entry nlevn alevn bp a symb son
let recover parser_of_tree entry nlevn alevn bp a symb son strm =
do_recover parser_of_tree entry nlevn alevn bp a symb son strm
-let token_count = ref 0
-
-let peek_nth n strm =
- let list = Stream.npeek n strm in
- token_count := Stream.count strm + n;
- let rec loop list n =
- match list, n with
- x :: _, 1 -> Some x
- | _ :: l, n -> loop l (n - 1)
- | [], _ -> None
- in
- loop list n
-
let item_skipped = ref false
let call_and_push ps al strm =
@@ -1196,16 +1176,16 @@ let token_ematch tok =
let rec parser_of_tree : type s tr r. s ty_entry -> int -> int -> (s, tr, r) ty_tree -> r parser_t =
fun entry nlevn alevn ->
function
- DeadEnd -> (fun (strm__ : _ Stream.t) -> raise Stream.Failure)
- | LocAct (act, _) -> (fun (strm__ : _ Stream.t) -> act)
+ DeadEnd -> (fun (strm__ : _ LStream.t) -> raise Stream.Failure)
+ | LocAct (act, _) -> (fun (strm__ : _ LStream.t) -> act)
| Node (_, {node = Sself; son = LocAct (act, _); brother = DeadEnd}) ->
(* SELF on the right-hand side of the last rule *)
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
let a = entry.estart alevn strm__ in act a)
| Node (_, {node = Sself; son = LocAct (act, _); brother = bro}) ->
(* SELF on the right-hand side of a rule *)
let p2 = parser_of_tree entry nlevn alevn bro in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
match
try Some (entry.estart alevn strm__) with Stream.Failure -> None
with
@@ -1222,8 +1202,8 @@ let rec parser_of_tree : type s tr r. s ty_entry -> int -> int -> (s, tr, r) ty_
let ps = parser_of_symbol entry nlevn s in
let p1 = parser_of_tree entry nlevn alevn son in
let p1 = parser_cont p1 entry nlevn alevn s son in
- (fun (strm__ : _ Stream.t) ->
- let bp = Stream.count strm__ in
+ (fun (strm__ : _ LStream.t) ->
+ let bp = LStream.count strm__ in
let a = ps strm__ in
let act =
try p1 bp a strm__ with
@@ -1249,8 +1229,8 @@ let rec parser_of_tree : type s tr r. s ty_entry -> int -> int -> (s, tr, r) ty_
let p1 = parser_of_tree entry nlevn alevn son in
let p1 = parser_cont p1 entry nlevn alevn s son in
let p2 = parser_of_tree entry nlevn alevn bro in
- (fun (strm : _ Stream.t) ->
- let bp = Stream.count strm in
+ (fun (strm : _ LStream.t) ->
+ let bp = LStream.count strm in
match try Some (ps strm) with Stream.Failure -> None with
Some a ->
begin match
@@ -1268,11 +1248,11 @@ let rec parser_of_tree : type s tr r. s ty_entry -> int -> int -> (s, tr, r) ty_
let p1 =
parser_of_token_list entry son p1 rev_tokl last_tok
in
- fun (strm__ : _ Stream.t) ->
+ fun (strm__ : _ LStream.t) ->
try p1 strm__ with Stream.Failure -> p2 strm__
and parser_cont : type s tr tr' a r.
(a -> r) parser_t -> s ty_entry -> int -> int -> (s, tr, a) ty_symbol -> (s, tr', a -> r) ty_tree -> int -> a -> (a -> r) parser_t =
- fun p1 entry nlevn alevn s son bp a (strm__ : _ Stream.t) ->
+ fun p1 entry nlevn alevn s son bp a (strm__ : _ LStream.t) ->
try p1 strm__ with
Stream.Failure ->
recover parser_of_tree entry nlevn alevn bp a s son strm__
@@ -1280,18 +1260,15 @@ and parser_of_token_list : type s tr lt r f.
s ty_entry -> (s, tr, lt -> r) ty_tree ->
(int -> lt -> (lt -> r) parser_t) -> (r, f) tok_list -> lt pattern -> f parser_t =
fun entry son p1 rev_tokl last_tok ->
- let n = tok_list_length rev_tokl + 1 in
+ let n = tok_list_length rev_tokl in
let plast : r parser_t =
let tematch = token_ematch last_tok in
let ps strm =
- match peek_nth n strm with
- Some tok ->
- let r = tematch tok in
- for _i = 1 to n do Stream.junk strm done; r
- | None -> raise Stream.Failure
+ let r = tematch (LStream.peek_nth n strm) in
+ for _i = 0 to n do LStream.junk strm done; r
in
- fun (strm : _ Stream.t) ->
- let bp = Stream.count strm in
+ fun (strm : _ LStream.t) ->
+ let bp = LStream.count strm in
let a = ps strm in
match try Some (p1 bp a strm) with Stream.Failure -> None with
Some act -> act a
@@ -1302,12 +1279,8 @@ and parser_of_token_list : type s tr lt r f.
| TokNil -> plast
| TokCns (tok, tokl) ->
let tematch = token_ematch tok in
- let ps strm =
- match peek_nth n strm with
- Some tok -> tematch tok
- | None -> raise Stream.Failure
- in
- let plast = fun (strm : _ Stream.t) ->
+ let ps strm = tematch (LStream.peek_nth n strm) in
+ let plast = fun (strm : _ LStream.t) ->
let a = ps strm in let act = plast strm in act a in
loop (n - 1) tokl plast in
loop (n - 1) rev_tokl plast
@@ -1317,17 +1290,17 @@ and parser_of_symbol : type s tr a.
function
| Slist0 s ->
let ps = call_and_push (parser_of_symbol entry nlevn s) in
- let rec loop al (strm__ : _ Stream.t) =
+ let rec loop al (strm__ : _ LStream.t) =
match try Some (ps al strm__) with Stream.Failure -> None with
Some al -> loop al strm__
| _ -> al
in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
let a = loop [] strm__ in List.rev a)
| Slist0sep (symb, sep, false) ->
let ps = call_and_push (parser_of_symbol entry nlevn symb) in
let pt = parser_of_symbol entry nlevn sep in
- let rec kont al (strm__ : _ Stream.t) =
+ let rec kont al (strm__ : _ LStream.t) =
match try Some (pt strm__) with Stream.Failure -> None with
Some v ->
let al =
@@ -1338,14 +1311,14 @@ and parser_of_symbol : type s tr a.
kont al strm__
| _ -> al
in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
match try Some (ps [] strm__) with Stream.Failure -> None with
Some al -> let a = kont al strm__ in List.rev a
| _ -> [])
| Slist0sep (symb, sep, true) ->
let ps = call_and_push (parser_of_symbol entry nlevn symb) in
let pt = parser_of_symbol entry nlevn sep in
- let rec kont al (strm__ : _ Stream.t) =
+ let rec kont al (strm__ : _ LStream.t) =
match try Some (pt strm__) with Stream.Failure -> None with
Some v ->
begin match
@@ -1356,24 +1329,24 @@ and parser_of_symbol : type s tr a.
end
| _ -> al
in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
match try Some (ps [] strm__) with Stream.Failure -> None with
Some al -> let a = kont al strm__ in List.rev a
| _ -> [])
| Slist1 s ->
let ps = call_and_push (parser_of_symbol entry nlevn s) in
- let rec loop al (strm__ : _ Stream.t) =
+ let rec loop al (strm__ : _ LStream.t) =
match try Some (ps al strm__) with Stream.Failure -> None with
Some al -> loop al strm__
| _ -> al
in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
let al = ps [] strm__ in
let a = loop al strm__ in List.rev a)
| Slist1sep (symb, sep, false) ->
let ps = call_and_push (parser_of_symbol entry nlevn symb) in
let pt = parser_of_symbol entry nlevn sep in
- let rec kont al (strm__ : _ Stream.t) =
+ let rec kont al (strm__ : _ LStream.t) =
match try Some (pt strm__) with Stream.Failure -> None with
Some v ->
let al =
@@ -1389,13 +1362,13 @@ and parser_of_symbol : type s tr a.
kont al strm__
| _ -> al
in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
let al = ps [] strm__ in
let a = kont al strm__ in List.rev a)
| Slist1sep (symb, sep, true) ->
let ps = call_and_push (parser_of_symbol entry nlevn symb) in
let pt = parser_of_symbol entry nlevn sep in
- let rec kont al (strm__ : _ Stream.t) =
+ let rec kont al (strm__ : _ LStream.t) =
match try Some (pt strm__) with Stream.Failure -> None with
Some v ->
begin match
@@ -1412,35 +1385,35 @@ and parser_of_symbol : type s tr a.
end
| _ -> al
in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
let al = ps [] strm__ in
let a = kont al strm__ in List.rev a)
| Sopt s ->
let ps = parser_of_symbol entry nlevn s in
- (fun (strm__ : _ Stream.t) ->
+ (fun (strm__ : _ LStream.t) ->
match try Some (ps strm__) with Stream.Failure -> None with
Some a -> Some a
| _ -> None)
| Stree t ->
let pt = parser_of_tree entry 1 0 t in
- (fun (strm__ : _ Stream.t) ->
- let bp = Stream.count strm__ in
+ (fun (strm__ : _ LStream.t) ->
+ let bp = LStream.count strm__ in
let a = pt strm__ in
- let ep = Stream.count strm__ in
- let loc = loc_of_token_interval bp ep in a loc)
- | Snterm e -> (fun (strm__ : _ Stream.t) -> e.estart 0 strm__)
+ let ep = LStream.count strm__ in
+ let loc = LStream.interval_loc bp ep strm__ in a loc)
+ | Snterm e -> (fun (strm__ : _ LStream.t) -> e.estart 0 strm__)
| Snterml (e, l) ->
- (fun (strm__ : _ Stream.t) -> e.estart (level_number e l) strm__)
- | Sself -> (fun (strm__ : _ Stream.t) -> entry.estart 0 strm__)
- | Snext -> (fun (strm__ : _ Stream.t) -> entry.estart nlevn strm__)
+ (fun (strm__ : _ LStream.t) -> e.estart (level_number e l) strm__)
+ | Sself -> (fun (strm__ : _ LStream.t) -> entry.estart 0 strm__)
+ | Snext -> (fun (strm__ : _ LStream.t) -> entry.estart nlevn strm__)
| Stoken tok -> parser_of_token entry tok
and parser_of_token : type s a.
s ty_entry -> a pattern -> a parser_t =
fun entry tok ->
let f = L.tok_match tok in
fun strm ->
- match Stream.peek strm with
- Some tok -> let r = f tok in Stream.junk strm; r
+ match LStream.peek strm with
+ Some tok -> let r = f tok in LStream.junk strm; r
| None -> raise Stream.Failure
and parse_top_symb : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> a parser_t =
fun entry symb ->
@@ -1475,7 +1448,7 @@ and parse_top_symb : type s tr a. s ty_entry -> (s, tr, a) ty_symbol -> a parser
let rec start_parser_of_levels entry clevn =
function
- [] -> (fun levn (strm__ : _ Stream.t) -> raise Stream.Failure)
+ [] -> (fun levn (strm__ : _ LStream.t) -> raise Stream.Failure)
| Level lev :: levs ->
let p1 = start_parser_of_levels entry (succ clevn) levs in
match lev.lprefix with
@@ -1497,11 +1470,11 @@ let rec start_parser_of_levels entry clevn =
if levn > clevn then match strm with parser []
else
*)
- let (strm__ : _ Stream.t) = strm in
- let bp = Stream.count strm__ in
+ let (strm__ : _ LStream.t) = strm in
+ let bp = LStream.count strm__ in
let act = p2 strm__ in
- let ep = Stream.count strm__ in
- let a = act (loc_of_token_interval bp ep) in
+ let ep = LStream.count strm__ in
+ let a = act (LStream.interval_loc bp ep strm__) in
entry.econtinue levn bp a strm)
| _ ->
fun levn strm ->
@@ -1509,12 +1482,12 @@ let rec start_parser_of_levels entry clevn =
(* Skip rules before [levn] *)
p1 levn strm
else
- let (strm__ : _ Stream.t) = strm in
- let bp = Stream.count strm__ in
+ let (strm__ : _ LStream.t) = strm in
+ let bp = LStream.count strm__ in
match try Some (p2 strm__) with Stream.Failure -> None with
Some act ->
- let ep = Stream.count strm__ in
- let a = act (loc_of_token_interval bp ep) in
+ let ep = LStream.count strm__ in
+ let a = act (LStream.interval_loc bp ep strm__) in
entry.econtinue levn bp a strm
| _ -> p1 levn strm__
@@ -1532,7 +1505,7 @@ let rec start_parser_of_levels entry clevn =
let rec continue_parser_of_levels entry clevn =
function
- [] -> (fun levn bp a (strm__ : _ Stream.t) -> raise Stream.Failure)
+ [] -> (fun levn bp a (strm__ : _ LStream.t) -> raise Stream.Failure)
| Level lev :: levs ->
let p1 = continue_parser_of_levels entry (succ clevn) levs in
match lev.lsuffix with
@@ -1549,21 +1522,21 @@ let rec continue_parser_of_levels entry clevn =
(* Skip rules before [levn] *)
p1 levn bp a strm
else
- let (strm__ : _ Stream.t) = strm in
+ let (strm__ : _ LStream.t) = strm in
try p1 levn bp a strm__ with
Stream.Failure ->
let act = p2 strm__ in
- let ep = Stream.count strm__ in
- let a = act a (loc_of_token_interval bp ep) in
+ let ep = LStream.count strm__ in
+ let a = act a (LStream.interval_loc bp ep strm__) in
entry.econtinue levn bp a strm
let continue_parser_of_entry entry =
match entry.edesc with
Dlevels elev ->
let p = continue_parser_of_levels entry 0 elev in
- (fun levn bp a (strm__ : _ Stream.t) ->
+ (fun levn bp a (strm__ : _ LStream.t) ->
try p levn bp a strm__ with Stream.Failure -> a)
- | Dparser p -> fun levn bp a (strm__ : _ Stream.t) -> raise Stream.Failure
+ | Dparser p -> fun levn bp a (strm__ : _ LStream.t) -> raise Stream.Failure
let empty_entry ename levn strm =
raise (Stream.Error ("entry [" ^ ename ^ "] is empty"))
@@ -1572,7 +1545,7 @@ let start_parser_of_entry entry =
match entry.edesc with
Dlevels [] -> empty_entry entry.ename
| Dlevels elev -> start_parser_of_levels entry 0 elev
- | Dparser p -> fun levn strm -> p !floc strm
+ | Dparser p -> fun levn strm -> p strm
(* Extend syntax *)
@@ -1611,51 +1584,31 @@ let delete_rule entry sl =
module Parsable = struct
type t =
- { pa_chr_strm : char Stream.t
- ; pa_tok_strm : L.te Stream.t
- ; pa_loc_func : Plexing.location_function
+ { pa_tok_strm : L.te LStream.t
; lexer_state : L.State.t ref }
let parse_parsable entry p =
let efun = entry.estart 0 in
let ts = p.pa_tok_strm in
- let cs = p.pa_chr_strm in
- let fun_loc = p.pa_loc_func in
- let restore =
- let old_floc = !floc in
- let old_tc = !token_count in
- fun () -> floc := old_floc; token_count := old_tc
- in
let get_loc () =
- try
- let cnt = Stream.count ts in
- (* Ensure that the token at location cnt has been peeked so that
- the location function knows about it *)
- let _ = Stream.peek ts in
- let loc = fun_loc cnt in
- if !token_count - 1 <= cnt then loc
- else Loc.merge loc (fun_loc (!token_count - 1))
- with Failure _ -> Ploc.make_unlined (Stream.count cs, Stream.count cs + 1)
+ let loc = LStream.get_loc (LStream.count ts) ts in
+ let loc' = LStream.max_peek_loc ts in
+ Loc.merge loc loc'
in
- floc := fun_loc;
- token_count := 0;
- try let r = efun ts in restore (); r with
+ try efun ts with
Stream.Failure ->
let loc = get_loc () in
- restore ();
Loc.raise ~loc (Stream.Error ("illegal begin of " ^ entry.ename))
| Stream.Error _ as exc ->
- let loc = get_loc () in restore ();
+ let loc = get_loc () in
Loc.raise ~loc exc
| exc ->
let exc,info = Exninfo.capture exc in
- let loc = Stream.count cs, Stream.count cs + 1 in
- restore ();
(* If the original exn had a loc, keep it *)
let info =
match Loc.get_loc info with
| Some _ -> info
- | None -> Loc.add_loc info (Ploc.make_unlined loc)
+ | None -> Loc.add_loc info (get_loc ())
in
Exninfo.iraise (exc,info)
@@ -1670,12 +1623,12 @@ module Parsable = struct
L.State.drop ();
Exninfo.iraise (exn,info)
- let make ?loc cs =
+ let make ?source cs =
let lexer_state = ref (L.State.init ()) in
L.State.set !lexer_state;
- let (ts, lf) = L.tok_func ?loc cs in
+ let ts = L.tok_func ?source cs in
lexer_state := L.State.get ();
- {pa_chr_strm = cs; pa_tok_strm = ts; pa_loc_func = lf; lexer_state}
+ {pa_tok_strm = ts; lexer_state}
let comments p = L.State.get_comments !(p.lexer_state)
@@ -1686,7 +1639,7 @@ module Entry = struct
let make n =
{ ename = n; estart = empty_entry n;
econtinue =
- (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ (fun _ _ _ (strm__ : _ LStream.t) -> raise Stream.Failure);
edesc = Dlevels []}
let create = make
let parse (e : 'a t) p : 'a =
@@ -1694,11 +1647,12 @@ module Entry = struct
let parse_token_stream (e : 'a t) ts : 'a =
e.estart 0 ts
let name e = e.ename
- let of_parser n (p : Plexing.location_function -> te Stream.t -> 'a) : 'a t =
+ type 'a parser_fun = { parser_fun : te LStream.t -> 'a }
+ let of_parser n { parser_fun = (p : te LStream.t -> 'a) } : 'a t =
{ ename = n;
- estart = (fun _ -> p !floc);
+ estart = (fun _ -> p);
econtinue =
- (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ (fun _ _ _ (strm__ : _ LStream.t) -> raise Stream.Failure);
edesc = Dparser p}
let print ppf e = fprintf ppf "%a@." print_entry e
end
@@ -1780,8 +1734,8 @@ end
module Unsafe = struct
let clear_entry e =
- e.estart <- (fun _ (strm__ : _ Stream.t) -> raise Stream.Failure);
- e.econtinue <- (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+ e.estart <- (fun _ (strm__ : _ LStream.t) -> raise Stream.Failure);
+ e.econtinue <- (fun _ _ _ (strm__ : _ LStream.t) -> raise Stream.Failure);
match e.edesc with
Dlevels _ -> e.edesc <- Dlevels []
| Dparser _ -> ()
diff --git a/gramlib/grammar.mli b/gramlib/grammar.mli
index 33006f6f65..c1605486cf 100644
--- a/gramlib/grammar.mli
+++ b/gramlib/grammar.mli
@@ -27,7 +27,7 @@ module type S = sig
module Parsable : sig
type t
- val make : ?loc:Loc.t -> char Stream.t -> t
+ val make : ?source:Loc.source -> char Stream.t -> t
val comments : t -> ((int * int) * string) list
end
@@ -39,8 +39,9 @@ module type S = sig
val create : string -> 'a t (* compat *)
val parse : 'a t -> Parsable.t -> 'a
val name : 'a t -> string
- val of_parser : string -> (Plexing.location_function -> te Stream.t -> 'a) -> 'a t
- val parse_token_stream : 'a t -> te Stream.t -> 'a
+ type 'a parser_fun = { parser_fun : te LStream.t -> 'a }
+ val of_parser : string -> 'a parser_fun -> 'a t
+ val parse_token_stream : 'a t -> te LStream.t -> 'a
val print : Format.formatter -> 'a t -> unit
end
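For code that builds entries with Entry.of_parser, the interface change amounts to dropping the location-function argument and wrapping the parser in the new parser_fun record. Below is a minimal before/after sketch, mirroring the plugin updates later in this diff; the "dot" entry and its body are purely illustrative.

(* Before: the custom parser received a location function and a plain
   token Stream.t. *)
let dot_old =
  Pcoq.Entry.of_parser "dot"
    (fun _loc_func strm ->
       match Util.stream_nth 0 strm with
       | Tok.KEYWORD "." -> ()
       | _ -> raise Stream.Failure)

(* After: the parser works directly on the location-aware token stream
   and is passed as a record field. *)
let dot_new =
  Pcoq.Entry.(of_parser "dot"
    { parser_fun = fun strm ->
        match LStream.peek_nth 0 strm with
        | Tok.KEYWORD "." -> ()
        | _ -> raise Stream.Failure })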
diff --git a/gramlib/plexing.ml b/gramlib/plexing.ml
index ce3e38ff08..d6be29ccea 100644
--- a/gramlib/plexing.ml
+++ b/gramlib/plexing.ml
@@ -2,8 +2,7 @@
(* plexing.ml,v *)
(* Copyright (c) INRIA 2007-2017 *)
-type location_function = int -> Loc.t
-type 'te lexer_func = ?loc:Loc.t -> char Stream.t -> 'te Stream.t * location_function
+type 'te lexer_func = ?source:Loc.source -> char Stream.t -> 'te LStream.t
module type S = sig
type te
diff --git a/gramlib/plexing.mli b/gramlib/plexing.mli
index 0c190af635..84bdc53f0b 100644
--- a/gramlib/plexing.mli
+++ b/gramlib/plexing.mli
@@ -10,10 +10,8 @@
(** Lexer type *)
-type 'te lexer_func = ?loc:Loc.t -> char Stream.t -> 'te Stream.t * location_function
-and location_function = int -> Loc.t
- (** The type of a function giving the location of a token in the
- source from the token number in the stream (starting from zero). *)
+(** Returning a stream equipped with a location function *)
+type 'te lexer_func = ?source:Loc.source -> char Stream.t -> 'te LStream.t
module type S = sig
type te
diff --git a/gramlib/ploc.ml b/gramlib/ploc.ml
deleted file mode 100644
index 44ba0c1f44..0000000000
--- a/gramlib/ploc.ml
+++ /dev/null
@@ -1,13 +0,0 @@
-(* camlp5r *)
-(* ploc.ml,v *)
-(* Copyright (c) INRIA 2007-2017 *)
-
-open Loc
-
-let make_unlined (bp, ep) =
- {fname = InFile ""; line_nb = 1; bol_pos = 0; line_nb_last = -1; bol_pos_last = 0;
- bp = bp; ep = ep; }
-
-let dummy =
- {fname = InFile ""; line_nb = 1; bol_pos = 0; line_nb_last = -1; bol_pos_last = 0;
- bp = 0; ep = 0; }
diff --git a/gramlib/ploc.mli b/gramlib/ploc.mli
deleted file mode 100644
index fad883e11a..0000000000
--- a/gramlib/ploc.mli
+++ /dev/null
@@ -1,11 +0,0 @@
-(* camlp5r *)
-(* ploc.mli,v *)
-(* Copyright (c) INRIA 2007-2017 *)
-
-val make_unlined : int * int -> Loc.t
- (** [Ploc.make_unlined] is like [Ploc.make] except that the line number
- is not provided (to be used e.g. when the line number is unknown. *)
-
-val dummy : Loc.t
- (** [Ploc.dummy] is a dummy location, used in situations when location
- has no meaning. *)
diff --git a/parsing/cLexer.ml b/parsing/cLexer.ml
index d8d2f2a2ef..bb1797ee02 100644
--- a/parsing/cLexer.ml
+++ b/parsing/cLexer.ml
@@ -11,7 +11,6 @@
open Pp
open Util
open Tok
-open Gramlib
(* Dictionaries: trees annotated with string options, each node being a map
from chars to dictionaries (the subtrees). A trie, in other words. *)
@@ -115,7 +114,7 @@ let bad_token str = raise (Error.E (Bad_token str))
(* Update a loc without allocating an intermediate pair *)
let set_loc_pos loc bp ep =
- Ploc.sub loc (bp - loc.Loc.bp) (ep - bp)
+ Loc.sub loc (bp - loc.Loc.bp) (ep - bp)
(* Increase line number by 1 and update position of beginning of line *)
let bump_loc_line loc bol_pos =
@@ -216,7 +215,9 @@ let lookup_utf8 loc cs =
| Some ('\x80'..'\xFF' as c) -> lookup_utf8_tail loc c cs
| None -> EmptyStream
-let unlocated f x = f x
+let unlocated f x =
+ let dummy_loc = Loc.(initial ToplevelInput) in
+ f dummy_loc x
(** FIXME: should we still unloc the exception? *)
(* try f x with Loc.Exc_located (_, exc) -> raise exc *)
@@ -226,7 +227,7 @@ let check_keyword str =
Stream.junk s;
bad_token str
| _ ->
- match unlocated lookup_utf8 Ploc.dummy s with
+ match unlocated lookup_utf8 s with
| Utf8Token (_,n) -> njunk n s; loop_symb s
| AsciiChar -> Stream.junk s; loop_symb s
| EmptyStream -> ()
@@ -242,7 +243,7 @@ let check_ident str =
Stream.junk s;
loop_id true s
| _ ->
- match unlocated lookup_utf8 Ploc.dummy s with
+ match unlocated lookup_utf8 s with
| Utf8Token (st, n) when not intail && Unicode.is_valid_ident_initial st -> njunk n s; loop_id true s
| Utf8Token (st, n) when intail && Unicode.is_valid_ident_trailing st ->
njunk n s;
@@ -793,20 +794,6 @@ let next_token ~diff_mode loc s =
Printf.eprintf "(line %i, %i-%i)[%s]\n%!" (Ploc.line_nb loc) (Ploc.first_pos loc) (Ploc.last_pos loc) (Tok.to_string t);
r *)
-(* Location table system for creating tables associating a token count
- to its location in a char stream (the source) *)
-
-let locerr i =
- let m = "Lexer: location function called on token "^string_of_int i in
- invalid_arg m
-
-let loct_create () = Hashtbl.create 207
-
-let loct_func loct i =
- try Hashtbl.find loct i with Not_found -> locerr i
-
-let loct_add loct i loc = Hashtbl.add loct i loc
-
(** {6 The lexer of Coq} *)
(** Note: removing a token.
@@ -837,17 +824,13 @@ let token_text : type c. c Tok.p -> string = function
| PBULLET (Some s) -> "BULLET \"" ^ s ^ "\""
| PQUOTATION lbl -> "QUOTATION \"" ^ lbl ^ "\""
-let func next_token ?loc cs =
- let loct = loct_create () in
- let cur_loc = ref (Option.default Loc.(initial ToplevelInput) loc) in
- let ts =
- Stream.from
- (fun i ->
- let (tok, loc) = next_token !cur_loc cs in
- cur_loc := after loc;
- loct_add loct i loc; Some tok)
- in
- (ts, loct_func loct)
+let func next_token ?(source=Loc.ToplevelInput) cs =
+ let cur_loc = ref (Loc.initial source) in
+ LStream.from ~source
+ (fun i ->
+ let (tok, loc) = next_token !cur_loc cs in
+ cur_loc := after loc;
+ Some (tok,loc))
module MakeLexer (Diff : sig val mode : bool end) = struct
type te = Tok.t
diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml
index cc9e1bb31d..3393bdab7b 100644
--- a/parsing/pcoq.ml
+++ b/parsing/pcoq.ml
@@ -21,49 +21,49 @@ struct
let err () = raise Stream.Failure
- type t = Gramlib.Plexing.location_function -> int -> Tok.t Stream.t -> int option
+ type t = int -> Tok.t LStream.t -> int option
- let rec contiguous tok n m =
+ let rec contiguous n m strm =
n == m ||
- let (_, ep) = Loc.unloc (tok n) in
- let (bp, _) = Loc.unloc (tok (n + 1)) in
- Int.equal ep bp && contiguous tok (succ n) m
+ let (_, ep) = Loc.unloc (LStream.get_loc n strm) in
+ let (bp, _) = Loc.unloc (LStream.get_loc (n + 1) strm) in
+ Int.equal ep bp && contiguous (succ n) m strm
- let check_no_space tok m strm =
- let n = Stream.count strm in
- if contiguous tok n (n+m-1) then Some m else None
+ let check_no_space m strm =
+ let n = LStream.count strm in
+ if contiguous n (n+m-1) strm then Some m else None
let to_entry s (lk : t) =
- let run tok strm = match lk tok 0 strm with None -> err () | Some _ -> () in
- Entry.of_parser s run
+ let run strm = match lk 0 strm with None -> err () | Some _ -> () in
+ Entry.(of_parser s { parser_fun = run })
- let (>>) (lk1 : t) lk2 tok n strm = match lk1 tok n strm with
+ let (>>) (lk1 : t) lk2 n strm = match lk1 n strm with
| None -> None
- | Some n -> lk2 tok n strm
+ | Some n -> lk2 n strm
- let (<+>) (lk1 : t) lk2 tok n strm = match lk1 tok n strm with
- | None -> lk2 tok n strm
+ let (<+>) (lk1 : t) lk2 n strm = match lk1 n strm with
+ | None -> lk2 n strm
| Some n -> Some n
- let lk_empty tok n strm = Some n
+ let lk_empty n strm = Some n
- let lk_kw kw tok n strm = match stream_nth n strm with
+ let lk_kw kw n strm = match LStream.peek_nth n strm with
| Tok.KEYWORD kw' | Tok.IDENT kw' -> if String.equal kw kw' then Some (n + 1) else None
| _ -> None
- let lk_kws kws tok n strm = match stream_nth n strm with
+ let lk_kws kws n strm = match LStream.peek_nth n strm with
| Tok.KEYWORD kw | Tok.IDENT kw -> if List.mem_f String.equal kw kws then Some (n + 1) else None
| _ -> None
- let lk_ident tok n strm = match stream_nth n strm with
+ let lk_ident n strm = match LStream.peek_nth n strm with
| Tok.IDENT _ -> Some (n + 1)
| _ -> None
- let lk_ident_except idents tok n strm = match stream_nth n strm with
+ let lk_ident_except idents n strm = match LStream.peek_nth n strm with
| Tok.IDENT ident when not (List.mem_f String.equal ident idents) -> Some (n + 1)
| _ -> None
- let lk_nat tok n strm = match stream_nth n strm with
+ let lk_nat n strm = match LStream.peek_nth n strm with
| Tok.NUMBER p when NumTok.Unsigned.is_nat p -> Some (n + 1)
| _ -> None
@@ -191,9 +191,9 @@ let eoi_entry en =
(* Parse a string, does NOT check if the entire string was read
(use eoi_entry) *)
-let parse_string f ?loc x =
+let parse_string f ?source x =
let strm = Stream.of_string x in
- Entry.parse f (Parsable.make ?loc strm)
+ Entry.parse f (Parsable.make ?source strm)
(* universes not used by Coq build but still used by some plugins *)
type gram_universe = string
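The Lookahead combinators above no longer thread the location function, only the lookahead index and the LStream. The following small composition shows how they are meant to be used, in the spirit of lookup_at_as_comma in g_tactic.mlg; the entry name and token sequence are assumptions for illustration, not part of this patch.

let lookahead_at_nat =
  let open Pcoq.Lookahead in
  (* Succeeds only when the next tokens are the keyword "at" followed by a
     natural number; to_entry consumes nothing and yields a unit entry. *)
  to_entry "lookahead_at_nat" (lk_kw "at" >> lk_nat)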
diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli
index 06d05a4797..d9165ff5a6 100644
--- a/parsing/pcoq.mli
+++ b/parsing/pcoq.mli
@@ -119,7 +119,7 @@ end
(** Parse a string *)
-val parse_string : 'a Entry.t -> ?loc:Loc.t -> string -> 'a
+val parse_string : 'a Entry.t -> ?source:Loc.source -> string -> 'a
val eoi_entry : 'a Entry.t -> 'a Entry.t
type gram_universe [@@deprecated "Deprecated in 8.13"]
diff --git a/plugins/ltac/g_tactic.mlg b/plugins/ltac/g_tactic.mlg
index cb430efb40..9dec55e020 100644
--- a/plugins/ltac/g_tactic.mlg
+++ b/plugins/ltac/g_tactic.mlg
@@ -59,28 +59,28 @@ open Extraargs
(* idem for (x1..xn:t) [n^2 complexity but exceptional use] *)
let check_for_coloneq =
- Pcoq.Entry.of_parser "lpar_id_colon"
- (fun _ strm ->
+ Pcoq.Entry.(of_parser "lpar_id_colon"
+ { parser_fun = fun strm ->
let rec skip_to_rpar p n =
- match List.last (Stream.npeek n strm) with
+ match List.last (LStream.npeek n strm) with
| KEYWORD "(" -> skip_to_rpar (p+1) (n+1)
| KEYWORD ")" -> if Int.equal p 0 then n+1 else skip_to_rpar (p-1) (n+1)
| KEYWORD "." -> err ()
| _ -> skip_to_rpar p (n+1) in
let rec skip_names n =
- match List.last (Stream.npeek n strm) with
+ match List.last (LStream.npeek n strm) with
| IDENT _ | KEYWORD "_" -> skip_names (n+1)
| KEYWORD ":" -> skip_to_rpar 0 (n+1) (* skip a constr *)
| _ -> err () in
let rec skip_binders n =
- match List.last (Stream.npeek n strm) with
+ match List.last (LStream.npeek n strm) with
| KEYWORD "(" -> skip_binders (skip_names (n+1))
| IDENT _ | KEYWORD "_" -> skip_binders (n+1)
| KEYWORD ":=" -> ()
| _ -> err () in
- match stream_nth 0 strm with
+ match LStream.peek_nth 0 strm with
| KEYWORD "(" -> skip_binders 2
- | _ -> err ())
+ | _ -> err () })
let lookup_at_as_comma =
let open Pcoq.Lookahead in
diff --git a/plugins/ssr/ssrparser.mlg b/plugins/ssr/ssrparser.mlg
index ad85f68b03..04341fad91 100644
--- a/plugins/ssr/ssrparser.mlg
+++ b/plugins/ssr/ssrparser.mlg
@@ -249,25 +249,25 @@ let pr_ssrsimpl _ _ _ = pr_simpl
let wit_ssrsimplrep = add_genarg "ssrsimplrep" (fun env sigma -> pr_simpl)
-let test_ssrslashnum b1 b2 _ strm =
- match Util.stream_nth 0 strm with
+let test_ssrslashnum b1 b2 strm =
+ match LStream.peek_nth 0 strm with
| Tok.KEYWORD "/" ->
- (match Util.stream_nth 1 strm with
+ (match LStream.peek_nth 1 strm with
| Tok.NUMBER _ when b1 ->
- (match Util.stream_nth 2 strm with
+ (match LStream.peek_nth 2 strm with
| Tok.KEYWORD "=" | Tok.KEYWORD "/=" when not b2 -> ()
| Tok.KEYWORD "/" ->
if not b2 then () else begin
- match Util.stream_nth 3 strm with
+ match LStream.peek_nth 3 strm with
| Tok.NUMBER _ -> ()
| _ -> raise Stream.Failure
end
| _ -> raise Stream.Failure)
| Tok.KEYWORD "/" when not b1 ->
- (match Util.stream_nth 2 strm with
+ (match LStream.peek_nth 2 strm with
| Tok.KEYWORD "=" when not b2 -> ()
| Tok.NUMBER _ when b2 ->
- (match Util.stream_nth 3 strm with
+ (match LStream.peek_nth 3 strm with
| Tok.KEYWORD "=" -> ()
| _ -> raise Stream.Failure)
| _ when not b2 -> ()
@@ -275,10 +275,10 @@ let test_ssrslashnum b1 b2 _ strm =
| Tok.KEYWORD "=" when not b1 && not b2 -> ()
| _ -> raise Stream.Failure)
| Tok.KEYWORD "//" when not b1 ->
- (match Util.stream_nth 1 strm with
+ (match LStream.peek_nth 1 strm with
| Tok.KEYWORD "=" when not b2 -> ()
| Tok.NUMBER _ when b2 ->
- (match Util.stream_nth 2 strm with
+ (match LStream.peek_nth 2 strm with
| Tok.KEYWORD "=" -> ()
| _ -> raise Stream.Failure)
| _ when not b2 -> ()
@@ -290,23 +290,23 @@ let test_ssrslashnum11 = test_ssrslashnum true true
let test_ssrslashnum01 = test_ssrslashnum false true
let test_ssrslashnum00 = test_ssrslashnum false false
-let negate_parser f tok x =
- let rc = try Some (f tok x) with Stream.Failure -> None in
+let negate_parser f x =
+ let rc = try Some (f x) with Stream.Failure -> None in
match rc with
| None -> ()
| Some _ -> raise Stream.Failure
let test_not_ssrslashnum =
- Pcoq.Entry.of_parser
- "test_not_ssrslashnum" (negate_parser test_ssrslashnum10)
+ Pcoq.Entry.(of_parser
+ "test_not_ssrslashnum" { parser_fun = negate_parser test_ssrslashnum10 })
let test_ssrslashnum00 =
- Pcoq.Entry.of_parser "test_ssrslashnum01" test_ssrslashnum00
+ Pcoq.Entry.(of_parser "test_ssrslashnum01" { parser_fun = test_ssrslashnum00 })
let test_ssrslashnum10 =
- Pcoq.Entry.of_parser "test_ssrslashnum10" test_ssrslashnum10
+ Pcoq.Entry.(of_parser "test_ssrslashnum10" { parser_fun = test_ssrslashnum10 })
let test_ssrslashnum11 =
- Pcoq.Entry.of_parser "test_ssrslashnum11" test_ssrslashnum11
+ Pcoq.Entry.(of_parser "test_ssrslashnum11" { parser_fun = test_ssrslashnum11 })
let test_ssrslashnum01 =
- Pcoq.Entry.of_parser "test_ssrslashnum01" test_ssrslashnum01
+ Pcoq.Entry.(of_parser "test_ssrslashnum01" { parser_fun = test_ssrslashnum01 })
}
@@ -488,23 +488,23 @@ END
(* Old kinds of terms *)
-let input_ssrtermkind _ strm = match Util.stream_nth 0 strm with
+let input_ssrtermkind strm = match LStream.peek_nth 0 strm with
| Tok.KEYWORD "(" -> InParens
| Tok.KEYWORD "@" -> WithAt
| _ -> NoFlag
-let ssrtermkind = Pcoq.Entry.of_parser "ssrtermkind" input_ssrtermkind
+let ssrtermkind = Pcoq.Entry.(of_parser "ssrtermkind" { parser_fun = input_ssrtermkind })
(* New kinds of terms *)
-let input_term_annotation _ strm =
- match Stream.npeek 2 strm with
+let input_term_annotation strm =
+ match LStream.npeek 2 strm with
| Tok.KEYWORD "(" :: Tok.KEYWORD "(" :: _ -> `DoubleParens
| Tok.KEYWORD "(" :: _ -> `Parens
| Tok.KEYWORD "@" :: _ -> `At
| _ -> `None
let term_annotation =
- Pcoq.Entry.of_parser "term_annotation" input_term_annotation
+ Pcoq.Entry.(of_parser "term_annotation" { parser_fun = input_term_annotation })
(* terms *)
@@ -837,29 +837,29 @@ END
{
-let reject_ssrhid _ strm =
- match Util.stream_nth 0 strm with
+let reject_ssrhid strm =
+ match LStream.peek_nth 0 strm with
| Tok.KEYWORD "[" ->
- (match Util.stream_nth 1 strm with
+ (match LStream.peek_nth 1 strm with
| Tok.KEYWORD ":" -> raise Stream.Failure
| _ -> ())
| _ -> ()
-let test_nohidden = Pcoq.Entry.of_parser "test_ssrhid" reject_ssrhid
+let test_nohidden = Pcoq.Entry.(of_parser "test_ssrhid" { parser_fun = reject_ssrhid })
-let rec reject_binder crossed_paren k tok s =
+let rec reject_binder crossed_paren k s =
match
- try Some (Util.stream_nth k s)
+ try Some (LStream.peek_nth k s)
with Stream.Failure -> None
with
- | Some (Tok.KEYWORD "(") when not crossed_paren -> reject_binder true (k+1) tok s
- | Some (Tok.IDENT _) when crossed_paren -> reject_binder true (k+1) tok s
+ | Some (Tok.KEYWORD "(") when not crossed_paren -> reject_binder true (k+1) s
+ | Some (Tok.IDENT _) when crossed_paren -> reject_binder true (k+1) s
| Some (Tok.KEYWORD ":" | Tok.KEYWORD ":=") when crossed_paren ->
raise Stream.Failure
| Some (Tok.KEYWORD ")") when crossed_paren -> raise Stream.Failure
| _ -> if crossed_paren then () else raise Stream.Failure
-let _test_nobinder = Pcoq.Entry.of_parser "test_nobinder" (reject_binder false 0)
+let _test_nobinder = Pcoq.Entry.(of_parser "test_nobinder" { parser_fun = reject_binder false 0 })
}
@@ -1673,7 +1673,7 @@ let ssr_id_of_string loc s =
^ "Scripts with explicit references to anonymous variables are fragile."))
end; Id.of_string s
-let ssr_null_entry = Pcoq.Entry.of_parser "ssr_null" (fun _ _ -> ())
+let ssr_null_entry = Pcoq.Entry.(of_parser "ssr_null" { parser_fun = fun _ -> () })
}
@@ -2407,13 +2407,13 @@ let lbrace = Char.chr 123
(** Workaround to a limitation of coqpp *)
let test_ssr_rw_syntax =
- let test _ strm =
+ let test strm =
if not !ssr_rw_syntax then raise Stream.Failure else
if is_ssr_loaded () then () else
- match Util.stream_nth 0 strm with
+ match LStream.peek_nth 0 strm with
| Tok.KEYWORD key when List.mem key.[0] [lbrace; '['; '/'] -> ()
| _ -> raise Stream.Failure in
- Pcoq.Entry.of_parser "test_ssr_rw_syntax" test
+ Pcoq.Entry.(of_parser "test_ssr_rw_syntax" { parser_fun = test })
}
diff --git a/plugins/ssrmatching/g_ssrmatching.mlg b/plugins/ssrmatching/g_ssrmatching.mlg
index 7022949ab6..b917e43b7c 100644
--- a/plugins/ssrmatching/g_ssrmatching.mlg
+++ b/plugins/ssrmatching/g_ssrmatching.mlg
@@ -66,11 +66,11 @@ END
{
-let input_ssrtermkind _ strm = match Util.stream_nth 0 strm with
+let input_ssrtermkind strm = match LStream.peek_nth 0 strm with
| Tok.KEYWORD "(" -> InParens
| Tok.KEYWORD "@" -> WithAt
| _ -> NoFlag
-let ssrtermkind = Pcoq.Entry.of_parser "ssrtermkind" input_ssrtermkind
+let ssrtermkind = Pcoq.Entry.(of_parser "ssrtermkind" { parser_fun = input_ssrtermkind })
}
diff --git a/printing/proof_diffs.ml b/printing/proof_diffs.ml
index 9ba64717d9..804fbdea85 100644
--- a/printing/proof_diffs.ml
+++ b/printing/proof_diffs.ml
@@ -91,7 +91,7 @@ let tokenize_string s =
But I don't understand how it's used--it looks like things get appended to it but
it never gets cleared. *)
let rec stream_tok acc str =
- let e = Stream.next str in
+ let e = LStream.next str in
if Tok.(equal e EOI) then
List.rev acc
else
@@ -101,7 +101,7 @@ let tokenize_string s =
try
let istr = Stream.of_string s in
let lex = CLexer.LexerDiff.tok_func istr in
- let toks = stream_tok [] (fst lex) in
+ let toks = stream_tok [] lex in
CLexer.Lexer.State.set st;
toks
with exn ->
diff --git a/toplevel/coqloop.ml b/toplevel/coqloop.ml
index 4faecd2e62..0265c0063c 100644
--- a/toplevel/coqloop.ml
+++ b/toplevel/coqloop.ml
@@ -242,12 +242,12 @@ let set_prompt prompt =
(* Read the input stream until a dot is encountered *)
let parse_to_dot =
- let rec dot tok st = match Stream.next st with
+ let rec dot st = match LStream.next st with
| Tok.KEYWORD ("."|"...") -> ()
| Tok.EOI -> ()
- | _ -> dot tok st
+ | _ -> dot st
in
- Pcoq.Entry.of_parser "Coqtoplevel.dot" dot
+ Pcoq.Entry.(of_parser "Coqtoplevel.dot" { parser_fun = dot })
(* If an error occurred while parsing, we try to read the input until a dot
token is encountered.
diff --git a/toplevel/vernac.ml b/toplevel/vernac.ml
index 92e664d56b..917acfad51 100644
--- a/toplevel/vernac.ml
+++ b/toplevel/vernac.ml
@@ -88,7 +88,7 @@ let load_vernac_core ~echo ~check ~interactive ~state file =
let input_cleanup () = close_in in_chan; Option.iter close_in in_echo in
let in_pa =
- Pcoq.Parsable.make ~loc:(Loc.initial (Loc.InFile file))
+ Pcoq.Parsable.make ~source:(Loc.InFile file)
(Stream.of_channel in_chan) in
let open State in
diff --git a/vernac/pvernac.ml b/vernac/pvernac.ml
index a7de34dd09..bf0874c8e5 100644
--- a/vernac/pvernac.ml
+++ b/vernac/pvernac.ml
@@ -66,8 +66,8 @@ module Vernac_ =
| Some ename -> find_proof_mode ename
let command_entry =
- Pcoq.Entry.of_parser "command_entry"
- (fun _ strm -> Pcoq.Entry.parse_token_stream (select_tactic_entry !command_entry_ref) strm)
+ Pcoq.Entry.(of_parser "command_entry"
+ { parser_fun = (fun strm -> Pcoq.Entry.parse_token_stream (select_tactic_entry !command_entry_ref) strm) })
end
diff --git a/vernac/vernacinterp.ml b/vernac/vernacinterp.ml
index 4098401bf0..84f4421c2e 100644
--- a/vernac/vernacinterp.ml
+++ b/vernac/vernacinterp.ml
@@ -186,7 +186,7 @@ and vernac_load ~verbosely fname =
let input =
let longfname = Loadpath.locate_file fname in
let in_chan = Util.open_utf8_file_in longfname in
- Pcoq.Parsable.make ~loc:(Loc.initial (Loc.InFile longfname)) (Stream.of_channel in_chan) in
+ Pcoq.Parsable.make ~source:(Loc.InFile longfname) (Stream.of_channel in_chan) in
(* Parsing loop *)
let v_mod = if verbosely then Flags.verbosely else Flags.silently in
let parse_sentence proof_mode = Flags.with_option Flags.we_are_parsing