-rw-r--r--   gramlib/grammar.ml        35
-rw-r--r--   gramlib/grammar.mli       17
-rw-r--r--   parsing/cLexer.ml         34
-rw-r--r--   parsing/cLexer.mli        10
-rw-r--r--   parsing/pcoq.ml           72
-rw-r--r--   parsing/pcoq.mli           9
-rw-r--r--   printing/proof_diffs.ml    7
-rw-r--r--   toplevel/vernac.ml         2
8 files changed, 73 insertions, 113 deletions
diff --git a/gramlib/grammar.ml b/gramlib/grammar.ml
index 0eed42f290..ac6f752670 100644
--- a/gramlib/grammar.ml
+++ b/gramlib/grammar.ml
@@ -8,7 +8,18 @@ open Util
(* Functorial interface *)
-module type GLexerType = Plexing.Lexer
+module type GLexerType = sig
+ include Plexing.Lexer
+
+ module State : sig
+ type t
+ val init : unit -> t
+ val set : t -> unit
+ val get : unit -> t
+ val drop : unit -> unit
+ val get_comments : t -> ((int * int) * string) list
+ end
+end
type norec
type mayrec
@@ -20,6 +31,7 @@ module type S = sig
module Parsable : sig
type t
val make : ?loc:Loc.t -> char Stream.t -> t
+ val comments : t -> ((int * int) * string) list
end
val tokens : string -> (string option * int) list
@@ -1520,7 +1532,7 @@ module Parsable = struct
{ pa_chr_strm : char Stream.t
; pa_tok_strm : L.te Stream.t
; pa_loc_func : Plexing.location_function
- }
+ ; lexer_state : L.State.t ref }
let parse_parsable entry p =
let efun = entry.estart 0 in
@@ -1556,9 +1568,26 @@ module Parsable = struct
let loc = Stream.count cs, Stream.count cs + 1 in
restore (); Ploc.raise (Ploc.make_unlined loc) exc
+ let parse_parsable e p =
+ L.State.set !(p.lexer_state);
+ try
+ let c = parse_parsable e p in
+ p.lexer_state := L.State.get ();
+ c
+ with Ploc.Exc (loc,e) ->
+ L.State.drop ();
+ let loc' = Loc.get_loc (Exninfo.info e) in
+ let loc = match loc' with None -> loc | Some loc -> loc in
+ Loc.raise ~loc e
+
let make ?loc cs =
+ let lexer_state = ref (L.State.init ()) in
+ L.State.set !lexer_state;
let (ts, lf) = L.tok_func ?loc cs in
- {pa_chr_strm = cs; pa_tok_strm = ts; pa_loc_func = lf}
+ lexer_state := L.State.get ();
+ {pa_chr_strm = cs; pa_tok_strm = ts; pa_loc_func = lf; lexer_state}
+
+ let comments p = L.State.get_comments !(p.lexer_state)
end
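
Note: with this change, any lexer passed to Grammar.GMake must provide a State
submodule. A minimal sketch of what a lexer with no mutable state could supply
to satisfy the extended GLexerType signature (hypothetical module, for
illustration only):

  module DummyState = struct
    type t = unit                      (* nothing to save *)
    let init () = ()
    let set () = ()
    let get () = ()
    let drop () = ()
    let get_comments () = []           (* such a lexer records no comments *)
  end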
diff --git a/gramlib/grammar.mli b/gramlib/grammar.mli
index 4280181109..7738e32cab 100644
--- a/gramlib/grammar.mli
+++ b/gramlib/grammar.mli
@@ -15,9 +15,21 @@
rule "an entry cannot call an entry of another grammar" by
normal OCaml typing. *)
-module type GLexerType = Plexing.Lexer
- (** The input signature for the functor [Grammar.GMake]: [te] is the
+(** The input signature for the functor [Grammar.GMake]: [te] is the
type of the tokens. *)
+module type GLexerType = sig
+ include Plexing.Lexer
+
+ module State : sig
+ type t
+ val init : unit -> t
+ val set : t -> unit
+ val get : unit -> t
+ val drop : unit -> unit
+ val get_comments : t -> ((int * int) * string) list
+ end
+end
+
type norec
type mayrec
@@ -29,6 +41,7 @@ module type S = sig
module Parsable : sig
type t
val make : ?loc:Loc.t -> char Stream.t -> t
+ val comments : t -> ((int * int) * string) list
end
val tokens : string -> (string option * int) list
diff --git a/parsing/cLexer.ml b/parsing/cLexer.ml
index a39da96a53..85640cabba 100644
--- a/parsing/cLexer.ml
+++ b/parsing/cLexer.ml
@@ -392,22 +392,6 @@ let comments = ref []
let current_comment = Buffer.create 8192
let between_commands = ref true
-(* The state of the lexer visible from outside *)
-type lexer_state = int option * string * bool * ((int * int) * string) list
-
-let init_lexer_state () = (None,"",true,[])
-let set_lexer_state (o,s,b,c) =
- comment_begin := o;
- Buffer.clear current_comment; Buffer.add_string current_comment s;
- between_commands := b;
- comments := c
-let get_lexer_state () =
- (!comment_begin, Buffer.contents current_comment, !between_commands, !comments)
-let drop_lexer_state () =
- set_lexer_state (init_lexer_state ())
-
-let get_comment_state (_,_,_,c) = c
-
let real_push_char c = Buffer.add_char current_comment c
(* Add a char if it is between two commands, if it is a newline or
@@ -851,6 +835,24 @@ module MakeLexer (Diff : sig val mode : bool end) = struct
let tok_removing = (fun _ -> ())
let tok_match = Tok.match_pattern
let tok_text = token_text
+
+ (* The state of the lexer visible from outside *)
+ module State = struct
+
+ type t = int option * string * bool * ((int * int) * string) list
+
+ let init () = (None,"",true,[])
+ let set (o,s,b,c) =
+ comment_begin := o;
+ Buffer.clear current_comment; Buffer.add_string current_comment s;
+ between_commands := b;
+ comments := c
+ let get () =
+ (!comment_begin, Buffer.contents current_comment, !between_commands, !comments)
+ let drop () = set (init ())
+ let get_comments (_,_,_,c) = c
+
+ end
end
module Lexer = MakeLexer (struct let mode = false end)
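
Note: callers that previously used set_lexer_state/get_lexer_state now go
through CLexer.Lexer.State (or CLexer.LexerDiff.State). A sketch of the usual
save/restore bracket around a temporary lexing job, using only the functions
added above (the helper name is hypothetical):

  let with_saved_lexer_state f =
    let saved = CLexer.Lexer.State.get () in
    try
      let r = f () in
      CLexer.Lexer.State.set saved;    (* restore the caller's state *)
      r
    with e ->
      CLexer.Lexer.State.set saved;
      raise e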
diff --git a/parsing/cLexer.mli b/parsing/cLexer.mli
index 2c1284c4db..67b8310bde 100644
--- a/parsing/cLexer.mli
+++ b/parsing/cLexer.mli
@@ -63,15 +63,6 @@ module Error : sig
val to_string : t -> string
end
-(* Mainly for comments state, etc... *)
-type lexer_state
-
-val init_lexer_state : unit -> lexer_state
-val set_lexer_state : lexer_state -> unit
-val get_lexer_state : unit -> lexer_state
-val drop_lexer_state : unit -> unit
-val get_comment_state : lexer_state -> ((int * int) * string) list
-
(** Create a lexer. true enables alternate handling for computing diffs.
It ensures that, ignoring white space, the concatenated tokens equal the input
string. Specifically:
@@ -81,5 +72,6 @@ as if it was unquoted, possibly becoming multiple tokens
it was not in a comment, possibly becoming multiple tokens
- return any unrecognized Ascii or UTF-8 character as a string
*)
+
module LexerDiff :
Gramlib.Grammar.GLexerType with type te = Tok.t and type 'c pattern = 'c Tok.p
diff --git a/parsing/pcoq.ml b/parsing/pcoq.ml
index 8f3c1e029c..59f92e7790 100644
--- a/parsing/pcoq.ml
+++ b/parsing/pcoq.ml
@@ -14,77 +14,7 @@ open Genarg
open Gramlib
(** The parser of Coq *)
-
-module G : sig
-
- include Grammar.S
- with type te = Tok.t
- and type 'c pattern = 'c Tok.p
-
- val comment_state : Parsable.t -> ((int * int) * string) list
-
-end = struct
-
- module G_ = Grammar.GMake(CLexer.Lexer)
-
- type te = G_.te
- type 'c pattern = 'c G_.pattern
-
- type coq_parsable = G_.Parsable.t * CLexer.lexer_state ref
-
- let coq_parsable ?loc c =
- let state = ref (CLexer.init_lexer_state ()) in
- CLexer.set_lexer_state !state;
- let a = G_.Parsable.make ?loc c in
- state := CLexer.get_lexer_state ();
- (a,state)
-
- let comment_state (p,state) =
- CLexer.get_comment_state !state
-
- module Parsable = struct
- type t = coq_parsable
- let make = coq_parsable
- (* let comment_state = comment_state *)
- end
-
- let tokens = G_.tokens
-
- type 'a single_extend_statement = 'a G_.single_extend_statement
- type 'a extend_statement = 'a G_.extend_statement =
- { pos : Gramlib.Gramext.position option
- ; data : 'a single_extend_statement list
- }
-
- module Entry = struct
- include G_.Entry
-
- let parse e (p,state) =
- CLexer.set_lexer_state !state;
- try
- let c = G_.Entry.parse e p in
- state := CLexer.get_lexer_state ();
- c
- with Ploc.Exc (loc,e) ->
- CLexer.drop_lexer_state ();
- let loc' = Loc.get_loc (Exninfo.info e) in
- let loc = match loc' with None -> loc | Some loc -> loc in
- Loc.raise ~loc e
-
- end
-
- module Symbol = G_.Symbol
- module Rule = G_.Rule
- module Rules = G_.Rules
- module Production = G_.Production
- module Unsafe = G_.Unsafe
-
- let safe_extend = G_.safe_extend
- let safe_delete_rule = G_.safe_delete_rule
- let level_of_nonterm = G_.level_of_nonterm
- let generalize_symbol = G_.generalize_symbol
- let mk_rule = G_.mk_rule
-end
+module G = Grammar.GMake(CLexer.Lexer)
module Entry = struct
include G.Entry
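
Note: the comment-tracking plumbing that the deleted wrapper provided now lives
in the generic Parsable returned by the functor. A sketch of the Pcoq-level
usage, assuming an existing entry and that Entry.parse over a Parsable is still
exposed as before (the function name is hypothetical):

  let parse_with_comments entry (cs : char Stream.t) =
    let pa = Pcoq.G.Parsable.make cs in
    let ast = Pcoq.G.Entry.parse entry pa in
    ast, Pcoq.G.Parsable.comments pa   (* ((bp, ep), text) per comment *)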
diff --git a/parsing/pcoq.mli b/parsing/pcoq.mli
index 0352c1d55b..5d21b39dee 100644
--- a/parsing/pcoq.mli
+++ b/parsing/pcoq.mli
@@ -15,14 +15,9 @@ open Libnames
(** The parser of Coq *)
-module G : sig
-
- include Gramlib.Grammar.S
-
- val comment_state : Parsable.t -> ((int * int) * string) list
-
-end with type te = Tok.t and type 'a pattern = 'a Tok.p
+module G : Gramlib.Grammar.S with type te = Tok.t and type 'a pattern = 'a Tok.p
+(** Compatibility module, please avoid *)
module Entry : sig
type 'a t = 'a G.Entry.t
val create : string -> 'a t
diff --git a/printing/proof_diffs.ml b/printing/proof_diffs.ml
index fb91ea7b5c..9d75341795 100644
--- a/printing/proof_diffs.ml
+++ b/printing/proof_diffs.ml
@@ -98,18 +98,17 @@ let tokenize_string s =
else
stream_tok ((Tok.extract_string true e) :: acc) str
in
- let st = CLexer.get_lexer_state () in
+ let st = CLexer.Lexer.State.get () in
try
let istr = Stream.of_string s in
let lex = CLexer.LexerDiff.tok_func istr in
let toks = stream_tok [] (fst lex) in
- CLexer.set_lexer_state st;
+ CLexer.Lexer.State.set st;
toks
with exn ->
- CLexer.set_lexer_state st;
+ CLexer.Lexer.State.set st;
raise (Diff_Failure "Input string is not lexable");;
-
type hyp_info = {
idents: string list;
rhs_pp: Pp.t;
diff --git a/toplevel/vernac.ml b/toplevel/vernac.ml
index ce4b85c75f..f501e89ed5 100644
--- a/toplevel/vernac.ml
+++ b/toplevel/vernac.ml
@@ -100,7 +100,7 @@ let load_vernac_core ~echo ~check ~interactive ~state file =
with
| None ->
input_cleanup ();
- state, ids, Pcoq.G.comment_state in_pa
+ state, ids, Pcoq.G.Parsable.comments in_pa
| Some ast ->
(* Printing of AST for -compile-verbose *)
Option.iter (vernac_echo ?loc:ast.CAst.loc) in_echo;
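
Note on the value returned here: Pcoq.G.Parsable.comments gives back the
comments accumulated while parsing, each as a ((int * int) * string) pair,
presumably the begin/end character offsets of the comment plus its text. A
trivial hypothetical consumer, for illustration:

  let print_comments (cmts : ((int * int) * string) list) =
    List.iter
      (fun ((bp, ep), txt) -> Printf.printf "comment [%d-%d]: %s\n" bp ep txt)
      cmts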