aboutsummaryrefslogtreecommitdiff
path: root/gramlib
diff options
context:
space:
mode:
Diffstat (limited to 'gramlib')
-rw-r--r--gramlib/LICENSE29
-rw-r--r--gramlib/dune4
-rw-r--r--gramlib/gramext.ml461
-rw-r--r--gramlib/gramext.mli65
-rw-r--r--gramlib/gramlib.mllib4
-rw-r--r--gramlib/grammar.ml864
-rw-r--r--gramlib/grammar.mli83
-rw-r--r--gramlib/plexing.ml18
-rw-r--r--gramlib/plexing.mli37
-rw-r--r--gramlib/ploc.ml26
-rw-r--r--gramlib/ploc.mli40
11 files changed, 1631 insertions, 0 deletions
diff --git a/gramlib/LICENSE b/gramlib/LICENSE
new file mode 100644
index 0000000000..b696affde7
--- /dev/null
+++ b/gramlib/LICENSE
@@ -0,0 +1,29 @@
+gramlib was derived from Daniel de Rauglaudre's camlp5
+(https://github.com/camlp5/camlp5) whose licence follows:
+
+* Copyright (c) 2007-2017, INRIA (Institut National de Recherches en
+* Informatique et Automatique). All rights reserved.
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above copyright
+* notice, this list of conditions and the following disclaimer in the
+* documentation and/or other materials provided with the distribution.
+* * Neither the name of INRIA, nor the names of its contributors may be
+* used to endorse or promote products derived from this software without
+* specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY INRIA AND CONTRIBUTORS ``AS IS'' AND
+* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INRIA AND
+* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+* SUCH DAMAGE.
diff --git a/gramlib/dune b/gramlib/dune
new file mode 100644
index 0000000000..8ca6aff25a
--- /dev/null
+++ b/gramlib/dune
@@ -0,0 +1,4 @@
+(library
+ (name gramlib)
+ (public_name coq.gramlib)
+ (libraries coq.lib))
diff --git a/gramlib/gramext.ml b/gramlib/gramext.ml
new file mode 100644
index 0000000000..46c2688f05
--- /dev/null
+++ b/gramlib/gramext.ml
@@ -0,0 +1,461 @@
+(* camlp5r *)
+(* gramext.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+open Printf
+
+(* A parser over a token stream.  Results are untyped ([Obj.t]) because
+   grammar actions of heterogeneous types are stored uniformly and cast
+   back with the [app]/[gramext_action] identity coercions. *)
+type 'a parser_t = 'a Stream.t -> Obj.t
+
+(* A grammar: a use-count per token pattern (so keywords can be released
+   from the lexer when the last rule using them is deleted) plus the lexer. *)
+type 'te grammar =
+  { gtokens : (Plexing.pattern, int ref) Hashtbl.t;
+    glexer : 'te Plexing.lexer }
+
+(* A grammar entry (non-terminal).  [estart]/[econtinue] are the compiled
+   parsers for this entry and are recomputed when [edesc] changes. *)
+type 'te g_entry =
+  { egram : 'te grammar;
+    ename : string;
+    elocal : bool;
+    mutable estart : int -> 'te parser_t;
+    mutable econtinue : int -> int -> Obj.t -> 'te parser_t;
+    mutable edesc : 'te g_desc }
+and 'te g_desc =
+    Dlevels of 'te g_level list
+  | Dparser of 'te parser_t
+(* A precedence level: [lsuffix] holds rules starting with Sself (left
+   recursion), [lprefix] the others.  Both are factorized parsing trees. *)
+and 'te g_level =
+  { assoc : g_assoc;
+    lname : string option;
+    lsuffix : 'te g_tree;
+    lprefix : 'te g_tree }
+and g_assoc = NonA | RightA | LeftA
+(* Grammar symbols; the [bool] on the *sep variants allows a trailing
+   separator when true. *)
+and 'te g_symbol =
+  | Snterm of 'te g_entry
+  | Snterml of 'te g_entry * string
+  | Slist0 of 'te g_symbol
+  | Slist0sep of 'te g_symbol * 'te g_symbol * bool
+  | Slist1 of 'te g_symbol
+  | Slist1sep of 'te g_symbol * 'te g_symbol * bool
+  | Sopt of 'te g_symbol
+  | Sself
+  | Snext
+  | Stoken of Plexing.pattern
+  | Stree of 'te g_tree
+and g_action = Obj.t
+(* Factorized rules: a trie of symbols; [LocAct] carries the current action
+   and the actions it masked; [brother] is the next alternative. *)
+and 'te g_tree =
+    Node of 'te g_node
+  | LocAct of g_action * g_action list
+  | DeadEnd
+and 'te g_node =
+  { node : 'te g_symbol; son : 'te g_tree; brother : 'te g_tree }
+and err_fun = unit -> string
+
+(* Where to insert new levels relative to the existing ones. *)
+type position =
+    First
+  | Last
+  | Before of string
+  | After of string
+  | Level of string
+
+(* [derive_eps s] is true when symbol [s] can match the empty token
+   sequence (used to order alternatives when inserting into a tree). *)
+let rec derive_eps =
+  function
+    Slist0 _ -> true
+  | Slist0sep (_, _, _) -> true
+  | Sopt _ -> true
+  | Stree t -> tree_derive_eps t
+  | Slist1 _ | Slist1sep (_, _, _) | Snterm _ |
+    Snterml (_, _) | Snext | Sself | Stoken _ ->
+      false
+and tree_derive_eps =
+  function
+    LocAct (_, _) -> true
+  | Node {node = s; brother = bro; son = son} ->
+      derive_eps s && tree_derive_eps son || tree_derive_eps bro
+  | DeadEnd -> false
+
+(* Structural equality on symbols; entries compare by physical identity
+   ([==]) and two [Stree]s are never considered equal (no factorization). *)
+let rec eq_symbol s1 s2 =
+  match s1, s2 with
+    Snterm e1, Snterm e2 -> e1 == e2
+  | Snterml (e1, l1), Snterml (e2, l2) -> e1 == e2 && l1 = l2
+  | Slist0 s1, Slist0 s2 -> eq_symbol s1 s2
+  | Slist0sep (s1, sep1, b1), Slist0sep (s2, sep2, b2) ->
+      eq_symbol s1 s2 && eq_symbol sep1 sep2 && b1 = b2
+  | Slist1 s1, Slist1 s2 -> eq_symbol s1 s2
+  | Slist1sep (s1, sep1, b1), Slist1sep (s2, sep2, b2) ->
+      eq_symbol s1 s2 && eq_symbol sep1 sep2 && b1 = b2
+  | Sopt s1, Sopt s2 -> eq_symbol s1 s2
+  | Stree _, Stree _ -> false
+  | _ -> s1 = s2
+
+(* Ordering heuristic for tree insertion: keywords (tokens with an explicit
+   string) are tried before generic tokens, and tokens before non-tokens,
+   so that more specific alternatives win. *)
+let is_before s1 s2 =
+  match s1, s2 with
+    Stoken ("ANY", _), _ -> false
+  | _, Stoken ("ANY", _) -> true
+  | Stoken (_, s), Stoken (_, "") when s <> "" -> true
+  | Stoken _, Stoken _ -> false
+  | Stoken _, _ -> true
+  | _ -> false
+
+(* [insert_tree ~warning entry_name gsymbols action tree] adds the rule
+   [gsymbols -> action] to the factorized parsing tree [tree].  If an
+   identical rule already exists, its action is replaced (the old one is
+   kept in the [LocAct] history) and [warning], when provided, is called
+   with a "rule has been masked" message mentioning [entry_name]. *)
+let insert_tree ~warning entry_name gsymbols action tree =
+  let rec insert symbols tree =
+    match symbols with
+      s :: sl -> insert_in_tree s sl tree
+    | [] ->
+        match tree with
+          Node {node = s; son = son; brother = bro} ->
+            Node {node = s; son = son; brother = insert [] bro}
+        | LocAct (old_action, action_list) ->
+            begin match warning with
+            | None -> ()
+            | Some warn_fn ->
+              (* Fix: the condition was inverted ([<> ""] selected the empty
+                 string) and a leftover printf "%s" placeholder was embedded
+                 in the text.  Name the entry exactly when a name exists,
+                 as the original camlp5 code did with "in [%s], ". *)
+              let msg =
+                "<W> Grammar extension: " ^
+                (if entry_name = "" then "" else "in ["^entry_name^"], ") ^
+                "some rule has been masked" in
+              warn_fn msg
+            end;
+            LocAct (action, old_action :: action_list)
+        | DeadEnd -> LocAct (action, [])
+  and insert_in_tree s sl tree =
+    match try_insert s sl tree with
+      Some t -> t
+    | None -> Node {node = s; son = insert sl DeadEnd; brother = tree}
+  and try_insert s sl tree =
+    match tree with
+      Node {node = s1; son = son; brother = bro} ->
+        (* Share the node when the first symbol matches an existing one;
+           otherwise place the new branch according to specificity. *)
+        if eq_symbol s s1 then
+          let t = Node {node = s1; son = insert sl son; brother = bro} in
+          Some t
+        else if is_before s1 s || derive_eps s && not (derive_eps s1) then
+          let bro =
+            match try_insert s sl bro with
+              Some bro -> bro
+            | None -> Node {node = s; son = insert sl DeadEnd; brother = bro}
+          in
+          let t = Node {node = s1; son = son; brother = bro} in Some t
+        else
+          begin match try_insert s sl bro with
+            Some bro ->
+              let t = Node {node = s1; son = son; brother = bro} in Some t
+          | None -> None
+          end
+    | LocAct (_, _) | DeadEnd -> None
+  in
+  insert gsymbols tree
+
+(* [srules ~warning rl] packs a list of anonymous rules into a single
+   [Stree] symbol (used for inline rule groups). *)
+let srules ~warning rl =
+  let t =
+    List.fold_left
+      (fun tree (symbols, action) -> insert_tree ~warning "" symbols action tree)
+      DeadEnd rl
+  in
+  Stree t
+
+(* Does level [lev] carry the label [n]? *)
+let is_level_labelled n lev =
+  match lev.lname with
+    Some n1 -> n = n1
+  | None -> false
+
+(* Insert one rule into a level: into the suffix tree when [e1] is true
+   (the rule started with Sself), into the prefix tree otherwise. *)
+let insert_level ~warning entry_name e1 symbols action slev =
+  match e1 with
+    true ->
+      {assoc = slev.assoc; lname = slev.lname;
+       lsuffix = insert_tree ~warning entry_name symbols action slev.lsuffix;
+       lprefix = slev.lprefix}
+  | false ->
+      {assoc = slev.assoc; lname = slev.lname; lsuffix = slev.lsuffix;
+       lprefix = insert_tree ~warning entry_name symbols action slev.lprefix}
+
+(* Fresh level with the given label and associativity (default LeftA). *)
+let empty_lev lname assoc =
+  let assoc =
+    match assoc with
+      Some a -> a
+    | None -> LeftA
+  in
+  {assoc = assoc; lname = lname; lsuffix = DeadEnd; lprefix = DeadEnd}
+
+(* Reuse an existing level [lev] (named [n]) for new rules; warns when the
+   extension tries to change its associativity or re-label it, keeping the
+   original associativity/label semantics of the level intact otherwise. *)
+let change_lev ~warning lev n lname assoc =
+  let a =
+    match assoc with
+      None -> lev.assoc
+    | Some a ->
+        if a <> lev.assoc then
+          begin
+            match warning with
+            | None -> ()
+            | Some warn_fn ->
+              warn_fn ("<W> Changing associativity of level \""^n^"\"")
+          end;
+        a
+  in
+  begin match lname with
+    Some n ->
+      if lname <> lev.lname then
+        begin match warning with
+        | None -> ()
+        | Some warn_fn ->
+          warn_fn ("<W> Level label \""^n^"\" ignored")
+        end;
+  | None -> ()
+  end;
+  {assoc = a; lname = lev.lname; lsuffix = lev.lsuffix; lprefix = lev.lprefix}
+
+(* [get_level ~warning entry position levs] splits the level list at the
+   requested [position], returning (levels before, level constructor,
+   levels after).  The constructor has the same signature as [empty_lev];
+   for [Level n] / the default top position it is a partial application of
+   [change_lev] on the existing level.  Fails loudly (stderr + [Failure])
+   when a named level does not exist. *)
+let get_level ~warning entry position levs =
+  match position with
+    Some First -> [], empty_lev, levs
+  | Some Last -> levs, empty_lev, []
+  | Some (Level n) ->
+      let rec get =
+        function
+          [] ->
+            eprintf "No level labelled \"%s\" in entry \"%s\"\n" n
+              entry.ename;
+            flush stderr;
+            failwith "Grammar.extend"
+        | lev :: levs ->
+            if is_level_labelled n lev then [], change_lev ~warning lev n, levs
+            else
+              let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+      in
+      get levs
+  | Some (Before n) ->
+      let rec get =
+        function
+          [] ->
+            eprintf "No level labelled \"%s\" in entry \"%s\"\n" n
+              entry.ename;
+            flush stderr;
+            failwith "Grammar.extend"
+        | lev :: levs ->
+            if is_level_labelled n lev then [], empty_lev, lev :: levs
+            else
+              let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+      in
+      get levs
+  | Some (After n) ->
+      let rec get =
+        function
+          [] ->
+            eprintf "No level labelled \"%s\" in entry \"%s\"\n" n
+              entry.ename;
+            flush stderr;
+            failwith "Grammar.extend"
+        | lev :: levs ->
+            if is_level_labelled n lev then [lev], empty_lev, levs
+            else
+              let (levs1, rlev, levs2) = get levs in lev :: levs1, rlev, levs2
+      in
+      get levs
+  | None ->
+      match levs with
+        lev :: levs -> [], change_lev ~warning lev "<top>", levs
+      | [] -> [], empty_lev, []
+
+(* Normalize a direct self-reference into [Sself]. *)
+let change_to_self entry =
+  function
+    Snterm e when e == entry -> Sself
+  | x -> x
+
+(* Strip a leading [Sself]; the boolean tells the caller whether the rule
+   goes into the suffix tree.  NOTE(review): [entry] is unused here. *)
+let get_initial entry =
+  function
+    Sself :: symbols -> true, symbols
+  | symbols -> false, symbols
+
+(* Register every token appearing in [symbols] with the grammar: notify the
+   lexer ([tok_using], which may add a keyword) and bump the use count in
+   [gram.gtokens].  "ANY" tokens are never registered. *)
+let insert_tokens gram symbols =
+  let rec insert =
+    function
+    | Slist0 s -> insert s
+    | Slist1 s -> insert s
+    | Slist0sep (s, t, _) -> insert s; insert t
+    | Slist1sep (s, t, _) -> insert s; insert t
+    | Sopt s -> insert s
+    | Stree t -> tinsert t
+    | Stoken ("ANY", _) -> ()
+    | Stoken tok ->
+        gram.glexer.Plexing.tok_using tok;
+        let r =
+          try Hashtbl.find gram.gtokens tok with
+            Not_found -> let r = ref 0 in Hashtbl.add gram.gtokens tok r; r
+        in
+        incr r
+    | Snterm _ | Snterml (_, _) | Snext | Sself -> ()
+  and tinsert =
+    function
+      Node {node = s; brother = bro; son = son} ->
+        insert s; tinsert bro; tinsert son
+    | LocAct (_, _) | DeadEnd -> ()
+  in
+  List.iter insert symbols
+
+(* Build the new level list of [entry] after extending it with [rules] at
+   [position].  Only [Dlevels] entries are extensible; the first level spec
+   may reuse an existing level (via [get_level]), subsequent ones always
+   create fresh levels ([empty_lev]). *)
+let levels_of_rules ~warning entry position rules =
+  let elev =
+    match entry.edesc with
+      Dlevels elev -> elev
+    | Dparser _ ->
+        eprintf "Error: entry not extensible: \"%s\"\n" entry.ename;
+        flush stderr;
+        failwith "Grammar.extend"
+  in
+  if rules = [] then elev
+  else
+    let (levs1, make_lev, levs2) = get_level ~warning entry position elev in
+    let (levs, _) =
+      List.fold_left
+        (fun (levs, make_lev) (lname, assoc, level) ->
+           let lev = make_lev lname assoc in
+           let lev =
+             List.fold_left
+               (fun lev (symbols, action) ->
+                  let symbols = List.map (change_to_self entry) symbols in
+                  let (e1, symbols) = get_initial entry symbols in
+                  insert_tokens entry.egram symbols;
+                  insert_level ~warning entry.ename e1 symbols action lev)
+               lev level
+           in
+           lev :: levs, empty_lev)
+        ([], make_lev) rules
+    in
+    levs1 @ List.rev levs @ levs2
+
+(* Name-based symbol equality used for rule deletion: unlike [eq_symbol],
+   entries compare by name, [Sself] matches a reference to [entry] by name,
+   and [Stree]s are compared structurally. *)
+let logically_eq_symbols entry =
+  let rec eq_symbols s1 s2 =
+    match s1, s2 with
+      Snterm e1, Snterm e2 -> e1.ename = e2.ename
+    | Snterm e1, Sself -> e1.ename = entry.ename
+    | Sself, Snterm e2 -> entry.ename = e2.ename
+    | Snterml (e1, l1), Snterml (e2, l2) -> e1.ename = e2.ename && l1 = l2
+    | Slist0 s1, Slist0 s2 -> eq_symbols s1 s2
+    | Slist0sep (s1, sep1, b1), Slist0sep (s2, sep2, b2) ->
+        eq_symbols s1 s2 && eq_symbols sep1 sep2 && b1 = b2
+    | Slist1 s1, Slist1 s2 -> eq_symbols s1 s2
+    | Slist1sep (s1, sep1, b1), Slist1sep (s2, sep2, b2) ->
+        eq_symbols s1 s2 && eq_symbols sep1 sep2 && b1 = b2
+    | Sopt s1, Sopt s2 -> eq_symbols s1 s2
+    | Stree t1, Stree t2 -> eq_trees t1 t2
+    | _ -> s1 = s2
+  and eq_trees t1 t2 =
+    match t1, t2 with
+      Node n1, Node n2 ->
+        eq_symbols n1.node n2.node && eq_trees n1.son n2.son &&
+        eq_trees n1.brother n2.brother
+    | (LocAct (_, _) | DeadEnd), (LocAct (_, _) | DeadEnd) -> true
+    | _ -> false
+  in
+  eq_symbols
+
+(* [delete_rule_in_tree] returns
+   [Some (dsl, t)] if success
+   [dsl] =
+      Some (list of deleted nodes) if branch deleted
+      None if action replaced by previous version of action
+   [t] = remaining tree
+   [None] if failure *)
+
+let delete_rule_in_tree entry =
+  let rec delete_in_tree symbols tree =
+    match symbols, tree with
+      s :: sl, Node n ->
+        if logically_eq_symbols entry s n.node then delete_son sl n
+        else
+          begin match delete_in_tree symbols n.brother with
+            Some (dsl, t) ->
+              Some (dsl, Node {node = n.node; son = n.son; brother = t})
+          | None -> None
+          end
+    | s :: sl, _ -> None
+    | [], Node n ->
+        begin match delete_in_tree [] n.brother with
+          Some (dsl, t) ->
+            Some (dsl, Node {node = n.node; son = n.son; brother = t})
+        | None -> None
+        end
+    | [], DeadEnd -> None
+    | [], LocAct (_, []) -> Some (Some [], DeadEnd)
+      (* A masked rule exists: restore the previous action instead of
+         deleting the branch. *)
+    | [], LocAct (_, action :: list) -> Some (None, LocAct (action, list))
+  and delete_son sl n =
+    match delete_in_tree sl n.son with
+      Some (Some dsl, DeadEnd) -> Some (Some (n.node :: dsl), n.brother)
+    | Some (Some dsl, t) ->
+        let t = Node {node = n.node; son = t; brother = n.brother} in
+        Some (Some (n.node :: dsl), t)
+    | Some (None, t) ->
+        let t = Node {node = n.node; son = t; brother = n.brother} in
+        Some (None, t)
+    | None -> None
+  in
+  delete_in_tree
+
+(* Decrement the use count of every token in a deleted symbol; when a count
+   reaches 0 the token is dropped from the table and the lexer is told via
+   [tok_removing] (so it can forget a keyword). *)
+let rec decr_keyw_use gram =
+  function
+    Stoken tok ->
+      let r = Hashtbl.find gram.gtokens tok in
+      decr r;
+      if !r == 0 then
+        begin
+          Hashtbl.remove gram.gtokens tok;
+          gram.glexer.Plexing.tok_removing tok
+        end
+  | Slist0 s -> decr_keyw_use gram s
+  | Slist1 s -> decr_keyw_use gram s
+  | Slist0sep (s1, s2, _) -> decr_keyw_use gram s1; decr_keyw_use gram s2
+  | Slist1sep (s1, s2, _) -> decr_keyw_use gram s1; decr_keyw_use gram s2
+  | Sopt s -> decr_keyw_use gram s
+  | Stree t -> decr_keyw_use_in_tree gram t
+  | Sself | Snext | Snterm _ | Snterml (_, _) -> ()
+and decr_keyw_use_in_tree gram =
+  function
+    DeadEnd | LocAct (_, _) -> ()
+  | Node n ->
+      decr_keyw_use gram n.node;
+      decr_keyw_use_in_tree gram n.son;
+      decr_keyw_use_in_tree gram n.brother
+
+(* Delete rule [symbols] from the suffix trees of [entry]'s levels; a level
+   left completely empty is removed.  @raise Not_found if no level holds
+   the rule. *)
+let rec delete_rule_in_suffix entry symbols =
+  function
+    lev :: levs ->
+      begin match delete_rule_in_tree entry symbols lev.lsuffix with
+        Some (dsl, t) ->
+          begin match dsl with
+            Some dsl -> List.iter (decr_keyw_use entry.egram) dsl
+          | None -> ()
+          end;
+          begin match t with
+            DeadEnd when lev.lprefix == DeadEnd -> levs
+          | _ ->
+              let lev =
+                {assoc = lev.assoc; lname = lev.lname; lsuffix = t;
+                 lprefix = lev.lprefix}
+              in
+              lev :: levs
+          end
+      | None ->
+          let levs = delete_rule_in_suffix entry symbols levs in lev :: levs
+      end
+  | [] -> raise Not_found
+
+(* Same as [delete_rule_in_suffix] but for the prefix trees. *)
+let rec delete_rule_in_prefix entry symbols =
+  function
+    lev :: levs ->
+      begin match delete_rule_in_tree entry symbols lev.lprefix with
+        Some (dsl, t) ->
+          begin match dsl with
+            Some dsl -> List.iter (decr_keyw_use entry.egram) dsl
+          | None -> ()
+          end;
+          begin match t with
+            DeadEnd when lev.lsuffix == DeadEnd -> levs
+          | _ ->
+              let lev =
+                {assoc = lev.assoc; lname = lev.lname; lsuffix = lev.lsuffix;
+                 lprefix = t}
+              in
+              lev :: levs
+          end
+      | None ->
+          let levs = delete_rule_in_prefix entry symbols levs in lev :: levs
+      end
+  | [] -> raise Not_found
+
+(* Entry point for rule deletion: rules starting with Sself (or an explicit
+   self-reference) live in the suffix trees, all others in the prefix trees. *)
+let delete_rule_in_level_list entry symbols levs =
+  match symbols with
+    Sself :: symbols -> delete_rule_in_suffix entry symbols levs
+  | Snterm e :: symbols when e == entry ->
+      delete_rule_in_suffix entry symbols levs
+  | _ -> delete_rule_in_prefix entry symbols levs
diff --git a/gramlib/gramext.mli b/gramlib/gramext.mli
new file mode 100644
index 0000000000..f1e294fb4c
--- /dev/null
+++ b/gramlib/gramext.mli
@@ -0,0 +1,65 @@
+(* camlp5r *)
+(* gramext.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** A parser over a token stream; results are untyped ([Obj.t]). *)
+type 'a parser_t = 'a Stream.t -> Obj.t
+
+(** A grammar: token use counts plus the lexer in use. *)
+type 'te grammar =
+  { gtokens : (Plexing.pattern, int ref) Hashtbl.t;
+    glexer : 'te Plexing.lexer }
+
+(** A grammar entry (non-terminal) with its compiled parsers. *)
+type 'te g_entry =
+  { egram : 'te grammar;
+    ename : string;
+    elocal : bool;
+    mutable estart : int -> 'te parser_t;
+    mutable econtinue : int -> int -> Obj.t -> 'te parser_t;
+    mutable edesc : 'te g_desc }
+and 'te g_desc =
+    Dlevels of 'te g_level list
+  | Dparser of 'te parser_t
+and 'te g_level =
+  { assoc : g_assoc;
+    lname : string option;
+    lsuffix : 'te g_tree;
+    lprefix : 'te g_tree }
+and g_assoc = NonA | RightA | LeftA
+and 'te g_symbol =
+  | Snterm of 'te g_entry
+  | Snterml of 'te g_entry * string
+  | Slist0 of 'te g_symbol
+  | Slist0sep of 'te g_symbol * 'te g_symbol * bool
+  | Slist1 of 'te g_symbol
+  | Slist1sep of 'te g_symbol * 'te g_symbol * bool
+  | Sopt of 'te g_symbol
+  | Sself
+  | Snext
+  | Stoken of Plexing.pattern
+  | Stree of 'te g_tree
+and g_action = Obj.t
+and 'te g_tree =
+    Node of 'te g_node
+  | LocAct of g_action * g_action list
+  | DeadEnd
+and 'te g_node =
+  { node : 'te g_symbol; son : 'te g_tree; brother : 'te g_tree }
+and err_fun = unit -> string
+
+(** Where to insert new levels relative to existing ones. *)
+type position =
+    First
+  | Last
+  | Before of string
+  | After of string
+  | Level of string
+
+(** Build the new level list of an entry after an extension.
+    @raise Failure if the entry is not extensible or a named level
+    is missing. *)
+val levels_of_rules : warning:(string -> unit) option ->
+  'te g_entry -> position option ->
+    (string option * g_assoc option * ('te g_symbol list * g_action) list)
+      list ->
+    'te g_level list
+
+(** Pack a list of anonymous rules into a single [Stree] symbol. *)
+val srules : warning:(string -> unit) option -> ('te g_symbol list * g_action) list -> 'te g_symbol
+
+(** Structural symbol equality (entries by physical identity). *)
+val eq_symbol : 'a g_symbol -> 'a g_symbol -> bool
+
+(** Delete a rule from a level list. @raise Not_found if absent. *)
+val delete_rule_in_level_list :
+  'te g_entry -> 'te g_symbol list -> 'te g_level list -> 'te g_level list
diff --git a/gramlib/gramlib.mllib b/gramlib/gramlib.mllib
new file mode 100644
index 0000000000..4c915b2b05
--- /dev/null
+++ b/gramlib/gramlib.mllib
@@ -0,0 +1,4 @@
+Ploc
+Plexing
+Gramext
+Grammar
diff --git a/gramlib/grammar.ml b/gramlib/grammar.ml
new file mode 100644
index 0000000000..0ad11d075f
--- /dev/null
+++ b/gramlib/grammar.ml
@@ -0,0 +1,864 @@
+(* camlp5r *)
+(* grammar.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+open Gramext
+open Format
+
+(* Coerce any value into the untyped action representation. *)
+external gramext_action : 'a -> g_action = "%identity"
+
+(* Expand a factorized tree back into the list of its rules (each rule a
+   list of symbols); used only for printing. *)
+let rec flatten_tree =
+  function
+    DeadEnd -> []
+  | LocAct (_, _) -> [[]]
+  | Node {node = n; brother = b; son = s} ->
+      List.map (fun l -> n :: l) (flatten_tree s) @ flatten_tree b
+
+(* When true, only escape characters that break double-quoted output,
+   leaving multi-byte UTF-8 sequences intact. *)
+let utf8_print = ref true
+
+(* Escape a string for display without mangling UTF-8: unlike
+   [String.escaped], bytes >= 0x80 are passed through unchanged. *)
+let utf8_string_escaped s =
+  let b = Buffer.create (String.length s) in
+  let rec loop i =
+    if i = String.length s then Buffer.contents b
+    else
+      begin
+        begin match s.[i] with
+          '"' -> Buffer.add_string b "\\\""
+        | '\\' -> Buffer.add_string b "\\\\"
+        | '\n' -> Buffer.add_string b "\\n"
+        | '\t' -> Buffer.add_string b "\\t"
+        | '\r' -> Buffer.add_string b "\\r"
+        | '\b' -> Buffer.add_string b "\\b"
+        | c -> Buffer.add_char b c
+        end;
+        loop (i + 1)
+      end
+  in
+  loop 0
+
+(* Escape for display, honouring [utf8_print]. *)
+let string_escaped s =
+  if !utf8_print then utf8_string_escaped s else String.escaped s
+
+(* Print a string in double quotes, escaped. *)
+let print_str ppf s = fprintf ppf "\"%s\"" (string_escaped s)
+
+(* Pretty-print a grammar symbol in EXTEND-like concrete syntax
+   (LIST0/LIST1/SEP/OPT/SELF/NEXT...).  [print_symbol1] is the atomic
+   form, parenthesizing compound symbols. *)
+let rec print_symbol ppf =
+  function
+  | Slist0 s -> fprintf ppf "LIST0 %a" print_symbol1 s
+  | Slist0sep (s, t, osep) ->
+      fprintf ppf "LIST0 %a SEP %a%s" print_symbol1 s print_symbol1 t
+        (if osep then " OPT_SEP" else "")
+  | Slist1 s -> fprintf ppf "LIST1 %a" print_symbol1 s
+  | Slist1sep (s, t, osep) ->
+      fprintf ppf "LIST1 %a SEP %a%s" print_symbol1 s print_symbol1 t
+        (if osep then " OPT_SEP" else "")
+  | Sopt s -> fprintf ppf "OPT %a" print_symbol1 s
+  | Stoken (con, prm) when con <> "" && prm <> "" ->
+      fprintf ppf "%s@ %a" con print_str prm
+  | Snterml (e, l) ->
+      fprintf ppf "%s%s@ LEVEL@ %a" e.ename (if e.elocal then "*" else "")
+        print_str l
+  | Snterm _ | Snext | Sself | Stoken _ | Stree _ as s ->
+      print_symbol1 ppf s
+and print_symbol1 ppf =
+  function
+  | Snterm e -> fprintf ppf "%s%s" e.ename (if e.elocal then "*" else "")
+  | Sself -> pp_print_string ppf "SELF"
+  | Snext -> pp_print_string ppf "NEXT"
+  | Stoken ("", s) -> print_str ppf s
+  | Stoken (con, "") -> pp_print_string ppf con
+  | Stree t -> print_level ppf pp_print_space (flatten_tree t)
+  | Snterml (_, _) | Slist0 _ | Slist0sep (_, _, _) |
+    Slist1 _ | Slist1sep (_, _, _) | Sopt _ | Stoken _ as s ->
+      fprintf ppf "(%a)" print_symbol s
+(* Print one rule as "sym1; sym2; ..." — the fold threads a separator
+   printer that is a no-op before the first element. *)
+and print_rule ppf symbols =
+  fprintf ppf "@[<hov 0>";
+  let _ =
+    List.fold_left
+      (fun sep symbol ->
+         fprintf ppf "%t%a" sep print_symbol symbol;
+         fun ppf -> fprintf ppf ";@ ")
+      (fun ppf -> ()) symbols
+  in
+  fprintf ppf "@]"
+(* Print a level's rules as "[ rule | rule | ... ]". *)
+and print_level ppf pp_print_space rules =
+  fprintf ppf "@[<hov 0>[ ";
+  let _ =
+    List.fold_left
+      (fun sep rule ->
+         fprintf ppf "%t%a" sep print_rule rule;
+         fun ppf -> fprintf ppf "%a| " pp_print_space ())
+      (fun ppf -> ()) rules
+  in
+  fprintf ppf " ]@]"
+
+(* Print every level: label, associativity, then the rules (suffix rules
+   re-prefixed with SELF so they read like the original EXTEND source). *)
+let print_levels ppf elev =
+  let _ =
+    List.fold_left
+      (fun sep lev ->
+         let rules =
+           List.map (fun t -> Sself :: t) (flatten_tree lev.lsuffix) @
+           flatten_tree lev.lprefix
+         in
+         fprintf ppf "%t@[<hov 2>" sep;
+         begin match lev.lname with
+           Some n -> fprintf ppf "%a@;<1 2>" print_str n
+         | None -> ()
+         end;
+         begin match lev.assoc with
+           LeftA -> fprintf ppf "LEFTA"
+         | RightA -> fprintf ppf "RIGHTA"
+         | NonA -> fprintf ppf "NONA"
+         end;
+         fprintf ppf "@]@;<1 2>";
+         print_level ppf pp_force_newline rules;
+         fun ppf -> fprintf ppf "@,| ")
+      (fun ppf -> ()) elev
+  in
+  ()
+
+(* Print a whole entry; [Dparser] entries are opaque. *)
+let print_entry ppf e =
+  fprintf ppf "@[<v 0>[ ";
+  begin match e.edesc with
+    Dlevels elev -> print_levels ppf elev
+  | Dparser _ -> fprintf ppf "<parser>"
+  end;
+  fprintf ppf " ]@]"
+
+(* Hook set by the parsing machinery: maps a token index in the current
+   stream to its source location. *)
+let floc = ref (fun _ -> failwith "internal error when computing location")
+
+(* Location spanning tokens [bp..ep-1]; an empty interval yields a zero
+   width location after the previous token (or a dummy at start of input).
+   NOTE(review): mixes [Ploc] and [Loc] — presumably [Ploc.t] aliases
+   Coq's [Loc.t] (the dune file links coq.lib); confirm in ploc.mli. *)
+let loc_of_token_interval bp ep =
+  if bp == ep then
+    if bp == 0 then Ploc.dummy else Ploc.after (!floc (bp - 1)) 0 1
+  else
+    let loc1 = !floc bp in let loc2 = !floc (pred ep) in Loc.merge loc1 loc2
+
+(* Human-readable name of a symbol, for error messages. *)
+let name_of_symbol entry =
+  function
+    Snterm e -> "[" ^ e.ename ^ "]"
+  | Snterml (e, l) -> "[" ^ e.ename ^ " level " ^ l ^ "]"
+  | Sself | Snext -> "[" ^ entry.ename ^ "]"
+  | Stoken tok -> entry.egram.glexer.Plexing.tok_text tok
+  | _ -> "???"
+
+(* Collect a maximal straight-line chain of token nodes starting at [tree]
+   (each node a lone [Stoken] with no alternative); [None] when the chain
+   has fewer than two tokens. *)
+let rec get_token_list entry rev_tokl last_tok tree =
+  match tree with
+    Node {node = Stoken tok; son = son; brother = DeadEnd} ->
+      get_token_list entry (last_tok :: rev_tokl) tok son
+  | _ -> if rev_tokl = [] then None else Some (rev_tokl, last_tok, tree)
+
+(* Describe what was expected when a symbol/tree failed to parse, naming
+   every alternative reachable from the failure point. *)
+let rec name_of_symbol_failed entry =
+  function
+  | Slist0 s -> name_of_symbol_failed entry s
+  | Slist0sep (s, _, _) -> name_of_symbol_failed entry s
+  | Slist1 s -> name_of_symbol_failed entry s
+  | Slist1sep (s, _, _) -> name_of_symbol_failed entry s
+  | Sopt s -> name_of_symbol_failed entry s
+  | Stree t -> name_of_tree_failed entry t
+  | s -> name_of_symbol entry s
+and name_of_tree_failed entry =
+  function
+    Node {node = s; brother = bro; son = son} ->
+      let tokl =
+        match s with
+          Stoken tok -> get_token_list entry [] tok son
+        | _ -> None
+      in
+      begin match tokl with
+        None ->
+          let txt = name_of_symbol_failed entry s in
+          let txt =
+            match s, son with
+              Sopt _, Node _ -> txt ^ " or " ^ name_of_tree_failed entry son
+            | _ -> txt
+          in
+          let txt =
+            match bro with
+              DeadEnd | LocAct (_, _) -> txt
+            | Node _ -> txt ^ " or " ^ name_of_tree_failed entry bro
+          in
+          txt
+      | Some (rev_tokl, last_tok, son) ->
+          List.fold_left
+            (fun s tok ->
+               (if s = "" then "" else s ^ " ") ^
+               entry.egram.glexer.Plexing.tok_text tok)
+            "" (List.rev (last_tok :: rev_tokl))
+      end
+  | DeadEnd | LocAct (_, _) -> "???"
+
+(* Build the "X expected (in [entry])" message raised when parsing fails
+   after [prev_symb] succeeded.  For separated lists the wording depends on
+   whether any element was parsed yet ([prev_symb_result], coerced with
+   [Obj.magic] to a list — safe only because list symbols store lists). *)
+let tree_failed entry prev_symb_result prev_symb tree =
+  let txt = name_of_tree_failed entry tree in
+  let txt =
+    match prev_symb with
+      Slist0 s ->
+        let txt1 = name_of_symbol_failed entry s in
+        txt1 ^ " or " ^ txt ^ " expected"
+    | Slist1 s ->
+        let txt1 = name_of_symbol_failed entry s in
+        txt1 ^ " or " ^ txt ^ " expected"
+    | Slist0sep (s, sep, _) ->
+        begin match Obj.magic prev_symb_result with
+          [] ->
+            let txt1 = name_of_symbol_failed entry s in
+            txt1 ^ " or " ^ txt ^ " expected"
+        | _ ->
+            let txt1 = name_of_symbol_failed entry sep in
+            txt1 ^ " or " ^ txt ^ " expected"
+        end
+    | Slist1sep (s, sep, _) ->
+        begin match Obj.magic prev_symb_result with
+          [] ->
+            let txt1 = name_of_symbol_failed entry s in
+            txt1 ^ " or " ^ txt ^ " expected"
+        | _ ->
+            let txt1 = name_of_symbol_failed entry sep in
+            txt1 ^ " or " ^ txt ^ " expected"
+        end
+    | Sopt _ | Stree _ -> txt ^ " expected"
+    | _ -> txt ^ " expected after " ^ name_of_symbol_failed entry prev_symb
+  in
+  txt ^ " (in [" ^ entry.ename ^ "])"
+
+(* Same message for a single failing symbol, wrapped as a one-node tree. *)
+let symb_failed entry prev_symb_result prev_symb symb =
+  let tree = Node {node = symb; brother = DeadEnd; son = DeadEnd} in
+  tree_failed entry prev_symb_result prev_symb tree
+
+(* Coerce a stored action back to its real (function) type for application. *)
+external app : Obj.t -> 'a = "%identity"
+
+(* Does level [lev] carry the label [n]?  (Duplicate of the gramext helper,
+   kept local to this module.) *)
+let is_level_labelled n lev =
+  match lev.lname with
+    Some n1 -> n = n1
+  | None -> false
+
+(* Index of the level labelled [lab] in [entry].
+   @raise Failure if no level has that label.
+   @raise Not_found if the entry is a [Dparser]. *)
+let level_number entry lab =
+  let rec lookup levn =
+    function
+      [] -> failwith ("unknown level " ^ lab)
+    | lev :: levs ->
+        if is_level_labelled lab lev then levn else lookup (succ levn) levs
+  in
+  match entry.edesc with
+    Dlevels elev -> lookup 0 elev
+  | Dparser _ -> raise Not_found
+
+(* Replace a self/level-restricted reference by the full entry, so error
+   recovery can retry from the entry's first level. *)
+let rec top_symb entry =
+  function
+    Sself | Snext -> Snterm entry
+  | Snterml (e, _) -> Snterm e
+  | Slist1sep (s, sep, b) -> Slist1sep (top_symb entry s, sep, b)
+  | _ -> raise Stream.Failure
+
+(* The entry a symbol refers to, when it refers to one. *)
+let entry_of_symb entry =
+  function
+    Sself | Snext -> entry
+  | Snterm e -> e
+  | Snterml (e, _) -> e
+  | _ -> raise Stream.Failure
+
+(* Like [top_symb] but applied to the head node of a tree. *)
+let top_tree entry =
+  function
+    Node {node = s; brother = bro; son = son} ->
+      Node {node = top_symb entry s; brother = bro; son = son}
+  | LocAct (_, _) | DeadEnd -> raise Stream.Failure
+
+(* Succeed only when no token has been consumed since position [bp]. *)
+let skip_if_empty bp p strm =
+  if Stream.count strm == bp then gramext_action (fun a -> p strm)
+  else raise Stream.Failure
+
+(* Recovery step: re-parse the continuation of the entry behind symbol [s],
+   then resume with [p1]; failures become hard [Stream.Error]s. *)
+let continue entry bp a s son p1 (strm__ : _ Stream.t) =
+  let a = (entry_of_symb entry s).econtinue 0 bp a strm__ in
+  let act =
+    try p1 strm__ with
+      Stream.Failure -> raise (Stream.Error (tree_failed entry a s son))
+  in
+  gramext_action (fun _ -> app act a)
+
+(* Error recovery after a sub-parse failed: first retry with the tree's
+   head generalized to the whole entry, then accept an empty match if
+   nothing was consumed, finally try to continue the entry itself. *)
+let do_recover parser_of_tree entry nlevn alevn bp a s son
+    (strm__ : _ Stream.t) =
+  try parser_of_tree entry nlevn alevn (top_tree entry son) strm__ with
+    Stream.Failure ->
+      try
+        skip_if_empty bp (fun (strm__ : _ Stream.t) -> raise Stream.Failure)
+          strm__
+      with Stream.Failure ->
+        continue entry bp a s son (parser_of_tree entry nlevn alevn son)
+          strm__
+
+(* Public wrapper over [do_recover]. *)
+let recover parser_of_tree entry nlevn alevn bp a s son strm =
+  do_recover parser_of_tree entry nlevn alevn bp a s son strm
+
+(* Highest token index peeked so far (statistics/debugging). *)
+let token_count = ref 0
+
+(* [peek_nth n strm] returns the [n]-th token of [strm] (1-based) without
+   consuming it, or [None] past end of stream. *)
+let peek_nth n strm =
+  let list = Stream.npeek n strm in
+  token_count := Stream.count strm + n;
+  let rec loop list n =
+    match list, n with
+      x :: _, 1 -> Some x
+    | _ :: l, n -> loop l (n - 1)
+    | [], _ -> None
+  in
+  loop list n
+
+(* Set by actions that want their result dropped from list accumulation. *)
+let item_skipped = ref false
+
+(* Run parser [ps] and cons its result onto [al] unless the item asked to
+   be skipped via [item_skipped]; the flag is reset in both cases. *)
+let call_and_push ps al strm =
+  item_skipped := false;
+  let a = ps strm in
+  let al = if !item_skipped then al else a :: al in item_skipped := false; al
+
+(* Matcher for one token pattern, result boxed as [Obj.t]. *)
+let token_ematch gram tok =
+  let tematch = gram.glexer.Plexing.tok_match tok in
+  fun tok -> Obj.repr (tematch tok : string)
+
+let rec parser_of_tree entry nlevn alevn =
+ function
+ DeadEnd -> (fun (strm__ : _ Stream.t) -> raise Stream.Failure)
+ | LocAct (act, _) -> (fun (strm__ : _ Stream.t) -> act)
+ | Node {node = Sself; son = LocAct (act, _); brother = DeadEnd} ->
+ (fun (strm__ : _ Stream.t) ->
+ let a = entry.estart alevn strm__ in app act a)
+ | Node {node = Sself; son = LocAct (act, _); brother = bro} ->
+ let p2 = parser_of_tree entry nlevn alevn bro in
+ (fun (strm__ : _ Stream.t) ->
+ match
+ try Some (entry.estart alevn strm__) with Stream.Failure -> None
+ with
+ Some a -> app act a
+ | _ -> p2 strm__)
+ | Node {node = s; son = son; brother = DeadEnd} ->
+ let tokl =
+ match s with
+ Stoken tok -> get_token_list entry [] tok son
+ | _ -> None
+ in
+ begin match tokl with
+ None ->
+ let ps = parser_of_symbol entry nlevn s in
+ let p1 = parser_of_tree entry nlevn alevn son in
+ let p1 = parser_cont p1 entry nlevn alevn s son in
+ (fun (strm__ : _ Stream.t) ->
+ let bp = Stream.count strm__ in
+ let a = ps strm__ in
+ let act =
+ try p1 bp a strm__ with
+ Stream.Failure ->
+ raise (Stream.Error (tree_failed entry a s son))
+ in
+ app act a)
+ | Some (rev_tokl, last_tok, son) ->
+ let lt = Stoken last_tok in
+ let p1 = parser_of_tree entry nlevn alevn son in
+ let p1 = parser_cont p1 entry nlevn alevn lt son in
+ parser_of_token_list entry s son p1
+ (fun (strm__ : _ Stream.t) -> raise Stream.Failure) rev_tokl
+ last_tok
+ end
+ | Node {node = s; son = son; brother = bro} ->
+ let tokl =
+ match s with
+ Stoken tok -> get_token_list entry [] tok son
+ | _ -> None
+ in
+ match tokl with
+ None ->
+ let ps = parser_of_symbol entry nlevn s in
+ let p1 = parser_of_tree entry nlevn alevn son in
+ let p1 = parser_cont p1 entry nlevn alevn s son in
+ let p2 = parser_of_tree entry nlevn alevn bro in
+ (fun (strm : _ Stream.t) ->
+ let bp = Stream.count strm in
+ match try Some (ps strm) with Stream.Failure -> None with
+ Some a ->
+ begin match
+ (try Some (p1 bp a strm) with Stream.Failure -> None)
+ with
+ Some act -> app act a
+ | None -> raise (Stream.Error (tree_failed entry a s son))
+ end
+ | None -> p2 strm)
+ | Some (rev_tokl, last_tok, son) ->
+ let lt = Stoken last_tok in
+ let p2 = parser_of_tree entry nlevn alevn bro in
+ let p1 = parser_of_tree entry nlevn alevn son in
+ let p1 = parser_cont p1 entry nlevn alevn lt son in
+ let p1 =
+ parser_of_token_list entry lt son p1 p2 rev_tokl last_tok
+ in
+ fun (strm__ : _ Stream.t) ->
+ try p1 strm__ with Stream.Failure -> p2 strm__
+and parser_cont p1 entry nlevn alevn s son bp a (strm__ : _ Stream.t) =
+ try p1 strm__ with
+ Stream.Failure ->
+ recover parser_of_tree entry nlevn alevn bp a s son strm__
+and parser_of_token_list entry s son p1 p2 rev_tokl last_tok =
+ let plast =
+ let n = List.length rev_tokl + 1 in
+ let tematch = token_ematch entry.egram last_tok in
+ let ps strm =
+ match peek_nth n strm with
+ Some tok ->
+ let r = tematch tok in
+ for _i = 1 to n do Stream.junk strm done; Obj.repr r
+ | None -> raise Stream.Failure
+ in
+ fun (strm : _ Stream.t) ->
+ let bp = Stream.count strm in
+ let a = ps strm in
+ match try Some (p1 bp a strm) with Stream.Failure -> None with
+ Some act -> app act a
+ | None -> raise (Stream.Error (tree_failed entry a s son))
+ in
+ match List.rev rev_tokl with
+ [] -> (fun (strm__ : _ Stream.t) -> plast strm__)
+ | tok :: tokl ->
+ let tematch = token_ematch entry.egram tok in
+ let ps strm =
+ match peek_nth 1 strm with
+ Some tok -> tematch tok
+ | None -> raise Stream.Failure
+ in
+ let p1 =
+ let rec loop n =
+ function
+ [] -> plast
+ | tok :: tokl ->
+ let tematch = token_ematch entry.egram tok in
+ let ps strm =
+ match peek_nth n strm with
+ Some tok -> tematch tok
+ | None -> raise Stream.Failure
+ in
+ let p1 = loop (n + 1) tokl in
+ fun (strm__ : _ Stream.t) ->
+ let a = ps strm__ in let act = p1 strm__ in app act a
+ in
+ loop 2 tokl
+ in
+ fun (strm__ : _ Stream.t) ->
+ let a = ps strm__ in let act = p1 strm__ in app act a
+(* [parser_of_symbol entry nlevn sym] compiles the grammar symbol [sym]
+   into a stream parser.  [nlevn] is the level number used to parse
+   [Snext].  Semantic values are wrapped with [Obj.repr]: rule actions
+   are dynamically typed inside this engine. *)
+and parser_of_symbol entry nlevn =
+  function
+  | Slist0 s ->
+      (* LIST0: zero or more [s]; accumulated in reverse, reversed at
+         the end *)
+      let ps = call_and_push (parser_of_symbol entry nlevn s) in
+      let rec loop al (strm__ : _ Stream.t) =
+        match try Some (ps al strm__) with Stream.Failure -> None with
+          Some al -> loop al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let a = loop [] strm__ in Obj.repr (List.rev a))
+  | Slist0sep (symb, sep, false) ->
+      (* LIST0 SEP, no trailing separator allowed: once a separator has
+         been read, a following element is mandatory *)
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            let al =
+              try ps al strm__ with
+                Stream.Failure ->
+                  raise (Stream.Error (symb_failed entry v sep symb))
+            in
+            kont al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps [] strm__) with Stream.Failure -> None with
+           Some al -> let a = kont al strm__ in Obj.repr (List.rev a)
+         | _ -> Obj.repr [])
+  | Slist0sep (symb, sep, true) ->
+      (* LIST0 SEP with optional trailing separator: a separator not
+         followed by an element simply ends the list *)
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            begin match
+              (try Some (ps al strm__) with Stream.Failure -> None)
+            with
+              Some al -> kont al strm__
+            | _ -> al
+            end
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps [] strm__) with Stream.Failure -> None with
+           Some al -> let a = kont al strm__ in Obj.repr (List.rev a)
+         | _ -> Obj.repr [])
+  | Slist1 s ->
+      (* LIST1: at least one [s] *)
+      let ps = call_and_push (parser_of_symbol entry nlevn s) in
+      let rec loop al (strm__ : _ Stream.t) =
+        match try Some (ps al strm__) with Stream.Failure -> None with
+          Some al -> loop al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let al = ps [] strm__ in
+         let a = loop al strm__ in Obj.repr (List.rev a))
+  | Slist1sep (symb, sep, false) ->
+      (* LIST1 SEP: after a separator, if the element fails, retry it
+         "from the top" via [parse_top_symb] before raising an error *)
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            let al =
+              try ps al strm__ with
+                Stream.Failure ->
+                  let a =
+                    try parse_top_symb entry symb strm__ with
+                      Stream.Failure ->
+                        raise (Stream.Error (symb_failed entry v sep symb))
+                  in
+                  a :: al
+            in
+            kont al strm__
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let al = ps [] strm__ in
+         let a = kont al strm__ in Obj.repr (List.rev a))
+  | Slist1sep (symb, sep, true) ->
+      (* LIST1 SEP with optional trailing separator *)
+      let ps = call_and_push (parser_of_symbol entry nlevn symb) in
+      let pt = parser_of_symbol entry nlevn sep in
+      let rec kont al (strm__ : _ Stream.t) =
+        match try Some (pt strm__) with Stream.Failure -> None with
+          Some v ->
+            begin match
+              (try Some (ps al strm__) with Stream.Failure -> None)
+            with
+              Some al -> kont al strm__
+            | _ ->
+                match
+                  try Some (parse_top_symb entry symb strm__) with
+                    Stream.Failure -> None
+                with
+                  Some a -> kont (a :: al) strm__
+                | _ -> al
+            end
+        | _ -> al
+      in
+      (fun (strm__ : _ Stream.t) ->
+         let al = ps [] strm__ in
+         let a = kont al strm__ in Obj.repr (List.rev a))
+  | Sopt s ->
+      (* OPT: wraps the value in [Some]/[None] *)
+      let ps = parser_of_symbol entry nlevn s in
+      (fun (strm__ : _ Stream.t) ->
+         match try Some (ps strm__) with Stream.Failure -> None with
+           Some a -> Obj.repr (Some a)
+         | _ -> Obj.repr None)
+  | Stree t ->
+      (* inline rule tree: parse it, then apply the resulting action to
+         the location of the consumed token interval *)
+      let pt = parser_of_tree entry 1 0 t in
+      (fun (strm__ : _ Stream.t) ->
+         let bp = Stream.count strm__ in
+         let a = pt strm__ in
+         let ep = Stream.count strm__ in
+         let loc = loc_of_token_interval bp ep in app a loc)
+  | Snterm e -> (fun (strm__ : _ Stream.t) -> e.estart 0 strm__)
+  | Snterml (e, l) ->
+      (* call entry [e] at the level named [l] *)
+      (fun (strm__ : _ Stream.t) -> e.estart (level_number e l) strm__)
+  | Sself -> (fun (strm__ : _ Stream.t) -> entry.estart 0 strm__)
+  | Snext -> (fun (strm__ : _ Stream.t) -> entry.estart nlevn strm__)
+  | Stoken tok -> parser_of_token entry tok
+(* [parser_of_token entry tok] parses one token matching the pattern
+   [tok], using the lexer's [tok_match].  The token is consumed (junked)
+   only if [f] succeeds; by convention [tok_match] raises
+   [Stream.Failure] on mismatch — TODO confirm against the lexer. *)
+and parser_of_token entry tok =
+  let f = entry.egram.glexer.Plexing.tok_match tok in
+  fun strm ->
+    match Stream.peek strm with
+      Some tok -> let r = f tok in Stream.junk strm; Obj.repr r
+    | None -> raise Stream.Failure
+(* Parse [symb] "from the top" (level 0, via [top_symb]); used by the
+   LIST1-with-separator cases above as a recovery parse. *)
+and parse_top_symb entry symb = parser_of_symbol entry 0 (top_symb entry symb)
+
+(* [start_parser_of_levels entry clevn levs] builds the "start" parser
+   for the levels [levs] of [entry], where [clevn] is the number of the
+   first level in the list.  The returned function takes the requested
+   level [levn]; levels numbered below [levn] are skipped.  After a
+   successful prefix parse, [econtinue] is called to extend the value
+   with suffix rules. *)
+let rec start_parser_of_levels entry clevn =
+  function
+    [] -> (fun levn (strm__ : _ Stream.t) -> raise Stream.Failure)
+  | lev :: levs ->
+      let p1 = start_parser_of_levels entry (succ clevn) levs in
+      match lev.lprefix with
+        DeadEnd -> p1
+      | tree ->
+          (* level at which a trailing self-call re-enters the entry:
+             right-associative levels re-enter the same level, left/non
+             associative ones the next *)
+          let alevn =
+            match lev.assoc with
+              LeftA | NonA -> succ clevn
+            | RightA -> clevn
+          in
+          let p2 = parser_of_tree entry (succ clevn) alevn tree in
+          match levs with
+            [] ->
+              (fun levn strm ->
+                 (* this code should be there but is commented to preserve
+                    compatibility with previous versions... with this code,
+                    the grammar entry e: [[ "x"; a = e | "y" ]] should fail
+                    because it should be: e: [RIGHTA[ "x"; a = e | "y" ]]...
+                    if levn > clevn then match strm with parser []
+                    else
+                 *)
+                 let (strm__ : _ Stream.t) = strm in
+                 let bp = Stream.count strm__ in
+                 let act = p2 strm__ in
+                 let ep = Stream.count strm__ in
+                 let a = app act (loc_of_token_interval bp ep) in
+                 entry.econtinue levn bp a strm)
+          | _ ->
+              fun levn strm ->
+                if levn > clevn then p1 levn strm
+                else
+                  let (strm__ : _ Stream.t) = strm in
+                  let bp = Stream.count strm__ in
+                  match try Some (p2 strm__) with Stream.Failure -> None with
+                    Some act ->
+                      let ep = Stream.count strm__ in
+                      let a = app act (loc_of_token_interval bp ep) in
+                      entry.econtinue levn bp a strm
+                  | _ -> p1 levn strm__
+
+(* [continue_parser_of_levels entry clevn levs] builds the "continue"
+   parser: given a value [a] already parsed (whose first token was at
+   position [bp]), try to extend it with the suffix rules ([lsuffix]) of
+   the levels.  Deeper levels ([p1]) are tried before this one ([p2]). *)
+let rec continue_parser_of_levels entry clevn =
+  function
+    [] -> (fun levn bp a (strm__ : _ Stream.t) -> raise Stream.Failure)
+  | lev :: levs ->
+      let p1 = continue_parser_of_levels entry (succ clevn) levs in
+      match lev.lsuffix with
+        DeadEnd -> p1
+      | tree ->
+          let alevn =
+            match lev.assoc with
+              LeftA | NonA -> succ clevn
+            | RightA -> clevn
+          in
+          let p2 = parser_of_tree entry (succ clevn) alevn tree in
+          fun levn bp a strm ->
+            if levn > clevn then p1 levn bp a strm
+            else
+              let (strm__ : _ Stream.t) = strm in
+              try p1 levn bp a strm__ with
+                Stream.Failure ->
+                  (* the suffix action receives the already-parsed value
+                     [a] plus the location of the whole span *)
+                  let act = p2 strm__ in
+                  let ep = Stream.count strm__ in
+                  let a = app act a (loc_of_token_interval bp ep) in
+                  entry.econtinue levn bp a strm
+
+(* Continue parser for a whole entry: if no suffix rule applies, the
+   value parsed so far is returned unchanged.  [Dparser] entries have no
+   levels and therefore cannot be continued. *)
+let continue_parser_of_entry entry =
+  match entry.edesc with
+    Dlevels elev ->
+      let p = continue_parser_of_levels entry 0 elev in
+      (fun levn bp a (strm__ : _ Stream.t) ->
+         try p levn bp a strm__ with Stream.Failure -> a)
+  | Dparser p -> fun levn bp a (strm__ : _ Stream.t) -> raise Stream.Failure
+
+(* Start parser installed for an entry that has no level: any attempt to
+   parse it is reported as a hard error, not a mere [Stream.Failure]. *)
+let empty_entry ename _levn _strm =
+  let msg = "entry [" ^ ename ^ "] is empty" in
+  raise (Stream.Error msg)
+
+(* Start parser for a whole entry.  An entry with no level at all uses
+   [empty_entry], which raises [Stream.Error] instead of failing. *)
+let start_parser_of_entry entry =
+  match entry.edesc with
+    Dlevels [] -> empty_entry entry.ename
+  | Dlevels elev -> start_parser_of_levels entry 0 elev
+  | Dparser p -> fun levn strm -> p strm
+
+(* Extend syntax *)
+
+(* Install lazy [estart]/[econtinue] functions on [entry]: the real
+   parsers are compiled from [edesc] on first use, then memoized by
+   overwriting the fields with the compiled functions. *)
+let init_entry_functions entry =
+  entry.estart <-
+    (fun lev strm ->
+       let f = start_parser_of_entry entry in entry.estart <- f; f lev strm);
+  entry.econtinue <-
+    (fun lev bp a strm ->
+       let f = continue_parser_of_entry entry in
+       entry.econtinue <- f; f lev bp a strm)
+
+(* [extend_entry ~warning entry position rules] adds [rules] to [entry]
+   at [position] and re-installs the lazily-compiled parsers.  A
+   [Plexing.Error] raised while registering the new tokens with the
+   lexer is printed on stderr and converted into
+   [Failure "Grammar.extend"]. *)
+let extend_entry ~warning entry position rules =
+  try
+    let elev = Gramext.levels_of_rules ~warning entry position rules in
+    entry.edesc <- Dlevels elev; init_entry_functions entry
+  with Plexing.Error s ->
+    Printf.eprintf "Lexer initialization error:\n- %s\n" s;
+    flush stderr;
+    failwith "Grammar.extend"
+
+(* Deleting a rule *)
+
+(* [delete_rule entry sl] removes the rule whose symbol list is [sl]
+   from [entry]'s levels, then re-installs the lazy [estart]/[econtinue]
+   parsers so the entry is recompiled on its next use.  [Dparser]
+   entries are left untouched. *)
+let delete_rule entry sl =
+  match entry.edesc with
+    Dlevels levs ->
+      let levs = Gramext.delete_rule_in_level_list entry sl levs in
+      entry.edesc <- Dlevels levs;
+      (* re-initialization was previously duplicated inline; it is
+         exactly what [init_entry_functions] does *)
+      init_entry_functions entry
+  | Dparser _ -> ()
+
+(* Normal interface *)
+
+(* Fresh token table, mapping a token pattern to its use count. *)
+let create_toktab () = Hashtbl.create 301
+(* [gcreate glexer] builds a grammar around the given lexer, with an
+   empty token table. *)
+let gcreate glexer =
+  {gtokens = create_toktab (); glexer = glexer }
+
+(* [tokens g con] lists, for the token constructor [con], every
+   parameter currently registered in grammar [g] together with its use
+   count. *)
+let tokens g con =
+  Hashtbl.fold
+    (fun (p_con, p_prm) c acc ->
+       if p_con = con then (p_prm, !c) :: acc else acc)
+    g.gtokens []
+
+(* A parsable couples the raw character stream with the token stream the
+   lexer produced from it, plus the token-index-to-location function. *)
+type 'te gen_parsable =
+  { pa_chr_strm : char Stream.t; (* underlying characters *)
+    pa_tok_strm : 'te Stream.t; (* lexed tokens *)
+    pa_loc_func : Plexing.location_function (* token index -> location *) }
+
+(* [parse_parsable entry p] runs [entry]'s start parser (level 0) on the
+   token stream of [p].  The global refs [floc] and [token_count]
+   (defined earlier in this file) are set for the duration of the parse
+   and restored afterwards, on success and on every error path.
+   Failures are re-raised through [Ploc.raise] with the best location
+   available. *)
+let parse_parsable entry p =
+  let efun = entry.estart 0 in
+  let ts = p.pa_tok_strm in
+  let cs = p.pa_chr_strm in
+  let fun_loc = p.pa_loc_func in
+  let restore =
+    let old_floc = !floc in
+    let old_tc = !token_count in
+    fun () -> floc := old_floc; token_count := old_tc
+  in
+  let get_loc () =
+    (* location spanning from the current token to the last token
+       consumed; falls back to an unlined location at the character
+       stream position if the location function fails *)
+    try
+      let cnt = Stream.count ts in
+      let loc = fun_loc cnt in
+      if !token_count - 1 <= cnt then loc
+      else Loc.merge loc (fun_loc (!token_count - 1))
+    with Failure _ -> Ploc.make_unlined (Stream.count cs, Stream.count cs + 1)
+  in
+  floc := fun_loc;
+  token_count := 0;
+  try let r = efun ts in restore (); r with
+    Stream.Failure ->
+      let loc = get_loc () in
+      restore ();
+      Ploc.raise loc (Stream.Error ("illegal begin of " ^ entry.ename))
+  | Stream.Error _ as exc ->
+      let loc = get_loc () in restore (); Ploc.raise loc exc
+  | exc ->
+      let loc = Stream.count cs, Stream.count cs + 1 in
+      restore (); Ploc.raise (Ploc.make_unlined loc) exc
+
+(* Unsafe *)
+
+(* Reset [e] to an empty state: parsing it always fails until it is
+   extended again.  [Dparser] descriptions are kept as-is. *)
+let clear_entry e =
+  let failing_start _ _ = raise Stream.Failure in
+  let failing_continue _ _ _ _ = raise Stream.Failure in
+  e.estart <- failing_start;
+  e.econtinue <- failing_continue;
+  match e.edesc with
+    Dlevels _ -> e.edesc <- Dlevels []
+  | Dparser _ -> ()
+
+(* Functorial interface *)
+
+(* Input signature for [GMake]: the token type [te] and its lexer. *)
+module type GLexerType = sig type te val lexer : te Plexing.lexer end
+
+(* Output signature of [GMake].  The [ty_symbol]/[ty_rule]/
+   [ty_production] phantom types give a well-typed construction API over
+   the untyped engine above. *)
+module type S =
+  sig
+    type te
+    type parsable
+    val parsable : char Stream.t -> parsable
+    val tokens : string -> (string * int) list
+    module Entry :
+      sig
+        type 'a e
+        val create : string -> 'a e
+        val parse : 'a e -> parsable -> 'a
+        val name : 'a e -> string
+        val of_parser : string -> (te Stream.t -> 'a) -> 'a e
+        val parse_token_stream : 'a e -> te Stream.t -> 'a
+        val print : Format.formatter -> 'a e -> unit
+      end
+    (* ['self] tracks the enclosing entry (used by [s_self]/[s_next]);
+       ['a] is the value the symbol produces *)
+    type ('self, 'a) ty_symbol
+    (* ['f] is the type of the rule's action, ['r] its final result *)
+    type ('self, 'f, 'r) ty_rule
+    type 'a ty_production
+    val s_nterm : 'a Entry.e -> ('self, 'a) ty_symbol
+    val s_nterml : 'a Entry.e -> string -> ('self, 'a) ty_symbol
+    val s_list0 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+    val s_list0sep :
+      ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+      ('self, 'a list) ty_symbol
+    val s_list1 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+    val s_list1sep :
+      ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+      ('self, 'a list) ty_symbol
+    val s_opt : ('self, 'a) ty_symbol -> ('self, 'a option) ty_symbol
+    val s_self : ('self, 'self) ty_symbol
+    val s_next : ('self, 'self) ty_symbol
+    val s_token : Plexing.pattern -> ('self, string) ty_symbol
+    val s_rules : warning:(string -> unit) option -> 'a ty_production list -> ('self, 'a) ty_symbol
+    val r_stop : ('self, 'r, 'r) ty_rule
+    val r_next :
+      ('self, 'a, 'r) ty_rule -> ('self, 'b) ty_symbol ->
+      ('self, 'b -> 'a, 'r) ty_rule
+    val production : ('a, 'f, Loc.t -> 'a) ty_rule * 'f -> 'a ty_production
+    module Unsafe :
+      sig
+        val clear_entry : 'a Entry.e -> unit
+      end
+    val safe_extend : warning:(string -> unit) option ->
+      'a Entry.e -> Gramext.position option ->
+      (string option * Gramext.g_assoc option * 'a ty_production list)
+        list ->
+      unit
+    val safe_delete_rule : 'a Entry.e -> ('a, 'r, 'f) ty_rule -> unit
+  end
+
+(* Functorial interface: wraps the untyped engine in the phantom-typed
+   signature [S].  The [Obj.magic] casts are the bridge between the two;
+   the phantom types on [ty_symbol]/[ty_rule] are what keeps them from
+   being observable with a wrong type by users of this module. *)
+module GMake (L : GLexerType) =
+  struct
+    type te = L.te
+    type parsable = te gen_parsable
+    (* one grammar per functor application *)
+    let gram = gcreate L.lexer
+    let parsable cs =
+      let (ts, lf) = L.lexer.Plexing.tok_func cs in
+      {pa_chr_strm = cs; pa_tok_strm = ts; pa_loc_func = lf}
+    let tokens = tokens gram
+    module Entry =
+      struct
+        type 'a e = te g_entry
+        let create n =
+          {egram = gram; ename = n; elocal = false; estart = empty_entry n;
+           econtinue =
+             (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+           edesc = Dlevels []}
+        (* identity coercion between the typed and untyped entry views *)
+        external obj : 'a e -> te Gramext.g_entry = "%identity"
+        let parse (e : 'a e) p : 'a =
+          Obj.magic (parse_parsable e p : Obj.t)
+        let parse_token_stream (e : 'a e) ts : 'a =
+          Obj.magic (e.estart 0 ts : Obj.t)
+        let name e = e.ename
+        let of_parser n (p : te Stream.t -> 'a) : 'a e =
+          {egram = gram; ename = n; elocal = false;
+           estart = (fun _ -> (Obj.magic p : te Stream.t -> Obj.t));
+           econtinue =
+             (fun _ _ _ (strm__ : _ Stream.t) -> raise Stream.Failure);
+           edesc = Dparser (Obj.magic p : te Stream.t -> Obj.t)}
+        let print ppf e = fprintf ppf "%a@." print_entry (obj e)
+      end
+    type ('self, 'a) ty_symbol = te Gramext.g_symbol
+    (* a rule is the (untyped) list of its symbols, in source order *)
+    type ('self, 'f, 'r) ty_rule = ('self, Obj.t) ty_symbol list
+    type 'a ty_production = ('a, Obj.t, Obj.t) ty_rule * Gramext.g_action
+    let s_nterm e = Snterm e
+    let s_nterml e l = Snterml (e, l)
+    let s_list0 s = Slist0 s
+    let s_list0sep s sep b = Slist0sep (s, sep, b)
+    let s_list1 s = Slist1 s
+    let s_list1sep s sep b = Slist1sep (s, sep, b)
+    let s_opt s = Sopt s
+    let s_self = Sself
+    let s_next = Snext
+    let s_token tok = Stoken tok
+    let s_rules ~warning (t : Obj.t ty_production list) = Gramext.srules ~warning (Obj.magic t)
+    let r_stop = []
+    (* appends at the end: quadratic over a rule's length, but rules are
+       short in practice *)
+    let r_next r s = r @ [s]
+    let production
+        (p : ('a, 'f, Loc.t -> 'a) ty_rule * 'f) : 'a ty_production =
+      Obj.magic p
+    module Unsafe =
+      struct
+        let clear_entry = clear_entry
+      end
+    let safe_extend ~warning e pos
+        (r :
+         (string option * Gramext.g_assoc option * Obj.t ty_production list)
+           list) =
+      extend_entry ~warning e pos (Obj.magic r)
+    let safe_delete_rule e r = delete_rule (Entry.obj e) r
+  end
diff --git a/gramlib/grammar.mli b/gramlib/grammar.mli
new file mode 100644
index 0000000000..bde07ddc48
--- /dev/null
+++ b/gramlib/grammar.mli
@@ -0,0 +1,83 @@
+(* camlp5r *)
+(* grammar.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Extensible grammars.
+
+ This module implements the Camlp5 extensible grammars system.
+ Grammars entries can be extended using the [EXTEND] statement,
+ added by loading the Camlp5 [pa_extend.cmo] file. *)
+
+(** {6 Functorial interface} *)
+
+ (** Alternative interface for using grammars. Grammars are no longer
+ OCaml values: there is no type for them. The generated modules
+ preserve the rule "an entry cannot call an entry of another
+ grammar" through normal OCaml typing. *)
+
+module type GLexerType = sig type te val lexer : te Plexing.lexer end
+ (** The input signature for the functor [Grammar.GMake]: [te] is the
+ type of the tokens. *)
+
+module type S =
+ sig
+ type te
+ type parsable
+ val parsable : char Stream.t -> parsable
+ val tokens : string -> (string * int) list
+ module Entry :
+ sig
+ type 'a e
+ val create : string -> 'a e
+ val parse : 'a e -> parsable -> 'a
+ val name : 'a e -> string
+ val of_parser : string -> (te Stream.t -> 'a) -> 'a e
+ val parse_token_stream : 'a e -> te Stream.t -> 'a
+ val print : Format.formatter -> 'a e -> unit
+ end
+ type ('self, 'a) ty_symbol
+ type ('self, 'f, 'r) ty_rule
+ type 'a ty_production
+ val s_nterm : 'a Entry.e -> ('self, 'a) ty_symbol
+ val s_nterml : 'a Entry.e -> string -> ('self, 'a) ty_symbol
+ val s_list0 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+ val s_list0sep :
+ ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+ ('self, 'a list) ty_symbol
+ val s_list1 : ('self, 'a) ty_symbol -> ('self, 'a list) ty_symbol
+ val s_list1sep :
+ ('self, 'a) ty_symbol -> ('self, 'b) ty_symbol -> bool ->
+ ('self, 'a list) ty_symbol
+ val s_opt : ('self, 'a) ty_symbol -> ('self, 'a option) ty_symbol
+ val s_self : ('self, 'self) ty_symbol
+ val s_next : ('self, 'self) ty_symbol
+ val s_token : Plexing.pattern -> ('self, string) ty_symbol
+ val s_rules : warning:(string -> unit) option -> 'a ty_production list -> ('self, 'a) ty_symbol
+ val r_stop : ('self, 'r, 'r) ty_rule
+ val r_next :
+ ('self, 'a, 'r) ty_rule -> ('self, 'b) ty_symbol ->
+ ('self, 'b -> 'a, 'r) ty_rule
+ val production : ('a, 'f, Loc.t -> 'a) ty_rule * 'f -> 'a ty_production
+
+ module Unsafe :
+ sig
+ val clear_entry : 'a Entry.e -> unit
+ end
+ val safe_extend : warning:(string -> unit) option ->
+ 'a Entry.e -> Gramext.position option ->
+ (string option * Gramext.g_assoc option * 'a ty_production list)
+ list ->
+ unit
+ val safe_delete_rule : 'a Entry.e -> ('a, 'f, 'r) ty_rule -> unit
+ end
+ (** Signature type of the functor [Grammar.GMake]. The types and
+ functions are almost the same as in the generic interface, but:
+- Grammars are not values. Functions holding a grammar as parameter
+ do not have this parameter yet.
+- The type [parsable] is used in function [parse] instead of
+ the char stream, avoiding the possible loss of tokens.
+- The type of tokens (expressions and patterns) can be any
+ type (instead of (string * string)); the module parameter
+ must specify a way to show them as (string * string) *)
+
+module GMake (L : GLexerType) : S with type te = L.te
diff --git a/gramlib/plexing.ml b/gramlib/plexing.ml
new file mode 100644
index 0000000000..f99a3c2480
--- /dev/null
+++ b/gramlib/plexing.ml
@@ -0,0 +1,18 @@
+(* camlp5r *)
+(* plexing.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(* Token pattern: (constructor name, parameter).  An empty constructor
+   name means the second string is a keyword; an empty parameter is a
+   wildcard (see plexing.mli). *)
+type pattern = string * string
+
+(* Lexing error, to be raised by lexers. *)
+exception Error of string
+
+(* Maps a token number (its position in the token stream, from zero) to
+   its source location. *)
+type location_function = int -> Loc.t
+(* A lexing function turns a char stream into a token stream plus the
+   location function for it. *)
+type 'te lexer_func = char Stream.t -> 'te Stream.t * location_function
+
+type 'te lexer =
+  { tok_func : 'te lexer_func; (* the lexing function itself *)
+    tok_using : pattern -> unit; (* notify that a pattern is used *)
+    tok_removing : pattern -> unit; (* notify that a pattern is removed *)
+    tok_match : pattern -> 'te -> string; (* match a token against a pattern *)
+    tok_text : pattern -> string; (* display form of a pattern *)
+  }
diff --git a/gramlib/plexing.mli b/gramlib/plexing.mli
new file mode 100644
index 0000000000..eed4082e00
--- /dev/null
+++ b/gramlib/plexing.mli
@@ -0,0 +1,37 @@
+(* camlp5r *)
+(* plexing.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(** Lexing for Camlp5 grammars.
+
+ This module defines the Camlp5 lexer type to be used in extensible
+ grammars (see module [Grammar]). It also provides some useful functions
+ to create lexers. *)
+
+type pattern = string * string
+ (* Type for values used by the generated code of the EXTEND
+ statement to represent terminals in entry rules.
+- The first string is the constructor name (must start with
+ an uppercase character). When it is empty, the second string
+ is supposed to be a keyword.
+- The second string is the constructor parameter. Empty if it
+ has no parameter (corresponding to the 'wildcard' pattern).
+- The way tokens patterns are interpreted to parse tokens is done
+ by the lexer, function [tok_match] below. *)
+
+exception Error of string
+ (** A lexing error exception to be used by lexers. *)
+
+(** Lexer type *)
+
+type 'te lexer =
+ { tok_func : 'te lexer_func;
+ tok_using : pattern -> unit;
+ tok_removing : pattern -> unit;
+ tok_match : pattern -> 'te -> string;
+ tok_text : pattern -> string;
+ }
+and 'te lexer_func = char Stream.t -> 'te Stream.t * location_function
+and location_function = int -> Loc.t
+ (** The type of a function giving the location of a token in the
+ source from the token number in the stream (starting from zero). *)
diff --git a/gramlib/ploc.ml b/gramlib/ploc.ml
new file mode 100644
index 0000000000..9342fc6c1d
--- /dev/null
+++ b/gramlib/ploc.ml
@@ -0,0 +1,26 @@
+(* camlp5r *)
+(* ploc.ml,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+open Loc
+
+(* [make_unlined (bp, ep)] builds a location carrying only character
+   positions; the line fields are placeholders (line 1, empty file
+   name).  NOTE(review): [line_nb_last = -1] presumably flags the line
+   info as not meaningful — confirm against the [Loc] module. *)
+let make_unlined (bp, ep) =
+  {fname = InFile ""; line_nb = 1; bol_pos = 0; line_nb_last = -1; bol_pos_last = 0;
+   bp = bp; ep = ep; comm = ""; ecomm = ""}
+
+(* A location with no meaning: character span 0-0, no file name. *)
+let dummy =
+  {fname = InFile ""; line_nb = 1; bol_pos = 0; line_nb_last = -1; bol_pos_last = 0;
+   bp = 0; ep = 0; comm = ""; ecomm = ""}
+
+(* *)
+
+(* [sub loc sh len]: location shifted [sh] chars from [loc]'s start, of
+   length [len]; the original end position is discarded. *)
+let sub loc sh len = {loc with bp = loc.bp + sh; ep = loc.bp + sh + len}
+(* [after loc sh len]: location starting [sh] chars after [loc]'s end,
+   of length [len]. *)
+let after loc sh len = {loc with bp = loc.ep + sh; ep = loc.ep + sh + len}
+(* Replace the comment attached to a location. *)
+let with_comment loc comm = {loc with comm = comm}
+
+(* An exception paired with the location where it occurred. *)
+exception Exc of Loc.t * exn
+
+(* [raise loc exc] wraps [exc] in [Exc (loc, exc)] unless it is already
+   wrapped, avoiding double encapsulation.  The [raise] occurrences in
+   the body are [Stdlib.raise]: this [let] is not recursive, so the new
+   name only shadows it after this definition. *)
+let raise loc exc =
+  match exc with
+    Exc (_, _) -> raise exc
+  | _ -> raise (Exc (loc, exc))
diff --git a/gramlib/ploc.mli b/gramlib/ploc.mli
new file mode 100644
index 0000000000..100fbc7271
--- /dev/null
+++ b/gramlib/ploc.mli
@@ -0,0 +1,40 @@
+(* camlp5r *)
+(* ploc.mli,v *)
+(* Copyright (c) INRIA 2007-2017 *)
+
+(* located exceptions *)
+
+exception Exc of Loc.t * exn
+ (** [Ploc.Exc loc e] is an encapsulation of the exception [e] with
+ the input location [loc]. To be used to specify a location
+ for an error. This exception must not be raised by [raise] but
+ rather by [Ploc.raise] (see below), to prevent the risk of several
+ encapsulations of [Ploc.Exc]. *)
+
+val raise : Loc.t -> exn -> 'a
+ (** [Ploc.raise loc e], if [e] is already the exception [Ploc.Exc],
+ re-raise it (ignoring the new location [loc]), else raise the
+ exception [Ploc.Exc loc e]. *)
+
+val make_unlined : int * int -> Loc.t
+ (** [Ploc.make_unlined] is like [Ploc.make] except that the line number
+ is not provided (to be used e.g. when the line number is unknown). *)
+
+val dummy : Loc.t
+ (** [Ploc.dummy] is a dummy location, used in situations when location
+ has no meaning. *)
+
+(* combining locations *)
+
+val sub : Loc.t -> int -> int -> Loc.t
+ (** [Ploc.sub loc sh len] is the location [loc] shifted with [sh]
+ characters and with length [len]. The previous ending position
+ of the location is lost. *)
+
+val after : Loc.t -> int -> int -> Loc.t
+ (** [Ploc.after loc sh len] is the location just after loc (starting at
+ the end position of [loc]) shifted with [sh] characters and of length
+ [len]. *)
+
+val with_comment : Loc.t -> string -> Loc.t
+ (** Change the comment part of the given location *)