From d14df293d2696f00a8de137bea9fe3a89e0bdeb0 Mon Sep 17 00:00:00 2001 From: letouzey Date: Wed, 19 Dec 2001 11:42:11 +0000 Subject: reparation du make depend et du .depend git-svn-id: svn+ssh://scm.gforge.inria.fr/svn/coq/trunk@2334 85f007b7-540e-0410-9357-904b9bb8a0f7 --- contrib/interface/dad.ml | 358 ------------- contrib/interface/dad.ml4 | 358 +++++++++++++ contrib/interface/line_parser.ml | 235 --------- contrib/interface/line_parser.ml4 | 235 +++++++++ contrib/xml/xml.ml | 80 --- contrib/xml/xml.ml4 | 80 +++ contrib/xml/xmlcommand.ml | 1037 ------------------------------------- contrib/xml/xmlcommand.ml4 | 1037 +++++++++++++++++++++++++++++++++++++ 8 files changed, 1710 insertions(+), 1710 deletions(-) delete mode 100644 contrib/interface/dad.ml create mode 100644 contrib/interface/dad.ml4 delete mode 100755 contrib/interface/line_parser.ml create mode 100755 contrib/interface/line_parser.ml4 delete mode 100644 contrib/xml/xml.ml create mode 100644 contrib/xml/xml.ml4 delete mode 100644 contrib/xml/xmlcommand.ml create mode 100644 contrib/xml/xmlcommand.ml4 (limited to 'contrib') diff --git a/contrib/interface/dad.ml b/contrib/interface/dad.ml deleted file mode 100644 index ce38174044..0000000000 --- a/contrib/interface/dad.ml +++ /dev/null @@ -1,358 +0,0 @@ -(* This file contains an ml version of drag-and-drop. *) - -(* #use "/net/home/bertot/experiments/pcoq/src/dad/dad.ml" *) - -open Names;; -open Term;; -open Rawterm;; -open Util;; -open Environ;; -open Tactics;; -open Tacticals;; -open Pattern;; -open Reduction;; -open Ctast;; -open Termast;; -open Astterm;; -open Vernacinterp;; -open Nametab - -open Proof_type;; -open Proof_trees;; -open Tacmach;; -open Typing;; -open Pp;; - -open Paths;; - -(* In a first approximation, drag-and-drop rules are like in CtCoq - 1/ a pattern, - 2,3/ Two paths: start and end positions, - 4/ the degree: the number of steps the algorithm should go up from the - longest common prefix, - 5/ the tail path: the suffix of the longest common prefix of length the - degree, - 6/ the command pattern, where meta variables are represented by objects - of the form Node(_,"META"; [Num(_,i)]) -*) - - -type dad_rule = Ctast.t * int list * int list * int * int list * Ctast.t;; - -(* This value will be used systematically when constructing objects of - type Ctast.t, the value is stupid and meaningless, but it is needed - to construct well-typed terms. *) - -let zz = (0,0);; - -(* This function receives a length n, a path p, and a term and returns a - couple whose first component is the subterm designated by the prefix - of p of length n, and the second component is the rest of the path *) - -let rec get_subterm (depth:int) (path: int list) (constr:constr) = - match depth, path, kind_of_term constr with - 0, l, c -> (constr,l) - | n, 2::a::tl, App(func,arr) -> - get_subterm (n - 2) tl arr.(a-1) - | _,l,_ -> failwith (int_list_to_string - "wrong path or wrong form of term" - l);; - -(* This function maps a substitution on an abstract syntax tree. The - first argument, an object of type env, is necessary to - transform constr terms into abstract syntax trees. The second argument is - the substitution, a list of pairs linking an integer and a constr term. 
*) -let map_subst (env :env) - (subst:(int * Term.constr) list) = - let rec map_subst_aux = function - Node(_, "META", [Num(_, i)]) -> - let constr = List.assoc i subst in - Ctast.ast_to_ct (ast_of_constr false env constr) - | Node(loc, s, l) -> Node(loc, s, List.map map_subst_aux l) - | ast -> ast in - map_subst_aux;; - - -(* This function is really the one that is important. *) -let rec find_cmd (l:(string * dad_rule) list) env constr p p1 p2 = - match l with - [] -> failwith "nothing happens" - | (name, (pat, p_f, p_l, deg, p_r, cmd))::tl -> - let length = List.length p in - try - if deg > length then - failwith "internal" - else - let term_to_match, p_r = - try - get_subterm (length - deg) p constr - with - Failure s -> failwith "internal" in - let _, constr_pat = - interp_constrpattern Evd.empty (Global.env()) - (ct_to_ast pat) in - let subst = matches constr_pat term_to_match in - if (is_prefix p_f (p_r@p1)) & (is_prefix p_l (p_r@p2)) then - map_subst env subst cmd - else - failwith "internal" - with - Failure "internal" -> find_cmd tl env constr p p1 p2 - | PatternMatchingFailure -> find_cmd tl env constr p p1 p2;; - - -let dad_rule_list = ref ([]: (string * dad_rule) list);; - - -(* \\ This function is also used in pbp. *) -let rec tactic_args_to_ints = function - [] -> [] - | (Integer n)::l -> n::(tactic_args_to_ints l) - | _ -> failwith "expecting only numbers";; - -(* We assume that the two lists of integers for the tactic are simply - given in one list, separated by a dummy tactic. *) -let rec part_tac_args l = function - [] -> l,[] - | (Tacexp a)::tl -> l, (tactic_args_to_ints tl) - | (Integer n)::tl -> part_tac_args (n::l) tl - | _ -> failwith "expecting only numbers and the word \"to\"";; - - -(* The dad_tac tactic takes a display_function as argument. This makes - it possible to use it in pcoq, but also in other contexts, just by - changing the output routine. *) -let dad_tac display_function = function - l -> let p1, p2 = part_tac_args [] l in - (function g -> - let (p_a, p1prime, p2prime) = decompose_path (List.rev p1,p2) in - (display_function - (find_cmd (!dad_rule_list) (pf_env g) - (pf_concl g) p_a p1prime p2prime)); - tclIDTAC g);; - -(* Now we enter dad rule list management. 
*) - -let add_dad_rule name patt p1 p2 depth pr command = - dad_rule_list := (name, - (patt, p1, p2, depth, pr, command))::!dad_rule_list;; - -let rec remove_if_exists name = function - [] -> false, [] - | ((a,b) as rule1)::tl -> if a = name then - let result1, l = (remove_if_exists name tl) in - true, l - else - let result1, l = remove_if_exists name tl in - result1, (rule1::l);; - -let remove_dad_rule name = - let result1, result2 = remove_if_exists name !dad_rule_list in - if result1 then - failwith("No such name among the drag and drop rules " ^ name) - else - dad_rule_list := result2;; - -let dad_rule_names () = - List.map (function (s,_) -> s) !dad_rule_list;; - -(* this function is inspired from matches_core in pattern.ml *) -let constrain ((n : int),(pat : constr_pattern)) sigma = - if List.mem_assoc n sigma then - if pat = (List.assoc n sigma) then sigma - else failwith "internal" - else - (n,pat)::sigma - -(* This function is inspired from matches_core in pattern.ml *) -let more_general_pat pat1 pat2 = - let rec match_rec sigma p1 p2 = - match p1, p2 with - | PMeta (Some n), m -> constrain (n,m) sigma - - | PMeta None, m -> sigma - - | PRef (VarRef sp1), PRef(VarRef sp2) when sp1 = sp2 -> sigma - - | PVar v1, PVar v2 when v1 = v2 -> sigma - - | PRef ref1, PRef ref2 when ref1 = ref2 -> sigma - - | PRel n1, PRel n2 when n1 = n2 -> sigma - - | PSort (RProp c1), PSort (RProp c2) when c1 = c2 -> sigma - - | PSort (RType _), PSort (RType _) -> sigma - - | PApp (c1,arg1), PApp (c2,arg2) -> - (try array_fold_left2 match_rec (match_rec sigma c1 c2) arg1 arg2 - with Invalid_argument _ -> failwith "internal") - | _ -> failwith "unexpected case in more_general_pat" in - try let _ = match_rec [] pat1 pat2 in true - with Failure "internal" -> false;; - -let more_general r1 r2 = - match r1,r2 with - (_,(patt1,p11,p12,_,_,_)), - (_,(patt2,p21,p22,_,_,_)) -> - (more_general_pat patt1 patt2) & - (is_prefix p11 p21) & (is_prefix p12 p22);; - -let not_less_general r1 r2 = - not (match r1,r2 with - (_,(patt1,p11,p12,_,_,_)), - (_,(patt2,p21,p22,_,_,_)) -> - (more_general_pat patt1 patt2) & - (is_prefix p21 p11) & (is_prefix p22 p12));; - -let rec add_in_list_sorting rule1 = function - [] -> [rule1] - | (b::tl) as this_list -> - if more_general rule1 b then - b::(add_in_list_sorting rule1 tl) - else if not_less_general rule1 b then - let tl2 = add_in_list_sorting_aux rule1 tl in - (match tl2 with - [] -> rule1::this_list - | _ -> b::tl2) - else - rule1::this_list -and add_in_list_sorting_aux rule1 = function - [] -> [] - | b::tl -> - if more_general rule1 b then - b::(add_in_list_sorting rule1 tl) - else - let tl2 = add_in_list_sorting_aux rule1 tl in - (match tl2 with - [] -> [] - | _ -> rule1::tl2);; - -let rec sort_list = function - [] -> [] - | a::l -> add_in_list_sorting a (sort_list l);; - -let mk_dad_meta n = Node(zz,"META",[Num(zz, n)]);; -let mk_rewrite1 ast = - Node(zz,"TACTICLIST", - [Node(zz,"RewriteLR", - [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; - -let mk_rewrite2 ast = - Node(zz,"TACTICLIST", - [Node(zz,"RewriteRL", - [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; - - - -let start_dad () = -vinterp_add "AddDadRule" - (function - | [VARG_STRING name; VARG_CONSTR pat; VARG_NUMBERLIST p1; - VARG_NUMBERLIST p2; VARG_TACTIC com] -> - (function () -> - let pr = match decompose_path (p1, p2) with pr, _, _ -> pr in - (add_dad_rule name (Ctast.ast_to_ct pat) p1 p2 (List.length pr) pr (Ctast.ast_to_ct com))) - | _ -> errorlabstrm "AddDadRule1" - [< str "AddDadRule2">]); 
-add_dad_rule "distributivity-inv" -(Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) -[2; 2] -[2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "distributivity1-r" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 2; 2; 2] -[] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "distributivity1-l" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 1; 2; 2] -[] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "associativity" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) -[2; 1] -[] -0 -[] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_assoc_r");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "minus-identity-lr" -(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) -[2; 1] -[2; 2] -1 -[2] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); - -add_dad_rule "minus-identity-rl" -(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) -[2; 2] -[2; 1] -1 -[2] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); - -add_dad_rule "plus-sym-rl" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) -[2; 2] -[2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "plus-sym-lr" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) -[2; 1] -[2; 2] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "absorb-0-r-rl" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) -[2; 2] -[1] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); - -add_dad_rule "absorb-0-r-lr" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) -[1] -[2; 2] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); - -add_dad_rule "plus-permute-lr" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 1] -[2; 2; 2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "plus-permute-rl" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 2; 2; 1] -[2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 
3) ;(mk_dad_meta 2) ])]));; - -vinterp_add "StartDad" - (function - | [] -> - (function () -> start_dad()) - | _ -> errorlabstrm "StartDad" [< >]);; diff --git a/contrib/interface/dad.ml4 b/contrib/interface/dad.ml4 new file mode 100644 index 0000000000..ce38174044 --- /dev/null +++ b/contrib/interface/dad.ml4 @@ -0,0 +1,358 @@ +(* This file contains an ml version of drag-and-drop. *) + +(* #use "/net/home/bertot/experiments/pcoq/src/dad/dad.ml" *) + +open Names;; +open Term;; +open Rawterm;; +open Util;; +open Environ;; +open Tactics;; +open Tacticals;; +open Pattern;; +open Reduction;; +open Ctast;; +open Termast;; +open Astterm;; +open Vernacinterp;; +open Nametab + +open Proof_type;; +open Proof_trees;; +open Tacmach;; +open Typing;; +open Pp;; + +open Paths;; + +(* In a first approximation, drag-and-drop rules are like in CtCoq + 1/ a pattern, + 2,3/ Two paths: start and end positions, + 4/ the degree: the number of steps the algorithm should go up from the + longest common prefix, + 5/ the tail path: the suffix of the longest common prefix of length the + degree, + 6/ the command pattern, where meta variables are represented by objects + of the form Node(_,"META"; [Num(_,i)]) +*) + + +type dad_rule = Ctast.t * int list * int list * int * int list * Ctast.t;; + +(* This value will be used systematically when constructing objects of + type Ctast.t, the value is stupid and meaningless, but it is needed + to construct well-typed terms. *) + +let zz = (0,0);; + +(* This function receives a length n, a path p, and a term and returns a + couple whose first component is the subterm designated by the prefix + of p of length n, and the second component is the rest of the path *) + +let rec get_subterm (depth:int) (path: int list) (constr:constr) = + match depth, path, kind_of_term constr with + 0, l, c -> (constr,l) + | n, 2::a::tl, App(func,arr) -> + get_subterm (n - 2) tl arr.(a-1) + | _,l,_ -> failwith (int_list_to_string + "wrong path or wrong form of term" + l);; + +(* This function maps a substitution on an abstract syntax tree. The + first argument, an object of type env, is necessary to + transform constr terms into abstract syntax trees. The second argument is + the substitution, a list of pairs linking an integer and a constr term. *) +let map_subst (env :env) + (subst:(int * Term.constr) list) = + let rec map_subst_aux = function + Node(_, "META", [Num(_, i)]) -> + let constr = List.assoc i subst in + Ctast.ast_to_ct (ast_of_constr false env constr) + | Node(loc, s, l) -> Node(loc, s, List.map map_subst_aux l) + | ast -> ast in + map_subst_aux;; + + +(* This function is really the one that is important. *) +let rec find_cmd (l:(string * dad_rule) list) env constr p p1 p2 = + match l with + [] -> failwith "nothing happens" + | (name, (pat, p_f, p_l, deg, p_r, cmd))::tl -> + let length = List.length p in + try + if deg > length then + failwith "internal" + else + let term_to_match, p_r = + try + get_subterm (length - deg) p constr + with + Failure s -> failwith "internal" in + let _, constr_pat = + interp_constrpattern Evd.empty (Global.env()) + (ct_to_ast pat) in + let subst = matches constr_pat term_to_match in + if (is_prefix p_f (p_r@p1)) & (is_prefix p_l (p_r@p2)) then + map_subst env subst cmd + else + failwith "internal" + with + Failure "internal" -> find_cmd tl env constr p p1 p2 + | PatternMatchingFailure -> find_cmd tl env constr p p1 p2;; + + +let dad_rule_list = ref ([]: (string * dad_rule) list);; + + +(* \\ This function is also used in pbp. 
*) +let rec tactic_args_to_ints = function + [] -> [] + | (Integer n)::l -> n::(tactic_args_to_ints l) + | _ -> failwith "expecting only numbers";; + +(* We assume that the two lists of integers for the tactic are simply + given in one list, separated by a dummy tactic. *) +let rec part_tac_args l = function + [] -> l,[] + | (Tacexp a)::tl -> l, (tactic_args_to_ints tl) + | (Integer n)::tl -> part_tac_args (n::l) tl + | _ -> failwith "expecting only numbers and the word \"to\"";; + + +(* The dad_tac tactic takes a display_function as argument. This makes + it possible to use it in pcoq, but also in other contexts, just by + changing the output routine. *) +let dad_tac display_function = function + l -> let p1, p2 = part_tac_args [] l in + (function g -> + let (p_a, p1prime, p2prime) = decompose_path (List.rev p1,p2) in + (display_function + (find_cmd (!dad_rule_list) (pf_env g) + (pf_concl g) p_a p1prime p2prime)); + tclIDTAC g);; + +(* Now we enter dad rule list management. *) + +let add_dad_rule name patt p1 p2 depth pr command = + dad_rule_list := (name, + (patt, p1, p2, depth, pr, command))::!dad_rule_list;; + +let rec remove_if_exists name = function + [] -> false, [] + | ((a,b) as rule1)::tl -> if a = name then + let result1, l = (remove_if_exists name tl) in + true, l + else + let result1, l = remove_if_exists name tl in + result1, (rule1::l);; + +let remove_dad_rule name = + let result1, result2 = remove_if_exists name !dad_rule_list in + if result1 then + failwith("No such name among the drag and drop rules " ^ name) + else + dad_rule_list := result2;; + +let dad_rule_names () = + List.map (function (s,_) -> s) !dad_rule_list;; + +(* this function is inspired from matches_core in pattern.ml *) +let constrain ((n : int),(pat : constr_pattern)) sigma = + if List.mem_assoc n sigma then + if pat = (List.assoc n sigma) then sigma + else failwith "internal" + else + (n,pat)::sigma + +(* This function is inspired from matches_core in pattern.ml *) +let more_general_pat pat1 pat2 = + let rec match_rec sigma p1 p2 = + match p1, p2 with + | PMeta (Some n), m -> constrain (n,m) sigma + + | PMeta None, m -> sigma + + | PRef (VarRef sp1), PRef(VarRef sp2) when sp1 = sp2 -> sigma + + | PVar v1, PVar v2 when v1 = v2 -> sigma + + | PRef ref1, PRef ref2 when ref1 = ref2 -> sigma + + | PRel n1, PRel n2 when n1 = n2 -> sigma + + | PSort (RProp c1), PSort (RProp c2) when c1 = c2 -> sigma + + | PSort (RType _), PSort (RType _) -> sigma + + | PApp (c1,arg1), PApp (c2,arg2) -> + (try array_fold_left2 match_rec (match_rec sigma c1 c2) arg1 arg2 + with Invalid_argument _ -> failwith "internal") + | _ -> failwith "unexpected case in more_general_pat" in + try let _ = match_rec [] pat1 pat2 in true + with Failure "internal" -> false;; + +let more_general r1 r2 = + match r1,r2 with + (_,(patt1,p11,p12,_,_,_)), + (_,(patt2,p21,p22,_,_,_)) -> + (more_general_pat patt1 patt2) & + (is_prefix p11 p21) & (is_prefix p12 p22);; + +let not_less_general r1 r2 = + not (match r1,r2 with + (_,(patt1,p11,p12,_,_,_)), + (_,(patt2,p21,p22,_,_,_)) -> + (more_general_pat patt1 patt2) & + (is_prefix p21 p11) & (is_prefix p22 p12));; + +let rec add_in_list_sorting rule1 = function + [] -> [rule1] + | (b::tl) as this_list -> + if more_general rule1 b then + b::(add_in_list_sorting rule1 tl) + else if not_less_general rule1 b then + let tl2 = add_in_list_sorting_aux rule1 tl in + (match tl2 with + [] -> rule1::this_list + | _ -> b::tl2) + else + rule1::this_list +and add_in_list_sorting_aux rule1 = function + [] -> [] + | b::tl 
-> + if more_general rule1 b then + b::(add_in_list_sorting rule1 tl) + else + let tl2 = add_in_list_sorting_aux rule1 tl in + (match tl2 with + [] -> [] + | _ -> rule1::tl2);; + +let rec sort_list = function + [] -> [] + | a::l -> add_in_list_sorting a (sort_list l);; + +let mk_dad_meta n = Node(zz,"META",[Num(zz, n)]);; +let mk_rewrite1 ast = + Node(zz,"TACTICLIST", + [Node(zz,"RewriteLR", + [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; + +let mk_rewrite2 ast = + Node(zz,"TACTICLIST", + [Node(zz,"RewriteRL", + [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; + + + +let start_dad () = +vinterp_add "AddDadRule" + (function + | [VARG_STRING name; VARG_CONSTR pat; VARG_NUMBERLIST p1; + VARG_NUMBERLIST p2; VARG_TACTIC com] -> + (function () -> + let pr = match decompose_path (p1, p2) with pr, _, _ -> pr in + (add_dad_rule name (Ctast.ast_to_ct pat) p1 p2 (List.length pr) pr (Ctast.ast_to_ct com))) + | _ -> errorlabstrm "AddDadRule1" + [< str "AddDadRule2">]); +add_dad_rule "distributivity-inv" +(Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) +[2; 2] +[2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "distributivity1-r" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 2; 2; 2] +[] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "distributivity1-l" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 1; 2; 2] +[] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "associativity" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) +[2; 1] +[] +0 +[] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_assoc_r");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "minus-identity-lr" +(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) +[2; 1] +[2; 2] +1 +[2] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); + +add_dad_rule "minus-identity-rl" +(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) +[2; 2] +[2; 1] +1 +[2] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); + +add_dad_rule "plus-sym-rl" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) +[2; 2] +[2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "plus-sym-lr" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) +[2; 1] +[2; 2] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "absorb-0-r-rl" 
+(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) +[2; 2] +[1] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); + +add_dad_rule "absorb-0-r-lr" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) +[1] +[2; 2] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); + +add_dad_rule "plus-permute-lr" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 1] +[2; 2; 2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "plus-permute-rl" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 2; 2; 1] +[2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])]));; + +vinterp_add "StartDad" + (function + | [] -> + (function () -> start_dad()) + | _ -> errorlabstrm "StartDad" [< >]);; diff --git a/contrib/interface/line_parser.ml b/contrib/interface/line_parser.ml deleted file mode 100755 index 91eda0eed0..0000000000 --- a/contrib/interface/line_parser.ml +++ /dev/null @@ -1,235 +0,0 @@ -(* line-oriented Syntactic analyser for a Coq parser *) -(* This parser expects a very small number of commands, each given on a complete -line. Some of these commands are then followed by a text fragment terminated -by a precise keyword, which is also expected to appear alone on a line. *) - -(* The main parsing loop procedure is "parser_loop", given at the end of this -file. It read lines one by one and checks whether they can be parsed using -a very simple parser. This very simple parser uses a lexer, which is also given -in this file. - -The lexical analyser: - There are only 5 sorts of tokens *) -type simple_tokens = Tspace | Tid of string | Tint of int | Tstring of string | - Tlbracket | Trbracket;; - -(* When recognizing identifiers or strings, the lexical analyser accumulates - the characters in a buffer, using the command add_in_buff. To recuperate - the characters, one can use get_buff (this code was inspired by the - code in src/meta/lexer.ml of Coq revision 6.1) *) -let add_in_buff,get_buff = - let buff = ref (String.create 80) in - (fun i x -> - let len = String.length !buff in - if i >= len then (buff := !buff ^ (String.create len);()); - String.set !buff i x; - succ i), - (fun len -> String.sub !buff 0 len);; - -(* Identifiers are [a-zA-Z_][a-zA-Z0-9_]*. When arriving here the first - character has already been recognized. *) -let rec ident len = parser - [<''_' | 'a'..'z' | 'A'..'Z' | '0'..'9' as c; s >] -> - ident (add_in_buff len c) s -| [< >] -> let str = get_buff len in Tid(str);; - -(* While recognizing integers, one constructs directly the integer value. - The ascii code of '0' is important for this. *) -let code0 = Char.code '0';; - -let get_digit c = Char.code c - code0;; - -(* Integers are [0-9]* - The variable intval is the integer value of the text that has already - been recognized. As for identifiers, the first character has already been - recognized. 
*) - -let rec parse_int intval = parser - [< ''0'..'9' as c ; i=parse_int (10 * intval + get_digit c)>] -> i -| [< >] -> Tint intval;; - -(* The string lexer is borrowed from the string parser of Coq V6.1 - This may be a problem if convention have changed in Coq, - However this parser is only used to recognize file names which should - not contain too many special characters *) - -let rec spec_char = parser - [< ''n' >] -> '\n' -| [< ''t' >] -> '\t' -| [< ''b' >] -> '\008' -| [< ''r' >] -> '\013' -| [< ''0'..'9' as c; v= (spec1 (get_digit c)) >] -> - Char.chr v -| [< 'x >] -> x - -and spec1 v = parser - [< ''0'..'9' as c; s >] -> spec1 (10*v+(get_digit c)) s -| [< >] -> v -;; - -(* This is the actual string lexical analyser. Strings are - QUOT([^QUOT\\]|\\[0-9]*|\\[^0-9])QUOT (the word QUOT is used - to represents double quotation characters, that cannot be used - freely, even inside comments. *) - -let rec string len = parser - [< ''"' >] -> len -| [<''\\' ; - len = (parser [< ''\n' >] -> len - | [< c=spec_char >] -> add_in_buff len c); - s >] -> string len s -| [< 'x; s >] -> string (add_in_buff len x) s;; - -(* The lexical analyser repeats the recognized given by next_token: - spaces and tabulations are ignored, identifiers, integers, - strings, opening and closing square brackets. Lexical errors are - ignored ! *) -let rec next_token = parser count - [< '' ' | '\t'; tok = next_token >] -> tok -| [< ''_' | 'a'..'z' | 'A'..'Z' as c;i = (ident (add_in_buff 0 c))>] -> i -| [< ''0'..'9' as c ; i = (parse_int (get_digit c))>] -> i -| [< ''"' ; len = (string 0) >] -> Tstring (get_buff len) -| [< ''[' >] -> Tlbracket -| [< '']' >] -> Trbracket -| [< '_ ; x = next_token >] -> x;; - -(* A very simple lexical analyser to recognize a integer value behind - blank characters *) - -let rec next_int = parser count - [< '' ' | '\t'; v = next_int >] -> v -| [< ''0'..'9' as c; i = (parse_int (get_digit c))>] -> - (match i with - Tint n -> n - | _ -> failwith "unexpected branch in next_int");; - -(* This is the actual lexical analyser, implemented as a function on a stream. - It will be used with the Stream.from primitive to construct a function - of type char Stream.t -> simple_token option Stream.t *) -let token_stream cs _ = - try let tok = next_token cs in - Some tok - with Stream.Failure -> None;; - -(* Two of the actions of the parser request that one reads the rest of - the input up to a specific string stop_string. This is done - with a function that transform the input_channel into a pair of - char Stream.t, reading from the input_channel all the lines to - the stop_string first. *) - - -let rec gather_strings stop_string input_channel = - let buff = input_line input_channel in - if buff = stop_string then - [] - else - (buff::(gather_strings stop_string input_channel));; - - -(* the result of this function is supposed to be used in a Stream.from - construction. *) - -let line_list_to_stream string_list = - let count = ref 0 in - let buff = ref "" in - let reserve = ref string_list in - let current_length = ref 0 in - (fun i -> if (i - !count) >= !current_length then - begin - count := !count + !current_length + 1; - match !reserve with - | [] -> None - | s1::rest -> - begin - buff := s1; - current_length := String.length !buff; - reserve := rest; - Some '\n' - end - end - else - Some(String.get !buff (i - !count)));; - - -(* In older revisions of this file you would find a function that - does line oriented breakdown of the input channel without resorting to - a list of lines. 
However, the need for the list of line appeared when - we wanted to have a channel and a list of strings describing the same - data, one for regular parsing and the other for error recovery. *) - -let channel_to_stream_and_string_list stop_string input_channel = - let string_list = gather_strings stop_string input_channel in - (line_list_to_stream string_list, string_list);; - -let flush_until_end_of_stream char_stream = - Stream.iter (function _ -> ()) char_stream;; - -(* There are only 5 kinds of lines recognized by our little parser. - Unrecognized lines are ignored. *) -type parser_request = - | PRINT_VERSION - | PARSE_STRING of string - (* parse_string [] then text and && END--OF--DATA *) - | QUIET_PARSE_STRING - (* quiet_parse_string then text and && END--OF--DATA *) - | PARSE_FILE of string - (* parse_file *) - | ADD_PATH of string - (* add_path *) - | LOAD_SYNTAX of string - (* load_syntax_file *) - | GARBAGE -;; - -(* The procedure parser_loop should never terminate while the input_channel is - not closed. This procedure receives the functions called for each sentence - as arguments. Thus the code is completely independent from the Coq sources. *) -let parser_loop functions input_channel = - let print_version_action, - parse_string_action, - quiet_parse_string_action, - parse_file_action, - add_path_action, - load_syntax_action = functions in - let rec parser_loop_rec input_channel = - (let line = input_line input_channel in - let reqid, parser_request = - try - (match Stream.from (token_stream (Stream.of_string line)) with - parser - | [< 'Tid "print_version" >] -> - 0, PRINT_VERSION - | [< 'Tid "parse_string" ; 'Tint reqid ; 'Tlbracket ; - 'Tid phylum ; 'Trbracket >] - -> reqid,PARSE_STRING phylum - | [< 'Tid "quiet_parse_string" >] -> - 0,QUIET_PARSE_STRING - | [< 'Tid "parse_file" ; 'Tint reqid ; 'Tstring fname >] -> - reqid, PARSE_FILE fname - | [< 'Tid "add_path"; 'Tint reqid ; 'Tstring directory >] - -> reqid, ADD_PATH directory - | [< 'Tid "load_syntax_file"; 'Tint reqid; 'Tid module_name >] -> - reqid, LOAD_SYNTAX module_name - | [< 'Tid "quit_parser" >] -> raise End_of_file - | [< >] -> 0, GARBAGE) - with - Stream.Failure | Stream.Error _ -> 0,GARBAGE in - match parser_request with - PRINT_VERSION -> print_version_action () - | PARSE_STRING phylum -> - let regular_stream, string_list = - channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in - parse_string_action reqid phylum (Stream.from regular_stream) - string_list;() - | QUIET_PARSE_STRING -> - let regular_stream, string_list = - channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in - quiet_parse_string_action - (Stream.from regular_stream);() - | PARSE_FILE file_name -> - parse_file_action reqid file_name - | ADD_PATH path -> add_path_action reqid path - | LOAD_SYNTAX syn -> load_syntax_action reqid syn - | GARBAGE -> ()); - parser_loop_rec input_channel in - parser_loop_rec input_channel;; diff --git a/contrib/interface/line_parser.ml4 b/contrib/interface/line_parser.ml4 new file mode 100755 index 0000000000..91eda0eed0 --- /dev/null +++ b/contrib/interface/line_parser.ml4 @@ -0,0 +1,235 @@ +(* line-oriented Syntactic analyser for a Coq parser *) +(* This parser expects a very small number of commands, each given on a complete +line. Some of these commands are then followed by a text fragment terminated +by a precise keyword, which is also expected to appear alone on a line. *) + +(* The main parsing loop procedure is "parser_loop", given at the end of this +file. 
It read lines one by one and checks whether they can be parsed using +a very simple parser. This very simple parser uses a lexer, which is also given +in this file. + +The lexical analyser: + There are only 5 sorts of tokens *) +type simple_tokens = Tspace | Tid of string | Tint of int | Tstring of string | + Tlbracket | Trbracket;; + +(* When recognizing identifiers or strings, the lexical analyser accumulates + the characters in a buffer, using the command add_in_buff. To recuperate + the characters, one can use get_buff (this code was inspired by the + code in src/meta/lexer.ml of Coq revision 6.1) *) +let add_in_buff,get_buff = + let buff = ref (String.create 80) in + (fun i x -> + let len = String.length !buff in + if i >= len then (buff := !buff ^ (String.create len);()); + String.set !buff i x; + succ i), + (fun len -> String.sub !buff 0 len);; + +(* Identifiers are [a-zA-Z_][a-zA-Z0-9_]*. When arriving here the first + character has already been recognized. *) +let rec ident len = parser + [<''_' | 'a'..'z' | 'A'..'Z' | '0'..'9' as c; s >] -> + ident (add_in_buff len c) s +| [< >] -> let str = get_buff len in Tid(str);; + +(* While recognizing integers, one constructs directly the integer value. + The ascii code of '0' is important for this. *) +let code0 = Char.code '0';; + +let get_digit c = Char.code c - code0;; + +(* Integers are [0-9]* + The variable intval is the integer value of the text that has already + been recognized. As for identifiers, the first character has already been + recognized. *) + +let rec parse_int intval = parser + [< ''0'..'9' as c ; i=parse_int (10 * intval + get_digit c)>] -> i +| [< >] -> Tint intval;; + +(* The string lexer is borrowed from the string parser of Coq V6.1 + This may be a problem if convention have changed in Coq, + However this parser is only used to recognize file names which should + not contain too many special characters *) + +let rec spec_char = parser + [< ''n' >] -> '\n' +| [< ''t' >] -> '\t' +| [< ''b' >] -> '\008' +| [< ''r' >] -> '\013' +| [< ''0'..'9' as c; v= (spec1 (get_digit c)) >] -> + Char.chr v +| [< 'x >] -> x + +and spec1 v = parser + [< ''0'..'9' as c; s >] -> spec1 (10*v+(get_digit c)) s +| [< >] -> v +;; + +(* This is the actual string lexical analyser. Strings are + QUOT([^QUOT\\]|\\[0-9]*|\\[^0-9])QUOT (the word QUOT is used + to represents double quotation characters, that cannot be used + freely, even inside comments. *) + +let rec string len = parser + [< ''"' >] -> len +| [<''\\' ; + len = (parser [< ''\n' >] -> len + | [< c=spec_char >] -> add_in_buff len c); + s >] -> string len s +| [< 'x; s >] -> string (add_in_buff len x) s;; + +(* The lexical analyser repeats the recognized given by next_token: + spaces and tabulations are ignored, identifiers, integers, + strings, opening and closing square brackets. Lexical errors are + ignored ! 
*) +let rec next_token = parser count + [< '' ' | '\t'; tok = next_token >] -> tok +| [< ''_' | 'a'..'z' | 'A'..'Z' as c;i = (ident (add_in_buff 0 c))>] -> i +| [< ''0'..'9' as c ; i = (parse_int (get_digit c))>] -> i +| [< ''"' ; len = (string 0) >] -> Tstring (get_buff len) +| [< ''[' >] -> Tlbracket +| [< '']' >] -> Trbracket +| [< '_ ; x = next_token >] -> x;; + +(* A very simple lexical analyser to recognize a integer value behind + blank characters *) + +let rec next_int = parser count + [< '' ' | '\t'; v = next_int >] -> v +| [< ''0'..'9' as c; i = (parse_int (get_digit c))>] -> + (match i with + Tint n -> n + | _ -> failwith "unexpected branch in next_int");; + +(* This is the actual lexical analyser, implemented as a function on a stream. + It will be used with the Stream.from primitive to construct a function + of type char Stream.t -> simple_token option Stream.t *) +let token_stream cs _ = + try let tok = next_token cs in + Some tok + with Stream.Failure -> None;; + +(* Two of the actions of the parser request that one reads the rest of + the input up to a specific string stop_string. This is done + with a function that transform the input_channel into a pair of + char Stream.t, reading from the input_channel all the lines to + the stop_string first. *) + + +let rec gather_strings stop_string input_channel = + let buff = input_line input_channel in + if buff = stop_string then + [] + else + (buff::(gather_strings stop_string input_channel));; + + +(* the result of this function is supposed to be used in a Stream.from + construction. *) + +let line_list_to_stream string_list = + let count = ref 0 in + let buff = ref "" in + let reserve = ref string_list in + let current_length = ref 0 in + (fun i -> if (i - !count) >= !current_length then + begin + count := !count + !current_length + 1; + match !reserve with + | [] -> None + | s1::rest -> + begin + buff := s1; + current_length := String.length !buff; + reserve := rest; + Some '\n' + end + end + else + Some(String.get !buff (i - !count)));; + + +(* In older revisions of this file you would find a function that + does line oriented breakdown of the input channel without resorting to + a list of lines. However, the need for the list of line appeared when + we wanted to have a channel and a list of strings describing the same + data, one for regular parsing and the other for error recovery. *) + +let channel_to_stream_and_string_list stop_string input_channel = + let string_list = gather_strings stop_string input_channel in + (line_list_to_stream string_list, string_list);; + +let flush_until_end_of_stream char_stream = + Stream.iter (function _ -> ()) char_stream;; + +(* There are only 5 kinds of lines recognized by our little parser. + Unrecognized lines are ignored. *) +type parser_request = + | PRINT_VERSION + | PARSE_STRING of string + (* parse_string [] then text and && END--OF--DATA *) + | QUIET_PARSE_STRING + (* quiet_parse_string then text and && END--OF--DATA *) + | PARSE_FILE of string + (* parse_file *) + | ADD_PATH of string + (* add_path *) + | LOAD_SYNTAX of string + (* load_syntax_file *) + | GARBAGE +;; + +(* The procedure parser_loop should never terminate while the input_channel is + not closed. This procedure receives the functions called for each sentence + as arguments. Thus the code is completely independent from the Coq sources. 
*) +let parser_loop functions input_channel = + let print_version_action, + parse_string_action, + quiet_parse_string_action, + parse_file_action, + add_path_action, + load_syntax_action = functions in + let rec parser_loop_rec input_channel = + (let line = input_line input_channel in + let reqid, parser_request = + try + (match Stream.from (token_stream (Stream.of_string line)) with + parser + | [< 'Tid "print_version" >] -> + 0, PRINT_VERSION + | [< 'Tid "parse_string" ; 'Tint reqid ; 'Tlbracket ; + 'Tid phylum ; 'Trbracket >] + -> reqid,PARSE_STRING phylum + | [< 'Tid "quiet_parse_string" >] -> + 0,QUIET_PARSE_STRING + | [< 'Tid "parse_file" ; 'Tint reqid ; 'Tstring fname >] -> + reqid, PARSE_FILE fname + | [< 'Tid "add_path"; 'Tint reqid ; 'Tstring directory >] + -> reqid, ADD_PATH directory + | [< 'Tid "load_syntax_file"; 'Tint reqid; 'Tid module_name >] -> + reqid, LOAD_SYNTAX module_name + | [< 'Tid "quit_parser" >] -> raise End_of_file + | [< >] -> 0, GARBAGE) + with + Stream.Failure | Stream.Error _ -> 0,GARBAGE in + match parser_request with + PRINT_VERSION -> print_version_action () + | PARSE_STRING phylum -> + let regular_stream, string_list = + channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in + parse_string_action reqid phylum (Stream.from regular_stream) + string_list;() + | QUIET_PARSE_STRING -> + let regular_stream, string_list = + channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in + quiet_parse_string_action + (Stream.from regular_stream);() + | PARSE_FILE file_name -> + parse_file_action reqid file_name + | ADD_PATH path -> add_path_action reqid path + | LOAD_SYNTAX syn -> load_syntax_action reqid syn + | GARBAGE -> ()); + parser_loop_rec input_channel in + parser_loop_rec input_channel;; diff --git a/contrib/xml/xml.ml b/contrib/xml/xml.ml deleted file mode 100644 index 60e16ab19b..0000000000 --- a/contrib/xml/xml.ml +++ /dev/null @@ -1,80 +0,0 @@ -(***********************************************************************) -(* v * The Coq Proof Assistant / The Coq Development Team *) -(* *) -(* 18/10/2000 *) -(* *) -(* This module defines a pretty-printer and the stream of commands to the pp *) -(* *) -(******************************************************************************) - - -(* the type token for XML cdata, empty elements and not-empty elements *) -(* Usage: *) -(* Str cdata *) -(* Empty (element_name, [attrname1, value1 ; ... ; attrnamen, valuen] *) -(* NEmpty (element_name, [attrname1, value2 ; ... 
; attrnamen, valuen], *) -(* content *) -type token = Str of string - | Empty of string * (string * string) list - | NEmpty of string * (string * string) list * token Stream.t -;; - -(* currified versions of the constructors make the code more readable *) -let xml_empty name attrs = [< 'Empty(name,attrs) >] -let xml_nempty name attrs content = [< 'NEmpty(name,attrs,content) >] -let xml_cdata str = [< 'Str str >] - -(* Usage: *) -(* pp tokens None pretty prints the output on stdout *) -(* pp tokens (Some filename) pretty prints the output on the file filename *) -let pp strm fn = - let channel = ref stdout in - let rec pp_r m = - parser - [< 'Str a ; s >] -> - print_spaces m ; - fprint_string (a ^ "\n") ; - pp_r m s - | [< 'Empty(n,l) ; s >] -> - print_spaces m ; - fprint_string ("<" ^ n) ; - List.iter (function (n,v) -> fprint_string (" " ^ n ^ "=\"" ^ v ^ "\"")) l; - fprint_string "/>\n" ; - pp_r m s - | [< 'NEmpty(n,l,c) ; s >] -> - print_spaces m ; - fprint_string ("<" ^ n) ; - List.iter (function (n,v) -> fprint_string (" " ^ n ^ "=\"" ^ v ^ "\"")) l; - fprint_string ">\n" ; - pp_r (m+1) c ; - print_spaces m ; - fprint_string ("\n") ; - pp_r m s - | [< >] -> () - and print_spaces m = - for i = 1 to m do fprint_string " " done - and fprint_string str = - output_string !channel str - in - match fn with - Some filename -> - let filename = filename ^ ".xml" in - channel := open_out filename ; - pp_r 0 strm ; - close_out !channel ; - print_string ("\nWriting on file \"" ^ filename ^ "\" was succesfull\n"); - flush stdout - | None -> - pp_r 0 strm -;; diff --git a/contrib/xml/xml.ml4 b/contrib/xml/xml.ml4 new file mode 100644 index 0000000000..60e16ab19b --- /dev/null +++ b/contrib/xml/xml.ml4 @@ -0,0 +1,80 @@ +(***********************************************************************) +(* v * The Coq Proof Assistant / The Coq Development Team *) +(* *) +(* 18/10/2000 *) +(* *) +(* This module defines a pretty-printer and the stream of commands to the pp *) +(* *) +(******************************************************************************) + + +(* the type token for XML cdata, empty elements and not-empty elements *) +(* Usage: *) +(* Str cdata *) +(* Empty (element_name, [attrname1, value1 ; ... ; attrnamen, valuen] *) +(* NEmpty (element_name, [attrname1, value2 ; ... 
; attrnamen, valuen], *) +(* content *) +type token = Str of string + | Empty of string * (string * string) list + | NEmpty of string * (string * string) list * token Stream.t +;; + +(* currified versions of the constructors make the code more readable *) +let xml_empty name attrs = [< 'Empty(name,attrs) >] +let xml_nempty name attrs content = [< 'NEmpty(name,attrs,content) >] +let xml_cdata str = [< 'Str str >] + +(* Usage: *) +(* pp tokens None pretty prints the output on stdout *) +(* pp tokens (Some filename) pretty prints the output on the file filename *) +let pp strm fn = + let channel = ref stdout in + let rec pp_r m = + parser + [< 'Str a ; s >] -> + print_spaces m ; + fprint_string (a ^ "\n") ; + pp_r m s + | [< 'Empty(n,l) ; s >] -> + print_spaces m ; + fprint_string ("<" ^ n) ; + List.iter (function (n,v) -> fprint_string (" " ^ n ^ "=\"" ^ v ^ "\"")) l; + fprint_string "/>\n" ; + pp_r m s + | [< 'NEmpty(n,l,c) ; s >] -> + print_spaces m ; + fprint_string ("<" ^ n) ; + List.iter (function (n,v) -> fprint_string (" " ^ n ^ "=\"" ^ v ^ "\"")) l; + fprint_string ">\n" ; + pp_r (m+1) c ; + print_spaces m ; + fprint_string ("\n") ; + pp_r m s + | [< >] -> () + and print_spaces m = + for i = 1 to m do fprint_string " " done + and fprint_string str = + output_string !channel str + in + match fn with + Some filename -> + let filename = filename ^ ".xml" in + channel := open_out filename ; + pp_r 0 strm ; + close_out !channel ; + print_string ("\nWriting on file \"" ^ filename ^ "\" was succesfull\n"); + flush stdout + | None -> + pp_r 0 strm +;; diff --git a/contrib/xml/xmlcommand.ml b/contrib/xml/xmlcommand.ml deleted file mode 100644 index 7b6303130b..0000000000 --- a/contrib/xml/xmlcommand.ml +++ /dev/null @@ -1,1037 +0,0 @@ -(***********************************************************************) -(* v * The Coq Proof Assistant / The Coq Development Team *) -(* *) -(* 06/12/2000 *) -(* *) -(******************************************************************************) - - -(* CONFIGURATION PARAMETERS *) - -let dtdname = "http://www.cs.unibo.it/helm/dtd/cic.dtd";; -let typesdtdname = "http://www.cs.unibo.it/helm/dtd/cictypes.dtd";; -let verbose = ref false;; (* always set to true during a "Print XML All" *) - -(* UTILITY FUNCTIONS *) - -let print_if_verbose s = if !verbose then print_string s;; - -type tag = - Constant - | Inductive - | Variable -;; - -(* Next exception is used only inside print_object and tag_of_string_tag *) -exception Uninteresting;; - -let tag_of_string_tag = - function - "CONSTANT" - | "PARAMETER" -> Constant - | "INDUCTIVE" -> Inductive - | "VARIABLE" -> Variable - | _ -> raise Uninteresting -;; - -let ext_of_tag = - function - Constant -> "con" - | Inductive -> "ind" - | Variable -> "var" -;; - -(* Internally, for Coq V7, params of inductive types are associated *) -(* not to the whole block of mutual inductive (as it was in V6) but to *) -(* each member of the block; but externally, all params are required *) -(* to be the same; the following function checks that the parameters *) -(* of each inductive of a same block are all the same, then returns *) -(* this number; it fails otherwise *) -let extract_nparams pack = - let module D = Declarations in - let module U = Util in - let module S = Sign in - - let {D.mind_nparams=nparams0} = pack.(0) in - let arity0 = pack.(0).D.mind_user_arity in - let params0, _ = S.decompose_prod_n_assum nparams0 arity0 in - for i = 1 to Array.length pack - 1 do - let {D.mind_nparams=nparamsi} = pack.(i) in - let arityi = 
pack.(i).D.mind_user_arity in - let paramsi, _ = S.decompose_prod_n_assum nparamsi arityi in - if params0 <> paramsi then U.error "Cannot convert a block of inductive definitions with parameters specific to each inductive to a block of mutual inductive definitions with parameters global to the whole block" - done; - nparams0 - -(* could_have_namesakes sp = true iff o is an object that could be cooked and *) -(* than that could exists in cooked form with the same name in a super *) -(* section of the actual section *) -let could_have_namesakes o sp = (* namesake = omonimo in italian *) - let module D = Declare in - let tag = Libobject.object_tag o in - print_if_verbose ("Object tag: " ^ tag ^ "\n") ; - match tag with - "CONSTANT" -> - (match D.constant_strength sp with - | D.DischargeAt _ -> false (* a local definition *) - | D.NotDeclare -> false (* not a definition *) - | D.NeverDischarge -> true (* a non-local one *) - ) - | "PARAMETER" (* axioms and *) - | "INDUCTIVE" -> true (* mutual inductive types are never local *) - | "VARIABLE" -> false (* variables are local, so no namesakes *) - | _ -> false (* uninteresting thing that won't be printed*) -;; - - -(* uri_of_path sp is a broken (wrong) uri pointing to the object whose *) -(* section path is sp *) -let uri_of_path sp tag = - let module N = Names in - let module No = Nameops in - let ext_of_sp sp = ext_of_tag tag in - let dir0 = No.extend_dirpath (No.dirpath sp) (No.basename sp) in - let dir = List.map N.string_of_id (List.rev (N.repr_dirpath dir0)) in - "cic:/" ^ (String.concat "/" dir) ^ "." ^ (ext_of_sp sp) -;; - -let string_of_sort = - let module T = Term in - function - T.Prop(T.Pos) -> "Set" - | T.Prop(T.Null) -> "Prop" - | T.Type(_) -> "Type" -;; - -let string_of_name = - let module N = Names in - function - N.Name id -> N.string_of_id id - | _ -> Util.anomaly "this should not happen" -;; - -(* A SIMPLE DATA STRUCTURE AND SOME FUNCTIONS TO MANAGE THE CURRENT *) -(* ENVIRONMENT (= [l1, ..., ln] WHERE li IS THE LIST OF VARIABLES *) -(* DECLARED IN THE i-th SUPER-SECTION OF THE CURRENT SECTION *) - -(*CSC: commento sbagliato ed obsoleto *) -(* list of variables availables in the actual section *) -let pvars = ref ([[]] : string list list);; -let cumenv = ref Environ.empty_env;; - -let string_of_vars_list = - let add_prefix n s = string_of_int n ^ ": " ^ s in - let rec aux = - function - [n,v] -> (n,v) - | (n,v)::tl -> - let (k,s) = aux tl in - if k = n then (k, v ^ " " ^ s) else (n, v ^ " " ^ add_prefix k s) - | [] -> assert false - in - function - [] -> "" - | vl -> let (k,s) = aux vl in add_prefix k s -;; - -(* -let string_of_pvars pvars hyps = - let rec aux = - function - [] -> (0, "") - | he::tl -> - let (n,s) = aux tl in - (n + 1, - string_of_int n ^ ": " ^ (String.concat " ") (List.rev he) ^ - match s with "" -> "" | _ -> " " ^ s - ) - in - snd (aux (List.rev pvars)) -;; -*) - -let string_of_pvars pvars hyps = - let find_depth pvars v = - let rec aux n = - function - [] -> assert false -(* (-1,"?") For Print XML *) -(* -print_string "\nPVARS:" ; -List.iter (List.iter print_string) pvars ; -print_string "\nHYPS:" ; -List.iter print_string hyps ; -print_string "\n" ; -flush stdout ; -assert false -*) - | l::_ when List.mem v l -> (n,v) - | _::tl -> aux (n+1) tl - in - aux 0 pvars - in - string_of_vars_list (List.map (find_depth pvars) (List.rev hyps)) -;; - -type variables_type = - Definition of string * Term.constr * Term.types - | Assumption of string * Term.constr -;; - -let add_to_pvars x = - let module E = Environ in - 
let v = - match x with - Definition (v, bod, typ) -> - cumenv := - E.push_named_decl (Names.id_of_string v, Some bod, typ) !cumenv ; - v - | Assumption (v, typ) -> - cumenv := - E.push_named_decl (Names.id_of_string v, None, typ) !cumenv ; - v - in - match !pvars with - [] -> assert false - | (he::tl) -> pvars := (v::he)::tl -;; - -let get_depth_of_var v = - let rec aux n = - function - [] -> None - | (he::tl) -> if List.mem v he then Some n else aux (n + 1) tl - in - aux 0 !pvars -;; - -(* FUNCTIONS TO CREATE IDENTIFIERS *) - -(* seed to create the next identifier *) -let next_id_cic = ref 0;; -let next_id_types = ref 0;; - -(* reset_ids () reset the identifier seed to 0 *) -let reset_ids () = - next_id_cic := 0 ; - next_id_types := 0 -;; - -(* get_next_id () returns fresh identifier *) -let get_next_id = - function - `T -> - let res = - "t" ^ string_of_int !next_id_types - in - incr next_id_types ; - res - | `I -> - let res = - "i" ^ string_of_int !next_id_cic - in - incr next_id_cic ; - res -;; - -type innersort = - InnerProp of Term.constr (* the constr is the type of the term *) - | InnerSet - | InnerType -;; - -(* FUNCTION TO PRINT A SINGLE TERM OF CIC *) -(* print_term t l where t is a term of Cic returns a stream of XML tokens *) -(* suitable to be printed via the pretty printer Xml.pp. l is the list of *) -(* bound names *) -let print_term inner_types l env csr = - let module N = Names in - let module E = Environ in - let module T = Term in - let module X = Xml in - let module R = Retyping in - let rec names_to_ids = - function - [] -> [] - | (N.Name id)::l -> id::(names_to_ids l) - | _ -> names_to_ids l - in - - let inner_type_display env term = - let type_of_term = - Reduction.nf_betaiota (R.get_type_of env (Evd.empty) term) - in - match R.get_sort_family_of env (Evd.empty) type_of_term with - T.InProp -> InnerProp type_of_term - | T.InSet -> InnerSet - | T.InType -> InnerType - in - -(*WHICH FORCE FUNCTION DEPENDS ON THE ORDERING YOU WANT - let rec force = - parser - [< 'he ; tl >] -> let tl = (force tl) in [< 'he ; tl >] - | [< >] -> [<>] - in -*) - let force x = x in - - let rec term_display idradix in_lambda l env cstr = - let next_id = get_next_id idradix in - let add_sort_attribute do_output_type l' = - let xmlinnertype = inner_type_display env cstr in - match xmlinnertype with - InnerProp type_of_term -> - if do_output_type & idradix = `I then - begin - let pp_cmds = term_display `T false l env type_of_term in - inner_types := (next_id, pp_cmds)::!inner_types ; - end ; - ("sort","Prop")::l' - | InnerSet -> ("sort","Set")::l' - | InnerType -> ("sort","Type")::l' - in - let add_type_attribute l' = - ("type", string_of_sort (R.get_sort_of env (Evd.empty) cstr))::l' - in - (* kind_of_term helps doing pattern matching hiding the lower level of *) - (* coq coding of terms (the one of the logical framework) *) - match T.kind_of_term cstr with - T.Rel n -> - let id = - match List.nth l (n - 1) with - N.Name id -> id - | N.Anonymous -> Nameops.make_ident "_" None - in - X.xml_empty "REL" - (add_sort_attribute false - ["value",(string_of_int n) ; - "binder",(N.string_of_id id) ; - "id", next_id]) - | T.Var id -> - let depth = - match get_depth_of_var (N.string_of_id id) with - None -> "?" 
(* when printing via Show XML Proof or Print XML id *) - (* no infos about variables uri are known *) -(*V7 posso usare nametab, forse*) - | Some n -> string_of_int n - in - X.xml_empty "VAR" - (add_sort_attribute false - ["relUri",depth ^ "," ^ (N.string_of_id id) ; - "id", next_id]) - | T.Meta n -> - X.xml_empty "META" - (add_sort_attribute false ["no",(string_of_int n) ; "id", next_id]) - | T.Sort s -> - X.xml_empty "SORT" ["value",(string_of_sort s) ; "id", next_id] - | T.Cast (t1,t2) -> - X.xml_nempty "CAST" (add_sort_attribute false ["id", next_id]) -(force - [< X.xml_nempty "term" [] (term_display idradix false l env t1) ; - X.xml_nempty "type" [] (term_display idradix false l env t2) - >] -) - | T.LetIn (nid,s,t,d)-> - let nid' = Nameops.next_name_away nid (names_to_ids l) in - X.xml_nempty "LETIN" (add_sort_attribute true ["id", next_id]) -(force - [< X.xml_nempty "term" [] (term_display idradix false l env s) ; - X.xml_nempty "letintarget" ["binder",(N.string_of_id nid')] - (term_display idradix false - ((N.Name nid')::l) - (E.push_rel (N.Name nid', Some s, t) env) - d - ) - >] -) - | T.Prod (N.Name _ as nid, t1, t2) -> - let nid' = Nameops.next_name_away nid (names_to_ids l) in - X.xml_nempty "PROD" (add_type_attribute ["id", next_id]) -(force - [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; - X.xml_nempty "target" - (if idradix = `T && - T.noccurn 1 t2 - then [] - else ["binder",(N.string_of_id nid')]) - (term_display idradix false - ((N.Name nid')::l) - (E.push_rel (N.Name nid', None, t1) env) - t2 - ) - >] -) - | T.Prod (N.Anonymous as nid, t1, t2) -> - X.xml_nempty "PROD" (add_type_attribute ["id", next_id]) -(force - [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; - X.xml_nempty "target" [] - (term_display idradix false - (nid::l) - (E.push_rel (nid, None, t1) env) - t2 - ) - >] -) - | T.Lambda (N.Name _ as nid, t1, t2) -> - let nid' = Nameops.next_name_away nid (names_to_ids l) in - X.xml_nempty "LAMBDA" (add_sort_attribute (not in_lambda) ["id",next_id]) -(force - [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; - X.xml_nempty "target" ["binder",(N.string_of_id nid')] - (term_display idradix true - ((N.Name nid')::l) - (E.push_rel (N.Name nid', None, t1) env) - t2 - ) - >] -) - | T.Lambda (N.Anonymous as nid, t1, t2) -> - X.xml_nempty "LAMBDA" (add_sort_attribute (not in_lambda) ["id", next_id]) -(force - [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; - X.xml_nempty "target" [] - (term_display idradix true - (nid::l) - (E.push_rel (nid, None, t1) env) - t2 - ) - >] -) - | T.App (h,t) -> - X.xml_nempty "APPLY" (add_sort_attribute true ["id", next_id]) -(force - [< (term_display idradix false l env h) ; - (Array.fold_right - (fun x i -> [< (term_display idradix false l env x); i >]) t [<>]) - >] -) - | T.Const sp -> - X.xml_empty "CONST" - (add_sort_attribute false - ["uri",(uri_of_path sp Constant) ; "id", next_id]) - | T.Ind (sp,i) -> - X.xml_empty "MUTIND" - ["uri",(uri_of_path sp Inductive) ; - "noType",(string_of_int i) ; - "id", next_id] - | T.Construct ((sp,i),j) -> - X.xml_empty "MUTCONSTRUCT" - (add_sort_attribute false - ["uri",(uri_of_path sp Inductive) ; - "noType",(string_of_int i) ; - "noConstr",(string_of_int j) ; - "id", next_id]) - | T.Case ({T.ci_ind=(sp,i)},ty,term,a) -> - let (uri, typeno) = (uri_of_path sp Inductive),i in - X.xml_nempty "MUTCASE" - (add_sort_attribute true - ["uriType",uri ; "noType",(string_of_int typeno) ; "id",next_id]) -(force - [< X.xml_nempty 
"patternsType" [] - [< (term_display idradix false l env ty) >] ; - X.xml_nempty "inductiveTerm" [] - [< (term_display idradix false l env term)>]; - Array.fold_right - (fun x i -> - [< X.xml_nempty "pattern" [] - [] ; i>] - ) a [<>] - >] -) - | T.Fix ((ai,i),((f,t,b) as rec_decl)) -> - X.xml_nempty "FIX" - (add_sort_attribute true ["noFun", (string_of_int i) ; "id",next_id]) -(force - [< Array.fold_right - (fun (fi,ti,bi,ai) i -> - [< X.xml_nempty "FixFunction" - ["name", (string_of_name fi); "recIndex", (string_of_int ai)] - [< X.xml_nempty "type" [] - [< term_display idradix false l env ti >] ; - X.xml_nempty "body" [] [< - term_display idradix false - ((Array.to_list f)@l) - (E.push_rec_types rec_decl env) - bi - >] - >] ; - i - >] - ) - (Array.mapi (fun j x -> (x,t.(j),b.(j),ai.(j)) ) f ) - [<>] - >] -) - | T.CoFix (i,((f,t,b) as rec_decl)) -> - X.xml_nempty "COFIX" - (add_sort_attribute true ["noFun", (string_of_int i) ; "id",next_id]) -(force - [< Array.fold_right - (fun (fi,ti,bi) i -> - [< X.xml_nempty "CofixFunction" ["name", (string_of_name fi)] - [< X.xml_nempty "type" [] - [< term_display idradix false l env ti >] ; - X.xml_nempty "body" [] [< - term_display idradix false - ((Array.to_list f)@l) - (E.push_rec_types rec_decl env) - bi - >] - >] ; - i - >] - ) - (Array.mapi (fun j x -> (x,t.(j),b.(j)) ) f ) [<>] - >] -) - | T.Evar _ -> - Util.anomaly "Evar node in a term!!!" - in - (*CSC: ad l vanno andrebbero aggiunti i nomi da non *) - (*CSC: mascherare (costanti? variabili?) *) - term_display `I false l env csr -;; - -(* FUNCTIONS TO PRINT A SINGLE OBJECT OF COQ *) - -(* print_current_proof term type id conjectures *) -(* where term is the term of the proof in progress p *) -(* and type is the type of p *) -(* and id is the identifier (name) of p *) -(* and conjectures is the list of conjectures (cic terms) *) -(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) -let print_current_proof c typ id mv inner_types = - let module X = Xml in - let env = (Safe_typing.env_of_safe_env (Global.safe_env ())) in - [< X.xml_cdata "\n" ; - X.xml_cdata ("\n\n") ; - X.xml_nempty "CurrentProof" ["name",id ; "id", get_next_id `I] - [< List.fold_right - (fun (j,t) i -> - [< X.xml_nempty "Conjecture" ["no",(string_of_int j)] - [< print_term inner_types [] env t >] ; i >]) - mv [<>] ; - X.xml_nempty "body" [] [< print_term inner_types [] env c >] ; - X.xml_nempty "type" [] [< print_term inner_types [] env typ >] - >] - >] -;; - -(* print_axiom id type params *) -(* where type is the type of an axiom a *) -(* and id is the identifier (name) of a *) -(* and params is the list of formal params for a *) -(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) -let print_axiom id typ fv hyps env inner_types = - let module X = Xml in - let module N = Names in - [< X.xml_cdata "\n" ; - X.xml_cdata ("\n\n") ; - X.xml_nempty "Axiom" - ["name",(N.string_of_id id) ; - "id", get_next_id `I ; - "params",(string_of_pvars fv hyps)] - [< X.xml_nempty "type" [] (print_term inner_types [] env typ) >] - >] -;; - -(* print_definition id term type params hyps *) -(* where id is the identifier (name) of a definition d *) -(* and term is the term (body) of d *) -(* and type is the type of d *) -(* and params is the list of formal params for d *) -(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) -let print_definition id c typ fv hyps env inner_types = - let module X = Xml in - let module N = Names in - [< X.xml_cdata "\n" ; - X.xml_cdata 
("\n\n") ; - X.xml_nempty "Definition" - ["name",(N.string_of_id id) ; - "id", get_next_id `I ; - "params",(string_of_pvars fv hyps)] - [< X.xml_nempty "body" [] (print_term inner_types [] env c) ; - X.xml_nempty "type" [] (print_term inner_types [] env typ) >] - >] -;; - -(* print_variable id type *) -(* where id is the identifier (name) of a variable v *) -(* and type is the type of v *) -(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) -let print_variable id body typ env inner_types = - let module X = Xml in - let module N = Names in - [< X.xml_cdata "\n" ; - X.xml_cdata ("\n\n") ; - X.xml_nempty "Variable" ["name",(N.string_of_id id); "id", get_next_id `I] - [< (match body with - None -> [<>] - | Some body -> - X.xml_nempty "body" [] (print_term inner_types [] env body) - ) ; - X.xml_nempty "type" [] (print_term inner_types [] env typ) - >] - >] -;; - -(* print_mutual_inductive_packet p *) -(* where p is a mutual inductive packet (an inductive definition in a block *) -(* of mutual inductive definitions) *) -(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) -(* Used only by print_mutual_inductive *) -let print_mutual_inductive_packet inner_types names env finite p = - let module D = Declarations in - let module N = Names in - let module T = Term in - let module X = Xml in - let {D.mind_consnames=consnames ; - D.mind_typename=typename ; - D.mind_nf_lc=lc ; - D.mind_nf_arity=arity} = p - in - [< X.xml_nempty "InductiveType" - ["name",(N.string_of_id typename) ; - "inductive",(string_of_bool finite) - ] - [< - X.xml_nempty "arity" [] - (print_term inner_types [] env (T.body_of_type arity)) ; - (Array.fold_right - (fun (name,lc) i -> - [< X.xml_nempty "Constructor" ["name",(N.string_of_id name)] - (print_term inner_types names env lc) ; - i - >]) - (Array.mapi (fun j x -> (x,T.body_of_type lc.(j)) ) consnames ) - [<>] - ) - >] - >] -;; - -(* print_mutual_inductive packs params nparams *) -(* where packs is the list of inductive definitions in the block of *) -(* mutual inductive definitions b *) -(* and params is the list of formal params for b *) -(* and nparams is the number of "parameters" in the arity of the *) -(* mutual inductive types *) -(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) -let print_mutual_inductive finite packs fv hyps env inner_types = - let module D = Declarations in - let module E = Environ in - let module X = Xml in - let module N = Names in - let nparams = extract_nparams packs in - let names = - List.map - (fun p -> let {D.mind_typename=typename} = p in N.Name(typename)) - (Array.to_list packs) - in - let env = - List.fold_right - (fun {D.mind_typename=typename ; D.mind_nf_arity=arity} env -> - E.push_rel (N.Name typename, None, arity) env) - (Array.to_list packs) - env - in - [< X.xml_cdata "\n" ; - X.xml_cdata ("\n\n") ; - X.xml_nempty "InductiveDefinition" - ["noParams",string_of_int nparams ; - "id",get_next_id `I ; - "params",(string_of_pvars fv hyps)] - [< (Array.fold_right - (fun x i -> - [< print_mutual_inductive_packet - inner_types names env finite x ; i >] - ) packs [< >] - ) - >] - >] -;; - -let string_list_of_named_context_list = - List.map - (function (n,_,_) -> Names.string_of_id n) -;; - -let types_filename_of_filename = - function - Some f -> Some (f ^ ".types") - | None -> None -;; - -let pp_cmds_of_inner_types inner_types target_uri = - let module X = Xml in - let rec stream_of_list = - function - [] -> [<>] - | he::tl -> [< he ; stream_of_list tl >] - in - 
[< X.xml_cdata "\n" ; - X.xml_cdata ("\n\n"); - X.xml_nempty "InnerTypes" ["of",target_uri] - (stream_of_list - (List.map - (function - (id,type_pp_cmds) -> X.xml_nempty "TYPE" ["of",id] type_pp_cmds) - (List.rev inner_types) - )) - >] -;; - -(* print id dest *) -(* where sp is the qualified identifier (section path) of a *) -(* definition/theorem, variable or inductive definition *) -(* and dest is either None (for stdout) or (Some filename) *) -(* pretty prints via Xml.pp the object whose identifier is id on dest *) -(* Note: it is printed only (and directly) the most cooked available *) -(* form of the definition (all the parameters are *) -(* lambda-abstracted, but the object can still refer to variables) *) -let print qid fn = - let module D = Declarations in - let module G = Global in - let module N = Names in - let module Nt = Nametab in - let module T = Term in - let module X = Xml in - let (_,id) = Nt.repr_qualid qid in - let glob_ref = Nametab.locate qid in - let env = (Safe_typing.env_of_safe_env (G.safe_env ())) in - reset_ids () ; - let inner_types = ref [] in - let sp,tag,pp_cmds = - match glob_ref with - Nt.VarRef id -> - let sp = Declare.find_section_variable id in - let (_,body,typ) = G.lookup_named id in - sp,Variable,print_variable id body (T.body_of_type typ) env inner_types - | Nt.ConstRef sp -> - let {D.const_body=val0 ; D.const_type = typ ; D.const_hyps = hyps} = - G.lookup_constant sp in - let hyps = string_list_of_named_context_list hyps in - let typ = T.body_of_type typ in - sp,Constant, - begin - match val0 with - None -> print_axiom id typ [] hyps env inner_types - | Some c -> print_definition id c typ [] hyps env inner_types - end - | Nt.IndRef (sp,_) -> - let {D.mind_packets=packs ; - D.mind_hyps=hyps; - D.mind_finite=finite} = G.lookup_mind sp in - let hyps = string_list_of_named_context_list hyps in - sp,Inductive, - print_mutual_inductive finite packs [] hyps env inner_types - | Nt.ConstructRef _ -> - Util.anomaly ("print: this should not happen") - in - Xml.pp pp_cmds fn ; - Xml.pp (pp_cmds_of_inner_types !inner_types (uri_of_path sp tag)) - (types_filename_of_filename fn) -;; - -(* show dest *) -(* where dest is either None (for stdout) or (Some filename) *) -(* pretty prints via Xml.pp the proof in progress on dest *) -let show fn = - let pftst = Pfedit.get_pftreestate () in - let str = Names.string_of_id (Pfedit.get_current_proof_name ()) in - let pf = Tacmach.proof_of_pftreestate pftst in - let typ = (Proof_trees.goal_of_proof pf).Evd.evar_concl in - let val0,mv = Tacmach.extract_open_pftreestate pftst in - reset_ids () ; - let inner_types = ref [] in - Xml.pp - (print_current_proof val0 typ str mv inner_types) - fn ; - Xml.pp (pp_cmds_of_inner_types !inner_types ("?" 
^ str ^ "?")) - (types_filename_of_filename fn) -;; - -(* FUNCTIONS TO PRINT AN ENTIRE SECTION OF COQ *) - -(* list of (name, uncooked sp, most cooked sp, uncooked leaf object, *) -(* list of possible variables, directory name) *) -(* used to collect informations on the uncooked and most cooked forms *) -(* of objects that could be cooked (have namesakes) *) -let printed = ref [];; - -(* makes a filename from a directory name, a section path and an extension *) -let mkfilename dn sp ext = - let dir_list_of_dirpath s = - let rec aux n = - try - let pos = String.index_from s n '/' in - let head = String.sub s n (pos - n) in - head::(aux (pos + 1)) - with - Not_found -> [String.sub s n (String.length s - n)] - in - aux 0 - in - let rec join_dirs cwd = - function - [] -> assert false - | [he] -> "/" ^ he - | he::tail -> - let newcwd = cwd ^ "/" ^ he in - (try - Unix.mkdir newcwd 0o775 - with _ -> () (* Let's ignore the errors on mkdir *) - ) ; - "/" ^ he ^ join_dirs newcwd tail - in - let module L = Library in - let module S = System in - let module N = Names in - let module No = Nameops in - match dn with - None -> None - | Some basedir -> - let dir0 = No.extend_dirpath (No.dirpath sp) (No.basename sp) in - let dir = List.map N.string_of_id (List.rev (N.repr_dirpath dir0)) in - Some (basedir ^ join_dirs basedir dir ^ "." ^ ext) -;; - -(* print_object leaf_object id section_path directory_name formal_vars *) -(* where leaf_object is a leaf object *) -(* and id is the identifier (name) of the definition/theorem *) -(* or of the inductive definition o *) -(* and section_path is the section path of o *) -(* and directory_name is the base directory in which putting the print *) -(* and formal_vars is the list of parameters (variables) of o *) -(* pretty prints via Xml.pp the object o in the right directory deduced *) -(* from directory_name and section_path *) -(* Note: it is printed only the uncooked available form of the object plus *) -(* the list of parameters of the object deduced from it's most cooked *) -(* form *) -let print_object lobj id sp dn fv env = - let module D = Declarations in - let module E = Environ in - let module G = Global in - let module N = Names in - let module T = Term in - let module X = Xml in - let strtag = Libobject.object_tag lobj in - try - let tag = tag_of_string_tag strtag in - reset_ids () ; - let inner_types = ref [] in - let pp_cmds = - match strtag with - "CONSTANT" (* = Definition, Theorem *) - | "PARAMETER" (* = Axiom *) -> - let {D.const_body=val0 ; D.const_type = typ ; D.const_hyps = hyps} = - G.lookup_constant sp - in - let hyps = string_list_of_named_context_list hyps in - let typ = T.body_of_type typ in - begin - match val0 with - None -> print_axiom id typ fv hyps env inner_types - | Some c -> print_definition id c typ fv hyps env inner_types - end - | "INDUCTIVE" -> - let - {D.mind_packets=packs ; - D.mind_hyps = hyps; - D.mind_finite = finite - } = G.lookup_mind sp - in - let hyps = string_list_of_named_context_list hyps in - print_mutual_inductive finite packs fv hyps env inner_types - | "VARIABLE" -> - let (_,(_,varentry,_)) = Declare.out_variable lobj in - begin - match varentry with - Declare.SectionLocalDef (body,optt) -> - let typ = match optt with - | None -> Retyping.get_type_of env Evd.empty body - | Some t -> t in - add_to_pvars (Definition (N.string_of_id id, body, typ)) ; - print_variable id (Some body) typ env inner_types - | Declare.SectionLocalAssum typ -> - add_to_pvars (Assumption (N.string_of_id id, typ)) ; - print_variable id None 
(T.body_of_type typ) env inner_types - end - | "CLASS" - | "COERCION" - | _ -> raise Uninteresting - and fileext () = ext_of_tag tag in - let fn = (mkfilename dn sp (fileext ())) in - Xml.pp pp_cmds fn ; - Xml.pp (pp_cmds_of_inner_types !inner_types (uri_of_path sp tag)) - (types_filename_of_filename fn) - with - Uninteresting -> () -;; - -(* print_library_segment state bprintleaf directory_name *) -(* where state is a list of (section-path, node) *) -(* and bprintleaf is true iff the leaf objects should be printed *) -(* and directory_name is the name of the base directory in which to print *) -(* the files *) -(* print via print_node all the nodes (leafs excluded if bprintleaf is false) *)(* in state *) -let rec print_library_segment state bprintleaf dn = - List.iter - (function (sp, node) -> - print_if_verbose ("Print_library_segment: " ^ Names.string_of_path sp ^ "\n") ; - print_node node (Nameops.basename sp) sp bprintleaf dn ; - print_if_verbose "\n" - ) (List.rev state) -(* print_node node id section_path bprintleaf directory_name *) -(* prints a single node (and all it's subnodes via print_library_segment *) -and print_node n id sp bprintleaf dn = - let module L = Lib in - match n with - L.Leaf o -> - print_if_verbose "LEAF\n" ; - if bprintleaf then - begin - if not (List.mem id !printed) then - (* this is an uncooked term or a local (with no namesakes) term *) - begin -try - if could_have_namesakes o sp then - begin - (* this is an uncooked term *) - print_if_verbose ("OK, stampo solo questa volta " ^ Names.string_of_id id ^ "\n") ; - print_object o id sp dn !pvars !cumenv ; - printed := id::!printed - end - else - begin - (* this is a local term *) - print_if_verbose ("OK, stampo " ^ Names.string_of_id id ^ "\n") ; - print_object o id sp dn !pvars !cumenv - end -with _ -> print_if_verbose ("EXCEPTION RAISED!!!\n"); - end - else - begin - (* update the least cooked sp *) - print_if_verbose ("Suppongo gia' stampato " ^ Names.string_of_id id ^ "\n") - end - end - | L.OpenedSection (dir,_) -> - let id = snd (Nameops.split_dirpath dir) in - print_if_verbose ("OpenDir " ^ Names.string_of_id id ^ "\n") - | L.ClosedSection (_,dir,state) -> - let id = snd (Nameops.split_dirpath dir) in - print_if_verbose("ClosedDir " ^ Names.string_of_id id ^ "\n") ; - if bprintleaf then - begin - (* open a new scope *) - pvars := []::!pvars ; - print_library_segment state bprintleaf dn ; - (* close the scope *) - match !pvars with - [] -> assert false - | he::tl -> pvars := tl - end ; - print_if_verbose "/ClosedDir\n" - | L.Module s -> - print_if_verbose ("Module " ^ (Names.string_of_dirpath s) ^ "\n") - | L.FrozenState _ -> - print_if_verbose ("FrozenState\n") -;; - -(* print_closed_section module_name node directory_name *) -(* where module_name is the name of a module *) -(* and node is the library segment of the module *) -(* and directory_name is the base directory in which to put the xml files *) -(* prints all the leaf objects of the tree rooted in node using *) -(* print_library_segment on all its sons *) -let print_closed_section s ls dn = - let module L = Lib in - printed := [] ; - pvars := [[]] ; - cumenv := Safe_typing.env_of_safe_env (Global.safe_env ()) ; - print_if_verbose ("Module " ^ s ^ ":\n") ; - print_library_segment ls true dn ; - print_if_verbose "\n/Module\n" ; -;; - -(* printModule identifier directory_name *) -(* where identifier is the name of a module (section) d *) -(* and directory_name is the directory in which to root all the xml files *) -(* prints all the xml files and 
directories corresponding to the subsections *) -(* and terms of the module d *) -(* Note: the terms are printed in their uncooked form plus the informations *) -(* on the parameters of their most cooked form *) -let printModule qid dn = - let module L = Library in - let module N = Nametab in - let module X = Xml in - - let (_,dir_path,_) = L.locate_qualified_library qid in - - let str = N.string_of_qualid qid in - let ls = L.module_segment (Some dir_path) in - print_if_verbose ("MODULE_BEGIN " ^ str ^ " " ^ - (L.module_full_filename dir_path) ^ "\n") ; - print_closed_section str (List.rev ls) dn ; - print_if_verbose ("MODULE_END " ^ str ^ " " ^ - (L.module_full_filename dir_path) ^ "\n") -;; - -(* printSection identifier directory_name *) -(* where identifier is the name of a closed section d *) -(* and directory_name is the directory in which to root all the xml files *) -(* prints all the xml files and directories corresponding to the subsections *) -(* and terms of the closed section d *) -(* Note: the terms are printed in their uncooked form plus the informations *) -(* on the parameters of their most cooked form *) -let printSection id dn = - let module L = Library in - let module N = Names in - let module No = Nameops in - let module X = Xml in - let sp = Lib.make_path id in - let ls = - let rec find_closed_section = - function - [] -> raise Not_found - | (_,Lib.ClosedSection (_,dir,ls))::_ when snd (No.split_dirpath dir) = id - -> ls - | _::t -> find_closed_section t - in - print_string ("Searching " ^ Names.string_of_path sp ^ "\n") ; - find_closed_section (Lib.contents_after None) - in - let str = N.string_of_id id in - print_if_verbose ("SECTION_BEGIN " ^ str ^ " " ^ N.string_of_path sp ^ "\n"); - print_closed_section str ls dn ; - print_if_verbose ("SECTION_END " ^ str ^ " " ^ N.string_of_path sp ^ "\n") -;; - -(* print All () prints what is the structure of the current environment of *) -(* Coq. No terms are printed. 
Useful only for debugging *) -let printAll () = - let state = Library.module_segment None in - let oldverbose = !verbose in - verbose := true ; - print_library_segment state false None ; -(* - let ml = Library.loaded_modules () in - List.iter (function i -> printModule (Names.id_of_string i) None) ml ; -*) - verbose := oldverbose -;; diff --git a/contrib/xml/xmlcommand.ml4 b/contrib/xml/xmlcommand.ml4 new file mode 100644 index 0000000000..7b6303130b --- /dev/null +++ b/contrib/xml/xmlcommand.ml4 @@ -0,0 +1,1037 @@ +(***********************************************************************) +(* v * The Coq Proof Assistant / The Coq Development Team *) +(* *) +(* 06/12/2000 *) +(* *) +(******************************************************************************) + + +(* CONFIGURATION PARAMETERS *) + +let dtdname = "http://www.cs.unibo.it/helm/dtd/cic.dtd";; +let typesdtdname = "http://www.cs.unibo.it/helm/dtd/cictypes.dtd";; +let verbose = ref false;; (* always set to true during a "Print XML All" *) + +(* UTILITY FUNCTIONS *) + +let print_if_verbose s = if !verbose then print_string s;; + +type tag = + Constant + | Inductive + | Variable +;; + +(* Next exception is used only inside print_object and tag_of_string_tag *) +exception Uninteresting;; + +let tag_of_string_tag = + function + "CONSTANT" + | "PARAMETER" -> Constant + | "INDUCTIVE" -> Inductive + | "VARIABLE" -> Variable + | _ -> raise Uninteresting +;; + +let ext_of_tag = + function + Constant -> "con" + | Inductive -> "ind" + | Variable -> "var" +;; + +(* Internally, for Coq V7, params of inductive types are associated *) +(* not to the whole block of mutual inductive (as it was in V6) but to *) +(* each member of the block; but externally, all params are required *) +(* to be the same; the following function checks that the parameters *) +(* of each inductive of a same block are all the same, then returns *) +(* this number; it fails otherwise *) +let extract_nparams pack = + let module D = Declarations in + let module U = Util in + let module S = Sign in + + let {D.mind_nparams=nparams0} = pack.(0) in + let arity0 = pack.(0).D.mind_user_arity in + let params0, _ = S.decompose_prod_n_assum nparams0 arity0 in + for i = 1 to Array.length pack - 1 do + let {D.mind_nparams=nparamsi} = pack.(i) in + let arityi = pack.(i).D.mind_user_arity in + let paramsi, _ = S.decompose_prod_n_assum nparamsi arityi in + if params0 <> paramsi then U.error "Cannot convert a block of inductive definitions with parameters specific to each inductive to a block of mutual inductive definitions with parameters global to the whole block" + done; + nparams0 + +(* could_have_namesakes sp = true iff o is an object that could be cooked and *) +(* than that could exists in cooked form with the same name in a super *) +(* section of the actual section *) +let could_have_namesakes o sp = (* namesake = omonimo in italian *) + let module D = Declare in + let tag = Libobject.object_tag o in + print_if_verbose ("Object tag: " ^ tag ^ "\n") ; + match tag with + "CONSTANT" -> + (match D.constant_strength sp with + | D.DischargeAt _ -> false (* a local definition *) + | D.NotDeclare -> false (* not a definition *) + | D.NeverDischarge -> true (* a non-local one *) + ) + | "PARAMETER" (* axioms and *) + | "INDUCTIVE" -> true (* mutual inductive types are never local *) + | "VARIABLE" -> false (* variables are local, so no namesakes *) + | _ -> false (* uninteresting thing that won't be printed*) +;; + + +(* uri_of_path sp is a broken (wrong) uri pointing to the object 
whose *) +(* section path is sp *) +let uri_of_path sp tag = + let module N = Names in + let module No = Nameops in + let ext_of_sp sp = ext_of_tag tag in + let dir0 = No.extend_dirpath (No.dirpath sp) (No.basename sp) in + let dir = List.map N.string_of_id (List.rev (N.repr_dirpath dir0)) in + "cic:/" ^ (String.concat "/" dir) ^ "." ^ (ext_of_sp sp) +;; + +let string_of_sort = + let module T = Term in + function + T.Prop(T.Pos) -> "Set" + | T.Prop(T.Null) -> "Prop" + | T.Type(_) -> "Type" +;; + +let string_of_name = + let module N = Names in + function + N.Name id -> N.string_of_id id + | _ -> Util.anomaly "this should not happen" +;; + +(* A SIMPLE DATA STRUCTURE AND SOME FUNCTIONS TO MANAGE THE CURRENT *) +(* ENVIRONMENT (= [l1, ..., ln] WHERE li IS THE LIST OF VARIABLES *) +(* DECLARED IN THE i-th SUPER-SECTION OF THE CURRENT SECTION *) + +(*CSC: commento sbagliato ed obsoleto *) +(* list of variables availables in the actual section *) +let pvars = ref ([[]] : string list list);; +let cumenv = ref Environ.empty_env;; + +let string_of_vars_list = + let add_prefix n s = string_of_int n ^ ": " ^ s in + let rec aux = + function + [n,v] -> (n,v) + | (n,v)::tl -> + let (k,s) = aux tl in + if k = n then (k, v ^ " " ^ s) else (n, v ^ " " ^ add_prefix k s) + | [] -> assert false + in + function + [] -> "" + | vl -> let (k,s) = aux vl in add_prefix k s +;; + +(* +let string_of_pvars pvars hyps = + let rec aux = + function + [] -> (0, "") + | he::tl -> + let (n,s) = aux tl in + (n + 1, + string_of_int n ^ ": " ^ (String.concat " ") (List.rev he) ^ + match s with "" -> "" | _ -> " " ^ s + ) + in + snd (aux (List.rev pvars)) +;; +*) + +let string_of_pvars pvars hyps = + let find_depth pvars v = + let rec aux n = + function + [] -> assert false +(* (-1,"?") For Print XML *) +(* +print_string "\nPVARS:" ; +List.iter (List.iter print_string) pvars ; +print_string "\nHYPS:" ; +List.iter print_string hyps ; +print_string "\n" ; +flush stdout ; +assert false +*) + | l::_ when List.mem v l -> (n,v) + | _::tl -> aux (n+1) tl + in + aux 0 pvars + in + string_of_vars_list (List.map (find_depth pvars) (List.rev hyps)) +;; + +type variables_type = + Definition of string * Term.constr * Term.types + | Assumption of string * Term.constr +;; + +let add_to_pvars x = + let module E = Environ in + let v = + match x with + Definition (v, bod, typ) -> + cumenv := + E.push_named_decl (Names.id_of_string v, Some bod, typ) !cumenv ; + v + | Assumption (v, typ) -> + cumenv := + E.push_named_decl (Names.id_of_string v, None, typ) !cumenv ; + v + in + match !pvars with + [] -> assert false + | (he::tl) -> pvars := (v::he)::tl +;; + +let get_depth_of_var v = + let rec aux n = + function + [] -> None + | (he::tl) -> if List.mem v he then Some n else aux (n + 1) tl + in + aux 0 !pvars +;; + +(* FUNCTIONS TO CREATE IDENTIFIERS *) + +(* seed to create the next identifier *) +let next_id_cic = ref 0;; +let next_id_types = ref 0;; + +(* reset_ids () reset the identifier seed to 0 *) +let reset_ids () = + next_id_cic := 0 ; + next_id_types := 0 +;; + +(* get_next_id () returns fresh identifier *) +let get_next_id = + function + `T -> + let res = + "t" ^ string_of_int !next_id_types + in + incr next_id_types ; + res + | `I -> + let res = + "i" ^ string_of_int !next_id_cic + in + incr next_id_cic ; + res +;; + +type innersort = + InnerProp of Term.constr (* the constr is the type of the term *) + | InnerSet + | InnerType +;; + +(* FUNCTION TO PRINT A SINGLE TERM OF CIC *) +(* print_term t l where t is a term of Cic returns a stream 
of XML tokens *) +(* suitable to be printed via the pretty printer Xml.pp. l is the list of *) +(* bound names *) +let print_term inner_types l env csr = + let module N = Names in + let module E = Environ in + let module T = Term in + let module X = Xml in + let module R = Retyping in + let rec names_to_ids = + function + [] -> [] + | (N.Name id)::l -> id::(names_to_ids l) + | _ -> names_to_ids l + in + + let inner_type_display env term = + let type_of_term = + Reduction.nf_betaiota (R.get_type_of env (Evd.empty) term) + in + match R.get_sort_family_of env (Evd.empty) type_of_term with + T.InProp -> InnerProp type_of_term + | T.InSet -> InnerSet + | T.InType -> InnerType + in + +(*WHICH FORCE FUNCTION DEPENDS ON THE ORDERING YOU WANT + let rec force = + parser + [< 'he ; tl >] -> let tl = (force tl) in [< 'he ; tl >] + | [< >] -> [<>] + in +*) + let force x = x in + + let rec term_display idradix in_lambda l env cstr = + let next_id = get_next_id idradix in + let add_sort_attribute do_output_type l' = + let xmlinnertype = inner_type_display env cstr in + match xmlinnertype with + InnerProp type_of_term -> + if do_output_type & idradix = `I then + begin + let pp_cmds = term_display `T false l env type_of_term in + inner_types := (next_id, pp_cmds)::!inner_types ; + end ; + ("sort","Prop")::l' + | InnerSet -> ("sort","Set")::l' + | InnerType -> ("sort","Type")::l' + in + let add_type_attribute l' = + ("type", string_of_sort (R.get_sort_of env (Evd.empty) cstr))::l' + in + (* kind_of_term helps doing pattern matching hiding the lower level of *) + (* coq coding of terms (the one of the logical framework) *) + match T.kind_of_term cstr with + T.Rel n -> + let id = + match List.nth l (n - 1) with + N.Name id -> id + | N.Anonymous -> Nameops.make_ident "_" None + in + X.xml_empty "REL" + (add_sort_attribute false + ["value",(string_of_int n) ; + "binder",(N.string_of_id id) ; + "id", next_id]) + | T.Var id -> + let depth = + match get_depth_of_var (N.string_of_id id) with + None -> "?" 
(* when printing via Show XML Proof or Print XML id *) + (* no infos about variables uri are known *) +(*V7 posso usare nametab, forse*) + | Some n -> string_of_int n + in + X.xml_empty "VAR" + (add_sort_attribute false + ["relUri",depth ^ "," ^ (N.string_of_id id) ; + "id", next_id]) + | T.Meta n -> + X.xml_empty "META" + (add_sort_attribute false ["no",(string_of_int n) ; "id", next_id]) + | T.Sort s -> + X.xml_empty "SORT" ["value",(string_of_sort s) ; "id", next_id] + | T.Cast (t1,t2) -> + X.xml_nempty "CAST" (add_sort_attribute false ["id", next_id]) +(force + [< X.xml_nempty "term" [] (term_display idradix false l env t1) ; + X.xml_nempty "type" [] (term_display idradix false l env t2) + >] +) + | T.LetIn (nid,s,t,d)-> + let nid' = Nameops.next_name_away nid (names_to_ids l) in + X.xml_nempty "LETIN" (add_sort_attribute true ["id", next_id]) +(force + [< X.xml_nempty "term" [] (term_display idradix false l env s) ; + X.xml_nempty "letintarget" ["binder",(N.string_of_id nid')] + (term_display idradix false + ((N.Name nid')::l) + (E.push_rel (N.Name nid', Some s, t) env) + d + ) + >] +) + | T.Prod (N.Name _ as nid, t1, t2) -> + let nid' = Nameops.next_name_away nid (names_to_ids l) in + X.xml_nempty "PROD" (add_type_attribute ["id", next_id]) +(force + [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; + X.xml_nempty "target" + (if idradix = `T && + T.noccurn 1 t2 + then [] + else ["binder",(N.string_of_id nid')]) + (term_display idradix false + ((N.Name nid')::l) + (E.push_rel (N.Name nid', None, t1) env) + t2 + ) + >] +) + | T.Prod (N.Anonymous as nid, t1, t2) -> + X.xml_nempty "PROD" (add_type_attribute ["id", next_id]) +(force + [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; + X.xml_nempty "target" [] + (term_display idradix false + (nid::l) + (E.push_rel (nid, None, t1) env) + t2 + ) + >] +) + | T.Lambda (N.Name _ as nid, t1, t2) -> + let nid' = Nameops.next_name_away nid (names_to_ids l) in + X.xml_nempty "LAMBDA" (add_sort_attribute (not in_lambda) ["id",next_id]) +(force + [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; + X.xml_nempty "target" ["binder",(N.string_of_id nid')] + (term_display idradix true + ((N.Name nid')::l) + (E.push_rel (N.Name nid', None, t1) env) + t2 + ) + >] +) + | T.Lambda (N.Anonymous as nid, t1, t2) -> + X.xml_nempty "LAMBDA" (add_sort_attribute (not in_lambda) ["id", next_id]) +(force + [< X.xml_nempty "source" [] (term_display idradix false l env t1) ; + X.xml_nempty "target" [] + (term_display idradix true + (nid::l) + (E.push_rel (nid, None, t1) env) + t2 + ) + >] +) + | T.App (h,t) -> + X.xml_nempty "APPLY" (add_sort_attribute true ["id", next_id]) +(force + [< (term_display idradix false l env h) ; + (Array.fold_right + (fun x i -> [< (term_display idradix false l env x); i >]) t [<>]) + >] +) + | T.Const sp -> + X.xml_empty "CONST" + (add_sort_attribute false + ["uri",(uri_of_path sp Constant) ; "id", next_id]) + | T.Ind (sp,i) -> + X.xml_empty "MUTIND" + ["uri",(uri_of_path sp Inductive) ; + "noType",(string_of_int i) ; + "id", next_id] + | T.Construct ((sp,i),j) -> + X.xml_empty "MUTCONSTRUCT" + (add_sort_attribute false + ["uri",(uri_of_path sp Inductive) ; + "noType",(string_of_int i) ; + "noConstr",(string_of_int j) ; + "id", next_id]) + | T.Case ({T.ci_ind=(sp,i)},ty,term,a) -> + let (uri, typeno) = (uri_of_path sp Inductive),i in + X.xml_nempty "MUTCASE" + (add_sort_attribute true + ["uriType",uri ; "noType",(string_of_int typeno) ; "id",next_id]) +(force + [< X.xml_nempty 
"patternsType" [] + [< (term_display idradix false l env ty) >] ; + X.xml_nempty "inductiveTerm" [] + [< (term_display idradix false l env term)>]; + Array.fold_right + (fun x i -> + [< X.xml_nempty "pattern" [] + [] ; i>] + ) a [<>] + >] +) + | T.Fix ((ai,i),((f,t,b) as rec_decl)) -> + X.xml_nempty "FIX" + (add_sort_attribute true ["noFun", (string_of_int i) ; "id",next_id]) +(force + [< Array.fold_right + (fun (fi,ti,bi,ai) i -> + [< X.xml_nempty "FixFunction" + ["name", (string_of_name fi); "recIndex", (string_of_int ai)] + [< X.xml_nempty "type" [] + [< term_display idradix false l env ti >] ; + X.xml_nempty "body" [] [< + term_display idradix false + ((Array.to_list f)@l) + (E.push_rec_types rec_decl env) + bi + >] + >] ; + i + >] + ) + (Array.mapi (fun j x -> (x,t.(j),b.(j),ai.(j)) ) f ) + [<>] + >] +) + | T.CoFix (i,((f,t,b) as rec_decl)) -> + X.xml_nempty "COFIX" + (add_sort_attribute true ["noFun", (string_of_int i) ; "id",next_id]) +(force + [< Array.fold_right + (fun (fi,ti,bi) i -> + [< X.xml_nempty "CofixFunction" ["name", (string_of_name fi)] + [< X.xml_nempty "type" [] + [< term_display idradix false l env ti >] ; + X.xml_nempty "body" [] [< + term_display idradix false + ((Array.to_list f)@l) + (E.push_rec_types rec_decl env) + bi + >] + >] ; + i + >] + ) + (Array.mapi (fun j x -> (x,t.(j),b.(j)) ) f ) [<>] + >] +) + | T.Evar _ -> + Util.anomaly "Evar node in a term!!!" + in + (*CSC: ad l vanno andrebbero aggiunti i nomi da non *) + (*CSC: mascherare (costanti? variabili?) *) + term_display `I false l env csr +;; + +(* FUNCTIONS TO PRINT A SINGLE OBJECT OF COQ *) + +(* print_current_proof term type id conjectures *) +(* where term is the term of the proof in progress p *) +(* and type is the type of p *) +(* and id is the identifier (name) of p *) +(* and conjectures is the list of conjectures (cic terms) *) +(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) +let print_current_proof c typ id mv inner_types = + let module X = Xml in + let env = (Safe_typing.env_of_safe_env (Global.safe_env ())) in + [< X.xml_cdata "\n" ; + X.xml_cdata ("\n\n") ; + X.xml_nempty "CurrentProof" ["name",id ; "id", get_next_id `I] + [< List.fold_right + (fun (j,t) i -> + [< X.xml_nempty "Conjecture" ["no",(string_of_int j)] + [< print_term inner_types [] env t >] ; i >]) + mv [<>] ; + X.xml_nempty "body" [] [< print_term inner_types [] env c >] ; + X.xml_nempty "type" [] [< print_term inner_types [] env typ >] + >] + >] +;; + +(* print_axiom id type params *) +(* where type is the type of an axiom a *) +(* and id is the identifier (name) of a *) +(* and params is the list of formal params for a *) +(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) +let print_axiom id typ fv hyps env inner_types = + let module X = Xml in + let module N = Names in + [< X.xml_cdata "\n" ; + X.xml_cdata ("\n\n") ; + X.xml_nempty "Axiom" + ["name",(N.string_of_id id) ; + "id", get_next_id `I ; + "params",(string_of_pvars fv hyps)] + [< X.xml_nempty "type" [] (print_term inner_types [] env typ) >] + >] +;; + +(* print_definition id term type params hyps *) +(* where id is the identifier (name) of a definition d *) +(* and term is the term (body) of d *) +(* and type is the type of d *) +(* and params is the list of formal params for d *) +(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) +let print_definition id c typ fv hyps env inner_types = + let module X = Xml in + let module N = Names in + [< X.xml_cdata "\n" ; + X.xml_cdata 
("\n\n") ; + X.xml_nempty "Definition" + ["name",(N.string_of_id id) ; + "id", get_next_id `I ; + "params",(string_of_pvars fv hyps)] + [< X.xml_nempty "body" [] (print_term inner_types [] env c) ; + X.xml_nempty "type" [] (print_term inner_types [] env typ) >] + >] +;; + +(* print_variable id type *) +(* where id is the identifier (name) of a variable v *) +(* and type is the type of v *) +(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) +let print_variable id body typ env inner_types = + let module X = Xml in + let module N = Names in + [< X.xml_cdata "\n" ; + X.xml_cdata ("\n\n") ; + X.xml_nempty "Variable" ["name",(N.string_of_id id); "id", get_next_id `I] + [< (match body with + None -> [<>] + | Some body -> + X.xml_nempty "body" [] (print_term inner_types [] env body) + ) ; + X.xml_nempty "type" [] (print_term inner_types [] env typ) + >] + >] +;; + +(* print_mutual_inductive_packet p *) +(* where p is a mutual inductive packet (an inductive definition in a block *) +(* of mutual inductive definitions) *) +(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) +(* Used only by print_mutual_inductive *) +let print_mutual_inductive_packet inner_types names env finite p = + let module D = Declarations in + let module N = Names in + let module T = Term in + let module X = Xml in + let {D.mind_consnames=consnames ; + D.mind_typename=typename ; + D.mind_nf_lc=lc ; + D.mind_nf_arity=arity} = p + in + [< X.xml_nempty "InductiveType" + ["name",(N.string_of_id typename) ; + "inductive",(string_of_bool finite) + ] + [< + X.xml_nempty "arity" [] + (print_term inner_types [] env (T.body_of_type arity)) ; + (Array.fold_right + (fun (name,lc) i -> + [< X.xml_nempty "Constructor" ["name",(N.string_of_id name)] + (print_term inner_types names env lc) ; + i + >]) + (Array.mapi (fun j x -> (x,T.body_of_type lc.(j)) ) consnames ) + [<>] + ) + >] + >] +;; + +(* print_mutual_inductive packs params nparams *) +(* where packs is the list of inductive definitions in the block of *) +(* mutual inductive definitions b *) +(* and params is the list of formal params for b *) +(* and nparams is the number of "parameters" in the arity of the *) +(* mutual inductive types *) +(* returns a stream of XML tokens suitable to be pretty printed via Xml.pp *) +let print_mutual_inductive finite packs fv hyps env inner_types = + let module D = Declarations in + let module E = Environ in + let module X = Xml in + let module N = Names in + let nparams = extract_nparams packs in + let names = + List.map + (fun p -> let {D.mind_typename=typename} = p in N.Name(typename)) + (Array.to_list packs) + in + let env = + List.fold_right + (fun {D.mind_typename=typename ; D.mind_nf_arity=arity} env -> + E.push_rel (N.Name typename, None, arity) env) + (Array.to_list packs) + env + in + [< X.xml_cdata "\n" ; + X.xml_cdata ("\n\n") ; + X.xml_nempty "InductiveDefinition" + ["noParams",string_of_int nparams ; + "id",get_next_id `I ; + "params",(string_of_pvars fv hyps)] + [< (Array.fold_right + (fun x i -> + [< print_mutual_inductive_packet + inner_types names env finite x ; i >] + ) packs [< >] + ) + >] + >] +;; + +let string_list_of_named_context_list = + List.map + (function (n,_,_) -> Names.string_of_id n) +;; + +let types_filename_of_filename = + function + Some f -> Some (f ^ ".types") + | None -> None +;; + +let pp_cmds_of_inner_types inner_types target_uri = + let module X = Xml in + let rec stream_of_list = + function + [] -> [<>] + | he::tl -> [< he ; stream_of_list tl >] + in + 
[< X.xml_cdata "\n" ; + X.xml_cdata ("\n\n"); + X.xml_nempty "InnerTypes" ["of",target_uri] + (stream_of_list + (List.map + (function + (id,type_pp_cmds) -> X.xml_nempty "TYPE" ["of",id] type_pp_cmds) + (List.rev inner_types) + )) + >] +;; + +(* print id dest *) +(* where sp is the qualified identifier (section path) of a *) +(* definition/theorem, variable or inductive definition *) +(* and dest is either None (for stdout) or (Some filename) *) +(* pretty prints via Xml.pp the object whose identifier is id on dest *) +(* Note: it is printed only (and directly) the most cooked available *) +(* form of the definition (all the parameters are *) +(* lambda-abstracted, but the object can still refer to variables) *) +let print qid fn = + let module D = Declarations in + let module G = Global in + let module N = Names in + let module Nt = Nametab in + let module T = Term in + let module X = Xml in + let (_,id) = Nt.repr_qualid qid in + let glob_ref = Nametab.locate qid in + let env = (Safe_typing.env_of_safe_env (G.safe_env ())) in + reset_ids () ; + let inner_types = ref [] in + let sp,tag,pp_cmds = + match glob_ref with + Nt.VarRef id -> + let sp = Declare.find_section_variable id in + let (_,body,typ) = G.lookup_named id in + sp,Variable,print_variable id body (T.body_of_type typ) env inner_types + | Nt.ConstRef sp -> + let {D.const_body=val0 ; D.const_type = typ ; D.const_hyps = hyps} = + G.lookup_constant sp in + let hyps = string_list_of_named_context_list hyps in + let typ = T.body_of_type typ in + sp,Constant, + begin + match val0 with + None -> print_axiom id typ [] hyps env inner_types + | Some c -> print_definition id c typ [] hyps env inner_types + end + | Nt.IndRef (sp,_) -> + let {D.mind_packets=packs ; + D.mind_hyps=hyps; + D.mind_finite=finite} = G.lookup_mind sp in + let hyps = string_list_of_named_context_list hyps in + sp,Inductive, + print_mutual_inductive finite packs [] hyps env inner_types + | Nt.ConstructRef _ -> + Util.anomaly ("print: this should not happen") + in + Xml.pp pp_cmds fn ; + Xml.pp (pp_cmds_of_inner_types !inner_types (uri_of_path sp tag)) + (types_filename_of_filename fn) +;; + +(* show dest *) +(* where dest is either None (for stdout) or (Some filename) *) +(* pretty prints via Xml.pp the proof in progress on dest *) +let show fn = + let pftst = Pfedit.get_pftreestate () in + let str = Names.string_of_id (Pfedit.get_current_proof_name ()) in + let pf = Tacmach.proof_of_pftreestate pftst in + let typ = (Proof_trees.goal_of_proof pf).Evd.evar_concl in + let val0,mv = Tacmach.extract_open_pftreestate pftst in + reset_ids () ; + let inner_types = ref [] in + Xml.pp + (print_current_proof val0 typ str mv inner_types) + fn ; + Xml.pp (pp_cmds_of_inner_types !inner_types ("?" 
^ str ^ "?")) + (types_filename_of_filename fn) +;; + +(* FUNCTIONS TO PRINT AN ENTIRE SECTION OF COQ *) + +(* list of (name, uncooked sp, most cooked sp, uncooked leaf object, *) +(* list of possible variables, directory name) *) +(* used to collect informations on the uncooked and most cooked forms *) +(* of objects that could be cooked (have namesakes) *) +let printed = ref [];; + +(* makes a filename from a directory name, a section path and an extension *) +let mkfilename dn sp ext = + let dir_list_of_dirpath s = + let rec aux n = + try + let pos = String.index_from s n '/' in + let head = String.sub s n (pos - n) in + head::(aux (pos + 1)) + with + Not_found -> [String.sub s n (String.length s - n)] + in + aux 0 + in + let rec join_dirs cwd = + function + [] -> assert false + | [he] -> "/" ^ he + | he::tail -> + let newcwd = cwd ^ "/" ^ he in + (try + Unix.mkdir newcwd 0o775 + with _ -> () (* Let's ignore the errors on mkdir *) + ) ; + "/" ^ he ^ join_dirs newcwd tail + in + let module L = Library in + let module S = System in + let module N = Names in + let module No = Nameops in + match dn with + None -> None + | Some basedir -> + let dir0 = No.extend_dirpath (No.dirpath sp) (No.basename sp) in + let dir = List.map N.string_of_id (List.rev (N.repr_dirpath dir0)) in + Some (basedir ^ join_dirs basedir dir ^ "." ^ ext) +;; + +(* print_object leaf_object id section_path directory_name formal_vars *) +(* where leaf_object is a leaf object *) +(* and id is the identifier (name) of the definition/theorem *) +(* or of the inductive definition o *) +(* and section_path is the section path of o *) +(* and directory_name is the base directory in which putting the print *) +(* and formal_vars is the list of parameters (variables) of o *) +(* pretty prints via Xml.pp the object o in the right directory deduced *) +(* from directory_name and section_path *) +(* Note: it is printed only the uncooked available form of the object plus *) +(* the list of parameters of the object deduced from it's most cooked *) +(* form *) +let print_object lobj id sp dn fv env = + let module D = Declarations in + let module E = Environ in + let module G = Global in + let module N = Names in + let module T = Term in + let module X = Xml in + let strtag = Libobject.object_tag lobj in + try + let tag = tag_of_string_tag strtag in + reset_ids () ; + let inner_types = ref [] in + let pp_cmds = + match strtag with + "CONSTANT" (* = Definition, Theorem *) + | "PARAMETER" (* = Axiom *) -> + let {D.const_body=val0 ; D.const_type = typ ; D.const_hyps = hyps} = + G.lookup_constant sp + in + let hyps = string_list_of_named_context_list hyps in + let typ = T.body_of_type typ in + begin + match val0 with + None -> print_axiom id typ fv hyps env inner_types + | Some c -> print_definition id c typ fv hyps env inner_types + end + | "INDUCTIVE" -> + let + {D.mind_packets=packs ; + D.mind_hyps = hyps; + D.mind_finite = finite + } = G.lookup_mind sp + in + let hyps = string_list_of_named_context_list hyps in + print_mutual_inductive finite packs fv hyps env inner_types + | "VARIABLE" -> + let (_,(_,varentry,_)) = Declare.out_variable lobj in + begin + match varentry with + Declare.SectionLocalDef (body,optt) -> + let typ = match optt with + | None -> Retyping.get_type_of env Evd.empty body + | Some t -> t in + add_to_pvars (Definition (N.string_of_id id, body, typ)) ; + print_variable id (Some body) typ env inner_types + | Declare.SectionLocalAssum typ -> + add_to_pvars (Assumption (N.string_of_id id, typ)) ; + print_variable id None 
(T.body_of_type typ) env inner_types + end + | "CLASS" + | "COERCION" + | _ -> raise Uninteresting + and fileext () = ext_of_tag tag in + let fn = (mkfilename dn sp (fileext ())) in + Xml.pp pp_cmds fn ; + Xml.pp (pp_cmds_of_inner_types !inner_types (uri_of_path sp tag)) + (types_filename_of_filename fn) + with + Uninteresting -> () +;; + +(* print_library_segment state bprintleaf directory_name *) +(* where state is a list of (section-path, node) *) +(* and bprintleaf is true iff the leaf objects should be printed *) +(* and directory_name is the name of the base directory in which to print *) +(* the files *) +(* print via print_node all the nodes (leafs excluded if bprintleaf is false) *)(* in state *) +let rec print_library_segment state bprintleaf dn = + List.iter + (function (sp, node) -> + print_if_verbose ("Print_library_segment: " ^ Names.string_of_path sp ^ "\n") ; + print_node node (Nameops.basename sp) sp bprintleaf dn ; + print_if_verbose "\n" + ) (List.rev state) +(* print_node node id section_path bprintleaf directory_name *) +(* prints a single node (and all it's subnodes via print_library_segment *) +and print_node n id sp bprintleaf dn = + let module L = Lib in + match n with + L.Leaf o -> + print_if_verbose "LEAF\n" ; + if bprintleaf then + begin + if not (List.mem id !printed) then + (* this is an uncooked term or a local (with no namesakes) term *) + begin +try + if could_have_namesakes o sp then + begin + (* this is an uncooked term *) + print_if_verbose ("OK, stampo solo questa volta " ^ Names.string_of_id id ^ "\n") ; + print_object o id sp dn !pvars !cumenv ; + printed := id::!printed + end + else + begin + (* this is a local term *) + print_if_verbose ("OK, stampo " ^ Names.string_of_id id ^ "\n") ; + print_object o id sp dn !pvars !cumenv + end +with _ -> print_if_verbose ("EXCEPTION RAISED!!!\n"); + end + else + begin + (* update the least cooked sp *) + print_if_verbose ("Suppongo gia' stampato " ^ Names.string_of_id id ^ "\n") + end + end + | L.OpenedSection (dir,_) -> + let id = snd (Nameops.split_dirpath dir) in + print_if_verbose ("OpenDir " ^ Names.string_of_id id ^ "\n") + | L.ClosedSection (_,dir,state) -> + let id = snd (Nameops.split_dirpath dir) in + print_if_verbose("ClosedDir " ^ Names.string_of_id id ^ "\n") ; + if bprintleaf then + begin + (* open a new scope *) + pvars := []::!pvars ; + print_library_segment state bprintleaf dn ; + (* close the scope *) + match !pvars with + [] -> assert false + | he::tl -> pvars := tl + end ; + print_if_verbose "/ClosedDir\n" + | L.Module s -> + print_if_verbose ("Module " ^ (Names.string_of_dirpath s) ^ "\n") + | L.FrozenState _ -> + print_if_verbose ("FrozenState\n") +;; + +(* print_closed_section module_name node directory_name *) +(* where module_name is the name of a module *) +(* and node is the library segment of the module *) +(* and directory_name is the base directory in which to put the xml files *) +(* prints all the leaf objects of the tree rooted in node using *) +(* print_library_segment on all its sons *) +let print_closed_section s ls dn = + let module L = Lib in + printed := [] ; + pvars := [[]] ; + cumenv := Safe_typing.env_of_safe_env (Global.safe_env ()) ; + print_if_verbose ("Module " ^ s ^ ":\n") ; + print_library_segment ls true dn ; + print_if_verbose "\n/Module\n" ; +;; + +(* printModule identifier directory_name *) +(* where identifier is the name of a module (section) d *) +(* and directory_name is the directory in which to root all the xml files *) +(* prints all the xml files and 
directories corresponding to the subsections *)
+(* and terms of the module d *)
+(* Note: the terms are printed in their uncooked form plus the information *)
+(* on the parameters of their most cooked form *)
+let printModule qid dn =
+ let module L = Library in
+ let module N = Nametab in
+ let module X = Xml in
+
+ let (_,dir_path,_) = L.locate_qualified_library qid in
+
+ let str = N.string_of_qualid qid in
+ let ls = L.module_segment (Some dir_path) in
+ print_if_verbose ("MODULE_BEGIN " ^ str ^ " " ^
+ (L.module_full_filename dir_path) ^ "\n") ;
+ print_closed_section str (List.rev ls) dn ;
+ print_if_verbose ("MODULE_END " ^ str ^ " " ^
+ (L.module_full_filename dir_path) ^ "\n")
+;;
+
+(* printSection identifier directory_name *)
+(* where identifier is the name of a closed section d *)
+(* and directory_name is the directory in which to root all the xml files *)
+(* prints all the xml files and directories corresponding to the subsections *)
+(* and terms of the closed section d *)
+(* Note: the terms are printed in their uncooked form plus the information *)
+(* on the parameters of their most cooked form *)
+let printSection id dn =
+ let module L = Library in
+ let module N = Names in
+ let module No = Nameops in
+ let module X = Xml in
+ let sp = Lib.make_path id in
+ let ls =
+ let rec find_closed_section =
+ function
+ [] -> raise Not_found
+ | (_,Lib.ClosedSection (_,dir,ls))::_ when snd (No.split_dirpath dir) = id
+ -> ls
+ | _::t -> find_closed_section t
+ in
+ print_string ("Searching " ^ Names.string_of_path sp ^ "\n") ;
+ find_closed_section (Lib.contents_after None)
+ in
+ let str = N.string_of_id id in
+ print_if_verbose ("SECTION_BEGIN " ^ str ^ " " ^ N.string_of_path sp ^ "\n");
+ print_closed_section str ls dn ;
+ print_if_verbose ("SECTION_END " ^ str ^ " " ^ N.string_of_path sp ^ "\n")
+;;
+
+(* printAll () prints the structure of the current environment of *)
+(* Coq. No terms are printed. Useful only for debugging *)
+let printAll () =
+ let state = Library.module_segment None in
+ let oldverbose = !verbose in
+ verbose := true ;
+ print_library_segment state false None ;
+(*
+ let ml = Library.loaded_modules () in
+ List.iter (function i -> printModule (Names.id_of_string i) None) ml ;
+*)
+ verbose := oldverbose
+;;
-- 
cgit v1.2.3
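
The mkfilename helper in the file above builds an output file name from a base directory and the components of a section path, creating the missing intermediate directories on the way and deliberately ignoring mkdir failures (for instance when a directory already exists). Below is a minimal standalone OCaml sketch of that idea, without Coq's Names/Nameops machinery; the base directory, path components and extension in the example are invented for illustration and are not taken from the patch. It needs the unix library to compile.

let join_dirs basedir dirs =
  List.fold_left
    (fun cwd d ->
       let next = Filename.concat cwd d in
       (* ignore mkdir errors, as the code in the patch does *)
       (try Unix.mkdir next 0o775 with _ -> ()) ;
       next)
    basedir dirs

let mkfilename basedir dirs name ext =
  Filename.concat (join_dirs basedir dirs) (name ^ "." ^ ext)

let () =
  (* creates /tmp/xml/Coq/Init if needed and prints /tmp/xml/Coq/Init/Logic.con *)
  print_endline (mkfilename "/tmp/xml" ["Coq"; "Init"] "Logic" "con")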
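
The pvars reference defined earlier in the same file is a list of lists of variable names, one inner list per open section with the innermost section first; get_depth_of_var walks it to find how many sections up a variable was declared, and that depth is what ends up in the relUri attribute of VAR nodes. The following self-contained sketch reproduces just that lookup; the section contents in the example are invented.

let get_depth_of_var pvars v =
  let rec aux n = function
    | [] -> None
    | vars :: outer -> if List.mem v vars then Some n else aux (n + 1) outer
  in
  aux 0 pvars

let () =
  (* three nested sections: the innermost declares x, the next one H and y *)
  let pvars = [ ["x"]; ["H"; "y"]; ["A"] ] in
  match get_depth_of_var pvars "y" with
  | Some n -> Printf.printf "y is declared %d section(s) up\n" n
  | None -> print_endline "y is not a section variable"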