From d14df293d2696f00a8de137bea9fe3a89e0bdeb0 Mon Sep 17 00:00:00 2001 From: letouzey Date: Wed, 19 Dec 2001 11:42:11 +0000 Subject: reparation du make depend et du .depend git-svn-id: svn+ssh://scm.gforge.inria.fr/svn/coq/trunk@2334 85f007b7-540e-0410-9357-904b9bb8a0f7 --- contrib/interface/dad.ml | 358 -------------------------------------- contrib/interface/dad.ml4 | 358 ++++++++++++++++++++++++++++++++++++++ contrib/interface/line_parser.ml | 235 ------------------------- contrib/interface/line_parser.ml4 | 235 +++++++++++++++++++++++++ 4 files changed, 593 insertions(+), 593 deletions(-) delete mode 100644 contrib/interface/dad.ml create mode 100644 contrib/interface/dad.ml4 delete mode 100755 contrib/interface/line_parser.ml create mode 100755 contrib/interface/line_parser.ml4 (limited to 'contrib/interface') diff --git a/contrib/interface/dad.ml b/contrib/interface/dad.ml deleted file mode 100644 index ce38174044..0000000000 --- a/contrib/interface/dad.ml +++ /dev/null @@ -1,358 +0,0 @@ -(* This file contains an ml version of drag-and-drop. *) - -(* #use "/net/home/bertot/experiments/pcoq/src/dad/dad.ml" *) - -open Names;; -open Term;; -open Rawterm;; -open Util;; -open Environ;; -open Tactics;; -open Tacticals;; -open Pattern;; -open Reduction;; -open Ctast;; -open Termast;; -open Astterm;; -open Vernacinterp;; -open Nametab - -open Proof_type;; -open Proof_trees;; -open Tacmach;; -open Typing;; -open Pp;; - -open Paths;; - -(* In a first approximation, drag-and-drop rules are like in CtCoq - 1/ a pattern, - 2,3/ Two paths: start and end positions, - 4/ the degree: the number of steps the algorithm should go up from the - longest common prefix, - 5/ the tail path: the suffix of the longest common prefix of length the - degree, - 6/ the command pattern, where meta variables are represented by objects - of the form Node(_,"META"; [Num(_,i)]) -*) - - -type dad_rule = Ctast.t * int list * int list * int * int list * Ctast.t;; - -(* This value will be used systematically when constructing objects of - type Ctast.t, the value is stupid and meaningless, but it is needed - to construct well-typed terms. *) - -let zz = (0,0);; - -(* This function receives a length n, a path p, and a term and returns a - couple whose first component is the subterm designated by the prefix - of p of length n, and the second component is the rest of the path *) - -let rec get_subterm (depth:int) (path: int list) (constr:constr) = - match depth, path, kind_of_term constr with - 0, l, c -> (constr,l) - | n, 2::a::tl, App(func,arr) -> - get_subterm (n - 2) tl arr.(a-1) - | _,l,_ -> failwith (int_list_to_string - "wrong path or wrong form of term" - l);; - -(* This function maps a substitution on an abstract syntax tree. The - first argument, an object of type env, is necessary to - transform constr terms into abstract syntax trees. The second argument is - the substitution, a list of pairs linking an integer and a constr term. *) -let map_subst (env :env) - (subst:(int * Term.constr) list) = - let rec map_subst_aux = function - Node(_, "META", [Num(_, i)]) -> - let constr = List.assoc i subst in - Ctast.ast_to_ct (ast_of_constr false env constr) - | Node(loc, s, l) -> Node(loc, s, List.map map_subst_aux l) - | ast -> ast in - map_subst_aux;; - - -(* This function is really the one that is important. 
*) -let rec find_cmd (l:(string * dad_rule) list) env constr p p1 p2 = - match l with - [] -> failwith "nothing happens" - | (name, (pat, p_f, p_l, deg, p_r, cmd))::tl -> - let length = List.length p in - try - if deg > length then - failwith "internal" - else - let term_to_match, p_r = - try - get_subterm (length - deg) p constr - with - Failure s -> failwith "internal" in - let _, constr_pat = - interp_constrpattern Evd.empty (Global.env()) - (ct_to_ast pat) in - let subst = matches constr_pat term_to_match in - if (is_prefix p_f (p_r@p1)) & (is_prefix p_l (p_r@p2)) then - map_subst env subst cmd - else - failwith "internal" - with - Failure "internal" -> find_cmd tl env constr p p1 p2 - | PatternMatchingFailure -> find_cmd tl env constr p p1 p2;; - - -let dad_rule_list = ref ([]: (string * dad_rule) list);; - - -(* \\ This function is also used in pbp. *) -let rec tactic_args_to_ints = function - [] -> [] - | (Integer n)::l -> n::(tactic_args_to_ints l) - | _ -> failwith "expecting only numbers";; - -(* We assume that the two lists of integers for the tactic are simply - given in one list, separated by a dummy tactic. *) -let rec part_tac_args l = function - [] -> l,[] - | (Tacexp a)::tl -> l, (tactic_args_to_ints tl) - | (Integer n)::tl -> part_tac_args (n::l) tl - | _ -> failwith "expecting only numbers and the word \"to\"";; - - -(* The dad_tac tactic takes a display_function as argument. This makes - it possible to use it in pcoq, but also in other contexts, just by - changing the output routine. *) -let dad_tac display_function = function - l -> let p1, p2 = part_tac_args [] l in - (function g -> - let (p_a, p1prime, p2prime) = decompose_path (List.rev p1,p2) in - (display_function - (find_cmd (!dad_rule_list) (pf_env g) - (pf_concl g) p_a p1prime p2prime)); - tclIDTAC g);; - -(* Now we enter dad rule list management. 
*) - -let add_dad_rule name patt p1 p2 depth pr command = - dad_rule_list := (name, - (patt, p1, p2, depth, pr, command))::!dad_rule_list;; - -let rec remove_if_exists name = function - [] -> false, [] - | ((a,b) as rule1)::tl -> if a = name then - let result1, l = (remove_if_exists name tl) in - true, l - else - let result1, l = remove_if_exists name tl in - result1, (rule1::l);; - -let remove_dad_rule name = - let result1, result2 = remove_if_exists name !dad_rule_list in - if result1 then - failwith("No such name among the drag and drop rules " ^ name) - else - dad_rule_list := result2;; - -let dad_rule_names () = - List.map (function (s,_) -> s) !dad_rule_list;; - -(* this function is inspired from matches_core in pattern.ml *) -let constrain ((n : int),(pat : constr_pattern)) sigma = - if List.mem_assoc n sigma then - if pat = (List.assoc n sigma) then sigma - else failwith "internal" - else - (n,pat)::sigma - -(* This function is inspired from matches_core in pattern.ml *) -let more_general_pat pat1 pat2 = - let rec match_rec sigma p1 p2 = - match p1, p2 with - | PMeta (Some n), m -> constrain (n,m) sigma - - | PMeta None, m -> sigma - - | PRef (VarRef sp1), PRef(VarRef sp2) when sp1 = sp2 -> sigma - - | PVar v1, PVar v2 when v1 = v2 -> sigma - - | PRef ref1, PRef ref2 when ref1 = ref2 -> sigma - - | PRel n1, PRel n2 when n1 = n2 -> sigma - - | PSort (RProp c1), PSort (RProp c2) when c1 = c2 -> sigma - - | PSort (RType _), PSort (RType _) -> sigma - - | PApp (c1,arg1), PApp (c2,arg2) -> - (try array_fold_left2 match_rec (match_rec sigma c1 c2) arg1 arg2 - with Invalid_argument _ -> failwith "internal") - | _ -> failwith "unexpected case in more_general_pat" in - try let _ = match_rec [] pat1 pat2 in true - with Failure "internal" -> false;; - -let more_general r1 r2 = - match r1,r2 with - (_,(patt1,p11,p12,_,_,_)), - (_,(patt2,p21,p22,_,_,_)) -> - (more_general_pat patt1 patt2) & - (is_prefix p11 p21) & (is_prefix p12 p22);; - -let not_less_general r1 r2 = - not (match r1,r2 with - (_,(patt1,p11,p12,_,_,_)), - (_,(patt2,p21,p22,_,_,_)) -> - (more_general_pat patt1 patt2) & - (is_prefix p21 p11) & (is_prefix p22 p12));; - -let rec add_in_list_sorting rule1 = function - [] -> [rule1] - | (b::tl) as this_list -> - if more_general rule1 b then - b::(add_in_list_sorting rule1 tl) - else if not_less_general rule1 b then - let tl2 = add_in_list_sorting_aux rule1 tl in - (match tl2 with - [] -> rule1::this_list - | _ -> b::tl2) - else - rule1::this_list -and add_in_list_sorting_aux rule1 = function - [] -> [] - | b::tl -> - if more_general rule1 b then - b::(add_in_list_sorting rule1 tl) - else - let tl2 = add_in_list_sorting_aux rule1 tl in - (match tl2 with - [] -> [] - | _ -> rule1::tl2);; - -let rec sort_list = function - [] -> [] - | a::l -> add_in_list_sorting a (sort_list l);; - -let mk_dad_meta n = Node(zz,"META",[Num(zz, n)]);; -let mk_rewrite1 ast = - Node(zz,"TACTICLIST", - [Node(zz,"RewriteLR", - [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; - -let mk_rewrite2 ast = - Node(zz,"TACTICLIST", - [Node(zz,"RewriteRL", - [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; - - - -let start_dad () = -vinterp_add "AddDadRule" - (function - | [VARG_STRING name; VARG_CONSTR pat; VARG_NUMBERLIST p1; - VARG_NUMBERLIST p2; VARG_TACTIC com] -> - (function () -> - let pr = match decompose_path (p1, p2) with pr, _, _ -> pr in - (add_dad_rule name (Ctast.ast_to_ct pat) p1 p2 (List.length pr) pr (Ctast.ast_to_ct com))) - | _ -> errorlabstrm "AddDadRule1" - [< str "AddDadRule2">]); 
-add_dad_rule "distributivity-inv" -(Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) -[2; 2] -[2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "distributivity1-r" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 2; 2; 2] -[] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "distributivity1-l" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 1; 2; 2] -[] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "associativity" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) -[2; 1] -[] -0 -[] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_assoc_r");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "minus-identity-lr" -(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) -[2; 1] -[2; 2] -1 -[2] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); - -add_dad_rule "minus-identity-rl" -(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) -[2; 2] -[2; 1] -1 -[2] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); - -add_dad_rule "plus-sym-rl" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) -[2; 2] -[2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "plus-sym-lr" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) -[2; 1] -[2; 2] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "absorb-0-r-rl" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) -[2; 2] -[1] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); - -add_dad_rule "absorb-0-r-lr" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) -[1] -[2; 2] -0 -[] -(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); - -add_dad_rule "plus-permute-lr" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 1] -[2; 2; 2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); - -add_dad_rule "plus-permute-rl" -(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) -[2; 2; 2; 1] -[2; 1] -1 -[2] -(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 
3) ;(mk_dad_meta 2) ])]));; - -vinterp_add "StartDad" - (function - | [] -> - (function () -> start_dad()) - | _ -> errorlabstrm "StartDad" [< >]);; diff --git a/contrib/interface/dad.ml4 b/contrib/interface/dad.ml4 new file mode 100644 index 0000000000..ce38174044 --- /dev/null +++ b/contrib/interface/dad.ml4 @@ -0,0 +1,358 @@ +(* This file contains an ml version of drag-and-drop. *) + +(* #use "/net/home/bertot/experiments/pcoq/src/dad/dad.ml" *) + +open Names;; +open Term;; +open Rawterm;; +open Util;; +open Environ;; +open Tactics;; +open Tacticals;; +open Pattern;; +open Reduction;; +open Ctast;; +open Termast;; +open Astterm;; +open Vernacinterp;; +open Nametab + +open Proof_type;; +open Proof_trees;; +open Tacmach;; +open Typing;; +open Pp;; + +open Paths;; + +(* In a first approximation, drag-and-drop rules are like in CtCoq + 1/ a pattern, + 2,3/ Two paths: start and end positions, + 4/ the degree: the number of steps the algorithm should go up from the + longest common prefix, + 5/ the tail path: the suffix of the longest common prefix of length the + degree, + 6/ the command pattern, where meta variables are represented by objects + of the form Node(_,"META"; [Num(_,i)]) +*) + + +type dad_rule = Ctast.t * int list * int list * int * int list * Ctast.t;; + +(* This value will be used systematically when constructing objects of + type Ctast.t, the value is stupid and meaningless, but it is needed + to construct well-typed terms. *) + +let zz = (0,0);; + +(* This function receives a length n, a path p, and a term and returns a + couple whose first component is the subterm designated by the prefix + of p of length n, and the second component is the rest of the path *) + +let rec get_subterm (depth:int) (path: int list) (constr:constr) = + match depth, path, kind_of_term constr with + 0, l, c -> (constr,l) + | n, 2::a::tl, App(func,arr) -> + get_subterm (n - 2) tl arr.(a-1) + | _,l,_ -> failwith (int_list_to_string + "wrong path or wrong form of term" + l);; + +(* This function maps a substitution on an abstract syntax tree. The + first argument, an object of type env, is necessary to + transform constr terms into abstract syntax trees. The second argument is + the substitution, a list of pairs linking an integer and a constr term. *) +let map_subst (env :env) + (subst:(int * Term.constr) list) = + let rec map_subst_aux = function + Node(_, "META", [Num(_, i)]) -> + let constr = List.assoc i subst in + Ctast.ast_to_ct (ast_of_constr false env constr) + | Node(loc, s, l) -> Node(loc, s, List.map map_subst_aux l) + | ast -> ast in + map_subst_aux;; + + +(* This function is really the one that is important. *) +let rec find_cmd (l:(string * dad_rule) list) env constr p p1 p2 = + match l with + [] -> failwith "nothing happens" + | (name, (pat, p_f, p_l, deg, p_r, cmd))::tl -> + let length = List.length p in + try + if deg > length then + failwith "internal" + else + let term_to_match, p_r = + try + get_subterm (length - deg) p constr + with + Failure s -> failwith "internal" in + let _, constr_pat = + interp_constrpattern Evd.empty (Global.env()) + (ct_to_ast pat) in + let subst = matches constr_pat term_to_match in + if (is_prefix p_f (p_r@p1)) & (is_prefix p_l (p_r@p2)) then + map_subst env subst cmd + else + failwith "internal" + with + Failure "internal" -> find_cmd tl env constr p p1 p2 + | PatternMatchingFailure -> find_cmd tl env constr p p1 p2;; + + +let dad_rule_list = ref ([]: (string * dad_rule) list);; + + +(* \\ This function is also used in pbp. 
*) +let rec tactic_args_to_ints = function + [] -> [] + | (Integer n)::l -> n::(tactic_args_to_ints l) + | _ -> failwith "expecting only numbers";; + +(* We assume that the two lists of integers for the tactic are simply + given in one list, separated by a dummy tactic. *) +let rec part_tac_args l = function + [] -> l,[] + | (Tacexp a)::tl -> l, (tactic_args_to_ints tl) + | (Integer n)::tl -> part_tac_args (n::l) tl + | _ -> failwith "expecting only numbers and the word \"to\"";; + + +(* The dad_tac tactic takes a display_function as argument. This makes + it possible to use it in pcoq, but also in other contexts, just by + changing the output routine. *) +let dad_tac display_function = function + l -> let p1, p2 = part_tac_args [] l in + (function g -> + let (p_a, p1prime, p2prime) = decompose_path (List.rev p1,p2) in + (display_function + (find_cmd (!dad_rule_list) (pf_env g) + (pf_concl g) p_a p1prime p2prime)); + tclIDTAC g);; + +(* Now we enter dad rule list management. *) + +let add_dad_rule name patt p1 p2 depth pr command = + dad_rule_list := (name, + (patt, p1, p2, depth, pr, command))::!dad_rule_list;; + +let rec remove_if_exists name = function + [] -> false, [] + | ((a,b) as rule1)::tl -> if a = name then + let result1, l = (remove_if_exists name tl) in + true, l + else + let result1, l = remove_if_exists name tl in + result1, (rule1::l);; + +let remove_dad_rule name = + let result1, result2 = remove_if_exists name !dad_rule_list in + if result1 then + failwith("No such name among the drag and drop rules " ^ name) + else + dad_rule_list := result2;; + +let dad_rule_names () = + List.map (function (s,_) -> s) !dad_rule_list;; + +(* this function is inspired from matches_core in pattern.ml *) +let constrain ((n : int),(pat : constr_pattern)) sigma = + if List.mem_assoc n sigma then + if pat = (List.assoc n sigma) then sigma + else failwith "internal" + else + (n,pat)::sigma + +(* This function is inspired from matches_core in pattern.ml *) +let more_general_pat pat1 pat2 = + let rec match_rec sigma p1 p2 = + match p1, p2 with + | PMeta (Some n), m -> constrain (n,m) sigma + + | PMeta None, m -> sigma + + | PRef (VarRef sp1), PRef(VarRef sp2) when sp1 = sp2 -> sigma + + | PVar v1, PVar v2 when v1 = v2 -> sigma + + | PRef ref1, PRef ref2 when ref1 = ref2 -> sigma + + | PRel n1, PRel n2 when n1 = n2 -> sigma + + | PSort (RProp c1), PSort (RProp c2) when c1 = c2 -> sigma + + | PSort (RType _), PSort (RType _) -> sigma + + | PApp (c1,arg1), PApp (c2,arg2) -> + (try array_fold_left2 match_rec (match_rec sigma c1 c2) arg1 arg2 + with Invalid_argument _ -> failwith "internal") + | _ -> failwith "unexpected case in more_general_pat" in + try let _ = match_rec [] pat1 pat2 in true + with Failure "internal" -> false;; + +let more_general r1 r2 = + match r1,r2 with + (_,(patt1,p11,p12,_,_,_)), + (_,(patt2,p21,p22,_,_,_)) -> + (more_general_pat patt1 patt2) & + (is_prefix p11 p21) & (is_prefix p12 p22);; + +let not_less_general r1 r2 = + not (match r1,r2 with + (_,(patt1,p11,p12,_,_,_)), + (_,(patt2,p21,p22,_,_,_)) -> + (more_general_pat patt1 patt2) & + (is_prefix p21 p11) & (is_prefix p22 p12));; + +let rec add_in_list_sorting rule1 = function + [] -> [rule1] + | (b::tl) as this_list -> + if more_general rule1 b then + b::(add_in_list_sorting rule1 tl) + else if not_less_general rule1 b then + let tl2 = add_in_list_sorting_aux rule1 tl in + (match tl2 with + [] -> rule1::this_list + | _ -> b::tl2) + else + rule1::this_list +and add_in_list_sorting_aux rule1 = function + [] -> [] + | b::tl 
-> + if more_general rule1 b then + b::(add_in_list_sorting rule1 tl) + else + let tl2 = add_in_list_sorting_aux rule1 tl in + (match tl2 with + [] -> [] + | _ -> rule1::tl2);; + +let rec sort_list = function + [] -> [] + | a::l -> add_in_list_sorting a (sort_list l);; + +let mk_dad_meta n = Node(zz,"META",[Num(zz, n)]);; +let mk_rewrite1 ast = + Node(zz,"TACTICLIST", + [Node(zz,"RewriteLR", + [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; + +let mk_rewrite2 ast = + Node(zz,"TACTICLIST", + [Node(zz,"RewriteRL", + [Node(zz, "COMMAND", ast);Node(zz,"BINDINGS",[])])]);; + + + +let start_dad () = +vinterp_add "AddDadRule" + (function + | [VARG_STRING name; VARG_CONSTR pat; VARG_NUMBERLIST p1; + VARG_NUMBERLIST p2; VARG_TACTIC com] -> + (function () -> + let pr = match decompose_path (p1, p2) with pr, _, _ -> pr in + (add_dad_rule name (Ctast.ast_to_ct pat) p1 p2 (List.length pr) pr (Ctast.ast_to_ct com))) + | _ -> errorlabstrm "AddDadRule1" + [< str "AddDadRule2">]); +add_dad_rule "distributivity-inv" +(Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) +[2; 2] +[2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "distributivity1-r" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 2; 2; 2] +[] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "distributivity1-l" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,2)])]);Node(zz,"APPLIST",[Nvar(zz,"mult");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 1; 2; 2] +[] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "mult_plus_distr");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "associativity" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"META",[Num(zz,3)])]);Node(zz,"META",[Num(zz,2)])])) +[2; 1] +[] +0 +[] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_assoc_r");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "minus-identity-lr" +(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) +[2; 1] +[2; 2] +1 +[2] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); + +add_dad_rule "minus-identity-rl" +(Node(zz,"APPLIST",[Nvar(zz,"minus");Node(zz,"META",[Num(zz,2)]);Node(zz,"META",[Num(zz,2)])])) +[2; 2] +[2; 1] +1 +[2] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "minus_n_n");(mk_dad_meta 2) ])])); + +add_dad_rule "plus-sym-rl" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) +[2; 2] +[2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "plus-sym-lr" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])) +[2; 1] +[2; 2] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_sym");(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "absorb-0-r-rl" 
+(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) +[2; 2] +[1] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); + +add_dad_rule "absorb-0-r-lr" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,2)]);Nvar(zz,"O")])) +[1] +[2; 2] +0 +[] +(mk_rewrite2 ([Node(zz, "APPLIST", [Nvar(zz, "plus_n_O");(mk_dad_meta 2) ])])); + +add_dad_rule "plus-permute-lr" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 1] +[2; 2; 2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])])); + +add_dad_rule "plus-permute-rl" +(Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,4)]);Node(zz,"APPLIST",[Nvar(zz,"plus");Node(zz,"META",[Num(zz,3)]);Node(zz,"META",[Num(zz,2)])])])) +[2; 2; 2; 1] +[2; 1] +1 +[2] +(mk_rewrite1 ([Node(zz, "APPLIST", [Nvar(zz, "plus_permute");(mk_dad_meta 4) ;(mk_dad_meta 3) ;(mk_dad_meta 2) ])]));; + +vinterp_add "StartDad" + (function + | [] -> + (function () -> start_dad()) + | _ -> errorlabstrm "StartDad" [< >]);; diff --git a/contrib/interface/line_parser.ml b/contrib/interface/line_parser.ml deleted file mode 100755 index 91eda0eed0..0000000000 --- a/contrib/interface/line_parser.ml +++ /dev/null @@ -1,235 +0,0 @@ -(* line-oriented Syntactic analyser for a Coq parser *) -(* This parser expects a very small number of commands, each given on a complete -line. Some of these commands are then followed by a text fragment terminated -by a precise keyword, which is also expected to appear alone on a line. *) - -(* The main parsing loop procedure is "parser_loop", given at the end of this -file. It read lines one by one and checks whether they can be parsed using -a very simple parser. This very simple parser uses a lexer, which is also given -in this file. - -The lexical analyser: - There are only 5 sorts of tokens *) -type simple_tokens = Tspace | Tid of string | Tint of int | Tstring of string | - Tlbracket | Trbracket;; - -(* When recognizing identifiers or strings, the lexical analyser accumulates - the characters in a buffer, using the command add_in_buff. To recuperate - the characters, one can use get_buff (this code was inspired by the - code in src/meta/lexer.ml of Coq revision 6.1) *) -let add_in_buff,get_buff = - let buff = ref (String.create 80) in - (fun i x -> - let len = String.length !buff in - if i >= len then (buff := !buff ^ (String.create len);()); - String.set !buff i x; - succ i), - (fun len -> String.sub !buff 0 len);; - -(* Identifiers are [a-zA-Z_][a-zA-Z0-9_]*. When arriving here the first - character has already been recognized. *) -let rec ident len = parser - [<''_' | 'a'..'z' | 'A'..'Z' | '0'..'9' as c; s >] -> - ident (add_in_buff len c) s -| [< >] -> let str = get_buff len in Tid(str);; - -(* While recognizing integers, one constructs directly the integer value. - The ascii code of '0' is important for this. *) -let code0 = Char.code '0';; - -let get_digit c = Char.code c - code0;; - -(* Integers are [0-9]* - The variable intval is the integer value of the text that has already - been recognized. As for identifiers, the first character has already been - recognized. 
*) - -let rec parse_int intval = parser - [< ''0'..'9' as c ; i=parse_int (10 * intval + get_digit c)>] -> i -| [< >] -> Tint intval;; - -(* The string lexer is borrowed from the string parser of Coq V6.1 - This may be a problem if convention have changed in Coq, - However this parser is only used to recognize file names which should - not contain too many special characters *) - -let rec spec_char = parser - [< ''n' >] -> '\n' -| [< ''t' >] -> '\t' -| [< ''b' >] -> '\008' -| [< ''r' >] -> '\013' -| [< ''0'..'9' as c; v= (spec1 (get_digit c)) >] -> - Char.chr v -| [< 'x >] -> x - -and spec1 v = parser - [< ''0'..'9' as c; s >] -> spec1 (10*v+(get_digit c)) s -| [< >] -> v -;; - -(* This is the actual string lexical analyser. Strings are - QUOT([^QUOT\\]|\\[0-9]*|\\[^0-9])QUOT (the word QUOT is used - to represents double quotation characters, that cannot be used - freely, even inside comments. *) - -let rec string len = parser - [< ''"' >] -> len -| [<''\\' ; - len = (parser [< ''\n' >] -> len - | [< c=spec_char >] -> add_in_buff len c); - s >] -> string len s -| [< 'x; s >] -> string (add_in_buff len x) s;; - -(* The lexical analyser repeats the recognized given by next_token: - spaces and tabulations are ignored, identifiers, integers, - strings, opening and closing square brackets. Lexical errors are - ignored ! *) -let rec next_token = parser count - [< '' ' | '\t'; tok = next_token >] -> tok -| [< ''_' | 'a'..'z' | 'A'..'Z' as c;i = (ident (add_in_buff 0 c))>] -> i -| [< ''0'..'9' as c ; i = (parse_int (get_digit c))>] -> i -| [< ''"' ; len = (string 0) >] -> Tstring (get_buff len) -| [< ''[' >] -> Tlbracket -| [< '']' >] -> Trbracket -| [< '_ ; x = next_token >] -> x;; - -(* A very simple lexical analyser to recognize a integer value behind - blank characters *) - -let rec next_int = parser count - [< '' ' | '\t'; v = next_int >] -> v -| [< ''0'..'9' as c; i = (parse_int (get_digit c))>] -> - (match i with - Tint n -> n - | _ -> failwith "unexpected branch in next_int");; - -(* This is the actual lexical analyser, implemented as a function on a stream. - It will be used with the Stream.from primitive to construct a function - of type char Stream.t -> simple_token option Stream.t *) -let token_stream cs _ = - try let tok = next_token cs in - Some tok - with Stream.Failure -> None;; - -(* Two of the actions of the parser request that one reads the rest of - the input up to a specific string stop_string. This is done - with a function that transform the input_channel into a pair of - char Stream.t, reading from the input_channel all the lines to - the stop_string first. *) - - -let rec gather_strings stop_string input_channel = - let buff = input_line input_channel in - if buff = stop_string then - [] - else - (buff::(gather_strings stop_string input_channel));; - - -(* the result of this function is supposed to be used in a Stream.from - construction. *) - -let line_list_to_stream string_list = - let count = ref 0 in - let buff = ref "" in - let reserve = ref string_list in - let current_length = ref 0 in - (fun i -> if (i - !count) >= !current_length then - begin - count := !count + !current_length + 1; - match !reserve with - | [] -> None - | s1::rest -> - begin - buff := s1; - current_length := String.length !buff; - reserve := rest; - Some '\n' - end - end - else - Some(String.get !buff (i - !count)));; - - -(* In older revisions of this file you would find a function that - does line oriented breakdown of the input channel without resorting to - a list of lines. 
However, the need for the list of line appeared when - we wanted to have a channel and a list of strings describing the same - data, one for regular parsing and the other for error recovery. *) - -let channel_to_stream_and_string_list stop_string input_channel = - let string_list = gather_strings stop_string input_channel in - (line_list_to_stream string_list, string_list);; - -let flush_until_end_of_stream char_stream = - Stream.iter (function _ -> ()) char_stream;; - -(* There are only 5 kinds of lines recognized by our little parser. - Unrecognized lines are ignored. *) -type parser_request = - | PRINT_VERSION - | PARSE_STRING of string - (* parse_string [] then text and && END--OF--DATA *) - | QUIET_PARSE_STRING - (* quiet_parse_string then text and && END--OF--DATA *) - | PARSE_FILE of string - (* parse_file *) - | ADD_PATH of string - (* add_path *) - | LOAD_SYNTAX of string - (* load_syntax_file *) - | GARBAGE -;; - -(* The procedure parser_loop should never terminate while the input_channel is - not closed. This procedure receives the functions called for each sentence - as arguments. Thus the code is completely independent from the Coq sources. *) -let parser_loop functions input_channel = - let print_version_action, - parse_string_action, - quiet_parse_string_action, - parse_file_action, - add_path_action, - load_syntax_action = functions in - let rec parser_loop_rec input_channel = - (let line = input_line input_channel in - let reqid, parser_request = - try - (match Stream.from (token_stream (Stream.of_string line)) with - parser - | [< 'Tid "print_version" >] -> - 0, PRINT_VERSION - | [< 'Tid "parse_string" ; 'Tint reqid ; 'Tlbracket ; - 'Tid phylum ; 'Trbracket >] - -> reqid,PARSE_STRING phylum - | [< 'Tid "quiet_parse_string" >] -> - 0,QUIET_PARSE_STRING - | [< 'Tid "parse_file" ; 'Tint reqid ; 'Tstring fname >] -> - reqid, PARSE_FILE fname - | [< 'Tid "add_path"; 'Tint reqid ; 'Tstring directory >] - -> reqid, ADD_PATH directory - | [< 'Tid "load_syntax_file"; 'Tint reqid; 'Tid module_name >] -> - reqid, LOAD_SYNTAX module_name - | [< 'Tid "quit_parser" >] -> raise End_of_file - | [< >] -> 0, GARBAGE) - with - Stream.Failure | Stream.Error _ -> 0,GARBAGE in - match parser_request with - PRINT_VERSION -> print_version_action () - | PARSE_STRING phylum -> - let regular_stream, string_list = - channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in - parse_string_action reqid phylum (Stream.from regular_stream) - string_list;() - | QUIET_PARSE_STRING -> - let regular_stream, string_list = - channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in - quiet_parse_string_action - (Stream.from regular_stream);() - | PARSE_FILE file_name -> - parse_file_action reqid file_name - | ADD_PATH path -> add_path_action reqid path - | LOAD_SYNTAX syn -> load_syntax_action reqid syn - | GARBAGE -> ()); - parser_loop_rec input_channel in - parser_loop_rec input_channel;; diff --git a/contrib/interface/line_parser.ml4 b/contrib/interface/line_parser.ml4 new file mode 100755 index 0000000000..91eda0eed0 --- /dev/null +++ b/contrib/interface/line_parser.ml4 @@ -0,0 +1,235 @@ +(* line-oriented Syntactic analyser for a Coq parser *) +(* This parser expects a very small number of commands, each given on a complete +line. Some of these commands are then followed by a text fragment terminated +by a precise keyword, which is also expected to appear alone on a line. *) + +(* The main parsing loop procedure is "parser_loop", given at the end of this +file. 
It read lines one by one and checks whether they can be parsed using +a very simple parser. This very simple parser uses a lexer, which is also given +in this file. + +The lexical analyser: + There are only 5 sorts of tokens *) +type simple_tokens = Tspace | Tid of string | Tint of int | Tstring of string | + Tlbracket | Trbracket;; + +(* When recognizing identifiers or strings, the lexical analyser accumulates + the characters in a buffer, using the command add_in_buff. To recuperate + the characters, one can use get_buff (this code was inspired by the + code in src/meta/lexer.ml of Coq revision 6.1) *) +let add_in_buff,get_buff = + let buff = ref (String.create 80) in + (fun i x -> + let len = String.length !buff in + if i >= len then (buff := !buff ^ (String.create len);()); + String.set !buff i x; + succ i), + (fun len -> String.sub !buff 0 len);; + +(* Identifiers are [a-zA-Z_][a-zA-Z0-9_]*. When arriving here the first + character has already been recognized. *) +let rec ident len = parser + [<''_' | 'a'..'z' | 'A'..'Z' | '0'..'9' as c; s >] -> + ident (add_in_buff len c) s +| [< >] -> let str = get_buff len in Tid(str);; + +(* While recognizing integers, one constructs directly the integer value. + The ascii code of '0' is important for this. *) +let code0 = Char.code '0';; + +let get_digit c = Char.code c - code0;; + +(* Integers are [0-9]* + The variable intval is the integer value of the text that has already + been recognized. As for identifiers, the first character has already been + recognized. *) + +let rec parse_int intval = parser + [< ''0'..'9' as c ; i=parse_int (10 * intval + get_digit c)>] -> i +| [< >] -> Tint intval;; + +(* The string lexer is borrowed from the string parser of Coq V6.1 + This may be a problem if convention have changed in Coq, + However this parser is only used to recognize file names which should + not contain too many special characters *) + +let rec spec_char = parser + [< ''n' >] -> '\n' +| [< ''t' >] -> '\t' +| [< ''b' >] -> '\008' +| [< ''r' >] -> '\013' +| [< ''0'..'9' as c; v= (spec1 (get_digit c)) >] -> + Char.chr v +| [< 'x >] -> x + +and spec1 v = parser + [< ''0'..'9' as c; s >] -> spec1 (10*v+(get_digit c)) s +| [< >] -> v +;; + +(* This is the actual string lexical analyser. Strings are + QUOT([^QUOT\\]|\\[0-9]*|\\[^0-9])QUOT (the word QUOT is used + to represents double quotation characters, that cannot be used + freely, even inside comments. *) + +let rec string len = parser + [< ''"' >] -> len +| [<''\\' ; + len = (parser [< ''\n' >] -> len + | [< c=spec_char >] -> add_in_buff len c); + s >] -> string len s +| [< 'x; s >] -> string (add_in_buff len x) s;; + +(* The lexical analyser repeats the recognized given by next_token: + spaces and tabulations are ignored, identifiers, integers, + strings, opening and closing square brackets. Lexical errors are + ignored ! 
*) +let rec next_token = parser count + [< '' ' | '\t'; tok = next_token >] -> tok +| [< ''_' | 'a'..'z' | 'A'..'Z' as c;i = (ident (add_in_buff 0 c))>] -> i +| [< ''0'..'9' as c ; i = (parse_int (get_digit c))>] -> i +| [< ''"' ; len = (string 0) >] -> Tstring (get_buff len) +| [< ''[' >] -> Tlbracket +| [< '']' >] -> Trbracket +| [< '_ ; x = next_token >] -> x;; + +(* A very simple lexical analyser to recognize a integer value behind + blank characters *) + +let rec next_int = parser count + [< '' ' | '\t'; v = next_int >] -> v +| [< ''0'..'9' as c; i = (parse_int (get_digit c))>] -> + (match i with + Tint n -> n + | _ -> failwith "unexpected branch in next_int");; + +(* This is the actual lexical analyser, implemented as a function on a stream. + It will be used with the Stream.from primitive to construct a function + of type char Stream.t -> simple_token option Stream.t *) +let token_stream cs _ = + try let tok = next_token cs in + Some tok + with Stream.Failure -> None;; + +(* Two of the actions of the parser request that one reads the rest of + the input up to a specific string stop_string. This is done + with a function that transform the input_channel into a pair of + char Stream.t, reading from the input_channel all the lines to + the stop_string first. *) + + +let rec gather_strings stop_string input_channel = + let buff = input_line input_channel in + if buff = stop_string then + [] + else + (buff::(gather_strings stop_string input_channel));; + + +(* the result of this function is supposed to be used in a Stream.from + construction. *) + +let line_list_to_stream string_list = + let count = ref 0 in + let buff = ref "" in + let reserve = ref string_list in + let current_length = ref 0 in + (fun i -> if (i - !count) >= !current_length then + begin + count := !count + !current_length + 1; + match !reserve with + | [] -> None + | s1::rest -> + begin + buff := s1; + current_length := String.length !buff; + reserve := rest; + Some '\n' + end + end + else + Some(String.get !buff (i - !count)));; + + +(* In older revisions of this file you would find a function that + does line oriented breakdown of the input channel without resorting to + a list of lines. However, the need for the list of line appeared when + we wanted to have a channel and a list of strings describing the same + data, one for regular parsing and the other for error recovery. *) + +let channel_to_stream_and_string_list stop_string input_channel = + let string_list = gather_strings stop_string input_channel in + (line_list_to_stream string_list, string_list);; + +let flush_until_end_of_stream char_stream = + Stream.iter (function _ -> ()) char_stream;; + +(* There are only 5 kinds of lines recognized by our little parser. + Unrecognized lines are ignored. *) +type parser_request = + | PRINT_VERSION + | PARSE_STRING of string + (* parse_string [] then text and && END--OF--DATA *) + | QUIET_PARSE_STRING + (* quiet_parse_string then text and && END--OF--DATA *) + | PARSE_FILE of string + (* parse_file *) + | ADD_PATH of string + (* add_path *) + | LOAD_SYNTAX of string + (* load_syntax_file *) + | GARBAGE +;; + +(* The procedure parser_loop should never terminate while the input_channel is + not closed. This procedure receives the functions called for each sentence + as arguments. Thus the code is completely independent from the Coq sources. 
*) +let parser_loop functions input_channel = + let print_version_action, + parse_string_action, + quiet_parse_string_action, + parse_file_action, + add_path_action, + load_syntax_action = functions in + let rec parser_loop_rec input_channel = + (let line = input_line input_channel in + let reqid, parser_request = + try + (match Stream.from (token_stream (Stream.of_string line)) with + parser + | [< 'Tid "print_version" >] -> + 0, PRINT_VERSION + | [< 'Tid "parse_string" ; 'Tint reqid ; 'Tlbracket ; + 'Tid phylum ; 'Trbracket >] + -> reqid,PARSE_STRING phylum + | [< 'Tid "quiet_parse_string" >] -> + 0,QUIET_PARSE_STRING + | [< 'Tid "parse_file" ; 'Tint reqid ; 'Tstring fname >] -> + reqid, PARSE_FILE fname + | [< 'Tid "add_path"; 'Tint reqid ; 'Tstring directory >] + -> reqid, ADD_PATH directory + | [< 'Tid "load_syntax_file"; 'Tint reqid; 'Tid module_name >] -> + reqid, LOAD_SYNTAX module_name + | [< 'Tid "quit_parser" >] -> raise End_of_file + | [< >] -> 0, GARBAGE) + with + Stream.Failure | Stream.Error _ -> 0,GARBAGE in + match parser_request with + PRINT_VERSION -> print_version_action () + | PARSE_STRING phylum -> + let regular_stream, string_list = + channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in + parse_string_action reqid phylum (Stream.from regular_stream) + string_list;() + | QUIET_PARSE_STRING -> + let regular_stream, string_list = + channel_to_stream_and_string_list "&& END--OF--DATA" input_channel in + quiet_parse_string_action + (Stream.from regular_stream);() + | PARSE_FILE file_name -> + parse_file_action reqid file_name + | ADD_PATH path -> add_path_action reqid path + | LOAD_SYNTAX syn -> load_syntax_action reqid syn + | GARBAGE -> ()); + parser_loop_rec input_channel in + parser_loop_rec input_channel;; -- cgit v1.2.3
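
The line-oriented protocol implemented by parser_loop above (one command per line, with the parse_string and quiet_parse_string payloads terminated by an "&& END--OF--DATA" line) can be exercised with a small stand-alone driver. The sketch below is hypothetical and not part of the patch: it assumes line_parser.ml4 has been run through camlp4 and linked so that parser_loop is in scope, and it plugs in stub actions whose order and argument shapes are inferred from the call sites inside parser_loop.

(* Minimal sketch of a driver for parser_loop; the stub actions are
   placeholders, given in the order of the 6-tuple that parser_loop
   destructures. *)
let stub_actions =
  ((fun () -> print_endline "stub parser, no version"),          (* print_version *)
   (fun reqid phylum _char_stream lines ->                       (* parse_string *)
      Printf.printf "parse_string %d [%s]: got %d line(s)\n"
        reqid phylum (List.length lines)),
   (fun _char_stream -> ()),                                     (* quiet_parse_string *)
   (fun reqid fname -> Printf.printf "parse_file %d %s\n" reqid fname),
   (fun reqid path -> Printf.printf "add_path %d %s\n" reqid path),
   (fun reqid syn -> Printf.printf "load_syntax_file %d %s\n" reqid syn));;

let _ =
  (* parser_loop only returns by exception: the "quit_parser" command raises
     End_of_file, and so does input_line at end of input. *)
  try parser_loop stub_actions stdin
  with End_of_file -> ();;

Feeding the driver print_version and then quit_parser on its standard input exercises the two simplest commands; the others follow the formats matched in parser_loop, for instance parse_file 3 "foo.v".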