summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/_tags2
-rw-r--r--src/ast.ml1
-rw-r--r--src/ast_util.ml7
-rw-r--r--src/initial_check.ml15
-rw-r--r--src/lexer.mll1
-rw-r--r--src/lexer2.mll286
-rw-r--r--src/monomorphise.ml87
-rw-r--r--src/myocamlbuild.ml6
-rw-r--r--src/parse_ast.ml7
-rw-r--r--src/parser.mly30
-rw-r--r--src/parser2.mly997
-rw-r--r--src/pretty_print_common.ml23
-rw-r--r--src/pretty_print_lem_ast.ml16
-rw-r--r--src/pretty_print_sail.ml15
-rw-r--r--src/process_file.ml9
-rw-r--r--src/process_file.mli3
-rw-r--r--src/reporting_basic.ml5
-rw-r--r--src/reporting_basic.mli1
-rw-r--r--src/sail.ml8
-rw-r--r--src/spec_analysis.ml2
-rw-r--r--src/test/pattern.sail4
-rw-r--r--src/type_check.ml493
-rw-r--r--src/type_check.mli13
-rw-r--r--src/util.ml4
-rw-r--r--src/util.mli2
25 files changed, 1863 insertions, 174 deletions
diff --git a/src/_tags b/src/_tags
index 83ca12e0..d8653781 100644
--- a/src/_tags
+++ b/src/_tags
@@ -1,4 +1,4 @@
-true: -traverse, debug
+true: -traverse, debug, use_menhir
<**/*.ml>: bin_annot, annot
<lem_interp> or <test>: include
<sail.{byte,native}>: use_pprint, use_nums, use_unix
diff --git a/src/ast.ml b/src/ast.ml
index 11b4d29c..6a74d5b2 100644
--- a/src/ast.ml
+++ b/src/ast.ml
@@ -229,6 +229,7 @@ typ_aux = (* Type expressions, of kind $_$ *)
| Typ_fn of typ * typ * effect (* Function type (first-order only in user code) *)
| Typ_tup of (typ) list (* Tuple type *)
| Typ_app of id * (typ_arg) list (* type constructor application *)
+ | Typ_exist of kid list * n_constraint * typ
and typ =
Typ_aux of typ_aux * l
diff --git a/src/ast_util.ml b/src/ast_util.ml
index cc21f2af..67381c52 100644
--- a/src/ast_util.ml
+++ b/src/ast_util.ml
@@ -212,6 +212,8 @@ and string_of_typ_aux = function
| Typ_app (id, args) -> string_of_id id ^ "<" ^ string_of_list ", " string_of_typ_arg args ^ ">"
| Typ_fn (typ_arg, typ_ret, eff) ->
string_of_typ typ_arg ^ " -> " ^ string_of_typ typ_ret ^ " effect " ^ string_of_effect eff
+ | Typ_exist (kids, nc, typ) ->
+ "exist " ^ string_of_list " " string_of_kid kids ^ ", " ^ string_of_n_constraint nc ^ ". " ^ string_of_typ typ
and string_of_typ_arg = function
| Typ_arg_aux (typ_arg, l) -> string_of_typ_arg_aux typ_arg
and string_of_typ_arg_aux = function
@@ -219,8 +221,7 @@ and string_of_typ_arg_aux = function
| Typ_arg_typ typ -> string_of_typ typ
| Typ_arg_order o -> string_of_order o
| Typ_arg_effect eff -> string_of_effect eff
-
-let rec string_of_n_constraint = function
+and string_of_n_constraint = function
| NC_aux (NC_fixed (n1, n2), _) -> string_of_nexp n1 ^ " = " ^ string_of_nexp n2
| NC_aux (NC_not_equal (n1, n2), _) -> string_of_nexp n1 ^ " != " ^ string_of_nexp n2
| NC_aux (NC_bounded_ge (n1, n2), _) -> string_of_nexp n1 ^ " >= " ^ string_of_nexp n2
@@ -323,6 +324,8 @@ and string_of_lexp (LEXP_aux (lexp, _)) =
| LEXP_cast (typ, v) -> "(" ^ string_of_typ typ ^ ") " ^ string_of_id v
| LEXP_tup lexps -> "(" ^ string_of_list ", " string_of_lexp lexps ^ ")"
| LEXP_vector (lexp, exp) -> string_of_lexp lexp ^ "[" ^ string_of_exp exp ^ "]"
+ | LEXP_vector_range (lexp, exp1, exp2) ->
+ string_of_lexp lexp ^ "[" ^ string_of_exp exp1 ^ ".." ^ string_of_exp exp2 ^ "]"
| LEXP_field (lexp, id) -> string_of_lexp lexp ^ "." ^ string_of_id id
| LEXP_memory (f, xs) -> string_of_id f ^ "(" ^ string_of_list ", " string_of_exp xs ^ ")"
| _ -> "LEXP"
diff --git a/src/initial_check.ml b/src/initial_check.ml
index 4d6db996..1db9d80b 100644
--- a/src/initial_check.ml
+++ b/src/initial_check.ml
@@ -73,7 +73,7 @@ let rec kind_to_string kind = match kind.k with
| K_Lam (kinds,kind) -> "Lam [" ^ string_of_list ", " kind_to_string kinds ^ "] -> " ^ (kind_to_string kind)
(*Envs is a tuple of used names (currently unused), map from id to kind, default order for vector types and literal vectors *)
-type envs = Nameset.t * kind Envmap.t * order
+type envs = Nameset.t * kind Envmap.t * order
type 'a envs_out = 'a * envs
let id_to_string (Id_aux(id,l)) =
@@ -207,6 +207,11 @@ let rec to_ast_typ (k_env : kind Envmap.t) (def_ord : order) (t: Parse_ast.atyp)
else typ_error l "Type constructor given incorrect number of arguments" (Some id) None None
| None -> typ_error l "Required a type constructor, encountered an unbound identifier" (Some id) None None
| _ -> typ_error l "Required a type constructor, encountered a base kind variable" (Some id) None None)
+ | Parse_ast.ATyp_exist (kids, nc, atyp) ->
+ let kids = List.map to_ast_var kids in
+ let k_env = List.fold_left Envmap.insert k_env (List.map (fun kid -> (var_to_string kid, {k=K_Nat})) kids) in
+ let exist_typ = to_ast_typ k_env def_ord atyp in
+ Typ_exist (kids, to_ast_nexp_constraint k_env nc, exist_typ)
| _ -> typ_error l "Required an item of kind Type, encountered an illegal form for this kind" None None None
), l)
@@ -231,7 +236,7 @@ and to_ast_nexp (k_env : kind Envmap.t) (n: Parse_ast.atyp) : Ast.nexp =
let n1 = to_ast_nexp k_env t1 in
let n2 = to_ast_nexp k_env t2 in
Nexp_aux (Nexp_minus (n1, n2), l)
- | _ -> typ_error l "Requred an item of kind Nat, encountered an illegal form for this kind" None None None)
+ | _ -> typ_error l "Required an item of kind Nat, encountered an illegal form for this kind" None None None)
and to_ast_order (k_env : kind Envmap.t) (def_ord : order) (o: Parse_ast.atyp) : Ast.order =
match o with
@@ -297,10 +302,10 @@ and to_ast_typ_arg (k_env : kind Envmap.t) (def_ord : order) (kind : kind) (arg
| K_Nat -> Typ_arg_nexp (to_ast_nexp k_env arg)
| K_Ord -> Typ_arg_order (to_ast_order k_env def_ord arg)
| K_Efct -> Typ_arg_effect (to_ast_effects k_env arg)
- | _ -> raise (Reporting_basic.err_unreachable l "To_ast_typ_arg received Lam kind or infer kind")),
+ | _ -> raise (Reporting_basic.err_unreachable l ("To_ast_typ_arg received Lam kind or infer kind: " ^ kind_to_string kind))),
l)
-let rec to_ast_nexp_constraint (k_env : kind Envmap.t) (c : Parse_ast.n_constraint) : n_constraint =
+and to_ast_nexp_constraint (k_env : kind Envmap.t) (c : Parse_ast.n_constraint) : n_constraint =
match c with
| Parse_ast.NC_aux(nc,l) ->
NC_aux( (match nc with
@@ -326,6 +331,8 @@ let rec to_ast_nexp_constraint (k_env : kind Envmap.t) (c : Parse_ast.n_constrai
NC_or (to_ast_nexp_constraint k_env nc1, to_ast_nexp_constraint k_env nc2)
| Parse_ast.NC_and (nc1, nc2) ->
NC_and (to_ast_nexp_constraint k_env nc1, to_ast_nexp_constraint k_env nc2)
+ | Parse_ast.NC_true -> NC_true
+ | Parse_ast.NC_false -> NC_false
), l)
(* Transforms a typquant while building first the kind environment of declared variables, and also the kind environment in context *)
diff --git a/src/lexer.mll b/src/lexer.mll
index 45d8b3c2..228ca38f 100644
--- a/src/lexer.mll
+++ b/src/lexer.mll
@@ -79,6 +79,7 @@ let kw_table =
("cast", (fun _ -> Cast));
("false", (fun _ -> False));
("forall", (fun _ -> Forall));
+ ("exist", (fun _ -> Exist));
("foreach", (fun _ -> Foreach));
("function", (fun x -> Function_));
("overload", (fun _ -> Overload));
diff --git a/src/lexer2.mll b/src/lexer2.mll
new file mode 100644
index 00000000..66e33878
--- /dev/null
+++ b/src/lexer2.mll
@@ -0,0 +1,286 @@
+(**************************************************************************)
+(* Sail *)
+(* *)
+(* Copyright (c) 2013-2017 *)
+(* Kathyrn Gray *)
+(* Shaked Flur *)
+(* Stephen Kell *)
+(* Gabriel Kerneis *)
+(* Robert Norton-Wright *)
+(* Christopher Pulte *)
+(* Peter Sewell *)
+(* *)
+(* All rights reserved. *)
+(* *)
+(* This software was developed by the University of Cambridge Computer *)
+(* Laboratory as part of the Rigorous Engineering of Mainstream Systems *)
+(* (REMS) project, funded by EPSRC grant EP/K008528/1. *)
+(* *)
+(* Redistribution and use in source and binary forms, with or without *)
+(* modification, are permitted provided that the following conditions *)
+(* are met: *)
+(* 1. Redistributions of source code must retain the above copyright *)
+(* notice, this list of conditions and the following disclaimer. *)
+(* 2. Redistributions in binary form must reproduce the above copyright *)
+(* notice, this list of conditions and the following disclaimer in *)
+(* the documentation and/or other materials provided with the *)
+(* distribution. *)
+(* *)
+(* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' *)
+(* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED *)
+(* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A *)
+(* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR *)
+(* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, *)
+(* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT *)
+(* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF *)
+(* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND *)
+(* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, *)
+(* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT *)
+(* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF *)
+(* SUCH DAMAGE. *)
+(**************************************************************************)
+
+{
+open Parser2
+module M = Map.Make(String)
+exception LexError of string * Lexing.position
+
+let r = fun s -> s (* Ulib.Text.of_latin1 *)
+(* Available as Scanf.unescaped since OCaml 4.0 but 3.12 is still common *)
+let unescaped s = Scanf.sscanf ("\"" ^ s ^ "\"") "%S%!" (fun x -> x)
+
+type prec = Infix | InfixL | InfixR
+
+let mk_operator prec n op =
+ match prec, n with
+ | Infix, 0 -> Op0 op
+ | Infix, 1 -> Op1 op
+ | Infix, 2 -> Op2 op
+ | Infix, 3 -> Op3 op
+ | Infix, 4 -> Op4 op
+ | Infix, 5 -> Op5 op
+ | Infix, 6 -> Op6 op
+ | Infix, 7 -> Op7 op
+ | Infix, 8 -> Op8 op
+ | Infix, 9 -> Op9 op
+ | InfixL, 0 -> Op0l op
+ | InfixL, 1 -> Op1l op
+ | InfixL, 2 -> Op2l op
+ | InfixL, 3 -> Op3l op
+ | InfixL, 4 -> Op4l op
+ | InfixL, 5 -> Op5l op
+ | InfixL, 6 -> Op6l op
+ | InfixL, 7 -> Op7l op
+ | InfixL, 8 -> Op8l op
+ | InfixL, 9 -> Op9l op
+ | InfixR, 0 -> Op0r op
+ | InfixR, 1 -> Op1r op
+ | InfixR, 2 -> Op2r op
+ | InfixR, 3 -> Op3r op
+ | InfixR, 4 -> Op4r op
+ | InfixR, 5 -> Op5r op
+ | InfixR, 6 -> Op6r op
+ | InfixR, 7 -> Op7r op
+ | InfixR, 8 -> Op8r op
+ | InfixR, 9 -> Op9r op
+ | _, _ -> assert false
+
+let operators = ref M.empty
+
+let kw_table =
+ List.fold_left
+ (fun r (x,y) -> M.add x y r)
+ M.empty
+ [
+ ("and", (fun _ -> And));
+ ("as", (fun _ -> As));
+ ("assert", (fun _ -> Assert));
+ ("bitzero", (fun _ -> Bitzero));
+ ("bitone", (fun _ -> Bitone));
+ ("by", (fun _ -> By));
+ ("match", (fun _ -> Match));
+ ("clause", (fun _ -> Clause));
+ ("const", (fun _ -> Const));
+ ("dec", (fun _ -> Dec));
+ ("def", (fun _ -> Def));
+ ("op", (fun _ -> Op));
+ ("default", (fun _ -> Default));
+ ("deinfix", (fun _ -> Deinfix));
+ ("effect", (fun _ -> Effect));
+ ("Effect", (fun _ -> EFFECT));
+ ("end", (fun _ -> End));
+ ("enum", (fun _ -> Enum));
+ ("else", (fun _ -> Else));
+ ("exit", (fun _ -> Exit));
+ ("extern", (fun _ -> Extern));
+ ("cast", (fun _ -> Cast));
+ ("false", (fun _ -> False));
+ ("forall", (fun _ -> Forall));
+ ("foreach", (fun _ -> Foreach));
+ ("function", (fun x -> Function_));
+ ("overload", (fun _ -> Overload));
+ ("if", (fun x -> If_));
+ ("in", (fun x -> In));
+ ("inc", (fun _ -> Inc));
+ ("IN", (fun x -> IN));
+ ("let", (fun x -> Let_));
+ ("member", (fun x -> Member));
+ ("Int", (fun x -> Int));
+ ("Order", (fun x -> Order));
+ ("pure", (fun x -> Pure));
+ ("rec", (fun x -> Rec));
+ ("register", (fun x -> Register));
+ ("return", (fun x -> Return));
+ ("scattered", (fun x -> Scattered));
+ ("sizeof", (fun x -> Sizeof));
+ ("constraint", (fun x -> Constraint));
+ ("struct", (fun x -> Struct));
+ ("then", (fun x -> Then));
+ ("true", (fun x -> True));
+ ("Type", (fun x -> TYPE));
+ ("type", (fun x -> Typedef));
+ ("undefined", (fun x -> Undefined));
+ ("union", (fun x -> Union));
+ ("with", (fun x -> With));
+ ("val", (fun x -> Val));
+
+ ("div", (fun x -> Div_));
+ ("mod", (fun x -> Mod));
+ ("mod_s", (fun x -> ModUnderS));
+ ("quot", (fun x -> Quot));
+ ("quot_s", (fun x -> QuotUnderS));
+ ("rem", (fun x -> Rem));
+
+ ("barr", (fun x -> Barr));
+ ("depend", (fun x -> Depend));
+ ("rreg", (fun x -> Rreg));
+ ("wreg", (fun x -> Wreg));
+ ("rmem", (fun x -> Rmem));
+ ("rmemt", (fun x -> Rmemt));
+ ("wmem", (fun x -> Wmem));
+ ("wmv", (fun x -> Wmv));
+ ("wmvt", (fun x -> Wmvt));
+ ("eamem", (fun x -> Eamem));
+ ("exmem", (fun x -> Exmem));
+ ("undef", (fun x -> Undef));
+ ("unspec", (fun x -> Unspec));
+ ("nondet", (fun x -> Nondet));
+ ("escape", (fun x -> Escape));
+ ]
+
+}
+
+let ws = [' ''\t']+
+let letter = ['a'-'z''A'-'Z']
+let digit = ['0'-'9']
+let binarydigit = ['0'-'1']
+let hexdigit = ['0'-'9''A'-'F''a'-'f']
+let alphanum = letter|digit
+let startident = letter|'_'
+let ident = alphanum|['_''\'']
+let tyvar_start = '\''
+let oper_char = ['!''$''%''&''*''+''-''.''/'':''<''=''>''?''@''^''|''~']
+let operator = oper_char+ ('_' ident)?
+let escape_sequence = ('\\' ['\\''\"''\'''n''t''b''r']) | ('\\' digit digit digit) | ('\\' 'x' hexdigit hexdigit)
+
+rule token = parse
+ | ws
+ { token lexbuf }
+ | "\n"
+ { Lexing.new_line lexbuf;
+ token lexbuf }
+ | "&" { (Amp(r"&")) }
+ | "@" { (At "@") }
+ | "|" { Bar }
+ | "2" ws "^" { TwoCaret }
+ | "^" { (Caret(r"^")) }
+ | ":" { Colon(r ":") }
+ | "," { Comma }
+ | ".." { DotDot }
+ | "." { Dot }
+ | "=" { (Eq(r"=")) }
+ | ">" { (Gt(r">")) }
+ | "-" { Minus }
+ | "<" { (Lt(r"<")) }
+ | "+" { (Plus(r"+")) }
+ | ";" { Semi }
+ | "*" { (Star(r"*")) }
+ | "_" { Under }
+ | "{" { Lcurly }
+ | "}" { Rcurly }
+ | "()" { Unit(r"()") }
+ | "(" { Lparen }
+ | ")" { Rparen }
+ | "[" { Lsquare }
+ | "]" { Rsquare }
+ | "!=" { (ExclEq(r"!=")) }
+ | ">=" { (GtEq(r">=")) }
+ | "->" { MinusGt }
+ | "=>" { EqGt(r "=>") }
+ | "<=" { (LtEq(r"<=")) }
+ | "infix" ws (digit as p) ws (operator as op)
+ { operators := M.add op (mk_operator Infix (int_of_string (Char.escaped p)) op) !operators;
+ token lexbuf }
+ | "infixl" ws (digit as p) ws (operator as op)
+ { operators := M.add op (mk_operator InfixL (int_of_string (Char.escaped p)) op) !operators;
+ token lexbuf }
+ | "infixr" ws (digit as p) ws (operator as op)
+ { operators := M.add op (mk_operator InfixR (int_of_string (Char.escaped p)) op) !operators;
+ token lexbuf }
+ | operator as op
+ { try M.find op !operators
+ with Not_found -> raise (LexError ("Operator fixity undeclared", Lexing.lexeme_start_p lexbuf)) }
+ | "(*" { comment (Lexing.lexeme_start_p lexbuf) 0 lexbuf; token lexbuf }
+ | "*)" { raise (LexError("Unbalanced comment", Lexing.lexeme_start_p lexbuf)) }
+ | (tyvar_start startident ident* as i) ":" { TyDecl(r i) }
+ | (startident ident* as i) ":" { Decl(r i) }
+ | tyvar_start startident ident* as i { TyVar(r i) }
+ | startident ident* as i { if M.mem i kw_table then
+ (M.find i kw_table) ()
+ (* else if
+ List.mem i default_type_names ||
+ List.mem i !custom_type_names then
+ TyId(r i) *)
+ else Id(r i) }
+ | (digit+ as i1) "." (digit+ as i2) { (Real (i1 ^ "." ^ i2)) }
+ | "-" (digit* as i1) "." (digit+ as i2) { (Real ("-" ^ i1 ^ "." ^ i2)) }
+ | digit+ as i { (Num(int_of_string i)) }
+ | "-" digit+ as i { (Num(int_of_string i)) }
+ | "0b" (binarydigit+ as i) { (Bin(i)) }
+ | "0x" (hexdigit+ as i) { (Hex(i)) }
+ | '"' { (String(
+ string (Lexing.lexeme_start_p lexbuf) (Buffer.create 10) lexbuf)) }
+ | eof { Eof }
+ | _ as c { raise (LexError(
+ Printf.sprintf "Unexpected character: %c" c,
+ Lexing.lexeme_start_p lexbuf)) }
+
+
+and comment pos depth = parse
+ | "(*" { comment pos (depth+1) lexbuf }
+ | "*)" { if depth = 0 then ()
+ else if depth > 0 then comment pos (depth-1) lexbuf
+ else assert false }
+ | "\n" { Lexing.new_line lexbuf;
+ comment pos depth lexbuf }
+ | '"' { ignore(string (Lexing.lexeme_start_p lexbuf) (Buffer.create 10) lexbuf);
+ comment pos depth lexbuf }
+ | _ { comment pos depth lexbuf }
+ | eof { raise (LexError("Unbalanced comment", pos)) }
+
+and string pos b = parse
+ | ([^'"''\n''\\']*'\n' as i) { Lexing.new_line lexbuf;
+ Buffer.add_string b i;
+ string pos b lexbuf }
+ | ([^'"''\n''\\']* as i) { Buffer.add_string b i; string pos b lexbuf }
+ | escape_sequence as i { Buffer.add_string b i; string pos b lexbuf }
+ | '\\' '\n' ws { Lexing.new_line lexbuf; string pos b lexbuf }
+ | '\\' { assert false (*raise (Reporting_basic.Fatal_error (Reporting_basic.Err_syntax (pos,
+ "illegal backslash escape in string"*) }
+ | '"' { let s = unescaped(Buffer.contents b) in
+ (*try Ulib.UTF8.validate s; s
+ with Ulib.UTF8.Malformed_code ->
+ raise (Reporting_basic.Fatal_error (Reporting_basic.Err_syntax (pos,
+ "String literal is not valid utf8"))) *) s }
+ | eof { assert false (*raise (Reporting_basic.Fatal_error (Reporting_basic.Err_syntax (pos,
+ "String literal not terminated")))*) }
diff --git a/src/monomorphise.ml b/src/monomorphise.ml
index 63be60b2..27b5237e 100644
--- a/src/monomorphise.ml
+++ b/src/monomorphise.ml
@@ -411,6 +411,91 @@ let remove_bound env pat =
let bound = bindings_from_pat pat in
List.fold_left (fun sub v -> ISubst.remove v env) env bound
+(* Remove explicit existential types from the AST, so that the sizes of
+ bitvectors will be filled in throughout.
+
+ Problems: there might be other existential types that we want to keep (e.g.
+ because they describe conditions needed for a vector index to be in range),
+ and inference might not be able to find a sufficiently precise type. *)
+let rec deexist_exp (E_aux (e,(l,(annot : Type_check.tannot))) as exp) =
+ let re e = E_aux (e,(l,annot)) in
+ match e with
+ | E_block es -> re (E_block (List.map deexist_exp es))
+ | E_nondet es -> re (E_nondet (List.map deexist_exp es))
+ | E_id _
+ | E_lit _
+ | E_sizeof _
+ | E_constraint _
+ -> (*Type_check.strip_exp*) exp
+ | E_cast (Typ_aux (Typ_exist (kids, nc, ty),l),(E_aux (_,(l',annot')) as e)) ->
+(* let env,_ = env_typ_expected l' annot' in
+ let plain_e = deexist_exp e in
+ let E_aux (_,(_,annot'')) = Type_check.infer_exp env plain_e in
+*)
+ deexist_exp e
+ | E_cast (ty,e) -> re (E_cast (ty,deexist_exp e))
+ | E_app (id,args) -> re (E_app (id,List.map deexist_exp args))
+ | E_app_infix (e1,id,e2) -> re (E_app_infix (deexist_exp e1,id,deexist_exp e2))
+ | E_tuple es -> re (E_tuple (List.map deexist_exp es))
+ | E_if (e1,e2,e3) -> re (E_if (deexist_exp e1,deexist_exp e2,deexist_exp e3))
+ | E_for (id,e1,e2,e3,ord,e4) ->
+ re (E_for (id,deexist_exp e1,deexist_exp e2,deexist_exp e3,ord,deexist_exp e4))
+ | E_vector es -> re (E_vector (List.map deexist_exp es))
+ | E_vector_indexed (ies,def) ->
+ re (E_vector_indexed
+ (List.map (fun (i,e) -> (i,deexist_exp e)) ies,
+ match def with
+ | Def_val_aux (Def_val_empty,(l,ann)) -> Def_val_aux (Def_val_empty,(l,ann))
+ | Def_val_aux (Def_val_dec e,(l,ann)) -> Def_val_aux (Def_val_dec (deexist_exp e),(l,ann))))
+ | E_vector_access (e1,e2) -> re (E_vector_access (deexist_exp e1,deexist_exp e2))
+ | E_vector_subrange (e1,e2,e3) -> re (E_vector_subrange (deexist_exp e1,deexist_exp e2,deexist_exp e3))
+ | E_vector_update (e1,e2,e3) -> re (E_vector_update (deexist_exp e1,deexist_exp e2,deexist_exp e3))
+ | E_vector_update_subrange (e1,e2,e3,e4) ->
+ re (E_vector_update_subrange (deexist_exp e1,deexist_exp e2,deexist_exp e3,deexist_exp e4))
+ | E_vector_append (e1,e2) -> re (E_vector_append (deexist_exp e1,deexist_exp e2))
+ | E_list es -> re (E_list (List.map deexist_exp es))
+ | E_cons (e1,e2) -> re (E_cons (deexist_exp e1,deexist_exp e2))
+ | E_record _ -> (*Type_check.strip_exp*) exp (* TODO *)
+ | E_record_update _ -> (*Type_check.strip_exp*) exp (* TODO *)
+ | E_field (e1,fld) -> re (E_field (deexist_exp e1,fld))
+ | E_case (e1,cases) -> re (E_case (deexist_exp e1, List.map deexist_pexp cases))
+ | E_let (lb,e1) -> re (E_let (deexist_letbind lb, deexist_exp e1))
+ | E_assign (le,e1) -> re (E_assign (deexist_lexp le, deexist_exp e1))
+ | E_exit e1 -> re (E_exit (deexist_exp e1))
+ | E_return e1 -> re (E_return (deexist_exp e1))
+ | E_assert (e1,e2) -> re (E_assert (deexist_exp e1,deexist_exp e2))
+and deexist_pexp (Pat_aux (pe,(l,annot))) =
+ match pe with
+ | Pat_exp (p,e) -> Pat_aux (Pat_exp ((*Type_check.strip_pat*) p,deexist_exp e),(l,annot))
+ | Pat_when (p,e1,e2) -> Pat_aux (Pat_when ((*Type_check.strip_pat*) p,deexist_exp e1,deexist_exp e2),(l,annot))
+and deexist_letbind (LB_aux (lb,(l,annot))) =
+ match lb with (* TODO, drop tysc if there's an exist? Do they even appear here? *)
+ | LB_val_explicit (tysc,p,e) -> LB_aux (LB_val_explicit (tysc,(*Type_check.strip_pat*) p,deexist_exp e),(l,annot))
+ | LB_val_implicit (p,e) -> LB_aux (LB_val_implicit ((*Type_check.strip_pat*) p,deexist_exp e),(l,annot))
+and deexist_lexp (LEXP_aux (le,(l,annot))) =
+ let re le = LEXP_aux (le,(l,annot)) in
+ match le with
+ | LEXP_id id -> re (LEXP_id id)
+ | LEXP_memory (id,es) -> re (LEXP_memory (id,List.map deexist_exp es))
+ | LEXP_cast (Typ_aux (Typ_exist _,_),id) -> re (LEXP_id id)
+ | LEXP_cast (ty,id) -> re (LEXP_cast (ty,id))
+ | LEXP_tup lexps -> re (LEXP_tup (List.map deexist_lexp lexps))
+ | LEXP_vector (le,e) -> re (LEXP_vector (deexist_lexp le, deexist_exp e))
+ | LEXP_vector_range (le,e1,e2) -> re (LEXP_vector_range (deexist_lexp le, deexist_exp e1, deexist_exp e2))
+ | LEXP_field (le,id) -> re (LEXP_field (deexist_lexp le, id))
+
+let deexist_funcl (FCL_aux (FCL_Funcl (id,p,e),(l,annot))) =
+ FCL_aux (FCL_Funcl (id, (*Type_check.strip_pat*) p, deexist_exp e),(l,annot))
+
+let deexist_def = function
+ | DEF_kind kd -> DEF_kind kd
+ | DEF_type td -> DEF_type td
+ | DEF_fundef (FD_aux (FD_function (recopt,topt,effopt,fcls),(l,annot))) ->
+ DEF_fundef (FD_aux (FD_function (recopt,topt,effopt,List.map deexist_funcl fcls),(l,annot)))
+ | x -> x
+
+let deexist (Defs defs) = Defs (List.map deexist_def defs)
+
(* Attempt simple pattern matches *)
let lit_match = function
@@ -1056,5 +1141,5 @@ let split_defs splits defs =
in
Defs (List.concat (List.map map_def defs))
in
- map_locs splits defs'
+ deexist (map_locs splits defs')
diff --git a/src/myocamlbuild.ml b/src/myocamlbuild.ml
index 765553d3..697ee9bd 100644
--- a/src/myocamlbuild.ml
+++ b/src/myocamlbuild.ml
@@ -101,5 +101,11 @@ dispatch begin function
mv (basename (env "%.lem")) (dirname (env "%.lem"))
]);
+ rule "old parser"
+ ~insert:(`top)
+ ~prods: ["parser.ml"; "parser.mli"]
+ ~dep: "parser.mly"
+ (fun env builder -> Cmd(S[V"OCAMLYACC"; T(tags_of_pathname "parser.mly"++"ocaml"++"parser"++"ocamlyacc"); Px "parser.mly"]));
+
| _ -> ()
end ;;
diff --git a/src/parse_ast.ml b/src/parse_ast.ml
index 526dffa8..8ce5e77d 100644
--- a/src/parse_ast.ml
+++ b/src/parse_ast.ml
@@ -144,18 +144,19 @@ atyp_aux = (* expressions of all kinds, to be translated to types, nats, orders
| ATyp_fn of atyp * atyp * atyp (* Function type (first-order only in user code), last atyp is an effect *)
| ATyp_tup of (atyp) list (* Tuple type *)
| ATyp_app of id * (atyp) list (* type constructor application *)
+ | ATyp_exist of kid list * n_constraint * atyp
and atyp =
ATyp_aux of atyp_aux * l
-type
+and
kinded_id_aux = (* optionally kind-annotated identifier *)
KOpt_none of kid (* identifier *)
| KOpt_kind of kind * kid (* kind-annotated variable *)
-type
+and
n_constraint_aux = (* constraint over kind $_$ *)
NC_fixed of atyp * atyp
| NC_bounded_ge of atyp * atyp
@@ -164,6 +165,8 @@ n_constraint_aux = (* constraint over kind $_$ *)
| NC_nat_set_bounded of kid * (int) list
| NC_or of n_constraint * n_constraint
| NC_and of n_constraint * n_constraint
+ | NC_true
+ | NC_false
and
n_constraint =
diff --git a/src/parser.mly b/src/parser.mly
index 62a2c9f4..d6da6e63 100644
--- a/src/parser.mly
+++ b/src/parser.mly
@@ -129,7 +129,7 @@ let make_vector_sugar order_set is_inc typ typ1 =
/*Terminals with no content*/
%token And Alias As Assert Bitzero Bitone Bits By Case Clause Const Dec Def Default Deinfix Effect EFFECT End
-%token Enumerate Else Exit Extern False Forall Foreach Overload Function_ If_ In IN Inc Let_ Member Nat NatNum Order Cast
+%token Enumerate Else Exit Extern False Forall Exist Foreach Overload Function_ If_ In IN Inc Let_ Member Nat NatNum Order Cast
%token Pure Rec Register Return Scattered Sizeof Struct Switch Then True TwoStarStar Type TYPE Typedef
%token Undefined Union With When Val Constraint
%token Barr Depend Rreg Wreg Rmem Rmemt Wmem Wmv Wmvt Eamem Exmem Undef Unspec Nondet Escape
@@ -286,6 +286,12 @@ tyvar:
| TyVar
{ (Kid_aux((Var($1)),loc ())) }
+tyvars:
+ | tyvar
+ { [$1] }
+ | tyvar tyvars
+ { $1 :: $2 }
+
atomic_kind:
| TYPE
{ bkloc BK_type }
@@ -431,7 +437,7 @@ nexp_typ4:
{ tloc (ATyp_id $2) }
| tyvar
{ tloc (ATyp_var $1) }
- | Lparen tup_typ Rparen
+ | Lparen exist_typ Rparen
{ $2 }
tup_typ_list:
@@ -446,10 +452,18 @@ tup_typ:
| Lparen tup_typ_list Rparen
{ tloc (ATyp_tup $2) }
-typ:
+exist_typ:
+ | Exist tyvars Comma nexp_constraint Dot tup_typ
+ { tloc (ATyp_exist ($2, $4, $6)) }
+ | Exist tyvars Dot tup_typ
+ { tloc (ATyp_exist ($2, NC_aux (NC_true, loc ()), $4)) }
| tup_typ
{ $1 }
- | tup_typ MinusGt tup_typ Effect effect_typ
+
+typ:
+ | exist_typ
+ { $1 }
+ | tup_typ MinusGt exist_typ Effect effect_typ
{ tloc (ATyp_fn($1,$3,$5)) }
lit:
@@ -483,7 +497,7 @@ atomic_pat:
{ ploc P_wild }
| Lparen pat As id Rparen
{ ploc (P_as($2,$4)) }
- | Lparen tup_typ Rparen atomic_pat
+ | Lparen exist_typ Rparen atomic_pat
{ ploc (P_typ($2,$4)) }
| id
{ ploc (P_app($1,[])) }
@@ -569,7 +583,7 @@ atomic_exp:
{ eloc (E_lit($1)) }
| Lparen exp Rparen
{ $2 }
- | Lparen tup_typ Rparen atomic_exp
+ | Lparen exist_typ Rparen atomic_exp
{ eloc (E_cast($2,$4)) }
| Lparen comma_exps Rparen
{ eloc (E_tuple($2)) }
@@ -1083,6 +1097,10 @@ nexp_constraint2:
{ NC_aux(NC_nat_set_bounded($1,$4), loc ()) }
| tyvar IN Lcurly nums Rcurly
{ NC_aux(NC_nat_set_bounded($1,$4), loc ()) }
+ | True
+ { NC_aux (NC_true, loc ()) }
+ | False
+ { NC_aux (NC_false, loc ()) }
| Lparen nexp_constraint Rparen
{ $2 }
diff --git a/src/parser2.mly b/src/parser2.mly
new file mode 100644
index 00000000..85305ad3
--- /dev/null
+++ b/src/parser2.mly
@@ -0,0 +1,997 @@
+/**************************************************************************/
+/* Sail */
+/* */
+/* Copyright (c) 2013-2017 */
+/*    Kathryn Gray                                                        */
+/* Shaked Flur */
+/* Stephen Kell */
+/* Gabriel Kerneis */
+/* Robert Norton-Wright */
+/* Christopher Pulte */
+/* Peter Sewell */
+/* */
+/* All rights reserved. */
+/* */
+/* This software was developed by the University of Cambridge Computer */
+/* Laboratory as part of the Rigorous Engineering of Mainstream Systems */
+/* (REMS) project, funded by EPSRC grant EP/K008528/1. */
+/* */
+/* Redistribution and use in source and binary forms, with or without */
+/* modification, are permitted provided that the following conditions */
+/* are met: */
+/* 1. Redistributions of source code must retain the above copyright */
+/* notice, this list of conditions and the following disclaimer. */
+/* 2. Redistributions in binary form must reproduce the above copyright */
+/* notice, this list of conditions and the following disclaimer in */
+/* the documentation and/or other materials provided with the */
+/* distribution. */
+/* */
+/* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' */
+/* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED */
+/* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A */
+/* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR */
+/* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, */
+/* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT */
+/* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF */
+/* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND */
+/* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, */
+/* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
+/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF */
+/* SUCH DAMAGE. */
+/**************************************************************************/
+
+%{
+
+let r = fun x -> x (* Ulib.Text.of_latin1 *)
+
+open Parse_ast
+
+let loc n m = Range (n, m) (* n = $startpos, m = $endpos at every call site *)
+
+let mk_id i n m = Id_aux (i, loc n m)
+let mk_kid str n m = Kid_aux (Var str, loc n m)
+
+let deinfix (Id_aux (Id v, l)) = Id_aux (DeIid v, l)
+
+let mk_effect e n m = BE_aux (e, loc n m)
+let mk_typ t n m = ATyp_aux (t, loc n m)
+let mk_pat p n m = P_aux (p, loc n m)
+let mk_pexp p n m = Pat_aux (p, loc n m)
+let mk_exp e n m = E_aux (e, loc n m)
+let mk_lit l n m = L_aux (l, loc n m)
+let mk_lit_exp l n m = mk_exp (E_lit (mk_lit l n m)) n m
+let mk_typschm tq t n m = TypSchm_aux (TypSchm_ts (tq, t), loc n m)
+let mk_nc nc n m = NC_aux (nc, loc n m)
+
+let mk_funcl f n m = FCL_aux (f, loc n m)
+let mk_fun fn n m = FD_aux (fn, loc n m)
+let mk_td t n m = TD_aux (t, loc n m)
+let mk_vs v n m = VS_aux (v, loc n m)
+let mk_reg_dec d n m = DEC_aux (d, loc n m)
+let mk_default d n m = DT_aux (d, loc n m)
+
+let qi_id_of_kopt (KOpt_aux (kopt_aux, l) as kopt) = QI_aux (QI_id kopt, l)
+
+let mk_recn = (Rec_aux((Rec_nonrec), Unknown))
+let mk_typqn = (TypQ_aux(TypQ_no_forall,Unknown))
+let mk_tannotn = Typ_annot_opt_aux(Typ_annot_opt_none,Unknown)
+let mk_eannotn = Effect_opt_aux(Effect_opt_pure,Unknown)
+let mk_namesectn = Name_sect_aux(Name_sect_none,Unknown)
+
+type lchain =
+ LC_lt
+| LC_lteq
+| LC_nexp of atyp
+
+let rec desugar_lchain chain s e =
+ match chain with
+ | [LC_nexp n1; LC_lteq; LC_nexp n2] ->
+ mk_nc (NC_bounded_le (n1, n2)) s e
+ | [LC_nexp n1; LC_lt; LC_nexp n2] ->
+ mk_nc (NC_bounded_le (mk_typ (ATyp_sum (n1, mk_typ (ATyp_constant 1) s e)) s e, n2)) s e
+ | (LC_nexp n1 :: LC_lteq :: LC_nexp n2 :: chain) ->
+ let nc1 = mk_nc (NC_bounded_le (n1, n2)) s e in
+ mk_nc (NC_and (nc1, desugar_lchain (LC_nexp n2 :: chain) s e)) s e
+ | (LC_nexp n1 :: LC_lt :: LC_nexp n2 :: chain) ->
+ let nc1 = mk_nc (NC_bounded_le (mk_typ (ATyp_sum (n1, mk_typ (ATyp_constant 1) s e)) s e, n2)) s e in
+ mk_nc (NC_and (nc1, desugar_lchain (LC_nexp n2 :: chain) s e)) s e
+ | _ -> assert false
+
+type rchain =
+ RC_gt
+| RC_gteq
+| RC_nexp of atyp
+
+let rec desugar_rchain chain s e =
+ match chain with
+ | [RC_nexp n1; RC_gteq; RC_nexp n2] ->
+ mk_nc (NC_bounded_ge (n1, n2)) s e
+ | [RC_nexp n1; RC_gt; RC_nexp n2] ->
+ mk_nc (NC_bounded_ge (n1, mk_typ (ATyp_sum (n2, mk_typ (ATyp_constant 1) s e)) s e)) s e
+ | (RC_nexp n1 :: RC_gteq :: RC_nexp n2 :: chain) ->
+ let nc1 = mk_nc (NC_bounded_ge (n1, n2)) s e in
+ mk_nc (NC_and (nc1, desugar_rchain (RC_nexp n2 :: chain) s e)) s e
+ | (RC_nexp n1 :: RC_gt :: RC_nexp n2 :: chain) ->
+ let nc1 = mk_nc (NC_bounded_ge (n1, mk_typ (ATyp_sum (n2, mk_typ (ATyp_constant 1) s e)) s e)) s e in
+ mk_nc (NC_and (nc1, desugar_rchain (RC_nexp n2 :: chain) s e)) s e
+ | _ -> assert false
+
+%}
+
+/*Terminals with no content*/
+
+%token And As Assert Bitzero Bitone Bits By Match Clause Const Dec Def Default Deinfix Effect EFFECT End Op
+%token Enum Else Exit Extern False Forall Exist Foreach Overload Function_ If_ In IN Inc Let_ Member Int Order Cast
+%token Pure Rec Register Return Scattered Sizeof Struct Then True TwoCaret Type TYPE Typedef
+%token Undefined Union With Val Constraint
+%token Barr Depend Rreg Wreg Rmem Rmemt Wmem Wmv Wmvt Eamem Exmem Undef Unspec Nondet Escape
+
+%nonassoc Then
+%nonassoc Else
+
+%token Div_ Mod ModUnderS Quot Rem QuotUnderS
+
+%token Bar Comma Dot Eof Minus Semi Under DotDot
+%token Lcurly Rcurly Lparen Rparen Lsquare Rsquare
+%token BarBar BarSquare BarBarSquare ColonEq ColonGt ColonSquare
+%token MinusGt LtBar LtColon SquareBar SquareBarBar
+
+/*Terminals with content*/
+
+%token <string> Id TyVar Decl TyDecl
+%token <int> Num
+%token <string> String Bin Hex Real
+
+%token <string> Amp At Caret Div Eq Excl Gt Lt Plus Star EqGt Unit
+%token <string> Colon ExclEq
+%token <string> GtEq GtEqPlus GtGt GtGtGt GtPlus HashGtGt HashLtLt
+%token <string> LtEq LtEqPlus LtGt LtLt LtLtLt LtPlus StarStar
+
+%token <string> Op0 Op1 Op2 Op3 Op4 Op5 Op6 Op7 Op8 Op9
+%token <string> Op0l Op1l Op2l Op3l Op4l Op5l Op6l Op7l Op8l Op9l
+%token <string> Op0r Op1r Op2r Op3r Op4r Op5r Op6r Op7r Op8r Op9r
+
+%start file
+%type <Parse_ast.defs> defs
+%type <Parse_ast.defs> file
+
+%%
+
+id:
+ | Id { mk_id (Id $1) $startpos $endpos }
+
+ | Op Op0 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op1 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op2 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op3 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op4 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op5 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op6 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op7 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op8 { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op9 { mk_id (DeIid $2) $startpos $endpos }
+
+ | Op Op0l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op1l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op2l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op3l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op4l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op5l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op6l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op7l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op8l { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op9l { mk_id (DeIid $2) $startpos $endpos }
+
+ | Op Op0r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op1r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op2r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op3r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op4r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op5r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op6r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op7r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op8r { mk_id (DeIid $2) $startpos $endpos }
+ | Op Op9r { mk_id (DeIid $2) $startpos $endpos }
+
+ | Op Plus { mk_id (DeIid "+") $startpos $endpos }
+ | Op Minus { mk_id (DeIid "-") $startpos $endpos }
+ | Op Star { mk_id (DeIid "*") $startpos $endpos }
+
+op0: Op0 { mk_id (Id $1) $startpos $endpos }
+op1: Op1 { mk_id (Id $1) $startpos $endpos }
+op2: Op2 { mk_id (Id $1) $startpos $endpos }
+op3: Op3 { mk_id (Id $1) $startpos $endpos }
+op4: Op4 { mk_id (Id $1) $startpos $endpos }
+op5: Op5 { mk_id (Id $1) $startpos $endpos }
+op6: Op6 { mk_id (Id $1) $startpos $endpos }
+op7: Op7 { mk_id (Id $1) $startpos $endpos }
+op8: Op8 { mk_id (Id $1) $startpos $endpos }
+op9: Op9 { mk_id (Id $1) $startpos $endpos }
+
+op0l: Op0l { mk_id (Id $1) $startpos $endpos }
+op1l: Op1l { mk_id (Id $1) $startpos $endpos }
+op2l: Op2l { mk_id (Id $1) $startpos $endpos }
+op3l: Op3l { mk_id (Id $1) $startpos $endpos }
+op4l: Op4l { mk_id (Id $1) $startpos $endpos }
+op5l: Op5l { mk_id (Id $1) $startpos $endpos }
+op6l: Op6l { mk_id (Id $1) $startpos $endpos }
+op7l: Op7l { mk_id (Id $1) $startpos $endpos }
+op8l: Op8l { mk_id (Id $1) $startpos $endpos }
+op9l: Op9l { mk_id (Id $1) $startpos $endpos }
+
+op0r: Op0r { mk_id (Id $1) $startpos $endpos }
+op1r: Op1r { mk_id (Id $1) $startpos $endpos }
+op2r: Op2r { mk_id (Id $1) $startpos $endpos }
+op3r: Op3r { mk_id (Id $1) $startpos $endpos }
+op4r: Op4r { mk_id (Id $1) $startpos $endpos }
+op5r: Op5r { mk_id (Id $1) $startpos $endpos }
+op6r: Op6r { mk_id (Id $1) $startpos $endpos }
+op7r: Op7r { mk_id (Id $1) $startpos $endpos }
+op8r: Op8r { mk_id (Id $1) $startpos $endpos }
+op9r: Op9r { mk_id (Id $1) $startpos $endpos }
+
+decl:
+ | Decl
+ { mk_id (Id $1) $startpos $endpos }
+
+id_list:
+ | id
+ { [$1] }
+ | id Comma id_list
+ { $1 :: $3 }
+
+kid:
+ | TyVar
+ { mk_kid $1 $startpos $endpos }
+
+tydecl:
+ | TyDecl
+ { mk_kid $1 $startpos $endpos }
+
+kid_list:
+ | kid
+ { [$1] }
+ | kid kid_list
+ { $1 :: $2 }
+
+nc:
+ | nc Bar nc_and
+ { mk_nc (NC_or ($1, $3)) $startpos $endpos }
+ | nc_and
+ { $1 }
+
+nc_and:
+ | nc_and Amp atomic_nc
+ { mk_nc (NC_and ($1, $3)) $startpos $endpos }
+ | atomic_nc
+ { $1 }
+
+atomic_nc:
+ | True
+ { mk_nc NC_true $startpos $endpos }
+ | False
+ { mk_nc NC_false $startpos $endpos }
+ | typ Eq typ
+ { mk_nc (NC_fixed ($1, $3)) $startpos $endpos }
+ | typ ExclEq typ
+ { mk_nc (NC_not_equal ($1, $3)) $startpos $endpos }
+ | nc_lchain
+ { desugar_lchain $1 $startpos $endpos }
+ | nc_rchain
+ { desugar_rchain $1 $startpos $endpos }
+ | Lparen nc Rparen
+ { $2 }
+ | kid In Lcurly num_list Rcurly
+ { mk_nc (NC_nat_set_bounded ($1, $4)) $startpos $endpos }
+
+num_list:
+ | Num
+ { [$1] }
+ | Num Comma num_list
+ { $1 :: $3 }
+
+nc_lchain:
+ | typ LtEq typ
+ { [LC_nexp $1; LC_lteq; LC_nexp $3] }
+ | typ Lt typ
+ { [LC_nexp $1; LC_lt; LC_nexp $3] }
+ | typ LtEq nc_lchain
+ { LC_nexp $1 :: LC_lteq :: $3 }
+ | typ Lt nc_lchain
+ { LC_nexp $1 :: LC_lt :: $3 }
+
+nc_rchain:
+ | typ GtEq typ
+ { [RC_nexp $1; RC_gteq; RC_nexp $3] }
+ | typ Gt typ
+ { [RC_nexp $1; RC_gt; RC_nexp $3] }
+ | typ GtEq nc_rchain
+ { RC_nexp $1 :: RC_gteq :: $3 }
+ | typ Gt nc_rchain
+ { RC_nexp $1 :: RC_gt :: $3 }
+
+typ:
+ | typ0
+ { $1 }
+
+/* The following implements all nine levels of user-defined precedence for
+operators in types, with both left, right and non-associative operators */
+
+typ0:
+ | typ1 op0 typ1 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ0l op0l typ1 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1 op0r typ0r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1 { $1 }
+typ0l:
+ | typ1 op0 typ1 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ0l op0l typ1 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1 { $1 }
+typ0r:
+ | typ1 op0 typ1 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1 op0r typ0r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1 { $1 }
+
+typ1:
+ | typ2 op1 typ2 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1l op1l typ2 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2 op1r typ1r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2 { $1 }
+typ1l:
+ | typ2 op1 typ2 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ1l op1l typ2 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2 { $1 }
+typ1r:
+ | typ2 op1 typ2 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2 op1r typ1r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2 { $1 }
+
+typ2:
+ | typ3 op2 typ3 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2l op2l typ3 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3 op2r typ2r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3 { $1 }
+typ2l:
+ | typ3 op2 typ3 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ2l op2l typ3 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3 { $1 }
+typ2r:
+ | typ3 op2 typ3 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3 op2r typ2r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3 { $1 }
+
+typ3:
+ | typ4 op3 typ4 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3l op3l typ4 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4 op3r typ3r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4 { $1 }
+typ3l:
+ | typ4 op3 typ4 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ3l op3l typ4 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4 { $1 }
+typ3r:
+ | typ4 op3 typ4 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4 op3r typ3r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4 { $1 }
+
+typ4:
+ | typ5 op4 typ5 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4l op4l typ5 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ5 op4r typ4r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ5 { $1 }
+typ4l:
+ | typ5 op4 typ5 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ4l op4l typ5 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ5 { $1 }
+typ4r:
+ | typ5 op4 typ5 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ5 op4r typ4r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ5 { $1 }
+
+typ5:
+  | typ6 op5 typ6 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+  | typ5l op5l typ6 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+  | typ6 op5r typ5r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+  | typ6 { $1 }
+typ5l:
+  | typ6 op5 typ6 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+  | typ5l op5l typ6 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+  | typ6 { $1 }
+typ5r:
+  | typ6 op5 typ6 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+  | typ6 op5r typ5r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos } /* was typ5: right-assoc chain must recurse via typ5r, as at every other level */
+  | typ6 { $1 }
+
+typ6:
+ | typ7 op6 typ7 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ6l op6l typ7 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7 op6r typ6r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ6l Plus typ7 { mk_typ (ATyp_sum ($1, $3)) $startpos $endpos }
+ | typ6l Minus typ7 { mk_typ (ATyp_minus ($1, $3)) $startpos $endpos }
+ | typ7 { $1 }
+typ6l:
+ | typ7 op6 typ7 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ6l op6l typ7 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ6l Plus typ7 { mk_typ (ATyp_sum ($1, $3)) $startpos $endpos }
+ | typ6l Minus typ7 { mk_typ (ATyp_minus ($1, $3)) $startpos $endpos }
+ | typ7 { $1 }
+typ6r:
+ | typ7 op6 typ7 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7 op6r typ6r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7 { $1 }
+
+typ7:
+ | typ8 op7 typ8 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7l op7l typ8 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ8 op7r typ7r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7l Star typ8 { mk_typ (ATyp_times ($1, $3)) $startpos $endpos }
+ | typ8 { $1 }
+typ7l:
+ | typ8 op7 typ8 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7l op7l typ8 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ7l Star typ8 { mk_typ (ATyp_times ($1, $3)) $startpos $endpos }
+ | typ8 { $1 }
+typ7r:
+ | typ8 op7 typ8 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ8 op7r typ7r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ8 { $1 }
+
+typ8:
+ | typ9 op8 typ9 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ8l op8l typ9 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ9 op8r typ8r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | TwoCaret typ9 { mk_typ (ATyp_exp $2) $startpos $endpos }
+ | typ9 { $1 }
+typ8l:
+ | typ9 op8 typ9 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ8l op8l typ9 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | TwoCaret typ9 { mk_typ (ATyp_exp $2) $startpos $endpos }
+ | typ9 { $1 }
+typ8r:
+ | typ9 op8 typ9 { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ9 op8r typ8r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | TwoCaret typ9 { mk_typ (ATyp_exp $2) $startpos $endpos }
+ | typ9 { $1 }
+
+typ9:
+ | atomic_typ op9 atomic_typ { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ9l op9l atomic_typ { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | atomic_typ op9r typ9r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | atomic_typ { $1 }
+typ9l:
+ | atomic_typ op9 atomic_typ { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | typ9l op9l atomic_typ { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | atomic_typ { $1 }
+typ9r:
+ | atomic_typ op9 atomic_typ { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | atomic_typ op9r typ9r { mk_typ (ATyp_app (deinfix $2, [$1; $3])) $startpos $endpos }
+ | atomic_typ { $1 }
+
+atomic_typ:
+ | id
+ { mk_typ (ATyp_id $1) $startpos $endpos }
+ | kid
+ { mk_typ (ATyp_var $1) $startpos $endpos }
+ | Num
+ { mk_typ (ATyp_constant $1) $startpos $endpos }
+ | Dec
+ { mk_typ ATyp_dec $startpos $endpos }
+ | Inc
+ { mk_typ ATyp_inc $startpos $endpos }
+ | id Lparen typ_list Rparen
+ { mk_typ (ATyp_app ($1, $3)) $startpos $endpos }
+ | Lparen typ Rparen
+ { $2 }
+ | Lparen typ Comma typ_list Rparen
+ { mk_typ (ATyp_tup ($2 :: $4)) $startpos $endpos }
+ | Lcurly kid_list Dot typ Rcurly
+ { mk_typ (ATyp_exist ($2, NC_aux (NC_true, loc $startpos $endpos), $4)) $startpos $endpos }
+ | Lcurly kid_list Comma nc Dot typ Rcurly
+ { mk_typ (ATyp_exist ($2, $4, $6)) $startpos $endpos }
+
+typ_list:
+ | typ
+ { [$1] }
+ | typ Comma typ_list
+ { $1 :: $3 }
+
+base_kind:
+ | Int
+ { BK_aux (BK_nat, loc $startpos $endpos) }
+ | TYPE
+ { BK_aux (BK_type, loc $startpos $endpos) }
+ | Order
+ { BK_aux (BK_order, loc $startpos $endpos) }
+
+kind:
+ | base_kind
+ { K_aux (K_kind [$1], loc $startpos $endpos) }
+
+kopt:
+ | tydecl kind
+ { KOpt_aux (KOpt_kind ($2, $1), loc $startpos $endpos) }
+ | Lparen kid Colon kind Rparen
+ { KOpt_aux (KOpt_kind ($4, $2), loc $startpos $endpos) }
+ | kid
+ { KOpt_aux (KOpt_none $1, loc $startpos $endpos) }
+
+kopt_list:
+ | kopt
+ { [$1] }
+ | kopt kopt_list
+ { $1 :: $2 }
+
+typquant:
+ | kopt_list Comma nc
+ { let qi_nc = QI_aux (QI_const $3, loc $startpos($3) $endpos($3)) in
+ TypQ_aux (TypQ_tq (List.map qi_id_of_kopt $1 @ [qi_nc]), loc $startpos $endpos) }
+ | kopt_list
+ { TypQ_aux (TypQ_tq (List.map qi_id_of_kopt $1), loc $startpos $endpos) }
+
+effect:
+ | Barr
+ { mk_effect BE_barr $startpos $endpos }
+ | Depend
+ { mk_effect BE_depend $startpos $endpos }
+ | Rreg
+ { mk_effect BE_rreg $startpos $endpos }
+ | Wreg
+ { mk_effect BE_wreg $startpos $endpos }
+ | Rmem
+ { mk_effect BE_rmem $startpos $endpos }
+ | Rmemt
+ { mk_effect BE_rmemt $startpos $endpos }
+ | Wmem
+ { mk_effect BE_wmem $startpos $endpos }
+ | Wmv
+ { mk_effect BE_wmv $startpos $endpos }
+ | Wmvt
+ { mk_effect BE_wmvt $startpos $endpos }
+ | Eamem
+ { mk_effect BE_eamem $startpos $endpos }
+ | Exmem
+ { mk_effect BE_exmem $startpos $endpos }
+ | Undef
+ { mk_effect BE_undef $startpos $endpos }
+ | Unspec
+ { mk_effect BE_unspec $startpos $endpos }
+ | Nondet
+ { mk_effect BE_nondet $startpos $endpos }
+ | Escape
+ { mk_effect BE_escape $startpos $endpos }
+
+effect_list:
+ | effect
+ { [$1] }
+ | effect Comma effect_list
+ { $1::$3 }
+
+effect_set:
+ | Lcurly effect_list Rcurly
+ { mk_typ (ATyp_set $2) $startpos $endpos }
+ | Pure
+ { mk_typ (ATyp_set []) $startpos $endpos }
+
+typschm:
+ | typ MinusGt typ
+ { (fun s e -> mk_typschm mk_typqn (mk_typ (ATyp_fn ($1, $3, mk_typ (ATyp_set []) s e)) s e) s e) $startpos $endpos }
+ | Forall typquant Dot typ MinusGt typ
+ { (fun s e -> mk_typschm $2 (mk_typ (ATyp_fn ($4, $6, mk_typ (ATyp_set []) s e)) s e) s e) $startpos $endpos }
+ | typ MinusGt typ Effect effect_set
+ { (fun s e -> mk_typschm mk_typqn (mk_typ (ATyp_fn ($1, $3, $5)) s e) s e) $startpos $endpos }
+ | Forall typquant Dot typ MinusGt typ Effect effect_set
+ { (fun s e -> mk_typschm $2 (mk_typ (ATyp_fn ($4, $6, $8)) s e) s e) $startpos $endpos }
+
+pat:
+ | atomic_pat
+ { $1 }
+ | atomic_pat As id
+ { mk_pat (P_as ($1, $3)) $startpos $endpos }
+ | Lparen pat Comma pat_list Rparen
+ { mk_pat (P_tup ($2 :: $4)) $startpos $endpos }
+
+pat_list:
+ | pat
+ { [$1] }
+ | pat Comma pat_list
+ { $1 :: $3 }
+
+atomic_pat:
+ | Under
+ { mk_pat (P_wild) $startpos $endpos }
+ | lit
+ { mk_pat (P_lit $1) $startpos $endpos }
+ | id
+ { mk_pat (P_id $1) $startpos $endpos }
+ | pat Colon typ
+ { mk_pat (P_typ ($3, $1)) $startpos $endpos }
+ | decl typ
+ { mk_pat (P_typ ($2, mk_pat (P_id $1) $startpos $endpos($1))) $startpos $endpos }
+ | Lparen pat Rparen
+ { $2 }
+
+lit:
+ | True
+ { mk_lit L_true $startpos $endpos }
+ | False
+ { mk_lit L_false $startpos $endpos }
+ | Unit
+ { mk_lit L_unit $startpos $endpos }
+ | Num
+ { mk_lit (L_num $1) $startpos $endpos }
+ | Undefined
+ { mk_lit L_undef $startpos $endpos }
+ | Bitzero
+ { mk_lit L_zero $startpos $endpos }
+ | Bitone
+ { mk_lit L_one $startpos $endpos }
+ | Bin
+ { mk_lit (L_bin $1) $startpos $endpos }
+ | Hex
+ { mk_lit (L_hex $1) $startpos $endpos }
+ | String
+ { mk_lit (L_string $1) $startpos $endpos }
+
+exp:
+ | cast_exp Colon typ
+ { mk_exp (E_cast ($3, $1)) $startpos $endpos }
+ | cast_exp
+ { $1 }
+
+cast_exp:
+ | exp0
+ { $1 }
+ | atomic_exp Eq cast_exp
+ { mk_exp (E_assign ($1, $3)) $startpos $endpos }
+ | Let_ letbind In cast_exp
+ { mk_exp (E_let ($2, $4)) $startpos $endpos }
+ | Lcurly block Rcurly
+ { mk_exp (E_block $2) $startpos $endpos }
+ | Return cast_exp
+ { mk_exp (E_return $2) $startpos $endpos }
+ | If_ exp Then cast_exp Else cast_exp
+ { mk_exp (E_if ($2, $4, $6)) $startpos $endpos }
+ | If_ exp Then cast_exp
+ { mk_exp (E_if ($2, $4, mk_lit_exp L_unit $endpos($4) $endpos($4))) $startpos $endpos }
+ | Match exp0 Lcurly case_list Rcurly
+ { mk_exp (E_case ($2, $4)) $startpos $endpos }
+ | Lparen exp Comma exp_list Rparen
+ { mk_exp (E_tuple ($2 :: $4)) $startpos $endpos }
+
+/* The following implements all nine levels of user-defined precedence for
+operators in expressions, with both left, right and non-associative operators */
+
+exp0:
+ | exp1 op0 exp1 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp0l op0l exp1 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1 op0r exp0r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1 { $1 }
+exp0l:
+ | exp1 op0 exp1 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp0l op0l exp1 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1 { $1 }
+exp0r:
+ | exp1 op0 exp1 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1 op0r exp0r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1 { $1 }
+
+exp1:
+ | exp2 op1 exp2 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1l op1l exp2 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2 op1r exp1r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2 { $1 }
+exp1l:
+ | exp2 op1 exp2 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp1l op1l exp2 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2 { $1 }
+exp1r:
+ | exp2 op1 exp2 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2 op1r exp1r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2 { $1 }
+
+exp2:
+ | exp3 op2 exp3 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2l op2l exp3 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3 op2r exp2r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3 { $1 }
+exp2l:
+ | exp3 op2 exp3 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp2l op2l exp3 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3 { $1 }
+exp2r:
+ | exp3 op2 exp3 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3 op2r exp2r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3 { $1 }
+
+exp3:
+ | exp4 op3 exp4 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3l op3l exp4 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4 op3r exp3r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4 { $1 }
+exp3l:
+ | exp4 op3 exp4 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp3l op3l exp4 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4 { $1 }
+exp3r:
+ | exp4 op3 exp4 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4 op3r exp3r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4 { $1 }
+
+exp4:
+ | exp5 op4 exp5 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4l op4l exp5 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5 op4r exp4r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5 { $1 }
+exp4l:
+ | exp5 op4 exp5 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp4l op4l exp5 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5 { $1 }
+exp4r:
+ | exp5 op4 exp5 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5 op4r exp4r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5 { $1 }
+
+exp5:
+ | exp6 op5 exp6 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5l op5l exp6 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6 op5r exp5r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6 At exp5r { mk_exp (E_vector_append ($1, $3)) $startpos $endpos }
+ | exp6 { $1 }
+exp5l:
+ | exp6 op5 exp6 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp5l op5l exp6 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6 { $1 }
+exp5r:
+ | exp6 op5 exp6 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6 op5r exp5r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6 At exp5r { mk_exp (E_vector_append ($1, $3)) $startpos $endpos }
+ | exp6 { $1 }
+
+exp6:
+ | exp7 op6 exp7 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6l op6l exp7 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7 op6r exp6r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6l Plus exp7 { mk_exp (E_app_infix ($1, mk_id (Id "+") $startpos($2) $endpos($2), $3)) $startpos $endpos }
+ | exp6l Minus exp7 { mk_exp (E_app_infix ($1, mk_id (Id "-") $startpos($2) $endpos($2), $3)) $startpos $endpos }
+ | exp7 { $1 }
+exp6l:
+ | exp7 op6 exp7 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6l op6l exp7 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp6l Plus exp7 { mk_exp (E_app_infix ($1, mk_id (Id "+") $startpos($2) $endpos($2), $3)) $startpos $endpos }
+ | exp6l Minus exp7 { mk_exp (E_app_infix ($1, mk_id (Id "-") $startpos($2) $endpos($2), $3)) $startpos $endpos }
+ | exp7 { $1 }
+exp6r:
+ | exp7 op6 exp7 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7 op6r exp6r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7 { $1 }
+
+exp7:
+ | exp8 op7 exp8 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7l op7l exp8 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp8 op7r exp7r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7l Star exp8 { mk_exp (E_app_infix ($1, mk_id (Id "*") $startpos($2) $endpos($2), $3)) $startpos $endpos }
+ | exp8 { $1 }
+exp7l:
+ | exp8 op7 exp8 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7l op7l exp8 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp7l Star exp8 { mk_exp (E_app_infix ($1, mk_id (Id "*") $startpos($2) $endpos($2), $3)) $startpos $endpos }
+ | exp8 { $1 }
+exp7r:
+ | exp8 op7 exp8 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp8 op7r exp7r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp8 { $1 }
+
+exp8:
+ | exp9 op8 exp9 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp8l op8l exp9 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp9 op8r exp8r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | TwoCaret exp9 { mk_exp (E_app (mk_id (Id "pow2") $startpos($1) $endpos($1), [$2])) $startpos $endpos }
+ | exp9 { $1 }
+exp8l:
+ | exp9 op8 exp9 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp8l op8l exp9 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | TwoCaret exp9 { mk_exp (E_app (mk_id (Id "pow2") $startpos($1) $endpos($1), [$2])) $startpos $endpos }
+ | exp9 { $1 }
+exp8r:
+ | exp9 op8 exp9 { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp9 op8r exp8r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | TwoCaret exp9 { mk_exp (E_app (mk_id (Id "pow2") $startpos($1) $endpos($1), [$2])) $startpos $endpos }
+ | exp9 { $1 }
+
+exp9:
+ | atomic_exp op9 atomic_exp { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp9l op9l atomic_exp { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | atomic_exp op9r exp9r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | atomic_exp { $1 }
+exp9l:
+ | atomic_exp op9 atomic_exp { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | exp9l op9l atomic_exp { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | atomic_exp { $1 }
+exp9r:
+ | atomic_exp op9 atomic_exp { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | atomic_exp op9r exp9r { mk_exp (E_app_infix ($1, $2, $3)) $startpos $endpos }
+ | atomic_exp { $1 }
+
+case:
+ | pat EqGt exp
+ { mk_pexp (Pat_exp ($1, $3)) $startpos $endpos }
+ | pat If_ exp EqGt exp
+ { mk_pexp (Pat_when ($1, $3, $5)) $startpos $endpos }
+
+case_list:
+ | case
+ { [$1] }
+ | case Comma case_list
+ { $1 :: $3 }
+
+block:
+ | exp
+ { [$1] }
+ | Let_ letbind Semi block
+ { [mk_exp (E_let ($2, mk_exp (E_block $4) $startpos($4) $endpos)) $startpos $endpos] }
+ | exp Semi /* Allow trailing semicolon in block */
+ { [$1] }
+ | exp Semi block
+ { $1 :: $3 }
+
+%inline letbind:
+ | pat Eq exp
+ { LB_aux (LB_val_implicit ($1, $3), loc $startpos $endpos) }
+
+atomic_exp:
+ | lit
+ { mk_exp (E_lit $1) $startpos $endpos }
+ | decl atomic_typ
+ { mk_exp (E_cast ($2, mk_exp (E_id $1) $startpos $endpos($1))) $startpos $endpos }
+ | id
+ { mk_exp (E_id $1) $startpos $endpos }
+ | id Unit
+ { mk_exp (E_app ($1, [mk_lit_exp L_unit $startpos($2) $endpos])) $startpos $endpos }
+ | id Lparen exp_list Rparen
+ { mk_exp (E_app ($1, $3)) $startpos $endpos }
+ | Sizeof Lparen typ Rparen
+ { mk_exp (E_sizeof $3) $startpos $endpos }
+ | Constraint Lparen nc Rparen
+ { mk_exp (E_constraint $3) $startpos $endpos }
+ | Exit Unit
+ { mk_exp (E_exit (mk_lit_exp L_unit $startpos($2) $endpos)) $startpos $endpos }
+ | Exit Lparen exp Rparen
+ { mk_exp (E_exit $3) $startpos $endpos }
+ | Assert Lparen exp Comma exp Rparen
+ { mk_exp (E_assert ($3, $5)) $startpos $endpos }
+ | atomic_exp Lsquare exp Rsquare
+ { mk_exp (E_vector_access ($1, $3)) $startpos $endpos }
+ | atomic_exp Lsquare exp DotDot exp Rsquare
+ { mk_exp (E_vector_subrange ($1, $3, $5)) $startpos $endpos }
+ | Lsquare exp_list Rsquare
+ { mk_exp (E_vector $2) $startpos $endpos }
+ | Lsquare exp With atomic_exp Eq exp Rsquare
+ { mk_exp (E_vector_update ($2, $4, $6)) $startpos $endpos }
+ | Lsquare exp With atomic_exp DotDot atomic_exp Eq exp Rsquare
+ { mk_exp (E_vector_update_subrange ($2, $4, $6, $8)) $startpos $endpos }
+ | Lparen exp Rparen
+ { $2 }
+
+exp_list:
+ | exp
+ { [$1] }
+ | exp Comma exp_list
+ { $1 :: $3 }
+
+funcl:
+ | id pat Eq exp
+ { mk_funcl (FCL_Funcl ($1, $2, $4)) $startpos $endpos }
+
+type_def:
+ | Typedef id typquant Eq typ
+ { mk_td (TD_abbrev ($2, mk_namesectn, mk_typschm $3 $5 $startpos($3) $endpos)) $startpos $endpos }
+ | Typedef id Eq typ
+ { mk_td (TD_abbrev ($2, mk_namesectn, mk_typschm mk_typqn $4 $startpos($4) $endpos)) $startpos $endpos }
+ | Struct id typquant Eq Lcurly struct_fields Rcurly
+ { mk_td (TD_record ($2, mk_namesectn, $3, $6, false)) $startpos $endpos }
+ | Enum id Eq enum_bar
+ { mk_td (TD_enum ($2, mk_namesectn, $4, false)) $startpos $endpos }
+ | Enum id Eq Lcurly enum Rcurly
+ { mk_td (TD_enum ($2, mk_namesectn, $5, false)) $startpos $endpos }
+ | Union id typquant Eq Lcurly type_unions Rcurly
+ { mk_td (TD_variant ($2, mk_namesectn, $3, $6, false)) $startpos $endpos }
+
+enum_bar:
+ | id
+ { [$1] }
+ | id Bar enum_bar
+ { $1 :: $3 }
+
+enum:
+ | id
+ { [$1] }
+ | id Comma enum
+ { $1 :: $3 }
+
+struct_field:
+ | decl typ
+ { ($2, $1) }
+ | id Colon typ
+ { ($3, $1) }
+
+struct_fields:
+ | struct_field
+ { [$1] }
+ | struct_field Comma
+ { [$1] }
+ | struct_field Comma struct_fields
+ { $1 :: $3 }
+
+type_union:
+ | decl typ
+ { Tu_aux (Tu_ty_id ($2, $1), loc $startpos $endpos) }
+ | id Colon typ
+ { Tu_aux (Tu_ty_id ($3, $1), loc $startpos $endpos) }
+ | id
+ { Tu_aux (Tu_id $1, loc $startpos $endpos) }
+
+type_unions:
+ | type_union
+ { [$1] }
+ | type_union Comma
+ { [$1] }
+ | type_union Comma type_unions
+ { $1 :: $3 }
+
+fun_def:
+ | Function_ funcl
+ { mk_fun (FD_function (mk_recn, mk_tannotn, mk_eannotn, [$2])) $startpos $endpos }
+
+let_def:
+ | Let_ letbind
+ { $2 }
+
+val_spec_def:
+ | Val decl typschm
+ { mk_vs (VS_val_spec ($3, $2)) $startpos $endpos }
+ | Val id Colon typschm
+ { mk_vs (VS_val_spec ($4, $2)) $startpos $endpos }
+
+register_def:
+ | Register decl typ
+ { mk_reg_dec (DEC_reg ($3, $2)) $startpos $endpos }
+ | Register id Colon typ
+ { mk_reg_dec (DEC_reg ($4, $2)) $startpos $endpos }
+
+default:
+ | Default base_kind Inc
+ { mk_default (DT_order ($2, mk_typ ATyp_inc $startpos($3) $endpos)) $startpos $endpos }
+ | Default base_kind Dec
+ { mk_default (DT_order ($2, mk_typ ATyp_dec $startpos($3) $endpos)) $startpos $endpos }
+
+def:
+ | fun_def
+ { DEF_fundef $1 }
+ | val_spec_def
+ { DEF_spec $1 }
+ | type_def
+ { DEF_type $1 }
+ | let_def
+ { DEF_val $1 }
+ | register_def
+ { DEF_reg_dec $1 }
+ | Overload id Eq Lcurly id_list Rcurly
+ { DEF_overload ($2, $5) }
+ | Overload id Eq enum_bar
+ { DEF_overload ($2, $4) }
+ | default
+ { DEF_default $1 }
+
+defs_list:
+ | def
+ { [$1] }
+ | def defs_list
+ { $1::$2 }
+
+defs:
+ | defs_list
+ { (Defs $1) }
+
+file:
+ | defs Eof
+ { $1 }
+
diff --git a/src/pretty_print_common.ml b/src/pretty_print_common.ml
index 39c5d97d..bd43c1a7 100644
--- a/src/pretty_print_common.ml
+++ b/src/pretty_print_common.ml
@@ -112,7 +112,7 @@ let doc_ord (Ord_aux(o,_)) = match o with
| Ord_inc -> string "inc"
| Ord_dec -> string "dec"
-let doc_typ, doc_atomic_typ, doc_nexp =
+let doc_typ, doc_atomic_typ, doc_nexp, doc_nexp_constraint =
(* following the structure of parser for precedence *)
let rec typ ty = fn_typ ty
and fn_typ ((Typ_aux (t, _)) as ty) = match t with
@@ -120,6 +120,8 @@ let doc_typ, doc_atomic_typ, doc_nexp =
separate space [tup_typ arg; arrow; fn_typ ret; string "effect"; doc_effects efct]
| _ -> tup_typ ty
and tup_typ ((Typ_aux (t, _)) as ty) = match t with
+ | Typ_exist (kids, nc, ty) ->
+ separate space [string "exist"; separate_map space doc_var kids ^^ comma; nexp_constraint nc ^^ dot; typ ty]
| Typ_tup typs -> parens (separate_map comma_sp app_typ typs)
| _ -> app_typ ty
and app_typ ((Typ_aux (t, _)) as ty) = match t with
@@ -215,8 +217,23 @@ let doc_typ, doc_atomic_typ, doc_nexp =
| Nexp_neg _ | Nexp_exp _ | Nexp_times _ | Nexp_sum _ | Nexp_minus _->
group (parens (nexp ne))
- (* expose doc_typ, doc_atomic_typ and doc_nexp *)
- in typ, atomic_typ, atomic_nexp_typ
+ and nexp_constraint (NC_aux(nc,_)) = match nc with
+ | NC_fixed(n1,n2) -> doc_op equals (nexp n1) (nexp n2)
+ | NC_not_equal (n1, n2) -> doc_op (string "!=") (nexp n1) (nexp n2)
+ | NC_bounded_ge(n1,n2) -> doc_op (string ">=") (nexp n1) (nexp n2)
+ | NC_bounded_le(n1,n2) -> doc_op (string "<=") (nexp n1) (nexp n2)
+ | NC_nat_set_bounded(v,bounds) ->
+ doc_op (string "IN") (doc_var v)
+ (braces (separate_map comma_sp doc_int bounds))
+ | NC_or (nc1, nc2) ->
+ parens (separate space [nexp_constraint nc1; string "|"; nexp_constraint nc2])
+ | NC_and (nc1, nc2) ->
+ separate space [nexp_constraint nc1; string "&"; nexp_constraint nc2]
+ | NC_true -> string "true"
+ | NC_false -> string "false"
+
+ (* expose doc_typ, doc_atomic_typ, doc_nexp and doc_nexp_constraint *)
+ in typ, atomic_typ, nexp, nexp_constraint
let pp_format_id (Id_aux(i,_)) =
match i with
diff --git a/src/pretty_print_lem_ast.ml b/src/pretty_print_lem_ast.ml
index 0875aee7..018a93f5 100644
--- a/src/pretty_print_lem_ast.ml
+++ b/src/pretty_print_lem_ast.ml
@@ -154,6 +154,7 @@ let rec pp_format_typ_lem (Typ_aux(t,l)) =
(pp_format_effects_lem efct) ^ ")"
| Typ_tup(typs) -> "(Typ_tup [" ^ (list_format "; " pp_format_typ_lem typs) ^ "])"
| Typ_app(id,args) -> "(Typ_app " ^ (pp_format_id_lem id) ^ " [" ^ (list_format "; " pp_format_typ_arg_lem args) ^ "])"
+ | Typ_exist(kids,nc,typ) -> "(Typ_exist ([" ^ list_format ";" pp_format_var_lem kids ^ "], " ^ pp_format_nexp_constraint_lem nc ^ ", " ^ pp_format_typ_lem typ ^ "))"
| Typ_wild -> "Typ_wild") ^ " " ^
(pp_format_l_lem l) ^ ")"
and pp_format_nexp_lem (Nexp_aux(n,l)) =
@@ -212,14 +213,7 @@ and pp_format_typ_arg_lem (Typ_arg_aux(t,l)) =
| Typ_arg_order(o) -> "(Typ_arg_order " ^ pp_format_ord_lem o ^ ")"
| Typ_arg_effect(e) -> "(Typ_arg_effect " ^ pp_format_effects_lem e ^ ")") ^ " " ^
(pp_format_l_lem l) ^ ")"
-
-let pp_lem_typ ppf t = base ppf (pp_format_typ_lem t)
-let pp_lem_nexp ppf n = base ppf (pp_format_nexp_lem n)
-let pp_lem_ord ppf o = base ppf (pp_format_ord_lem o)
-let pp_lem_effects ppf e = base ppf (pp_format_effects_lem e)
-let pp_lem_beffect ppf be = base ppf (pp_format_base_effect_lem be)
-
-let rec pp_format_nexp_constraint_lem (NC_aux(nc,l)) =
+and pp_format_nexp_constraint_lem (NC_aux(nc,l)) =
"(NC_aux " ^
(match nc with
| NC_fixed(n1,n2) -> "(NC_fixed " ^ pp_format_nexp_lem n1 ^ " " ^ pp_format_nexp_lem n2 ^ ")"
@@ -235,6 +229,12 @@ let rec pp_format_nexp_constraint_lem (NC_aux(nc,l)) =
"])") ^ " " ^
(pp_format_l_lem l) ^ ")"
+let pp_lem_typ ppf t = base ppf (pp_format_typ_lem t)
+let pp_lem_nexp ppf n = base ppf (pp_format_nexp_lem n)
+let pp_lem_ord ppf o = base ppf (pp_format_ord_lem o)
+let pp_lem_effects ppf e = base ppf (pp_format_effects_lem e)
+let pp_lem_beffect ppf be = base ppf (pp_format_base_effect_lem be)
+
let pp_lem_nexp_constraint ppf nc = base ppf (pp_format_nexp_constraint_lem nc)
let pp_format_qi_lem (QI_aux(qi,lq)) =
diff --git a/src/pretty_print_sail.ml b/src/pretty_print_sail.ml
index 4428bb62..a63df7ea 100644
--- a/src/pretty_print_sail.ml
+++ b/src/pretty_print_sail.ml
@@ -58,19 +58,6 @@ let doc_bkind (BK_aux(k,_)) =
let doc_kind (K_aux(K_kind(klst),_)) =
separate_map (spaces arrow) doc_bkind klst
-let rec doc_nexp_constraint (NC_aux(nc,_)) = match nc with
- | NC_fixed(n1,n2) -> doc_op equals (doc_nexp n1) (doc_nexp n2)
- | NC_not_equal (n1, n2) -> doc_op (string "!=") (doc_nexp n1) (doc_nexp n2)
- | NC_bounded_ge(n1,n2) -> doc_op (string ">=") (doc_nexp n1) (doc_nexp n2)
- | NC_bounded_le(n1,n2) -> doc_op (string "<=") (doc_nexp n1) (doc_nexp n2)
- | NC_nat_set_bounded(v,bounds) ->
- doc_op (string "IN") (doc_var v)
- (braces (separate_map comma_sp doc_int bounds))
- | NC_or (nc1, nc2) ->
- parens (separate space [doc_nexp_constraint nc1; string "|"; doc_nexp_constraint nc2])
- | NC_and (nc1, nc2) ->
- separate space [doc_nexp_constraint nc1; string "&"; doc_nexp_constraint nc2]
-
let doc_qi (QI_aux(qi,_)) = match qi with
| QI_const n_const -> doc_nexp_constraint n_const
| QI_id(KOpt_aux(ki,_)) ->
@@ -295,7 +282,7 @@ let doc_exp, doc_let =
let cases = separate_map (break 1) doc_case pexps in
surround 2 1 opening cases rbrace
| E_sizeof n ->
- separate space [string "sizeof"; doc_nexp n]
+ parens (separate space [string "sizeof"; doc_nexp n])
| E_constraint nc ->
string "constraint" ^^ parens (doc_nexp_constraint nc)
| E_exit e ->
diff --git a/src/process_file.ml b/src/process_file.ml
index 95562969..6d4bcea0 100644
--- a/src/process_file.ml
+++ b/src/process_file.ml
@@ -40,6 +40,8 @@
(* SUCH DAMAGE. *)
(**************************************************************************)
+let opt_new_parser = ref false
+
type out_type =
| Lem_ast_out
| Lem_out of string option
@@ -71,7 +73,11 @@ let parse_file (f : string) : Parse_ast.defs =
close_in in_chan;
let lexbuf, in_chan = get_lexbuf f in
try
- let ast = Parser.file Lexer.token lexbuf in
+ let ast =
+ if !opt_new_parser
+ then Parser2.file Lexer2.token lexbuf
+ else Parser.file Lexer.token lexbuf
+ in
close_in in_chan; ast
with
| Parsing.Parse_error ->
@@ -90,7 +96,6 @@ let load_file order env f =
let ast = convert_ast order (parse_file f) in
Type_check.check env ast
-let opt_new_typecheck = ref false
let opt_just_check = ref false
let opt_ddump_tc_ast = ref false
let opt_ddump_rewrite_ast = ref None
diff --git a/src/process_file.mli b/src/process_file.mli
index d0dae469..cd867b0d 100644
--- a/src/process_file.mli
+++ b/src/process_file.mli
@@ -51,7 +51,7 @@ val rewrite_ast_ocaml : Type_check.tannot Ast.defs -> Type_check.tannot Ast.defs
val load_file_no_check : Ast.order -> string -> unit Ast.defs
val load_file : Ast.order -> Type_check.Env.t -> string -> Type_check.tannot Ast.defs * Type_check.Env.t
-val opt_new_typecheck : bool ref
+val opt_new_parser : bool ref
val opt_just_check : bool ref
val opt_ddump_tc_ast : bool ref
val opt_ddump_rewrite_ast : ((string * int) option) ref
@@ -73,3 +73,4 @@ val output :
files existed before. If it is set to [false] and an output file already exists,
the output file is only updated, if its content really changes. *)
val always_replace_files : bool ref
+
diff --git a/src/reporting_basic.ml b/src/reporting_basic.ml
index 5ff43208..69c5c084 100644
--- a/src/reporting_basic.ml
+++ b/src/reporting_basic.ml
@@ -220,5 +220,6 @@ let report_error e =
let (m1, verb_pos, pos_l, m2) = dest_err e in
(print_err_internal verb_pos false pos_l m1 m2; exit 1)
-
-
+let print_error e = (* like [report_error] above, but returns instead of calling [exit 1] *)
+ let (m1, verb_pos, pos_l, m2) = dest_err e in
+ print_err_internal verb_pos false pos_l m1 m2
diff --git a/src/reporting_basic.mli b/src/reporting_basic.mli
index 559be9d4..3d1cbe13 100644
--- a/src/reporting_basic.mli
+++ b/src/reporting_basic.mli
@@ -104,3 +104,4 @@ val err_typ_dual : Parse_ast.l -> Parse_ast.l -> string -> exn
raising a [Fatal_error] exception is recommended. *)
val report_error : error -> 'a
+val print_error : error -> unit
diff --git a/src/sail.ml b/src/sail.ml
index b08272cd..cf366d42 100644
--- a/src/sail.ml
+++ b/src/sail.ml
@@ -88,11 +88,11 @@ let options = Arg.align ([
| [fname;line;var] -> opt_mono_split := ((fname,int_of_string line),var)::!opt_mono_split
| _ -> raise (Arg.Bad (s ^ " not of form <filename>:<line>:<variable>"))),
"<filename>:<line>:<variable> to case split for monomorphisation");
- ( "-new_typecheck",
- Arg.Set opt_new_typecheck,
- " (experimental) use new typechecker with Z3 constraint solving");
+ ( "-new_parser",
+ Arg.Set Process_file.opt_new_parser,
+ " (experimental) use new parser");
( "-just_check",
- Arg.Tuple [Arg.Set opt_new_typecheck; Arg.Set opt_just_check],
+ Arg.Set opt_just_check,
" (experimental) terminate immediately after typechecking, implies -new_typecheck");
( "-ddump_tc_ast",
Arg.Set opt_ddump_tc_ast,
diff --git a/src/spec_analysis.ml b/src/spec_analysis.ml
index fdd56ecc..1475da7a 100644
--- a/src/spec_analysis.ml
+++ b/src/spec_analysis.ml
@@ -238,7 +238,7 @@ let rec fv_of_typ consider_var bound used (Typ_aux (t,_)) : Nameset.t =
| Typ_fn(arg,ret,_) -> fv_of_typ consider_var bound (fv_of_typ consider_var bound used arg) ret
| Typ_tup ts -> List.fold_right (fun t n -> fv_of_typ consider_var bound n t) ts used
| Typ_app(id,targs) ->
- List.fold_right (fun ta n -> fv_of_targ consider_var bound n ta) targs (conditional_add_typ bound used id)
+ List.fold_right (fun ta n -> fv_of_targ consider_var bound n ta) targs (conditional_add_typ bound used id)
and fv_of_targ consider_var bound used (Ast.Typ_arg_aux(targ,_)) : Nameset.t = match targ with
| Typ_arg_typ t -> fv_of_typ consider_var bound used t
diff --git a/src/test/pattern.sail b/src/test/pattern.sail
index cabe7dad..97e5a0ef 100644
--- a/src/test/pattern.sail
+++ b/src/test/pattern.sail
@@ -4,9 +4,9 @@ register nat x
register nat y
typedef wordsize = forall Nat 'n, 'n IN {8,16,32}. [|'n|]
-let forall Nat 'n. (wordsize<'n>) word = 8
+(* let forall Nat 'n. (wordsize<'n>) word = 8 *)
-function nat main _ = {
+function nat main () = {
(* works - x and y are set to 42 *)
n := 1;
diff --git a/src/type_check.ml b/src/type_check.ml
index cc222ac7..e69a40e8 100644
--- a/src/type_check.ml
+++ b/src/type_check.ml
@@ -50,6 +50,11 @@ let opt_tc_debug = ref 0
let opt_no_effects = ref false
let depth = ref 0
+let rec take n xs = match n, xs with (* first [n] elements of [xs]; fewer if [xs] is shorter. NOTE(review): negative [n] returns all of [xs] — confirm callers never pass n < 0 *)
+  | 0, _ -> []
+  | n, [] -> []
+  | n, (x :: xs) -> x :: take (n - 1) xs
+
let rec indent n = match n with
| 0 -> ""
| n -> "| " ^ indent (n - 1)
@@ -82,6 +87,7 @@ let unaux_typ (Typ_aux (typ, _)) = typ
let mk_typ typ = Typ_aux (typ, Parse_ast.Unknown)
let mk_typ_arg arg = Typ_arg_aux (arg, Parse_ast.Unknown)
let mk_id str = Id_aux (Id str, Parse_ast.Unknown)
+let mk_kid str = Kid_aux (Var ("'" ^ str), Parse_ast.Unknown)
let mk_infix_id str = Id_aux (DeIid str, Parse_ast.Unknown)
let mk_id_typ id = Typ_aux (Typ_id id, Parse_ast.Unknown)
@@ -91,22 +97,55 @@ let dec_ord = Ord_aux (Ord_dec, Parse_ast.Unknown)
let mk_ord ord_aux = Ord_aux (ord_aux, Parse_ast.Unknown)
+let rec nexp_simp (Nexp_aux (nexp, l)) = Nexp_aux (nexp_simp_aux nexp, l)
+and nexp_simp_aux = function (* constant-fold sums, products and differences bottom-up; all other nexp forms are returned unchanged *)
+  | Nexp_sum (n1, n2) ->
+    begin
+      let (Nexp_aux (n1_simp, _) as n1) = nexp_simp n1 in
+      let (Nexp_aux (n2_simp, _) as n2) = nexp_simp n2 in
+      match n1_simp, n2_simp with
+      | Nexp_constant c1, Nexp_constant c2 -> Nexp_constant (c1 + c2)
+      | _, Nexp_neg n2 -> Nexp_minus (n1, n2) (* n1 + (- n2) ==> n1 - n2 *)
+      | _, _ -> Nexp_sum (n1, n2)
+    end
+  | Nexp_times (n1, n2) ->
+    begin
+      let (Nexp_aux (n1_simp, _) as n1) = nexp_simp n1 in
+      let (Nexp_aux (n2_simp, _) as n2) = nexp_simp n2 in
+      match n1_simp, n2_simp with
+      | Nexp_constant c1, Nexp_constant c2 -> Nexp_constant (c1 * c2)
+      | _, _ -> Nexp_times (n1, n2)
+    end
+  | Nexp_minus (n1, n2) ->
+    begin
+      let (Nexp_aux (n1_simp, _) as n1) = nexp_simp n1 in
+      let (Nexp_aux (n2_simp, _) as n2) = nexp_simp n2 in
+      typ_debug ("SIMP: " ^ string_of_nexp n1 ^ " - " ^ string_of_nexp n2); (* NOTE(review): debug tracing left in a pure simplifier, and only for the minus case — consider removing *)
+      match n1_simp, n2_simp with
+      | Nexp_constant c1, Nexp_constant c2 -> Nexp_constant (c1 - c2)
+      | _, _ -> Nexp_minus (n1, n2)
+    end
+  | nexp -> nexp
+
let int_typ = mk_id_typ (mk_id "int")
let nat_typ = mk_id_typ (mk_id "nat")
let unit_typ = mk_id_typ (mk_id "unit")
let bit_typ = mk_id_typ (mk_id "bit")
let real_typ = mk_id_typ (mk_id "real")
let app_typ id args = mk_typ (Typ_app (id, args))
-let atom_typ nexp = mk_typ (Typ_app (mk_id "atom", [mk_typ_arg (Typ_arg_nexp nexp)]))
-let range_typ nexp1 nexp2 = mk_typ (Typ_app (mk_id "range", [mk_typ_arg (Typ_arg_nexp nexp1); mk_typ_arg (Typ_arg_nexp nexp2)]))
+let atom_typ nexp =
+ mk_typ (Typ_app (mk_id "atom", [mk_typ_arg (Typ_arg_nexp (nexp_simp nexp))]))
+let range_typ nexp1 nexp2 =
+ mk_typ (Typ_app (mk_id "range", [mk_typ_arg (Typ_arg_nexp (nexp_simp nexp1));
+ mk_typ_arg (Typ_arg_nexp (nexp_simp nexp2))]))
let bool_typ = mk_id_typ (mk_id "bool")
let string_typ = mk_id_typ (mk_id "string")
let list_typ typ = mk_typ (Typ_app (mk_id "list", [mk_typ_arg (Typ_arg_typ typ)]))
let vector_typ n m ord typ =
mk_typ (Typ_app (mk_id "vector",
- [mk_typ_arg (Typ_arg_nexp n);
- mk_typ_arg (Typ_arg_nexp m);
+ [mk_typ_arg (Typ_arg_nexp (nexp_simp n));
+ mk_typ_arg (Typ_arg_nexp (nexp_simp m));
mk_typ_arg (Typ_arg_order ord);
mk_typ_arg (Typ_arg_typ typ)]))
@@ -145,7 +184,6 @@ let nc_false = mk_nc NC_false
let mk_lit l = E_aux (E_lit (L_aux (l, Parse_ast.Unknown)), (Parse_ast.Unknown, ()))
-(* FIXME: Can now negate all n_constraints *)
let rec nc_negate (NC_aux (nc, _)) =
match nc with
| NC_bounded_ge (n1, n2) -> nc_lt n1 n2
@@ -202,6 +240,18 @@ let quant_items : typquant -> quant_item list = function
| TypQ_aux (TypQ_tq qis, _) -> qis
| TypQ_aux (TypQ_no_forall, _) -> []
+let quant_split typq = (* split a typquant into its kinded-id binders and its constraints, preserving order *)
+  let qi_kopt = function
+    | QI_aux (QI_id kopt, _) -> [kopt]
+    | _ -> []
+  in
+  let qi_nc = function
+    | QI_aux (QI_const nc, _) -> [nc]
+    | _ -> []
+  in
+  let qis = quant_items typq in
+  List.concat (List.map qi_kopt qis), List.concat (List.map qi_nc qis)
+
let kopt_kid (KOpt_aux (kopt_aux, _)) =
match kopt_aux with
| KOpt_none kid | KOpt_kind (_, kid) -> kid
@@ -261,6 +311,8 @@ and typ_subst_nexp_aux sv subst = function
| Typ_fn (typ1, typ2, effs) -> Typ_fn (typ_subst_nexp sv subst typ1, typ_subst_nexp sv subst typ2, effs)
| Typ_tup typs -> Typ_tup (List.map (typ_subst_nexp sv subst) typs)
| Typ_app (f, args) -> Typ_app (f, List.map (typ_subst_arg_nexp sv subst) args)
+ | Typ_exist (kids, nc, typ) when KidSet.mem sv (KidSet.of_list kids) -> Typ_exist (kids, nc, typ)
+ | Typ_exist (kids, nc, typ) -> Typ_exist (kids, nc_subst_nexp sv subst nc, typ_subst_nexp sv subst typ)
and typ_subst_arg_nexp sv subst (Typ_arg_aux (arg, l)) = Typ_arg_aux (typ_subst_arg_nexp_aux sv subst arg, l)
and typ_subst_arg_nexp_aux sv subst = function
| Typ_arg_nexp nexp -> Typ_arg_nexp (nexp_subst sv subst nexp)
@@ -276,6 +328,7 @@ and typ_subst_typ_aux sv subst = function
| Typ_fn (typ1, typ2, effs) -> Typ_fn (typ_subst_typ sv subst typ1, typ_subst_typ sv subst typ2, effs)
| Typ_tup typs -> Typ_tup (List.map (typ_subst_typ sv subst) typs)
| Typ_app (f, args) -> Typ_app (f, List.map (typ_subst_arg_typ sv subst) args)
+ | Typ_exist (kids, nc, typ) -> Typ_exist (kids, nc, typ_subst_typ sv subst typ)
and typ_subst_arg_typ sv subst (Typ_arg_aux (arg, l)) = Typ_arg_aux (typ_subst_arg_typ_aux sv subst arg, l)
and typ_subst_arg_typ_aux sv subst = function
| Typ_arg_nexp nexp -> Typ_arg_nexp nexp
@@ -298,6 +351,7 @@ and typ_subst_order_aux sv subst = function
| Typ_fn (typ1, typ2, effs) -> Typ_fn (typ_subst_order sv subst typ1, typ_subst_order sv subst typ2, effs)
| Typ_tup typs -> Typ_tup (List.map (typ_subst_order sv subst) typs)
| Typ_app (f, args) -> Typ_app (f, List.map (typ_subst_arg_order sv subst) args)
+ | Typ_exist (kids, nc, typ) -> Typ_exist (kids, nc, typ_subst_order sv subst typ)
and typ_subst_arg_order sv subst (Typ_arg_aux (arg, l)) = Typ_arg_aux (typ_subst_arg_order_aux sv subst arg, l)
and typ_subst_arg_order_aux sv subst = function
| Typ_arg_nexp nexp -> Typ_arg_nexp nexp
@@ -313,6 +367,8 @@ and typ_subst_kid_aux sv subst = function
| Typ_fn (typ1, typ2, effs) -> Typ_fn (typ_subst_kid sv subst typ1, typ_subst_kid sv subst typ2, effs)
| Typ_tup typs -> Typ_tup (List.map (typ_subst_kid sv subst) typs)
| Typ_app (f, args) -> Typ_app (f, List.map (typ_subst_arg_kid sv subst) args)
+ | Typ_exist (kids, nc, typ) when KidSet.mem sv (KidSet.of_list kids) -> Typ_exist (kids, nc, typ)
+ | Typ_exist (kids, nc, typ) -> Typ_exist (kids, nc_subst_nexp sv (Nexp_var subst) nc, typ_subst_kid sv subst typ)
and typ_subst_arg_kid sv subst (Typ_arg_aux (arg, l)) = Typ_arg_aux (typ_subst_arg_kid_aux sv subst arg, l)
and typ_subst_arg_kid_aux sv subst = function
| Typ_arg_nexp nexp -> Typ_arg_nexp (nexp_subst sv (Nexp_var subst) nexp)
@@ -327,36 +383,6 @@ let quant_item_subst_kid_aux sv subst = function
if Kid.compare kid sv = 0 then QI_id (KOpt_aux (KOpt_kind (k, subst), l)) else qid
| QI_const nc -> QI_const (nc_subst_nexp sv (Nexp_var subst) nc)
-let rec nexp_simp (Nexp_aux (nexp, l)) = Nexp_aux (nexp_simp_aux nexp, l)
-and nexp_simp_aux = function
- | Nexp_sum (n1, n2) ->
- begin
- let (Nexp_aux (n1_simp, _) as n1) = nexp_simp n1 in
- let (Nexp_aux (n2_simp, _) as n2) = nexp_simp n2 in
- match n1_simp, n2_simp with
- | Nexp_constant c1, Nexp_constant c2 -> Nexp_constant (c1 + c2)
- | _, Nexp_neg n2 -> Nexp_minus (n1, n2)
- | _, _ -> Nexp_sum (n1, n2)
- end
- | Nexp_times (n1, n2) ->
- begin
- let (Nexp_aux (n1_simp, _) as n1) = nexp_simp n1 in
- let (Nexp_aux (n2_simp, _) as n2) = nexp_simp n2 in
- match n1_simp, n2_simp with
- | Nexp_constant c1, Nexp_constant c2 -> Nexp_constant (c1 * c2)
- | _, _ -> Nexp_times (n1, n2)
- end
- | Nexp_minus (n1, n2) ->
- begin
- let (Nexp_aux (n1_simp, _) as n1) = nexp_simp n1 in
- let (Nexp_aux (n2_simp, _) as n2) = nexp_simp n2 in
- typ_debug ("SIMP: " ^ string_of_nexp n1 ^ " - " ^ string_of_nexp n2);
- match n1_simp, n2_simp with
- | Nexp_constant c1, Nexp_constant c2 -> Nexp_constant (c1 - c2)
- | _, _ -> Nexp_minus (n1, n2)
- end
- | nexp -> nexp
-
let quant_item_subst_kid sv subst (QI_aux (quant, l)) = QI_aux (quant_item_subst_kid_aux sv subst quant, l)
let typquant_subst_kid_aux sv subst = function
@@ -400,8 +426,8 @@ module Env : sig
val add_typ_var : kid -> base_kind_aux -> t -> t
val get_ret_typ : t -> typ option
val add_ret_typ : typ -> t -> t
- val add_typ_synonym : id -> (typ_arg list -> typ) -> t -> t
- val get_typ_synonym : id -> t -> typ_arg list -> typ
+ val add_typ_synonym : id -> (t -> typ_arg list -> typ) -> t -> t
+ val get_typ_synonym : id -> t -> t -> typ_arg list -> typ
val add_overloads : id -> id list -> t -> t
val get_overloads : id -> t -> id list
val is_extern : id -> t -> bool
@@ -431,7 +457,7 @@ end = struct
regtyps : (int * int * (index_range * id) list) Bindings.t;
variants : (typquant * type_union list) Bindings.t;
typ_vars : base_kind_aux KBindings.t;
- typ_synonyms : (typ_arg list -> typ) Bindings.t;
+ typ_synonyms : (t -> typ_arg list -> typ) Bindings.t;
overloads : (id list) Bindings.t;
flow : (typ -> typ) Bindings.t;
enums : IdSet.t Bindings.t;
@@ -537,26 +563,30 @@ end = struct
type error if the type is badly formed. FIXME: Add arity to type
constructors, although arity checking for the builtin types does
seem to be done by the initial ast check. *)
- let rec wf_typ env (Typ_aux (typ_aux, l)) =
+ let rec wf_typ ?exs:(exs=KidSet.empty) env (Typ_aux (typ_aux, l)) =
match typ_aux with
| Typ_wild -> ()
| Typ_id id when bound_typ_id env id -> ()
| Typ_id id -> typ_error l ("Undefined type " ^ string_of_id id)
| Typ_var kid when KBindings.mem kid env.typ_vars -> ()
| Typ_var kid -> typ_error l ("Unbound kind identifier " ^ string_of_kid kid)
- | Typ_fn (typ_arg, typ_ret, effs) -> wf_typ env typ_arg; wf_typ env typ_ret
- | Typ_tup typs -> List.iter (wf_typ env) typs
- | Typ_app (id, args) when bound_typ_id env id -> List.iter (wf_typ_arg env) args
+ | Typ_fn (typ_arg, typ_ret, effs) -> wf_typ ~exs:exs env typ_arg; wf_typ ~exs:exs env typ_ret
+ | Typ_tup typs -> List.iter (wf_typ ~exs:exs env) typs
+ | Typ_app (id, args) when bound_typ_id env id -> List.iter (wf_typ_arg ~exs:exs env) args
| Typ_app (id, _) -> typ_error l ("Undefined type " ^ string_of_id id)
- and wf_typ_arg env (Typ_arg_aux (typ_arg_aux, _)) =
+ | Typ_exist ([], _, _) -> typ_error l ("Existential must have some type variables")
+ | Typ_exist (kids, nc, typ) when KidSet.is_empty exs -> wf_typ ~exs:(KidSet.of_list kids) env typ
+ | Typ_exist (_, _, _) -> typ_error l ("Nested existentials are not allowed")
+ and wf_typ_arg ?exs:(exs=KidSet.empty) env (Typ_arg_aux (typ_arg_aux, _)) =
match typ_arg_aux with
- | Typ_arg_nexp nexp -> wf_nexp env nexp
- | Typ_arg_typ typ -> wf_typ env typ
+ | Typ_arg_nexp nexp -> wf_nexp ~exs:exs env nexp
+ | Typ_arg_typ typ -> wf_typ ~exs:exs env typ
| Typ_arg_order ord -> wf_order env ord
| Typ_arg_effect _ -> () (* Check: is this ever used? *)
- and wf_nexp env (Nexp_aux (nexp_aux, l)) =
+ and wf_nexp ?exs:(exs=KidSet.empty) env (Nexp_aux (nexp_aux, l)) =
match nexp_aux with
- | Nexp_id _ -> typ_error l "Unimplemented: Nexp_id"
+ | Nexp_id _ -> ()
+ | Nexp_var kid when KidSet.mem kid exs -> ()
| Nexp_var kid ->
begin
match get_typ_var kid env with
@@ -566,11 +596,11 @@ end = struct
^ string_of_base_kind_aux kind ^ " but should have kind Nat")
end
| Nexp_constant _ -> ()
- | Nexp_times (nexp1, nexp2) -> wf_nexp env nexp1; wf_nexp env nexp2
- | Nexp_sum (nexp1, nexp2) -> wf_nexp env nexp1; wf_nexp env nexp2
- | Nexp_minus (nexp1, nexp2) -> wf_nexp env nexp1; wf_nexp env nexp2
- | Nexp_exp nexp -> wf_nexp env nexp (* MAYBE: Could put restrictions on what is allowed here *)
- | Nexp_neg nexp -> wf_nexp env nexp
+ | Nexp_times (nexp1, nexp2) -> wf_nexp ~exs:exs env nexp1; wf_nexp ~exs:exs env nexp2
+ | Nexp_sum (nexp1, nexp2) -> wf_nexp ~exs:exs env nexp1; wf_nexp ~exs:exs env nexp2
+ | Nexp_minus (nexp1, nexp2) -> wf_nexp ~exs:exs env nexp1; wf_nexp ~exs:exs env nexp2
+ | Nexp_exp nexp -> wf_nexp ~exs:exs env nexp (* MAYBE: Could put restrictions on what is allowed here *)
+ | Nexp_neg nexp -> wf_nexp ~exs:exs env nexp
and wf_order env (Ord_aux (ord_aux, l)) =
match ord_aux with
| Ord_var kid ->
@@ -840,7 +870,7 @@ end = struct
begin
try
let synonym = Bindings.find id env.typ_synonyms in
- expand_synonyms env (synonym args)
+ expand_synonyms env (synonym env args)
with
| Not_found -> Typ_aux (Typ_app (id, List.map (expand_synonyms_arg env) args), l)
end
@@ -848,7 +878,7 @@ end = struct
begin
try
let synonym = Bindings.find id env.typ_synonyms in
- expand_synonyms env (synonym [])
+ expand_synonyms env (synonym env [])
with
| Not_found -> Typ_aux (Typ_id id, l)
end
@@ -926,12 +956,52 @@ let lvector_typ env l typ =
let initial_env =
Env.empty
- |> Env.add_typ_synonym (mk_id "atom") (fun args -> mk_typ (Typ_app (mk_id "range", args @ args)))
+ |> Env.add_typ_synonym (mk_id "atom") (fun _ args -> mk_typ (Typ_app (mk_id "range", args @ args)))
+
+let ex_counter = ref 0 (* global counter so every opened existential gets globally fresh kids ('ex0, 'ex1, ...) *)
+
+let fresh_existential () =
+  let fresh = Kid_aux (Var ("'ex" ^ string_of_int !ex_counter), Parse_ast.Unknown) in
+  incr ex_counter; fresh
+
+let destructure_exist env typ = (* open an existential: alpha-rename its bound kids to fresh ones in both the constraint and the body; None if typ (after synonym expansion) is not existential *)
+  match Env.expand_synonyms env typ with
+  | Typ_aux (Typ_exist (kids, nc, typ), _) ->
+     let fresh_kids = List.map (fun kid -> (kid, fresh_existential ())) kids in
+     let nc = List.fold_left (fun nc (kid, fresh) -> nc_subst_nexp kid (Nexp_var fresh) nc) nc fresh_kids in
+     let typ = List.fold_left (fun typ (kid, fresh) -> typ_subst_nexp kid (Nexp_var fresh) typ) typ fresh_kids in
+     Some (List.map snd fresh_kids, nc, typ)
+  | _ -> None
+
+let is_exist = function (* purely syntactic check: does not expand synonyms first *)
+  | Typ_aux (Typ_exist (_, _, _), _) -> true
+  | _ -> false
+
+let exist_typ constr typ = (* build a one-variable existential {'ex. constr 'ex. typ 'ex} with a fresh bound kid *)
+  let fresh_kid = fresh_existential () in
+  mk_typ (Typ_exist ([fresh_kid], constr fresh_kid, typ fresh_kid))
+
+let destruct_vector_typ env typ = (* Some (n1, n2, order, element typ) if typ, after synonym expansion, is an application of "vector" with exactly the four standard arguments; None otherwise *)
+  let destruct_vector_typ' = function
+    | Typ_aux (Typ_app (id, [Typ_arg_aux (Typ_arg_nexp n1, _);
+                             Typ_arg_aux (Typ_arg_nexp n2, _);
+                             Typ_arg_aux (Typ_arg_order o, _);
+                             Typ_arg_aux (Typ_arg_typ vtyp, _)]
+                       ), _) when string_of_id id = "vector" -> Some (n1, n2, o, vtyp)
+    | typ -> None
+  in
+  destruct_vector_typ' (Env.expand_synonyms env typ)
(**************************************************************************)
(* 3. Subtyping and constraint solving *)
(**************************************************************************)
+let rec simp_typ (Typ_aux (typ_aux, l)) = Typ_aux (simp_typ_aux typ_aux, l)
+and simp_typ_aux = function (* flatten one directly-nested existential into a single binder with conjoined constraints; NOTE(review): not applied recursively below the top constructor *)
+  | Typ_exist (kids1, nc1, Typ_aux (Typ_exist (kids2, nc2, typ), _)) ->
+     Typ_exist (kids1 @ kids2, nc_and nc1 nc2, typ)
+  | typ_aux -> typ_aux
+
let order_eq (Ord_aux (ord_aux1, _)) (Ord_aux (ord_aux2, _)) =
match ord_aux1, ord_aux2 with
| Ord_inc, Ord_inc -> true
@@ -980,7 +1050,7 @@ let rec normalize_typ env (Typ_aux (typ, l)) =
let kid = Env.fresh_kid env in Tnf_index_sort (IS_prop (kid, [(nconstant 0, nvar kid)]))
| Typ_id v ->
begin
- try normalize_typ env (Env.get_typ_synonym v env []) with
+ try normalize_typ env (Env.get_typ_synonym v env env []) with
| Not_found -> Tnf_id v
end
| Typ_var kid -> Tnf_var kid
@@ -993,9 +1063,10 @@ let rec normalize_typ env (Typ_aux (typ, l)) =
Tnf_app (vector, List.map (normalize_typ_arg env) args)
| Typ_app (id, args) ->
begin
- try normalize_typ env (Env.get_typ_synonym id env args) with
+ try normalize_typ env (Env.get_typ_synonym id env env args) with
| Not_found -> Tnf_app (id, List.map (normalize_typ_arg env) args)
end
+ | Typ_exist (kids, nc, typ) -> typ_error l "Cannot normalise existential type"
| Typ_fn _ -> typ_error l ("Cannot normalize function type " ^ string_of_typ (Typ_aux (typ, l)))
and normalize_typ_arg env (Typ_arg_aux (typ_arg, _)) =
match typ_arg with
@@ -1153,39 +1224,54 @@ and tnf_args_eq env arg1 arg2 =
| Tnf_arg_typ tnf1, Tnf_arg_typ tnf2 -> subtyp_tnf env tnf1 tnf2 && subtyp_tnf env tnf2 tnf1
| _, _ -> assert false
-let subtyp l env typ1 typ2 =
- if subtyp_tnf env (normalize_typ env typ1) (normalize_typ env typ2)
- then ()
- else typ_error l (string_of_typ typ1
- ^ " is not a subtype of " ^ string_of_typ typ2
- ^ " in context " ^ string_of_list ", " string_of_n_constraint (Env.get_constraints env))
-
-let typ_equality l env typ1 typ2 =
- subtyp l env typ1 typ2; subtyp l env typ2 typ1
-
(**************************************************************************)
(* 4. Unification *)
(**************************************************************************)
+let rec nexp_frees ?exs:(exs=KidSet.empty) (Nexp_aux (nexp, l)) = (* free kids of a nexp; kids in [exs] are existentially bound and therefore not free *)
+  match nexp with
+  | Nexp_id _ -> KidSet.empty
+  | Nexp_var kid -> if KidSet.mem kid exs then KidSet.empty else KidSet.singleton kid (* fix: honour [exs] — it was threaded through every call but never consulted, so typ_frees reported existentially bound kids as free *)
+  | Nexp_constant _ -> KidSet.empty
+  | Nexp_times (n1, n2) -> KidSet.union (nexp_frees ~exs:exs n1) (nexp_frees ~exs:exs n2)
+  | Nexp_sum (n1, n2) -> KidSet.union (nexp_frees ~exs:exs n1) (nexp_frees ~exs:exs n2)
+  | Nexp_minus (n1, n2) -> KidSet.union (nexp_frees ~exs:exs n1) (nexp_frees ~exs:exs n2)
+  | Nexp_exp n -> nexp_frees ~exs:exs n
+  | Nexp_neg n -> nexp_frees ~exs:exs n
+
let order_frees (Ord_aux (ord_aux, l)) =
match ord_aux with
| Ord_var kid -> KidSet.singleton kid
| _ -> KidSet.empty
-let rec typ_frees (Typ_aux (typ_aux, l)) =
+let rec typ_frees ?exs:(exs=KidSet.empty) (Typ_aux (typ_aux, l)) =
match typ_aux with
| Typ_wild -> KidSet.empty
| Typ_id v -> KidSet.empty
+ | Typ_var kid when KidSet.mem kid exs -> KidSet.empty
| Typ_var kid -> KidSet.singleton kid
- | Typ_tup typs -> List.fold_left KidSet.union KidSet.empty (List.map typ_frees typs)
- | Typ_app (f, args) -> List.fold_left KidSet.union KidSet.empty (List.map typ_arg_frees args)
-and typ_arg_frees (Typ_arg_aux (typ_arg_aux, l)) =
+ | Typ_tup typs -> List.fold_left KidSet.union KidSet.empty (List.map (typ_frees ~exs:exs) typs)
+ | Typ_app (f, args) -> List.fold_left KidSet.union KidSet.empty (List.map (typ_arg_frees ~exs:exs) args)
+ | Typ_exist (kids, nc, typ) -> typ_frees ~exs:(KidSet.of_list kids) typ
+and typ_arg_frees ?exs:(exs=KidSet.empty) (Typ_arg_aux (typ_arg_aux, l)) =
match typ_arg_aux with
- | Typ_arg_nexp n -> nexp_frees n
- | Typ_arg_typ typ -> typ_frees typ
+ | Typ_arg_nexp n -> nexp_frees ~exs:exs n
+ | Typ_arg_typ typ -> typ_frees ~exs:exs typ
| Typ_arg_order ord -> order_frees ord
| Typ_arg_effect _ -> assert false
+let rec nexp_identical (Nexp_aux (nexp1, _)) (Nexp_aux (nexp2, _)) = (* strict syntactic equality, ignoring locations; no normalisation, so e.g. 1 + 'n and 'n + 1 are not identical *)
+  match nexp1, nexp2 with
+  | Nexp_id v1, Nexp_id v2 -> Id.compare v1 v2 = 0
+  | Nexp_var kid1, Nexp_var kid2 -> Kid.compare kid1 kid2 = 0
+  | Nexp_constant c1, Nexp_constant c2 -> c1 = c2
+  | Nexp_times (n1a, n1b), Nexp_times (n2a, n2b) -> nexp_identical n1a n2a && nexp_identical n1b n2b
+  | Nexp_sum (n1a, n1b), Nexp_sum (n2a, n2b) -> nexp_identical n1a n2a && nexp_identical n1b n2b
+  | Nexp_minus (n1a, n1b), Nexp_minus (n2a, n2b) -> nexp_identical n1a n2a && nexp_identical n1b n2b
+  | Nexp_exp n1, Nexp_exp n2 -> nexp_identical n1 n2
+  | Nexp_neg n1, Nexp_neg n2 -> nexp_identical n1 n2
+  | _, _ -> false
+
let ord_identical (Ord_aux (ord1, _)) (Ord_aux (ord2, _)) =
match ord1, ord2 with
| Ord_var kid1, Ord_var kid2 -> Kid.compare kid1 kid2 = 0
@@ -1264,6 +1350,13 @@ let rec unify_nexps l env goals (Nexp_aux (nexp_aux1, _) as nexp1) (Nexp_aux (ne
match nexp_aux2 with
| Nexp_times (n2a, n2b) when prove env (NC_aux (NC_fixed (n1a, n2a), Parse_ast.Unknown)) ->
unify_nexps l env goals n1b n2b
+    | Nexp_constant c2 ->
+       begin
+         match n1a with
+         | Nexp_aux (Nexp_constant c1,_) when c1 <> 0 && c2 mod c1 = 0 -> (* fix: guard c1 <> 0 so a zero coefficient reports a unification error instead of raising Division_by_zero *)
+            unify_nexps l env goals n1b (mk_nexp (Nexp_constant (c2 / c1)))
+         | _ -> unify_error l ("Cannot unify Nat expression " ^ string_of_nexp nexp1 ^ " with " ^ string_of_nexp nexp2)
+       end
| _ -> unify_error l ("Cannot unify Nat expression " ^ string_of_nexp nexp1 ^ " with " ^ string_of_nexp nexp2)
end
else if KidSet.is_empty (nexp_frees n1b)
@@ -1322,7 +1415,7 @@ let merge_unifiers l kid uvar1 uvar2 =
| Some u1, None -> Some u1
| None, None -> None
-let unify l env typ1 typ2 =
+let rec unify l env typ1 typ2 =
typ_print ("Unify " ^ string_of_typ typ1 ^ " with " ^ string_of_typ typ2);
let goals = KidSet.inter (KidSet.diff (typ_frees typ1) (typ_frees typ2)) (typ_frees typ1) in
let rec unify_typ l (Typ_aux (typ1_aux, _) as typ1) (Typ_aux (typ2_aux, _) as typ2) =
@@ -1385,8 +1478,15 @@ let unify l env typ1 typ2 =
| Typ_arg_effect _, Typ_arg_effect _ -> assert false
| _, _ -> unify_error l (string_of_typ_arg typ_arg1 ^ " cannot be unified with type argument " ^ string_of_typ_arg typ_arg2)
in
- let typ1, typ2 = Env.expand_synonyms env typ1, Env.expand_synonyms env typ2 in
- unify_typ l typ1 typ2
+ match destructure_exist env typ2 with
+ | Some (kids, nc, typ2) ->
+ let typ1, typ2 = Env.expand_synonyms env typ1, Env.expand_synonyms env typ2 in
+ let (unifiers, _, _) = unify l env typ1 typ2 in
+ typ_debug (string_of_list ", " (fun (kid, uvar) -> string_of_kid kid ^ " => " ^ string_of_uvar uvar) (KBindings.bindings unifiers));
+ unifiers, kids, Some nc
+ | None ->
+ let typ1, typ2 = Env.expand_synonyms env typ1, Env.expand_synonyms env typ2 in
+ unify_typ l typ1 typ2, [], None
let merge_uvars l unifiers1 unifiers2 =
try KBindings.merge (merge_unifiers l) unifiers1 unifiers2
@@ -1394,6 +1494,56 @@ let merge_uvars l unifiers1 unifiers2 =
| Unification_error (_, m) -> typ_error l ("Could not merge unification variables: " ^ m)
(**************************************************************************)
+(* 4.5. Subtyping with existentials *)
+(**************************************************************************)
+
+let nc_subst_uvar kid uvar nc =
+ match uvar with
+ | U_nexp nexp -> nc_subst_nexp kid (unaux_nexp nexp) nc
+ | _ -> nc
+
+let uv_nexp_constraint env (kid, uvar) =
+ match uvar with
+ | U_nexp nexp -> Env.add_constraint (nc_eq (nvar kid) nexp) env
+ | _ -> env
+
+let rec subtyp l env typ1 typ2 =
+ match destructure_exist env typ1, destructure_exist env typ2 with
+ | Some (kids, nc, typ1), _ ->
+ let env = List.fold_left (fun env kid -> Env.add_typ_var kid BK_nat env) env kids in
+ let env = Env.add_constraint nc env in
+ subtyp l env typ1 typ2
+ | _, Some (kids, nc, typ2) ->
+ let env = List.fold_left (fun env kid -> Env.add_typ_var kid BK_nat env) env kids in
+ let unifiers, existential_kids, existential_nc =
+ try unify l env typ2 typ1 with
+ | Unification_error (_, m) -> typ_error l m
+ in
+ let nc = List.fold_left (fun nc (kid, uvar) -> nc_subst_uvar kid uvar nc) nc (KBindings.bindings unifiers) in
+ let env = match existential_kids, existential_nc with
+ | [], None -> env
+ | _, Some enc ->
+ let env = List.fold_left (fun env kid -> Env.add_typ_var kid BK_nat env) env existential_kids in
+ let env = List.fold_left uv_nexp_constraint env (KBindings.bindings unifiers) in
+ Env.add_constraint enc env
+ in
+ if prove env nc then ()
+ else typ_error l ("Could not show " ^ string_of_typ typ1 ^ " is a subset of existential " ^ string_of_typ typ2)
+ | _, None ->
+ if subtyp_tnf env (normalize_typ env typ1) (normalize_typ env typ2)
+ then ()
+ else typ_error l (string_of_typ typ1
+ ^ " is not a subtype of " ^ string_of_typ typ2
+ ^ " in context " ^ string_of_list ", " string_of_n_constraint (Env.get_constraints env))
+
+let typ_equality l env typ1 typ2 =
+ subtyp l env typ1 typ2; subtyp l env typ2 typ1
+
+let subtype_check env typ1 typ2 =
+ try subtyp Parse_ast.Unknown env typ1 typ2; true with
+ | Type_error _ -> false
+
+(**************************************************************************)
(* 5. Type checking expressions *)
(**************************************************************************)
@@ -1556,6 +1706,8 @@ let rec assert_constraint (E_aux (exp_aux, l)) =
nc_and (assert_constraint x) (assert_constraint y)
| E_app_infix (x, op, y) when string_of_id op = "==" ->
nc_eq (assert_nexp x) (assert_nexp y)
+ | E_app_infix (x, op, y) when string_of_id op = ">=" ->
+ nc_gteq (assert_nexp x) (assert_nexp y)
| _ -> nc_true
type flow_constraint =
@@ -1621,6 +1773,8 @@ let rec match_typ env typ1 typ2 =
let Typ_aux (typ1_aux, _) = Env.expand_synonyms env typ1 in
let Typ_aux (typ2_aux, _) = Env.expand_synonyms env typ2 in
match typ1_aux, typ2_aux with
+ | Typ_exist (_, _, typ1), _ -> match_typ env typ1 typ2
+ | _, Typ_exist (_, _, typ2) -> match_typ env typ1 typ2
| Typ_wild, Typ_wild -> true
| _, Typ_var kid2 -> true
| Typ_id v1, Typ_id v2 when Id.compare v1 v2 = 0 -> true
@@ -1766,6 +1920,12 @@ let rec check_exp env (E_aux (exp_aux, (l, ())) as exp : unit exp) (Typ_aux (typ
end
in
try_overload (Env.get_overloads f env)
+ | E_return exp, _ ->
+ let checked_exp = match Env.get_ret_typ env with
+ | Some ret_typ -> crule check_exp env exp ret_typ
+ | None -> typ_error l "Cannot use return outside a function"
+ in
+ annot_exp (E_return checked_exp) typ
| E_app (f, xs), _ ->
let inferred_exp = infer_funapp l env f xs (Some typ) in
type_coercion env inferred_exp typ
@@ -1829,7 +1989,7 @@ and type_coercion env (E_aux (_, (l, _)) as annotated_exp) typ =
in
begin
try
- typ_debug "PERFORMING TYPE COERCION";
+ typ_debug ("PERFORMING TYPE COERCION: from " ^ string_of_typ (typ_of annotated_exp) ^ " to " ^ string_of_typ typ);
subtyp l env (typ_of annotated_exp) typ; annotated_exp
with
| Type_error (_, m) when Env.allow_casts env ->
@@ -1949,7 +2109,7 @@ and bind_pat env (P_aux (pat_aux, (l, ())) as pat) (Typ_aux (typ_aux, _) as typ)
begin
try
typ_debug ("Unifying " ^ string_of_bind (typq, ctor_typ) ^ " for pattern " ^ string_of_typ typ);
- let unifiers = unify l env ret_typ typ in
+ let unifiers, _, _ (* FIXME! *) = unify l env ret_typ typ in
typ_debug (string_of_list ", " (fun (kid, uvar) -> string_of_kid kid ^ " => " ^ string_of_uvar uvar) (KBindings.bindings unifiers));
let arg_typ' = subst_unifiers unifiers arg_typ in
let quants' = List.fold_left (fun qs (kid, uvar) -> instantiate_quants qs kid uvar) quants (KBindings.bindings unifiers) in
@@ -2078,7 +2238,7 @@ and bind_assignment env (LEXP_aux (lexp_aux, _) as lexp) (E_aux (_, (l, ())) as
| Typ_aux (Typ_id rectyp_id, _) | Typ_aux (Typ_app (rectyp_id, _), _) when Env.is_record rectyp_id env ->
let eff = if is_register then mk_effect [BE_wreg] else no_effect in
let (typq, Typ_aux (Typ_fn (rectyp_q, field_typ, _), _)) = Env.get_accessor rectyp_id field env in
- let unifiers = try unify l env rectyp_q regtyp with Unification_error (l, m) -> typ_error l ("Unification error: " ^ m) in
+ let unifiers, _, _ (* FIXME *) = try unify l env rectyp_q regtyp with Unification_error (l, m) -> typ_error l ("Unification error: " ^ m) in
let field_typ' = subst_unifiers unifiers field_typ in
let checked_exp = crule check_exp env exp field_typ' in
annot_assign (annot_lexp (LEXP_field (annot_lexp_effect inferred_flexp regtyp eff, field)) field_typ') checked_exp, env
@@ -2124,7 +2284,7 @@ and bind_lexp env (LEXP_aux (lexp_aux, (l, ())) as lexp) typ =
begin
subtyp l env typ typ_annot;
subtyp l env typ_annot vtyp;
- annot_lexp (LEXP_cast (typ_annot, v)) typ, env
+ annot_lexp (LEXP_cast (typ_annot, v)) typ, Env.add_local v (Mutable, typ_annot) env
end
| Register vtyp ->
begin
@@ -2200,7 +2360,16 @@ and bind_lexp env (LEXP_aux (lexp_aux, (l, ())) as lexp) typ =
annot_lexp (LEXP_vector (annot_lexp (LEXP_id v) vtyp, inferred_exp)) typ, env
| _ -> typ_error l ("Bad vector assignment: " ^ string_of_lexp lexp)
end
- | _ -> typ_error l ("Unhandled l-expression")
+ | LEXP_field (LEXP_aux (LEXP_id v, _), fid) ->
+ (* FIXME: will only work for ASL *)
+ let rec_id =
+ match Env.lookup_id v env with
+ | Register (Typ_aux (Typ_id rec_id, _)) -> rec_id
+ | _ -> typ_error l (string_of_lexp lexp ^ " must be a record register here")
+ in
+ let typq, (Typ_aux (Typ_fn (_, ret_typ, _), _)) = Env.get_accessor rec_id fid env in
+ annot_lexp_effect (LEXP_field (annot_lexp (LEXP_id v) (mk_id_typ rec_id), fid)) ret_typ (mk_effect [BE_wreg]), env
+ | _ -> typ_error l ("Unhandled l-expression " ^ string_of_lexp lexp)
and infer_exp env (E_aux (exp_aux, (l, ())) as exp) =
let annot_exp_effect exp typ eff = E_aux (exp, (l, Some (env, typ, eff))) in
@@ -2223,12 +2392,6 @@ and infer_exp env (E_aux (exp_aux, (l, ())) as exp) =
| E_sizeof nexp -> annot_exp (E_sizeof nexp) (mk_typ (Typ_app (mk_id "atom", [mk_typ_arg (Typ_arg_nexp nexp)])))
| E_constraint nc ->
annot_exp (E_constraint nc) bool_typ
- | E_return exp ->
- begin
- match Env.get_ret_typ env with
- | Some typ -> annot_exp (E_return (crule check_exp env exp typ)) (mk_typ (Typ_id (mk_id "unit")))
- | None -> typ_error l "Return found in non-function environment"
- end
| E_field (exp, field) ->
begin
let inferred_exp = irule infer_exp env exp in
@@ -2305,8 +2468,17 @@ and infer_exp env (E_aux (exp_aux, (l, ())) as exp) =
let checked_body = crule check_exp (Env.add_local v (Immutable, loop_vtyp) env) body unit_typ in
annot_exp (E_for (v, inferred_f, inferred_t, checked_step, ord, checked_body)) unit_typ *)
| Some (l1, l2), Some (u1, u2) when prove env (nc_lteq l2 u1) ->
- let checked_body = crule check_exp (Env.add_local v (Immutable, range_typ l1 u2) env) body unit_typ in
- annot_exp (E_for (v, inferred_f, inferred_t, checked_step, ord, checked_body)) unit_typ
+ let kid = mk_kid ("loop_" ^ string_of_id v) in
+ if KBindings.mem kid (Env.get_typ_vars env)
+ then typ_error l "Nested loop variables cannot have the same name"
+ else
+ begin
+ let env = Env.add_typ_var kid BK_nat env in
+ let env = Env.add_constraint (nc_and (nc_lteq l1 (nvar kid)) (nc_lteq (nvar kid) u2)) env in
+ let loop_vtyp = atom_typ (nvar kid) in
+ let checked_body = crule check_exp (Env.add_local v (Immutable, loop_vtyp) env) body unit_typ in
+ annot_exp (E_for (v, inferred_f, inferred_t, checked_step, ord, checked_body)) unit_typ
+ end
| _, _ -> typ_error l "Ranges in foreach overlap"
end
| E_if (cond, then_branch, else_branch) ->
@@ -2354,88 +2526,134 @@ and instantiation_of (E_aux (exp_aux, (l, _)) as exp) =
and infer_funapp' l env f (typq, f_typ) xs ret_ctx_typ =
let annot_exp exp typ eff = E_aux (exp, (l, Some (env, typ, eff))) in
let all_unifiers = ref KBindings.empty in
+ let ex_goal = ref None in
+ let prove_goal env = match !ex_goal with
+ | Some goal when prove env goal -> ()
+ | Some goal -> typ_error l ("Could not prove existential goal: " ^ string_of_n_constraint goal)
+ | None -> ()
+ in
+ let universals = Env.get_typ_vars env in
+ let universal_constraints = Env.get_constraints env in
+ let is_bound kid env = KBindings.mem kid (Env.get_typ_vars env) in
let rec number n = function
| [] -> []
| (x :: xs) -> (n, x) :: number (n + 1) xs
in
- let solve_quant = function
+ let solve_quant env = function
| QI_aux (QI_id _, _) -> false
| QI_aux (QI_const nc, _) -> prove env nc
in
- let rec instantiate quants typs ret_typ args =
+ let rec instantiate env quants typs ret_typ args =
match typs, args with
| (utyps, []), (uargs, []) ->
begin
typ_debug ("Got unresolved args: " ^ string_of_list ", " (fun (_, exp) -> string_of_exp exp) uargs);
- if List.for_all solve_quant quants
+ if List.for_all (solve_quant env) quants
then
let iuargs = List.map2 (fun utyp (n, uarg) -> (n, crule check_exp env uarg utyp)) utyps uargs in
- (iuargs, ret_typ)
+ (iuargs, ret_typ, env)
else typ_error l ("Quantifiers " ^ string_of_list ", " string_of_quant_item quants
^ " not resolved during application of " ^ string_of_id f)
end
- | (utyps, (typ :: typs)), (uargs, ((n, arg) :: args)) when KidSet.is_empty (typ_frees typ) ->
+ | (utyps, (typ :: typs)), (uargs, ((n, arg) :: args))
+ when List.for_all (fun kid -> is_bound kid env) (KidSet.elements (typ_frees typ)) ->
begin
let carg = crule check_exp env arg typ in
- let (iargs, ret_typ') = instantiate quants (utyps, typs) ret_typ (uargs, args) in
- ((n, carg) :: iargs, ret_typ')
+ let (iargs, ret_typ', env) = instantiate env quants (utyps, typs) ret_typ (uargs, args) in
+ ((n, carg) :: iargs, ret_typ', env)
end
| (utyps, (typ :: typs)), (uargs, ((n, arg) :: args)) ->
begin
- typ_debug ("INSTANTIATE: " ^ string_of_exp arg ^ " with " ^ string_of_typ typ ^ " NF " ^ string_of_tnf (normalize_typ env typ));
+ typ_debug ("INSTANTIATE: " ^ string_of_exp arg ^ " with " ^ string_of_typ typ);
let iarg = irule infer_exp env arg in
- typ_debug ("INFER: " ^ string_of_exp arg ^ " type " ^ string_of_typ (typ_of iarg) ^ " NF " ^ string_of_tnf (normalize_typ env (typ_of iarg)));
+ typ_debug ("INFER: " ^ string_of_exp arg ^ " type " ^ string_of_typ (typ_of iarg));
try
- let iarg, unifiers = type_coercion_unify env iarg typ in
+ let iarg, (unifiers, ex_kids, ex_nc) (* FIXME *) = type_coercion_unify env iarg typ in
typ_debug (string_of_list ", " (fun (kid, uvar) -> string_of_kid kid ^ " => " ^ string_of_uvar uvar) (KBindings.bindings unifiers));
+ typ_debug ("EX KIDS: " ^ string_of_list ", " string_of_kid ex_kids);
+ let env = match ex_kids, ex_nc with
+ | [], None -> env
+ | _, Some enc ->
+ let env = List.fold_left (fun env kid -> Env.add_typ_var kid BK_nat env) env ex_kids in
+ Env.add_constraint enc env
+ in
all_unifiers := merge_uvars l !all_unifiers unifiers;
let utyps' = List.map (subst_unifiers unifiers) utyps in
let typs' = List.map (subst_unifiers unifiers) typs in
let quants' = List.fold_left (fun qs (kid, uvar) -> instantiate_quants qs kid uvar) quants (KBindings.bindings unifiers) in
let ret_typ' = subst_unifiers unifiers ret_typ in
- let (iargs, ret_typ'') = instantiate quants' (utyps', typs') ret_typ' (uargs, args) in
- ((n, iarg) :: iargs, ret_typ'')
+ let (iargs, ret_typ'', env) = instantiate env quants' (utyps', typs') ret_typ' (uargs, args) in
+ ((n, iarg) :: iargs, ret_typ'', env)
with
| Unification_error (l, str) ->
typ_debug ("Unification error: " ^ str);
- instantiate quants (typ :: utyps, typs) ret_typ ((n, arg) :: uargs, args)
+ instantiate env quants (typ :: utyps, typs) ret_typ ((n, arg) :: uargs, args)
end
| (_, []), _ -> typ_error l ("Function " ^ string_of_id f ^ " applied to too many arguments")
| _, (_, []) -> typ_error l ("Function " ^ string_of_id f ^ " not applied to enough arguments")
in
- let instantiate_ret quants typs ret_typ =
+ let instantiate_ret env quants typs ret_typ =
match ret_ctx_typ with
- | None -> (quants, typs, ret_typ)
+ | None -> (quants, typs, ret_typ, env)
+ | Some rct when is_exist (Env.expand_synonyms env rct) -> (quants, typs, ret_typ, env)
| Some rct ->
begin
typ_debug ("RCT is " ^ string_of_typ rct);
typ_debug ("INSTANTIATE RETURN:" ^ string_of_typ ret_typ);
- let unifiers = try unify l env ret_typ rct with Unification_error _ -> typ_debug "UERROR"; KBindings.empty in
+ let unifiers, ex_kids, ex_nc =
+ try unify l env ret_typ rct with
+ | Unification_error _ -> typ_debug "UERROR"; KBindings.empty, [], None
+ in
typ_debug (string_of_list ", " (fun (kid, uvar) -> string_of_kid kid ^ " => " ^ string_of_uvar uvar) (KBindings.bindings unifiers));
+ if ex_kids = [] then () else (typ_debug ("EX GOAL: " ^ string_of_option string_of_n_constraint ex_nc); ex_goal := ex_nc);
all_unifiers := merge_uvars l !all_unifiers unifiers;
+ let env = List.fold_left (fun env kid -> Env.add_typ_var kid BK_nat env) env ex_kids in
let typs' = List.map (subst_unifiers unifiers) typs in
let quants' = List.fold_left (fun qs (kid, uvar) -> instantiate_quants qs kid uvar) quants (KBindings.bindings unifiers) in
- let ret_typ' = subst_unifiers unifiers ret_typ in
- (quants', typs', ret_typ')
+ let ret_typ' =
+ match ex_nc with
+ | None -> subst_unifiers unifiers ret_typ
+ | Some nc -> mk_typ (Typ_exist (ex_kids, nc, subst_unifiers unifiers ret_typ))
+ in
+ (quants', typs', ret_typ', env)
end
in
- let exp =
+ let (quants, typ_args, typ_ret, env), eff =
match Env.expand_synonyms env f_typ with
| Typ_aux (Typ_fn (Typ_aux (Typ_tup typ_args, _), typ_ret, eff), _) ->
- let (quants, typ_args, typ_ret) = instantiate_ret (quant_items typq) typ_args typ_ret in
- let (xs_instantiated, typ_ret) = instantiate quants ([], typ_args) typ_ret ([], number 0 xs) in
- let xs_reordered = List.map snd (List.sort (fun (n, _) (m, _) -> compare n m) xs_instantiated) in
- annot_exp (E_app (f, xs_reordered)) typ_ret eff
+ instantiate_ret env (quant_items typq) typ_args typ_ret, eff
| Typ_aux (Typ_fn (typ_arg, typ_ret, eff), _) ->
- let (quants, typ_args, typ_ret) = instantiate_ret (quant_items typq) [typ_arg] typ_ret in
- let (xs_instantiated, typ_ret) = instantiate quants ([], typ_args) typ_ret ([], number 0 xs) in
- let xs_reordered = List.map snd (List.sort (fun (n, _) (m, _) -> compare n m) xs_instantiated) in
- annot_exp (E_app (f, xs_reordered)) typ_ret eff
+ instantiate_ret env (quant_items typq) [typ_arg] typ_ret, eff
| _ -> typ_error l (string_of_typ f_typ ^ " is not a function type")
in
+ let (xs_instantiated, typ_ret, env) = instantiate env quants ([], typ_args) typ_ret ([], number 0 xs) in
+ let xs_reordered = List.map snd (List.sort (fun (n, _) (m, _) -> compare n m) xs_instantiated) in
+
+ prove_goal env;
+
+ let ty_vars = List.map fst (KBindings.bindings (Env.get_typ_vars env)) in
+ let existentials = List.filter (fun kid -> not (KBindings.mem kid universals)) ty_vars in
+ let num_new_ncs = List.length (Env.get_constraints env) - List.length universal_constraints in
+ let ex_constraints = take num_new_ncs (Env.get_constraints env) in
+
+ typ_debug ("Existentials: " ^ string_of_list ", " string_of_kid existentials);
+ typ_debug ("Existential constraints: " ^ string_of_list ", " string_of_n_constraint ex_constraints);
+
+ let typ_ret =
+ if KidSet.is_empty (KidSet.of_list existentials)
+ then (typ_debug "Returning Existential"; typ_ret)
+ else mk_typ (Typ_exist (existentials, List.fold_left nc_and nc_true ex_constraints, typ_ret))
+ in
+ let typ_ret = simp_typ typ_ret in
+ let exp = annot_exp (E_app (f, xs_reordered)) typ_ret eff in
+ typ_debug ("RETURNING: " ^ string_of_typ (typ_of exp));
match ret_ctx_typ with
- | None -> exp, !all_unifiers
- | Some rct -> type_coercion env exp rct, !all_unifiers
+ | None ->
+ exp, !all_unifiers
+ | Some rct ->
+ let exp = type_coercion env exp rct in
+ typ_debug ("RETURNING AFTER COERCION " ^ string_of_typ (typ_of exp));
+ exp, !all_unifiers
(**************************************************************************)
(* 6. Effect system *)
@@ -2717,7 +2935,18 @@ let check_tannotopt typq ret_typ = function
else typ_error l (string_of_bind (typq, ret_typ) ^ " and " ^ string_of_bind (annot_typq, annot_ret_typ) ^ " do not match between function and val spec")
let check_fundef env (FD_aux (FD_function (recopt, tannotopt, effectopt, funcls), (l, _)) as fd_aux) =
- let id = id_of_fundef fd_aux in
+ let id =
+ match (List.fold_right
+ (fun (FCL_aux (FCL_Funcl (id, _, _), _)) id' ->
+ match id' with
+ | Some id' -> if string_of_id id' = string_of_id id then Some id'
+ else typ_error l ("Function declaration expects all definitions to have the same name, "
+ ^ string_of_id id ^ " differs from other definitions of " ^ string_of_id id')
+ | None -> Some id) funcls None)
+ with
+ | Some id -> id
+ | None -> typ_error l "funcl list is empty"
+ in
typ_print ("\nChecking function " ^ string_of_id id);
let have_val_spec, (quant, (Typ_aux (Typ_fn (vtyp_arg, vtyp_ret, declared_eff), vl) as typ)), env =
try true, Env.get_val_spec id env, env with
@@ -2808,11 +3037,35 @@ let check_type_union env variant typq (Tu_aux (tu, l)) =
|> Env.add_union_id v (typq, typ')
|> Env.add_val_spec v (typq, typ')
+let mk_synonym typq typ =
+ let kopts, ncs = quant_split typq in
+ let rec subst_args kopts args =
+ match kopts, args with
+ | kopt :: kopts, Typ_arg_aux (Typ_arg_nexp arg, _) :: args when is_nat_kopt kopt ->
+ let typ, ncs = subst_args kopts args in
+ typ_subst_nexp (kopt_kid kopt) (unaux_nexp arg) typ,
+ List.map (nc_subst_nexp (kopt_kid kopt) (unaux_nexp arg)) ncs
+ | kopt :: kopts, Typ_arg_aux (Typ_arg_typ arg, _) :: args when is_typ_kopt kopt ->
+ let typ, ncs = subst_args kopts args in
+ typ_subst_typ (kopt_kid kopt) (unaux_typ arg) typ, ncs
+ | kopt :: kopts, Typ_arg_aux (Typ_arg_order arg, _) :: args when is_order_kopt kopt ->
+ let typ, ncs = subst_args kopts args in
+ typ_subst_order (kopt_kid kopt) (unaux_order arg) typ, ncs
+ | [], [] -> typ, ncs
+ | _, Typ_arg_aux (_, l) :: _ -> typ_error l "Synonym applied to bad arguments"
+ | _, _ -> typ_error Parse_ast.Unknown "Synonym applied to bad arguments"
+ in
+ fun env args ->
+ let typ, ncs = subst_args kopts args in
+ if List.for_all (prove env) ncs
+ then typ
+ else typ_error Parse_ast.Unknown "Could not prove constraints in type synonym"
+
let check_typedef env (TD_aux (tdef, (l, _))) =
let td_err () = raise (Reporting_basic.err_unreachable Parse_ast.Unknown "Unimplemented Typedef") in
match tdef with
| TD_abbrev(id, nmscm, (TypSchm_aux (TypSchm_ts (typq, typ), _))) ->
- [DEF_type (TD_aux (tdef, (l, None)))], Env.add_typ_synonym id (fun _ -> typ) env
+ [DEF_type (TD_aux (tdef, (l, None)))], Env.add_typ_synonym id (mk_synonym typq typ) env
| TD_record(id, nmscm, typq, fields, _) ->
[DEF_type (TD_aux (tdef, (l, None)))], Env.add_record id typq fields env
| TD_variant(id, nmscm, typq, arms, _) ->
diff --git a/src/type_check.mli b/src/type_check.mli
index fe582568..0a85624c 100644
--- a/src/type_check.mli
+++ b/src/type_check.mli
@@ -97,7 +97,7 @@ module Env : sig
any exceptions. *)
val get_ret_typ : t -> typ option
- val get_typ_synonym : id -> t -> typ_arg list -> typ
+ val get_typ_synonym : id -> t -> (t -> typ_arg list -> typ)
val get_overloads : id -> t -> id list
@@ -157,6 +157,8 @@ val npow2 : nexp -> nexp
val nvar : kid -> nexp
val nid : id -> nexp
+val nc_gteq : nexp -> nexp -> n_constraint
+
(* Sail builtin types. *)
val int_typ : typ
val nat_typ : typ
@@ -169,6 +171,7 @@ val string_typ : typ
val real_typ : typ
val vector_typ : nexp -> nexp -> order -> typ -> typ
val list_typ : typ -> typ
+val exist_typ : (kid -> n_constraint) -> (kid -> typ) -> typ
val inc_ord : order
val dec_ord : order
@@ -193,6 +196,10 @@ val strip_pat : 'a pat -> unit pat
re-checked. *)
val check_exp : Env.t -> unit exp -> typ -> tannot exp
+val infer_exp : Env.t -> unit exp -> tannot exp
+
+val subtype_check : Env.t -> typ -> typ -> bool
+
(* Partial functions: The expressions and patterns passed to these
functions must be guaranteed to have tannots of the form Some (env,
typ) for these to work. *)
@@ -210,6 +217,10 @@ val pat_typ_of : tannot pat -> typ
val effect_of : tannot exp -> effect
val effect_of_annot : tannot -> effect
+val destructure_atom_nexp : typ -> nexp
+
+val destruct_vector_typ : Env.t -> typ -> (nexp * nexp * order * typ) option
+
type uvar =
| U_nexp of nexp
| U_order of order
diff --git a/src/util.ml b/src/util.ml
index d2d4eea7..75732376 100644
--- a/src/util.ml
+++ b/src/util.ml
@@ -343,3 +343,7 @@ let rec string_of_list sep string_of = function
| [] -> ""
| [x] -> string_of x
| x::ls -> (string_of x) ^ sep ^ (string_of_list sep string_of ls)
+
+let string_of_option string_of = function
+ | None -> ""
+ | Some x -> string_of x
diff --git a/src/util.mli b/src/util.mli
index cfd6a19e..aa442ada 100644
--- a/src/util.mli
+++ b/src/util.mli
@@ -205,4 +205,6 @@ module ExtraSet : functor (S : Set.S) ->
(*Formatting functions*)
val string_of_list : string -> ('a -> string) -> 'a list -> string
+val string_of_option : ('a -> string) -> 'a option -> string
+
val split_on_char : char -> string -> string list