| author | Brian Campbell | 2018-11-26 17:32:29 +0000 |
|---|---|---|
| committer | Brian Campbell | 2018-11-26 17:32:29 +0000 |
| commit | 20ed809845c4b62235d3cbe203ecaefead943ac8 | (patch) |
| tree | 9e0f6fd96e980e5f71954ff4795382a39cf7844b | /aarch64 |
| parent | 2e271d91c58a2d4db4adbb4c47d34bcbe1a6992e | (diff) |
Add random generators for record types
Diffstat (limited to 'aarch64')
| -rw-r--r-- | aarch64/duopod/aarch64_duopod.lem | 5742 |
1 file changed, 5742 insertions, 0 deletions
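The diff below is a single Sail-generated Lem file, so the same patterns repeat throughout: for each enumeration the generator emits `X_of_num` / `num_of_X` conversions together with an `undefined_X` function that calls `internal_pick` on the list of constructors, and for each record type it emits an `undefined_R` function that builds a record by invoking the appropriate generator for every field (e.g. `undefined_ProcState`, `undefined_FaultRecord`, `undefined_FullAddress`). The plain-OCaml sketch below approximates that generator pattern only; `internal_pick`, `undefined_vector` and the type names here are simplified stand-ins, not the actual Sail/Lem library definitions.

```ocaml
(* A minimal sketch, in plain OCaml, of the generator pattern visible in the
   generated Lem below.  internal_pick, undefined_vector and the type names
   are simplified stand-ins, not the real Sail/Lem library API. *)

type mem_type = MemType_Normal | MemType_Device

type full_address = {
  physicaladdress : bool list;  (* 52-bit physical address as a bit list *)
  ns              : bool list;  (* 1-bit non-secure flag *)
}

(* Stand-in for internal_pick: choose an arbitrary element of a list. *)
let internal_pick xs = List.nth xs (Random.int (List.length xs))

(* Enumeration generator: pick one of the constructors. *)
let undefined_mem_type () : mem_type =
  internal_pick [MemType_Normal; MemType_Device]

(* Bit-vector generator: a list of n arbitrary bits. *)
let undefined_vector n = List.init n (fun _ -> Random.bool ())

(* Record generator: fill every field with the generator for its type. *)
let undefined_full_address () : full_address =
  { physicaladdress = undefined_vector 52;
    ns              = undefined_vector 1 }

let () =
  Random.self_init ();
  let a = undefined_full_address () in
  Printf.printf "%s with %d address bits\n"
    (match undefined_mem_type () with
     | MemType_Normal -> "MemType_Normal"
     | MemType_Device -> "MemType_Device")
    (List.length a.physicaladdress)
```

In the generated file itself, record generators are composed in exactly this way, with each field produced by `undefined_bool`, `undefined_int`, `undefined_vector`, or the `undefined_*` generator of a nested record or enumeration; how `internal_pick` resolves its choice (randomly or otherwise) is left to the Sail support library.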
diff --git a/aarch64/duopod/aarch64_duopod.lem b/aarch64/duopod/aarch64_duopod.lem new file mode 100644 index 00000000..9243c5ee --- /dev/null +++ b/aarch64/duopod/aarch64_duopod.lem @@ -0,0 +1,5742 @@ +(*Generated by Sail from aarch64_duopod.*) +open import Pervasives_extra +open import Sail_instr_kinds +open import Sail_values +open import Sail_operators_bitlists +open import Prompt_monad +open import Prompt +open import State +open import Aarch64_duopod_types +open import Aarch64_extras_embed_sequential + + + +val vcons : forall 'a. 'a -> list 'a -> list 'a + + + + + + + +val builtin_and_vec : forall 'n. bits 'n -> bits 'n -> bits 'n + + + +val builtin_or_vec : forall 'n. bits 'n -> bits 'n -> bits 'n + + + +val __raw_SetSlice_int : forall 'w. integer -> ii -> ii -> bits 'w -> ii + +val __GetSlice_int : integer -> ii -> ii -> list bitU + +let __GetSlice_int n m o = (get_slice_int n m o : list bitU) + +val __raw_SetSlice_bits : forall 'n 'w. integer -> integer -> bits 'n -> ii -> bits 'w -> bits 'n + +val __raw_GetSlice_bits : forall 'n 'w. integer -> integer -> bits 'n -> ii -> bits 'w + +val cast_unit_vec : bitU -> list bitU + +let cast_unit_vec b = match b with | B0 -> [B0] | B1 -> [B1] end + +val DecStr : ii -> string + +val HexStr : ii -> string + +val __TraceMemoryWrite : forall 'int8_times_n 'm. integer -> bits 'm -> bits 'int8_times_n -> unit + +val __InitRAM : integer -> ii -> list bitU -> list bitU -> unit + +let __InitRAM g__289 g__290 g__291 g__292 = () + +val __TraceMemoryRead : forall 'int8_times_n 'm. integer -> bits 'm -> bits 'int8_times_n -> unit + +val ex_nat : ii -> integer + +let ex_nat n = n + +val ex_int : ii -> integer + +let ex_int n = n + +val ex_range : integer -> integer + +val coerce_int_nat : ii -> M ii + +let coerce_int_nat x = + assert_exp true "Cannot coerce int to nat" >> + return x + +val break : unit -> unit + +let break () = () + +val undefined_exception : unit -> exception + +let undefined_exception () = + internal_pick + [Error_Undefined;Error_See (undefined_string ());Error_Implementation_Defined (undefined_string + ());Error_ReservedEncoding] + +val boolean_of_num : integer -> boolean + +let boolean_of_num arg_ = + let l__280 = arg_ in + if (eq l__280 (0:ii)) + then FALSE + else TRUE + +val num_of_boolean : boolean -> integer + +let num_of_boolean arg_ = match arg_ with | FALSE -> (0:ii) | TRUE -> (1:ii) end + +val undefined_boolean : unit -> boolean + +let undefined_boolean () = internal_pick [FALSE;TRUE] + +val signal_of_num : integer -> signal + +let signal_of_num arg_ = + let l__279 = arg_ in + if (eq l__279 (0:ii)) + then LOW + else HIGH + +val num_of_signal : signal -> integer + +let num_of_signal arg_ = match arg_ with | LOW -> (0:ii) | HIGH -> (1:ii) end + +val undefined_signal : unit -> signal + +let undefined_signal () = internal_pick [LOW;HIGH] + +val __RetCode_of_num : integer -> __RetCode + +let __RetCode_of_num arg_ = + let l__271 = arg_ in + if (eq l__271 (0:ii)) + then __RC_OK + else + if (eq l__271 (1:ii)) + then __RC_UNDEFINED + else + if (eq l__271 (2:ii)) + then __RC_UNPREDICTABLE + else + if (eq l__271 (3:ii)) + then __RC_SEE + else + if (eq l__271 (4:ii)) + then __RC_IMPLEMENTATION_DEFINED + else + if (eq l__271 (5:ii)) + then __RC_SUBARCHITECTURE_DEFINED + else + if (eq l__271 (6:ii)) + then __RC_EXCEPTION_TAKEN + else if (eq l__271 (7:ii)) then __RC_ASSERT_FAILED else __RC_UNMATCHED_CASE + +val num_of___RetCode : __RetCode -> integer + +let num_of___RetCode arg_ = + match arg_ with + | __RC_OK -> (0:ii) + | __RC_UNDEFINED 
-> (1:ii) + | __RC_UNPREDICTABLE -> (2:ii) + | __RC_SEE -> (3:ii) + | __RC_IMPLEMENTATION_DEFINED -> (4:ii) + | __RC_SUBARCHITECTURE_DEFINED -> (5:ii) + | __RC_EXCEPTION_TAKEN -> (6:ii) + | __RC_ASSERT_FAILED -> (7:ii) + | __RC_UNMATCHED_CASE -> (8:ii) + end + +val undefined___RetCode : unit -> __RetCode + +let undefined___RetCode () = + internal_pick + [__RC_OK;__RC_UNDEFINED;__RC_UNPREDICTABLE;__RC_SEE;__RC_IMPLEMENTATION_DEFINED;__RC_SUBARCHITECTURE_DEFINED;__RC_EXCEPTION_TAKEN;__RC_ASSERT_FAILED;__RC_UNMATCHED_CASE] + +val FPConvOp_of_num : integer -> FPConvOp + +let FPConvOp_of_num arg_ = + let l__267 = arg_ in + if (eq l__267 (0:ii)) + then FPConvOp_CVT_FtoI + else + if (eq l__267 (1:ii)) + then FPConvOp_CVT_ItoF + else + if (eq l__267 (2:ii)) + then FPConvOp_MOV_FtoI + else if (eq l__267 (3:ii)) then FPConvOp_MOV_ItoF else FPConvOp_CVT_FtoI_JS + +val num_of_FPConvOp : FPConvOp -> integer + +let num_of_FPConvOp arg_ = + match arg_ with + | FPConvOp_CVT_FtoI -> (0:ii) + | FPConvOp_CVT_ItoF -> (1:ii) + | FPConvOp_MOV_FtoI -> (2:ii) + | FPConvOp_MOV_ItoF -> (3:ii) + | FPConvOp_CVT_FtoI_JS -> (4:ii) + end + +val undefined_FPConvOp : unit -> FPConvOp + +let undefined_FPConvOp () = + internal_pick + [FPConvOp_CVT_FtoI;FPConvOp_CVT_ItoF;FPConvOp_MOV_FtoI;FPConvOp_MOV_ItoF;FPConvOp_CVT_FtoI_JS] + +val Exception_of_num : integer -> Exception + +let Exception_of_num arg_ = + let l__239 = arg_ in + if (eq l__239 (0:ii)) + then Exception_Uncategorized + else + if (eq l__239 (1:ii)) + then Exception_WFxTrap + else + if (eq l__239 (2:ii)) + then Exception_CP15RTTrap + else + if (eq l__239 (3:ii)) + then Exception_CP15RRTTrap + else + if (eq l__239 (4:ii)) + then Exception_CP14RTTrap + else + if (eq l__239 (5:ii)) + then Exception_CP14DTTrap + else + if (eq l__239 (6:ii)) + then Exception_AdvSIMDFPAccessTrap + else + if (eq l__239 (7:ii)) + then Exception_FPIDTrap + else + if (eq l__239 (8:ii)) + then Exception_PACTrap + else + if (eq l__239 (9:ii)) + then Exception_CP14RRTTrap + else + if (eq l__239 (10:ii)) + then Exception_IllegalState + else + if (eq l__239 (11:ii)) + then Exception_SupervisorCall + else + if (eq l__239 (12:ii)) + then Exception_HypervisorCall + else + if (eq l__239 (13:ii)) + then Exception_MonitorCall + else + if (eq l__239 (14:ii)) + then Exception_SystemRegisterTrap + else + if (eq l__239 (15:ii)) + then Exception_ERetTrap + else + if (eq l__239 (16:ii)) + then Exception_InstructionAbort + else + if (eq l__239 (17:ii)) + then Exception_PCAlignment + else + if (eq l__239 (18:ii)) + then Exception_DataAbort + else + if (eq l__239 (19:ii)) + then Exception_SPAlignment + else + if (eq l__239 (20:ii)) + then Exception_FPTrappedException + else + if (eq l__239 (21:ii)) + then Exception_SError + else + if (eq l__239 (22:ii)) + then Exception_Breakpoint + else + if (eq l__239 (23:ii)) + then Exception_SoftwareStep + else + if (eq l__239 (24:ii)) + then Exception_Watchpoint + else + if (eq l__239 (25:ii)) + then Exception_SoftwareBreakpoint + else + if (eq l__239 (26:ii)) + then Exception_VectorCatch + else if (eq l__239 (27:ii)) then Exception_IRQ else Exception_FIQ + +val num_of_Exception : Exception -> integer + +let num_of_Exception arg_ = + match arg_ with + | Exception_Uncategorized -> (0:ii) + | Exception_WFxTrap -> (1:ii) + | Exception_CP15RTTrap -> (2:ii) + | Exception_CP15RRTTrap -> (3:ii) + | Exception_CP14RTTrap -> (4:ii) + | Exception_CP14DTTrap -> (5:ii) + | Exception_AdvSIMDFPAccessTrap -> (6:ii) + | Exception_FPIDTrap -> (7:ii) + | Exception_PACTrap -> (8:ii) + | 
Exception_CP14RRTTrap -> (9:ii) + | Exception_IllegalState -> (10:ii) + | Exception_SupervisorCall -> (11:ii) + | Exception_HypervisorCall -> (12:ii) + | Exception_MonitorCall -> (13:ii) + | Exception_SystemRegisterTrap -> (14:ii) + | Exception_ERetTrap -> (15:ii) + | Exception_InstructionAbort -> (16:ii) + | Exception_PCAlignment -> (17:ii) + | Exception_DataAbort -> (18:ii) + | Exception_SPAlignment -> (19:ii) + | Exception_FPTrappedException -> (20:ii) + | Exception_SError -> (21:ii) + | Exception_Breakpoint -> (22:ii) + | Exception_SoftwareStep -> (23:ii) + | Exception_Watchpoint -> (24:ii) + | Exception_SoftwareBreakpoint -> (25:ii) + | Exception_VectorCatch -> (26:ii) + | Exception_IRQ -> (27:ii) + | Exception_FIQ -> (28:ii) + end + +val undefined_Exception : unit -> Exception + +let undefined_Exception () = + internal_pick + [Exception_Uncategorized;Exception_WFxTrap;Exception_CP15RTTrap;Exception_CP15RRTTrap;Exception_CP14RTTrap;Exception_CP14DTTrap;Exception_AdvSIMDFPAccessTrap;Exception_FPIDTrap;Exception_PACTrap;Exception_CP14RRTTrap;Exception_IllegalState;Exception_SupervisorCall;Exception_HypervisorCall;Exception_MonitorCall;Exception_SystemRegisterTrap;Exception_ERetTrap;Exception_InstructionAbort;Exception_PCAlignment;Exception_DataAbort;Exception_SPAlignment;Exception_FPTrappedException;Exception_SError;Exception_Breakpoint;Exception_SoftwareStep;Exception_Watchpoint;Exception_SoftwareBreakpoint;Exception_VectorCatch;Exception_IRQ;Exception_FIQ] + +val ArchVersion_of_num : integer -> ArchVersion + +let ArchVersion_of_num arg_ = + let l__236 = arg_ in + if (eq l__236 (0:ii)) + then ARMv8p0 + else if (eq l__236 (1:ii)) then ARMv8p1 else if (eq l__236 (2:ii)) then ARMv8p2 else ARMv8p3 + +val num_of_ArchVersion : ArchVersion -> integer + +let num_of_ArchVersion arg_ = + match arg_ with + | ARMv8p0 -> (0:ii) + | ARMv8p1 -> (1:ii) + | ARMv8p2 -> (2:ii) + | ARMv8p3 -> (3:ii) + end + +val undefined_ArchVersion : unit -> ArchVersion + +let undefined_ArchVersion () = internal_pick [ARMv8p0;ARMv8p1;ARMv8p2;ARMv8p3] + +val Unpredictable_of_num : integer -> Unpredictable + +let Unpredictable_of_num arg_ = + let l__192 = arg_ in + if (eq l__192 (0:ii)) + then Unpredictable_WBOVERLAPLD + else + if (eq l__192 (1:ii)) + then Unpredictable_WBOVERLAPST + else + if (eq l__192 (2:ii)) + then Unpredictable_LDPOVERLAP + else + if (eq l__192 (3:ii)) + then Unpredictable_BASEOVERLAP + else + if (eq l__192 (4:ii)) + then Unpredictable_DATAOVERLAP + else + if (eq l__192 (5:ii)) + then Unpredictable_DEVPAGE2 + else + if (eq l__192 (6:ii)) + then Unpredictable_INSTRDEVICE + else + if (eq l__192 (7:ii)) + then Unpredictable_RESCPACR + else + if (eq l__192 (8:ii)) + then Unpredictable_RESMAIR + else + if (eq l__192 (9:ii)) + then Unpredictable_RESTEXCB + else + if (eq l__192 (10:ii)) + then Unpredictable_RESPRRR + else + if (eq l__192 (11:ii)) + then Unpredictable_RESDACR + else + if (eq l__192 (12:ii)) + then Unpredictable_RESVTCRS + else + if (eq l__192 (13:ii)) + then Unpredictable_RESTnSZ + else + if (eq l__192 (14:ii)) + then Unpredictable_OORTnSZ + else + if (eq l__192 (15:ii)) + then Unpredictable_LARGEIPA + else + if (eq l__192 (16:ii)) + then Unpredictable_ESRCONDPASS + else + if (eq l__192 (17:ii)) + then Unpredictable_ILZEROIT + else + if (eq l__192 (18:ii)) + then Unpredictable_ILZEROT + else + if (eq l__192 (19:ii)) + then Unpredictable_BPVECTORCATCHPRI + else + if (eq l__192 (20:ii)) + then Unpredictable_VCMATCHHALF + else + if (eq l__192 (21:ii)) + then Unpredictable_VCMATCHDAPA + else + 
if (eq l__192 (22:ii)) + then Unpredictable_WPMASKANDBAS + else + if (eq l__192 (23:ii)) + then Unpredictable_WPBASCONTIGUOUS + else + if (eq l__192 (24:ii)) + then Unpredictable_RESWPMASK + else + if (eq l__192 (25:ii)) + then Unpredictable_WPMASKEDBITS + else + if (eq l__192 (26:ii)) + then Unpredictable_RESBPWPCTRL + else + if (eq l__192 (27:ii)) + then Unpredictable_BPNOTIMPL + else + if (eq l__192 (28:ii)) + then Unpredictable_RESBPTYPE + else + if (eq l__192 (29:ii)) + then Unpredictable_BPNOTCTXCMP + else + if (eq l__192 (30:ii)) + then Unpredictable_BPMATCHHALF + else + if (eq l__192 (31:ii)) + then Unpredictable_BPMISMATCHHALF + else + if (eq l__192 (32:ii)) + then Unpredictable_RESTARTALIGNPC + else + if (eq l__192 (33:ii)) + then Unpredictable_RESTARTZEROUPPERPC + else + if (eq l__192 (34:ii)) + then Unpredictable_ZEROUPPER + else + if (eq l__192 (35:ii)) + then Unpredictable_ERETZEROUPPERPC + else + if (eq l__192 (36:ii)) + then Unpredictable_A32FORCEALIGNPC + else + if (eq l__192 (37:ii)) + then Unpredictable_SMD + else + if (eq l__192 (38:ii)) + then Unpredictable_AFUPDATE + else + if (eq l__192 (39:ii)) + then Unpredictable_IESBinDebug + else + if (eq l__192 (40:ii)) + then Unpredictable_ZEROPMSEVFR + else + if (eq l__192 (41:ii)) + then Unpredictable_NOOPTYPES + else + if (eq l__192 (42:ii)) + then Unpredictable_ZEROMINLATENCY + else if (eq l__192 (43:ii)) then Unpredictable_CLEARERRITEZERO else Unpredictable_TBD + +val num_of_Unpredictable : Unpredictable -> integer + +let num_of_Unpredictable arg_ = + match arg_ with + | Unpredictable_WBOVERLAPLD -> (0:ii) + | Unpredictable_WBOVERLAPST -> (1:ii) + | Unpredictable_LDPOVERLAP -> (2:ii) + | Unpredictable_BASEOVERLAP -> (3:ii) + | Unpredictable_DATAOVERLAP -> (4:ii) + | Unpredictable_DEVPAGE2 -> (5:ii) + | Unpredictable_INSTRDEVICE -> (6:ii) + | Unpredictable_RESCPACR -> (7:ii) + | Unpredictable_RESMAIR -> (8:ii) + | Unpredictable_RESTEXCB -> (9:ii) + | Unpredictable_RESPRRR -> (10:ii) + | Unpredictable_RESDACR -> (11:ii) + | Unpredictable_RESVTCRS -> (12:ii) + | Unpredictable_RESTnSZ -> (13:ii) + | Unpredictable_OORTnSZ -> (14:ii) + | Unpredictable_LARGEIPA -> (15:ii) + | Unpredictable_ESRCONDPASS -> (16:ii) + | Unpredictable_ILZEROIT -> (17:ii) + | Unpredictable_ILZEROT -> (18:ii) + | Unpredictable_BPVECTORCATCHPRI -> (19:ii) + | Unpredictable_VCMATCHHALF -> (20:ii) + | Unpredictable_VCMATCHDAPA -> (21:ii) + | Unpredictable_WPMASKANDBAS -> (22:ii) + | Unpredictable_WPBASCONTIGUOUS -> (23:ii) + | Unpredictable_RESWPMASK -> (24:ii) + | Unpredictable_WPMASKEDBITS -> (25:ii) + | Unpredictable_RESBPWPCTRL -> (26:ii) + | Unpredictable_BPNOTIMPL -> (27:ii) + | Unpredictable_RESBPTYPE -> (28:ii) + | Unpredictable_BPNOTCTXCMP -> (29:ii) + | Unpredictable_BPMATCHHALF -> (30:ii) + | Unpredictable_BPMISMATCHHALF -> (31:ii) + | Unpredictable_RESTARTALIGNPC -> (32:ii) + | Unpredictable_RESTARTZEROUPPERPC -> (33:ii) + | Unpredictable_ZEROUPPER -> (34:ii) + | Unpredictable_ERETZEROUPPERPC -> (35:ii) + | Unpredictable_A32FORCEALIGNPC -> (36:ii) + | Unpredictable_SMD -> (37:ii) + | Unpredictable_AFUPDATE -> (38:ii) + | Unpredictable_IESBinDebug -> (39:ii) + | Unpredictable_ZEROPMSEVFR -> (40:ii) + | Unpredictable_NOOPTYPES -> (41:ii) + | Unpredictable_ZEROMINLATENCY -> (42:ii) + | Unpredictable_CLEARERRITEZERO -> (43:ii) + | Unpredictable_TBD -> (44:ii) + end + +val undefined_Unpredictable : unit -> Unpredictable + +let undefined_Unpredictable () = + internal_pick + 
[Unpredictable_WBOVERLAPLD;Unpredictable_WBOVERLAPST;Unpredictable_LDPOVERLAP;Unpredictable_BASEOVERLAP;Unpredictable_DATAOVERLAP;Unpredictable_DEVPAGE2;Unpredictable_INSTRDEVICE;Unpredictable_RESCPACR;Unpredictable_RESMAIR;Unpredictable_RESTEXCB;Unpredictable_RESPRRR;Unpredictable_RESDACR;Unpredictable_RESVTCRS;Unpredictable_RESTnSZ;Unpredictable_OORTnSZ;Unpredictable_LARGEIPA;Unpredictable_ESRCONDPASS;Unpredictable_ILZEROIT;Unpredictable_ILZEROT;Unpredictable_BPVECTORCATCHPRI;Unpredictable_VCMATCHHALF;Unpredictable_VCMATCHDAPA;Unpredictable_WPMASKANDBAS;Unpredictable_WPBASCONTIGUOUS;Unpredictable_RESWPMASK;Unpredictable_WPMASKEDBITS;Unpredictable_RESBPWPCTRL;Unpredictable_BPNOTIMPL;Unpredictable_RESBPTYPE;Unpredictable_BPNOTCTXCMP;Unpredictable_BPMATCHHALF;Unpredictable_BPMISMATCHHALF;Unpredictable_RESTARTALIGNPC;Unpredictable_RESTARTZEROUPPERPC;Unpredictable_ZEROUPPER;Unpredictable_ERETZEROUPPERPC;Unpredictable_A32FORCEALIGNPC;Unpredictable_SMD;Unpredictable_AFUPDATE;Unpredictable_IESBinDebug;Unpredictable_ZEROPMSEVFR;Unpredictable_NOOPTYPES;Unpredictable_ZEROMINLATENCY;Unpredictable_CLEARERRITEZERO;Unpredictable_TBD] + +val Constraint_of_num : integer -> Constraint + +let Constraint_of_num arg_ = + let l__178 = arg_ in + if (eq l__178 (0:ii)) + then Constraint_NONE + else + if (eq l__178 (1:ii)) + then Constraint_UNKNOWN + else + if (eq l__178 (2:ii)) + then Constraint_UNDEF + else + if (eq l__178 (3:ii)) + then Constraint_UNDEFEL0 + else + if (eq l__178 (4:ii)) + then Constraint_NOP + else + if (eq l__178 (5:ii)) + then Constraint_TRUE + else + if (eq l__178 (6:ii)) + then Constraint_FALSE + else + if (eq l__178 (7:ii)) + then Constraint_DISABLED + else + if (eq l__178 (8:ii)) + then Constraint_UNCOND + else + if (eq l__178 (9:ii)) + then Constraint_COND + else + if (eq l__178 (10:ii)) + then Constraint_ADDITIONAL_DECODE + else + if (eq l__178 (11:ii)) + then Constraint_WBSUPPRESS + else + if (eq l__178 (12:ii)) + then Constraint_FAULT + else if (eq l__178 (13:ii)) then Constraint_FORCE else Constraint_FORCENOSLCHECK + +val num_of_Constraint : Constraint -> integer + +let num_of_Constraint arg_ = + match arg_ with + | Constraint_NONE -> (0:ii) + | Constraint_UNKNOWN -> (1:ii) + | Constraint_UNDEF -> (2:ii) + | Constraint_UNDEFEL0 -> (3:ii) + | Constraint_NOP -> (4:ii) + | Constraint_TRUE -> (5:ii) + | Constraint_FALSE -> (6:ii) + | Constraint_DISABLED -> (7:ii) + | Constraint_UNCOND -> (8:ii) + | Constraint_COND -> (9:ii) + | Constraint_ADDITIONAL_DECODE -> (10:ii) + | Constraint_WBSUPPRESS -> (11:ii) + | Constraint_FAULT -> (12:ii) + | Constraint_FORCE -> (13:ii) + | Constraint_FORCENOSLCHECK -> (14:ii) + end + +val undefined_Constraint : unit -> Constraint + +let undefined_Constraint () = + internal_pick + [Constraint_NONE;Constraint_UNKNOWN;Constraint_UNDEF;Constraint_UNDEFEL0;Constraint_NOP;Constraint_TRUE;Constraint_FALSE;Constraint_DISABLED;Constraint_UNCOND;Constraint_COND;Constraint_ADDITIONAL_DECODE;Constraint_WBSUPPRESS;Constraint_FAULT;Constraint_FORCE;Constraint_FORCENOSLCHECK] + +val InstrSet_of_num : integer -> InstrSet + +let InstrSet_of_num arg_ = + let l__176 = arg_ in + if (eq l__176 (0:ii)) + then InstrSet_A64 + else if (eq l__176 (1:ii)) then InstrSet_A32 else InstrSet_T32 + +val num_of_InstrSet : InstrSet -> integer + +let num_of_InstrSet arg_ = + match arg_ with | InstrSet_A64 -> (0:ii) | InstrSet_A32 -> (1:ii) | InstrSet_T32 -> (2:ii) end + +val undefined_InstrSet : unit -> InstrSet + +let undefined_InstrSet () = internal_pick 
[InstrSet_A64;InstrSet_A32;InstrSet_T32] + +val undefined_ProcState : unit -> ProcState + +let undefined_ProcState () = + <| ProcState_N = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_Z = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_C = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_V = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_D = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_A = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_I = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_F = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_PAN = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_UAO = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_SS = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_IL = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_EL = (undefined_vector (2:ii) (undefined_bit ()) : list bitU); + ProcState_nRW = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_SP = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_Q = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_GE = (undefined_vector (4:ii) (undefined_bit ()) : list bitU); + ProcState_IT = (undefined_vector (8:ii) (undefined_bit ()) : list bitU); + ProcState_J = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_T = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_E = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + ProcState_M = (undefined_vector (5:ii) (undefined_bit ()) : list bitU) |> + +val BranchType_of_num : integer -> BranchType + +let BranchType_of_num arg_ = + let l__170 = arg_ in + if (eq l__170 (0:ii)) + then BranchType_CALL + else + if (eq l__170 (1:ii)) + then BranchType_ERET + else + if (eq l__170 (2:ii)) + then BranchType_DBGEXIT + else + if (eq l__170 (3:ii)) + then BranchType_RET + else + if (eq l__170 (4:ii)) + then BranchType_JMP + else if (eq l__170 (5:ii)) then BranchType_EXCEPTION else BranchType_UNKNOWN + +val num_of_BranchType : BranchType -> integer + +let num_of_BranchType arg_ = + match arg_ with + | BranchType_CALL -> (0:ii) + | BranchType_ERET -> (1:ii) + | BranchType_DBGEXIT -> (2:ii) + | BranchType_RET -> (3:ii) + | BranchType_JMP -> (4:ii) + | BranchType_EXCEPTION -> (5:ii) + | BranchType_UNKNOWN -> (6:ii) + end + +val undefined_BranchType : unit -> BranchType + +let undefined_BranchType () = + internal_pick + [BranchType_CALL;BranchType_ERET;BranchType_DBGEXIT;BranchType_RET;BranchType_JMP;BranchType_EXCEPTION;BranchType_UNKNOWN] + +val undefined_ExceptionRecord : unit -> ExceptionRecord + +let undefined_ExceptionRecord () = + <| ExceptionRecord_typ = (undefined_Exception ()); + ExceptionRecord_syndrome = (undefined_vector (25:ii) (undefined_bit ()) : list bitU); + ExceptionRecord_vaddress = (undefined_vector (64:ii) (undefined_bit ()) : list bitU); + ExceptionRecord_ipavalid = (undefined_bool ()); + ExceptionRecord_ipaddress = (undefined_vector (52:ii) (undefined_bit ()) : list bitU) |> + +val Fault_of_num : integer -> Fault + +let Fault_of_num arg_ = + let l__152 = arg_ in + if (eq l__152 (0:ii)) + then Fault_None + else + if (eq l__152 (1:ii)) + then Fault_AccessFlag + else + if (eq l__152 (2:ii)) + then Fault_Alignment + else + if (eq l__152 (3:ii)) + then Fault_Background + else + if (eq l__152 (4:ii)) + 
then Fault_Domain + else + if (eq l__152 (5:ii)) + then Fault_Permission + else + if (eq l__152 (6:ii)) + then Fault_Translation + else + if (eq l__152 (7:ii)) + then Fault_AddressSize + else + if (eq l__152 (8:ii)) + then Fault_SyncExternal + else + if (eq l__152 (9:ii)) + then Fault_SyncExternalOnWalk + else + if (eq l__152 (10:ii)) + then Fault_SyncParity + else + if (eq l__152 (11:ii)) + then Fault_SyncParityOnWalk + else + if (eq l__152 (12:ii)) + then Fault_AsyncParity + else + if (eq l__152 (13:ii)) + then Fault_AsyncExternal + else + if (eq l__152 (14:ii)) + then Fault_Debug + else + if (eq l__152 (15:ii)) + then Fault_TLBConflict + else + if (eq l__152 (16:ii)) + then Fault_Lockdown + else if (eq l__152 (17:ii)) then Fault_Exclusive else Fault_ICacheMaint + +val num_of_Fault : Fault -> integer + +let num_of_Fault arg_ = + match arg_ with + | Fault_None -> (0:ii) + | Fault_AccessFlag -> (1:ii) + | Fault_Alignment -> (2:ii) + | Fault_Background -> (3:ii) + | Fault_Domain -> (4:ii) + | Fault_Permission -> (5:ii) + | Fault_Translation -> (6:ii) + | Fault_AddressSize -> (7:ii) + | Fault_SyncExternal -> (8:ii) + | Fault_SyncExternalOnWalk -> (9:ii) + | Fault_SyncParity -> (10:ii) + | Fault_SyncParityOnWalk -> (11:ii) + | Fault_AsyncParity -> (12:ii) + | Fault_AsyncExternal -> (13:ii) + | Fault_Debug -> (14:ii) + | Fault_TLBConflict -> (15:ii) + | Fault_Lockdown -> (16:ii) + | Fault_Exclusive -> (17:ii) + | Fault_ICacheMaint -> (18:ii) + end + +val undefined_Fault : unit -> Fault + +let undefined_Fault () = + internal_pick + [Fault_None;Fault_AccessFlag;Fault_Alignment;Fault_Background;Fault_Domain;Fault_Permission;Fault_Translation;Fault_AddressSize;Fault_SyncExternal;Fault_SyncExternalOnWalk;Fault_SyncParity;Fault_SyncParityOnWalk;Fault_AsyncParity;Fault_AsyncExternal;Fault_Debug;Fault_TLBConflict;Fault_Lockdown;Fault_Exclusive;Fault_ICacheMaint] + +val AccType_of_num : integer -> AccType + +let AccType_of_num arg_ = + let l__137 = arg_ in + if (eq l__137 (0:ii)) + then AccType_NORMAL + else + if (eq l__137 (1:ii)) + then AccType_VEC + else + if (eq l__137 (2:ii)) + then AccType_STREAM + else + if (eq l__137 (3:ii)) + then AccType_VECSTREAM + else + if (eq l__137 (4:ii)) + then AccType_ATOMIC + else + if (eq l__137 (5:ii)) + then AccType_ATOMICRW + else + if (eq l__137 (6:ii)) + then AccType_ORDERED + else + if (eq l__137 (7:ii)) + then AccType_ORDEREDRW + else + if (eq l__137 (8:ii)) + then AccType_LIMITEDORDERED + else + if (eq l__137 (9:ii)) + then AccType_UNPRIV + else + if (eq l__137 (10:ii)) + then AccType_IFETCH + else + if (eq l__137 (11:ii)) + then AccType_PTW + else + if (eq l__137 (12:ii)) + then AccType_DC + else + if (eq l__137 (13:ii)) + then AccType_IC + else if (eq l__137 (14:ii)) then AccType_DCZVA else AccType_AT + +val num_of_AccType : AccType -> integer + +let num_of_AccType arg_ = + match arg_ with + | AccType_NORMAL -> (0:ii) + | AccType_VEC -> (1:ii) + | AccType_STREAM -> (2:ii) + | AccType_VECSTREAM -> (3:ii) + | AccType_ATOMIC -> (4:ii) + | AccType_ATOMICRW -> (5:ii) + | AccType_ORDERED -> (6:ii) + | AccType_ORDEREDRW -> (7:ii) + | AccType_LIMITEDORDERED -> (8:ii) + | AccType_UNPRIV -> (9:ii) + | AccType_IFETCH -> (10:ii) + | AccType_PTW -> (11:ii) + | AccType_DC -> (12:ii) + | AccType_IC -> (13:ii) + | AccType_DCZVA -> (14:ii) + | AccType_AT -> (15:ii) + end + +val undefined_AccType : unit -> AccType + +let undefined_AccType () = + internal_pick + 
[AccType_NORMAL;AccType_VEC;AccType_STREAM;AccType_VECSTREAM;AccType_ATOMIC;AccType_ATOMICRW;AccType_ORDERED;AccType_ORDEREDRW;AccType_LIMITEDORDERED;AccType_UNPRIV;AccType_IFETCH;AccType_PTW;AccType_DC;AccType_IC;AccType_DCZVA;AccType_AT] + +val undefined_FaultRecord : unit -> FaultRecord + +let undefined_FaultRecord () = + <| FaultRecord_typ = (undefined_Fault ()); + FaultRecord_acctype = (undefined_AccType ()); + FaultRecord_ipaddress = (undefined_vector (52:ii) (undefined_bit ()) : list bitU); + FaultRecord_s2fs1walk = (undefined_bool ()); + FaultRecord_write = (undefined_bool ()); + FaultRecord_level = (undefined_int ()); + FaultRecord_extflag = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + FaultRecord_secondstage = (undefined_bool ()); + FaultRecord_domain = (undefined_vector (4:ii) (undefined_bit ()) : list bitU); + FaultRecord_errortype = (undefined_vector (2:ii) (undefined_bit ()) : list bitU); + FaultRecord_debugmoe = (undefined_vector (4:ii) (undefined_bit ()) : list bitU) |> + +val MBReqDomain_of_num : integer -> MBReqDomain + +let MBReqDomain_of_num arg_ = + let l__134 = arg_ in + if (eq l__134 (0:ii)) + then MBReqDomain_Nonshareable + else + if (eq l__134 (1:ii)) + then MBReqDomain_InnerShareable + else if (eq l__134 (2:ii)) then MBReqDomain_OuterShareable else MBReqDomain_FullSystem + +val num_of_MBReqDomain : MBReqDomain -> integer + +let num_of_MBReqDomain arg_ = + match arg_ with + | MBReqDomain_Nonshareable -> (0:ii) + | MBReqDomain_InnerShareable -> (1:ii) + | MBReqDomain_OuterShareable -> (2:ii) + | MBReqDomain_FullSystem -> (3:ii) + end + +val undefined_MBReqDomain : unit -> MBReqDomain + +let undefined_MBReqDomain () = + internal_pick + [MBReqDomain_Nonshareable;MBReqDomain_InnerShareable;MBReqDomain_OuterShareable;MBReqDomain_FullSystem] + +val MBReqTypes_of_num : integer -> MBReqTypes + +let MBReqTypes_of_num arg_ = + let l__132 = arg_ in + if (eq l__132 (0:ii)) + then MBReqTypes_Reads + else if (eq l__132 (1:ii)) then MBReqTypes_Writes else MBReqTypes_All + +val num_of_MBReqTypes : MBReqTypes -> integer + +let num_of_MBReqTypes arg_ = + match arg_ with + | MBReqTypes_Reads -> (0:ii) + | MBReqTypes_Writes -> (1:ii) + | MBReqTypes_All -> (2:ii) + end + +val undefined_MBReqTypes : unit -> MBReqTypes + +let undefined_MBReqTypes () = internal_pick [MBReqTypes_Reads;MBReqTypes_Writes;MBReqTypes_All] + +val MemType_of_num : integer -> MemType + +let MemType_of_num arg_ = + let l__131 = arg_ in + if (eq l__131 (0:ii)) + then MemType_Normal + else MemType_Device + +val num_of_MemType : MemType -> integer + +let num_of_MemType arg_ = match arg_ with | MemType_Normal -> (0:ii) | MemType_Device -> (1:ii) end + +val undefined_MemType : unit -> MemType + +let undefined_MemType () = internal_pick [MemType_Normal;MemType_Device] + +val DeviceType_of_num : integer -> DeviceType + +let DeviceType_of_num arg_ = + let l__128 = arg_ in + if (eq l__128 (0:ii)) + then DeviceType_GRE + else + if (eq l__128 (1:ii)) + then DeviceType_nGRE + else if (eq l__128 (2:ii)) then DeviceType_nGnRE else DeviceType_nGnRnE + +val num_of_DeviceType : DeviceType -> integer + +let num_of_DeviceType arg_ = + match arg_ with + | DeviceType_GRE -> (0:ii) + | DeviceType_nGRE -> (1:ii) + | DeviceType_nGnRE -> (2:ii) + | DeviceType_nGnRnE -> (3:ii) + end + +val undefined_DeviceType : unit -> DeviceType + +let undefined_DeviceType () = + internal_pick [DeviceType_GRE;DeviceType_nGRE;DeviceType_nGnRE;DeviceType_nGnRnE] + +val undefined_MemAttrHints : unit -> MemAttrHints + +let 
undefined_MemAttrHints () = + <| MemAttrHints_attrs = (undefined_vector (2:ii) (undefined_bit ()) : list bitU); + MemAttrHints_hints = (undefined_vector (2:ii) (undefined_bit ()) : list bitU); + MemAttrHints_transient = (undefined_bool ()) |> + +val undefined_MemoryAttributes : unit -> MemoryAttributes + +let undefined_MemoryAttributes () = + <| MemoryAttributes_typ = (undefined_MemType ()); + MemoryAttributes_device = (undefined_DeviceType ()); + MemoryAttributes_inner = (undefined_MemAttrHints ()); + MemoryAttributes_outer = (undefined_MemAttrHints ()); + MemoryAttributes_shareable = (undefined_bool ()); + MemoryAttributes_outershareable = (undefined_bool ()) |> + +val undefined_FullAddress : unit -> FullAddress + +let undefined_FullAddress () = + <| FullAddress_physicaladdress = (undefined_vector (52:ii) (undefined_bit ()) : list bitU); + FullAddress_NS = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) |> + +val undefined_AddressDescriptor : unit -> AddressDescriptor + +let undefined_AddressDescriptor () = + <| AddressDescriptor_fault = (undefined_FaultRecord ()); + AddressDescriptor_memattrs = (undefined_MemoryAttributes ()); + AddressDescriptor_paddress = (undefined_FullAddress ()); + AddressDescriptor_vaddress = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) |> + +val undefined_DescriptorUpdate : unit -> DescriptorUpdate + +let undefined_DescriptorUpdate () = + <| DescriptorUpdate_AF = (undefined_bool ()); + DescriptorUpdate_AP = (undefined_bool ()); + DescriptorUpdate_descaddr = (undefined_AddressDescriptor ()) |> + +val MemAtomicOp_of_num : integer -> MemAtomicOp + +let MemAtomicOp_of_num arg_ = + let l__120 = arg_ in + if (eq l__120 (0:ii)) + then MemAtomicOp_ADD + else + if (eq l__120 (1:ii)) + then MemAtomicOp_BIC + else + if (eq l__120 (2:ii)) + then MemAtomicOp_EOR + else + if (eq l__120 (3:ii)) + then MemAtomicOp_ORR + else + if (eq l__120 (4:ii)) + then MemAtomicOp_SMAX + else + if (eq l__120 (5:ii)) + then MemAtomicOp_SMIN + else + if (eq l__120 (6:ii)) + then MemAtomicOp_UMAX + else if (eq l__120 (7:ii)) then MemAtomicOp_UMIN else MemAtomicOp_SWP + +val num_of_MemAtomicOp : MemAtomicOp -> integer + +let num_of_MemAtomicOp arg_ = + match arg_ with + | MemAtomicOp_ADD -> (0:ii) + | MemAtomicOp_BIC -> (1:ii) + | MemAtomicOp_EOR -> (2:ii) + | MemAtomicOp_ORR -> (3:ii) + | MemAtomicOp_SMAX -> (4:ii) + | MemAtomicOp_SMIN -> (5:ii) + | MemAtomicOp_UMAX -> (6:ii) + | MemAtomicOp_UMIN -> (7:ii) + | MemAtomicOp_SWP -> (8:ii) + end + +val undefined_MemAtomicOp : unit -> MemAtomicOp + +let undefined_MemAtomicOp () = + internal_pick + [MemAtomicOp_ADD;MemAtomicOp_BIC;MemAtomicOp_EOR;MemAtomicOp_ORR;MemAtomicOp_SMAX;MemAtomicOp_SMIN;MemAtomicOp_UMAX;MemAtomicOp_UMIN;MemAtomicOp_SWP] + +val FPType_of_num : integer -> FPType + +let FPType_of_num arg_ = + let l__116 = arg_ in + if (eq l__116 (0:ii)) + then FPType_Nonzero + else + if (eq l__116 (1:ii)) + then FPType_Zero + else + if (eq l__116 (2:ii)) + then FPType_Infinity + else if (eq l__116 (3:ii)) then FPType_QNaN else FPType_SNaN + +val num_of_FPType : FPType -> integer + +let num_of_FPType arg_ = + match arg_ with + | FPType_Nonzero -> (0:ii) + | FPType_Zero -> (1:ii) + | FPType_Infinity -> (2:ii) + | FPType_QNaN -> (3:ii) + | FPType_SNaN -> (4:ii) + end + +val undefined_FPType : unit -> FPType + +let undefined_FPType () = + internal_pick [FPType_Nonzero;FPType_Zero;FPType_Infinity;FPType_QNaN;FPType_SNaN] + +val FPExc_of_num : integer -> FPExc + +let FPExc_of_num arg_ = + let l__111 = arg_ in + if (eq l__111 
(0:ii)) + then FPExc_InvalidOp + else + if (eq l__111 (1:ii)) + then FPExc_DivideByZero + else + if (eq l__111 (2:ii)) + then FPExc_Overflow + else + if (eq l__111 (3:ii)) + then FPExc_Underflow + else if (eq l__111 (4:ii)) then FPExc_Inexact else FPExc_InputDenorm + +val num_of_FPExc : FPExc -> integer + +let num_of_FPExc arg_ = + match arg_ with + | FPExc_InvalidOp -> (0:ii) + | FPExc_DivideByZero -> (1:ii) + | FPExc_Overflow -> (2:ii) + | FPExc_Underflow -> (3:ii) + | FPExc_Inexact -> (4:ii) + | FPExc_InputDenorm -> (5:ii) + end + +val undefined_FPExc : unit -> FPExc + +let undefined_FPExc () = + internal_pick + [FPExc_InvalidOp;FPExc_DivideByZero;FPExc_Overflow;FPExc_Underflow;FPExc_Inexact;FPExc_InputDenorm] + +val FPRounding_of_num : integer -> FPRounding + +let FPRounding_of_num arg_ = + let l__106 = arg_ in + if (eq l__106 (0:ii)) + then FPRounding_TIEEVEN + else + if (eq l__106 (1:ii)) + then FPRounding_POSINF + else + if (eq l__106 (2:ii)) + then FPRounding_NEGINF + else + if (eq l__106 (3:ii)) + then FPRounding_ZERO + else if (eq l__106 (4:ii)) then FPRounding_TIEAWAY else FPRounding_ODD + +val num_of_FPRounding : FPRounding -> integer + +let num_of_FPRounding arg_ = + match arg_ with + | FPRounding_TIEEVEN -> (0:ii) + | FPRounding_POSINF -> (1:ii) + | FPRounding_NEGINF -> (2:ii) + | FPRounding_ZERO -> (3:ii) + | FPRounding_TIEAWAY -> (4:ii) + | FPRounding_ODD -> (5:ii) + end + +val undefined_FPRounding : unit -> FPRounding + +let undefined_FPRounding () = + internal_pick + [FPRounding_TIEEVEN;FPRounding_POSINF;FPRounding_NEGINF;FPRounding_ZERO;FPRounding_TIEAWAY;FPRounding_ODD] + +val SysRegAccess_of_num : integer -> SysRegAccess + +let SysRegAccess_of_num arg_ = + let l__102 = arg_ in + if (eq l__102 (0:ii)) + then SysRegAccess_OK + else + if (eq l__102 (1:ii)) + then SysRegAccess_UNDEFINED + else + if (eq l__102 (2:ii)) + then SysRegAccess_TrapToEL1 + else if (eq l__102 (3:ii)) then SysRegAccess_TrapToEL2 else SysRegAccess_TrapToEL3 + +val num_of_SysRegAccess : SysRegAccess -> integer + +let num_of_SysRegAccess arg_ = + match arg_ with + | SysRegAccess_OK -> (0:ii) + | SysRegAccess_UNDEFINED -> (1:ii) + | SysRegAccess_TrapToEL1 -> (2:ii) + | SysRegAccess_TrapToEL2 -> (3:ii) + | SysRegAccess_TrapToEL3 -> (4:ii) + end + +val undefined_SysRegAccess : unit -> SysRegAccess + +let undefined_SysRegAccess () = + internal_pick + [SysRegAccess_OK;SysRegAccess_UNDEFINED;SysRegAccess_TrapToEL1;SysRegAccess_TrapToEL2;SysRegAccess_TrapToEL3] + +val SRType_of_num : integer -> SRType + +let SRType_of_num arg_ = + let l__98 = arg_ in + if (eq l__98 (0:ii)) + then SRType_LSL + else + if (eq l__98 (1:ii)) + then SRType_LSR + else + if (eq l__98 (2:ii)) + then SRType_ASR + else if (eq l__98 (3:ii)) then SRType_ROR else SRType_RRX + +val num_of_SRType : SRType -> integer + +let num_of_SRType arg_ = + match arg_ with + | SRType_LSL -> (0:ii) + | SRType_LSR -> (1:ii) + | SRType_ASR -> (2:ii) + | SRType_ROR -> (3:ii) + | SRType_RRX -> (4:ii) + end + +val undefined_SRType : unit -> SRType + +let undefined_SRType () = internal_pick [SRType_LSL;SRType_LSR;SRType_ASR;SRType_ROR;SRType_RRX] + +val ShiftType_of_num : integer -> ShiftType + +let ShiftType_of_num arg_ = + let l__95 = arg_ in + if (eq l__95 (0:ii)) + then ShiftType_LSL + else + if (eq l__95 (1:ii)) + then ShiftType_LSR + else if (eq l__95 (2:ii)) then ShiftType_ASR else ShiftType_ROR + +val num_of_ShiftType : ShiftType -> integer + +let num_of_ShiftType arg_ = + match arg_ with + | ShiftType_LSL -> (0:ii) + | ShiftType_LSR -> (1:ii) + | 
ShiftType_ASR -> (2:ii) + | ShiftType_ROR -> (3:ii) + end + +val undefined_ShiftType : unit -> ShiftType + +let undefined_ShiftType () = internal_pick [ShiftType_LSL;ShiftType_LSR;ShiftType_ASR;ShiftType_ROR] + +val PrefetchHint_of_num : integer -> PrefetchHint + +let PrefetchHint_of_num arg_ = + let l__93 = arg_ in + if (eq l__93 (0:ii)) + then Prefetch_READ + else if (eq l__93 (1:ii)) then Prefetch_WRITE else Prefetch_EXEC + +val num_of_PrefetchHint : PrefetchHint -> integer + +let num_of_PrefetchHint arg_ = + match arg_ with + | Prefetch_READ -> (0:ii) + | Prefetch_WRITE -> (1:ii) + | Prefetch_EXEC -> (2:ii) + end + +val undefined_PrefetchHint : unit -> PrefetchHint + +let undefined_PrefetchHint () = internal_pick [Prefetch_READ;Prefetch_WRITE;Prefetch_EXEC] + +val InterruptID_of_num : integer -> InterruptID + +let InterruptID_of_num arg_ = + let l__89 = arg_ in + if (eq l__89 (0:ii)) + then InterruptID_PMUIRQ + else + if (eq l__89 (1:ii)) + then InterruptID_COMMIRQ + else + if (eq l__89 (2:ii)) + then InterruptID_CTIIRQ + else if (eq l__89 (3:ii)) then InterruptID_COMMRX else InterruptID_COMMTX + +val num_of_InterruptID : InterruptID -> integer + +let num_of_InterruptID arg_ = + match arg_ with + | InterruptID_PMUIRQ -> (0:ii) + | InterruptID_COMMIRQ -> (1:ii) + | InterruptID_CTIIRQ -> (2:ii) + | InterruptID_COMMRX -> (3:ii) + | InterruptID_COMMTX -> (4:ii) + end + +val undefined_InterruptID : unit -> InterruptID + +let undefined_InterruptID () = + internal_pick + [InterruptID_PMUIRQ;InterruptID_COMMIRQ;InterruptID_CTIIRQ;InterruptID_COMMRX;InterruptID_COMMTX] + +val CrossTriggerOut_of_num : integer -> CrossTriggerOut + +let CrossTriggerOut_of_num arg_ = + let l__82 = arg_ in + if (eq l__82 (0:ii)) + then CrossTriggerOut_DebugRequest + else + if (eq l__82 (1:ii)) + then CrossTriggerOut_RestartRequest + else + if (eq l__82 (2:ii)) + then CrossTriggerOut_IRQ + else + if (eq l__82 (3:ii)) + then CrossTriggerOut_RSVD3 + else + if (eq l__82 (4:ii)) + then CrossTriggerOut_TraceExtIn0 + else + if (eq l__82 (5:ii)) + then CrossTriggerOut_TraceExtIn1 + else if (eq l__82 (6:ii)) then CrossTriggerOut_TraceExtIn2 else CrossTriggerOut_TraceExtIn3 + +val num_of_CrossTriggerOut : CrossTriggerOut -> integer + +let num_of_CrossTriggerOut arg_ = + match arg_ with + | CrossTriggerOut_DebugRequest -> (0:ii) + | CrossTriggerOut_RestartRequest -> (1:ii) + | CrossTriggerOut_IRQ -> (2:ii) + | CrossTriggerOut_RSVD3 -> (3:ii) + | CrossTriggerOut_TraceExtIn0 -> (4:ii) + | CrossTriggerOut_TraceExtIn1 -> (5:ii) + | CrossTriggerOut_TraceExtIn2 -> (6:ii) + | CrossTriggerOut_TraceExtIn3 -> (7:ii) + end + +val undefined_CrossTriggerOut : unit -> CrossTriggerOut + +let undefined_CrossTriggerOut () = + internal_pick + [CrossTriggerOut_DebugRequest;CrossTriggerOut_RestartRequest;CrossTriggerOut_IRQ;CrossTriggerOut_RSVD3;CrossTriggerOut_TraceExtIn0;CrossTriggerOut_TraceExtIn1;CrossTriggerOut_TraceExtIn2;CrossTriggerOut_TraceExtIn3] + +val CrossTriggerIn_of_num : integer -> CrossTriggerIn + +let CrossTriggerIn_of_num arg_ = + let l__75 = arg_ in + if (eq l__75 (0:ii)) + then CrossTriggerIn_CrossHalt + else + if (eq l__75 (1:ii)) + then CrossTriggerIn_PMUOverflow + else + if (eq l__75 (2:ii)) + then CrossTriggerIn_RSVD2 + else + if (eq l__75 (3:ii)) + then CrossTriggerIn_RSVD3 + else + if (eq l__75 (4:ii)) + then CrossTriggerIn_TraceExtOut0 + else + if (eq l__75 (5:ii)) + then CrossTriggerIn_TraceExtOut1 + else if (eq l__75 (6:ii)) then CrossTriggerIn_TraceExtOut2 else CrossTriggerIn_TraceExtOut3 + +val 
num_of_CrossTriggerIn : CrossTriggerIn -> integer + +let num_of_CrossTriggerIn arg_ = + match arg_ with + | CrossTriggerIn_CrossHalt -> (0:ii) + | CrossTriggerIn_PMUOverflow -> (1:ii) + | CrossTriggerIn_RSVD2 -> (2:ii) + | CrossTriggerIn_RSVD3 -> (3:ii) + | CrossTriggerIn_TraceExtOut0 -> (4:ii) + | CrossTriggerIn_TraceExtOut1 -> (5:ii) + | CrossTriggerIn_TraceExtOut2 -> (6:ii) + | CrossTriggerIn_TraceExtOut3 -> (7:ii) + end + +val undefined_CrossTriggerIn : unit -> CrossTriggerIn + +let undefined_CrossTriggerIn () = + internal_pick + [CrossTriggerIn_CrossHalt;CrossTriggerIn_PMUOverflow;CrossTriggerIn_RSVD2;CrossTriggerIn_RSVD3;CrossTriggerIn_TraceExtOut0;CrossTriggerIn_TraceExtOut1;CrossTriggerIn_TraceExtOut2;CrossTriggerIn_TraceExtOut3] + +val MemBarrierOp_of_num : integer -> MemBarrierOp + +let MemBarrierOp_of_num arg_ = + let l__73 = arg_ in + if (eq l__73 (0:ii)) + then MemBarrierOp_DSB + else if (eq l__73 (1:ii)) then MemBarrierOp_DMB else MemBarrierOp_ISB + +val num_of_MemBarrierOp : MemBarrierOp -> integer + +let num_of_MemBarrierOp arg_ = + match arg_ with + | MemBarrierOp_DSB -> (0:ii) + | MemBarrierOp_DMB -> (1:ii) + | MemBarrierOp_ISB -> (2:ii) + end + +val undefined_MemBarrierOp : unit -> MemBarrierOp + +let undefined_MemBarrierOp () = internal_pick [MemBarrierOp_DSB;MemBarrierOp_DMB;MemBarrierOp_ISB] + +val undefined_AccessDescriptor : unit -> AccessDescriptor + +let undefined_AccessDescriptor () = + <| AccessDescriptor_acctype = (undefined_AccType ()); + AccessDescriptor_page_table_walk = (undefined_bool ()); + AccessDescriptor_secondstage = (undefined_bool ()); + AccessDescriptor_s2fs1walk = (undefined_bool ()); + AccessDescriptor_level = (undefined_int ()) |> + +val undefined_Permissions : unit -> Permissions + +let undefined_Permissions () = + <| Permissions_ap = (undefined_vector (3:ii) (undefined_bit ()) : list bitU); + Permissions_xn = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + Permissions_xxn = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + Permissions_pxn = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) |> + +val undefined_TLBRecord : unit -> TLBRecord + +let undefined_TLBRecord () = + <| TLBRecord_perms = (undefined_Permissions ()); + TLBRecord_nG = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + TLBRecord_domain = (undefined_vector (4:ii) (undefined_bit ()) : list bitU); + TLBRecord_contiguous = (undefined_bool ()); + TLBRecord_level = (undefined_int ()); + TLBRecord_blocksize = (undefined_int ()); + TLBRecord_descupdate = (undefined_DescriptorUpdate ()); + TLBRecord_CnP = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + TLBRecord_addrdesc = (undefined_AddressDescriptor ()) |> + +val ImmediateOp_of_num : integer -> ImmediateOp + +let ImmediateOp_of_num arg_ = + let l__70 = arg_ in + if (eq l__70 (0:ii)) + then ImmediateOp_MOVI + else + if (eq l__70 (1:ii)) + then ImmediateOp_MVNI + else if (eq l__70 (2:ii)) then ImmediateOp_ORR else ImmediateOp_BIC + +val num_of_ImmediateOp : ImmediateOp -> integer + +let num_of_ImmediateOp arg_ = + match arg_ with + | ImmediateOp_MOVI -> (0:ii) + | ImmediateOp_MVNI -> (1:ii) + | ImmediateOp_ORR -> (2:ii) + | ImmediateOp_BIC -> (3:ii) + end + +val undefined_ImmediateOp : unit -> ImmediateOp + +let undefined_ImmediateOp () = + internal_pick [ImmediateOp_MOVI;ImmediateOp_MVNI;ImmediateOp_ORR;ImmediateOp_BIC] + +val MoveWideOp_of_num : integer -> MoveWideOp + +let MoveWideOp_of_num arg_ = + let l__68 = arg_ in + if (eq l__68 (0:ii)) + then MoveWideOp_N + else if (eq l__68 
(1:ii)) then MoveWideOp_Z else MoveWideOp_K + +val num_of_MoveWideOp : MoveWideOp -> integer + +let num_of_MoveWideOp arg_ = + match arg_ with | MoveWideOp_N -> (0:ii) | MoveWideOp_Z -> (1:ii) | MoveWideOp_K -> (2:ii) end + +val undefined_MoveWideOp : unit -> MoveWideOp + +let undefined_MoveWideOp () = internal_pick [MoveWideOp_N;MoveWideOp_Z;MoveWideOp_K] + +val SystemAccessType_of_num : integer -> SystemAccessType + +let SystemAccessType_of_num arg_ = + let l__66 = arg_ in + if (eq l__66 (0:ii)) + then SystemAccessType_RT + else if (eq l__66 (1:ii)) then SystemAccessType_RRT else SystemAccessType_DT + +val num_of_SystemAccessType : SystemAccessType -> integer + +let num_of_SystemAccessType arg_ = + match arg_ with + | SystemAccessType_RT -> (0:ii) + | SystemAccessType_RRT -> (1:ii) + | SystemAccessType_DT -> (2:ii) + end + +val undefined_SystemAccessType : unit -> SystemAccessType + +let undefined_SystemAccessType () = + internal_pick [SystemAccessType_RT;SystemAccessType_RRT;SystemAccessType_DT] + +val VBitOp_of_num : integer -> VBitOp + +let VBitOp_of_num arg_ = + let l__63 = arg_ in + if (eq l__63 (0:ii)) + then VBitOp_VBIF + else + if (eq l__63 (1:ii)) + then VBitOp_VBIT + else if (eq l__63 (2:ii)) then VBitOp_VBSL else VBitOp_VEOR + +val num_of_VBitOp : VBitOp -> integer + +let num_of_VBitOp arg_ = + match arg_ with + | VBitOp_VBIF -> (0:ii) + | VBitOp_VBIT -> (1:ii) + | VBitOp_VBSL -> (2:ii) + | VBitOp_VEOR -> (3:ii) + end + +val undefined_VBitOp : unit -> VBitOp + +let undefined_VBitOp () = internal_pick [VBitOp_VBIF;VBitOp_VBIT;VBitOp_VBSL;VBitOp_VEOR] + +val TimeStamp_of_num : integer -> TimeStamp + +let TimeStamp_of_num arg_ = + let l__61 = arg_ in + if (eq l__61 (0:ii)) + then TimeStamp_None + else if (eq l__61 (1:ii)) then TimeStamp_Virtual else TimeStamp_Physical + +val num_of_TimeStamp : TimeStamp -> integer + +let num_of_TimeStamp arg_ = + match arg_ with + | TimeStamp_None -> (0:ii) + | TimeStamp_Virtual -> (1:ii) + | TimeStamp_Physical -> (2:ii) + end + +val undefined_TimeStamp : unit -> TimeStamp + +let undefined_TimeStamp () = internal_pick [TimeStamp_None;TimeStamp_Virtual;TimeStamp_Physical] + +val PrivilegeLevel_of_num : integer -> PrivilegeLevel + +let PrivilegeLevel_of_num arg_ = + let l__58 = arg_ in + if (eq l__58 (0:ii)) + then PL3 + else if (eq l__58 (1:ii)) then PL2 else if (eq l__58 (2:ii)) then PL1 else PL0 + +val num_of_PrivilegeLevel : PrivilegeLevel -> integer + +let num_of_PrivilegeLevel arg_ = + match arg_ with | PL3 -> (0:ii) | PL2 -> (1:ii) | PL1 -> (2:ii) | PL0 -> (3:ii) end + +val undefined_PrivilegeLevel : unit -> PrivilegeLevel + +let undefined_PrivilegeLevel () = internal_pick [PL3;PL2;PL1;PL0] + +val undefined_AArch32_SErrorSyndrome : unit -> AArch32_SErrorSyndrome + +let undefined_AArch32_SErrorSyndrome () = + <| AArch32_SErrorSyndrome_AET = (undefined_vector (2:ii) (undefined_bit ()) : list bitU); + AArch32_SErrorSyndrome_ExT = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) |> + +val SystemOp_of_num : integer -> SystemOp + +let SystemOp_of_num arg_ = + let l__54 = arg_ in + if (eq l__54 (0:ii)) + then Sys_AT + else + if (eq l__54 (1:ii)) + then Sys_DC + else if (eq l__54 (2:ii)) then Sys_IC else if (eq l__54 (3:ii)) then Sys_TLBI else Sys_SYS + +val num_of_SystemOp : SystemOp -> integer + +let num_of_SystemOp arg_ = + match arg_ with + | Sys_AT -> (0:ii) + | Sys_DC -> (1:ii) + | Sys_IC -> (2:ii) + | Sys_TLBI -> (3:ii) + | Sys_SYS -> (4:ii) + end + +val undefined_SystemOp : unit -> SystemOp + +let undefined_SystemOp () = 
internal_pick [Sys_AT;Sys_DC;Sys_IC;Sys_TLBI;Sys_SYS] + +val undefined_PCSample : unit -> PCSample + +let undefined_PCSample () = + <| PCSample_valid_name = (undefined_bool ()); + PCSample_pc = (undefined_vector (64:ii) (undefined_bit ()) : list bitU); + PCSample_el = (undefined_vector (2:ii) (undefined_bit ()) : list bitU); + PCSample_rw = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + PCSample_ns = (undefined_vector (1:ii) (undefined_bit ()) : list bitU); + PCSample_contextidr = (undefined_vector (32:ii) (undefined_bit ()) : list bitU); + PCSample_contextidr_el2 = (undefined_vector (32:ii) (undefined_bit ()) : list bitU); + PCSample_vmid = (undefined_vector (16:ii) (undefined_bit ()) : list bitU) |> + +val ReduceOp_of_num : integer -> ReduceOp + +let ReduceOp_of_num arg_ = + let l__49 = arg_ in + if (eq l__49 (0:ii)) + then ReduceOp_FMINNUM + else + if (eq l__49 (1:ii)) + then ReduceOp_FMAXNUM + else + if (eq l__49 (2:ii)) + then ReduceOp_FMIN + else + if (eq l__49 (3:ii)) + then ReduceOp_FMAX + else if (eq l__49 (4:ii)) then ReduceOp_FADD else ReduceOp_ADD + +val num_of_ReduceOp : ReduceOp -> integer + +let num_of_ReduceOp arg_ = + match arg_ with + | ReduceOp_FMINNUM -> (0:ii) + | ReduceOp_FMAXNUM -> (1:ii) + | ReduceOp_FMIN -> (2:ii) + | ReduceOp_FMAX -> (3:ii) + | ReduceOp_FADD -> (4:ii) + | ReduceOp_ADD -> (5:ii) + end + +val undefined_ReduceOp : unit -> ReduceOp + +let undefined_ReduceOp () = + internal_pick + [ReduceOp_FMINNUM;ReduceOp_FMAXNUM;ReduceOp_FMIN;ReduceOp_FMAX;ReduceOp_FADD;ReduceOp_ADD] + +val LogicalOp_of_num : integer -> LogicalOp + +let LogicalOp_of_num arg_ = + let l__47 = arg_ in + if (eq l__47 (0:ii)) + then LogicalOp_AND + else if (eq l__47 (1:ii)) then LogicalOp_EOR else LogicalOp_ORR + +val num_of_LogicalOp : LogicalOp -> integer + +let num_of_LogicalOp arg_ = + match arg_ with | LogicalOp_AND -> (0:ii) | LogicalOp_EOR -> (1:ii) | LogicalOp_ORR -> (2:ii) end + +val undefined_LogicalOp : unit -> LogicalOp + +let undefined_LogicalOp () = internal_pick [LogicalOp_AND;LogicalOp_EOR;LogicalOp_ORR] + +val ExtendType_of_num : integer -> ExtendType + +let ExtendType_of_num arg_ = + let l__40 = arg_ in + if (eq l__40 (0:ii)) + then ExtendType_SXTB + else + if (eq l__40 (1:ii)) + then ExtendType_SXTH + else + if (eq l__40 (2:ii)) + then ExtendType_SXTW + else + if (eq l__40 (3:ii)) + then ExtendType_SXTX + else + if (eq l__40 (4:ii)) + then ExtendType_UXTB + else + if (eq l__40 (5:ii)) + then ExtendType_UXTH + else if (eq l__40 (6:ii)) then ExtendType_UXTW else ExtendType_UXTX + +val num_of_ExtendType : ExtendType -> integer + +let num_of_ExtendType arg_ = + match arg_ with + | ExtendType_SXTB -> (0:ii) + | ExtendType_SXTH -> (1:ii) + | ExtendType_SXTW -> (2:ii) + | ExtendType_SXTX -> (3:ii) + | ExtendType_UXTB -> (4:ii) + | ExtendType_UXTH -> (5:ii) + | ExtendType_UXTW -> (6:ii) + | ExtendType_UXTX -> (7:ii) + end + +val undefined_ExtendType : unit -> ExtendType + +let undefined_ExtendType () = + internal_pick + [ExtendType_SXTB;ExtendType_SXTH;ExtendType_SXTW;ExtendType_SXTX;ExtendType_UXTB;ExtendType_UXTH;ExtendType_UXTW;ExtendType_UXTX] + +val SystemHintOp_of_num : integer -> SystemHintOp + +let SystemHintOp_of_num arg_ = + let l__33 = arg_ in + if (eq l__33 (0:ii)) + then SystemHintOp_NOP + else + if (eq l__33 (1:ii)) + then SystemHintOp_YIELD + else + if (eq l__33 (2:ii)) + then SystemHintOp_WFE + else + if (eq l__33 (3:ii)) + then SystemHintOp_WFI + else + if (eq l__33 (4:ii)) + then SystemHintOp_SEV + else + if (eq l__33 (5:ii)) + then 
SystemHintOp_SEVL + else if (eq l__33 (6:ii)) then SystemHintOp_ESB else SystemHintOp_PSB + +val num_of_SystemHintOp : SystemHintOp -> integer + +let num_of_SystemHintOp arg_ = + match arg_ with + | SystemHintOp_NOP -> (0:ii) + | SystemHintOp_YIELD -> (1:ii) + | SystemHintOp_WFE -> (2:ii) + | SystemHintOp_WFI -> (3:ii) + | SystemHintOp_SEV -> (4:ii) + | SystemHintOp_SEVL -> (5:ii) + | SystemHintOp_ESB -> (6:ii) + | SystemHintOp_PSB -> (7:ii) + end + +val undefined_SystemHintOp : unit -> SystemHintOp + +let undefined_SystemHintOp () = + internal_pick + [SystemHintOp_NOP;SystemHintOp_YIELD;SystemHintOp_WFE;SystemHintOp_WFI;SystemHintOp_SEV;SystemHintOp_SEVL;SystemHintOp_ESB;SystemHintOp_PSB] + +val MemOp_of_num : integer -> MemOp + +let MemOp_of_num arg_ = + let l__31 = arg_ in + if (eq l__31 (0:ii)) + then MemOp_LOAD + else if (eq l__31 (1:ii)) then MemOp_STORE else MemOp_PREFETCH + +val num_of_MemOp : MemOp -> integer + +let num_of_MemOp arg_ = + match arg_ with | MemOp_LOAD -> (0:ii) | MemOp_STORE -> (1:ii) | MemOp_PREFETCH -> (2:ii) end + +val undefined_MemOp : unit -> MemOp + +let undefined_MemOp () = internal_pick [MemOp_LOAD;MemOp_STORE;MemOp_PREFETCH] + +val OpType_of_num : integer -> OpType + +let OpType_of_num arg_ = + let l__27 = arg_ in + if (eq l__27 (0:ii)) + then OpType_Load + else + if (eq l__27 (1:ii)) + then OpType_Store + else + if (eq l__27 (2:ii)) + then OpType_LoadAtomic + else if (eq l__27 (3:ii)) then OpType_Branch else OpType_Other + +val num_of_OpType : OpType -> integer + +let num_of_OpType arg_ = + match arg_ with + | OpType_Load -> (0:ii) + | OpType_Store -> (1:ii) + | OpType_LoadAtomic -> (2:ii) + | OpType_Branch -> (3:ii) + | OpType_Other -> (4:ii) + end + +val undefined_OpType : unit -> OpType + +let undefined_OpType () = + internal_pick [OpType_Load;OpType_Store;OpType_LoadAtomic;OpType_Branch;OpType_Other] + +val FPUnaryOp_of_num : integer -> FPUnaryOp + +let FPUnaryOp_of_num arg_ = + let l__24 = arg_ in + if (eq l__24 (0:ii)) + then FPUnaryOp_ABS + else + if (eq l__24 (1:ii)) + then FPUnaryOp_MOV + else if (eq l__24 (2:ii)) then FPUnaryOp_NEG else FPUnaryOp_SQRT + +val num_of_FPUnaryOp : FPUnaryOp -> integer + +let num_of_FPUnaryOp arg_ = + match arg_ with + | FPUnaryOp_ABS -> (0:ii) + | FPUnaryOp_MOV -> (1:ii) + | FPUnaryOp_NEG -> (2:ii) + | FPUnaryOp_SQRT -> (3:ii) + end + +val undefined_FPUnaryOp : unit -> FPUnaryOp + +let undefined_FPUnaryOp () = + internal_pick [FPUnaryOp_ABS;FPUnaryOp_MOV;FPUnaryOp_NEG;FPUnaryOp_SQRT] + +val CompareOp_of_num : integer -> CompareOp + +let CompareOp_of_num arg_ = + let l__20 = arg_ in + if (eq l__20 (0:ii)) + then CompareOp_GT + else + if (eq l__20 (1:ii)) + then CompareOp_GE + else + if (eq l__20 (2:ii)) + then CompareOp_EQ + else if (eq l__20 (3:ii)) then CompareOp_LE else CompareOp_LT + +val num_of_CompareOp : CompareOp -> integer + +let num_of_CompareOp arg_ = + match arg_ with + | CompareOp_GT -> (0:ii) + | CompareOp_GE -> (1:ii) + | CompareOp_EQ -> (2:ii) + | CompareOp_LE -> (3:ii) + | CompareOp_LT -> (4:ii) + end + +val undefined_CompareOp : unit -> CompareOp + +let undefined_CompareOp () = + internal_pick [CompareOp_GT;CompareOp_GE;CompareOp_EQ;CompareOp_LE;CompareOp_LT] + +val PSTATEField_of_num : integer -> PSTATEField + +let PSTATEField_of_num arg_ = + let l__16 = arg_ in + if (eq l__16 (0:ii)) + then PSTATEField_DAIFSet + else + if (eq l__16 (1:ii)) + then PSTATEField_DAIFClr + else + if (eq l__16 (2:ii)) + then PSTATEField_PAN + else if (eq l__16 (3:ii)) then PSTATEField_UAO else PSTATEField_SP + +val 
num_of_PSTATEField : PSTATEField -> integer + +let num_of_PSTATEField arg_ = + match arg_ with + | PSTATEField_DAIFSet -> (0:ii) + | PSTATEField_DAIFClr -> (1:ii) + | PSTATEField_PAN -> (2:ii) + | PSTATEField_UAO -> (3:ii) + | PSTATEField_SP -> (4:ii) + end + +val undefined_PSTATEField : unit -> PSTATEField + +let undefined_PSTATEField () = + internal_pick + [PSTATEField_DAIFSet;PSTATEField_DAIFClr;PSTATEField_PAN;PSTATEField_UAO;PSTATEField_SP] + +val FPMaxMinOp_of_num : integer -> FPMaxMinOp + +let FPMaxMinOp_of_num arg_ = + let l__13 = arg_ in + if (eq l__13 (0:ii)) + then FPMaxMinOp_MAX + else + if (eq l__13 (1:ii)) + then FPMaxMinOp_MIN + else if (eq l__13 (2:ii)) then FPMaxMinOp_MAXNUM else FPMaxMinOp_MINNUM + +val num_of_FPMaxMinOp : FPMaxMinOp -> integer + +let num_of_FPMaxMinOp arg_ = + match arg_ with + | FPMaxMinOp_MAX -> (0:ii) + | FPMaxMinOp_MIN -> (1:ii) + | FPMaxMinOp_MAXNUM -> (2:ii) + | FPMaxMinOp_MINNUM -> (3:ii) + end + +val undefined_FPMaxMinOp : unit -> FPMaxMinOp + +let undefined_FPMaxMinOp () = + internal_pick [FPMaxMinOp_MAX;FPMaxMinOp_MIN;FPMaxMinOp_MAXNUM;FPMaxMinOp_MINNUM] + +val CountOp_of_num : integer -> CountOp + +let CountOp_of_num arg_ = + let l__11 = arg_ in + if (eq l__11 (0:ii)) + then CountOp_CLZ + else if (eq l__11 (1:ii)) then CountOp_CLS else CountOp_CNT + +val num_of_CountOp : CountOp -> integer + +let num_of_CountOp arg_ = + match arg_ with | CountOp_CLZ -> (0:ii) | CountOp_CLS -> (1:ii) | CountOp_CNT -> (2:ii) end + +val undefined_CountOp : unit -> CountOp + +let undefined_CountOp () = internal_pick [CountOp_CLZ;CountOp_CLS;CountOp_CNT] + +val VFPNegMul_of_num : integer -> VFPNegMul + +let VFPNegMul_of_num arg_ = + let l__9 = arg_ in + if (eq l__9 (0:ii)) + then VFPNegMul_VNMLA + else if (eq l__9 (1:ii)) then VFPNegMul_VNMLS else VFPNegMul_VNMUL + +val num_of_VFPNegMul : VFPNegMul -> integer + +let num_of_VFPNegMul arg_ = + match arg_ with + | VFPNegMul_VNMLA -> (0:ii) + | VFPNegMul_VNMLS -> (1:ii) + | VFPNegMul_VNMUL -> (2:ii) + end + +val undefined_VFPNegMul : unit -> VFPNegMul + +let undefined_VFPNegMul () = internal_pick [VFPNegMul_VNMLA;VFPNegMul_VNMLS;VFPNegMul_VNMUL] + +val VBitOps_of_num : integer -> VBitOps + +let VBitOps_of_num arg_ = + let l__7 = arg_ in + if (eq l__7 (0:ii)) + then VBitOps_VBIF + else if (eq l__7 (1:ii)) then VBitOps_VBIT else VBitOps_VBSL + +val num_of_VBitOps : VBitOps -> integer + +let num_of_VBitOps arg_ = + match arg_ with | VBitOps_VBIF -> (0:ii) | VBitOps_VBIT -> (1:ii) | VBitOps_VBSL -> (2:ii) end + +val undefined_VBitOps : unit -> VBitOps + +let undefined_VBitOps () = internal_pick [VBitOps_VBIF;VBitOps_VBIT;VBitOps_VBSL] + +val VCGEtype_of_num : integer -> VCGEtype + +let VCGEtype_of_num arg_ = + let l__5 = arg_ in + if (eq l__5 (0:ii)) + then VCGEtype_signed + else if (eq l__5 (1:ii)) then VCGEtype_unsigned else VCGEtype_fp + +val num_of_VCGEtype : VCGEtype -> integer + +let num_of_VCGEtype arg_ = + match arg_ with + | VCGEtype_signed -> (0:ii) + | VCGEtype_unsigned -> (1:ii) + | VCGEtype_fp -> (2:ii) + end + +val undefined_VCGEtype : unit -> VCGEtype + +let undefined_VCGEtype () = internal_pick [VCGEtype_signed;VCGEtype_unsigned;VCGEtype_fp] + +val VCGTtype_of_num : integer -> VCGTtype + +let VCGTtype_of_num arg_ = + let l__3 = arg_ in + if (eq l__3 (0:ii)) + then VCGTtype_signed + else if (eq l__3 (1:ii)) then VCGTtype_unsigned else VCGTtype_fp + +val num_of_VCGTtype : VCGTtype -> integer + +let num_of_VCGTtype arg_ = + match arg_ with + | VCGTtype_signed -> (0:ii) + | VCGTtype_unsigned -> (1:ii) + | 
VCGTtype_fp -> (2:ii) + end + +val undefined_VCGTtype : unit -> VCGTtype + +let undefined_VCGTtype () = internal_pick [VCGTtype_signed;VCGTtype_unsigned;VCGTtype_fp] + +val __InstrEnc_of_num : integer -> __InstrEnc + +let __InstrEnc_of_num arg_ = + let l__0 = arg_ in + if (eq l__0 (0:ii)) + then __A64 + else if (eq l__0 (1:ii)) then __A32 else if (eq l__0 (2:ii)) then __T16 else __T32 + +val num_of___InstrEnc : __InstrEnc -> integer + +let num_of___InstrEnc arg_ = + match arg_ with | __A64 -> (0:ii) | __A32 -> (1:ii) | __T16 -> (2:ii) | __T32 -> (3:ii) end + +val undefined___InstrEnc : unit -> __InstrEnc + +let undefined___InstrEnc () = internal_pick [__A64;__A32;__T16;__T32] + +val __UNKNOWN_integer : unit -> ii + +let __UNKNOWN_integer () = (0:ii) + +val ThisInstrAddr : integer -> unit -> M (list bitU) + +let ThisInstrAddr (N__tv : integer) () = + (read_reg _PC_ref : M (list bitU)) >>= fun (w__0 : bits ty64) -> + return (slice w__0 (0:ii) N__tv : list bitU) + +val ThisInstr : unit -> M (list bitU) + +let ThisInstr () = (read_reg __ThisInstr_ref : M (list bitU)) + +val SynchronizeContext : unit -> unit + +let SynchronizeContext () = () + +val ProcessorID : unit -> ii + +let ProcessorID () = (0:ii) + +val __UNKNOWN_PrefetchHint : unit -> PrefetchHint + +let __UNKNOWN_PrefetchHint () = Prefetch_READ + +val __UNKNOWN_MemType : unit -> MemType + +let __UNKNOWN_MemType () = MemType_Normal + +val __UNKNOWN_MemOp : unit -> MemOp + +let __UNKNOWN_MemOp () = MemOp_LOAD + +let (M32_User : list bitU) = [B1;B0;B0;B0;B0] + +let (M32_Undef : list bitU) = [B1;B1;B0;B1;B1] + +let (M32_System : list bitU) = [B1;B1;B1;B1;B1] + +let (M32_Svc : list bitU) = [B1;B0;B0;B1;B1] + +let (M32_Monitor : list bitU) = [B1;B0;B1;B1;B0] + +let (M32_IRQ : list bitU) = [B1;B0;B0;B1;B0] + +let (M32_Hyp : list bitU) = [B1;B1;B0;B1;B0] + +let (M32_FIQ : list bitU) = [B1;B0;B0;B0;B1] + +let (M32_Abort : list bitU) = [B1;B0;B1;B1;B1] + +val __UNKNOWN_InstrSet : unit -> InstrSet + +let __UNKNOWN_InstrSet () = InstrSet_A64 + +val Hint_Prefetch : list bitU -> PrefetchHint -> ii -> bool -> unit + +let Hint_Prefetch address hint target stream = () + +val Hint_Branch : BranchType -> unit + +let Hint_Branch hint = () + +val HaveAnyAArch32 : unit -> bool + +let HaveAnyAArch32 () = false + +val __UNKNOWN_Fault : unit -> Fault + +let __UNKNOWN_Fault () = Fault_None + +val __UNKNOWN_boolean : unit -> bool + +let __UNKNOWN_boolean () = false + +val Unreachable : unit -> M unit + +let Unreachable () = assert_exp false "FALSE" + +val RBankSelect : list bitU -> ii -> ii -> ii -> ii -> ii -> ii -> ii -> M ii + +let RBankSelect mode usr fiq irq svc abt und hyp = + let (result : ii) = undefined_int () in + let _pat_ = mode in + (if (eq_vec _pat_ M32_User) + then + let (result : ii) = usr in + return result + else + (if (eq_vec _pat_ M32_FIQ) + then + let (result : ii) = fiq in + return result + else + (if (eq_vec _pat_ M32_IRQ) + then + let (result : ii) = irq in + return result + else + (if (eq_vec _pat_ M32_Svc) + then + let (result : ii) = svc in + return result + else + (if (eq_vec _pat_ M32_Abort) + then + let (result : ii) = abt in + return result + else + (if (eq_vec _pat_ M32_Hyp) + then + let (result : ii) = hyp in + return result + else + (if (eq_vec _pat_ M32_Undef) + then + let (result : ii) = und in + return result + else + (if (eq_vec _pat_ M32_System) + then + let (result : ii) = usr in + return result + else + Unreachable () >> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) 
>>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result + +val TakeUnmaskedPhysicalSErrorInterrupts : bool -> M unit + +let TakeUnmaskedPhysicalSErrorInterrupts iesb_req = assert_exp false "FALSE" + +val __UNKNOWN_Exception : unit -> Exception + +let __UNKNOWN_Exception () = Exception_Uncategorized + +val ErrorSynchronizationBarrier : MBReqDomain -> MBReqTypes -> unit + +let ErrorSynchronizationBarrier domain types = () + +val EndOfInstruction : unit -> unit + +let EndOfInstruction () = () + +let (EL3 : list bitU) = [B1;B1] + +let (EL2 : list bitU) = [B1;B0] + +let (EL1 : list bitU) = [B0;B1] + +let (EL0 : list bitU) = [B0;B0] + +val __UNKNOWN_DeviceType : unit -> DeviceType + +let __UNKNOWN_DeviceType () = DeviceType_GRE + +let (DebugException_VectorCatch : list bitU) = [B0;B1;B0;B1] + +val __UNKNOWN_Constraint : unit -> Constraint + +let __UNKNOWN_Constraint () = Constraint_NONE + +val ConstrainUnpredictable : Unpredictable -> Constraint + +let ConstrainUnpredictable which = + match which with + | Unpredictable_WBOVERLAPLD -> Constraint_WBSUPPRESS + | Unpredictable_WBOVERLAPST -> Constraint_NONE + | Unpredictable_LDPOVERLAP -> Constraint_UNDEF + | Unpredictable_BASEOVERLAP -> Constraint_NONE + | Unpredictable_DATAOVERLAP -> Constraint_NONE + | Unpredictable_DEVPAGE2 -> Constraint_FAULT + | Unpredictable_INSTRDEVICE -> Constraint_NONE + | Unpredictable_RESCPACR -> Constraint_UNKNOWN + | Unpredictable_RESMAIR -> Constraint_UNKNOWN + | Unpredictable_RESTEXCB -> Constraint_UNKNOWN + | Unpredictable_RESDACR -> Constraint_UNKNOWN + | Unpredictable_RESPRRR -> Constraint_UNKNOWN + | Unpredictable_RESVTCRS -> Constraint_UNKNOWN + | Unpredictable_RESTnSZ -> Constraint_FORCE + | Unpredictable_OORTnSZ -> Constraint_FORCE + | Unpredictable_LARGEIPA -> Constraint_FORCE + | Unpredictable_ESRCONDPASS -> Constraint_FALSE + | Unpredictable_ILZEROIT -> Constraint_FALSE + | Unpredictable_ILZEROT -> Constraint_FALSE + | Unpredictable_BPVECTORCATCHPRI -> Constraint_TRUE + | Unpredictable_VCMATCHHALF -> Constraint_FALSE + | Unpredictable_VCMATCHDAPA -> Constraint_FALSE + | Unpredictable_WPMASKANDBAS -> Constraint_FALSE + | Unpredictable_WPBASCONTIGUOUS -> Constraint_FALSE + | Unpredictable_RESWPMASK -> Constraint_DISABLED + | Unpredictable_WPMASKEDBITS -> Constraint_FALSE + | Unpredictable_RESBPWPCTRL -> Constraint_DISABLED + | Unpredictable_BPNOTIMPL -> Constraint_DISABLED + | Unpredictable_RESBPTYPE -> Constraint_DISABLED + | Unpredictable_BPNOTCTXCMP -> Constraint_DISABLED + | Unpredictable_BPMATCHHALF -> Constraint_FALSE + | Unpredictable_BPMISMATCHHALF -> Constraint_FALSE + | Unpredictable_RESTARTALIGNPC -> Constraint_FALSE + | Unpredictable_RESTARTZEROUPPERPC -> Constraint_TRUE + | Unpredictable_ZEROUPPER -> Constraint_TRUE + | Unpredictable_ERETZEROUPPERPC -> Constraint_TRUE + | Unpredictable_A32FORCEALIGNPC -> Constraint_FALSE + | Unpredictable_SMD -> Constraint_UNDEF + | Unpredictable_AFUPDATE -> Constraint_TRUE + | Unpredictable_IESBinDebug -> Constraint_TRUE + | Unpredictable_CLEARERRITEZERO -> Constraint_FALSE + end + +val ClearExclusiveByAddress : FullAddress -> ii -> ii -> unit + +let ClearExclusiveByAddress paddress processorid size = () + +val __UNKNOWN_AccType : unit -> AccType + +let __UNKNOWN_AccType () = AccType_NORMAL + +val CreateAccessDescriptor : AccType -> AccessDescriptor + +let 
CreateAccessDescriptor acctype = + let (accdesc : AccessDescriptor) = undefined_AccessDescriptor () in + let (accdesc : AccessDescriptor) = <|accdesc with AccessDescriptor_acctype = acctype|> in + let (accdesc : AccessDescriptor) = <|accdesc with AccessDescriptor_page_table_walk = false|> in + accdesc + +val AArch64_CreateFaultRecord : Fault -> list bitU -> ii -> AccType -> bool -> list bitU -> list bitU -> bool -> bool -> FaultRecord + +let AArch64_CreateFaultRecord typ ipaddress level acctype write extflag errortype secondstage s2fs1walk = + let (fault : FaultRecord) = undefined_FaultRecord () in + let (fault : FaultRecord) = <|fault with FaultRecord_typ = typ|> in + let (fault : FaultRecord) = + <|fault with FaultRecord_domain = (undefined_vector (4:ii) (undefined_bit ()) : list bitU)|> in + let (fault : FaultRecord) = + <|fault with FaultRecord_debugmoe = (undefined_vector (4:ii) (undefined_bit ()) : list bitU)|> in + let (fault : FaultRecord) = <|fault with FaultRecord_errortype = errortype|> in + let (fault : FaultRecord) = <|fault with FaultRecord_ipaddress = ipaddress|> in + let (fault : FaultRecord) = <|fault with FaultRecord_level = level|> in + let (fault : FaultRecord) = <|fault with FaultRecord_acctype = acctype|> in + let (fault : FaultRecord) = <|fault with FaultRecord_write = write|> in + let (fault : FaultRecord) = <|fault with FaultRecord_extflag = extflag|> in + let (fault : FaultRecord) = <|fault with FaultRecord_secondstage = secondstage|> in + let (fault : FaultRecord) = <|fault with FaultRecord_s2fs1walk = s2fs1walk|> in + fault + +val AArch64_AlignmentFault : AccType -> bool -> bool -> FaultRecord + +let AArch64_AlignmentFault acctype iswrite secondstage = + let (ipaddress : bits ty52) = (undefined_vector (52:ii) (undefined_bit ()) : list bitU) in + let (level : ii) = undefined_int () in + let (extflag : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + let (errortype : bits ty2) = (undefined_vector (2:ii) (undefined_bit ()) : list bitU) in + let (s2fs1walk : bool) = undefined_bool () in + AArch64_CreateFaultRecord + Fault_Alignment + ipaddress + level + acctype + iswrite + extflag + errortype + secondstage + s2fs1walk + +val aget_SP : integer -> unit -> M (list bitU) + +let aget_SP (width__tv : integer) () = + assert_exp (or_bool + (eq width__tv (8:ii)) + (or_bool + (eq width__tv (16:ii)) + (or_bool (eq width__tv (32:ii)) (eq width__tv (64:ii))))) "((width == 8) || ((width == 16) || ((width == 32) || (width == 64))))" >> + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + if (eq_vec w__0.ProcState_SP ([B0])) + then + (read_reg SP_EL0_ref : M (list bitU)) >>= fun (w__1 : bits ty64) -> + return (slice w__1 (0:ii) width__tv : list bitU) + else + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + let p__288 = w__2.ProcState_EL in + let _pat_ = p__288 in + if (eq_vec _pat_ EL0) + then + (read_reg SP_EL0_ref : M (list bitU)) >>= fun (w__3 : bits ty64) -> + return (slice w__3 (0:ii) width__tv : list bitU) + else + if (eq_vec _pat_ EL1) + then + (read_reg SP_EL1_ref : M (list bitU)) >>= fun (w__4 : bits ty64) -> + return (slice w__4 (0:ii) width__tv : list bitU) + else + if (eq_vec _pat_ EL2) + then + (read_reg SP_EL2_ref : M (list bitU)) >>= fun (w__5 : bits ty64) -> + return (slice w__5 (0:ii) width__tv : list bitU) + else + (read_reg SP_EL3_ref : M (list bitU)) >>= fun (w__6 : bits ty64) -> + return (slice w__6 (0:ii) width__tv : list bitU) + +val __IMPDEF_boolean : string -> bool + +let __IMPDEF_boolean x = + if (eq x "Condition valid for 
trapped T32") + then true + else + if (eq x "Has Dot Product extension") + then true + else + if (eq x "Has RAS extension") + then true + else + if (eq x "Has SHA512 and SHA3 Crypto instructions") + then true + else + if (eq x "Has SM3 and SM4 Crypto instructions") + then true + else + if (eq x "Has basic Crypto instructions") + then true + else + if (eq x "Have CRC extension") + then true + else + if (eq x "Report I-cache maintenance fault in IFSR") + then true + else + if (eq x "Reserved Control Space EL0 Trapped") + then true + else + if (eq x "Translation fault on misprogrammed contiguous bit") + then true + else + if (eq x "UNDEF unallocated CP15 access at NS EL0") + then true + else if (eq x "UNDEF unallocated CP15 access at NS EL0") then true else false + +val ThisInstrLength : unit -> M ii + +let ThisInstrLength () = + read_reg __ThisInstrEnc_ref >>= fun (w__0 : __InstrEnc) -> + return (if (eq w__0 __T16) + then (16:ii) + else (32:ii)) + +val HaveEL : list bitU -> bool + +let HaveEL el = if (or_bool (eq_vec el EL1) (eq_vec el EL0)) then true else true + +val HighestEL : unit -> list bitU + +let HighestEL () = if (HaveEL EL3) then EL3 else if (HaveEL EL2) then EL2 else EL1 + +val HasArchVersion : ArchVersion -> bool + +let HasArchVersion version = + or_bool + (eq version ARMv8p0) + (or_bool (eq version ARMv8p1) (or_bool (eq version ARMv8p2) (eq version ARMv8p3))) + +val HaveVirtHostExt : unit -> bool + +let HaveVirtHostExt () = HasArchVersion ARMv8p1 + +val HaveUAOExt : unit -> bool + +let HaveUAOExt () = HasArchVersion ARMv8p2 + +val HaveRASExt : unit -> bool + +let HaveRASExt () = or_bool (HasArchVersion ARMv8p2) (__IMPDEF_boolean "Has RAS extension") + +val HavePANExt : unit -> bool + +let HavePANExt () = HasArchVersion ARMv8p1 + +val HavePACExt : unit -> bool + +let HavePACExt () = HasArchVersion ARMv8p3 + +val ConstrainUnpredictableBool : Unpredictable -> M bool + +let ConstrainUnpredictableBool which = + let (c : Constraint) = ConstrainUnpredictable which in + assert_exp (or_bool (eq c Constraint_TRUE) (eq c Constraint_FALSE)) "((c == Constraint_TRUE) || (c == Constraint_FALSE))" >> + return (eq c Constraint_TRUE) + +val LookUpRIndex : ii -> list bitU -> M ii + +let LookUpRIndex n mode = + assert_exp (and_bool (gteq n (0:ii)) (lteq n (14:ii))) "((n >= 0) && (n <= 14))" >> + let (result : ii) = undefined_int () in + let v_0 = n in + (if (eq v_0 (8:ii)) + then + RBankSelect mode (8:ii) (24:ii) (8:ii) (8:ii) (8:ii) (8:ii) (8:ii) >>= fun (w__0 : ii) -> + let (result : ii) = w__0 in + return result + else + (if (eq v_0 (9:ii)) + then + RBankSelect mode (9:ii) (25:ii) (9:ii) (9:ii) (9:ii) (9:ii) (9:ii) >>= fun (w__1 : ii) -> + let (result : ii) = w__1 in + return result + else + (if (eq v_0 (10:ii)) + then + RBankSelect mode (10:ii) (26:ii) (10:ii) (10:ii) (10:ii) (10:ii) (10:ii) >>= fun (w__2 : + ii) -> + let (result : ii) = w__2 in + return result + else + (if (eq v_0 (11:ii)) + then + RBankSelect mode (11:ii) (27:ii) (11:ii) (11:ii) (11:ii) (11:ii) (11:ii) >>= fun (w__3 : + ii) -> + let (result : ii) = w__3 in + return result + else + (if (eq v_0 (12:ii)) + then + RBankSelect mode (12:ii) (28:ii) (12:ii) (12:ii) (12:ii) (12:ii) (12:ii) >>= fun (w__4 : + ii) -> + let (result : ii) = w__4 in + return result + else + (if (eq v_0 (13:ii)) + then + RBankSelect mode (13:ii) (29:ii) (17:ii) (19:ii) (21:ii) (23:ii) (15:ii) >>= fun (w__5 : + ii) -> + let (result : ii) = w__5 in + return result + else + (if (eq v_0 (14:ii)) + then + RBankSelect mode (14:ii) (30:ii) (16:ii) (18:ii) (20:ii) 
(22:ii) (14:ii) >>= fun (w__6 : + ii) -> + let (result : ii) = w__6 in + return result + else + let (result : ii) = n in + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result) >>= fun (result : ii) -> + return result + +val AArch32_ExceptionClass : Exception -> M (ii * list bitU) + +let AArch32_ExceptionClass typ = + ThisInstrLength () >>= fun (w__0 : ii) -> + let (il : bits ty1) = if (eq (ex_int w__0) (32:ii)) then [B1] else [B0] in + let (ec : ii) = undefined_int () in + match typ with + | Exception_Uncategorized -> + let (ec : ii) = (0:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_WFxTrap -> + let (ec : ii) = (1:ii) in + return (ec,il) + | Exception_CP15RTTrap -> + let (ec : ii) = (3:ii) in + return (ec,il) + | Exception_CP15RRTTrap -> + let (ec : ii) = (4:ii) in + return (ec,il) + | Exception_CP14RTTrap -> + let (ec : ii) = (5:ii) in + return (ec,il) + | Exception_CP14DTTrap -> + let (ec : ii) = (6:ii) in + return (ec,il) + | Exception_AdvSIMDFPAccessTrap -> + let (ec : ii) = (7:ii) in + return (ec,il) + | Exception_FPIDTrap -> + let (ec : ii) = (8:ii) in + return (ec,il) + | Exception_CP14RRTTrap -> + let (ec : ii) = (12:ii) in + return (ec,il) + | Exception_IllegalState -> + let (ec : ii) = (14:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_SupervisorCall -> + let (ec : ii) = (17:ii) in + return (ec,il) + | Exception_HypervisorCall -> + let (ec : ii) = (18:ii) in + return (ec,il) + | Exception_MonitorCall -> + let (ec : ii) = (19:ii) in + return (ec,il) + | Exception_InstructionAbort -> + let (ec : ii) = (32:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_PCAlignment -> + let (ec : ii) = (34:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_DataAbort -> + let (ec : ii) = (36:ii) in + return (ec,il) + | Exception_FPTrappedException -> + let (ec : ii) = (40:ii) in + return (ec,il) + | _ -> + Unreachable () >> + return (ec,il) + end >>= fun ((ec : ii), (il : bits ty1)) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + let (ec : ii) = + if (and_bool + (or_bool (eq (ex_int ec) (32:ii)) (eq (ex_int ec) (36:ii))) + (eq_vec w__1.ProcState_EL EL2)) + then + let (ec : ii) = integerAdd (ex_int ec) (1:ii) in + ec + else ec in + return (ec,il) + +val EncodeLDFSC : Fault -> ii -> M (list bitU) + +let EncodeLDFSC typ level = + let (result : bits ty6) = (undefined_vector (6:ii) (undefined_bit ()) : list bitU) in + match typ with + | Fault_AddressSize -> + let result = + (concat_vec ([B0;B0;B0;B0]) (__GetSlice_int (2:ii) level (0:ii) : list bitU) : list bitU) in + assert_exp (or_bool + (eq level (0:ii)) + (or_bool (eq level (1:ii)) (or_bool (eq level (2:ii)) (eq level (3:ii))))) "((level == 0) || ((level == 1) || ((level == 2) || (level == 3))))" >> + return result + | Fault_AccessFlag -> + let result = + (concat_vec ([B0;B0;B1;B0]) (__GetSlice_int (2:ii) level (0:ii) : list bitU) : list bitU) in + assert_exp (or_bool (eq level (1:ii)) (or_bool (eq level (2:ii)) (eq level (3:ii)))) "((level == 1) || ((level == 2) || (level == 3)))" >> + return result + | Fault_Permission -> + let result = + (concat_vec ([B0;B0;B1;B1]) (__GetSlice_int (2:ii) level (0:ii) : list bitU) : list bitU) in + assert_exp (or_bool (eq level (1:ii)) (or_bool (eq level (2:ii)) (eq level (3:ii)))) "((level == 1) || ((level == 
2) || (level == 3)))" >> + return result + | Fault_Translation -> + let result = + (concat_vec ([B0;B0;B0;B1]) (__GetSlice_int (2:ii) level (0:ii) : list bitU) : list bitU) in + assert_exp (or_bool + (eq level (0:ii)) + (or_bool (eq level (1:ii)) (or_bool (eq level (2:ii)) (eq level (3:ii))))) "((level == 0) || ((level == 1) || ((level == 2) || (level == 3))))" >> + return result + | Fault_SyncExternal -> + let (result : bits ty6) = [B0;B1;B0;B0;B0;B0] in + return result + | Fault_SyncExternalOnWalk -> + let result = + (concat_vec ([B0;B1;B0;B1]) (__GetSlice_int (2:ii) level (0:ii) : list bitU) : list bitU) in + assert_exp (or_bool + (eq level (0:ii)) + (or_bool (eq level (1:ii)) (or_bool (eq level (2:ii)) (eq level (3:ii))))) "((level == 0) || ((level == 1) || ((level == 2) || (level == 3))))" >> + return result + | Fault_SyncParity -> + let (result : bits ty6) = [B0;B1;B1;B0;B0;B0] in + return result + | Fault_SyncParityOnWalk -> + let result = + (concat_vec ([B0;B1;B1;B1]) (__GetSlice_int (2:ii) level (0:ii) : list bitU) : list bitU) in + assert_exp (or_bool + (eq level (0:ii)) + (or_bool (eq level (1:ii)) (or_bool (eq level (2:ii)) (eq level (3:ii))))) "((level == 0) || ((level == 1) || ((level == 2) || (level == 3))))" >> + return result + | Fault_AsyncParity -> + let (result : bits ty6) = [B0;B1;B1;B0;B0;B1] in + return result + | Fault_AsyncExternal -> + let (result : bits ty6) = [B0;B1;B0;B0;B0;B1] in + return result + | Fault_Alignment -> + let (result : bits ty6) = [B1;B0;B0;B0;B0;B1] in + return result + | Fault_Debug -> + let (result : bits ty6) = [B1;B0;B0;B0;B1;B0] in + return result + | Fault_TLBConflict -> + let (result : bits ty6) = [B1;B1;B0;B0;B0;B0] in + return result + | Fault_Lockdown -> + let (result : bits ty6) = [B1;B1;B0;B1;B0;B0] in + return result + | Fault_Exclusive -> + let (result : bits ty6) = [B1;B1;B0;B1;B0;B1] in + return result + | _ -> + Unreachable () >> + return result + end >>= fun (result : bits ty6) -> + return result + +val BigEndianReverse : list bitU -> M (list bitU) + +let rec BigEndianReverse value_name = + assert_exp (or_bool + (eq (length value_name) (8:ii)) + (or_bool + (eq (length value_name) (16:ii)) + (or_bool + (eq (length value_name) (32:ii)) + (or_bool (eq (length value_name) (64:ii)) (eq (length value_name) (128:ii)))))) "" >> + let half = integerDiv (length value_name) (2:ii) in + assert_exp true "" >> + if (eq (length value_name) (8:ii)) + then return value_name + else + (BigEndianReverse (slice value_name (0:ii) half : list bitU) : M (list bitU)) >>= fun (w__0 : + list bitU) -> + (BigEndianReverse (slice value_name half (integerMinus (length value_name) half) : list bitU) : M (list bitU)) >>= fun (w__1 : + list bitU) -> + return (concat_vec w__0 w__1 : list bitU) + +val AArch32_ReportHypEntry : ExceptionRecord -> M unit + +let AArch32_ReportHypEntry exception = + let (typ : Exception) = exception.ExceptionRecord_typ in + let (il : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + let (ec : ii) = undefined_int () in + (AArch32_ExceptionClass typ : M ((ii * list bitU))) >>= fun (tup__0, tup__1) -> + let ec = tup__0 in + let il = tup__1 in + let (iss : bits ty25) = exception.ExceptionRecord_syndrome in + let (il : bits ty1) = + if (and_bool + (or_bool (eq (ex_int ec) (36:ii)) (eq (ex_int ec) (37:ii))) + (eq_vec ([access_vec_dec iss (24:ii)]) ([B0]))) + then + let (il : bits ty1) = [B1] in + il + else il in + write_reg + HSR_ref + (concat_vec (concat_vec (__GetSlice_int (6:ii) ec (0:ii) : list bitU) il : list 
bitU) iss : list bitU) >> + (if (or_bool (eq typ Exception_InstructionAbort) (eq typ Exception_PCAlignment)) + then + write_reg HIFAR_ref (slice exception.ExceptionRecord_vaddress (0:ii) (32:ii) : list bitU) >> + write_reg HDFAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) + else + if (eq typ Exception_DataAbort) + then + write_reg HIFAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HDFAR_ref (slice exception.ExceptionRecord_vaddress (0:ii) (32:ii) : list bitU) + else return ()) >> + if exception.ExceptionRecord_ipavalid + then + (read_reg HPFAR_ref : M (list bitU)) >>= fun (w__0 : list bitU) -> + write_reg + HPFAR_ref + (set_slice + (32:ii) + (28:ii) + w__0 + (4:ii) + (slice exception.ExceptionRecord_ipaddress (12:ii) (28:ii) : list bitU) : list bitU) + else + (read_reg HPFAR_ref : M (list bitU)) >>= fun (w__1 : list bitU) -> + write_reg + HPFAR_ref + (set_slice + (32:ii) + (28:ii) + w__1 + (4:ii) + (undefined_vector (28:ii) (undefined_bit ()) : list bitU) : list bitU) + +val Replicate : integer -> list bitU -> M (list bitU) + +let Replicate (N__tv : integer) x = + assert_exp (eq (ex_int (hardware_mod N__tv (length x))) (0:ii)) "((N MOD M) == 0)" >> + let p = ex_int (integerDiv N__tv (length x)) in + assert_exp true "" >> + return (replicate_bits x p : list bitU) + +val Zeros__0 : integer -> list bitU + +val Zeros__1 : integer -> unit -> list bitU + +let Zeros__0 N = (replicate_bits ([B0]) N : list bitU) + +let Zeros__1 (N__tv : integer) () = (Zeros__0 N__tv : list bitU) + +val ZeroExtend__0 : list bitU -> integer -> M (list bitU) + +val ZeroExtend__1 : integer -> list bitU -> M (list bitU) + +let ZeroExtend__0 x N = + assert_exp (gteq N (length x)) "" >> + return (concat_vec (Zeros__0 (integerMinus N (length x)) : list bitU) x : list bitU) + +let ZeroExtend__1 (N__tv : integer) x = (ZeroExtend__0 x N__tv : M (list bitU)) + +val aset_SP : list bitU -> M unit + +let aset_SP value_name = + assert_exp (or_bool (eq (length value_name) (32:ii)) (eq (length value_name) (64:ii))) "((width == 32) || (width == 64))" >> + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + if (eq_vec w__0.ProcState_SP ([B0])) + then + (ZeroExtend__1 (64:ii) value_name : M (list bitU)) >>= fun (w__1 : bits ty64) -> + write_reg SP_EL0_ref w__1 + else + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + let p__287 = w__2.ProcState_EL in + let _pat_ = p__287 in + if (eq_vec _pat_ EL0) + then + (ZeroExtend__1 (64:ii) value_name : M (list bitU)) >>= fun (w__3 : bits ty64) -> + write_reg SP_EL0_ref w__3 + else + if (eq_vec _pat_ EL1) + then + (ZeroExtend__1 (64:ii) value_name : M (list bitU)) >>= fun (w__4 : bits ty64) -> + write_reg SP_EL1_ref w__4 + else + if (eq_vec _pat_ EL2) + then + (ZeroExtend__1 (64:ii) value_name : M (list bitU)) >>= fun (w__5 : bits ty64) -> + write_reg SP_EL2_ref w__5 + else + (ZeroExtend__1 (64:ii) value_name : M (list bitU)) >>= fun (w__6 : bits ty64) -> + write_reg SP_EL3_ref w__6 + +val LSL_C : list bitU -> ii -> M (list bitU * list bitU) + +let LSL_C x shift = + assert_exp (gt shift (0:ii)) "(shift > 0)" >> + let extended_x = (concat_vec x (Zeros__0 shift : list bitU) : list bitU) in + let result = (slice extended_x (0:ii) (length x) : list bitU) in + let (carry_out : bits ty1) = [access_vec_dec extended_x (length result)] in + return (result,carry_out) + +val LSL : list bitU -> ii -> M (list bitU) + +let LSL x shift = + assert_exp (gteq shift (0:ii)) "(shift >= 0)" >> + let (__anon1 : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) 
: list bitU) in + let result = (undefined_vector (length x) (undefined_bit ()) : list bitU) in + (if (eq shift (0:ii)) + then + let result = x in + return (__anon1,result) + else + (LSL_C x shift : M ((list bitU * list bitU))) >>= fun (tup__0, tup__1) -> + let result = tup__0 in + let (__anon1 : bits ty1) = tup__1 in + return (__anon1,result)) >>= fun ((__anon1 : bits ty1), result) -> + return result + +val LSInstructionSyndrome : unit -> M (list bitU) + +let LSInstructionSyndrome () = + assert_exp false "FALSE" >> + return (Zeros__0 (11:ii) : list bitU) + +val IsZero : list bitU -> bool + +let IsZero x = eq_vec x (Zeros__0 (length x) : list bitU) + +val AddWithCarry : list bitU -> list bitU -> list bitU -> (list bitU * list bitU) + +let AddWithCarry x y carry_in = + let (unsigned_sum : ii) = integerAdd (integerAdd (uint x) (uint y)) (uint carry_in) in + let (signed_sum : ii) = integerAdd (integerAdd (sint x) (sint y)) (uint carry_in) in + let result = (__GetSlice_int (length x) unsigned_sum (0:ii) : list bitU) in + let (n : bits ty1) = [access_vec_dec result (integerMinus (length result) (1:ii))] in + let (z : bits ty1) = if (IsZero result) then [B1] else [B0] in + let (c : bits ty1) = if (eq (uint result) (ex_int unsigned_sum)) then [B0] else [B1] in + let (v : bits ty1) = if (eq (sint result) (ex_int signed_sum)) then [B0] else [B1] in + (result,(concat_vec (concat_vec (concat_vec n z : list bitU) c : list bitU) v : list bitU)) + +val GetPSRFromPSTATE : unit -> M (list bitU) + +let GetPSRFromPSTATE () = + let (spsr : bits ty32) = (Zeros__1 (32:ii) () : list bitU) in + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (31:ii) (31:ii) w__0.ProcState_N : list bitU) in + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (30:ii) (30:ii) w__1.ProcState_Z : list bitU) in + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (29:ii) (29:ii) w__2.ProcState_C : list bitU) in + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (28:ii) (28:ii) w__3.ProcState_V : list bitU) in + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (21:ii) (21:ii) w__4.ProcState_SS : list bitU) in + read_reg PSTATE_ref >>= fun (w__5 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (20:ii) (20:ii) w__5.ProcState_IL : list bitU) in + read_reg PSTATE_ref >>= fun (w__6 : ProcState) -> + (if (eq_vec w__6.ProcState_nRW ([B1])) + then + read_reg PSTATE_ref >>= fun (w__7 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (27:ii) (27:ii) w__7.ProcState_Q : list bitU) in + read_reg PSTATE_ref >>= fun (w__8 : ProcState) -> + let spsr = + (update_subrange_vec_dec + spsr + (26:ii) + (25:ii) + (subrange_vec_dec w__8.ProcState_IT (1:ii) (0:ii) : list bitU) : list bitU) in + read_reg PSTATE_ref >>= fun (w__9 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (19:ii) (16:ii) w__9.ProcState_GE : list bitU) in + read_reg PSTATE_ref >>= fun (w__10 : ProcState) -> + let spsr = + (update_subrange_vec_dec + spsr + (15:ii) + (10:ii) + (subrange_vec_dec w__10.ProcState_IT (7:ii) (2:ii) : list bitU) : list bitU) in + read_reg PSTATE_ref >>= fun (w__11 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (9:ii) (9:ii) w__11.ProcState_E : list bitU) in + read_reg PSTATE_ref >>= fun (w__12 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (8:ii) (8:ii) w__12.ProcState_A : list bitU) in + read_reg 
PSTATE_ref >>= fun (w__13 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (7:ii) (7:ii) w__13.ProcState_I : list bitU) in + read_reg PSTATE_ref >>= fun (w__14 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (6:ii) (6:ii) w__14.ProcState_F : list bitU) in + read_reg PSTATE_ref >>= fun (w__15 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (5:ii) (5:ii) w__15.ProcState_T : list bitU) in + read_reg PSTATE_ref >>= fun (w__16 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__17 : ProcState) -> + assert_exp (eq_vec ([access_vec_dec w__16.ProcState_M (4:ii)]) w__17.ProcState_nRW) "(((PSTATE).M)<4> == (PSTATE).nRW)" >> + read_reg PSTATE_ref >>= fun (w__18 : ProcState) -> + let (spsr : bits ty32) = + (update_subrange_vec_dec spsr (4:ii) (0:ii) w__18.ProcState_M : list bitU) in + return spsr + else + read_reg PSTATE_ref >>= fun (w__19 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (9:ii) (9:ii) w__19.ProcState_D : list bitU) in + read_reg PSTATE_ref >>= fun (w__20 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (8:ii) (8:ii) w__20.ProcState_A : list bitU) in + read_reg PSTATE_ref >>= fun (w__21 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (7:ii) (7:ii) w__21.ProcState_I : list bitU) in + read_reg PSTATE_ref >>= fun (w__22 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (6:ii) (6:ii) w__22.ProcState_F : list bitU) in + read_reg PSTATE_ref >>= fun (w__23 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (4:ii) (4:ii) w__23.ProcState_nRW : list bitU) in + read_reg PSTATE_ref >>= fun (w__24 : ProcState) -> + let spsr = (update_subrange_vec_dec spsr (3:ii) (2:ii) w__24.ProcState_EL : list bitU) in + read_reg PSTATE_ref >>= fun (w__25 : ProcState) -> + let (spsr : bits ty32) = + (update_subrange_vec_dec spsr (0:ii) (0:ii) w__25.ProcState_SP : list bitU) in + return spsr) >>= fun (spsr : bits ty32) -> + return spsr + +val ExceptionSyndrome : Exception -> ExceptionRecord + +let ExceptionSyndrome typ = + let (r : ExceptionRecord) = undefined_ExceptionRecord () in + let (r : ExceptionRecord) = <|r with ExceptionRecord_typ = typ|> in + let (r : ExceptionRecord) = + <|r with ExceptionRecord_syndrome = (Zeros__1 (25:ii) () : list bitU)|> in + let (r : ExceptionRecord) = + <|r with ExceptionRecord_vaddress = (Zeros__1 (64:ii) () : list bitU)|> in + let (r : ExceptionRecord) = <|r with ExceptionRecord_ipavalid = false|> in + let (r : ExceptionRecord) = + <|r with ExceptionRecord_ipaddress = (Zeros__1 (52:ii) () : list bitU)|> in + r + +val SignExtend__0 : list bitU -> integer -> M (list bitU) + +val SignExtend__1 : integer -> list bitU -> M (list bitU) + +let SignExtend__0 x N = + assert_exp (gteq N (length x)) "" >> + return (concat_vec + (replicate_bits + ([access_vec_dec x (integerMinus (length x) (1:ii))]) + (integerMinus N (length x)) : list bitU) + x : list bitU) + +let SignExtend__1 (N__tv : integer) x = (SignExtend__0 x N__tv : M (list bitU)) + +val Ones__0 : integer -> list bitU + +val Ones__1 : integer -> unit -> list bitU + +let Ones__0 N = (replicate_bits ([B1]) N : list bitU) + +let Ones__1 (N__tv : integer) () = (Ones__0 N__tv : list bitU) + +val ExcVectorBase : unit -> M (list bitU) + +let ExcVectorBase () = + (read_reg SCTLR_ref : M (list bitU)) >>= fun (w__0 : bits ty32) -> + if (eq_vec ([access_vec_dec w__0 (13:ii)]) ([B1])) + then return (concat_vec (Ones__0 (16:ii) : list bitU) (Zeros__0 (16:ii) : list bitU) : list bitU) + else + (read_reg VBAR_ref : M (list bitU)) >>= fun (w__1 : bits ty32) -> + return 
(concat_vec (slice w__1 (5:ii) (27:ii) : list bitU) (Zeros__0 (5:ii) : list bitU) : list bitU) + +val Align__0 : ii -> ii -> ii + +val Align__1 : list bitU -> ii -> list bitU + +let Align__0 x y = integerDiv (integerMult y x) y + +let Align__1 x y = (__GetSlice_int (length x) (Align__0 (uint x) y) (0:ii) : list bitU) + +val aset__Mem : AddressDescriptor -> integer -> AccessDescriptor -> list bitU -> M unit + +let aset__Mem desc size accdesc value_name = + assert_exp (or_bool + (eq size (1:ii)) + (or_bool + (eq size (2:ii)) + (or_bool (eq size (4:ii)) (or_bool (eq size (8:ii)) (eq size (16:ii)))))) "((size == 1) || ((size == 2) || ((size == 4) || ((size == 8) || (size == 16)))))" >> + let (address : bits ty52) = desc.AddressDescriptor_paddress.FullAddress_physicaladdress in + assert_exp (eq_vec address (Align__1 address size : list bitU)) "(address == Align(address, size))" >> + if (eq_vec address (hex_slice "0x13000000" (52:ii) (0:ii) : list bitU)) + then + if (eq (uint value_name) (4:ii)) + then + let (_ : unit) = prerr_endline "Program exited by writing ^D to TUBE +" in + exit () + else return (putchar (uint (slice value_name (0:ii) (8:ii) : list bitU))) + else + (read_reg __Memory_ref : M (list bitU)) >>= fun (w__0 : list bitU) -> + write_ram (52:ii) size w__0 address value_name + +val aget__Mem : AddressDescriptor -> integer -> AccessDescriptor -> M (list bitU) + +let aget__Mem desc size accdesc = + assert_exp (or_bool + (eq size (1:ii)) + (or_bool + (eq size (2:ii)) + (or_bool (eq size (4:ii)) (or_bool (eq size (8:ii)) (eq size (16:ii)))))) "((size == 1) || ((size == 2) || ((size == 4) || ((size == 8) || (size == 16)))))" >> + let (address : bits ty52) = desc.AddressDescriptor_paddress.FullAddress_physicaladdress in + assert_exp (eq_vec address (Align__1 address size : list bitU)) "(address == Align(address, size))" >> + (read_reg __Memory_ref : M (list bitU)) >>= fun (w__0 : list bitU) -> + (read_ram (52:ii) size w__0 address : M (list bitU)) + +val aset_X : ii -> list bitU -> M unit + +let aset_X n value_name = + assert_exp (and_bool (gteq (ex_int n) (0:ii)) (lteq (ex_int n) (31:ii))) "((n >= 0) && (n <= 31))" >> + assert_exp (or_bool (eq (length value_name) (32:ii)) (eq (length value_name) (64:ii))) "((width == 32) || (width == 64))" >> + if (neq (ex_int n) (31:ii)) + then + read_reg _R_ref >>= fun (w__0 : list (list bitU)) -> + (ZeroExtend__0 value_name (64:ii) : M (list bitU)) >>= fun (w__1 : list bitU) -> + write_reg _R_ref (update_list_dec w__0 n w__1 : list (list bitU)) + else return () + +val aset_ELR__0 : list bitU -> list bitU -> M unit + +val aset_ELR__1 : list bitU -> M unit + +let aset_ELR__0 el value_name = + let (r : bits ty64) = value_name in + let _pat_ = el in + if (eq_vec _pat_ EL1) + then write_reg ELR_EL1_ref r + else + if (eq_vec _pat_ EL2) + then write_reg ELR_EL2_ref r + else if (eq_vec _pat_ EL3) then write_reg ELR_EL3_ref r else Unreachable () + +let aset_ELR__1 value_name = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + assert_exp (neq_vec w__0.ProcState_EL EL0) "" >> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + aset_ELR__0 w__1.ProcState_EL value_name + +val aget_X : integer -> ii -> M (list bitU) + +let aget_X (width__tv : integer) n = + assert_exp (and_bool (gteq n (0:ii)) (lteq n (31:ii))) "((n >= 0) && (n <= 31))" >> + assert_exp (or_bool + (eq width__tv (8:ii)) + (or_bool + (eq width__tv (16:ii)) + (or_bool (eq width__tv (32:ii)) (eq width__tv (64:ii))))) "((width == 8) || ((width == 16) || ((width == 32) || (width == 64))))" >> + if 
(neq n (31:ii)) + then + read_reg _R_ref >>= fun (w__0 : list (bits ty64)) -> + return (slice (access_list_dec w__0 n : list bitU) (0:ii) width__tv : list bitU) + else return (Zeros__0 width__tv : list bitU) + +val Prefetch : list bitU -> list bitU -> unit + +let Prefetch address prfop = + let (hint : PrefetchHint) = undefined_PrefetchHint () in + let (target : ii) = undefined_int () in + let (stream : bool) = undefined_bool () in + let b__0 = (slice prfop (3:ii) (2:ii) : list bitU) in + let (hint : PrefetchHint) = + if (eq_vec b__0 ([B0;B0])) + then + let (hint : PrefetchHint) = Prefetch_READ in + hint + else + let (hint : PrefetchHint) = + if (eq_vec b__0 ([B0;B1])) + then + let (hint : PrefetchHint) = Prefetch_EXEC in + hint + else + let (hint : PrefetchHint) = + if (eq_vec b__0 ([B1;B0])) + then + let (hint : PrefetchHint) = Prefetch_WRITE in + hint + else hint in + hint in + hint in + let (target : ii) = uint (slice prfop (1:ii) (2:ii) : list bitU) in + let (stream : bool) = neq_vec ([access_vec_dec prfop (0:ii)]) ([B0]) in + let (_ : unit) = Hint_Prefetch address hint target stream in + () + +val IsSecondStage : FaultRecord -> M bool + +let IsSecondStage fault = + assert_exp (neq fault.FaultRecord_typ Fault_None) "((fault).type != Fault_None)" >> + return fault.FaultRecord_secondstage + +val IsFault : AddressDescriptor -> bool + +let IsFault addrdesc = neq addrdesc.AddressDescriptor_fault.FaultRecord_typ Fault_None + +val IsExternalSyncAbort__0 : Fault -> M bool + +val IsExternalSyncAbort__1 : FaultRecord -> M bool + +let IsExternalSyncAbort__0 typ = + assert_exp (neq typ Fault_None) "" >> + return (or_bool + (eq typ Fault_SyncExternal) + (or_bool + (eq typ Fault_SyncParity) + (or_bool (eq typ Fault_SyncExternalOnWalk) (eq typ Fault_SyncParityOnWalk)))) + +let IsExternalSyncAbort__1 fault = IsExternalSyncAbort__0 fault.FaultRecord_typ + +val IsExternalAbort__0 : Fault -> M bool + +val IsExternalAbort__1 : FaultRecord -> M bool + +let IsExternalAbort__0 typ = + assert_exp (neq typ Fault_None) "" >> + return (or_bool + (eq typ Fault_SyncExternal) + (or_bool + (eq typ Fault_SyncParity) + (or_bool + (eq typ Fault_SyncExternalOnWalk) + (or_bool + (eq typ Fault_SyncParityOnWalk) + (or_bool (eq typ Fault_AsyncExternal) (eq typ Fault_AsyncParity)))))) + +let IsExternalAbort__1 fault = IsExternalAbort__0 fault.FaultRecord_typ + +val IsDebugException : FaultRecord -> M bool + +let IsDebugException fault = + assert_exp (neq fault.FaultRecord_typ Fault_None) "((fault).type != Fault_None)" >> + return (eq fault.FaultRecord_typ Fault_Debug) + +val IPAValid : FaultRecord -> M bool + +let IPAValid fault = + assert_exp (neq fault.FaultRecord_typ Fault_None) "((fault).type != Fault_None)" >> + return (if fault.FaultRecord_s2fs1walk + then + or_bool + (eq fault.FaultRecord_typ Fault_AccessFlag) + (or_bool + (eq fault.FaultRecord_typ Fault_Permission) + (or_bool + (eq fault.FaultRecord_typ Fault_Translation) + (eq fault.FaultRecord_typ Fault_AddressSize))) + else + if fault.FaultRecord_secondstage + then + or_bool + (eq fault.FaultRecord_typ Fault_AccessFlag) + (or_bool + (eq fault.FaultRecord_typ Fault_Translation) + (eq fault.FaultRecord_typ Fault_AddressSize)) + else false) + +val aarch64_integer_arithmetic_addsub_immediate : ii -> integer -> list bitU -> ii -> bool -> bool -> M unit + +let aarch64_integer_arithmetic_addsub_immediate d datasize imm n setflags sub_op = + let dbytes = ex_int (integerDiv datasize (8:ii)) in + assert_exp true "datasize constraint" >> + assert_exp true "dbytes constraint" 
>> + let result = (undefined_vector datasize (undefined_bit ()) : list bitU) in + (if (eq n (31:ii)) + then (aget_SP datasize () : M (list bitU)) + else (aget_X datasize n : M (list bitU))) >>= fun operand1 -> + let operand2 = imm in + let (nzcv : bits ty4) = (undefined_vector (4:ii) (undefined_bit ()) : list bitU) in + let (carry_in : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + let ((carry_in : bits ty1), operand2) = + if sub_op + then + let operand2 = (not_vec operand2 : list bitU) in + let (carry_in : bits ty1) = [B1] in + (carry_in,operand2) + else + let (carry_in : bits ty1) = [B0] in + (carry_in,operand2) in + let (tup__0, tup__1) = (AddWithCarry operand1 operand2 carry_in : (list bitU * list bitU)) in + let result = tup__0 in + let nzcv = tup__1 in + (if setflags + then + let (tup__0, tup__1, tup__2, tup__3) = + ((subrange_vec_dec nzcv (3:ii) (3:ii) : list bitU),(subrange_vec_dec nzcv (2:ii) (2:ii) : list bitU),(subrange_vec_dec + nzcv + (1:ii) + (1:ii) : list bitU),(subrange_vec_dec + nzcv + (0:ii) + (0:ii) : list bitU)) in + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + write_reg PSTATE_ref <|w__2 with ProcState_N = tup__0|> >> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + write_reg PSTATE_ref <|w__3 with ProcState_Z = tup__1|> >> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + write_reg PSTATE_ref <|w__4 with ProcState_C = tup__2|> >> + read_reg PSTATE_ref >>= fun (w__5 : ProcState) -> + write_reg PSTATE_ref <|w__5 with ProcState_V = tup__3|> + else return ()) >> + if (and_bool (eq d (31:ii)) (not setflags)) + then aset_SP result + else aset_X d result + +val HighestELUsingAArch32 : unit -> bool + +let HighestELUsingAArch32 () = if (not (HaveAnyAArch32 ())) then false else false + +val aget_SCR_GEN : unit -> M (list bitU) + +let aget_SCR_GEN () = + assert_exp (HaveEL EL3) "HaveEL(EL3)" >> + let (r : bits ty32) = (undefined_vector (32:ii) (undefined_bit ()) : list bitU) in + (if (HighestELUsingAArch32 ()) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__0 : bits ty32) -> + let (r : bits ty32) = w__0 in + return r + else + (read_reg SCR_EL3_ref : M (list bitU)) >>= fun (w__1 : bits ty32) -> + let (r : bits ty32) = w__1 in + return r) >>= fun (r : bits ty32) -> + return r + +val IsSecureBelowEL3 : unit -> M bool + +let IsSecureBelowEL3 () = + if (HaveEL EL3) + then + (aget_SCR_GEN () : M (list bitU)) >>= fun (w__0 : list bitU) -> + return (eq_vec ([access_vec_dec w__0 (0:ii)]) ([B0])) + else return (if (HaveEL EL2) then false else false) + +val UsingAArch32 : unit -> M bool + +let UsingAArch32 () = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + let (aarch32 : bool) = eq_vec w__0.ProcState_nRW ([B1]) in + (if (not (HaveAnyAArch32 ())) + then assert_exp (not aarch32) "!(aarch32)" + else return ()) >> + (if (HighestELUsingAArch32 ()) + then assert_exp aarch32 "aarch32" + else return ()) >> + return aarch32 + +val aset_SPSR : list bitU -> M unit + +let aset_SPSR value_name = + UsingAArch32 () >>= fun (w__0 : bool) -> + if w__0 + then + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + let p__286 = w__1.ProcState_M in + let _pat_ = p__286 in + if (eq_vec _pat_ M32_FIQ) + then write_reg SPSR_fiq_ref value_name + else + if (eq_vec _pat_ M32_IRQ) + then write_reg SPSR_irq_ref value_name + else + if (eq_vec _pat_ M32_Svc) + then write_reg SPSR_svc_ref value_name + else + if (eq_vec _pat_ M32_Monitor) + then write_reg SPSR_mon_ref value_name + else + if (eq_vec _pat_ M32_Abort) + then write_reg SPSR_abt_ref value_name + else + 
if (eq_vec _pat_ M32_Hyp) + then write_reg SPSR_hyp_ref value_name + else + if (eq_vec _pat_ M32_Undef) + then write_reg SPSR_und_ref value_name + else Unreachable () + else + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + let p__285 = w__2.ProcState_EL in + let _pat_ = p__285 in + if (eq_vec _pat_ EL1) + then write_reg SPSR_EL1_ref value_name + else + if (eq_vec _pat_ EL2) + then write_reg SPSR_EL2_ref value_name + else if (eq_vec _pat_ EL3) then write_reg SPSR_EL3_ref value_name else Unreachable () + +val IsSecure : unit -> M bool + +let IsSecure () = + UsingAArch32 () >>= fun (w__0 : bool) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + if (and_bool (and_bool (HaveEL EL3) (not w__0)) (eq_vec w__1.ProcState_EL EL3)) + then return true + else + UsingAArch32 () >>= fun (w__2 : bool) -> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + if (and_bool (and_bool (HaveEL EL3) w__2) (eq_vec w__3.ProcState_M M32_Monitor)) + then return true + else IsSecureBelowEL3 () + +val CurrentInstrSet : unit -> M InstrSet + +let CurrentInstrSet () = + let (result : InstrSet) = undefined_InstrSet () in + UsingAArch32 () >>= fun (w__0 : bool) -> + (if w__0 + then + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + let (result : InstrSet) = + if (eq_vec w__1.ProcState_T ([B0])) + then InstrSet_A32 + else InstrSet_T32 in + return result + else + let (result : InstrSet) = InstrSet_A64 in + return result) >>= fun (result : InstrSet) -> + return result + +val AArch32_ExecutingCP10or11Instr : unit -> M bool + +let AArch32_ExecutingCP10or11Instr () = + (ThisInstr () : M (list bitU)) >>= fun (instr : bits ty32) -> + CurrentInstrSet () >>= fun (instr_set : InstrSet) -> + assert_exp (or_bool (eq instr_set InstrSet_A32) (eq instr_set InstrSet_T32)) "((instr_set == InstrSet_A32) || (instr_set == InstrSet_T32))" >> + return (if (eq instr_set InstrSet_A32) + then + and_bool + (or_bool + (eq_vec (slice instr (24:ii) (4:ii) : list bitU) ([B1;B1;B1;B0])) + (eq_vec (slice instr (25:ii) (3:ii) : list bitU) ([B1;B1;B0]))) + (eq_vec + (and_vec (slice instr (8:ii) (4:ii) : list bitU) ([B1;B1;B1;B0]) : list bitU) + ([B1;B0;B1;B0])) + else + and_bool + (and_bool + (eq_vec + (and_vec (slice instr (28:ii) (4:ii) : list bitU) ([B1;B1;B1;B0]) : list bitU) + ([B1;B1;B1;B0])) + (or_bool + (eq_vec (slice instr (24:ii) (4:ii) : list bitU) ([B1;B1;B1;B0])) + (eq_vec (slice instr (25:ii) (3:ii) : list bitU) ([B1;B1;B0])))) + (eq_vec + (and_vec (slice instr (8:ii) (4:ii) : list bitU) ([B1;B1;B1;B0]) : list bitU) + ([B1;B0;B1;B0]))) + +val HaveAArch32EL : list bitU -> bool + +let HaveAArch32EL el = + if (not (HaveEL el)) + then false + else + if (not (HaveAnyAArch32 ())) + then false + else + if (HighestELUsingAArch32 ()) + then true + else + if (eq_vec el (HighestEL () : list bitU)) + then false + else if (eq_vec el EL0) then true else true + +val ELStateUsingAArch32K : list bitU -> bool -> M (bool * bool) + +let ELStateUsingAArch32K el secure = + let (aarch32 : bool) = undefined_bool () in + let (known : bool) = true in + let (aarch32_at_el1 : bool) = undefined_bool () in + let (aarch32_below_el3 : bool) = undefined_bool () in + (if (not (HaveAArch32EL el)) + then + let (aarch32 : bool) = false in + return (aarch32,aarch32_at_el1,aarch32_below_el3,known) + else + (if (HighestELUsingAArch32 ()) + then + let (aarch32 : bool) = true in + return (aarch32,aarch32_at_el1,aarch32_below_el3,known) + else + (read_reg SCR_EL3_ref : M (list bitU)) >>= fun (w__0 : bits ty32) -> + let aarch32_below_el3 = + and_bool (HaveEL EL3) (eq_vec 
([access_vec_dec w__0 (10:ii)]) ([B0])) in + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__1 : bits ty64) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__2 : bits ty64) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__3 : bits ty64) -> + let aarch32_at_el1 = + or_bool + aarch32_below_el3 + (and_bool + (and_bool + (and_bool (HaveEL EL2) (not secure)) + (eq_vec ([access_vec_dec w__1 (31:ii)]) ([B0]))) + (not + (and_bool + (and_bool + (eq_vec ([access_vec_dec w__2 (34:ii)]) ([B1])) + (eq_vec ([access_vec_dec w__3 (27:ii)]) ([B1]))) + (HaveVirtHostExt ())))) in + (if (and_bool (eq_vec el EL0) (not aarch32_at_el1)) + then + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + (if (eq_vec w__4.ProcState_EL EL0) + then + read_reg PSTATE_ref >>= fun (w__5 : ProcState) -> + let (aarch32 : bool) = eq_vec w__5.ProcState_nRW ([B1]) in + return (aarch32,known) + else + let (known : bool) = false in + return (aarch32,known)) >>= fun ((aarch32 : bool), (known : bool)) -> + return (aarch32,known) + else + let (aarch32 : bool) = + or_bool + (and_bool aarch32_below_el3 (neq_vec el EL3)) + (and_bool aarch32_at_el1 (or_bool (eq_vec el EL1) (eq_vec el EL0))) in + return (aarch32,known)) >>= fun ((aarch32 : bool), (known : bool)) -> + return (aarch32,aarch32_at_el1,aarch32_below_el3,known)) >>= fun ((aarch32 : bool), (aarch32_at_el1 : + bool), (aarch32_below_el3 : bool), (known : bool)) -> + return (aarch32,aarch32_at_el1,aarch32_below_el3,known)) >>= fun ((aarch32 : bool), (aarch32_at_el1 : + bool), (aarch32_below_el3 : bool), (known : bool)) -> + let (aarch32 : bool) = + if (not known) + then + let (aarch32 : bool) = undefined_bool () in + aarch32 + else aarch32 in + return (known,aarch32) + +val ELStateUsingAArch32 : list bitU -> bool -> M bool + +let ELStateUsingAArch32 el secure = + let (aarch32 : bool) = undefined_bool () in + let (known : bool) = undefined_bool () in + ELStateUsingAArch32K el secure >>= fun (tup__0, tup__1) -> + let known = tup__0 in + let aarch32 = tup__1 in + assert_exp known "known" >> + return aarch32 + +val ELUsingAArch32 : list bitU -> M bool + +let ELUsingAArch32 el = + IsSecureBelowEL3 () >>= fun (w__0 : bool) -> + ELStateUsingAArch32 el w__0 + +val ELIsInHost : list bitU -> M bool + +let ELIsInHost el = + IsSecureBelowEL3 () >>= fun (w__0 : bool) -> + ELUsingAArch32 EL2 >>= fun (w__1 : bool) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__2 : bits ty64) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__3 : bits ty64) -> + return (and_bool + (and_bool + (and_bool (and_bool (not w__0) (HaveVirtHostExt ())) (not w__1)) + (eq_vec ([access_vec_dec w__2 (34:ii)]) ([B1]))) + (or_bool + (eq_vec el EL2) + (and_bool (eq_vec el EL0) (eq_vec ([access_vec_dec w__3 (27:ii)]) ([B1]))))) + +val S1TranslationRegime__0 : list bitU -> M (list bitU) + +val S1TranslationRegime__1 : unit -> M (list bitU) + +let S1TranslationRegime__0 el = + if (neq_vec el EL0) + then return el + else + ELUsingAArch32 EL3 >>= fun (w__0 : bool) -> + (read_reg SCR_ref : M (list bitU)) >>= fun (w__1 : bits ty32) -> + if (and_bool (and_bool (HaveEL EL3) w__0) (eq_vec ([access_vec_dec w__1 (0:ii)]) ([B0]))) + then return EL3 + else + ELIsInHost el >>= fun (w__2 : bool) -> + return (if (and_bool (HaveVirtHostExt ()) w__2) + then EL2 + else EL1) + +let S1TranslationRegime__1 () = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + (S1TranslationRegime__0 w__0.ProcState_EL : M (list bitU)) + +val aset_FAR__0 : list bitU -> list bitU -> M unit + +val aset_FAR__1 : list bitU -> M unit + +let 
aset_FAR__0 regime value_name = + let (r : bits ty64) = value_name in + let _pat_ = regime in + if (eq_vec _pat_ EL1) + then write_reg FAR_EL1_ref r + else + if (eq_vec _pat_ EL2) + then write_reg FAR_EL2_ref r + else if (eq_vec _pat_ EL3) then write_reg FAR_EL3_ref r else Unreachable () + +let aset_FAR__1 value_name = + (S1TranslationRegime__1 () : M (list bitU)) >>= fun (w__0 : list bitU) -> + aset_FAR__0 w__0 value_name + +val aset_ESR__0 : list bitU -> list bitU -> M unit + +val aset_ESR__1 : list bitU -> M unit + +let aset_ESR__0 regime value_name = + let (r : bits ty32) = value_name in + let _pat_ = regime in + if (eq_vec _pat_ EL1) + then write_reg ESR_EL1_ref r + else + if (eq_vec _pat_ EL2) + then write_reg ESR_EL2_ref r + else if (eq_vec _pat_ EL3) then write_reg ESR_EL3_ref r else Unreachable () + +let aset_ESR__1 value_name = + (S1TranslationRegime__1 () : M (list bitU)) >>= fun (w__0 : list bitU) -> + aset_ESR__0 w__0 value_name + +val aget_VBAR__0 : list bitU -> M (list bitU) + +val aget_VBAR__1 : unit -> M (list bitU) + +let aget_VBAR__0 regime = + let (r : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + let _pat_ = regime in + (if (eq_vec _pat_ EL1) + then + (read_reg VBAR_EL1_ref : M (list bitU)) >>= fun (w__0 : bits ty64) -> + let (r : bits ty64) = w__0 in + return r + else + (if (eq_vec _pat_ EL2) + then + (read_reg VBAR_EL2_ref : M (list bitU)) >>= fun (w__1 : bits ty64) -> + let (r : bits ty64) = w__1 in + return r + else + (if (eq_vec _pat_ EL3) + then + (read_reg VBAR_EL3_ref : M (list bitU)) >>= fun (w__2 : bits ty64) -> + let (r : bits ty64) = w__2 in + return r + else + Unreachable () >> + return r) >>= fun (r : bits ty64) -> + return r) >>= fun (r : bits ty64) -> + return r) >>= fun (r : bits ty64) -> + return r + +let aget_VBAR__1 () = + (S1TranslationRegime__1 () : M (list bitU)) >>= fun (w__0 : list bitU) -> + (aget_VBAR__0 w__0 : M (list bitU)) + +val aget_SCTLR__0 : list bitU -> M (list bitU) + +val aget_SCTLR__1 : unit -> M (list bitU) + +let aget_SCTLR__0 regime = + let (r : bits ty32) = (undefined_vector (32:ii) (undefined_bit ()) : list bitU) in + let _pat_ = regime in + (if (eq_vec _pat_ EL1) + then + (read_reg SCTLR_EL1_ref : M (list bitU)) >>= fun (w__0 : bits ty32) -> + let (r : bits ty32) = w__0 in + return r + else + (if (eq_vec _pat_ EL2) + then + (read_reg SCTLR_EL2_ref : M (list bitU)) >>= fun (w__1 : bits ty32) -> + let (r : bits ty32) = w__1 in + return r + else + (if (eq_vec _pat_ EL3) + then + (read_reg SCTLR_EL3_ref : M (list bitU)) >>= fun (w__2 : bits ty32) -> + let (r : bits ty32) = w__2 in + return r + else + Unreachable () >> + return r) >>= fun (r : bits ty32) -> + return r) >>= fun (r : bits ty32) -> + return r) >>= fun (r : bits ty32) -> + return r + +let aget_SCTLR__1 () = + (S1TranslationRegime__1 () : M (list bitU)) >>= fun (w__0 : list bitU) -> + (aget_SCTLR__0 w__0 : M (list bitU)) + +val BigEndian : unit -> M bool + +let BigEndian () = + let (bigend : bool) = undefined_bool () in + UsingAArch32 () >>= fun (w__0 : bool) -> + (if w__0 + then + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + let (bigend : bool) = neq_vec w__1.ProcState_E ([B0]) in + return bigend + else + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + (if (eq_vec w__2.ProcState_EL EL0) + then + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__3 : list bitU) -> + let (bigend : bool) = neq_vec ([access_vec_dec w__3 (24:ii)]) ([B0]) in + return bigend + else + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__4 : list bitU) -> + let 
(bigend : bool) = neq_vec ([access_vec_dec w__4 (25:ii)]) ([B0]) in + return bigend) >>= fun (bigend : bool) -> + return bigend) >>= fun (bigend : bool) -> + return bigend + +val IsInHost : unit -> M bool + +let IsInHost () = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + ELIsInHost w__0.ProcState_EL + +val BadMode : list bitU -> bool + +let BadMode mode = + let (valid_name : bool) = undefined_bool () in + let _pat_ = mode in + let (valid_name : bool) = + if (eq_vec _pat_ M32_Monitor) + then + let (valid_name : bool) = HaveAArch32EL EL3 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_Hyp) + then + let (valid_name : bool) = HaveAArch32EL EL2 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_FIQ) + then + let (valid_name : bool) = HaveAArch32EL EL1 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_IRQ) + then + let (valid_name : bool) = HaveAArch32EL EL1 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_Svc) + then + let (valid_name : bool) = HaveAArch32EL EL1 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_Abort) + then + let (valid_name : bool) = HaveAArch32EL EL1 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_Undef) + then + let (valid_name : bool) = HaveAArch32EL EL1 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_System) + then + let (valid_name : bool) = HaveAArch32EL EL1 in + valid_name + else + let (valid_name : bool) = + if (eq_vec _pat_ M32_User) + then + let (valid_name : bool) = HaveAArch32EL EL0 in + valid_name + else + let (valid_name : bool) = false in + valid_name in + valid_name in + valid_name in + valid_name in + valid_name in + valid_name in + valid_name in + valid_name in + valid_name in + not valid_name + +val aset_Rmode : ii -> list bitU -> list bitU -> M unit + +let aset_Rmode n mode value_name = + assert_exp (and_bool (gteq (ex_int n) (0:ii)) (lteq (ex_int n) (14:ii))) "((n >= 0) && (n <= 14))" >> + IsSecure () >>= fun (w__0 : bool) -> + (if (not w__0) + then assert_exp (neq_vec mode M32_Monitor) "(mode != M32_Monitor)" + else return ()) >> + assert_exp (not (BadMode mode)) "!(BadMode(mode))" >> + if (eq_vec mode M32_Monitor) + then + if (eq (ex_int n) (13:ii)) + then write_reg SP_mon_ref value_name + else + if (eq (ex_int n) (14:ii)) + then write_reg LR_mon_ref value_name + else + read_reg _R_ref >>= fun (w__1 : list (bits ty64)) -> + let (__tmp_1 : bits ty64) = (access_list_dec w__1 n : list bitU) in + let __tmp_1 = (update_subrange_vec_dec __tmp_1 (31:ii) (0:ii) value_name : list bitU) in + read_reg _R_ref >>= fun (w__2 : list (list bitU)) -> + write_reg _R_ref (update_list_dec w__2 n __tmp_1 : list (list bitU)) + else + ConstrainUnpredictableBool Unpredictable_ZEROUPPER >>= fun (w__3 : bool) -> + if (and_bool (not (HighestELUsingAArch32 ())) w__3) + then + read_reg _R_ref >>= fun (w__4 : list (list bitU)) -> + LookUpRIndex n mode >>= fun (w__5 : ii) -> + (ZeroExtend__0 value_name (64:ii) : M (list bitU)) >>= fun (w__6 : list bitU) -> + write_reg _R_ref (update_list_dec w__4 w__5 w__6 : list (list bitU)) + else + read_reg _R_ref >>= fun (w__7 : list (bits ty64)) -> + LookUpRIndex n mode >>= fun (w__8 : ii) -> + let (__tmp_2 : bits ty64) = (access_list_dec w__7 w__8 : list bitU) in + let __tmp_2 = (update_subrange_vec_dec __tmp_2 (31:ii) (0:ii) value_name : list bitU) in + read_reg _R_ref >>= fun (w__9 : list (list bitU)) -> + LookUpRIndex n mode >>= fun (w__10 : ii) -> + write_reg 
_R_ref (update_list_dec w__9 w__10 __tmp_2 : list (list bitU)) + +val aset_R : ii -> list bitU -> M unit + +let aset_R n value_name = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + aset_Rmode n w__0.ProcState_M value_name + +val ELFromM32 : list bitU -> M (bool * list bitU) + +let ELFromM32 mode = + let (el : bits ty2) = (undefined_vector (2:ii) (undefined_bit ()) : list bitU) in + let (valid_name : bool) = not (BadMode mode) in + let _pat_ = mode in + (if (eq_vec _pat_ M32_Monitor) + then + let (el : bits ty2) = EL3 in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_Hyp) + then + let el = EL2 in + (aget_SCR_GEN () : M (list bitU)) >>= fun (w__0 : list bitU) -> + let (valid_name : bool) = + and_bool + valid_name + (or_bool (not (HaveEL EL3)) (eq_vec ([access_vec_dec w__0 (0:ii)]) ([B1]))) in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_FIQ) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__1 : bits ty32) -> + let (el : bits ty2) = + if (and_bool + (and_bool (HaveEL EL3) (HighestELUsingAArch32 ())) + (eq_vec ([access_vec_dec w__1 (0:ii)]) ([B0]))) + then EL3 + else EL1 in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_IRQ) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__2 : bits ty32) -> + let (el : bits ty2) = + if (and_bool + (and_bool (HaveEL EL3) (HighestELUsingAArch32 ())) + (eq_vec ([access_vec_dec w__2 (0:ii)]) ([B0]))) + then EL3 + else EL1 in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_Svc) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__3 : bits ty32) -> + let (el : bits ty2) = + if (and_bool + (and_bool (HaveEL EL3) (HighestELUsingAArch32 ())) + (eq_vec ([access_vec_dec w__3 (0:ii)]) ([B0]))) + then EL3 + else EL1 in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_Abort) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__4 : bits ty32) -> + let (el : bits ty2) = + if (and_bool + (and_bool (HaveEL EL3) (HighestELUsingAArch32 ())) + (eq_vec ([access_vec_dec w__4 (0:ii)]) ([B0]))) + then EL3 + else EL1 in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_Undef) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__5 : bits ty32) -> + let (el : bits ty2) = + if (and_bool + (and_bool (HaveEL EL3) (HighestELUsingAArch32 ())) + (eq_vec ([access_vec_dec w__5 (0:ii)]) ([B0]))) + then EL3 + else EL1 in + return (el,valid_name) + else + (if (eq_vec _pat_ M32_System) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__6 : bits ty32) -> + let (el : bits ty2) = + if (and_bool + (and_bool (HaveEL EL3) (HighestELUsingAArch32 ())) + (eq_vec ([access_vec_dec w__6 (0:ii)]) ([B0]))) + then EL3 + else EL1 in + return (el,valid_name) + else + let ((el : bits ty2), (valid_name : bool)) = + if (eq_vec _pat_ M32_User) + then + let (el : bits ty2) = EL0 in + (el,valid_name) + else + let (valid_name : bool) = false in + (el,valid_name) in + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + return (el,valid_name)) >>= fun ((el : bits ty2), (valid_name : bool)) -> + let (el : bits ty2) = + if (not valid_name) + then + let (el : bits ty2) = 
(undefined_vector (2:ii) (undefined_bit ()) : list bitU) in + el + else el in + return (valid_name,el) + +val AArch32_WriteMode : list bitU -> M unit + +let AArch32_WriteMode mode = + let (el : bits ty2) = (undefined_vector (2:ii) (undefined_bit ()) : list bitU) in + let (valid_name : bool) = undefined_bool () in + (ELFromM32 mode : M ((bool * list bitU))) >>= fun (tup__0, tup__1) -> + let valid_name = tup__0 in + let el = tup__1 in + assert_exp valid_name "valid" >> + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + write_reg PSTATE_ref <|w__0 with ProcState_M = mode|> >> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + write_reg PSTATE_ref <|w__1 with ProcState_EL = el|> >> + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + write_reg PSTATE_ref <|w__2 with ProcState_nRW = ([B1])|> >> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + write_reg + PSTATE_ref + <|w__3 with + ProcState_SP = + (if (or_bool (eq_vec mode M32_User) (eq_vec mode M32_System)) + then [B0] + else [B1])|> + +val AddrTop : list bitU -> bool -> list bitU -> M ii + +let AddrTop address IsInstr el = + assert_exp (HaveEL el) "HaveEL(el)" >> + (S1TranslationRegime__0 el : M (list bitU)) >>= fun (regime : bits ty2) -> + let (tbid : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + let (tbi : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + ELUsingAArch32 regime >>= fun (w__0 : bool) -> + (if w__0 + then return ((31:ii),tbi,tbid) + else + let _pat_ = regime in + (if (eq_vec _pat_ EL1) + then + (if (eq_vec ([access_vec_dec address (55:ii)]) ([B1])) + then + (read_reg TCR_EL1_ref : M (list bitU)) >>= fun (w__1 : bits ty64) -> + return ([access_vec_dec w__1 (38:ii)]) + else + (read_reg TCR_EL1_ref : M (list bitU)) >>= fun (w__2 : bits ty64) -> + return ([access_vec_dec w__2 (37:ii)])) >>= fun (w__3 : list bitU) -> + let tbi = w__3 in + (if (HavePACExt ()) + then + (if (eq_vec ([access_vec_dec address (55:ii)]) ([B1])) + then + (read_reg TCR_EL1_ref : M (list bitU)) >>= fun (w__4 : bits ty64) -> + return ([access_vec_dec w__4 (52:ii)]) + else + (read_reg TCR_EL1_ref : M (list bitU)) >>= fun (w__5 : bits ty64) -> + return ([access_vec_dec w__5 (51:ii)])) >>= fun (w__6 : list bitU) -> + let (tbid : bits ty1) = w__6 in + return tbid + else return tbid) >>= fun (tbid : bits ty1) -> + return (tbi,tbid) + else + (if (eq_vec _pat_ EL2) + then + ELIsInHost el >>= fun (w__7 : bool) -> + (if (and_bool (HaveVirtHostExt ()) w__7) + then + (if (eq_vec ([access_vec_dec address (55:ii)]) ([B1])) + then + (read_reg TCR_EL2_ref : M (list bitU)) >>= fun (w__8 : bits ty64) -> + return ([access_vec_dec w__8 (38:ii)]) + else + (read_reg TCR_EL2_ref : M (list bitU)) >>= fun (w__9 : bits ty64) -> + return ([access_vec_dec w__9 (37:ii)])) >>= fun (w__10 : list bitU) -> + let tbi = w__10 in + (if (HavePACExt ()) + then + (if (eq_vec ([access_vec_dec address (55:ii)]) ([B1])) + then + (read_reg TCR_EL2_ref : M (list bitU)) >>= fun (w__11 : bits ty64) -> + return ([access_vec_dec w__11 (52:ii)]) + else + (read_reg TCR_EL2_ref : M (list bitU)) >>= fun (w__12 : bits ty64) -> + return ([access_vec_dec w__12 (51:ii)])) >>= fun (w__13 : list bitU) -> + let (tbid : bits ty1) = w__13 in + return tbid + else return tbid) >>= fun (tbid : bits ty1) -> + return (tbi,tbid) + else + (read_reg TCR_EL2_ref : M (list bitU)) >>= fun (w__14 : bits ty64) -> + let tbi = [access_vec_dec w__14 (20:ii)] in + (if (HavePACExt ()) + then + (read_reg TCR_EL2_ref : M (list bitU)) >>= fun (w__15 : bits ty64) -> + let 
(tbid : bits ty1) = [access_vec_dec w__15 (29:ii)] in + return tbid + else return tbid) >>= fun (tbid : bits ty1) -> + return (tbi,tbid)) >>= fun ((tbi : bits ty1), (tbid : bits ty1)) -> + return (tbi,tbid) + else + (read_reg TCR_EL3_ref : M (list bitU)) >>= fun (w__16 : bits ty32) -> + let tbi = [access_vec_dec w__16 (20:ii)] in + (if (HavePACExt ()) + then + (read_reg TCR_EL3_ref : M (list bitU)) >>= fun (w__17 : bits ty32) -> + let (tbid : bits ty1) = [access_vec_dec w__17 (29:ii)] in + return tbid + else return tbid) >>= fun (tbid : bits ty1) -> + return (tbi,tbid)) >>= fun ((tbi : bits ty1), (tbid : bits ty1)) -> + return (tbi,tbid)) >>= fun ((tbi : bits ty1), (tbid : bits ty1)) -> + return (if (and_bool + (eq_vec tbi ([B1])) + (or_bool (or_bool (not (HavePACExt ())) (eq_vec tbid ([B0]))) (not IsInstr))) + then (55:ii) + else (63:ii),tbi,tbid)) >>= fun ((w__18 : ii), (tbi : bits ty1), (tbid : bits ty1)) -> + return w__18 + +val AArch64_TranslateAddress : list bitU -> AccType -> bool -> bool -> ii -> M AddressDescriptor + +val AArch64_NoFault : unit -> FaultRecord + +let AArch64_NoFault () = + let (ipaddress : bits ty52) = (undefined_vector (52:ii) (undefined_bit ()) : list bitU) in + let (level : ii) = undefined_int () in + let (acctype : AccType) = AccType_NORMAL in + let (iswrite : bool) = undefined_bool () in + let (extflag : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + let (errortype : bits ty2) = (undefined_vector (2:ii) (undefined_bit ()) : list bitU) in + let (secondstage : bool) = false in + let (s2fs1walk : bool) = false in + AArch64_CreateFaultRecord + Fault_None + ipaddress + level + acctype + iswrite + extflag + errortype + secondstage + s2fs1walk + +let AArch64_TranslateAddress vaddress acctype iswrite wasaligned size = + (ZeroExtend__1 (64:ii) vaddress : M (list bitU)) >>= fun (w__0 : bits ty64) -> + return (<| AddressDescriptor_fault = (AArch64_NoFault ()); + AddressDescriptor_memattrs = + (<| MemoryAttributes_typ = MemType_Normal; + MemoryAttributes_device = DeviceType_GRE; + MemoryAttributes_inner = + (<| MemAttrHints_attrs = ([B0;B0]); + MemAttrHints_hints = ([B0;B0]); + MemAttrHints_transient = false |>); + MemoryAttributes_outer = + (<| MemAttrHints_attrs = ([B0;B0]); + MemAttrHints_hints = ([B0;B0]); + MemAttrHints_transient = false |>); + MemoryAttributes_shareable = true; + MemoryAttributes_outershareable = true |>); + AddressDescriptor_paddress = + (<| FullAddress_physicaladdress = (slice vaddress (0:ii) (52:ii) : list bitU) |>); + AddressDescriptor_vaddress = w__0 |>) + +val AArch64_MaybeZeroRegisterUppers : unit -> M unit + +let AArch64_MaybeZeroRegisterUppers () = + UsingAArch32 () >>= fun (w__0 : bool) -> + assert_exp w__0 "UsingAArch32()" >> + let (include_R15_name : bool) = undefined_bool () in + let (last : integer) = undefined_range (14:ii) (30:ii) in + let (first : integer) = (0:ii) in + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + ELUsingAArch32 EL1 >>= fun (w__2 : bool) -> + (if (and_bool (eq_vec w__1.ProcState_EL EL0) (not w__2)) + then + let (first : integer) = (0:ii) in + let (last : integer) = (14:ii) in + let (include_R15_name : bool) = false in + return (first,include_R15_name,last) + else + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + IsSecure () >>= fun (w__5 : bool) -> + ELUsingAArch32 EL2 >>= fun (w__6 : bool) -> + let ((first : integer), (include_R15_name : bool), (last : integer)) = + if (and_bool + (and_bool + (and_bool + (or_bool (eq_vec 
w__3.ProcState_EL EL0) (eq_vec w__4.ProcState_EL EL1)) + (HaveEL EL2)) + (not w__5)) + (not w__6)) + then + let (first : integer) = (0:ii) in + let (last : integer) = (30:ii) in + let (include_R15_name : bool) = false in + (first,include_R15_name,last) + else + let (first : integer) = (0:ii) in + let (last : integer) = (30:ii) in + let (include_R15_name : bool) = true in + (first,include_R15_name,last) in + return (first,include_R15_name,last)) >>= fun ((first : integer), (include_R15_name : bool), (last : + integer)) -> + (foreachM (index_list first last (1:ii)) () + (fun n () -> + ConstrainUnpredictableBool Unpredictable_ZEROUPPER >>= fun (w__7 : bool) -> + if (and_bool (or_bool (neq (ex_int n) (ex_int (15:ii))) include_R15_name) w__7) + then + read_reg _R_ref >>= fun (w__8 : list (bits ty64)) -> + let (__tmp_3 : bits ty64) = (access_list_dec w__8 n : list bitU) in + let __tmp_3 = + (update_subrange_vec_dec __tmp_3 (63:ii) (32:ii) (Zeros__0 (32:ii) : list bitU) : list bitU) in + read_reg _R_ref >>= fun (w__9 : list (list bitU)) -> + write_reg _R_ref (update_list_dec w__9 n __tmp_3 : list (list bitU)) + else return ())) + +val AArch64_FaultSyndrome : bool -> FaultRecord -> M (list bitU) + +let AArch64_FaultSyndrome d_side fault = + assert_exp (neq fault.FaultRecord_typ Fault_None) "((fault).type != Fault_None)" >> + let (iss : bits ty25) = (Zeros__1 (25:ii) () : list bitU) in + IsExternalSyncAbort__1 fault >>= fun (w__0 : bool) -> + let (iss : bits ty25) = + if (and_bool (HaveRASExt ()) w__0) + then + let (iss : bits ty25) = + (set_slice (25:ii) (2:ii) iss (11:ii) fault.FaultRecord_errortype : list bitU) in + iss + else iss in + (if d_side + then + IsSecondStage fault >>= fun (w__1 : bool) -> + (if (and_bool w__1 (not fault.FaultRecord_s2fs1walk)) + then + (LSInstructionSyndrome () : M (list bitU)) >>= fun (w__2 : list bitU) -> + let (iss : bits ty25) = (set_slice (25:ii) (11:ii) iss (14:ii) w__2 : list bitU) in + return iss + else return iss) >>= fun (iss : bits ty25) -> + let (iss : bits ty25) = + if (or_bool + (eq fault.FaultRecord_acctype AccType_DC) + (or_bool + (eq fault.FaultRecord_acctype AccType_IC) + (eq fault.FaultRecord_acctype AccType_AT))) + then + let (iss : bits ty25) = (set_slice (25:ii) (1:ii) iss (8:ii) ([B1]) : list bitU) in + let (iss : bits ty25) = (set_slice (25:ii) (1:ii) iss (6:ii) ([B1]) : list bitU) in + iss + else + let (iss : bits ty25) = + (set_slice (25:ii) (1:ii) iss (6:ii) (if fault.FaultRecord_write then [B1] else [B0]) : list bitU) in + iss in + return iss + else return iss) >>= fun (iss : bits ty25) -> + IsExternalAbort__1 fault >>= fun (w__3 : bool) -> + let (iss : bits ty25) = + if w__3 + then + let (iss : bits ty25) = + (set_slice (25:ii) (1:ii) iss (9:ii) fault.FaultRecord_extflag : list bitU) in + iss + else iss in + let iss = + (set_slice (25:ii) (1:ii) iss (7:ii) (if fault.FaultRecord_s2fs1walk then [B1] else [B0]) : list bitU) in + (EncodeLDFSC fault.FaultRecord_typ fault.FaultRecord_level : M (list bitU)) >>= fun (w__4 : + list bitU) -> + let (iss : bits ty25) = (set_slice (25:ii) (6:ii) iss (0:ii) w__4 : list bitU) in + return iss + +val AArch64_AbortSyndrome : Exception -> FaultRecord -> list bitU -> M ExceptionRecord + +let AArch64_AbortSyndrome typ fault vaddress = + let (exception : ExceptionRecord) = ExceptionSyndrome typ in + let (d_side : bool) = or_bool (eq typ Exception_DataAbort) (eq typ Exception_Watchpoint) in + (AArch64_FaultSyndrome d_side fault : M (list bitU)) >>= fun (w__0 : bits ty25) -> + let exception = <|exception with 
ExceptionRecord_syndrome = w__0|> in + (ZeroExtend__1 (64:ii) vaddress : M (list bitU)) >>= fun (w__1 : bits ty64) -> + let exception = <|exception with ExceptionRecord_vaddress = w__1|> in + IPAValid fault >>= fun (w__2 : bool) -> + let (exception : ExceptionRecord) = + if w__2 + then + let (exception : ExceptionRecord) = <|exception with ExceptionRecord_ipavalid = true|> in + let (exception : ExceptionRecord) = + <|exception with ExceptionRecord_ipaddress = fault.FaultRecord_ipaddress|> in + exception + else + let (exception : ExceptionRecord) = <|exception with ExceptionRecord_ipavalid = false|> in + exception in + return exception + +val AArch64_ExceptionClass : Exception -> list bitU -> M (ii * list bitU) + +let AArch64_ExceptionClass typ target_el = + ThisInstrLength () >>= fun (w__0 : ii) -> + let (il : bits ty1) = if (eq (ex_int w__0) (32:ii)) then [B1] else [B0] in + UsingAArch32 () >>= fun (from_32 : bool) -> + assert_exp (or_bool from_32 (eq_vec il ([B1]))) "(from_32 || (il == '1'))" >> + let (ec : ii) = undefined_int () in + match typ with + | Exception_Uncategorized -> + let (ec : ii) = (0:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_WFxTrap -> + let (ec : ii) = (1:ii) in + return (ec,il) + | Exception_CP15RTTrap -> + let ec = (3:ii) in + assert_exp from_32 "from_32" >> + return (ec,il) + | Exception_CP15RRTTrap -> + let ec = (4:ii) in + assert_exp from_32 "from_32" >> + return (ec,il) + | Exception_CP14RTTrap -> + let ec = (5:ii) in + assert_exp from_32 "from_32" >> + return (ec,il) + | Exception_CP14DTTrap -> + let ec = (6:ii) in + assert_exp from_32 "from_32" >> + return (ec,il) + | Exception_AdvSIMDFPAccessTrap -> + let (ec : ii) = (7:ii) in + return (ec,il) + | Exception_FPIDTrap -> + let (ec : ii) = (8:ii) in + return (ec,il) + | Exception_CP14RRTTrap -> + let ec = (12:ii) in + assert_exp from_32 "from_32" >> + return (ec,il) + | Exception_IllegalState -> + let (ec : ii) = (14:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_SupervisorCall -> + let (ec : ii) = (17:ii) in + return (ec,il) + | Exception_HypervisorCall -> + let (ec : ii) = (18:ii) in + return (ec,il) + | Exception_MonitorCall -> + let (ec : ii) = (19:ii) in + return (ec,il) + | Exception_SystemRegisterTrap -> + let ec = (24:ii) in + assert_exp (not from_32) "!(from_32)" >> + return (ec,il) + | Exception_InstructionAbort -> + let (ec : ii) = (32:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_PCAlignment -> + let (ec : ii) = (34:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_DataAbort -> + let (ec : ii) = (36:ii) in + return (ec,il) + | Exception_SPAlignment -> + let ec = (38:ii) in + let il = [B1] in + assert_exp (not from_32) "!(from_32)" >> + return (ec,il) + | Exception_FPTrappedException -> + let (ec : ii) = (40:ii) in + return (ec,il) + | Exception_SError -> + let (ec : ii) = (47:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_Breakpoint -> + let (ec : ii) = (48:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_SoftwareStep -> + let (ec : ii) = (50:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_Watchpoint -> + let (ec : ii) = (52:ii) in + let (il : bits ty1) = [B1] in + return (ec,il) + | Exception_SoftwareBreakpoint -> + let (ec : ii) = (56:ii) in + return (ec,il) + | Exception_VectorCatch -> + let ec = (58:ii) in + let il = [B1] in + assert_exp from_32 "from_32" >> + return (ec,il) + | _ -> + Unreachable () >> + return (ec,il) + end >>= fun ((ec : 
ii), (il : bits ty1)) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + let (ec : ii) = + if (and_bool + (or_bool + (eq (ex_int ec) (32:ii)) + (or_bool + (eq (ex_int ec) (36:ii)) + (or_bool + (eq (ex_int ec) (48:ii)) + (or_bool (eq (ex_int ec) (50:ii)) (eq (ex_int ec) (52:ii)))))) + (eq_vec target_el w__1.ProcState_EL)) + then + let (ec : ii) = integerAdd (ex_int ec) (1:ii) in + ec + else ec in + let (ec : ii) = + if (and_bool + (or_bool + (eq (ex_int ec) (17:ii)) + (or_bool + (eq (ex_int ec) (18:ii)) + (or_bool + (eq (ex_int ec) (19:ii)) + (or_bool (eq (ex_int ec) (40:ii)) (eq (ex_int ec) (56:ii)))))) + (not from_32)) + then + let (ec : ii) = integerAdd (ex_int ec) (4:ii) in + ec + else ec in + return (ec,il) + +val AArch64_ReportException : ExceptionRecord -> list bitU -> M unit + +let AArch64_ReportException exception target_el = + let (typ : Exception) = exception.ExceptionRecord_typ in + let (il : bits ty1) = (undefined_vector (1:ii) (undefined_bit ()) : list bitU) in + let (ec : ii) = undefined_int () in + (AArch64_ExceptionClass typ target_el : M ((ii * list bitU))) >>= fun (tup__0, tup__1) -> + let ec = tup__0 in + let il = tup__1 in + let (iss : bits ty25) = exception.ExceptionRecord_syndrome in + let (il : bits ty1) = + if (and_bool + (or_bool (eq (ex_int ec) (36:ii)) (eq (ex_int ec) (37:ii))) + (eq_vec ([access_vec_dec iss (24:ii)]) ([B0]))) + then + let (il : bits ty1) = [B1] in + il + else il in + aset_ESR__0 + target_el + (concat_vec (concat_vec (__GetSlice_int (6:ii) ec (0:ii) : list bitU) il : list bitU) iss : list bitU) >> + (if (or_bool + (eq typ Exception_InstructionAbort) + (or_bool + (eq typ Exception_PCAlignment) + (or_bool (eq typ Exception_DataAbort) (eq typ Exception_Watchpoint)))) + then aset_FAR__0 target_el exception.ExceptionRecord_vaddress + else aset_FAR__0 target_el (undefined_vector (64:ii) (undefined_bit ()) : list bitU)) >> + if (eq_vec target_el EL2) + then + if exception.ExceptionRecord_ipavalid + then + (read_reg HPFAR_EL2_ref : M (list bitU)) >>= fun (w__0 : list bitU) -> + write_reg + HPFAR_EL2_ref + (set_slice + (64:ii) + (40:ii) + w__0 + (4:ii) + (slice exception.ExceptionRecord_ipaddress (12:ii) (40:ii) : list bitU) : list bitU) + else + (read_reg HPFAR_EL2_ref : M (list bitU)) >>= fun (w__1 : list bitU) -> + write_reg + HPFAR_EL2_ref + (set_slice + (64:ii) + (40:ii) + w__1 + (4:ii) + (undefined_vector (40:ii) (undefined_bit ()) : list bitU) : list bitU) + else return () + +val AArch64_BranchAddr : list bitU -> M (list bitU) + +let AArch64_BranchAddr vaddress = + UsingAArch32 () >>= fun (w__0 : bool) -> + assert_exp (not w__0) "!(UsingAArch32())" >> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + AddrTop vaddress true w__1.ProcState_EL >>= fun (w__2 : ii) -> + coerce_int_nat w__2 >>= fun (msbit : ii) -> + if (eq (ex_nat msbit) (63:ii)) + then return vaddress + else + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + IsInHost () >>= fun (w__5 : bool) -> + if (and_bool + (or_bool (or_bool (eq_vec w__3.ProcState_EL EL0) (eq_vec w__4.ProcState_EL EL1)) w__5) + (eq_vec ([access_vec_dec vaddress msbit]) ([B1]))) + then + (SignExtend__1 + (64:ii) + (slice vaddress (0:ii) (integerAdd (ex_nat msbit) (1:ii)) : (list bitU)) : M (list bitU)) + else + (ZeroExtend__1 + (64:ii) + (slice vaddress (0:ii) (integerAdd (ex_nat msbit) (1:ii)) : (list bitU)) : M (list bitU)) + +val BranchTo : list bitU -> BranchType -> M unit + +let BranchTo target branch_type = + write_reg __BranchTaken_ref true >> + 
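  (* Editorial note, not part of the generated source: BranchTo dispatches on the
     target width below. A 32-bit target is only legal when UsingAArch32() holds and
     is zero-extended into _PC; a 64-bit target is first passed through
     AArch64_BranchAddr, which sign- or zero-extends from the top addressable bit
     returned by AddrTop (55 when top-byte-ignore applies, otherwise 63). *)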
let (_ : unit) = Hint_Branch branch_type in + if (eq (length target) (32:ii)) + then + UsingAArch32 () >>= fun (w__0 : bool) -> + assert_exp w__0 "UsingAArch32()" >> + (ZeroExtend__1 (64:ii) target : M (list bitU)) >>= fun (w__1 : bits ty64) -> + write_reg _PC_ref w__1 + else + UsingAArch32 () >>= fun (w__2 : bool) -> + assert_exp (and_bool (eq (length target) (64:ii)) (not w__2)) "((N == 64) && !(UsingAArch32()))" >> + (AArch64_BranchAddr (slice target (0:ii) (64:ii) : list bitU) : M (list bitU)) >>= fun (w__3 : + bits ty64) -> + write_reg _PC_ref w__3 + +val AArch64_TakeException : list bitU -> ExceptionRecord -> list bitU -> ii -> M unit + +let AArch64_TakeException target_el exception preferred_exception_return vect_offset__arg = + let vect_offset = vect_offset__arg in + let (_ : unit) = SynchronizeContext () in + ELUsingAArch32 target_el >>= fun (w__0 : bool) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + assert_exp (and_bool + (and_bool (HaveEL target_el) (not w__0)) + (gteq (uint target_el) (uint w__1.ProcState_EL))) "((HaveEL(target_el) && !(ELUsingAArch32(target_el))) && (UInt(target_el) >= UInt((PSTATE).EL)))" >> + UsingAArch32 () >>= fun (from_32 : bool) -> + (if from_32 + then AArch64_MaybeZeroRegisterUppers () + else return ()) >> + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + (if (gt (uint target_el) (uint w__2.ProcState_EL)) + then + let (lower_32 : bool) = undefined_bool () in + (if (eq_vec target_el EL3) + then + IsSecure () >>= fun (w__3 : bool) -> + (if (and_bool (not w__3) (HaveEL EL2)) + then + ELUsingAArch32 EL2 >>= fun (w__4 : bool) -> + let (lower_32 : bool) = w__4 in + return lower_32 + else + ELUsingAArch32 EL1 >>= fun (w__5 : bool) -> + let (lower_32 : bool) = w__5 in + return lower_32) >>= fun (lower_32 : bool) -> + return lower_32 + else + IsInHost () >>= fun (w__6 : bool) -> + read_reg PSTATE_ref >>= fun (w__7 : ProcState) -> + (if (and_bool (and_bool w__6 (eq_vec w__7.ProcState_EL EL0)) (eq_vec target_el EL2)) + then + ELUsingAArch32 EL0 >>= fun (w__8 : bool) -> + let (lower_32 : bool) = w__8 in + return lower_32 + else + ELUsingAArch32 (sub_vec_int target_el (1:ii) : list bitU) >>= fun (w__9 : bool) -> + let (lower_32 : bool) = w__9 in + return lower_32) >>= fun (lower_32 : bool) -> + return lower_32) >>= fun (lower_32 : bool) -> + let (vect_offset : ii) = integerAdd vect_offset (if lower_32 then (1536:ii) else (1024:ii)) in + return vect_offset + else + read_reg PSTATE_ref >>= fun (w__10 : ProcState) -> + let (vect_offset : ii) = + if (eq_vec w__10.ProcState_SP ([B1])) + then + let (vect_offset : ii) = integerAdd (ex_int vect_offset) (512:ii) in + vect_offset + else vect_offset in + return vect_offset) >>= fun (vect_offset : ii) -> + (GetPSRFromPSTATE () : M (list bitU)) >>= fun (spsr : bits ty32) -> + (if (HaveUAOExt ()) + then + read_reg PSTATE_ref >>= fun (w__11 : ProcState) -> + write_reg PSTATE_ref <|w__11 with ProcState_UAO = ([B0])|> + else return ()) >> + (if (not + (or_bool + (eq exception.ExceptionRecord_typ Exception_IRQ) + (eq exception.ExceptionRecord_typ Exception_FIQ))) + then AArch64_ReportException exception target_el + else return ()) >> + read_reg PSTATE_ref >>= fun (w__12 : ProcState) -> + write_reg PSTATE_ref <|w__12 with ProcState_EL = target_el|> >> + read_reg PSTATE_ref >>= fun (w__13 : ProcState) -> + write_reg PSTATE_ref <|w__13 with ProcState_nRW = ([B0])|> >> + read_reg PSTATE_ref >>= fun (w__14 : ProcState) -> + write_reg PSTATE_ref <|w__14 with ProcState_SP = ([B1])|> >> + aset_SPSR spsr >> + aset_ELR__1 
preferred_exception_return >> + read_reg PSTATE_ref >>= fun (w__15 : ProcState) -> + write_reg PSTATE_ref <|w__15 with ProcState_SS = ([B0])|> >> + let split_vec = [B1;B1;B1;B1] in + let (tup__0, tup__1, tup__2, tup__3) = + ((subrange_vec_dec split_vec (3:ii) (3:ii) : list bitU),(subrange_vec_dec + split_vec + (2:ii) + (2:ii) : list bitU),(subrange_vec_dec + split_vec + (1:ii) + (1:ii) : list bitU),(subrange_vec_dec + split_vec + (0:ii) + (0:ii) : list bitU)) in + read_reg PSTATE_ref >>= fun (w__16 : ProcState) -> + write_reg PSTATE_ref <|w__16 with ProcState_D = tup__0|> >> + read_reg PSTATE_ref >>= fun (w__17 : ProcState) -> + write_reg PSTATE_ref <|w__17 with ProcState_A = tup__1|> >> + read_reg PSTATE_ref >>= fun (w__18 : ProcState) -> + write_reg PSTATE_ref <|w__18 with ProcState_I = tup__2|> >> + read_reg PSTATE_ref >>= fun (w__19 : ProcState) -> + write_reg PSTATE_ref <|w__19 with ProcState_F = tup__3|> >> + read_reg PSTATE_ref >>= fun (w__20 : ProcState) -> + write_reg PSTATE_ref <|w__20 with ProcState_IL = ([B0])|> >> + (if from_32 + then + read_reg PSTATE_ref >>= fun (w__21 : ProcState) -> + write_reg PSTATE_ref <|w__21 with ProcState_IT = ([B0;B0;B0;B0;B0;B0;B0;B0])|> >> + read_reg PSTATE_ref >>= fun (w__22 : ProcState) -> + write_reg PSTATE_ref <|w__22 with ProcState_T = ([B0])|> + else return ()) >> + read_reg PSTATE_ref >>= fun (w__23 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__24 : ProcState) -> + ELIsInHost EL0 >>= fun (w__25 : bool) -> + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__26 : list bitU) -> + (if (and_bool + (and_bool + (HavePANExt ()) + (or_bool + (eq_vec w__23.ProcState_EL EL1) + (and_bool (eq_vec w__24.ProcState_EL EL2) w__25))) + (eq_vec ([access_vec_dec w__26 (23:ii)]) ([B0]))) + then + read_reg PSTATE_ref >>= fun (w__27 : ProcState) -> + write_reg PSTATE_ref <|w__27 with ProcState_PAN = ([B1])|> + else return ()) >> + (aget_VBAR__1 () : M (list bitU)) >>= fun (w__28 : list bitU) -> + BranchTo + (concat_vec + (slice w__28 (11:ii) (53:ii) : list bitU) + (__GetSlice_int (11:ii) vect_offset (0:ii) : list bitU) : list bitU) + BranchType_EXCEPTION >> + let (iesb_req : bool) = undefined_bool () in + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__29 : list bitU) -> + (if (and_bool (HaveRASExt ()) (eq_vec ([access_vec_dec w__29 (21:ii)]) ([B1]))) + then + let (_ : unit) = ErrorSynchronizationBarrier MBReqDomain_FullSystem MBReqTypes_All in + let iesb_req = true in + TakeUnmaskedPhysicalSErrorInterrupts iesb_req >> + return iesb_req + else return iesb_req) >>= fun (iesb_req : bool) -> + return (EndOfInstruction ()) + +val AArch64_WatchpointException : list bitU -> FaultRecord -> M unit + +let AArch64_WatchpointException vaddress fault = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + assert_exp (neq_vec w__0.ProcState_EL EL3) "((PSTATE).EL != EL3)" >> + IsSecure () >>= fun (w__1 : bool) -> + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__4 : bits ty64) -> + (read_reg MDCR_EL2_ref : M (list bitU)) >>= fun (w__5 : bits ty32) -> + let (route_to_el2 : bool) = + and_bool + (and_bool + (and_bool (HaveEL EL2) (not w__1)) + (or_bool (eq_vec w__2.ProcState_EL EL0) (eq_vec w__3.ProcState_EL EL1))) + (or_bool + (eq_vec ([access_vec_dec w__4 (27:ii)]) ([B1])) + (eq_vec ([access_vec_dec w__5 (8:ii)]) ([B1]))) in + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + 
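  (* Editorial note, not part of the generated source: the fault is encoded as a
     Watchpoint abort syndrome and, per route_to_el2 above (HCR_EL2 bit 27 = TGE or
     MDCR_EL2 bit 8 = TDE, taken from EL0/EL1 in non-secure state with EL2 present),
     the exception is taken to EL2; otherwise it is taken to EL1. *)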
AArch64_AbortSyndrome Exception_Watchpoint fault vaddress >>= fun (exception : ExceptionRecord) -> + read_reg PSTATE_ref >>= fun (w__6 : ProcState) -> + if (or_bool (eq_vec w__6.ProcState_EL EL2) route_to_el2) + then AArch64_TakeException EL2 exception preferred_exception_return vect_offset + else AArch64_TakeException EL1 exception preferred_exception_return vect_offset + +val AArch64_VectorCatchException : FaultRecord -> M unit + +let AArch64_VectorCatchException fault = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + assert_exp (neq_vec w__0.ProcState_EL EL2) "((PSTATE).EL != EL2)" >> + IsSecure () >>= fun (w__1 : bool) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__2 : bits ty64) -> + (read_reg MDCR_EL2_ref : M (list bitU)) >>= fun (w__3 : bits ty32) -> + assert_exp (and_bool + (and_bool (HaveEL EL2) (not w__1)) + (or_bool + (eq_vec ([access_vec_dec w__2 (27:ii)]) ([B1])) + (eq_vec ([access_vec_dec w__3 (8:ii)]) ([B1])))) "((HaveEL(EL2) && !(IsSecure())) && (((HCR_EL2).TGE == '1') || ((MDCR_EL2).TDE == '1')))" >> + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + let (vaddress : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + AArch64_AbortSyndrome Exception_VectorCatch fault vaddress >>= fun (exception : ExceptionRecord) -> + AArch64_TakeException EL2 exception preferred_exception_return vect_offset + +val AArch64_UndefinedFault : unit -> M unit + +let AArch64_UndefinedFault () = + IsSecure () >>= fun (w__0 : bool) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__2 : bits ty64) -> + let (route_to_el2 : bool) = + and_bool + (and_bool (and_bool (HaveEL EL2) (not w__0)) (eq_vec w__1.ProcState_EL EL0)) + (eq_vec ([access_vec_dec w__2 (27:ii)]) ([B1])) in + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + let (exception : ExceptionRecord) = ExceptionSyndrome Exception_Uncategorized in + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + if (gt (uint w__3.ProcState_EL) (uint EL1)) + then + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + AArch64_TakeException w__4.ProcState_EL exception preferred_exception_return vect_offset + else + if route_to_el2 + then AArch64_TakeException EL2 exception preferred_exception_return vect_offset + else AArch64_TakeException EL1 exception preferred_exception_return vect_offset + +val AArch64_SPAlignmentFault : unit -> M unit + +let AArch64_SPAlignmentFault () = + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + let (exception : ExceptionRecord) = ExceptionSyndrome Exception_SPAlignment in + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + if (gt (uint w__0.ProcState_EL) (uint EL1)) + then + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + AArch64_TakeException w__1.ProcState_EL exception preferred_exception_return vect_offset + else + IsSecure () >>= fun (w__2 : bool) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__3 : bits ty64) -> + if (and_bool (and_bool (HaveEL EL2) (not w__2)) (eq_vec ([access_vec_dec w__3 (27:ii)]) ([B1]))) + then AArch64_TakeException EL2 exception preferred_exception_return vect_offset + else AArch64_TakeException EL1 exception preferred_exception_return vect_offset + +val CheckSPAlignment : unit -> M unit + +let CheckSPAlignment () = + (aget_SP (64:ii) () : M (list 
bitU)) >>= fun (sp : bits ty64) -> + let (stack_align_check : bool) = undefined_bool () in + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + (if (eq_vec w__0.ProcState_EL EL0) + then + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__1 : list bitU) -> + let (stack_align_check : bool) = neq_vec ([access_vec_dec w__1 (4:ii)]) ([B0]) in + return stack_align_check + else + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__2 : list bitU) -> + let (stack_align_check : bool) = neq_vec ([access_vec_dec w__2 (3:ii)]) ([B0]) in + return stack_align_check) >>= fun (stack_align_check : bool) -> + if (and_bool stack_align_check (neq_vec sp (Align__1 sp (16:ii) : list bitU))) + then AArch64_SPAlignmentFault () + else return () + +val AArch64_InstructionAbort : list bitU -> FaultRecord -> M unit + +let AArch64_InstructionAbort vaddress fault = + (read_reg SCR_EL3_ref : M (list bitU)) >>= fun (w__0 : bits ty32) -> + IsExternalAbort__1 fault >>= fun (w__1 : bool) -> + let (route_to_el3 : bool) = + and_bool (and_bool (HaveEL EL3) (eq_vec ([access_vec_dec w__0 (3:ii)]) ([B1]))) w__1 in + IsSecure () >>= fun (w__2 : bool) -> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__5 : bits ty64) -> + IsSecondStage fault >>= fun (w__6 : bool) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__7 : bits ty64) -> + IsExternalAbort__1 fault >>= fun (w__8 : bool) -> + let (route_to_el2 : bool) = + and_bool + (and_bool + (and_bool (HaveEL EL2) (not w__2)) + (or_bool (eq_vec w__3.ProcState_EL EL0) (eq_vec w__4.ProcState_EL EL1))) + (or_bool + (or_bool (eq_vec ([access_vec_dec w__5 (27:ii)]) ([B1])) w__6) + (and_bool (and_bool (HaveRASExt ()) (eq_vec ([access_vec_dec w__7 (37:ii)]) ([B1]))) w__8)) in + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + AArch64_AbortSyndrome Exception_InstructionAbort fault vaddress >>= fun (exception : + ExceptionRecord) -> + read_reg PSTATE_ref >>= fun (w__9 : ProcState) -> + if (or_bool (eq_vec w__9.ProcState_EL EL3) route_to_el3) + then AArch64_TakeException EL3 exception preferred_exception_return vect_offset + else + read_reg PSTATE_ref >>= fun (w__10 : ProcState) -> + if (or_bool (eq_vec w__10.ProcState_EL EL2) route_to_el2) + then AArch64_TakeException EL2 exception preferred_exception_return vect_offset + else AArch64_TakeException EL1 exception preferred_exception_return vect_offset + +val AArch64_DataAbort : list bitU -> FaultRecord -> M unit + +let AArch64_DataAbort vaddress fault = + (read_reg SCR_EL3_ref : M (list bitU)) >>= fun (w__0 : bits ty32) -> + IsExternalAbort__1 fault >>= fun (w__1 : bool) -> + let (route_to_el3 : bool) = + and_bool (and_bool (HaveEL EL3) (eq_vec ([access_vec_dec w__0 (3:ii)]) ([B1]))) w__1 in + IsSecure () >>= fun (w__2 : bool) -> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__5 : bits ty64) -> + IsSecondStage fault >>= fun (w__6 : bool) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__7 : bits ty64) -> + IsExternalAbort__1 fault >>= fun (w__8 : bool) -> + let (route_to_el2 : bool) = + and_bool + (and_bool + (and_bool (HaveEL EL2) (not w__2)) + (or_bool (eq_vec w__3.ProcState_EL EL0) (eq_vec w__4.ProcState_EL EL1))) + (or_bool + (or_bool (eq_vec ([access_vec_dec w__5 (27:ii)]) ([B1])) w__6) + (and_bool (and_bool (HaveRASExt ()) (eq_vec 
([access_vec_dec w__7 (37:ii)]) ([B1]))) w__8)) in + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + AArch64_AbortSyndrome Exception_DataAbort fault vaddress >>= fun (exception : ExceptionRecord) -> + read_reg PSTATE_ref >>= fun (w__9 : ProcState) -> + if (or_bool (eq_vec w__9.ProcState_EL EL3) route_to_el3) + then AArch64_TakeException EL3 exception preferred_exception_return vect_offset + else + read_reg PSTATE_ref >>= fun (w__10 : ProcState) -> + if (or_bool (eq_vec w__10.ProcState_EL EL2) route_to_el2) + then AArch64_TakeException EL2 exception preferred_exception_return vect_offset + else AArch64_TakeException EL1 exception preferred_exception_return vect_offset + +val AArch64_BreakpointException : FaultRecord -> M unit + +let AArch64_BreakpointException fault = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + assert_exp (neq_vec w__0.ProcState_EL EL3) "((PSTATE).EL != EL3)" >> + IsSecure () >>= fun (w__1 : bool) -> + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + read_reg PSTATE_ref >>= fun (w__3 : ProcState) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__4 : bits ty64) -> + (read_reg MDCR_EL2_ref : M (list bitU)) >>= fun (w__5 : bits ty32) -> + let (route_to_el2 : bool) = + and_bool + (and_bool + (and_bool (HaveEL EL2) (not w__1)) + (or_bool (eq_vec w__2.ProcState_EL EL0) (eq_vec w__3.ProcState_EL EL1))) + (or_bool + (eq_vec ([access_vec_dec w__4 (27:ii)]) ([B1])) + (eq_vec ([access_vec_dec w__5 (8:ii)]) ([B1]))) in + (ThisInstrAddr (64:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty64) -> + let (vect_offset : ii) = (0:ii) in + let (vaddress : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + AArch64_AbortSyndrome Exception_Breakpoint fault vaddress >>= fun (exception : ExceptionRecord) -> + read_reg PSTATE_ref >>= fun (w__6 : ProcState) -> + if (or_bool (eq_vec w__6.ProcState_EL EL2) route_to_el2) + then AArch64_TakeException EL2 exception preferred_exception_return vect_offset + else AArch64_TakeException EL1 exception preferred_exception_return vect_offset + +val AArch64_Abort : list bitU -> FaultRecord -> M unit + +let AArch64_Abort vaddress fault = + IsDebugException fault >>= fun (w__0 : bool) -> + if w__0 + then + if (eq fault.FaultRecord_acctype AccType_IFETCH) + then + UsingAArch32 () >>= fun (w__1 : bool) -> + if (and_bool w__1 (eq_vec fault.FaultRecord_debugmoe DebugException_VectorCatch)) + then AArch64_VectorCatchException fault + else AArch64_BreakpointException fault + else AArch64_WatchpointException vaddress fault + else + if (eq fault.FaultRecord_acctype AccType_IFETCH) + then AArch64_InstructionAbort vaddress fault + else AArch64_DataAbort vaddress fault + +val AArch64_aset_MemSingle : list bitU -> integer -> AccType -> bool -> list bitU -> M unit + +let AArch64_aset_MemSingle address size acctype wasaligned value_name = + assert_exp (or_bool + (eq size (1:ii)) + (or_bool + (eq size (2:ii)) + (or_bool (eq size (4:ii)) (or_bool (eq size (8:ii)) (eq size (16:ii)))))) "((size == 1) || ((size == 2) || ((size == 4) || ((size == 8) || (size == 16)))))" >> + assert_exp (eq_vec address (Align__1 address size : list bitU)) "(address == Align(address, size))" >> + let (memaddrdesc : AddressDescriptor) = undefined_AddressDescriptor () in + let (iswrite : bool) = true in + AArch64_TranslateAddress address acctype iswrite wasaligned size >>= fun (w__0 : + AddressDescriptor) -> + let memaddrdesc = w__0 in + (if (IsFault 
memaddrdesc) + then AArch64_Abort address memaddrdesc.AddressDescriptor_fault + else return ()) >> + let (_ : unit) = + if memaddrdesc.AddressDescriptor_memattrs.MemoryAttributes_shareable + then ClearExclusiveByAddress memaddrdesc.AddressDescriptor_paddress (ProcessorID ()) size + else () in + let (accdesc : AccessDescriptor) = CreateAccessDescriptor acctype in + aset__Mem memaddrdesc size accdesc value_name + +val AArch64_aget_MemSingle : list bitU -> integer -> AccType -> bool -> M (list bitU) + +let AArch64_aget_MemSingle address size acctype wasaligned = + assert_exp (or_bool + (eq size (1:ii)) + (or_bool + (eq size (2:ii)) + (or_bool (eq size (4:ii)) (or_bool (eq size (8:ii)) (eq size (16:ii)))))) "((size == 1) || ((size == 2) || ((size == 4) || ((size == 8) || (size == 16)))))" >> + assert_exp (eq_vec address (Align__1 address size : list bitU)) "(address == Align(address, size))" >> + let (memaddrdesc : AddressDescriptor) = undefined_AddressDescriptor () in + let value_name = (undefined_vector (integerMult (8:ii) size) (undefined_bit ()) : list bitU) in + let (iswrite : bool) = false in + AArch64_TranslateAddress address acctype iswrite wasaligned size >>= fun (w__0 : + AddressDescriptor) -> + let memaddrdesc = w__0 in + (if (IsFault memaddrdesc) + then AArch64_Abort address memaddrdesc.AddressDescriptor_fault + else return ()) >> + let (accdesc : AccessDescriptor) = CreateAccessDescriptor acctype in + (aget__Mem memaddrdesc size accdesc : M (list bitU)) >>= fun w__1 -> + let value_name = w__1 in + return value_name + +val AArch64_CheckAlignment : list bitU -> ii -> AccType -> bool -> M bool + +let AArch64_CheckAlignment address alignment acctype iswrite = + let (aligned : bool) = eq_vec address (Align__1 address alignment : list bitU) in + let (atomic : bool) = or_bool (eq acctype AccType_ATOMIC) (eq acctype AccType_ATOMICRW) in + let (ordered : bool) = + or_bool + (eq acctype AccType_ORDERED) + (or_bool (eq acctype AccType_ORDEREDRW) (eq acctype AccType_LIMITEDORDERED)) in + let (vector_name : bool) = eq acctype AccType_VEC in + (aget_SCTLR__1 () : M (list bitU)) >>= fun (w__0 : list bitU) -> + let (check' : bool) = + or_bool (or_bool atomic ordered) (eq_vec ([access_vec_dec w__0 (1:ii)]) ([B1])) in + let (secondstage : bool) = undefined_bool () in + (if (and_bool check' (not aligned)) + then + let secondstage = false in + AArch64_Abort address (AArch64_AlignmentFault acctype iswrite secondstage) >> + return secondstage + else return secondstage) >>= fun (secondstage : bool) -> + return aligned + +val aset_Mem : list bitU -> integer -> AccType -> list bitU -> M unit + +let aset_Mem address size acctype value_name__arg = + let value_name = value_name__arg in + let (i : ii) = undefined_int () in + let (iswrite : bool) = true in + BigEndian () >>= fun (w__0 : bool) -> + (if w__0 + then + (BigEndianReverse value_name : M (list bitU)) >>= fun (w__1 : list bitU) -> + let (value_name : list bitU) = w__1 in + return value_name + else return value_name) >>= fun (value_name : list bitU) -> + AArch64_CheckAlignment address size acctype iswrite >>= fun (aligned : bool) -> + let (atomic : bool) = undefined_bool () in + let (atomic : bool) = + if (or_bool + (neq size (16:ii)) + (not (or_bool (eq acctype AccType_VEC) (eq acctype AccType_VECSTREAM)))) + then + let (atomic : bool) = aligned in + atomic + else + let (atomic : bool) = eq_vec address (Align__1 address (8:ii) : list bitU) in + atomic in + let (c : Constraint) = undefined_Constraint () in + (if (not atomic) + then + assert_exp (gt 
size (1:ii)) "(size > 1)" >> + AArch64_aset_MemSingle + address + (1:ii) + acctype + aligned + (slice value_name (0:ii) (8:ii) : list bitU) >> + (if (not aligned) + then + let c = ConstrainUnpredictable Unpredictable_DEVPAGE2 in + assert_exp (or_bool (eq c Constraint_FAULT) (eq c Constraint_NONE)) "((c == Constraint_FAULT) || (c == Constraint_NONE))" >> + let (aligned : bool) = + if (eq c Constraint_NONE) + then + let (aligned : bool) = true in + aligned + else aligned in + return (aligned,c) + else return (aligned,c)) >>= fun ((aligned : bool), (c : Constraint)) -> + (foreachM (index_list (1:ii) (integerMinus size (1:ii)) (1:ii)) () + (fun i () -> + AArch64_aset_MemSingle + (add_vec_int address i : list bitU) + (1:ii) + acctype + aligned + (slice value_name (integerMult (8:ii) i) (8:ii) : list bitU))) >> + return (aligned,c) + else + (if (and_bool + (eq size (16:ii)) + (or_bool (eq acctype AccType_VEC) (eq acctype AccType_VECSTREAM))) + then + AArch64_aset_MemSingle + address + (8:ii) + acctype + aligned + (slice value_name (0:ii) (64:ii) : list bitU) >> + AArch64_aset_MemSingle + (add_vec_int address (8:ii) : list bitU) + (8:ii) + acctype + aligned + (slice value_name (64:ii) (64:ii) : list bitU) + else AArch64_aset_MemSingle address size acctype aligned value_name) >> + return (aligned,c)) >>= fun ((aligned : bool), (c : Constraint)) -> + return () + +val aget_Mem : list bitU -> integer -> AccType -> M (list bitU) + +let aget_Mem address size acctype = + assert_exp (or_bool + (eq size (1:ii)) + (or_bool + (eq size (2:ii)) + (or_bool (eq size (4:ii)) (or_bool (eq size (8:ii)) (eq size (16:ii)))))) "((size == 1) || ((size == 2) || ((size == 4) || ((size == 8) || (size == 16)))))" >> + let value_name = (undefined_vector (integerMult (8:ii) size) (undefined_bit ()) : list bitU) in + let (i : ii) = undefined_int () in + let (iswrite : bool) = false in + AArch64_CheckAlignment address size acctype iswrite >>= fun (aligned : bool) -> + let (atomic : bool) = undefined_bool () in + let (atomic : bool) = + if (or_bool + (neq size (16:ii)) + (not (or_bool (eq acctype AccType_VEC) (eq acctype AccType_VECSTREAM)))) + then + let (atomic : bool) = aligned in + atomic + else + let (atomic : bool) = eq_vec address (Align__1 address (8:ii) : list bitU) in + atomic in + let (c : Constraint) = undefined_Constraint () in + (if (not atomic) + then + assert_exp (gt size (1:ii)) "(size > 1)" >> + (AArch64_aget_MemSingle address (1:ii) acctype aligned : M (list bitU)) >>= fun (w__0 : + list bitU) -> + let value_name = + (set_slice (integerMult (8:ii) size) (8:ii) value_name (0:ii) w__0 : list bitU) in + (if (not aligned) + then + let c = ConstrainUnpredictable Unpredictable_DEVPAGE2 in + assert_exp (or_bool (eq c Constraint_FAULT) (eq c Constraint_NONE)) "((c == Constraint_FAULT) || (c == Constraint_NONE))" >> + let (aligned : bool) = + if (eq c Constraint_NONE) + then + let (aligned : bool) = true in + aligned + else aligned in + return (aligned,c) + else return (aligned,c)) >>= fun ((aligned : bool), (c : Constraint)) -> + (foreachM (index_list (1:ii) (integerMinus size (1:ii)) (1:ii)) value_name + (fun i value_name -> + (AArch64_aget_MemSingle (add_vec_int address i : list bitU) (1:ii) acctype aligned : M (list bitU)) >>= fun (w__1 : + list bitU) -> + let value_name = + (set_slice (integerMult (8:ii) size) (8:ii) value_name (integerMult (8:ii) i) w__1 : list bitU) in + return value_name)) >>= fun value_name -> + return (aligned,c,value_name) + else + (if (and_bool + (eq size (16:ii)) + (or_bool (eq acctype 
AccType_VEC) (eq acctype AccType_VECSTREAM))) + then + (AArch64_aget_MemSingle address (8:ii) acctype aligned : M (list bitU)) >>= fun (w__2 : + list bitU) -> + let value_name = + (set_slice (integerMult (8:ii) size) (64:ii) value_name (0:ii) w__2 : list bitU) in + (AArch64_aget_MemSingle (add_vec_int address (8:ii) : list bitU) (8:ii) acctype aligned : M (list bitU)) >>= fun (w__3 : + list bitU) -> + let value_name = + (set_slice (integerMult (8:ii) size) (64:ii) value_name (64:ii) w__3 : list bitU) in + return value_name + else + (AArch64_aget_MemSingle address size acctype aligned : M (list bitU)) >>= fun w__4 -> + let value_name = w__4 in + return value_name) >>= fun value_name -> + return (aligned,c,value_name)) >>= fun ((aligned : bool), (c : Constraint), value_name) -> + BigEndian () >>= fun (w__5 : bool) -> + (if w__5 + then + (BigEndianReverse value_name : M (list bitU)) >>= fun w__6 -> + let value_name = w__6 in + return value_name + else return value_name) >>= fun value_name -> + return value_name + +val AArch32_EnterMode : list bitU -> list bitU -> ii -> ii -> M unit + +let AArch32_EnterMode target_mode preferred_exception_return lr_offset vect_offset = + let (_ : unit) = SynchronizeContext () in + ELUsingAArch32 EL1 >>= fun (w__0 : bool) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + assert_exp (and_bool w__0 (neq_vec w__1.ProcState_EL EL2)) "(ELUsingAArch32(EL1) && ((PSTATE).EL != EL2))" >> + (GetPSRFromPSTATE () : M (list bitU)) >>= fun (spsr : bits ty32) -> + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + (if (eq_vec w__2.ProcState_M M32_Monitor) + then + (read_reg SCR_ref : M (list bitU)) >>= fun (w__3 : list bitU) -> + write_reg SCR_ref (set_slice (32:ii) (1:ii) w__3 (0:ii) ([B0]) : list bitU) + else return ()) >> + AArch32_WriteMode target_mode >> + aset_SPSR spsr >> + aset_R (14:ii) (add_vec_int preferred_exception_return lr_offset : list bitU) >> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + (read_reg SCTLR_ref : M (list bitU)) >>= fun (w__5 : bits ty32) -> + write_reg PSTATE_ref <|w__4 with ProcState_T = ([access_vec_dec w__5 (30:ii)])|> >> + read_reg PSTATE_ref >>= fun (w__6 : ProcState) -> + write_reg PSTATE_ref <|w__6 with ProcState_SS = ([B0])|> >> + (if (eq_vec target_mode M32_FIQ) + then + let split_vec = [B1;B1;B1] in + let (tup__0, tup__1, tup__2) = + ((subrange_vec_dec split_vec (2:ii) (2:ii) : list bitU),(subrange_vec_dec + split_vec + (1:ii) + (1:ii) : list bitU),(subrange_vec_dec + split_vec + (0:ii) + (0:ii) : list bitU)) in + read_reg PSTATE_ref >>= fun (w__7 : ProcState) -> + write_reg PSTATE_ref <|w__7 with ProcState_A = tup__0|> >> + read_reg PSTATE_ref >>= fun (w__8 : ProcState) -> + write_reg PSTATE_ref <|w__8 with ProcState_I = tup__1|> >> + read_reg PSTATE_ref >>= fun (w__9 : ProcState) -> + write_reg PSTATE_ref <|w__9 with ProcState_F = tup__2|> + else + if (or_bool (eq_vec target_mode M32_Abort) (eq_vec target_mode M32_IRQ)) + then + let split_vec = [B1;B1] in + let (tup__0, tup__1) = + ((subrange_vec_dec split_vec (1:ii) (1:ii) : list bitU),(subrange_vec_dec + split_vec + (0:ii) + (0:ii) : list bitU)) in + read_reg PSTATE_ref >>= fun (w__10 : ProcState) -> + write_reg PSTATE_ref <|w__10 with ProcState_A = tup__0|> >> + read_reg PSTATE_ref >>= fun (w__11 : ProcState) -> + write_reg PSTATE_ref <|w__11 with ProcState_I = tup__1|> + else + read_reg PSTATE_ref >>= fun (w__12 : ProcState) -> + write_reg PSTATE_ref <|w__12 with ProcState_I = ([B1])|>) >> + read_reg PSTATE_ref >>= fun (w__13 : ProcState) -> + (read_reg SCTLR_ref 
: M (list bitU)) >>= fun (w__14 : bits ty32) -> + write_reg PSTATE_ref <|w__13 with ProcState_E = ([access_vec_dec w__14 (25:ii)])|> >> + read_reg PSTATE_ref >>= fun (w__15 : ProcState) -> + write_reg PSTATE_ref <|w__15 with ProcState_IL = ([B0])|> >> + read_reg PSTATE_ref >>= fun (w__16 : ProcState) -> + write_reg PSTATE_ref <|w__16 with ProcState_IT = ([B0;B0;B0;B0;B0;B0;B0;B0])|> >> + (read_reg SCTLR_ref : M (list bitU)) >>= fun (w__17 : bits ty32) -> + (if (and_bool (HavePANExt ()) (eq_vec ([access_vec_dec w__17 (23:ii)]) ([B0]))) + then + read_reg PSTATE_ref >>= fun (w__18 : ProcState) -> + write_reg PSTATE_ref <|w__18 with ProcState_PAN = ([B1])|> + else return ()) >> + (ExcVectorBase () : M (list bitU)) >>= fun (w__19 : list bitU) -> + BranchTo + (concat_vec + (slice w__19 (5:ii) (27:ii) : list bitU) + (__GetSlice_int (5:ii) vect_offset (0:ii) : list bitU) : list bitU) + BranchType_UNKNOWN >> + return (EndOfInstruction ()) + +val AArch32_GeneralExceptionsToAArch64 : unit -> M bool + +let AArch32_GeneralExceptionsToAArch64 () = + read_reg PSTATE_ref >>= fun (w__0 : ProcState) -> + ELUsingAArch32 EL1 >>= fun (w__1 : bool) -> + IsSecure () >>= fun (w__2 : bool) -> + ELUsingAArch32 EL2 >>= fun (w__3 : bool) -> + (read_reg HCR_EL2_ref : M (list bitU)) >>= fun (w__4 : bits ty64) -> + return (or_bool + (and_bool (eq_vec w__0.ProcState_EL EL0) (not w__1)) + (and_bool + (and_bool (and_bool (HaveEL EL2) (not w__2)) (not w__3)) + (eq_vec ([access_vec_dec w__4 (27:ii)]) ([B1])))) + +val AArch32_EnterHypMode : ExceptionRecord -> list bitU -> ii -> M unit + +let AArch32_EnterHypMode exception preferred_exception_return vect_offset = + let (_ : unit) = SynchronizeContext () in + IsSecure () >>= fun (w__0 : bool) -> + ELUsingAArch32 EL2 >>= fun (w__1 : bool) -> + assert_exp (and_bool (and_bool (HaveEL EL2) (not w__0)) w__1) "((HaveEL(EL2) && !(IsSecure())) && ELUsingAArch32(EL2))" >> + (GetPSRFromPSTATE () : M (list bitU)) >>= fun (spsr : bits ty32) -> + (if (not + (or_bool + (eq exception.ExceptionRecord_typ Exception_IRQ) + (eq exception.ExceptionRecord_typ Exception_FIQ))) + then AArch32_ReportHypEntry exception + else return ()) >> + AArch32_WriteMode M32_Hyp >> + aset_SPSR spsr >> + write_reg ELR_hyp_ref preferred_exception_return >> + read_reg PSTATE_ref >>= fun (w__2 : ProcState) -> + (read_reg HSCTLR_ref : M (list bitU)) >>= fun (w__3 : bits ty32) -> + write_reg PSTATE_ref <|w__2 with ProcState_T = ([access_vec_dec w__3 (30:ii)])|> >> + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + write_reg PSTATE_ref <|w__4 with ProcState_SS = ([B0])|> >> + (aget_SCR_GEN () : M (list bitU)) >>= fun (w__5 : list bitU) -> + (if (or_bool (not (HaveEL EL3)) (eq_vec ([access_vec_dec w__5 (3:ii)]) ([B0]))) + then + read_reg PSTATE_ref >>= fun (w__6 : ProcState) -> + write_reg PSTATE_ref <|w__6 with ProcState_A = ([B1])|> + else return ()) >> + (aget_SCR_GEN () : M (list bitU)) >>= fun (w__7 : list bitU) -> + (if (or_bool (not (HaveEL EL3)) (eq_vec ([access_vec_dec w__7 (1:ii)]) ([B0]))) + then + read_reg PSTATE_ref >>= fun (w__8 : ProcState) -> + write_reg PSTATE_ref <|w__8 with ProcState_I = ([B1])|> + else return ()) >> + (aget_SCR_GEN () : M (list bitU)) >>= fun (w__9 : list bitU) -> + (if (or_bool (not (HaveEL EL3)) (eq_vec ([access_vec_dec w__9 (2:ii)]) ([B0]))) + then + read_reg PSTATE_ref >>= fun (w__10 : ProcState) -> + write_reg PSTATE_ref <|w__10 with ProcState_F = ([B1])|> + else return ()) >> + read_reg PSTATE_ref >>= fun (w__11 : ProcState) -> + (read_reg HSCTLR_ref : M (list bitU)) >>= fun 
(w__12 : bits ty32) -> + write_reg PSTATE_ref <|w__11 with ProcState_E = ([access_vec_dec w__12 (25:ii)])|> >> + read_reg PSTATE_ref >>= fun (w__13 : ProcState) -> + write_reg PSTATE_ref <|w__13 with ProcState_IL = ([B0])|> >> + read_reg PSTATE_ref >>= fun (w__14 : ProcState) -> + write_reg PSTATE_ref <|w__14 with ProcState_IT = ([B0;B0;B0;B0;B0;B0;B0;B0])|> >> + (read_reg HVBAR_ref : M (list bitU)) >>= fun (w__15 : bits ty32) -> + BranchTo + (concat_vec + (slice w__15 (5:ii) (27:ii) : list bitU) + (__GetSlice_int (5:ii) vect_offset (0:ii) : list bitU) : list bitU) + BranchType_UNKNOWN >> + return (EndOfInstruction ()) + +val AArch32_TakeUndefInstrException__0 : unit -> M unit + +val AArch32_TakeUndefInstrException__1 : ExceptionRecord -> M unit + +let AArch32_TakeUndefInstrException__1 exception = + IsSecure () >>= fun (w__0 : bool) -> + read_reg PSTATE_ref >>= fun (w__1 : ProcState) -> + (read_reg HCR_ref : M (list bitU)) >>= fun (w__2 : bits ty32) -> + let (route_to_hyp : bool) = + and_bool + (and_bool (and_bool (HaveEL EL2) (not w__0)) (eq_vec w__1.ProcState_EL EL0)) + (eq_vec ([access_vec_dec w__2 (27:ii)]) ([B1])) in + (ThisInstrAddr (32:ii) () : M (list bitU)) >>= fun (preferred_exception_return : bits ty32) -> + let (vect_offset : ii) = (4:ii) in + CurrentInstrSet () >>= fun (w__3 : InstrSet) -> + let (lr_offset : ii) = if (eq w__3 InstrSet_A32) then (4:ii) else (2:ii) in + read_reg PSTATE_ref >>= fun (w__4 : ProcState) -> + if (eq_vec w__4.ProcState_EL EL2) + then AArch32_EnterHypMode exception preferred_exception_return vect_offset + else + if route_to_hyp + then AArch32_EnterHypMode exception preferred_exception_return (20:ii) + else AArch32_EnterMode M32_Undef preferred_exception_return lr_offset vect_offset + +let AArch32_TakeUndefInstrException__0 () = + let (exception : ExceptionRecord) = ExceptionSyndrome Exception_Uncategorized in + AArch32_TakeUndefInstrException__1 exception + +val UnallocatedEncoding : unit -> M unit + +let UnallocatedEncoding () = + UsingAArch32 () >>= fun (w__0 : bool) -> + AArch32_ExecutingCP10or11Instr () >>= fun (w__1 : bool) -> + (if (and_bool w__0 w__1) + then + (read_reg FPEXC_ref : M (list bitU)) >>= fun (w__2 : list bitU) -> + write_reg FPEXC_ref (set_slice (32:ii) (1:ii) w__2 (29:ii) ([B0]) : list bitU) + else return ()) >> + UsingAArch32 () >>= fun (w__3 : bool) -> + AArch32_GeneralExceptionsToAArch64 () >>= fun (w__4 : bool) -> + if (and_bool w__3 (not w__4)) + then AArch32_TakeUndefInstrException__0 () + else AArch64_UndefinedFault () + +val aarch64_memory_single_general_immediate_unsigned : AccType -> integer -> MemOp -> ii -> list bitU -> bool -> integer -> bool -> ii -> bool -> M unit + +let aarch64_memory_single_general_immediate_unsigned acctype datasize memop n offset postindex regsize signed t wback__arg = + assert_exp true "destsize constraint" >> + let dbytes = ex_int (integerDiv datasize (8:ii)) in + assert_exp true "datasize constraint" >> + assert_exp true "dbytes constraint" >> + let wback = wback__arg in + let (address : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + let data = (undefined_vector datasize (undefined_bit ()) : list bitU) in + let (wb_unknown : bool) = false in + let (rt_unknown : bool) = false in + let (c : Constraint) = undefined_Constraint () in + (if (and_bool + (and_bool (and_bool (eq memop MemOp_LOAD) wback) (eq (ex_int n) (ex_int t))) + (neq (ex_int n) (31:ii))) + then + let c = ConstrainUnpredictable Unpredictable_WBOVERLAPLD in + assert_exp (or_bool + (eq c 
Constraint_WBSUPPRESS) + (or_bool + (eq c Constraint_UNKNOWN) + (or_bool (eq c Constraint_UNDEF) (eq c Constraint_NOP)))) "((c == Constraint_WBSUPPRESS) || ((c == Constraint_UNKNOWN) || ((c == Constraint_UNDEF) || (c == Constraint_NOP))))" >> + match c with + | Constraint_WBSUPPRESS -> + let (wback : bool) = false in + return (wb_unknown,wback) + | Constraint_UNKNOWN -> + let (wb_unknown : bool) = true in + return (wb_unknown,wback) + | Constraint_UNDEF -> + UnallocatedEncoding () >> + return (wb_unknown,wback) + | Constraint_NOP -> return (wb_unknown,wback) + end >>= fun ((wb_unknown : bool), (wback : bool)) -> + return (c,wb_unknown,wback) + else return (c,wb_unknown,wback)) >>= fun ((c : Constraint), (wb_unknown : bool), (wback : bool)) -> + (if (and_bool + (and_bool (and_bool (eq memop MemOp_STORE) wback) (eq (ex_int n) (ex_int t))) + (neq (ex_int n) (31:ii))) + then + let c = ConstrainUnpredictable Unpredictable_WBOVERLAPST in + assert_exp (or_bool + (eq c Constraint_NONE) + (or_bool + (eq c Constraint_UNKNOWN) + (or_bool (eq c Constraint_UNDEF) (eq c Constraint_NOP)))) "((c == Constraint_NONE) || ((c == Constraint_UNKNOWN) || ((c == Constraint_UNDEF) || (c == Constraint_NOP))))" >> + match c with + | Constraint_NONE -> + let (rt_unknown : bool) = false in + return rt_unknown + | Constraint_UNKNOWN -> + let (rt_unknown : bool) = true in + return rt_unknown + | Constraint_UNDEF -> + UnallocatedEncoding () >> + return rt_unknown + | Constraint_NOP -> return rt_unknown + end >>= fun (rt_unknown : bool) -> + return (c,rt_unknown) + else return (c,rt_unknown)) >>= fun ((c : Constraint), (rt_unknown : bool)) -> + (if (eq (ex_int n) (31:ii)) + then + (if (neq memop MemOp_PREFETCH) + then CheckSPAlignment () + else return ()) >> + (aget_SP (64:ii) () : M (list bitU)) >>= fun (w__0 : bits ty64) -> + let (address : bits ty64) = w__0 in + return address + else + (aget_X (64:ii) n : M (list bitU)) >>= fun (w__1 : bits ty64) -> + let (address : bits ty64) = w__1 in + return address) >>= fun (address : bits ty64) -> + let (address : bits ty64) = + if (not postindex) + then + let (address : bits ty64) = (add_vec address offset : list bitU) in + address + else address in + match memop with + | MemOp_STORE -> + (if rt_unknown + then + let data = (undefined_vector (length data) (undefined_bit ()) : list bitU) in + return data + else + (aget_X (length data) t : M (list bitU)) >>= fun w__2 -> + let data = w__2 in + return data) >>= fun data -> + aset_Mem address dbytes acctype data >> + return data + | MemOp_LOAD -> + (aget_Mem address dbytes acctype : M (list bitU)) >>= fun w__3 -> + let data = w__3 in + (if signed + then + (SignExtend__0 data regsize : M (list bitU)) >>= fun (w__4 : list bitU) -> + aset_X t w__4 + else + (ZeroExtend__0 data regsize : M (list bitU)) >>= fun (w__5 : list bitU) -> + aset_X t w__5) >> + return data + | MemOp_PREFETCH -> return data + end >>= fun data -> + (if wback + then + let (address : bits ty64) = + if wb_unknown + then + let (address : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + address + else + let (address : bits ty64) = + if postindex + then + let (address : bits ty64) = (add_vec address offset : list bitU) in + address + else address in + address in + (if (eq (ex_int n) (31:ii)) + then aset_SP address + else aset_X n address) >> + return address + else return address) >>= fun (address : bits ty64) -> + return () + +val aarch64_memory_single_general_immediate_signed_postidx : AccType -> integer -> MemOp -> ii -> list bitU -> bool -> 
integer -> bool -> ii -> bool -> M unit + +let aarch64_memory_single_general_immediate_signed_postidx acctype datasize memop n offset postindex regsize signed t wback__arg = + assert_exp true "destsize constraint" >> + let dbytes = ex_int (integerDiv datasize (8:ii)) in + assert_exp true "datasize constraint" >> + assert_exp true "dbytes constraint" >> + let wback = wback__arg in + let (address : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + let data = (undefined_vector datasize (undefined_bit ()) : list bitU) in + let (wb_unknown : bool) = false in + let (rt_unknown : bool) = false in + let (c : Constraint) = undefined_Constraint () in + (if (and_bool + (and_bool (and_bool (eq memop MemOp_LOAD) wback) (eq (ex_int n) (ex_int t))) + (neq (ex_int n) (31:ii))) + then + let c = ConstrainUnpredictable Unpredictable_WBOVERLAPLD in + assert_exp (or_bool + (eq c Constraint_WBSUPPRESS) + (or_bool + (eq c Constraint_UNKNOWN) + (or_bool (eq c Constraint_UNDEF) (eq c Constraint_NOP)))) "((c == Constraint_WBSUPPRESS) || ((c == Constraint_UNKNOWN) || ((c == Constraint_UNDEF) || (c == Constraint_NOP))))" >> + match c with + | Constraint_WBSUPPRESS -> + let (wback : bool) = false in + return (wb_unknown,wback) + | Constraint_UNKNOWN -> + let (wb_unknown : bool) = true in + return (wb_unknown,wback) + | Constraint_UNDEF -> + UnallocatedEncoding () >> + return (wb_unknown,wback) + | Constraint_NOP -> return (wb_unknown,wback) + end >>= fun ((wb_unknown : bool), (wback : bool)) -> + return (c,wb_unknown,wback) + else return (c,wb_unknown,wback)) >>= fun ((c : Constraint), (wb_unknown : bool), (wback : bool)) -> + (if (and_bool + (and_bool (and_bool (eq memop MemOp_STORE) wback) (eq (ex_int n) (ex_int t))) + (neq (ex_int n) (31:ii))) + then + let c = ConstrainUnpredictable Unpredictable_WBOVERLAPST in + assert_exp (or_bool + (eq c Constraint_NONE) + (or_bool + (eq c Constraint_UNKNOWN) + (or_bool (eq c Constraint_UNDEF) (eq c Constraint_NOP)))) "((c == Constraint_NONE) || ((c == Constraint_UNKNOWN) || ((c == Constraint_UNDEF) || (c == Constraint_NOP))))" >> + match c with + | Constraint_NONE -> + let (rt_unknown : bool) = false in + return rt_unknown + | Constraint_UNKNOWN -> + let (rt_unknown : bool) = true in + return rt_unknown + | Constraint_UNDEF -> + UnallocatedEncoding () >> + return rt_unknown + | Constraint_NOP -> return rt_unknown + end >>= fun (rt_unknown : bool) -> + return (c,rt_unknown) + else return (c,rt_unknown)) >>= fun ((c : Constraint), (rt_unknown : bool)) -> + (if (eq (ex_int n) (31:ii)) + then + (if (neq memop MemOp_PREFETCH) + then CheckSPAlignment () + else return ()) >> + (aget_SP (64:ii) () : M (list bitU)) >>= fun (w__0 : bits ty64) -> + let (address : bits ty64) = w__0 in + return address + else + (aget_X (64:ii) n : M (list bitU)) >>= fun (w__1 : bits ty64) -> + let (address : bits ty64) = w__1 in + return address) >>= fun (address : bits ty64) -> + let (address : bits ty64) = + if (not postindex) + then + let (address : bits ty64) = (add_vec address offset : list bitU) in + address + else address in + match memop with + | MemOp_STORE -> + (if rt_unknown + then + let data = (undefined_vector (length data) (undefined_bit ()) : list bitU) in + return data + else + (aget_X (length data) t : M (list bitU)) >>= fun w__2 -> + let data = w__2 in + return data) >>= fun data -> + aset_Mem address dbytes acctype data >> + return data + | MemOp_LOAD -> + (aget_Mem address dbytes acctype : M (list bitU)) >>= fun w__3 -> + let data = w__3 in + (if signed + then + 
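  (* Editorial note, not part of the generated source: a loaded value is
     sign-extended here for the signed load forms, or zero-extended in the else
     branch, up to regsize (32 or 64 bits) before being written to X[t]. *)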
(SignExtend__0 data regsize : M (list bitU)) >>= fun (w__4 : list bitU) -> + aset_X t w__4 + else + (ZeroExtend__0 data regsize : M (list bitU)) >>= fun (w__5 : list bitU) -> + aset_X t w__5) >> + return data + | MemOp_PREFETCH -> return data + end >>= fun data -> + (if wback + then + let (address : bits ty64) = + if wb_unknown + then + let (address : bits ty64) = (undefined_vector (64:ii) (undefined_bit ()) : list bitU) in + address + else + let (address : bits ty64) = + if postindex + then + let (address : bits ty64) = (add_vec address offset : list bitU) in + address + else address in + address in + (if (eq (ex_int n) (31:ii)) + then aset_SP address + else aset_X n address) >> + return address + else return address) >>= fun (address : bits ty64) -> + return () + +val memory_single_general_immediate_unsigned_aarch64_memory_single_general_immediate_unsigned__decode : list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> M unit + +let memory_single_general_immediate_unsigned_aarch64_memory_single_general_immediate_unsigned__decode size V opc imm12 Rn Rt = + write_reg __unconditional_ref true >> + let (wback : bool) = false in + let (postindex : bool) = false in + let (scale : ii) = uint size in + (ZeroExtend__0 imm12 (64:ii) : M (list bitU)) >>= fun (w__0 : list bitU) -> + (LSL w__0 scale : M (list bitU)) >>= fun (offset : bits ty64) -> + let (n : ii) = uint Rn in + let (t : ii) = uint Rt in + let (acctype : AccType) = AccType_NORMAL in + let (memop : MemOp) = undefined_MemOp () in + let (signed : bool) = undefined_bool () in + let (regsize : ii) = undefined_int () in + (if (eq_vec ([access_vec_dec opc (1:ii)]) ([B0])) + then + let (memop : MemOp) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then MemOp_LOAD + else MemOp_STORE in + let (regsize : ii) = if (eq_vec size ([B1;B1])) then (64:ii) else (32:ii) in + let (signed : bool) = false in + return (memop,regsize,signed) + else + (if (eq_vec size ([B1;B1])) + then + let memop = MemOp_PREFETCH in + (if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then UnallocatedEncoding () + else return ()) >> + return (memop,regsize,signed) + else + let memop = MemOp_LOAD in + (if (and_bool (eq_vec size ([B1;B0])) (eq_vec ([access_vec_dec opc (0:ii)]) ([B1]))) + then UnallocatedEncoding () + else return ()) >> + let (regsize : ii) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then (32:ii) + else (64:ii) in + let (signed : bool) = true in + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + let (datasize : ii) = shl_int (8:ii) scale in + aarch64_memory_single_general_immediate_unsigned + acctype + (ex_int datasize) + memop + n + offset + postindex + (ex_int regsize) + signed + t + wback + +val memory_single_general_immediate_unsigned_aarch64_memory_single_general_immediate_signed_postidx__decode : list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> M unit + +let memory_single_general_immediate_unsigned_aarch64_memory_single_general_immediate_signed_postidx__decode size V opc imm12 Rn Rt = + write_reg __unconditional_ref true >> + let (wback : bool) = false in + let (postindex : bool) = false in + let (scale : ii) = uint size in + (ZeroExtend__0 imm12 (64:ii) : M (list bitU)) >>= fun (w__0 : list bitU) -> + (LSL w__0 scale : M (list bitU)) >>= fun (offset : bits ty64) -> + let (n : ii) = uint Rn in + let (t : ii) = uint Rt in + let (acctype : AccType) 
= AccType_NORMAL in + let (memop : MemOp) = undefined_MemOp () in + let (signed : bool) = undefined_bool () in + let (regsize : ii) = undefined_int () in + (if (eq_vec ([access_vec_dec opc (1:ii)]) ([B0])) + then + let (memop : MemOp) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then MemOp_LOAD + else MemOp_STORE in + let (regsize : ii) = if (eq_vec size ([B1;B1])) then (64:ii) else (32:ii) in + let (signed : bool) = false in + return (memop,regsize,signed) + else + (if (eq_vec size ([B1;B1])) + then + UnallocatedEncoding () >> + return (memop,regsize,signed) + else + let memop = MemOp_LOAD in + (if (and_bool (eq_vec size ([B1;B0])) (eq_vec ([access_vec_dec opc (0:ii)]) ([B1]))) + then UnallocatedEncoding () + else return ()) >> + let (regsize : ii) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then (32:ii) + else (64:ii) in + let (signed : bool) = true in + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + let (datasize : ii) = shl_int (8:ii) scale in + aarch64_memory_single_general_immediate_signed_postidx + acctype + (ex_int datasize) + memop + n + offset + postindex + (ex_int regsize) + signed + t + wback + +val memory_single_general_immediate_signed_preidx_aarch64_memory_single_general_immediate_signed_postidx__decode : list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> M unit + +let memory_single_general_immediate_signed_preidx_aarch64_memory_single_general_immediate_signed_postidx__decode size V opc imm9 Rn Rt = + write_reg __unconditional_ref true >> + let (wback : bool) = true in + let (postindex : bool) = false in + let (scale : ii) = uint size in + (SignExtend__0 imm9 (64:ii) : M (list bitU)) >>= fun (offset : bits ty64) -> + let (n : ii) = uint Rn in + let (t : ii) = uint Rt in + let (acctype : AccType) = AccType_NORMAL in + let (memop : MemOp) = undefined_MemOp () in + let (signed : bool) = undefined_bool () in + let (regsize : ii) = undefined_int () in + (if (eq_vec ([access_vec_dec opc (1:ii)]) ([B0])) + then + let (memop : MemOp) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then MemOp_LOAD + else MemOp_STORE in + let (regsize : ii) = if (eq_vec size ([B1;B1])) then (64:ii) else (32:ii) in + let (signed : bool) = false in + return (memop,regsize,signed) + else + (if (eq_vec size ([B1;B1])) + then + UnallocatedEncoding () >> + return (memop,regsize,signed) + else + let memop = MemOp_LOAD in + (if (and_bool (eq_vec size ([B1;B0])) (eq_vec ([access_vec_dec opc (0:ii)]) ([B1]))) + then UnallocatedEncoding () + else return ()) >> + let (regsize : ii) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then (32:ii) + else (64:ii) in + let (signed : bool) = true in + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + let (datasize : ii) = shl_int (8:ii) scale in + aarch64_memory_single_general_immediate_signed_postidx + acctype + (ex_int datasize) + memop + n + offset + postindex + (ex_int regsize) + signed + t + wback + +val memory_single_general_immediate_signed_postidx_aarch64_memory_single_general_immediate_signed_postidx__decode : list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> M unit + +let memory_single_general_immediate_signed_postidx_aarch64_memory_single_general_immediate_signed_postidx__decode size V opc imm9 
Rn Rt = + write_reg __unconditional_ref true >> + let (wback : bool) = true in + let (postindex : bool) = true in + let (scale : ii) = uint size in + (SignExtend__0 imm9 (64:ii) : M (list bitU)) >>= fun (offset : bits ty64) -> + let (n : ii) = uint Rn in + let (t : ii) = uint Rt in + let (acctype : AccType) = AccType_NORMAL in + let (memop : MemOp) = undefined_MemOp () in + let (signed : bool) = undefined_bool () in + let (regsize : ii) = undefined_int () in + (if (eq_vec ([access_vec_dec opc (1:ii)]) ([B0])) + then + let (memop : MemOp) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then MemOp_LOAD + else MemOp_STORE in + let (regsize : ii) = if (eq_vec size ([B1;B1])) then (64:ii) else (32:ii) in + let (signed : bool) = false in + return (memop,regsize,signed) + else + (if (eq_vec size ([B1;B1])) + then + UnallocatedEncoding () >> + return (memop,regsize,signed) + else + let memop = MemOp_LOAD in + (if (and_bool (eq_vec size ([B1;B0])) (eq_vec ([access_vec_dec opc (0:ii)]) ([B1]))) + then UnallocatedEncoding () + else return ()) >> + let (regsize : ii) = + if (eq_vec ([access_vec_dec opc (0:ii)]) ([B1])) + then (32:ii) + else (64:ii) in + let (signed : bool) = true in + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + return (memop,regsize,signed)) >>= fun ((memop : MemOp), (regsize : ii), (signed : bool)) -> + let (datasize : ii) = shl_int (8:ii) scale in + aarch64_memory_single_general_immediate_signed_postidx + acctype + (ex_int datasize) + memop + n + offset + postindex + (ex_int regsize) + signed + t + wback + +val ReservedValue : unit -> M unit + +let ReservedValue () = + UsingAArch32 () >>= fun (w__0 : bool) -> + AArch32_GeneralExceptionsToAArch64 () >>= fun (w__1 : bool) -> + if (and_bool w__0 (not w__1)) + then AArch32_TakeUndefInstrException__0 () + else AArch64_UndefinedFault () + +val integer_arithmetic_addsub_immediate_decode : list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> list bitU -> M unit + +let integer_arithmetic_addsub_immediate_decode sf op S shift imm12 Rn Rd = + write_reg __unconditional_ref true >> + let (d : ii) = uint Rd in + let (n : ii) = uint Rn in + let (datasize : integer) = if (eq_vec sf ([B1])) then (64:ii) else (32:ii) in + let (sub_op : bool) = eq_vec op ([B1]) in + let (setflags : bool) = eq_vec S ([B1]) in + let imm = (undefined_vector datasize (undefined_bit ()) : list bitU) in + let b__0 = shift in + (if (eq_vec b__0 ([B0;B0])) + then + (ZeroExtend__0 imm12 datasize : M (list bitU)) >>= fun w__0 -> + let imm = w__0 in + return imm + else + (if (eq_vec b__0 ([B0;B1])) + then + (ZeroExtend__0 (concat_vec imm12 (Zeros__0 (12:ii) : list bitU) : list bitU) datasize : M (list bitU)) >>= fun w__1 -> + let imm = w__1 in + return imm + else + ReservedValue () >> + return imm) >>= fun imm -> + return imm) >>= fun imm -> + aarch64_integer_arithmetic_addsub_immediate d datasize imm n setflags sub_op + +val decode : list bitU -> M unit + +let decode op_code = + if (eq_vec (subrange_vec_dec op_code (29:ii) (24:ii) : list bitU) ([B1;B1;B1;B0;B0;B1])) + then + let (size : bits ty2) = (subrange_vec_dec op_code (31:ii) (30:ii) : list bitU) in + let (V : bits ty1) = [access_vec_dec op_code (26:ii)] in + let (opc : bits ty2) = (subrange_vec_dec op_code (23:ii) (22:ii) : list bitU) in + let (imm12 : bits ty12) = (subrange_vec_dec op_code (21:ii) (10:ii) : list bitU) in + let (Rn : bits ty5) = (subrange_vec_dec op_code (9:ii) (5:ii) : list bitU) in + let (Rt : bits ty5) = 
(subrange_vec_dec op_code (4:ii) (0:ii) : list bitU) in + memory_single_general_immediate_unsigned_aarch64_memory_single_general_immediate_unsigned__decode + size + V + opc + imm12 + Rn + Rt + else + if (and_bool + (eq_vec (subrange_vec_dec op_code (29:ii) (24:ii) : list bitU) ([B1;B1;B1;B0;B0;B0])) + (and_bool + (eq_vec (subrange_vec_dec op_code (21:ii) (21:ii) : list bitU) ([B0])) + (eq_vec (subrange_vec_dec op_code (11:ii) (10:ii) : list bitU) ([B0;B1])))) + then + let (size : bits ty2) = (subrange_vec_dec op_code (31:ii) (30:ii) : list bitU) in + let (V : bits ty1) = [access_vec_dec op_code (26:ii)] in + let (opc : bits ty2) = (subrange_vec_dec op_code (23:ii) (22:ii) : list bitU) in + let (imm9 : bits ty9) = (subrange_vec_dec op_code (20:ii) (12:ii) : list bitU) in + let (Rn : bits ty5) = (subrange_vec_dec op_code (9:ii) (5:ii) : list bitU) in + let (Rt : bits ty5) = (subrange_vec_dec op_code (4:ii) (0:ii) : list bitU) in + memory_single_general_immediate_signed_postidx_aarch64_memory_single_general_immediate_signed_postidx__decode + size + V + opc + imm9 + Rn + Rt + else + if (and_bool + (eq_vec (subrange_vec_dec op_code (29:ii) (24:ii) : list bitU) ([B1;B1;B1;B0;B0;B0])) + (and_bool + (eq_vec (subrange_vec_dec op_code (21:ii) (21:ii) : list bitU) ([B0])) + (eq_vec (subrange_vec_dec op_code (11:ii) (10:ii) : list bitU) ([B1;B1])))) + then + let (size : bits ty2) = (subrange_vec_dec op_code (31:ii) (30:ii) : list bitU) in + let (V : bits ty1) = [access_vec_dec op_code (26:ii)] in + let (opc : bits ty2) = (subrange_vec_dec op_code (23:ii) (22:ii) : list bitU) in + let (imm9 : bits ty9) = (subrange_vec_dec op_code (20:ii) (12:ii) : list bitU) in + let (Rn : bits ty5) = (subrange_vec_dec op_code (9:ii) (5:ii) : list bitU) in + let (Rt : bits ty5) = (subrange_vec_dec op_code (4:ii) (0:ii) : list bitU) in + memory_single_general_immediate_signed_preidx_aarch64_memory_single_general_immediate_signed_postidx__decode + size + V + opc + imm9 + Rn + Rt + else + if (eq_vec (subrange_vec_dec op_code (29:ii) (24:ii) : list bitU) ([B1;B1;B1;B0;B0;B1])) + then + let (size : bits ty2) = (subrange_vec_dec op_code (31:ii) (30:ii) : list bitU) in + let (V : bits ty1) = [access_vec_dec op_code (26:ii)] in + let (opc : bits ty2) = (subrange_vec_dec op_code (23:ii) (22:ii) : list bitU) in + let (imm12 : bits ty12) = (subrange_vec_dec op_code (21:ii) (10:ii) : list bitU) in + let (Rn : bits ty5) = (subrange_vec_dec op_code (9:ii) (5:ii) : list bitU) in + let (Rt : bits ty5) = (subrange_vec_dec op_code (4:ii) (0:ii) : list bitU) in + memory_single_general_immediate_unsigned_aarch64_memory_single_general_immediate_signed_postidx__decode + size + V + opc + imm12 + Rn + Rt + else + let (sf : bits ty1) = [access_vec_dec op_code (31:ii)] in + let (op : bits ty1) = [access_vec_dec op_code (30:ii)] in + let (S : bits ty1) = [access_vec_dec op_code (29:ii)] in + let (shift : bits ty2) = (subrange_vec_dec op_code (23:ii) (22:ii) : list bitU) in + let (imm12 : bits ty12) = (subrange_vec_dec op_code (21:ii) (10:ii) : list bitU) in + let (Rn : bits ty5) = (subrange_vec_dec op_code (9:ii) (5:ii) : list bitU) in + let (Rd : bits ty5) = (subrange_vec_dec op_code (4:ii) (0:ii) : list bitU) in + integer_arithmetic_addsub_immediate_decode sf op S shift imm12 Rn Rd + +val initialize_registers : unit -> M unit + +let initialize_registers () = + write_reg __unconditional_ref (undefined_bool ()) >> + write_reg __ThisInstrEnc_ref (undefined___InstrEnc ()) >> + write_reg __ThisInstr_ref (undefined_vector (32:ii) (undefined_bit ()) 
: list bitU) >> + write_reg __Memory_ref (undefined_vector (52:ii) (undefined_bit ()) : list bitU) >> + write_reg __BranchTaken_ref (undefined_bool ()) >> + write_reg + _R_ref + (undefined_vector (31:ii) (undefined_vector (64:ii) (undefined_bit ()) : list bitU) : list (list bitU)) >> + write_reg _PC_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg VBAR_EL3_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg VBAR_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg VBAR_EL1_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg VBAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg TCR_EL3_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg TCR_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg TCR_EL1_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg SP_mon_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SP_EL3_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg SP_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg SP_EL1_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg SP_EL0_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_und_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_svc_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_mon_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_irq_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_hyp_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_fiq_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_abt_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_EL3_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_EL2_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SPSR_EL1_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SCTLR_EL3_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SCTLR_EL2_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SCTLR_EL1_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SCTLR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SCR_EL3_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg SCR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg PSTATE_ref (undefined_ProcState ()) >> + write_reg MDCR_EL2_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg LR_mon_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HVBAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HSR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HSCTLR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HPFAR_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg HPFAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HIFAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg HDFAR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) 
>> + write_reg HCR_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg HCR_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg FPEXC_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg FAR_EL3_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg FAR_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg FAR_EL1_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg ESR_EL3_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg ESR_EL2_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg ESR_EL1_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg ELR_hyp_ref (undefined_vector (32:ii) (undefined_bit ()) : list bitU) >> + write_reg ELR_EL3_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg ELR_EL2_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) >> + write_reg ELR_EL1_ref (undefined_vector (64:ii) (undefined_bit ()) : list bitU) + + |
