summaryrefslogtreecommitdiff
path: root/aarch64_small/armV8_embed_sequential.lem
diff options
context:
space:
mode:
authorChristopher Pulte2019-02-12 10:25:15 +0000
committerChristopher Pulte2019-02-12 10:25:15 +0000
commitb847a472a1f853d783d1af5f8eb033b97f33be5b (patch)
treefc095d2a48ea79ca0ca30a757f578f1973074b4f /aarch64_small/armV8_embed_sequential.lem
parentc43a026cdbcca769096e46d4515db2fd566cbb33 (diff)
checking in in-progress translation of Shaked's handwritten sail1 ARM model to sail2
Diffstat (limited to 'aarch64_small/armV8_embed_sequential.lem')
-rw-r--r--aarch64_small/armV8_embed_sequential.lem5561
1 files changed, 5561 insertions, 0 deletions
diff --git a/aarch64_small/armV8_embed_sequential.lem b/aarch64_small/armV8_embed_sequential.lem
new file mode 100644
index 00000000..21841fe1
--- /dev/null
+++ b/aarch64_small/armV8_embed_sequential.lem
@@ -0,0 +1,5561 @@
+(*Generated by Sail from armV8.sail.*)
+open import Pervasives_extra
+open import Sail_impl_base
+open import State
+open import Sail_values
+open import ArmV8_embed_types
+open import ArmV8_extras_embed_sequential
+(* [lsl' (n, m)] = n * 2^m: logical shift left on unbounded integers. *)
+let lsl' (n, m) = n * (pow (2:ii) m)
+
+(* Unimplemented-feature stubs: abort execution with the given message. *)
+let not_implemented message = exit message
+
+let not_implemented_extern message = exit message
+
+(* Informational trace output is discarded by this sequential model. *)
+let info message = ()
+
+(* Architecturally UNKNOWN values are fixed to 0 in this model. *)
+let UNKNOWN = (0:ii)
+
+(* AArch32 PSTATE.M mode encodings as 5-bit vectors (MSB first).
+   NOTE(review): the integer after the bit list looks like the vector's
+   start index (len - 1), not its length — confirm against Sail_values. *)
+let M32_User = Vector [B1;B0;B0;B0;B0] 4 false
+
+let M32_FIQ = Vector [B1;B0;B0;B0;B1] 4 false
+
+let M32_IRQ = Vector [B1;B0;B0;B1;B0] 4 false
+
+let M32_Svc = Vector [B1;B0;B0;B1;B1] 4 false
+
+let M32_Monitor = Vector [B1;B0;B1;B1;B0] 4 false
+
+let M32_Abort = Vector [B1;B0;B1;B1;B1] 4 false
+
+let M32_Hyp = Vector [B1;B1;B0;B1;B0] 4 false
+
+let M32_Undef = Vector [B1;B1;B0;B1;B1] 4 false
+
+let M32_System = Vector [B1;B1;B1;B1;B1] 4 false
+
+(* Exception-level encodings (2-bit vectors): EL0..EL3. *)
+let EL3 = Vector [B1;B1] 1 false
+
+let EL2 = Vector [B1;B0] 1 false
+
+let EL1 = Vector [B0;B1] 1 false
+
+let EL0 = Vector [B0;B0] 1 false
+
+(* Debug halting-cause encodings (6-bit vectors) for EDSCR.STATUS. *)
+let DebugHalt_Breakpoint = Vector [B0;B0;B0;B1;B1;B1] 5 false
+
+let DebugHalt_EDBGRQ = Vector [B0;B1;B0;B0;B1;B1] 5 false
+
+let DebugHalt_Step_Normal = Vector [B0;B1;B1;B0;B1;B1] 5 false
+
+let DebugHalt_Step_Exclusive = Vector [B0;B1;B1;B1;B1;B1] 5 false
+
+let DebugHalt_OSUnlockCatch = Vector [B1;B0;B0;B0;B1;B1] 5 false
+
+let DebugHalt_ResetCatch = Vector [B1;B0;B0;B1;B1;B1] 5 false
+
+let DebugHalt_Watchpoint = Vector [B1;B0;B1;B0;B1;B1] 5 false
+
+let DebugHalt_HaltInstruction = Vector [B1;B0;B1;B1;B1;B1] 5 false
+
+let DebugHalt_SoftwareAccess = Vector [B1;B1;B0;B0;B1;B1] 5 false
+
+let DebugHalt_ExceptionCatch = Vector [B1;B1;B0;B1;B1;B1] 5 false
+
+let DebugHalt_Step_NoSyndrome = Vector [B1;B1;B1;B0;B1;B1] 5 false
+
+(* [HighestSetBit x] scans the bit vector from index N-1 down to 0 and
+   returns [Just i] for the first (highest) index holding B1, or [Nothing]
+   when no bit is set.  The [break] flag freezes [result] once a set bit
+   has been seen, since [foreach_dec] always runs the full range. *)
+let HighestSetBit x =
+ let N = length (reset_vector_start (set_vector_start_to_length x)) in
+ let result = (0:ii) in
+ let break = B0 in
+ let (break, result) =
+   (foreach_dec (N - (1:ii),(0:ii),(1:ii)) (break,result)
+      (fun i (break,result) ->
+         let (result, break) =
+           if bitU_to_bool
+                ((~break) &.
+                   (eq
+                      (match (access (set_vector_start_to_length x) i) with
+                       | B0 -> (0:ii)
+                       | B1 -> (1:ii)
+                       end,
+                       (1:ii))))
+           then
+             let result = i in
+             let break = B1 in
+             (result,break)
+           else (result,break) in
+         (break,result))) in
+ if bitU_to_bool break
+ then Just result
+ else Nothing
+
+(* [IsZero x]: B1 iff every bit of [x] is zero (vector equals integer 0). *)
+let IsZero x = eq_vec_range (set_vector_start_to_length x, (0:ii))
+
+(* Bitwise complement of a bit vector. *)
+let NOT x =
+ set_vector_start_to_length (bitwise_not (reset_vector_start (set_vector_start_to_length x)))
+
+(* Complement of a single bit (bitU). *)
+let NOT' x = ~x
+
+(* Two's-complement signed value of a bit vector. *)
+let SInt x = signed (reset_vector_start (set_vector_start_to_length x))
+
+(* Unsigned value of a bit vector. *)
+let UInt x = unsigned (reset_vector_start (set_vector_start_to_length x))
+
+(* [Zeros N']: an all-zero vector of width N'. *)
+let Zeros N' = set_vector_start_to_length (to_vec_dec (N',(0:ii)))
+
+(* [BitReverse data]: vector with the bit order reversed
+   (bit i of the input becomes bit N-1-i of the result). *)
+let BitReverse data =
+ let N = length (reset_vector_start (set_vector_start_to_length data)) in
+ let result = set_vector_start_to_length (to_vec_dec (length data,(0:ii))) in
+ (foreach_inc ((0:ii),N - (1:ii),(1:ii)) result
+    (fun i result ->
+       update_pos result ((N - i) - (1:ii)) (access (set_vector_start_to_length data) i)))
+
+(* This model is AArch64-only: no EL ever uses AArch32. *)
+let ELUsingAArch32 el = B0
+
+(* Hints and events are no-ops in this model. *)
+let Hint_Yield () = ()
+
+let SendEvent () = ()
+
+(* Trip an assertion when supposedly-unreachable code is executed. *)
+let Unreachable () = assert' B0 (Just "Unreachable reached")
+
+let UsingAArch32 () = B0
+
+(* Register-name table indexed by register number; used for footprints.
+   NOTE(review): listed R30..R0, consistent with [access] indexing from
+   the high end — confirm against Sail_values.access. *)
+let _Rs =
+ Vector ["R30";"R29";"R28";"R27";"R26";"R25";"R24";"R23";"R22";"R21";"R20";"R19";"R18";"R17";"R16";"R15";"R14";"R13";"R12";"R11";"R10";
+ "R9";"R8";"R7";"B6";"R5";"R4";"R3";"R2";"R1";"R0"] 30 false
+
+(* IMPLEMENTATION DEFINED choices for this model: CRC extension present;
+   no AArch32, no EL2/EL3, highest EL is AArch64, not Secure below EL3. *)
+let IMPLEMENTATION_DEFINED =
+ <| IMPLEMENTATION_DEFINED_type_HaveCRCExt = B1;
+    IMPLEMENTATION_DEFINED_type_HaveAArch32EL = B0;
+    IMPLEMENTATION_DEFINED_type_HaveAnyAArch32 = B0;
+    IMPLEMENTATION_DEFINED_type_HaveEL2 = B0;
+    IMPLEMENTATION_DEFINED_type_HaveEL3 = B0;
+    IMPLEMENTATION_DEFINED_type_HighestELUsingAArch32 = B0;
+    IMPLEMENTATION_DEFINED_type_IsSecureBelowEL3 = B0 |>
+
+(* Reset/debug entry points not modelled. *)
+let AArch64_ResetControlRegisters cold_reset = ()
+
+let DCPSInstruction target_el = not_implemented "DCPSInstruction"
+
+let DRPSInstruction () = not_implemented "DRPSInstruction"
+
+let Halt reason = not_implemented "Halt"
+
+(* [Align' (x, y)]: round x down to the nearest multiple of y. *)
+let Align' (x, y) = y * (quot x y)
+
+(* Number of zero bits above the highest set bit; the full width when
+   the vector is all zeros. *)
+let CountLeadingZeroBits x =
+ match (HighestSetBit (reset_vector_start (set_vector_start_to_length x))) with
+ | Nothing -> length (reset_vector_start (set_vector_start_to_length x))
+ | Just (n) -> ((length (reset_vector_start (set_vector_start_to_length x))) - (1:ii)) - n
+ end
+
+(* Single-bit form of [IsZero]. *)
+let IsZeroBit x =
+ if bitU_to_bool (IsZero (reset_vector_start (set_vector_start_to_length x)))
+ then B1
+ else B0
+
+(* Logical shift left with carry-out: append [shift] zeros, truncate back
+   to the original width, and return the last bit shifted out. *)
+let LSL_C (x, shift) =
+ let extended_x =
+   set_vector_start_to_length
+     ((set_vector_start_to_length x) ^^
+        (set_vector_start_to_length (set_vector_start_to_length (Zeros shift)))) in
+ let result =
+   set_vector_start_to_length
+     (set_vector_start_to_length
+        (mask (length x,reset_vector_start (set_vector_start_to_length extended_x)))) in
+ let carry_out =
+   access (set_vector_start_to_length extended_x) (length
+                                                     (reset_vector_start (set_vector_start_to_length
+                                                                            x))) in
+ (set_vector_start_to_length result,carry_out)
+
+(* Minimum of two integers.  [uMin] is the generated unsigned variant;
+   both reduce to the same comparison on Lem integers. *)
+let Min (a, b) = if bitU_to_bool (lteq (a, b)) then a else b
+
+let uMin (a, b) = if bitU_to_bool (lteq (a, b)) then a else b
+
+(* [Replicate (N', x)]: concatenate copies of [x] to fill width N'
+   (asserts N' is a multiple of the width of [x]).  Each iteration shifts
+   the accumulator left by M and ORs in a zero-extended copy of [x]. *)
+let Replicate (N', x) =
+ let (N, M) =
+   (length (reset_vector_start (set_vector_start_to_length (to_vec_dec (N',(0:ii))))),length
+                                                                                        (reset_vector_start (set_vector_start_to_length
+                                                                                                               (to_vec_dec
+                                                                                                                  (length
+                                                                                                                     x,
+                                                                                                                   (0:ii)))))) in
+ let _ = assert' (eq_range (modulo N M, (0:ii))) (Nothing) in
+ let result = set_vector_start_to_length (to_vec_dec (N',(0:ii))) in
+ let zeros = set_vector_start_to_length (Zeros ((length result) - (length x))) in
+ (foreach_inc (M,N,M) result
+    (fun i result ->
+       set_vector_start_to_length
+         (bitwise_or
+            (set_vector_start_to_length (bitwise_leftshift (set_vector_start_to_length result, M)),
+             set_vector_start_to_length
+               ((set_vector_start_to_length zeros) ^^ (set_vector_start_to_length x))))))
+
+(* Zero-extend [x] to width N' by prepending N' - length x zero bits. *)
+let ZeroExtend (N', x) =
+ set_vector_start_to_length
+   ((set_vector_start_to_length
+       (set_vector_start_to_length (Zeros (N' + ((length x) * ((0-1):ii)))))) ^^
+      (set_vector_start_to_length x))
+
+(* Polynomial reduction of [data] modulo the 32-bit polynomial [poly]
+   over GF(2) (long division by repeated XOR), as used by the CRC32
+   instructions.  Returns the low 32 bits of the remainder.
+   Requires the width of [data] to exceed 32. *)
+let Poly32Mod2 (data, poly) =
+ let result = set_vector_start_to_length data in
+ let N = length (reset_vector_start (set_vector_start_to_length data)) in
+ let _ = assert' (gt (N, (32:ii))) (Nothing) in
+ let data' = set_vector_start_to_length data in
+ let zeros = set_vector_start_to_length (Zeros ((length data) - (32:ii))) in
+ let data' =
+   (foreach_dec (N - (1:ii),(32:ii),(1:ii)) data'
+      (fun i data' ->
+         if bitU_to_bool
+              (eq
+                 (match (access (set_vector_start_to_length data') i) with
+                  | B0 -> (0:ii)
+                  | B1 -> (1:ii)
+                  end,
+                  (1:ii)))
+         then
+           update
+             data' (i - (1:ii)) (0:ii)
+             (set_vector_start_to_length
+                (bitwise_xor
+                   (set_vector_start_to_length
+                      (slice (set_vector_start_to_length data') (i - (1:ii)) (0:ii)),
+                    set_vector_start_to_length
+                      ((set_vector_start_to_length poly) ^^
+                         (set_vector_start_to_length
+                            (slice (set_vector_start_to_length zeros) (i - (33:ii)) (0:ii)))))))
+         else data')) in
+ slice (set_vector_start_to_length data') (31:ii) (0:ii)
+
+(* Exclusive monitors are handled elsewhere; local clear is a no-op. *)
+let ClearExclusiveLocal processorid =
+ info "The model does not implement the exclusive monitors explicitly."
+
+(* Single-processor model: processor id is always 0. *)
+let ProcessorID () = (0:ii)
+
+(* Byte-order reversal by divide and conquer: split the vector in half,
+   reverse each half recursively, and swap the halves; an 8-bit vector is
+   returned unchanged.  Assumes the width is a power of two times 8. *)
+let rec BigEndianReverse value =
+ let width = length (reset_vector_start (set_vector_start_to_length value)) in
+ let half = quot width (2:ii) in
+ if bitU_to_bool (eq_range (width, (8:ii)))
+ then set_vector_start_to_length value
+ else
+   set_vector_start_to_length
+     ((set_vector_start_to_length
+         (BigEndianReverse
+            (reset_vector_start (set_vector_start_to_length
+                                   (slice (set_vector_start_to_length value) (half - (1:ii)) (0:ii)))))) ^^
+        (set_vector_start_to_length
+           (BigEndianReverse
+              (reset_vector_start (set_vector_start_to_length
+                                     (slice (set_vector_start_to_length value) (width - (1:ii)) half))))))
+
+(* Event-register / wait-for-event machinery is not modelled. *)
+let ClearEventRegister () = not_implemented_extern "ClearEventRegister"
+
+let EventRegisterSet () = not_implemented_extern "EventRegisterSet"
+
+let EventRegistered () = not_implemented_extern "EventRegistered"
+
+let InterruptPending () = not_implemented_extern "InterruptPending"
+
+let WaitForEvent () = not_implemented_extern "WaitForEvent"
+
+let WaitForInterrupt () = not_implemented_extern "WaitForInterrupt"
+
+(* Faults, exceptions and system-call entry are not modelled. *)
+let AArch64_SPAlignmentFault () = not_implemented "AArch64_SPAlignmentFault"
+
+let AArch64_SoftwareBreakpoint immediate = not_implemented "AArch64_SoftwareBreakpoint"
+
+let AArch64_CallHypervisor immediate = not_implemented "AArch64_CallHypervisor"
+
+let AArch64_CallSecureMonitor immediate = not_implemented "AArch64_CallSecureMonitor"
+
+let AArch64_CallSupervisor immediate = not_implemented "AArch64_CallSupervisor"
+
+let AArch64_SystemRegisterTrap (target_el, op0, op2, op1, crn, rt, crm, dir) =
+ not_implemented "AArch64_SystemRegisterTrap"
+
+let AArch64_UndefinedFault () = not_implemented "AArch64_UndefinedFault"
+
+(* Read the program counter register. *)
+let rPC () = read_reg _PC
+
+let AArch64_ExceptionReturn (new_pc, spsr) = not_implemented "AArch64_ExceptionReturn"
+
+(* Unsigned value of a plain (non-normalised) register vector. *)
+let UInt_reg x = unsigned x
+
+(* External debug-authentication signals: not modelled.
+   NOTE(review): two generated naming glitches below — [signelNIDEN]
+   (message says "signalNIDEN") and [signalDPNIDEN] (message says
+   "signalSPNIDEN").  The names are kept as-is because callers bind to
+   them; fix at the Sail source if regenerating. *)
+let signalDBGEN () = not_implemented_extern "signalDBGEN"
+
+let signelNIDEN () = not_implemented_extern "signalNIDEN"
+
+let signalSPIDEN () = not_implemented_extern "signalSPIDEN"
+
+let signalDPNIDEN () = not_implemented_extern "signalSPNIDEN"
+
+(* [Halted ()]: B1 unless EDSCR.STATUS is 0b000001 (restarting) or
+   0b000010 (non-debug state). *)
+let Halted () =
+ read_reg_field EDSCR "STATUS" >>= fun w__0 ->
+ read_reg_field EDSCR "STATUS" >>= fun w__1 ->
+ return (~((eq_vec
+              (set_vector_start_to_length w__0,
+               set_vector_start_to_length (Vector [B0;B0;B0;B0;B0;B1] 5 false))) |.
+             (eq_vec
+                (set_vector_start_to_length w__1,
+                 set_vector_start_to_length (Vector [B0;B0;B0;B0;B1;B0] 5 false)))))
+
+let HaveCRCExt () = IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_HaveCRCExt
+
+(* Exclusive monitors live outside this model; calling this is an error. *)
+let ExclusiveMonitorsStatus () =
+ let _ = info "The model does not implement the exclusive monitors explicitly." in
+ not_implemented "ExclusiveMonitorsStatus should not be called" >>
+ return B0
+
+let Hint_Branch hint =
+ info "This hint can be used for hardware optimization that has no effect on the model."
+
+let ResetExternalDebugRegisters b = not_implemented_extern "ResetExternalDebugRegisters"
+
+let HaveAnyAArch32 () = IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_HaveAnyAArch32
+
+let AArch64_WFxTrap (target_el, is_wfe) = not_implemented "AArch64_WFxTrap"
+
+(* Returns B1 when the (op0,op1,crn,crm,op2) system-register access is
+   unallocated at the current EL.  Only the encodings reachable in this
+   model are listed; the first arm (NZCV/DAIF space at op1=0) traps below
+   EL1.  Match is partial: any other encoding is a match failure. *)
+let AArch64_CheckUnallocatedSystemAccess (op0, op1, crn, crm, op2, read) =
+ match (op0,op1,crn,crm,op2,read) with
+ | (Vector [B0;B0] _ _, Vector [B0;B0;B0] _ _, Vector [B0;B1;B0;B0] _ _, _, Vector [B1;B0;B1] _ _, _) ->
+   read_reg_field CurrentEL "EL" >>= fun w__0 ->
+   return (lt_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL1))
+ | (Vector [B0;B0] _ _, Vector [B0;B1;B1] _ _, Vector [B0;B1;B0;B0] _ _, _, Vector [B1;B1;B0] _ _, _) ->
+   return B0
+ | (Vector [B0;B0] _ _, Vector [B0;B1;B1] _ _, Vector [B0;B1;B0;B0] _ _, _, Vector [B1;B1;B1] _ _, _) ->
+   return B0
+ | (Vector [B1;B1] _ _, Vector [B0;B1;B1] _ _, Vector [B0;B1;B0;B0] _ _, Vector [B0;B0;B1;B0] _ _, Vector [B0;B0;B0] _ _, _) ->
+   return B0
+ | (Vector [B1;B1] _ _, Vector [B0;B1;B1] _ _, Vector [B0;B1;B0;B0] _ _, Vector [B0;B0;B1;B0] _ _, Vector [B0;B0;B1] _ _, _) ->
+   return B0
+ end
+
+(* Generic SYS reads/writes are not modelled. *)
+let SysOp_R (op0, op1, crn, crm, op2) =
+ not_implemented "SysOp_R" >>
+ return (to_vec_dec ((64:ii),(0:ii)))
+
+let SysOp_W (op0, op1, crn, crm, op2, _val) = not_implemented "SysOp_W"
+
+(* Register-footprint descriptors ("fp" suffix): name the full register
+   (RFull) or a named field (RField) touched by an instruction, for
+   dependency tracking by the surrounding machinery. *)
+let TxNestingLevelfp = RFull "TxNestingLevel"
+
+let TXIDR_EL0_DEPTHfp = RField ("TXIDR_EL0","DEPTH")
+
+let PSTATE_Nfp = RField ("NZCV","N")
+
+let PSTATE_Zfp = RField ("NZCV","Z")
+
+let PSTATE_Cfp = RField ("NZCV","C")
+
+let PSTATE_Vfp = RField ("NZCV","V")
+
+let PSTATE_Dfp = RField ("DAIF","D")
+
+let PSTATE_Afp = RField ("DAIF","A")
+
+let PSTATE_Ifp = RField ("DAIF","I")
+
+let PSTATE_Ffp = RField ("DAIF","F")
+
+let PSTATE_ELfp = RFull "CurrentEL"
+
+let PSTATE_SPfp = RField ("SPSel","SP")
+
+let _PCfp = RFull "_PC"
+
+(* Prefetch hints have no architectural effect in this model. *)
+let Hint_Prefetch (addr, hint, target, stream) = ()
+
+(* Exclusive monitors are not modelled: VA checks always succeed. *)
+let AArch64_IsExclusiveVA (address, processorid, size) =
+ let _ = info "The model does not implement the exclusive monitors explicitly." in
+ B1
+
+let AArch64_MarkExclusiveVA (address, processorid, size) =
+ info "The model does not implement the exclusive monitors explicitly."
+
+(* Write the four NZCV condition flags from a 4-bit vector [n;z;c;v]. *)
+let wPSTATE_NZCV ((), Vector [n;z;c;v] _ _) =
+ write_reg_bitfield NZCV "N" n >>
+ write_reg_bitfield NZCV "Z" z >>
+ write_reg_bitfield NZCV "C" c >>
+ write_reg_bitfield NZCV "V" v
+
+(* Write the four DAIF mask bits from a 4-bit vector [d;a;i;f]. *)
+let wPSTATE_DAIF ((), Vector [d;a;i;f] _ _) =
+ write_reg_bitfield DAIF "D" d >>
+ write_reg_bitfield DAIF "A" a >>
+ write_reg_bitfield DAIF "I" i >>
+ write_reg_bitfield DAIF "F" f
+
+(* Interpret a bit vector as an integer, unsigned or signed per flag. *)
+let Int (x, unsigned) =
+ if bitU_to_bool unsigned
+ then UInt (reset_vector_start (set_vector_start_to_length x))
+ else SInt (reset_vector_start (set_vector_start_to_length x))
+
+(* Physical-address exclusive-monitor operations: not modelled; checks
+   always report success. *)
+let ClearExclusiveByAddress (paddress, processorid, size) =
+ info "The model does not implement the exclusive monitors explicitly."
+
+let IsExclusiveGlobal (paddress, processorid, size) =
+ let _ = info "The model does not implement the exclusive monitors explicitly." in
+ B1
+
+let IsExclusiveLocal (paddress, processorid, size) =
+ let _ = info "The model does not implement the exclusive monitors explicitly." in
+ B1
+
+let MarkExclusiveGlobal (paddress, processorid, size) =
+ info "The model does not implement the exclusive monitors explicitly."
+
+let MarkExclusiveLocal (paddress, processorid, size) =
+ info "The model does not implement the exclusive monitors explicitly."
+
+(* Physical address size in bits, decoded from ID_AA64MMFR0_EL1.PARange.
+   Unrecognised encodings leave the initial value 0. *)
+let PAMax () =
+ let pa_size = (0:ii) in
+ read_reg_field ID_AA64MMFR0_EL1 "PARange" >>= fun w__0 ->
+ let pa_size =
+   match w__0 with
+   | Vector [B0;B0;B0;B0] _ _ -> (32:ii)
+   | Vector [B0;B0;B0;B1] _ _ -> (36:ii)
+   | Vector [B0;B0;B1;B0] _ _ -> (40:ii)
+   | Vector [B0;B0;B1;B1] _ _ -> (42:ii)
+   | Vector [B0;B1;B0;B0] _ _ -> (44:ii)
+   | Vector [B0;B1;B0;B1] _ _ -> (48:ii)
+   | _ -> pa_size
+   end in
+ return pa_size
+
+(* ARM pseudocode AddWithCarry: returns (result, NZCV) where result is
+   x + y + carry_in truncated to the width of x, N/Z reflect the result,
+   C is set on unsigned overflow (truncated result <> unsigned sum) and
+   V on signed overflow (truncated result <> signed sum). *)
+let AddWithCarry (x, y, carry_in) =
+ let unsigned_sum =
+   ((UInt (reset_vector_start (set_vector_start_to_length x))) +
+      (UInt (reset_vector_start (set_vector_start_to_length y)))) +
+     (UInt (reset_vector_start (set_vector_start_to_length (Vector [carry_in] 0 false)))) in
+ let signed_sum =
+   ((SInt (reset_vector_start (set_vector_start_to_length x))) +
+      (SInt (reset_vector_start (set_vector_start_to_length y)))) +
+     (UInt (reset_vector_start (set_vector_start_to_length (Vector [carry_in] 0 false)))) in
+ let result = set_vector_start_to_length (to_vec_dec (length x,unsigned_sum)) in
+ let n =
+   access (set_vector_start_to_length result) ((length
+                                                  (reset_vector_start (set_vector_start_to_length
+                                                                         result))) - (1:ii)) in
+ let z =
+   if bitU_to_bool (IsZero (reset_vector_start (set_vector_start_to_length result)))
+   then B1
+   else B0 in
+ let c =
+   if bitU_to_bool
+        (eq_range (UInt (reset_vector_start (set_vector_start_to_length result)), unsigned_sum))
+   then B0
+   else B1 in
+ let v =
+   if bitU_to_bool
+        (eq_range (SInt (reset_vector_start (set_vector_start_to_length result)), signed_sum))
+   then B0
+   else B1 in
+ (set_vector_start_to_length result,set_vector_start 3
+                                      ((set_vector_start_to_length (Vector [n] 0 false)) ^^
+                                         (set_vector_start_to_length
+                                            ((set_vector_start_to_length (Vector [z] 0 false)) ^^
+                                               (set_vector_start_to_length
+                                                  ((set_vector_start_to_length (Vector [c] 0 false)) ^^
+                                                     (set_vector_start_to_length (Vector [v] 0 false))))))))
+
+(* ARM pseudocode ConditionHolds: evaluate a 4-bit condition code against
+   the NZCV flags.  Bits 3..1 choose the base test (EQ, CS, MI, VS, HI,
+   GE, GT, AL); bit 0 inverts the result except for 0b1111 (AL/NV). *)
+let ConditionHolds _cond =
+ let result = B0 in
+ match (slice _cond (3:ii) (1:ii)) with
+ | Vector [B0;B0;B0] _ _ ->
+   read_reg_bitfield NZCV "Z" >>= fun w__0 ->
+   let result = eq (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+   return result
+ | Vector [B0;B0;B1] _ _ ->
+   read_reg_bitfield NZCV "C" >>= fun w__1 ->
+   let result = eq (match w__1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+   return result
+ | Vector [B0;B1;B0] _ _ ->
+   read_reg_bitfield NZCV "N" >>= fun w__2 ->
+   let result = eq (match w__2 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+   return result
+ | Vector [B0;B1;B1] _ _ ->
+   read_reg_bitfield NZCV "V" >>= fun w__3 ->
+   let result = eq (match w__3 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+   return result
+ | Vector [B1;B0;B0] _ _ ->
+   read_reg_bitfield NZCV "C" >>= fun w__4 ->
+   read_reg_bitfield NZCV "Z" >>= fun w__5 ->
+   let result =
+     (eq (match w__4 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+       (eq (match w__5 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) in
+   return result
+ | Vector [B1;B0;B1] _ _ ->
+   read_reg_bitfield NZCV "N" >>= fun w__6 ->
+   read_reg_bitfield NZCV "V" >>= fun w__7 ->
+   let result = eq_bit (w__6, w__7) in
+   return result
+ | Vector [B1;B1;B0] _ _ ->
+   read_reg_bitfield NZCV "N" >>= fun w__8 ->
+   read_reg_bitfield NZCV "V" >>= fun w__9 ->
+   read_reg_bitfield NZCV "Z" >>= fun w__10 ->
+   let result =
+     (eq_bit (w__8, w__9)) &. (eq (match w__10 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) in
+   return result
+ | Vector [B1;B1;B1] _ _ -> return B1
+ end >>= fun result ->
+ let result =
+   if bitU_to_bool
+        ((eq (match (access _cond (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+           (neq_vec
+              (set_vector_start_to_length _cond,
+               set_vector_start_to_length (Vector [B1;B1;B1;B1] 3 false))))
+   then ~result
+   else result in
+ return result
+
+(* Decode a 2-bit shift-type field into the ShiftType enumeration. *)
+let DecodeShift op =
+ match toNatural (unsigned (reset_vector_start op)) with
+ | (0:nn) -> ShiftType_LSL
+ | (1:nn) -> ShiftType_LSR
+ | (2:nn) -> ShiftType_ASR
+ | (3:nn) -> ShiftType_ROR
+ end
+
+(* EL0/EL1 always exist; EL2/EL3 per IMPLEMENTATION_DEFINED (both absent
+   in this model).  Any other argument trips the assertion. *)
+let HaveEL el =
+ if bitU_to_bool
+      ((eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL1)) |.
+         (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL0)))
+ then B1
+ else
+   if bitU_to_bool (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL2))
+   then IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_HaveEL2
+   else
+     if bitU_to_bool (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL3))
+     then IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_HaveEL3
+     else
+       let _ = assert' B0 (Nothing) in
+       B0
+
+(* MSR (register) writes for the three system registers this model
+   supports: NZCV, DAIF, TPIDR_EL0.  Match is partial: any other
+   encoding is a match failure. *)
+let System_Put (op0, op1, crn, crm, op2, _val) =
+ match toNaturalFiveTup (op0,op1,crn,crm,op2) with
+ | ((3:nn), (3:nn), (4:nn), (2:nn), (0:nn)) -> write_reg NZCV (slice _val (31:ii) (0:ii))
+ | ((3:nn), (3:nn), (4:nn), (2:nn), (1:nn)) -> write_reg DAIF (slice _val (31:ii) (0:ii))
+ | ((3:nn), (3:nn), (13:nn), (0:nn), (2:nn)) -> write_reg TPIDR_EL0 (slice _val (63:ii) (0:ii))
+ end
+
+(* DSB: only the full-system domain is supported; dispatch on the
+   read/write/all barrier type. *)
+let DataSynchronizationBarrier (domain, types) =
+ (if bitU_to_bool (neq (domain, MBReqDomain_FullSystem))
+  then not_implemented "DataSynchronizationBarrier: not MBReqDomain_FullSystem"
+  else return ()) >>
+ match types with
+ | MBReqTypes_Reads -> DataSynchronizationBarrier_Reads ()
+ | MBReqTypes_Writes -> DataSynchronizationBarrier_Writes ()
+ | MBReqTypes_All -> DataSynchronizationBarrier_All ()
+ end
+
+(* Read ELR_ELx for the given exception level; a zero 64-bit vector for
+   any EL without an ELR (i.e. EL0). *)
+let rELR el =
+ let r = to_vec_dec ((64:ii),(0:ii)) in
+ if bitU_to_bool (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL1))
+ then read_reg ELR_EL1
+ else
+   if bitU_to_bool (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL2))
+   then read_reg ELR_EL2
+   else
+     if bitU_to_bool (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL3))
+     then read_reg ELR_EL3
+     else return r
+
+(* DMB: full-system and inner-shareable domains only; dispatch on the
+   read/write/all barrier type. *)
+let DataMemoryBarrier (domain, types) =
+ (if bitU_to_bool
+       ((neq (domain, MBReqDomain_FullSystem)) &. (neq (domain, MBReqDomain_InnerShareable)))
+  then not_implemented "DataMemoryBarrier: not MBReqDomain_FullSystem or _InnerShareable"
+  else return ()) >>
+ match types with
+ | MBReqTypes_Reads -> DataMemoryBarrier_Reads ()
+ | MBReqTypes_Writes -> DataMemoryBarrier_Writes ()
+ | MBReqTypes_All -> DataMemoryBarrier_All ()
+ end
+
+(* Announce a write address, selecting the memory-event kind from the
+   (exclusive, access-type) pair; unrecognised pairs are rejected. *)
+let wMem_Addr (address, size, acctype, excl) =
+ match (excl,acctype) with
+ | (B0, AccType_NORMAL) -> wMem_Addr_NORMAL (reset_vector_start address,size)
+ | (B0, AccType_STREAM) -> wMem_Addr_NORMAL (reset_vector_start address,size)
+ | (B0, AccType_UNPRIV) -> wMem_Addr_NORMAL (reset_vector_start address,size)
+ | (B0, AccType_ORDERED) -> wMem_Addr_ORDERED (reset_vector_start address,size)
+ | (B1, AccType_ATOMIC) -> wMem_Addr_ATOMIC (reset_vector_start address,size)
+ | (B1, AccType_ORDERED) -> wMem_Addr_ATOMIC_ORDERED (reset_vector_start address,size)
+ | _ -> not_implemented "unrecognised memory access"
+ end
+
+(* Select the SCTLR register for a translation regime (EL1/EL2/EL3). *)
+let SCTLR regime =
+ if bitU_to_bool (eq_vec (set_vector_start_to_length regime, set_vector_start_to_length EL1))
+ then SCTLR_EL1
+ else
+   if bitU_to_bool (eq_vec (set_vector_start_to_length regime, set_vector_start_to_length EL2))
+   then SCTLR_EL2
+   else
+     if bitU_to_bool (eq_vec (set_vector_start_to_length regime, set_vector_start_to_length EL3))
+     then SCTLR_EL3
+     else
+       let _ = assert' B0 (Just "SCTLR_type unreachable") in
+       SCTLR_EL1
+
+(* Decode a 3-bit register-extend option field (UXTB..SXTX). *)
+let DecodeRegExtend op =
+ match op with
+ | Vector [B0;B0;B0] _ _ -> ExtendType_UXTB
+ | Vector [B0;B0;B1] _ _ -> ExtendType_UXTH
+ | Vector [B0;B1;B0] _ _ -> ExtendType_UXTW
+ | Vector [B0;B1;B1] _ _ -> ExtendType_UXTX
+ | Vector [B1;B0;B0] _ _ -> ExtendType_SXTB
+ | Vector [B1;B0;B1] _ _ -> ExtendType_SXTH
+ | Vector [B1;B1;B0] _ _ -> ExtendType_SXTW
+ | Vector [B1;B1;B1] _ _ -> ExtendType_SXTX
+ end
+
+(* Read the current stack pointer, truncated to N' bits: SP_EL0 when
+   SPSel.SP = 0, otherwise SP_ELx for the current exception level. *)
+let rSP N' =
+ read_reg_bitfield SPSel "SP" >>= fun w__0 ->
+ if bitU_to_bool (eq (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+ then
+   read_reg SP_EL0 >>= fun w__1 ->
+   return (set_vector_start_to_length (mask (N',w__1)))
+ else
+   read_reg_field CurrentEL "EL" >>= fun pstate_el ->
+   if bitU_to_bool (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL0))
+   then
+     read_reg SP_EL0 >>= fun w__2 ->
+     return (set_vector_start_to_length (mask (N',w__2)))
+   else
+     if bitU_to_bool
+          (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL1))
+     then
+       read_reg SP_EL1 >>= fun w__3 ->
+       return (set_vector_start_to_length (mask (N',w__3)))
+     else
+       if bitU_to_bool
+            (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL2))
+       then
+         read_reg SP_EL2 >>= fun w__4 ->
+         return (set_vector_start_to_length (mask (N',w__4)))
+       else
+         if bitU_to_bool
+              (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL3))
+         then
+           read_reg SP_EL3 >>= fun w__5 ->
+           return (set_vector_start_to_length (mask (N',w__5)))
+         else
+           let _ = assert' B0 (Nothing) in
+           read_reg SP_EL3 >>= fun w__6 ->
+           return (set_vector_start_to_length (mask (N',w__6)))
+
+(* Read SPSR_ELx for the current exception level; AArch32 state is not
+   modelled, and EL0 yields the zero default. *)
+let rSPSR () =
+ let result = to_vec_dec ((32:ii),(0:ii)) in
+ if bitU_to_bool (UsingAArch32 ())
+ then
+   not_implemented "rSPSR UsingAArch32" >>
+   return result
+ else
+   read_reg_field CurrentEL "EL" >>= fun pstate_el ->
+   if bitU_to_bool (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL1))
+   then read_reg SPSR_EL1
+   else
+     if bitU_to_bool
+          (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL2))
+     then read_reg SPSR_EL2
+     else
+       if bitU_to_bool
+            (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL3))
+       then read_reg SPSR_EL3
+       else return result
+
+(* General-purpose register file (X0..X30).  Index 31 maps to an
+   UndefinedRegister sentinel: XZR/SP handling is done by callers. *)
+let _R =
+ Vector [UndefinedRegister 0;R30;R29;R28;R27;R26;R25;R24;R23;R22;R21;R20;R19;R18;R17;R16;R15;R14;R13;R12;R11;
+ R10;R9;R8;R7;R6;R5;R4;R3;R2;R1;R0] 31 false
+
+(* SIMD/FP register file (V0..V31) with an UndefinedRegister sentinel. *)
+let _V =
+ Vector [UndefinedRegister 0;V31;V30;V29;V28;V27;V26;V25;V24;V23;V22;V21;V20;V19;V18;V17;V16;V15;V14;V13;V12;
+ V11;V10;V9;V8;V7;V6;V5;V4;V3;V2;V1;V0] 32 false
+
+(* Reserved and unallocated encodings raise an Undefined fault. *)
+let ReservedValue () = AArch64_UndefinedFault ()
+
+let UnallocatedEncoding () = AArch64_UndefinedFault ()
+
+(* Leading-sign-bit count: CLZ of (x[N-1:1] XOR x[N-2:0]), i.e. the run
+   length of copies of the sign bit minus one. *)
+let CountLeadingSignBits x =
+ CountLeadingZeroBits
+   (reset_vector_start (set_vector_start_to_length
+                          (bitwise_xor
+                             (set_vector_start_to_length
+                                (slice (set_vector_start_to_length x)
+                                   ((length (reset_vector_start (set_vector_start_to_length x))) - (1:ii)) (1:ii)),
+                              set_vector_start_to_length
+                                (slice (set_vector_start_to_length x)
+                                   ((length (reset_vector_start (set_vector_start_to_length x))) - (2:ii)) (0:ii))))))
+
+(* Logical shift right with carry-out: zero-extend by [shift], take the
+   original-width window above the shifted-out bits, and return bit
+   shift-1 as the carry out. *)
+let LSR_C (x, shift) =
+ let extended_x =
+   set_vector_start_to_length
+     (ZeroExtend ((length x) + shift,reset_vector_start (set_vector_start_to_length x))) in
+ let result =
+   set_vector_start_to_length
+     (slice (set_vector_start_to_length extended_x)
+        ((shift + (length (reset_vector_start (set_vector_start_to_length x)))) - (1:ii)) shift) in
+ let carry_out = access (set_vector_start_to_length extended_x) (shift - (1:ii)) in
+ (set_vector_start_to_length result,carry_out)
+
+(* [Ones N']: an all-ones vector of width N'. *)
+let Ones N' =
+ set_vector_start_to_length
+   (Replicate (N',reset_vector_start (set_vector_start_to_length (Vector [B1] 0 false))))
+
+(* Sign-extend [x] to width N' by replicating its top bit [h].
+   NOTE(review): [remainder] is computed but unused — dead code from the
+   generator. *)
+let SignExtend (N', ((Vector (h::_) _ _) as x)) =
+ let remainder = slice_raw x (1:ii) ((length (reset_vector_start x)) - (1:ii)) in
+ set_vector_start_to_length
+   ((set_vector_start_to_length
+       (set_vector_start_to_length
+          (Replicate
+             (N' - (length x),
+              reset_vector_start (set_vector_start_to_length (Vector [h] 0 false)))))) ^^
+      (set_vector_start_to_length x))
+
+(* Instruction filter: every decoded instruction is supported. *)
+let supported_instructions instr = match instr with | _ -> Just instr end
+
+(* Logical shift left discarding the carry; shift of 0 returns x. *)
+let LSL (x, shift) =
+ let result = set_vector_start_to_length (to_vec_dec (length x,(0:ii))) in
+ if bitU_to_bool (eq_range (shift, (0:ii)))
+ then set_vector_start_to_length x
+ else
+   let (result', _) =
+     match (LSL_C (reset_vector_start (set_vector_start_to_length x),shift)) with
+     | (v0v', v1v') -> (v0v',v1v')
+     end in
+   set_vector_start_to_length result'
+
+(* Address of the current instruction (the PC) masked to N' bits;
+   asserts N' is 64, or 32 when in AArch32 (never here). *)
+let ThisInstrAddr N' =
+ let N = length (reset_vector_start (set_vector_start_to_length (to_vec_dec (N',(0:ii))))) in
+ let _ =
+   assert' ((eq_range (N, (64:ii))) |. ((eq_range (N, (32:ii))) &. (UsingAArch32 ()))) (Nothing) in
+ rPC () >>= fun w__0 ->
+ return (set_vector_start_to_length
+           (mask (N',reset_vector_start (set_vector_start_to_length w__0))))
+
+let AArch64_Abort (vaddress, fault) = not_implemented "AArch64_Abort"
+
+(* Read the low N' bits of SIMD/FP register V[n]. *)
+let rV (N', n) =
+ read_reg (access _V n) >>= fun w__0 ->
+ return (set_vector_start_to_length
+           (mask (N',reset_vector_start (set_vector_start_to_length w__0))))
+
+(* Read part [part] of V[n]: part 0 is the low N' bits; part 1 is the
+   high 64 bits (only valid when N' = 64). *)
+let rVpart (N', n, part) =
+ if bitU_to_bool (eq_range (part, (0:ii)))
+ then
+   read_reg (access _V n) >>= fun w__0 ->
+   return (set_vector_start_to_length
+             (set_vector_start_to_length (mask (N',reset_vector_start (set_vector_start_to_length w__0)))))
+ else
+   let _ =
+     assert' (eq_range
+                (length (reset_vector_start (set_vector_start_to_length (to_vec_dec (N',(0:ii))))),
+                 (64:ii))) (Nothing) in
+   read_reg_range (access _V n) (127:ii) (64:ii) >>= fun w__1 ->
+   return (set_vector_start_to_length (set_vector_start 63 w__1))
+
+(* External invasive debug is enabled when the DBGEN signal is HIGH
+   (the signal itself is an unimplemented extern in this model). *)
+let ExternalInvasiveDebugEnabled () =
+ signalDBGEN () >>= fun w__0 ->
+ return (eq (w__0, HIGH))
+
+(* Round the vector value of [x] down to a multiple of y (same width). *)
+let Align (x, y) =
+ set_vector_start_to_length
+   (to_vec_dec (length x,Align' (UInt (reset_vector_start (set_vector_start_to_length x)),y)))
+
+(* Default (empty) read/write buffer records used by the memory layer. *)
+let empty_read_buffer =
+ <| read_buffer_type_size = (0:ii);
+    read_buffer_type_acctype = AccType_NORMAL;
+    read_buffer_type_exclusive = B0;
+    read_buffer_type_address = (to_vec_dec ((64:ii),(0:ii))) |>
+
+let empty_write_buffer =
+ <| write_buffer_type_size = (0:ii);
+    write_buffer_type_acctype = AccType_NORMAL;
+    write_buffer_type_exclusive = B0;
+    write_buffer_type_address = (to_vec_dec ((64:ii),(0:ii)));
+    write_buffer_type_value = (to_vec_dec ((128:ii),(0:ii))) |>
+
+(* No AArch32 support means the highest EL cannot use AArch32. *)
+let HighestELUsingAArch32 () =
+ if bitU_to_bool (~(HaveAnyAArch32 ()))
+ then B0
+ else IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_HighestELUsingAArch32
+
+(* Read the low N' bits of X[n]; register 31 reads as zero (XZR). *)
+let rX (N', n) =
+ if bitU_to_bool (neq_range (n, (31:ii)))
+ then
+   read_reg (access _R n) >>= fun w__0 ->
+   return (set_vector_start_to_length
+             (mask (N',reset_vector_start (set_vector_start_to_length w__0))))
+ else return (set_vector_start_to_length (Zeros N'))
+
+(* ELR of the current exception level; asserts the current EL is not EL0. *)
+let rELR' () =
+ read_reg_field CurrentEL "EL" >>= fun w__0 ->
+ let _ =
+   assert' (neq_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL0)) (Nothing) in
+ read_reg_field CurrentEL "EL" >>= fun w__1 ->
+ rELR (reset_vector_start (set_vector_start 1 w__1))
+
+(* Register-footprint lists for SP reads/writes and register operands. *)
+let rSPIFP = [RFull "SP_EL0";PSTATE_SPfp]
+
+let wSPFP = ([PSTATE_SPfp],[RFull "SP_EL0"])
+
+(* Footprint for X[n]: empty for register 31 (XZR). *)
+let xFP n = if bitU_to_bool (neq_range (n, (31:ii))) then [RFull (access _Rs n)] else []
+
+let BigEndianIFP = if bitU_to_bool (UsingAArch32 ()) then [RFull "PSTATE_E"] else [PSTATE_ELfp]
+
+(* Flush an exclusive write buffer: asserts the exclusive flag is set,
+   then issues an atomic write of the low size*8 bits of the buffered
+   value.  ATOMIC and ORDERED both map to wMem_Val_ATOMIC here. *)
+let flush_write_buffer_exclusive write_buffer =
+ let _ = assert' write_buffer.write_buffer_type_exclusive (Nothing) in
+ match write_buffer.write_buffer_type_acctype with
+ | AccType_ATOMIC ->
+   wMem_Val_ATOMIC
+     (write_buffer.write_buffer_type_size,
+      reset_vector_start (set_vector_start_to_length
+                            (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                               ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))
+ | AccType_ORDERED ->
+   wMem_Val_ATOMIC
+     (write_buffer.write_buffer_type_size,
+      reset_vector_start (set_vector_start_to_length
+                            (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                               ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))
+ | _ ->
+   not_implemented "unrecognised memory access" >>
+   return B0
+ end
+
+(* Build a FaultRecord; domain and debugmoe are architecturally UNKNOWN
+   (fixed to 0 by this model's UNKNOWN). *)
+let AArch64_CreateFaultRecord (type', ipaddress, level, acctype, write, extflag, secondstage, s2fs1walk) =
+ <| FaultRecord_type' = type';
+    FaultRecord_domain = (to_vec_dec ((4:ii),UNKNOWN));
+    FaultRecord_debugmoe = (to_vec_dec ((4:ii),UNKNOWN));
+    FaultRecord_ipaddress = ipaddress;
+    FaultRecord_level = level;
+    FaultRecord_acctype = acctype;
+    FaultRecord_write = write;
+    FaultRecord_extflag = extflag;
+    FaultRecord_secondstage = secondstage;
+    FaultRecord_s2fs1walk = s2fs1walk |>
+
+(* Flush a non-exclusive write buffer: asserts the exclusive flag is
+   clear, then issues a normal write of the low size*8 bits of the
+   buffered value.  NORMAL/STREAM/UNPRIV/ORDERED all map to
+   wMem_Val_NORMAL. *)
+let flush_write_buffer write_buffer =
+ let _ = assert' (eq_bit (write_buffer.write_buffer_type_exclusive, B0)) (Nothing) in
+ match write_buffer.write_buffer_type_acctype with
+ | AccType_NORMAL ->
+   wMem_Val_NORMAL
+     (write_buffer.write_buffer_type_size,
+      reset_vector_start (set_vector_start_to_length
+                            (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                               ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))
+ | AccType_STREAM ->
+   wMem_Val_NORMAL
+     (write_buffer.write_buffer_type_size,
+      reset_vector_start (set_vector_start_to_length
+                            (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                               ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))
+ | AccType_UNPRIV ->
+   wMem_Val_NORMAL
+     (write_buffer.write_buffer_type_size,
+      reset_vector_start (set_vector_start_to_length
+                            (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                               ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))
+ | AccType_ORDERED ->
+   wMem_Val_NORMAL
+     (write_buffer.write_buffer_type_size,
+      reset_vector_start (set_vector_start_to_length
+                            (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                               ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))
+ | _ -> not_implemented "unrecognised memory access"
+ end
+
+(* MRS reads for the three system registers this model supports: NZCV
+   and DAIF are zero-extended to 64 bits; TPIDR_EL0 is read directly.
+   Match is partial: any other encoding is a match failure. *)
+let System_Get (op0, op1, crn, crm, op2) =
+ match toNaturalFiveTup (op0,op1,crn,crm,op2) with
+ | ((3:nn), (3:nn), (4:nn), (2:nn), (0:nn)) ->
+   read_reg NZCV >>= fun w__0 ->
+   return (ZeroExtend ((64:ii),w__0))
+ | ((3:nn), (3:nn), (4:nn), (2:nn), (1:nn)) ->
+   read_reg DAIF >>= fun w__1 ->
+   return (ZeroExtend ((64:ii),w__1))
+ | ((3:nn), (3:nn), (13:nn), (0:nn), (2:nn)) -> read_reg TPIDR_EL0
+ end
+
+(* Prefetch: decode the prfop operand of a PRFM-style hint.  prfop bits
+   4:3 select the hint kind (read/exec/write; 11 sets returnv, i.e. the
+   operation is treated as a no-op), bits 2:1 the target, and bit 0 the
+   stream flag.  The whole function returns () -- in this model a
+   prefetch has no architectural effect. *)
+let Prefetch (address, prfop) =
+ (* Generated-code artifact: toNatural (0:ii) always selects the first
+    arm, so hint's initial value is Prefetch_READ. *)
+ let hint =
+ match toNatural (0:ii) with
+ | (0:nn) -> Prefetch_READ
+ | (1:nn) -> Prefetch_WRITE
+ | (2:nn) -> Prefetch_EXEC
+ end in
+ let target = (0:ii) in
+ let stream = B0 in
+ let returnv = B0 in
+ let (returnv, hint) =
+ match (slice prfop (4:ii) (3:ii)) with
+ | Vector [B0;B0] _ _ ->
+ let hint = Prefetch_READ in
+ (returnv,hint)
+ | Vector [B0;B1] _ _ ->
+ let hint = Prefetch_EXEC in
+ (returnv,hint)
+ | Vector [B1;B0] _ _ ->
+ let hint = Prefetch_WRITE in
+ (returnv,hint)
+ | Vector [B1;B1] _ _ ->
+ let returnv = B1 in
+ (returnv,hint)
+ end in
+ (* target/stream are only decoded when the hint encoding was allocated. *)
+ let (target, stream) =
+ if bitU_to_bool (~returnv)
+ then
+ let target = unsigned (reset_vector_start (slice prfop (2:ii) (1:ii))) in
+ let stream = neq (match (access prfop (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+ (target,stream)
+ else (target,stream) in
+ ()
+
+(* DoubleLockStatus: true when the OS double lock is in force -- DLK == 1,
+   CORENPDRQ == 0 and the PE is not halted.  The AArch32 view of the
+   registers (DBGOSDLR/DBGPRCR) is used when EL1 is using AArch32,
+   otherwise the AArch64 view (OSDLR_EL1/DBGPRCR_EL1). *)
+let DoubleLockStatus () =
+ if bitU_to_bool (ELUsingAArch32 (reset_vector_start EL1))
+ then
+ read_reg_bitfield DBGOSDLR "DLK" >>= fun w__0 ->
+ read_reg_bitfield DBGPRCR "CORENPDRQ" >>= fun w__1 ->
+ Halted () >>= fun w__2 ->
+ return ((eq (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+ ((eq (match w__1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &. (~w__2)))
+ else
+ read_reg_bitfield OSDLR_EL1 "DLK" >>= fun w__3 ->
+ read_reg_bitfield DBGPRCR_EL1 "CORENPDRQ" >>= fun w__4 ->
+ Halted () >>= fun w__5 ->
+ return ((eq (match w__3 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+ ((eq (match w__4 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &. (~w__5)))
+
+(* wSP: write value (zero-extended to 64 bits) to the currently selected
+   stack pointer.  With PSTATE.SPSel.SP == 0 every EL uses SP_EL0;
+   otherwise the SP of the current exception level (CurrentEL.EL) is
+   written.  Falling through all four EL comparisons is a model failure
+   (assert' B0). *)
+let wSP ((), value) =
+ read_reg_bitfield SPSel "SP" >>= fun w__0 ->
+ if bitU_to_bool (eq (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+ then write_reg SP_EL0 (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length value)))
+ else
+ read_reg_field CurrentEL "EL" >>= fun pstate_el ->
+ if bitU_to_bool (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL0))
+ then
+ write_reg SP_EL0 (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length value)))
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL1))
+ then
+ write_reg
+ SP_EL1
+ (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length value)))
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL2))
+ then
+ write_reg
+ SP_EL2
+ (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length value)))
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL3))
+ then
+ write_reg
+ SP_EL3
+ (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length value)))
+ else return (assert' B0 (Nothing))
+
+(* Decoders for the transactional-memory commit and test instructions.
+   These encodings carry no operand fields, so decoding always succeeds
+   with the corresponding nullary AST constructor. *)
+let decodeTMCommit () = Just TMCommit
+
+let decodeTMTest () = Just TMTest
+
+(* Decode TMAbort: the leading pattern bit R and the remaining five bits
+   (slice_raw 1..5, bound as imm5) are passed through to the TMAbort AST
+   node.  NOTE(review): the meaning of R is defined by the TMAbort
+   constructor elsewhere -- confirm against armV8_embed_types. *)
+let decodeTMAbort ((Vector [R;_;_;_;_;_] _ _) as v__975) =
+ let imm5 = slice_raw v__975 (1:ii) (5:ii) in
+ Just (TMAbort (R,reset_vector_start imm5))
+
+(* wV: write a value to SIMD&FP register V[n], zero-extended to the full
+   128-bit register width. *)
+let wV (n, value) =
+ let wide = ZeroExtend ((128:ii),reset_vector_start (set_vector_start_to_length value)) in
+ write_reg (access _V n) wide
+
+(* wVpart: write one 64-bit part of SIMD&FP register V[n].  part 0 writes
+   the value zero-extended across the whole 128-bit register; any other
+   part first asserts (via the generated length check) that the value is
+   64 bits wide and then writes only bits 127:64. *)
+let wVpart (n, part, value) =
+ if bitU_to_bool (eq_range (part, (0:ii)))
+ then
+ write_reg
+ (access _V n)
+ (ZeroExtend ((128:ii),reset_vector_start (set_vector_start_to_length value)))
+ else
+ let _ =
+ assert' (eq_range
+ (length
+ (reset_vector_start (set_vector_start_to_length
+ (to_vec_dec (length value,(0:ii))))),
+ (64:ii))) (Nothing) in
+ write_reg_range (access _V n) (127:ii) (64:ii) (set_vector_start 127 value)
+
+(* wX: write value (zero-extended to 64 bits) to general register X[n].
+   Register index 31 is the zero register in this context, so writes to
+   it are silently discarded. *)
+let wX (n, value) =
+ let wide = ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length value)) in
+ if bitU_to_bool (neq_range (n, (31:ii)))
+ then write_reg (access _R n) wide
+ else return ()
+
+(* Stub decoders for the Advanced-SIMD load/store and SIMD/FP
+   data-processing encoding groups.  The model does not implement these:
+   each signals not_implemented and then decodes to Unallocated. *)
+let decodeAdvSIMDLoadStoreMultiStruct machineCode =
+ not_implemented "decodeAdvSIMDLoadStoreMultiStruct" >>= fun _ ->
+ return (Just Unallocated)
+
+let decodeAdvSIMDLoadStoreMultiStructPostIndexed machineCode =
+ not_implemented "decodeAdvSIMDLoadStoreMultiStructPostIndexed" >>= fun _ ->
+ return (Just Unallocated)
+
+let decodeAdvSIMDLoadStoreSingleStruct machineCode =
+ not_implemented "decodeAdvSIMDLoadStoreSingleStruct" >>= fun _ ->
+ return (Just Unallocated)
+
+let decodeAdvSIMDLoadStoreSingleStructPostIndexed machineCode =
+ not_implemented "decodeAdvSIMDLoadStoreSingleStructPostIndexed" >>= fun _ ->
+ return (Just Unallocated)
+
+let decodeDataSIMDFPoint1 machineCode =
+ not_implemented "decodeDataSIMDFPoint1" >>= fun _ ->
+ return (Just Unallocated)
+
+let decodeDataSIMDFPoint2 machineCode =
+ not_implemented "decodeDataSIMDFPoint2" >>= fun _ ->
+ return (Just Unallocated)
+
+(* AArch64_CheckForWFxTrap: decide whether a WFI/WFE instruction (is_wfe
+   selects which) is trapped to target_el, and if so raise the trap via
+   AArch64_WFxTrap.  Note the polarity difference visible below: the EL1
+   controls are negative (SCTLR_EL1.nTWE/nTWI -- trap when the bit is 0)
+   while the EL2/EL3 controls (HCR_EL2.TWE/TWI, SCR_EL3.TWE/TWI) trap
+   when the bit is 1.  Any other target EL is a model failure. *)
+let AArch64_CheckForWFxTrap (target_el, is_wfe) =
+ let _ = assert' (HaveEL (reset_vector_start target_el)) (Nothing) in
+ let trap = B0 in
+ (if bitU_to_bool (eq_vec (set_vector_start_to_length target_el, set_vector_start_to_length EL1))
+ then
+ (if bitU_to_bool is_wfe
+ then
+ read_reg_bitfield SCTLR_EL1 "nTWE" >>= fun w__0 ->
+ return (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end)
+ else
+ read_reg_bitfield SCTLR_EL1 "nTWI" >>= fun w__1 ->
+ return (match w__1 with | B0 -> (0:ii) | B1 -> (1:ii) end)) >>= fun w__2 ->
+ let trap = eq (w__2, (0:ii)) in
+ return trap
+ else
+ if bitU_to_bool (eq_vec (set_vector_start_to_length target_el, set_vector_start_to_length EL2))
+ then
+ (if bitU_to_bool is_wfe
+ then
+ read_reg_bitfield HCR_EL2 "TWE" >>= fun w__3 ->
+ return (match w__3 with | B0 -> (0:ii) | B1 -> (1:ii) end)
+ else
+ read_reg_bitfield HCR_EL2 "TWI" >>= fun w__4 ->
+ return (match w__4 with | B0 -> (0:ii) | B1 -> (1:ii) end)) >>= fun w__5 ->
+ let trap = eq (w__5, (1:ii)) in
+ return trap
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length target_el, set_vector_start_to_length EL3))
+ then
+ (if bitU_to_bool is_wfe
+ then
+ read_reg_bitfield SCR_EL3 "TWE" >>= fun w__6 ->
+ return (match w__6 with | B0 -> (0:ii) | B1 -> (1:ii) end)
+ else
+ read_reg_bitfield SCR_EL3 "TWI" >>= fun w__7 ->
+ return (match w__7 with | B0 -> (0:ii) | B1 -> (1:ii) end)) >>= fun w__8 ->
+ let trap = eq (w__8, (1:ii)) in
+ return trap
+ else
+ let _ = assert' B0 (Nothing) in
+ return trap) >>= fun trap ->
+ if bitU_to_bool trap
+ then AArch64_WFxTrap (reset_vector_start target_el,is_wfe)
+ else return ()
+
+(* wmem_kind: map an access type plus exclusivity flag to the concurrency
+   model's write kind.  Exclusive: ATOMIC -> exclusive, ORDERED ->
+   exclusive-release.  Non-exclusive: NORMAL/STREAM/UNPRIV -> plain,
+   ORDERED -> release.  Anything else is reported as unimplemented (with
+   a fallback kind so the monadic type still works out). *)
+let wmem_kind (acctype, exclusive) =
+ if bitU_to_bool exclusive
+ then
+ match acctype with
+ | AccType_ATOMIC -> return (IK_mem_write Write_exclusive)
+ | AccType_ORDERED -> return (IK_mem_write Write_exclusive_release)
+ | _ ->
+ not_implemented "unimplemented memory access" >>
+ return (IK_mem_write Write_exclusive)
+ end
+ else
+ match acctype with
+ | AccType_NORMAL -> return (IK_mem_write Write_plain)
+ | AccType_STREAM -> return (IK_mem_write Write_plain)
+ | AccType_UNPRIV -> return (IK_mem_write Write_plain)
+ | AccType_ORDERED -> return (IK_mem_write Write_release)
+ | _ ->
+ not_implemented "unimplemented memory access" >>
+ return (IK_mem_write Write_plain)
+ end
+
+(* BranchTo: perform a branch by writing the target address to _PC.
+   Hint_Branch records the branch type for the concurrency model.  A
+   32-bit target is only legal in AArch32 and is zero-extended.  For a
+   64-bit AArch64 target the top byte (bits 63:56) is canonicalised
+   according to the top-byte-ignore (TBI) controls of the current
+   translation regime: at EL0/EL1, TCR_EL1.TBI1 with address bit 55 = 1
+   forces the top byte to 0xFF and TCR_EL1.TBI0 with bit 55 = 0 forces it
+   to 0x00; at EL2/EL3 the single TCR_EL2/TCR_EL3.TBI bit forces 0x00.
+   The function's overall result is (). *)
+let BranchTo (target, branch_type) =
+ let target' = set_vector_start_to_length target in
+ let _ = Hint_Branch branch_type in
+ (if bitU_to_bool
+ (eq_range (length (reset_vector_start (set_vector_start_to_length target)), (32:ii)))
+ then
+ let _ = assert' (UsingAArch32 ()) (Nothing) in
+ write_reg _PC (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length target))) >>
+ return target'
+ else
+ let _ =
+ assert' ((eq_range (length (reset_vector_start (set_vector_start_to_length target)), (64:ii))) &.
+ (~(UsingAArch32 ()))) (Nothing) in
+ read_reg_field CurrentEL "EL" >>= fun pstate_el ->
+ (if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL0))
+ then
+ read_reg_bitfield TCR_EL1 "TBI1" >>= fun w__0 ->
+ (* bit 55 = 1 selects the TTBR1 regime: TBI1 == 1 tags bits 63:56. *)
+ let target' =
+ if bitU_to_bool
+ ((eq
+ (match (access (set_vector_start_to_length target') (55:ii)) with
+ | B0 -> (0:ii)
+ | B1 -> (1:ii)
+ end,
+ (1:ii))) &. (eq (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then
+ update
+ target' (63:ii) (56:ii)
+ (set_vector_start 63 (Vector [B1;B1;B1;B1;B1;B1;B1;B1] 7 false))
+ else target' in
+ read_reg_bitfield TCR_EL1 "TBI0" >>= fun w__1 ->
+ let target' =
+ if bitU_to_bool
+ ((eq
+ (match (access (set_vector_start_to_length target') (55:ii)) with
+ | B0 -> (0:ii)
+ | B1 -> (1:ii)
+ end,
+ (0:ii))) &. (eq (match w__1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then
+ update
+ target' (63:ii) (56:ii)
+ (set_vector_start 63 (Vector [B0;B0;B0;B0;B0;B0;B0;B0] 7 false))
+ else target' in
+ return target'
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL1))
+ then
+ read_reg_bitfield TCR_EL1 "TBI1" >>= fun w__2 ->
+ let target' =
+ if bitU_to_bool
+ ((eq
+ (match (access (set_vector_start_to_length target') (55:ii)) with
+ | B0 -> (0:ii)
+ | B1 -> (1:ii)
+ end,
+ (1:ii))) &. (eq (match w__2 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then
+ update
+ target' (63:ii) (56:ii)
+ (set_vector_start 63 (Vector [B1;B1;B1;B1;B1;B1;B1;B1] 7 false))
+ else target' in
+ read_reg_bitfield TCR_EL1 "TBI0" >>= fun w__3 ->
+ let target' =
+ if bitU_to_bool
+ ((eq
+ (match (access (set_vector_start_to_length target') (55:ii)) with
+ | B0 -> (0:ii)
+ | B1 -> (1:ii)
+ end,
+ (0:ii))) &. (eq (match w__3 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then
+ update
+ target' (63:ii) (56:ii)
+ (set_vector_start 63 (Vector [B0;B0;B0;B0;B0;B0;B0;B0] 7 false))
+ else target' in
+ return target'
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL2))
+ then
+ read_reg_bitfield TCR_EL2 "TBI" >>= fun w__4 ->
+ let target' =
+ if bitU_to_bool (eq (match w__4 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then
+ update
+ target' (63:ii) (56:ii)
+ (set_vector_start 63 (Vector [B0;B0;B0;B0;B0;B0;B0;B0] 7 false))
+ else target' in
+ return target'
+ else
+ if bitU_to_bool
+ (eq_vec (set_vector_start_to_length pstate_el, set_vector_start_to_length EL3))
+ then
+ read_reg_bitfield TCR_EL3 "TBI" >>= fun w__5 ->
+ let target' =
+ if bitU_to_bool (eq (match w__5 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then
+ update
+ target' (63:ii) (56:ii)
+ (set_vector_start 63 (Vector [B0;B0;B0;B0;B0;B0;B0;B0] 7 false))
+ else target' in
+ return target'
+ else return target') >>= fun target' ->
+ write_reg _PC target' >>
+ return target') >>= fun target' ->
+ return ()
+
+(* rmem_kind: map an access type plus exclusivity flag to the concurrency
+   model's read kind.  Exclusive: ATOMIC -> exclusive, ORDERED ->
+   exclusive-acquire, others unimplemented.  Non-exclusive:
+   NORMAL/ATOMIC/UNPRIV -> plain, STREAM -> stream, ORDERED -> acquire. *)
+let rmem_kind (acctype, exclusive) =
+ if bitU_to_bool exclusive
+ then
+ match acctype with
+ | AccType_ATOMIC -> return (IK_mem_read Read_exclusive)
+ | AccType_ORDERED -> return (IK_mem_read Read_exclusive_acquire)
+ | _ ->
+ not_implemented "unimplemented memory access" >>
+ return (IK_mem_read Read_exclusive)
+ end
+ else
+ return (match acctype with
+ | AccType_NORMAL -> IK_mem_read Read_plain
+ | AccType_ATOMIC -> IK_mem_read Read_plain
+ | AccType_STREAM -> IK_mem_read Read_stream
+ | AccType_UNPRIV -> IK_mem_read Read_plain
+ | AccType_ORDERED -> IK_mem_read Read_acquire
+ end)
+
+(* Decode TMStart: Rt names the register that receives the transaction
+   start status. *)
+let decodeTMStart Rt =
+ Just (TMStart (UInt_reg (reset_vector_start Rt)))
+
+(* Decode the add/subtract-with-carry group (ADC/ADCS/SBC/SBCS): sf
+   selects 32- vs 64-bit datasize, op selects subtract, S selects
+   flag-setting; Rm/Rn/Rd are the operand registers. *)
+let decodeAddSubtractWithCarry ((Vector [sf;op;S;B1;B1;B0;B1;B0;B0;B0;B0;_;_;_;_;_;B0;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__969) =
+ let Rm = slice_raw v__969 (11:ii) (15:ii) in
+ let Rn = slice_raw v__969 (22:ii) (26:ii) in
+ let Rd = slice_raw v__969 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let sub_op = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let setflags = eq (match S with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ Just (AddSubCarry (d,n,m,datasize,sub_op,setflags))
+
+(* Decode conditional compare (register), CCMN/CCMP: op selects subtract
+   (CCMP), _cond is the condition, nzcv the fallback flag values used when
+   the condition fails. *)
+let decodeConditionalCompareRegister ((Vector [sf;op;B1;B1;B1;B0;B1;B0;B0;B1;B0;_;_;_;_;_;_;_;_;_;B0;B0;_;_;_;_;_;B0;_;_;_;_] _ _) as v__961) =
+ let Rm = slice_raw v__961 (11:ii) (15:ii) in
+ let _cond = slice_raw v__961 (16:ii) (19:ii) in
+ let Rn = slice_raw v__961 (22:ii) (26:ii) in
+ let nzcv = slice_raw v__961 (28:ii) (31:ii) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let sub_op = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let condition = set_vector_start 3 _cond in
+ let flags = set_vector_start 3 nzcv in
+ Just (ConditionalCompareRegister (n,m,datasize,sub_op,reset_vector_start condition,reset_vector_start flags))
+
+(* Decode conditional select (CSEL/CSINC/CSINV/CSNEG): op requests
+   inversion and o2 increment of the else-operand. *)
+let decodeConditionalSelect ((Vector [sf;op;B0;B1;B1;B0;B1;B0;B1;B0;B0;_;_;_;_;_;_;_;_;_;B0;o2;_;_;_;_;_;_;_;_;_;_] _ _) as v__954) =
+ let Rm = slice_raw v__954 (11:ii) (15:ii) in
+ let _cond = slice_raw v__954 (16:ii) (19:ii) in
+ let Rn = slice_raw v__954 (22:ii) (26:ii) in
+ let Rd = slice_raw v__954 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let condition = set_vector_start 3 _cond in
+ let else_inv = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let else_inc = eq (match o2 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ Just (ConditionalSelect (d,n,m,datasize,reset_vector_start condition,else_inv,else_inc))
+
+(* BranchToFP: extend the register footprint (iR = inputs read,
+   oR = outputs written) of a branch.  NOTE(review): assuming the
+   conditional binds tighter than the tuple comma here, this returns
+   (iR, _PCfp :: oR) in AArch32 and (PSTATE_ELfp :: iR, _PCfp :: oR)
+   in AArch64 -- i.e. _PCfp is always written and PSTATE_ELfp is only
+   read in AArch64; confirm against the Lem grammar. *)
+let BranchToFP (iR, oR) =
+ (if bitU_to_bool (UsingAArch32 ())
+ then iR
+ else PSTATE_ELfp :: iR,_PCfp :: oR)
+
+(* Decode the model's implementation-defined instructions: the three
+   encodings map to test-begin/end (isEnd taken from the encoding),
+   stop-fetching, and thread-start. *)
+let decodeImplementationDefined = function
+ | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B1;B0;B1;B1;B1;_;B1;B1;B0;B0;B0;B0;B0;B0;B0;B0;B0;B0;B0;isEnd] _ _) as v__920) ->
+ Just (ImplementationDefinedTestBeginEnd isEnd)
+ | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B1;B0;B1;B1;B1;_;B1;B1;B0;B0;B0;B0;B0;B0;B0;B0;B0;B0;B1;B0] _ _) as v__932) ->
+ Just (ImplementationDefinedStopFetching)
+ | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B1;B0;B1;B1;B1;_;B1;B1;B0;B0;B0;B0;B0;B0;B0;B0;B0;B0;B1;B1] _ _) as v__943) ->
+ Just (ImplementationDefinedThreadStart)
+ end
+
+let NZCVfp = [PSTATE_Vfp;PSTATE_Cfp;PSTATE_Zfp;PSTATE_Nfp]
+
+(* Decode conditional compare (immediate), CCMN/CCMP: the 5-bit imm5 is
+   zero-extended to the datasize; nzcv gives the fallback flags when the
+   condition fails. *)
+let decodeConditionalCompareImmediate ((Vector [sf;op;B1;B1;B1;B0;B1;B0;B0;B1;B0;_;_;_;_;_;_;_;_;_;B1;B0;_;_;_;_;_;B0;_;_;_;_] _ _) as v__912) =
+ let imm5 = slice_raw v__912 (11:ii) (15:ii) in
+ let _cond = slice_raw v__912 (16:ii) (19:ii) in
+ let Rn = slice_raw v__912 (22:ii) (26:ii) in
+ let nzcv = slice_raw v__912 (28:ii) (31:ii) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let sub_op = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let condition = set_vector_start 3 _cond in
+ let flags = set_vector_start 3 nzcv in
+ let imm =
+ set_vector_start_to_length
+ (ZeroExtend (datasize,reset_vector_start (set_vector_start_to_length imm5))) in
+ Just (ConditionalCompareImmediate (n,datasize,sub_op,reset_vector_start condition,reset_vector_start flags,reset_vector_start (set_vector_start_to_length
+ imm)))
+
+(* Decode move-wide immediate (MOVN/MOVZ/MOVK): opc 00 -> N, 10 -> Z,
+   11 -> K, anything else unallocated; sf == 0 with hw<1> set is also
+   unallocated.  The bit position is hw concatenated with 0000,
+   i.e. hw * 16. *)
+let decodeMoveWideImmediate ((Vector [sf;_;_;B1;B0;B0;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__909) =
+ let opc = slice_raw v__909 (1:ii) (2:ii) in
+ let hw = slice_raw v__909 (9:ii) (10:ii) in
+ let imm16 = slice_raw v__909 (11:ii) (26:ii) in
+ let Rd = slice_raw v__909 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let imm = imm16 in
+ let pos = (0:ii) in
+ (* Generated-code artifact: toNatural (0:ii) always yields MoveWideOp_N;
+    the real opcode is selected by the match on opc below. *)
+ let opcode =
+ match toNatural (0:ii) with
+ | (0:nn) -> MoveWideOp_N
+ | (1:nn) -> MoveWideOp_Z
+ | (2:nn) -> MoveWideOp_K
+ end in
+ match opc with
+ | Vector [B0;B0] _ _ -> return MoveWideOp_N
+ | Vector [B1;B0] _ _ -> return MoveWideOp_Z
+ | Vector [B1;B1] _ _ -> return MoveWideOp_K
+ | _ ->
+ UnallocatedEncoding () >>
+ return opcode
+ end >>= fun opcode ->
+ (if bitU_to_bool
+ ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+ (eq (match (access hw (1:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ let pos =
+ UInt
+ (reset_vector_start (set_vector_start_to_length
+ ((set_vector_start_to_length hw) ^^
+ (set_vector_start_to_length (Vector [B0;B0;B0;B0] 3 false))))) in
+ return (Just (MoveWide (d,datasize,reset_vector_start imm,pos,opcode)))
+
+(* Decode the load/store exclusive group: o0 selects ordered
+   (acquire/release) semantics, o2 == 0 exclusive, o1 pair, L load vs
+   store.  elsize is 8 << size; pairs double the datasize, and 64-bit
+   element size selects 64-bit regsize.  Two encoding combinations are
+   rejected as unallocated before the fields are interpreted. *)
+let decodeLoadStoreExclusive ((Vector [_;_;B0;B0;B1;B0;B0;B0;o2;L;o1;_;_;_;_;_;o0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__903) =
+ let size = slice_raw v__903 (0:ii) (1:ii) in
+ let Rs = slice_raw v__903 (11:ii) (15:ii) in
+ let Rt2 = slice_raw v__903 (17:ii) (21:ii) in
+ let Rn = slice_raw v__903 (22:ii) (26:ii) in
+ let Rt = slice_raw v__903 (27:ii) (31:ii) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let t = UInt_reg (reset_vector_start Rt) in
+ let t2 = UInt_reg (reset_vector_start Rt2) in
+ let s = UInt_reg (reset_vector_start Rs) in
+ (if bitU_to_bool
+ ((eq_vec
+ (set_vector_start_to_length (Vector [o2;o1;o0] 2 false),
+ set_vector_start_to_length (Vector [B1;B0;B0] 2 false))) |.
+ (eq_vec
+ (set_vector_start_to_length (Vector [o2;o1] 1 false),
+ set_vector_start_to_length (Vector [B1;B1] 1 false))))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ (if bitU_to_bool
+ ((eq (match o1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+ (eq (match (access size (1:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ let acctype =
+ if bitU_to_bool (eq (match o0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then AccType_ORDERED
+ else AccType_ATOMIC in
+ let excl = eq (match o2 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+ let pair = eq (match o1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let memop =
+ if bitU_to_bool (eq (match L with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then MemOp_LOAD
+ else MemOp_STORE in
+ let elsize = lsl' ((8:ii),UInt (reset_vector_start (set_vector_start_to_length size))) in
+ let regsize = if bitU_to_bool (eq_range (elsize, (64:ii))) then (64:ii) else (32:ii) in
+ let datasize = if bitU_to_bool pair then elsize * (2:ii) else elsize in
+ return (Just (LoadStoreAcqExc (n,t,t2,s,acctype,excl,pair,memop,elsize,regsize,datasize)))
+
+(* Shared tail of the load/store (immediate) decoders.  opc<1> == 0 gives
+   a plain load (opc<0> == 1) or store; otherwise size == 11 is either a
+   prefetch (when allowed and opc<0> == 0) or unallocated, and the
+   remaining cases are sign-extending loads whose regsize is 32 when
+   opc<0> == 1.  datasize is 8 << scale.  The initial memop/regsize/
+   _signed bindings are generated-code defaults overwritten below. *)
+let sharedDecodeLoadImmediate (opc, size, Rn, Rt, wback, postindex, scale, offset, acctype, prefetchAllowed) =
+ let n = UInt_reg Rn in
+ let t = UInt_reg Rt in
+ let memop =
+ match toNatural (0:ii) with
+ | (0:nn) -> MemOp_LOAD
+ | (1:nn) -> MemOp_STORE
+ | (2:nn) -> MemOp_PREFETCH
+ end in
+ let _signed = B0 in
+ let regsize = (64:ii) in
+ (if bitU_to_bool (eq (match (access opc (1:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+ then
+ let memop =
+ if bitU_to_bool
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then MemOp_LOAD
+ else MemOp_STORE in
+ let regsize =
+ if bitU_to_bool
+ (eq_vec
+ (set_vector_start_to_length size,
+ set_vector_start_to_length (Vector [B1;B1] 1 false)))
+ then (64:ii)
+ else (32:ii) in
+ let _signed = B0 in
+ return (memop,regsize,_signed)
+ else
+ (if bitU_to_bool
+ (eq_vec
+ (set_vector_start_to_length size,
+ set_vector_start_to_length (Vector [B1;B1] 1 false)))
+ then
+ (if bitU_to_bool prefetchAllowed
+ then
+ let memop = MemOp_PREFETCH in
+ (if bitU_to_bool
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ return memop
+ else
+ UnallocatedEncoding () >>
+ return memop) >>= fun memop ->
+ return (regsize,_signed,memop)
+ else
+ let memop = MemOp_LOAD in
+ (if bitU_to_bool
+ ((eq_vec
+ (set_vector_start_to_length size,
+ set_vector_start_to_length (Vector [B1;B0] 1 false))) &.
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ let regsize =
+ if bitU_to_bool
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (32:ii)
+ else (64:ii) in
+ let _signed = B1 in
+ return (regsize,_signed,memop)) >>= fun (regsize, _signed, memop) ->
+ return (memop,regsize,_signed)) >>= fun (memop, regsize, _signed) ->
+ let datasize = lsl' ((8:ii),scale) in
+ return (Just (LoadImmediate (n,t,acctype,memop,_signed,wback,postindex,reset_vector_start offset,regsize,datasize)))
+
+(* Shared tail of the load/store (register offset) decoders.  The
+   opc/size interpretation mirrors sharedDecodeLoadImmediate (plain
+   load/store, prefetch, or sign-extending load) except that size == 11
+   with opc<1> == 1 is always a prefetch here.  Access type is fixed to
+   NORMAL; datasize is 8 << scale. *)
+let sharedDecodeLoadRegister (Rn, Rt, Rm, opc, size, wback, postindex, scale, extend_type, shift) =
+ let n = UInt_reg Rn in
+ let t = UInt_reg Rt in
+ let m = UInt_reg Rm in
+ let acctype = AccType_NORMAL in
+ (* Generated-code default, overwritten by the branches below. *)
+ let memop =
+ match toNatural (0:ii) with
+ | (0:nn) -> MemOp_LOAD
+ | (1:nn) -> MemOp_STORE
+ | (2:nn) -> MemOp_PREFETCH
+ end in
+ let _signed = B0 in
+ let regsize = (64:ii) in
+ (if bitU_to_bool (eq (match (access opc (1:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+ then
+ let memop =
+ if bitU_to_bool
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then MemOp_LOAD
+ else MemOp_STORE in
+ let regsize =
+ if bitU_to_bool
+ (eq_vec
+ (set_vector_start_to_length size,
+ set_vector_start_to_length (Vector [B1;B1] 1 false)))
+ then (64:ii)
+ else (32:ii) in
+ let _signed = B0 in
+ return (memop,regsize,_signed)
+ else
+ (if bitU_to_bool
+ (eq_vec
+ (set_vector_start_to_length size,
+ set_vector_start_to_length (Vector [B1;B1] 1 false)))
+ then
+ let memop = MemOp_PREFETCH in
+ (if bitU_to_bool
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ return (regsize,_signed,memop)
+ else
+ let memop = MemOp_LOAD in
+ (if bitU_to_bool
+ ((eq_vec
+ (set_vector_start_to_length size,
+ set_vector_start_to_length (Vector [B1;B0] 1 false))) &.
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ let regsize =
+ if bitU_to_bool
+ (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (32:ii)
+ else (64:ii) in
+ let _signed = B1 in
+ return (regsize,_signed,memop)) >>= fun (regsize, _signed, memop) ->
+ return (memop,regsize,_signed)) >>= fun (memop, regsize, _signed) ->
+ let datasize = lsl' ((8:ii),scale) in
+ return (Just (LoadRegister (n,t,m,acctype,memop,_signed,wback,postindex,extend_type,shift,regsize,datasize)))
+
+(* ConditionHoldsIFP: the PSTATE flag footprint (registers read) needed to
+   evaluate condition code _cond, keyed on its top three bits: EQ/NE reads
+   Z, CS/CC reads C, MI/PL reads N, VS/VC reads V, HI/LS reads Z and C,
+   GE/LT reads N and V, GT/LE reads Z, N and V, and AL (111) reads none. *)
+let ConditionHoldsIFP _cond =
+ match (slice _cond (3:ii) (1:ii)) with
+ | Vector [B0;B0;B0] _ _ -> [PSTATE_Zfp]
+ | Vector [B0;B0;B1] _ _ -> [PSTATE_Cfp]
+ | Vector [B0;B1;B0] _ _ -> [PSTATE_Nfp]
+ | Vector [B0;B1;B1] _ _ -> [PSTATE_Vfp]
+ | Vector [B1;B0;B0] _ _ -> [PSTATE_Zfp;PSTATE_Cfp]
+ | Vector [B1;B0;B1] _ _ -> [PSTATE_Vfp;PSTATE_Nfp]
+ | Vector [B1;B1;B0] _ _ -> [PSTATE_Zfp;PSTATE_Vfp;PSTATE_Nfp]
+ | Vector [B1;B1;B1] _ _ -> []
+ end
+
+(* Decode add/subtract (immediate): shift 00 uses imm12 as-is, 01 shifts
+   it left by 12 (concatenation with twelve zero bits), and 1x is a
+   reserved value. *)
+let decodeAddSubtractImmediate ((Vector [sf;op;S;B1;B0;B0;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__898) =
+ let shift = slice_raw v__898 (8:ii) (9:ii) in
+ let imm12 = slice_raw v__898 (10:ii) (21:ii) in
+ let Rn = slice_raw v__898 (22:ii) (26:ii) in
+ let Rd = slice_raw v__898 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let sub_op = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let setflags = eq (match S with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let imm = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+ match shift with
+ | Vector [B0;B0] _ _ ->
+ let imm =
+ set_vector_start_to_length
+ (ZeroExtend (datasize,reset_vector_start (set_vector_start_to_length imm12))) in
+ return imm
+ | Vector [B0;B1] _ _ ->
+ let imm =
+ set_vector_start_to_length
+ (ZeroExtend
+ (datasize,
+ reset_vector_start (set_vector_start_to_length
+ ((set_vector_start_to_length imm12) ^^
+ (set_vector_start_to_length
+ (duplicate_bits (set_vector_start_to_length (Vector [B0] 0 false), (12:ii)))))))) in
+ return imm
+ | Vector [B1;_] _ _ ->
+ ReservedValue () >>
+ return imm
+ end >>= fun imm ->
+ return (Just (AddSubImmediate (d,n,datasize,sub_op,setflags,reset_vector_start (set_vector_start_to_length
+ imm))))
+
+(* Decode the three-source data-processing group.  First arm:
+   MADD/MSUB (o0 selects subtract).  Second arm: the long
+   multiply-add/sub (U selects unsigned), 32-bit sources into a 64-bit
+   destination.  Third arm: the high-half multiply (SMULH/UMULH). *)
+let decodeData3Source = function
+ | ((Vector [sf;B0;B0;B1;B1;B0;B1;B1;B0;B0;B0;_;_;_;_;_;o0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__878) ->
+ let Rm = slice_raw v__878 (11:ii) (15:ii) in
+ let Ra = slice_raw v__878 (17:ii) (21:ii) in
+ let Rn = slice_raw v__878 (22:ii) (26:ii) in
+ let Rd = slice_raw v__878 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let a = UInt_reg (reset_vector_start Ra) in
+ let destsize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let datasize = destsize in
+ let sub_op = eq (match o0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ Just (MultiplyAddSub (d,n,m,a,destsize,datasize,sub_op))
+ | ((Vector [B1;B0;B0;B1;B1;B0;B1;B1;U;B0;B1;_;_;_;_;_;o0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__884) ->
+ let Rm = slice_raw v__884 (11:ii) (15:ii) in
+ let Ra = slice_raw v__884 (17:ii) (21:ii) in
+ let Rn = slice_raw v__884 (22:ii) (26:ii) in
+ let Rd = slice_raw v__884 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let a = UInt_reg (reset_vector_start Ra) in
+ let destsize = (64:ii) in
+ let datasize = (32:ii) in
+ let sub_op = eq (match o0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let _unsigned = eq (match U with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ Just (MultiplyAddSubLong (d,n,m,a,destsize,datasize,sub_op,_unsigned))
+ | ((Vector [B1;B0;B0;B1;B1;B0;B1;B1;U;B1;B0;_;_;_;_;_;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__891) ->
+ let Rm = slice_raw v__891 (11:ii) (15:ii) in
+ let Ra = slice_raw v__891 (17:ii) (21:ii) in
+ let Rn = slice_raw v__891 (22:ii) (26:ii) in
+ let Rd = slice_raw v__891 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let a = UInt_reg (reset_vector_start Ra) in
+ let destsize = (64:ii) in
+ let datasize = destsize in
+ let _unsigned = eq (match U with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ Just (MultiplyHigh (d,n,m,a,destsize,datasize,_unsigned))
+ end
+
+(* Decode EXTR (extract register): N must equal sf (else unallocated),
+   and sf == 0 with imms<5> set is a reserved value.  lsb is the
+   unsigned value of imms. *)
+let decodeExtract ((Vector [sf;B0;B0;B1;B0;B0;B1;B1;B1;N;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__872) =
+ let Rm = slice_raw v__872 (11:ii) (15:ii) in
+ let imms = slice_raw v__872 (16:ii) (21:ii) in
+ let Rn = slice_raw v__872 (22:ii) (26:ii) in
+ let Rd = slice_raw v__872 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let lsb = (0:ii) in
+ (if bitU_to_bool (neq_bit (N, sf))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ (if bitU_to_bool
+ ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+ (eq (match (access imms (5:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then ReservedValue ()
+ else return ()) >>
+ let lsb = UInt (reset_vector_start (set_vector_start_to_length imms)) in
+ return (Just (ExtractRegister (d,n,m,datasize,lsb)))
+
+(* Decode add/subtract (extended register): option_v selects the extend
+   type, imm3 the shift amount (values above 4 are reserved). *)
+let decodeAddSubtractExtendedRegister ((Vector [sf;op;S;B0;B1;B0;B1;B1;B0;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__865) =
+ let Rm = slice_raw v__865 (11:ii) (15:ii) in
+ let option_v = slice_raw v__865 (16:ii) (18:ii) in
+ let imm3 = slice_raw v__865 (19:ii) (21:ii) in
+ let Rn = slice_raw v__865 (22:ii) (26:ii) in
+ let Rd = slice_raw v__865 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let sub_op = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let setflags = eq (match S with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let extend_type = DecodeRegExtend (reset_vector_start (set_vector_start 2 option_v)) in
+ let shift = UInt (reset_vector_start (set_vector_start_to_length imm3)) in
+ (if bitU_to_bool (gt (shift, (4:ii)))
+ then ReservedValue ()
+ else return ()) >>
+ return (Just (AddSubExtendRegister (d,n,m,datasize,sub_op,setflags,extend_type,shift)))
+
+(* Decode add/subtract (shifted register): shift type 11 (ROR) is
+   reserved, as is sf == 0 with imm6<5> set. *)
+let decodeAddSubtractShiftedRegister ((Vector [sf;op;S;B0;B1;B0;B1;B1;_;_;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__859) =
+ let shift = slice_raw v__859 (8:ii) (9:ii) in
+ let Rm = slice_raw v__859 (11:ii) (15:ii) in
+ let imm6 = slice_raw v__859 (16:ii) (21:ii) in
+ let Rn = slice_raw v__859 (22:ii) (26:ii) in
+ let Rd = slice_raw v__859 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let sub_op = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ let setflags = eq (match S with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ (if bitU_to_bool
+ (eq_vec
+ (set_vector_start_to_length shift,
+ set_vector_start_to_length (Vector [B1;B1] 1 false)))
+ then ReservedValue ()
+ else return ()) >>
+ (if bitU_to_bool
+ ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+ (eq (match (access imm6 (5:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then ReservedValue ()
+ else return ()) >>
+ let shift_type = DecodeShift (reset_vector_start (set_vector_start 1 shift)) in
+ let shift_amount = UInt (reset_vector_start (set_vector_start_to_length imm6)) in
+ return (Just (AddSubShiftedRegister (d,n,m,datasize,sub_op,setflags,shift_type,shift_amount)))
+
+(* Decode logical (shifted register): opc selects AND/ORR/EOR/ANDS
+   (11 = AND with setflags), N requests inversion of the shifted operand
+   (BIC/ORN/EON/BICS forms); sf == 0 with imm6<5> set is reserved. *)
+let decodeLogicalShiftedRegister ((Vector [sf;_;_;B0;B1;B0;B1;B0;_;_;N;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__855) =
+ let opc = slice_raw v__855 (1:ii) (2:ii) in
+ let shift = slice_raw v__855 (8:ii) (9:ii) in
+ let Rm = slice_raw v__855 (11:ii) (15:ii) in
+ let imm6 = slice_raw v__855 (16:ii) (21:ii) in
+ let Rn = slice_raw v__855 (22:ii) (26:ii) in
+ let Rd = slice_raw v__855 (27:ii) (31:ii) in
+ let d = UInt_reg (reset_vector_start Rd) in
+ let n = UInt_reg (reset_vector_start Rn) in
+ let m = UInt_reg (reset_vector_start Rm) in
+ let datasize =
+ if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then (64:ii)
+ else (32:ii) in
+ let setflags = B0 in
+ let op = LogicalOp_AND in
+ let (op, setflags) =
+ match opc with
+ | Vector [B0;B0] _ _ ->
+ let op = LogicalOp_AND in
+ let setflags = B0 in
+ (op,setflags)
+ | Vector [B0;B1] _ _ ->
+ let op = LogicalOp_ORR in
+ let setflags = B0 in
+ (op,setflags)
+ | Vector [B1;B0] _ _ ->
+ let op = LogicalOp_EOR in
+ let setflags = B0 in
+ (op,setflags)
+ | Vector [B1;B1] _ _ ->
+ let op = LogicalOp_AND in
+ let setflags = B1 in
+ (op,setflags)
+ end in
+ (if bitU_to_bool
+ ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+ (eq (match (access imm6 (5:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+ then ReservedValue ()
+ else return ()) >>
+ let shift_type = DecodeShift (reset_vector_start (set_vector_start 1 shift)) in
+ let shift_amount = UInt (reset_vector_start (set_vector_start_to_length imm6)) in
+ let invert = eq (match N with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+ return (Just (LogicalShiftedRegister (d,n,m,datasize,setflags,op,shift_type,shift_amount,invert)))
+
+(* Decode A64 "data-processing (1 source)" instructions.
+   First pattern: RBIT/REV16/REV32/REV64 (opc selects the RevOp; the
+   REV64 case is only legal with sf=1, otherwise UnallocatedEncoding).
+   Second pattern: CLZ/CLS selected by the `op` bit.
+   The initial `let op = match toNatural (0:ii) ...` always takes its
+   0 branch and is then shadowed by the real opc match -- it is dead
+   scaffolding produced by the Sail->Lem translation, kept verbatim. *)
+let decodeData1Source = function
+  | ((Vector [sf;B1;B0;B1;B1;B0;B1;B0;B1;B1;B0;B0;B0;B0;B0;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__840) ->
+    let opc = slice_raw v__840 (20:ii) (21:ii) in
+    let Rn = slice_raw v__840 (22:ii) (26:ii) in
+    let Rd = slice_raw v__840 (27:ii) (31:ii) in
+    let d = UInt_reg (reset_vector_start Rd) in
+    let n = UInt_reg (reset_vector_start Rn) in
+    let datasize =
+      if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+      then (64:ii)
+      else (32:ii) in
+    let op =
+      match toNatural (0:ii) with
+      | (0:nn) -> RevOp_RBIT
+      | (1:nn) -> RevOp_REV16
+      | (2:nn) -> RevOp_REV32
+      | (3:nn) -> RevOp_REV64
+      end in
+    match opc with
+    | Vector [B0;B0] _ _ -> return RevOp_RBIT
+    | Vector [B0;B1] _ _ -> return RevOp_REV16
+    | Vector [B1;B0] _ _ -> return RevOp_REV32
+    | Vector [B1;B1] _ _ ->
+      (* REV64 requires the 64-bit form (sf=1). *)
+      (if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+       then UnallocatedEncoding ()
+       else return ()) >>
+      return RevOp_REV64
+    end >>= fun op ->
+    return (Just (Reverse (d,n,datasize,op)))
+  | ((Vector [sf;B1;B0;B1;B1;B0;B1;B0;B1;B1;B0;B0;B0;B0;B0;B0;B0;B0;B0;B1;B0;op;_;_;_;_;_;_;_;_;_;_] _ _) as v__847) ->
+    let Rn = slice_raw v__847 (22:ii) (26:ii) in
+    let Rd = slice_raw v__847 (27:ii) (31:ii) in
+    let d = UInt_reg (reset_vector_start Rd) in
+    let n = UInt_reg (reset_vector_start Rn) in
+    let datasize =
+      if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+      then (64:ii)
+      else (32:ii) in
+    let opcode =
+      if bitU_to_bool (eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+      then CountOp_CLZ
+      else CountOp_CLS in
+    return (Just (CountLeading (d,n,datasize,opcode)))
+  end
+
+(* Decode A64 "unconditional branch (register)" instructions.
+   First pattern: BR/BLR/RET selected by the two `op` bits (any other
+   value is UnallocatedEncoding); second/third patterns: ERET and DRPS
+   fixed encodings.  The initial `branch_type` binding always evaluates
+   its 0 case (BranchType_CALL) and is only used on the unallocated
+   path after UnallocatedEncoding () -- dead scaffolding from the
+   Sail->Lem translation, kept verbatim. *)
+let decodeUnconditionalBranchRegister = function
+  | ((Vector [B1;B1;B0;B1;B0;B1;B1;B0;B0;_;_;B1;B1;B1;B1;B1;B0;B0;B0;B0;B0;B0;_;_;_;_;_;B0;B0;B0;B0;B0] _ _) as v__820) ->
+    let op = slice_raw v__820 (9:ii) (10:ii) in
+    let Rn = slice_raw v__820 (22:ii) (26:ii) in
+    let n = UInt_reg (reset_vector_start Rn) in
+    let branch_type =
+      match toNatural (0:ii) with
+      | (0:nn) -> BranchType_CALL
+      | (1:nn) -> BranchType_ERET
+      | (2:nn) -> BranchType_DBGEXIT
+      | (3:nn) -> BranchType_RET
+      | (4:nn) -> BranchType_JMP
+      | (5:nn) -> BranchType_EXCEPTION
+      | (6:nn) -> BranchType_UNKNOWN
+      end in
+    match op with
+    | Vector [B0;B0] _ _ -> return BranchType_JMP
+    | Vector [B0;B1] _ _ -> return BranchType_CALL
+    | Vector [B1;B0] _ _ -> return BranchType_RET
+    | _ ->
+      UnallocatedEncoding () >>
+      return branch_type
+    end >>= fun branch_type ->
+    return (Just (BranchRegister (n,branch_type)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B1;B0;B1;B0;B0;B1;B1;B1;B1;B1;B0;B0;B0;B0;B0;B0;B1;B1;B1;B1;B1;B0;B0;B0;B0;B0] _ _) as v__826) ->
+    return (Just (ExceptionReturn))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B1;B0;B1;B0;B1;B1;B1;B1;B1;B1;B0;B0;B0;B0;B0;B0;B1;B1;B1;B1;B1;B0;B0;B0;B0;B0] _ _) as v__833) ->
+    return (Just (DebugRestorePState))
+  end
+
+(* Decode A64 "data-processing (2 source)" instructions.
+   First pattern: UDIV/SDIV (o1 bit gives signedness; o1=0 -> unsigned).
+   Second pattern: LSLV/LSRV/ASRV/RORV (op2 feeds DecodeShift).
+   Third pattern: CRC32/CRC32C; sz gives the operand width
+   (size = 8 << UInt(sz)); sf=1 is only legal with sz=11, and sf=0
+   with sz=11 is unallocated -- both checks below. *)
+let decodeData2Source = function
+  | ((Vector [sf;B0;B0;B1;B1;B0;B1;B0;B1;B1;B0;_;_;_;_;_;B0;B0;B0;B0;B1;o1;_;_;_;_;_;_;_;_;_;_] _ _) as v__800) ->
+    let Rm = slice_raw v__800 (11:ii) (15:ii) in
+    let Rn = slice_raw v__800 (22:ii) (26:ii) in
+    let Rd = slice_raw v__800 (27:ii) (31:ii) in
+    let d = UInt_reg (reset_vector_start Rd) in
+    let n = UInt_reg (reset_vector_start Rn) in
+    let m = UInt_reg (reset_vector_start Rm) in
+    let datasize =
+      if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+      then (64:ii)
+      else (32:ii) in
+    let _unsigned = eq (match o1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+    return (Just (Division (d,n,m,datasize,_unsigned)))
+  | ((Vector [sf;B0;B0;B1;B1;B0;B1;B0;B1;B1;B0;_;_;_;_;_;B0;B0;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__807) ->
+    let Rm = slice_raw v__807 (11:ii) (15:ii) in
+    let op2 = slice_raw v__807 (20:ii) (21:ii) in
+    let Rn = slice_raw v__807 (22:ii) (26:ii) in
+    let Rd = slice_raw v__807 (27:ii) (31:ii) in
+    let d = UInt_reg (reset_vector_start Rd) in
+    let n = UInt_reg (reset_vector_start Rn) in
+    let m = UInt_reg (reset_vector_start Rm) in
+    let datasize =
+      if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+      then (64:ii)
+      else (32:ii) in
+    let shift_type = DecodeShift (reset_vector_start (set_vector_start 1 op2)) in
+    return (Just (Shift (d,n,m,datasize,shift_type)))
+  | ((Vector [sf;B0;B0;B1;B1;B0;B1;B0;B1;B1;B0;_;_;_;_;_;B0;B1;B0;C;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__813) ->
+    let Rm = slice_raw v__813 (11:ii) (15:ii) in
+    let sz = slice_raw v__813 (20:ii) (21:ii) in
+    let Rn = slice_raw v__813 (22:ii) (26:ii) in
+    let Rd = slice_raw v__813 (27:ii) (31:ii) in
+    let d = UInt_reg (reset_vector_start Rd) in
+    let n = UInt_reg (reset_vector_start Rn) in
+    let m = UInt_reg (reset_vector_start Rm) in
+    (* CRC32*X (sf=1) requires sz=11 ... *)
+    (if bitU_to_bool
+         ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+            (neq_vec
+               (set_vector_start_to_length sz,
+                set_vector_start_to_length (Vector [B1;B1] 1 false))))
+     then UnallocatedEncoding ()
+     else return ()) >>
+    (* ... and sz=11 requires sf=1. *)
+    (if bitU_to_bool
+         ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+            (eq_vec
+               (set_vector_start_to_length sz,
+                set_vector_start_to_length (Vector [B1;B1] 1 false))))
+     then UnallocatedEncoding ()
+     else return ()) >>
+    let size = lsl' ((8:ii),UInt (reset_vector_start (set_vector_start_to_length sz))) in
+    let crc32c = eq (match C with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+    return (Just (CRC (d,n,m,size,crc32c)))
+  end
+
+(* Decode A64 "exception generation" instructions, each carrying a 16-bit
+   immediate: SVC/HVC/SMC (GenerateExceptionEL1/2/3), BRK (DebugBreakpoint),
+   HLT (ExternalDebugBreakpoint), and DCPS<n> (DebugSwitchToExceptionLevel,
+   where target level LL=00 is unallocated). *)
+let decodeExceptionGeneration = function
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;B0;B0;B0;B1] _ _) as v__771) ->
+    let imm16 = slice_raw v__771 (11:ii) (26:ii) in
+    let imm = imm16 in
+    return (Just (GenerateExceptionEL1 (reset_vector_start imm)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;B0;B0;B1;B0] _ _) as v__776) ->
+    let imm16 = slice_raw v__776 (11:ii) (26:ii) in
+    let imm = imm16 in
+    return (Just (GenerateExceptionEL2 (reset_vector_start imm)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;B0;B0;B1;B1] _ _) as v__781) ->
+    let imm16 = slice_raw v__781 (11:ii) (26:ii) in
+    let imm = imm16 in
+    return (Just (GenerateExceptionEL3 (reset_vector_start imm)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;B0;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;B0;B0;B0;B0] _ _) as v__786) ->
+    let imm16 = slice_raw v__786 (11:ii) (26:ii) in
+    let comment = imm16 in
+    return (Just (DebugBreakpoint (reset_vector_start comment)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;B0;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;B0;B0;B0;B0] _ _) as v__791) ->
+    let imm16 = slice_raw v__791 (11:ii) (26:ii) in
+    return (Just (ExternalDebugBreakpoint))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;B0;B0;_;_] _ _) as v__796) ->
+    let imm16 = slice_raw v__796 (11:ii) (26:ii) in
+    let LL = slice_raw v__796 (30:ii) (31:ii) in
+    let target_level = LL in
+    (if bitU_to_bool
+         (eq_vec
+            (set_vector_start_to_length LL,
+             set_vector_start_to_length (Vector [B0;B0] 1 false)))
+     then UnallocatedEncoding ()
+     else return ()) >>
+    return (Just (DebugSwitchToExceptionLevel (reset_vector_start target_level)))
+  end
+
+(* Decode A64 "system" instructions:
+   1. MSR (immediate): PSTATE field (SP/DAIFSet/DAIFClr) picked by matching
+      the concatenation (^^) of op1 and op2; anything else is unallocated.
+   2. Hints: NOP/YIELD/WFE/WFI/SEV/SEVL from CRm^^op2 (unknown -> NOP).
+   3. CLREX with a 4-bit CRm immediate.
+   4. Barriers: DSB/DMB/ISB from opc, with shareability domain from
+      CRm<3:2> and access types from CRm<1:0>.
+   5/6. SYS and MRS/MSR (register) -- generic System / MoveSystemRegister
+      nodes carrying op0/op1/op2/CRn/CRm and the L (read) bit.
+   The `match toNatural (0:ii)` bindings always take their 0 case and are
+   shadowed or only used on unallocated paths -- dead scaffolding from the
+   Sail->Lem translation, kept verbatim. *)
+let decodeSystem = function
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;_;_;_;B0;B1;B0;B0;_;_;_;_;_;_;_;B1;B1;B1;B1;B1] _ _) as v__706) ->
+    let op1 = slice_raw v__706 (13:ii) (15:ii) in
+    let CRm = slice_raw v__706 (20:ii) (23:ii) in
+    let op2 = slice_raw v__706 (24:ii) (26:ii) in
+    let operand = set_vector_start 3 CRm in
+    let field' =
+      match toNatural (0:ii) with
+      | (0:nn) -> PSTATEField_DAIFSet
+      | (1:nn) -> PSTATEField_DAIFClr
+      | (2:nn) -> PSTATEField_SP
+      end in
+    match ((set_vector_start_to_length op1) ^^ (set_vector_start_to_length op2)) with
+    | ((Vector [B0;B0;B0;B1;B0;B1] _ _) as v__712) -> return PSTATEField_SP
+    | ((Vector [B0;B1;B1;B1;B1;B0] _ _) as v__715) -> return PSTATEField_DAIFSet
+    | ((Vector [B0;B1;B1;B1;B1;B1] _ _) as v__718) -> return PSTATEField_DAIFClr
+    | _ ->
+      UnallocatedEncoding () >>
+      return field'
+    end >>= fun field' ->
+    return (Just (MoveSystemImmediate (reset_vector_start operand,field')))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;B0;B1;B1;B0;B0;B1;B0;_;_;_;_;_;_;_;B1;B1;B1;B1;B1] _ _) as v__721) ->
+    let CRm = slice_raw v__721 (20:ii) (23:ii) in
+    let op2 = slice_raw v__721 (24:ii) (26:ii) in
+    let op =
+      match toNatural (0:ii) with
+      | (0:nn) -> SystemHintOp_NOP
+      | (1:nn) -> SystemHintOp_YIELD
+      | (2:nn) -> SystemHintOp_WFE
+      | (3:nn) -> SystemHintOp_WFI
+      | (4:nn) -> SystemHintOp_SEV
+      | (5:nn) -> SystemHintOp_SEVL
+      end in
+    let op =
+      match ((set_vector_start_to_length CRm) ^^ (set_vector_start_to_length op2)) with
+      | ((Vector [B0;B0;B0;B0;B0;B0;B0] _ _) as v__728) -> SystemHintOp_NOP
+      | ((Vector [B0;B0;B0;B0;B0;B0;B1] _ _) as v__731) -> SystemHintOp_YIELD
+      | ((Vector [B0;B0;B0;B0;B0;B1;B0] _ _) as v__734) -> SystemHintOp_WFE
+      | ((Vector [B0;B0;B0;B0;B0;B1;B1] _ _) as v__737) -> SystemHintOp_WFI
+      | ((Vector [B0;B0;B0;B0;B1;B0;B0] _ _) as v__740) -> SystemHintOp_SEV
+      | ((Vector [B0;B0;B0;B0;B1;B0;B1] _ _) as v__743) -> SystemHintOp_SEVL
+      | _ -> SystemHintOp_NOP
+      end in
+    return (Just (Hint op))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;B0;B1;B1;B0;B0;B1;B1;_;_;_;_;B0;B1;B0;B1;B1;B1;B1;B1] _ _) as v__746) ->
+    let CRm = slice_raw v__746 (20:ii) (23:ii) in
+    let imm = unsigned (reset_vector_start CRm) in
+    return (Just (ClearExclusiveMonitor imm))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;B0;B0;B0;B0;B1;B1;B0;B0;B1;B1;_;_;_;_;B1;_;_;B1;B1;B1;B1;B1] _ _) as v__754) ->
+    let CRm = slice_raw v__754 (20:ii) (23:ii) in
+    let opc = slice_raw v__754 (25:ii) (26:ii) in
+    let op =
+      match toNatural (0:ii) with
+      | (0:nn) -> MemBarrierOp_DSB
+      | (1:nn) -> MemBarrierOp_DMB
+      | (2:nn) -> MemBarrierOp_ISB
+      end in
+    let domain =
+      match toNatural (0:ii) with
+      | (0:nn) -> MBReqDomain_Nonshareable
+      | (1:nn) -> MBReqDomain_InnerShareable
+      | (2:nn) -> MBReqDomain_OuterShareable
+      | (3:nn) -> MBReqDomain_FullSystem
+      end in
+    let types =
+      match toNatural (0:ii) with
+      | (0:nn) -> MBReqTypes_Reads
+      | (1:nn) -> MBReqTypes_Writes
+      | (2:nn) -> MBReqTypes_All
+      end in
+    match opc with
+    | Vector [B0;B0] _ _ -> return MemBarrierOp_DSB
+    | Vector [B0;B1] _ _ -> return MemBarrierOp_DMB
+    | Vector [B1;B0] _ _ -> return MemBarrierOp_ISB
+    | _ ->
+      UnallocatedEncoding () >>
+      return op
+    end >>= fun op ->
+    (* Shareability domain from CRm<3:2>. *)
+    let domain =
+      match (slice CRm (3:ii) (2:ii)) with
+      | Vector [B0;B0] _ _ -> MBReqDomain_OuterShareable
+      | Vector [B0;B1] _ _ -> MBReqDomain_Nonshareable
+      | Vector [B1;B0] _ _ -> MBReqDomain_InnerShareable
+      | Vector [B1;B1] _ _ -> MBReqDomain_FullSystem
+      end in
+    (* Access types from CRm<1:0>; 00 widens to full-system / all. *)
+    let (domain, types) =
+      match (slice CRm (1:ii) (0:ii)) with
+      | Vector [B0;B1] _ _ ->
+        let types = MBReqTypes_Reads in
+        (domain,types)
+      | Vector [B1;B0] _ _ ->
+        let types = MBReqTypes_Writes in
+        (domain,types)
+      | Vector [B1;B1] _ _ ->
+        let types = MBReqTypes_All in
+        (domain,types)
+      | _ ->
+        let types = MBReqTypes_All in
+        let domain = MBReqDomain_FullSystem in
+        (domain,types)
+      end in
+    return (Just (Barrier (op,domain,types)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;L;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__762) ->
+    let op1 = slice_raw v__762 (13:ii) (15:ii) in
+    let CRn = slice_raw v__762 (16:ii) (19:ii) in
+    let CRm = slice_raw v__762 (20:ii) (23:ii) in
+    let op2 = slice_raw v__762 (24:ii) (26:ii) in
+    let Rt = slice_raw v__762 (27:ii) (31:ii) in
+    let t = UInt_reg (reset_vector_start Rt) in
+    let sys_op0 = (1:ii) in
+    let sys_op1 = UInt (reset_vector_start (set_vector_start_to_length op1)) in
+    let sys_op2 = UInt (reset_vector_start (set_vector_start_to_length op2)) in
+    let sys_crn = UInt (reset_vector_start (set_vector_start_to_length CRn)) in
+    let sys_crm = UInt (reset_vector_start (set_vector_start_to_length CRm)) in
+    let has_result = eq (match L with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+    return (Just (System (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,has_result)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;L;B1;o0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__766) ->
+    let op1 = slice_raw v__766 (13:ii) (15:ii) in
+    let CRn = slice_raw v__766 (16:ii) (19:ii) in
+    let CRm = slice_raw v__766 (20:ii) (23:ii) in
+    let op2 = slice_raw v__766 (24:ii) (26:ii) in
+    let Rt = slice_raw v__766 (27:ii) (31:ii) in
+    let t = UInt_reg (reset_vector_start Rt) in
+    let sys_op0 =
+      (2:ii) + (UInt (reset_vector_start (set_vector_start_to_length (Vector [o0] 0 false)))) in
+    let sys_op1 = UInt (reset_vector_start (set_vector_start_to_length op1)) in
+    let sys_op2 = UInt (reset_vector_start (set_vector_start_to_length op2)) in
+    let sys_crn = UInt (reset_vector_start (set_vector_start_to_length CRn)) in
+    let sys_crm = UInt (reset_vector_start (set_vector_start_to_length CRm)) in
+    let read = eq (match L with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+    return (Just (MoveSystemRegister (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,read)))
+  end
+
+(* Arithmetic shift right with carry-out (ASL ASR_C): sign-extends x by
+   `shift` extra bits, takes the top length(x) bits of the result, and
+   returns the pair (result, carry_out) where carry_out is the last bit
+   shifted out (bit shift-1 of the extended value).  Requires shift >= 1. *)
+let ASR_C (x, shift) =
+  let extended_x =
+    set_vector_start_to_length
+      (SignExtend (shift + (length x),reset_vector_start (set_vector_start_to_length x))) in
+  let result =
+    set_vector_start_to_length
+      (slice (set_vector_start_to_length extended_x)
+         ((shift + (length (reset_vector_start (set_vector_start_to_length x)))) - (1:ii)) shift) in
+  let carry_out = access (set_vector_start_to_length extended_x) (shift - (1:ii)) in
+  (set_vector_start_to_length result,carry_out)
+
+(* Arithmetic shift right (ASL ASR): returns x unchanged when shift = 0,
+   otherwise delegates to ASR_C and discards the carry.  The initial
+   `result` binding is never read -- dead scaffolding from the Sail->Lem
+   translation, kept verbatim. *)
+let ASR (x, shift) =
+  let result = set_vector_start_to_length (to_vec_dec (length x,(0:ii))) in
+  if bitU_to_bool (eq_range (shift, (0:ii)))
+  then set_vector_start_to_length x
+  else
+    let (result', _) =
+      match (ASR_C (reset_vector_start (set_vector_start_to_length x),shift)) with
+      | (v0v', v1v') -> (v0v',v1v')
+      end in
+    set_vector_start_to_length result'
+
+(* Logical shift right (ASL LSR): returns x unchanged when shift = 0,
+   otherwise delegates to LSR_C (defined elsewhere) and discards the
+   carry.  The initial `result` binding is never read -- dead scaffolding
+   from the Sail->Lem translation, kept verbatim. *)
+let LSR (x, shift) =
+  let result = set_vector_start_to_length (to_vec_dec (length x,(0:ii))) in
+  if bitU_to_bool (eq_range (shift, (0:ii)))
+  then set_vector_start_to_length x
+  else
+    let (result', _) =
+      match (LSR_C (reset_vector_start (set_vector_start_to_length x),shift)) with
+      | (v0v', v1v') -> (v0v',v1v')
+      end in
+    set_vector_start_to_length result'
+
+(* Reset general-purpose registers X0..X30: write each one a 64-bit vector
+   built from UNKNOWN (defined as 0 in this model) via the monadic loop
+   foreachM_inc over indices 0..30 step 1. *)
+let AArch64_ResetGeneralRegisters () =
+  (foreachM_inc ((0:ii),(30:ii),(1:ii)) ()
+     (fun i _ ->
+        wX (i,reset_vector_start (set_vector_start_to_length (to_vec_dec ((64:ii),UNKNOWN))))))
+
+(* Reset SIMD/FP registers V0..V31: write each one a 128-bit vector built
+   from UNKNOWN (0 in this model), mirroring AArch64_ResetGeneralRegisters. *)
+let AArch64_ResetSIMDFPRegisters () =
+  (foreachM_inc ((0:ii),(31:ii),(1:ii)) ()
+     (fun i _ ->
+        wV (i,reset_vector_start (set_vector_start_to_length (to_vec_dec ((128:ii),UNKNOWN))))))
+
+(* Footprint alias for the wMem' primitive -- presumably the instruction
+   footprint of a big-endian write; confirm in ArmV8_extras_embed_sequential. *)
+let wMem'IFP = BigEndianIFP
+
+(* True iff the address descriptor carries a fault, i.e. its fault-record
+   type is anything other than Fault_None. *)
+let IsFault addrdesc = neq (addrdesc.AddressDescriptor_fault.FaultRecord_type', Fault_None)
+
+(* ASL Extend: widen x to N' bits, zero-extending when `unsigned` is set
+   and sign-extending otherwise. *)
+let Extend (N', x, unsigned) =
+  if bitU_to_bool unsigned
+  then
+    set_vector_start_to_length (ZeroExtend (N',reset_vector_start (set_vector_start_to_length x)))
+  else
+    set_vector_start_to_length (SignExtend (N',reset_vector_start (set_vector_start_to_length x)))
+
+(* Accumulate one memory read into a read buffer.  If the buffer is empty
+   (size = 0) it is (re)initialised from this access; otherwise the access
+   must match the buffered acctype/exclusive flags and be contiguous with
+   the buffered range (buffered address + buffered size = new address,
+   checked via the 64-bit add below), and the buffered size grows by `size`.
+   The assert' calls abort the model on a mismatch. *)
+let _rMem (read_buffer, desc, size, acctype, exclusive) =
+  if bitU_to_bool (eq_range (read_buffer.read_buffer_type_size, (0:ii)))
+  then
+    <| read_buffer_type_acctype = acctype;
+       read_buffer_type_exclusive = exclusive;
+       read_buffer_type_address = desc.AddressDescriptor_paddress.FullAddress_physicaladdress;
+       read_buffer_type_size = size |>
+  else
+    let _ = assert' (eq (read_buffer.read_buffer_type_acctype, acctype)) (Nothing) in
+    let _ = assert' (eq_bit (read_buffer.read_buffer_type_exclusive, exclusive)) (Nothing) in
+    let _ =
+      assert' (eq_vec
+                 (set_vector_start_to_length
+                    (set_vector_start 63
+                       (add_VIV
+                          (reset_vector_start (set_vector_start_to_length
+                                                 read_buffer.read_buffer_type_address))
+                          read_buffer.read_buffer_type_size)),
+                  set_vector_start_to_length
+                    desc.AddressDescriptor_paddress.FullAddress_physicaladdress)) (Nothing) in
+    <|read_buffer with read_buffer_type_size = (read_buffer.read_buffer_type_size + size)|>
+
+let CheckSPAlignmentIFP = PSTATE_ELfp :: rSPIFP
+
+(* Generic accessor for the Secure Configuration Register: asserts EL3 is
+   implemented, then selects the AArch32 SCR when the highest EL uses
+   AArch32 and SCR_EL3 otherwise. *)
+let SCR_GEN () =
+  let _ = assert' (HaveEL (reset_vector_start EL3)) (Nothing) in
+  if bitU_to_bool (HighestELUsingAArch32 ())
+  then SCR
+  else SCR_EL3
+
+(* Build a Fault_Alignment fault record.  The ipaddress/level/extflag/
+   s2fs1walk fields are don't-cares filled from UNKNOWN (0 in this model),
+   matching the ASL "bits(48) UNKNOWN" idiom. *)
+let AArch64_AlignmentFault (acctype, iswrite, secondstage) =
+  let ipaddress = to_vec_dec ((48:ii),UNKNOWN) in
+  let level = UNKNOWN in
+  let extflag = if bitU_to_bool (is_one UNKNOWN) then B1 else B0 in
+  let s2fs1walk = if bitU_to_bool (is_one UNKNOWN) then B1 else B0 in
+  AArch64_CreateFaultRecord
+    (Fault_Alignment,
+     reset_vector_start ipaddress,
+     level,
+     acctype,
+     iswrite,
+     extflag,
+     secondstage,
+     s2fs1walk)
+
+(* Build the "no fault" record (Fault_None) with don't-care fields filled
+   from UNKNOWN (0 in this model) and AccType_NORMAL as the access type. *)
+let AArch64_NoFault () =
+  let ipaddress = to_vec_dec ((48:ii),UNKNOWN) in
+  let level = UNKNOWN in
+  let acctype = AccType_NORMAL in
+  let iswrite = if bitU_to_bool (is_one UNKNOWN) then B1 else B0 in
+  let extflag = if bitU_to_bool (is_one UNKNOWN) then B1 else B0 in
+  let secondstage = B0 in
+  let s2fs1walk = B0 in
+  AArch64_CreateFaultRecord
+    (Fault_None,
+     reset_vector_start ipaddress,
+     level,
+     acctype,
+     iswrite,
+     extflag,
+     secondstage,
+     s2fs1walk)
+
+(* Decode B.cond: the branch offset is imm19 with two zero bits appended
+   (word-aligned, via the ^^ concatenation) then sign-extended to 64 bits;
+   the 4-bit condition comes from the low bits of the word. *)
+let decodeConditionalBranchImmediate ((Vector [B0;B1;B0;B1;B0;B1;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;B0;_;_;_;_] _ _) as v__702) =
+  let imm19 = slice_raw v__702 (8:ii) (26:ii) in
+  let _cond = slice_raw v__702 (28:ii) (31:ii) in
+  let offset =
+    SignExtend
+      ((64:ii),
+       reset_vector_start (set_vector_start_to_length
+                             ((set_vector_start_to_length imm19) ^^
+                                (set_vector_start_to_length (Vector [B0;B0] 1 false))))) in
+  let condition = set_vector_start 3 _cond in
+  Just (BranchConditional (reset_vector_start offset,reset_vector_start condition))
+
+(* Accumulate one memory write into a write buffer (dual of _rMem).  An
+   empty buffer (size = 0) is (re)initialised from this access, with the
+   value zero-extended to 128 bits.  Otherwise the access must match the
+   buffered acctype/exclusive flags and be contiguous with the buffered
+   range; the new value is concatenated (^^) onto the previously buffered
+   low size*8 bits and the whole is re-zero-extended to 128 bits, and the
+   buffered size grows by `size`.  assert' aborts on a mismatch. *)
+let _wMem (write_buffer, desc, size, acctype, exclusive, value) =
+  if bitU_to_bool (eq_range (write_buffer.write_buffer_type_size, (0:ii)))
+  then
+    <| write_buffer_type_acctype = acctype;
+       write_buffer_type_exclusive = exclusive;
+       write_buffer_type_address = desc.AddressDescriptor_paddress.FullAddress_physicaladdress;
+       write_buffer_type_value =
+      (ZeroExtend ((128:ii),reset_vector_start (set_vector_start_to_length value)));
+       write_buffer_type_size = size |>
+  else
+    let _ = assert' (eq (write_buffer.write_buffer_type_acctype, acctype)) (Nothing) in
+    let _ = assert' (eq_bit (write_buffer.write_buffer_type_exclusive, exclusive)) (Nothing) in
+    let _ =
+      assert' (eq_vec
+                 (set_vector_start_to_length
+                    (set_vector_start 63
+                       (add_VIV
+                          (reset_vector_start (set_vector_start_to_length
+                                                 write_buffer.write_buffer_type_address))
+                          write_buffer.write_buffer_type_size)),
+                  set_vector_start_to_length
+                    desc.AddressDescriptor_paddress.FullAddress_physicaladdress)) (Nothing) in
+    <|write_buffer with
+      write_buffer_type_value =
+        (ZeroExtend
+           ((128:ii),
+            reset_vector_start (set_vector_start_to_length
+                                  ((set_vector_start_to_length value) ^^
+                                     (set_vector_start_to_length
+                                        (slice (set_vector_start_to_length write_buffer.write_buffer_type_value)
+                                           ((write_buffer.write_buffer_type_size * (8:ii)) - (1:ii)) (0:ii))))))); write_buffer_type_size =
+        (write_buffer.write_buffer_type_size + size)|>
+
+(* ASL HaveAArch32EL: does exception level `el` support AArch32?
+   False if the EL is not implemented or no AArch32 support exists at all;
+   true if the highest EL uses AArch32 (then all lower ELs do) or el = EL0;
+   otherwise falls back to an IMPLEMENTATION_DEFINED model choice. *)
+let HaveAArch32EL el =
+  if bitU_to_bool (~(HaveEL (reset_vector_start el)))
+  then B0
+  else
+    if bitU_to_bool (~(HaveAnyAArch32 ()))
+    then B0
+    else
+      if bitU_to_bool (HighestELUsingAArch32 ())
+      then B1
+      else
+        if bitU_to_bool (eq_vec (set_vector_start_to_length el, set_vector_start_to_length EL0))
+        then B1
+        else IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_HaveAArch32EL
+
+(* Decode B / BL: the `op` bit selects BranchType_CALL (BL) vs
+   BranchType_JMP (B); the offset is imm26 with two zero bits appended
+   then sign-extended to 64 bits. *)
+let decodeUnconditionalBranchImmediate ((Vector [op;B0;B0;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__699) =
+  let imm26 = slice_raw v__699 (6:ii) (31:ii) in
+  let branch_type =
+    if bitU_to_bool (eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then BranchType_CALL
+    else BranchType_JMP in
+  let offset =
+    SignExtend
+      ((64:ii),
+       reset_vector_start (set_vector_start_to_length
+                             ((set_vector_start_to_length imm26) ^^
+                                (set_vector_start_to_length (Vector [B0;B0] 1 false))))) in
+  Just (BranchImmediate (branch_type,reset_vector_start offset))
+
+(* Decode CBZ / CBNZ: sf gives 32/64-bit datasize, op=0 means
+   branch-if-zero (CBZ); offset = SignExtend(imm19 : '00', 64). *)
+let decodeCompareBranchImmediate ((Vector [sf;B0;B1;B1;B0;B1;B0;op;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__695) =
+  let imm19 = slice_raw v__695 (8:ii) (26:ii) in
+  let Rt = slice_raw v__695 (27:ii) (31:ii) in
+  let t = UInt_reg (reset_vector_start Rt) in
+  let datasize =
+    if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then (64:ii)
+    else (32:ii) in
+  let iszero = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+  let offset =
+    SignExtend
+      ((64:ii),
+       reset_vector_start (set_vector_start_to_length
+                             ((set_vector_start_to_length imm19) ^^
+                                (set_vector_start_to_length (Vector [B0;B0] 1 false))))) in
+  Just (CompareAndBranch (t,datasize,iszero,reset_vector_start offset))
+
+(* Decode ADR / ADRP.  op=1 is ADRP (page=true): the immediate is
+   immhi:immlo followed by 12 replicated zero bits (4KB page granule),
+   sign-extended to 64 bits; ADR uses just immhi:immlo.  The initial
+   zero `imm` is shadowed immediately -- generated-code artifact. *)
+let decodePCRelAddressing ((Vector [op;_;_;B1;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__692) =
+  let immlo = slice_raw v__692 (1:ii) (2:ii) in
+  let immhi = slice_raw v__692 (8:ii) (26:ii) in
+  let Rd = slice_raw v__692 (27:ii) (31:ii) in
+  let d = UInt_reg (reset_vector_start Rd) in
+  let page = eq (match op with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)) in
+  let imm = to_vec_dec ((64:ii),(0:ii)) in
+  let imm =
+    if bitU_to_bool page
+    then
+      SignExtend
+        ((64:ii),
+         reset_vector_start (set_vector_start_to_length
+                               ((set_vector_start_to_length immhi) ^^
+                                  (set_vector_start_to_length
+                                     ((set_vector_start_to_length immlo) ^^
+                                        (set_vector_start_to_length
+                                           (duplicate_bits (set_vector_start_to_length (Vector [B0] 0 false), (12:ii)))))))))
+    else
+      SignExtend
+        ((64:ii),
+         reset_vector_start (set_vector_start_to_length
+                               ((set_vector_start_to_length immhi) ^^ (set_vector_start_to_length immlo)))) in
+  Just (Address (d,page,reset_vector_start imm))
+
+(* Decode TBZ / TBNZ: the tested bit index is b5:b40 (6 bits, so b5=1
+   implies the 64-bit form), op gives the required bit value (0 = TBZ),
+   and offset = SignExtend(imm14 : '00', 64). *)
+let decodeTestBranchImmediate ((Vector [b5;B0;B1;B1;B0;B1;B1;op;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__688) =
+  let b40 = slice_raw v__688 (8:ii) (12:ii) in
+  let imm14 = slice_raw v__688 (13:ii) (26:ii) in
+  let Rt = slice_raw v__688 (27:ii) (31:ii) in
+  let t = UInt_reg (reset_vector_start Rt) in
+  let datasize =
+    if bitU_to_bool (eq (match b5 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then (64:ii)
+    else (32:ii) in
+  let bit_pos =
+    UInt
+      (reset_vector_start (set_vector_start_to_length
+                             ((set_vector_start_to_length (Vector [b5] 0 false)) ^^ (set_vector_start_to_length b40)))) in
+  let bit_val = op in
+  let offset =
+    SignExtend
+      ((64:ii),
+       reset_vector_start (set_vector_start_to_length
+                             ((set_vector_start_to_length imm14) ^^
+                                (set_vector_start_to_length (Vector [B0;B0] 1 false))))) in
+  Just (TestBitAndBranch (t,datasize,bit_pos,bit_val,reset_vector_start offset))
+
+(* Decode "load/store register (register offset)".  option<1>=0 is
+   unallocated; the extend type comes from DecodeRegExtend(option) and the
+   shift is `scale` (= UInt(size)) when S=1, else 0.  Delegates to
+   sharedDecodeLoadRegister with wback/postindex both false. *)
+let decodeLoadStoreRegisterRegisterOffset ((Vector [_;_;B1;B1;B1;B0;B0;B0;_;_;B1;_;_;_;_;_;_;_;_;S;B1;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__681) =
+  let size = slice_raw v__681 (0:ii) (1:ii) in
+  let opc = slice_raw v__681 (8:ii) (9:ii) in
+  let Rm = slice_raw v__681 (11:ii) (15:ii) in
+  let option_v = slice_raw v__681 (16:ii) (18:ii) in
+  let Rn = slice_raw v__681 (22:ii) (26:ii) in
+  let Rt = slice_raw v__681 (27:ii) (31:ii) in
+  let wback = B0 in
+  let postindex = B0 in
+  let scale = UInt (reset_vector_start (set_vector_start_to_length size)) in
+  (if bitU_to_bool
+       (eq
+          (match (access (set_vector_start_to_length option_v) (1:ii)) with
+           | B0 -> (0:ii)
+           | B1 -> (1:ii)
+           end,
+           (0:ii)))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  let extend_type = DecodeRegExtend (reset_vector_start (set_vector_start 2 option_v)) in
+  let shift =
+    if bitU_to_bool (eq (match S with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then scale
+    else (0:ii) in
+  sharedDecodeLoadRegister
+    (reset_vector_start Rn,
+     reset_vector_start Rt,
+     reset_vector_start Rm,
+     reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start 1 size),
+     wback,
+     postindex,
+     scale,
+     extend_type,
+     shift)
+
+(* Decode LDR (literal) / LDRSW (literal) / PRFM (literal).  opc selects
+   size and signedness (00: 32-bit, 01: 64-bit, 10: signed 32-bit, 11:
+   prefetch); the final offset is SignExtend(imm19 : '00', 64).  The
+   initial `offset` of 4 is shadowed before use -- generated artifact. *)
+let decodeLoadRegisterLiteral ((Vector [_;_;B0;B1;B1;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__677) =
+  let opc = slice_raw v__677 (0:ii) (1:ii) in
+  let imm19 = slice_raw v__677 (8:ii) (26:ii) in
+  let Rt = slice_raw v__677 (27:ii) (31:ii) in
+  let t = UInt_reg (reset_vector_start Rt) in
+  let memop = MemOp_LOAD in
+  let _signed = B0 in
+  let size = (0:ii) in
+  let offset = to_vec_dec ((64:ii),(4:ii)) in
+  let (memop, _signed, size) =
+    match opc with
+    | Vector [B0;B0] _ _ ->
+      let size = (4:ii) in
+      (memop,_signed,size)
+    | Vector [B0;B1] _ _ ->
+      let size = (8:ii) in
+      (memop,_signed,size)
+    | Vector [B1;B0] _ _ ->
+      let size = (4:ii) in
+      let _signed = B1 in
+      (memop,_signed,size)
+    | Vector [B1;B1] _ _ ->
+      let memop = MemOp_PREFETCH in
+      (memop,_signed,size)
+    end in
+  let offset =
+    SignExtend
+      ((64:ii),
+       reset_vector_start (set_vector_start_to_length
+                             ((set_vector_start_to_length imm19) ^^
+                                (set_vector_start_to_length (Vector [B0;B0] 1 false))))) in
+  let datasize = size * (8:ii) in
+  Just (LoadLiteral (t,memop,_signed,size,reset_vector_start offset,datasize))
+
+(* Rotate right with carry-out (ASL ROR_C): rotate amount m = shift mod N,
+   result = LSR(x,m) OR LSL(x,N-m); carry_out is the new top bit (bit N-1
+   of the result).  Requires shift != 0 in the ASL original. *)
+let ROR_C (x, shift) =
+  let N = length (reset_vector_start (set_vector_start_to_length x)) in
+  let m = modulo shift N in
+  let result =
+    set_vector_start_to_length
+      (bitwise_or
+         (set_vector_start_to_length (LSR (reset_vector_start (set_vector_start_to_length x),m)),
+          set_vector_start_to_length (LSL (reset_vector_start (set_vector_start_to_length x),N - m)))) in
+  let carry_out = access (set_vector_start_to_length result) (N - (1:ii)) in
+  (set_vector_start_to_length result,carry_out)
+
+(* Dispatch system-space encodings: the first two patterns route
+   implementation-defined encodings to decodeImplementationDefined,
+   everything else falls through to decodeSystem. *)
+let decodeSystemImplementationDefined machineCode =
+  match machineCode with
+  | ((Vector [_;_;_;_;_;_;_;_;_;_;_;B0;B1;_;_;_;B1;_;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__647) ->
+    return (decodeImplementationDefined (reset_vector_start machineCode))
+  | ((Vector [_;_;_;_;_;_;_;_;_;_;_;B1;B1;_;_;_;B1;_;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__657) ->
+    return (decodeImplementationDefined (reset_vector_start machineCode))
+  | ((Vector [_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__667) ->
+    decodeSystem (reset_vector_start machineCode)
+  end
+
+(* Decode "load/store register (immediate post-indexed)": writeback and
+   post-index both set, 9-bit signed immediate sign-extended to 64 bits,
+   normal access type; delegates to sharedDecodeLoadImmediate. *)
+let decodeLoadStoreRegisterImmediatePostIndexed ((Vector [_;_;B1;B1;B1;B0;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B0;B1;_;_;_;_;_;_;_;_;_;_] _ _) as v__641) =
+  let size = slice_raw v__641 (0:ii) (1:ii) in
+  let opc = slice_raw v__641 (8:ii) (9:ii) in
+  let imm9 = slice_raw v__641 (11:ii) (19:ii) in
+  let Rn = slice_raw v__641 (22:ii) (26:ii) in
+  let Rt = slice_raw v__641 (27:ii) (31:ii) in
+  let wback = B1 in
+  let postindex = B1 in
+  let scale = UInt (reset_vector_start (set_vector_start_to_length size)) in
+  let offset = SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length imm9)) in
+  sharedDecodeLoadImmediate
+    (reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start 1 size),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     wback,
+     postindex,
+     scale,
+     reset_vector_start offset,
+     AccType_NORMAL,
+     B0)
+
+(* Decode "load/store register (immediate pre-indexed)": writeback set,
+   post-index clear, 9-bit signed immediate sign-extended to 64 bits,
+   normal access type; delegates to sharedDecodeLoadImmediate. *)
+let decodeLoadStoreRegisterImmediatePreIndexed ((Vector [_;_;B1;B1;B1;B0;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B1;B1;_;_;_;_;_;_;_;_;_;_] _ _) as v__635) =
+  let size = slice_raw v__635 (0:ii) (1:ii) in
+  let opc = slice_raw v__635 (8:ii) (9:ii) in
+  let imm9 = slice_raw v__635 (11:ii) (19:ii) in
+  let Rn = slice_raw v__635 (22:ii) (26:ii) in
+  let Rt = slice_raw v__635 (27:ii) (31:ii) in
+  let wback = B1 in
+  let postindex = B0 in
+  let scale = UInt (reset_vector_start (set_vector_start_to_length size)) in
+  let offset = SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length imm9)) in
+  sharedDecodeLoadImmediate
+    (reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start 1 size),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     wback,
+     postindex,
+     scale,
+     reset_vector_start offset,
+     AccType_NORMAL,
+     B0)
+
+(* Decode LDTR/STTR-family "load/store register (unprivileged)": no
+   writeback, 9-bit signed immediate, AccType_UNPRIV access type;
+   delegates to sharedDecodeLoadImmediate. *)
+let decodeLoadStoreRegisterUnprivileged ((Vector [_;_;B1;B1;B1;B0;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B1;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__629) =
+  let size = slice_raw v__629 (0:ii) (1:ii) in
+  let opc = slice_raw v__629 (8:ii) (9:ii) in
+  let imm9 = slice_raw v__629 (11:ii) (19:ii) in
+  let Rn = slice_raw v__629 (22:ii) (26:ii) in
+  let Rt = slice_raw v__629 (27:ii) (31:ii) in
+  let wback = B0 in
+  let postindex = B0 in
+  let scale = UInt (reset_vector_start (set_vector_start_to_length size)) in
+  let offset = SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length imm9)) in
+  sharedDecodeLoadImmediate
+    (reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start 1 size),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     wback,
+     postindex,
+     scale,
+     reset_vector_start offset,
+     AccType_UNPRIV,
+     B0)
+
+(* Decode LDUR/STUR-family "load/store register (unscaled immediate)":
+   no writeback, 9-bit signed immediate, normal access; the trailing B1
+   flag passed to sharedDecodeLoadImmediate distinguishes this form from
+   the pre/post-indexed ones above. *)
+let decodeLoadStoreRegisterUnscaledImmediate ((Vector [_;_;B1;B1;B1;B0;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B0;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__623) =
+  let size = slice_raw v__623 (0:ii) (1:ii) in
+  let opc = slice_raw v__623 (8:ii) (9:ii) in
+  let imm9 = slice_raw v__623 (11:ii) (19:ii) in
+  let Rn = slice_raw v__623 (22:ii) (26:ii) in
+  let Rt = slice_raw v__623 (27:ii) (31:ii) in
+  let wback = B0 in
+  let postindex = B0 in
+  let scale = UInt (reset_vector_start (set_vector_start_to_length size)) in
+  let offset = SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length imm9)) in
+  sharedDecodeLoadImmediate
+    (reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start 1 size),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     wback,
+     postindex,
+     scale,
+     reset_vector_start offset,
+     AccType_NORMAL,
+     B1)
+
+(* Decode "load/store register (unsigned immediate)": no writeback; the
+   12-bit immediate is zero-extended to 64 bits and scaled left by
+   `scale` (= UInt(size)) via LSL; delegates to sharedDecodeLoadImmediate. *)
+let decodeLoadStoreRegisterUnsignedImmediate ((Vector [_;_;B1;B1;B1;B0;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__619) =
+  let size = slice_raw v__619 (0:ii) (1:ii) in
+  let opc = slice_raw v__619 (8:ii) (9:ii) in
+  let imm12 = slice_raw v__619 (10:ii) (21:ii) in
+  let Rn = slice_raw v__619 (22:ii) (26:ii) in
+  let Rt = slice_raw v__619 (27:ii) (31:ii) in
+  let wback = B0 in
+  let postindex = B0 in
+  let scale = UInt (reset_vector_start (set_vector_start_to_length size)) in
+  let offset =
+    LSL
+      (reset_vector_start (set_vector_start_to_length
+                             (ZeroExtend ((64:ii),reset_vector_start (set_vector_start_to_length imm12)))),
+       scale) in
+  sharedDecodeLoadImmediate
+    (reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start 1 size),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     wback,
+     postindex,
+     scale,
+     reset_vector_start offset,
+     AccType_NORMAL,
+     B1)
+
+(* Decode LDNP / STNP (no-allocate pair, offset form): L selects load vs
+   store, opc<0>=1 is unallocated, scale = 2 + opc<1> (so 32- or 64-bit
+   registers), datasize = 8 << scale, and the 7-bit immediate is
+   sign-extended then scaled by LSL.  Uses AccType_STREAM (non-temporal). *)
+let decodeLoadStoreNoAllocatePairOffset ((Vector [_;_;B1;B0;B1;B0;B0;B0;B0;L;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__614) =
+  let opc = slice_raw v__614 (0:ii) (1:ii) in
+  let imm7 = slice_raw v__614 (10:ii) (16:ii) in
+  let Rt2 = slice_raw v__614 (17:ii) (21:ii) in
+  let Rn = slice_raw v__614 (22:ii) (26:ii) in
+  let Rt = slice_raw v__614 (27:ii) (31:ii) in
+  let wback = B0 in
+  let postindex = B0 in
+  let n = UInt_reg (reset_vector_start Rn) in
+  let t = UInt_reg (reset_vector_start Rt) in
+  let t2 = UInt_reg (reset_vector_start Rt2) in
+  let acctype = AccType_STREAM in
+  let memop =
+    if bitU_to_bool (eq (match L with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then MemOp_LOAD
+    else MemOp_STORE in
+  (if bitU_to_bool (eq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  let scale =
+    (2:ii) +
+      (UInt (reset_vector_start (set_vector_start_to_length (Vector [access opc (1:ii)] 0 false)))) in
+  let datasize = lsl' ((8:ii),scale) in
+  let offset =
+    LSL
+      (reset_vector_start (set_vector_start_to_length
+                             (SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length imm7)))),
+       scale) in
+  return (Just (LoadStorePairNonTemp (wback,postindex,n,t,t2,acctype,memop,scale,datasize,reset_vector_start offset)))
+
+(* Common decode logic shared by the three LDP/STP addressing forms (offset,
+   post-indexed, pre-indexed; see the three decodeLoadStoreRegisterPair*
+   wrappers below).  Builds a LoadStorePair AST node.
+   L selects load (1) versus store (0); opc encodes signedness (opc<0>) and
+   element size (opc<1>). *)
+let sharedDecodeLoadStorePair (L, opc, imm7, Rn, Rt, Rt2, wback, postindex) =
+  let n = UInt_reg Rn in
+  let t = UInt_reg Rt in
+  let t2 = UInt_reg Rt2 in
+  let acctype = AccType_NORMAL in
+  let memop = if bitU_to_bool (eq (L, (1:ii))) then MemOp_LOAD else MemOp_STORE in
+  (* reject reserved combinations: {L,opc<0>} = 01, or opc = 11 *)
+  (if bitU_to_bool
+      ((eq_vec
+          (set_vector_start_to_length
+             (Vector [if bitU_to_bool (is_one L) then B1 else B0;access opc (0:ii)] 1 false),
+           set_vector_start_to_length (Vector [B0;B1] 1 false))) |.
+       (eq_vec
+          (set_vector_start_to_length opc,
+           set_vector_start_to_length (Vector [B1;B1] 1 false))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (* opc<0> non-zero means the loaded values are sign-extended *)
+  let _signed = neq (match (access opc (0:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+  (* scale = 2 + opc<1>, hence datasize = 8 << scale (32 or 64 bits) *)
+  let scale =
+    (2:ii) +
+    (UInt (reset_vector_start (set_vector_start_to_length (Vector [access opc (1:ii)] 0 false)))) in
+  let datasize = lsl' ((8:ii),scale) in
+  (* byte offset = SignExtend(imm7, 64) << scale *)
+  let offset =
+    LSL
+      (reset_vector_start (set_vector_start_to_length (SignExtend ((64:ii),reset_vector_start imm7))),
+       scale) in
+  return (Just (LoadStorePair (wback,postindex,n,t,t2,acctype,memop,_signed,datasize,reset_vector_start offset)))
+
+(* Top-level dispatcher for the "data processing (register)" instruction
+   group: matches the fixed opcode bits of the 32-bit machine code word and
+   forwards it to the decoder for the specific encoding class.  Some targets
+   are monadic (called directly) while others are pure (wrapped in return).
+   NOTE(review): the match is not obviously exhaustive over all 32-bit words;
+   presumably the caller only passes words belonging to this group -- confirm
+   against the generating Sail model. *)
+let decodeDataRegister machineCode =
+  match machineCode with
+  | ((Vector [_;_;_;B0;B1;B0;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__484) ->
+    decodeLogicalShiftedRegister (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B0;B1;B0;B1;B1;_;_;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__497) ->
+    decodeAddSubtractShiftedRegister (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B0;B1;B0;B1;B1;_;_;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__510) ->
+    decodeAddSubtractExtendedRegister (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B1;B1;B0;B1;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__523) ->
+    return (decodeAddSubtractWithCarry
+              (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [_;_;_;B1;B1;B0;B1;B0;B0;B1;B0;_;_;_;_;_;_;_;_;_;B0;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__536) ->
+    return (decodeConditionalCompareRegister (reset_vector_start machineCode))
+  | ((Vector [_;_;_;B1;B1;B0;B1;B0;B0;B1;B0;_;_;_;_;_;_;_;_;_;B1;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__549) ->
+    return (decodeConditionalCompareImmediate (reset_vector_start machineCode))
+  | ((Vector [_;_;_;B1;B1;B0;B1;B0;B1;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__562) ->
+    return (decodeConditionalSelect (reset_vector_start machineCode))
+  | ((Vector [_;_;_;B1;B1;B0;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__575) ->
+    return (decodeData3Source (reset_vector_start machineCode))
+  | ((Vector [_;B0;_;B1;B1;B0;B1;B0;B1;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__588) ->
+    decodeData2Source (reset_vector_start machineCode)
+  | ((Vector [_;B1;_;B1;B1;B0;B1;B0;B1;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__601) ->
+    decodeData1Source (reset_vector_start machineCode)
+  end
+
+(* Rotate-right of bit vector [x] by [shift] positions.  A zero shift returns
+   x unchanged; otherwise the rotation is delegated to ROR_C and the carry-out
+   (second tuple component) is discarded.
+   NOTE(review): the initial [result] binding of an all-zero vector is
+   shadowed on every path and never used -- a generated-code artifact, left
+   as-is. *)
+let ROR (x, shift) =
+  let result = set_vector_start_to_length (to_vec_dec (length x,(0:ii))) in
+  if bitU_to_bool (eq_range (shift, (0:ii)))
+  then set_vector_start_to_length x
+  else
+    let (result', _) =
+      match (ROR_C (reset_vector_start (set_vector_start_to_length x),shift)) with
+      | (v0v', v1v') -> (v0v',v1v')
+      end in
+    set_vector_start_to_length result'
+
+(* Alias: the memory-write input-footprint used by initial_analysis is the
+   wMem'IFP footprint defined in ArmV8_extras_embed_sequential. *)
+let wMemIFP = wMem'IFP
+
+(* Decoder wrapper for LDP/STP "register pair (signed offset)": extracts the
+   fields and delegates to sharedDecodeLoadStorePair with no writeback and no
+   post-indexing. *)
+let decodeLoadStoreRegisterPairOffset ((Vector [_;_;B1;B0;B1;B0;B0;B1;B0;L;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__479) =
+  let opc = slice_raw v__479 (0:ii) (1:ii) in
+  let imm7 = slice_raw v__479 (10:ii) (16:ii) in
+  let Rt2 = slice_raw v__479 (17:ii) (21:ii) in
+  let Rn = slice_raw v__479 (22:ii) (26:ii) in
+  let Rt = slice_raw v__479 (27:ii) (31:ii) in
+  (* offset form: base register is not written back *)
+  let wback = B0 in
+  let postindex = B0 in
+  sharedDecodeLoadStorePair
+    (match L with | B0 -> (0:ii) | B1 -> (1:ii) end,
+     reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start_to_length imm7),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     reset_vector_start Rt2,
+     wback,
+     postindex)
+
+(* Decoder wrapper for LDP/STP "register pair (post-indexed)": same field
+   extraction as the offset form, but with writeback and post-indexing both
+   enabled before delegating to sharedDecodeLoadStorePair. *)
+let decodeLoadStoreRegisterPairPostIndexed ((Vector [_;_;B1;B0;B1;B0;B0;B0;B1;L;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__474) =
+  let opc = slice_raw v__474 (0:ii) (1:ii) in
+  let imm7 = slice_raw v__474 (10:ii) (16:ii) in
+  let Rt2 = slice_raw v__474 (17:ii) (21:ii) in
+  let Rn = slice_raw v__474 (22:ii) (26:ii) in
+  let Rt = slice_raw v__474 (27:ii) (31:ii) in
+  (* post-indexed form: base is written back after the access *)
+  let wback = B1 in
+  let postindex = B1 in
+  sharedDecodeLoadStorePair
+    (match L with | B0 -> (0:ii) | B1 -> (1:ii) end,
+     reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start_to_length imm7),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     reset_vector_start Rt2,
+     wback,
+     postindex)
+
+(* Decoder wrapper for LDP/STP "register pair (pre-indexed)": writeback is
+   enabled but indexing happens before the access (postindex = 0), then the
+   shared pair decoder is invoked. *)
+let decodeLoadStoreRegisterPairPreIndexed ((Vector [_;_;B1;B0;B1;B0;B0;B1;B1;L;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__469) =
+  let opc = slice_raw v__469 (0:ii) (1:ii) in
+  let imm7 = slice_raw v__469 (10:ii) (16:ii) in
+  let Rt2 = slice_raw v__469 (17:ii) (21:ii) in
+  let Rn = slice_raw v__469 (22:ii) (26:ii) in
+  let Rt = slice_raw v__469 (27:ii) (31:ii) in
+  (* pre-indexed form: base is written back, offset applied before access *)
+  let wback = B1 in
+  let postindex = B0 in
+  sharedDecodeLoadStorePair
+    (match L with | B0 -> (0:ii) | B1 -> (1:ii) end,
+     reset_vector_start (set_vector_start 1 opc),
+     reset_vector_start (set_vector_start_to_length imm7),
+     reset_vector_start Rn,
+     reset_vector_start Rt,
+     reset_vector_start Rt2,
+     wback,
+     postindex)
+
+(* Whether execution below EL3 is in the Secure state.  If EL3 is
+   implemented, secure iff SCR_EL3.NS = 0; otherwise, if EL2 is implemented
+   the state is Non-secure (B0), else the answer is IMPLEMENTATION DEFINED. *)
+let IsSecureBelowEL3 () =
+  if bitU_to_bool (HaveEL (reset_vector_start EL3))
+  then
+    read_reg_bitfield (SCR_GEN ()) "NS" >>= fun w__0 ->
+    return (eq (match w__0 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+  else
+    return (if bitU_to_bool (HaveEL (reset_vector_start EL2))
+            then B0
+            else IMPLEMENTATION_DEFINED.IMPLEMENTATION_DEFINED_type_IsSecureBelowEL3)
+
+(* Reset sequence for the AArch64 special registers: writes UNKNOWN (0) to
+   the stack pointers, SPSRs and ELRs of EL0/EL1, and additionally of EL2 and
+   EL3 when those exception levels are implemented.
+   NOTE(review): the AArch32 SPSR_{fiq,irq,abt,und} bindings and DLR_EL0 /
+   DSPSR_EL0 below are plain local lets whose values are discarded -- they do
+   not write any register here.  Generated-code artifact; left unchanged. *)
+let AArch64_ResetSpecialRegisters () =
+  write_reg SP_EL0 (to_vec_dec ((64:ii),UNKNOWN)) >>
+  write_reg SP_EL1 (to_vec_dec ((64:ii),UNKNOWN)) >>
+  write_reg SPSR_EL1 (to_vec_dec ((32:ii),UNKNOWN)) >>
+  write_reg ELR_EL1 (to_vec_dec ((64:ii),UNKNOWN)) >>
+  (if bitU_to_bool (HaveEL (reset_vector_start EL2))
+   then
+     write_reg SP_EL2 (to_vec_dec ((64:ii),UNKNOWN)) >>
+     write_reg SPSR_EL2 (to_vec_dec ((32:ii),UNKNOWN)) >>
+     write_reg ELR_EL2 (to_vec_dec ((64:ii),UNKNOWN))
+   else return ()) >>
+  (if bitU_to_bool (HaveEL (reset_vector_start EL3))
+   then
+     write_reg SP_EL3 (to_vec_dec ((64:ii),UNKNOWN)) >>
+     write_reg SPSR_EL3 (to_vec_dec ((32:ii),UNKNOWN)) >>
+     write_reg ELR_EL3 (to_vec_dec ((64:ii),UNKNOWN))
+   else return ()) >>
+  let _ =
+    if bitU_to_bool (HaveAArch32EL (reset_vector_start EL1))
+    then
+      let SPSR_fiq = to_vec_dec ((32:ii),UNKNOWN) in
+      let SPSR_irq = to_vec_dec ((32:ii),UNKNOWN) in
+      let SPSR_abt = to_vec_dec ((32:ii),UNKNOWN) in
+      let SPSR_und = to_vec_dec ((32:ii),UNKNOWN) in
+      ()
+    else () in
+  let DLR_EL0 = to_vec_dec ((64:ii),UNKNOWN) in
+  let DSPSR_EL0 = to_vec_dec ((32:ii),UNKNOWN) in
+  return ()
+
+(* Decode of the ARM "bitmask immediate" fields (immN:imms:immr) into the
+   pair (wmask, tmask) of M'-bit masks, following the ARMv8 DecodeBitMasks
+   pseudocode.  [immediate] selects the stricter validity check used for
+   logical-immediate instructions; invalid encodings raise ReservedValue. *)
+let DecodeBitMasks (M', immN, imms, immr, immediate) =
+  let M = length (reset_vector_start (set_vector_start_to_length (to_vec_dec (M',(0:ii))))) in
+  (* this first levels binding is shadowed below -- generated artifact *)
+  let levels = to_vec_dec ((6:ii),(0:ii)) in
+  (* len = index of the highest set bit of immN:NOT(imms); it determines the
+     element size of the repeated pattern *)
+  let len =
+    match (HighestSetBit
+             (reset_vector_start (set_vector_start_to_length
+                ((set_vector_start_to_length (Vector [immN] 0 false)) ^^
+                 (set_vector_start_to_length
+                    (NOT (reset_vector_start (set_vector_start_to_length imms)))))))) with
+    | Nothing ->
+      let _ = assert' B0 (Just "DecodeBitMasks: HighestSetBit returned None") in
+      (0:ii)
+    | Just (c) -> c
+    end in
+  (if bitU_to_bool (lt (len, (1:ii)))
+   then ReservedValue ()
+   else return ()) >>
+  let _ = assert' (gteq (M, lsl' ((1:ii),len))) (Nothing) in
+  (* levels = ZeroExtend(Ones(len), 6) *)
+  let levels =
+    ZeroExtend
+      ((6:ii),
+       reset_vector_start (set_vector_start_to_length
+          (duplicate_bits (set_vector_start_to_length (Vector [B1] 0 false), len)))) in
+  (* for logical immediates, imms AND levels = levels (all-ones element) is
+     reserved *)
+  (if bitU_to_bool
+      (immediate &.
+       (eq_vec
+          (set_vector_start_to_length
+             (bitwise_and (set_vector_start_to_length imms, set_vector_start_to_length levels)),
+           set_vector_start_to_length levels)))
+   then ReservedValue ()
+   else return ()) >>
+  (* S = run length - 1, R = rotation, diff = S - R (mod element size) *)
+  let S =
+    set_vector_start 5
+      (bitwise_and (set_vector_start_to_length imms, set_vector_start_to_length levels)) in
+  let R =
+    set_vector_start 5
+      (bitwise_and (set_vector_start_to_length immr, set_vector_start_to_length levels)) in
+  let diff =
+    set_vector_start 5
+      (minus_VVV
+         (reset_vector_start (set_vector_start_to_length S))
+         (reset_vector_start (set_vector_start_to_length R))) in
+  let esize = lsl' ((1:ii),len) in
+  let d = set_vector_start 5 (slice diff (len - (1:ii)) (0:ii)) in
+  (* welem = ZeroExtend(Ones(S+1), esize); telem = ZeroExtend(Ones(d+1), esize) *)
+  let welem =
+    set_vector_start_to_length
+      (ZeroExtend
+         (esize,
+          reset_vector_start (set_vector_start_to_length
+             (duplicate_bits
+                (set_vector_start_to_length (Vector [B1] 0 false),
+                 add_VII (reset_vector_start (set_vector_start_to_length S)) (1:ii)))))) in
+  let telem =
+    set_vector_start_to_length
+      (ZeroExtend
+         (esize,
+          reset_vector_start (set_vector_start_to_length
+             (duplicate_bits
+                (set_vector_start_to_length (Vector [B1] 0 false),
+                 add_VII (reset_vector_start (set_vector_start_to_length d)) (1:ii)))))) in
+  (* wmask = Replicate(ROR(welem, R)); tmask = Replicate(telem) *)
+  let wmask =
+    Replicate
+      (M',
+       reset_vector_start (set_vector_start_to_length
+          (ROR
+             (reset_vector_start (set_vector_start_to_length welem),
+              unsigned (reset_vector_start R))))) in
+  let tmask = Replicate (M',reset_vector_start (set_vector_start_to_length telem)) in
+  return (set_vector_start_to_length wmask,set_vector_start_to_length tmask)
+
+(* Address-translation stub: translation is not modelled, so the physical
+   address equals the virtual address and the descriptor reports no fault,
+   Normal shareable memory, Non-secure.  Note this returns the record
+   directly (it is not a monadic computation); all arguments other than
+   [vaddress] are ignored. *)
+let AArch64_TranslateAddress (vaddress, acctype, iswrite, wasaligned, size) =
+  let _ =
+    info
+      "Translation is not implemented, return same address as the virtual (no fault, normal, shareable, non-secure)." in
+  <| AddressDescriptor_fault = (AArch64_NoFault ());
+     AddressDescriptor_memattrs =
+       (<| MemoryAttributes_type' = MemType_Normal;
+           MemoryAttributes_shareable = B1 |>);
+     AddressDescriptor_paddress =
+       (<| FullAddress_physicaladdress = vaddress;
+           FullAddress_NS = B1 |>) |>
+
+(* ExtendReg: read register [_reg] (width N'), then sign- or zero-extend its
+   low [len] bits after shifting left by [shift], per the extend type
+   (SXTB/H/W/X select signed with len 8/16/32/64, UXTB/H/W/X unsigned).
+   The result is Extend(value<len-1:0> : Zeros(shift), N', unsigned). *)
+let ExtendReg (N', _reg, type', shift) =
+  rX (N',_reg) >>= fun w__0 ->
+  let _val = set_vector_start_to_length w__0 in
+  (* defaults, immediately overridden by the match below *)
+  let _unsigned = B0 in
+  let len = (0:ii) in
+  let (_unsigned, len) =
+    match type' with
+    | ExtendType_SXTB ->
+      let _unsigned = B0 in
+      let len = (8:ii) in
+      (_unsigned,len)
+    | ExtendType_SXTH ->
+      let _unsigned = B0 in
+      let len = (16:ii) in
+      (_unsigned,len)
+    | ExtendType_SXTW ->
+      let _unsigned = B0 in
+      let len = (32:ii) in
+      (_unsigned,len)
+    | ExtendType_SXTX ->
+      let _unsigned = B0 in
+      let len = (64:ii) in
+      (_unsigned,len)
+    | ExtendType_UXTB ->
+      let _unsigned = B1 in
+      let len = (8:ii) in
+      (_unsigned,len)
+    | ExtendType_UXTH ->
+      let _unsigned = B1 in
+      let len = (16:ii) in
+      (_unsigned,len)
+    | ExtendType_UXTW ->
+      let _unsigned = B1 in
+      let len = (32:ii) in
+      (_unsigned,len)
+    | ExtendType_UXTX ->
+      let _unsigned = B1 in
+      let len = (64:ii) in
+      (_unsigned,len)
+    end in
+  (* clamp so that the slice plus the shifted-in zeros fit in the register *)
+  let len = uMin (len,(length (reset_vector_start (set_vector_start_to_length _val))) - shift) in
+  return (set_vector_start_to_length
+            (Extend
+               (length _val,
+                reset_vector_start (set_vector_start_to_length
+                   ((set_vector_start_to_length
+                       (slice (set_vector_start_to_length _val) (len - (1:ii)) (0:ii))) ^^
+                    (set_vector_start_to_length (set_vector_start_to_length (Zeros shift))))),
+                _unsigned)))
+
+(* ShiftReg: read register [_reg] (width N') and apply the requested shift
+   (LSL / LSR / ASR / ROR) by [amount], returning the shifted value. *)
+let ShiftReg (N', _reg, type', amount) =
+  rX (N',_reg) >>= fun w__0 ->
+  let result = set_vector_start_to_length w__0 in
+  let result =
+    match type' with
+    | ShiftType_LSL ->
+      set_vector_start_to_length
+        (LSL (reset_vector_start (set_vector_start_to_length result),amount))
+    | ShiftType_LSR ->
+      set_vector_start_to_length
+        (LSR (reset_vector_start (set_vector_start_to_length result),amount))
+    | ShiftType_ASR ->
+      set_vector_start_to_length
+        (ASR (reset_vector_start (set_vector_start_to_length result),amount))
+    | ShiftType_ROR ->
+      set_vector_start_to_length
+        (ROR (reset_vector_start (set_vector_start_to_length result),amount))
+    end in
+  return (set_vector_start_to_length result)
+
+(* Register-footprint and next-instruction analysis of a decoded instruction,
+   used by the sequential/concurrency harness.  For each AST constructor it
+   accumulates:
+     iR   - registers (footprints) the instruction reads
+     oR   - registers it writes
+     aR   - registers contributing to a memory address
+     Nias - possible next-instruction addresses (successor, concrete target,
+            or indirect)
+     Dia  - direct-instruction-address info (always DIAFP_none here)
+     ik   - instruction kind (simple / branch / barrier / mem read / mem
+            write / transactional)
+   and finally returns (iR, oR, aR, Nias, Dia, ik).  Branch cases also fold
+   in BranchToFP; memory cases pick the ik via rmem_kind / wmem_kind.
+   NOTE(review): generated code -- the many rebindings of iR/oR via shadowing
+   are intentional accumulation, left byte-identical. *)
+let initial_analysis instr =
+  let iR = [] in
+  let oR = [] in
+  let aR = [] in
+  let Nias = [NIAFP_successor] in
+  let Dia = DIAFP_none in
+  let ik = IK_simple in
+  match instr with
+  (* --- transactional-memory instructions --- *)
+  | TMStart (t) ->
+    let iR = TxNestingLevelfp :: TXIDR_EL0_DEPTHfp :: iR in
+    let oR = TxNestingLevelfp :: (oR ++ (xFP t)) in
+    let ik = IK_trans Transaction_start in
+    return (aR,Nias,iR,oR,ik)
+  | TMCommit ->
+    let iR = TxNestingLevelfp :: iR in
+    let oR = TxNestingLevelfp :: oR in
+    let ik = IK_trans Transaction_commit in
+    return (aR,Nias,iR,oR,ik)
+  | TMAbort (retry,reason) ->
+    let iR = TxNestingLevelfp :: iR in
+    let ik = IK_trans Transaction_abort in
+    return (aR,Nias,iR,oR,ik)
+  | TMTest ->
+    let iR = TxNestingLevelfp :: iR in
+    let oR = (RFull "NZCV") :: oR in
+    return (aR,Nias,iR,oR,ik)
+  (* --- branches: next addresses are PC+offset and/or the successor --- *)
+  | CompareAndBranch (t,datasize,iszero,offset) ->
+    let iR = iR ++ (xFP t) in
+    let (i, o) = match (BranchToFP (iR,oR)) with | (v0v', v1v') -> (v0v',v1v') end in
+    let iR = i in
+    let oR = o in
+    rPC () >>= fun w__0 ->
+    let nia' =
+      set_vector_start 63
+        (add_VVV
+           (reset_vector_start (set_vector_start_to_length w__0))
+           (reset_vector_start (set_vector_start_to_length offset))) in
+    let Nias = [NIAFP_concrete_address (reset_vector_start nia');NIAFP_successor] in
+    let ik = IK_branch in
+    return (aR,Nias,iR,oR,ik)
+  | BranchConditional (offset,condition) ->
+    let iR = iR ++ (ConditionHoldsIFP (reset_vector_start condition)) in
+    let (i, o) = match (BranchToFP (iR,oR)) with | (v2v', v3v') -> (v2v',v3v') end in
+    let iR = i in
+    let oR = o in
+    rPC () >>= fun w__1 ->
+    let Nias =
+      [NIAFP_concrete_address (reset_vector_start (set_vector_start 63
+         (add_VVV
+            (reset_vector_start (set_vector_start_to_length w__1))
+            (reset_vector_start (set_vector_start_to_length offset)))));NIAFP_successor] in
+    let ik = IK_branch in
+    return (aR,Nias,iR,oR,ik)
+  (* --- exception/debug generators: not modelled --- *)
+  | GenerateExceptionEL1 (imm) ->
+    not_implemented "GenerateExceptionEL1" >>
+    return (aR,Nias,iR,oR,ik)
+  | GenerateExceptionEL2 (imm) ->
+    not_implemented "GenerateExceptionEL2" >>
+    return (aR,Nias,iR,oR,ik)
+  | GenerateExceptionEL3 (imm) ->
+    not_implemented "GenerateExceptionEL3" >>
+    return (aR,Nias,iR,oR,ik)
+  | DebugBreakpoint (comment) ->
+    not_implemented "DebugBreakpoint" >>
+    return (aR,Nias,iR,oR,ik)
+  | ExternalDebugBreakpoint ->
+    not_implemented "ExternalDebugBreakpoint" >>
+    return (aR,Nias,iR,oR,ik)
+  | DebugSwitchToExceptionLevel (target_level) ->
+    not_implemented "DebugSwitchToExceptionLevel" >>
+    return (aR,Nias,iR,oR,ik)
+  | MoveSystemImmediate (operand,field') ->
+    let (iR, oR) =
+      match field' with
+      | PSTATEField_SP ->
+        let oR = PSTATE_SPfp :: oR in
+        (iR,oR)
+      | PSTATEField_DAIFSet ->
+        let iR = iR ++ [PSTATE_Ffp;PSTATE_Ifp;PSTATE_Afp;PSTATE_Dfp] in
+        let oR = oR ++ [PSTATE_Ffp;PSTATE_Ifp;PSTATE_Afp;PSTATE_Dfp] in
+        (iR,oR)
+      | PSTATEField_DAIFClr ->
+        let iR = iR ++ [PSTATE_Ffp;PSTATE_Ifp;PSTATE_Afp;PSTATE_Dfp] in
+        let oR = oR ++ [PSTATE_Ffp;PSTATE_Ifp;PSTATE_Afp;PSTATE_Dfp] in
+        (iR,oR)
+      end in
+    return (aR,Nias,iR,oR,ik)
+  | Hint (op) ->
+    match op with
+    | SystemHintOp_YIELD -> return ()
+    | SystemHintOp_WFE ->
+      EventRegistered () >>= fun w__2 ->
+      if bitU_to_bool w__2
+      then return ()
+      else not_implemented "Hint(SystemHintOp_WFE);"
+    | SystemHintOp_WFI -> not_implemented "Hint(SystemHintOp_WFI);"
+    | SystemHintOp_SEV -> return ()
+    | SystemHintOp_SEVL -> not_implemented "Hint(SystemHintOp_SEVL);"
+    | _ -> return ()
+    end >>
+    return (aR,Nias,iR,oR,ik)
+  | ClearExclusiveMonitor (imm) -> return (aR,Nias,iR,oR,ik)
+  (* --- barriers: ik records the exact barrier flavour --- *)
+  | Barrier (op,domain,types) ->
+    let ik =
+      match op with
+      | MemBarrierOp_DSB ->
+        match types with
+        | MBReqTypes_Reads -> IK_barrier Barrier_DSB_LD
+        | MBReqTypes_Writes -> IK_barrier Barrier_DSB_ST
+        | MBReqTypes_All -> IK_barrier Barrier_DSB
+        end
+      | MemBarrierOp_DMB ->
+        match types with
+        | MBReqTypes_Reads -> IK_barrier Barrier_DMB_LD
+        | MBReqTypes_Writes -> IK_barrier Barrier_DMB_ST
+        | MBReqTypes_All -> IK_barrier Barrier_DMB
+        end
+      | MemBarrierOp_ISB -> IK_barrier Barrier_ISB
+      end in
+    return (aR,Nias,iR,oR,ik)
+  | System (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,has_result) ->
+    let oR = oR ++ (xFP t) in
+    not_implemented "System" >>
+    return (aR,Nias,iR,oR,ik)
+  | MoveSystemRegister (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,read) ->
+    (* MRS (read=1) reads the named system register into Xt; MSR writes it *)
+    let (oR, iR) =
+      if bitU_to_bool read
+      then
+        let oR = oR ++ (xFP t) in
+        let iR =
+          match toNaturalFiveTup (sys_op0,sys_op1,sys_crn,sys_crm,sys_op2) with
+          | ((3:nn), (3:nn), (4:nn), (2:nn), (0:nn)) -> (RFull "NZCV") :: iR
+          | ((3:nn), (3:nn), (4:nn), (2:nn), (1:nn)) -> (RFull "DAIF") :: iR
+          | ((3:nn), (3:nn), (13:nn), (0:nn), (2:nn)) -> (RFull "TPIDR_EL0") :: iR
+          end in
+        (oR,iR)
+      else
+        let iR = iR ++ (xFP t) in
+        let oR =
+          match toNaturalFiveTup (sys_op0,sys_op1,sys_crn,sys_crm,sys_op2) with
+          | ((3:nn), (3:nn), (4:nn), (2:nn), (0:nn)) -> (RFull "NZCV") :: oR
+          | ((3:nn), (3:nn), (4:nn), (2:nn), (1:nn)) -> (RFull "DAIF") :: oR
+          | ((3:nn), (3:nn), (13:nn), (0:nn), (2:nn)) -> (RFull "TPIDR_EL0") :: oR
+          end in
+        (oR,iR) in
+    return (aR,Nias,iR,oR,ik)
+  | ImplementationDefinedTestBeginEnd (isEnd) -> return (aR,Nias,iR,oR,ik)
+  | ImplementationDefinedStopFetching -> return (aR,Nias,iR,oR,ik)
+  | ImplementationDefinedThreadStart -> return (aR,Nias,iR,oR,ik)
+  | TestBitAndBranch (t,datasize,bit_pos,bit_val,offset) ->
+    let iR = (xFP t) ++ iR in
+    let (i, o) = match (BranchToFP (iR,oR)) with | (v4v', v5v') -> (v4v',v5v') end in
+    let iR = i in
+    let oR = o in
+    rPC () >>= fun w__3 ->
+    let Nias =
+      [NIAFP_concrete_address (reset_vector_start (set_vector_start 63
+         (add_VVV
+            (reset_vector_start (set_vector_start_to_length w__3))
+            (reset_vector_start (set_vector_start_to_length offset)))));NIAFP_successor] in
+    let ik = IK_branch in
+    return (aR,Nias,iR,oR,ik)
+  | BranchImmediate (branch_type,offset) ->
+    (* BL additionally reads the PC and writes the link register X30 *)
+    let (iR, oR) =
+      if bitU_to_bool (eq (branch_type, BranchType_CALL))
+      then
+        let iR = _PCfp :: iR in
+        let oR = (xFP (30:ii)) ++ oR in
+        (iR,oR)
+      else (iR,oR) in
+    let (i, o) = match (BranchToFP (iR,oR)) with | (v6v', v7v') -> (v6v',v7v') end in
+    let iR = i in
+    let oR = o in
+    rPC () >>= fun w__4 ->
+    let Nias =
+      [NIAFP_concrete_address (reset_vector_start (set_vector_start 63
+         (add_VVV
+            (reset_vector_start (set_vector_start_to_length w__4))
+            (reset_vector_start (set_vector_start_to_length offset)))))] in
+    let ik = IK_branch in
+    return (aR,Nias,iR,oR,ik)
+  | BranchRegister (n,branch_type) ->
+    let iR = iR ++ (xFP n) in
+    let (iR, oR) =
+      if bitU_to_bool (eq (branch_type, BranchType_CALL))
+      then
+        let iR = _PCfp :: iR in
+        let oR = (xFP (30:ii)) ++ oR in
+        (iR,oR)
+      else (iR,oR) in
+    let (i, o) = match (BranchToFP (iR,oR)) with | (v8v', v9v') -> (v8v',v9v') end in
+    let iR = i in
+    let oR = o in
+    (* n = 31 (XZR) branches to address 0; otherwise target is indirect *)
+    let Nias =
+      if bitU_to_bool (eq_range (n, (31:ii)))
+      then [NIAFP_concrete_address (reset_vector_start (to_vec_dec ((64:ii),(0:ii))))]
+      else [NIAFP_indirect_address] in
+    let ik = IK_branch in
+    return (aR,Nias,iR,oR,ik)
+  | ExceptionReturn ->
+    not_implemented "ExceptionReturn" >>
+    return (aR,Nias,iR,oR,ik)
+  | DebugRestorePState ->
+    not_implemented "DebugRestorePState" >>
+    return (aR,Nias,iR,oR,ik)
+  (* --- memory access instructions: aR collects address registers, and the
+     instruction kind comes from rmem_kind / wmem_kind --- *)
+  | LoadLiteral (t,memop,_signed,size,offset,datasize) ->
+    let iR = _PCfp :: iR in
+    let oR = (xFP t) ++ oR in
+    let aR = _PCfp :: aR in
+    let (aR, ik) =
+      match memop with
+      | MemOp_LOAD ->
+        let ik = IK_mem_read Read_plain in
+        (aR,ik)
+      | MemOp_PREFETCH ->
+        let ik = IK_simple in
+        let aR = [] in
+        (aR,ik)
+      end in
+    return (aR,Nias,iR,oR,ik)
+  | LoadStoreAcqExc (n,t,t2,s,acctype,excl,pair,memop,elsize,regsize,datasize) ->
+    let rt_unknown = B0 in
+    let rn_unknown = B0 in
+    (* n = 31 means SP-based addressing (alignment check + SP read) *)
+    let (iR, aR) =
+      if bitU_to_bool (eq_range (n, (31:ii)))
+      then
+        let iR = CheckSPAlignmentIFP ++ iR in
+        let iR = rSPIFP ++ iR in
+        let aR = rSPIFP ++ aR in
+        (iR,aR)
+      else
+        let (iR, aR) =
+          if bitU_to_bool rn_unknown
+          then (iR,aR)
+          else
+            let iR = (xFP n) ++ iR in
+            let aR = (xFP n) ++ aR in
+            (iR,aR) in
+        (iR,aR) in
+    match memop with
+    | MemOp_STORE ->
+      let iR =
+        if bitU_to_bool rt_unknown
+        then iR
+        else if bitU_to_bool pair then (xFP t) ++ ((xFP t2) ++ iR) else (xFP t) ++ iR in
+      (if bitU_to_bool excl
+       then
+         (* store-exclusive also writes the status register Xs *)
+         let iR = iR ++ wMemIFP in
+         let oR = (xFP s) ++ oR in
+         wmem_kind (acctype,B1) >>= fun w__5 ->
+         let ik = w__5 in
+         return (iR,oR,ik)
+       else
+         let iR = iR ++ wMemIFP in
+         wmem_kind (acctype,B0) >>= fun w__6 ->
+         let ik = w__6 in
+         return (iR,oR,ik)) >>= fun (iR, oR, ik) ->
+      return (aR,iR,oR,ik)
+    | MemOp_LOAD ->
+      (if bitU_to_bool pair
+       then
+         let (iR, oR) =
+           if bitU_to_bool rt_unknown
+           then
+             let oR = (xFP t) ++ oR in
+             (iR,oR)
+           else
+             let (iR, oR) =
+               if bitU_to_bool (eq_range (elsize, (32:ii)))
+               then
+                 let iR = iR ++ BigEndianIFP in
+                 let oR = (xFP t) ++ ((xFP t2) ++ oR) in
+                 (iR,oR)
+               else
+                 let oR = (xFP t) ++ ((xFP t2) ++ oR) in
+                 (iR,oR) in
+             (iR,oR) in
+         rmem_kind (acctype,B1) >>= fun w__7 ->
+         let ik = w__7 in
+         return (oR,iR,ik)
+       else
+         let oR = (xFP t) ++ oR in
+         rmem_kind (acctype,excl) >>= fun w__8 ->
+         let ik = w__8 in
+         return (oR,iR,ik)) >>= fun (oR, iR, ik) ->
+      return (aR,iR,oR,ik)
+    | MemOp_PREFETCH ->
+      let aR = [] in
+      return (aR,iR,oR,ik)
+    end >>= fun (aR, iR, oR, ik) ->
+    return (aR,Nias,iR,oR,ik)
+  | LoadStorePairNonTemp (wback,postindex,n,t,t2,acctype,memop,scale,datasize,offset) ->
+    let rt_unknown = B0 in
+    let (iR, aR) =
+      if bitU_to_bool (eq_range (n, (31:ii)))
+      then
+        let iR = CheckSPAlignmentIFP ++ iR in
+        let iR = rSPIFP ++ iR in
+        let aR = rSPIFP ++ aR in
+        (iR,aR)
+      else
+        let iR = (xFP n) ++ iR in
+        let aR = (xFP n) ++ aR in
+        (iR,aR) in
+    (* writeback updates the base register (SP or Xn) *)
+    let (iR, oR) =
+      if bitU_to_bool wback
+      then
+        let (iR, oR) =
+          if bitU_to_bool (eq_range (n, (31:ii)))
+          then
+            let (i, o) = match wSPFP with | (v10v', v11v') -> (v10v',v11v') end in
+            let iR = i ++ iR in
+            let oR = o ++ oR in
+            (iR,oR)
+          else
+            let oR = (xFP n) ++ oR in
+            (iR,oR) in
+        (iR,oR)
+      else (iR,oR) in
+    match memop with
+    | MemOp_STORE ->
+      let iR = if bitU_to_bool (rt_unknown &. (eq_range (t, n))) then iR else (xFP t) ++ iR in
+      let iR = if bitU_to_bool (rt_unknown &. (eq_range (t2, n))) then iR else (xFP t2) ++ iR in
+      let iR = wMemIFP ++ iR in
+      wmem_kind (acctype,B0) >>= fun w__9 ->
+      let ik = w__9 in
+      return (oR,iR,ik)
+    | MemOp_LOAD ->
+      let oR = (xFP t) ++ ((xFP t2) ++ oR) in
+      rmem_kind (acctype,B0) >>= fun w__10 ->
+      let ik = w__10 in
+      return (oR,iR,ik)
+    end >>= fun (oR, iR, ik) ->
+    return (aR,Nias,iR,oR,ik)
+  | LoadImmediate (n,t,acctype,memop,_signed,wback,postindex,offset,regsize,datasize) ->
+    let wb_unknown = B0 in
+    let rt_unknown = B0 in
+    let (iR, aR) =
+      if bitU_to_bool (eq_range (n, (31:ii)))
+      then
+        let iR = CheckSPAlignmentIFP ++ iR in
+        let iR = rSPIFP ++ iR in
+        let aR = rSPIFP ++ aR in
+        (iR,aR)
+      else
+        let iR = (xFP n) ++ iR in
+        let aR = (xFP n) ++ aR in
+        (iR,aR) in
+    let (iR, oR) =
+      if bitU_to_bool wback
+      then
+        let (iR, oR) =
+          if bitU_to_bool (eq_range (n, (31:ii)))
+          then
+            let (i, o) = match wSPFP with | (v12v', v13v') -> (v12v',v13v') end in
+            let iR = i ++ iR in
+            let oR = o ++ oR in
+            (iR,oR)
+          else
+            let oR = (xFP n) ++ oR in
+            (iR,oR) in
+        (iR,oR)
+      else (iR,oR) in
+    match memop with
+    | MemOp_STORE ->
+      let iR = if bitU_to_bool rt_unknown then iR else (xFP t) ++ iR in
+      let iR = wMemIFP ++ iR in
+      wmem_kind (acctype,B0) >>= fun w__11 ->
+      let ik = w__11 in
+      return (aR,oR,iR,ik)
+    | MemOp_LOAD ->
+      let oR = (xFP t) ++ oR in
+      rmem_kind (acctype,B0) >>= fun w__12 ->
+      let ik = w__12 in
+      return (aR,oR,iR,ik)
+    | MemOp_PREFETCH ->
+      let aR = [] in
+      return (aR,oR,iR,ik)
+    end >>= fun (aR, oR, iR, ik) ->
+    return (aR,Nias,iR,oR,ik)
+  | LoadRegister (n,t,m,acctype,memop,_signed,wback,postindex,extend_type,shift,regsize,datasize) ->
+    (* register-offset addressing also reads/addresses Xm *)
+    let iR = (xFP m) ++ iR in
+    let aR = (xFP m) ++ aR in
+    let wb_unknown = B0 in
+    let rt_unknown = B0 in
+    let (iR, aR) =
+      if bitU_to_bool (eq_range (n, (31:ii)))
+      then
+        let iR = CheckSPAlignmentIFP ++ iR in
+        let iR = rSPIFP ++ iR in
+        let aR = rSPIFP ++ aR in
+        (iR,aR)
+      else
+        let iR = (xFP n) ++ iR in
+        let aR = (xFP n) ++ aR in
+        (iR,aR) in
+    let (iR, oR) =
+      if bitU_to_bool wback
+      then
+        let (iR, oR) =
+          if bitU_to_bool (eq_range (n, (31:ii)))
+          then
+            let (i, o) = match wSPFP with | (v14v', v15v') -> (v14v',v15v') end in
+            let iR = i ++ iR in
+            let oR = o ++ oR in
+            (iR,oR)
+          else
+            let oR = (xFP n) ++ oR in
+            (iR,oR) in
+        (iR,oR)
+      else (iR,oR) in
+    match memop with
+    | MemOp_STORE ->
+      let iR = if bitU_to_bool rt_unknown then iR else (xFP t) ++ iR in
+      let iR = wMemIFP ++ iR in
+      wmem_kind (acctype,B0) >>= fun w__13 ->
+      let ik = w__13 in
+      return (aR,oR,iR,ik)
+    | MemOp_LOAD ->
+      let oR = (xFP t) ++ oR in
+      rmem_kind (acctype,B0) >>= fun w__14 ->
+      let ik = w__14 in
+      return (aR,oR,iR,ik)
+    | MemOp_PREFETCH ->
+      let aR = [] in
+      return (aR,oR,iR,ik)
+    end >>= fun (aR, oR, iR, ik) ->
+    return (aR,Nias,iR,oR,ik)
+  | LoadStorePair (wback,postindex,n,t,t2,acctype,memop,_signed,datasize,offset) ->
+    let rt_unknown = B0 in
+    let wb_unknown = B0 in
+    let (iR, aR) =
+      if bitU_to_bool (eq_range (n, (31:ii)))
+      then
+        let iR = CheckSPAlignmentIFP ++ iR in
+        let iR = rSPIFP ++ iR in
+        let aR = rSPIFP ++ aR in
+        (iR,aR)
+      else
+        let iR = (xFP n) ++ iR in
+        let aR = (xFP n) ++ aR in
+        (iR,aR) in
+    let (iR, oR) =
+      if bitU_to_bool wback
+      then
+        let (iR, oR) =
+          if bitU_to_bool (eq_range (n, (31:ii)))
+          then
+            let (i, o) = match wSPFP with | (v16v', v17v') -> (v16v',v17v') end in
+            let iR = i ++ iR in
+            let oR = o ++ oR in
+            (iR,oR)
+          else
+            let oR = (xFP n) ++ oR in
+            (iR,oR) in
+        (iR,oR)
+      else (iR,oR) in
+    match memop with
+    | MemOp_STORE ->
+      let iR = if bitU_to_bool (rt_unknown &. (eq_range (t, n))) then iR else (xFP t) ++ iR in
+      let iR = if bitU_to_bool (rt_unknown &. (eq_range (t2, n))) then iR else (xFP t2) ++ iR in
+      let iR = wMemIFP ++ iR in
+      wmem_kind (acctype,B0) >>= fun w__15 ->
+      let ik = w__15 in
+      return (oR,iR,ik)
+    | MemOp_LOAD ->
+      let oR = (xFP t) ++ oR in
+      let oR = (xFP t2) ++ oR in
+      rmem_kind (acctype,B0) >>= fun w__16 ->
+      let ik = w__16 in
+      return (oR,iR,ik)
+    end >>= fun (oR, iR, ik) ->
+    return (aR,Nias,iR,oR,ik)
+  (* --- data-processing instructions: pure register footprints --- *)
+  | AddSubImmediate (d,n,datasize,sub_op,setflags,imm) ->
+    (* n/d = 31 denote SP for the non-flag-setting forms, XZR otherwise *)
+    let iR = (if bitU_to_bool (eq_range (n, (31:ii))) then rSPIFP else xFP n) ++ iR in
+    let oR = if bitU_to_bool setflags then NZCVfp ++ oR else oR in
+    let (iR, oR) =
+      if bitU_to_bool ((eq_range (d, (31:ii))) &. (~setflags))
+      then
+        let (i, o) = match wSPFP with | (v18v', v19v') -> (v18v',v19v') end in
+        let iR = i ++ iR in
+        let oR = o ++ oR in
+        (iR,oR)
+      else
+        let oR = (xFP d) ++ oR in
+        (iR,oR) in
+    return (aR,Nias,iR,oR,ik)
+  | BitfieldMove (d,n,datasize,inzero,extend,R,S,wmask,tmask) ->
+    let iR = if bitU_to_bool inzero then iR else (xFP d) ++ iR in
+    let iR = (xFP n) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | ExtractRegister (d,n,m,datasize,lsb) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | LogicalImmediate (d,n,datasize,setflags,op,imm) ->
+    let iR = (xFP n) ++ iR in
+    let oR = if bitU_to_bool setflags then NZCVfp ++ oR else oR in
+    let (iR, oR) =
+      if bitU_to_bool ((eq_range (d, (31:ii))) &. (~setflags))
+      then
+        let (i, o) = match wSPFP with | (v20v', v21v') -> (v20v',v21v') end in
+        let iR = i ++ iR in
+        let oR = o ++ oR in
+        (iR,oR)
+      else
+        let oR = (xFP d) ++ oR in
+        (iR,oR) in
+    return (aR,Nias,iR,oR,ik)
+  | MoveWide (d,datasize,imm,pos,opcode) ->
+    (* MOVK keeps unwritten bits, so it also reads Xd *)
+    let iR = if bitU_to_bool (eq (opcode, MoveWideOp_K)) then (xFP d) ++ iR else iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | Address (d,page,imm) ->
+    let iR = _PCfp :: iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | AddSubExtendRegister (d,n,m,datasize,sub_op,setflags,extend_type,shift) ->
+    let iR = (if bitU_to_bool (eq_range (n, (31:ii))) then rSPIFP else xFP n) ++ iR in
+    let iR = (xFP m) ++ iR in
+    let oR = if bitU_to_bool setflags then NZCVfp ++ oR else oR in
+    let (iR, oR) =
+      if bitU_to_bool ((eq_range (d, (31:ii))) &. (~setflags))
+      then
+        let (i, o) = match wSPFP with | (v22v', v23v') -> (v22v',v23v') end in
+        let iR = i ++ iR in
+        let oR = o ++ oR in
+        (iR,oR)
+      else
+        let oR = (xFP d) ++ oR in
+        (iR,oR) in
+    return (aR,Nias,iR,oR,ik)
+  | AddSubShiftedRegister (d,n,m,datasize,sub_op,setflags,shift_type,shift_amount) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let oR = if bitU_to_bool setflags then NZCVfp ++ oR else oR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | AddSubCarry (d,n,m,datasize,sub_op,setflags) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let iR = PSTATE_Cfp :: iR in
+    let oR = if bitU_to_bool setflags then NZCVfp ++ oR else oR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | ConditionalCompareImmediate (n,datasize,sub_op,condition,flags,imm) ->
+    let iR = (xFP n) ++ iR in
+    let iR = (ConditionHoldsIFP (reset_vector_start condition)) ++ iR in
+    let oR = NZCVfp ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | ConditionalCompareRegister (n,m,datasize,sub_op,condition,flags) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let iR = (ConditionHoldsIFP (reset_vector_start condition)) ++ iR in
+    let oR = NZCVfp ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | ConditionalSelect (d,n,m,datasize,condition,else_inv,else_inc) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let iR = (ConditionHoldsIFP (reset_vector_start condition)) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | Reverse (d,n,datasize,op) ->
+    let iR = (xFP n) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | CountLeading (d,n,datasize,opcode) ->
+    let iR = (xFP n) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | Division (d,n,m,datasize,_unsigned) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | Shift (d,n,m,datasize,shift_type) ->
+    let iR = (xFP m) ++ iR in
+    let iR = (xFP n) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | CRC (d,n,m,size,crc32c) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | MultiplyAddSub (d,n,m,a,destsize,datasize,sub_op) ->
+    let iR = (xFP n) ++ iR in
+    let iR = (xFP m) ++ iR in
+    let iR = (xFP a) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | MultiplyAddSubLong (d,n,m,a,destsize,datasize,sub_op,_unsigned) ->
+    let iR = (xFP n) ++ iR in
+    let iR = (xFP m) ++ iR in
+    let iR = (xFP a) ++ iR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | MultiplyHigh (d,n,m,a,destsize,datasize,_unsigned) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  | LogicalShiftedRegister (d,n,m,datasize,setflags,op,shift_type,shift_amount,invert) ->
+    let iR = (xFP n) ++ ((xFP m) ++ iR) in
+    let oR = if bitU_to_bool setflags then NZCVfp ++ oR else oR in
+    let oR = (xFP d) ++ oR in
+    return (aR,Nias,iR,oR,ik)
+  end >>= fun (aR, Nias, iR, oR, ik) ->
+  (* final result tuple order differs from the per-case tuples above *)
+  return (iR,oR,aR,Nias,Dia,ik)
+
+(* Top-level dispatcher for the "branches, exception generating and system"
+   instruction group: matches the fixed opcode bits of the machine-code word
+   and forwards to the appropriate specific decoder (pure decoders are
+   wrapped in return; monadic ones are called directly). *)
+let decodeBranchesExceptionSystem machineCode =
+  match machineCode with
+  | ((Vector [_;B0;B0;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__392) ->
+    return (decodeUnconditionalBranchImmediate
+              (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [_;B0;B1;B1;B0;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__403) ->
+    return (decodeCompareBranchImmediate
+              (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [_;B0;B1;B1;B0;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__414) ->
+    return (decodeTestBranchImmediate (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [B0;B1;B0;B1;B0;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__425) ->
+    return (decodeConditionalBranchImmediate
+              (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__436) ->
+    decodeExceptionGeneration (reset_vector_start machineCode)
+  | ((Vector [B1;B1;B0;B1;B0;B1;B0;B1;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__447) ->
+    decodeSystemImplementationDefined (reset_vector_start machineCode)
+  | ((Vector [B1;B1;B0;B1;B0;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__458) ->
+    decodeUnconditionalBranchRegister (reset_vector_start machineCode)
+  end
+
+(* Returns B1 iff execution is currently in Secure state: either EL3 is
+   implemented, AArch64 is in use, and CurrentEL.EL = EL3; or EL3 is
+   implemented, AArch32 is in use, and PSTATE.M is Monitor mode.
+   Otherwise defers to [IsSecureBelowEL3]. *)
+let IsSecure () =
+  read_reg_field CurrentEL "EL" >>= fun w__0 ->
+  if bitU_to_bool
+       ((HaveEL (reset_vector_start EL3)) &.
+          ((~(UsingAArch32 ())) &.
+             (eq_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL3))))
+  then return B1
+  else
+    read_reg PSTATE_M >>= fun w__1 ->
+    if bitU_to_bool
+         ((HaveEL (reset_vector_start EL3)) &.
+            ((UsingAArch32 ()) &. (eq_vec (w__1, set_vector_start_to_length M32_Monitor))))
+    then return B1
+    else IsSecureBelowEL3 ()
+
+(* Decode a "Logical (immediate)" instruction.  Instruction fields are
+   extracted with [slice_raw]; [sf] selects 64- vs 32-bit datasize; [opc]
+   selects AND / ORR / EOR / ANDS (AND with setflags).  sf=0 with N/=0 is
+   a reserved encoding ([ReservedValue]).  The bitmask immediate is
+   materialised by [DecodeBitMasks]; only the first component (imm) is
+   used.  The initial [setflags]/[op] bindings are defaults that the
+   [opc] match always shadows (generated-code artifact). *)
+let decodeLogicalImmediate ((Vector [sf;_;_;B1;B0;B0;B1;B0;B0;N;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__388) =
+  let opc = slice_raw v__388 (1:ii) (2:ii) in
+  let immr = slice_raw v__388 (10:ii) (15:ii) in
+  let imms = slice_raw v__388 (16:ii) (21:ii) in
+  let Rn = slice_raw v__388 (22:ii) (26:ii) in
+  let Rd = slice_raw v__388 (27:ii) (31:ii) in
+  let d = UInt_reg (reset_vector_start Rd) in
+  let n = UInt_reg (reset_vector_start Rn) in
+  let datasize =
+    if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then (64:ii)
+    else (32:ii) in
+  let setflags = B0 in
+  let op = LogicalOp_AND in
+  let (op, setflags) =
+    match opc with
+    | Vector [B0;B0] _ _ ->
+      let op = LogicalOp_AND in
+      let setflags = B0 in
+      (op,setflags)
+    | Vector [B0;B1] _ _ ->
+      let op = LogicalOp_ORR in
+      let setflags = B0 in
+      (op,setflags)
+    | Vector [B1;B0] _ _ ->
+      let op = LogicalOp_EOR in
+      let setflags = B0 in
+      (op,setflags)
+    | Vector [B1;B1] _ _ ->
+      let op = LogicalOp_AND in
+      let setflags = B1 in
+      (op,setflags)
+    end in
+  (if bitU_to_bool
+        ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+           (neq (match N with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))))
+   then ReservedValue ()
+   else return ()) >>
+  DecodeBitMasks (datasize,N,reset_vector_start imms,reset_vector_start immr,B1) >>= fun w__0 ->
+  let (imm, _) = match w__0 with | (v0v', v1v') -> (v0v',v1v') end in
+  return (Just (LogicalImmediate (d,n,datasize,setflags,op,reset_vector_start (set_vector_start_to_length
+                                                                                 imm))))
+
+(* Decode a "Bitfield" instruction (SBFM / BFM / UBFM family).  [opc]
+   selects the inzero/extend pair; opc=11 signals [UnallocatedEncoding]
+   and falls back to the earlier default bindings.  Two reserved-value
+   checks mirror the ARM pseudocode: sf=1 requires N=1, and sf=0 requires
+   N=0 and immr<5>=imms<5>=0.  [DecodeBitMasks] yields the (wmask,tmask)
+   pair carried into the [BitfieldMove] AST node.  The early R/S bindings
+   of 0 are defaults shadowed before use (generated-code artifact). *)
+let decodeBitfield ((Vector [sf;_;_;B1;B0;B0;B1;B1;B0;N;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__384) =
+  let opc = slice_raw v__384 (1:ii) (2:ii) in
+  let immr = slice_raw v__384 (10:ii) (15:ii) in
+  let imms = slice_raw v__384 (16:ii) (21:ii) in
+  let Rn = slice_raw v__384 (22:ii) (26:ii) in
+  let Rd = slice_raw v__384 (27:ii) (31:ii) in
+  let d = UInt_reg (reset_vector_start Rd) in
+  let n = UInt_reg (reset_vector_start Rn) in
+  let datasize =
+    if bitU_to_bool (eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+    then (64:ii)
+    else (32:ii) in
+  let inzero = B0 in
+  let extend = B0 in
+  let R = (0:ii) in
+  let S = (0:ii) in
+  match opc with
+  | Vector [B0;B0] _ _ ->
+    let inzero = B1 in
+    let extend = B1 in
+    return (inzero,extend)
+  | Vector [B0;B1] _ _ ->
+    let inzero = B0 in
+    let extend = B0 in
+    return (inzero,extend)
+  | Vector [B1;B0] _ _ ->
+    let inzero = B1 in
+    let extend = B0 in
+    return (inzero,extend)
+  | Vector [B1;B1] _ _ ->
+    UnallocatedEncoding () >>
+    return (inzero,extend)
+  end >>= fun (inzero, extend) ->
+  (if bitU_to_bool
+        ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))) &.
+           (neq (match N with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))
+   then ReservedValue ()
+   else return ()) >>
+  (if bitU_to_bool
+        ((eq (match sf with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) &.
+           ((neq (match N with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) |.
+              ((neq (match (access immr (5:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))) |.
+                 (neq (match (access imms (5:ii)) with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii))))))
+   then ReservedValue ()
+   else return ()) >>
+  let R = UInt (reset_vector_start (set_vector_start_to_length immr)) in
+  let S = UInt (reset_vector_start (set_vector_start_to_length imms)) in
+  DecodeBitMasks (datasize,N,reset_vector_start imms,reset_vector_start immr,B0) >>= fun (wmask, tmask) ->
+  return (Just (BitfieldMove (d,n,datasize,inzero,extend,R,S,reset_vector_start (set_vector_start_to_length
+                                                                                   wmask),reset_vector_start (set_vector_start_to_length tmask))))
+
+(* AArch64.SetExclusiveMonitors(): arm the exclusive monitors for a
+   load-exclusive of [size] bytes at virtual [address].  Translates the
+   address; on a translation fault does nothing (the subsequent access
+   will fault), otherwise marks the global monitor (only for shareable
+   memory), the local monitor, and the VA-based monitor.
+   FIX(review): [aligned] previously used [neq_vec], i.e. it was true
+   exactly when the address was NOT aligned — inverted relative to the
+   ARM pseudocode `aligned = (address == Align(address, size))` and
+   inconsistent with every other alignment computation in this file
+   (cf. AArch64_ExclusiveMonitorsPass, AArch64_CheckAlignment).  Changed
+   to [eq_vec]. *)
+let AArch64_SetExclusiveMonitors (address, size) =
+  let acctype = AccType_ATOMIC in
+  let iswrite = B0 in
+  let aligned =
+    eq_vec
+      (set_vector_start_to_length address,
+       set_vector_start_to_length
+         (Align (reset_vector_start (set_vector_start_to_length address),size))) in
+  let memaddrdesc =
+    AArch64_TranslateAddress (reset_vector_start address,acctype,iswrite,aligned,size) in
+  if bitU_to_bool (IsFault memaddrdesc)
+  then ()
+  else
+    let _ =
+      if bitU_to_bool memaddrdesc.AddressDescriptor_memattrs.MemoryAttributes_shareable
+      then MarkExclusiveGlobal (memaddrdesc.AddressDescriptor_paddress,ProcessorID (),size)
+      else () in
+    let _ = MarkExclusiveLocal (memaddrdesc.AddressDescriptor_paddress,ProcessorID (),size) in
+    AArch64_MarkExclusiveVA (reset_vector_start address,ProcessorID (),size)
+
+(* AArch64.TakeReset(): architectural state after a (cold or warm) reset.
+   Asserts the highest EL is not AArch32, sets PSTATE.nRW=0 (AArch64),
+   enters the highest implemented EL (EL3 > EL2 > EL1, clearing
+   SCR_EL3.NS at EL3), masks DAIF, resets control/general/SIMD&FP/special
+   and external-debug registers, then reads the reset vector from the
+   appropriate RVBAR_ELx, asserts it is correctly aligned and within
+   PAMax, and branches to it.  The [PSTATE_SS]/[PSTATE_IL] bindings and
+   the first [rv] binding are dead locals produced by the Sail->Lem
+   translation (the monadic read immediately shadows [rv]). *)
+let AArch64_TakeReset cold_reset =
+  let _ = assert' (~(HighestELUsingAArch32 ())) (Nothing) in
+  write_reg PSTATE_nRW (to_vec_dec ((1:ii),(0:ii))) >>
+  (if bitU_to_bool (HaveEL (reset_vector_start EL3))
+   then
+     write_reg_field CurrentEL "EL" (set_vector_start 3 EL3) >>
+     write_reg_bitfield SCR_EL3 "NS" B0
+   else
+     if bitU_to_bool (HaveEL (reset_vector_start EL2))
+     then write_reg_field CurrentEL "EL" (set_vector_start 3 EL2)
+     else write_reg_field CurrentEL "EL" (set_vector_start 3 EL1)) >>
+  let _ = AArch64_ResetControlRegisters cold_reset in
+  write_reg_bitfield SPSel "SP" B0 >>
+  wPSTATE_DAIF ((),reset_vector_start (Vector [B1;B1;B1;B1] 3 false)) >>
+  let PSTATE_SS = (0:ii) in
+  let PSTATE_IL = (0:ii) in
+  AArch64_ResetGeneralRegisters () >>
+  AArch64_ResetSIMDFPRegisters () >>
+  AArch64_ResetSpecialRegisters () >>
+  ResetExternalDebugRegisters cold_reset >>
+  let rv = to_vec_dec ((64:ii),(0:ii)) in
+  (if bitU_to_bool (HaveEL (reset_vector_start EL3))
+   then read_reg RVBAR_EL3
+   else
+     if bitU_to_bool (HaveEL (reset_vector_start EL2))
+     then read_reg RVBAR_EL2
+     else read_reg RVBAR_EL1) >>= fun rv ->
+  PAMax () >>= fun w__3 ->
+  let _ =
+    assert' ((IsZero
+                (reset_vector_start (set_vector_start_to_length
+                                       (slice (set_vector_start_to_length rv) (63:ii) w__3)))) &.
+               (IsZero
+                  (reset_vector_start (set_vector_start_to_length
+                                         (slice (set_vector_start_to_length rv) (1:ii) (0:ii)))))) (Just "reset vector not correctly aligned") in
+  BranchTo (reset_vector_start (set_vector_start_to_length rv),BranchType_UNKNOWN)
+
+(* AArch64.MemSingle[] read: one aligned single-copy-atomic read of
+   [size] bytes.  Asserts the address is aligned to [size], translates
+   it, aborts on a translation fault, then issues the read via [_rMem]
+   (accumulating into [read_buffer]).  The [value] binding is a dead
+   local from the Sail->Lem translation. *)
+let AArch64_rMemSingle (read_buffer, address, size, acctype, wasaligned, exclusive) =
+  let _ =
+    assert' (eq_vec
+               (set_vector_start_to_length address,
+                set_vector_start_to_length
+                  (Align (reset_vector_start (set_vector_start_to_length address),size)))) (Nothing) in
+  let value = set_vector_start_to_length (to_vec_dec (size * (8:ii),(0:ii))) in
+  let iswrite = B0 in
+  let memaddrdesc =
+    AArch64_TranslateAddress (reset_vector_start address,acctype,iswrite,wasaligned,size) in
+  (if bitU_to_bool (IsFault memaddrdesc)
+   then AArch64_Abort (reset_vector_start address,memaddrdesc.AddressDescriptor_fault)
+   else return ()) >>
+  return (_rMem (read_buffer,memaddrdesc,size,acctype,exclusive))
+
+(* AArch64.MemSingle[] write: one aligned single-copy-atomic write of
+   [size] bytes.  Asserts alignment, translates, aborts on a translation
+   fault, clears other observers' exclusive monitors for shareable
+   memory, then issues the write via [_wMem] (accumulating into
+   [write_buffer]). *)
+let AArch64_wMemSingle (write_buffer, address, size, acctype, wasaligned, exclusive, value) =
+  let _ =
+    assert' (eq_vec
+               (set_vector_start_to_length address,
+                set_vector_start_to_length
+                  (Align (reset_vector_start (set_vector_start_to_length address),size)))) (Nothing) in
+  let iswrite = B1 in
+  let memaddrdesc =
+    AArch64_TranslateAddress (reset_vector_start address,acctype,iswrite,wasaligned,size) in
+  (if bitU_to_bool (IsFault memaddrdesc)
+   then AArch64_Abort (reset_vector_start address,memaddrdesc.AddressDescriptor_fault)
+   else return ()) >>
+  let _ =
+    if bitU_to_bool memaddrdesc.AddressDescriptor_memattrs.MemoryAttributes_shareable
+    then ClearExclusiveByAddress (memaddrdesc.AddressDescriptor_paddress,ProcessorID (),size)
+    else () in
+  return (_wMem
+            (write_buffer,
+             memaddrdesc,
+             size,
+             acctype,
+             exclusive,
+             reset_vector_start (set_vector_start_to_length value)))
+
+(* AArch64.ExclusiveMonitorsPass(): decide whether a store-exclusive of
+   [size] bytes at [address] succeeds.  Misaligned accesses abort with an
+   alignment fault.  Otherwise: the VA monitor must match; translation
+   faults abort; then the local monitor is checked and cleared, and for
+   shareable memory the global monitor is also consulted.  Returns the
+   final pass/fail bit. *)
+let AArch64_ExclusiveMonitorsPass (address, size) =
+  let acctype = AccType_ATOMIC in
+  let iswrite = B1 in
+  let aligned =
+    eq_vec
+      (set_vector_start_to_length address,
+       set_vector_start_to_length
+         (Align (reset_vector_start (set_vector_start_to_length address),size))) in
+  (if bitU_to_bool (~aligned)
+   then
+     let secondstage = B0 in
+     AArch64_Abort (reset_vector_start address,AArch64_AlignmentFault (acctype,iswrite,secondstage))
+   else return ()) >>
+  let passed = AArch64_IsExclusiveVA (reset_vector_start address,ProcessorID (),size) in
+  (if bitU_to_bool (~passed)
+   then return (B0,passed)
+   else
+     let memaddrdesc =
+       AArch64_TranslateAddress (reset_vector_start address,acctype,iswrite,aligned,size) in
+     (if bitU_to_bool (IsFault memaddrdesc)
+      then AArch64_Abort (reset_vector_start address,memaddrdesc.AddressDescriptor_fault)
+      else return ()) >>
+     let passed = IsExclusiveLocal (memaddrdesc.AddressDescriptor_paddress,ProcessorID (),size) in
+     let passed =
+       if bitU_to_bool passed
+       then
+         let _ = ClearExclusiveLocal (ProcessorID ()) in
+         if bitU_to_bool memaddrdesc.AddressDescriptor_memattrs.MemoryAttributes_shareable
+         then IsExclusiveGlobal (memaddrdesc.AddressDescriptor_paddress,ProcessorID (),size)
+         else passed
+       else passed in
+     return (passed,passed)) >>= fun (w__0, passed) ->
+  return w__0
+
+(* Decoder for the "loads and stores" encoding group: dispatch on fixed
+   opcode bits to the per-class decoder (exclusives, literal loads,
+   register pairs, immediate/register-offset forms, and AdvSIMD
+   multi/single-structure forms).  NOTE(review): no catch-all arm —
+   presumably unmatched encodings cannot reach here from [decode];
+   confirm. *)
+let decodeLoadsStores machineCode =
+  match machineCode with
+  | ((Vector [_;_;B0;B0;B1;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__80) ->
+    decodeLoadStoreExclusive (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B0;B1;B1;_;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__99) ->
+    return (decodeLoadRegisterLiteral (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [_;_;B1;B0;B1;_;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__118) ->
+    decodeLoadStoreNoAllocatePairOffset
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B0;B1;_;B0;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__137) ->
+    decodeLoadStoreRegisterPairPostIndexed
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B0;B1;_;B0;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__156) ->
+    decodeLoadStoreRegisterPairOffset (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B0;B1;_;B0;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__175) ->
+    decodeLoadStoreRegisterPairPreIndexed
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B1;B1;_;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B0;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__194) ->
+    decodeLoadStoreRegisterUnscaledImmediate
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B1;B1;_;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B0;B1;_;_;_;_;_;_;_;_;_;_] _ _) as v__213) ->
+    decodeLoadStoreRegisterImmediatePostIndexed
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B1;B1;_;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B1;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__232) ->
+    decodeLoadStoreRegisterUnprivileged
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B1;B1;_;B0;B0;_;_;B0;_;_;_;_;_;_;_;_;_;B1;B1;_;_;_;_;_;_;_;_;_;_] _ _) as v__251) ->
+    decodeLoadStoreRegisterImmediatePreIndexed
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B1;B1;_;B0;B0;_;_;B1;_;_;_;_;_;_;_;_;_;B1;B0;_;_;_;_;_;_;_;_;_;_] _ _) as v__270) ->
+    decodeLoadStoreRegisterRegisterOffset
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;B1;B1;B1;_;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__289) ->
+    decodeLoadStoreRegisterUnsignedImmediate
+      (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [B0;_;B0;B0;B1;B1;B0;B0;B0;_;B0;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__308) ->
+    decodeAdvSIMDLoadStoreMultiStruct (reset_vector_start machineCode)
+  | ((Vector [B0;_;B0;B0;B1;B1;B0;B0;B1;_;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__327) ->
+    decodeAdvSIMDLoadStoreMultiStructPostIndexed (reset_vector_start machineCode)
+  | ((Vector [B0;_;B0;B0;B1;B1;B0;B1;B0;_;_;B0;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__346) ->
+    decodeAdvSIMDLoadStoreSingleStruct (reset_vector_start machineCode)
+  | ((Vector [B0;_;B0;B0;B1;B1;B0;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__365) ->
+    decodeAdvSIMDLoadStoreSingleStructPostIndexed (reset_vector_start machineCode)
+  end
+
+(* Stage-1 translation regime for the current state: the current EL when
+   not at EL0; at EL0, EL3 if Secure and EL3 is implemented and using
+   AArch32, otherwise EL1. *)
+let S1TranslationRegime () =
+  read_reg_field CurrentEL "EL" >>= fun w__0 ->
+  if bitU_to_bool (neq_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL0))
+  then
+    read_reg_field CurrentEL "EL" >>= fun w__1 ->
+    return (set_vector_start 1 w__1)
+  else
+    IsSecure () >>= fun w__2 ->
+    return (if bitU_to_bool
+                 (w__2 &.
+                    ((HaveEL (reset_vector_start EL3)) &. (ELUsingAArch32 (reset_vector_start EL3))))
+            then EL3
+            else EL1)
+
+(* AArch64.CheckForSMCTrap(): an SMC executed at Non-secure EL0/EL1 is
+   trapped to EL2 when EL2 is implemented and HCR_EL2.TSC is set.  The
+   routing-to-EL2 case is not implemented in this model ([exit]s via
+   [not_implemented]). *)
+let AArch64_CheckForSMCTrap imm =
+  IsSecure () >>= fun w__0 ->
+  read_reg_field CurrentEL "EL" >>= fun w__1 ->
+  read_reg_field CurrentEL "EL" >>= fun w__2 ->
+  read_reg_bitfield HCR_EL2 "TSC" >>= fun w__3 ->
+  let route_to_el2 =
+    (HaveEL (reset_vector_start EL2)) &.
+      ((~w__0) &.
+         (((eq_vec (set_vector_start_to_length w__1, set_vector_start_to_length EL0)) |.
+             (eq_vec (set_vector_start_to_length w__2, set_vector_start_to_length EL1))) &.
+            (eq (match w__3 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))) in
+  if bitU_to_bool route_to_el2
+  then not_implemented "AArch64_CheckForSMCTrap route_to_el2"
+  else return ()
+
+(* CheckSystemAccess(): validate a system-register access (MSR/MRS/SYS).
+   [op1] determines the minimum EL allowed ([min_EL]) and whether Secure
+   state is required ([need_secure]); op1=101 is unallocated.  Then the
+   access is rejected if the current EL is below [min_EL], if Secure
+   state is required but absent, or if the model's per-encoding check
+   ([AArch64_CheckUnallocatedSystemAccess]) flags it.  The initial
+   [unallocated] binding is a dead local from the translation. *)
+let CheckSystemAccess (op0, op1, crn, crm, op2, rt, read) =
+  let unallocated = B0 in
+  let need_secure = B0 in
+  let min_EL = to_vec_dec ((2:ii),(0:ii)) in
+  match op1 with
+  | ((Vector [B0;B0;_] _ _) as v__77) ->
+    let min_EL = EL1 in
+    return (need_secure,min_EL)
+  | Vector [B0;B1;B0] _ _ ->
+    let min_EL = EL1 in
+    return (need_secure,min_EL)
+  | Vector [B0;B1;B1] _ _ ->
+    let min_EL = EL0 in
+    return (need_secure,min_EL)
+  | Vector [B1;B0;B0] _ _ ->
+    let min_EL = EL2 in
+    return (need_secure,min_EL)
+  | Vector [B1;B0;B1] _ _ ->
+    UnallocatedEncoding () >>
+    return (need_secure,min_EL)
+  | Vector [B1;B1;B0] _ _ ->
+    let min_EL = EL3 in
+    return (need_secure,min_EL)
+  | Vector [B1;B1;B1] _ _ ->
+    let min_EL = EL1 in
+    let need_secure = B1 in
+    return (need_secure,min_EL)
+  end >>= fun (need_secure, min_EL) ->
+  read_reg_field CurrentEL "EL" >>= fun w__0 ->
+  if bitU_to_bool (lt_vec (set_vector_start_to_length w__0, set_vector_start_to_length min_EL))
+  then UnallocatedEncoding ()
+  else
+    IsSecure () >>= fun w__1 ->
+    if bitU_to_bool (need_secure &. (~w__1))
+    then UnallocatedEncoding ()
+    else
+      AArch64_CheckUnallocatedSystemAccess
+        (reset_vector_start op0,
+         reset_vector_start op1,
+         reset_vector_start crn,
+         reset_vector_start crm,
+         reset_vector_start op2,
+         read) >>= fun w__2 ->
+      if bitU_to_bool w__2
+      then UnallocatedEncoding ()
+      else return ()
+
+(* External invasive debug permission for Secure state: requires EL3 (or
+   being Secure), plus external invasive debug enabled and the SPIDEN
+   signal HIGH. *)
+let ExternalSecureInvasiveDebugEnabled () =
+  IsSecure () >>= fun w__0 ->
+  if bitU_to_bool ((~(HaveEL (reset_vector_start EL3))) &. (~w__0))
+  then return B0
+  else
+    ExternalInvasiveDebugEnabled () >>= fun w__1 ->
+    signalSPIDEN () >>= fun w__2 ->
+    return (w__1 &. (eq (w__2, HIGH)))
+
+(* Decoder for the "data processing - immediate" encoding group:
+   dispatch on fixed opcode bits to PC-relative addressing, add/subtract,
+   logical, move-wide, bitfield, or extract decoders.  NOTE(review): no
+   catch-all arm — presumably unmatched encodings cannot reach here from
+   [decode]; confirm. *)
+let decodeDataImmediate machineCode =
+  match machineCode with
+  | ((Vector [_;_;_;B1;B0;B0;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__35) ->
+    return (decodePCRelAddressing (reset_vector_start (set_vector_start_to_length machineCode)))
+  | ((Vector [_;_;_;B1;B0;B0;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__42) ->
+    decodeAddSubtractImmediate (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B1;B0;B0;B1;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__49) ->
+    decodeLogicalImmediate (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B1;B0;B0;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__56) ->
+    decodeMoveWideImmediate (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B1;B0;B0;B1;B1;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__63) ->
+    decodeBitfield (reset_vector_start (set_vector_start_to_length machineCode))
+  | ((Vector [_;_;_;B1;B0;B0;B1;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__70) ->
+    decodeExtract (reset_vector_start (set_vector_start_to_length machineCode))
+  end
+
+(* SCTLR[]: the SCTLR_ELx register for the current stage-1 translation
+   regime. *)
+let SCTLR' () =
+  S1TranslationRegime () >>= fun w__0 ->
+  return (SCTLR (reset_vector_start w__0))
+
+(* HaltingAllowed(): halting debug is not allowed when already halted or
+   OS-double-locked; otherwise it depends on the (secure or non-secure)
+   external invasive debug enable. *)
+let HaltingAllowed () =
+  Halted () >>= fun w__0 ->
+  DoubleLockStatus () >>= fun w__1 ->
+  if bitU_to_bool (w__0 |. w__1)
+  then return B0
+  else
+    IsSecure () >>= fun w__2 ->
+    if bitU_to_bool w__2
+    then ExternalSecureInvasiveDebugEnabled ()
+    else ExternalInvasiveDebugEnabled ()
+
+(* Top-level instruction decoder: dispatch on the major op0 bits of the
+   32-bit instruction to the per-group decoder.  Returns [Nothing] for
+   encodings that match no group. *)
+let decode machineCode =
+  match machineCode with
+  | ((Vector [_;_;_;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__0) ->
+    return (Just (Unallocated))
+  | ((Vector [_;_;_;B1;B0;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__5) ->
+    decodeDataImmediate (reset_vector_start machineCode)
+  | ((Vector [_;_;_;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__10) ->
+    decodeBranchesExceptionSystem (reset_vector_start machineCode)
+  | ((Vector [_;_;_;_;B1;_;B0;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__15) ->
+    decodeLoadsStores (reset_vector_start machineCode)
+  | ((Vector [_;_;_;_;B1;B0;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__20) ->
+    decodeDataRegister (reset_vector_start machineCode)
+  | ((Vector [_;_;_;B0;B1;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__25) ->
+    decodeDataSIMDFPoint1 (reset_vector_start machineCode)
+  | ((Vector [_;_;_;B1;B1;B1;B1;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_;_] _ _) as v__30) ->
+    decodeDataSIMDFPoint2 (reset_vector_start machineCode)
+  | _ -> return (Nothing)
+  end
+
+(* BigEndian(): current data endianness.  In AArch32 it comes from
+   PSTATE.E; at EL0 from SCTLR_EL1.E0E; otherwise from the current
+   regime's SCTLR.EE.  The initial [bigend = B0] is a dead default
+   shadowed on every path (generated-code artifact). *)
+let BigEndian () =
+  let bigend = B0 in
+  if bitU_to_bool (UsingAArch32 ())
+  then
+    read_reg PSTATE_E >>= fun w__0 ->
+    let bigend = neq_vec_range (w__0, (0:ii)) in
+    return bigend
+  else
+    read_reg_field CurrentEL "EL" >>= fun w__1 ->
+    if bitU_to_bool (eq_vec (set_vector_start_to_length w__1, set_vector_start_to_length EL0))
+    then
+      read_reg_bitfield SCTLR_EL1 "E0E" >>= fun w__2 ->
+      let bigend = neq (match w__2 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+      return bigend
+    else
+      SCTLR' () >>= fun w__3 ->
+      read_reg_bitfield w__3 "EE" >>= fun w__4 ->
+      let bigend = neq (match w__4 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+      return bigend
+
+(* Perform the memory read accumulated in [read_buffer]: select the
+   concrete read primitive from the exclusive flag and access type
+   (ATOMIC/ORDERED for exclusives; NORMAL/STREAM/UNPRIV/ORDERED
+   otherwise), then byte-reverse the result if the current endianness is
+   big-endian.  Exclusive accesses with any other access type exit via
+   [not_implemented]; a non-exclusive ATOMIC read returns the zero
+   default [value].  Asserts the buffered size matches [size]. *)
+let flush_read_buffer (read_buffer, size) =
+  let _ = assert' (eq_range (read_buffer.read_buffer_type_size, size)) (Nothing) in
+  let value = set_vector_start_to_length (to_vec_dec (size * (8:ii),(0:ii))) in
+  (if bitU_to_bool read_buffer.read_buffer_type_exclusive
+   then
+     match read_buffer.read_buffer_type_acctype with
+     | AccType_ATOMIC ->
+       rMem_ATOMIC
+         (reset_vector_start read_buffer.read_buffer_type_address,
+          read_buffer.read_buffer_type_size) >>= fun w__0 ->
+       return (set_vector_start_to_length w__0)
+     | AccType_ORDERED ->
+       rMem_ATOMIC_ORDERED
+         (reset_vector_start read_buffer.read_buffer_type_address,
+          read_buffer.read_buffer_type_size) >>= fun w__1 ->
+       return (set_vector_start_to_length w__1)
+     | _ ->
+       not_implemented "unimplemented memory access" >>
+       return value
+     end
+   else
+     match read_buffer.read_buffer_type_acctype with
+     | AccType_NORMAL ->
+       rMem_NORMAL
+         (reset_vector_start read_buffer.read_buffer_type_address,
+          read_buffer.read_buffer_type_size) >>= fun w__2 ->
+       return (set_vector_start_to_length w__2)
+     | AccType_STREAM ->
+       rMem_STREAM
+         (reset_vector_start read_buffer.read_buffer_type_address,
+          read_buffer.read_buffer_type_size) >>= fun w__3 ->
+       return (set_vector_start_to_length w__3)
+     | AccType_UNPRIV ->
+       rMem_NORMAL
+         (reset_vector_start read_buffer.read_buffer_type_address,
+          read_buffer.read_buffer_type_size) >>= fun w__4 ->
+       return (set_vector_start_to_length w__4)
+     | AccType_ORDERED ->
+       rMem_ORDERED
+         (reset_vector_start read_buffer.read_buffer_type_address,
+          read_buffer.read_buffer_type_size) >>= fun w__5 ->
+       return (set_vector_start_to_length w__5)
+     | AccType_ATOMIC -> return value
+     end) >>= fun value ->
+  BigEndian () >>= fun w__6 ->
+  let value =
+    if bitU_to_bool w__6
+    then
+      set_vector_start_to_length
+        (BigEndianReverse (reset_vector_start (set_vector_start_to_length value)))
+    else value in
+  return (set_vector_start_to_length value)
+
+(* CheckSPAlignment(): raise an SP-alignment fault when stack alignment
+   checking is enabled (SCTLR_EL1.SA0 at EL0, else the current regime's
+   SCTLR.SA) and SP is not 16-byte aligned. *)
+let CheckSPAlignment () =
+  rSP (64:ii) >>= fun sp ->
+  let stack_align_check = B0 in
+  read_reg_field CurrentEL "EL" >>= fun w__0 ->
+  (if bitU_to_bool (eq_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL0))
+   then
+     read_reg_bitfield SCTLR_EL1 "SA0" >>= fun w__1 ->
+     let stack_align_check = neq (match w__1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+     return stack_align_check
+   else
+     SCTLR' () >>= fun w__2 ->
+     read_reg_bitfield w__2 "SA" >>= fun w__3 ->
+     let stack_align_check = neq (match w__3 with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)) in
+     return stack_align_check) >>= fun stack_align_check ->
+  if bitU_to_bool
+       (stack_align_check &.
+          (neq_vec
+             (set_vector_start_to_length sp,
+              set_vector_start_to_length
+                (Align (reset_vector_start (set_vector_start_to_length sp),(16:ii))))))
+  then AArch64_SPAlignmentFault ()
+  else return ()
+
+(* AArch64.CheckAlignment(): raise an alignment fault for a misaligned
+   access that is ATOMIC, ORDERED, or performed with SCTLR.A set;
+   otherwise return the alignment status bit for the caller to use. *)
+let AArch64_CheckAlignment (address, size, acctype, iswrite) =
+  let aligned =
+    eq_vec
+      (set_vector_start_to_length address,
+       set_vector_start_to_length
+         (Align (reset_vector_start (set_vector_start_to_length address),size))) in
+  SCTLR' () >>= fun w__0 ->
+  read_reg_bitfield w__0 "A" >>= fun A ->
+  (if bitU_to_bool
+        ((~aligned) &.
+           ((eq (acctype, AccType_ATOMIC)) |.
+              ((eq (acctype, AccType_ORDERED)) |.
+                 (eq (match A with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii))))))
+   then
+     let secondstage = B0 in
+     AArch64_Abort (reset_vector_start address,AArch64_AlignmentFault (acctype,iswrite,secondstage))
+   else return ()) >>
+  return aligned
+
+(* Core memory-read helper (ARM Mem[] read): after the alignment check,
+   an access is single-copy atomic unless it is misaligned (and not a
+   1-byte or VEC/VECSTREAM access), in which case it is decomposed into
+   [size] byte-sized reads at successive addresses.  Exclusive accesses
+   must be atomic (asserted); the following [if ~exclusive] guard is
+   redundant with that assert but preserved from the pseudocode.  The
+   [i] binding is a dead local from the translation. *)
+let rMem' (read_buffer, address, size, acctype, exclusive) =
+  let read_buffer' = read_buffer in
+  let i = (0:ii) in
+  let iswrite = B0 in
+  AArch64_CheckAlignment (reset_vector_start address,size,acctype,iswrite) >>= fun aligned ->
+  let atomic =
+    (aligned &. (~((eq (acctype, AccType_VEC)) |. (eq (acctype, AccType_VECSTREAM))))) |.
+      (eq_range (size, (1:ii))) in
+  if bitU_to_bool (~atomic)
+  then
+    let _ = assert' (~exclusive) (Nothing) in
+    let _ = assert' (gt (size, (1:ii))) (Nothing) in
+    if bitU_to_bool (~exclusive)
+    then
+      AArch64_rMemSingle (read_buffer',reset_vector_start address,(1:ii),acctype,aligned,B0) >>= fun w__0 ->
+      let read_buffer' = w__0 in
+      (foreachM_inc ((1:ii),size - (1:ii),(1:ii)) read_buffer'
+         (fun i read_buffer' ->
+            AArch64_rMemSingle
+              (read_buffer',
+               reset_vector_start (set_vector_start 63
+                                     (add_VIV (reset_vector_start (set_vector_start_to_length address)) i)),
+               (1:ii),
+               acctype,
+               aligned,
+               B0)))
+    else return read_buffer'
+  else AArch64_rMemSingle (read_buffer',reset_vector_start address,size,acctype,aligned,exclusive)
+
+(* Core memory-write helper (ARM Mem[] write): byte-reverses the value
+   first if big-endian, then, as in [rMem'], performs either one
+   single-copy-atomic write or — for a misaligned non-exclusive
+   multi-byte access — [size] byte writes, slicing byte i of the value
+   as bits 8*i+7..8*i.  The [i] and [exclusiveSuccess] bindings are dead
+   locals from the translation. *)
+let wMem' (write_buffer, address, size, acctype, value, exclusive) =
+  let write_buffer' = write_buffer in
+  let value' = set_vector_start_to_length value in
+  let i = (0:ii) in
+  let iswrite = B1 in
+  BigEndian () >>= fun w__0 ->
+  let value' =
+    if bitU_to_bool w__0
+    then
+      set_vector_start_to_length
+        (BigEndianReverse (reset_vector_start (set_vector_start_to_length value')))
+    else value' in
+  AArch64_CheckAlignment (reset_vector_start address,size,acctype,iswrite) >>= fun aligned ->
+  let atomic =
+    (aligned &. (~((eq (acctype, AccType_VEC)) |. (eq (acctype, AccType_VECSTREAM))))) |.
+      (eq_range (size, (1:ii))) in
+  let exclusiveSuccess = B0 in
+  if bitU_to_bool (~atomic)
+  then
+    let _ = assert' (~exclusive) (Nothing) in
+    if bitU_to_bool (~exclusive)
+    then
+      let _ = assert' (gt (size, (1:ii))) (Nothing) in
+      AArch64_wMemSingle
+        (write_buffer',
+         reset_vector_start address,
+         (1:ii),
+         acctype,
+         aligned,
+         B0,
+         reset_vector_start (set_vector_start_to_length
+                               (slice (set_vector_start_to_length value') (7:ii) (0:ii)))) >>= fun w__1 ->
+      let write_buffer' = w__1 in
+      (foreachM_inc ((1:ii),size - (1:ii),(1:ii)) write_buffer'
+         (fun i write_buffer' ->
+            AArch64_wMemSingle
+              (write_buffer',
+               reset_vector_start (set_vector_start 63
+                                     (add_VIV (reset_vector_start (set_vector_start_to_length address)) i)),
+               (1:ii),
+               acctype,
+               aligned,
+               B0,
+               reset_vector_start (set_vector_start_to_length
+                                     (slice (set_vector_start_to_length value') (((8:ii) * i) + (7:ii)) ((8:ii) * i))))))
+    else return write_buffer'
+  else
+    AArch64_wMemSingle
+      (write_buffer',
+       reset_vector_start address,
+       size,
+       acctype,
+       aligned,
+       exclusive,
+       reset_vector_start (set_vector_start_to_length value'))
+
+(* Non-exclusive read: [rMem'] with the exclusive flag cleared. *)
+let rMem (read_buffer, address, size, acctype) =
+  rMem' (read_buffer,reset_vector_start address,size,acctype,B0)
+
+(* Exclusive read: [rMem'] with the exclusive flag set. *)
+let rMem_exclusive (read_buffer, address, size, acctype) =
+  rMem' (read_buffer,reset_vector_start address,size,acctype,B1)
+
+(* Non-exclusive write: [wMem'] with the exclusive flag cleared. *)
+let wMem (write_buffer, address, size, acctype, value) =
+  wMem'
+    (write_buffer,
+     reset_vector_start address,
+     size,
+     acctype,
+     reset_vector_start (set_vector_start_to_length value),
+     B0)
+
+(* Exclusive write: [wMem'] with the exclusive flag set. *)
+let wMem_exclusive (write_buffer, address, size, acctype, value) =
+  wMem'
+    (write_buffer,
+     reset_vector_start address,
+     size,
+     acctype,
+     reset_vector_start (set_vector_start_to_length value),
+     B1)
+
+
+
+(* Transactional-memory start: if the current nesting level is below
+   TXIDR_EL0.DEPTH, increment it and write a status word to Xt (the
+   TMStartEffect register's value for an outermost start, zero for a
+   nested one); otherwise record a depth-exceeded abort status (bit 10
+   set) in TMAbortEffect. *)
+let execute_TMStart t =
+  read_reg TxNestingLevel >>= fun nesting ->
+  read_reg_field TXIDR_EL0 "DEPTH" >>= fun w__0 ->
+  if bitU_to_bool
+       (lt_vec_unsigned (set_vector_start_to_length nesting, set_vector_start_to_length w__0))
+  then
+    write_reg
+      TxNestingLevel
+      (set_vector_start 7 (add_VIV (reset_vector_start (set_vector_start_to_length nesting)) (1:ii))) >>
+    let status = to_vec_dec ((64:ii),(0:ii)) in
+    (if bitU_to_bool (eq_vec_range (set_vector_start_to_length nesting, (0:ii)))
+     then read_reg TMStartEffect
+     else return status) >>= fun status ->
+    wX (t,reset_vector_start (set_vector_start_to_length status))
+  else
+    let status = to_vec_dec ((64:ii),(0:ii)) in
+    let status = update_pos status (10:ii) B1 in
+    write_reg TMAbortEffect status
+
+(* Transactional-memory commit: at nesting level 1 trigger the commit
+   effect, at level 0 raise an undefined fault (commit outside a
+   transaction), then decrement the nesting level. *)
+let execute_TMCommit () =
+  read_reg TxNestingLevel >>= fun nesting ->
+  (if bitU_to_bool (eq_vec_range (set_vector_start_to_length nesting, (1:ii)))
+   then TMCommitEffect ()
+   else
+     if bitU_to_bool (eq_vec_range (set_vector_start_to_length nesting, (0:ii)))
+     then AArch64_UndefinedFault ()
+     else return ()) >>
+  write_reg
+    TxNestingLevel
+    (set_vector_start 7 (minus_VIV (reset_vector_start (set_vector_start_to_length nesting)) (1:ii)))
+
+(* Transactional-memory test: set NZCV to 0b0000 when inside a
+   transaction (nesting > 0), otherwise 0b0100 (Z set). *)
+let execute_TMTest () =
+  read_reg TxNestingLevel >>= fun w__0 ->
+  if bitU_to_bool (gt_vec_range (w__0, (0:ii)))
+  then wPSTATE_NZCV ((),reset_vector_start (Vector [B0;B0;B0;B0] 3 false))
+  else wPSTATE_NZCV ((),reset_vector_start (Vector [B0;B1;B0;B0] 3 false))
+
+(* Transactional-memory abort: when inside a transaction, build a status
+   word with the abort [reason] in bits 4..0, [retry] in bit 8, and bit 9
+   set, and write it to TMAbortEffect; outside a transaction do
+   nothing. *)
+let execute_TMAbort (retry, reason) =
+  read_reg TxNestingLevel >>= fun w__0 ->
+  if bitU_to_bool (gt_vec_range (w__0, (0:ii)))
+  then
+    let status = to_vec_dec ((64:ii),(0:ii)) in
+    let status = update status (4:ii) (0:ii) reason in
+    let status = update_pos status (8:ii) retry in
+    let status = update_pos status (9:ii) B1 in
+    write_reg TMAbortEffect status
+  else return ()
+
+(* CBZ/CBNZ: read Xt; if its zero-ness equals [iszero], branch to
+   PC + offset (BranchType_JMP), otherwise fall through. *)
+let execute_CompareAndBranch (t, datasize, iszero, offset) =
+  rX (datasize,t) >>= fun w__0 ->
+  let operand1 = set_vector_start_to_length w__0 in
+  if bitU_to_bool
+       (eq_bit (IsZero (reset_vector_start (set_vector_start_to_length operand1)), iszero))
+  then
+    rPC () >>= fun w__1 ->
+    BranchTo
+      (reset_vector_start (set_vector_start_to_length
+                             (add_VVV
+                                (reset_vector_start (set_vector_start_to_length w__1))
+                                (reset_vector_start (set_vector_start_to_length offset)))),
+       BranchType_JMP)
+  else return ()
+
+(* B.cond: branch to PC + offset when [condition] holds under the
+   current NZCV flags, otherwise fall through. *)
+let execute_BranchConditional (offset, condition) =
+  ConditionHolds (reset_vector_start condition) >>= fun w__0 ->
+  if bitU_to_bool w__0
+  then
+    rPC () >>= fun w__1 ->
+    BranchTo
+      (reset_vector_start (set_vector_start_to_length
+                             (add_VVV
+                                (reset_vector_start (set_vector_start_to_length w__1))
+                                (reset_vector_start (set_vector_start_to_length offset)))),
+       BranchType_JMP)
+  else return ()
+
+(* SVC: supervisor call with immediate [imm], taken to EL1. *)
+let execute_GenerateExceptionEL1 imm = AArch64_CallSupervisor (reset_vector_start imm)
+
+(* HVC: hypervisor call.  Unallocated when EL2 is absent, at EL0, or at
+   EL1 in Secure state.  The enable comes from SCR_EL3.HCE when EL3
+   exists, else from the inverse of HCR_EL2.HCD; when disabled the
+   instruction is undefined, otherwise the hypervisor call is taken. *)
+let execute_GenerateExceptionEL2 imm =
+  read_reg_field CurrentEL "EL" >>= fun w__0 ->
+  read_reg_field CurrentEL "EL" >>= fun w__1 ->
+  IsSecure () >>= fun w__2 ->
+  (if bitU_to_bool
+        ((~(HaveEL (reset_vector_start EL2))) |.
+           ((eq_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL0)) |.
+              ((eq_vec (set_vector_start_to_length w__1, set_vector_start_to_length EL1)) &. w__2)))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool (HaveEL (reset_vector_start EL3))
+   then read_reg_bitfield SCR_EL3 "HCE"
+   else
+     read_reg_bitfield HCR_EL2 "HCD" >>= fun w__4 ->
+     return (NOT' w__4)) >>= fun hvc_enable ->
+  if bitU_to_bool (eq (match hvc_enable with | B0 -> (0:ii) | B1 -> (1:ii) end, (0:ii)))
+  then AArch64_UndefinedFault ()
+  else AArch64_CallHypervisor (reset_vector_start imm)
+
+(* SMC: secure monitor call.  Unallocated when EL3 is absent or at EL0;
+   may trap to EL2 via [AArch64_CheckForSMCTrap]; undefined when
+   SCR_EL3.SMD (SMC disable) is set, otherwise the monitor call is
+   taken. *)
+let execute_GenerateExceptionEL3 imm =
+  read_reg_field CurrentEL "EL" >>= fun w__0 ->
+  (if bitU_to_bool
+        ((~(HaveEL (reset_vector_start EL3))) |.
+           (eq_vec (set_vector_start_to_length w__0, set_vector_start_to_length EL0)))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  AArch64_CheckForSMCTrap (reset_vector_start imm) >>
+  read_reg_bitfield SCR_EL3 "SMD" >>= fun w__1 ->
+  if bitU_to_bool (eq (match w__1 with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+  then AArch64_UndefinedFault ()
+  else AArch64_CallSecureMonitor (reset_vector_start imm)
+
+(* BRK: software breakpoint with the given comment immediate. *)
+let execute_DebugBreakpoint comment = AArch64_SoftwareBreakpoint (reset_vector_start comment)
+
+(* HLT: halt into external debug state with the halt-instruction reason. *)
+let execute_ExternalDebugBreakpoint () = Halt (reset_vector_start DebugHalt_HaltInstruction)
+
+(* DCPS: in debug state, switch execution to [target_level]. *)
+let execute_DebugSwitchToExceptionLevel target_level =
+  DCPSInstruction (reset_vector_start target_level)
+
+(* MSR (immediate) to a PSTATE field: SPSel takes operand bit 0;
+   DAIFSet ORs operand bits 3..0 into DAIF.{D,A,I,F}; DAIFClr ANDs their
+   complements in, clearing the selected mask bits. *)
+let execute_MoveSystemImmediate (operand, field') =
+  match field' with
+  | PSTATEField_SP -> write_reg_bitfield SPSel "SP" (access operand (0:ii))
+  | PSTATEField_DAIFSet ->
+    read_reg_bitfield DAIF "D" >>= fun w__0 ->
+    write_reg_bitfield DAIF "D" (w__0 |. (access operand (3:ii))) >>
+    read_reg_bitfield DAIF "A" >>= fun w__1 ->
+    write_reg_bitfield DAIF "A" (w__1 |. (access operand (2:ii))) >>
+    read_reg_bitfield DAIF "I" >>= fun w__2 ->
+    write_reg_bitfield DAIF "I" (w__2 |. (access operand (1:ii))) >>
+    read_reg_bitfield DAIF "F" >>= fun w__3 ->
+    write_reg_bitfield DAIF "F" (w__3 |. (access operand (0:ii)))
+  | PSTATEField_DAIFClr ->
+    read_reg_bitfield DAIF "D" >>= fun w__4 ->
+    write_reg_bitfield DAIF "D" (w__4 &. (NOT' (access operand (3:ii)))) >>
+    read_reg_bitfield DAIF "A" >>= fun w__5 ->
+    write_reg_bitfield DAIF "A" (w__5 &. (NOT' (access operand (2:ii)))) >>
+    read_reg_bitfield DAIF "I" >>= fun w__6 ->
+    write_reg_bitfield DAIF "I" (w__6 &. (NOT' (access operand (1:ii)))) >>
+    read_reg_bitfield DAIF "F" >>= fun w__7 ->
+    write_reg_bitfield DAIF "F" (w__7 &. (NOT' (access operand (0:ii))))
+  end
+
+(* HINT-space instructions (YIELD / WFE / WFI / SEV / SEVL); any other
+   hint is a NOP (final catch-all returns unit).
+   WFE: if the event register is set, consume it; otherwise run the
+   WFx-trap checks for EL1 (when at EL0), EL2 (when EL2 exists, the PE
+   is non-secure, and it is at EL0/EL1) and EL3 (when EL3 exists and the
+   PE is below EL3), then wait for an event.  WFI mirrors this, guarded
+   by no interrupt being pending, with the trap-kind bit B0 instead of
+   B1.  NOTE(review): CurrentEL.EL is re-read for each comparison
+   (w__3/w__4, w__9/w__10), faithfully mirroring the generated Sail;
+   all reads should observe the same value. *)
+let execute_Hint op =
+  match op with
+  | SystemHintOp_YIELD -> return (Hint_Yield ())
+  | SystemHintOp_WFE ->
+    EventRegistered () >>= fun w__0 ->
+    if bitU_to_bool w__0
+    then ClearEventRegister ()
+    else
+      read_reg_field CurrentEL "EL" >>= fun w__1 ->
+      (if bitU_to_bool (eq_vec (set_vector_start_to_length w__1, set_vector_start_to_length EL0))
+       then AArch64_CheckForWFxTrap (reset_vector_start EL1,B1)
+       else return ()) >>
+      IsSecure () >>= fun w__2 ->
+      read_reg_field CurrentEL "EL" >>= fun w__3 ->
+      read_reg_field CurrentEL "EL" >>= fun w__4 ->
+      (if bitU_to_bool
+            ((HaveEL (reset_vector_start EL2)) &.
+               ((~w__2) &.
+                  ((eq_vec (set_vector_start_to_length w__3, set_vector_start_to_length EL0)) |.
+                     (eq_vec (set_vector_start_to_length w__4, set_vector_start_to_length EL1)))))
+       then AArch64_CheckForWFxTrap (reset_vector_start EL2,B1)
+       else return ()) >>
+      read_reg_field CurrentEL "EL" >>= fun w__5 ->
+      (if bitU_to_bool
+            ((HaveEL (reset_vector_start EL3)) &.
+               (neq_vec (set_vector_start_to_length w__5, set_vector_start_to_length EL3)))
+       then AArch64_CheckForWFxTrap (reset_vector_start EL3,B1)
+       else return ()) >>
+      WaitForEvent ()
+  | SystemHintOp_WFI ->
+    InterruptPending () >>= fun w__6 ->
+    if bitU_to_bool (~w__6)
+    then
+      read_reg_field CurrentEL "EL" >>= fun w__7 ->
+      (if bitU_to_bool (eq_vec (set_vector_start_to_length w__7, set_vector_start_to_length EL0))
+       then AArch64_CheckForWFxTrap (reset_vector_start EL1,B0)
+       else return ()) >>
+      IsSecure () >>= fun w__8 ->
+      read_reg_field CurrentEL "EL" >>= fun w__9 ->
+      read_reg_field CurrentEL "EL" >>= fun w__10 ->
+      (if bitU_to_bool
+            ((HaveEL (reset_vector_start EL2)) &.
+               ((~w__8) &.
+                  ((eq_vec (set_vector_start_to_length w__9, set_vector_start_to_length EL0)) |.
+                     (eq_vec (set_vector_start_to_length w__10, set_vector_start_to_length EL1)))))
+       then AArch64_CheckForWFxTrap (reset_vector_start EL2,B0)
+       else return ()) >>
+      read_reg_field CurrentEL "EL" >>= fun w__11 ->
+      (if bitU_to_bool
+            ((HaveEL (reset_vector_start EL3)) &.
+               (neq_vec (set_vector_start_to_length w__11, set_vector_start_to_length EL3)))
+       then AArch64_CheckForWFxTrap (reset_vector_start EL3,B0)
+       else return ()) >>
+      WaitForInterrupt ()
+    else return ()
+  | SystemHintOp_SEV -> return (SendEvent ())
+  | SystemHintOp_SEVL -> EventRegisterSet ()
+  | _ -> return ()
+  end
+
+(* CLREX: clear this PE's local exclusive monitor.  The immediate
+   [imm] is accepted but ignored, as in the ARM pseudocode. *)
+let execute_ClearExclusiveMonitor imm =
+  let cleared = ClearExclusiveLocal (ProcessorID ()) in
+  return cleared
+
+(* Barrier instructions.  DSB and DMB forward their (domain, types)
+   arguments to the corresponding barrier primitive; ISB takes no
+   arguments and ignores both. *)
+let execute_Barrier (op, domain, types) =
+  match op with
+  | MemBarrierOp_DSB -> DataSynchronizationBarrier (domain,types)
+  | MemBarrierOp_DMB -> DataMemoryBarrier (domain,types)
+  | MemBarrierOp_ISB -> InstructionSynchronizationBarrier ()
+  end
+
+(* SYS/SYSL: generic system instruction addressed by (op0,op1,CRn,CRm,op2).
+   With [has_result] set, performs a system read (SysOp_R) and writes the
+   value into X[t]; otherwise reads the 64-bit X[t] and performs a system
+   write (SysOp_W). *)
+let execute_System (t, sys_op0, sys_op1, sys_op2, sys_crn, sys_crm, has_result) =
+  if bitU_to_bool has_result
+  then
+    SysOp_R (sys_op0,sys_op1,sys_crn,sys_crm,sys_op2) >>= fun w__0 ->
+    wX (t,reset_vector_start (set_vector_start_to_length w__0))
+  else
+    rX ((64:ii),t) >>= fun w__1 ->
+    SysOp_W (sys_op0,sys_op1,sys_crn,sys_crm,sys_op2,reset_vector_start w__1)
+
+(* MRS/MSR (register): move between a system register, addressed by
+   (op0,op1,CRn,CRm,op2), and general register X[t].  [read] = B1 is
+   MRS (system -> X[t]); otherwise MSR (X[t] -> system). *)
+let execute_MoveSystemRegister (t, sys_op0, sys_op1, sys_op2, sys_crn, sys_crm, read) =
+  if bitU_to_bool read
+  then
+    System_Get (sys_op0,sys_op1,sys_crn,sys_crm,sys_op2) >>= fun w__0 ->
+    wX (t,reset_vector_start (set_vector_start_to_length w__0))
+  else
+    rX ((64:ii),t) >>= fun w__1 ->
+    System_Put (sys_op0,sys_op1,sys_crn,sys_crm,sys_op2,reset_vector_start w__1)
+
+(* Implementation-defined test marker: logs begin/end of a test via
+   [info] (a no-op in this model; see the definition near the top of
+   the file).  Pure apart from the info call; always returns unit. *)
+let execute_ImplementationDefinedTestBeginEnd isEnd =
+  return (if bitU_to_bool isEnd
+          then info "test ends"
+          else info "test begins")
+
+(* Implementation-defined marker: stop fetching instructions.  [info]
+   is a no-op here, so this is effectively a NOP returning unit. *)
+let execute_ImplementationDefinedStopFetching () = return (info "stop fetching instructions")
+
+(* Implementation-defined marker: thread start.  [info] is a no-op
+   here, so this is effectively a NOP returning unit. *)
+let execute_ImplementationDefinedThreadStart () = return (info "thread start")
+
+(* TBZ/TBNZ: test bit [bit_pos] of X[t] (at [datasize]); if it equals
+   [bit_val], branch to PC + offset with branch type JMP, else fall
+   through.  The nested reset/set_vector_start calls are presumably
+   start-index normalisations from Sail_values — confirm there. *)
+let execute_TestBitAndBranch (t, datasize, bit_pos, bit_val, offset) =
+  rX (datasize,t) >>= fun w__0 ->
+  let operand = set_vector_start_to_length w__0 in
+  if bitU_to_bool (eq_bit (access (set_vector_start_to_length operand) bit_pos, bit_val))
+  then
+    rPC () >>= fun w__1 ->
+    BranchTo
+      (reset_vector_start (set_vector_start_to_length
+                             (add_VVV
+                                (reset_vector_start (set_vector_start_to_length w__1))
+                                (reset_vector_start (set_vector_start_to_length offset)))),
+       BranchType_JMP)
+  else return ()
+
+(* B / BL (immediate): for a CALL (BL), first write the return address
+   PC + 4 into the link register X30; then branch to PC + offset with
+   the given branch type. *)
+let execute_BranchImmediate (branch_type, offset) =
+  (if bitU_to_bool (eq (branch_type, BranchType_CALL))
+   then
+     rPC () >>= fun w__0 ->
+     wX
+       ((30:ii),
+        reset_vector_start (set_vector_start_to_length
+                              (add_VIV (reset_vector_start (set_vector_start_to_length w__0)) (4:ii))))
+   else return ()) >>
+  rPC () >>= fun w__1 ->
+  BranchTo
+    (reset_vector_start (set_vector_start_to_length
+                           (add_VVV
+                              (reset_vector_start (set_vector_start_to_length w__1))
+                              (reset_vector_start (set_vector_start_to_length offset)))),
+     branch_type)
+
+(* BR / BLR / RET (register): branch to the 64-bit address in X[n].
+   The target is read BEFORE the link register is written, so
+   "BLR X30" uses the old X30 value, as required.  For a CALL (BLR),
+   PC + 4 is written into X30. *)
+let execute_BranchRegister (n, branch_type) =
+  rX ((64:ii),n) >>= fun target ->
+  (if bitU_to_bool (eq (branch_type, BranchType_CALL))
+   then
+     rPC () >>= fun w__0 ->
+     wX
+       ((30:ii),
+        reset_vector_start (set_vector_start_to_length
+                              (add_VIV (reset_vector_start (set_vector_start_to_length w__0)) (4:ii))))
+   else return ()) >>
+  BranchTo (reset_vector_start (set_vector_start_to_length target),branch_type)
+
+(* ERET: exception return.  Reads the current ELR and SPSR and hands
+   them to the AArch64 exception-return routine, which restores PSTATE
+   and branches. *)
+let execute_ExceptionReturn () =
+  rELR' () >>= fun w__0 ->
+  rSPSR () >>= fun w__1 ->
+  AArch64_ExceptionReturn (reset_vector_start w__0,reset_vector_start w__1)
+
+(* DRPS: debug restore PE state — delegates entirely to the extras
+   module's DRPSInstruction. *)
+let execute_DebugRestorePState () = DRPSInstruction ()
+
+(* LDR (literal) and literal prefetch.  Address = PC + offset.  For a
+   LOAD, reads [size] bytes, then writes X[t] either sign-extended to
+   64 bits (when [_signed]) or as-is; PREFETCH performs no access.
+   The trailing ">>= fun data -> return ()" discards the threaded data
+   value — only the register write matters.
+   NOTE(review): [datasize] is used only for the initial placeholder
+   [data]; the access width is [size] — confirm the caller keeps them
+   consistent. *)
+let execute_LoadLiteral (t, memop, _signed, size, offset, datasize) =
+  rPC () >>= fun w__0 ->
+  let address =
+    set_vector_start 63
+      (add_VVV
+         (reset_vector_start (set_vector_start_to_length w__0))
+         (reset_vector_start (set_vector_start_to_length offset))) in
+  let data = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  match memop with
+  | MemOp_LOAD ->
+    rMem (empty_read_buffer,reset_vector_start address,size,AccType_NORMAL) >>= fun w__1 ->
+    flush_read_buffer (w__1,size) >>= fun w__2 ->
+    let data = set_vector_start_to_length w__2 in
+    (if bitU_to_bool _signed
+     then
+       wX
+         (t,
+          reset_vector_start (set_vector_start_to_length
+                                (SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length data)))))
+     else wX (t,reset_vector_start (set_vector_start_to_length data))) >>
+    return data
+  | MemOp_PREFETCH -> return data
+  end >>= fun data ->
+  return ()
+
+(* Load/store exclusive and acquire/release (LDXR/LDAXR/STXR/STLXR and
+   their pair forms), following the ARM ASL shape:
+   1. decode checks — UnallocatedEncoding for a pair load with t = t2,
+      or a store-exclusive whose status register s aliases t/t2/n;
+   2. store-exclusive only: speculatively decide success
+      (speculate_exclusive_success) and record the status in X[s]
+      zero-extended to 32 bits (0 = success, 1 = fail); on failure the
+      whole memory access below is skipped;
+   3. address comes from SP (with alignment check) when n = 31, else
+      X[n] (rn_unknown is fixed to B0 here, so the UNKNOWN branch is
+      dead in this model);
+   4. STORE: announce the footprint (wMem_Addr), build the data — pair
+      forms concatenate X[t]/X[t2] in big- or little-endian order —
+      then either an exclusive write gated by
+      AArch64_ExclusiveMonitorsPass or a plain write;
+      LOAD: arm the exclusive monitors, then for a 32-bit-element pair
+      a single double-width read split by slicing, for a 64-bit pair
+      two 8-byte reads with an explicit alignment-fault check, and for
+      the single-register form a zero-extended load into X[t].
+   The (address,data,status) tuple threaded through the monad is
+   discarded at the end; only register/memory effects remain.
+   NOTE(review): set_vector_start_to_length / reset_vector_start are
+   presumed start-index normalisations from Sail_values — confirm. *)
+let execute_LoadStoreAcqExc (n, t, t2, s, acctype, excl, pair, memop, elsize, regsize, datasize) =
+  let address = to_vec_dec ((64:ii),(0:ii)) in
+  let data = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  let dbytes = quot datasize (8:ii) in
+  let rt_unknown = B0 in
+  let rn_unknown = B0 in
+  (* Decode-time register-aliasing checks. *)
+  (if bitU_to_bool ((eq (memop, MemOp_LOAD)) &. (pair &. (eq_range (t, t2))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool ((eq (memop, MemOp_STORE)) &. excl)
+   then
+     (if bitU_to_bool ((eq_range (s, t)) |. (pair &. (eq_range (s, t2))))
+      then UnallocatedEncoding ()
+      else return ()) >>
+     if bitU_to_bool ((eq_range (s, n)) &. (neq_range (n, (31:ii))))
+     then UnallocatedEncoding ()
+     else return ()
+   else return ()) >>
+  let status = B0 in
+  (* Store-exclusive: speculate success and publish the status in X[s]. *)
+  (if bitU_to_bool ((eq (memop, MemOp_STORE)) &. excl)
+   then
+     speculate_exclusive_success () >>= fun w__0 ->
+     let status = if bitU_to_bool w__0 then B0 else B1 in
+     wX
+       (s,
+        reset_vector_start (set_vector_start_to_length
+                              (ZeroExtend
+                                 ((32:ii),
+                                  reset_vector_start (set_vector_start_to_length (Vector [status] 0 false)))))) >>
+     return status
+   else return status) >>= fun status ->
+  (if bitU_to_bool (eq (match status with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+   then return (address,data,status)
+   else
+     (* Base address: SP (checked) for n = 31, otherwise X[n]. *)
+     (if bitU_to_bool (eq_range (n, (31:ii)))
+      then
+        CheckSPAlignment () >>
+        rSP (64:ii)
+      else
+        if bitU_to_bool rn_unknown
+        then
+          let address = to_vec_dec ((64:ii),UNKNOWN) in
+          return address
+        else rX ((64:ii),n)) >>= fun address ->
+     match memop with
+     | MemOp_STORE ->
+       wMem_Addr (reset_vector_start address,dbytes,acctype,excl) >>
+       (if bitU_to_bool rt_unknown
+        then
+          let data = set_vector_start_to_length (to_vec_dec (length data,UNKNOWN)) in
+          return data
+        else
+          if bitU_to_bool pair
+          then
+            let _ = assert' excl (Nothing) in
+            rX (regsize,t) >>= fun w__3 ->
+            let el1 = set_vector_start_to_length w__3 in
+            rX (length el1,t2) >>= fun w__4 ->
+            let el2 = set_vector_start_to_length w__4 in
+            BigEndian () >>= fun w__5 ->
+            let data =
+              if bitU_to_bool w__5
+              then
+                set_vector_start_to_length
+                  ((set_vector_start_to_length el1) ^^ (set_vector_start_to_length el2))
+              else
+                set_vector_start_to_length
+                  ((set_vector_start_to_length el2) ^^ (set_vector_start_to_length el1)) in
+            return data
+          else
+            rX (length data,t) >>= fun w__6 ->
+            return (set_vector_start_to_length w__6)) >>= fun data ->
+       (if bitU_to_bool excl
+        then
+          let status = B1 in
+          AArch64_ExclusiveMonitorsPass (reset_vector_start address,dbytes) >>= fun w__7 ->
+          if bitU_to_bool w__7
+          then
+            wMem_exclusive
+              (empty_write_buffer,
+               reset_vector_start address,
+               dbytes,
+               acctype,
+               reset_vector_start (set_vector_start_to_length data)) >>= fun w__8 ->
+            flush_write_buffer_exclusive w__8
+          else return status
+        else
+          wMem
+            (empty_write_buffer,
+             reset_vector_start address,
+             dbytes,
+             acctype,
+             reset_vector_start (set_vector_start_to_length data)) >>= fun w__10 ->
+          flush_write_buffer w__10 >>
+          return status) >>= fun status ->
+       return (data,status)
+     | MemOp_LOAD ->
+       let _ =
+         if bitU_to_bool excl
+         then AArch64_SetExclusiveMonitors (reset_vector_start address,dbytes)
+         else () in
+       (if bitU_to_bool pair
+        then
+          let _ = assert' excl (Nothing) in
+          if bitU_to_bool rt_unknown
+          then
+            wX
+              (t,
+               reset_vector_start (set_vector_start_to_length (to_vec_dec (length data,UNKNOWN)))) >>
+            return data
+          else
+            if bitU_to_bool (eq_range (elsize, (32:ii)))
+            then
+              rMem_exclusive (empty_read_buffer,reset_vector_start address,dbytes,acctype) >>= fun w__11 ->
+              flush_read_buffer (w__11,dbytes) >>= fun w__12 ->
+              let data = set_vector_start_to_length w__12 in
+              BigEndian () >>= fun w__13 ->
+              (if bitU_to_bool w__13
+               then
+                 wX
+                   (t,
+                    reset_vector_start (set_vector_start_to_length
+                                          (slice (set_vector_start_to_length data) (datasize - (1:ii)) elsize))) >>
+                 wX
+                   (t2,
+                    reset_vector_start (set_vector_start_to_length
+                                          (slice (set_vector_start_to_length data) (elsize - (1:ii)) (0:ii))))
+               else
+                 wX
+                   (t,
+                    reset_vector_start (set_vector_start_to_length
+                                          (slice (set_vector_start_to_length data) (elsize - (1:ii)) (0:ii)))) >>
+                 wX
+                   (t2,
+                    reset_vector_start (set_vector_start_to_length
+                                          (slice (set_vector_start_to_length data) (datasize - (1:ii)) elsize)))) >>
+              return data
+            else
+              (if bitU_to_bool
+                    (neq_vec
+                       (set_vector_start_to_length address,
+                        set_vector_start_to_length
+                          (Align (reset_vector_start (set_vector_start_to_length address),dbytes))))
+               then
+                 let iswrite = B0 in
+                 let secondstage = B0 in
+                 AArch64_Abort
+                   (reset_vector_start address,
+                    AArch64_AlignmentFault (acctype,iswrite,secondstage))
+               else return ()) >>
+              rMem_exclusive
+                (empty_read_buffer,
+                 reset_vector_start (set_vector_start 63
+                                       (add_VIV (reset_vector_start (set_vector_start_to_length address)) (0:ii))),
+                 (8:ii),
+                 acctype) >>= fun read_buffer ->
+              rMem_exclusive
+                (read_buffer,
+                 reset_vector_start (set_vector_start 63
+                                       (add_VIV (reset_vector_start (set_vector_start_to_length address)) (8:ii))),
+                 (8:ii),
+                 acctype) >>= fun w__14 ->
+              let read_buffer = w__14 in
+              flush_read_buffer (read_buffer,(8:ii) * (2:ii)) >>= fun w__15 ->
+              let value = set_vector_start 127 w__15 in
+              wX (t,reset_vector_start (set_vector_start_to_length (slice value (63:ii) (0:ii)))) >>
+              wX
+                (t2,
+                 reset_vector_start (set_vector_start_to_length (slice value (127:ii) (64:ii)))) >>
+              return data
+        else
+          rMem' (empty_read_buffer,reset_vector_start address,dbytes,acctype,excl) >>= fun w__16 ->
+          flush_read_buffer (w__16,dbytes) >>= fun w__17 ->
+          let data = set_vector_start_to_length w__17 in
+          wX
+            (t,
+             reset_vector_start (set_vector_start_to_length
+                                   (set_vector_start_to_length
+                                      (ZeroExtend (regsize,reset_vector_start (set_vector_start_to_length data)))))) >>
+          return data) >>= fun data ->
+       return (data,status)
+     end >>= fun (data, status) ->
+     return (address,data,status)) >>= fun (address, data, status) ->
+  return ()
+
+(* LDNP/STNP: non-temporal load/store pair.  Structure: decode check
+   (pair load with t = t2 is unallocated); base address from SP
+   (alignment-checked) or X[n]; pre-index add of [offset] unless
+   [postindex]; store footprint announcement; optional write-back of
+   the post-/pre-indexed address to SP or X[n]; then the two dbytes
+   accesses at address+0 and address+dbytes.  A load slices the
+   combined read into data1 (low half) and data2 (high half) and
+   writes X[t]/X[t2].  rt_unknown is fixed B0, so the UNKNOWN branches
+   are dead in this model.  The (data1,data2) result is discarded. *)
+let execute_LoadStorePairNonTemp (wback, postindex, n, t, t2, acctype, memop, scale, datasize, offset) =
+  let address = to_vec_dec ((64:ii),(0:ii)) in
+  let data1 = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  let data2 = set_vector_start_to_length (to_vec_dec (length data1,(0:ii))) in
+  let dbytes = quot datasize (8:ii) in
+  let rt_unknown = B0 in
+  (if bitU_to_bool ((eq (memop, MemOp_LOAD)) &. (eq_range (t, t2)))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool (eq_range (n, (31:ii)))
+   then
+     CheckSPAlignment () >>
+     rSP (64:ii)
+   else rX ((64:ii),n)) >>= fun address ->
+  let address =
+    if bitU_to_bool (~postindex)
+    then
+      set_vector_start 63
+        (add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)))
+    else address in
+  (if bitU_to_bool (eq (memop, MemOp_STORE))
+   then wMem_Addr (reset_vector_start address,dbytes * (2:ii),acctype,B0)
+   else return ()) >>
+  (* Write-back happens before the data accesses, as in the ASL. *)
+  (if bitU_to_bool wback
+   then
+     let address' =
+       if bitU_to_bool (~postindex)
+       then address
+       else
+         add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)) in
+     if bitU_to_bool (eq_range (n, (31:ii)))
+     then wSP ((),reset_vector_start (set_vector_start_to_length address'))
+     else wX (n,reset_vector_start (set_vector_start_to_length address'))
+   else return ()) >>
+  match memop with
+  | MemOp_STORE ->
+    (if bitU_to_bool (rt_unknown &. (eq_range (t, n)))
+     then
+       let data1 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+       return data1
+     else
+       rX (length data1,t) >>= fun w__2 ->
+       return (set_vector_start_to_length w__2)) >>= fun data1 ->
+    (if bitU_to_bool (rt_unknown &. (eq_range (t2, n)))
+     then
+       let data2 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+       return data2
+     else
+       rX (length data1,t2) >>= fun w__3 ->
+       return (set_vector_start_to_length w__3)) >>= fun data2 ->
+    wMem
+      (empty_write_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) (0:ii))),
+       dbytes,
+       acctype,
+       reset_vector_start (set_vector_start_to_length data1)) >>= fun write_buffer ->
+    wMem
+      (write_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) dbytes)),
+       dbytes,
+       acctype,
+       reset_vector_start (set_vector_start_to_length data2)) >>= fun w__4 ->
+    let write_buffer = w__4 in
+    flush_write_buffer write_buffer >>
+    return (data1,data2)
+  | MemOp_LOAD ->
+    rMem
+      (empty_read_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) (0:ii))),
+       dbytes,
+       acctype) >>= fun read_buffer ->
+    rMem
+      (read_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) dbytes)),
+       dbytes,
+       acctype) >>= fun w__5 ->
+    let read_buffer = w__5 in
+    flush_read_buffer (read_buffer,dbytes * (2:ii)) >>= fun w__6 ->
+    let read_data = set_vector_start_to_length w__6 in
+    let data1 =
+      set_vector_start_to_length
+        (slice (set_vector_start_to_length read_data) (datasize - (1:ii)) (0:ii)) in
+    let data2 =
+      set_vector_start_to_length
+        (slice (set_vector_start_to_length read_data) ((datasize * (2:ii)) - (1:ii)) datasize) in
+    let (data1, data2) =
+      if bitU_to_bool rt_unknown
+      then
+        let data1 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+        let data2 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+        (data1,data2)
+      else (data1,data2) in
+    wX (t,reset_vector_start (set_vector_start_to_length data1)) >>
+    wX (t2,reset_vector_start (set_vector_start_to_length data2)) >>
+    return (data1,data2)
+  end >>= fun (data1, data2) ->
+  return ()
+
+(* LDR/STR (immediate), all addressing modes.  Phases: decode checks
+   (write-back with n = t and n <> 31 is unallocated for both load and
+   store); base from SP (alignment-checked unless PREFETCH) or X[n];
+   pre-index add of [offset] unless [postindex]; store footprint
+   announcement; write-back of the indexed address; then the datasize/8
+   byte access.  A load writes X[t] sign- or zero-extended to
+   [regsize].  wb_unknown/rt_unknown are fixed B0 (dead branches).
+   The threaded [data] is discarded at the end. *)
+let execute_LoadImmediate (n, t, acctype, memop, _signed, wback, postindex, offset, regsize, datasize) =
+  let address = to_vec_dec ((64:ii),(0:ii)) in
+  let data = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  let wb_unknown = B0 in
+  let rt_unknown = B0 in
+  (if bitU_to_bool
+        ((eq (memop, MemOp_LOAD)) &. (wback &. ((eq_range (n, t)) &. (neq_range (n, (31:ii))))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool
+        ((eq (memop, MemOp_STORE)) &. (wback &. ((eq_range (n, t)) &. (neq_range (n, (31:ii))))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool (eq_range (n, (31:ii)))
+   then
+     (if bitU_to_bool (neq (memop, MemOp_PREFETCH))
+      then CheckSPAlignment ()
+      else return ()) >>
+     rSP (64:ii)
+   else rX ((64:ii),n)) >>= fun address ->
+  let address =
+    if bitU_to_bool (~postindex)
+    then
+      set_vector_start 63
+        (add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)))
+    else address in
+  (if bitU_to_bool (eq (memop, MemOp_STORE))
+   then wMem_Addr (reset_vector_start address,quot datasize (8:ii),acctype,B0)
+   else return ()) >>
+  (if bitU_to_bool wback
+   then
+     let address' =
+       if bitU_to_bool (~postindex)
+       then address
+       else
+         add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)) in
+     if bitU_to_bool (eq_range (n, (31:ii)))
+     then wSP ((),reset_vector_start (set_vector_start_to_length address'))
+     else wX (n,reset_vector_start (set_vector_start_to_length address'))
+   else return ()) >>
+  match memop with
+  | MemOp_STORE ->
+    (if bitU_to_bool rt_unknown
+     then
+       let data = set_vector_start_to_length (to_vec_dec (length data,UNKNOWN)) in
+       return data
+     else
+       rX (length data,t) >>= fun w__2 ->
+       return (set_vector_start_to_length w__2)) >>= fun data ->
+    wMem
+      (empty_write_buffer,
+       reset_vector_start address,
+       quot datasize (8:ii),
+       acctype,
+       reset_vector_start (set_vector_start_to_length data)) >>= fun w__3 ->
+    flush_write_buffer w__3 >>
+    return data
+  | MemOp_LOAD ->
+    rMem (empty_read_buffer,reset_vector_start address,quot datasize (8:ii),acctype) >>= fun w__4 ->
+    flush_read_buffer (w__4,quot datasize (8:ii)) >>= fun w__5 ->
+    let data = set_vector_start_to_length w__5 in
+    (if bitU_to_bool _signed
+     then
+       wX
+         (t,
+          reset_vector_start (set_vector_start_to_length
+                                (set_vector_start_to_length
+                                   (SignExtend (regsize,reset_vector_start (set_vector_start_to_length data))))))
+     else
+       wX
+         (t,
+          reset_vector_start (set_vector_start_to_length
+                                (set_vector_start_to_length
+                                   (ZeroExtend (regsize,reset_vector_start (set_vector_start_to_length data))))))) >>
+    return data
+  | MemOp_PREFETCH -> return data
+  end >>= fun data ->
+  return ()
+
+(* LDR/STR (register offset): identical phase structure to
+   execute_LoadImmediate, except the offset is computed first as the
+   extended/shifted register X[m] via ExtendReg (64-bit result).
+   Decode checks, SP/X[n] base, pre-index, store footprint, write-back,
+   then the access with sign-/zero-extension to [regsize] on load.
+   wb_unknown/rt_unknown are fixed B0 (dead branches); the threaded
+   [data] is discarded. *)
+let execute_LoadRegister (n, t, m, acctype, memop, _signed, wback, postindex, extend_type, shift, regsize, datasize) =
+  ExtendReg ((64:ii),m,extend_type,shift) >>= fun offset ->
+  let address = to_vec_dec ((64:ii),(0:ii)) in
+  let data = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  let wb_unknown = B0 in
+  let rt_unknown = B0 in
+  (if bitU_to_bool
+        ((eq (memop, MemOp_LOAD)) &. (wback &. ((eq_range (n, t)) &. (neq_range (n, (31:ii))))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool
+        ((eq (memop, MemOp_STORE)) &. (wback &. ((eq_range (n, t)) &. (neq_range (n, (31:ii))))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool (eq_range (n, (31:ii)))
+   then
+     (if bitU_to_bool (neq (memop, MemOp_PREFETCH))
+      then CheckSPAlignment ()
+      else return ()) >>
+     rSP (64:ii)
+   else rX ((64:ii),n)) >>= fun address ->
+  let address =
+    if bitU_to_bool (~postindex)
+    then
+      set_vector_start 63
+        (add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)))
+    else address in
+  (if bitU_to_bool (eq (memop, MemOp_STORE))
+   then wMem_Addr (reset_vector_start address,quot datasize (8:ii),acctype,B0)
+   else return ()) >>
+  (if bitU_to_bool wback
+   then
+     let address' =
+       if bitU_to_bool (~postindex)
+       then address
+       else
+         add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)) in
+     if bitU_to_bool (eq_range (n, (31:ii)))
+     then wSP ((),reset_vector_start (set_vector_start_to_length address'))
+     else wX (n,reset_vector_start (set_vector_start_to_length address'))
+   else return ()) >>
+  match memop with
+  | MemOp_STORE ->
+    (if bitU_to_bool rt_unknown
+     then
+       let data = set_vector_start_to_length (to_vec_dec (length data,UNKNOWN)) in
+       return data
+     else
+       rX (length data,t) >>= fun w__2 ->
+       return (set_vector_start_to_length w__2)) >>= fun data ->
+    wMem
+      (empty_write_buffer,
+       reset_vector_start address,
+       quot datasize (8:ii),
+       acctype,
+       reset_vector_start (set_vector_start_to_length data)) >>= fun w__3 ->
+    flush_write_buffer w__3 >>
+    return data
+  | MemOp_LOAD ->
+    rMem (empty_read_buffer,reset_vector_start address,quot datasize (8:ii),acctype) >>= fun w__4 ->
+    flush_read_buffer (w__4,quot datasize (8:ii)) >>= fun w__5 ->
+    let data = set_vector_start_to_length w__5 in
+    (if bitU_to_bool _signed
+     then
+       wX
+         (t,
+          reset_vector_start (set_vector_start_to_length
+                                (set_vector_start_to_length
+                                   (SignExtend (regsize,reset_vector_start (set_vector_start_to_length data))))))
+     else
+       wX
+         (t,
+          reset_vector_start (set_vector_start_to_length
+                                (set_vector_start_to_length
+                                   (ZeroExtend (regsize,reset_vector_start (set_vector_start_to_length data))))))) >>
+    return data
+  | MemOp_PREFETCH -> return data
+  end >>= fun data ->
+  return ()
+
+(* LDP/STP: load/store pair with optional sign-extension.  Decode
+   checks: write-back with n aliasing t or t2 (n <> 31), and a pair
+   load with t = t2, are unallocated.  Then: base from SP
+   (alignment-checked) or X[n]; pre-index add unless [postindex];
+   store footprint for 2*dbytes; write-back; two dbytes accesses at
+   address+0 / address+dbytes.  On load, the low half goes to X[t] and
+   the high half to X[t2], each sign-extended to 64 bits when
+   [_signed], else written as-is.  rt_unknown/wb_unknown are fixed B0
+   (dead branches); the threaded (data1,data2) is discarded. *)
+let execute_LoadStorePair (wback, postindex, n, t, t2, acctype, memop, _signed, datasize, offset) =
+  let address = to_vec_dec ((64:ii),(0:ii)) in
+  let data1 = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  let data2 = set_vector_start_to_length (to_vec_dec (length data1,(0:ii))) in
+  let dbytes = quot datasize (8:ii) in
+  let rt_unknown = B0 in
+  let wb_unknown = B0 in
+  (if bitU_to_bool
+        ((eq (memop, MemOp_LOAD)) &.
+           (wback &. (((eq_range (t, n)) |. (eq_range (t2, n))) &. (neq_range (n, (31:ii))))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool
+        ((eq (memop, MemOp_STORE)) &.
+           (wback &. (((eq_range (t, n)) |. (eq_range (t2, n))) &. (neq_range (n, (31:ii))))))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool ((eq (memop, MemOp_LOAD)) &. (eq_range (t, t2)))
+   then UnallocatedEncoding ()
+   else return ()) >>
+  (if bitU_to_bool (eq_range (n, (31:ii)))
+   then
+     CheckSPAlignment () >>
+     rSP (64:ii)
+   else rX ((64:ii),n)) >>= fun address ->
+  let address =
+    if bitU_to_bool (~postindex)
+    then
+      set_vector_start 63
+        (add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)))
+    else address in
+  (if bitU_to_bool (eq (memop, MemOp_STORE))
+   then wMem_Addr (reset_vector_start address,dbytes * (2:ii),acctype,B0)
+   else return ()) >>
+  (if bitU_to_bool wback
+   then
+     let address' =
+       if bitU_to_bool (~postindex)
+       then address
+       else
+         add_VVV
+           (reset_vector_start (set_vector_start_to_length address))
+           (reset_vector_start (set_vector_start_to_length offset)) in
+     if bitU_to_bool (eq_range (n, (31:ii)))
+     then wSP ((),reset_vector_start (set_vector_start_to_length address'))
+     else wX (n,reset_vector_start (set_vector_start_to_length address'))
+   else return ()) >>
+  match memop with
+  | MemOp_STORE ->
+    (if bitU_to_bool (rt_unknown &. (eq_range (t, n)))
+     then
+       let data1 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+       return data1
+     else
+       rX (length data1,t) >>= fun w__2 ->
+       return (set_vector_start_to_length w__2)) >>= fun data1 ->
+    (if bitU_to_bool (rt_unknown &. (eq_range (t2, n)))
+     then
+       let data2 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+       return data2
+     else
+       rX (length data1,t2) >>= fun w__3 ->
+       return (set_vector_start_to_length w__3)) >>= fun data2 ->
+    wMem
+      (empty_write_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) (0:ii))),
+       dbytes,
+       acctype,
+       reset_vector_start (set_vector_start_to_length data1)) >>= fun write_buffer ->
+    wMem
+      (write_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) dbytes)),
+       dbytes,
+       acctype,
+       reset_vector_start (set_vector_start_to_length data2)) >>= fun w__4 ->
+    let write_buffer = w__4 in
+    flush_write_buffer write_buffer >>
+    return (data1,data2)
+  | MemOp_LOAD ->
+    rMem
+      (empty_read_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) (0:ii))),
+       dbytes,
+       acctype) >>= fun read_buffer ->
+    rMem
+      (read_buffer,
+       reset_vector_start (set_vector_start 63
+                             (add_VIV (reset_vector_start (set_vector_start_to_length address)) dbytes)),
+       dbytes,
+       acctype) >>= fun w__5 ->
+    let read_buffer = w__5 in
+    flush_read_buffer (read_buffer,dbytes * (2:ii)) >>= fun w__6 ->
+    let read_data = set_vector_start_to_length w__6 in
+    let data1 =
+      set_vector_start_to_length
+        (slice (set_vector_start_to_length read_data) (datasize - (1:ii)) (0:ii)) in
+    let data2 =
+      set_vector_start_to_length
+        (slice (set_vector_start_to_length read_data) ((datasize * (2:ii)) - (1:ii)) datasize) in
+    let (data1, data2) =
+      if bitU_to_bool rt_unknown
+      then
+        let data1 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+        let data2 = set_vector_start_to_length (to_vec_dec (length data1,UNKNOWN)) in
+        (data1,data2)
+      else (data1,data2) in
+    (if bitU_to_bool _signed
+     then
+       wX
+         (t,
+          reset_vector_start (set_vector_start_to_length
+                                (SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length data1))))) >>
+       wX
+         (t2,
+          reset_vector_start (set_vector_start_to_length
+                                (SignExtend ((64:ii),reset_vector_start (set_vector_start_to_length data2)))))
+     else
+       wX (t,reset_vector_start (set_vector_start_to_length data1)) >>
+       wX (t2,reset_vector_start (set_vector_start_to_length data2))) >>
+    return (data1,data2)
+  end >>= fun (data1, data2) ->
+  return ()
+
+(* ADD/ADDS/SUB/SUBS (immediate).  operand1 is SP when n = 31, else
+   X[n].  Subtraction is performed as AddWithCarry(op1, NOT op2, 1),
+   the standard two's-complement formulation, so the NZCV flags come
+   out with the ARM carry/borrow convention.  Flags are written only
+   when [setflags]; the result goes to SP when d = 31 and flags are
+   not set (ADD/SUB to SP), otherwise to X[d]. *)
+let execute_AddSubImmediate (d, n, datasize, sub_op, setflags, imm) =
+  (if bitU_to_bool (eq_range (n, (31:ii)))
+   then
+     rSP datasize >>= fun w__0 ->
+     return (set_vector_start_to_length w__0)
+   else
+     rX (datasize,n) >>= fun w__1 ->
+     return (set_vector_start_to_length w__1)) >>= fun operand1 ->
+  let operand2 = set_vector_start_to_length imm in
+  let carry_in = B0 in
+  let (operand2, carry_in) =
+    if bitU_to_bool sub_op
+    then
+      let operand2 =
+        set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length operand2))) in
+      let carry_in = B1 in
+      (operand2,carry_in)
+    else
+      let carry_in = B0 in
+      (operand2,carry_in) in
+  let (result, nzcv) =
+    match (AddWithCarry
+             (reset_vector_start (set_vector_start_to_length operand1),
+              reset_vector_start (set_vector_start_to_length operand2),
+              carry_in)) with
+    | (v0v', v1v') -> (v0v',v1v')
+    end in
+  (if bitU_to_bool setflags
+   then wPSTATE_NZCV ((),reset_vector_start nzcv)
+   else return ()) >>
+  if bitU_to_bool ((eq_range (d, (31:ii))) &. (~setflags))
+  then wSP ((),reset_vector_start (set_vector_start_to_length result))
+  else wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* BFM/SBFM/UBFM: bitfield move.  Implements the ASL formula:
+     bot = (dst AND NOT wmask) OR (ROR(src, R) AND wmask)
+     top = extend ? Replicate(src[S]) : dst
+     X[d] = (top AND NOT tmask) OR (bot AND tmask)
+   [inzero] selects dst = 0 (UBFM/SBFM) vs dst = X[d] (BFM);
+   [extend] selects sign-replication of src bit S (SBFM).
+   wmask/tmask are the precomputed DecodeBitMasks results. *)
+let execute_BitfieldMove (d, n, datasize, inzero, extend, R, S, wmask, tmask) =
+  (if bitU_to_bool inzero
+   then return (set_vector_start_to_length (Zeros datasize))
+   else
+     rX (datasize,d) >>= fun w__0 ->
+     return (set_vector_start_to_length w__0)) >>= fun dst ->
+  rX (datasize,n) >>= fun w__1 ->
+  let src = set_vector_start_to_length w__1 in
+  let bot =
+    set_vector_start_to_length
+      (bitwise_or
+         (set_vector_start_to_length
+            (bitwise_and
+               (set_vector_start_to_length dst,
+                set_vector_start_to_length
+                  (NOT (reset_vector_start (set_vector_start_to_length wmask))))),
+          set_vector_start_to_length
+            (bitwise_and
+               (set_vector_start_to_length
+                  (ROR (reset_vector_start (set_vector_start_to_length src),R)),
+                set_vector_start_to_length wmask)))) in
+  let top =
+    if bitU_to_bool extend
+    then
+      set_vector_start_to_length
+        (Replicate
+           (length bot,
+            reset_vector_start (set_vector_start_to_length
+                                  (Vector [access (set_vector_start_to_length src) S] 0 false))))
+    else set_vector_start_to_length dst in
+  wX
+    (d,
+     reset_vector_start (set_vector_start_to_length
+                           (bitwise_or
+                              (set_vector_start_to_length
+                                 (bitwise_and
+                                    (set_vector_start_to_length top,
+                                     set_vector_start_to_length
+                                       (NOT (reset_vector_start (set_vector_start_to_length tmask))))),
+                               set_vector_start_to_length
+                                 (bitwise_and (set_vector_start_to_length bot, set_vector_start_to_length tmask))))))
+
+(* EXTR: extract a [datasize]-bit field from the concatenation
+   X[n]:X[m], taking bits (lsb + datasize - 1) .. lsb, and write it to
+   X[d].  The initial [result] binding is a placeholder immediately
+   shadowed, mirroring the generated Sail. *)
+let execute_ExtractRegister (d, n, m, datasize, lsb) =
+  let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  rX (datasize,n) >>= fun w__0 ->
+  let operand1 = set_vector_start_to_length w__0 in
+  rX (datasize,m) >>= fun w__1 ->
+  let operand2 = set_vector_start_to_length w__1 in
+  let concat =
+    set_vector_start_to_length
+      ((set_vector_start_to_length operand1) ^^ (set_vector_start_to_length operand2)) in
+  let result =
+    set_vector_start_to_length
+      (slice (set_vector_start_to_length concat) ((lsb + datasize) - (1:ii)) lsb) in
+  wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* AND/ORR/EOR/ANDS (immediate).  Applies the selected bitwise op to
+   X[n] and the decoded immediate.  When [setflags] (ANDS), NZCV is
+   written as N = top bit of result, Z = IsZeroBit(result), C = 0,
+   V = 0 — built by concatenating the N bit, the Z bit and the literal
+   "00" vector.  Destination is SP when d = 31 without flags (AND/ORR
+   to SP), otherwise X[d]. *)
+let execute_LogicalImmediate (d, n, datasize, setflags, op, imm) =
+  let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  rX (datasize,n) >>= fun w__0 ->
+  let operand1 = set_vector_start_to_length w__0 in
+  let operand2 = set_vector_start_to_length imm in
+  let result =
+    match op with
+    | LogicalOp_AND ->
+      set_vector_start_to_length
+        (bitwise_and (set_vector_start_to_length operand1, set_vector_start_to_length operand2))
+    | LogicalOp_ORR ->
+      set_vector_start_to_length
+        (bitwise_or (set_vector_start_to_length operand1, set_vector_start_to_length operand2))
+    | LogicalOp_EOR ->
+      set_vector_start_to_length
+        (bitwise_xor (set_vector_start_to_length operand1, set_vector_start_to_length operand2))
+    end in
+  (if bitU_to_bool setflags
+   then
+     wPSTATE_NZCV
+       ((),
+        reset_vector_start (set_vector_start 3
+                              ((set_vector_start_to_length
+                                  (Vector [access (set_vector_start_to_length result) ((length
+                                                                                         (reset_vector_start (set_vector_start_to_length
+                                                                                                                result))) - (1:ii))] 0 false)) ^^
+                                 (set_vector_start_to_length
+                                    ((set_vector_start_to_length
+                                        (Vector [IsZeroBit (reset_vector_start (set_vector_start_to_length result))] 0 false)) ^^
+                                       (set_vector_start_to_length (Vector [B0;B0] 1 false)))))))
+   else return ()) >>
+  if bitU_to_bool ((eq_range (d, (31:ii))) &. (~setflags))
+  then wSP ((),reset_vector_start (set_vector_start_to_length result))
+  else wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* MOVZ/MOVN/MOVK: move wide (immediate).  MOVK (opcode K) starts from
+   the current X[d]; MOVZ/MOVN start from zeros.  The 16-bit immediate
+   is inserted at bits pos .. pos+15, and for MOVN (opcode N) the whole
+   result is then inverted.  Written to X[d]. *)
+let execute_MoveWide (d, datasize, imm, pos, opcode) =
+  let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+  (if bitU_to_bool (eq (opcode, MoveWideOp_K))
+   then
+     rX (datasize,d) >>= fun w__0 ->
+     return (set_vector_start_to_length w__0)
+   else
+     let result = set_vector_start_to_length (Zeros datasize) in
+     return result) >>= fun result ->
+  let result = update result (pos + (15:ii)) pos (set_vector_start_to_length imm) in
+  let result =
+    if bitU_to_bool (eq (opcode, MoveWideOp_N))
+    then set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length result)))
+    else result in
+  wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* ADR/ADRP: PC-relative address.  [page] (ADRP) zeroes bits 11..0 of
+   the PC base before adding the (pre-scaled) immediate; ADR adds the
+   immediate to the raw PC.  Result goes to X[d]. *)
+let execute_Address (d, page, imm) =
+  rPC () >>= fun base ->
+  let base = if bitU_to_bool page then update base (11:ii) (0:ii) (Zeros (12:ii)) else base in
+  wX
+    (d,
+     reset_vector_start (set_vector_start_to_length
+                           (add_VVV
+                              (reset_vector_start (set_vector_start_to_length base))
+                              (reset_vector_start (set_vector_start_to_length imm)))))
+
+(* ADD/ADDS/SUB/SUBS (extended register).  operand1 is SP when n = 31,
+   else X[n]; operand2 is X[m] extended and shifted by ExtendReg.
+   Subtraction uses the AddWithCarry(op1, NOT op2, 1) formulation, as
+   in execute_AddSubImmediate.  Flags written only when [setflags];
+   destination is SP when d = 31 without flags, else X[d]. *)
+let execute_AddSubExtendRegister (d, n, m, datasize, sub_op, setflags, extend_type, shift) =
+  (if bitU_to_bool (eq_range (n, (31:ii)))
+   then
+     rSP datasize >>= fun w__0 ->
+     return (set_vector_start_to_length w__0)
+   else
+     rX (datasize,n) >>= fun w__1 ->
+     return (set_vector_start_to_length w__1)) >>= fun operand1 ->
+  ExtendReg (datasize,m,extend_type,shift) >>= fun w__2 ->
+  let operand2 = set_vector_start_to_length w__2 in
+  let carry_in = B0 in
+  let (operand2, carry_in) =
+    if bitU_to_bool sub_op
+    then
+      let operand2 =
+        set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length operand2))) in
+      let carry_in = B1 in
+      (operand2,carry_in)
+    else
+      let carry_in = B0 in
+      (operand2,carry_in) in
+  let (result, nzcv) =
+    match (AddWithCarry
+             (reset_vector_start (set_vector_start_to_length operand1),
+              reset_vector_start (set_vector_start_to_length operand2),
+              carry_in)) with
+    | (v2v', v3v') -> (v2v',v3v')
+    end in
+  (if bitU_to_bool setflags
+   then wPSTATE_NZCV ((),reset_vector_start nzcv)
+   else return ()) >>
+  if bitU_to_bool ((eq_range (d, (31:ii))) &. (~setflags))
+  then wSP ((),reset_vector_start (set_vector_start_to_length result))
+  else wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* AddSubShiftedRegister: ADD/SUB (shifted register).  Unlike the extended-
+   register form, register 31 is not treated as SP here; the result always
+   goes to a general-purpose register via wX.  Subtraction is implemented as
+   operand1 + NOT(operand2) + 1 via AddWithCarry, whose NZCV output is
+   written to PSTATE when [setflags] is set. *)
+let execute_AddSubShiftedRegister (d, n, m, datasize, sub_op, setflags, shift_type, shift_amount) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ (* Second operand: register m shifted by the immediate amount. *)
+ ShiftReg (datasize,m,shift_type,shift_amount) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ let carry_in = B0 in
+ let (operand2, carry_in) =
+ if bitU_to_bool sub_op
+ then
+ let operand2 =
+ set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length operand2))) in
+ let carry_in = B1 in
+ (operand2,carry_in)
+ else
+ let carry_in = B0 in
+ (operand2,carry_in) in
+ let (result, nzcv) =
+ match (AddWithCarry
+ (reset_vector_start (set_vector_start_to_length operand1),
+ reset_vector_start (set_vector_start_to_length operand2),
+ carry_in)) with
+ | (v4v', v5v') -> (v4v',v5v')
+ end in
+ (if bitU_to_bool setflags
+ then wPSTATE_NZCV ((),reset_vector_start nzcv)
+ else return ()) >>
+ wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* AddSubCarry: add/subtract with carry (ADC / ADCS / SBC / SBCS).
+   The carry-in comes from the current PSTATE.C flag (read via
+   read_reg_bitfield NZCV "C"); for the subtract form operand2 is
+   complemented first.  NZCV is updated only when [setflags] is set. *)
+let execute_AddSubCarry (d, n, m, datasize, sub_op, setflags) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (datasize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ let operand2 =
+ if bitU_to_bool sub_op
+ then set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length operand2)))
+ else operand2 in
+ (* w__2 is the current carry flag PSTATE.C. *)
+ read_reg_bitfield NZCV "C" >>= fun w__2 ->
+ let (result, nzcv) =
+ match (AddWithCarry
+ (reset_vector_start (set_vector_start_to_length operand1),
+ reset_vector_start (set_vector_start_to_length operand2),
+ w__2)) with
+ | (v6v', v7v') -> (v6v',v7v')
+ end in
+ (if bitU_to_bool setflags
+ then wPSTATE_NZCV ((),reset_vector_start nzcv)
+ else return ()) >>
+ wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* ConditionalCompareImmediate: CCMP/CCMN (immediate).  If [condition]
+   holds, NZCV is set from comparing register [n] against [imm] (as an
+   add/sub through AddWithCarry, discarding the numeric result); otherwise
+   NZCV is set directly from the supplied [flags] immediate. *)
+let execute_ConditionalCompareImmediate (n, datasize, sub_op, condition, flags, imm) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ let operand2 = set_vector_start_to_length imm in
+ let carry_in = B0 in
+ (* flags' defaults to the instruction's flags immediate; replaced by the
+    comparison result only when the condition passes. *)
+ let flags' = flags in
+ ConditionHolds (reset_vector_start condition) >>= fun w__1 ->
+ let (operand2, carry_in, flags') =
+ if bitU_to_bool w__1
+ then
+ let (operand2, carry_in) =
+ if bitU_to_bool sub_op
+ then
+ let operand2 =
+ set_vector_start_to_length
+ (NOT (reset_vector_start (set_vector_start_to_length operand2))) in
+ let carry_in = B1 in
+ (operand2,carry_in)
+ else (operand2,carry_in) in
+ (* Only the flags output of the addition is used; the sum is dropped. *)
+ let (_, nzcv) =
+ match (AddWithCarry
+ (reset_vector_start (set_vector_start_to_length operand1),
+ reset_vector_start (set_vector_start_to_length operand2),
+ carry_in)) with
+ | (v8v', v9v') -> (v8v',v9v')
+ end in
+ let flags' = nzcv in
+ (operand2,carry_in,flags')
+ else (operand2,carry_in,flags') in
+ wPSTATE_NZCV ((),reset_vector_start flags')
+
+(* ConditionalCompareRegister: CCMP/CCMN (register).  Identical in structure
+   to the immediate form, except the second operand is read from register
+   [m]: if [condition] holds, NZCV comes from comparing registers [n] and
+   [m]; otherwise NZCV is set directly from the supplied [flags]. *)
+let execute_ConditionalCompareRegister (n, m, datasize, sub_op, condition, flags) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (datasize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ let carry_in = B0 in
+ let flags' = flags in
+ ConditionHolds (reset_vector_start condition) >>= fun w__2 ->
+ let (operand2, carry_in, flags') =
+ if bitU_to_bool w__2
+ then
+ (* CCMP (sub_op): complement operand2 and set carry-in. *)
+ let (operand2, carry_in) =
+ if bitU_to_bool sub_op
+ then
+ let operand2 =
+ set_vector_start_to_length
+ (NOT (reset_vector_start (set_vector_start_to_length operand2))) in
+ let carry_in = B1 in
+ (operand2,carry_in)
+ else (operand2,carry_in) in
+ (* Only the flags output of the addition is used; the sum is dropped. *)
+ let (_, nzcv) =
+ match (AddWithCarry
+ (reset_vector_start (set_vector_start_to_length operand1),
+ reset_vector_start (set_vector_start_to_length operand2),
+ carry_in)) with
+ | (v10v', v11v') -> (v10v',v11v')
+ end in
+ let flags' = nzcv in
+ (operand2,carry_in,flags')
+ else (operand2,carry_in,flags') in
+ wPSTATE_NZCV ((),reset_vector_start flags')
+
+(* ConditionalSelect: CSEL family.  If [condition] holds the result is
+   register [n]; otherwise it is register [m], optionally complemented
+   ([else_inv], CSINV) and/or incremented by one ([else_inc], CSINC).
+   Both together give negation (CSNEG: NOT x + 1). *)
+let execute_ConditionalSelect (d, n, m, datasize, condition, else_inv, else_inc) =
+ let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (datasize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ ConditionHolds (reset_vector_start condition) >>= fun w__2 ->
+ let result =
+ if bitU_to_bool w__2
+ then set_vector_start_to_length operand1
+ else
+ let result = set_vector_start_to_length operand2 in
+ let result =
+ if bitU_to_bool else_inv
+ then
+ set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length result)))
+ else result in
+ if bitU_to_bool else_inc
+ then
+ set_vector_start_to_length
+ (add_VIV (reset_vector_start (set_vector_start_to_length result)) (1:ii))
+ else result in
+ wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* Reverse: bit/byte reversal (RBIT, REV16, REV32, REV64 / REV).
+   [V] is a 6-bit mask in which bit i set means "swap adjacent chunks of
+   2^i bits": REV16 swaps bytes within halfwords (bit 3 only), REV32 within
+   words (bits 3-4), REV64 within doublewords (bits 3-5), and RBIT sets all
+   bits down to single-bit granularity (bits 0-5, or 0-4 for 32-bit
+   datasize).  NOTE(review): the second Vector argument (5) is presumably
+   the start index of a decreasing-indexed vector — confirm in Sail_values. *)
+let execute_Reverse (d, n, datasize, op) =
+ let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+ let V = to_vec_dec ((6:ii),(0:ii)) in
+ let V =
+ match op with
+ | RevOp_REV16 -> Vector [B0;B0;B1;B0;B0;B0] 5 false
+ | RevOp_REV32 -> Vector [B0;B1;B1;B0;B0;B0] 5 false
+ | RevOp_REV64 -> Vector [B1;B1;B1;B0;B0;B0] 5 false
+ | RevOp_RBIT ->
+ if bitU_to_bool (eq_range (datasize, (64:ii)))
+ then Vector [B1;B1;B1;B1;B1;B1] 5 false
+ else Vector [B0;B1;B1;B1;B1;B1] 5 false
+ end in
+ rX (datasize,n) >>= fun w__0 ->
+ let result = set_vector_start_to_length w__0 in
+ (* For each granule size 2^vbit enabled in V, swap every adjacent pair of
+    vsize-bit chunks across the whole datasize-bit value. *)
+ let result =
+ (foreach_inc ((0:ii),(5:ii),(1:ii)) result
+ (fun vbit result ->
+ if bitU_to_bool (eq (match (access V vbit) with | B0 -> (0:ii) | B1 -> (1:ii) end, (1:ii)))
+ then
+ let tmp = set_vector_start_to_length result in
+ let vsize = lsl' ((1:ii),vbit) in
+ (foreach_inc ((0:ii),datasize - (1:ii),(2:ii) * vsize) result
+ (fun base result ->
+ (* Swap tmp[base+2*vsize-1 .. base+vsize] with
+    tmp[base+vsize-1 .. base]. *)
+ let result =
+ update
+ result ((base + vsize) - (1:ii)) base
+ (set_vector_start_to_length
+ (slice (set_vector_start_to_length tmp)
+ ((base + ((2:ii) * vsize)) - (1:ii)) (base + vsize))) in
+ update
+ result ((base + ((2:ii) * vsize)) - (1:ii)) (base + vsize)
+ (set_vector_start_to_length
+ (slice (set_vector_start_to_length tmp) ((base + vsize) - (1:ii)) base))))
+ else result)) in
+ wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* CountLeading: CLZ / CLS.  Counts leading zero bits (CountOp_CLZ) or
+   leading sign bits (otherwise) of register [n] and writes the count, as a
+   datasize-wide vector, to register [d]. *)
+let execute_CountLeading (d, n, datasize, opcode) =
+ let result = (0:ii) in
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ let result =
+ if bitU_to_bool (eq (opcode, CountOp_CLZ))
+ then CountLeadingZeroBits (reset_vector_start (set_vector_start_to_length operand1))
+ else CountLeadingSignBits (reset_vector_start (set_vector_start_to_length operand1)) in
+ wX (d,reset_vector_start (set_vector_start_to_length (to_vec_dec (datasize,result))))
+
+(* Division: SDIV / UDIV.  [_unsigned] selects how Int interprets the
+   operands.  Division by zero yields 0 (no trap), matching the ARM
+   pseudocode.  NOTE(review): the ARM spec requires rounding towards zero;
+   this relies on Lem's [quot] being truncating division for negative
+   operands — confirm against the Lem library. *)
+let execute_Division (d, n, m, datasize, _unsigned) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (datasize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ let result = (0:ii) in
+ let result =
+ if bitU_to_bool (IsZero (reset_vector_start (set_vector_start_to_length operand2)))
+ then (0:ii)
+ else
+ quot
+ (Int (reset_vector_start (set_vector_start_to_length operand1),_unsigned))
+ (Int (reset_vector_start (set_vector_start_to_length operand2),_unsigned)) in
+ wX (d,reset_vector_start (set_vector_start_to_length (to_vec_dec (datasize,result))))
+
+(* Shift: variable-shift instructions (LSLV/LSRV/ASRV/RORV).  Shifts
+   register [n] by the amount in register [m] taken modulo datasize, using
+   the shift kind in [shift_type], and writes the result to [d]. *)
+let execute_Shift (d, n, m, datasize, shift_type) =
+ let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+ rX (datasize,m) >>= fun w__0 ->
+ let operand2 = set_vector_start_to_length w__0 in
+ (* Shift amount is the unsigned value of Xm/Wm reduced mod datasize. *)
+ ShiftReg
+ (datasize,
+ n,
+ shift_type,
+ modulo (UInt (reset_vector_start (set_vector_start_to_length operand2))) datasize) >>= fun w__1 ->
+ let result = set_vector_start_to_length w__1 in
+ wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* CRC: CRC32 / CRC32C accumulate instructions.  [n] holds the 32-bit
+   accumulator, [m] the data value of [size] bits, [crc32c] selects the
+   Castagnoli polynomial (0x1EDC6F41) over the IEEE one (0x04C11DB7) — the
+   constants below are those polynomials with a bit-order chosen to pair
+   with BitReverse; UnallocatedEncoding is raised first if the CRC extension
+   is absent.  The computation follows the ARM pseudocode: bit-reverse
+   accumulator and data, align them, XOR, reduce mod the polynomial with
+   Poly32Mod2, and bit-reverse the 32-bit remainder back. *)
+let execute_CRC (d, n, m, size, crc32c) =
+ (if bitU_to_bool (~(HaveCRCExt ()))
+ then UnallocatedEncoding ()
+ else return ()) >>
+ rX ((32:ii),n) >>= fun acc ->
+ rX (size,m) >>= fun w__0 ->
+ let _val = set_vector_start_to_length w__0 in
+ let poly =
+ if bitU_to_bool crc32c
+ then
+ Vector [B0;B0;B0;B1;B1;B1;B1;B0;B1;B1;B0;B1;B1;B1;B0;B0;B0;B1;B1;B0;B1;
+ B1;B1;B1;B0;B1;B0;B0;B0;B0;B0;B1] 31 false
+ else
+ Vector [B0;B0;B0;B0;B0;B1;B0;B0;B1;B1;B0;B0;B0;B0;B0;B1;B0;B0;B0;B1;B1;
+ B1;B0;B1;B1;B0;B1;B1;B0;B1;B1;B1] 31 false in
+ (* tempacc = reversed accumulator shifted left by the data width. *)
+ let tempacc =
+ set_vector_start_to_length
+ ((set_vector_start_to_length
+ (BitReverse (reset_vector_start (set_vector_start_to_length acc)))) ^^
+ (set_vector_start_to_length (set_vector_start_to_length (Zeros (length _val))))) in
+ (* tempval = reversed data shifted left by 32. *)
+ let tempval =
+ set_vector_start_to_length
+ ((set_vector_start_to_length
+ (BitReverse (reset_vector_start (set_vector_start_to_length _val)))) ^^
+ (Zeros (32:ii))) in
+ wX
+ (d,
+ reset_vector_start (set_vector_start_to_length
+ (BitReverse
+ (reset_vector_start (set_vector_start_to_length
+ (Poly32Mod2
+ (reset_vector_start (set_vector_start_to_length
+ (bitwise_xor
+ (set_vector_start_to_length tempacc,
+ set_vector_start_to_length tempval))),
+ reset_vector_start poly)))))))
+
+(* MultiplyAddSub: MADD / MSUB.  Computes operand3 +/- operand1 * operand2
+   and truncates to [destsize] bits via to_vec_dec.  Operands are taken
+   through UInt; since the result is truncated to destsize, the low bits do
+   not depend on signedness. *)
+let execute_MultiplyAddSub (d, n, m, a, destsize, datasize, sub_op) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (datasize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ (* operand3 is the addend/minuend register [a], read at destsize. *)
+ rX (destsize,a) >>= fun w__2 ->
+ let operand3 = set_vector_start_to_length w__2 in
+ let result = (0:ii) in
+ let result =
+ if bitU_to_bool sub_op
+ then
+ (UInt (reset_vector_start (set_vector_start_to_length operand3))) -
+ ((UInt (reset_vector_start (set_vector_start_to_length operand1))) *
+ (UInt (reset_vector_start (set_vector_start_to_length operand2))))
+ else
+ (UInt (reset_vector_start (set_vector_start_to_length operand3))) +
+ ((UInt (reset_vector_start (set_vector_start_to_length operand1))) *
+ (UInt (reset_vector_start (set_vector_start_to_length operand2)))) in
+ wX (d,reset_vector_start (set_vector_start_to_length (to_vec_dec (destsize,result))))
+
+(* MultiplyAddSubLong: SMADDL / SMSUBL / UMADDL / UMSUBL.  Multiplies two
+   [datasize]-bit (32-bit) operands, interpreted signed or unsigned per
+   [_unsigned], adds/subtracts the 64-bit accumulator [a], and writes the
+   result truncated to 64 bits to register [d]. *)
+let execute_MultiplyAddSubLong (d, n, m, a, destsize, datasize, sub_op, _unsigned) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (datasize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ rX (destsize,a) >>= fun w__2 ->
+ let operand3 = set_vector_start_to_length w__2 in
+ let result = (0:ii) in
+ let result =
+ if bitU_to_bool sub_op
+ then
+ (Int (reset_vector_start (set_vector_start_to_length operand3),_unsigned)) -
+ ((Int (reset_vector_start (set_vector_start_to_length operand1),_unsigned)) *
+ (Int (reset_vector_start (set_vector_start_to_length operand2),_unsigned)))
+ else
+ (Int (reset_vector_start (set_vector_start_to_length operand3),_unsigned)) +
+ ((Int (reset_vector_start (set_vector_start_to_length operand1),_unsigned)) *
+ (Int (reset_vector_start (set_vector_start_to_length operand2),_unsigned))) in
+ (* Destination is always 64-bit for the *L variants. *)
+ wX (d,reset_vector_start (set_vector_start_to_length (to_vec_dec ((64:ii),result))))
+
+(* MultiplyHigh: SMULH / UMULH.  Forms the full 128-bit product of two
+   destsize-wide (64-bit) operands, interpreted per [_unsigned], and writes
+   bits [127:64] of the product to register [d].  The [a] and [datasize]
+   parameters are not used in this body (kept for a uniform decode
+   interface). *)
+let execute_MultiplyHigh (d, n, m, a, destsize, datasize, _unsigned) =
+ rX (destsize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ rX (destsize,m) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ let result = (0:ii) in
+ let result =
+ (Int (reset_vector_start (set_vector_start_to_length operand1),_unsigned)) *
+ (Int (reset_vector_start (set_vector_start_to_length operand2),_unsigned)) in
+ (* Take the high 64 bits of the 128-bit product. *)
+ wX
+ (d,
+ reset_vector_start (set_vector_start_to_length
+ (slice (to_vec_dec ((128:ii),result)) (127:ii) (64:ii))))
+
+(* LogicalShiftedRegister: AND/BIC/ORR/ORN/EOR/EON (shifted register) and
+   the flag-setting ANDS/BICS.  [invert] complements the shifted second
+   operand (the *N / BIC forms).  When [setflags] is set, NZCV is written as
+   N = top bit of the result, Z = IsZeroBit(result), C = V = 0 (built by
+   concatenating the four single-bit vectors below). *)
+let execute_LogicalShiftedRegister (d, n, m, datasize, setflags, op, shift_type, shift_amount, invert) =
+ rX (datasize,n) >>= fun w__0 ->
+ let operand1 = set_vector_start_to_length w__0 in
+ ShiftReg (datasize,m,shift_type,shift_amount) >>= fun w__1 ->
+ let operand2 = set_vector_start_to_length w__1 in
+ let operand2 =
+ if bitU_to_bool invert
+ then set_vector_start_to_length (NOT (reset_vector_start (set_vector_start_to_length operand2)))
+ else operand2 in
+ let result = set_vector_start_to_length (to_vec_dec (datasize,(0:ii))) in
+ let result =
+ match op with
+ | LogicalOp_AND ->
+ set_vector_start_to_length
+ (bitwise_and (set_vector_start_to_length operand1, set_vector_start_to_length operand2))
+ | LogicalOp_ORR ->
+ set_vector_start_to_length
+ (bitwise_or (set_vector_start_to_length operand1, set_vector_start_to_length operand2))
+ | LogicalOp_EOR ->
+ set_vector_start_to_length
+ (bitwise_xor (set_vector_start_to_length operand1, set_vector_start_to_length operand2))
+ end in
+ (* NZCV = result<datasize-1> : IsZeroBit(result) : 0 : 0 *)
+ (if bitU_to_bool setflags
+ then
+ wPSTATE_NZCV
+ ((),
+ reset_vector_start (set_vector_start 3
+ ((set_vector_start_to_length
+ (Vector [access (set_vector_start_to_length result) (datasize - (1:ii))] 0 false)) ^^
+ (set_vector_start_to_length
+ ((set_vector_start_to_length
+ (Vector [IsZeroBit (reset_vector_start (set_vector_start_to_length result))] 0 false)) ^^
+ (set_vector_start_to_length (Vector [B0;B0] 1 false)))))))
+ else return ()) >>
+ wX (d,reset_vector_start (set_vector_start_to_length result))
+
+(* Top-level instruction dispatcher: maps each decoded AST constructor
+   (from ArmV8_embed_types) to its execute_* implementation, unpacking the
+   constructor's fields into the corresponding argument tuple.  Nullary
+   constructors are dispatched with a unit argument. *)
+let execute = function
+
+ | TMStart (t) -> execute_TMStart (t)
+ | TMCommit -> execute_TMCommit ()
+ | TMTest -> execute_TMTest ()
+ | TMAbort (retry,reason) -> execute_TMAbort (retry,reason)
+ | CompareAndBranch (t,datasize,iszero,offset) -> execute_CompareAndBranch (t,datasize,iszero,offset)
+ | BranchConditional (offset,condition) -> execute_BranchConditional (offset,condition)
+ | GenerateExceptionEL1 (imm) -> execute_GenerateExceptionEL1 (imm)
+ | GenerateExceptionEL2 (imm) -> execute_GenerateExceptionEL2 (imm)
+ | GenerateExceptionEL3 (imm) -> execute_GenerateExceptionEL3 (imm)
+ | DebugBreakpoint (comment) -> execute_DebugBreakpoint (comment)
+ | ExternalDebugBreakpoint -> execute_ExternalDebugBreakpoint ()
+ | DebugSwitchToExceptionLevel (target_level) -> execute_DebugSwitchToExceptionLevel (target_level)
+ | MoveSystemImmediate (operand,field') -> execute_MoveSystemImmediate (operand,field')
+ | Hint (op) -> execute_Hint (op)
+ | ClearExclusiveMonitor (imm) -> execute_ClearExclusiveMonitor (imm)
+ | Barrier (op,domain,types) -> execute_Barrier (op,domain,types)
+ | System (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,has_result) -> execute_System (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,has_result)
+ | MoveSystemRegister (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,read) -> execute_MoveSystemRegister (t,sys_op0,sys_op1,sys_op2,sys_crn,sys_crm,read)
+ | ImplementationDefinedTestBeginEnd (isEnd) -> execute_ImplementationDefinedTestBeginEnd (isEnd)
+ | ImplementationDefinedStopFetching -> execute_ImplementationDefinedStopFetching ()
+ | ImplementationDefinedThreadStart -> execute_ImplementationDefinedThreadStart ()
+ | TestBitAndBranch (t,datasize,bit_pos,bit_val,offset) -> execute_TestBitAndBranch (t,datasize,bit_pos,bit_val,offset)
+ | BranchImmediate (branch_type,offset) -> execute_BranchImmediate (branch_type,offset)
+ | BranchRegister (n,branch_type) -> execute_BranchRegister (n,branch_type)
+ | ExceptionReturn -> execute_ExceptionReturn ()
+ | DebugRestorePState -> execute_DebugRestorePState ()
+ | LoadLiteral (t,memop,_signed,size,offset,datasize) -> execute_LoadLiteral (t,memop,_signed,size,offset,datasize)
+ | LoadStoreAcqExc (n,t,t2,s,acctype,excl,pair,memop,elsize,regsize,datasize) -> execute_LoadStoreAcqExc (n,t,t2,s,acctype,excl,pair,memop,elsize,regsize,datasize)
+ | LoadStorePairNonTemp (wback,postindex,n,t,t2,acctype,memop,scale,datasize,offset) -> execute_LoadStorePairNonTemp (wback,postindex,n,t,t2,acctype,memop,scale,datasize,offset)
+ | LoadImmediate (n,t,acctype,memop,_signed,wback,postindex,offset,regsize,datasize) -> execute_LoadImmediate (n,t,acctype,memop,_signed,wback,postindex,offset,regsize,datasize)
+ | LoadRegister (n,t,m,acctype,memop,_signed,wback,postindex,extend_type,shift,regsize,datasize) -> execute_LoadRegister (n,t,m,acctype,memop,_signed,wback,postindex,extend_type,shift,regsize,datasize)
+ | LoadStorePair (wback,postindex,n,t,t2,acctype,memop,_signed,datasize,offset) -> execute_LoadStorePair (wback,postindex,n,t,t2,acctype,memop,_signed,datasize,offset)
+ | AddSubImmediate (d,n,datasize,sub_op,setflags,imm) -> execute_AddSubImmediate (d,n,datasize,sub_op,setflags,imm)
+ | BitfieldMove (d,n,datasize,inzero,extend,R,S,wmask,tmask) -> execute_BitfieldMove (d,n,datasize,inzero,extend,R,S,wmask,tmask)
+ | ExtractRegister (d,n,m,datasize,lsb) -> execute_ExtractRegister (d,n,m,datasize,lsb)
+ | LogicalImmediate (d,n,datasize,setflags,op,imm) -> execute_LogicalImmediate (d,n,datasize,setflags,op,imm)
+ | MoveWide (d,datasize,imm,pos,opcode) -> execute_MoveWide (d,datasize,imm,pos,opcode)
+ | Address (d,page,imm) -> execute_Address (d,page,imm)
+ | AddSubExtendRegister (d,n,m,datasize,sub_op,setflags,extend_type,shift) -> execute_AddSubExtendRegister (d,n,m,datasize,sub_op,setflags,extend_type,shift)
+ | AddSubShiftedRegister (d,n,m,datasize,sub_op,setflags,shift_type,shift_amount) -> execute_AddSubShiftedRegister (d,n,m,datasize,sub_op,setflags,shift_type,shift_amount)
+ | AddSubCarry (d,n,m,datasize,sub_op,setflags) -> execute_AddSubCarry (d,n,m,datasize,sub_op,setflags)
+ | ConditionalCompareImmediate (n,datasize,sub_op,condition,flags,imm) -> execute_ConditionalCompareImmediate (n,datasize,sub_op,condition,flags,imm)
+ | ConditionalCompareRegister (n,m,datasize,sub_op,condition,flags) -> execute_ConditionalCompareRegister (n,m,datasize,sub_op,condition,flags)
+ | ConditionalSelect (d,n,m,datasize,condition,else_inv,else_inc) -> execute_ConditionalSelect (d,n,m,datasize,condition,else_inv,else_inc)
+ | Reverse (d,n,datasize,op) -> execute_Reverse (d,n,datasize,op)
+ | CountLeading (d,n,datasize,opcode) -> execute_CountLeading (d,n,datasize,opcode)
+ | Division (d,n,m,datasize,_unsigned) -> execute_Division (d,n,m,datasize,_unsigned)
+ | Shift (d,n,m,datasize,shift_type) -> execute_Shift (d,n,m,datasize,shift_type)
+ | CRC (d,n,m,size,crc32c) -> execute_CRC (d,n,m,size,crc32c)
+ | MultiplyAddSub (d,n,m,a,destsize,datasize,sub_op) -> execute_MultiplyAddSub (d,n,m,a,destsize,datasize,sub_op)
+ | MultiplyAddSubLong (d,n,m,a,destsize,datasize,sub_op,_unsigned) -> execute_MultiplyAddSubLong (d,n,m,a,destsize,datasize,sub_op,_unsigned)
+ | MultiplyHigh (d,n,m,a,destsize,datasize,_unsigned) -> execute_MultiplyHigh (d,n,m,a,destsize,datasize,_unsigned)
+ | LogicalShiftedRegister (d,n,m,datasize,setflags,op,shift_type,shift_amount,invert) -> execute_LogicalShiftedRegister (d,n,m,datasize,setflags,op,shift_type,shift_amount,invert)
+ end
+