/*---------------------------------------------------------------*/
/*--- begin                                 guest_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2017 OpenWorks
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.
*/

#ifndef __VEX_GUEST_ARM64_DEFS_H
#define __VEX_GUEST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "guest_generic_bb_to_IR.h"     // DisResult

/*---------------------------------------------------------*/
/*--- arm64 to IR conversion                            ---*/
/*---------------------------------------------------------*/

/* Convert one ARM64 insn to IR.  See the type DisOneInstrFn in
   guest_generic_bb_to_IR.h. */
extern
DisResult disInstr_ARM64 ( IRSB*        irbb,
                           Bool         (*resteerOkFn) ( void*, Addr ),
                           Bool         resteerCisOk,
                           void*        callback_opaque,
                           const UChar* guest_code,
                           Long         delta,
                           Addr         guest_IP,
                           VexArch      guest_arch,
                           const VexArchInfo* archinfo,
                           const VexAbiInfo*  abiinfo,
                           VexEndness   host_endness,
                           Bool         sigill_diag );

/* Used by the optimiser to specialise calls to helpers. */
extern
IRExpr* guest_arm64_spechelper ( const HChar* function_name,
                                 IRExpr** args,
                                 IRStmt** precedingStmts,
                                 Int      n_precedingStmts );

/* Describes to the optimiser which part of the guest state require
   precise memory exceptions.  This is logically part of the guest
   state description. */
extern
Bool guest_arm64_state_requires_precise_mem_exns ( Int, Int,
                                                   VexRegisterUpdates );

extern
VexGuestLayout arm64Guest_layout;


/*---------------------------------------------------------*/
/*--- arm64 guest helpers                               ---*/
/*---------------------------------------------------------*/

/* --- CLEAN HELPERS --- */

/* Calculate NZCV from the supplied thunk components, in the positions
   they appear in the CPSR, viz bits 31:28 for N Z C V respectively.
   Returned bits 63:32 and 27:0 are zero. */
extern
ULong arm64g_calculate_flags_nzcv ( ULong cc_op, ULong cc_dep1,
                                    ULong cc_dep2, ULong cc_dep3 );

/* Calculate the C flag from the thunk components, in the lowest bit
   of the word (bit 0). */
extern
ULong arm64g_calculate_flag_c ( ULong cc_op, ULong cc_dep1,
                                ULong cc_dep2, ULong cc_dep3 );

//ZZ /* Calculate the V flag from the thunk components, in the lowest bit
//ZZ    of the word (bit 0). */
//ZZ extern
//ZZ UInt armg_calculate_flag_v ( UInt cc_op, UInt cc_dep1,
//ZZ                              UInt cc_dep2, UInt cc_dep3 );
//ZZ
/* Calculate the specified condition from the thunk components, in the
   lowest bit of the word (bit 0). */
extern
ULong arm64g_calculate_condition ( /* ARM64Condcode << 4 | cc_op */
                                   ULong cond_n_op ,
                                   ULong cc_dep1,
                                   ULong cc_dep2, ULong cc_dep3 );

//ZZ /* Calculate the QC flag from the thunk components, in the lowest bit
//ZZ    of the word (bit 0). */
//ZZ extern
//ZZ UInt armg_calculate_flag_qc ( UInt resL1, UInt resL2,
//ZZ                               UInt resR1, UInt resR2 );

extern ULong arm64g_calc_crc32b ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32h ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32w ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32x ( ULong acc, ULong bits );

extern ULong arm64g_calc_crc32cb ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32ch ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32cw ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32cx ( ULong acc, ULong bits );

/* --- DIRTY HELPERS --- */

extern ULong arm64g_dirtyhelper_MRS_CNTVCT_EL0 ( void );

extern ULong arm64g_dirtyhelper_MRS_CNTFRQ_EL0 ( void );

extern void arm64g_dirtyhelper_PMULLQ ( /*OUT*/V128* res,
                                        ULong arg1, ULong arg2 );

extern void arm64g_dirtyhelper_AESE ( /*OUT*/V128* res,
                                      ULong argHi, ULong argLo );
extern void arm64g_dirtyhelper_AESD ( /*OUT*/V128* res,
                                      ULong argHi, ULong argLo );
extern void arm64g_dirtyhelper_AESMC ( /*OUT*/V128* res,
                                       ULong argHi, ULong argLo );
extern void arm64g_dirtyhelper_AESIMC ( /*OUT*/V128* res,
                                        ULong argHi, ULong argLo );

extern
void arm64g_dirtyhelper_SHA1C ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1H ( /*OUT*/V128* res,
                                ULong nHi, ULong nLo );
extern
void arm64g_dirtyhelper_SHA1M ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1P ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1SU0 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                  ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1SU1 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                  ULong nHi, ULong nLo );
extern
void arm64g_dirtyhelper_SHA256H2 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                   ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA256H ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                  ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA256SU0 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                    ULong nHi, ULong nLo );
extern
void arm64g_dirtyhelper_SHA256SU1 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                    ULong nHi, ULong nLo,
                                    ULong mHi, ULong mLo );


/*---------------------------------------------------------*/
/*--- Condition code stuff                              ---*/
/*---------------------------------------------------------*/

/* Flag masks.  Defines positions of flag bits in the NZCV
   register. */
#define ARM64G_CC_SHIFT_N  31
#define ARM64G_CC_SHIFT_Z  30
#define ARM64G_CC_SHIFT_C  29
#define ARM64G_CC_SHIFT_V  28
//ZZ #define ARMG_CC_SHIFT_Q  27
//ZZ
//ZZ #define ARMG_CC_MASK_N    (1 << ARMG_CC_SHIFT_N)
//ZZ #define ARMG_CC_MASK_Z    (1 << ARMG_CC_SHIFT_Z)
//ZZ #define ARMG_CC_MASK_C    (1 << ARMG_CC_SHIFT_C)
//ZZ #define ARMG_CC_MASK_V    (1 << ARMG_CC_SHIFT_V)
//ZZ #define ARMG_CC_MASK_Q    (1 << ARMG_CC_SHIFT_Q)

/* Flag thunk descriptors.  A four-word thunk is used to record
   details of the most recent flag-setting operation, so NZCV can
   be computed later if needed.

   The four words are:

      CC_OP, which describes the operation.

      CC_DEP1, CC_DEP2, CC_NDEP.  These are arguments to the
      operation.  We want to set up the mcx_masks in flag helper calls
      involving these fields so that Memcheck "believes" that the
      resulting flags are data-dependent on both CC_DEP1 and
      CC_DEP2.  Hence the name DEP.

   When building the thunk, it is always necessary to write words into
   CC_DEP1/2 and NDEP, even if those args are not used given the CC_OP
   field.  This is important because otherwise Memcheck could give
   false positives as it does not understand the relationship between
   the CC_OP field and CC_DEP1/2/NDEP, and so believes that the
   definedness of the stored flags always depends on all 3 DEP values.

   A summary of the field usages is:

   OP        DEP1   DEP2   DEP3
   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

   OP_COPY    curr_NZCV:28x0   unused   unused
   OP_ADD32   argL             argR     unused
   OP_ADD64   argL             argR     unused
   OP_SUB32   argL             argR     unused
   OP_SUB64   argL             argR     unused
   OP_ADC32   argL             argR     63x0:old_C
   OP_ADC64   argL             argR     63x0:old_C
   OP_SBC32   argL             argR     63x0:old_C
   OP_SBC64   argL             argR     63x0:old_C
   OP_LOGIC32 result           unused   unused
   OP_LOGIC64 result           unused   unused
//ZZ    OP_MUL     result           unused   30x0:old_C:old_V
//ZZ    OP_MULL    resLO32          resHI32  30x0:old_C:old_V
//ZZ
*/

enum {
   ARM64G_CC_OP_COPY=0,   /* DEP1 = NZCV in 31:28, DEP2 = 0, DEP3 = 0
                             just copy DEP1 to output */

   ARM64G_CC_OP_ADD32,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_ADD64,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_SUB32,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_SUB64,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_ADC32,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_ADC64,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_SBC32,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_SBC64,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_LOGIC32,  /* DEP1 = result, DEP2 = 0, DEP3 = 0 */
   ARM64G_CC_OP_LOGIC64,  /* DEP1 = result, DEP2 = 0, DEP3 = 0 */

//ZZ    ARMG_CC_OP_MUL,     /* DEP1 = result, DEP2 = 0, DEP3 = oldC:old_V
//ZZ                           (in bits 1:0) */
//ZZ
//ZZ    ARMG_CC_OP_MULL,    /* DEP1 = resLO32, DEP2 = resHI32, DEP3 = oldC:old_V
//ZZ                           (in bits 1:0) */

   ARM64G_CC_OP_NUMBER
};

/* XXXX because of the calling conventions for
   arm64g_calculate_condition, all these OP values MUST be in the range
   0 .. 15 only (viz, 4-bits). */



/* Defines conditions which we can ask for */

typedef
   enum {
      ARM64CondEQ = 0,  /* equal                         : Z=1 */
      ARM64CondNE = 1,  /* not equal                     : Z=0 */

      ARM64CondCS = 2,  /* >=u (higher or same) (aka HS) : C=1 */
      ARM64CondCC = 3,  /* <u  (lower)          (aka LO) : C=0 */

      ARM64CondMI = 4,  /* minus (negative)              : N=1 */
      ARM64CondPL = 5,  /* plus (zero or +ve)            : N=0 */

      ARM64CondVS = 6,  /* overflow                      : V=1 */
      ARM64CondVC = 7,  /* no overflow                   : V=0 */

      ARM64CondHI = 8,  /* >u   (higher)                 : C=1 && Z=0 */
      ARM64CondLS = 9,  /* <=u  (lower or same)          : C=0 || Z=1 */

      ARM64CondGE = 10, /* >=s (signed greater or equal) : N=V */
      ARM64CondLT = 11, /* <s  (signed less than)        : N!=V */

      ARM64CondGT = 12, /* >s  (signed greater)          : Z=0 && N=V */
      ARM64CondLE = 13, /* <=s (signed less or equal)    : Z=1 || N!=V */

      ARM64CondAL = 14, /* always (unconditional)        : 1 */
      ARM64CondNV = 15  /* always (unconditional)        : 1 */
   }
   ARM64Condcode;

#endif /* ndef __VEX_GUEST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   guest_arm64_defs.h ---*/
/*---------------------------------------------------------------*/