// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_ARM64_INSTRUCTION_CODES_ARM64_H_
#define V8_COMPILER_BACKEND_ARM64_INSTRUCTION_CODES_ARM64_H_

namespace v8 {
namespace internal {
namespace compiler {

// ARM64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(Arm64Add) \
  V(Arm64Add32) \
  V(Arm64And) \
  V(Arm64And32) \
  V(Arm64Bic) \
  V(Arm64Bic32) \
  V(Arm64Clz) \
  V(Arm64Clz32) \
  V(Arm64Cmp) \
  V(Arm64Cmp32) \
  V(Arm64Cmn) \
  V(Arm64Cmn32) \
  V(Arm64Cnt) \
  V(Arm64Tst) \
  V(Arm64Tst32) \
  V(Arm64Or) \
  V(Arm64Or32) \
  V(Arm64Orn) \
  V(Arm64Orn32) \
  V(Arm64Eor) \
  V(Arm64Eor32) \
  V(Arm64Eon) \
  V(Arm64Eon32) \
  V(Arm64Saddlp) \
  V(Arm64Sub) \
  V(Arm64Sub32) \
  V(Arm64Mul) \
  V(Arm64Mul32) \
  V(Arm64Smull) \
  V(Arm64Smull2) \
  V(Arm64Uaddlp) \
  V(Arm64Umull) \
  V(Arm64Umull2) \
  V(Arm64Madd) \
  V(Arm64Madd32) \
  V(Arm64Msub) \
  V(Arm64Msub32) \
  V(Arm64Mneg) \
  V(Arm64Mneg32) \
  V(Arm64Idiv) \
  V(Arm64Idiv32) \
  V(Arm64Udiv) \
  V(Arm64Udiv32) \
  V(Arm64Imod) \
  V(Arm64Imod32) \
  V(Arm64Umod) \
  V(Arm64Umod32) \
  V(Arm64Not) \
  V(Arm64Not32) \
  V(Arm64Lsl) \
  V(Arm64Lsl32) \
  V(Arm64Lsr) \
  V(Arm64Lsr32) \
  V(Arm64Asr) \
  V(Arm64Asr32) \
  V(Arm64Ror) \
  V(Arm64Ror32) \
  V(Arm64Mov32) \
  V(Arm64Sxtb32) \
  V(Arm64Sxth32) \
  V(Arm64Sxtb) \
  V(Arm64Sxth) \
  V(Arm64Sxtw) \
  V(Arm64Sbfx) \
  V(Arm64Sbfx32) \
  V(Arm64Ubfx) \
  V(Arm64Ubfx32) \
  V(Arm64Ubfiz32) \
  V(Arm64Bfi) \
  V(Arm64Rbit) \
  V(Arm64Rbit32) \
  V(Arm64Rev) \
  V(Arm64Rev32) \
  V(Arm64TestAndBranch32) \
  V(Arm64TestAndBranch) \
  V(Arm64CompareAndBranch32) \
  V(Arm64CompareAndBranch) \
  V(Arm64Claim) \
  V(Arm64Poke) \
  V(Arm64PokePair) \
  V(Arm64Peek) \
  V(Arm64Float32Cmp) \
  V(Arm64Float32Add) \
  V(Arm64Float32Sub) \
  V(Arm64Float32Mul) \
  V(Arm64Float32Div) \
  V(Arm64Float32Abs) \
  V(Arm64Float32Neg) \
  V(Arm64Float32Sqrt) \
  V(Arm64Float32Fnmul) \
  V(Arm64Float32RoundDown) \
  V(Arm64Float32Max) \
  V(Arm64Float32Min) \
  V(Arm64Float64Cmp) \
  V(Arm64Float64Add) \
  V(Arm64Float64Sub) \
  V(Arm64Float64Mul) \
  V(Arm64Float64Div) \
  V(Arm64Float64Mod) \
  V(Arm64Float64Max) \
  V(Arm64Float64Min) \
  V(Arm64Float64Abs) \
  V(Arm64Float64Neg) \
  V(Arm64Float64Sqrt) \
  V(Arm64Float64Fnmul) \
  V(Arm64Float64RoundDown) \
  V(Arm64Float32RoundUp) \
  V(Arm64Float64RoundUp) \
  V(Arm64Float64RoundTiesAway) \
  V(Arm64Float32RoundTruncate) \
  V(Arm64Float64RoundTruncate) \
  V(Arm64Float32RoundTiesEven) \
  V(Arm64Float64RoundTiesEven) \
  V(Arm64Float64SilenceNaN) \
  V(Arm64Float32ToFloat64) \
  V(Arm64Float64ToFloat32) \
  V(Arm64Float32ToInt32) \
  V(Arm64Float64ToInt32) \
  V(Arm64Float32ToUint32) \
  V(Arm64Float64ToUint32) \
  V(Arm64Float32ToInt64) \
  V(Arm64Float64ToInt64) \
  V(Arm64Float32ToUint64) \
  V(Arm64Float64ToUint64) \
  V(Arm64Int32ToFloat32) \
  V(Arm64Int32ToFloat64) \
  V(Arm64Int64ToFloat32) \
  V(Arm64Int64ToFloat64) \
  V(Arm64Uint32ToFloat32) \
  V(Arm64Uint32ToFloat64) \
  V(Arm64Uint64ToFloat32) \
  V(Arm64Uint64ToFloat64) \
  V(Arm64Float64ExtractLowWord32) \
  V(Arm64Float64ExtractHighWord32) \
  V(Arm64Float64InsertLowWord32) \
  V(Arm64Float64InsertHighWord32) \
  V(Arm64Float64MoveU64) \
  V(Arm64U64MoveFloat64) \
  V(Arm64LdrS) \
  V(Arm64StrS) \
  V(Arm64LdrD) \
  V(Arm64StrD) \
  V(Arm64LdrQ) \
  V(Arm64StrQ) \
  V(Arm64Ldrb) \
  V(Arm64Ldrsb) \
  V(Arm64Strb) \
  V(Arm64Ldrh) \
  V(Arm64Ldrsh) \
  V(Arm64Strh) \
  V(Arm64Ldrsw) \
  V(Arm64LdrW) \
  V(Arm64StrW) \
  V(Arm64Ldr) \
  V(Arm64LdrDecompressTaggedSigned) \
  V(Arm64LdrDecompressTaggedPointer) \
  V(Arm64LdrDecompressAnyTagged) \
  V(Arm64Str) \
  V(Arm64StrCompressTagged) \
  V(Arm64DmbIsh) \
  V(Arm64DsbIsb) \
  V(Arm64Sxtl) \
  V(Arm64Sxtl2) \
  V(Arm64Uxtl) \
  V(Arm64Uxtl2) \
  V(Arm64F64x2Splat) \
  V(Arm64F64x2ExtractLane) \
  V(Arm64F64x2ReplaceLane) \
  V(Arm64F64x2Abs) \
  V(Arm64F64x2Neg) \
  V(Arm64F64x2Sqrt) \
  V(Arm64F64x2Add) \
  V(Arm64F64x2Sub) \
  V(Arm64F64x2Mul) \
  V(Arm64F64x2Div) \
  V(Arm64F64x2Min) \
  V(Arm64F64x2Max) \
  V(Arm64F64x2Eq) \
  V(Arm64F64x2Ne) \
  V(Arm64F64x2Lt) \
  V(Arm64F64x2Le) \
  V(Arm64F64x2Qfma) \
  V(Arm64F64x2Qfms) \
  V(Arm64F64x2Pmin) \
  V(Arm64F64x2Pmax) \
  V(Arm64F32x4Splat) \
  V(Arm64F32x4ExtractLane) \
  V(Arm64F32x4ReplaceLane) \
  V(Arm64F32x4SConvertI32x4) \
  V(Arm64F32x4UConvertI32x4) \
  V(Arm64F32x4Abs) \
  V(Arm64F32x4Neg) \
  V(Arm64F32x4Sqrt) \
  V(Arm64F32x4RecipApprox) \
  V(Arm64F32x4RecipSqrtApprox) \
  V(Arm64F32x4Add) \
  V(Arm64F32x4AddHoriz) \
  V(Arm64F32x4Sub) \
  V(Arm64F32x4Mul) \
  V(Arm64F32x4Div) \
  V(Arm64F32x4Min) \
  V(Arm64F32x4Max) \
  V(Arm64F32x4Eq) \
  V(Arm64F32x4Ne) \
  V(Arm64F32x4Lt) \
  V(Arm64F32x4Le) \
  V(Arm64F32x4Qfma) \
  V(Arm64F32x4Qfms) \
  V(Arm64F32x4Pmin) \
  V(Arm64F32x4Pmax) \
  V(Arm64I64x2Splat) \
  V(Arm64I64x2ExtractLane) \
  V(Arm64I64x2ReplaceLane) \
  V(Arm64I64x2Neg) \
  V(Arm64I64x2Shl) \
  V(Arm64I64x2ShrS) \
  V(Arm64I64x2Add) \
  V(Arm64I64x2Sub) \
  V(Arm64I64x2Mul) \
  V(Arm64I64x2Eq) \
  V(Arm64I64x2ShrU) \
  V(Arm64I32x4Splat) \
  V(Arm64I32x4ExtractLane) \
  V(Arm64I32x4ReplaceLane) \
  V(Arm64I32x4SConvertF32x4) \
  V(Arm64I32x4Neg) \
  V(Arm64I32x4Shl) \
  V(Arm64I32x4ShrS) \
  V(Arm64I32x4Add) \
  V(Arm64I32x4AddHoriz) \
  V(Arm64I32x4Sub) \
  V(Arm64I32x4Mul) \
  V(Arm64I32x4Mla) \
  V(Arm64I32x4Mls) \
  V(Arm64I32x4MinS) \
  V(Arm64I32x4MaxS) \
  V(Arm64I32x4Eq) \
  V(Arm64I32x4Ne) \
  V(Arm64I32x4GtS) \
  V(Arm64I32x4GeS) \
  V(Arm64I32x4UConvertF32x4) \
  V(Arm64I32x4ShrU) \
  V(Arm64I32x4MinU) \
  V(Arm64I32x4MaxU) \
  V(Arm64I32x4GtU) \
  V(Arm64I32x4GeU) \
  V(Arm64I32x4Abs) \
  V(Arm64I32x4BitMask) \
  V(Arm64I32x4DotI16x8S) \
  V(Arm64I16x8Splat) \
  V(Arm64I16x8ExtractLaneU) \
  V(Arm64I16x8ExtractLaneS) \
  V(Arm64I16x8ReplaceLane) \
  V(Arm64I16x8Neg) \
  V(Arm64I16x8Shl) \
  V(Arm64I16x8ShrS) \
  V(Arm64I16x8SConvertI32x4) \
  V(Arm64I16x8Add) \
  V(Arm64I16x8AddSatS) \
  V(Arm64I16x8AddHoriz) \
  V(Arm64I16x8Sub) \
  V(Arm64I16x8SubSatS) \
  V(Arm64I16x8Mul) \
  V(Arm64I16x8Mla) \
  V(Arm64I16x8Mls) \
  V(Arm64I16x8MinS) \
  V(Arm64I16x8MaxS) \
  V(Arm64I16x8Eq) \
  V(Arm64I16x8Ne) \
  V(Arm64I16x8GtS) \
  V(Arm64I16x8GeS) \
  V(Arm64I16x8ShrU) \
  V(Arm64I16x8UConvertI32x4) \
  V(Arm64I16x8AddSatU) \
  V(Arm64I16x8SubSatU) \
  V(Arm64I16x8MinU) \
  V(Arm64I16x8MaxU) \
  V(Arm64I16x8GtU) \
  V(Arm64I16x8GeU) \
  V(Arm64I16x8RoundingAverageU) \
  V(Arm64I16x8Q15MulRSatS) \
  V(Arm64I16x8Abs) \
  V(Arm64I16x8BitMask) \
  V(Arm64I8x16Splat) \
  V(Arm64I8x16ExtractLaneU) \
  V(Arm64I8x16ExtractLaneS) \
  V(Arm64I8x16ReplaceLane) \
  V(Arm64I8x16Neg) \
  V(Arm64I8x16Shl) \
  V(Arm64I8x16ShrS) \
  V(Arm64I8x16SConvertI16x8) \
  V(Arm64I8x16Add) \
  V(Arm64I8x16AddSatS) \
  V(Arm64I8x16Sub) \
  V(Arm64I8x16SubSatS) \
  V(Arm64I8x16Mul) \
  V(Arm64I8x16Mla) \
  V(Arm64I8x16Mls) \
  V(Arm64I8x16MinS) \
  V(Arm64I8x16MaxS) \
  V(Arm64I8x16Eq) \
  V(Arm64I8x16Ne) \
  V(Arm64I8x16GtS) \
  V(Arm64I8x16GeS) \
  V(Arm64I8x16ShrU) \
  V(Arm64I8x16UConvertI16x8) \
  V(Arm64I8x16AddSatU) \
  V(Arm64I8x16SubSatU) \
  V(Arm64I8x16MinU) \
  V(Arm64I8x16MaxU) \
  V(Arm64I8x16GtU) \
  V(Arm64I8x16GeU) \
  V(Arm64I8x16RoundingAverageU) \
  V(Arm64I8x16Abs) \
  V(Arm64I8x16BitMask) \
  V(Arm64S128Const) \
  V(Arm64S128Zero) \
  V(Arm64S128Dup) \
  V(Arm64S128And) \
  V(Arm64S128Or) \
  V(Arm64S128Xor) \
  V(Arm64S128Not) \
  V(Arm64S128Select) \
  V(Arm64S128AndNot) \
  V(Arm64S32x4ZipLeft) \
  V(Arm64S32x4ZipRight) \
  V(Arm64S32x4UnzipLeft) \
  V(Arm64S32x4UnzipRight) \
  V(Arm64S32x4TransposeLeft) \
  V(Arm64S32x4TransposeRight) \
  V(Arm64S32x4Shuffle) \
  V(Arm64S16x8ZipLeft) \
  V(Arm64S16x8ZipRight) \
  V(Arm64S16x8UnzipLeft) \
  V(Arm64S16x8UnzipRight) \
  V(Arm64S16x8TransposeLeft) \
  V(Arm64S16x8TransposeRight) \
  V(Arm64S8x16ZipLeft) \
  V(Arm64S8x16ZipRight) \
  V(Arm64S8x16UnzipLeft) \
  V(Arm64S8x16UnzipRight) \
  V(Arm64S8x16TransposeLeft) \
  V(Arm64S8x16TransposeRight) \
  V(Arm64S8x16Concat) \
  V(Arm64I8x16Swizzle) \
  V(Arm64I8x16Shuffle) \
  V(Arm64S32x2Reverse) \
  V(Arm64S16x4Reverse) \
  V(Arm64S16x2Reverse) \
  V(Arm64S8x8Reverse) \
  V(Arm64S8x4Reverse) \
  V(Arm64S8x2Reverse) \
  V(Arm64V128AnyTrue) \
  V(Arm64V32x4AllTrue) \
  V(Arm64V16x8AllTrue) \
  V(Arm64V8x16AllTrue) \
  V(Arm64LoadSplat) \
  V(Arm64S128Load8x8S) \
  V(Arm64S128Load8x8U) \
  V(Arm64S128Load16x4S) \
  V(Arm64S128Load16x4U) \
  V(Arm64S128Load32x2S) \
  V(Arm64S128Load32x2U) \
  V(Arm64S128Load32Zero) \
  V(Arm64S128Load64Zero) \
  V(Arm64Word64AtomicLoadUint8) \
  V(Arm64Word64AtomicLoadUint16) \
  V(Arm64Word64AtomicLoadUint32) \
  V(Arm64Word64AtomicLoadUint64) \
  V(Arm64Word64AtomicStoreWord8) \
  V(Arm64Word64AtomicStoreWord16) \
  V(Arm64Word64AtomicStoreWord32) \
  V(Arm64Word64AtomicStoreWord64) \
  V(Arm64Word64AtomicAddUint8) \
  V(Arm64Word64AtomicAddUint16) \
  V(Arm64Word64AtomicAddUint32) \
  V(Arm64Word64AtomicAddUint64) \
  V(Arm64Word64AtomicSubUint8) \
  V(Arm64Word64AtomicSubUint16) \
  V(Arm64Word64AtomicSubUint32) \
  V(Arm64Word64AtomicSubUint64) \
  V(Arm64Word64AtomicAndUint8) \
  V(Arm64Word64AtomicAndUint16) \
  V(Arm64Word64AtomicAndUint32) \
  V(Arm64Word64AtomicAndUint64) \
  V(Arm64Word64AtomicOrUint8) \
  V(Arm64Word64AtomicOrUint16) \
  V(Arm64Word64AtomicOrUint32) \
  V(Arm64Word64AtomicOrUint64) \
  V(Arm64Word64AtomicXorUint8) \
  V(Arm64Word64AtomicXorUint16) \
  V(Arm64Word64AtomicXorUint32) \
  V(Arm64Word64AtomicXorUint64) \
  V(Arm64Word64AtomicExchangeUint8) \
  V(Arm64Word64AtomicExchangeUint16) \
  V(Arm64Word64AtomicExchangeUint32) \
  V(Arm64Word64AtomicExchangeUint64) \
  V(Arm64Word64AtomicCompareExchangeUint8) \
  V(Arm64Word64AtomicCompareExchangeUint16) \
  V(Arm64Word64AtomicCompareExchangeUint32) \
  V(Arm64Word64AtomicCompareExchangeUint64)

// Addressing modes represent the "shape" of inputs to an instruction.
// Many instructions support multiple addressing modes. Addressing modes
// are encoded into the InstructionCode of the instruction and tell the
// code generator after register allocation which assembler method to call.
//
// We use the following local notation for addressing modes:
//
// R = register
// O = register or stack slot
// D = double register
// I = immediate (handle, external, int32)
// MRI = [register + immediate]
// MRR = [register + register]
#define TARGET_ADDRESSING_MODE_LIST(V)                          \
  V(MRI)              /* [%r0 + K] */                           \
  V(MRR)              /* [%r0 + %r1] */                         \
  V(Operand2_R_LSL_I) /* %r0 LSL K */                           \
  V(Operand2_R_LSR_I) /* %r0 LSR K */                           \
  V(Operand2_R_ASR_I) /* %r0 ASR K */                           \
  V(Operand2_R_ROR_I) /* %r0 ROR K */                           \
  V(Operand2_R_UXTB)  /* %r0 UXTB (unsigned extend byte) */     \
  V(Operand2_R_UXTH)  /* %r0 UXTH (unsigned extend halfword) */ \
  V(Operand2_R_SXTB)  /* %r0 SXTB (signed extend byte) */       \
  V(Operand2_R_SXTH)  /* %r0 SXTH (signed extend halfword) */   \
  V(Operand2_R_SXTW)  /* %r0 SXTW (signed extend word) */       \
  V(Root)             /* [%rr + K] */

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_ARM64_INSTRUCTION_CODES_ARM64_H_
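
// Note on how the two X-macro lists above are consumed: the
// architecture-independent backend expands them into enums and packs the
// results into each InstructionCode. A minimal sketch of such an expansion
// follows; the macro and enum names here are illustrative assumptions, see
// src/compiler/backend/instruction-codes.h for the actual definitions.
//
//   #define DECLARE_ARCH_OPCODE(Name) k##Name,
//   enum ArchOpcode { TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE) };
//   #undef DECLARE_ARCH_OPCODE
//
//   #define DECLARE_ADDRESSING_MODE(Name) kMode_##Name,
//   enum AddressingMode {
//     kMode_None,
//     TARGET_ADDRESSING_MODE_LIST(DECLARE_ADDRESSING_MODE)
//   };
//   #undef DECLARE_ADDRESSING_MODE
//
// An InstructionCode then combines an ArchOpcode with an AddressingMode and
// flag bits in a single integer, which the code generator decodes after
// register allocation to choose the assembler method to emit.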