// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_RISCV64_INSTRUCTION_CODES_RISCV64_H_
#define V8_COMPILER_BACKEND_RISCV64_INSTRUCTION_CODES_RISCV64_H_

namespace v8 {
namespace internal {
namespace compiler {

// RISC-V-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(RiscvAdd32) \
  V(RiscvAdd64) \
  V(RiscvAddOvf64) \
  V(RiscvSub32) \
  V(RiscvSub64) \
  V(RiscvSubOvf64) \
  V(RiscvMul32) \
  V(RiscvMulOvf32) \
  V(RiscvMulHigh32) \
  V(RiscvMulHigh64) \
  V(RiscvMulHighU32) \
  V(RiscvMul64) \
  V(RiscvDiv32) \
  V(RiscvDiv64) \
  V(RiscvDivU32) \
  V(RiscvDivU64) \
  V(RiscvMod32) \
  V(RiscvMod64) \
  V(RiscvModU32) \
  V(RiscvModU64) \
  V(RiscvAnd) \
  V(RiscvAnd32) \
  V(RiscvOr) \
  V(RiscvOr32) \
  V(RiscvNor) \
  V(RiscvNor32) \
  V(RiscvXor) \
  V(RiscvXor32) \
  V(RiscvClz32) \
  V(RiscvShl32) \
  V(RiscvShr32) \
  V(RiscvSar32) \
  V(RiscvZeroExtendWord) \
  V(RiscvSignExtendWord) \
  V(RiscvClz64) \
  V(RiscvCtz32) \
  V(RiscvCtz64) \
  V(RiscvPopcnt32) \
  V(RiscvPopcnt64) \
  V(RiscvShl64) \
  V(RiscvShr64) \
  V(RiscvSar64) \
  V(RiscvRor32) \
  V(RiscvRor64) \
  V(RiscvMov) \
  V(RiscvTst) \
  V(RiscvCmp) \
  V(RiscvCmpZero) \
  V(RiscvCmpS) \
  V(RiscvAddS) \
  V(RiscvSubS) \
  V(RiscvMulS) \
  V(RiscvDivS) \
  V(RiscvModS) \
  V(RiscvAbsS) \
  V(RiscvNegS) \
  V(RiscvSqrtS) \
  V(RiscvMaxS) \
  V(RiscvMinS) \
  V(RiscvCmpD) \
  V(RiscvAddD) \
  V(RiscvSubD) \
  V(RiscvMulD) \
  V(RiscvDivD) \
  V(RiscvModD) \
  V(RiscvAbsD) \
  V(RiscvNegD) \
  V(RiscvSqrtD) \
  V(RiscvMaxD) \
  V(RiscvMinD) \
  V(RiscvFloat64RoundDown) \
  V(RiscvFloat64RoundTruncate) \
  V(RiscvFloat64RoundUp) \
  V(RiscvFloat64RoundTiesEven) \
  V(RiscvFloat32RoundDown) \
  V(RiscvFloat32RoundTruncate) \
  V(RiscvFloat32RoundUp) \
  V(RiscvFloat32RoundTiesEven) \
  V(RiscvCvtSD) \
  V(RiscvCvtDS) \
  V(RiscvTruncWD) \
  V(RiscvRoundWD) \
  V(RiscvFloorWD) \
  V(RiscvCeilWD) \
  V(RiscvTruncWS) \
  V(RiscvRoundWS) \
  V(RiscvFloorWS) \
  V(RiscvCeilWS) \
  V(RiscvTruncLS) \
  V(RiscvTruncLD) \
  V(RiscvTruncUwD) \
  V(RiscvTruncUwS) \
  V(RiscvTruncUlS) \
  V(RiscvTruncUlD) \
  V(RiscvCvtDW) \
  V(RiscvCvtSL) \
  V(RiscvCvtSW) \
  V(RiscvCvtSUw) \
  V(RiscvCvtSUl) \
  V(RiscvCvtDL) \
  V(RiscvCvtDUw) \
  V(RiscvCvtDUl) \
  V(RiscvLb) \
  V(RiscvLbu) \
  V(RiscvSb) \
  V(RiscvLh) \
  V(RiscvUlh) \
  V(RiscvLhu) \
  V(RiscvUlhu) \
  V(RiscvSh) \
  V(RiscvUsh) \
  V(RiscvLd) \
  V(RiscvUld) \
  V(RiscvLw) \
  V(RiscvUlw) \
  V(RiscvLwu) \
  V(RiscvUlwu) \
  V(RiscvSw) \
  V(RiscvUsw) \
  V(RiscvSd) \
  V(RiscvUsd) \
  V(RiscvLoadFloat) \
  V(RiscvULoadFloat) \
  V(RiscvStoreFloat) \
  V(RiscvUStoreFloat) \
  V(RiscvLoadDouble) \
  V(RiscvULoadDouble) \
  V(RiscvStoreDouble) \
  V(RiscvUStoreDouble) \
  V(RiscvBitcastDL) \
  V(RiscvBitcastLD) \
  V(RiscvBitcastInt32ToFloat32) \
  V(RiscvBitcastFloat32ToInt32) \
  V(RiscvFloat64ExtractLowWord32) \
  V(RiscvFloat64ExtractHighWord32) \
  V(RiscvFloat64InsertLowWord32) \
  V(RiscvFloat64InsertHighWord32) \
  V(RiscvFloat32Max) \
  V(RiscvFloat64Max) \
  V(RiscvFloat32Min) \
  V(RiscvFloat64Min) \
  V(RiscvFloat64SilenceNaN) \
  V(RiscvPush) \
  V(RiscvPeek) \
  V(RiscvByteSwap64) \
  V(RiscvByteSwap32) \
  V(RiscvStoreToStackSlot) \
  V(RiscvStackClaim) \
  V(RiscvSignExtendByte) \
  V(RiscvSignExtendShort) \
  V(RiscvSync) \
  V(RiscvAssertEqual) \
  V(RiscvS128Const) \
  V(RiscvS128Zero) \
  V(RiscvS128AllOnes) \
  V(RiscvI32x4Splat) \
  V(RiscvI32x4ExtractLane) \
  V(RiscvI32x4ReplaceLane) \
  V(RiscvI32x4Add) \
  V(RiscvI32x4Sub) \
  V(RiscvF64x2Abs) \
  V(RiscvF64x2Neg) \
  V(RiscvF32x4Splat) \
  V(RiscvF32x4ExtractLane) \
  V(RiscvF32x4ReplaceLane) \
  V(RiscvF32x4SConvertI32x4) \
  V(RiscvF32x4UConvertI32x4) \
  V(RiscvI64x2SConvertI32x4Low) \
  V(RiscvI64x2SConvertI32x4High) \
  V(RiscvI64x2UConvertI32x4Low) \
  V(RiscvI64x2UConvertI32x4High) \
  V(RiscvI32x4Mul) \
  V(RiscvI32x4MaxS) \
  V(RiscvI32x4MinS) \
  V(RiscvI32x4Eq) \
  V(RiscvI32x4Ne) \
  V(RiscvI32x4Shl) \
  V(RiscvI32x4ShrS) \
  V(RiscvI32x4ShrU) \
  V(RiscvI32x4MaxU) \
  V(RiscvI32x4MinU) \
  V(RiscvI64x2GtS) \
  V(RiscvI64x2GeS) \
  V(RiscvI64x2Eq) \
  V(RiscvI64x2Ne) \
  V(RiscvF64x2Sqrt) \
  V(RiscvF64x2Add) \
  V(RiscvF64x2Sub) \
  V(RiscvF64x2Mul) \
  V(RiscvF64x2Div) \
  V(RiscvF64x2Min) \
  V(RiscvF64x2Max) \
  V(RiscvF64x2ConvertLowI32x4S) \
  V(RiscvF64x2ConvertLowI32x4U) \
  V(RiscvF64x2PromoteLowF32x4) \
  V(RiscvF64x2Eq) \
  V(RiscvF64x2Ne) \
  V(RiscvF64x2Lt) \
  V(RiscvF64x2Le) \
  V(RiscvF64x2Splat) \
  V(RiscvF64x2ExtractLane) \
  V(RiscvF64x2ReplaceLane) \
  V(RiscvF64x2Pmin) \
  V(RiscvF64x2Pmax) \
  V(RiscvF64x2Ceil) \
  V(RiscvF64x2Floor) \
  V(RiscvF64x2Trunc) \
  V(RiscvF64x2NearestInt) \
  V(RiscvI64x2Splat) \
  V(RiscvI64x2ExtractLane) \
  V(RiscvI64x2ReplaceLane) \
  V(RiscvI64x2Add) \
  V(RiscvI64x2Sub) \
  V(RiscvI64x2Mul) \
  V(RiscvI64x2Abs) \
  V(RiscvI64x2Neg) \
  V(RiscvI64x2Shl) \
  V(RiscvI64x2ShrS) \
  V(RiscvI64x2ShrU) \
  V(RiscvI64x2BitMask) \
  V(RiscvF32x4Abs) \
  V(RiscvF32x4Neg) \
  V(RiscvF32x4Sqrt) \
  V(RiscvF32x4RecipApprox) \
  V(RiscvF32x4RecipSqrtApprox) \
  V(RiscvF32x4Qfma) \
  V(RiscvF32x4Qfms) \
  V(RiscvF64x2Qfma) \
  V(RiscvF64x2Qfms) \
  V(RiscvF32x4Add) \
  V(RiscvF32x4Sub) \
  V(RiscvF32x4Mul) \
  V(RiscvF32x4Div) \
  V(RiscvF32x4Max) \
  V(RiscvF32x4Min) \
  V(RiscvF32x4Eq) \
  V(RiscvF32x4Ne) \
  V(RiscvF32x4Lt) \
  V(RiscvF32x4Le) \
  V(RiscvF32x4Pmin) \
  V(RiscvF32x4Pmax) \
  V(RiscvF32x4DemoteF64x2Zero) \
  V(RiscvF32x4Ceil) \
  V(RiscvF32x4Floor) \
  V(RiscvF32x4Trunc) \
  V(RiscvF32x4NearestInt) \
  V(RiscvI32x4SConvertF32x4) \
  V(RiscvI32x4UConvertF32x4) \
  V(RiscvI32x4Neg) \
  V(RiscvI32x4GtS) \
  V(RiscvI32x4GeS) \
  V(RiscvI32x4GtU) \
  V(RiscvI32x4GeU) \
  V(RiscvI32x4Abs) \
  V(RiscvI32x4BitMask) \
  V(RiscvI32x4TruncSatF64x2SZero) \
  V(RiscvI32x4TruncSatF64x2UZero) \
  V(RiscvI16x8Splat) \
  V(RiscvI16x8ExtractLaneU) \
  V(RiscvI16x8ExtractLaneS) \
  V(RiscvI16x8ReplaceLane) \
  V(RiscvI16x8Neg) \
  V(RiscvI16x8Shl) \
  V(RiscvI16x8ShrS) \
  V(RiscvI16x8ShrU) \
  V(RiscvI16x8Add) \
  V(RiscvI16x8AddSatS) \
  V(RiscvI16x8Sub) \
  V(RiscvI16x8SubSatS) \
  V(RiscvI16x8Mul) \
  V(RiscvI16x8MaxS) \
  V(RiscvI16x8MinS) \
  V(RiscvI16x8Eq) \
  V(RiscvI16x8Ne) \
  V(RiscvI16x8GtS) \
  V(RiscvI16x8GeS) \
  V(RiscvI16x8AddSatU) \
  V(RiscvI16x8SubSatU) \
  V(RiscvI16x8MaxU) \
  V(RiscvI16x8MinU) \
  V(RiscvI16x8GtU) \
  V(RiscvI16x8GeU) \
  V(RiscvI16x8RoundingAverageU) \
  V(RiscvI16x8Q15MulRSatS) \
  V(RiscvI16x8Abs) \
  V(RiscvI16x8BitMask) \
  V(RiscvI8x16Splat) \
  V(RiscvI8x16ExtractLaneU) \
  V(RiscvI8x16ExtractLaneS) \
  V(RiscvI8x16ReplaceLane) \
  V(RiscvI8x16Neg) \
  V(RiscvI8x16Shl) \
  V(RiscvI8x16ShrS) \
  V(RiscvI8x16Add) \
  V(RiscvI8x16AddSatS) \
  V(RiscvI8x16Sub) \
  V(RiscvI8x16SubSatS) \
  V(RiscvI8x16MaxS) \
  V(RiscvI8x16MinS) \
  V(RiscvI8x16Eq) \
  V(RiscvI8x16Ne) \
  V(RiscvI8x16GtS) \
  V(RiscvI8x16GeS) \
  V(RiscvI8x16ShrU) \
  V(RiscvI8x16AddSatU) \
  V(RiscvI8x16SubSatU) \
  V(RiscvI8x16MaxU) \
  V(RiscvI8x16MinU) \
  V(RiscvI8x16GtU) \
  V(RiscvI8x16GeU) \
  V(RiscvI8x16RoundingAverageU) \
  V(RiscvI8x16Abs) \
  V(RiscvI8x16BitMask) \
  V(RiscvI8x16Popcnt) \
  V(RiscvS128And) \
  V(RiscvS128Or) \
  V(RiscvS128Xor) \
  V(RiscvS128Not) \
  V(RiscvS128Select) \
  V(RiscvS128AndNot) \
  V(RiscvS128Load64Zero) \
  V(RiscvS128Load32Zero) \
  V(RiscvI32x4AllTrue) \
  V(RiscvI16x8AllTrue) \
  V(RiscvV128AnyTrue) \
  V(RiscvI8x16AllTrue) \
  V(RiscvI64x2AllTrue) \
  V(RiscvS32x4InterleaveRight) \
  V(RiscvS32x4InterleaveLeft) \
  V(RiscvS32x4PackEven) \
  V(RiscvS32x4PackOdd) \
  V(RiscvS32x4InterleaveEven) \
  V(RiscvS32x4InterleaveOdd) \
  V(RiscvS32x4Shuffle) \
  V(RiscvS16x8InterleaveRight) \
  V(RiscvS16x8InterleaveLeft) \
  V(RiscvS16x8PackEven) \
  V(RiscvS16x8PackOdd) \
  V(RiscvS16x8InterleaveEven) \
  V(RiscvS16x8InterleaveOdd) \
  V(RiscvS16x4Reverse) \
  V(RiscvS16x2Reverse) \
  V(RiscvS8x16InterleaveRight) \
  V(RiscvS8x16InterleaveLeft) \
  V(RiscvS8x16PackEven) \
  V(RiscvS8x16PackOdd) \
  V(RiscvS8x16InterleaveEven) \
  V(RiscvS8x16InterleaveOdd) \
  V(RiscvI8x16Shuffle) \
  V(RiscvS8x16Concat) \
  V(RiscvS8x8Reverse) \
  V(RiscvS8x4Reverse) \
  V(RiscvS8x2Reverse) \
  V(RiscvS128LoadSplat) \
  V(RiscvS128Load64ExtendS) \
  V(RiscvS128Load64ExtendU) \
  V(RiscvS128LoadLane) \
  V(RiscvS128StoreLane) \
  V(RiscvRvvLd) \
  V(RiscvRvvSt) \
  V(RiscvI32x4SConvertI16x8Low) \
  V(RiscvI32x4SConvertI16x8High) \
  V(RiscvI32x4UConvertI16x8Low) \
  V(RiscvI32x4UConvertI16x8High) \
  V(RiscvI16x8SConvertI8x16Low) \
  V(RiscvI16x8SConvertI8x16High) \
  V(RiscvI16x8SConvertI32x4) \
  V(RiscvI16x8UConvertI32x4) \
  V(RiscvI16x8UConvertI8x16Low) \
  V(RiscvI16x8UConvertI8x16High) \
  V(RiscvI8x16SConvertI16x8) \
  V(RiscvI8x16UConvertI16x8) \
  V(RiscvVwmul) \
  V(RiscvVwmulu) \
  V(RiscvVmvSx) \
  V(RiscvVcompress) \
  V(RiscvVaddVv) \
  V(RiscvVwadd) \
  V(RiscvVwaddu) \
  V(RiscvVrgather) \
  V(RiscvVslidedown) \
  V(RiscvWord64AtomicLoadUint64) \
  V(RiscvWord64AtomicStoreWord64) \
  V(RiscvWord64AtomicAddUint64) \
  V(RiscvWord64AtomicSubUint64) \
  V(RiscvWord64AtomicAndUint64) \
  V(RiscvWord64AtomicOrUint64) \
  V(RiscvWord64AtomicXorUint64) \
  V(RiscvWord64AtomicExchangeUint64) \
  V(RiscvWord64AtomicCompareExchangeUint64) \
  V(RiscvStoreCompressTagged) \
  V(RiscvLoadDecompressTaggedSigned) \
  V(RiscvLoadDecompressTaggedPointer) \
  V(RiscvLoadDecompressAnyTagged)

// Addressing modes represent the "shape" of inputs to an instruction.
// Many instructions support multiple addressing modes. Addressing modes
// are encoded into the InstructionCode of the instruction and tell the
// code generator after register allocation which assembler method to call.
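//
// Each V(...) name above expands (via the X-macro in
// src/compiler/backend/instruction-codes.h) into a kRiscv* ArchOpcode
// constant. As a sketch of how the two lists combine (illustrative only),
// the instruction selector can fold an addressing mode into the code it
// emits roughly like this:
//
//   InstructionCode opcode =
//       kRiscvLw | AddressingModeField::encode(kMode_MRI);
//
// The code generator later decodes AddressingModeField from the
// InstructionCode to pick the matching assembler method.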
//
// We use the following local notation for addressing modes:
//
// R = register
// O = register or stack slot
// D = double register
// I = immediate (handle, external, int32)
// MRI = [register + immediate]
// MRR = [register + register]
// Root = [kRootregister + immediate]
// TODO(plind): Add the new r6 address modes.
#define TARGET_ADDRESSING_MODE_LIST(V) \
  V(MRI)  /* [%r0 + K] */ \
  V(MRR)  /* [%r0 + %r1] */ \
  V(Root) /* [root + k] */

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_RISCV64_INSTRUCTION_CODES_RISCV64_H_