/third_party/node/deps/v8/src/compiler/backend/loong64/

code-generator-loong64.cc
   561  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   562  switch (arch_opcode) {   in AssembleArchInstruction()
   828  if (arch_opcode == kArchStoreWithWriteBarrier) {   in AssembleArchInstruction()
   836  DCHECK_EQ(kArchAtomicStoreWithWriteBarrier, arch_opcode);   in AssembleArchInstruction()
  1861  if (instr->arch_opcode() == kLoong64Tst) {   in AssembleBranchToLabels()
  1864  } else if (instr->arch_opcode() == kLoong64Add_d ||   in AssembleBranchToLabels()
  1865  instr->arch_opcode() == kLoong64Sub_d) {   in AssembleBranchToLabels()
  1873  } else if (instr->arch_opcode() == kLoong64AddOvf_d ||   in AssembleBranchToLabels()
  1874  instr->arch_opcode() == kLoong64SubOvf_d) {   in AssembleBranchToLabels()
  1884  UNSUPPORTED_COND(instr->arch_opcode(), condition);   in AssembleBranchToLabels()
  [all …]

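Every code-generator-*.cc entry in these results shows the same shape: AssembleArchInstruction() decodes the packed instruction word into an ArchOpcode, then a single switch selects the assembly sequence. A minimal, self-contained sketch of that dispatch pattern; the enum values and field width here are stand-ins, not V8's actual definitions:

```cpp
#include <cstdint>
#include <cstdio>

enum ArchOpcode : uint32_t { kArchNop, kArchJmp, kArchRet };

// Stand-in for v8::base::BitField: assume the opcode lives in the low 9 bits.
struct ArchOpcodeField {
  static constexpr uint32_t kMask = (1u << 9) - 1;
  static ArchOpcode decode(uint32_t instruction_code) {
    return static_cast<ArchOpcode>(instruction_code & kMask);
  }
};

void AssembleArchInstruction(uint32_t opcode) {
  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
  switch (arch_opcode) {
    case kArchNop:
      break;  // nothing to emit
    case kArchJmp:
      std::puts("emit unconditional branch");
      break;
    case kArchRet:
      std::puts("emit return sequence");
      break;
  }
}

int main() { AssembleArchInstruction(kArchRet); }
```
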
/third_party/node/deps/v8/src/compiler/backend/

jump-threading.cc
    99  } else if (instr->arch_opcode() == kArchJmp) {   in ComputeForwarding()
   214  } else if (instr->arch_opcode() == kArchJmp ||   in ApplyForwarding()
   215  instr->arch_opcode() == kArchRet) {   in ApplyForwarding()

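For context, jump-threading.cc treats a block whose only instruction is kArchJmp as forwardable: every branch to it can be retargeted at its final destination. A sketch of that forwarding computation under a deliberately simplified data model; V8 operates on RpoNumber-indexed blocks and must also handle cycles of empty blocks, which this sketch assumes away:

```cpp
#include <cstdio>
#include <vector>

constexpr int kNoForward = -1;

// forward[b] == t means block b is just "jmp t"; kNoForward means b has
// real contents and must be kept. Assumes no cycle of empty jump blocks.
std::vector<int> ComputeFinalTargets(const std::vector<int>& forward) {
  std::vector<int> final_target(forward.size());
  for (int b = 0; b < static_cast<int>(forward.size()); ++b) {
    int t = b;
    // Chase chains of empty jump blocks (b -> t -> t' -> ...).
    while (forward[t] != kNoForward && forward[t] != t) t = forward[t];
    final_target[b] = t;
  }
  return final_target;
}

int main() {
  // Block 0 jumps to 1, block 1 jumps to 2, block 2 has real code.
  std::vector<int> forward = {1, 2, kNoForward};
  for (int t : ComputeFinalTargets(forward)) std::printf("%d ", t);  // 2 2 2
}
```
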
frame-elider.cc
    27  instr->arch_opcode() == ArchOpcode::kArchStackPointerGreaterThan ||   in MarkBlocks()
    28  instr->arch_opcode() == ArchOpcode::kArchFramePointer) {   in MarkBlocks()

instruction.h
   885  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }   in arch_opcode() [function]
   895  return compiler::HasMemoryAccessMode(arch_opcode());   in HasMemoryAccessMode()
   950  bool IsNop() const { return arch_opcode() == kArchNop; }   in IsNop()
   953  return arch_opcode() == ArchOpcode::kArchDeoptimize ||   in IsDeoptimizeCall()
   961  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }   in IsJump()
   962  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }   in IsRet()
   965  return arch_opcode() <= ArchOpcode::kArchTailCallWasm;   in IsTailCall()
   967  return arch_opcode() <= ArchOpcode::kArchTailCallAddress;   in IsTailCall()
   971  return arch_opcode() == ArchOpcode::kArchThrowTerminator;   in IsThrow()
   974  static constexpr bool IsCallWithDescriptorFlags(InstructionCode arch_opcode) {   in IsCallWithDescriptorFlags() [argument]
  [all …]

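The accessor at line 885 is the read side of a bit-packing scheme: InstructionCode is a single 32-bit word carrying the opcode alongside addressing-mode and flags fields. A sketch of the encode/decode pattern; the field widths and enum values are illustrative (V8 defines the real fields with base::BitField in instruction-codes.h):

```cpp
#include <cassert>
#include <cstdint>

enum ArchOpcode : uint32_t { kArchNop = 0, kArchJmp = 1, kArchRet = 2 };
enum AddressingMode : uint32_t { kMode_None = 0, kMode_MRI = 1 };

// Generic bit field over a 32-bit word: kBits bits starting at kShift.
template <typename T, int kShift, int kBits>
struct BitField {
  static constexpr uint32_t kMask = ((1u << kBits) - 1) << kShift;
  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static constexpr T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
};

using ArchOpcodeField = BitField<ArchOpcode, 0, 9>;          // low 9 bits (assumed width)
using AddressingModeField = BitField<AddressingMode, 9, 5>;  // next 5 bits (assumed width)

int main() {
  uint32_t code = ArchOpcodeField::encode(kArchRet) |
                  AddressingModeField::encode(kMode_MRI);
  assert(ArchOpcodeField::decode(code) == kArchRet);  // what arch_opcode() does
  assert(AddressingModeField::decode(code) == kMode_MRI);
}
```
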
instruction-scheduler.h
   199  return (instr->arch_opcode() == kArchNop) && (instr->OutputCount() == 1) &&   in IsFixedRegisterParameter()

instruction-scheduler.cc
   257  switch (instr->arch_opcode()) {   in GetInstructionFlags()

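GetInstructionFlags() at line 257 is where the scheduler classifies each opcode before reordering: loads and effectful instructions constrain the schedule, pure ones do not. A schematic of that classification; the flag names mirror V8's, but the opcode set is a stand-in:

```cpp
enum ArchOpcode { kArchNop, kArchJmp, kSampleLoad, kSampleStore };

constexpr int kNoOpcodeFlags = 0;
constexpr int kIsLoadOperation = 1 << 0;
constexpr int kHasSideEffect = 1 << 1;

int GetInstructionFlags(ArchOpcode opcode) {
  switch (opcode) {
    case kArchNop:
      return kNoOpcodeFlags;    // pure: freely schedulable
    case kSampleLoad:
      return kIsLoadOperation;  // may not move past stores
    case kSampleStore:
    case kArchJmp:
      return kHasSideEffect;    // pins ordering
  }
  return kHasSideEffect;        // conservative default
}
```
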
code-generator.cc
   121  DCHECK_EQ(instr->arch_opcode(), kArchStackPointerGreaterThan);   in ShouldApplyOffsetToStackCheck()

/third_party/node/deps/v8/src/compiler/backend/ia32/

instruction-scheduler-ia32.cc
    18  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()
   408  switch (instr->arch_opcode()) {   in GetInstructionLatency()

code-generator-ia32.cc
   695  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   696  switch (arch_opcode) {   in AssembleArchInstruction()
   986  if (arch_opcode == kArchStoreWithWriteBarrier) {   in AssembleArchInstruction()
  3412  if (arch_opcode != kSSES8x2Reverse) {   in AssembleArchInstruction()
  3414  int8_t shuffle_mask = arch_opcode == kSSES8x4Reverse ? 0xB1 : 0x1B;   in AssembleArchInstruction()
  3431  if (arch_opcode != kAVXS8x2Reverse) {   in AssembleArchInstruction()
  3433  int8_t shuffle_mask = arch_opcode == kAVXS8x4Reverse ? 0xB1 : 0x1B;   in AssembleArchInstruction()

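The constants at lines 3414 and 3433 are pshufd/pshuflw-style shuffle immediates: four 2-bit source-lane selectors, so 0xB1 swaps adjacent lane pairs while 0x1B reverses all four lanes. A scalar model showing why, assuming that immediate encoding:

```cpp
#include <array>
#include <cstdint>
#include <cstdio>

// Destination lane i takes source lane (imm >> 2i) & 3, as in pshufd.
std::array<uint32_t, 4> Shuffle4(const std::array<uint32_t, 4>& src,
                                 uint8_t imm) {
  std::array<uint32_t, 4> dst{};
  for (int i = 0; i < 4; ++i) dst[i] = src[(imm >> (2 * i)) & 3];
  return dst;
}

int main() {
  std::array<uint32_t, 4> v = {0, 1, 2, 3};
  auto a = Shuffle4(v, 0xB1);  // 0b10'11'00'01 -> lanes 1,0,3,2: swap pairs
  auto b = Shuffle4(v, 0x1B);  // 0b00'01'10'11 -> lanes 3,2,1,0: full reverse
  std::printf("%u%u%u%u  %u%u%u%u\n", a[0], a[1], a[2], a[3],
              b[0], b[1], b[2], b[3]);  // prints: 1032  3210
}
```
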
/third_party/node/deps/v8/src/compiler/backend/riscv64/

code-generator-riscv64.cc
   632  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   633  switch (arch_opcode) {   in AssembleArchInstruction()
  3416  switch (arch_opcode) {   in AssembleArchInstruction()
  3465  if (instr->arch_opcode() == kRiscvTst) {   in AssembleBranchToLabels()
  3468  } else if (instr->arch_opcode() == kRiscvAdd64 ||   in AssembleBranchToLabels()
  3469  instr->arch_opcode() == kRiscvSub64) {   in AssembleBranchToLabels()
  3474  } else if (instr->arch_opcode() == kRiscvAddOvf64 ||   in AssembleBranchToLabels()
  3475  instr->arch_opcode() == kRiscvSubOvf64) {   in AssembleBranchToLabels()
  3485  UNSUPPORTED_COND(instr->arch_opcode(), condition);   in AssembleBranchToLabels()
  3487  } else if (instr->arch_opcode() == kRiscvMulOvf32) {   in AssembleBranchToLabels()
  [all …]

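The kRiscvAddOvf64/kRiscvSubOvf64 cases above fuse an overflow-checking add or sub with the conditional branch that follows it. RISC-V has no flags register, so the overflow bit must be recomputed from the operands; the sketch below is the textbook sign test for that, not necessarily V8's exact instruction sequence:

```cpp
#include <cstdint>
#include <cstdio>

bool AddOverflows(int64_t a, int64_t b) {
  // Wrapping add via unsigned arithmetic (signed overflow is UB in C++).
  int64_t sum = static_cast<int64_t>(
      static_cast<uint64_t>(a) + static_cast<uint64_t>(b));
  // Overflow iff both operands share a sign and the sum's sign differs:
  // (~(a ^ b) & (sum ^ a)) has its top bit set exactly in that case.
  return (static_cast<uint64_t>(~(a ^ b) & (sum ^ a)) >> 63) != 0;
}

int main() {
  std::printf("%d\n", AddOverflows(INT64_MAX, 1));  // 1: wraps negative
  std::printf("%d\n", AddOverflows(-5, 3));         // 0
}
```
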
instruction-scheduler-riscv64.cc
    16  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()
  1091  switch (instr->arch_opcode()) {   in GetInstructionLatency()

/third_party/node/deps/v8/src/compiler/backend/mips64/

code-generator-mips64.cc
   576  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   577  switch (arch_opcode) {   in AssembleArchInstruction()
   845  if (arch_opcode == kArchStoreWithWriteBarrier) {   in AssembleArchInstruction()
   848  DCHECK_EQ(kArchAtomicStoreWithWriteBarrier, arch_opcode);   in AssembleArchInstruction()
  3805  if (instr->arch_opcode() == kMips64Tst) {   in AssembleBranchToLabels()
  3808  } else if (instr->arch_opcode() == kMips64Dadd ||   in AssembleBranchToLabels()
  3809  instr->arch_opcode() == kMips64Dsub) {   in AssembleBranchToLabels()
  3814  } else if (instr->arch_opcode() == kMips64DaddOvf ||   in AssembleBranchToLabels()
  3815  instr->arch_opcode() == kMips64DsubOvf) {   in AssembleBranchToLabels()
  3825  UNSUPPORTED_COND(instr->arch_opcode(), condition);   in AssembleBranchToLabels()
  [all …]

instruction-scheduler-mips64.cc
    16  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()
  1265  switch (instr->arch_opcode()) {   in GetInstructionLatency()

/third_party/node/deps/v8/src/compiler/backend/arm64/

instruction-scheduler-arm64.cc
    15  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()
   376  switch (instr->arch_opcode()) {   in GetInstructionLatency()

code-generator-arm64.cc
   685  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   686  switch (arch_opcode) {   in AssembleArchInstruction()
  2864  ArchOpcode opcode = instr->arch_opcode();   in AssembleArchBranch()

/third_party/node/deps/v8/src/compiler/backend/x64/

instruction-scheduler-x64.cc
    15  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()
   451  switch (instr->arch_opcode()) {   in GetInstructionLatency()

code-generator-x64.cc
  1188  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
  1195  switch (arch_opcode) {   in AssembleArchInstruction()
  1494  if (arch_opcode == kArchStoreWithWriteBarrier) {   in AssembleArchInstruction()
  1499  DCHECK_EQ(arch_opcode, kArchAtomicStoreWithWriteBarrier);   in AssembleArchInstruction()
  4153  if (arch_opcode != kX64S8x2Reverse) {   in AssembleArchInstruction()
  4155  uint8_t shuffle_mask = arch_opcode == kX64S8x4Reverse ? 0xB1 : 0x1B;   in AssembleArchInstruction()

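kArchStoreWithWriteBarrier at line 1494 emits the store plus an out-of-line check that tells the GC about old-to-young pointers. Below is a rough, self-contained model of that generational filter only; the real barrier tests page flags and also supports incremental marking, none of which is shown here:

```cpp
#include <cstdio>
#include <unordered_set>

struct HeapObject {
  bool in_young_generation;
  HeapObject* field = nullptr;
};

// Remembered set: old-to-young slots the GC must treat as roots.
std::unordered_set<HeapObject**> remembered_set;

void StoreWithWriteBarrier(HeapObject* host, HeapObject* value) {
  host->field = value;  // the actual store
  // Out-of-line path, taken only for old -> young pointers.
  if (value != nullptr && value->in_young_generation &&
      !host->in_young_generation) {
    remembered_set.insert(&host->field);
  }
}

int main() {
  HeapObject old_obj{false}, young_obj{true};
  StoreWithWriteBarrier(&old_obj, &young_obj);
  std::printf("remembered slots: %zu\n", remembered_set.size());  // 1
}
```
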
/third_party/node/deps/v8/src/compiler/backend/mips/

code-generator-mips.cc
   613  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   614  switch (arch_opcode) {   in AssembleArchInstruction()
   884  if (arch_opcode == kArchStoreWithWriteBarrier) {   in AssembleArchInstruction()
   887  DCHECK_EQ(kArchAtomicStoreWithWriteBarrier, arch_opcode);   in AssembleArchInstruction()
  3624  if (instr->arch_opcode() == kMipsTst) {   in AssembleBranchToLabels()
  3627  } else if (instr->arch_opcode() == kMipsAddOvf ||   in AssembleBranchToLabels()
  3628  instr->arch_opcode() == kMipsSubOvf) {   in AssembleBranchToLabels()
  3638  UNSUPPORTED_COND(instr->arch_opcode(), condition);   in AssembleBranchToLabels()
  3640  } else if (instr->arch_opcode() == kMipsMulOvf) {   in AssembleBranchToLabels()
  3652  } else if (instr->arch_opcode() == kMipsCmp) {   in AssembleBranchToLabels()
  [all …]

instruction-scheduler-mips.cc
    16  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()
  1388  switch (instr->arch_opcode()) {   in GetInstructionLatency()

/third_party/node/deps/v8/src/compiler/backend/arm/

instruction-scheduler-arm.cc
    15  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()

code-generator-arm.cc
   671  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in AssembleArchInstruction() [local]
   672  switch (arch_opcode) {   in AssembleArchInstruction()
   938  if (arch_opcode == kArchStoreWithWriteBarrier) {   in AssembleArchInstruction()
   957  if (arch_opcode == kArchAtomicStoreWithWriteBarrier) {   in AssembleArchInstruction()
   970  if (arch_opcode == kArchAtomicStoreWithWriteBarrier &&   in AssembleArchInstruction()

instruction-selector-arm.cc
   433  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);   in EmitStore() [local]
   438  } else if ((arch_opcode == kArmStr || arch_opcode == kAtomicStoreWord32) &&   in EmitStore()
   444  if (arch_opcode == kArmVst1S128) {   in EmitStore()

/third_party/node/deps/v8/src/compiler/backend/s390/

instruction-scheduler-s390.cc
    15  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()

code-generator-s390.cc
  3208  ArchOpcode op = instr->arch_opcode();   in AssembleArchBranch()
  3287  ArchOpcode op = instr->arch_opcode();   in AssembleArchTrap()
  3306  ArchOpcode op = instr->arch_opcode();   in AssembleArchBoolean()

/third_party/node/deps/v8/src/compiler/backend/ppc/

instruction-scheduler-ppc.cc
    15  switch (instr->arch_opcode()) {   in GetTargetInstructionFlags()