/art/compiler/utils/x86/
constants_x86.h
    70    kNotEqual = 5,           (enumerator)
    83    kNotZero = kNotEqual,

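The two enumerators above encode the hardware condition code directly: NE/NZ is cc = 0b0101 = 5 on x86, which lets the assembler fold the condition straight into Jcc/SETcc/CMOVcc encodings. A minimal sketch of that relationship; the enum fragment repeats the values listed above, while `JccLongOpcode` illustrates the encoding arithmetic and is not the actual assembler_x86 helper.

```cpp
#include <cstdint>

// Fragment of the condition enum, repeating the two enumerators listed above;
// the remaining condition codes are omitted here.
enum Condition : uint8_t {
  kNotEqual = 5,          // x86 condition code 0b0101 (NE)
  kNotZero  = kNotEqual,  // NZ shares the NE encoding
};

// Illustration of why the numeric value matters: a long-form Jcc is encoded as
// 0x0F, 0x80 + cc, rel32, so cc = 5 yields 0x0F 0x85, i.e. `jne rel32`.
constexpr uint8_t JccLongOpcode(Condition cond) {
  return static_cast<uint8_t>(0x80 + cond);
}

static_assert(JccLongOpcode(kNotEqual) == 0x85, "jne rel32 is encoded as 0F 85");
```
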
assembler_x86_test.cc
    555   GetAssembler()->cmovl(x86::kNotEqual, x86::Register(x86::EDI), x86::Address(   in TEST_F()
    1289  GetAssembler()->j(x86::kNotEqual, &start);   in TEST_F()

jni_macro_assembler_x86.cc
    555   __ j(kNotEqual, slow->Entry());   in ExceptionPoll()

/art/compiler/utils/x86_64/
constants_x86_64.h
    97    kNotEqual = 5,           (enumerator)
    110   kNotZero = kNotEqual,

assembler_x86_64_test.cc
    2165  GetAssembler()->cmov(x86_64::kNotEqual, x86_64::CpuRegister(x86_64::RDI), x86_64::Address(   in TEST_F()
    2179  GetAssembler()->cmov(x86_64::kNotEqual, x86_64::CpuRegister(x86_64::RDI), x86_64::Address(   in TEST_F()
    2214  GetAssembler()->j(x86_64::kNotEqual, &start);   in TEST_F()

jni_macro_assembler_x86_64.cc
    646   __ j(kNotEqual, slow->Entry());   in ExceptionPoll()

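Both ExceptionPoll() hits (here and in the x86 variant above) are the branch half of the same pattern: the generated JNI stub compares the thread-local pending-exception slot against null and jumps to the slow path only when the comparison is not equal. The toy model below only sketches that compare-then-jne shape; `ToyAssembler`, the printed mnemonics, and the addressing are stand-ins, not the jni_macro_assembler API.

```cpp
#include <iostream>
#include <string>

// Toy stand-ins, just enough structure to show the compare-and-branch shape;
// these are not the ART assembler classes.
enum Condition { kEqual = 4, kNotEqual = 5 };

struct Label { std::string name; };

struct ToyAssembler {
  // Stands in for comparing the thread's pending-exception slot against null.
  void CmpExceptionWithZero() {
    std::cout << "cmp <thread-local pending-exception slot>, 0\n";
  }
  // Stands in for j(kNotEqual, ...): emit a conditional jump to `target`.
  void j(Condition cond, Label* target) {
    std::cout << (cond == kNotEqual ? "jne " : "jcc ") << target->name << "\n";
  }
};

int main() {
  ToyAssembler assembler;
  Label slow_entry{"exception_slow_path"};
  // ExceptionPoll() shape: test for a pending exception, branch to the slow
  // path only when the slot was non-null (the not-equal case).
  assembler.CmpExceptionWithZero();
  assembler.j(kNotEqual, &slow_entry);
  return 0;
}
```
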
/art/compiler/optimizing/
intrinsics_x86_64.cc
    112   __ j(kNotEqual, &loop);   in EmitNativeCode()
    647   __ j(kNotEqual, slow_path->GetEntryLabel());   in CheckPosition()
    853   __ j(kNotEqual, &conditions_on_positions_validated);   in VisitSystemArrayCopy()
    861   __ j(kNotEqual, &conditions_on_positions_validated);   in VisitSystemArrayCopy()
    969   __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());   in VisitSystemArrayCopy()
    992   __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());   in VisitSystemArrayCopy()
    1020  __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());   in VisitSystemArrayCopy()
    1023  __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());   in VisitSystemArrayCopy()
    1049  __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());   in VisitSystemArrayCopy()
    1120  __ j(kNotEqual, &loop);   in VisitSystemArrayCopy()
    [all …]

intrinsics_x86.cc
    156   __ j(kNotEqual, &loop);   in EmitNativeCode()
    854   __ j(kNotEqual, slow_path->GetEntryLabel());   in CheckPosition()
    1042  __ j(kNotEqual, &return_false);   in VisitStringEquals()
    1055  __ j(kNotEqual, &return_false);   in VisitStringEquals()
    1089  __ j(kNotEqual, &return_false);   in VisitStringEquals()
    1264  __ j(kNotEqual, &not_found_label);   in GenerateStringIndexOf()
    2496  __ j(kNotEqual, &done);   in GenTrailingZeros()
    2513  __ j(kNotEqual, &done);   in GenTrailingZeros()
    2673  __ j(kNotEqual, &conditions_on_positions_validated);   in VisitSystemArrayCopy()
    2681  __ j(kNotEqual, &conditions_on_positions_validated);   in VisitSystemArrayCopy()
    [all …]

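The VisitStringEquals() branches above are early-out checks: a length or content mismatch takes a kNotEqual jump to the return-false label. Roughly, the emitted code computes the following plain C++; the field names, null handling, and chunked comparison are illustrative simplifications rather than the intrinsic's actual layout.

```cpp
#include <cstdint>
#include <cstring>

// Illustrative string shape; not the real art::mirror::String layout.
struct ToyString {
  uint32_t length;       // character count
  const uint16_t* data;  // UTF-16 payload
};

// Decision structure comparable to what the StringEquals intrinsic emits: the
// length and content mismatches below correspond to kNotEqual jumps to the
// return-false label in the generated code.
bool StringEqualsShape(const ToyString* a, const ToyString* b) {
  if (a == b) return true;                    // same reference: trivially equal
  if (a == nullptr || b == nullptr) return false;
  if (a->length != b->length) return false;   // length mismatch
  // The intrinsic compares the payload in register-sized chunks; memcmp stands
  // in for that loop, where any mismatching chunk exits early as well.
  return std::memcmp(a->data, b->data, a->length * sizeof(uint16_t)) == 0;
}
```
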
loop_analysis.cc
    241   case HInstruction::InstructionKind::kNotEqual:   in GetMachineInstructionCount()

code_generator_x86_64.cc
    962   case kCondNE: return kNotEqual;   in X86_64IntegerCondition()
    980   case kCondNE: return kNotEqual;   in X86_64FPCondition()
    1928  __ j(kNotEqual, true_target);   in GenerateTestAndBranch()
    2059  Condition cond = kNotEqual;   in VisitSelect()
    3795  __ j(kNotEqual, &retry);   in GenerateRemFP()
    5627  __ j(kNotEqual, slow_path->GetEntryLabel());   in VisitArraySet()
    5630  __ j(kNotEqual, slow_path->GetEntryLabel());   in VisitArraySet()
    5926  __ j(kNotEqual, slow_path->GetEntryLabel());   in GenerateSuspendCheck()
    6671  __ j(kNotEqual, &zero);   in VisitInstanceOf()
    6710  __ j(kNotEqual, &loop);   in VisitInstanceOf()
    [all …]

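The `case kCondNE: return kNotEqual;` rows belong to the IR-to-assembler condition translation: the code generator maps HIR condition kinds onto x86 Condition enumerators before emitting j/setcc/cmov. A reduced sketch of such a mapping; only the not-equal arm comes from the listing, while the enum spellings and remaining arms are stand-ins.

```cpp
// Stand-in enums: IfCondition mirrors the HIR-level condition kinds, Condition
// the assembler-level x86 codes from constants_x86_64.h.
enum IfCondition { kCondEQ, kCondNE /* remaining kinds elided */ };
enum Condition { kEqual = 4, kNotEqual = 5 /* remaining codes elided */ };

// Reduced version of an integer-condition mapping such as
// X86_64IntegerCondition(); only the equality pair is shown.
inline Condition ToX86_64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;  // the arm listed at lines 962 and 980
  }
  return kEqual;  // placeholder for the elided arms
}
```
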
code_generator_x86.cc
    948   case kCondNE: return kNotEqual;   in X86Condition()
    966   case kCondNE: return kNotEqual;   in X86UnsignedOrFPCondition()
    1931  __ j(kNotEqual, true_target);   in GenerateTestAndBranch()
    2066  Condition cond = kNotEqual;   in VisitSelect()
    3808  __ j(kNotEqual, &retry);   in GenerateRemFP()
    4827  __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);   in VisitRor()
    4828  __ cmovl(kNotEqual, first_reg_lo, temp_reg);   in VisitRor()
    6483  __ j(kNotEqual, slow_path->GetEntryLabel());   in VisitArraySet()
    6486  __ j(kNotEqual, slow_path->GetEntryLabel());   in VisitArraySet()
    6756  __ j(kNotEqual, slow_path->GetEntryLabel());   in GenerateSuspendCheck()
    [all …]

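The paired cmovl lines in VisitRor() perform a branch-free conditional swap of the high and low halves of a 64-bit value, which is the usual first step when rotating a long on 32-bit x86 by an amount that may cross the 32-bit boundary. The plain C++ below only shows what that data movement accomplishes; the assumption that the preceding test examines bit 5 of the rotate amount, and that temp_reg already holds the old high half, is an inference rather than something taken from the listing.

```cpp
#include <cstdint>

// What the cmovl(kNotEqual, ...) pair accomplishes: exchange the two 32-bit
// halves of a 64-bit value without branching, but only when `swap_needed`
// holds (in the generated code, "holds" means the preceding test left ZF = 0).
inline void ConditionalHalfSwap(uint32_t& hi, uint32_t& lo, bool swap_needed) {
  uint32_t temp = hi;          // temp_reg is assumed to hold the old high half
  if (swap_needed) hi = lo;    // cmovl(kNotEqual, first_reg_hi, first_reg_lo)
  if (swap_needed) lo = temp;  // cmovl(kNotEqual, first_reg_lo, temp_reg)
}
```
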
instruction_simplifier.cc
    756   case HInstruction::kNotEqual:   in GetOppositeConditionSwapOps()

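The kNotEqual arm in instruction_simplifier.cc dispatches on the HIR condition kind. Whatever the exact contract of GetOppositeConditionSwapOps(), the algebra relevant to this arm is simple: != is symmetric, so swapping its operands preserves the kind, and its logical negation is ==. The helper below captures only the operand-swap fact; its name and the reduced enum are hypothetical, not the simplifier's API.

```cpp
// Reduced condition-kind enum and a hypothetical helper; not the simplifier's
// actual API. The point of the kNotEqual arm: != is symmetric, so swapping
// operands keeps the same kind (unlike the ordered comparisons).
enum InstructionKind { kEqual, kNotEqual, kLessThan, kGreaterThan };

inline InstructionKind SwappedOperandsKind(InstructionKind kind) {
  switch (kind) {
    case kEqual:       return kEqual;        // a == b  <=>  b == a
    case kNotEqual:    return kNotEqual;     // a != b  <=>  b != a
    case kLessThan:    return kGreaterThan;  // a <  b  <=>  b >  a
    case kGreaterThan: return kLessThan;     // a >  b  <=>  b <  a
  }
  return kind;  // unreachable with the reduced enum above
}
```
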
nodes.h
    4011  : HCondition(kNotEqual, first, second, dex_pc) {

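The nodes.h hit is the HNotEqual constructor forwarding the kNotEqual instruction kind to its HCondition base. Note that this kNotEqual (also matched in loop_analysis.cc and instruction_simplifier.cc above) is the HIR InstructionKind, distinct from the x86 Condition enumerator of the same name in the utils/ headers. A skeletal rendering of that forwarding; only the initializer list mirrors line 4011, and the class and member shapes are simplified stand-ins.

```cpp
#include <cstdint>

// Simplified stand-ins for the HIR classes; only enough to show how the
// constructor on nodes.h line 4011 forwards the instruction kind.
enum InstructionKind { kEqual, kNotEqual /* remaining kinds elided */ };

struct HInstructionStub {};  // placeholder operand type

class HConditionStub {
 public:
  HConditionStub(InstructionKind kind, HInstructionStub* first,
                 HInstructionStub* second, uint32_t dex_pc)
      : kind_(kind), first_(first), second_(second), dex_pc_(dex_pc) {}

 private:
  InstructionKind kind_;
  HInstructionStub* first_;
  HInstructionStub* second_;
  uint32_t dex_pc_;
};

class HNotEqualStub final : public HConditionStub {
 public:
  HNotEqualStub(HInstructionStub* first, HInstructionStub* second, uint32_t dex_pc)
      : HConditionStub(kNotEqual, first, second, dex_pc) {}  // mirrors line 4011
};
```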