Lines Matching refs:zero_reg
405 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg)); in Generate_JSEntryVariant()
469 __ sw(zero_reg, MemOperand(t1)); in Generate_JSEntryVariant()
644 __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
679 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
826 __ Branch(&heal_optimized_code_slot, ne, scratch1, Operand(zero_reg)); in TailCallOptimizedCodeSlot()
910 __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg)); in AdvanceBytecodeOffsetOrReturn()
1013 __ Branch(&has_optimized_code_or_marker, ne, t1, Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1038 __ sh(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1072 __ Branch(&loop_header, ge, t0, Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1082 Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1165 __ Branch(&maybe_has_optimized_code, eq, t1, Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1286 __ push(zero_reg); in Generate_InterpreterPushArgsThenConstructImpl()
1369 kScratchReg, Operand(zero_reg)); in Generate_InterpreterEnterBytecode()
1587 __ Branch(&no_arg, eq, a0, Operand(zero_reg)); in Generate_FunctionPrototypeApply()
1619 __ mov(a0, zero_reg); in Generate_FunctionPrototypeApply()
1633 __ Branch(&done, ne, a0, Operand(zero_reg)); in Generate_FunctionPrototypeCall()
1663 __ Branch(&no_arg, eq, a0, Operand(zero_reg)); in Generate_ReflectApply()
1707 __ Branch(&no_arg, eq, a0, Operand(zero_reg)); in Generate_ReflectConstruct()
1801 __ Addu(t1, a0, Operand(zero_reg)); in Generate_CallOrConstructVarargs()
1809 __ Branch(&copy, ge, t1, Operand(zero_reg)); in Generate_CallOrConstructVarargs()
1814 __ mov(t2, zero_reg); in Generate_CallOrConstructVarargs()
1857 __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg)); in Generate_CallOrConstructForwardVarargs()
1900 __ Branch(&stack_done, le, t2, Operand(zero_reg)); in Generate_CallOrConstructForwardVarargs()
1922 __ Addu(t7, a0, Operand(zero_reg)); in Generate_CallOrConstructForwardVarargs()
1930 __ Branch(&copy, ge, t7, Operand(zero_reg)); in Generate_CallOrConstructForwardVarargs()
1946 __ Branch(&loop, ne, t2, Operand(zero_reg)); in Generate_CallOrConstructForwardVarargs()
1975 __ Branch(&class_constructor, ne, kScratchReg, Operand(zero_reg)); in Generate_CallFunction()
1987 __ Branch(&done_convert, ne, kScratchReg, Operand(zero_reg)); in Generate_CallFunction()
2115 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); in Generate_CallBoundFunctionImpl()
2151 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); in Generate_Call()
2196 __ Branch(&call_generic_stub, eq, t0, Operand(zero_reg)); in Generate_ConstructFunction()
2257 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); in Generate_ConstructBoundFunction()
2298 __ Branch(&non_constructor, eq, t3, Operand(zero_reg)); in Generate_Construct()
2617 __ mov(a0, zero_reg); in Generate_CEntry()
2618 __ mov(a1, zero_reg); in Generate_CEntry()
2634 __ Branch(&zero, eq, cp, Operand(zero_reg)); in Generate_CEntry()
2670 __ ctc1(zero_reg, FCSR); in Generate_DoubleToI()
2687 __ Branch(&error, ne, scratch, Operand(zero_reg)); in Generate_DoubleToI()
2707 __ Movz(result_reg, zero_reg, scratch); in Generate_DoubleToI()
2708 __ Branch(&done, eq, scratch, Operand(zero_reg)); in Generate_DoubleToI()
2716 __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg)); in Generate_DoubleToI()
2717 __ mov(result_reg, zero_reg); in Generate_DoubleToI()
2734 __ mov(input_high, zero_reg); in Generate_DoubleToI()
2752 __ Branch(&pos_shift, ge, scratch, Operand(zero_reg)); in Generate_DoubleToI()
2755 __ Subu(scratch, zero_reg, scratch); in Generate_DoubleToI()
2768 __ Subu(result_reg, zero_reg, input_high); in Generate_DoubleToI()
2811 __ Branch(&profiler_enabled, ne, t9, Operand(zero_reg)); in CallApiFunctionAndReturn()
2814 __ Branch(&profiler_enabled, ne, t9, Operand(zero_reg)); in CallApiFunctionAndReturn()
3060 __ sw(zero_reg, in Generate_CallApiGetter()
3161 __ bne(t2, zero_reg, &lastb); in Generate_MemCopyUint8Uint8()
3170 __ bne(t8, zero_reg, &unaligned); in Generate_MemCopyUint8Uint8()
3171 __ subu(a3, zero_reg, a0); // In delay slot. in Generate_MemCopyUint8Uint8()
3174 __ beq(a3, zero_reg, &aligned); // Already aligned. in Generate_MemCopyUint8Uint8()
3223 __ Branch(USE_DELAY_SLOT, &skip_pref, gt, v1, Operand(zero_reg)); in Generate_MemCopyUint8Uint8()
3317 __ Branch(&leave, le, a2, Operand(zero_reg)); in Generate_MemCopyUint8Uint8()
3336 __ beq(a3, zero_reg, &ua_chk16w); in Generate_MemCopyUint8Uint8()
3389 __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg)); in Generate_MemCopyUint8Uint8()
3424 __ Branch(USE_DELAY_SLOT, &ua_skip_pref, gt, v1, Operand(zero_reg)); in Generate_MemCopyUint8Uint8()
3623 __ beq(a2, zero_reg, &leave); in Generate_MemCopyUint8Uint8()
3693 __ mov(a0, zero_reg); in Generate_DeoptimizationEntry()
3795 __ BranchShort(&inner_push_loop, ne, a3, Operand(zero_reg)); in Generate_DeoptimizationEntry()
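Note: zero_reg is the MIPS $zero register, hardwired to 0, which is why every hit above falls into one of a few idioms: compare-against-zero branches (Branch/bne/beq), clearing a register or memory word (mov/sw/sh), and two's-complement negation (Subu with zero_reg as the minuend). As a minimal sketch of those recurring idioms, assuming V8's MIPS MacroAssembler and the usual "#define __ ACCESS_MASM(masm)" convention (illustrative only; this fragment compiles only inside the V8 tree, and the helper name is hypothetical, not part of the source):

    // Hypothetical helper collecting the zero_reg idioms from the listing above.
    void SketchZeroRegIdioms(MacroAssembler* masm) {
      Label done;
      // Compare-against-zero branch: taken when t1 != 0 (cf. lines 644, 1013).
      __ Branch(&done, ne, t1, Operand(zero_reg));
      // Clear a register without materializing an immediate (cf. lines 1619, 2617).
      __ mov(a0, zero_reg);
      // Store a zero word directly from the hardwired register (cf. line 469).
      __ sw(zero_reg, MemOperand(t1));
      // Negation as 0 - a2, no dedicated neg instruction needed (cf. lines 2755, 2768).
      __ Subu(a2, zero_reg, a2);
      __ bind(&done);
    }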