Lines Matching refs:codegen

127 void EmitNativeCode(CodeGenerator* codegen) override { in EmitNativeCode() argument
128 CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen); in EmitNativeCode()
195 IntrinsicLocationsBuilderARMVIXL::IntrinsicLocationsBuilderARMVIXL(CodeGeneratorARMVIXL* codegen) in IntrinsicLocationsBuilderARMVIXL() argument
196 : allocator_(codegen->GetGraph()->GetAllocator()), in IntrinsicLocationsBuilderARMVIXL()
197 codegen_(codegen), in IntrinsicLocationsBuilderARMVIXL()
198 assembler_(codegen->GetAssembler()), in IntrinsicLocationsBuilderARMVIXL()
199 features_(codegen->GetInstructionSetFeatures()) {} in IntrinsicLocationsBuilderARMVIXL()
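The constructor hits at 195-199 show IntrinsicLocationsBuilderARMVIXL caching everything it needs from the codegen up front: the graph's arena allocator, the codegen itself, its assembler, and the instruction-set features. Below is a minimal self-contained C++ model of that caching pattern; every type here is a stand-in stub for illustration, not the real ART class.

    // Stand-in stubs; the real types live in ART's optimizing compiler.
    struct ArenaAllocator {};
    struct ArmVIXLAssembler {};
    struct Features {};
    struct Graph {
      ArenaAllocator* GetAllocator() { return &allocator_; }
      ArenaAllocator allocator_;
    };
    struct CodeGen {
      Graph* GetGraph() { return &graph_; }
      ArmVIXLAssembler* GetAssembler() { return &assembler_; }
      const Features& GetInstructionSetFeatures() const { return features_; }
      Graph graph_;
      ArmVIXLAssembler assembler_;
      Features features_;
    };

    // Mirrors the initializer-list caching shown at source lines 196-199.
    class IntrinsicLocationsBuilder {
     public:
      explicit IntrinsicLocationsBuilder(CodeGen* codegen)
          : allocator_(codegen->GetGraph()->GetAllocator()),
            codegen_(codegen),
            assembler_(codegen->GetAssembler()),
            features_(codegen->GetInstructionSetFeatures()) {}

     private:
      ArenaAllocator* allocator_;
      CodeGen* codegen_;
      ArmVIXLAssembler* assembler_;
      const Features& features_;
    };

    int main() {
      CodeGen cg;
      IntrinsicLocationsBuilder builder(&cg);
      (void)builder;
      return 0;
    }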
304 CodeGeneratorARMVIXL* codegen) { in GenNumberOfLeadingZeros() argument
305 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfLeadingZeros()
316 vixl32::Label* final_label = codegen->GetFinalLabel(invoke, &end); in GenNumberOfLeadingZeros()
347 CodeGeneratorARMVIXL* codegen) { in GenNumberOfTrailingZeros() argument
350 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfTrailingZeros()
358 vixl32::Label* final_label = codegen->GetFinalLabel(invoke, &end); in GenNumberOfTrailingZeros()
1093 CodeGeneratorARMVIXL* codegen, in GenerateVisitStringIndexOf() argument
1109 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke); in GenerateVisitStringIndexOf()
1110 codegen->AddSlowPath(slow_path); in GenerateVisitStringIndexOf()
1119 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke); in GenerateVisitStringIndexOf()
1120 codegen->AddSlowPath(slow_path); in GenerateVisitStringIndexOf()
1131 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path); in GenerateVisitStringIndexOf()
1787 CodeGeneratorARMVIXL* codegen, in GenFPToFPCall() argument
1798 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc()); in GenFPToFPCall()
1806 CodeGeneratorARMVIXL* codegen, in GenFPFPToFPCall() argument
1820 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc()); in GenFPFPToFPCall()
2112 CodeGeneratorARMVIXL* codegen) { in GenHighestOneBit() argument
2115 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenHighestOneBit()
2145 ExactAssemblyScope it_scope(codegen->GetVIXLAssembler(), in GenHighestOneBit()
2178 CodeGeneratorARMVIXL* codegen) { in GenLowestOneBit() argument
2181 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenLowestOneBit()
2211 ExactAssemblyScope it_scope(codegen->GetVIXLAssembler(), in GenLowestOneBit()
2680 static inline bool Use64BitExclusiveLoadStore(bool atomic, CodeGeneratorARMVIXL* codegen) { in Use64BitExclusiveLoadStore() argument
2681 return atomic && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd(); in Use64BitExclusiveLoadStore()
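Use64BitExclusiveLoadStore (2680-2681) is the predicate behind many of the later hits: a 64-bit access has to fall back to a LDREXD/STREXD exclusive-access loop when the access must be atomic but the core does not provide single-copy-atomic LDRD/STRD. A self-contained restatement of the predicate, with the features query stubbed out:

    // Stub for the instruction-set features query used at line 2681.
    struct IsaFeatures { bool has_atomic_ldrd_strd; };

    // True when a 64-bit access needs an exclusive-load/store (LDREXD/STREXD)
    // loop: the access is atomic, but plain LDRD/STRD is not single-copy atomic.
    inline bool Use64BitExclusiveLoadStore(bool atomic, const IsaFeatures& features) {
      return atomic && !features.has_atomic_ldrd_strd;
    }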
2685 CodeGeneratorARMVIXL* codegen, in GenerateIntrinsicGet() argument
2700 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateIntrinsicGet()
2719 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateIntrinsicGet()
2739 codegen->GenerateFieldLoadWithBakerReadBarrier( in GenerateIntrinsicGet()
2756 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateIntrinsicGet()
2777 codegen->GenerateMemoryBarrier( in GenerateIntrinsicGet()
2783 codegen->MaybeGenerateReadBarrierSlow(invoke, out, out, base_loc, /* offset=*/ 0u, index_loc); in GenerateIntrinsicGet()
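The GenerateIntrinsicGet hits (2685-2783) end with a memory barrier after the load (2777) and an optional read-barrier slow path (2783). As a rough model of the barrier placement only, here is an acquire-style load in portable C++; the exact ordering is an illustrative assumption, not read off the ART source:

    #include <atomic>
    #include <cstdint>

    // Schematic "volatile get": plain load, then a barrier, mirroring the
    // GenerateMemoryBarrier call at line 2777.
    int64_t LoadThenBarrier(const std::atomic<int64_t>& field) {
      int64_t value = field.load(std::memory_order_relaxed);
      std::atomic_thread_fence(std::memory_order_acquire);  // ~ barrier after the load
      return value;
    }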
2788 CodeGeneratorARMVIXL* codegen, in CreateUnsafeGetLocations() argument
2810 (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen))) { in CreateUnsafeGetLocations()
2819 CodeGeneratorARMVIXL* codegen, in GenUnsafeGet() argument
2829 (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen))) { in GenUnsafeGet()
2833 codegen, in GenUnsafeGet()
2899 static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen, in GenerateIntrinsicSet() argument
2914 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateIntrinsicSet()
2916 codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore); in GenerateIntrinsicSet()
2945 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateIntrinsicSet()
2962 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateIntrinsicSet()
2982 codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny); in GenerateIntrinsicSet()
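GenerateIntrinsicSet (2899-2982) brackets the store with kAnyStore before (2916) and kAnyAny after (2982). A schematic std::atomic rendering of that bracketing, again with the C++ orderings chosen for illustration:

    #include <atomic>
    #include <cstdint>

    // kAnyStore before the store (line 2916), kAnyAny after it (line 2982).
    void StoreWithBarriers(std::atomic<int64_t>& field, int64_t value) {
      std::atomic_thread_fence(std::memory_order_release);  // ~ kAnyStore
      field.store(value, std::memory_order_relaxed);
      std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ kAnyAny
    }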
2987 CodeGeneratorARMVIXL* codegen, in CreateUnsafePutLocations() argument
3000 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in CreateUnsafePutLocations()
3014 CodeGeneratorARMVIXL* codegen) { in GenUnsafePut() argument
3015 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenUnsafePut()
3023 if (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen)) { in GenUnsafePut()
3028 GenerateIntrinsicSet(codegen, in GenUnsafePut()
3044 codegen->MarkGCCard(temp, card, base, RegisterFrom(value), value_can_be_null); in GenUnsafePut()
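The MarkGCCard call at 3044 is the GC write barrier: after a reference store, the card covering the holder object is dirtied so the concurrent collector knows to rescan it. A generic card-marking sketch; the card size and dirty value are assumptions for illustration, not values taken from ART's CardTable:

    #include <cstdint>

    // Generic card-marking write barrier. kCardShift and kDirty are
    // illustrative assumptions; ART derives both from its CardTable definition.
    constexpr unsigned kCardShift = 10;  // assumed 1 KiB cards
    constexpr uint8_t kDirty = 0x70;     // assumed dirty marker

    void MarkGCCard(uint8_t* card_table, const void* holder_object) {
      card_table[reinterpret_cast<uintptr_t>(holder_object) >> kCardShift] = kDirty;
    }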
3155 static void EmitLoadExclusive(CodeGeneratorARMVIXL* codegen, in EmitLoadExclusive() argument
3159 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in EmitLoadExclusive()
3195 static void EmitStoreExclusive(CodeGeneratorARMVIXL* codegen, in EmitStoreExclusive() argument
3200 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in EmitStoreExclusive()
3230 static void GenerateCompareAndSet(CodeGeneratorARMVIXL* codegen, in GenerateCompareAndSet() argument
3256 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateCompareAndSet()
3271 EmitLoadExclusive(codegen, type, ptr, old_value); in GenerateCompareAndSet()
3303 EmitStoreExclusive(codegen, type, ptr, store_result, new_value); in GenerateCompareAndSet()
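GenerateCompareAndSet (3230-3303) wraps EmitLoadExclusive and EmitStoreExclusive in a retry loop: the exclusive load reads the old value, a comparison mismatch exits with failure, and the store-exclusive status flag restarts the loop on contention. A schematic model in portable C++, where compare_exchange_weak's allowed spurious failure plays the role of the STREX status check:

    #include <atomic>
    #include <cstdint>

    // Schematic LL/SC loop: the relaxed load ~ LDREX, compare_exchange_weak's
    // return value ~ the STREX status flag (it may fail spuriously, hence the loop).
    bool CompareAndSet(std::atomic<int32_t>& addr, int32_t expected, int32_t desired) {
      while (true) {
        int32_t old_value = addr.load(std::memory_order_relaxed);  // ~ EmitLoadExclusive
        if (old_value != expected) {
          return false;  // comparison failed; no store attempted
        }
        if (addr.compare_exchange_weak(old_value, desired,
                                       std::memory_order_relaxed)) {  // ~ EmitStoreExclusive
          return true;  // store-exclusive succeeded
        }
        // Store-exclusive lost the reservation; retry, as the emitted loop does.
      }
    }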
3369 void EmitNativeCode(CodeGenerator* codegen) override { in EmitNativeCode() argument
3370 CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen); in EmitNativeCode()
3482 static void GenUnsafeCas(HInvoke* invoke, DataType::Type type, CodeGeneratorARMVIXL* codegen) { in GenUnsafeCas() argument
3485 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenUnsafeCas()
3503 codegen->MarkGCCard(tmp_ptr, tmp, base, new_value, value_can_be_null); in GenUnsafeCas()
3514 new (codegen->GetScopedAllocator()) ReadBarrierCasSlowPathARMVIXL( in GenUnsafeCas()
3525 codegen); in GenUnsafeCas()
3526 codegen->AddSlowPath(slow_path); in GenUnsafeCas()
3532 codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny); in GenUnsafeCas()
3534 GenerateCompareAndSet(codegen, in GenUnsafeCas()
3546 codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny); in GenUnsafeCas()
3549 codegen->MaybeGenerateMarkingRegisterCheck(/*code=*/ 128, /*temp_loc=*/ LocationFrom(tmp_ptr)); in GenUnsafeCas()
3581 static void GenerateGetAndUpdate(CodeGeneratorARMVIXL* codegen, in GenerateGetAndUpdate() argument
3590 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateGetAndUpdate()
3632 EmitLoadExclusive(codegen, load_store_type, ptr, loaded_value); in GenerateGetAndUpdate()
3708 EmitStoreExclusive(codegen, load_store_type, ptr, store_result, new_value); in GenerateGetAndUpdate()
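GenerateGetAndUpdate (3581-3708) uses the same exclusive-load/store pair, but always stores a value derived from the one just loaded and hands back the old value. A schematic get-and-add instance of that loop, under the same portable-C++ modeling as above:

    #include <atomic>
    #include <cstdint>

    // Get-and-update as an LL/SC loop: load, compute, attempt exclusive store, retry.
    int32_t GetAndAdd(std::atomic<int32_t>& addr, int32_t delta) {
      int32_t loaded = addr.load(std::memory_order_relaxed);  // ~ EmitLoadExclusive
      // On failure compare_exchange_weak refreshes `loaded`, so the next
      // iteration recomputes loaded + delta, mirroring the emitted retry loop.
      while (!addr.compare_exchange_weak(loaded, loaded + delta,
                                         std::memory_order_relaxed)) {  // ~ EmitStoreExclusive
      }
      return loaded;  // the value observed before the update
    }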
3785 static void GenerateSubTypeObjectCheckNoReadBarrier(CodeGeneratorARMVIXL* codegen, in GenerateSubTypeObjectCheckNoReadBarrier() argument
3790 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateSubTypeObjectCheckNoReadBarrier()
3821 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleAccessModeAndVarTypeChecks() argument
3828 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleAccessModeAndVarTypeChecks()
3867 GenerateSubTypeObjectCheckNoReadBarrier(codegen, slow_path, arg_reg, var_type_no_rb); in GenerateVarHandleAccessModeAndVarTypeChecks()
3874 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleStaticFieldCheck() argument
3876 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleStaticFieldCheck()
3892 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleInstanceFieldChecks() argument
3894 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleInstanceFieldChecks()
3926 codegen, slow_path, object, temp, /*object_can_be_null=*/ false); in GenerateVarHandleInstanceFieldChecks()
3942 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleArrayChecks() argument
3944 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleArrayChecks()
3974 codegen->GetAssembler()->MaybeUnpoisonHeapReference(temp); in GenerateVarHandleArrayChecks()
3990 codegen->GetAssembler()->MaybeUnpoisonHeapReference(temp2); in GenerateVarHandleArrayChecks()
3998 codegen->GetAssembler()->MaybeUnpoisonHeapReference(temp2); in GenerateVarHandleArrayChecks()
4008 codegen->GetCompilerOptions().IsBootImage() || in GenerateVarHandleArrayChecks()
4010 DCHECK(boot_image_available || codegen->GetCompilerOptions().IsJitCompiler()); in GenerateVarHandleArrayChecks()
4027 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleCoordinateChecks() argument
4031 GenerateVarHandleStaticFieldCheck(invoke, codegen, slow_path); in GenerateVarHandleCoordinateChecks()
4033 GenerateVarHandleInstanceFieldChecks(invoke, codegen, slow_path); in GenerateVarHandleCoordinateChecks()
4036 GenerateVarHandleArrayChecks(invoke, codegen, slow_path); in GenerateVarHandleCoordinateChecks()
4041 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleChecks() argument
4045 new (codegen->GetScopedAllocator()) VarHandleSlowPathARMVIXL(invoke, order); in GenerateVarHandleChecks()
4046 codegen->AddSlowPath(slow_path); in GenerateVarHandleChecks()
4048 GenerateVarHandleAccessModeAndVarTypeChecks(invoke, codegen, slow_path, type); in GenerateVarHandleChecks()
4049 GenerateVarHandleCoordinateChecks(invoke, codegen, slow_path); in GenerateVarHandleChecks()
4075 CodeGeneratorARMVIXL* codegen) { in GenerateVarHandleTarget() argument
4076 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleTarget()
4094 codegen->GenerateGcRootFieldLoad(invoke, in GenerateVarHandleTarget()
4252 CodeGeneratorARMVIXL* codegen, in CreateVarHandleGetLocations() argument
4271 if (type == DataType::Type::kFloat64 && Use64BitExclusiveLoadStore(atomic, codegen)) { in CreateVarHandleGetLocations()
4281 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleGet() argument
4289 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleGet()
4295 slow_path = GenerateVarHandleChecks(invoke, codegen, order, type); in GenerateVarHandleGet()
4297 GenerateVarHandleTarget(invoke, target, codegen); in GenerateVarHandleGet()
4307 } else if (DataType::Is64BitType(type) && Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateVarHandleGet()
4323 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateVarHandleGet()
4344 codegen, in GenerateVarHandleGet()
4400 CodeGeneratorARMVIXL* codegen, in CreateVarHandleSetLocations() argument
4417 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in CreateVarHandleSetLocations()
4435 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleSet() argument
4442 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleSet()
4449 slow_path = GenerateVarHandleChecks(invoke, codegen, order, value_type); in GenerateVarHandleSet()
4451 GenerateVarHandleTarget(invoke, target, codegen); in GenerateVarHandleSet()
4458 if (DataType::Is64BitType(value_type) && Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateVarHandleSet()
4480 if (Use64BitExclusiveLoadStore(atomic, codegen)) { in GenerateVarHandleSet()
4506 GenerateIntrinsicSet(codegen, in GenerateVarHandleSet()
4522 codegen->MarkGCCard(temp, card, target.object, value_reg, /*value_can_be_null=*/ true); in GenerateVarHandleSet()
4627 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleCompareAndSetOrExchange() argument
4639 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleCompareAndSetOrExchange()
4648 slow_path = GenerateVarHandleChecks(invoke, codegen, order, value_type); in GenerateVarHandleCompareAndSetOrExchange()
4650 GenerateVarHandleTarget(invoke, target, codegen); in GenerateVarHandleCompareAndSetOrExchange()
4660 codegen->GenerateMemoryBarrier( in GenerateVarHandleCompareAndSetOrExchange()
4754 new (codegen->GetScopedAllocator()) ReadBarrierCasSlowPathARMVIXL( in GenerateVarHandleCompareAndSetOrExchange()
4765 codegen); in GenerateVarHandleCompareAndSetOrExchange()
4766 codegen->AddSlowPath(rb_slow_path); in GenerateVarHandleCompareAndSetOrExchange()
4771 GenerateCompareAndSet(codegen, in GenerateVarHandleCompareAndSetOrExchange()
4785 codegen->GenerateMemoryBarrier( in GenerateVarHandleCompareAndSetOrExchange()
4813 codegen->MarkGCCard(temp, card, target.object, RegisterFrom(new_value), new_value_can_be_null); in GenerateVarHandleCompareAndSetOrExchange()
4952 CodeGeneratorARMVIXL* codegen, in GenerateVarHandleGetAndUpdate() argument
4959 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenerateVarHandleGetAndUpdate()
4967 slow_path = GenerateVarHandleChecks(invoke, codegen, order, value_type); in GenerateVarHandleGetAndUpdate()
4969 GenerateVarHandleTarget(invoke, target, codegen); in GenerateVarHandleGetAndUpdate()
4979 codegen->GenerateMemoryBarrier( in GenerateVarHandleGetAndUpdate()
5074 GenerateGetAndUpdate(codegen, in GenerateVarHandleGetAndUpdate()
5085 codegen->GenerateMemoryBarrier( in GenerateVarHandleGetAndUpdate()
5109 codegen->GenerateIntrinsicCasMoveWithBakerReadBarrier(RegisterFrom(out), in GenerateVarHandleGetAndUpdate()
5112 codegen->GenerateReadBarrierSlow( in GenerateVarHandleGetAndUpdate()
5128 codegen->MarkGCCard(temp, card, target.object, RegisterFrom(arg), new_value_can_be_null); in GenerateVarHandleGetAndUpdate()
5258 CodeGeneratorARMVIXL* codegen = down_cast<CodeGeneratorARMVIXL*>(codegen_in); in EmitByteArrayViewCode() local
5259 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in EmitByteArrayViewCode()
5291 codegen->LoadClassRootForIntrinsic(temp2, ClassRoot::kJavaLangInvokeByteArrayViewVarHandle); in EmitByteArrayViewCode()
5340 GenerateVarHandleGet(invoke, codegen, order_, atomic_, /*byte_swap=*/ true); in EmitByteArrayViewCode()
5343 GenerateVarHandleSet(invoke, codegen, order_, atomic_, /*byte_swap=*/ true); in EmitByteArrayViewCode()
5348 invoke, codegen, order_, return_success_, strong_, /*byte_swap=*/ true); in EmitByteArrayViewCode()
5352 invoke, codegen, get_and_update_op_, order_, /*byte_swap=*/ true); in EmitByteArrayViewCode()
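Finally, EmitByteArrayViewCode (5258-5352) funnels byte-array-view VarHandle accesses back into the ordinary GenerateVarHandleGet/Set/CompareAndSetOrExchange/GetAndUpdate generators with byte_swap=true, so a view with mismatched byte order swaps the value around the underlying access. A portable sketch of that wrapping for a 32-bit get; the helper names are hypothetical:

    #include <cstdint>

    // Portable 32-bit byte swap standing in for the byte_swap=true paths
    // that EmitByteArrayViewCode threads into the shared generators.
    inline uint32_t ByteSwap32(uint32_t v) {
      return (v >> 24) | ((v >> 8) & 0x0000FF00u) |
             ((v << 8) & 0x00FF0000u) | (v << 24);
    }

    // A byte-array-view get: load in the array's native order, then swap.
    uint32_t ViewGet(const uint32_t* slot) {
      return ByteSwap32(*slot);
    }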