#!/usr/bin/env ruby

# Copyright (c) 2021-2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

include_relative 'common.irt'

function(:CallStaticPltResolver, params: {method: 'ptr'}, regmap: $full_regmap, regalloc_set: $panda_mask, mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end
    # Load the caller's Method from the CFrame
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    # Load the method id stored one word before the PLT slot's entrypoint field
    method_id := LoadI(method).Imm("cross_values::GetMethodCompiledEntryPointOffset(GetArch()) - WordSize()").ptr
    entry := LoadI(%tr).Imm(Constants::GET_CALLEE_METHOD).ptr
    callee := CallIndirect(entry, caller, method_id).ptr
    # Cache the resolved Method in the PLT slot, so subsequent calls load it directly
    StoreI(method, callee).Imm("cross_values::GetMethodCompiledEntryPointOffset(GetArch()) + WordSize()").ptr
    entry := LoadI(callee).Imm("cross_values::GetMethodCompiledEntryPointOffset(GetArch())").ptr
    LiveOut(callee).DstReg(regmap[:arg0]).ptr
    res := LiveOut(entry).DstReg(regmap[:tmp0]).ptr
    Intrinsic(:TAIL_CALL, res).Terminator.void
}

function(:CallVirtualResolver, regmap: $full_regmap, regalloc_set: $panda_mask, mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end
    # Temp 0 register holds the address of the cache entry for the given method
    slot := LiveIn(:tmp0)
    # Load method id from the PLT entry
    method_id := LoadI(slot).Imm("-WordSize()").ptr
    # Load pointer to the caller method from the `fp` register, which should point to the CFrame
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    method := call_runtime(Constants::GET_CALLEE_METHOD, caller, method_id).ptr
    vtable_index := LoadI(method).Imm(Constants::GET_VTABLE_INDEX).u16
    vtable_index := Cast(vtable_index).SrcType("DataType::UINT16").u32
    # Cache the vtable index biased by 1 in the slot
    vtable_index := AddI(vtable_index).Imm(1).u32
    StoreI(slot, vtable_index).Imm(0).u32
    # We return the virtual table index in the temp 0 register
    LiveOut(vtable_index).DstReg(regmap[:tmp0]).u32
    ReturnVoid().void
}

function(:ClassResolver, regmap: $full_regmap, regalloc_set: $panda_mask, mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end
    slot := LiveIn(:tmp0)
    class_id := LoadI(slot).Imm("-2 * WordSize()").ptr
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    klass := call_runtime(Constants::RESOLVE_CLASS, caller, class_id).ptr
    StoreI(slot, klass).Imm(0).ptr
    # Also fill the initialized-class slot if the class is already initialized
    If(LoadI(klass).Imm(Constants::CLASS_STATE_OFFSET).u32, "CLASS_STATE_INITIALIZED").Likely.EQ {
        StoreI(slot, klass).Imm("-WordSize()").ptr
    }
    LiveOut(klass).DstReg(regmap[:tmp0]).ptr
    ReturnVoid().void
}
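
# A sketch of the class slot layout in the AOT GOT that the offsets in
# ClassResolver and ClassInitResolver imply (inferred from the code in this
# file; the authoritative layout is defined by the AOT builder):
#
#   [class id][initialized Class*][Class*]
#
# ClassResolver enters with tmp0 pointing at the Class* word, so the class id
# is at -2 * WordSize() and the initialized-class word is at -WordSize().
# ClassInitResolver enters with tmp0 pointing at the initialized-class word,
# so the class id is at -WordSize() and the Class* word is at +WordSize().
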
function(:ClassInitResolver, regmap: $full_regmap, regalloc_set: $panda_mask, mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end
    slot := LiveIn(:tmp0)
    class_id := LoadI(slot).Imm("-WordSize()").ptr
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    klass := call_runtime(Constants::INITIALIZE_CLASS_BY_ID, caller, class_id).ptr
    StoreI(slot, klass).Imm("WordSize()").ptr
    # After INITIALIZE_CLASS_BY_ID the class may be in the INITIALIZED or INITIALIZING state.
    # The second case occurs when class initialization is triggered again during static
    # constructor execution. In this case we can't write the class into the cache, because
    # another thread would mistakenly think that the class is already initialized instead
    # of waiting for its initialization.
    If(LoadI(klass).Imm(Constants::CLASS_STATE_OFFSET).u32, "CLASS_STATE_INITIALIZED").Likely.EQ {
        StoreI(slot, klass).Imm(0).ptr
    }
    LiveOut(klass).DstReg(regmap[:tmp0]).ptr
    ReturnVoid().void
}

function(:IntfInlineCache, params: {method: 'ptr', obj: 'ptr', callee_id: 'word', cacheaddr: 'ptr'}, regmap: $tls_regmap, mode: [:FastPath], regalloc_set: $panda_mask) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        Return(method).ptr
        next
    end
    # TODO(liyiming): x86_64 will be supported in the future
    if Options.arch == :x86_64
        Intrinsic(:UNREACHABLE).Terminator.void
        Return(method).ptr
        next
    end
    # Load the cached {class, method} word
    cache_64 := LoadI(cacheaddr).Imm(0).i64
    # Load class from obj
    # (the 64-bit load and masking below is a workaround until issue 6943 is solved)
    klass_0 := LoadI(obj).Imm(Constants::OBJECT_CLASS_OFFSET).i64
    # Keep only the low 32 bits, equivalent to "and x22, x22, #0xffffffff"
    klass_1 := Shl(klass_0, 32).i64
    klass_64 := Shr(klass_1, 32).i64
    klass := Bitcast(klass_64).SrcType("DataType::INT64").ptr
    cache_64_1 := Shl(cache_64, 32).i64
    cache_64_2 := Shr(cache_64_1, 32).i64
    # Method pointer compression: in class.methods_ the low 3 bits of a method pointer
    # are zero (on AARCH64 and AMD64), so a pointer can be cached as its byte offset
    # divided by 8, i.e. offset >> 3.
    If(klass_64, cache_64_2).EQ.Likely.b {
        # --> fast path
        method_1 := AShr(cache_64, 32).i64
        method_2 := Shl(method_1, 3).i64
        method_head_fast := LoadI(klass).Imm(Constants::GET_CLASS_METHODS_OFFSET).ptr
        method_real := Add(method_head_fast, method_2).ptr
        Return(method_real).ptr
    } Else {
        # --> slow path
        ep_offset = get_entrypoint_offset("RESOLVE_VIRTUAL_CALL_AOT_SLOW_PATH")
        method_slow := Intrinsic(:SLOW_PATH_ENTRY, method, obj, callee_id, cacheaddr).AddImm(ep_offset).MethodAsImm("ResolveVirtualCallAotBridge").Terminator.ptr
        Intrinsic(:UNREACHABLE).Terminator.void
    }
}
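
# An illustrative decoding of the inline-cache word checked by IntfInlineCache
# (explanatory comment only; the concrete numbers are hypothetical):
#
#   cache_64 = [ method_offset >> 3 : high 32 bits | class address : low 32 bits ]
#
# For example, with klass = 0x0000005512345678 and the cached method located at
# class.methods_ + 0x40, the cache word is ((0x40 >> 3) << 32) | 0x12345678 =
# 0x0000000812345678. The fast path compares the low 32 bits of the cache word
# with the low 32 bits of the object's class and, on a hit, recovers the method
# as methods_ + ((cache_64 >> 32) << 3) = methods_ + 0x40.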