#!/usr/bin/env ruby

# Copyright (c) 2021-2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

include_relative 'common.irt'

function(:CallStaticPltResolver,
         params: {method: 'ptr'},
         regmap: $full_regmap,
         regalloc_set: $panda_mask,
         mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end

    # Load the pointer to the caller method from the `fp` register, which should point to the CFrame
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    # Load the id of the callee method
    method_id := LoadI(method).Imm("cross_values::GetMethodCompiledEntryPointOffset(GetArch()) - WordSize()").ptr

    # Resolve the callee via the GET_CALLEE_METHOD runtime entrypoint
    entry := LoadI(%tr).Imm(Constants::GET_CALLEE_METHOD).ptr
    callee := CallIndirect(entry, caller, method_id).ptr
    # Cache the resolved callee for subsequent calls
    StoreI(method, callee).Imm("cross_values::GetMethodCompiledEntryPointOffset(GetArch()) + WordSize()").ptr

    # Load the callee's compiled code entry point
    entry := LoadI(callee).Imm("cross_values::GetMethodCompiledEntryPointOffset(GetArch())").ptr

    # Pass the resolved method in the arg0 register and its entry point in the temp 0 register,
    # then tail-call into the callee
    LiveOut(callee).DstReg(regmap[:arg0]).ptr
    res := LiveOut(entry).DstReg(regmap[:tmp0]).ptr

    Intrinsic(:TAIL_CALL, res).Terminator.void
}

function(:CallVirtualResolver,
         regmap: $full_regmap,
         regalloc_set: $panda_mask,
         mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end

    # Temp 0 register holds the address of the cache entry for the given method
    slot := LiveIn(:tmp0)

    # Load the method id from the PLT entry
    method_id := LoadI(slot).Imm("-WordSize()").ptr
    # Load the pointer to the caller method from the `fp` register, which should point to the CFrame
    caller := LoadI(%fp).Imm("-WordSize()").ptr

    method := call_runtime(Constants::GET_CALLEE_METHOD, caller, method_id).ptr

    vtable_index := LoadI(method).Imm(Constants::GET_VTABLE_INDEX).u16
    vtable_index := Cast(vtable_index).SrcType("DataType::UINT16").u32
    # The index is cached incremented by 1, presumably so that 0 can still mean "not resolved"
    vtable_index := AddI(vtable_index).Imm(1).u32

    StoreI(slot, vtable_index).Imm(0).u32

    # Return the virtual table index in the temp 0 register
    LiveOut(vtable_index).DstReg(regmap[:tmp0]).u32
    ReturnVoid().void
}

function(:ClassResolver,
         regmap: $full_regmap,
         regalloc_set: $panda_mask,
         mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end

    # Temp 0 register holds the address of the class cache slot
    slot := LiveIn(:tmp0)

    # Load the class id stored two words before the slot
    class_id := LoadI(slot).Imm("-2 * WordSize()").ptr

    # Load the pointer to the caller method from the `fp` register, which should point to the CFrame
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    klass := call_runtime(Constants::RESOLVE_CLASS, caller, class_id).ptr

    # Cache the resolved class
    StoreI(slot, klass).Imm(0).ptr
    # If the class is already initialized, cache it in the adjacent slot as well
    If(LoadI(klass).Imm(Constants::CLASS_STATE_OFFSET).u32, "CLASS_STATE_INITIALIZED").Likely.EQ {
        StoreI(slot, klass).Imm("-WordSize()").ptr
    }

    # Return the resolved class in the temp 0 register
    LiveOut(klass).DstReg(regmap[:tmp0]).ptr
    ReturnVoid().void
}

function(:ClassInitResolver,
         regmap: $full_regmap,
         regalloc_set: $panda_mask,
         mode: [:Boundary]) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end

    # Temp 0 register holds the address of the class cache slot
    slot := LiveIn(:tmp0)

    # Load the class id stored one word before the slot
    class_id := LoadI(slot).Imm("-WordSize()").ptr

    # Load the pointer to the caller method from the `fp` register, which should point to the CFrame
    caller := LoadI(%fp).Imm("-WordSize()").ptr
    klass := call_runtime(Constants::INITIALIZE_CLASS_BY_ID, caller, class_id).ptr

    # Cache the resolved class
    StoreI(slot, klass).Imm("WordSize()").ptr

    # After INITIALIZE_CLASS_BY_ID the class may be in the INITIALIZED or INITIALIZING state.
    # The second case occurs when class initialization is triggered again during static constructor execution.
    # In this case we cannot write the class into the cache, because another thread would mistakenly assume
    # that the class is already initialized instead of waiting for its initialization.
    If(LoadI(klass).Imm(Constants::CLASS_STATE_OFFSET).u32, "CLASS_STATE_INITIALIZED").Likely.EQ {
        StoreI(slot, klass).Imm(0).ptr
    }

    # Return the class in the temp 0 register
    LiveOut(klass).DstReg(regmap[:tmp0]).ptr
    ReturnVoid().void
}

function(:IntfInlineCache,
         params: {method: 'ptr', obj: 'ptr', callee_id: 'word', cacheaddr: 'ptr'},
         regmap: $tls_regmap,
         mode: [:FastPath],
         regalloc_set: $panda_mask) {
    # Arm32 is not supported
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        Return(method).ptr
        next
    end

    # TODO(liyiming): x86_64 will be supported in the future
    if Options.arch == :x86_64
        Intrinsic(:UNREACHABLE).Terminator.void
        Return(method).ptr
        next
    end

    # Load the 64-bit inline cache entry
    cache_64 := LoadI(cacheaddr).Imm(0).i64
    # Load the class from obj
    # Waiting for issue 6943 to be resolved
    klass_0 := LoadI(obj).Imm(Constants::OBJECT_CLASS_OFFSET).i64
    # Clear the upper 32 bits, equivalent to "and x22, x22, #0xffffffff"
    klass_1 := Shl(klass_0, 32).i64
    klass_64 := Shr(klass_1, 32).i64
    klass := Bitcast(klass_64).SrcType("DataType::INT64").ptr

    # Extract the low 32 bits of the cache entry (the cached class)
    cache_64_1 := Shl(cache_64, 32).i64
    cache_64_2 := Shr(cache_64_1, 32).i64

    # Method pointer compression: in class.methods_, the low 3 bits of a method pointer are 0
    # (on AARCH64 and AMD64), so the pointer is cached as its offset divided by 8, i.e. offset >> 3
    If(klass_64, cache_64_2).EQ.Likely.b {
        # Fast path: the cache hit, decompress the cached method offset
        method_1 := AShr(cache_64, 32).i64
        method_2 := Shl(method_1, 3).i64
        method_head_fast := LoadI(klass).Imm(Constants::GET_CLASS_METHODS_OFFSET).ptr
        method_real := Add(method_head_fast, method_2).ptr
        Return(method_real).ptr
    } Else {
        # Slow path: resolve the call through the runtime
        ep_offset = get_entrypoint_offset("RESOLVE_VIRTUAL_CALL_AOT_SLOW_PATH")
        method_slow := Intrinsic(:SLOW_PATH_ENTRY, method, obj, callee_id, cacheaddr).AddImm(ep_offset).MethodAsImm("ResolveVirtualCallAotBridge").Terminator.ptr
        Intrinsic(:UNREACHABLE).Terminator.void
    }
}
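
# Layout of the 64-bit inline cache word consumed by IntfInlineCache, as implied by the
# loads and shifts in its fast path above:
#
#   bits [31:0]  - the low 32 bits of the cached class pointer
#   bits [63:32] - the offset of the cached method inside class.methods_, divided by 8 (offset >> 3)
#
# Worked example: if the cached method is located at class.methods_ + 0x40, the high half of
# the cache word holds 0x40 >> 3 = 0x8, and the fast path recovers the method address as
# methods_ + ((cache >> 32) << 3) = methods_ + 0x40.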