1 /**
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
/*
 * Codegen high-level implementation
 */
19 #include <iomanip>
20 #include "operands.h"
21 #include "compiler_options.h"
22 #include "lib_call_inst.h"
23 #include "relocations.h"
24 #include "include/compiler_interface.h"
25 #include "ir-dyn-base-types.h"
26 #include "compiler/optimizer/ir/analysis.h"
27 #include "compiler/optimizer/ir/locations.h"
28 #include "compiler/optimizer/analysis/liveness_analyzer.h"
29 #include "optimizer/code_generator/method_properties.h"
30 #include "events/events.h"
31 #include "libpandabase/utils/tsan_interface.h"
32 #include "codegen.h"
33
34 namespace panda::compiler {
35
36 class OsrEntryStub {
FixIntervals(Codegen * codegen,Encoder * encoder)37 void FixIntervals(Codegen *codegen, Encoder *encoder)
38 {
39 auto &la = codegen->GetGraph()->GetAnalysis<LivenessAnalyzer>();
40 la.EnumerateLiveIntervalsForInst(save_state_, [this, codegen, encoder](const auto &li) {
41 auto inst = li->GetInst();
42 auto location = li->GetLocation();
43 // Skip live registers that are already in the input list of the OsrSaveState
44 const auto &ss_inputs = save_state_->GetInputs();
45 if (std::find_if(ss_inputs.begin(), ss_inputs.end(),
46 [inst](auto &input) { return input.GetInst() == inst; }) != ss_inputs.end()) {
47 return;
48 }
49 // Only constants allowed
50 switch (inst->GetOpcode()) {
51 case Opcode::LoadAndInitClass: {
52 auto klass = reinterpret_cast<uintptr_t>(inst->CastToLoadAndInitClass()->GetClass());
53 encoder->EncodeMov(codegen->ConvertRegister(inst->GetDstReg(), inst->GetType()),
54 Imm(reinterpret_cast<uintptr_t>(klass)));
55 break;
56 }
57 case Opcode::Constant: {
58 if (location.IsFixedRegister()) {
59 EncodeConstantMove(li, encoder);
60 } else if (location.IsStack()) {
61 auto slot = location.GetValue();
62 encoder->EncodeSti(
63 Imm(inst->CastToConstant()->GetRawValue()),
64 MemRef(codegen->SpReg(), codegen->GetFrameLayout().GetSpillOffsetFromSpInBytes(slot)));
65 } else {
66 ASSERT(location.IsConstant());
67 }
68 break;
69 }
70 // TODO (ekudriashov): UnresolvedLoadAndInitClass
71 default:
72 break;
73 }
74 });
75 }
76
EncodeConstantMove(const LifeIntervals * li,Encoder * encoder)77 static void EncodeConstantMove(const LifeIntervals *li, Encoder *encoder)
78 {
79 auto inst = li->GetInst();
80 switch (li->GetType()) {
81 case DataType::FLOAT64:
82 encoder->EncodeMov(Reg(li->GetReg(), FLOAT64_TYPE),
83 Imm(bit_cast<double>(inst->CastToConstant()->GetDoubleValue())));
84 break;
85 case DataType::FLOAT32:
86 encoder->EncodeMov(Reg(li->GetReg(), FLOAT32_TYPE),
87 Imm(bit_cast<float>(inst->CastToConstant()->GetFloatValue())));
88 break;
89 case DataType::UINT32:
90 encoder->EncodeMov(Reg(li->GetReg(), INT32_TYPE), Imm(inst->CastToConstant()->GetRawValue()));
91 break;
92 case DataType::UINT64:
93 encoder->EncodeMov(Reg(li->GetReg(), INT64_TYPE), Imm(inst->CastToConstant()->GetRawValue()));
94 break;
95 default:
96 UNREACHABLE();
97 }
98 }
99
100 public:
OsrEntryStub(Codegen * codegen,SaveStateInst * inst)101 OsrEntryStub(Codegen *codegen, SaveStateInst *inst)
102 : label_(codegen->GetEncoder()->CreateLabel()), save_state_(inst)
103 {
104 }
105
106 DEFAULT_MOVE_SEMANTIC(OsrEntryStub);
107 DEFAULT_COPY_SEMANTIC(OsrEntryStub);
108 ~OsrEntryStub() = default;
109
Generate(Codegen * codegen)110 void Generate(Codegen *codegen)
111 {
112 auto encoder = codegen->GetEncoder();
113 auto lr = codegen->GetTarget().GetLinkReg();
114 auto fl = codegen->GetFrameLayout();
115 codegen->CreateStackMap(save_state_->CastToSaveStateOsr());
116 ssize_t slot = CFrameLayout::LOCALS_START_SLOT + CFrameLayout::GetLocalsCount() - 1;
117 encoder->EncodeStp(codegen->FpReg(), lr,
118 MemRef(codegen->FpReg(), -fl.GetOffset<CFrameLayout::FP, CFrameLayout::BYTES>(slot)));
119
120 FixIntervals(codegen, encoder);
121 encoder->EncodeJump(label_);
122 }
123
GetInst()124 SaveStateInst *GetInst()
125 {
126 return save_state_;
127 }
128
GetLabel()129 auto &GetLabel()
130 {
131 return label_;
132 }
133
134 private:
135 LabelHolder::LabelId label_;
136 SaveStateInst *save_state_ {nullptr};
137 };
138
IrTypeToMetainfoType(DataType::Type type)139 static VRegInfo::Type IrTypeToMetainfoType(DataType::Type type)
140 {
141 switch (type) {
142 case DataType::UINT64:
143 case DataType::INT64:
144 return VRegInfo::Type::INT64;
145 case DataType::ANY:
146 return VRegInfo::Type::ANY;
147 case DataType::UINT32:
148 case DataType::UINT16:
149 case DataType::UINT8:
150 case DataType::INT32:
151 case DataType::INT16:
152 case DataType::INT8:
153 return VRegInfo::Type::INT32;
154 case DataType::FLOAT64:
155 return VRegInfo::Type::FLOAT64;
156 case DataType::FLOAT32:
157 return VRegInfo::Type::FLOAT32;
158 case DataType::BOOL:
159 return VRegInfo::Type::BOOL;
160 case DataType::REFERENCE:
161 return VRegInfo::Type::OBJECT;
162 default:
163 UNREACHABLE();
164 }
165 }
166
GetLanguageExtensionOffsetFromSpInBytes()167 size_t Codegen::GetLanguageExtensionOffsetFromSpInBytes()
168 {
169 auto frame_layout = GetFrameLayout();
170 size_t lang_ext_slots_count =
171 GetGraph()->GetRuntime()->GetLanguageExtensionSize() / PointerSize(GetGraph()->GetArch());
172 return frame_layout.GetSpillOffsetFromSpInBytes(lang_ext_slots_count + GetGraph()->GetExtSlotsStart() - 1);
173 }
174
/// Codegen constructor: wires together the graph's register file, encoder,
/// calling convention and code-info builder before code emission starts.
Codegen::Codegen(Graph *graph)
    : Optimization(graph),
      allocator_(graph->GetAllocator()),
      local_allocator_(graph->GetLocalAllocator()),
      code_builder_(allocator_->New<CodeInfoBuilder>(graph->GetArch(), allocator_)),
      slow_paths_(graph->GetLocalAllocator()->Adapter()),
      slow_paths_map_(graph->GetLocalAllocator()->Adapter()),
      frame_layout_(CFrameLayout(graph->GetArch(), graph->GetStackSlotsCount())),
      osr_entries_(graph->GetLocalAllocator()->Adapter()),
      vreg_indices_(GetAllocator()->Adapter()),
      runtime_(graph->GetRuntime()),
      target_(graph->GetArch()),
      live_outs_(graph->GetLocalAllocator()->Adapter()),
      disasm_(this),
      spill_fills_resolver_(graph)
{
    graph->SetCodeBuilder(code_builder_);
    regfile_ = graph->GetRegisters();
    if (regfile_ != nullptr) {
        ASSERT(regfile_->IsValid());
        // Publish regalloc's used-register information to the register file;
        // prologue generation reads it (see the note in Initialize()).
        ArenaVector<Reg> regs_usage(allocator_->Adapter());
        Convert(&regs_usage, graph->GetUsedRegs<DataType::INT64>(), INT64_TYPE);
        Convert(&regs_usage, graph->GetUsedRegs<DataType::FLOAT64>(), FLOAT64_TYPE);
        regfile_->SetUsedRegs(regs_usage);
#ifndef NDEBUG
        COMPILER_LOG(DEBUG, CODEGEN) << "Regalloc used registers scalar " << graph->GetUsedRegs<DataType::INT64>();
        COMPILER_LOG(DEBUG, CODEGEN) << "Regalloc used registers vector " << graph->GetUsedRegs<DataType::FLOAT64>();
#endif
    }

    enc_ = graph->GetEncoder();
    ASSERT(enc_ != nullptr && enc_->IsValid());
    enc_->SetRegfile(regfile_);
    // Frame layout can be set only after the underlying masm is initialized.
    if (enc_->InitMasm()) {
        enc_->SetFrameLayout(GetFrameLayout());
    }

    callconv_ = graph->GetCallingConvention();
    if (callconv_ != nullptr) {
        ASSERT(callconv_->IsValid());
        if (callconv_->GetEncoder() == nullptr) {
            callconv_->SetEncoder(enc_);
        }
    }

    auto method = graph->GetMethod();
    // workaround for test
    if (method != nullptr) {
        method_id_ = graph->GetRuntime()->GetMethodId(method);
    }
    GetDisasm()->Init();
    GetDisasm()->SetEncoder(GetEncoder());
}
228
/// @return human-readable pass name used in logs and pass statistics.
const char *Codegen::GetPassName() const
{
    return "Codegen";
}
233
/// Codegen failure is fatal for the compilation pipeline: abort if it fails.
bool Codegen::AbortIfFailed() const
{
    return true;
}
238
/// Builds the FrameInfo describing the CFrame layout (offsets of caller- and
/// callee-saved register areas, spill counts, prologue options) and stores it
/// in the Codegen via SetFrameInfo().
void Codegen::CreateFrameInfo()
{
    // Create FrameInfo for CFrame
    auto &fl = GetFrameLayout();
    auto frame = GetGraph()->GetLocalAllocator()->New<FrameInfo>(
        FrameInfo::PositionedCallers::Encode(true) | FrameInfo::PositionedCallees::Encode(true) |
        FrameInfo::CallersRelativeFp::Encode(false) | FrameInfo::CalleesRelativeFp::Encode(true));
    frame->SetFrameSize(fl.GetFrameSize<CFrameLayout::BYTES>());
    frame->SetSpillsCount(fl.GetSpillsCount());

    // Caller-saved areas are addressed relative to SP...
    frame->SetCallersOffset(
        fl.GetOffset<CFrameLayout::SP, CFrameLayout::SLOTS>(fl.GetStackStartSlot() + fl.GetCallerLastSlot(false)));
    frame->SetFpCallersOffset(
        fl.GetOffset<CFrameLayout::SP, CFrameLayout::SLOTS>(fl.GetStackStartSlot() + fl.GetCallerLastSlot(true)));
    // ...while callee-saved areas are addressed relative to FP, hence the negation.
    frame->SetCalleesOffset(
        -fl.GetOffset<CFrameLayout::FP, CFrameLayout::SLOTS>(fl.GetStackStartSlot() + fl.GetCalleeLastSlot(false)));
    frame->SetFpCalleesOffset(
        -fl.GetOffset<CFrameLayout::FP, CFrameLayout::SLOTS>(fl.GetStackStartSlot() + fl.GetCalleeLastSlot(true)));

    frame->SetSetupFrame(true);
    frame->SetSaveFrameAndLinkRegs(true);
    // Without the compact prologue, unused callee-saved registers are saved as well.
    frame->SetSaveUnusedCalleeRegs(!GetGraph()->GetMethodProperties().GetCompactPrologueAllowed());
    frame->SetAdjustSpReg(true);
    frame->SetHasFloatRegs(GetGraph()->HasFloatRegs());

    GetCodeBuilder()->SetHasFloatRegs(GetGraph()->HasFloatRegs());

    SetFrameInfo(frame);
}
268
FillOnlyParameters(RegMask * live_regs,uint32_t num_params) const269 void Codegen::FillOnlyParameters(RegMask *live_regs, uint32_t num_params) const
270 {
271 ASSERT(num_params <= 6U);
272 if (GetArch() == Arch::AARCH64) {
273 num_params = AlignUp(num_params, 2U);
274 }
275 *live_regs &= GetTarget().GetParamRegsMask(num_params);
276 }
277
Convert(ArenaVector<Reg> * regs_usage,const ArenaVector<bool> * mask,TypeInfo type_info)278 void Codegen::Convert(ArenaVector<Reg> *regs_usage, const ArenaVector<bool> *mask, TypeInfo type_info)
279 {
280 ASSERT(regs_usage != nullptr);
281 // There are no used registers
282 if (mask == nullptr) {
283 return;
284 }
285 ASSERT(mask->size() == MAX_NUM_REGS);
286 for (uint32_t i = 0; i < MAX_NUM_REGS; ++i) {
287 if ((*mask)[i]) {
288 regs_usage->emplace_back(i, type_info);
289 }
290 }
291 }
292
293 #ifdef INTRINSIC_SLOW_PATH_ENTRY_ENABLED
/// Dispatches IRTOC-specific intrinsics to their dedicated code generators.
/// @param inst    the intrinsic instruction
/// @param dst     destination register (used by the mark-word intrinsics)
/// @param src     source registers of the intrinsic
/// @param lvrmask live registers mask (unused here, kept for signature parity)
void Codegen::CreateIrtocIntrinsic(IntrinsicInst *inst, [[maybe_unused]] Reg dst, [[maybe_unused]] SRCREGS src,
                                   [[maybe_unused]] RegMask *lvrmask)
{
    switch (inst->GetIntrinsicId()) {
        case RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY:
            IntrinsicSlowPathEntry(inst);
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_UNREACHABLE:
            GetEncoder()->EncodeAbort();
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_SAVE_REGISTERS_EP:
            IntrinsicSaveRegisters(inst);
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_RESTORE_REGISTERS_EP:
            IntrinsicRestoreRegisters(inst);
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_TAIL_CALL:
            IntrinsicTailCall(inst);
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_INTERPRETER_RETURN:
            GetCallingConvention()->GenerateNativeEpilogue(*GetFrameInfo(), []() {});
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_LOAD_ACQUIRE_MARK_WORD_EXCLUSIVE:
            // The mark-word intrinsics rely on the mark word being at offset 0 of the object.
            ASSERT(GetRuntime()->GetObjMarkWordOffset(GetArch()) == 0);
            GetEncoder()->EncodeLdrExclusive(dst, src[0], true);
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_STORE_RELEASE_MARK_WORD_EXCLUSIVE:
            ASSERT(GetRuntime()->GetObjMarkWordOffset(GetArch()) == 0);
            GetEncoder()->EncodeStrExclusive(dst, src[SECOND_OPERAND], src[0], true);
            break;
        case RuntimeInterface::IntrinsicId::INTRINSIC_COMPARE_AND_SET_MARK_WORD:
            ASSERT(GetRuntime()->GetObjMarkWordOffset(GetArch()) == 0);
            GetEncoder()->EncodeCompareAndSwap(dst, src[0], src[SECOND_OPERAND], src[THIRD_OPERAND]);
            break;
        default:
            UNREACHABLE();
            break;
    }
}
333 #endif
334
/// Per-method setup before instruction encoding: rejects graphs whose
/// estimated code size exceeds the configured limit, sets the code offset,
/// opens the code-info builder's method record and emits the prologue.
/// @return false if the method is too large or the encoder failed.
bool Codegen::BeginMethod()
{
    // Do not try to encode too large graph
    auto inst_size = GetGraph()->GetCurrentInstructionId();
    auto insts_per_byte = GetEncoder()->MaxArchInstPerEncoded();
    auto max_bits_in_inst = GetInstructionSizeBits(GetArch());
    // Reserve room for slow paths that are appended after the main body.
    inst_size += slow_paths_.size() * INST_IN_SLOW_PATH;
    if ((inst_size * insts_per_byte * max_bits_in_inst) > options.GetCompilerMaxGenCodeSize()) {
        return false;
    }
    // After this - encoder aborted, if allocated too much size.
    GetEncoder()->SetMaxAllocatedBytes(options.GetCompilerMaxGenCodeSize());

    if (GetGraph()->IsAotMode()) {
        // In AOT mode the code lives at a known offset inside the AOT file.
        GetEncoder()->SetCodeOffset(GetGraph()->GetAotData()->GetCodeOffset() + CodeInfo::GetCodeOffset(GetArch()));
    } else {
        GetEncoder()->SetCodeOffset(0);
    }

    code_builder_->BeginMethod(GetFrameLayout().GetFrameSize<CFrameLayout::BYTES>(), GetGraph()->GetVRegsCount());

    GetEncoder()->BindLabel(GetLabelEntry());
    SetStartCodeOffset(GetEncoder()->GetCursorOffset());

    GeneratePrologue();

    return GetEncoder()->GetResult();
}
363
/// Emits the method prologue: the calling-convention frame setup, the
/// frame-kind TLS marker, dynamic-language extensions, the stack overflow
/// check, and (when enabled) the method-enter trace event.
void Codegen::GeneratePrologue()
{
    SCOPED_DISASM_STR(this, "Method Prologue");

    GetCallingConvention()->GeneratePrologue(*frame_info_);

    if (!GetGraph()->GetMode().IsNative()) {
        // Mark the current frame kind in thread-local storage.
        GetEncoder()->EncodeSti(Imm(static_cast<uint8_t>(1)),
                                MemRef(ThreadReg(), GetRuntime()->GetTlsFrameKindOffset(GetArch())));
    }
    if (GetGraph()->IsDynamicMethod()) {
        GenerateExtensionsForPrologue();
    }
    if (!GetGraph()->GetMode().IsNative()) {
        // Create stack overflow check
        GetEncoder()->EncodeStackOverflowCheck(-GetRuntime()->GetStackOverflowCheckOffset());
        // Create empty stackmap for the stack overflow check
        GetCodeBuilder()->BeginStackMap(0, 0, nullptr, 0, false, false);
        GetCodeBuilder()->EndStackMap();
    }

#if defined(EVENT_METHOD_ENTER_ENABLED) && EVENT_METHOD_ENTER_ENABLED != 0
    if (GetGraph()->IsAotMode()) {
        // In AOT mode the method pointer is not known at compile time and has
        // to be loaded before emitting the trace call.
        SCOPED_DISASM_STR(this, "LoadMethod for trace");
        ScopedTmpReg method_reg(GetEncoder());
        LoadMethod(method_reg);
        InsertTrace({Imm(static_cast<size_t>(TraceId::METHOD_ENTER)), method_reg,
                     Imm(static_cast<size_t>(events::MethodEnterKind::COMPILED))});
    } else {
        InsertTrace({Imm(static_cast<size_t>(TraceId::METHOD_ENTER)),
                     Imm(reinterpret_cast<size_t>(GetGraph()->GetMethod())),
                     Imm(static_cast<size_t>(events::MethodEnterKind::COMPILED))});
    }
#endif
}
399
/// Emits the method epilogue: dynamic-language extensions followed by the
/// calling-convention frame teardown (and the method-exit trace event when
/// event tracing is compiled in).
void Codegen::GenerateEpilogue()
{
    SCOPED_DISASM_STR(this, "Method Epilogue");

    if (GetGraph()->IsDynamicMethod()) {
        GenerateExtensionsForEpilogue();
    }

#if defined(EVENT_METHOD_EXIT_ENABLED) && EVENT_METHOD_EXIT_ENABLED != 0
    // The trace call is emitted inside the epilogue via the callback so that
    // it runs before the frame is destroyed.
    GetCallingConvention()->GenerateEpilogue(*frame_info_, [this]() {
        if (GetGraph()->IsAotMode()) {
            ScopedTmpReg method_reg(GetEncoder());
            LoadMethod(method_reg);
            InsertTrace({Imm(static_cast<size_t>(TraceId::METHOD_EXIT)), method_reg,
                         Imm(static_cast<size_t>(events::MethodExitKind::COMPILED))});
        } else {
            InsertTrace({Imm(static_cast<size_t>(TraceId::METHOD_EXIT)),
                         Imm(reinterpret_cast<size_t>(GetGraph()->GetMethod())),
                         Imm(static_cast<size_t>(events::MethodExitKind::COMPILED))});
        }
    });
#else
    GetCallingConvention()->GenerateEpilogue(*frame_info_, []() {});
#endif
}
425
/// Encodes every basic block in linear order, emits block-terminating jumps
/// where needed, re-checks the generated-code size limit, and finally emits
/// the accumulated slow paths.
/// @return false if any instruction failed to encode or the size limit is hit.
bool Codegen::VisitGraph()
{
    EncodeVisitor visitor(this);
    visitor_ = &visitor;

    const auto &blocks = GetGraph()->GetBlocksLinearOrder();

    for (auto bb : blocks) {
        GetEncoder()->BindLabel(bb->GetId());
        for (auto inst : bb->AllInsts()) {
            SCOPED_DISASM_INST(this, inst);
            visitor.VisitInstruction(inst);
            if (!visitor.GetResult()) {
                COMPILER_LOG(DEBUG, CODEGEN)
                    << "Can't encode instruction: " << GetOpcodeString(inst->GetOpcode()) << *inst;
                break;
            }
        }

        if (bb->NeedsJump()) {
            EmitJump(bb);
        }

        if (!visitor.GetResult()) {
            return false;
        }
    }

    // Re-check the size estimate: slow paths may have been added while encoding.
    auto insts_per_byte = GetEncoder()->MaxArchInstPerEncoded();
    auto max_bits_in_inst = GetInstructionSizeBits(GetArch());
    auto inst_size = GetGraph()->GetCurrentInstructionId() + slow_paths_.size() * INST_IN_SLOW_PATH;
    if ((inst_size * insts_per_byte * max_bits_in_inst) > options.GetCompilerMaxGenCodeSize()) {
        return false;
    }

    EmitSlowPaths();
    visitor_ = nullptr;

    return true;
}
466
EmitJump(const BasicBlock * bb)467 void Codegen::EmitJump(const BasicBlock *bb)
468 {
469 BasicBlock *suc_bb = nullptr;
470
471 if (bb->GetLastInst() == nullptr) {
472 ASSERT(bb->IsEmpty());
473 suc_bb = bb->GetSuccsBlocks()[0];
474 } else {
475 switch (bb->GetLastInst()->GetOpcode()) {
476 case Opcode::If:
477 case Opcode::IfImm:
478 ASSERT(bb->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
479 suc_bb = bb->GetFalseSuccessor();
480 break;
481 default:
482 suc_bb = bb->GetSuccsBlocks()[0];
483 break;
484 }
485 }
486 SCOPED_DISASM_STR(this, std::string("Jump from BB_") + std::to_string(bb->GetId()) + " to BB_" +
487 std::to_string(suc_bb->GetId()));
488
489 auto label = suc_bb->GetId();
490 GetEncoder()->EncodeJump(label);
491 }
492
EndMethod()493 void Codegen::EndMethod()
494 {
495 for (auto &osr_stub : osr_entries_) {
496 SCOPED_DISASM_STR(this,
497 std::string("Osr stub for OsrStateStump ") + std::to_string(osr_stub->GetInst()->GetId()));
498 osr_stub->Generate(this);
499 }
500
501 GetEncoder()->Finalize();
502 }
503
504 // Allocates memory, copies generated code to it, sets the code to the graph's codegen data. Also this function
505 // encodes the code info and sets it to the graph.
CopyToCodeCache()506 bool Codegen::CopyToCodeCache()
507 {
508 auto code_entry = reinterpret_cast<void *>(GetEncoder()->GetLabelAddress(GetLabelEntry()));
509 auto code_size = GetEncoder()->GetCursorOffset();
510 bool save_all_callee_registers = !GetGraph()->GetMethodProperties().GetCompactPrologueAllowed();
511
512 auto code = reinterpret_cast<uint8_t *>(GetAllocator()->Alloc(code_size));
513 if (code == nullptr) {
514 return false;
515 }
516 memcpy_s(code, code_size, code_entry, code_size);
517 GetGraph()->SetData(EncodeDataType(code, code_size));
518
519 RegMask callee_regs;
520 VRegMask callee_vregs;
521 GetRegfile()->FillUsedCalleeSavedRegisters(&callee_regs, &callee_vregs, save_all_callee_registers);
522 constexpr size_t MAX_NUM_REGISTERS = 32;
523 static_assert(MAX_NUM_REGS <= MAX_NUM_REGISTERS && MAX_NUM_VREGS <= MAX_NUM_REGISTERS);
524 code_builder_->SetSavedCalleeRegsMask(static_cast<uint32_t>(callee_regs.to_ulong()),
525 static_cast<uint32_t>(callee_vregs.to_ulong()));
526
527 ArenaVector<uint8_t> code_info_data(GetGraph()->GetAllocator()->Adapter());
528 code_builder_->Encode(&code_info_data);
529 GetGraph()->SetCodeInfo(Span(code_info_data));
530
531 return true;
532 }
533
/// Main pass entry point. Orchestrates the full code generation sequence:
/// initialization, label creation, scratch-register adjustment, method
/// encoding (prologue, body, epilogue, OSR stubs) and publishing the code.
/// @return false on any unrecoverable encoding failure.
bool Codegen::RunImpl()
{
    Initialize();

    auto encoder = GetEncoder();
    encoder->GetLabels()->CreateLabels(GetGraph()->GetVectorBlocks().size());
    label_entry_ = encoder->CreateLabel();
    label_exit_ = encoder->CreateLabel();

#ifndef NDEBUG
    if (options.IsCompilerNonOptimizing()) {
        // In case of non-optimizing compiler lowering pass is not run but low-level instructions may
        // also appear on codegen so, to satisfy GraphChecker, the flag should be raised.
        GetGraph()->SetLowLevelInstructionsEnabled();
    }
#endif  // NDEBUG

    if ((GetCallingConvention() == nullptr) || (GetEncoder() == nullptr)) {
        return false;
    }

    if (!GetEncoder()->GetResult()) {
        return false;
    }

    // Remove registers from the temp registers, if they are in the regalloc mask, i.e. available for regalloc.
    auto used_regs = ~GetGraph()->GetArchUsedRegs();
    auto forbidden_temps = used_regs & GetTarget().GetTempRegsMask();
    if (forbidden_temps.Any()) {
        for (size_t i = forbidden_temps.GetMinRegister(); i <= forbidden_temps.GetMaxRegister(); i++) {
            if (forbidden_temps[i]) {
                encoder->AcquireScratchRegister(Reg(i, INT64_TYPE));
            }
        }
    }

    if (GetDisasm()->IsEnabled()) {
        GetDisasm()->PrintMethodEntry(this);
    }

    if (!BeginMethod()) {
        return false;
    }

    if (!VisitGraph()) {
        return false;
    }
    EndMethod();

    if (!CopyToCodeCache()) {
        return false;
    }

    if (GetDisasm()->IsEnabled()) {
        if (GetGraph()->GetMode().SupportManagedCode() && options.IsCompilerDisasmDumpCodeInfo()) {
            GetDisasm()->PrintCodeInfo(this);
        }
        GetDisasm()->PrintCodeStatistics(this);
    }

    return true;
}
596
/// Prepares per-method state: builds the FrameInfo, reserves scratch
/// registers that overlap LiveIn destinations or regalloc-used registers,
/// and converts the graph's used-register vectors into RegMasks for the
/// prologue generators.
void Codegen::Initialize()
{
    CreateFrameInfo();

    GetRegfile()->SetCalleeSaved(GetRegfile()->GetCalleeSaved());

    if (!GetGraph()->SupportManagedCode()) {
        // LiveIn values placed in temp registers must not be clobbered by the
        // encoder: take those temps out of the scratch pool.
        for (auto inst : GetGraph()->GetStartBlock()->AllInsts()) {
            if (inst->GetOpcode() == Opcode::LiveIn && GetTarget().GetTempRegsMask().Test(inst->GetDstReg())) {
                GetEncoder()->AcquireScratchRegister(Reg(inst->GetDstReg(), INT64_TYPE));
            }
        }
    }

    bool has_calls = false;

    for (auto bb : GetGraph()->GetBlocksLinearOrder()) {
        // Calls may be in the middle of method
        for (auto inst : bb->Insts()) {
            // For throw instruction need jump2runtime same way
            if (inst->IsCall() || inst->GetOpcode() == Opcode::Throw) {
                has_calls = true;
                break;
            }
        }
        if (has_calls) {
            break;
        }
    }

    /* Convert Graph::GetUsedRegs(), which is std::vector<bool>, to simple
     * RegMask and save it in the Codegen. These masks are used to determine
     * which registers we need to save in prologue.
     *
     * NB! It's related to IRTOC specific prologue generation (see CodegenFastPath etc.).
     * Arch specific CallingConvention::GenerateProlog() relies on reg usage information
     * prepared in the Codegen constructor (before Initialize() is called).
     */
    auto fill_mask = [](RegMask *mask, auto *vector) {
        if (vector == nullptr) {
            return;
        }
        ASSERT(mask->size() >= vector->size());
        mask->reset();
        for (size_t i = 0; i < mask->size(); i++) {
            if ((*vector)[i]) {
                mask->set(i);
            }
        }
    };
    fill_mask(&used_regs_, GetGraph()->GetUsedRegs<DataType::INT64>());
    fill_mask(&used_vregs_, GetGraph()->GetUsedRegs<DataType::FLOAT64>());
    used_vregs_ &= GetTarget().GetAvailableVRegsMask();
    used_regs_ &= GetTarget().GetAvailableRegsMask();
    used_regs_ &= ~GetGraph()->GetArchUsedRegs();
    used_vregs_ &= ~GetGraph()->GetArchUsedVRegs();

    /* Remove used registers from Encoder's scratch registers */
    RegMask used_temps = used_regs_ & GetTarget().GetTempRegsMask();
    if (used_temps.any()) {
        for (size_t i = 0; i < used_temps.size(); i++) {
            if (used_temps[i]) {
                GetEncoder()->AcquireScratchRegister(Reg(i, INT64_TYPE));
            }
        }
    }
}
664
ConvertRegister(Register r,DataType::Type type)665 Reg Codegen::ConvertRegister(Register r, DataType::Type type)
666 {
667 switch (type) {
668 case DataType::BOOL:
669 case DataType::UINT8:
670 case DataType::INT8: {
671 return Reg(r, INT8_TYPE);
672 }
673 case DataType::UINT16:
674 case DataType::INT16: {
675 return Reg(r, INT16_TYPE);
676 }
677 case DataType::UINT32:
678 case DataType::INT32: {
679 return Reg(r, INT32_TYPE);
680 }
681 case DataType::UINT64:
682 case DataType::INT64:
683 case DataType::ANY: {
684 return Reg(r, INT64_TYPE);
685 }
686 case DataType::FLOAT32: {
687 return Reg(r, FLOAT32_TYPE);
688 }
689 case DataType::FLOAT64: {
690 return Reg(r, FLOAT64_TYPE);
691 }
692 case DataType::REFERENCE: {
693 return ConvertRegister(r, DataType::GetIntTypeForReference(GetArch()));
694 }
695 case DataType::POINTER: {
696 return Reg(r, ConvertDataType(DataType::POINTER, GetArch()));
697 }
698 default:
699 // Invalid converted register
700 return INVALID_REGISTER;
701 }
702 }
703
ConvertImm(uint64_t imm,DataType::Type type)704 Imm Codegen::ConvertImm(uint64_t imm, DataType::Type type)
705 {
706 switch (type) {
707 // NOLINTNEXTLINE(bugprone-branch-clone)
708 case DataType::BOOL:
709 case DataType::UINT8:
710 return Imm(bit_cast<int8_t, uint8_t>(imm));
711 case DataType::INT8:
712 return Imm(bit_cast<int8_t, uint8_t>(imm));
713 // NOLINTNEXTLINE(bugprone-branch-clone)
714 case DataType::UINT16:
715 return Imm(bit_cast<int16_t, uint16_t>(imm));
716 case DataType::INT16:
717 return Imm(bit_cast<int16_t, uint16_t>(imm));
718 // NOLINTNEXTLINE(bugprone-branch-clone)
719 case DataType::UINT32:
720 return Imm(bit_cast<int32_t, uint32_t>(imm));
721 case DataType::INT32:
722 return Imm(bit_cast<int32_t, uint32_t>(imm));
723 case DataType::ANY:
724 return Imm(bit_cast<int64_t, uint64_t>(imm));
725 // NOLINTNEXTLINE(bugprone-branch-clone)
726 case DataType::UINT64:
727 return Imm(bit_cast<int64_t, uint64_t>(imm));
728 case DataType::INT64:
729 return Imm(bit_cast<int64_t, uint64_t>(imm));
730 case DataType::FLOAT32:
731 return Imm(bit_cast<float, uint32_t>(static_cast<uint32_t>(imm)));
732 case DataType::FLOAT64:
733 return Imm(bit_cast<double, uint64_t>(imm));
734 case DataType::REFERENCE:
735 if (imm == 0) {
736 return Imm(0);
737 }
738 [[fallthrough]]; /* fall-through */
739 default:
740 // Invalid converted immediate
741 UNREACHABLE();
742 }
743 return INVALID_IMM;
744 }
745
// Panda doesn't support types narrower than 32 bits, so smaller values are sign- or zero-extended to 32 bits
ConvertImmWithExtend(uint64_t imm,DataType::Type type)747 Imm Codegen::ConvertImmWithExtend(uint64_t imm, DataType::Type type)
748 {
749 switch (type) {
750 case DataType::BOOL:
751 case DataType::UINT8:
752 return Imm(static_cast<uint32_t>(static_cast<uint8_t>(imm)));
753 case DataType::INT8:
754 return Imm(static_cast<int32_t>(bit_cast<int8_t, uint8_t>(imm)));
755 case DataType::UINT16:
756 return Imm(static_cast<uint32_t>(static_cast<uint16_t>(imm)));
757 case DataType::INT16:
758 return Imm(static_cast<int32_t>(bit_cast<int16_t, uint16_t>(imm)));
759 // NOLINTNEXTLINE(bugprone-branch-clone)
760 case DataType::UINT32:
761 return Imm(bit_cast<int32_t, uint32_t>(imm));
762 case DataType::INT32:
763 return Imm(bit_cast<int32_t, uint32_t>(imm));
764 // NOLINTNEXTLINE(bugprone-branch-clone)
765 case DataType::UINT64:
766 return Imm(bit_cast<int64_t, uint64_t>(imm));
767 case DataType::INT64:
768 return Imm(bit_cast<int64_t, uint64_t>(imm));
769 case DataType::FLOAT32:
770 return Imm(bit_cast<float, uint32_t>(static_cast<uint32_t>(imm)));
771 case DataType::FLOAT64:
772 return Imm(bit_cast<double, uint64_t>(imm));
773 case DataType::ANY:
774 return Imm(bit_cast<uint64_t, uint64_t>(imm));
775 case DataType::REFERENCE:
776 if (imm == 0) {
777 return Imm(0);
778 }
779 [[fallthrough]]; /* fall-through */
780 default:
781 // Invalid converted immediate
782 UNREACHABLE();
783 }
784 return INVALID_IMM;
785 }
786
/// Converts a Constant instruction's value into an Imm of the requested type.
/// For 64-bit integer targets a constant whose own type is ANY keeps its raw
/// (boxed) bit pattern instead of the extracted integer value.
Imm Codegen::ConvertImm(ConstantInst *const_inst, DataType::Type type)
{
    switch (type) {
        case DataType::BOOL:
        case DataType::UINT8:
        case DataType::INT8:
        case DataType::UINT16:
        case DataType::INT16:
        case DataType::UINT32:
        case DataType::INT32:
            return Imm(static_cast<int32_t>(const_inst->GetIntValue()));
        case DataType::UINT64:
        case DataType::INT64:
            if (const_inst->GetType() == DataType::ANY) {
                return Imm(const_inst->GetRawValue());
            }
            return Imm(static_cast<int64_t>(const_inst->GetIntValue()));
        case DataType::FLOAT32:
            return Imm(const_inst->GetFloatValue());
        case DataType::FLOAT64:
            return Imm(const_inst->GetDoubleValue());
        case DataType::ANY:
            return Imm(const_inst->GetRawValue());
        default:
            // Invalid converted immediate
            UNREACHABLE();
    }
    return INVALID_IMM;
}
816
/// Maps an IR condition code to an encoder Condition.
/// CC_B/CC_BE/CC_A/CC_AE are the unsigned comparisons and map to the
/// unsigned LO/LS/HI/HS conditions respectively.
Condition Codegen::ConvertCc(ConditionCode cc)
{
    switch (cc) {
        case CC_EQ:
            return Condition::EQ;
        case CC_NE:
            return Condition::NE;
        case CC_LT:
            return Condition::LT;
        case CC_LE:
            return Condition::LE;
        case CC_GT:
            return Condition::GT;
        case CC_GE:
            return Condition::GE;
        case CC_B:
            return Condition::LO;  // unsigned lower
        case CC_BE:
            return Condition::LS;  // unsigned lower-or-same
        case CC_A:
            return Condition::HI;  // unsigned higher
        case CC_AE:
            return Condition::HS;  // unsigned higher-or-same
        case CC_TST_EQ:
            return Condition::TST_EQ;
        case CC_TST_NE:
            return Condition::TST_NE;
        default:
            UNREACHABLE();
            return Condition::EQ;
    }
    return Condition::EQ;
}
850
/// Maps a condition code to the overflow-flag condition used by
/// overflow-checking instructions: CC_EQ means "overflow occurred" (VS),
/// CC_NE means "no overflow" (VC). Other codes are invalid here.
Condition Codegen::ConvertCcOverflow(ConditionCode cc)
{
    switch (cc) {
        case CC_EQ:
            return Condition::VS;
        case CC_NE:
            return Condition::VC;
        default:
            UNREACHABLE();
            return Condition::VS;
    }
    return Condition::VS;
}
864
EmitSlowPaths()865 void Codegen::EmitSlowPaths()
866 {
867 for (auto slow_path : slow_paths_) {
868 slow_path->Generate(this);
869 }
870 }
871
/// Emits a stack map at the current code offset for the given instruction.
/// @param inst  instruction needing a stack map — either a SaveState itself
///              or an instruction holding one
/// @param user  instruction whose live range determines vreg locations; when
///              nullptr a suitable user is derived from inst
void Codegen::CreateStackMap(Inst *inst, Inst *user)
{
    SaveStateInst *save_state = nullptr;
    if (inst->IsSaveState()) {
        save_state = static_cast<SaveStateInst *>(inst);
    } else {
        save_state = inst->GetSaveState();
    }
    ASSERT(save_state != nullptr);

    bool require_vreg_map = inst->RequireRegMap();
    // For inlined methods the stack map is keyed by the outermost caller's
    // bytecode pc, found by walking the caller chain.
    uint32_t outer_bpc = inst->GetPc();
    for (auto call_inst = save_state->GetCallerInst(); call_inst != nullptr;
         call_inst = call_inst->GetSaveState()->GetCallerInst()) {
        outer_bpc = call_inst->GetPc();
    }

    code_builder_->BeginStackMap(outer_bpc, GetEncoder()->GetCursorOffset(), save_state->GetRootsStackMask(),
                                 save_state->GetRootsRegsMask().to_ulong(), require_vreg_map,
                                 save_state->GetOpcode() == Opcode::SaveStateOsr);
    if (user == nullptr) {
        user = inst;
        // For a standalone SaveState pick the first user that is not a
        // ReturnInlined so live ranges are resolved at a meaningful site.
        if (inst == save_state && inst->HasUsers()) {
            for (auto &u : inst->GetUsers()) {
                if (u.GetInst()->GetOpcode() != Opcode::ReturnInlined) {
                    user = u.GetInst();
                    break;
                }
            }
        }
    }
    CreateStackMapRec(save_state, require_vreg_map, user);

    code_builder_->EndStackMap();
    if (GetDisasm()->IsEnabled()) {
        GetDisasm()->PrintStackMap(this);
    }
}
910
/// Recursively records inline info and vreg maps for a SaveState chain,
/// outermost caller first (the recursion unwinds from the outermost frame
/// down to this save_state).
void Codegen::CreateStackMapRec(SaveStateInst *save_state, bool require_vreg_map, Inst *target_site)
{
    bool has_inline_info = save_state->GetCallerInst() != nullptr;
    size_t vregs_count = 0;
    if (require_vreg_map) {
        // The vreg count comes from the inlined callee's method when this
        // save state belongs to an inlined call, else from the root method.
        auto runtime = GetRuntime();
        if (auto caller = save_state->GetCallerInst()) {
            vregs_count = runtime->GetMethodRegistersCount(caller->GetCallMethod()) +
                          runtime->GetMethodArgumentsCount(caller->GetCallMethod()) + 1;  // 1 for accumulator
        } else {
            vregs_count = runtime->GetMethodRegistersCount(save_state->GetMethod()) +
                          runtime->GetMethodArgumentsCount(save_state->GetMethod()) + 1;  // 1 for accumulator
        }
#ifndef NDEBUG
        ASSERT_PRINT(!save_state->GetInputsWereDeleted(), "Some vregs were deleted from the save state");
#endif
    }

    if (auto call_inst = save_state->GetCallerInst()) {
        // Record the caller frame first, then open this frame's inline info.
        CreateStackMapRec(call_inst->GetSaveState(), require_vreg_map, target_site);
        // In AOT mode the resolved method pointer is unavailable; only the id is stored.
        auto method = GetGraph()->IsAotMode() ? nullptr : call_inst->GetCallMethod();
        code_builder_->BeginInlineInfo(method, GetRuntime()->GetMethodId(call_inst->GetCallMethod()),
                                       save_state->GetPc(), vregs_count);
    }

    if (require_vreg_map) {
        CreateVRegMap(save_state, vregs_count, target_site);
    }

    if (has_inline_info) {
        code_builder_->EndInlineInfo();
    }
}
944
/// Records the location of every virtual register of a save state into the
/// code-info builder: empty slots, immediates, or the live location of the
/// corresponding input at target_site's life position.
void Codegen::CreateVRegMap(SaveStateInst *save_state, size_t vregs_count, Inst *target_site)
{
    // (-1, -1) marks a vreg with no recorded value.
    vreg_indices_.clear();
    vreg_indices_.resize(vregs_count, {-1, -1});
    FillVregIndices(save_state);

    ASSERT(GetGraph()->IsAnalysisValid<LivenessAnalyzer>());
    auto &la = GetGraph()->GetAnalysis<LivenessAnalyzer>();
    auto target_life_number = la.GetInstLifeIntervals(target_site)->GetBegin();

    for (auto &input_index : vreg_indices_) {
        if (input_index.first == -1 && input_index.second == -1) {
            // Vreg holds no live value at this point.
            code_builder_->AddVReg(VRegInfo());
            continue;
        }
        if (input_index.second != -1) {
            // Vreg is backed by an immediate stored in the save state.
            auto imm = save_state->GetImmediate(input_index.second);
            code_builder_->AddConstant(imm.value, IrTypeToMetainfoType(imm.type), imm.is_acc);
            continue;
        }
        ASSERT(input_index.first != -1);
        // Vreg is backed by an input: resolve its location at target_site.
        auto vreg = save_state->GetVirtualRegister(input_index.first);
        auto input_inst = save_state->GetDataFlowInput(input_index.first);
        auto interval = la.GetInstLifeIntervals(input_inst)->FindSiblingAt(target_life_number);
        ASSERT(interval != nullptr);
        CreateVreg(interval->GetLocation(), input_inst, vreg);
    }
}
973
/**
 * Record a single vreg map entry, dispatching on where the value currently lives.
 *
 * @param location physical location of the value at the stack-map site
 * @param inst     instruction producing the value (supplies its type)
 * @param vreg     virtual register metadata (accumulator flag, etc.)
 */
void Codegen::CreateVreg(const Location &location, Inst *inst, const VirtualRegister &vreg)
{
    switch (location.GetKind()) {
        case LocationType::FP_REGISTER:
        case LocationType::REGISTER: {
            CreateVRegForRegister(location, inst, vreg);
            break;
        }
        case LocationType::STACK_PARAMETER: {
            // Incoming stack argument: convert the parameter index to a CFrame slot number.
            auto slot = location.GetValue();
            code_builder_->AddVReg(VRegInfo(GetFrameLayout().GetStackArgsStartSlot() - slot - CFrameSlots::Start(),
                                            VRegInfo::Location::SLOT, IrTypeToMetainfoType(inst->GetType()),
                                            vreg.IsAccumulator()));
            break;
        }
        case LocationType::STACK: {
            auto slot = location.GetValue();
            // NOTE(review): on 32-bit targets the index is rescaled to 32-bit slot
            // granularity (two half-slots per value, the second half selected);
            // confirm against the frame layout for 32-bit architectures.
            if (!Is64BitsArch(GetArch())) {
                slot = ((location.GetValue() << 1U) + 1);
            }
            code_builder_->AddVReg(VRegInfo(GetFrameLayout().GetFirstSpillSlot() + slot, VRegInfo::Location::SLOT,
                                            IrTypeToMetainfoType(inst->GetType()), vreg.IsAccumulator()));
            break;
        }
        case LocationType::IMMEDIATE: {
            // Constant value: embed it directly in the stack map.
            ASSERT(inst->IsConst());
            code_builder_->AddConstant(inst->CastToConstant()->GetRawValue(), IrTypeToMetainfoType(inst->GetType()),
                                       vreg.IsAccumulator());
            break;
        }
        default:
            // Reg-to-reg spill fill must not occurs within SaveState
            UNREACHABLE();
    }
}
1009
FillVregIndices(SaveStateInst * save_state)1010 void Codegen::FillVregIndices(SaveStateInst *save_state)
1011 {
1012 for (size_t i = 0; i < save_state->GetInputsCount(); ++i) {
1013 size_t vreg_index = save_state->GetVirtualRegister(i).Value();
1014 ASSERT(vreg_index < vreg_indices_.size());
1015 vreg_indices_[vreg_index].first = i;
1016 }
1017 for (size_t i = 0; i < save_state->GetImmediatesCount(); i++) {
1018 auto vreg_imm = save_state->GetImmediate(i);
1019 ASSERT(vreg_imm.vreg < vreg_indices_.size());
1020 ASSERT(vreg_indices_[vreg_imm.vreg].first == -1);
1021 vreg_indices_[vreg_imm.vreg].second = i;
1022 }
1023 }
1024
/**
 * Record a vreg map entry for a value living in a CPU/FP register.
 * - Zero register (non-OSR): recorded as the constant 0.
 * - OSR mode or callee-saved register: the register itself is recorded.
 * - Caller-saved register: recorded as the caller-saved spill slot it occupies.
 */
void Codegen::CreateVRegForRegister(const Location &location, Inst *inst, const VirtualRegister &vreg)
{
    bool is_osr = GetGraph()->IsOsrMode();
    bool is_fp = (location.GetKind() == LocationType::FP_REGISTER);
    auto reg_num = location.GetValue();
    auto reg = Reg(reg_num, is_fp ? FLOAT64_TYPE : INT64_TYPE);
    if (!is_osr && GetRegfile()->GetZeroReg() == reg) {
        // The hardware zero register always holds 0, so emit a constant entry.
        code_builder_->AddConstant(0, IrTypeToMetainfoType(inst->GetType()), vreg.IsAccumulator());
    } else if (is_osr || GetRegfile()->IsCalleeRegister(reg)) {
        if (is_fp) {
            // References never live in FP registers.
            ASSERT(inst->GetType() != DataType::REFERENCE);
            ASSERT(is_osr || reg_num >= GetFirstCalleeReg(GetArch(), true));
            code_builder_->AddVReg(VRegInfo(reg_num, VRegInfo::Location::FP_REGISTER,
                                            IrTypeToMetainfoType(inst->GetType()), vreg.IsAccumulator()));
        } else {
            ASSERT(is_osr || reg_num >= GetFirstCalleeReg(GetArch(), false));
            code_builder_->AddVReg(VRegInfo(reg_num, VRegInfo::Location::REGISTER,
                                            IrTypeToMetainfoType(inst->GetType()), vreg.IsAccumulator()));
        }
    } else {
        // Caller-saved register: its value is found in the frame's caller-saved
        // slot area, indexed backwards from the last caller slot.
        ASSERT(reg_num >= GetFirstCallerReg(GetArch(), is_fp));
        auto last_slot = GetFrameLayout().GetCallerLastSlot(is_fp);
        reg_num -= GetFirstCallerReg(GetArch(), is_fp);
        code_builder_->AddVReg(VRegInfo(last_slot - reg_num, VRegInfo::Location::SLOT,
                                        IrTypeToMetainfoType(inst->GetType()), vreg.IsAccumulator()));
    }
}
1052
CreateOsrEntry(SaveStateInst * save_state)1053 void Codegen::CreateOsrEntry(SaveStateInst *save_state)
1054 {
1055 auto &stub = osr_entries_.emplace_back(GetAllocator()->New<OsrEntryStub>(this, save_state));
1056 GetEncoder()->BindLabel(stub->GetLabel());
1057 }
1058
/**
 * Inserts tracing code into the generated code. See the `Trace` method in `runtime/entrypoints.cpp`.
 * TODO(compiler): we should rework the parameter-assignment algorithm, which is duplicated here.
 * @param params parameters to be passed to the TRACE entrypoint; the first parameter must be a TraceId value.
 */
void Codegen::InsertTrace(std::initializer_list<std::variant<Reg, Imm>> params)
{
    SCOPED_DISASM_STR(this, "Trace");
    [[maybe_unused]] constexpr size_t MAX_PARAM_NUM = 8;
    ASSERT(params.size() <= MAX_PARAM_NUM);
    // Tracing must be transparent: preserve all caller-saved registers plus the
    // return registers, which the TRACE entrypoint may clobber.
    auto regfile = GetRegfile();
    auto save_regs = regfile->GetCallerSavedRegMask();
    save_regs.set(GetTarget().GetReturnRegId());
    auto save_vregs = regfile->GetCallerSavedVRegMask();
    save_vregs.set(GetTarget().GetReturnFpRegId());

    SaveCallerRegisters(save_regs, save_vregs, false);
    FillCallParams(params);
    EmitCallRuntimeCode(nullptr, EntrypointId::TRACE);
    LoadCallerRegisters(save_regs, save_vregs, false);
}
1080
CallIntrinsic(Inst * inst,RuntimeInterface::IntrinsicId id)1081 void Codegen::CallIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId id)
1082 {
1083 SCOPED_DISASM_STR(this, "CallIntrinsic");
1084 if (GetGraph()->IsAotMode()) {
1085 auto aot_data = GetGraph()->GetAotData();
1086 intptr_t offset = aot_data->GetEntrypointOffset(GetEncoder()->GetCursorOffset(), static_cast<int32_t>(id));
1087 GetEncoder()->MakeCallAot(offset);
1088 } else {
1089 GetEncoder()->MakeCall(
1090 reinterpret_cast<const void *>(GetRuntime()->GetIntrinsicAddress(inst->IsRuntimeCall(), id)));
1091 }
1092 }
1093
/**
 * Move call arguments into the parameter locations dictated by the calling convention.
 * Register-to-register moves are collected and resolved together (to handle moves
 * whose sources overlap destinations), then immediates and SP-sourced moves are
 * encoded afterwards since their sources cannot be clobbered by the register shuffle.
 */
void Codegen::FillCallParams(const std::initializer_list<std::variant<Reg, Imm>> &params)
{
    SCOPED_DISASM_STR(this, "FillCallParams");
    // Native call - do not add reserve parameters
    auto param_info = GetCallingConvention()->GetParameterInfo(0);
    ArenaVector<std::pair<Reg, Imm>> immediates(GetLocalAllocator()->Adapter());
    ArenaVector<Reg> sp_moves(GetLocalAllocator()->Adapter());
    auto reg_moves = GetGraph()->CreateInstSpillFill();
    for (auto &param : params) {
        Location dst;
        if (std::holds_alternative<Reg>(param)) {
            auto reg = std::get<Reg>(param);
            auto type = reg.GetType().ToDataType();
            dst = param_info->GetNextLocation(type);
            if (reg == SpReg()) {
                // SP should be handled separately, since on the ARM64 target it has ID out of range
                sp_moves.emplace_back(ConvertRegister(dst.GetValue(), type));
            } else {
                reg_moves->AddSpillFill(Location::MakeRegister(reg.GetId(), type), dst, type);
            }
        } else {
            auto imm = std::get<Imm>(param);
            auto type = imm.GetType().ToDataType();
            dst = param_info->GetNextLocation(type);
            auto reg = ConvertRegister(dst.GetValue(), type);
            immediates.emplace_back(reg, imm);
        }

        // Stack-passed arguments are not supported here.
        if (dst.IsStackArgument()) {
            GetEncoder()->SetFalseResult();
            UNREACHABLE();  // Move to BoundaryFrame
        }
    }

    // Resolve registers move order and encode
    spill_fills_resolver_.ResolveIfRequired(reg_moves);
    SpillFillEncoder(this, reg_moves).EncodeSpillFill();

    // Encode immediates moves
    for (auto &imm_values : immediates) {
        GetEncoder()->EncodeMov(imm_values.first, imm_values.second);
    }

    // Encode moves from SP reg
    for (auto dst : sp_moves) {
        GetEncoder()->EncodeMov(dst, SpReg());
    }
}
1142
/**
 * Emit the actual call to a runtime entrypoint via the thread-local entrypoint table,
 * followed by its stack map.
 *
 * @param inst instruction the call belongs to (may be a SaveState or nullptr)
 * @param id   runtime entrypoint to call
 * @return false when the entrypoint never returns (the caller can skip result handling)
 */
bool Codegen::EmitCallRuntimeCode(Inst *inst, EntrypointId id)
{
    MemRef entry(ThreadReg(), GetRuntime()->GetEntrypointTlsOffset(GetArch(), id));
    auto encoder = GetEncoder();
    encoder->MakeCall(entry);

    // inst may itself be a SaveState, carry one, or be nullptr (no stack map needed).
    SaveStateInst *save_state =
        (inst == nullptr || inst->IsSaveState()) ? static_cast<SaveStateInst *>(inst) : inst->GetSaveState();
    // StackMap should follow the call as the bridge function expects retaddr points to the stackmap
    if (save_state != nullptr) {
        CreateStackMap(inst);
    }

    if (GetRuntime()->IsEntrypointNoreturn(id)) {
        // In debug builds, trap if control ever falls through a noreturn entrypoint.
        if constexpr (DEBUG_BUILD) {  // NOLINT
            encoder->EncodeAbort();
        }
        return false;
    }
    ASSERT(save_state == nullptr || inst->IsRuntimeCall());

    return true;
}
1166
EnsureParamsFitIn32Bit(std::initializer_list<std::variant<Reg,Imm>> params)1167 [[maybe_unused]] static bool EnsureParamsFitIn32Bit(std::initializer_list<std::variant<Reg, Imm>> params)
1168 {
1169 for (auto ¶m : params) {
1170 if (std::holds_alternative<Reg>(param)) {
1171 auto reg = std::get<Reg>(param);
1172 if (reg.GetSize() > WORD_SIZE) {
1173 return false;
1174 }
1175 } else {
1176 auto imm = std::get<Imm>(param);
1177 if (imm.GetSize() > WORD_SIZE) {
1178 return false;
1179 }
1180 }
1181 }
1182 return true;
1183 }
1184
/**
 * Emit a full runtime call: save live caller registers, marshal parameters,
 * call the entrypoint, move the result into dst_reg, and restore registers.
 *
 * @param inst           instruction on whose behalf the call is made (never nullptr)
 * @param id             runtime entrypoint
 * @param dst_reg        destination for the return value (may be invalid for void calls)
 * @param params         entrypoint arguments (registers and/or immediates)
 * @param preserved_regs extra registers to keep live across the call
 */
void Codegen::CallRuntime(Inst *inst, EntrypointId id, Reg dst_reg,
                          std::initializer_list<std::variant<Reg, Imm>> params, RegMask preserved_regs)
{
    ASSERT(inst != nullptr);
    CHECK_EQ(params.size(), GetRuntime()->GetEntrypointArgsNum(id));
    if (GetArch() == Arch::AARCH32) {
        // There is a problem with 64-bit parameters:
        // params number passed from entrypoints_gen.S.erb will be inconsistent with Aarch32 ABI.
        // Thus, runtime bridges will have wrong params number (\paramsnum macro argument).
        ASSERT(EnsureParamsFitIn32Bit(params));
        ASSERT(!dst_reg.IsValid() || dst_reg.GetSize() <= WORD_SIZE);
    }

    SCOPED_DISASM_STR(this, std::string("CallRuntime: ") + GetRuntime()->GetEntrypointName(id));
    RegMask live_regs {preserved_regs | GetLiveRegisters(inst).first};
    RegMask params_mask;
    if (inst->HasImplicitRuntimeCall() && !GetRuntime()->IsEntrypointNoreturn(id)) {
        SaveRegistersForImplicitRuntime(inst, &params_mask, &live_regs);
    }
    // parameter regs: their initial values must be stored by the caller
    // Other caller regs stored in bridges
    FillOnlyParameters(&live_regs, params.size());
    SaveCallerRegisters(live_regs, VRegMask(), true);

    if (params.size() != 0) {
        FillCallParams(params);
    }

    // Call Code
    if (!EmitCallRuntimeCode(inst, id)) {
        // Noreturn entrypoint: no result handling or register restore is needed.
        return;
    }
    if (dst_reg.IsValid()) {
        ASSERT(dst_reg.IsScalar());
        Reg ret_val = GetTarget().GetReturnReg(dst_reg.GetType());
        // If the result stays in the return register, make sure it is restored afterwards.
        if (dst_reg.GetId() != ret_val.GetId()) {
            GetEncoder()->SetRegister(&live_regs, nullptr, ret_val, true);
        }

        // We must:
        // sign extended INT8 and INT16 to INT32
        // zero extended UINT8 and UINT16 to UINT32
        if (dst_reg.GetSize() < WORD_SIZE) {
            bool is_signed = DataType::IsTypeSigned(inst->GetType());
            GetEncoder()->EncodeCast(dst_reg, is_signed, ret_val, is_signed);
        } else {
            GetEncoder()->EncodeMov(dst_reg, ret_val);
        }
    }

    // Drop dst_reg from the restore mask so the just-written result is not clobbered.
    GetEncoder()->SetRegister(&live_regs, nullptr, dst_reg, false);
    LoadCallerRegisters(live_regs, VRegMask(), true);

    if (!inst->HasImplicitRuntimeCall()) {
        return;
    }
    // Registers saved for the implicit runtime call are no longer GC roots.
    ASSERT(!GetRuntime()->IsEntrypointNoreturn(id));
    for (auto i = 0U; i < params_mask.size(); i++) {
        if (params_mask.test(i)) {
            inst->GetSaveState()->GetRootsRegsMask().reset(i);
        }
    }
}
1248
/**
 * For an instruction with an implicit runtime call: mark all of its fixed-register
 * inputs for saving, and register reference-typed inputs as GC roots.
 *
 * @param inst        instruction whose inputs are inspected
 * @param params_mask out: registers newly added to the SaveState roots mask
 * @param mask        in/out: register mask to extend with the inputs' registers
 */
void Codegen::SaveRegistersForImplicitRuntime(Inst *inst, RegMask *params_mask, RegMask *mask)
{
    auto &roots_mask = inst->GetSaveState()->GetRootsRegsMask();
    for (auto i = 0U; i < inst->GetInputsCount(); i++) {
        auto location = inst->GetLocation(i);
        if (location.IsRegister() && location.IsRegisterValid()) {
            auto val = location.GetValue();
            auto reg = Reg(val, INT64_TYPE);
            GetEncoder()->SetRegister(mask, nullptr, reg, true);
            // Reference inputs become GC roots; record only those not already tracked.
            if (DataType::IsReference(inst->GetInputType(i)) && !roots_mask.test(val)) {
                params_mask->set(val);
                roots_mask.set(val);
            }
        }
    }
}
1265
/**
 * Emit a TLAB capacity check for an allocation of a compile-time-constant size.
 * Jumps to `label` (the slow path) when the remaining TLAB space is below `size`;
 * otherwise reg_tlab_size is overwritten with the new free pointer (start + size).
 *
 * @param reg_tlab_start register holding the current TLAB free pointer
 * @param reg_tlab_size  register holding the remaining TLAB space; clobbered on success
 * @param size           allocation size in bytes
 * @param label          slow-path label taken when the TLAB is too small
 */
void Codegen::CreateCheckForTLABWithConstSize([[maybe_unused]] Inst *inst, Reg reg_tlab_start, Reg reg_tlab_size,
                                              size_t size, LabelHolder::LabelId label)
{
    SCOPED_DISASM_STR(this, "CreateCheckForTLABWithConstSize");
    auto encoder = GetEncoder();
    if (encoder->CanEncodeImmAddSubCmp(size, WORD_SIZE, false)) {
        // Unsigned compare: free_size < size -> slow path.
        encoder->EncodeJump(label, reg_tlab_size, Imm(size), Condition::LO);
        // Change pointer to start free memory
        encoder->EncodeAdd(reg_tlab_size, reg_tlab_start, Imm(size));
    } else {
        // The size does not fit in an immediate: materialize it in a temp register first.
        ScopedTmpReg size_reg(encoder);
        encoder->EncodeMov(size_reg, Imm(size));
        encoder->EncodeJump(label, reg_tlab_size, size_reg, Condition::LO);
        encoder->EncodeAdd(reg_tlab_size, reg_tlab_start, size_reg);
    }
}
1282
/**
 * Emit optional debug/diagnostic calls around an inline TLAB allocation:
 * TLAB event tracing (debug builds), sanitizer annotation (ASAN/TSAN builds),
 * and TLAB allocation statistics when the runtime tracks them.
 */
void Codegen::CreateDebugRuntimeCallsForNewObject(Inst *inst, [[maybe_unused]] Reg reg_tlab_start, size_t alloc_size,
                                                  RegMask preserved)
{
#ifndef NDEBUG
    if (options.IsCompilerEnableTlabEvents()) {
        static constexpr size_t type_index = 1;

        InsertTrace({Imm(static_cast<size_t>(TraceId::TLAB_EVENT)), Imm(type_index), reg_tlab_start, Imm(alloc_size)});
    }
#endif
#if defined(PANDA_ASAN_ON) || defined(PANDA_TSAN_ON)
    // Tell the sanitizer that the freshly-carved TLAB memory is now in use.
    CallRuntime(inst, EntrypointId::ANNOTATE_SANITIZERS, INVALID_REGISTER, {reg_tlab_start, Imm(alloc_size)},
                preserved);
#endif
    if (GetRuntime()->IsTrackTlabAlloc()) {
        CallRuntime(inst, EntrypointId::WRITE_TLAB_STATS, INVALID_REGISTER, {Imm(alloc_size)}, preserved);
    }
}
1301
/**
 * Emit code for NewObject via the ALLOCATE_OBJECT_TLAB entrypoint.
 * Falls back to the generic CREATE_OBJECT_BY_CLASS runtime call when the TLAB
 * cannot be used: TLAB disabled, class not statically known/initialized, class
 * unsuitable for TLAB, or aligned size exceeding the TLAB limit.
 */
void Codegen::CreateNewObjCall(NewObjectInst *new_obj)
{
    auto dst = ConvertRegister(new_obj->GetDstReg(), new_obj->GetType());
    auto src = ConvertRegister(new_obj->GetSrcReg(0), new_obj->GetInputType(0));
    auto init_class = new_obj->GetInput(0).GetInst();
    auto src_class = ConvertRegister(new_obj->GetSrcReg(0), DataType::POINTER);
    auto runtime = GetRuntime();
    auto encoder = GetEncoder();

    auto max_tlab_size = runtime->GetTLABMaxSize();

    if (max_tlab_size == 0 || init_class->GetOpcode() != Opcode::LoadAndInitClass) {
        CallRuntime(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, {src});
        return;
    }
    auto klass = init_class->CastToLoadAndInitClass()->GetClass();
    if (klass == nullptr || !runtime->CanUseTlabForClass(klass)) {
        CallRuntime(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, {src});
        return;
    }
    auto class_size = runtime->GetClassSize(klass);
    auto alignment = runtime->GetTLABAlignment();

    // Round the class size up to the TLAB alignment.
    class_size = (class_size & ~(alignment - 1U)) + ((class_size % alignment) != 0U ? alignment : 0U);
    if (class_size > max_tlab_size) {
        CallRuntime(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, {src});
        return;
    }

    // Only live parameter registers need to survive the entrypoint call.
    auto param_regs {GetLiveRegisters(new_obj).first};
    param_regs &= GetTarget().GetParamRegsMask(2U);

    SaveCallerRegisters(param_regs, VRegMask(), false);

    FillCallParams({src_class, Imm(class_size)});
    MemRef entry(ThreadReg(), GetRuntime()->GetEntrypointTlsOffset(GetArch(), EntrypointId::ALLOCATE_OBJECT_TLAB));
    encoder->MakeCall(entry);
    // StackMap must immediately follow the call (the bridge uses retaddr to find it).
    CreateStackMap(new_obj);

    GetEncoder()->EncodeMov(dst, GetTarget().GetReturnReg(dst.GetType()));
    // Do not restore over the register that now holds the result.
    param_regs.reset(dst.GetId());
    LoadCallerRegisters(param_regs, VRegMask(), false);
}
1345
CreateNewObjCallOld(NewObjectInst * new_obj)1346 void Codegen::CreateNewObjCallOld(NewObjectInst *new_obj)
1347 {
1348 auto dst = ConvertRegister(new_obj->GetDstReg(), new_obj->GetType());
1349 auto src = ConvertRegister(new_obj->GetSrcReg(0), new_obj->GetInputType(0));
1350 auto init_class = new_obj->GetInput(0).GetInst();
1351 auto runtime = GetRuntime();
1352 auto max_tlab_size = runtime->GetTLABMaxSize();
1353 auto encoder = GetEncoder();
1354
1355 if (max_tlab_size == 0 || init_class->GetOpcode() != Opcode::LoadAndInitClass) {
1356 CallRuntime(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, {src});
1357 return;
1358 }
1359 auto klass = init_class->CastToLoadAndInitClass()->GetClass();
1360 if (klass == nullptr || !runtime->CanUseTlabForClass(klass)) {
1361 CallRuntime(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, {src});
1362 return;
1363 }
1364 auto class_size = runtime->GetClassSize(klass);
1365 auto alignment = runtime->GetTLABAlignment();
1366
1367 class_size = (class_size & ~(alignment - 1U)) + ((class_size % alignment) != 0U ? alignment : 0U);
1368 if (class_size > max_tlab_size) {
1369 CallRuntime(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, {src});
1370 return;
1371 }
1372 ScopedLiveTmpReg reg_tlab_start(encoder);
1373 ScopedLiveTmpReg reg_tlab_size(encoder);
1374
1375 auto slow_path = CreateSlowPath<SlowPathEntrypoint>(new_obj, EntrypointId::CREATE_OBJECT_BY_CLASS);
1376 CreateLoadTLABInformation(reg_tlab_start, reg_tlab_size);
1377 CreateCheckForTLABWithConstSize(new_obj, reg_tlab_start, reg_tlab_size, class_size, slow_path->GetLabel());
1378
1379 RegMask preserved_regs;
1380 encoder->SetRegister(&preserved_regs, nullptr, src);
1381 CreateDebugRuntimeCallsForNewObject(new_obj, reg_tlab_start, reinterpret_cast<size_t>(class_size), preserved_regs);
1382
1383 ScopedTmpReg tlab_reg(encoder);
1384 // Load pointer to tlab
1385 encoder->EncodeLdr(tlab_reg, false, MemRef(ThreadReg(), runtime->GetCurrentTLABOffset(GetArch())));
1386
1387 // Store pointer to the class
1388 encoder->EncodeStr(src, MemRef(reg_tlab_start, runtime->GetObjClassOffset(GetArch())));
1389 encoder->EncodeMov(dst, reg_tlab_start);
1390 reg_tlab_start.Release();
1391 // Store new pointer to start free memory in TLAB
1392 encoder->EncodeStrRelease(reg_tlab_size, MemRef(tlab_reg, runtime->GetTLABFreePointerOffset(GetArch())));
1393 slow_path->BindBackLabel(encoder);
1394 }
1395
LoadClassFromObject(Reg class_reg,Reg obj_reg)1396 void Codegen::LoadClassFromObject(Reg class_reg, Reg obj_reg)
1397 {
1398 Reg reg = ConvertRegister(class_reg.GetId(), DataType::REFERENCE);
1399 GetEncoder()->EncodeLdr(reg, false, MemRef(obj_reg, GetRuntime()->GetObjClassOffset(GetArch())));
1400 }
1401
/**
 * Emit the CREATE_MULTI_ARRAY runtime call for a multi-dimensional array
 * allocation. Dimension arguments are passed on the stack, so SP is handed to
 * the entrypoint alongside the class and the dimension count.
 */
void Codegen::CreateMultiArrayCall(CallInst *call_inst)
{
    SCOPED_DISASM_STR(this, "Create Call for MultiArray");

    auto dst_reg = ConvertRegister(call_inst->GetDstReg(), call_inst->GetType());
    auto num_args = call_inst->GetInputsCount() - 2U;  // first is class, last is save_state

    ScopedTmpReg class_reg(GetEncoder());
    auto class_type = ConvertDataType(DataType::REFERENCE, GetArch());
    Reg class_orig = class_reg.GetReg().As(class_type);
    auto location = call_inst->GetLocation(0);
    ASSERT(location.IsFixedRegister() && location.IsRegisterValid());

    // Copy the class argument into a temp so the fixed input register stays intact.
    GetEncoder()->EncodeMov(class_orig, ConvertRegister(location.GetValue(), DataType::INT32));
    CallRuntime(call_inst, EntrypointId::CREATE_MULTI_ARRAY, dst_reg, {class_reg, Imm(num_args), SpReg()});
}
1418
/**
 * Emit a call into the shared PLT class-resolver slow path.
 * If a shared slow path for this entrypoint is reachable by a direct offset
 * call, use it; otherwise create (or reuse) a per-entrypoint shared slow path
 * stub in this method, cached in slow_paths_map_.
 */
void Codegen::CreateJumpToClassResolverPltShared(Inst *inst, Reg tmp_reg, RuntimeInterface::EntrypointId id)
{
    auto encoder = GetEncoder();
    auto graph = GetGraph();
    auto aot_data = graph->GetAotData();
    auto offset = aot_data->GetSharedSlowPathOffset(id, encoder->GetCursorOffset());
    if (offset == 0 || !encoder->CanMakeCallByOffset(offset)) {
        SlowPathShared *slow_path;
        auto search = slow_paths_map_.find(id);
        if (search != slow_paths_map_.end()) {
            // Reuse the slow path already created for this entrypoint; it must
            // have been set up with the same temp register.
            slow_path = search->second;
            ASSERT(slow_path->GetTmpReg().GetId() == tmp_reg.GetId());
        } else {
            slow_path = CreateSlowPath<SlowPathShared>(inst, id);
            slow_path->SetTmpReg(tmp_reg);
            slow_paths_map_[id] = slow_path;
        }
        encoder->MakeCall(slow_path->GetLabel());
    } else {
        encoder->MakeCallByOffset(offset);
    }
    // StackMap must immediately follow the call.
    CreateStackMap(inst);
}
1442
/**
 * Emit an AOT class load through the PLT: read the class slot from the AOT
 * table; if it is still unresolved (zero), call the shared class resolver and
 * take its result. The resolver uses tmp_reg as both argument and return value.
 *
 * @param tmp_reg  first encoder temp register (resolver's in/out register)
 * @param dst      destination register for the resolved class
 * @param class_id id of the class to load
 */
void Codegen::CreateLoadClassFromPLT(Inst *inst, Reg tmp_reg, Reg dst, size_t class_id)
{
    auto encoder = GetEncoder();
    auto graph = GetGraph();
    auto aot_data = graph->GetAotData();
    intptr_t offset = aot_data->GetClassSlotOffset(encoder->GetCursorOffset(), class_id, false);
    auto label = encoder->CreateLabel();
    ASSERT(tmp_reg.GetId() != dst.GetId());
    ASSERT(inst->IsRuntimeCall());
    encoder->MakeLoadAotTableAddr(offset, tmp_reg, dst);
    // Non-zero slot: the class is already resolved, skip the resolver call.
    encoder->EncodeJump(label, dst, Condition::NE);

    // PLT Class Resolver has special calling convention:
    // First encoder temporary (tmp_reg) works as parameter and return value
    CHECK_EQ(tmp_reg.GetId(), encoder->GetTarget().GetTempRegsMask().GetMinRegister());

    CreateJumpToClassResolverPltShared(inst, tmp_reg, EntrypointId::CLASS_RESOLVER);

    encoder->EncodeMov(dst, tmp_reg);
    encoder->BindLabel(label);
}
1464
CreateLoadTLABInformation(Reg reg_tlab_start,Reg reg_tlab_size)1465 void Codegen::CreateLoadTLABInformation(Reg reg_tlab_start, Reg reg_tlab_size)
1466 {
1467 SCOPED_DISASM_STR(this, "LoadTLABInformation");
1468 auto runtime = GetRuntime();
1469 // Load pointer to tlab
1470 GetEncoder()->EncodeLdr(reg_tlab_size, false, MemRef(ThreadReg(), runtime->GetCurrentTLABOffset(GetArch())));
1471 // Load pointer to start free memory in TLAB
1472 GetEncoder()->EncodeLdr(reg_tlab_start, false, MemRef(reg_tlab_size, runtime->GetTLABFreePointerOffset(GetArch())));
1473 // Load pointer to end free memory in TLAB
1474 GetEncoder()->EncodeLdr(reg_tlab_size, false, MemRef(reg_tlab_size, runtime->GetTLABEndPointerOffset(GetArch())));
1475 // Calculate size of the free memory
1476 GetEncoder()->EncodeSub(reg_tlab_size, reg_tlab_size, reg_tlab_start);
1477 }
1478
// Aligns the value in alignment_reg up to the given alignment, using tmp_reg as a scratch register.
/**
 * Round the runtime value in alignment_reg up to a multiple of `alignment`
 * (which must be a power of two). tmp_reg is used as scratch; the result is
 * left in alignment_reg.
 */
void Codegen::CreateAlignmentValue(Reg alignment_reg, Reg tmp_reg, size_t alignment)
{
    auto and_val = ~(alignment - 1U);
    // zeroed lower bits
    GetEncoder()->EncodeAnd(tmp_reg, alignment_reg, Imm(and_val));
    // alignment_reg now holds the remainder (value - rounded_down_value).
    GetEncoder()->EncodeSub(alignment_reg, alignment_reg, tmp_reg);

    auto end_label = GetEncoder()->CreateLabel();

    // if zeroed value is different, add alignment
    GetEncoder()->EncodeJump(end_label, alignment_reg, Condition::EQ);
    GetEncoder()->EncodeAdd(tmp_reg, tmp_reg, Imm(alignment));

    GetEncoder()->BindLabel(end_label);
    GetEncoder()->EncodeMov(alignment_reg, tmp_reg);
}
1496
/**
 * Resolve an unresolved callee method pointer into the first parameter register.
 * AOT mode: always resolves through the GET_UNKNOWN_CALLEE_METHOD entrypoint.
 * JIT mode: first tries the unresolved-types table slot; only if the slot is
 * still empty (zero) does it take the resolving slow path.
 */
void Codegen::EmitGetUnresolvedCalleeMethod(CallInst *call_inst)
{
    Reg param_0 = GetTarget().GetParamReg(0);
    if (GetGraph()->IsAotMode()) {
        // This temporary register is needed, because the stack slot for param_0 will be overwritten in runtime call
        // by pointer to the method(LoadMethod below)
        ScopedTmpReg tmp(GetEncoder());
        LoadMethod(tmp);
        CallRuntime(call_inst, EntrypointId::GET_UNKNOWN_CALLEE_METHOD, tmp.GetReg(),
                    {tmp.GetReg(), Imm(call_inst->GetCallMethodId()), Imm(0)});
        GetEncoder()->EncodeMov(param_0, tmp.GetReg());
        return;
    }

    ScopedTmpReg tmp(GetEncoder());
    auto utypes = GetRuntime()->GetUnresolvedTypes();
    auto skind = UnresolvedTypesInterface::SlotKind::METHOD;
    // Load the cached method pointer from the unresolved-types table slot.
    auto method_addr = utypes->GetTableSlot(call_inst->GetCallMethod(), call_inst->GetCallMethodId(), skind);
    GetEncoder()->EncodeMov(tmp.GetReg(), Imm(method_addr));
    GetEncoder()->EncodeLdr(tmp.GetReg(), false, MemRef(tmp.GetReg()));
    auto slow_path = CreateSlowPath<SlowPathUnresolved>(call_inst, EntrypointId::GET_UNKNOWN_CALLEE_METHOD);
    slow_path->SetUnresolvedType(call_inst->GetCallMethod(), call_inst->GetCallMethodId());
    slow_path->SetDstReg(tmp.GetReg());
    slow_path->SetSlotAddr(method_addr);
    // Zero slot means not yet resolved: go resolve (and cache) via the slow path.
    GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp.GetReg(), Condition::EQ);
    slow_path->BindBackLabel(GetEncoder());
    GetEncoder()->EncodeMov(param_0, tmp.GetReg());
}
1525
/**
 * Emit the call itself for a static call: load the method pointer into param_0
 * and call through its compiled entry point. A direct self-call (recursion into
 * the method being compiled) is emitted as a PC-relative call to this method's
 * own start when that is safe (no OSR, no deopt, not AArch32, not unresolved).
 */
void Codegen::EmitCreateCallCode(CallInst *call_inst)
{
    ASSERT(call_inst->IsStaticCall());
    Reg param_0 = GetTarget().GetParamReg(0);
    if (!call_inst->IsUnresolved() && call_inst->GetCallMethod() == GetGraph()->GetMethod() &&
        GetArch() != Arch::AARCH32 && !GetGraph()->IsOsrMode() && !GetGraph()->GetMethodProperties().GetHasDeopt()) {
        // Recursive self-call: skip entry-point indirection and jump to our own code start.
        if (GetGraph()->IsAotMode()) {
            LoadMethod(param_0);
        } else {
            GetEncoder()->EncodeMov(param_0, Imm(reinterpret_cast<size_t>(GetGraph()->GetMethod())));
        }
        GetEncoder()->MakeCallByOffset(GetStartCodeOffset() - GetEncoder()->GetCursorOffset());
    } else {
        if (call_inst->IsUnresolved()) {
            EmitGetUnresolvedCalleeMethod(call_inst);
        } else if (GetGraph()->IsAotMode()) {
            auto aot_data = GetGraph()->GetAotData();
            intptr_t offset = aot_data->GetPltSlotOffset(GetEncoder()->GetCursorOffset(), call_inst->GetCallMethodId());
            // PLT CallStatic Resolver transparently uses param_0 (Method) register
            GetEncoder()->MakeLoadAotTable(offset, param_0);
        } else {  // usual JIT case
            auto method = call_inst->GetCallMethod();
            GetEncoder()->EncodeMov(param_0, Imm(reinterpret_cast<size_t>(method)));
        }
        // Indirect call through the method's compiled entry point.
        size_t entry_point_offset = GetRuntime()->GetCompiledEntryPointOffset(GetArch());
        GetEncoder()->MakeCall(MemRef(param_0, entry_point_offset));
    }
}
1554
/**
 * Emit the post-call sequence for a static call: stack map, frame-pointer
 * restore in the TLS, and moving/extending the return value into the
 * destination register.
 */
void Codegen::EmitEpilogueForCreateCall(CallInst *call_inst)
{
    // StackMap must immediately follow the call.
    CreateStackMap(call_inst);

    // Restore frame pointer in the TLS
    GetEncoder()->EncodeStr(FpReg(), MemRef(ThreadReg(), GetRuntime()->GetTlsFrameOffset(GetArch())));

    auto dst_reg = ConvertRegister(call_inst->GetDstReg(), call_inst->GetType());
    if (dst_reg.IsValid()) {
        auto arch = GetArch();
        auto return_type = call_inst->GetType();
        auto return_reg = GetTarget().GetReturnReg(dst_reg.GetType());
        // We must:
        // sign extended INT8 and INT16 to INT32
        // zero extended UINT8 and UINT16 to UINT32
        if (DataType::ShiftByType(return_type, arch) < DataType::ShiftByType(DataType::INT32, arch)) {
            bool is_signed = DataType::IsTypeSigned(return_type);
            GetEncoder()->EncodeCast(dst_reg, is_signed, Reg(return_reg.GetId(), INT32_TYPE), is_signed);
        } else {
            GetEncoder()->EncodeMov(dst_reg, return_reg);
        }
    }
}
1578
/**
 * Emit code for a static call. Inlined calls produce no code (optionally a
 * METHOD_ENTER trace event in instrumented non-AOT builds); otherwise the call
 * body and epilogue are generated.
 */
void Codegen::CreateCall(CallInst *call_inst)
{
    if (call_inst->GetSaveState() != nullptr && call_inst->IsInlined()) {
#if defined(EVENT_METHOD_ENTER_ENABLED) && EVENT_METHOD_ENTER_ENABLED != 0
        if (!GetGraph()->IsAotMode()) {
            InsertTrace({Imm(static_cast<size_t>(TraceId::METHOD_ENTER)),
                         Imm(reinterpret_cast<size_t>(call_inst->GetCallMethod())),
                         Imm(static_cast<size_t>(events::MethodEnterKind::INLINED))});
        }
#endif
        return;
    }
    SCOPED_DISASM_STR(this, "Create Call for method pointer");
    // All caller-saved values must have been spilled before the call site.
    ASSERT(!HasLiveCallerSavedRegs(call_inst));
    // TODO(igorban) support MakeCallByOffset in Aarch32Encoder(support adr instruction)
    EmitCreateCallCode(call_inst);
    EmitEpilogueForCreateCall(call_inst);
}
1597
GetObjectReg(Codegen * codegen,CallInst * call_inst)1598 inline Reg GetObjectReg(Codegen *codegen, CallInst *call_inst)
1599 {
1600 auto location = call_inst->GetLocation(0);
1601 ASSERT(location.IsFixedRegister() && location.IsRegisterValid());
1602 auto obj_reg = codegen->ConvertRegister(location.GetValue(), call_inst->GetInputType(0));
1603 ASSERT(obj_reg != INVALID_REGISTER);
1604 return obj_reg;
1605 }
1606
/**
 * Emit code for an intrinsic call: optional immediate materialization, the call
 * itself, an optional stack map, and return-value move/extension. Intrinsics
 * needing stack-passed arguments are not supported and fail code generation.
 */
void Codegen::CreateCallIntrinsic(IntrinsicInst *inst)
{
    if (inst->HasArgumentsOnStack()) {
        // Since for some intrinsics(f.e. TimedWaitNanos) we need SaveState instruction and
        // more than two arguments, we need to place arguments on the stack, but in same time we need to
        // create boundary frame
        LOG(WARNING, COMPILER) << "Intrinsics with arguments on stack are not supported";
        GetEncoder()->SetFalseResult();
        return;
    }

    ASSERT(!HasLiveCallerSavedRegs(inst));
    if (inst->HasImms() && GetGraph()->SupportManagedCode()) {
        EncodeImms(inst->GetImms());
    }
    CallIntrinsic(inst, inst->GetIntrinsicId());

    if (inst->GetSaveState() != nullptr) {
        // StackMap should follow the call as the bridge function expects retaddr points to the stackmap
        CreateStackMap(inst);
    }

    if (inst->GetType() != DataType::VOID) {
        auto arch = GetArch();
        auto return_type = inst->GetType();
        auto dst_reg = ConvertRegister(inst->GetDstReg(), inst->GetType());
        auto return_reg = GetTarget().GetReturnReg(dst_reg.GetType());
        // We must:
        // sign extended INT8 and INT16 to INT32
        // zero extended UINT8 and UINT16 to UINT32
        if (DataType::ShiftByType(return_type, arch) < DataType::ShiftByType(DataType::INT32, arch)) {
            bool is_signed = DataType::IsTypeSigned(return_type);
            GetEncoder()->EncodeCast(dst_reg, is_signed, Reg(return_reg.GetId(), INT32_TYPE), is_signed);
        } else {
            GetEncoder()->EncodeMov(dst_reg, return_reg);
        }
    }
}
1645
GetVtableShift()1646 uint32_t Codegen::GetVtableShift()
1647 {
1648 // The size of the VTable element is equal to the size of pointers for the architecture
1649 // (not the size of pointer to objects)
1650 constexpr uint32_t SHIFT_64_BITS = 3;
1651 constexpr uint32_t SHIFT_32_BITS = 2;
1652 return Is64BitsArch(GetGraph()->GetArch()) ? SHIFT_64_BITS : SHIFT_32_BITS;
1653 }
1654
/**
 * Load the target method of an unresolved virtual call into `method`.
 * AOT mode: always resolves via the RESOLVE_VIRTUAL_CALL_AOT entrypoint.
 * JIT mode: reads the cached vtable index from the unresolved-types table; if
 * present, loads the method from the receiver's vtable, otherwise resolves
 * (and caches) through a slow path.
 */
void Codegen::CreateUnresolvedVirtualMethodLoad(CallInst *vcall, Reg method)
{
    SCOPED_DISASM_STR(this, "Create Call To ResolveUnknownVirtualCall");
    ASSERT(vcall->GetOpcode() == Opcode::UnresolvedCallVirtual);

    ScopedTmpReg tmp_reg(GetEncoder(), ConvertDataType(DataType::REFERENCE, GetArch()));
    Reg obj_reg = GetObjectReg(this, vcall);

    ASSERT(vcall->GetCallMethod() != nullptr);
    if (GetGraph()->IsAotMode()) {
        // TODO(zhroma): how to make more optimal?
        LoadMethod(method);
        CallRuntime(vcall, EntrypointId::RESOLVE_VIRTUAL_CALL_AOT, method,
                    {method, obj_reg, Imm(vcall->GetCallMethodId()), Imm(0)});
    } else {
        auto runtime = GetRuntime();
        auto utypes = runtime->GetUnresolvedTypes();
        auto skind = UnresolvedTypesInterface::SlotKind::VIRTUAL_METHOD;
        // Try to load vtable index
        auto method_slot_addr = utypes->GetTableSlot(vcall->GetCallMethod(), vcall->GetCallMethodId(), skind);
        GetEncoder()->EncodeMov(method, Imm(method_slot_addr));
        GetEncoder()->EncodeLdr(method, false, MemRef(method));

        // 0 means the virtual call is uninitialized or it is an interface call
        auto entrypoint = EntrypointId::RESOLVE_UNKNOWN_VIRTUAL_CALL;
        auto slow_path = CreateSlowPath<SlowPathUnresolved>(vcall, entrypoint);
        slow_path->SetUnresolvedType(vcall->GetCallMethod(), vcall->GetCallMethodId());
        slow_path->SetDstReg(method);
        slow_path->SetArgReg(obj_reg);
        slow_path->SetSlotAddr(method_slot_addr);
        GetEncoder()->EncodeJump(slow_path->GetLabel(), method, Condition::EQ);

        // Load klass into tmp_reg
        LoadClassFromObject(tmp_reg, obj_reg);

        auto tmp_reg_64 = Reg(tmp_reg.GetReg().GetId(), INT64_TYPE);
        // Load from VTable, address = (klass + (index << shift)) + vtable_offset
        // The cached index is 1-based, hence the (1 << shift) subtraction below.
        GetEncoder()->EncodeAdd(tmp_reg_64, tmp_reg, Shift(method, GetVtableShift()));
        GetEncoder()->EncodeLdr(method, false,
                                MemRef(tmp_reg_64, runtime->GetVTableOffset(GetArch()) - (1U << GetVtableShift())));
        slow_path->BindBackLabel(GetEncoder());
    }
}
1698
/// AOT-mode virtual-call preparation through a PLT-GOT slot.
/// The vtable index for this call site is cached in an AOT table slot.
/// A non-zero cached value skips straight to the vtable load; otherwise the
/// CALL_VIRTUAL_RESOLVER entrypoint is invoked to resolve and fill the slot.
/// On return from this function @p method_reg holds the target method pointer.
void Codegen::PrepareCallVirtualAot(CallInst *call_inst, Reg method_reg)
{
    auto runtime = GetRuntime();
    SCOPED_DISASM_STR(this, "AOT CallVirtual using PLT-GOT");
    ScopedTmpReg tmp_reg(GetEncoder(), ConvertDataType(DataType::REFERENCE, GetArch()));
    ASSERT(call_inst->IsRuntimeCall());

    auto method_reg_64 = Reg(method_reg.GetId(), INT64_TYPE);
    auto tmp_reg_64 = Reg(tmp_reg.GetReg().GetId(), INT64_TYPE);

    auto aot_data = GetGraph()->GetAotData();
    intptr_t offset = aot_data->GetVirtIndexSlotOffset(GetEncoder()->GetCursorOffset(), call_inst->GetCallMethodId());
    // Loads the slot address into tmp_reg_64 and the slot value into method_reg_64.
    GetEncoder()->MakeLoadAotTableAddr(offset, tmp_reg_64, method_reg_64);

    auto label = GetEncoder()->CreateLabel();
    // Fast path: slot already holds the (non-zero) vtable index.
    GetEncoder()->EncodeJump(label, method_reg, Condition::NE);

    // Slow path: pass the slot address to the resolver in method_reg.
    GetEncoder()->EncodeMov(method_reg_64, tmp_reg_64);

    // PLT CallVirtual Resolver has very special calling convention:
    // * First encoder temporary (method_reg) works as a parameter and return value
    CHECK_EQ(method_reg_64.GetId(), GetTarget().GetTempRegsMask().GetMinRegister());
    MemRef entry(ThreadReg(), runtime->GetEntrypointTlsOffset(GetArch(), EntrypointId::CALL_VIRTUAL_RESOLVER));
    GetEncoder()->MakeCall(entry);

    // Need a stackmap to build correct boundary frame
    CreateStackMap(call_inst);
    GetEncoder()->BindLabel(label);

    // Load klass into method_reg
    Reg obj_reg = GetObjectReg(this, call_inst);
    LoadClassFromObject(tmp_reg, obj_reg);

    // Load from VTable, address = (klass + (index << shift)) + vtable_offset
    // The '- (1U << shift)' compensates a bias in the cached index (presumably
    // 1-based so 0 can mean "unresolved"; verify against the resolver stub).
    GetEncoder()->EncodeAdd(method_reg, tmp_reg_64, Shift(method_reg, GetVtableShift()));
    GetEncoder()->EncodeLdr(method_reg, false,
                            MemRef(method_reg, runtime->GetVTableOffset(GetArch()) - (1U << GetVtableShift())));
}
1737
PrepareCallVirtual(CallInst * call_inst,Reg method_reg)1738 void Codegen::PrepareCallVirtual(CallInst *call_inst, Reg method_reg)
1739 {
1740 auto obj_reg = GetObjectReg(this, call_inst);
1741 ASSERT(!RegisterKeepCallArgument(call_inst, method_reg));
1742 LoadClassFromObject(method_reg, obj_reg);
1743 // Get index
1744 auto runtime = GetRuntime();
1745 auto vtable_index = runtime->GetVTableIndex(call_inst->GetCallMethod());
1746 // Load from VTable, address = klass + ((index << shift) + vtable_offset)
1747 auto total_offset = runtime->GetVTableOffset(GetArch()) + (vtable_index << GetVtableShift());
1748 // Class ref was loaded to method_reg
1749 GetEncoder()->EncodeLdr(method_reg, false, MemRef(method_reg, total_offset));
1750 }
1751
/// Selects the appropriate virtual-call lowering and emits the call:
///  * unresolved callee        -> cache/runtime resolution (CreateUnresolvedVirtualMethodLoad);
///  * interface method         -> runtime resolution (inline cache on AArch64 AOT);
///  * AOT without CHA          -> PLT-GOT slot resolution (PrepareCallVirtualAot);
///  * otherwise (JIT)          -> direct vtable load and call.
void Codegen::PrepareAndEmitCallVirtual(CallInst *call_inst)
{
    auto runtime = GetRuntime();
    if (call_inst->IsUnresolved()) {
        ScopedTmpReg method_reg(GetEncoder());
        CreateUnresolvedVirtualMethodLoad(call_inst, method_reg);
        EmitCallVirtual(method_reg);
    } else if (runtime->IsInterfaceMethod(call_inst->GetCallMethod())) {
        SCOPED_DISASM_STR(this, "Create Call To ResolveVirtualCall");
        ScopedTmpReg tmp_reg(GetEncoder(), ConvertDataType(DataType::REFERENCE, GetArch()));
        Reg obj_reg = GetObjectReg(this, call_inst);
        ScopedTmpReg method_reg(GetEncoder());
        if (GetGraph()->IsAotMode()) {
            if (GetArch() == Arch::AARCH64) {
                // AArch64 AOT uses a per-call-site inline cache in .aot_got.
                IntfInlineCachePass(call_inst, method_reg, tmp_reg, obj_reg);
            } else {
                LoadMethod(method_reg);
                CallRuntime(call_inst, EntrypointId::RESOLVE_VIRTUAL_CALL_AOT, method_reg,
                            {method_reg, obj_reg, Imm(call_inst->GetCallMethodId()), Imm(0)});
            }
        } else {
            // JIT: the Method* is known, let the runtime resolve against the object's class.
            CallRuntime(call_inst, EntrypointId::RESOLVE_VIRTUAL_CALL, method_reg,
                        {Imm(reinterpret_cast<size_t>(call_inst->GetCallMethod())), obj_reg});
        }
        EmitCallVirtual(method_reg);
    } else if (GetGraph()->IsAotMode() && !GetGraph()->GetAotData()->GetUseCha()) {
        ScopedTmpReg method_reg(GetEncoder());
        PrepareCallVirtualAot(call_inst, method_reg);
        EmitCallVirtual(method_reg);
    } else {  // JIT mode
        // Load the method directly into the first parameter register and call
        // through its compiled entry point - no extra move needed.
        Reg method_reg = GetTarget().GetParamReg(0);
        PrepareCallVirtual(call_inst, method_reg);
        GetEncoder()->MakeCall(MemRef(method_reg, runtime->GetCompiledEntryPointOffset(GetArch())));
    }
}
1787
/// AArch64 AOT lowering of an interface call through a per-call-site inline
/// cache stored in .aot_got. The INTF_INLINE_CACHE irtoc entrypoint checks the
/// cache and only falls back to RESOLVE_VIRTUAL_CALL_AOT on a miss.
///
/// Cache structure:(offset addr)/(class addr) 32bit/32bit
/// -----------------------------------------------
/// (.aot_got)
///     ...
///     cache:offset/class <----------|
///     ...                           |
/// (.text)                           |
/// interface call start              |
/// call runtime irtoc function
/// if call class == cache.class <------|
///     use cache.offset(method)  <------|
/// else                                 |
///     call RESOLVE_VIRTUAL_CALL_AOT    |
///     save method's offset to cache >--|
/// return to (.text)
/// call method
/// -----------------------------------------------
void Codegen::IntfInlineCachePass(CallInst *call_inst, Reg method_reg, Reg tmp_reg, Reg obj_reg)
{
    auto aot_data = GetGraph()->GetAotData();
    uint64_t intf_inline_cache_index = aot_data->GetIntfInlineCacheIndex();
    // TODO(liyiming): do LoadMethod in irtoc to reduce use tmp reg
    if (obj_reg.GetId() != tmp_reg.GetId()) {
        // tmp_reg is available: use it to pass the cache slot address.
        auto reg_tmp_64 = tmp_reg.As(INT64_TYPE);
        uint64_t offset =
            aot_data->GetInfInlineCacheSlotOffset(GetEncoder()->GetCursorOffset(), intf_inline_cache_index);
        GetEncoder()->MakeLoadAotTableAddr(offset, reg_tmp_64, INVALID_REGISTER);
        LoadMethod(method_reg);
        CallRuntime(call_inst, EntrypointId::INTF_INLINE_CACHE, method_reg,
                    {method_reg, obj_reg, Imm(call_inst->GetCallMethodId()), reg_tmp_64});
    } else {
        // we don't have tmp reg here, so use x3 directly
        // (x3 is preserved across the call by parking its value in an FP temp).
        constexpr uint32_t REG_3 = 3;
        Reg reg_3 = Reg(REG_3, INT64_TYPE);
        ScopedTmpRegF64 vtmp(GetEncoder());
        GetEncoder()->EncodeMov(vtmp, reg_3);
        uint64_t offset =
            aot_data->GetInfInlineCacheSlotOffset(GetEncoder()->GetCursorOffset(), intf_inline_cache_index);
        GetEncoder()->MakeLoadAotTableAddr(offset, reg_3, INVALID_REGISTER);
        LoadMethod(method_reg);
        CallRuntime(call_inst, EntrypointId::INTF_INLINE_CACHE, method_reg,
                    {method_reg, obj_reg, Imm(call_inst->GetCallMethodId()), reg_3});
        GetEncoder()->EncodeMov(reg_3, vtmp);
    }

    // Reserve the next cache slot for the next interface call site.
    intf_inline_cache_index++;
    aot_data->SetIntfInlineCacheIndex(intf_inline_cache_index);
}
1836
EmitCallVirtual(Reg method_reg)1837 void Codegen::EmitCallVirtual(Reg method_reg)
1838 {
1839 Reg param_0 = GetTarget().GetParamReg(0);
1840 // Set method
1841 GetEncoder()->EncodeMov(param_0, method_reg);
1842
1843 size_t entry_point_offset = GetRuntime()->GetCompiledEntryPointOffset(GetArch());
1844 GetEncoder()->MakeCall(MemRef(param_0, entry_point_offset));
1845 }
1846
AddParamRegsInLiveMasks(RegMask * live_regs,VRegMask * live_vregs,const std::initializer_list<std::variant<Reg,Imm>> & params)1847 void Codegen::AddParamRegsInLiveMasks(RegMask *live_regs, VRegMask *live_vregs,
1848 const std::initializer_list<std::variant<Reg, Imm>> ¶ms)
1849 {
1850 auto enc = GetEncoder();
1851 auto callconv = GetCallingConvention();
1852 auto param_info = callconv->GetParameterInfo(0);
1853 for (auto ¶m : params) {
1854 if (std::holds_alternative<Reg>(param)) {
1855 auto reg = std::get<Reg>(param);
1856 auto curr_dst = param_info->GetNativeParam(reg.GetType());
1857 if (std::holds_alternative<Reg>(curr_dst)) {
1858 auto r = std::get<Reg>(curr_dst);
1859 if (r.IsScalar()) {
1860 live_regs->set(r.GetId());
1861 } else {
1862 live_vregs->set(r.GetId());
1863 }
1864 } else {
1865 enc->SetFalseResult();
1866 UNREACHABLE();
1867 }
1868 } else {
1869 auto imm = std::get<Imm>(param);
1870 auto curr_dst = param_info->GetNativeParam(imm.GetType());
1871 if (std::holds_alternative<Reg>(curr_dst)) {
1872 auto reg = std::get<Reg>(curr_dst);
1873 if (reg.IsScalar()) {
1874 live_regs->set(reg.GetId());
1875 } else {
1876 live_vregs->set(reg.GetId());
1877 }
1878 } else {
1879 enc->SetFalseResult();
1880 UNREACHABLE();
1881 }
1882 }
1883 }
1884 }
1885
/// Emits a call to an intrinsic stub: saves the caller-saved registers that
/// carry the parameters, fills the native parameter locations, performs the
/// call (with a stackmap when a SaveState is attached), moves the return value
/// into @p dst if requested, and restores the saved registers.
/// @param inst         instruction the call is emitted for (used for the stackmap)
/// @param intrinsicId  intrinsic entrypoint to call
/// @param dst          destination register for the result; may be invalid for void calls
/// @param params       Reg/Imm arguments passed to the stub
void Codegen::CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst,
                             const std::initializer_list<std::variant<Reg, Imm>> &params)
{
    VRegMask live_vregs;
    RegMask live_regs;
    AddParamRegsInLiveMasks(&live_regs, &live_vregs, params);
    auto enc = GetEncoder();
    {
        SCOPED_DISASM_STR(this, "Save caller saved regs");
        SaveCallerRegisters(live_regs, live_vregs, true);
    }

    FillCallParams(params);
    CallIntrinsic(inst, intrinsicId);

    if (inst->GetSaveState() != nullptr) {
        CreateStackMap(inst);
    }

    if (dst.IsValid()) {
        Reg ret_val = GetTarget().GetReturnReg(dst.GetType());
        // If dst is not the return register, the return register must be marked
        // live so the restore below does not clobber it before the move.
        if (dst.GetId() != ret_val.GetId()) {
            enc->SetRegister(&live_regs, &live_vregs, ret_val, true);
        }
        ASSERT(dst.IsScalar());
        enc->EncodeMov(dst, ret_val);
    }

    {
        SCOPED_DISASM_STR(this, "Restore caller saved regs");
        // dst now holds the result - exclude it from the restore set.
        enc->SetRegister(&live_regs, &live_vregs, dst, false);
        LoadCallerRegisters(live_regs, live_vregs, true);
    }
}
1920
/// Entry point for lowering a virtual call instruction.
/// Inlined calls emit no code (optionally a method-enter trace event in JIT
/// builds with event tracing enabled); otherwise the call is prepared,
/// emitted, and followed by the common call epilogue.
void Codegen::CreateVirtualCall(CallInst *call_inst)
{
    SCOPED_DISASM_STR(this, "Create Call for virtual method");
    if (call_inst->GetSaveState() != nullptr && call_inst->IsInlined()) {
#if defined(EVENT_METHOD_ENTER_ENABLED) && EVENT_METHOD_ENTER_ENABLED != 0
        if (!GetGraph()->IsAotMode()) {
            InsertTrace({Imm(static_cast<size_t>(TraceId::METHOD_ENTER)),
                         Imm(reinterpret_cast<size_t>(call_inst->GetCallMethod())),
                         Imm(static_cast<size_t>(events::MethodEnterKind::INLINED))});
        }
#endif
        return;
    }
    // Caller-saved registers must already have been spilled by regalloc.
    ASSERT(!HasLiveCallerSavedRegs(call_inst));
    PrepareAndEmitCallVirtual(call_inst);
    EmitEpilogueForCreateCall(call_inst);
}
1938
/// Lowers a dynamic-language call: extracts the target method from the
/// function object, sets up the argument count, and calls through the
/// method's compiled entry point.
/// Calling convention used here: param0 = method, param1 = number of args,
/// param2 = function object.
void Codegen::CreateDynamicCall(CallInst *call_inst)
{
    SCOPED_DISASM_STR(this, "Create a dynamic call");
    RuntimeInterface *runtime = GetRuntime();
    Encoder *encoder = GetEncoder();

    auto dst_reg = ConvertRegister(call_inst->GetDstReg(), call_inst->GetType());
    Reg param_0 = GetTarget().GetParamReg(0);
    // native_ptr_reg aliases param_0 with object-pointer width (may be narrower).
    Reg native_ptr_reg = Reg(param_0.GetId(), TypeInfo(static_cast<object_pointer_type>(0)));
    Reg param_num_args = GetTarget().GetParamReg(1);
    Reg param_func_obj = GetTarget().GetParamReg(2);

    ASSERT(!HasLiveCallerSavedRegs(call_inst));
    // TODO(audovichenko): Check the following
    // * func_obj's tag is object
    // * func_obj's class is dynamic
    // * target method is not null

    // Load method into param0 as follow:
    //   param_0 = param_func_obj->GetMethod()->GetExternalPointer()
    // native_ptr_reg is a (may be) smaller view of param_0 register.
    encoder->EncodeLdr(native_ptr_reg, false, MemRef(param_func_obj, runtime->GetFunctionTargetOffset(GetArch())));
    encoder->EncodeLdr(param_0, false, MemRef(param_0, runtime->GetNativePointerTargetOffset(GetArch())));

    ASSERT(call_inst->GetInputsCount() > 1);
    auto num_args = static_cast<uint32_t>(
        call_inst->GetInputsCount() -
        2);  // '-2' means 1 for spill fill input and 1 for function object which should not be counted
    encoder->EncodeMov(param_num_args, Imm(num_args));

    size_t entry_point_offset = runtime->GetCompiledEntryPointOffset(GetArch());
    encoder->MakeCall(MemRef(param_0, entry_point_offset));

    // The call may enter the runtime - a stackmap is required.
    CreateStackMap(call_inst);

    if (dst_reg.IsValid()) {
        Reg ret_reg = GetTarget().GetReturnReg(dst_reg.GetType());
        encoder->EncodeMov(dst_reg, ret_reg);
    }
}
1979
1980 template <typename T>
GetBarrierOperandValue(RuntimeInterface * runtime,panda::mem::BarrierPosition position,std::string_view name)1981 static T GetBarrierOperandValue(RuntimeInterface *runtime, panda::mem::BarrierPosition position, std::string_view name)
1982 {
1983 auto operand = runtime->GetBarrierOperand(position, name);
1984 return std::get<T>(operand.GetValue());
1985 }
1986
/// Emits a call to a GC barrier runtime entrypoint: saves the given live
/// caller-saved registers, materializes the parameters, performs the call
/// (no stackmap - barrier entrypoints use the no-bridge convention), and
/// restores the saved registers.
void Codegen::CallBarrier(RegMask live_regs, VRegMask live_vregs, EntrypointId id,
                          const std::initializer_list<std::variant<Reg, Imm>> &params)
{
    SaveCallerRegisters(live_regs, live_vregs, true);
    FillCallParams(params);
    EmitCallRuntimeCode(nullptr, id);
    LoadCallerRegisters(live_regs, live_vregs, true);
}
1995
/// Emits the pre-store write barrier (SATB): when concurrent marking is
/// active, the value(s) about to be overwritten at @p mem are reported to the
/// PRE_WRB entrypoint so the collector does not lose them.
/// @param inst       the store instruction the barrier belongs to
/// @param mem        destination memory of the store
/// @param store_pair true for paired stores - the second slot gets the same treatment
void Codegen::CreatePreWRB(Inst *inst, MemRef mem, bool store_pair)
{
    auto runtime = GetRuntime();
    auto *enc = GetEncoder();

    auto barrier_type = runtime->GetPreType();
    if (barrier_type == panda::mem::BarrierType::PRE_WRB_NONE) {
        return;
    }
    SCOPED_DISASM_STR(this, "Pre WRB");
    ASSERT(barrier_type == panda::mem::BarrierType::PRE_SATB_BARRIER);
    ScopedTmpReg tmp(enc);
    // Obtain the address of the concurrent-marking flag: from TLS in offline
    // (cross-)compilation, as an immediate pointer otherwise.
    if (GetGraph()->IsOfflineCompilationMode()) {
        GetEncoder()->EncodeLdr(tmp, false, MemRef(ThreadReg(), runtime->GetTlsConcurrentMarkingAddrOffset(GetArch())));
    } else {
        auto concurrent_marker = reinterpret_cast<uintptr_t>(GetBarrierOperandValue<std::atomic<bool> *>(
            runtime, panda::mem::BarrierPosition::BARRIER_POSITION_PRE, "CONCURRENT_MARKING_ADDR"));
        enc->EncodeMov(tmp, Imm(concurrent_marker));
    }
    // Check marker
    auto marker_mem = MemRef(tmp);
    auto tmp_b = ConvertRegister(tmp.GetReg().GetId(), DataType::INT8);
    enc->EncodeLdr(tmp_b, false, marker_mem);
    auto label = GetEncoder()->CreateLabel();
    // Marking inactive -> no barrier work needed.
    enc->EncodeJump(label, tmp_b, Condition::EQ);
    auto ref_type =
        inst->GetType() == DataType::ANY ? DataType::INT64 : DataType::GetIntTypeForReference(enc->GetArch());
    auto tmp_ref = ConvertRegister(tmp.GetReg().GetId(), ref_type);
    auto prev_offset = enc->GetCursorOffset();
    // Load old value
    if (IsVolatileMemInst(inst)) {
        enc->EncodeLdrAcquire(tmp_ref, false, mem);
    } else {
        enc->EncodeLdr(tmp_ref, false, mem);
    }
    // This load may be the instruction's implicit null check - register it.
    TryInsertImplicitNullCheck(inst, prev_offset);
    // Null old value needs no reporting.
    enc->EncodeJump(label, tmp_ref, Condition::EQ);
    auto [live_regs, live_vregs] = GetLiveRegisters<true>(inst);
    CallBarrier(live_regs, live_vregs, EntrypointId::PRE_WRB_FUNC_NO_BRIDGE, {tmp_ref});

    if (store_pair) {
        // store pair doesn't support index and scalar
        ASSERT(!mem.HasIndex() && !mem.HasScale());
        // calculate offset for second store
        auto second_offset = 1U << DataType::ShiftByType(DataType::REFERENCE, enc->GetArch());
        if (mem.HasDisp()) {
            second_offset += mem.GetDisp();
        }
        // Load old value
        if (IsVolatileMemInst(inst)) {
            enc->EncodeLdrAcquire(tmp_ref, false, MemRef(mem.GetBase(), second_offset));
        } else {
            enc->EncodeLdr(tmp_ref, false, MemRef(mem.GetBase(), second_offset));
        }
        enc->EncodeJump(label, tmp_ref, Condition::EQ);
        CallBarrier(live_regs, live_vregs, EntrypointId::PRE_WRB_FUNC_NO_BRIDGE, {tmp_ref});
    }
    enc->BindLabel(label);
}
2055
EncodePostWRB(Inst * inst,MemRef mem,Reg reg1,Reg reg2,bool check_nullptr)2056 void Codegen::EncodePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool check_nullptr)
2057 {
2058 ASSERT(reg1 != INVALID_REGISTER);
2059 auto barrier_type = GetRuntime()->GetPostType();
2060 ASSERT(barrier_type == panda::mem::BarrierType::POST_INTERGENERATIONAL_BARRIER ||
2061 barrier_type == panda::mem::BarrierType::POST_INTERREGION_BARRIER);
2062
2063 if (barrier_type == panda::mem::BarrierType::POST_INTERREGION_BARRIER) {
2064 CreatePostInterRegionBarrier(inst, mem, reg1, reg2, check_nullptr);
2065 return;
2066 }
2067 SCOPED_DISASM_STR(this, "Post WRB");
2068 CreatePostInterGenerationalBarrier(mem);
2069 }
2070
/// Emits the post-store write barrier for a (possibly paired) reference store.
/// Stores of NullPtr are filtered out - no barrier is needed for them - and
/// the null check inside the barrier is elided when the stored value(s) are
/// statically known to be non-null.
/// @param reg1 register holding the first stored value
/// @param reg2 register holding the second stored value (paired stores), or invalid
void Codegen::CreatePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2)
{
    ASSERT(reg1 != INVALID_REGISTER);
    ASSERT(!mem.HasIndex() && !mem.HasDisp() && !mem.HasScale());

    auto barrier_type = GetRuntime()->GetPostType();
    if (barrier_type == panda::mem::BarrierType::POST_WRB_NONE) {
        return;
    }
    ASSERT(barrier_type == panda::mem::BarrierType::POST_INTERGENERATIONAL_BARRIER ||
           barrier_type == panda::mem::BarrierType::POST_INTERREGION_BARRIER);

    Inst *second_value;
    Inst *val = InstStoredValue(inst, &second_value);
    ASSERT(second_value == nullptr || reg2 != INVALID_REGISTER);
    if (val->GetOpcode() == Opcode::NullPtr) {
        // No post barrier is needed when the stored value is nullptr.
        if (second_value == nullptr || second_value->GetOpcode() == Opcode::NullPtr) {
            return;
        }
        // CallPostWRB only for second reg
        EncodePostWRB(inst, mem, reg2, INVALID_REGISTER, !IsInstNotNull(second_value));
        return;
    }
    // Create PostWRB only for first value
    if (second_value != nullptr && second_value->GetOpcode() == Opcode::NullPtr) {
        reg2 = INVALID_REGISTER;
    }
    // The in-barrier null check can be skipped only when every stored value is
    // provably non-null.
    bool check_nullptr = true;
    if (reg2 == INVALID_REGISTER) {
        if (IsInstNotNull(val)) {
            check_nullptr = false;
        }
    } else {
        if (IsInstNotNull(val) && IsInstNotNull(second_value)) {
            check_nullptr = false;
        }
    }
    EncodePostWRB(inst, mem, reg1, reg2, check_nullptr);
}
2111
/// Emits the inter-region post write barrier: the UPDATE_CARD entrypoint is
/// called only when the stored reference and the holder object live in
/// different heap regions (detected by XOR-ing the addresses and shifting out
/// the region-size bits). A second stored value (paired store) is handled the
/// same way.
void Codegen::CreatePostInterRegionBarrier(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool check_nullptr)
{
    SCOPED_DISASM_STR(this, "Post IR-WRB");
    auto *enc = GetEncoder();
    ASSERT(GetRuntime()->GetPostType() == panda::mem::BarrierType::POST_INTERREGION_BARRIER);
    ASSERT(reg1 != INVALID_REGISTER);

    auto label = GetEncoder()->CreateLabel();

    if (check_nullptr) {
        // Storing null never requires a card update.
        enc->EncodeJump(label, reg1, Condition::EQ);
    }

    ScopedTmpReg tmp1(enc, ConvertDataType(DataType::REFERENCE, GetArch()));
    reg1 = ConvertRegister(reg1.GetId(), DataType::REFERENCE);
    if (reg2.IsValid()) {
        reg2 = ConvertRegister(reg2.GetId(), DataType::REFERENCE);
    }
    auto mem_reg = ConvertRegister(mem.GetBase().GetId(), DataType::REFERENCE);

    auto region_size_bit = GetBarrierOperandValue<uint8_t>(
        GetRuntime(), panda::mem::BarrierPosition::BARRIER_POSITION_POST, "REGION_SIZE_BITS");
    // Compare first store value with mem
    // Same region <=> all address bits above the region size match.
    enc->EncodeXor(tmp1, mem_reg, reg1);
    enc->EncodeShr(tmp1, tmp1, Imm(region_size_bit));

    enc->EncodeJump(label, tmp1, Condition::EQ);
    // Release the temp before collecting live registers for the call.
    tmp1.Release();
    auto [live_regs, live_vregs] = GetLiveRegisters<true>(inst);

    CallBarrier(live_regs, live_vregs, EntrypointId::POST_WRB_UPDATE_CARD_FUNC_NO_BRIDGE, {mem_reg, reg1});
    enc->BindLabel(label);

    if (reg2.IsValid() && reg1 != reg2) {
        auto label1 = GetEncoder()->CreateLabel();
        if (check_nullptr) {
            enc->EncodeJump(label1, reg2, Condition::EQ);
        }

        ScopedTmpReg tmp2(enc, ConvertDataType(DataType::REFERENCE, GetArch()));
        // Compare second store value with mem
        enc->EncodeXor(tmp2, mem_reg, reg2);
        enc->EncodeShr(tmp2, tmp2, Imm(region_size_bit));
        enc->EncodeJump(label1, tmp2, Condition::EQ);
        tmp2.Release();
        CallBarrier(live_regs, live_vregs, EntrypointId::POST_WRB_UPDATE_CARD_FUNC_NO_BRIDGE, {mem_reg, reg2});
        enc->BindLabel(label1);
    }
}
2161
/// Computes the card-table index for the address in @p mem:
///   card_index = (base_addr - min_heap_addr) >> CARD_BITS
/// On entry *tmp holds min_heap_addr; on exit *tmp holds the card index.
/// Both temps are temporarily retyped to the base register's (wider) type so
/// the subtraction operates on full addresses, then restored.
void Codegen::CalculateCardIndex(MemRef mem, ScopedTmpReg *tmp, ScopedTmpReg *tmp1)
{
    auto tmp_type = tmp->GetReg().GetType();
    auto tmp1_type = tmp1->GetReg().GetType();
    auto *enc = GetEncoder();
    auto card_bits =
        GetBarrierOperandValue<uint8_t>(GetRuntime(), panda::mem::BarrierPosition::BARRIER_POSITION_POST, "CARD_BITS");

    auto base_reg = mem.GetBase();
    ASSERT(base_reg != INVALID_REGISTER);
    // Widen the temps if the base register is wider than they are.
    if (base_reg.GetSize() < Reg(*tmp).GetSize()) {
        tmp->ChangeType(base_reg.GetType());
        tmp1->ChangeType(base_reg.GetType());
    }
    enc->EncodeSub(*tmp, base_reg, *tmp);
    // Only a plain base address is supported (no displacement/index).
    ASSERT(!mem.HasDisp() && !mem.HasIndex());
    enc->EncodeShr(*tmp, *tmp, Imm(card_bits));
    // Restore the original temp types for the caller.
    tmp->ChangeType(tmp_type);
    tmp1->ChangeType(tmp1_type);
}
2182
/// Emits the inter-generational post write barrier: unconditionally marks the
/// card covering the written address as dirty.
/// Sequence: card_index = (addr - min_addr) >> CARD_BITS;
///           *(card_table_addr + card_index) = DIRTY_VAL.
void Codegen::CreatePostInterGenerationalBarrier(MemRef mem)
{
    auto runtime = GetRuntime();
    auto *enc = GetEncoder();
    ASSERT(runtime->GetPostType() == panda::mem::BarrierType::POST_INTERGENERATIONAL_BARRIER);
    ScopedTmpReg tmp(enc);
    ScopedTmpReg tmp1(enc);
    // * load AddressOf(MIN_ADDR) -> min_addr
    // (from TLS in offline-compilation mode, as an immediate otherwise)
    if (GetGraph()->IsOfflineCompilationMode()) {
        GetEncoder()->EncodeLdr(tmp, false, MemRef(ThreadReg(), runtime->GetTlsCardTableMinAddrOffset(GetArch())));
    } else {
        auto min_address = reinterpret_cast<uintptr_t>(
            GetBarrierOperandValue<void *>(runtime, panda::mem::BarrierPosition::BARRIER_POSITION_POST, "MIN_ADDR"));
        enc->EncodeMov(tmp, Imm(min_address));
    }
    enc->EncodeLdr(tmp, false, MemRef(tmp));
    // * card_index = (AddressOf(obj.field) - min_addr) >> CARD_BITS // shift right
    CalculateCardIndex(mem, &tmp, &tmp1);
    // * load AddressOf(CARD_TABLE_ADDR) -> card_table_addr
    if (GetGraph()->IsOfflineCompilationMode()) {
        GetEncoder()->EncodeLdr(tmp1.GetReg().As(INT64_TYPE), false,
                                MemRef(ThreadReg(), runtime->GetTlsCardTableAddrOffset(GetArch())));
    } else {
        auto card_table_addr = reinterpret_cast<uintptr_t>(GetBarrierOperandValue<uint8_t *>(
            runtime, panda::mem::BarrierPosition::BARRIER_POSITION_POST, "CARD_TABLE_ADDR"));
        enc->EncodeMov(tmp1, Imm(card_table_addr));
    }
    // * card_addr = card_table_addr + card_index
    enc->EncodeAdd(tmp, tmp1, tmp);
    // * store card_addr <- DIRTY_VAL
    auto dirty_val =
        GetBarrierOperandValue<uint8_t>(runtime, panda::mem::BarrierPosition::BARRIER_POSITION_POST, "DIRTY_VAL");

    auto tmp1_b = ConvertRegister(tmp1.GetReg().GetId(), DataType::INT8);
    enc->EncodeMov(tmp1_b, Imm(dirty_val));
    enc->EncodeStr(tmp1_b, MemRef(tmp));
}
2220
HasLiveCallerSavedRegs(Inst * inst)2221 bool Codegen::HasLiveCallerSavedRegs(Inst *inst)
2222 {
2223 auto [live_regs, live_fp_regs] = GetLiveRegisters<false>(inst);
2224 live_regs &= GetCallerRegsMask(GetArch(), false);
2225 live_fp_regs &= GetCallerRegsMask(GetArch(), true);
2226 return live_regs.Any() || live_fp_regs.Any();
2227 }
2228
SaveCallerRegisters(RegMask live_regs,VRegMask live_vregs,bool adjust_regs)2229 void Codegen::SaveCallerRegisters(RegMask live_regs, VRegMask live_vregs, bool adjust_regs)
2230 {
2231 SCOPED_DISASM_STR(this, "Save caller saved regs");
2232 auto base = GetFrameInfo()->GetCallersRelativeFp() ? GetTarget().GetFrameReg() : GetTarget().GetStackReg();
2233 live_regs &= ~GetEncoder()->GetAvailableScratchRegisters();
2234 live_vregs &= ~GetEncoder()->GetAvailableScratchFpRegisters();
2235 if (adjust_regs) {
2236 live_regs &= GetRegfile()->GetCallerSavedRegMask();
2237 live_vregs &= GetRegfile()->GetCallerSavedVRegMask();
2238 } else {
2239 live_regs &= GetCallerRegsMask(GetArch(), false);
2240 live_vregs &= GetCallerRegsMask(GetArch(), true);
2241 }
2242 GetEncoder()->SaveRegisters(live_regs, false, GetFrameInfo()->GetCallersOffset(), base,
2243 GetCallerRegsMask(GetArch(), false));
2244 GetEncoder()->SaveRegisters(live_vregs, true, GetFrameInfo()->GetFpCallersOffset(), base,
2245 GetCallerRegsMask(GetArch(), true));
2246 }
2247
LoadCallerRegisters(RegMask live_regs,VRegMask live_vregs,bool adjust_regs)2248 void Codegen::LoadCallerRegisters(RegMask live_regs, VRegMask live_vregs, bool adjust_regs)
2249 {
2250 SCOPED_DISASM_STR(this, "Restore caller saved regs");
2251 auto base = GetFrameInfo()->GetCallersRelativeFp() ? GetTarget().GetFrameReg() : GetTarget().GetStackReg();
2252 live_regs &= ~GetEncoder()->GetAvailableScratchRegisters();
2253 live_vregs &= ~GetEncoder()->GetAvailableScratchFpRegisters();
2254 if (adjust_regs) {
2255 live_regs &= GetRegfile()->GetCallerSavedRegMask();
2256 live_vregs &= GetRegfile()->GetCallerSavedVRegMask();
2257 } else {
2258 live_regs &= GetCallerRegsMask(GetArch(), false);
2259 live_vregs &= GetCallerRegsMask(GetArch(), true);
2260 }
2261 GetEncoder()->LoadRegisters(live_regs, false, GetFrameInfo()->GetCallersOffset(), base,
2262 GetCallerRegsMask(GetArch(), false));
2263 GetEncoder()->LoadRegisters(live_vregs, true, GetFrameInfo()->GetFpCallersOffset(), base,
2264 GetCallerRegsMask(GetArch(), true));
2265 }
2266
RegisterKeepCallArgument(CallInst * call_inst,Reg reg)2267 bool Codegen::RegisterKeepCallArgument(CallInst *call_inst, Reg reg)
2268 {
2269 for (auto i = 0U; i < call_inst->GetInputsCount(); i++) {
2270 auto location = call_inst->GetLocation(i);
2271 if (location.IsRegister() && location.GetValue() == reg.GetId()) {
2272 return true;
2273 }
2274 }
2275 return false;
2276 }
2277
LoadMethod(Reg dst)2278 void Codegen::LoadMethod(Reg dst)
2279 {
2280 ASSERT((CFrameMethod::GetOffsetFromSpInBytes(GetFrameLayout()) -
2281 (GetFrameLayout().GetMethodOffset<CFrameLayout::SP, CFrameLayout::BYTES>())) == 0);
2282 auto sp_offset = CFrameMethod::GetOffsetFromSpInBytes(GetFrameLayout());
2283 auto mem = MemRef(SpReg(), sp_offset);
2284 GetEncoder()->EncodeLdr(dst, false, mem);
2285 }
2286
StoreFreeSlot(Reg src)2287 void Codegen::StoreFreeSlot(Reg src)
2288 {
2289 ASSERT(src.GetSize() <= (GetFrameLayout().GetSlotSize() << 3U));
2290 auto sp_offset = GetFrameLayout().GetFreeSlotOffset<CFrameLayout::SP, CFrameLayout::BYTES>();
2291 auto mem = MemRef(SpReg(), sp_offset);
2292 GetEncoder()->EncodeStr(src, mem);
2293 }
2294
LoadFreeSlot(Reg dst)2295 void Codegen::LoadFreeSlot(Reg dst)
2296 {
2297 ASSERT(dst.GetSize() <= (GetFrameLayout().GetSlotSize() << 3U));
2298 auto sp_offset = GetFrameLayout().GetFreeSlotOffset<CFrameLayout::SP, CFrameLayout::BYTES>();
2299 auto mem = MemRef(SpReg(), sp_offset);
2300 GetEncoder()->EncodeLdr(dst, false, mem);
2301 }
2302
CreateReturn(const Inst * inst)2303 void Codegen::CreateReturn(const Inst *inst)
2304 {
2305 if (GetGraph()->GetMethodProperties().GetLastReturn() == inst) {
2306 GetEncoder()->BindLabel(GetLabelExit());
2307 GenerateEpilogue();
2308 } else {
2309 GetEncoder()->EncodeJump(GetLabelExit());
2310 }
2311 }
2312
// Generator for EncodeVisitor::Visit<opc> of one-operand instructions:
// converts dst/src registers for the instruction's type and emits the
// matching encoder operation.
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define UnaryOperation(opc)                                                  \
    void EncodeVisitor::Visit##opc(GraphVisitor *visitor, Inst *inst)        \
    {                                                                        \
        EncodeVisitor *enc = static_cast<EncodeVisitor *>(visitor);          \
        auto type = inst->GetType();                                         \
        auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);   \
        auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type); \
        enc->GetEncoder()->Encode##opc(dst, src0);                           \
    }
2323
// Generator for EncodeVisitor::Visit<opc> of two-operand instructions:
// converts dst/src0/src1 registers for the instruction's type and emits the
// matching encoder operation.
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BinaryOperation(opc)                                                 \
    void EncodeVisitor::Visit##opc(GraphVisitor *visitor, Inst *inst)        \
    {                                                                        \
        auto type = inst->GetType();                                         \
        EncodeVisitor *enc = static_cast<EncodeVisitor *>(visitor);          \
        auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);   \
        auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type); \
        auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type); \
        enc->GetEncoder()->Encode##opc(dst, src0, src1);                     \
    }
2335
// Generator for EncodeVisitor::Visit<opc>SR of binary instructions whose
// second operand is a shifted register: the shift amount comes from the
// instruction's immediate, masked to the destination width.
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BinaryShiftedRegisterOperation(opc)                                  \
    void EncodeVisitor::Visit##opc##SR(GraphVisitor *visitor, Inst *inst)    \
    {                                                                        \
        auto type = inst->GetType();                                         \
        EncodeVisitor *enc = static_cast<EncodeVisitor *>(visitor);          \
        auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);   \
        auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type); \
        auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type); \
        auto imm_shift_inst = static_cast<BinaryShiftedRegisterOperation *>(inst); \
        auto imm_value = static_cast<uint32_t>(imm_shift_inst->GetImm()) & (dst.GetSize() - 1); \
        auto shift = Shift(src1, imm_shift_inst->GetShiftType(), imm_value); \
        enc->GetEncoder()->Encode##opc(dst, src0, shift);                    \
    }
2350
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(OPCODE, TYPE) TYPE(OPCODE)

// Instantiate the visitor bodies for all math and shifted-operand opcodes
// using the generator macros defined above.
ENCODE_MATH_LIST(INST_DEF)
ENCODE_INST_WITH_SHIFTED_OPERAND(INST_DEF)

#undef UnaryOperation
#undef BinaryOperation
#undef BinaryShiftedRegisterOperation
#undef INST_DEF
2361
2362 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
2363 #define BinaryImmOperation(opc) \
2364 void EncodeVisitor::Visit##opc##I(GraphVisitor *visitor, Inst *inst) \
2365 { \
2366 auto binop = inst->CastTo##opc##I(); \
2367 EncodeVisitor *enc = static_cast<EncodeVisitor *>(visitor); \
2368 auto type = inst->GetType(); \
2369 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); \
2370 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type); \
2371 enc->GetEncoder()->Encode##opc(dst, src0, enc->GetCodegen()->ConvertImm(binop->GetImm(), DataType::INT64)); \
2372 }
2373
2374 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
2375 #define BINARRY_IMM_OPS(DEF) DEF(Add) DEF(Sub) DEF(Shl) DEF(AShr) DEF(And) DEF(Or) DEF(Xor)
2376
2377 BINARRY_IMM_OPS(BinaryImmOperation)
2378
2379 #undef BINARRY_IMM_OPS
2380 #undef BinaryImmOperation
2381
2382 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
2383 #define BinarySignUnsignOperation(opc) \
2384 void EncodeVisitor::Visit##opc(GraphVisitor *visitor, Inst *inst) \
2385 { \
2386 auto type = inst->GetType(); \
2387 EncodeVisitor *enc = static_cast<EncodeVisitor *>(visitor); \
2388 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); \
2389 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type); \
2390 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type); \
2391 auto arch = enc->GetCodegen()->GetArch(); \
2392 if (!Codegen::InstEncodedWithLibCall(inst, arch)) { \
2393 enc->GetEncoder()->Encode##opc(dst, IsTypeSigned(type), src0, src1); \
2394 return; \
2395 } \
2396 ASSERT(arch == Arch::AARCH32); \
2397 if (enc->cg_->GetGraph()->IsAotMode()) { \
2398 enc->GetEncoder()->SetFalseResult(); \
2399 return; \
2400 } \
2401 auto [live_regs, live_vregs] = enc->GetCodegen()->GetLiveRegisters(inst); \
2402 enc->GetEncoder()->SetRegister(&live_regs, &live_vregs, dst, false); \
2403 enc->GetCodegen()->SaveCallerRegisters(live_regs, live_vregs, true); \
2404 enc->GetEncoder()->Encode##opc(dst, IsTypeSigned(type), src0, src1); \
2405 enc->GetCodegen()->LoadCallerRegisters(live_regs, live_vregs, true); \
2406 }
2407
2408 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
2409 #define SIGN_UNSIGN_OPS(DEF) DEF(Div) DEF(Min) DEF(Max)
2410
2411 SIGN_UNSIGN_OPS(BinarySignUnsignOperation)
2412
2413 #undef SIGN_UNSIGN_OPS
2414 #undef BINARY_SIGN_UNSIGN_OPERATION
2415
// Lowers Mod. Three paths:
//  1) the target encoder handles the operation directly (no lib call);
//  2) floating-point mod: call libc fmod/fmodf, preserving live caller-saved
//     registers across the call and converting the ABI return into dst;
//  3) AArch32 integer mod: the encoder path involves a library call (per
//     InstEncodedWithLibCall), so caller-saved registers are saved around it.
void EncodeVisitor::VisitMod(GraphVisitor *visitor, Inst *inst)
{
    auto type = inst->GetType();
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
    auto arch = enc->GetCodegen()->GetArch();
    // Fast path: encoder supports Mod natively for this type/arch.
    if (!Codegen::InstEncodedWithLibCall(inst, arch)) {
        enc->GetEncoder()->EncodeMod(dst, IsTypeSigned(type), src0, src1);
        return;
    }

    if (DataType::IsFloatType(type)) {
        // Pick the float or double flavour of libc fmod.
        RuntimeInterface::IntrinsicId entry =
            ((type == DataType::FLOAT32) ? RuntimeInterface::IntrinsicId::LIB_CALL_FMODF
                                         : RuntimeInterface::IntrinsicId::LIB_CALL_FMOD);
        auto [live_regs, live_vregs] = enc->GetCodegen()->GetLiveRegisters(inst);
        enc->GetCodegen()->SaveCallerRegisters(live_regs, live_vregs, true);

        enc->GetCodegen()->FillCallParams({src0, src1});
        enc->GetCodegen()->CallIntrinsic(inst, entry);

        auto ret_val = enc->GetCodegen()->GetTarget().GetReturnFpReg();
        if (ret_val.GetType().IsFloat()) {
            // ret_val is FLOAT64 for Arm64, AMD64 and ARM32 HardFP, but dst can be FLOAT32
            // so we convert ret_val to FLOAT32
            enc->GetEncoder()->EncodeMov(dst, Reg(ret_val.GetId(), dst.GetType()));
        } else {
            // case for ARM32 SoftFP
            enc->GetEncoder()->EncodeMov(dst,
                                         Reg(ret_val.GetId(), dst.GetSize() == WORD_SIZE ? INT32_TYPE : INT64_TYPE));
        }

        // dst now holds the result; exclude it before restoring caller-saved regs.
        enc->GetEncoder()->SetRegister(&live_regs, &live_vregs, dst, false);
        enc->GetCodegen()->LoadCallerRegisters(live_regs, live_vregs, true);
        return;
    }

    // Integer Mod needing a lib call only happens on AArch32.
    ASSERT(arch == Arch::AARCH32);
    // TODO(pishin) Fix after supporting AOT mode for arm32
    if (enc->cg_->GetGraph()->IsAotMode()) {
        enc->GetEncoder()->SetFalseResult();
        return;
    }
    auto [live_regs, live_vregs] = enc->GetCodegen()->GetLiveRegisters(inst);
    enc->GetEncoder()->SetRegister(&live_regs, &live_vregs, dst, false);
    enc->GetCodegen()->SaveCallerRegisters(live_regs, live_vregs, true);
    enc->GetEncoder()->EncodeMod(dst, IsTypeSigned(type), src0, src1);
    enc->GetCodegen()->LoadCallerRegisters(live_regs, live_vregs, true);
}
2467
VisitShrI(GraphVisitor * visitor,Inst * inst)2468 void EncodeVisitor::VisitShrI(GraphVisitor *visitor, Inst *inst)
2469 {
2470 auto binop = inst->CastToShrI();
2471 auto enc = static_cast<EncodeVisitor *>(visitor);
2472 auto type = inst->GetType();
2473 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2474 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2475 enc->GetEncoder()->EncodeShr(dst, src0, enc->GetCodegen()->ConvertImm(binop->GetImm(), DataType::INT64));
2476 }
2477
VisitMAdd(GraphVisitor * visitor,Inst * inst)2478 void EncodeVisitor::VisitMAdd(GraphVisitor *visitor, Inst *inst)
2479 {
2480 auto type = inst->GetType();
2481 auto enc = static_cast<EncodeVisitor *>(visitor);
2482 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2483 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2484 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2485 constexpr int64_t IMM_2 = 2;
2486 auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), type);
2487 enc->GetEncoder()->EncodeMAdd(dst, src0, src1, src2);
2488 }
2489
VisitMSub(GraphVisitor * visitor,Inst * inst)2490 void EncodeVisitor::VisitMSub(GraphVisitor *visitor, Inst *inst)
2491 {
2492 auto type = inst->GetType();
2493 auto enc = static_cast<EncodeVisitor *>(visitor);
2494 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2495 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2496 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2497 constexpr int64_t IMM_2 = 2;
2498 auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), type);
2499 enc->GetEncoder()->EncodeMSub(dst, src0, src1, src2);
2500 }
2501
VisitMNeg(GraphVisitor * visitor,Inst * inst)2502 void EncodeVisitor::VisitMNeg(GraphVisitor *visitor, Inst *inst)
2503 {
2504 auto type = inst->GetType();
2505 auto enc = static_cast<EncodeVisitor *>(visitor);
2506 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2507 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2508 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2509 enc->GetEncoder()->EncodeMNeg(dst, src0, src1);
2510 }
2511
VisitOrNot(GraphVisitor * visitor,Inst * inst)2512 void EncodeVisitor::VisitOrNot(GraphVisitor *visitor, Inst *inst)
2513 {
2514 auto type = inst->GetType();
2515 auto enc = static_cast<EncodeVisitor *>(visitor);
2516 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2517 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2518 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2519 enc->GetEncoder()->EncodeOrNot(dst, src0, src1);
2520 }
2521
VisitAndNot(GraphVisitor * visitor,Inst * inst)2522 void EncodeVisitor::VisitAndNot(GraphVisitor *visitor, Inst *inst)
2523 {
2524 auto type = inst->GetType();
2525 auto enc = static_cast<EncodeVisitor *>(visitor);
2526 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2527 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2528 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2529 enc->GetEncoder()->EncodeAndNot(dst, src0, src1);
2530 }
2531
VisitXorNot(GraphVisitor * visitor,Inst * inst)2532 void EncodeVisitor::VisitXorNot(GraphVisitor *visitor, Inst *inst)
2533 {
2534 auto type = inst->GetType();
2535 auto enc = static_cast<EncodeVisitor *>(visitor);
2536 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2537 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2538 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2539 enc->GetEncoder()->EncodeXorNot(dst, src0, src1);
2540 }
2541
VisitNegSR(GraphVisitor * visitor,Inst * inst)2542 void EncodeVisitor::VisitNegSR(GraphVisitor *visitor, Inst *inst)
2543 {
2544 auto type = inst->GetType();
2545 auto enc = static_cast<EncodeVisitor *>(visitor);
2546 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2547 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2548 auto imm_shift_inst = static_cast<UnaryShiftedRegisterOperation *>(inst);
2549 enc->GetEncoder()->EncodeNeg(dst, Shift(src, imm_shift_inst->GetShiftType(), imm_shift_inst->GetImm()));
2550 }
2551
// Lowers Cast. Special destinations first (ANY -> tagged value, BOOL ->
// truthiness), then the generic encoder cast; on AArch32 some casts lower to a
// library call (per InstEncodedWithLibCall), so caller-saved registers are
// preserved around the encoder call.
void EncodeVisitor::VisitCast(GraphVisitor *visitor, Inst *inst)
{
    auto enc = static_cast<EncodeVisitor *>(visitor);
    auto src_type = inst->GetInputType(0);
    auto dst_type = inst->GetType();

    // Casting into the dynamic 'any' representation has its own handler.
    if (dst_type == DataType::ANY) {
        CastToAny(visitor, inst);
        return;
    }

    bool src_signed = IsTypeSigned(src_type);
    bool dst_signed = IsTypeSigned(dst_type);

    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), src_type);
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), dst_type);
    if (dst_type == DataType::BOOL) {
        enc->GetEncoder()->EncodeCastToBool(dst, src);
        return;
    }

    auto arch = enc->GetCodegen()->GetArch();
    // Fast path: encoder supports this cast natively.
    if (!Codegen::InstEncodedWithLibCall(inst, arch)) {
        enc->GetEncoder()->EncodeCast(dst, dst_signed, src, src_signed);
        return;
    }
    ASSERT(arch == Arch::AARCH32);
    // TODO(pishin) Fix after supporting AOT mode for arm32
    if (enc->cg_->GetGraph()->IsAotMode()) {
        enc->GetEncoder()->SetFalseResult();
        return;
    }
    // Exclude dst from the save/restore set (it is defined by the cast),
    // then spill live caller-saved registers around the lib-call encoding.
    auto [live_regs, live_vregs] = enc->GetCodegen()->GetLiveRegisters(inst);
    enc->GetEncoder()->SetRegister(&live_regs, &live_vregs, dst, false);
    enc->GetCodegen()->SaveCallerRegisters(live_regs, live_vregs, true);
    enc->GetEncoder()->EncodeCast(dst, dst_signed, src, src_signed);
    enc->GetCodegen()->LoadCallerRegisters(live_regs, live_vregs, true);
}
2590
VisitPhi(GraphVisitor * visitor,Inst * inst)2591 void EncodeVisitor::VisitPhi([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst) {}
2592
CastToAny(GraphVisitor * visitor,Inst * inst)2593 void EncodeVisitor::CastToAny(GraphVisitor *visitor, Inst *inst)
2594 {
2595 auto src_type = inst->GetInputType(0);
2596 int64_t tag;
2597 switch (src_type) {
2598 case DataType::UINT8:
2599 case DataType::INT8:
2600 case DataType::UINT16:
2601 case DataType::INT16:
2602 case DataType::UINT32:
2603 case DataType::INT32:
2604 case DataType::UINT64:
2605 case DataType::INT64:
2606 tag = interpreter::INT;
2607 break;
2608 case DataType::FLOAT64:
2609 tag = interpreter::DOUBLE;
2610 break;
2611 case DataType::REFERENCE:
2612 if (inst->GetInput(0).GetInst()->GetOpcode() == Opcode::LoadString) {
2613 tag = interpreter::STRING;
2614 } else {
2615 tag = interpreter::OBJECT;
2616 }
2617 break;
2618 default:
2619 UNREACHABLE();
2620 }
2621
2622 auto enc = static_cast<EncodeVisitor *>(visitor);
2623 Codegen *codegen = enc->GetCodegen();
2624 Encoder *encoder = enc->GetEncoder();
2625 Reg src_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), src_type);
2626 Reg dst_reg = codegen->ConvertRegister(inst->GetDstReg(), DataType::INT64);
2627 Reg tag_reg = codegen->ConvertRegister(inst->GetDstReg() + 1, DataType::INT64);
2628 if (src_reg.GetId() != dst_reg.GetId()) {
2629 encoder->EncodeMov(dst_reg, src_reg);
2630 }
2631 encoder->EncodeMov(tag_reg, Imm(tag));
2632 }
2633
VisitConstant(GraphVisitor * visitor,Inst * inst)2634 void EncodeVisitor::VisitConstant(GraphVisitor *visitor, Inst *inst)
2635 {
2636 auto *enc = static_cast<EncodeVisitor *>(visitor);
2637 if (inst->GetDstReg() == INVALID_REG) {
2638 return;
2639 }
2640 if (inst->GetDstReg() == enc->cg_->GetGraph()->GetZeroReg()) {
2641 ASSERT(IsZeroConstant(inst));
2642 ASSERT(enc->GetRegfile()->GetZeroReg() != INVALID_REGISTER);
2643 return;
2644 }
2645 auto *const_inst = inst->CastToConstant();
2646 auto type = inst->GetType();
2647 if (enc->cg_->GetGraph()->IsDynamicMethod() && type == DataType::INT64) {
2648 type = DataType::INT32;
2649 }
2650
2651 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2652 Imm imm = enc->GetCodegen()->ConvertImm(const_inst, type);
2653 enc->GetEncoder()->EncodeMov(dst, imm);
2654 }
2655
VisitNullPtr(GraphVisitor * visitor,Inst * inst)2656 void EncodeVisitor::VisitNullPtr(GraphVisitor *visitor, Inst *inst)
2657 {
2658 auto *enc = static_cast<EncodeVisitor *>(visitor);
2659 if (inst->GetDstReg() == enc->cg_->GetGraph()->GetZeroReg()) {
2660 ASSERT_PRINT(enc->GetRegfile()->GetZeroReg() != INVALID_REGISTER,
2661 "NullPtr doesn't have correct destination register");
2662 return;
2663 }
2664 auto type = inst->GetType();
2665 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
2666 Imm imm = enc->GetCodegen()->ConvertImm(static_cast<uint64_t>(0), type);
2667 enc->GetEncoder()->EncodeMov(dst, imm);
2668 }
2669
2670 // Next visitors use calling convention
2671
VisitIndirectJump(GraphVisitor * visitor,Inst * inst)2672 void EncodeVisitor::VisitIndirectJump(GraphVisitor *visitor, Inst *inst)
2673 {
2674 auto *enc = static_cast<EncodeVisitor *>(visitor);
2675 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::POINTER); // pointer
2676 enc->GetEncoder()->EncodeJump(src);
2677 }
2678
VisitCallIndirect(CallIndirectInst * inst)2679 void Codegen::VisitCallIndirect(CallIndirectInst *inst)
2680 {
2681 auto location = inst->GetLocation(0);
2682 ASSERT(location.IsFixedRegister() && location.IsRegisterValid());
2683 auto src = Reg(location.GetValue(), GetTarget().GetPtrRegType());
2684 auto dst = ConvertRegister(inst->GetDstReg(), inst->GetType());
2685
2686 GetEncoder()->MakeCall(src);
2687 if (inst->HasUsers()) {
2688 GetEncoder()->EncodeMov(dst, GetTarget().GetReturnReg(dst.GetType()));
2689 }
2690 }
2691
VisitCall(CallInst * inst)2692 void Codegen::VisitCall(CallInst *inst)
2693 {
2694 ASSERT(GetGraph()->GetRelocationHandler() != nullptr);
2695 ASSERT(!HasLiveCallerSavedRegs(inst));
2696
2697 RelocationInfo relocation;
2698 relocation.data = inst->GetCallMethodId();
2699 GetEncoder()->MakeCall(&relocation);
2700 GetGraph()->GetRelocationHandler()->AddRelocation(relocation);
2701
2702 if (inst->HasUsers()) {
2703 auto dst_reg = ConvertRegister(inst->GetDstReg(), inst->GetType());
2704 ASSERT(dst_reg.IsValid());
2705 GetEncoder()->EncodeMov(dst_reg, GetTarget().GetReturnReg(dst_reg.GetType()));
2706 }
2707 }
2708
VisitCallIndirect(GraphVisitor * visitor,Inst * inst)2709 void EncodeVisitor::VisitCallIndirect(GraphVisitor *visitor, Inst *inst)
2710 {
2711 static_cast<EncodeVisitor *>(visitor)->GetCodegen()->VisitCallIndirect(inst->CastToCallIndirect());
2712 }
2713
VisitCall(GraphVisitor * visitor,Inst * inst)2714 void EncodeVisitor::VisitCall(GraphVisitor *visitor, Inst *inst)
2715 {
2716 static_cast<EncodeVisitor *>(visitor)->GetCodegen()->VisitCall(inst->CastToCall());
2717 }
2718
VisitCompare(GraphVisitor * visitor,Inst * inst)2719 void EncodeVisitor::VisitCompare(GraphVisitor *visitor, Inst *inst)
2720 {
2721 auto *enc = static_cast<EncodeVisitor *>(visitor);
2722
2723 auto type = inst->CastToCompare()->GetOperandsType();
2724 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
2725 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2726 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2727 auto cc = enc->GetCodegen()->ConvertCc(inst->CastToCompare()->GetCc());
2728 if (IsTestCc(cc)) {
2729 enc->GetEncoder()->EncodeCompareTest(dst, src0, src1, cc);
2730 } else {
2731 enc->GetEncoder()->EncodeCompare(dst, src0, src1, cc);
2732 }
2733 }
2734
VisitCmp(GraphVisitor * visitor,Inst * inst)2735 void EncodeVisitor::VisitCmp(GraphVisitor *visitor, Inst *inst)
2736 {
2737 auto *enc = static_cast<EncodeVisitor *>(visitor);
2738
2739 auto cmp_inst = inst->CastToCmp();
2740 auto type = cmp_inst->GetOperandsType();
2741
2742 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
2743 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
2744 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
2745
2746 Condition cc;
2747 if (DataType::IsFloatType(type)) {
2748 // TODO(igorban): check whether MI is fully correct here
2749 cc = cmp_inst->IsFcmpg() ? (Condition::MI) : (Condition::LT);
2750 } else if (IsTypeSigned(type)) {
2751 cc = Condition::LT;
2752 } else {
2753 cc = Condition::LO;
2754 }
2755 enc->GetEncoder()->EncodeCmp(dst, src0, src1, cc);
2756 }
2757
VisitReturnVoid(GraphVisitor * visitor,Inst * inst)2758 void EncodeVisitor::VisitReturnVoid(GraphVisitor *visitor, Inst *inst)
2759 {
2760 auto *enc = static_cast<EncodeVisitor *>(visitor);
2761 if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
2762 enc->GetEncoder()->EncodeMemoryBarrier(MemoryOrder::Release);
2763 }
2764
2765 enc->GetCodegen()->CreateReturn(inst);
2766 }
2767
VisitReturn(GraphVisitor * visitor,Inst * inst)2768 void EncodeVisitor::VisitReturn(GraphVisitor *visitor, Inst *inst)
2769 {
2770 auto *enc = static_cast<EncodeVisitor *>(visitor);
2771 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), inst->GetType());
2772 enc->GetEncoder()->EncodeMov(enc->GetCodegen()->GetTarget().GetReturnReg(src.GetType()), src);
2773
2774 enc->GetCodegen()->CreateReturn(inst);
2775 }
2776
VisitReturnI(GraphVisitor * visitor,Inst * inst)2777 void EncodeVisitor::VisitReturnI(GraphVisitor *visitor, Inst *inst)
2778 {
2779 auto *enc = static_cast<EncodeVisitor *>(visitor);
2780 auto codegen = enc->GetCodegen();
2781 auto rzero = enc->GetRegfile()->GetZeroReg();
2782 int64_t imm_val = inst->CastToReturnI()->GetImm();
2783 Imm imm = codegen->ConvertImmWithExtend(imm_val, inst->GetType());
2784
2785 auto return_reg = codegen->GetTarget().GetReturnReg(codegen->ConvertDataType(inst->GetType(), codegen->GetArch()));
2786
2787 if (imm_val == 0 && codegen->GetTarget().SupportZeroReg() && !DataType::IsFloatType(inst->GetType())) {
2788 enc->GetEncoder()->EncodeMov(return_reg, rzero);
2789 } else {
2790 enc->GetEncoder()->EncodeMov(return_reg, imm);
2791 }
2792
2793 enc->GetCodegen()->CreateReturn(inst);
2794 }
2795
#if defined(EVENT_METHOD_EXIT_ENABLED) && EVENT_METHOD_EXIT_ENABLED != 0
// Finds the inlined call this ReturnInlined belongs to by scanning the users
// of its SaveState; returns nullptr if no inlined call is found.
static CallInst *GetCallInstFromReturnInlined(Inst *return_inlined)
{
    auto *save_state = return_inlined->GetSaveState();
    for (auto &user : save_state->GetUsers()) {
        auto *user_inst = user.GetInst();
        if (!user_inst->IsCall()) {
            continue;
        }
        auto *call = static_cast<CallInst *>(user_inst);
        if (call->IsInlined()) {
            return call;
        }
    }
    return nullptr;
}
#endif
2809
VisitReturnInlined(GraphVisitor * visitor,Inst * inst)2810 void EncodeVisitor::VisitReturnInlined([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
2811 {
2812 auto *enc = static_cast<EncodeVisitor *>(visitor);
2813 if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
2814 enc->GetEncoder()->EncodeMemoryBarrier(MemoryOrder::Release);
2815 }
2816
2817 #if defined(EVENT_METHOD_EXIT_ENABLED) && EVENT_METHOD_EXIT_ENABLED != 0
2818 auto *enc = static_cast<EncodeVisitor *>(visitor);
2819 if (!enc->cg_->GetGraph()->IsAotMode()) {
2820 auto call_inst = GetCallInstFromReturnInlined(inst->CastToReturnInlined());
2821 ASSERT(call_inst != nullptr);
2822 static_cast<EncodeVisitor *>(visitor)->GetCodegen()->InsertTrace(
2823 {Imm(static_cast<size_t>(TraceId::METHOD_EXIT)), Imm(reinterpret_cast<size_t>(call_inst->GetCallMethod())),
2824 Imm(static_cast<size_t>(events::MethodExitKind::INLINED))});
2825 }
2826 #endif
2827 }
2828
VisitLoadConstArray(GraphVisitor * visitor,Inst * inst)2829 void EncodeVisitor::VisitLoadConstArray(GraphVisitor *visitor, Inst *inst)
2830 {
2831 auto *enc = static_cast<EncodeVisitor *>(visitor);
2832 auto method = inst->CastToLoadConstArray()->GetMethod();
2833 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
2834 auto array_type = inst->CastToLoadConstArray()->GetTypeId();
2835
2836 enc->GetCodegen()->CallRuntimeWithMethod(inst, method, EntrypointId::RESOLVE_LITERAL_ARRAY, dst, Imm(array_type));
2837 }
2838
// Copies literal-array data from the panda file into a freshly created array
// object via a memcpy library call. In AOT mode the data address is computed
// at runtime from the method (panda-file base + const-array offset); in JIT
// mode the absolute pointer to the data is baked in as an immediate.
void EncodeVisitor::VisitFillConstArray(GraphVisitor *visitor, Inst *inst)
{
    auto type = inst->GetType();
    ASSERT(type != DataType::REFERENCE);
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto runtime = enc->cg_->GetGraph()->GetRuntime();
    auto array_type = inst->CastToFillConstArray()->GetTypeId();
    auto arch = enc->cg_->GetGraph()->GetArch();
    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);
    auto method = inst->CastToFillConstArray()->GetMethod();
    auto offset = runtime->GetArrayDataOffset(enc->GetCodegen()->GetArch());
    // Byte size of the payload: element count scaled by the element width.
    auto array_size = inst->CastToFillConstArray()->GetImm() << DataType::ShiftByType(type, arch);
    // Destination: address of the array's data area (object + data offset).
    ScopedTmpReg array_reg(encoder, Codegen::ConvertDataType(DataType::GetIntTypeForReference(arch), arch));
    encoder->EncodeAdd(array_reg, src, Imm(offset));

    ASSERT(array_size != 0);

    if (enc->cg_->GetGraph()->IsAotMode()) {
        auto arr_offset = runtime->GetOffsetToConstArrayData(method, array_type);
        auto pf_offset = runtime->GetPandaFileOffset(arch);
        ScopedTmpReg method_reg(encoder);
        enc->GetCodegen()->LoadMethod(method_reg);
        // load pointer to panda file
        encoder->EncodeLdr(method_reg, false, MemRef(method_reg, pf_offset));
        // load pointer to binary file
        encoder->EncodeLdr(method_reg, false, MemRef(method_reg, runtime->GetBinaryFileBaseOffset(enc->GetArch())));
        // Get pointer to array data
        encoder->EncodeAdd(method_reg, method_reg, Imm(arr_offset));
        // call memcpy
        RuntimeInterface::IntrinsicId entry = RuntimeInterface::IntrinsicId::LIB_CALL_MEM_COPY;
        auto [live_regs, live_vregs] = enc->GetCodegen()->GetLiveRegisters(inst);
        enc->GetCodegen()->SaveCallerRegisters(live_regs, live_vregs, true);

        enc->GetCodegen()->FillCallParams({array_reg, method_reg, Imm(array_size)});
        enc->GetCodegen()->CallIntrinsic(inst, entry);
        enc->GetCodegen()->LoadCallerRegisters(live_regs, live_vregs, true);
    } else {
        // JIT: the data pointer is known now and passed as an immediate.
        auto data = runtime->GetPointerToConstArrayData(method, array_type);
        // call memcpy
        RuntimeInterface::IntrinsicId entry = RuntimeInterface::IntrinsicId::LIB_CALL_MEM_COPY;
        auto [live_regs, live_vregs] = enc->GetCodegen()->GetLiveRegisters(inst);
        enc->GetCodegen()->SaveCallerRegisters(live_regs, live_vregs, true);

        enc->GetCodegen()->FillCallParams({array_reg, Imm(data), Imm(array_size)});
        enc->GetCodegen()->CallIntrinsic(inst, entry);
        enc->GetCodegen()->LoadCallerRegisters(live_regs, live_vregs, true);
    }
}
2888
// Allocates a new array. Slow path calls the CREATE_ARRAY entrypoint (used
// when TLAB allocation is unavailable, on AArch32, or when a statically-known
// size exceeds the TLAB limit); otherwise a TLAB allocation entrypoint sized
// by the element width is invoked with only the live parameter registers
// spilled around the call.
void Codegen::VisitNewArray(Inst *inst)
{
    auto method = inst->CastToNewArray()->GetMethod();
    auto dst = ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto src_class = ConvertRegister(inst->GetSrcReg(NewArrayInst::INDEX_CLASS), DataType::POINTER);
    auto src_size = ConvertRegister(inst->GetSrcReg(NewArrayInst::INDEX_SIZE), DataType::Type::INT32);
    auto array_type = inst->CastToNewArray()->GetTypeId();
    auto runtime = GetGraph()->GetRuntime();
    auto encoder = GetEncoder();

    auto max_tlab_size = runtime->GetTLABMaxSize();

    // TODO(msherstennikov): support NewArray fast path for arm32
    if (max_tlab_size == 0 || GetArch() == Arch::AARCH32) {
        CallRuntime(inst, EntrypointId::CREATE_ARRAY, dst, {src_class, src_size});
        if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
            encoder->EncodeMemoryBarrier(MemoryOrder::Release);
        }
        return;
    }

    auto len_inst = inst->GetDataFlowInput(0);
    auto class_array_size = runtime->GetClassArraySize(GetArch());
    uint64_t array_size = 0;
    uint64_t element_size = runtime->GetArrayElementSize(method, array_type);
    uint64_t alignment = runtime->GetTLABAlignment();
    // If the length is a compile-time constant, precompute the allocation size
    // (header + payload, rounded up to the TLAB alignment) and fall back to
    // the runtime call when it cannot fit in a TLAB.
    if (len_inst->GetOpcode() == Opcode::Constant) {
        ASSERT(len_inst->GetType() == DataType::INT64);
        array_size = len_inst->CastToConstant()->GetIntValue() * element_size + class_array_size;
        array_size = (array_size & ~(alignment - 1U)) + ((array_size % alignment) != 0U ? alignment : 0U);
        if (array_size > max_tlab_size) {
            CallRuntime(inst, EntrypointId::CREATE_ARRAY, dst, {src_class, src_size});
            if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
                encoder->EncodeMemoryBarrier(MemoryOrder::Release);
            }
            return;
        }
    }

    // Only the live registers overlapping the first two parameter registers
    // need to be preserved around the entrypoint call.
    auto param_regs {GetLiveRegisters(inst).first};
    param_regs &= GetTarget().GetParamRegsMask(2U);

    SaveCallerRegisters(param_regs, VRegMask(), false);

    // Pick the TLAB allocation entrypoint matching the element width.
    EntrypointId eid;
    switch (element_size) {
        case sizeof(uint8_t):
            eid = EntrypointId::ALLOCATE_ARRAY_TLAB8;
            break;
        case sizeof(uint16_t):
            eid = EntrypointId::ALLOCATE_ARRAY_TLAB16;
            break;
        case sizeof(uint32_t):
            eid = EntrypointId::ALLOCATE_ARRAY_TLAB32;
            break;
        case sizeof(uint64_t):
            eid = EntrypointId::ALLOCATE_ARRAY_TLAB64;
            break;
        default:
            UNREACHABLE();
    }

    FillCallParams({src_class, src_size});
    MemRef entry(ThreadReg(), GetRuntime()->GetEntrypointTlsOffset(GetArch(), eid));
    encoder->MakeCall(entry);
    // The call can trigger GC/deoptimization; record a stack map for it.
    CreateStackMap(inst);

    GetEncoder()->EncodeMov(dst, GetTarget().GetReturnReg(dst.GetType()));
    // dst now holds the result; do not clobber it when restoring.
    param_regs.reset(dst.GetId());
    LoadCallerRegisters(param_regs, VRegMask(), false);

    if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
        encoder->EncodeMemoryBarrier(MemoryOrder::Release);
    }
}
2964
VisitNewArray(GraphVisitor * visitor,Inst * inst)2965 void EncodeVisitor::VisitNewArray(GraphVisitor *visitor, Inst *inst)
2966 {
2967 auto *enc = static_cast<EncodeVisitor *>(visitor);
2968 return enc->GetCodegen()->VisitNewArray(inst);
2969 }
2970
// Emits the move (if any) that places an incoming parameter into its assigned
// location. Dynamic-method parameters beyond the first take a separate path
// that handles optional (not actually passed) arguments.
void EncodeVisitor::VisitParameter(GraphVisitor *visitor, Inst *inst)
{
    /*
        Default register parameters pushed in ir_builder
        In regalloc filled spill/fill parameters part.
    */
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto codegen = enc->GetCodegen();
    auto param_inst = inst->CastToParameter();
    auto sf = param_inst->GetLocationData();

    if (codegen->GetGraph()->GetMode().SupportManagedCode() && codegen->GetGraph()->IsDynamicMethod() &&
        param_inst->GetArgNumber() > 0) {
        // In dynamic methods only the first parameter is mandatory
        // The rest parameters are optional. Actual number of passed parameters is known only
        // in runtime and is located in the 'r1' register.
        // All declared parameters which are not mapped to actual parameters must have
        // special value 'undefined'. That is why we have special handling of parameters
        // for dynamic methods.
        VisitDynamicMethodParameter(visitor, inst);
        return;
    }

    // Parameter already resides where it is expected; nothing to emit.
    if (sf.GetSrc() == sf.GetDst()) {
        return;
    }

    // Reuse the SpillFill lowering machinery for the single src->dst move.
    auto tmp_sf = codegen->GetGraph()->CreateInstSpillFill();
    tmp_sf->AddSpillFill(sf);
    SpillFillEncoder(codegen, tmp_sf).EncodeSpillFill();
}
3002
// Handles an optional parameter of a dynamic method: if the runtime argument
// count (in the second parameter register) covers this argument's index, the
// passed value is moved into place; otherwise the 'undefined' primitive is
// stored instead.
void EncodeVisitor::VisitDynamicMethodParameter(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto param_inst = inst->CastToParameter();
    ASSERT(param_inst->GetType() == DataType::ANY);
    Codegen *codegen = enc->GetCodegen();
    Encoder *encoder = enc->GetEncoder();

    // NOTE(review): the disasm label reads "VisitParameter" — presumably kept
    // from the generic handler; confirm whether it should name this function.
    SCOPED_DISASM_STR(codegen, "VisitParameter");

    auto sf = param_inst->GetLocationData();
    ASSERT(sf.DstValue() != INVALID_REG);
    // Second param register holds the actual number of passed arguments.
    Reg num_actual_args_reg = enc->GetCodegen()->GetTarget().GetParamReg(1);

    // if (num_actual_args <= arg_number) { not passed } else { passed }
    LabelHolder::LabelId else_label = encoder->CreateLabel();
    LabelHolder::LabelId end_label = encoder->CreateLabel();
    encoder->EncodeJump(else_label, num_actual_args_reg, Imm(param_inst->GetArgNumber()), Condition::LE);
    HandleDynParamPassed(sf, enc);
    encoder->EncodeJump(end_label);
    encoder->BindLabel(else_label);
    HandleDynParamNotPassed(sf, enc);
    encoder->BindLabel(end_label);
}
3026
HandleDynParamPassed(const SpillFillData & sf,EncodeVisitor * enc)3027 void EncodeVisitor::HandleDynParamPassed(const SpillFillData &sf, EncodeVisitor *enc)
3028 {
3029 if (sf.GetSrc() == sf.GetDst()) {
3030 return;
3031 }
3032 Codegen *codegen = enc->GetCodegen();
3033 Encoder *encoder = enc->GetEncoder();
3034
3035 if (sf.GetSrc().IsRegister()) {
3036 auto src_reg = codegen->ConvertRegister(sf.SrcValue(), sf.GetType());
3037 if (sf.GetDst().IsRegister()) { // param_reg -> dst_reg
3038 auto dst_reg = codegen->ConvertRegister(sf.DstValue(), DataType::ANY);
3039 encoder->EncodeMov(dst_reg, src_reg);
3040 } else {
3041 ASSERT(sf.GetDst().IsAnyStack()); // param_reg -> push to slot
3042 auto dst_mem = codegen->GetMemRefForSlot(sf.GetDst());
3043 encoder->EncodeStrz(src_reg, dst_mem);
3044 }
3045 return;
3046 }
3047
3048 ASSERT(sf.GetSrc().IsAnyStack());
3049 auto src_mem = codegen->GetMemRefForSlot(sf.GetSrc());
3050 if (sf.GetDst().IsRegister()) { // load from stack -> dst_reg
3051 auto dst_reg = codegen->ConvertRegister(sf.DstValue(), sf.GetType());
3052 encoder->EncodeLdr(dst_reg, false, src_mem);
3053 } else {
3054 ASSERT(sf.GetDst().IsAnyStack()); // load from stack -> push to slot
3055 auto dst_mem = codegen->GetMemRefForSlot(sf.GetDst());
3056 auto type_info = Codegen::ConvertDataType(sf.GetType(), codegen->GetArch());
3057 encoder->EncodeMemCopyz(src_mem, dst_mem, type_info.GetSize());
3058 }
3059 }
3060
HandleDynParamNotPassed(const SpillFillData & sf,EncodeVisitor * enc)3061 void EncodeVisitor::HandleDynParamNotPassed(const SpillFillData &sf, EncodeVisitor *enc)
3062 {
3063 Codegen *codegen = enc->GetCodegen();
3064 Encoder *encoder = enc->GetEncoder();
3065 auto value = enc->cg_->GetGraph()->GetRuntime()->GetDynamicPrimitiveUndefined();
3066
3067 if (sf.GetDst().IsRegister()) {
3068 Reg dst_val_reg = codegen->ConvertRegister(sf.DstValue(), DataType::INT64);
3069 encoder->EncodeMov(dst_val_reg, Imm(value));
3070 return;
3071 }
3072
3073 ASSERT(sf.GetDst().IsAnyStack());
3074 auto dest_val = codegen->GetMemRefForSlot(sf.GetDst());
3075 ScopedTmpReg reg(encoder, INT64_TYPE);
3076 encoder->EncodeMov(reg, Imm(value));
3077 encoder->EncodeStr(reg, dest_val);
3078 }
3079
// Stores a value into an array element: address = array + data_offset +
// (index << scale). Pre/post write barriers are emitted around the store when
// the instruction requires them, and the store may serve as an implicit null
// check (its offset is recorded for the fault handler).
void EncodeVisitor::VisitStoreArray(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // array
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::INT32);      // index
    constexpr int64_t IMM_2 = 2;
    auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), inst->GetType());  // store value
    int32_t offset = enc->cg_->GetGraph()->GetRuntime()->GetArrayDataOffset(enc->GetCodegen()->GetArch());
    int32_t scale = DataType::ShiftByType(inst->GetType(), enc->GetCodegen()->GetArch());

    // Temp must stay live across the (possible) barrier calls below.
    ScopedLiveTmpReg tmp(enc->GetEncoder());

    auto object_header_mem = MemRef(src0);
    // tmp = base of the array's data area.
    enc->GetEncoder()->EncodeAdd(tmp, src0, Imm(offset));
    auto mem = MemRef(tmp, src1, scale);
    if (inst->CastToStoreArray()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePreWRB(inst, mem);
    }
    // Record the store's code offset so it can act as an implicit null check.
    auto prev_offset = enc->GetEncoder()->GetCursorOffset();
    enc->GetEncoder()->EncodeStr(src2, mem);
    enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
    if (inst->CastToStoreArray()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src2, INVALID_REGISTER);
    }
}
3105
VisitSpillFill(GraphVisitor * visitor,Inst * inst)3106 void EncodeVisitor::VisitSpillFill(GraphVisitor *visitor, Inst *inst)
3107 {
3108 auto codegen = static_cast<EncodeVisitor *>(visitor)->GetCodegen();
3109 SpillFillEncoder(codegen, inst).EncodeSpillFill();
3110 }
3111
VisitSaveState(GraphVisitor * visitor,Inst * inst)3112 void EncodeVisitor::VisitSaveState([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
3113 {
3114 // Nothing to do, SaveState is processed in its users.
3115 }
3116
VisitSaveStateDeoptimize(GraphVisitor * visitor,Inst * inst)3117 void EncodeVisitor::VisitSaveStateDeoptimize([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
3118 {
3119 // Nothing to do, SaveStateDeoptimize is processed in its users.
3120 }
3121
VisitSaveStateOsr(GraphVisitor * visitor,Inst * inst)3122 void EncodeVisitor::VisitSaveStateOsr(GraphVisitor *visitor, Inst *inst)
3123 {
3124 static_cast<EncodeVisitor *>(visitor)->GetCodegen()->CreateOsrEntry(inst->CastToSaveStateOsr());
3125 }
3126
VisitLoadArray(GraphVisitor * visitor,Inst * inst)3127 void EncodeVisitor::VisitLoadArray(GraphVisitor *visitor, Inst *inst)
3128 {
3129 auto inst_load_array = inst->CastToLoadArray();
3130 auto enc = static_cast<EncodeVisitor *>(visitor);
3131 auto runtime = enc->cg_->GetGraph()->GetRuntime();
3132 ASSERT(inst_load_array->IsArray() || !runtime->IsCompressedStringsEnabled());
3133 if (static_cast<LoadInst *>(inst)->GetNeedBarrier()) {
3134 // !TODO Ishin Pavel inserts barriers for GC
3135 }
3136 auto type = inst->GetType();
3137 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // array
3138 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::INT32); // index
3139 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); // load value
3140 int32_t offset = inst_load_array->IsArray() ? runtime->GetArrayDataOffset(enc->GetCodegen()->GetArch())
3141 : runtime->GetStringDataOffset(enc->GetArch());
3142 auto encoder = enc->GetEncoder();
3143 auto arch = encoder->GetArch();
3144 int32_t shift = DataType::ShiftByType(type, arch);
3145 ScopedTmpReg scoped_tmp(encoder, Codegen::ConvertDataType(DataType::GetIntTypeForReference(arch), arch));
3146 auto tmp = scoped_tmp.GetReg();
3147 encoder->EncodeAdd(tmp, src0, Imm(offset));
3148 auto mem = MemRef(tmp, src1, shift);
3149 auto prev_offset = enc->GetEncoder()->GetCursorOffset();
3150 encoder->EncodeLdr(dst, IsTypeSigned(type), mem);
3151 enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
3152 }
3153
/// Load a single character from a (possibly compressed) string.
/// The string length input lets the encoder select between the compressed
/// (1 byte per char) and uncompressed layout at runtime.
void EncodeVisitor::VisitLoadCompressedStringChar(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto runtime = enc->cg_->GetGraph()->GetRuntime();
    auto type = inst->GetType();
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // string
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::INT32);      // index
    auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(2), DataType::INT32);      // length
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);                   // loaded char
    int32_t offset = runtime->GetStringDataOffset(enc->GetArch());
    auto encoder = enc->GetEncoder();
    auto arch = encoder->GetArch();
    int32_t shift = DataType::ShiftByType(type, arch);
    // Pointer-sized scratch register for the address arithmetic done by the encoder.
    ScopedTmpReg scoped_tmp(encoder, Codegen::ConvertDataType(DataType::GetIntTypeForReference(arch), arch));
    auto tmp = scoped_tmp.GetReg();

    ASSERT(encoder->CanEncodeCompressedStringCharAt());
    auto mask = runtime->GetStringCompressionMask();
    if (mask != 1) {
        UNREACHABLE();  // mask is hardcoded in JCL, but verify it just in case it's changed
    }
    enc->GetEncoder()->EncodeCompressedStringCharAt(dst, src0, src1, src2, tmp, offset, shift);
}
3177
VisitLenArray(GraphVisitor * visitor,Inst * inst)3178 void EncodeVisitor::VisitLenArray(GraphVisitor *visitor, Inst *inst)
3179 {
3180 auto *enc = static_cast<EncodeVisitor *>(visitor);
3181 auto type = inst->GetType();
3182 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // array
3183 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); // len array
3184
3185 auto len_array_inst = inst->CastToLenArray();
3186 auto runtime = enc->cg_->GetGraph()->GetRuntime();
3187 int64_t offset = len_array_inst->IsArray() ? runtime->GetArrayLengthOffset(enc->GetCodegen()->GetArch())
3188 : runtime->GetStringLengthOffset(enc->GetArch());
3189 auto mem = MemRef(src0, offset);
3190
3191 auto prev_offset = enc->GetEncoder()->GetCursorOffset();
3192 enc->GetEncoder()->EncodeLdr(dst, IsTypeSigned(type), mem);
3193 enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
3194 }
3195
VisitBuiltin(GraphVisitor * visitor,Inst * inst)3196 void EncodeVisitor::VisitBuiltin([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
3197 {
3198 UNREACHABLE();
3199 }
3200
VisitNullCheck(GraphVisitor * visitor,Inst * inst)3201 void EncodeVisitor::VisitNullCheck(GraphVisitor *visitor, Inst *inst)
3202 {
3203 if (inst->CastToNullCheck()->IsImplicit()) {
3204 return;
3205 }
3206 auto *enc = static_cast<EncodeVisitor *>(visitor);
3207 ASSERT(inst->GetInput(1).GetInst()->GetOpcode() == Opcode::SaveState ||
3208 inst->GetInput(1).GetInst()->GetOpcode() == Opcode::SaveStateDeoptimize);
3209
3210 auto slow_path =
3211 enc->GetCodegen()->CreateSlowPath<SlowPathImplicitNullCheck>(inst, EntrypointId::NULL_POINTER_EXCEPTION);
3212 auto src_type = inst->GetInputType(0);
3213 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), src_type);
3214 enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), src, Condition::EQ);
3215 }
3216
VisitBoundsCheck(GraphVisitor * visitor,Inst * inst)3217 void EncodeVisitor::VisitBoundsCheck(GraphVisitor *visitor, Inst *inst)
3218 {
3219 auto *enc = static_cast<EncodeVisitor *>(visitor);
3220 auto len_type = inst->GetInputType(0);
3221 auto len = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), len_type);
3222 auto index_type = inst->GetInputType(1);
3223 auto index = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), index_type);
3224 [[maybe_unused]] constexpr int64_t IMM_2 = 2;
3225 ASSERT(inst->GetInput(IMM_2).GetInst()->GetOpcode() == Opcode::SaveState);
3226
3227 EntrypointId entrypoint = inst->CastToBoundsCheck()->IsArray() ? EntrypointId::ARRAY_INDEX_OUT_OF_BOUNDS_EXCEPTION
3228 : EntrypointId::STRING_INDEX_OUT_OF_BOUNDS_EXCEPTION;
3229
3230 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, entrypoint);
3231 enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), index, len, Condition::HS);
3232 }
3233
/// Verify that a reference being stored into an array is compatible with the
/// array's component type; incompatible stores go through the
/// CHECK_STORE_ARRAY_REFERENCE entrypoint.
void EncodeVisitor::VisitRefTypeCheck(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto array_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);
    auto ref_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    [[maybe_unused]] constexpr int64_t IMM_2 = 2;
    ASSERT(inst->GetInput(IMM_2).GetInst()->GetOpcode() == Opcode::SaveState);
    auto runtime = enc->cg_->GetGraph()->GetRuntime();

    auto slow_path =
        enc->GetCodegen()->CreateSlowPath<SlowPathRefCheck>(inst, EntrypointId::CHECK_STORE_ARRAY_REFERENCE);

    slow_path->SetRegs(array_reg, ref_reg);
    slow_path->CreateBackLabel(encoder);

    // Storing nullptr is always allowed: skip the whole check in that case.
    encoder->EncodeJump(slow_path->GetBackLabel(), ref_reg, Condition::EQ);

    ScopedTmpReg tmp_reg(encoder, Codegen::ConvertDataType(DataType::REFERENCE, enc->GetCodegen()->GetArch()));
    ScopedTmpReg tmp_reg1(encoder, Codegen::ConvertDataType(DataType::REFERENCE, enc->GetCodegen()->GetArch()));

    // Get the Class of the array object
    enc->GetCodegen()->LoadClassFromObject(tmp_reg, array_reg);
    // Get the component (element) Class from the array class
    encoder->EncodeLdr(tmp_reg, false, MemRef(tmp_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Get the Class of the stored object
    enc->GetCodegen()->LoadClassFromObject(tmp_reg1, ref_reg);

    // Fast path: classes are identical, nothing more to check. Otherwise call
    // CheckStoreArrayReference for the full assignability test; if the type is
    // wrong, that entrypoint does not return and deoptimizes instead.
    encoder->EncodeJump(slow_path->GetLabel(), tmp_reg, tmp_reg1, Condition::NE);

    slow_path->BindBackLabel(encoder);
}
3269
VisitZeroCheck(GraphVisitor * visitor,Inst * inst)3270 void EncodeVisitor::VisitZeroCheck(GraphVisitor *visitor, Inst *inst)
3271 {
3272 auto *enc = static_cast<EncodeVisitor *>(visitor);
3273 auto src_type = inst->GetInputType(0);
3274 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), src_type);
3275 ASSERT(inst->GetInput(1).GetInst()->GetOpcode() == Opcode::SaveState);
3276
3277 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, EntrypointId::ARITHMETIC_EXCEPTION);
3278 enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), src, Condition::EQ);
3279 }
3280
VisitNegativeCheck(GraphVisitor * visitor,Inst * inst)3281 void EncodeVisitor::VisitNegativeCheck(GraphVisitor *visitor, Inst *inst)
3282 {
3283 auto *enc = static_cast<EncodeVisitor *>(visitor);
3284 auto src_type = inst->GetInputType(0);
3285 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), src_type);
3286 ASSERT(inst->GetInput(1).GetInst()->GetOpcode() == Opcode::SaveState);
3287
3288 auto slow_path =
3289 enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, EntrypointId::NEGATIVE_ARRAY_SIZE_EXCEPTION);
3290 enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), src, Condition::LT);
3291 }
3292
VisitDeoptimizeIf(GraphVisitor * visitor,Inst * inst)3293 void EncodeVisitor::VisitDeoptimizeIf(GraphVisitor *visitor, Inst *inst)
3294 {
3295 auto *enc = static_cast<EncodeVisitor *>(visitor);
3296 auto src_type = inst->GetInput(0).GetInst()->GetType();
3297 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), src_type);
3298
3299 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, EntrypointId::DEOPTIMIZE);
3300
3301 // create jump to slow path if src is true
3302 enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), src, Condition::NE);
3303 }
3304
VisitDeoptimizeCompare(GraphVisitor * visitor,Inst * inst)3305 void EncodeVisitor::VisitDeoptimizeCompare(GraphVisitor *visitor, Inst *inst)
3306 {
3307 auto *enc = static_cast<EncodeVisitor *>(visitor);
3308 auto deopt = inst->CastToDeoptimizeCompare();
3309 ASSERT(deopt->GetOperandsType() != DataType::NO_TYPE);
3310 auto src0 = enc->GetCodegen()->ConvertRegister(deopt->GetSrcReg(0), deopt->GetOperandsType());
3311 auto src1 = enc->GetCodegen()->ConvertRegister(deopt->GetSrcReg(1), deopt->GetOperandsType());
3312 auto slowPath = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, EntrypointId::DEOPTIMIZE);
3313 enc->GetEncoder()->EncodeJump(slowPath->GetLabel(), src0, src1, enc->GetCodegen()->ConvertCc(deopt->GetCc()));
3314 }
3315
/// Deoptimize when the register-immediate comparison holds.
/// Comparisons against zero get dedicated fast encodings: sign-bit tests for
/// signed LT/GE, and a zero-register compare where the target supports one.
void EncodeVisitor::VisitDeoptimizeCompareImm(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto deopt = inst->CastToDeoptimizeCompareImm();
    ASSERT(deopt->GetOperandsType() != DataType::NO_TYPE);
    auto cc = deopt->GetCc();
    auto src0 = enc->GetCodegen()->ConvertRegister(deopt->GetSrcReg(0), deopt->GetOperandsType());
    auto slowPath = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, EntrypointId::DEOPTIMIZE);

    if (deopt->GetImm() == 0) {
        Arch arch = enc->GetCodegen()->GetArch();
        DataType::Type type = deopt->GetInput(0).GetInst()->GetType();
        ASSERT(!IsFloatType(type));
        // For signed operands, "x < 0" / "x >= 0" reduce to testing the sign bit,
        // which encodes as a single bit-test-and-branch.
        if (IsTypeSigned(type) && (cc == ConditionCode::CC_LT || cc == ConditionCode::CC_GE)) {
            auto signBit = GetTypeSize(type, arch) - 1;
            if (cc == ConditionCode::CC_LT) {
                // x < 0: branch when the sign bit is set
                encoder->EncodeBitTestAndBranch(slowPath->GetLabel(), src0, signBit, true);
                return;
            }
            if (cc == ConditionCode::CC_GE) {
                // x >= 0: branch when the sign bit is clear
                encoder->EncodeBitTestAndBranch(slowPath->GetLabel(), src0, signBit, false);
                return;
            }
        }
        // Other comparisons against zero: prefer the hardware zero register
        // where available, otherwise materialize a zero immediate.
        if (enc->GetCodegen()->GetTarget().SupportZeroReg()) {
            auto zreg = enc->GetRegfile()->GetZeroReg();
            encoder->EncodeJump(slowPath->GetLabel(), src0, zreg, enc->GetCodegen()->ConvertCc(cc));
        } else {
            Imm imm = enc->GetCodegen()->ConvertImm(UINT64_C(0), type);
            encoder->EncodeJump(slowPath->GetLabel(), src0, imm, enc->GetCodegen()->ConvertCc(cc));
        }
        return;
    }
    // General case: compare against the non-zero immediate.
    encoder->EncodeJump(slowPath->GetLabel(), src0, Imm(deopt->GetImm()), enc->GetCodegen()->ConvertCc(cc));
}
3354
/// Resolve a string literal into dst. Three strategies:
///  1) AOT with the LoadStringPlt option: read the string from an AOT table
///     slot and fall back to a counting RESOLVE_STRING_AOT slow path while the
///     slot still holds a counter below the resolution threshold;
///  2) JIT where the runtime already interned a non-movable string: embed the
///     pointer directly;
///  3) otherwise: plain RESOLVE_STRING runtime call.
void EncodeVisitor::VisitLoadString(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto method = inst->CastToLoadString()->GetMethod();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto string_type = inst->CastToLoadString()->GetTypeId();
    auto graph = enc->cg_->GetGraph();
    auto encoder = enc->GetEncoder();
    ASSERT(inst->IsRuntimeCall());

    // Static constructor invoked only once, so there is no sense in replacing regular
    // ResolveString runtime call with optimized version that will only slow down constructor's execution.
    auto is_cctor = graph->GetRuntime()->IsMethodStaticConstructor(method);
    object_pointer_type string_ptr {0};

    if (graph->IsAotMode() && options.IsCompilerAotLoadStringPlt() && !is_cctor) {
        auto aot_data = graph->GetAotData();
        intptr_t slot_offset = aot_data->GetStringSlotOffset(encoder->GetCursorOffset(), string_type);
        ScopedTmpRegU64 addr_reg(encoder);
        ScopedTmpRegU64 tmp_dst(encoder);
        // addr_reg <- address of the AOT slot, tmp_dst <- its current content.
        encoder->MakeLoadAotTableAddr(slot_offset, addr_reg, tmp_dst);

        auto slow_path =
            enc->GetCodegen()->CreateSlowPath<SlowPathResolveStringAot>(inst, EntrypointId::RESOLVE_STRING_AOT);
        slow_path->SetDstReg(dst);
        slow_path->SetAddrReg(addr_reg);
        slow_path->SetStringId(string_type);
        slow_path->SetMethod(method);
        // While the slot value is below the counter limit it is still a counter,
        // not a resolved string pointer -> take the slow path.
        encoder->EncodeJump(slow_path->GetLabel(), tmp_dst, Imm(RuntimeInterface::RESOLVE_STRING_AOT_COUNTER_LIMIT),
                            Condition::LT);
        encoder->EncodeMov(dst, Reg(tmp_dst.GetReg().GetId(), dst.GetType()));
        slow_path->BindBackLabel(encoder);
    } else if (!graph->IsAotMode() &&
               (string_ptr = graph->GetRuntime()->GetNonMovableString(method, string_type)) != 0) {
        // JIT: the string is interned and non-movable, embed the raw pointer.
        encoder->EncodeMov(dst, Imm(string_ptr));
        EVENT_JIT_USE_RESOLVED_STRING(graph->GetRuntime()->GetMethodName(method), string_type);
    } else {
        enc->GetCodegen()->CallRuntimeWithMethod(inst, method, EntrypointId::RESOLVE_STRING, dst, Imm(string_type));
    }
}
3395
VisitLoadObject(GraphVisitor * visitor,Inst * inst)3396 void EncodeVisitor::VisitLoadObject(GraphVisitor *visitor, Inst *inst)
3397 {
3398 auto *enc = static_cast<EncodeVisitor *>(visitor);
3399 auto load_obj = inst->CastToLoadObject();
3400 if (load_obj->GetNeedBarrier()) {
3401 // !TODO Ishin Pavel inserts barriers for GC
3402 }
3403 auto type = inst->GetType();
3404 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // obj
3405 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); // load value
3406
3407 auto graph = enc->cg_->GetGraph();
3408 auto field = load_obj->GetObjField();
3409 auto offset = graph->GetRuntime()->GetFieldOffset(field);
3410 auto mem = MemRef(src, offset);
3411 auto prev_offset = enc->GetEncoder()->GetCursorOffset();
3412 if (load_obj->GetVolatile()) {
3413 enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), mem);
3414 } else {
3415 enc->GetEncoder()->EncodeLdr(dst, IsTypeSigned(type), mem);
3416 }
3417 enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
3418 }
3419
/// Load an instance field whose offset is unknown at compile time.
/// AOT: always ask the runtime for the offset. JIT: cache the offset in an
/// unresolved-types table slot and call the runtime only while the slot is
/// still zero. Since volatility is unknown, the load is always acquire.
void EncodeVisitor::VisitUnresolvedLoadObject(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto load_obj = inst->CastToUnresolvedLoadObject();
    if (load_obj->GetNeedBarrier()) {
        // !TODO Ishin Pavel inserts barriers for GC
    }
    auto type = inst->GetType();
    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);                  // load value

    auto type_id = load_obj->GetTypeId();
    auto method = load_obj->GetMethod();
    ScopedTmpReg tmp_reg(enc->GetEncoder());
    if (graph->IsAotMode()) {
        // TODO(zhroma): consider caching ?
        // tmp_reg <- field offset returned by the runtime.
        enc->GetCodegen()->CallRuntimeWithMethod(inst, method, EntrypointId::GET_FIELD_OFFSET, tmp_reg, Imm(type_id));
        enc->GetEncoder()->EncodeAdd(tmp_reg, src, tmp_reg);
        // Unknown load, assume it can be volatile
        enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), MemRef(tmp_reg));
    } else {
        auto skind = UnresolvedTypesInterface::SlotKind::FIELD;
        auto field_offset_addr = graph->GetRuntime()->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);

        auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, EntrypointId::GET_FIELD_OFFSET);
        slow_path->SetUnresolvedType(method, type_id);
        slow_path->SetSlotAddr(field_offset_addr);
        // tmp_reg <- cached offset from the table slot; zero means "not resolved yet".
        enc->GetEncoder()->EncodeMov(tmp_reg, Imm(field_offset_addr));
        enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(tmp_reg));
        slow_path->SetDstReg(tmp_reg);
        enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp_reg, Condition::EQ);
        slow_path->BindBackLabel(enc->GetEncoder());
        // tmp_reg now holds the field offset (from cache or the slow path).
        enc->GetEncoder()->EncodeAdd(tmp_reg, src, tmp_reg);
        enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), MemRef(tmp_reg));
    }
}
3457
VisitLoad(GraphVisitor * visitor,Inst * inst)3458 void EncodeVisitor::VisitLoad(GraphVisitor *visitor, Inst *inst)
3459 {
3460 auto *enc = static_cast<EncodeVisitor *>(visitor);
3461 auto load_by_offset = inst->CastToLoad();
3462
3463 auto type = inst->GetType();
3464 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0U), DataType::POINTER); // pointer
3465 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1U), DataType::UINT32); // offset
3466 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); // load value
3467
3468 auto mem = MemRef(src0, src1, load_by_offset->GetScale());
3469
3470 if (load_by_offset->GetVolatile()) {
3471 enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), mem);
3472 } else {
3473 enc->GetEncoder()->EncodeLdr(dst, IsTypeSigned(type), mem);
3474 }
3475 }
3476
VisitLoadI(GraphVisitor * visitor,Inst * inst)3477 void EncodeVisitor::VisitLoadI(GraphVisitor *visitor, Inst *inst)
3478 {
3479 auto *enc = static_cast<EncodeVisitor *>(visitor);
3480 auto load_by_offset = inst->CastToLoadI();
3481
3482 auto type = inst->GetType();
3483 auto base = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0U), DataType::POINTER); // pointer
3484 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); // load value
3485
3486 if (load_by_offset->GetVolatile()) {
3487 enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), MemRef(base, load_by_offset->GetImm()));
3488 } else {
3489 enc->GetEncoder()->EncodeLdr(dst, IsTypeSigned(type), MemRef(base, load_by_offset->GetImm()));
3490 }
3491 }
3492
VisitStoreI(GraphVisitor * visitor,Inst * inst)3493 void EncodeVisitor::VisitStoreI(GraphVisitor *visitor, Inst *inst)
3494 {
3495 auto *enc = static_cast<EncodeVisitor *>(visitor);
3496 auto store_inst = inst->CastToStoreI();
3497
3498 auto base = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0U), DataType::POINTER);
3499 auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1U), inst->GetType());
3500 auto object_header_mem = MemRef(base);
3501 auto mem = MemRef(base, store_inst->GetImm());
3502 if (inst->CastToStoreI()->GetNeedBarrier()) {
3503 enc->GetCodegen()->CreatePreWRB(inst, mem);
3504 }
3505
3506 if (store_inst->GetVolatile()) {
3507 enc->GetEncoder()->EncodeStrRelease(src, mem);
3508 } else {
3509 enc->GetEncoder()->EncodeStr(src, mem);
3510 }
3511
3512 if (inst->CastToStoreI()->GetNeedBarrier()) {
3513 enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src, INVALID_REGISTER);
3514 }
3515 }
3516
VisitStoreObject(GraphVisitor * visitor,Inst * inst)3517 void EncodeVisitor::VisitStoreObject(GraphVisitor *visitor, Inst *inst)
3518 {
3519 auto *enc = static_cast<EncodeVisitor *>(visitor);
3520 auto store_obj = inst->CastToStoreObject();
3521 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // obj
3522 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), inst->GetType()); // store value
3523
3524 auto graph = enc->cg_->GetGraph();
3525 auto field = store_obj->GetObjField();
3526 auto offset = graph->GetRuntime()->GetFieldOffset(field);
3527 auto object_header_mem = MemRef(src0);
3528 auto mem = MemRef(src0, offset);
3529 if (inst->CastToStoreObject()->GetNeedBarrier()) {
3530 enc->GetCodegen()->CreatePreWRB(inst, mem);
3531 }
3532 auto prev_offset = enc->GetEncoder()->GetCursorOffset();
3533 if (store_obj->GetVolatile()) {
3534 enc->GetEncoder()->EncodeStrRelease(src1, mem);
3535 } else {
3536 enc->GetEncoder()->EncodeStr(src1, mem);
3537 }
3538 enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
3539 if (inst->CastToStoreObject()->GetNeedBarrier()) {
3540 enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src1, INVALID_REGISTER);
3541 }
3542 }
3543
/// Store to an instance field whose offset is unknown at compile time.
/// AOT: always ask the runtime for the offset. JIT: cache the offset in an
/// unresolved-types table slot and call the runtime only while the slot is
/// still zero. Since volatility is unknown, the store is always release.
void EncodeVisitor::VisitUnresolvedStoreObject(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto store_obj = inst->CastToUnresolvedStoreObject();
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), inst->GetType());      // store value

    auto type_id = store_obj->GetTypeId();
    auto method = store_obj->GetMethod();
    // Live across the runtime call below, so it must survive spills.
    ScopedLiveTmpReg tmp_reg(enc->GetEncoder());
    if (graph->IsAotMode()) {
        // TODO(zhroma): consider caching ?
        // tmp_reg <- field offset returned by the runtime.
        enc->GetCodegen()->CallRuntimeWithMethod(inst, method, RuntimeInterface::EntrypointId::GET_FIELD_OFFSET,
                                                 tmp_reg, Imm(type_id));
    } else {
        auto skind = UnresolvedTypesInterface::SlotKind::FIELD;
        auto field_offset_addr = graph->GetRuntime()->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);
        // tmp_reg <- cached offset from the table slot; zero means "not resolved yet".
        enc->GetEncoder()->EncodeMov(tmp_reg, Imm(field_offset_addr));
        enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(tmp_reg));

        auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, EntrypointId::GET_FIELD_OFFSET);
        slow_path->SetUnresolvedType(method, type_id);
        slow_path->SetDstReg(tmp_reg);
        slow_path->SetSlotAddr(field_offset_addr);

        enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp_reg, Condition::EQ);
        slow_path->BindBackLabel(enc->GetEncoder());
    }

    // tmp_reg now holds the field offset; turn it into an absolute address.
    enc->GetEncoder()->EncodeAdd(tmp_reg, src0, tmp_reg);
    auto mem = MemRef(tmp_reg);
    auto object_header_mem = MemRef(src0);
    if (inst->CastToUnresolvedStoreObject()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePreWRB(inst, mem);
    }
    // Unknown store, assume it can be volatile
    enc->GetEncoder()->EncodeStrRelease(src1, mem);
    if (inst->CastToUnresolvedStoreObject()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src1, INVALID_REGISTER);
    }
}
3586
VisitStore(GraphVisitor * visitor,Inst * inst)3587 void EncodeVisitor::VisitStore(GraphVisitor *visitor, Inst *inst)
3588 {
3589 auto *enc = static_cast<EncodeVisitor *>(visitor);
3590 auto store_by_offset = inst->CastToStore();
3591 auto type = inst->GetType();
3592 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0U), DataType::POINTER); // pointer
3593 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1U), DataType::UINT32); // offset
3594 auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(2U), type); // store value
3595
3596 auto object_header_mem = MemRef(src0);
3597 auto mem = MemRef(src0, src1, store_by_offset->GetScale());
3598 if (inst->CastToStore()->GetNeedBarrier()) {
3599 enc->GetCodegen()->CreatePreWRB(inst, mem);
3600 }
3601
3602 if (store_by_offset->GetVolatile()) {
3603 enc->GetEncoder()->EncodeStrRelease(src2, mem);
3604 } else {
3605 enc->GetEncoder()->EncodeStr(src2, mem);
3606 }
3607
3608 if (inst->CastToStore()->GetNeedBarrier()) {
3609 enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src2, INVALID_REGISTER);
3610 }
3611 }
3612
/// Ensure a class is initialized before use.
/// AOT: load the class from its AOT table slot and, when the slot is empty,
/// jump to the shared PLT class-init resolver. JIT: if the class is not yet
/// initialized, check its state byte and call INITIALIZE_CLASS when needed.
void EncodeVisitor::VisitInitClass(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto runtime = graph->GetRuntime();
    auto class_id = inst->CastToInitClass()->GetTypeId();
    auto encoder = enc->GetEncoder();
    ASSERT(inst->IsRuntimeCall());

    if (graph->IsAotMode()) {
        ScopedTmpReg tmp_reg(encoder);
        ScopedTmpReg class_reg(encoder);

        auto aot_data = graph->GetAotData();
        intptr_t offset = aot_data->GetClassSlotOffset(encoder->GetCursorOffset(), class_id, true);
        // tmp_reg <- slot address, class_reg <- slot content (the class or null).
        encoder->MakeLoadAotTableAddr(offset, tmp_reg, class_reg);

        // Non-null slot means the class is already resolved and initialized.
        auto label = encoder->CreateLabel();
        encoder->EncodeJump(label, class_reg, Condition::NE);

        // PLT Class Init Resolver has special calling convention:
        // First encoder temporary (tmp_reg) works as parameter and return value (which is unnecessary here)
        CHECK_EQ(tmp_reg.GetReg().GetId(), encoder->GetTarget().GetTempRegsMask().GetMinRegister());
        enc->GetCodegen()->CreateJumpToClassResolverPltShared(inst, tmp_reg.GetReg(),
                                                              EntrypointId::CLASS_INIT_RESOLVER);
        encoder->BindLabel(label);
    } else {  // JIT mode
        auto klass = reinterpret_cast<uintptr_t>(inst->CastToInitClass()->GetClass());
        ASSERT(klass != 0);
        if (!runtime->IsClassInitialized(klass)) {
            auto slow_path =
                enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::INITIALIZE_CLASS);

            auto state_offset = runtime->GetClassStateOffset(enc->GetArch());
            int64_t init_value = runtime->GetClassInitializedValue();
            ScopedTmpReg tmp_reg(encoder);
            // Load the class state byte and compare against the "initialized" value.
            encoder->EncodeMov(tmp_reg, Imm(klass + state_offset));
            auto tmp_i8 = enc->GetCodegen()->ConvertRegister(tmp_reg.GetReg().GetId(), DataType::INT8);
            encoder->EncodeLdr(tmp_i8, false, MemRef(tmp_reg));

            encoder->EncodeJump(slow_path->GetLabel(), tmp_i8, Imm(init_value), Condition::NE);

            slow_path->BindBackLabel(encoder);
        }
    }
}
3659
/// Load a class object into dst (no initialization check).
/// AOT: if the class is the current method's own class, read it off the
/// Method; otherwise go through the PLT. JIT: embed the known class pointer,
/// or fall back to the unresolved-class table path.
void EncodeVisitor::VisitLoadClass(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto load_class = inst->CastToLoadClass();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto graph = enc->cg_->GetGraph();
    auto type_id = load_class->GetTypeId();
    ASSERT(inst->IsRuntimeCall());

    if (graph->IsAotMode()) {
        auto method_class_id = graph->GetRuntime()->GetClassIdForMethod(graph->GetMethod());
        if (method_class_id == type_id) {
            // Loading the enclosing method's own class: read Method.class directly.
            auto dst_ptr = dst.As(Codegen::ConvertDataType(DataType::POINTER, graph->GetArch()));
            enc->GetCodegen()->LoadMethod(dst_ptr);
            auto mem = MemRef(dst_ptr, graph->GetRuntime()->GetClassOffset(graph->GetArch()));
            encoder->EncodeLdr(dst.As(Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch())), false, mem);
            return;
        }
        ScopedTmpReg tmp_reg(encoder);
        enc->GetCodegen()->CreateLoadClassFromPLT(inst, tmp_reg, dst, type_id);
    } else {  // JIT mode
        auto klass = load_class->GetClass();
        if (klass == nullptr) {
            // Class not resolved at compile time: go through the table-slot path.
            FillLoadClassUnresolved(visitor, inst);
        } else {
            encoder->EncodeMov(dst, Imm(reinterpret_cast<uintptr_t>(klass)));
        }
    }
}
3690
FillLoadClassUnresolved(GraphVisitor * visitor,Inst * inst)3691 void EncodeVisitor::FillLoadClassUnresolved(GraphVisitor *visitor, Inst *inst)
3692 {
3693 auto *enc = static_cast<EncodeVisitor *>(visitor);
3694 auto encoder = enc->GetEncoder();
3695 auto load_class = inst->CastToLoadClass();
3696 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
3697 auto graph = enc->cg_->GetGraph();
3698 auto type_id = load_class->GetTypeId();
3699 auto method = load_class->GetMethod();
3700 auto utypes = graph->GetRuntime()->GetUnresolvedTypes();
3701 auto klass_addr = utypes->GetTableSlot(method, type_id, UnresolvedTypesInterface::SlotKind::CLASS);
3702 Reg dst_ptr(dst.GetId(), enc->GetCodegen()->GetPtrRegType());
3703 encoder->EncodeMov(dst_ptr, Imm(klass_addr));
3704 encoder->EncodeLdr(dst, false, MemRef(dst_ptr));
3705 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, EntrypointId::RESOLVE_CLASS);
3706 slow_path->SetUnresolvedType(method, type_id);
3707 slow_path->SetDstReg(dst);
3708 slow_path->SetSlotAddr(klass_addr);
3709
3710 encoder->EncodeJump(slow_path->GetLabel(), dst, Condition::EQ);
3711 slow_path->BindBackLabel(encoder);
3712 }
3713
/// Load a class into dst AND ensure it is initialized.
/// AOT: own-class fast path via the Method, otherwise AOT slot + PLT
/// class-init resolver. JIT: embed the class pointer and, unless the class is
/// already initialized (or is the current method's own class, which is being
/// initialized by definition), check its state byte and call
/// INITIALIZE_CLASS on mismatch.
void EncodeVisitor::VisitLoadAndInitClass(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto runtime = graph->GetRuntime();
    auto class_id = inst->CastToLoadAndInitClass()->GetTypeId();
    auto encoder = enc->GetEncoder();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());  // load value
    ASSERT(inst->IsRuntimeCall());

    if (graph->IsAotMode()) {
        auto method_class_id = runtime->GetClassIdForMethod(graph->GetMethod());
        if (method_class_id == class_id) {
            // Loading the enclosing method's own class: read Method.class directly.
            auto dst_ptr = dst.As(Codegen::ConvertDataType(DataType::POINTER, graph->GetArch()));
            enc->GetCodegen()->LoadMethod(dst_ptr);
            auto mem = MemRef(dst_ptr, graph->GetRuntime()->GetClassOffset(graph->GetArch()));
            encoder->EncodeLdr(dst.As(Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch())), false, mem);
            return;
        }

        ScopedTmpReg tmp_reg(encoder);

        auto aot_data = graph->GetAotData();
        intptr_t offset = aot_data->GetClassSlotOffset(encoder->GetCursorOffset(), class_id, true);
        // tmp_reg <- slot address, dst <- slot content (the class or null).
        encoder->MakeLoadAotTableAddr(offset, tmp_reg, dst);

        // Non-null slot means the class is already resolved and initialized.
        auto label = encoder->CreateLabel();
        encoder->EncodeJump(label, dst, Condition::NE);

        // PLT Class Init Resolver has special calling convention:
        // First encoder temporary (tmp_reg) works as parameter and return value
        CHECK_EQ(tmp_reg.GetReg().GetId(), encoder->GetTarget().GetTempRegsMask().GetMinRegister());
        enc->GetCodegen()->CreateJumpToClassResolverPltShared(inst, tmp_reg.GetReg(),
                                                              EntrypointId::CLASS_INIT_RESOLVER);

        encoder->EncodeMov(dst, tmp_reg);
        encoder->BindLabel(label);
    } else {  // JIT mode
        auto klass = reinterpret_cast<uintptr_t>(inst->CastToLoadAndInitClass()->GetClass());
        encoder->EncodeMov(dst, Imm(klass));

        if (runtime->IsClassInitialized(klass)) {
            return;
        }

        // The method's own class needs no runtime check here.
        auto method_class = runtime->GetClass(graph->GetMethod());
        if (method_class == inst->CastToLoadAndInitClass()->GetClass()) {
            return;
        }

        auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::INITIALIZE_CLASS);

        auto state_offset = runtime->GetClassStateOffset(enc->GetArch());
        int64_t init_value = runtime->GetClassInitializedValue();

        // Load the class state byte and compare against the "initialized" value.
        ScopedTmpReg state_reg(encoder, INT8_TYPE);
        encoder->EncodeLdr(state_reg, false, MemRef(dst, state_offset));

        encoder->EncodeJump(slow_path->GetLabel(), state_reg, Imm(init_value), Condition::NE);

        slow_path->BindBackLabel(encoder);
    }
}
3777
// Loads (and triggers initialization of) a class that is unresolved at compile time.
// AOT: goes through the AOT class slot table plus the PLT CLASS_INIT_RESOLVER stub.
// JIT: reads the cached class pointer from the unresolved-types table slot and falls
//      back to the INITIALIZE_CLASS_BY_ID slow path while the slot is still empty.
void EncodeVisitor::VisitUnresolvedLoadAndInitClass(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto class_id = inst->CastToUnresolvedLoadAndInitClass()->GetTypeId();
    auto encoder = enc->GetEncoder();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());  // load value
    ASSERT(inst->IsRuntimeCall());

    if (graph->IsAotMode()) {
        ScopedTmpReg tmp_reg(encoder);

        // Load the class from its AOT slot; a null dst means not yet resolved/initialized
        auto aot_data = graph->GetAotData();
        intptr_t offset = aot_data->GetClassSlotOffset(encoder->GetCursorOffset(), class_id, true);
        encoder->MakeLoadAotTableAddr(offset, tmp_reg, dst);

        auto label = encoder->CreateLabel();
        encoder->EncodeJump(label, dst, Condition::NE);

        // PLT Class Init Resolver has special calling convention:
        // First encoder temporary (tmp_reg) works as parameter and return value
        CHECK_EQ(tmp_reg.GetReg().GetId(), encoder->GetTarget().GetTempRegsMask().GetMinRegister());
        enc->GetCodegen()->CreateJumpToClassResolverPltShared(inst, tmp_reg.GetReg(),
                                                              EntrypointId::CLASS_INIT_RESOLVER);

        encoder->EncodeMov(dst, tmp_reg);
        encoder->BindLabel(label);
    } else {  // JIT mode
        auto method = inst->CastToUnresolvedLoadAndInitClass()->GetMethod();
        auto utypes = graph->GetRuntime()->GetUnresolvedTypes();
        auto klass_addr = utypes->GetTableSlot(method, class_id, UnresolvedTypesInterface::SlotKind::CLASS);
        // Read the cached class pointer through a pointer-typed view of dst
        Reg dst_ptr(dst.GetId(), enc->GetCodegen()->GetPtrRegType());
        encoder->EncodeMov(dst_ptr, Imm(klass_addr));
        encoder->EncodeLdr(dst, false, MemRef(dst_ptr));

        auto slow_path =
            enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, EntrypointId::INITIALIZE_CLASS_BY_ID);
        slow_path->SetUnresolvedType(method, class_id);
        slow_path->SetDstReg(dst);
        slow_path->SetSlotAddr(klass_addr);

        // Empty slot (== 0) -> resolve and initialize via the slow path
        encoder->EncodeJump(slow_path->GetLabel(), dst, Condition::EQ);
        slow_path->BindBackLabel(encoder);
    }
}
3823
VisitLoadStatic(GraphVisitor * visitor,Inst * inst)3824 void EncodeVisitor::VisitLoadStatic(GraphVisitor *visitor, Inst *inst)
3825 {
3826 auto *enc = static_cast<EncodeVisitor *>(visitor);
3827 auto load_static = inst->CastToLoadStatic();
3828 if (load_static->GetNeedBarrier()) {
3829 // !TODO Ishin Pavel inserts barriers for GC
3830 }
3831 auto type = inst->GetType();
3832 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type); // load value
3833 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // class
3834
3835 auto graph = enc->cg_->GetGraph();
3836 auto field = load_static->GetObjField();
3837 auto offset = graph->GetRuntime()->GetFieldOffset(field);
3838 auto mem = MemRef(src0, offset);
3839 if (load_static->GetVolatile()) {
3840 enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), mem);
3841 } else {
3842 enc->GetEncoder()->EncodeLdr(dst, IsTypeSigned(type), mem);
3843 }
3844 }
3845
// Loads a static field whose declaring class/field is unresolved at compile time.
// The field's memory address is obtained first (runtime call in AOT mode; cached
// unresolved-types table slot with a slow-path fallback in JIT mode), then the value
// is loaded with acquire semantics because an unknown field may be volatile.
void EncodeVisitor::VisitUnresolvedLoadStatic(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto load_static = inst->CastToUnresolvedLoadStatic();
    if (load_static->GetNeedBarrier()) {
        // TODO(Pavel Ishin): insert barriers for GC
    }
    auto type = inst->GetType();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);

    auto type_id = load_static->GetTypeId();
    auto method = load_static->GetMethod();

    // ref_reg is only used below to compare register sizes against a reference-typed register
    auto ref_reg = enc->GetCodegen()->ConvertRegister(0, DataType::REFERENCE);
    auto entrypoint = EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS;

    if (graph->IsAotMode()) {
        // We can't use dst for Load (src - used for get Ref-type size)
        if ((dst.GetSize() < ref_reg.GetSize()) || !dst.IsScalar()) {
            // dst can't hold an address (too narrow or a vector reg): go through a temp
            ScopedTmpReg tmp_reg(enc->GetEncoder());
            enc->GetCodegen()->CallRuntimeWithMethod(inst, method, entrypoint, tmp_reg, Imm(type_id), Imm(0));
            enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), MemRef(tmp_reg));
            return;
        }
        // TODO(zhroma): consider caching ?
        enc->GetCodegen()->CallRuntimeWithMethod(inst, method, entrypoint, dst, Imm(type_id), Imm(0));
    } else {
        auto skind = UnresolvedTypesInterface::SlotKind::FIELD;
        auto field_addr = graph->GetRuntime()->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);

        auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, entrypoint);
        slow_path->SetUnresolvedType(method, type_id);
        slow_path->SetSlotAddr(field_addr);

        // We can't use dst for Load (src - used for get Ref-type size)
        if ((dst.GetSize() < ref_reg.GetSize()) || !dst.IsScalar()) {
            // Resolve the field address into a temp, taking the slow path while the slot is empty
            ScopedTmpReg tmp(enc->GetEncoder());
            enc->GetEncoder()->EncodeMov(tmp, Imm(field_addr));
            enc->GetEncoder()->EncodeLdr(tmp, false, MemRef(tmp));
            slow_path->SetDstReg(tmp);
            enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp, Condition::EQ);
            slow_path->BindBackLabel(enc->GetEncoder());
            // Unknown load, assume it can be volatile
            enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), MemRef(tmp));
            return;
        }

        // dst is wide enough: resolve the field address directly in dst (via a pointer view)
        Reg dst_ptr(dst.GetId(), enc->GetCodegen()->GetPtrRegType());
        enc->GetEncoder()->EncodeMov(dst_ptr, Imm(field_addr));
        enc->GetEncoder()->EncodeLdr(dst, false, MemRef(dst_ptr));

        slow_path->SetDstReg(dst);

        enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), dst, Condition::EQ);
        slow_path->BindBackLabel(enc->GetEncoder());
    }

    // dst currently holds the field's address; load the actual value from it.
    // Unknown load, assume it can be volatile
    enc->GetEncoder()->EncodeLdrAcquire(dst, IsTypeSigned(type), MemRef(dst));
}
3907
VisitStoreStatic(GraphVisitor * visitor,Inst * inst)3908 void EncodeVisitor::VisitStoreStatic(GraphVisitor *visitor, Inst *inst)
3909 {
3910 auto *enc = static_cast<EncodeVisitor *>(visitor);
3911 auto store_static = inst->CastToStoreStatic();
3912 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // class
3913 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), inst->GetType()); // store value
3914
3915 auto graph = enc->cg_->GetGraph();
3916 auto runtime = graph->GetRuntime();
3917 auto field = store_static->GetObjField();
3918 auto offset = runtime->GetFieldOffset(field);
3919 auto mem = MemRef(src0, offset);
3920
3921 if (inst->CastToStoreStatic()->GetNeedBarrier()) {
3922 enc->GetCodegen()->CreatePreWRB(inst, mem);
3923 }
3924 if (store_static->GetVolatile()) {
3925 enc->GetEncoder()->EncodeStrRelease(src1, mem);
3926 } else {
3927 enc->GetEncoder()->EncodeStr(src1, mem);
3928 }
3929 if (inst->CastToStoreStatic()->GetNeedBarrier()) {
3930 auto arch = enc->GetEncoder()->GetArch();
3931 ScopedTmpReg tmp_reg(enc->GetEncoder(), Codegen::ConvertDataType(DataType::GetIntTypeForReference(arch), arch));
3932 enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(src0, runtime->GetManagedClassOffset(enc->GetArch())));
3933 auto class_header_mem = MemRef(tmp_reg);
3934 enc->GetCodegen()->CreatePostWRB(inst, class_header_mem, src1, INVALID_REGISTER);
3935 }
3936 }
3937
// Finishes an unresolved static store with write barriers.
// On entry *tmp1 points to the resolved Field structure. The helper computes the
// field's absolute address, optionally emits the pre-write barrier, performs a
// release store (unknown field -> assume volatile) and emits the post-write
// barrier on the class's managed object header.
// `pre` selects whether the pre-write barrier is emitted here (callers that
// already emitted it pass false).
static void FinishUnresolvedStoreStatic(EncodeVisitor *enc, Inst *inst, Graph *graph, ScopedTmpReg *tmp1, bool pre)
{
    auto runtime = graph->GetRuntime();
    auto value = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), inst->GetType());

    {
        // tmp1 points to Field, load Class into tmp2
        ScopedLiveTmpReg tmp2(enc->GetEncoder(), Codegen::ConvertDataType(DataType::REFERENCE, enc->GetArch()));
        enc->GetEncoder()->EncodeLdr(tmp2, false, MemRef(*tmp1, runtime->GetFieldClassOffset(enc->GetArch())));
        // extract Offset (32-bit view of tmp1; overwrites the Field pointer)
        enc->GetEncoder()->EncodeLdr(tmp1->GetReg().As(INT32_TYPE), false,
                                     MemRef(*tmp1, runtime->GetFieldOffsetOffset(enc->GetArch())));
        // address = Class + Offset
        enc->GetEncoder()->EncodeAdd(*tmp1, *tmp1, tmp2);

        // Pre-barrier if necessary
        if (pre) {
            enc->GetCodegen()->CreatePreWRB(inst, MemRef(*tmp1));
        }
        // Unknown store, assume it can be volatile
        enc->GetEncoder()->EncodeStrRelease(value, MemRef(*tmp1));

        // Load the managed object header for the post-barrier before tmp2 dies
        enc->GetEncoder()->EncodeLdr(*tmp1, false, MemRef(tmp2, runtime->GetManagedClassOffset(enc->GetArch())));
    }

    auto class_header_mem = MemRef(*tmp1);
    enc->GetCodegen()->CreatePostWRB(inst, class_header_mem, value, INVALID_REGISTER);
}
3966
3967 // TODO(pishin): In 'slow' case extra runtime call (two in total) required because lack of registers,
3968 // two free registers necessary for CreatePreWRB for x86_64
// AOT path of an unresolved static store that needs GC barriers.
// In the `slow` case (architectures short on registers with a pre-barrier enabled),
// the field's memory address is first obtained via a runtime call just to emit the
// pre-write barrier; the Field pointer is then re-obtained for the actual store.
static void GenUnresolvedStoreStaticBarrierAot(EncodeVisitor *enc, Inst *inst, Graph *graph, bool slow)
{
    auto store_static = inst->CastToUnresolvedStoreStatic();
    auto type_id = store_static->GetTypeId();
    auto entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_PTR;

    ScopedLiveTmpReg tmp_reg(enc->GetEncoder());
    if (slow) {
        // Extra runtime call: resolve the raw field address solely for the pre-barrier
        entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS;
        enc->GetCodegen()->LoadMethod(tmp_reg);
        enc->GetCodegen()->CallRuntime(inst, entrypoint, tmp_reg, {tmp_reg, Imm(type_id), Imm(0)});

        auto mem = MemRef(tmp_reg);
        enc->GetCodegen()->CreatePreWRB(inst, mem);

        entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_PTR;
    }
    // TODO(zhroma): consider caching ?
    enc->GetCodegen()->LoadMethod(tmp_reg);
    enc->GetCodegen()->CallRuntime(inst, entrypoint, tmp_reg, {tmp_reg, Imm(type_id), Imm(0)});

    // In the slow case the pre-barrier was already emitted above, so pass pre = !slow
    FinishUnresolvedStoreStatic(enc, inst, graph, &tmp_reg, !slow);
}
3992
// JIT path of an unresolved static store with barriers on register-starved targets.
// Two table slots are consulted: first the raw field memory address (for the
// pre-write barrier), then the Field pointer (for the store itself). Each lookup
// falls back to its own SlowPathUnresolved when the cached slot is still empty.
static void GenUnresolvedStoreStaticBarrierJitSlow(EncodeVisitor *enc, Inst *inst, Graph *graph)
{
    auto runtime = graph->GetRuntime();
    auto store_static = inst->CastToUnresolvedStoreStatic();
    auto type_id = store_static->GetTypeId();
    auto method = store_static->GetMethod();

    // Pre-barrier preparations
    auto entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS;
    auto skind = UnresolvedTypesInterface::SlotKind::FIELD;
    auto field_addr = runtime->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);

    ScopedLiveTmpReg tmp_reg(enc->GetEncoder());
    enc->GetEncoder()->EncodeMov(tmp_reg, Imm(field_addr));
    enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(tmp_reg));

    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, entrypoint);
    slow_path->SetUnresolvedType(method, type_id);
    slow_path->SetDstReg(tmp_reg);
    slow_path->SetSlotAddr(field_addr);

    // Empty slot (== 0) -> resolve via the slow path
    enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp_reg, Condition::EQ);
    slow_path->BindBackLabel(enc->GetEncoder());

    auto mem = MemRef(tmp_reg);
    enc->GetCodegen()->CreatePreWRB(inst, mem);

    // Actual store preparations: re-resolve as a Field pointer
    entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_PTR;
    skind = UnresolvedTypesInterface::SlotKind::STATIC_FIELD_PTR;
    field_addr = runtime->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);

    enc->GetEncoder()->EncodeMov(tmp_reg, Imm(field_addr));
    enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(tmp_reg));

    slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, entrypoint);
    slow_path->SetUnresolvedType(method, type_id);
    slow_path->SetDstReg(tmp_reg);
    slow_path->SetSlotAddr(field_addr);

    enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp_reg, Condition::EQ);
    slow_path->BindBackLabel(enc->GetEncoder());

    // Pre-barrier already emitted above -> pre = false
    FinishUnresolvedStoreStatic(enc, inst, graph, &tmp_reg, false);
}
4038
// Dispatches an unresolved static store that needs GC barriers to the proper
// code-generation strategy:
//   - AOT mode (AARCH64/X86_64 only);
//   - JIT "slow" mode for AARCH32/X86_64 with an active pre-write barrier
//     (see the register-pressure note above FinishUnresolvedStoreStatic);
//   - JIT caching mode (default): cached Field-pointer slot with a slow path.
static void GenUnresolvedStoreStaticBarrier(EncodeVisitor *enc, Inst *inst, Graph *graph)
{
    auto arch = enc->GetCodegen()->GetArch();
    auto runtime = graph->GetRuntime();
    bool slow = (arch == Arch::AARCH32 || arch == Arch::X86_64) &&
                runtime->GetPreType() != panda::mem::BarrierType::PRE_WRB_NONE;

    if (graph->IsAotMode()) {
        ASSERT(arch == Arch::AARCH64 || arch == Arch::X86_64);
        GenUnresolvedStoreStaticBarrierAot(enc, inst, graph, slow);
        return;
    }
    if (slow) {
        GenUnresolvedStoreStaticBarrierJitSlow(enc, inst, graph);
        return;
    }

    // JIT caching case
    auto store_static = inst->CastToUnresolvedStoreStatic();
    auto type_id = store_static->GetTypeId();
    auto method = store_static->GetMethod();

    auto entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_PTR;
    auto skind = UnresolvedTypesInterface::SlotKind::STATIC_FIELD_PTR;
    auto field_addr = runtime->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);

    // Load the cached Field pointer from its table slot
    ScopedLiveTmpReg tmp_reg(enc->GetEncoder());
    enc->GetEncoder()->EncodeMov(tmp_reg, Imm(field_addr));
    enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(tmp_reg));

    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, entrypoint);
    slow_path->SetUnresolvedType(method, type_id);
    slow_path->SetDstReg(tmp_reg);
    slow_path->SetSlotAddr(field_addr);

    // Empty slot (== 0) -> resolve via the slow path
    enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp_reg, Condition::EQ);
    slow_path->BindBackLabel(enc->GetEncoder());

    // Pre-barrier not yet emitted -> pre = true
    FinishUnresolvedStoreStatic(enc, inst, graph, &tmp_reg, true);
}
4079
// Stores a value into a static field that is unresolved at compile time.
// With a GC barrier required, all work is delegated to GenUnresolvedStoreStaticBarrier.
// Otherwise the raw field memory address is obtained (runtime call in AOT mode, cached
// table slot + slow path in JIT mode) and the value is stored with release semantics,
// because an unknown field may be volatile.
void EncodeVisitor::VisitUnresolvedStoreStatic(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto store_static = inst->CastToUnresolvedStoreStatic();
    auto type_id = store_static->GetTypeId();
    auto method = store_static->GetMethod();

    if (store_static->GetNeedBarrier()) {
        GenUnresolvedStoreStaticBarrier(enc, inst, graph);
        return;
    }

    // Non-barriered case
    auto value = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), inst->GetType());
    auto entrypoint = EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS;

    ScopedTmpReg tmp_reg(enc->GetEncoder());
    if (graph->IsAotMode()) {
        // TODO(zhroma): consider caching ?
        enc->GetCodegen()->CallRuntimeWithMethod(inst, method, entrypoint, tmp_reg, Imm(type_id), Imm(0));
    } else {
        // JIT: read the cached field address, resolving through the slow path when empty
        auto skind = UnresolvedTypesInterface::SlotKind::FIELD;
        auto field_addr = graph->GetRuntime()->GetUnresolvedTypes()->GetTableSlot(method, type_id, skind);
        enc->GetEncoder()->EncodeMov(tmp_reg, Imm(field_addr));
        enc->GetEncoder()->EncodeLdr(tmp_reg, false, MemRef(tmp_reg));

        auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathUnresolved>(inst, entrypoint);
        slow_path->SetUnresolvedType(method, type_id);
        slow_path->SetDstReg(tmp_reg);
        slow_path->SetSlotAddr(field_addr);

        enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), tmp_reg, Condition::EQ);
        slow_path->BindBackLabel(enc->GetEncoder());
    }

    auto mem = MemRef(tmp_reg);
    // Unknown store, assume it can be volatile
    enc->GetEncoder()->EncodeStrRelease(value, mem);
}
4120
VisitNewObject(GraphVisitor * visitor,Inst * inst)4121 void EncodeVisitor::VisitNewObject(GraphVisitor *visitor, Inst *inst)
4122 {
4123 auto *enc = static_cast<EncodeVisitor *>(visitor);
4124 // TODO(msherstennikov): use irtoced entrypoint once spill-fills will be supported for entrypoints mode.
4125 if (enc->cg_->GetArch() == Arch::AARCH32) {
4126 enc->GetCodegen()->CreateNewObjCallOld(inst->CastToNewObject());
4127 } else {
4128 enc->GetCodegen()->CreateNewObjCall(inst->CastToNewObject());
4129 }
4130 if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
4131 enc->GetEncoder()->EncodeMemoryBarrier(MemoryOrder::Release);
4132 }
4133 }
4134
// Loads the managed class object for a type unresolved at compile time.
// AOT: resolves the runtime class through the PLT, then loads its managed object.
// JIT: reads the cached managed-class pointer from the unresolved-types table slot,
//      falling back to the RESOLVE_CLASS_OBJECT slow path while the slot is empty.
void EncodeVisitor::VisitUnresolvedLoadType(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto codegen = enc->GetCodegen();
    auto load_type = inst->CastToUnresolvedLoadType();
    if (load_type->GetNeedBarrier()) {
        // TODO(Pavel Ishin): insert barriers for GC
    }
    auto dst = codegen->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto graph = enc->cg_->GetGraph();
    auto type_id = load_type->GetTypeId();

    auto runtime = graph->GetRuntime();
    auto method = load_type->GetMethod();

    if (graph->IsAotMode()) {
        ScopedTmpReg tmp_reg(encoder);
        // Load pointer to klass from PLT
        codegen->CreateLoadClassFromPLT(inst, tmp_reg, dst, type_id);
        // Finally load Object
        encoder->EncodeLdr(dst, false, MemRef(dst, runtime->GetManagedClassOffset(enc->GetArch())));
    } else {
        // JIT: read the cached managed-class pointer through a pointer-typed view of dst
        auto utypes = runtime->GetUnresolvedTypes();
        auto cls_addr = utypes->GetTableSlot(method, type_id, UnresolvedTypesInterface::SlotKind::MANAGED_CLASS);
        Reg dst_ptr(dst.GetId(), codegen->GetPtrRegType());
        encoder->EncodeMov(dst_ptr, Imm(cls_addr));
        encoder->EncodeLdr(dst, false, MemRef(dst_ptr));

        // Empty slot (== 0) -> resolve the class object via the slow path
        auto slow_path = codegen->CreateSlowPath<SlowPathUnresolved>(inst, EntrypointId::RESOLVE_CLASS_OBJECT);
        slow_path->SetUnresolvedType(method, type_id);
        slow_path->SetDstReg(dst);
        slow_path->SetSlotAddr(cls_addr);
        encoder->EncodeJump(slow_path->GetLabel(), dst, Condition::EQ);
        slow_path->BindBackLabel(encoder);
    }
}
4172
// Loads the managed class object for a type known at compile time.
// AOT: if the type is the method's own class, read it straight from the Method
//      structure; otherwise go through the PLT. Either way the managed object is
//      then loaded from the runtime class.
// JIT: the class is resolved right now and the managed object address is inlined
//      as an immediate.
void EncodeVisitor::VisitLoadType(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto load_type = inst->CastToLoadType();
    if (load_type->GetNeedBarrier()) {
        // TODO(Pavel Ishin): insert barriers for GC
    }
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto graph = enc->cg_->GetGraph();
    auto type_id = load_type->GetTypeId();

    auto runtime = graph->GetRuntime();
    auto method = load_type->GetMethod();

    if (graph->IsAotMode()) {
        auto method_class_id = runtime->GetClassIdForMethod(graph->GetMethod());
        if (method_class_id == type_id) {
            // Loading our own class: Method* -> Class, no PLT needed
            auto dst_ptr = dst.As(Codegen::ConvertDataType(DataType::POINTER, graph->GetArch()));
            enc->GetCodegen()->LoadMethod(dst_ptr);
            auto mem = MemRef(dst_ptr, graph->GetRuntime()->GetClassOffset(graph->GetArch()));
            encoder->EncodeLdr(dst.As(Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch())), false, mem);
        } else {
            ScopedTmpReg tmp_reg(encoder);
            // Load pointer to klass from PLT
            enc->GetCodegen()->CreateLoadClassFromPLT(inst, tmp_reg, dst, type_id);
        }
        // Finally load ManagedClass object
        encoder->EncodeLdr(dst, false, MemRef(dst, runtime->GetManagedClassOffset(enc->GetArch())));
    } else {  // JIT mode
        auto klass = reinterpret_cast<uintptr_t>(runtime->ResolveType(method, type_id));
        auto managed_klass = runtime->GetManagedType(klass);
        encoder->EncodeMov(dst, Imm(managed_klass));
    }
}
4208
// CheckCast fallback for a target class unresolved at compile time.
// A null class operand (src(1)) delegates everything to the CHECK_CAST entrypoint;
// a null object (src(0)) trivially passes; otherwise the object's class must match
// the target class exactly, any mismatch also goes through the slow path.
void EncodeVisitor::FillUnresolvedClass(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto encoder = enc->GetEncoder();
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::CHECK_CAST);
    // Target class not resolved yet (nullptr) -> full runtime check
    encoder->EncodeJump(slow_path->GetLabel(), class_reg, Condition::EQ);
    slow_path->CreateBackLabel(encoder);
    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    // Null object always passes the cast
    encoder->EncodeJump(slow_path->GetBackLabel(), src, Condition::EQ);
    ScopedTmpReg tmp_reg(encoder, Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
    enc->GetCodegen()->LoadClassFromObject(tmp_reg, src);
    // Exact class match passes; anything else is re-checked by the runtime
    encoder->EncodeJump(slow_path->GetLabel(), tmp_reg, class_reg, Condition::NE);
    slow_path->BindBackLabel(encoder);
}
4225
FillObjectClass(GraphVisitor * visitor,Reg tmp_reg,LabelHolder::LabelId throw_label)4226 void EncodeVisitor::FillObjectClass(GraphVisitor *visitor, Reg tmp_reg, LabelHolder::LabelId throw_label)
4227 {
4228 auto *enc = static_cast<EncodeVisitor *>(visitor);
4229 auto graph = enc->cg_->GetGraph();
4230 auto runtime = graph->GetRuntime();
4231 auto encoder = enc->GetEncoder();
4232
4233 Reg type_reg(tmp_reg.GetId(), INT8_TYPE);
4234 // Load type class
4235 encoder->EncodeLdr(type_reg, false, MemRef(tmp_reg, runtime->GetClassTypeOffset(enc->GetArch())));
4236 // Jump to EH if type not reference
4237 encoder->EncodeJump(throw_label, type_reg, Imm(runtime->GetReferenceTypeMask()), Condition::NE);
4238 }
4239
4240 /* The CheckCast class should be a subclass of input class:
4241 ......................
4242 bool Class::IsSubClassOf(const Class *klass) const {
4243 const Class *current = this;
4244 do {
4245 if (current == klass) {
4246 return true;
4247 }
4248 current = current->GetBase();
4249 } while (current != nullptr);
4250 return false;
4251 }
4252 */
4253
// CheckCast for an ordinary (non-array/final/interface) target class: walk the
// base-class chain of the input class (in tmp_reg) until it either equals the
// target class in src(1) (cast succeeds, fall through) or hits null (jump to
// throw_label). The initial `current == klass` comparison was emitted by the caller.
void EncodeVisitor::FillOtherClass(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId throw_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto encoder = enc->GetEncoder();
    auto loop_label = encoder->CreateLabel();

    // First compare `current == klass` we make before switch
    encoder->BindLabel(loop_label);
    // Load base klass
    encoder->EncodeLdr(tmp_reg, false, MemRef(tmp_reg, graph->GetRuntime()->GetClassBaseOffset(enc->GetArch())));
    // Reached the chain end (null) without a match -> throw
    encoder->EncodeJump(throw_label, tmp_reg, Condition::EQ);
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    // Not equal yet -> keep climbing the hierarchy
    encoder->EncodeJump(loop_label, tmp_reg, class_reg, Condition::NE);
}
4269
// CheckCast for casting to an array-of-objects class: the input class (in tmp_reg)
// must be an array class whose component type is a reference (non-primitive) type;
// any other case transfers control to throw_label.
void EncodeVisitor::FillArrayObjectClass(GraphVisitor *visitor, Reg tmp_reg, LabelHolder::LabelId throw_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto runtime = graph->GetRuntime();
    auto encoder = enc->GetEncoder();

    // Byte-sized alias of tmp_reg for reading the component type tag
    Reg type_reg(tmp_reg.GetId(), INT8_TYPE);
    // Load Component class
    encoder->EncodeLdr(tmp_reg, false, MemRef(tmp_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Jump to EH if src is not array class (component type is null)
    encoder->EncodeJump(throw_label, tmp_reg, Condition::EQ);
    // Load type of the component class
    encoder->EncodeLdr(type_reg, false, MemRef(tmp_reg, runtime->GetClassTypeOffset(enc->GetArch())));
    // Jump to EH if type not reference
    encoder->EncodeJump(throw_label, type_reg, Imm(runtime->GetReferenceTypeMask()), Condition::NE);
}
4287
// CheckCast for casting to an array class: the input class (in tmp_reg) must itself
// be an array class (otherwise jump to throw_label), and its component type must
// equal the target array's component type; mismatching components are handed to the
// CHECK_CAST entrypoint for the full covariance check.
void EncodeVisitor::FillArrayClass(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId throw_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto runtime = graph->GetRuntime();
    auto encoder = enc->GetEncoder();

    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::CHECK_CAST);

    // Load Component type of Input
    encoder->EncodeLdr(tmp_reg, false, MemRef(tmp_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Check that src is array class (component type is non-null)
    encoder->EncodeJump(throw_label, tmp_reg, Condition::EQ);
    // Load Component type of the instance
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    ScopedTmpReg tmp_reg1(encoder, Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
    encoder->EncodeLdr(tmp_reg1, false, MemRef(class_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Compare component types; unequal -> runtime decides via slow path
    encoder->EncodeJump(slow_path->GetLabel(), tmp_reg, tmp_reg1, Condition::NE);

    slow_path->BindBackLabel(encoder);
}
4310
FillInterfaceClass(GraphVisitor * visitor,Inst * inst)4311 void EncodeVisitor::FillInterfaceClass(GraphVisitor *visitor, Inst *inst)
4312 {
4313 auto enc = static_cast<EncodeVisitor *>(visitor);
4314 auto encoder = enc->GetEncoder();
4315 auto codegen = enc->GetCodegen();
4316 if (codegen->GetArch() == Arch::AARCH32) {
4317 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::CHECK_CAST);
4318 encoder->EncodeJump(slow_path->GetLabel());
4319 slow_path->BindBackLabel(encoder);
4320 } else {
4321 codegen->CreateCheckCastInterfaceCall(inst);
4322 }
4323 }
4324
// Emits the class-kind-specific body of a CheckCast after the caller handled the
// null-object case. For equal classes it jumps straight to end_label; otherwise it
// dispatches on the statically known kind of the target class, routing failures to
// the CLASS_CAST_EXCEPTION slow path.
void EncodeVisitor::FillCheckCast(GraphVisitor *visitor, Inst *inst, Reg src, LabelHolder::LabelId end_label,
                                  compiler::ClassType klass_type)
{
    if (klass_type == ClassType::INTERFACE_CLASS) {
        FillInterfaceClass(visitor, inst);
        return;
    }
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto encoder = enc->GetEncoder();
    // class_reg - CheckCast class
    // tmp_reg - input class
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    ScopedTmpReg tmp_reg(encoder, Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
    enc->GetCodegen()->LoadClassFromObject(tmp_reg, src);
    // There is no exception if the classes are equal
    encoder->EncodeJump(end_label, class_reg, tmp_reg, Condition::EQ);

    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathCheckCast>(inst, EntrypointId::CLASS_CAST_EXCEPTION);
    slow_path->SetClassReg(class_reg);
    auto throw_label = slow_path->GetLabel();
    switch (klass_type) {
        // The input class should be not primitive type
        case ClassType::OBJECT_CLASS: {
            FillObjectClass(visitor, tmp_reg, throw_label);
            break;
        }
        case ClassType::OTHER_CLASS: {
            FillOtherClass(visitor, inst, tmp_reg, throw_label);
            break;
        }
        // The input class should be array class and component type should be not primitive type
        case ClassType::ARRAY_OBJECT_CLASS: {
            FillArrayObjectClass(visitor, tmp_reg, throw_label);
            break;
        }
        // Check that component types are equal, otherwise call the slow path
        case ClassType::ARRAY_CLASS: {
            FillArrayClass(visitor, inst, tmp_reg, throw_label);
            break;
        }
        // Final class with unequal classes can never succeed -> unconditional throw
        case ClassType::FINAL_CLASS: {
            EVENT_CODEGEN_SIMPLIFICATION(events::CodegenSimplificationInst::CHECKCAST,
                                         events::CodegenSimplificationReason::FINAL_CLASS);
            encoder->EncodeJump(throw_label);
            break;
        }
        default: {
            UNREACHABLE();
        }
    }
}
4377
// Top-level CheckCast lowering: handles the unresolved-class case, optionally skips
// the null check when the optimizer proved src non-null, then delegates the
// kind-specific check to FillCheckCast.
void EncodeVisitor::VisitCheckCast(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto method = inst->CastToCheckCast()->GetMethod();
    auto type_id = inst->CastToCheckCast()->GetTypeId();
    auto encoder = enc->GetEncoder();

    auto klass_type = inst->CastToCheckCast()->GetClassType();

    if (klass_type == ClassType::UNRESOLVED_CLASS) {
        FillUnresolvedClass(visitor, inst);
        return;
    }
    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto end_label = encoder->CreateLabel();

    if (inst->CastToCheckCast()->GetOmitNullCheck()) {
        EVENT_CODEGEN_SIMPLIFICATION(events::CodegenSimplificationInst::CHECKCAST,
                                     events::CodegenSimplificationReason::SKIP_NULLCHECK);
    } else {
        // Compare with nullptr: a null object always passes the cast
        encoder->EncodeJump(end_label, src, Condition::EQ);
    }

    // Resolved class types must actually be resolvable here (debug-only check)
    [[maybe_unused]] auto klass = enc->cg_->GetGraph()->GetRuntime()->GetClass(method, type_id);
    ASSERT(klass != nullptr);

    FillCheckCast(visitor, inst, src, end_label, klass_type);
    encoder->BindLabel(end_label);
}
4408
// IsInstance fallback for a target class unresolved at compile time.
// A null class operand delegates to the IS_INSTANCE entrypoint; a null object yields
// false; an exact class match yields true; everything else is decided by the
// entrypoint through the slow path.
void EncodeVisitor::FillIsInstanceUnresolved(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto graph = enc->cg_->GetGraph();
    auto encoder = enc->GetEncoder();
    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());

    // Target class not resolved yet (nullptr) -> full runtime check
    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::IS_INSTANCE);
    encoder->EncodeJump(slow_path->GetLabel(), class_reg, Condition::EQ);
    slow_path->CreateBackLabel(encoder);
    auto end_label = slow_path->GetBackLabel();

    // Compare with nullptr: null object -> result is false
    auto next_label = encoder->CreateLabel();
    encoder->EncodeJump(next_label, src, Condition::NE);
    encoder->EncodeMov(dst, Imm(0));
    encoder->EncodeJump(end_label);
    encoder->BindLabel(next_label);

    // Get instance class
    ScopedTmpReg tmp_reg(encoder, Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
    enc->GetCodegen()->LoadClassFromObject(tmp_reg, src);

    // Sets true if the classes are equal; otherwise the runtime decides
    encoder->EncodeJump(slow_path->GetLabel(), tmp_reg, class_reg, Condition::NE);
    encoder->EncodeMov(dst, Imm(1));

    slow_path->BindBackLabel(encoder);
}
4440
// IsInstance against the root object class: the result is simply whether the input
// class (in tmp_reg) has a reference (non-primitive) type tag; the boolean is
// materialized directly into dst via a compare.
void EncodeVisitor::FillIsInstanceCaseObject(GraphVisitor *visitor, Inst *inst, Reg tmp_reg)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto runtime = enc->cg_->GetGraph()->GetRuntime();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());

    // ClassType::OBJECT_CLASS
    // Byte-sized alias of tmp_reg for the class type tag
    Reg type_reg(tmp_reg.GetId(), INT8_TYPE);
    // Load type class
    encoder->EncodeLdr(type_reg, false, MemRef(tmp_reg, runtime->GetClassTypeOffset(enc->GetArch())));
    // dst = (type tag == reference mask)
    ScopedTmpReg type_mask_reg(encoder, INT8_TYPE);
    encoder->EncodeMov(type_mask_reg, Imm(runtime->GetReferenceTypeMask()));
    encoder->EncodeCompare(dst, type_mask_reg, type_reg, Condition::EQ);
}
4456
4457 /* Sets true if IsInstance class is a subclass of input class:
4458 ......................
4459 bool Class::IsSubClassOf(const Class *klass) const {
4460 const Class *current = this;
4461 do {
4462 if (current == klass) {
4463 return true;
4464 }
4465 current = current->GetBase();
4466 } while (current != nullptr);
4467 return false;
4468 }
4469 */
4470
// IsInstance against an ordinary (non-array/final/interface) class: walk the
// base-class chain of the input class (in tmp_reg); meeting the target class in
// src(1) sets dst = 1 and jumps to end_label, reaching null sets dst = 0.
// The initial `current == klass` comparison was emitted by the caller.
void EncodeVisitor::FillIsInstanceCaseOther(GraphVisitor *visitor, Inst *inst, Reg tmp_reg,
                                            LabelHolder::LabelId end_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto runtime = enc->cg_->GetGraph()->GetRuntime();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);

    // ClassType::OTHER_CLASS
    auto loop_label = encoder->CreateLabel();
    auto false_label = encoder->CreateLabel();

    // First compare `current == klass` we make before switch
    encoder->BindLabel(loop_label);
    // Load base klass
    encoder->EncodeLdr(tmp_reg, false, MemRef(tmp_reg, runtime->GetClassBaseOffset(enc->GetArch())));
    // Chain end (null) -> false; mismatch -> keep climbing
    encoder->EncodeJump(false_label, tmp_reg, Condition::EQ);
    encoder->EncodeJump(loop_label, tmp_reg, class_reg, Condition::NE);

    // Set true result and jump to exit
    encoder->EncodeMov(dst, Imm(1));
    encoder->EncodeJump(end_label);

    // Set false result and jump to exit
    encoder->BindLabel(false_label);
    encoder->EncodeMov(dst, Imm(0));
}
4499
4500 // Sets true if the Input class is array class and component type is not primitive type
/**
 * IsInstance tail for ClassType::ARRAY_OBJECT_CLASS: sets dst to true when the
 * input object's class is an array class whose component type is a reference
 * (non-primitive) type.  tmp_reg holds the object's class on entry.
 */
void EncodeVisitor::FillIsInstanceCaseArrayObject(GraphVisitor *visitor, Inst *inst, Reg tmp_reg,
                                                  LabelHolder::LabelId end_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto graph = enc->cg_->GetGraph();
    auto runtime = graph->GetRuntime();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());

    // ClassType::ARRAY_OBJECT_CLASS
    // dst_ref is a reference-width view of dst, reused as a scratch register below
    Reg dst_ref(dst.GetId(), Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
    // 8-bit view of tmp_reg for the class-type byte
    Reg type_reg(tmp_reg.GetId(), INT8_TYPE);
    // Load Component class
    encoder->EncodeLdr(dst_ref, false, MemRef(tmp_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Check that src is array class: jump taken when the component type is null,
    // in which case dst already holds 0 (false)
    encoder->EncodeJump(end_label, dst_ref, Condition::EQ);
    // Load type of the component class
    encoder->EncodeLdr(type_reg, false, MemRef(dst_ref, runtime->GetClassTypeOffset(enc->GetArch())));
    ScopedTmpReg type_mask_reg(encoder, INT8_TYPE);
    encoder->EncodeMov(type_mask_reg, Imm(runtime->GetReferenceTypeMask()));
    // dst = (component class type == reference-type mask)
    encoder->EncodeCompare(dst, type_mask_reg, type_reg, Condition::EQ);
}
4523
4524 // Check that components types are equals, else call slow path
/**
 * IsInstance tail for ClassType::ARRAY_CLASS: fast-true when the object is an
 * array whose component type is exactly equal to the target array's component
 * type; unequal component types fall back to the IS_INSTANCE entrypoint slow
 * path.  tmp_reg holds the object's class on entry and is clobbered.
 */
void EncodeVisitor::FillIsInstanceCaseArrayClass(GraphVisitor *visitor, Inst *inst, Reg tmp_reg,
                                                 LabelHolder::LabelId end_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto graph = enc->cg_->GetGraph();
    auto runtime = graph->GetRuntime();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());

    // ClassType::ARRAY_CLASS
    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::IS_INSTANCE);

    auto next_label_1 = encoder->CreateLabel();
    // Load Component type of Input
    encoder->EncodeLdr(tmp_reg, false, MemRef(tmp_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Check that src is array class (component type != null); otherwise result is false
    encoder->EncodeJump(next_label_1, tmp_reg, Condition::NE);
    encoder->EncodeMov(dst, Imm(0));
    encoder->EncodeJump(end_label);
    encoder->BindLabel(next_label_1);
    // Load Component type of the instance
    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    ScopedTmpReg tmp_reg1(encoder, Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
    encoder->EncodeLdr(tmp_reg1, false, MemRef(class_reg, runtime->GetClassComponentTypeOffset(enc->GetArch())));
    // Compare component types: unequal components are resolved by the runtime call
    encoder->EncodeJump(slow_path->GetLabel(), tmp_reg, tmp_reg1, Condition::NE);
    encoder->EncodeMov(dst, Imm(1));

    slow_path->BindBackLabel(encoder);
}
4555
FillIsInstanceCaseInterface(GraphVisitor * visitor,Inst * inst)4556 void EncodeVisitor::FillIsInstanceCaseInterface(GraphVisitor *visitor, Inst *inst)
4557 {
4558 auto *enc = static_cast<EncodeVisitor *>(visitor);
4559 auto encoder = enc->GetEncoder();
4560 // ClassType::INTERFACE_CLASS
4561 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::IS_INSTANCE);
4562
4563 encoder->EncodeJump(slow_path->GetLabel());
4564
4565 slow_path->BindBackLabel(encoder);
4566 }
4567
/**
 * Common IsInstance prologue shared by all resolved class types:
 *  - optional null check on the object (null => dst = 0, jump to end),
 *  - loads the object's class into tmp_reg,
 *  - compares it against the target class (exact match => true).
 * For FINAL_CLASS the exact compare is the complete answer; for other class
 * types control falls through into the type-specific tail on mismatch.
 */
void EncodeVisitor::FillIsInstance(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId end_label)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto encoder = enc->GetEncoder();
    auto src = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());

    if (inst->CastToIsInstance()->GetOmitNullCheck()) {
        EVENT_CODEGEN_SIMPLIFICATION(events::CodegenSimplificationInst::ISINSTANCE,
                                     events::CodegenSimplificationReason::SKIP_NULLCHECK);
    } else {
        // Compare with nullptr: instanceof on a null reference is always false
        auto next_label = encoder->CreateLabel();
        encoder->EncodeJump(next_label, src, Condition::NE);
        encoder->EncodeMov(dst, Imm(0));
        encoder->EncodeJump(end_label);
        encoder->BindLabel(next_label);
    }

    auto class_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::REFERENCE);
    enc->GetCodegen()->LoadClassFromObject(tmp_reg, src);

    // Sets true if the classes are equal
    if (inst->CastToIsInstance()->GetClassType() == ClassType::FINAL_CLASS) {
        // Final class: the exact compare is the full answer, no branch needed
        encoder->EncodeCompare(dst, class_reg, tmp_reg, Condition::EQ);
    } else if (dst.GetId() != src.GetId() && dst.GetId() != class_reg.GetId()) {
        // dst does not alias any still-needed input, so it is safe to write
        // the compare result before branching on it
        encoder->EncodeCompare(dst, class_reg, tmp_reg, Condition::EQ);
        encoder->EncodeJump(end_label, dst, Condition::NE);
    } else {
        // dst aliases src or class_reg: write dst only on the equal path,
        // leaving the inputs intact for the type-specific tail
        auto next_label_1 = encoder->CreateLabel();
        encoder->EncodeJump(next_label_1, class_reg, tmp_reg, Condition::NE);
        encoder->EncodeMov(dst, Imm(1));
        encoder->EncodeJump(end_label);
        encoder->BindLabel(next_label_1);
    }
}
4604
VisitIsInstance(GraphVisitor * visitor,Inst * inst)4605 void EncodeVisitor::VisitIsInstance(GraphVisitor *visitor, Inst *inst)
4606 {
4607 auto *enc = static_cast<EncodeVisitor *>(visitor);
4608 auto graph = enc->cg_->GetGraph();
4609 auto encoder = enc->GetEncoder();
4610 auto klass_type = inst->CastToIsInstance()->GetClassType();
4611 if (klass_type == ClassType::UNRESOLVED_CLASS) {
4612 FillIsInstanceUnresolved(visitor, inst);
4613 return;
4614 }
4615 // tmp_reg - input class
4616 ScopedTmpReg tmp_reg(encoder, Codegen::ConvertDataType(DataType::REFERENCE, graph->GetArch()));
4617 auto end_label = encoder->CreateLabel();
4618
4619 FillIsInstance(visitor, inst, tmp_reg, end_label);
4620 switch (klass_type) {
4621 // Sets true if the Input class is not primitive type
4622 case ClassType::OBJECT_CLASS: {
4623 FillIsInstanceCaseObject(visitor, inst, tmp_reg);
4624 break;
4625 }
4626 case ClassType::OTHER_CLASS: {
4627 FillIsInstanceCaseOther(visitor, inst, tmp_reg, end_label);
4628 break;
4629 }
4630 // Sets true if the Input class is array class and component type is not primitive type
4631 case ClassType::ARRAY_OBJECT_CLASS: {
4632 FillIsInstanceCaseArrayObject(visitor, inst, tmp_reg, end_label);
4633 break;
4634 }
4635 // Check that components types are equals, else call slow path
4636 case ClassType::ARRAY_CLASS: {
4637 FillIsInstanceCaseArrayClass(visitor, inst, tmp_reg, end_label);
4638 break;
4639 }
4640 case ClassType::INTERFACE_CLASS: {
4641 FillIsInstanceCaseInterface(visitor, inst);
4642 break;
4643 }
4644 case ClassType::FINAL_CLASS: {
4645 EVENT_CODEGEN_SIMPLIFICATION(events::CodegenSimplificationInst::ISINSTANCE,
4646 events::CodegenSimplificationReason::FINAL_CLASS);
4647 break;
4648 }
4649 default: {
4650 UNREACHABLE();
4651 }
4652 }
4653 encoder->BindLabel(end_label);
4654 }
4655
/**
 * Emits MonitorEnter/MonitorExit through the corresponding fast-path
 * entrypoint in the per-thread entrypoint table.  Only live registers that
 * overlap the first two parameter registers or the return register are
 * spilled around the call.
 * NOTE(review): this presumes the fast-path entrypoints preserve all other
 * caller-saved registers — confirm against the entrypoint implementation.
 */
void Codegen::CreateMonitorCall(MonitorInst *inst)
{
    auto src = ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto id = inst->IsExit() ? EntrypointId::MONITOR_EXIT_FAST_PATH : EntrypointId::MONITOR_ENTER_FAST_PATH;

    // Restrict the save set: first two param registers + the return register
    auto param_regs {GetLiveRegisters(inst).first};
    param_regs &= GetTarget().GetParamRegsMask(2U) | RegMask(MakeMask(GetTarget().GetReturnRegId()));

    SaveCallerRegisters(param_regs, VRegMask(), false);

    FillCallParams({src});
    // Indirect call through the thread-local entrypoint table
    MemRef entry(ThreadReg(), GetRuntime()->GetEntrypointTlsOffset(GetArch(), id));
    GetEncoder()->MakeCall(entry);
    // Stack map so the runtime can walk/deopt this frame at the call site
    CreateStackMap(inst);
    LoadCallerRegisters(param_regs, VRegMask(), false);
}
4672
CreateMonitorCallOld(MonitorInst * inst)4673 void Codegen::CreateMonitorCallOld(MonitorInst *inst)
4674 {
4675 auto src = ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // obj
4676 auto dst = ConvertRegister(inst->GetDstReg(), inst->GetType());
4677 auto id = inst->IsExit() ? EntrypointId::UNLOCK_OBJECT : EntrypointId::LOCK_OBJECT;
4678 CallRuntime(inst, id, dst, {src});
4679 }
4680
/**
 * Emits a CheckCast against an interface target by calling the
 * CHECK_CAST_INTERFACE entrypoint with (object, interface).  Only live
 * registers overlapping the two parameter registers are saved around the call.
 */
void Codegen::CreateCheckCastInterfaceCall(Inst *inst)
{
    auto enc = GetEncoder();
    auto param_regs {GetLiveRegisters(inst).first};
    param_regs &= GetTarget().GetParamRegsMask(2U);
    SaveCallerRegisters(param_regs, VRegMask(), false);
    auto obj = ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);
    auto interface = ConvertRegister(inst->GetSrcReg(SECOND_OPERAND), DataType::REFERENCE);
    FillCallParams({obj, interface});
    // Indirect call through the thread-local entrypoint table
    MemRef entry(ThreadReg(), GetRuntime()->GetEntrypointTlsOffset(GetArch(), EntrypointId::CHECK_CAST_INTERFACE));
    enc->MakeCall(entry);
    // Stack map so the runtime can unwind/throw from the entrypoint
    CreateStackMap(inst);
    LoadCallerRegisters(param_regs, VRegMask(), false);
}
4695
VisitMonitor(GraphVisitor * visitor,Inst * inst)4696 void EncodeVisitor::VisitMonitor(GraphVisitor *visitor, Inst *inst)
4697 {
4698 auto *enc = static_cast<EncodeVisitor *>(visitor);
4699 if (enc->GetCodegen()->GetArch() == Arch::AARCH32) {
4700 enc->GetCodegen()->CreateMonitorCallOld(inst->CastToMonitor());
4701 } else {
4702 enc->GetCodegen()->CreateMonitorCall(inst->CastToMonitor());
4703 }
4704 }
4705
AcquireNonLiveReg(RegMask * livemask)4706 Reg Codegen::AcquireNonLiveReg(RegMask *livemask)
4707 {
4708 Reg reg = INVALID_REGISTER;
4709 for (int k = 0; k < MAX_NUM_REGS; ++k) {
4710 if (!livemask->test(k)) {
4711 reg = Reg(k, INT32_TYPE);
4712 livemask->set(k);
4713 break;
4714 }
4715 }
4716 ASSERT(reg != INVALID_REGISTER);
4717 return reg;
4718 }
4719
/**
 * Records the just-emitted memory access of `inst` as an implicit null check
 * in the code builder and attaches a stack map for the folded NullCheck.
 * `prevOffset` is the encoder cursor before the access was emitted; the delta
 * identifies the faulting instruction inside the emitted range.
 * No-op when the instruction is not eligible or cannot throw.
 * NOTE(review): presumably the runtime uses this table to map a hardware
 * fault at that PC to a NullPointerException — confirm in the code builder.
 */
void Codegen::TryInsertImplicitNullCheck(Inst *inst, size_t prevOffset)
{
    if (!IsSuitableForImplicitNullCheck(inst)) {
        return;
    }
    if (!inst->CanThrow()) {
        return;
    }

    // Input 0 must be the implicit NullCheck whose semantics are folded
    // into this memory access
    auto nullcheck = inst->GetInput(0).GetInst();
    ASSERT(nullcheck->GetOpcode() == Opcode::NullCheck && nullcheck->CastToNullCheck()->IsImplicit());
    auto curr_offset = GetEncoder()->GetCursorOffset();
    ASSERT(curr_offset > prevOffset);
    GetCodeBuilder()->AddImplicitNullCheck(curr_offset, curr_offset - prevOffset);
    // Stack map is keyed to the NullCheck so exception state is correct
    CreateStackMap(nullcheck, inst);
}
4736
// Intrinsic: dst = IsInf(src[0]).  Lowered to a single encoder instruction,
// so the instruction pointer and live-register mask are intentionally unused.
void Codegen::CreateFloatIsInf([[maybe_unused]] IntrinsicInst *inst, Reg dst, SRCREGS src,
                               [[maybe_unused]] RegMask *lvrmask)
{
    GetEncoder()->EncodeIsInf(dst, src[0]);
}
4742
4743 #include "intrinsics_codegen.inl"
4744
/**
 * Emits an intrinsic that has an inline (builtin) implementation.
 * Gathers the fixed-register sources and destination, builds the live
 * general-register mask that FillBuiltin/AcquireNonLiveReg operate on,
 * and delegates the actual encoding to FillBuiltin.
 */
void Codegen::CreateBuiltinIntrinsic(IntrinsicInst *inst)
{
    Reg dst = INVALID_REGISTER;
    SRCREGS src;

    if (!inst->NoDest()) {
        dst = ConvertRegister(inst->GetDstReg(), inst->GetType());
    }

    auto live_regs = GetLiveRegisters(inst).first;
    /* regalloc doesn't set the destination register in the live
     * registers mask as it considers it dead after the instruction
     * is executed, set this bit for AcquireNonLiveReg() to work
     * correctly */
    if (dst != INVALID_REGISTER && !DataType::IsFloatType(inst->GetType())) {
        live_regs.set(dst.GetId());
    }
    for (size_t i = 0; i < inst->GetInputsCount(); ++i) {
        // SaveState inputs are bookkeeping, not call arguments
        if (inst->GetInput(i).GetInst()->IsSaveState()) {
            continue;
        }
        auto location = inst->GetLocation(i);
        ASSERT(location.IsFixedRegister());
        auto type = inst->GetInputType(i);
        src[i] = ConvertRegister(location.GetValue(), type);
        // Only general-purpose registers participate in the live mask
        if (!DataType::IsFloatType(type)) {
            live_regs.set(src[i].GetId());
        }
    }
    FillBuiltin(inst, src, dst, &live_regs);
}
4776
VisitIntrinsic(GraphVisitor * visitor,Inst * inst)4777 void EncodeVisitor::VisitIntrinsic(GraphVisitor *visitor, Inst *inst)
4778 {
4779 auto *enc = static_cast<EncodeVisitor *>(visitor);
4780 auto codegen = enc->GetCodegen();
4781 auto intrinsic = inst->CastToIntrinsic();
4782 auto id = intrinsic->GetIntrinsicId();
4783 auto arch = codegen->GetGraph()->GetArch();
4784 auto runtime = codegen->GetGraph()->GetRuntime();
4785 if (EncodesBuiltin(runtime, id, arch) || IsIrtocIntrinsic(id)) {
4786 codegen->CreateBuiltinIntrinsic(intrinsic);
4787 return;
4788 }
4789 codegen->CreateCallIntrinsic(intrinsic);
4790 }
4791
/**
 * Lowers BoundsCheckI: bounds check against a constant index.
 * Branches to the out-of-bounds slow path when len_reg <= imm
 * (LS, i.e. unsigned lower-or-same), selecting the array or string
 * exception entrypoint according to the instruction.
 */
void EncodeVisitor::VisitBoundsCheckI(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto len_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), inst->GetInputType(0));

    // Input 1 carries the deopt/exception state
    ASSERT(inst->GetInput(1).GetInst()->GetOpcode() == Opcode::SaveState);
    EntrypointId entrypoint = inst->CastToBoundsCheckI()->IsArray()
                                  ? EntrypointId::ARRAY_INDEX_OUT_OF_BOUNDS_EXCEPTION
                                  : EntrypointId::STRING_INDEX_OUT_OF_BOUNDS_EXCEPTION;

    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, entrypoint);

    auto value = inst->CastToBoundsCheckI()->GetImm();
    // Compare against the immediate directly when the target can encode it,
    // otherwise materialize the immediate in a temporary register first
    if (enc->GetEncoder()->CanEncodeImmAddSubCmp(value, WORD_SIZE, false)) {
        auto imm = enc->GetCodegen()->ConvertImm(value, DataType::INT64);
        enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), len_reg, imm, Condition::LS);
    } else {
        ScopedTmpReg tmp(enc->GetEncoder(), len_reg.GetType());
        enc->GetEncoder()->EncodeMov(tmp, Imm(value));
        enc->GetEncoder()->EncodeJump(slow_path->GetLabel(), len_reg, tmp, Condition::LS);
    }
}
4814
/**
 * Lowers StoreArrayI: store `value` at the constant index `imm` of an array.
 * Emits GC pre/post write barriers when required and registers the store
 * itself as an implicit null check for the array reference.
 */
void EncodeVisitor::VisitStoreArrayI(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto array_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);
    auto type = inst->GetType();
    auto value = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
    auto index = inst->CastToStoreArrayI()->GetImm();
    // offset = data start + index * element size
    int64_t offset = enc->cg_->GetGraph()->GetRuntime()->GetArrayDataOffset(enc->GetCodegen()->GetArch()) +
                     (index << DataType::ShiftByType(type, enc->GetCodegen()->GetArch()));
    auto object_header_mem = MemRef(array_reg);
    auto mem = MemRef(array_reg, offset);
    if (inst->CastToStoreArrayI()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePreWRB(inst, mem);
    }
    // Remember the cursor so the store can be registered as the implicit null check
    auto prev_offset = enc->GetEncoder()->GetCursorOffset();
    enc->GetEncoder()->EncodeStr(value, mem);
    enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
    if (inst->CastToStoreArrayI()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, value, INVALID_REGISTER);
    }
}
4836
/**
 * Lowers LoadArrayI: load the element at constant index `imm` from an array,
 * or a character from a string when IsArray() is false (only allowed when
 * string compression is disabled, per the ASSERT below).
 */
void EncodeVisitor::VisitLoadArrayI(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto inst_load_array_i = inst->CastToLoadArrayI();
    auto runtime = enc->cg_->GetGraph()->GetRuntime();
    ASSERT(inst_load_array_i->IsArray() || !runtime->IsCompressedStringsEnabled());
    if (inst_load_array_i->GetNeedBarrier()) {
        // !TODO Ishin Pavel inserts barriers for GC
    }
    auto array_reg = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0));
    uint32_t index = inst_load_array_i->GetImm();
    auto type = inst->GetType();
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);
    // Array and string payloads start at different header offsets
    int64_t data_offset = inst_load_array_i->IsArray() ? runtime->GetArrayDataOffset(enc->GetArch())
                                                       : runtime->GetStringDataOffset(enc->GetArch());
    uint32_t shift = DataType::ShiftByType(type, enc->GetArch());
    // offset = data start + index * element size
    int64_t offset = data_offset + (index << shift);
    auto mem = MemRef(array_reg, offset);
    auto encoder = enc->GetEncoder();
    auto arch = enc->GetArch();
    // NOTE(review): scoped_tmp is acquired but never referenced below — it only
    // reserves a temporary register for the duration of the load; confirm
    // whether the encoder relies on this reservation or it can be removed.
    ScopedTmpReg scoped_tmp(encoder, Codegen::ConvertDataType(DataType::GetIntTypeForReference(arch), arch));
    auto prev_offset = enc->GetEncoder()->GetCursorOffset();
    encoder->EncodeLdr(dst, IsTypeSigned(type), mem);
    enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
}
4862
/**
 * Lowers LoadCompressedStringCharI: load the character at constant index
 * `imm` from a possibly-compressed string.  The compressed/uncompressed
 * dispatch is emitted entirely by the encoder.
 */
void EncodeVisitor::VisitLoadCompressedStringCharI(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto runtime = enc->cg_->GetGraph()->GetRuntime();
    auto type = inst->GetType();
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0));                   // array
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::INT32);  // length
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), type);               // load value
    int32_t offset = runtime->GetStringDataOffset(enc->GetArch());
    auto encoder = enc->GetEncoder();
    auto arch = encoder->GetArch();
    int32_t shift = DataType::ShiftByType(type, arch);
    auto index = inst->CastToLoadCompressedStringCharI()->GetImm();

    ASSERT(encoder->CanEncodeCompressedStringCharAt());
    // The encoder assumes a compression mask of exactly 1
    auto mask = runtime->GetStringCompressionMask();
    if (mask != 1) {
        UNREACHABLE();  // mask is hardcoded in JCL, but verify it just in case it's changed
    }
    enc->GetEncoder()->EncodeCompressedStringCharAtI(dst, src0, src1, offset, index, shift);
}
4884
VisitMultiArray(GraphVisitor * visitor,Inst * inst)4885 void EncodeVisitor::VisitMultiArray(GraphVisitor *visitor, Inst *inst)
4886 {
4887 auto *enc = static_cast<EncodeVisitor *>(visitor);
4888 auto codegen = enc->GetCodegen();
4889
4890 auto array_inst = inst->CastToMultiArray();
4891 codegen->CreateMultiArrayCall(array_inst);
4892 if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
4893 enc->GetEncoder()->EncodeMemoryBarrier(MemoryOrder::Release);
4894 }
4895 }
4896
VisitCallStatic(GraphVisitor * visitor,Inst * inst)4897 void EncodeVisitor::VisitCallStatic(GraphVisitor *visitor, Inst *inst)
4898 {
4899 auto *enc = static_cast<EncodeVisitor *>(visitor);
4900 auto codegen = enc->GetCodegen();
4901
4902 auto call_inst = inst->CastToCallStatic();
4903 codegen->CreateCall(call_inst);
4904 }
4905
VisitUnresolvedCallStatic(GraphVisitor * visitor,Inst * inst)4906 void EncodeVisitor::VisitUnresolvedCallStatic(GraphVisitor *visitor, Inst *inst)
4907 {
4908 auto *enc = static_cast<EncodeVisitor *>(visitor);
4909 auto codegen = enc->GetCodegen();
4910
4911 auto call_inst = inst->CastToUnresolvedCallStatic();
4912 codegen->CreateCall(call_inst);
4913 }
4914
VisitCallVirtual(GraphVisitor * visitor,Inst * inst)4915 void EncodeVisitor::VisitCallVirtual(GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
4916 {
4917 auto *enc = static_cast<EncodeVisitor *>(visitor);
4918 auto call_inst = inst->CastToCallVirtual();
4919
4920 // PC in frame must to be filled by SaveState
4921 enc->GetCodegen()->CreateVirtualCall(call_inst);
4922 }
4923
VisitCallDynamic(GraphVisitor * visitor,Inst * inst)4924 void EncodeVisitor::VisitCallDynamic(GraphVisitor *visitor, Inst *inst)
4925 {
4926 auto *enc = static_cast<EncodeVisitor *>(visitor);
4927 auto codegen = enc->GetCodegen();
4928
4929 auto call_inst = inst->CastToCallDynamic();
4930 codegen->CreateDynamicCall(call_inst);
4931 }
4932
VisitUnresolvedCallVirtual(GraphVisitor * visitor,Inst * inst)4933 void EncodeVisitor::VisitUnresolvedCallVirtual(GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
4934 {
4935 auto *enc = static_cast<EncodeVisitor *>(visitor);
4936 auto call_inst = inst->CastToUnresolvedCallVirtual();
4937
4938 // PC in frame must to be filled by SaveState
4939 enc->GetCodegen()->CreateVirtualCall(call_inst);
4940 }
4941
/**
 * Lowers SafePoint: polls the per-thread flag and branches to the SAFEPOINT
 * entrypoint slow path when the flag is non-zero (e.g. a pending GC request,
 * per the comment on the jump below).
 */
void EncodeVisitor::VisitSafePoint(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto codegen = enc->GetCodegen();
    auto graph = codegen->GetGraph();
    auto encoder = enc->GetEncoder();
    int64_t flag_addr_offset = graph->GetRuntime()->GetFlagAddrOffset(codegen->GetArch());
    ScopedTmpRegU16 tmp(encoder);

    // TMP <= Flag (16-bit load relative to the thread register)
    auto mem = MemRef(codegen->ThreadReg(), flag_addr_offset);
    encoder->EncodeLdr(tmp, false, mem);

    // check value and jump to call GC
    auto slow_path = codegen->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::SAFEPOINT);

    encoder->EncodeJump(slow_path->GetLabel(), tmp, Condition::NE);

    slow_path->BindBackLabel(encoder);
}
4962
VisitSelect(GraphVisitor * visitor,Inst * inst)4963 void EncodeVisitor::VisitSelect(GraphVisitor *visitor, Inst *inst)
4964 {
4965 auto *enc = static_cast<EncodeVisitor *>(visitor);
4966 auto dst_type = inst->GetType();
4967 auto cmp_type = inst->CastToSelect()->GetOperandsType();
4968
4969 constexpr int32_t IMM_2 = 2;
4970 constexpr int32_t IMM_3 = 3;
4971 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), dst_type);
4972 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), dst_type);
4973 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), dst_type);
4974 auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), cmp_type);
4975 auto src3 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_3), cmp_type);
4976 auto cc = enc->GetCodegen()->ConvertCc(inst->CastToSelect()->GetCc());
4977 if (IsTestCc(cc)) {
4978 enc->GetEncoder()->EncodeSelectTest(dst, src0, src1, src2, src3, cc);
4979 } else {
4980 enc->GetEncoder()->EncodeSelect(dst, src0, src1, src2, src3, cc);
4981 }
4982 }
4983
VisitSelectImm(GraphVisitor * visitor,Inst * inst)4984 void EncodeVisitor::VisitSelectImm(GraphVisitor *visitor, Inst *inst)
4985 {
4986 auto *enc = static_cast<EncodeVisitor *>(visitor);
4987 auto dst_type = inst->GetType();
4988 auto cmp_type = inst->CastToSelectImm()->GetOperandsType();
4989
4990 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), dst_type);
4991 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), dst_type);
4992 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), dst_type);
4993 constexpr int32_t IMM_2 = 2;
4994 auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), cmp_type);
4995 auto imm = enc->GetCodegen()->ConvertImm(inst->CastToSelectImm()->GetImm(), cmp_type);
4996 auto cc = enc->GetCodegen()->ConvertCc(inst->CastToSelectImm()->GetCc());
4997 if (IsTestCc(cc)) {
4998 enc->GetEncoder()->EncodeSelectTest(dst, src0, src1, src2, imm, cc);
4999 } else {
5000 enc->GetEncoder()->EncodeSelect(dst, src0, src1, src2, imm, cc);
5001 }
5002 }
5003
VisitIf(GraphVisitor * visitor,Inst * inst)5004 void EncodeVisitor::VisitIf(GraphVisitor *visitor, Inst *inst)
5005 {
5006 auto *enc = static_cast<EncodeVisitor *>(visitor);
5007
5008 auto bb = inst->GetBasicBlock();
5009 auto label = bb->GetTrueSuccessor()->GetId();
5010
5011 auto type = inst->CastToIf()->GetOperandsType();
5012 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
5013 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
5014 auto cc = enc->GetCodegen()->ConvertCc(inst->CastToIf()->GetCc());
5015 if (IsTestCc(cc)) {
5016 enc->GetEncoder()->EncodeJumpTest(label, src0, src1, cc);
5017 } else {
5018 enc->GetEncoder()->EncodeJump(label, src0, src1, cc);
5019 }
5020 }
5021
VisitIfImm(GraphVisitor * visitor,Inst * inst)5022 void EncodeVisitor::VisitIfImm(GraphVisitor *visitor, Inst *inst)
5023 {
5024 auto *enc = static_cast<EncodeVisitor *>(visitor);
5025
5026 auto bb = inst->GetBasicBlock();
5027 auto label = bb->GetTrueSuccessor()->GetId();
5028
5029 auto type = inst->CastToIfImm()->GetOperandsType();
5030 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
5031 auto imm = enc->GetCodegen()->ConvertImm(inst->CastToIfImm()->GetImm(), DataType::INT64);
5032 auto cc = enc->GetCodegen()->ConvertCc(inst->CastToIfImm()->GetCc());
5033 if (IsTestCc(cc)) {
5034 enc->GetEncoder()->EncodeJumpTest(label, src0, imm, cc);
5035 } else {
5036 enc->GetEncoder()->EncodeJump(label, src0, imm, cc);
5037 }
5038 }
5039
VisitAddOverflow(GraphVisitor * visitor,Inst * inst)5040 void EncodeVisitor::VisitAddOverflow(GraphVisitor *visitor, Inst *inst)
5041 {
5042 auto *enc = static_cast<EncodeVisitor *>(visitor);
5043
5044 auto bb = inst->GetBasicBlock();
5045 auto label = bb->GetTrueSuccessor()->GetId();
5046
5047 auto type = inst->CastToAddOverflow()->GetOperandsType();
5048 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
5049 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
5050 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
5051 auto cc = enc->GetCodegen()->ConvertCcOverflow(inst->CastToAddOverflow()->GetCc());
5052 enc->GetEncoder()->EncodeAddOverflow(label, dst, src0, src1, cc);
5053 }
5054
/**
 * Lowers AddOverflowCheck: dst = src0 + src1 with deoptimization on the
 * VS (overflow) condition.
 * NOTE(review): sources are built as raw 32-bit Regs from the source register
 * ids rather than via ConvertRegister — presumably this check is only
 * generated for INT32 operands; confirm against the IR builder.
 */
void EncodeVisitor::VisitAddOverflowCheck(GraphVisitor *visitor, Inst *inst)
{
    ASSERT(DataType::IsTypeNumeric(inst->GetInput(0).GetInst()->GetType()));
    ASSERT(DataType::IsTypeNumeric(inst->GetInput(1).GetInst()->GetType()));
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::DEOPTIMIZE);
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto src0 = Reg(inst->GetSrcReg(0), INT32_TYPE);
    auto src1 = Reg(inst->GetSrcReg(1), INT32_TYPE);
    enc->GetEncoder()->EncodeAddOverflow(slow_path->GetLabel(), dst, src0, src1, Condition::VS);
}
5066
VisitSubOverflow(GraphVisitor * visitor,Inst * inst)5067 void EncodeVisitor::VisitSubOverflow(GraphVisitor *visitor, Inst *inst)
5068 {
5069 auto *enc = static_cast<EncodeVisitor *>(visitor);
5070
5071 auto bb = inst->GetBasicBlock();
5072 auto label = bb->GetTrueSuccessor()->GetId();
5073
5074 auto type = inst->CastToSubOverflow()->GetOperandsType();
5075 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
5076 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), type);
5077 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type);
5078 auto cc = enc->GetCodegen()->ConvertCcOverflow(inst->CastToSubOverflow()->GetCc());
5079 enc->GetEncoder()->EncodeSubOverflow(label, dst, src0, src1, cc);
5080 }
5081
/**
 * Lowers SubOverflowCheck: dst = src0 - src1 with deoptimization on the
 * VS (overflow) condition.
 * NOTE(review): sources are built as raw 32-bit Regs from the source register
 * ids rather than via ConvertRegister — presumably this check is only
 * generated for INT32 operands; confirm against the IR builder.
 */
void EncodeVisitor::VisitSubOverflowCheck(GraphVisitor *visitor, Inst *inst)
{
    ASSERT(DataType::IsTypeNumeric(inst->GetInput(0).GetInst()->GetType()));
    ASSERT(DataType::IsTypeNumeric(inst->GetInput(1).GetInst()->GetType()));
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::DEOPTIMIZE);
    auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto src0 = Reg(inst->GetSrcReg(0), INT32_TYPE);
    auto src1 = Reg(inst->GetSrcReg(1), INT32_TYPE);
    enc->GetEncoder()->EncodeSubOverflow(slow_path->GetLabel(), dst, src0, src1, Condition::VS);
}
5093
/**
 * Lowers LoadArrayPair: load two consecutive array elements at a dynamic
 * index into dst0/dst1 with a single paired load.  The scaled element address
 * is computed into a temp first; the load itself doubles as the implicit
 * null check for the array reference.
 */
void EncodeVisitor::VisitLoadArrayPair(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    if (inst->CastToLoadArrayPair()->GetNeedBarrier()) {
        // !TODO Ishin Pavel inserts barriers for GC
    }
    auto type = inst->GetType();
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // array
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::INT32);      // index
    auto dst0 = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(0), type);                 // first value
    auto dst1 = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(1), type);                 // second value
    int64_t offset = enc->cg_->GetGraph()->GetRuntime()->GetArrayDataOffset(enc->GetCodegen()->GetArch());
    ScopedTmpReg tmp(enc->GetEncoder());

    // tmp = array + (index << scale); the data-start offset is folded into the MemRef
    int32_t scale = DataType::ShiftByType(type, enc->GetCodegen()->GetArch());
    enc->GetEncoder()->EncodeAdd(tmp, src0, Shift(src1, scale));
    auto mem = MemRef(tmp, offset);
    auto prev_offset = enc->GetEncoder()->GetCursorOffset();
    enc->GetEncoder()->EncodeLdp(dst0, dst1, IsTypeSigned(type), mem);
    enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
}
5115
/**
 * Lowers LoadArrayPairI: load two consecutive array elements starting at the
 * constant index `imm` into dst0/dst1 with a single paired load.  The load
 * doubles as the implicit null check for the array reference.
 */
void EncodeVisitor::VisitLoadArrayPairI(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    if (inst->CastToLoadArrayPairI()->GetNeedBarrier()) {
        // !TODO Ishin Pavel inserts barriers for GC
    }
    auto type = inst->GetType();
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // array
    auto dst0 = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(0), type);                 // first value
    auto dst1 = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(1), type);                 // second value
    uint64_t index = inst->CastToLoadArrayPairI()->GetImm();
    // offset = data start + index * element size
    int64_t offset = enc->cg_->GetGraph()->GetRuntime()->GetArrayDataOffset(enc->GetCodegen()->GetArch()) +
                     (index << DataType::ShiftByType(type, enc->GetCodegen()->GetArch()));
    auto mem = MemRef(src0, offset);

    auto prev_offset = enc->GetEncoder()->GetCursorOffset();
    enc->GetEncoder()->EncodeLdp(dst0, dst1, IsTypeSigned(type), mem);
    enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
}
5135
/**
 * It is a pseudo instruction that is needed to separate multiple outputs from a single instruction in SSA such as
 * LoadArrayPair and LoadArrayPairI.  Both output values are already materialized by the pair load itself, so no
 * code generation is required here.
 */
void EncodeVisitor::VisitLoadPairPart([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst) {}
5141
/**
 * Lowers StoreArrayPair: store two values into consecutive array slots at a
 * dynamic index with a single paired store, emitting GC pre/post write
 * barriers when needed.  The store doubles as the implicit null check.
 */
void EncodeVisitor::VisitStoreArrayPair(GraphVisitor *visitor, Inst *inst)
{
    auto *enc = static_cast<EncodeVisitor *>(visitor);
    auto type = inst->GetType();
    auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE);  // array
    auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), DataType::INT32);      // index
    constexpr int32_t IMM_2 = 2;
    auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), type);  // first value
    constexpr int32_t IMM_3 = 3;
    auto src3 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_3), type);  // second value
    int32_t offset = enc->cg_->GetGraph()->GetRuntime()->GetArrayDataOffset(enc->GetCodegen()->GetArch());
    int32_t scale = DataType::ShiftByType(type, enc->GetCodegen()->GetArch());

    // NOTE(review): ScopedLiveTmpReg (vs plain ScopedTmpReg) presumably keeps
    // tmp live across the pre-WRB call below — confirm its semantics.
    ScopedLiveTmpReg tmp(enc->GetEncoder(), src0.GetType());

    auto object_header_mem = MemRef(src0);
    // tmp = array + (index << scale); data-start offset folded into the MemRef
    enc->GetEncoder()->EncodeAdd(tmp, src0, Shift(src1, scale));
    auto mem = MemRef(tmp, offset);
    if (inst->CastToStoreArrayPair()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePreWRB(inst, mem, true);
    }
    auto prev_offset = enc->GetEncoder()->GetCursorOffset();
    enc->GetEncoder()->EncodeStp(src2, src3, mem);
    enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
    if (inst->CastToStoreArrayPair()->GetNeedBarrier()) {
        enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src2, src3);
    }
}
5170
VisitStoreArrayPairI(GraphVisitor * visitor,Inst * inst)5171 void EncodeVisitor::VisitStoreArrayPairI(GraphVisitor *visitor, Inst *inst)
5172 {
5173 auto *enc = static_cast<EncodeVisitor *>(visitor);
5174 auto type = inst->GetType();
5175 auto src0 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(0), DataType::REFERENCE); // array
5176 auto src1 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(1), type); // first value
5177 constexpr int32_t IMM_2 = 2;
5178 auto src2 = enc->GetCodegen()->ConvertRegister(inst->GetSrcReg(IMM_2), type); // second value
5179 auto index = inst->CastToStoreArrayPairI()->GetImm();
5180 int64_t offset = enc->cg_->GetGraph()->GetRuntime()->GetArrayDataOffset(enc->GetCodegen()->GetArch()) +
5181 (index << DataType::ShiftByType(type, enc->GetCodegen()->GetArch()));
5182 auto object_header_mem = MemRef(src0);
5183 auto mem = MemRef(src0, offset);
5184 if (inst->CastToStoreArrayPairI()->GetNeedBarrier()) {
5185 enc->GetCodegen()->CreatePreWRB(inst, mem, true);
5186 }
5187 auto prev_offset = enc->GetEncoder()->GetCursorOffset();
5188 enc->GetEncoder()->EncodeStp(src1, src2, mem);
5189 enc->GetCodegen()->TryInsertImplicitNullCheck(inst, prev_offset);
5190 if (inst->CastToStoreArrayPairI()->GetNeedBarrier()) {
5191 enc->GetCodegen()->CreatePostWRB(inst, object_header_mem, src1, src2);
5192 }
5193 }
5194
void EncodeVisitor::VisitNOP([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
{
    // NOPs are expected to be removed by earlier passes; emit no code, but
    // log in debug builds so leftover NOPs are noticed.
#ifndef NDEBUG
    COMPILER_LOG(DEBUG, CODEGEN) << "The NOP wasn't removed before " << *inst;
#endif
}
5201
VisitThrow(GraphVisitor * visitor,Inst * inst)5202 void EncodeVisitor::VisitThrow(GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
5203 {
5204 auto codegen = static_cast<EncodeVisitor *>(visitor)->GetCodegen();
5205 SlowPathCheck slow_path(codegen->GetEncoder()->CreateLabel(), inst, EntrypointId::THROW_EXCEPTION);
5206 slow_path.Generate(codegen);
5207 }
5208
VisitDeoptimize(GraphVisitor * visitor,Inst * inst)5209 void EncodeVisitor::VisitDeoptimize(GraphVisitor *visitor, Inst *inst)
5210 {
5211 auto codegen = static_cast<EncodeVisitor *>(visitor)->GetCodegen();
5212 ASSERT(inst->GetSaveState() != nullptr);
5213
5214 SlowPathCheck slow_path(codegen->GetEncoder()->CreateLabel(), inst, EntrypointId::DEOPTIMIZE);
5215 slow_path.Generate(codegen);
5216 }
5217
VisitIsMustDeoptimize(GraphVisitor * visitor,Inst * inst)5218 void EncodeVisitor::VisitIsMustDeoptimize(GraphVisitor *visitor, Inst *inst)
5219 {
5220 auto *codegen = static_cast<EncodeVisitor *>(visitor)->GetCodegen();
5221 auto *enc = static_cast<EncodeVisitor *>(visitor)->GetEncoder();
5222 auto dst = codegen->ConvertRegister(inst->GetDstReg(), inst->GetType());
5223 auto offset = CFrameFlags::GetOffsetFromSpInBytes(codegen->GetFrameLayout());
5224
5225 enc->EncodeLdr(dst, false, MemRef(codegen->SpReg(), offset));
5226 enc->EncodeAnd(dst, dst, Imm(1));
5227 }
5228
VisitGetInstanceClass(GraphVisitor * visitor,Inst * inst)5229 void EncodeVisitor::VisitGetInstanceClass(GraphVisitor *visitor, Inst *inst)
5230 {
5231 auto *codegen = static_cast<EncodeVisitor *>(visitor)->GetCodegen();
5232 auto dst = codegen->ConvertRegister(inst->GetDstReg(), inst->GetType());
5233 auto obj_reg = codegen->ConvertRegister(inst->GetSrcReg(0), inst->GetType());
5234 ASSERT(obj_reg.IsValid());
5235 codegen->LoadClassFromObject(dst, obj_reg);
5236 }
5237
VisitClassImmediate(GraphVisitor * visitor,Inst * inst)5238 void EncodeVisitor::VisitClassImmediate(GraphVisitor *visitor, Inst *inst)
5239 {
5240 auto *codegen = static_cast<EncodeVisitor *>(visitor)->GetCodegen();
5241 auto dst = codegen->ConvertRegister(inst->GetDstReg(), inst->GetType());
5242
5243 codegen->GetEncoder()->EncodeMov(dst,
5244 Imm(reinterpret_cast<uintptr_t>(inst->CastToClassImmediate()->GetClassPtr())));
5245 }
5246
// Pseudo instruction: no code is generated for it.
void EncodeVisitor::VisitRegDef([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst) {}
5248
// Pseudo instruction: no code is generated for it.
void EncodeVisitor::VisitLiveIn([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst) {}
5250
VisitLiveOut(GraphVisitor * visitor,Inst * inst)5251 void EncodeVisitor::VisitLiveOut([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst)
5252 {
5253 auto enc = static_cast<EncodeVisitor *>(visitor);
5254 auto codegen = enc->GetCodegen();
5255
5256 codegen->AddLiveOut(inst->GetBasicBlock(), inst->GetDstReg());
5257
5258 auto dst_reg = codegen->ConvertRegister(inst->GetDstReg(), inst->GetType());
5259 if (codegen->GetTarget().GetTempRegsMask().Test(dst_reg.GetId()) &&
5260 enc->GetEncoder()->IsScratchRegisterReleased(dst_reg)) {
5261 enc->GetEncoder()->AcquireScratchRegister(dst_reg);
5262 }
5263
5264 if (inst->GetSrcReg(0) != inst->GetDstReg()) {
5265 enc->GetEncoder()->EncodeMov(dst_reg, codegen->ConvertRegister(inst->GetSrcReg(0), inst->GetType()));
5266 }
5267 }
5268
VisitCompareAnyType(GraphVisitor * visitor,Inst * inst)5269 void EncodeVisitor::VisitCompareAnyType(GraphVisitor *visitor, Inst *inst)
5270 {
5271 auto enc = static_cast<EncodeVisitor *>(visitor);
5272 const auto *cati = inst->CastToCompareAnyType();
5273
5274 if (cati->GetInputType(0) != DataType::Type::ANY) {
5275 enc->GetEncoder()->EncodeAbort();
5276 UNREACHABLE();
5277 return;
5278 }
5279
5280 if (TryCompareAnyTypePluginGen(cati, enc)) {
5281 return;
5282 }
5283
5284 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), inst->GetType());
5285 if (cati->GetAnyType() == AnyBaseType::UNDEFINED_TYPE) {
5286 enc->GetEncoder()->EncodeMov(dst, Imm(true));
5287 } else {
5288 enc->GetEncoder()->EncodeMov(dst, Imm(false));
5289 }
5290 }
5291
VisitCastAnyTypeValue(GraphVisitor * visitor,Inst * inst)5292 void EncodeVisitor::VisitCastAnyTypeValue(GraphVisitor *visitor, Inst *inst)
5293 {
5294 auto enc = static_cast<EncodeVisitor *>(visitor);
5295 const auto *cati = inst->CastToCastAnyTypeValue();
5296
5297 if (cati->GetInputType(0) != DataType::Type::ANY) {
5298 enc->GetEncoder()->EncodeAbort();
5299 UNREACHABLE();
5300 return;
5301 }
5302
5303 if (TryCastAnyTypeValuePluginGen(cati, enc)) {
5304 return;
5305 }
5306
5307 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), DataType::Type::BOOL);
5308 enc->GetEncoder()->EncodeMov(dst, Imm(0));
5309 }
5310
VisitCastValueToAnyType(GraphVisitor * visitor,Inst * inst)5311 void EncodeVisitor::VisitCastValueToAnyType(GraphVisitor *visitor, Inst *inst)
5312 {
5313 auto enc = static_cast<EncodeVisitor *>(visitor);
5314 const auto *cvai = inst->CastToCastValueToAnyType();
5315
5316 if (cvai->GetInputType(0) == DataType::Type::ANY) {
5317 enc->GetEncoder()->EncodeAbort();
5318 UNREACHABLE();
5319 return;
5320 }
5321
5322 if (TryCastValueToAnyTypePluginGen(cvai, enc)) {
5323 return;
5324 }
5325
5326 auto dst = enc->GetCodegen()->ConvertRegister(inst->GetDstReg(), DataType::Type::BOOL);
5327 enc->GetEncoder()->EncodeMov(dst, Imm(0));
5328 }
5329
VisitAnyTypeCheck(GraphVisitor * visitor,Inst * inst)5330 void EncodeVisitor::VisitAnyTypeCheck(GraphVisitor *visitor, Inst *inst)
5331 {
5332 auto enc = static_cast<EncodeVisitor *>(visitor);
5333 const auto *check_inst = inst->CastToAnyTypeCheck();
5334
5335 if (check_inst->GetInputType(0) != DataType::Type::ANY) {
5336 enc->GetEncoder()->EncodeAbort();
5337 UNREACHABLE();
5338 }
5339 // Empty check
5340 if (check_inst->GetAnyType() == AnyBaseType::UNDEFINED_TYPE) {
5341 return;
5342 }
5343 auto slow_path = enc->GetCodegen()->CreateSlowPath<SlowPathCheck>(inst, EntrypointId::DEOPTIMIZE);
5344
5345 if (TryAnyTypeCheckPluginGen(check_inst, enc, slow_path->GetLabel())) {
5346 return;
5347 }
5348 UNREACHABLE();
5349 }
5350
5351 /**
5352 * Returns true if codegen emits call(s) to some library function(s)
5353 * while processing the instruction.
5354 */
InstEncodedWithLibCall(const Inst * inst,Arch arch)5355 bool Codegen::InstEncodedWithLibCall(const Inst *inst, Arch arch)
5356 {
5357 ASSERT(inst != nullptr);
5358 Opcode op = inst->GetOpcode();
5359 if (op == Opcode::Mod) {
5360 auto dst_type = inst->GetType();
5361 if (arch == Arch::AARCH64 || arch == Arch::X86_64) {
5362 return dst_type == DataType::FLOAT32 || dst_type == DataType::FLOAT64;
5363 }
5364 return arch == Arch::AARCH32;
5365 }
5366 if (op == Opcode::Div && arch == Arch::AARCH32) {
5367 auto dst_type = inst->GetType();
5368 return dst_type == DataType::INT64 || dst_type == DataType::UINT64;
5369 }
5370 if (op == Opcode::Cast && arch == Arch::AARCH32) {
5371 auto dst_type = inst->GetType();
5372 auto src_type = inst->GetInputType(0);
5373 if (dst_type == DataType::FLOAT32 || dst_type == DataType::FLOAT64) {
5374 return src_type == DataType::INT64 || src_type == DataType::UINT64;
5375 }
5376 if (src_type == DataType::FLOAT32 || src_type == DataType::FLOAT64) {
5377 return dst_type == DataType::INT64 || dst_type == DataType::UINT64;
5378 }
5379 return false;
5380 }
5381 if (op == Opcode::LoadObject) {
5382 return inst->CastToLoadObject()->GetNeedBarrier();
5383 }
5384 if (op == Opcode::StoreObject) {
5385 return inst->CastToStoreObject()->GetNeedBarrier();
5386 }
5387 if (op == Opcode::LoadArray) {
5388 return inst->CastToLoadArray()->GetNeedBarrier();
5389 }
5390 if (op == Opcode::StoreArray) {
5391 return inst->CastToStoreArray()->GetNeedBarrier();
5392 }
5393 if (op == Opcode::LoadArrayI) {
5394 return inst->CastToLoadArrayI()->GetNeedBarrier();
5395 }
5396 if (op == Opcode::StoreArrayI) {
5397 return inst->CastToStoreArrayI()->GetNeedBarrier();
5398 }
5399 if (op == Opcode::LoadArrayPair) {
5400 return inst->CastToLoadArrayPair()->GetNeedBarrier();
5401 }
5402 if (op == Opcode::StoreArrayPair) {
5403 return inst->CastToStoreArrayPair()->GetNeedBarrier();
5404 }
5405 if (op == Opcode::LoadArrayPairI) {
5406 return inst->CastToLoadArrayPairI()->GetNeedBarrier();
5407 }
5408 if (op == Opcode::StoreArrayPairI) {
5409 return inst->CastToStoreArrayPairI()->GetNeedBarrier();
5410 }
5411 if (op == Opcode::LoadStatic) {
5412 return inst->CastToLoadStatic()->GetNeedBarrier();
5413 }
5414 if (op == Opcode::StoreStatic) {
5415 return inst->CastToStoreStatic()->GetNeedBarrier();
5416 }
5417 return false;
5418 }
5419 } // namespace panda::compiler
5420