/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

16 #include "aarch64_obj_emitter.h"
17
18 namespace {
19 enum ShiftNumber : maple::uint8 {
20 kShiftFour = 4,
21 kShiftFive = 5,
22 kShiftSix = 6,
23 kShiftEight = 8,
24 kShiftTen = 10,
25 kShiftTwelve = 12,
26 kShiftThirteen = 13,
27 kShiftFifteen = 15,
28 kShiftSixteen = 16,
29 kShiftNineteen = 19,
30 kShiftTwenty = 20,
31 kShiftTwentyOne = 21,
32 kShiftTwentyTwo = 22,
33 kShiftTwentyFour = 24,
34 kShiftTwentyNine = 29,
35 };
36
37 enum ShiftTypeValue : maple::uint32 {
38 kShiftLSL = 0,
39 kShiftLSR = 1,
40 kShiftASR = 2,
41 };
42
43 /* from armv8 manual C1.2.3 */
44 maple::uint8 ccEncode[maplebe::kCcLast] = {
45 #define CONDCODE(a, encode) (encode),
46 #include "aarch64_cc.def"
47 #undef CONDCODE
48 };
49 }; // namespace
50
51 namespace maplebe {
52 /* fixup b .label, b(cond) .label, ldr label insn */
HandleLocalBranchFixup(const std::vector<uint32> & label2Offset,const std::vector<uint32> & symbol2Offset)53 void AArch64ObjFuncEmitInfo::HandleLocalBranchFixup(const std::vector<uint32> &label2Offset,
54 const std::vector<uint32> &symbol2Offset)
55 {
56 for (auto *fixup : localFixups) {
57 uint32 useOffset = fixup->GetOffset();
58 uint32 useLabelIndex = fixup->GetLabelIndex();
59 uint32 defOffset = label2Offset[useLabelIndex];
60
61 FixupKind fixupKind = fixup->GetFixupKind();
62 CHECK_FATAL((defOffset != 0xFFFFFFFFULL || static_cast<AArch64FixupKind>(fixupKind) == kAArch64LoadPCRelImm19),
63 "fixup is not local");
64 if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64CondBranchPCRelImm19 ||
65 static_cast<AArch64FixupKind>(fixupKind) == kAArch64CompareBranchPCRelImm19) {
66 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
67 uint32 mask = 0x7FFFF;
68 #ifdef EMIT_DEBUG
69 LogInfo::MapleLogger() << "contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
70 #endif
71 CHECK_FATAL(useOffset < textData.size(), "out of range");
72 uint32 newValue = GetTextDataElem32(useOffset) | ((pcRelImm & mask) << kShiftFive);
73 SwapTextData(&newValue, useOffset, sizeof(uint32));
74 #ifdef EMIT_DEBUG
75 LogInfo::MapleLogger() << "after contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
76 #endif
77 } else if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64UnCondBranchPCRelImm26) {
78 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
79 uint32 mask = 0x3FFFFFF;
80 #ifdef EMIT_DEBUG
81 LogInfo::MapleLogger() << "contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
82 #endif
83 CHECK_FATAL(useOffset < textData.size(), "out of vector size!");
84 uint32 newValue = GetTextDataElem32(useOffset) | (pcRelImm & mask);
85 SwapTextData(&newValue, useOffset, sizeof(uint32));
86 #ifdef EMIT_DEBUG
87 LogInfo::MapleLogger() << "after contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
88 #endif
89 } else if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64TestBranchPCRelImm14) {
90 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
91 uint32 mask = 0x3FFF;
92 CHECK_FATAL(useOffset < textData.size(), "out of vector size");
93 uint32 newValue = GetTextDataElem32(useOffset) | ((pcRelImm & mask) << kShiftFive);
94 SwapTextData(&newValue, useOffset, sizeof(uint32));
95 } else if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64LoadPCRelImm19) {
96 defOffset = symbol2Offset[useLabelIndex];
97 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
98 uint32 mask = 0x7FFFF;
99 uint32 newValue = GetTextDataElem32(useOffset) | ((pcRelImm & mask) << kShiftFive);
100 SwapTextData(&newValue, useOffset, sizeof(uint32));
101 }
102 }
103 localFixups.clear();
104 }
105
HandleTextSectionGlobalFixup()106 void AArch64ObjEmitter::HandleTextSectionGlobalFixup()
107 {
108 for (auto *content : contents) {
109 if (content == nullptr) {
110 continue;
111 }
112 for (auto *fixup : content->GetGlobalFixups()) {
113 switch (static_cast<AArch64FixupKind>(fixup->GetFixupKind())) {
114 case kAArch64CallPCRelImm26: {
115 HandleCallFixup(*content, *fixup);
116 break;
117 }
118 case kAArch64PCRelAdrImm21: {
119 HandleAdrFixup(*content, *fixup);
120 break;
121 }
122 default:
123 DEBUG_ASSERT(false, "unsupported FixupKind");
124 break;
125 }
126 }
127 }
128 }
129
HandleCallFixup(ObjFuncEmitInfo & funcEmitInfo,const Fixup & fixup)130 void AArch64ObjEmitter::HandleCallFixup(ObjFuncEmitInfo &funcEmitInfo, const Fixup &fixup)
131 {
132 AArch64ObjFuncEmitInfo &objFuncEmitInfo = static_cast<AArch64ObjFuncEmitInfo &>(funcEmitInfo);
133 uint32 useOffset = objFuncEmitInfo.GetStartOffset() + fixup.GetOffset();
134 const std::string &funcName = fixup.GetLabel();
135 auto str2objSymbolItr = globalLabel2Offset.find(funcName);
136 if (str2objSymbolItr != globalLabel2Offset.end()) {
137 uint32 defOffset = str2objSymbolItr->second.offset;
138 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
139 uint32 newValue = objFuncEmitInfo.GetTextDataElem32(fixup.GetOffset()) | (pcRelImm & 0x3FFFFFF);
140 objFuncEmitInfo.SwapTextData(&newValue, fixup.GetOffset(), sizeof(uint32));
141 }
142 }
143
HandleAdrFixup(ObjFuncEmitInfo & funcEmitInfo,const Fixup & fixup)144 void AArch64ObjEmitter::HandleAdrFixup(ObjFuncEmitInfo &funcEmitInfo, const Fixup &fixup)
145 {
146 AArch64ObjFuncEmitInfo &objFuncEmitInfo = static_cast<AArch64ObjFuncEmitInfo &>(funcEmitInfo);
147 uint32 useOffset = objFuncEmitInfo.GetStartOffset() + fixup.GetOffset();
148 const std::string &label = fixup.GetLabel();
149 auto str2objSymbolItr = globalLabel2Offset.find(label);
150 if (str2objSymbolItr != globalLabel2Offset.end()) {
151 uint32 defOffset = str2objSymbolItr->second.offset + fixup.GetRelOffset();
152 uint32 pcRelImm = defOffset - useOffset;
153 uint32 immLow = (pcRelImm & 0x3) << kShiftTwentyNine;
154 uint32 immHigh = ((pcRelImm >> k2BitSize) & 0x7FFFF) << kShiftFive;
155 uint32 newValue = objFuncEmitInfo.GetTextDataElem32(fixup.GetOffset()) | immLow | immHigh;
156 objFuncEmitInfo.SwapTextData(&newValue, fixup.GetOffset(), sizeof(uint32));
157 }
158 }
159
AppendTextSectionData()160 void AArch64ObjEmitter::AppendTextSectionData()
161 {
162 auto &contents = GetContents();
163 for (auto *content : contents) {
164 if (content == nullptr) {
165 continue;
166 }
167 MapleVector<uint8> funcTextData = content->GetTextData();
168 textSection->AppendData(funcTextData);
169 }
170 }
171
AppendGlobalLabel()172 void AArch64ObjEmitter::AppendGlobalLabel()
173 {
174 uint32 lastModulePc = cg->GetMIRModule()->GetLastModulePC();
175 auto &contents = GetContents();
176 uint32 offset = lastModulePc;
177 for (size_t i = 0; i < contents.size(); i++) {
178 auto *content = contents[i];
179 if (content == nullptr) {
180 continue;
181 }
182 content->SetStartOffset(offset);
183 ObjLabel objLabel = {offset, content->GetTextDataSize()};
184 std::string funcName(content->GetFuncName().c_str());
185 const auto &emitMemoryManager = CGOptions::GetInstance().GetEmitMemoryManager();
186 if (emitMemoryManager.funcAddressSaver != nullptr) {
187 emitMemoryManager.funcAddressSaver(emitMemoryManager.codeSpace, funcName, offset);
188 }
189 if (emitMemoryManager.codeSpace != nullptr) {
190 auto &offset2StackMapInfo = content->GetOffset2StackMapInfo();
191 for (const auto &elem : offset2StackMapInfo) {
192 const auto &stackMapInfo = elem.second;
193 emitMemoryManager.pc2CallSiteInfoSaver(
194 emitMemoryManager.codeSpace, content->GetStartOffset() + elem.first, stackMapInfo.referenceMap);
195 emitMemoryManager.pc2DeoptInfoSaver(emitMemoryManager.codeSpace, content->GetStartOffset() + elem.first,
196 stackMapInfo.deoptInfo);
197 }
198 offset2StackMapInfo.clear();
199 }
200
201 offset += content->GetTextDataSize();
202 cg->GetMIRModule()->SetCurModulePC(offset);
203 RegisterGlobalLabel(funcName, objLabel);
204 /* register all the start of switch table */
205 const MapleMap<MapleString, uint32> &switchTableOffset = content->GetSwitchTableOffset();
206 for (auto &elem : switchTableOffset) {
207 ObjLabel switchTableLabel = {elem.second + content->GetStartOffset(), 0};
208 RegisterGlobalLabel(elem.first.c_str(), switchTableLabel);
209 }
210 }
211 }
212
AppendSymsToSymTabSec()213 void AArch64ObjEmitter::AppendSymsToSymTabSec()
214 {
215 Address offset = 0;
216 auto &contents = GetContents();
217 for (auto *content : contents) {
218 if (content == nullptr) {
219 continue;
220 }
221 // func symbol
222 AddFuncSymbol(content->GetFuncName(), content->GetTextData().size(), offset);
223 offset += content->GetTextData().size();
224 }
225 }
226
InitSections()227 void AArch64ObjEmitter::InitSections()
228 {
229 (void)memPool->New<DataSection>(" ", SHT_NULL, 0, 0, *this, *memPool);
230 textSection =
231 memPool->New<DataSection>(".text", SHT_PROGBITS, SHF_ALLOC | SHF_EXECINSTR, k4ByteSize, *this, *memPool);
232 dataSection = memPool->New<DataSection>(".data", SHT_PROGBITS, SHF_WRITE | SHF_ALLOC, k8ByteSize, *this, *memPool);
233 strTabSection = memPool->New<StringSection>(".strtab", SHT_STRTAB, 0, 1, *this, *memPool);
234 symbolTabSection =
235 memPool->New<SymbolSection>(".symtab", SHT_SYMTAB, 0, sizeof(Symbol), *this, *memPool, *strTabSection);
236 shStrSection = memPool->New<StringSection>(".shstrtab", SHT_STRTAB, 0, 1, *this, *memPool);
237 }
238
LayoutSections()239 void AArch64ObjEmitter::LayoutSections()
240 {
241 /* Init elf file header */
242 InitELFHeader();
243 globalOffset = sizeof(FileHeader);
244 globalOffset = Alignment::Align<Offset>(globalOffset, k8ByteSize);
245
246 globalAddr = globalOffset;
247
248 for (auto *section : sections) {
249 section->SetSectionHeaderNameIndex(static_cast<Word>(shStrSection->AddString(section->GetName())));
250 }
251
252 for (auto *section : sections) {
253 globalOffset = Alignment::Align<Offset>(globalOffset, section->GetAlign());
254 globalAddr = Alignment::Align<Address>(globalAddr, section->GetAlign());
255 section->Layout();
256 }
257
258 globalOffset = Alignment::Align<Offset>(globalOffset, k8ByteSize);
259 header.e_shoff = globalOffset;
260 header.e_phnum = 0;
261 header.e_shnum = sections.size();
262 }
263
UpdateMachineAndFlags(FileHeader & header)264 void AArch64ObjEmitter::UpdateMachineAndFlags(FileHeader &header)
265 {
266 header.e_machine = EM_AARCH64;
267 header.e_flags = 0;
268 }
269
270 /* input insn, ang get the binary code of insn */
GetBinaryCodeForInsn(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo) const271 uint32 AArch64ObjEmitter::GetBinaryCodeForInsn(const Insn &insn, const std::vector<uint32> &label2Offset,
272 ObjFuncEmitInfo &objFuncEmitInfo) const
273 {
274 const InsnDesc &md = AArch64CG::kMd[insn.GetMachineOpcode()];
275 uint32 binInsn = md.GetMopEncode();
276 switch (md.GetEncodeType()) {
277 case kMovReg:
278 return GenMovReg(insn);
279
280 case kMovImm:
281 return GenMovImm(insn);
282
283 case kAddSubExtendReg:
284 return binInsn | GenAddSubExtendRegInsn(insn);
285
286 case kAddSubImm:
287 return binInsn | GenAddSubImmInsn(insn);
288
289 case kAddSubShiftImm:
290 return binInsn | GenAddSubShiftImmInsn(insn);
291
292 case kAddSubReg:
293 return binInsn | GenAddSubRegInsn(insn);
294
295 case kAddSubShiftReg:
296 return binInsn | GenAddSubShiftRegInsn(insn);
297
298 case kBitfield: {
299 if (insn.GetMachineOpcode() == MOP_xuxtw64) {
300 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
301 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
302 opnd |= (0b11111 << kShiftFive) | AArch64CG::kMd[MOP_wiorrrr].GetMopEncode();
303 return opnd;
304 }
305 return binInsn | GenBitfieldInsn(insn);
306 }
307
308 case kExtract:
309 return binInsn | GenExtractInsn(insn);
310
311 case kBranchImm:
312 return binInsn | GenBranchImmInsn(insn, label2Offset, objFuncEmitInfo);
313
314 case kBranchReg:
315 return binInsn | GenBranchRegInsn(insn);
316
317 case kCompareBranch:
318 return binInsn | GenCompareBranchInsn(insn, objFuncEmitInfo);
319
320 case kCondCompareImm:
321 return binInsn | GenCondCompareImmInsn(insn);
322
323 case kCondCompareReg:
324 return binInsn | GenCondCompareRegInsn(insn);
325
326 case kConditionalSelect:
327 return binInsn | GenConditionalSelectInsn(insn);
328
329 case kDataProcess1Src:
330 return binInsn | GenDataProcess1SrcInsn(insn);
331
332 case kDataProcess2Src:
333 return binInsn | GenDataProcess2SrcInsn(insn);
334
335 case kDataProcess3Src:
336 return binInsn | GenDataProcess3SrcInsn(insn);
337
338 case kFloatIntConversions:
339 return binInsn | GenFloatIntConversionsInsn(insn);
340
341 case kFloatCompare:
342 return binInsn | GenFloatCompareInsn(insn);
343
344 case kFloatDataProcessing1:
345 return binInsn | GenFloatDataProcessing1Insn(insn);
346
347 case kFloatDataProcessing2:
348 return binInsn | GenFloatDataProcessing2Insn(insn);
349
350 case kFloatImm:
351 return binInsn | GenFloatImmInsn(insn);
352
353 case kFloatCondSelect:
354 return binInsn | GenFloatCondSelectInsn(insn);
355
356 case kLoadStoreReg:
357 return GenLoadStoreRegInsn(insn, objFuncEmitInfo);
358
359 case kLoadStoreAR:
360 return binInsn | GenLoadStoreARInsn(insn);
361
362 case kLoadExclusive:
363 return binInsn | GenLoadExclusiveInsn(insn);
364
365 case kLoadExclusivePair:
366 return binInsn | GenLoadExclusivePairInsn(insn);
367
368 case kStoreExclusive:
369 return binInsn | GenStoreExclusiveInsn(insn);
370
371 case kStoreExclusivePair:
372 return binInsn | GenStoreExclusivePairInsn(insn);
373
374 case kLoadPair:
375 return binInsn | GenLoadPairInsn(insn);
376
377 case kStorePair:
378 return binInsn | GenStorePairInsn(insn);
379
380 case kLoadStoreFloat:
381 return GenLoadStoreFloatInsn(insn, objFuncEmitInfo);
382
383 case kLoadPairFloat:
384 return binInsn | GenLoadPairFloatInsn(insn);
385
386 case kStorePairFloat:
387 return binInsn | GenStorePairFloatInsn(insn);
388
389 case kLoadLiteralReg:
390 return binInsn | GenLoadLiteralRegInsn(insn, objFuncEmitInfo);
391
392 case kLogicalReg:
393 return binInsn | GenLogicalRegInsn(insn);
394
395 case kLogicalImm:
396 return binInsn | GenLogicalImmInsn(insn);
397
398 case kMoveWide:
399 return binInsn | GenMoveWideInsn(insn);
400
401 case kPCRelAddr:
402 return binInsn | GenPCRelAddrInsn(insn, objFuncEmitInfo);
403
404 case kAddPCRelAddr:
405 return binInsn | GenAddPCRelAddrInsn(insn, objFuncEmitInfo);
406
407 case kSystemInsn:
408 return binInsn | GenSystemInsn(insn);
409
410 case kTestBranch:
411 return binInsn | GenTestBranchInsn(insn, objFuncEmitInfo);
412
413 case kCondBranch:
414 return binInsn | GenCondBranchInsn(insn, objFuncEmitInfo);
415
416 case kUnknownEncodeType:
417 break;
418 case kBrkInsn:
419 return binInsn | ((GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) & 0xFFFF) << kShiftFive);
420 default:
421 break;
422 }
423 return binInsn;
424 }
425
426 /* get binary code of operand */
GetOpndMachineValue(const Operand & opnd) const427 uint32 AArch64ObjEmitter::GetOpndMachineValue(const Operand &opnd) const
428 {
429 if (opnd.IsRegister()) {
430 const RegOperand ®Opnd = static_cast<const RegOperand &>(opnd);
431 uint32 regNO = regOpnd.GetRegisterNumber();
432 if (regNO == kRFLAG) {
433 return 0;
434 }
435 if (regOpnd.IsOfIntClass()) {
436 if (regOpnd.GetRegisterNumber() == RZR) {
437 return regNO - R0 - kRegNum2;
438 }
439 if (regOpnd.GetRegisterNumber() == RSP) {
440 return regNO - R0 - 1;
441 }
442 DEBUG_ASSERT(regNO >= R0, "value overflow");
443 return regNO - R0;
444 }
445 return regNO - V0;
446 } else if (opnd.IsImmediate()) {
447 return static_cast<uint32>(static_cast<const ImmOperand &>(opnd).GetValue());
448 } else if (opnd.IsConditionCode()) {
449 const CondOperand &condOpnd = static_cast<const CondOperand &>(opnd);
450 return static_cast<uint32>(ccEncode[condOpnd.GetCode()]);
451 } else if (opnd.IsOpdExtend()) {
452 const ExtendShiftOperand &exendOpnd = static_cast<const ExtendShiftOperand &>(opnd);
453 uint32 shift = exendOpnd.GetShiftAmount();
454 DEBUG_ASSERT(exendOpnd.GetExtendOp() == ExtendShiftOperand::kSXTW, "support kSXTW only!");
455 uint32 option = 0x30;
456 return option | shift;
457 } else {
458 CHECK_FATAL(false, "not supported operand type currently");
459 }
460 }
461
GetAdrLabelOpndValue(const Insn & insn,const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const462 uint32 AArch64ObjEmitter::GetAdrLabelOpndValue(const Insn &insn, const Operand &opnd,
463 ObjFuncEmitInfo &objFuncEmitInfo) const
464 {
465 FixupKind fixupKind =
466 (insn.GetMachineOpcode() == MOP_xadrp) ? FixupKind(kAArch64PCRelAdrpImm21) : FixupKind(kAArch64PCRelAdrImm21);
467 if (opnd.IsMemoryAccessOperand()) {
468 const MemOperand &memOpnd = static_cast<const MemOperand &>(opnd);
469 Fixup *fixup = memPool->New<Fixup>(memOpnd.GetSymbolName(), 0, objFuncEmitInfo.GetTextDataSize(), fixupKind);
470 objFuncEmitInfo.AppendGlobalFixups(*fixup);
471 } else if (opnd.IsStImmediate()) {
472 const StImmOperand &stOpnd = static_cast<const StImmOperand &>(opnd);
473 Fixup *fixup =
474 memPool->New<Fixup>(stOpnd.GetName(), stOpnd.GetOffset(), objFuncEmitInfo.GetTextDataSize(), fixupKind);
475 objFuncEmitInfo.AppendGlobalFixups(*fixup);
476 } else {
477 CHECK_FATAL(opnd.IsImmediate(), "check kind failed");
478 }
479 return 0;
480 }
481
GetLoadLiteralOpndValue(const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const482 uint32 AArch64ObjEmitter::GetLoadLiteralOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
483 {
484 FixupKind fixupKind = FixupKind(kAArch64LoadPCRelImm19);
485 CHECK_FATAL(opnd.IsLabelOpnd(), "check literal kind failed");
486 const LabelOperand &label = static_cast<const LabelOperand &>(opnd);
487 LocalFixup *fixup = memPool->New<LocalFixup>(label.GetLabelIndex(), objFuncEmitInfo.GetTextDataSize(), fixupKind);
488 objFuncEmitInfo.AppendLocalFixups(*fixup);
489 return 0;
490 }
491
GetCondBranchOpndValue(const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const492 uint32 AArch64ObjEmitter::GetCondBranchOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
493 {
494 FixupKind fixupKind = FixupKind(kAArch64CondBranchPCRelImm19);
495 uint32 labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
496 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
497 objFuncEmitInfo.AppendLocalFixups(*fixup);
498 return 0;
499 }
500
GetUnCondBranchOpndValue(const Operand & opnd,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo) const501 uint32 AArch64ObjEmitter::GetUnCondBranchOpndValue(const Operand &opnd, const std::vector<uint32> &label2Offset,
502 ObjFuncEmitInfo &objFuncEmitInfo) const
503 {
504 auto labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
505 CHECK_FATAL(labelIndex < label2Offset.size(), "labelIndex is out of range");
506 uint32 defOffset = label2Offset[labelIndex];
507 if (defOffset != 0xFFFFFFFFULL) {
508 uint32 useOffset = objFuncEmitInfo.GetTextDataSize();
509 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
510 return (pcRelImm & 0x3FFFFFF);
511 }
512
513 FixupKind fixupKind = FixupKind(kAArch64UnCondBranchPCRelImm26);
514 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
515 objFuncEmitInfo.AppendLocalFixups(*fixup);
516 return 0;
517 }
518
GetCallFuncOpndValue(const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const519 uint32 AArch64ObjEmitter::GetCallFuncOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
520 {
521 const FuncNameOperand &funcNameOpnd = static_cast<const FuncNameOperand &>(opnd);
522 const MIRSymbol *funcSymbol = funcNameOpnd.GetFunctionSymbol();
523 FixupKind fixupKind = FixupKind(kAArch64CallPCRelImm26);
524
525 Fixup *fixup = memPool->New<Fixup>(funcNameOpnd.GetName(), 0, objFuncEmitInfo.GetTextDataSize(), fixupKind);
526 if (funcSymbol->IsGlobal()) {
527 objFuncEmitInfo.AppendGlobalFixups(*fixup);
528 }
529 return 0;
530 }
531
GetCompareBranchOpndValue(const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const532 uint32 AArch64ObjEmitter::GetCompareBranchOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
533 {
534 FixupKind fixupKind = FixupKind(kAArch64CompareBranchPCRelImm19);
535 uint32 labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
536 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
537 objFuncEmitInfo.AppendLocalFixups(*fixup);
538 return 0;
539 }
540
GetTestBranchOpndValue(const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const541 uint32 AArch64ObjEmitter::GetTestBranchOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
542 {
543 FixupKind fixupKind = FixupKind(kAArch64TestBranchPCRelImm14);
544 uint32 labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
545 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
546 objFuncEmitInfo.AppendLocalFixups(*fixup);
547 return 0;
548 }
549
GetLo12LitrealOpndValue(MOperator mOp,const Operand & opnd,ObjFuncEmitInfo & objFuncEmitInfo) const550 uint32 AArch64ObjEmitter::GetLo12LitrealOpndValue(MOperator mOp, const Operand &opnd,
551 ObjFuncEmitInfo &objFuncEmitInfo) const
552 {
553 FixupKind fixupKind = (mOp == MOP_xadrpl12) ? FixupKind(kAArch64AddPCRelLo12) : FixupKind(kAArch64LdrPCRelLo12);
554 if (opnd.IsMemoryAccessOperand()) {
555 const MemOperand &memOpnd = static_cast<const MemOperand &>(opnd);
556 uint32 offset = 0;
557 if (memOpnd.GetOffsetImmediate() != nullptr) {
558 offset = static_cast<uint32>(memOpnd.GetOffsetImmediate()->GetOffsetValue());
559 }
560 Fixup *fixup =
561 memPool->New<Fixup>(memOpnd.GetSymbolName(), offset, objFuncEmitInfo.GetTextDataSize(), fixupKind);
562 objFuncEmitInfo.AppendGlobalFixups(*fixup);
563 } else {
564 CHECK_FATAL(opnd.IsStImmediate(), "check opnd kind");
565 const StImmOperand &stOpnd = static_cast<const StImmOperand &>(opnd);
566 Fixup *fixup =
567 memPool->New<Fixup>(stOpnd.GetName(), stOpnd.GetOffset(), objFuncEmitInfo.GetTextDataSize(), fixupKind);
568 objFuncEmitInfo.AppendGlobalFixups(*fixup);
569 }
570 return 0;
571 }
572
GenMovReg(const Insn & insn) const573 uint32 AArch64ObjEmitter::GenMovReg(const Insn &insn) const
574 {
575 Operand &opnd1 = insn.GetOperand(kInsnFirstOpnd);
576 Operand &opnd2 = insn.GetOperand(kInsnSecondOpnd);
577 DEBUG_ASSERT(opnd1.IsRegister(), "opnd1 must be a register");
578 DEBUG_ASSERT(opnd2.IsRegister(), "opnd2 must be a register");
579 uint32 opCode = 0;
580 if (static_cast<RegOperand &>(opnd1).GetRegisterNumber() == RSP ||
581 static_cast<RegOperand &>(opnd2).GetRegisterNumber() == RSP) {
582 if (insn.GetMachineOpcode() == MOP_xmovrr) {
583 const InsnDesc &md = AArch64CG::kMd[MOP_xaddrri12];
584 opCode = md.GetMopEncode();
585 } else {
586 DEBUG_ASSERT(insn.GetMachineOpcode() == MOP_wmovrr, "support MOP_wmovrr Currently!");
587 const InsnDesc &md = AArch64CG::kMd[MOP_waddrri12];
588 opCode = md.GetMopEncode();
589 }
590 /* Rd */
591 uint32 opnd = opCode | GetOpndMachineValue(opnd1);
592 /* Rn */
593 opnd |= GetOpndMachineValue(opnd2) << kShiftFive;
594 return opnd;
595 } else {
596 if (insn.GetMachineOpcode() == MOP_xmovrr) {
597 const InsnDesc &md = AArch64CG::kMd[MOP_xiorrrr];
598 opCode = md.GetMopEncode();
599 } else {
600 DEBUG_ASSERT(insn.GetMachineOpcode() == MOP_wmovrr, "support MOP_wmovrr Currently!");
601 const InsnDesc &md = AArch64CG::kMd[MOP_wiorrrr];
602 opCode = md.GetMopEncode();
603 }
604 /* Rd */
605 uint32 opnd = opCode | GetOpndMachineValue(opnd1);
606 /* Rn */
607 opnd |= GetOpndMachineValue(opnd2) << kShiftSixteen;
608 /* Rm */
609 uint32 zr = 0x1f; /* xzr / wzr */
610 opnd |= zr << kShiftFive;
611 return opnd;
612 }
613 }
614
GenMovImm(const Insn & insn) const615 uint32 AArch64ObjEmitter::GenMovImm(const Insn &insn) const
616 {
617 /* Rd */
618 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
619 ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
620 uint32 immSize = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
621 uint64 immValue = static_cast<uint64>(immOpnd.GetValue());
622 bool isMovz = IsMoveWidableImmediate(immValue, immSize);
623 bool isMovn = IsMoveWidableImmediate(~immValue, immSize);
624 if (isMovz || isMovn) {
625 if (!isMovz) {
626 immValue = ~immValue;
627 }
628 uint32 hwFlag = 0;
629 if (immSize == k32BitSize) {
630 auto &md = isMovz ? AArch64CG::kMd[MOP_wmovzri16] : AArch64CG::kMd[MOP_wmovnri16];
631 opnd |= md.GetMopEncode();
632 immValue = static_cast<uint32>(immValue);
633 uint32 bitFieldValue = 0xFFFF;
634 if (((static_cast<uint32>(immValue)) & (bitFieldValue << k16BitSize)) != 0) {
635 hwFlag = 1;
636 }
637 } else {
638 DEBUG_ASSERT(immSize == k64BitSize, "support 64 bit only!");
639 auto &md = isMovz ? AArch64CG::kMd[MOP_xmovzri16] : AArch64CG::kMd[MOP_xmovnri16];
640 opnd |= md.GetMopEncode();
641 uint64 bitFieldValue = 0xFFFF;
642 for (hwFlag = 0; hwFlag <= 3; ++hwFlag) { // hwFlag is just from 0(00b) to 3(11b)
643 if (immValue & (bitFieldValue << (k16BitSize * hwFlag))) {
644 break;
645 }
646 }
647 }
648 opnd |= ((static_cast<uint32>(immValue >> (hwFlag * k16BitSize))) << kShiftFive);
649 opnd |= (hwFlag << kShiftTwentyOne);
650 } else {
651 if (immSize == k32BitSize) {
652 auto &md = AArch64CG::kMd[MOP_wiorrri12];
653 opnd |= md.GetMopEncode();
654 } else {
655 DEBUG_ASSERT(immSize == k64BitSize, "support 64 bit only!");
656 auto &md = AArch64CG::kMd[MOP_xiorrri13];
657 opnd |= md.GetMopEncode();
658 }
659 uint64 value = static_cast<uint64>(immOpnd.GetValue());
660 uint32 size = (immSize == k32BitSize) ? k32BitSize : k64BitSize;
661 opnd |= EncodeLogicaImm(value, size) << kShiftTen;
662 opnd |= (0x1FU << kShiftFive);
663 }
664
665 return opnd;
666 }
667
GenAddSubExtendRegInsn(const Insn & insn) const668 uint32 AArch64ObjEmitter::GenAddSubExtendRegInsn(const Insn &insn) const
669 {
670 /* Rd */
671 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
672 /* Rn */
673 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
674 /* Rm */
675 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
676 /* Extend */
677 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTen;
678 return opnd;
679 }
680
GenAddPCRelAddrInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const681 uint32 AArch64ObjEmitter::GenAddPCRelAddrInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
682 {
683 /* Rd */
684 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
685 /* Rn */
686 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
687 /* Imm */
688 opnd |= GetLo12LitrealOpndValue(insn.GetMachineOpcode(), insn.GetOperand(kInsnThirdOpnd), objFuncEmitInfo)
689 << kShiftTen;
690 return opnd;
691 }
692
GenAddSubImmInsn(const Insn & insn) const693 uint32 AArch64ObjEmitter::GenAddSubImmInsn(const Insn &insn) const
694 {
695 uint32 operandSize = 4; // subs insn
696 int32 index = insn.GetOperandSize() == operandSize ? 1 : 0;
697 /* Rd */
698 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd + index));
699 /* Rn */
700 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd + index)) << kShiftFive;
701 /* Imm */
702 uint32 immValue = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd + index));
703 if ((immValue & (0xFFFU)) == 0 && ((immValue & (0xFFFU << kShiftTwelve))) != 0) {
704 opnd |= (1U << kShiftTwentyTwo);
705 immValue >>= kShiftTwelve;
706 }
707 opnd |= (immValue << kShiftTen);
708 return opnd;
709 }
710
GenAddSubShiftImmInsn(const Insn & insn) const711 uint32 AArch64ObjEmitter::GenAddSubShiftImmInsn(const Insn &insn) const
712 {
713 uint32 operandSize = 5; // subs insn
714 int32 index = insn.GetOperandSize() == operandSize ? 1 : 0;
715 /* Rd */
716 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd + index));
717 /* Rn */
718 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd + index)) << kShiftFive;
719 /* Imm */
720 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd + index)) << kShiftTen;
721 /* Shift */
722 BitShiftOperand &lslOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd + index));
723 if (lslOpnd.GetShiftAmount() > 0) {
724 uint32 shift = 0x1;
725 opnd |= shift << kShiftTwentyTwo;
726 }
727 return opnd;
728 }
729
GenAddSubRegInsn(const Insn & insn) const730 uint32 AArch64ObjEmitter::GenAddSubRegInsn(const Insn &insn) const
731 {
732 int32 index = insn.GetOperandSize() == k4ByteSize ? 1 : 0; // subs insn
733 /* Rd */
734 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd + index));
735 if (insn.GetOperandSize() == k2ByteSize) { // neg, cmp or cmn insn
736 /* Rm */
737 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
738 return opnd;
739 }
740 /* Rn */
741 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd + index)) << kShiftFive;
742 /* Rm */
743 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd + index)) << kShiftSixteen;
744
745 RegOperand &rd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd + index));
746 RegOperand &rn = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd + index));
747 // SP register can only be used with LSL or Extend
748 if (rd.GetRegisterNumber() == RSP || rn.GetRegisterNumber() == RSP) {
749 uint32 regSize = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
750 opnd |= 1 << kShiftTwentyOne;
751 opnd |= ((regSize == k64BitSize ? 0b11 : 0b10) << kShiftThirteen); // option
752 }
753 return opnd;
754 }
755
GenAddSubShiftRegInsn(const Insn & insn) const756 uint32 AArch64ObjEmitter::GenAddSubShiftRegInsn(const Insn &insn) const
757 {
758 /* Rd */
759 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
760
761 BitShiftOperand *bitShiftOpnd = nullptr;
762
763 uint32 operandSize = 3;
764 if (insn.GetOperandSize() == operandSize) {
765 /* Rm */
766 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
767 bitShiftOpnd = static_cast<BitShiftOperand *>(&insn.GetOperand(kInsnThirdOpnd));
768 } else {
769 /* Rn */
770 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
771 /* Rm */
772 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
773 bitShiftOpnd = static_cast<BitShiftOperand *>(&insn.GetOperand(kInsnFourthOpnd));
774 }
775 uint32 shift = 0;
776 switch (bitShiftOpnd->GetShiftOp()) {
777 case BitShiftOperand::kLSL:
778 shift = kShiftLSL;
779 break;
780 case BitShiftOperand::kLSR:
781 shift = kShiftLSR;
782 break;
783 case BitShiftOperand::kASR:
784 shift = kShiftASR;
785 break;
786 default:
787 break;
788 }
789 /* Shift */
790 opnd |= shift << kShiftTwentyTwo;
791 /* Imm */
792 opnd |= bitShiftOpnd->GetShiftAmount() << kShiftTen;
793 return opnd;
794 }
795
GenBitfieldInsn(const Insn & insn) const796 uint32 AArch64ObjEmitter::GenBitfieldInsn(const Insn &insn) const
797 {
798 /* Rd */
799 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
800 /* Rn */
801 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
802
803 uint32 operandSize = 4;
804 if (insn.GetMachineOpcode() == MOP_wubfizrri5i5 || insn.GetMachineOpcode() == MOP_xubfizrri6i6 ||
805 insn.GetMachineOpcode() == MOP_wbfirri5i5 || insn.GetMachineOpcode() == MOP_xbfirri6i6) {
806 uint32 mod = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize(); /* 64 & 32 from ARMv8 manual C5.6.114 */
807 CHECK_FATAL(mod == 64 || mod == 32, "mod must be 64/32");
808 uint32 shift = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
809 uint32 immr = -shift % mod;
810 opnd |= immr << kShiftSixteen;
811 uint32 width = GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
812 CHECK_FATAL(width >= 1, "value overflow");
813 uint32 imms = width - 1;
814 opnd |= imms << kShiftTen;
815 } else if (insn.GetOperandSize() == operandSize) {
816 uint32 lab = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
817 opnd |= lab << kShiftSixteen;
818 uint32 width = GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
819 CHECK_FATAL(lab < UINT64_MAX - width, "value overflow");
820 CHECK_FATAL(lab + width >= 1, "value overflow");
821 opnd |= (lab + width - 1) << kShiftTen;
822 } else if (insn.GetMachineOpcode() == MOP_xlslrri6 || insn.GetMachineOpcode() == MOP_wlslrri5) {
823 uint32 mod = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize(); /* 64 & 32 from ARMv8 manual C5.6.114 */
824 CHECK_FATAL(mod == 64 || mod == 32, "mod must be 64/32");
825 uint32 shift = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
826 uint32 immr = -shift % mod;
827 opnd |= immr << kShiftSixteen;
828 uint32 imms = mod - 1 - shift;
829 opnd |= imms << kShiftTen;
830 } else if (insn.GetMachineOpcode() == MOP_xlsrrri6 || insn.GetMachineOpcode() == MOP_wlsrrri5 ||
831 insn.GetMachineOpcode() == MOP_xasrrri6 || insn.GetMachineOpcode() == MOP_wasrrri5) {
832 uint32 mod = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize(); /* 64 & 32 from ARMv8 manual C5.6.114 */
833 CHECK_FATAL(mod == 64 || mod == 32, "mod must be 64/32");
834 uint32 immr = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
835 opnd |= immr << kShiftSixteen;
836 uint32 imms = mod - 1;
837 opnd |= imms << kShiftTen;
838 }
839 return opnd;
840 }
841
GenExtractInsn(const Insn & insn) const842 uint32 AArch64ObjEmitter::GenExtractInsn(const Insn &insn) const
843 {
844 /* Rd */
845 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
846 /* Rn */
847 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
848 /* Imm */
849 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftTen;
850 /* Rm */
851 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
852
853 return opnd;
854 }
855
GenBranchImmInsn(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo) const856 uint32 AArch64ObjEmitter::GenBranchImmInsn(const Insn &insn, const std::vector<uint32> &label2Offset,
857 ObjFuncEmitInfo &objFuncEmitInfo) const
858 {
859 /* Imm */
860 if (insn.IsCall()) {
861 return GetCallFuncOpndValue(insn.GetOperand(kInsnFirstOpnd), objFuncEmitInfo);
862 } else {
863 return GetUnCondBranchOpndValue(insn.GetOperand(kInsnFirstOpnd), label2Offset, objFuncEmitInfo);
864 }
865 }
866
GenBranchRegInsn(const Insn & insn) const867 uint32 AArch64ObjEmitter::GenBranchRegInsn(const Insn &insn) const
868 {
869 if (insn.GetMachineOpcode() == MOP_xret || insn.GetMachineOpcode() == MOP_clrex) {
870 return 0;
871 }
872 /* Rn */
873 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftFive;
874 return opnd;
875 }
876
GenCompareBranchInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const877 uint32 AArch64ObjEmitter::GenCompareBranchInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
878 {
879 /* Rt */
880 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
881 /* Imm */
882 opnd |= GetCompareBranchOpndValue(insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
883 return opnd;
884 }
885
GenCondCompareImmInsn(const Insn & insn) const886 uint32 AArch64ObjEmitter::GenCondCompareImmInsn(const Insn &insn) const
887 {
888 /* Rn */
889 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
890 /* Imm */
891 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
892 /* Nzcv */
893 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
894 /* Cond */
895 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFifthOpnd)) << kShiftTwelve;
896 return opnd;
897 }
898
GenCondCompareRegInsn(const Insn & insn) const899 uint32 AArch64ObjEmitter::GenCondCompareRegInsn(const Insn &insn) const
900 {
901 /* Rn */
902 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
903 /* Rm */
904 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
905 /* Nzcv */
906 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
907 /* Cond */
908 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFifthOpnd)) << kShiftTwelve;
909 return opnd;
910 }
911
GenConditionalSelectInsn(const Insn & insn) const912 uint32 AArch64ObjEmitter::GenConditionalSelectInsn(const Insn &insn) const
913 {
914 /* Rd */
915 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
916 uint32 operandSize = 5;
917 if (insn.GetOperandSize() == operandSize) {
918 /* Rn */
919 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
920 /* Rm */
921 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
922 /* Cond */
923 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTwelve;
924 } else if (insn.GetMachineOpcode() == MOP_wcnegrrrc || insn.GetMachineOpcode() == MOP_xcnegrrrc) {
925 /* Rn */
926 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
927 /* Rm Rn==Rm */
928 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
929 /* Cond */
930 uint8 cond = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
931 /* invert cond */
932 opnd |= ((cond ^ 1u) & 0xfu) << kShiftTwelve;
933 } else {
934 /* Cond */
935 uint8 cond = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd));
936 /* invert cond */
937 opnd |= ((cond ^ 1u) & 0xfu) << kShiftTwelve;
938 }
939 return opnd;
940 }
941
GenDataProcess1SrcInsn(const Insn & insn) const942 uint32 AArch64ObjEmitter::GenDataProcess1SrcInsn(const Insn &insn) const
943 {
944 /* Rd */
945 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
946 /* Rn */
947 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
948 return opnd;
949 }
950
GenDataProcess2SrcInsn(const Insn & insn) const951 uint32 AArch64ObjEmitter::GenDataProcess2SrcInsn(const Insn &insn) const
952 {
953 /* Rd */
954 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
955 /* Rn */
956 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
957 /* Rm */
958 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
959 return opnd;
960 }
961
GenDataProcess3SrcInsn(const Insn & insn) const962 uint32 AArch64ObjEmitter::GenDataProcess3SrcInsn(const Insn &insn) const
963 {
964 /* Rd */
965 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
966 /* Rn */
967 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
968 /* Rm */
969 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
970 /* Ra */
971 uint32 operandSize = 4;
972 if (insn.GetOperandSize() == operandSize) {
973 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTen;
974 }
975 return opnd;
976 }
977
GenFloatIntConversionsInsn(const Insn & insn) const978 uint32 AArch64ObjEmitter::GenFloatIntConversionsInsn(const Insn &insn) const
979 {
980 /* Rd */
981 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
982 /* Rn */
983 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
984 return opnd;
985 }
986
GenFloatCompareInsn(const Insn & insn) const987 uint32 AArch64ObjEmitter::GenFloatCompareInsn(const Insn &insn) const
988 {
989 /* Rn */
990 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
991 if (insn.GetOperand(kInsnThirdOpnd).IsRegister()) {
992 /* Rm */
993 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
994 }
995 return opnd;
996 }
997
GenFloatDataProcessing1Insn(const Insn & insn) const998 uint32 AArch64ObjEmitter::GenFloatDataProcessing1Insn(const Insn &insn) const
999 {
1000 /* Rd */
1001 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1002 /* Rn */
1003 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1004 return opnd;
1005 }
1006
GenFloatDataProcessing2Insn(const Insn & insn) const1007 uint32 AArch64ObjEmitter::GenFloatDataProcessing2Insn(const Insn &insn) const
1008 {
1009 /* Rd */
1010 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1011 /* Rn */
1012 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1013 /* Rm */
1014 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1015 return opnd;
1016 }
1017
GenFloatImmInsn(const Insn & insn) const1018 uint32 AArch64ObjEmitter::GenFloatImmInsn(const Insn &insn) const
1019 {
1020 /* Rd */
1021 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1022 /* Imm */
1023 opnd |= (GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) & 0xff) << kShiftThirteen;
1024 return opnd;
1025 }
1026
GenFloatCondSelectInsn(const Insn & insn) const1027 uint32 AArch64ObjEmitter::GenFloatCondSelectInsn(const Insn &insn) const
1028 {
1029 /* Rd */
1030 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1031 /* Rn */
1032 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1033 /* Rm */
1034 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1035 /* Cond */
1036 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTwelve;
1037 return opnd;
1038 }
1039
GenLoadStoreModeLiteral(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1040 uint32 AArch64ObjEmitter::GenLoadStoreModeLiteral(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1041 {
1042 /* Rt */
1043 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1044 /* Mem */
1045 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1046 FixupKind fixupKind = FixupKind(kAArch64LoadPCRelImm19);
1047 LocalFixup *fixup =
1048 memPool->New<LocalFixup>(objFuncEmitInfo.GetCGFunc().GetLocalSymLabelIndex(*memOpnd.GetSymbol()),
1049 objFuncEmitInfo.GetTextDataSize(), fixupKind);
1050 objFuncEmitInfo.AppendLocalFixups(*fixup);
1051 MOperator mOp = insn.GetMachineOpcode();
1052 if (mOp == MOP_sldr) {
1053 mOp = MOP_sldli;
1054 } else if (mOp == MOP_dldr) {
1055 mOp = MOP_dldli;
1056 } else if (mOp == MOP_xldr) {
1057 mOp = MOP_xldli;
1058 } else if (mOp == MOP_wldr) {
1059 mOp = MOP_wldli;
1060 } else {
1061 CHECK_FATAL(false, "unsupported mOp");
1062 }
1063 auto &md = AArch64CG::kMd[mOp];
1064 return md.GetMopEncode() | opnd;
1065 }
1066
uint32 AArch64ObjEmitter::GenLoadStoreModeBOi(const Insn &insn) const
{
    /* Encodes the offset/index portion of a base+offset-immediate load/store.
     * Pre/post-index forms carry a 9-bit immediate at bits [20:12] with marker
     * bits at [11:10]; the intact form carries a scaled imm12 at bits [21:10]. */
    MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
    OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
    /* Imm */
    int32 offsetValue = ofstOpnd->GetOffsetValue();
    uint32 imm9Mask = 0x1ff;
    uint32 opnd = 0U;
    if (memOpnd.IsPostIndexed()) {
        /* post-index: imm9 (masked, so negative offsets wrap correctly) + marker 0b01 */
        opnd |= (static_cast<uint32>(offsetValue) & imm9Mask) << kShiftTwelve;
        uint32 specialOpCode = 0x1;
        opnd |= specialOpCode << kShiftTen;
    } else if (memOpnd.IsPreIndexed()) {
        /* pre-index (writeback): imm9 + marker 0b11 */
        opnd |= (static_cast<uint32>(offsetValue) & imm9Mask) << kShiftTwelve;
        uint32 specialOpCode = 0x3;
        opnd |= specialOpCode << kShiftTen;
    } else {
        DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
        uint32 specialOpCode = 0x1;
        opnd |= specialOpCode << kShiftTwentyFour;
        /* the imm12 field holds the byte offset divided by the access size */
        uint32 divisor = 1;
        MOperator mOp = insn.GetMachineOpcode();
        if ((mOp == MOP_xldr) || (mOp == MOP_xstr) || (mOp == MOP_dldr) || (mOp == MOP_dstr)) {
            divisor = k8BitSize; /* 8-byte accesses */
        } else if ((mOp == MOP_wldr) || (mOp == MOP_wstr) || (mOp == MOP_sstr) || (mOp == MOP_sldr)) {
            divisor = k4BitSize; /* 4-byte accesses */
        } else if (mOp == MOP_hldr) {
            divisor = k2BitSize;
        }
        /* half-word loads/stores scale by a right shift instead of the divisor */
        uint32 shiftRightNum = 0;
        if ((mOp == MOP_wldrsh) || (mOp == MOP_wldrh) || (mOp == MOP_wstrh)) {
            shiftRightNum = 1;
        }
        opnd |= ((static_cast<uint32>(offsetValue) >> shiftRightNum) / divisor) << kShiftTen;
    }
    return opnd;
}
1104
GenLoadStoreModeBOrX(const Insn & insn) const1105 uint32 AArch64ObjEmitter::GenLoadStoreModeBOrX(const Insn &insn) const
1106 {
1107 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1108 uint32 opnd = 0;
1109 opnd |= 0x1 << kShiftTwentyOne;
1110 opnd |= 0x2 << kShiftTen;
1111 RegOperand *offsetReg = memOpnd.GetIndexRegister();
1112 opnd |= GetOpndMachineValue(*offsetReg) << kShiftSixteen;
1113 std::string extend = memOpnd.GetExtendAsString();
1114 uint32 shift = memOpnd.ShiftAmount();
1115 uint32 option = 0;
1116 if (extend == "UXTW") {
1117 option = 0x2;
1118 } else if (extend == "LSL") {
1119 option = 0x3;
1120 uint32 regSize = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize();
1121 // lsl extend insn shift amount can only be 0 or 1(16-bit def opnd) or 2(32-bit def opnd) or
1122 // 3(64-bit def opnd) or 4(128-bit def opnd) in ldr/str insn
1123 CHECK_FATAL((shift == k0BitSize) || (regSize == k16BitSize && shift == k1BitSize) ||
1124 (regSize == k32BitSize && shift == k2BitSize) ||
1125 (regSize == k64BitSize && shift == k3BitSize) || (regSize == k128BitSize && shift == k4BitSize),
1126 "unsupport LSL amount");
1127 } else if (extend == "SXTW") {
1128 option = 0x6;
1129 } else {
1130 DEBUG_ASSERT(extend == "SXTX", "must be SXTX!");
1131 option = 0x7;
1132 }
1133 opnd |= option << kShiftThirteen;
1134 uint32 s = (shift > 0) ? 1 : 0;
1135 opnd |= s << kShiftTwelve;
1136 return opnd;
1137 }
1138
GenLoadStoreRegInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1139 uint32 AArch64ObjEmitter::GenLoadStoreRegInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1140 {
1141 /* Mem */
1142 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1143 if (memOpnd.GetAddrMode() == MemOperand::kAddrModeLiteral) {
1144 return GenLoadStoreModeLiteral(insn, objFuncEmitInfo);
1145 }
1146
1147 MOperator mOp = insn.GetMachineOpcode();
1148 #ifdef USE_32BIT_REF
1149 if (((mOp == MOP_xstr) || (mOp == MOP_xldr)) &&
1150 static_cast<AArch64RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).IsRefField()) {
1151 mOp = (mOp == MOP_xstr) ? MOP_wstr : MOP_wldr;
1152 }
1153 #endif
1154 auto &md = AArch64CG::kMd[mOp];
1155 uint32 binInsn = md.GetMopEncode();
1156 // invalid insn generated by the eval node
1157 if (static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber() == RZR) {
1158 if (mOp == MOP_sldr) {
1159 binInsn = AArch64CG::kMd[MOP_wldr].GetMopEncode();
1160 } else if (mOp == MOP_dldr) {
1161 binInsn = AArch64CG::kMd[MOP_xldr].GetMopEncode();
1162 } else if (mOp == MOP_sstr) {
1163 binInsn = AArch64CG::kMd[MOP_wstr].GetMopEncode();
1164 } else if (mOp == MOP_dstr) {
1165 binInsn = AArch64CG::kMd[MOP_xstr].GetMopEncode();
1166 }
1167 }
1168 /* Rt */
1169 binInsn |= GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1170 /* Rn */
1171 Operand *baseReg = memOpnd.GetBaseRegister();
1172 binInsn |= GetOpndMachineValue(*baseReg) << kShiftFive;
1173
1174 if (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi) {
1175 uint32 size = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
1176 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1177 /* Imm */
1178 int32 offsetValue = ofstOpnd != nullptr ? ofstOpnd->GetOffsetValue() : 0;
1179 if ((((size == k16BitSize) && (offsetValue % k2BitSize) != 0) ||
1180 ((size == k32BitSize) && (offsetValue % k4BitSize) != 0) ||
1181 ((size == k64BitSize) && (offsetValue % k8BitSize) != 0)) &&
1182 ((offsetValue < k256BitSizeInt) && (offsetValue >= kNegative256BitSize))) {
1183 uint32 mopEncode = 0;
1184 // ldur, ldurh, ldurb
1185 if (insn.IsLoad()) {
1186 if (insn.GetDesc()->GetEncodeType() == kLoadStoreFloat) {
1187 mopEncode = size == k16BitSize ? 0x7c400000 : (size == k32BitSize ? 0xbc400000 : 0xfc400000);
1188 } else {
1189 mopEncode = size == k16BitSize ? 0x78400000 : (size == k32BitSize ? 0xb8400000 : 0xf8400000);
1190 }
1191 } else { // stur, sturh, sturb
1192 if (insn.GetDesc()->GetEncodeType() == kLoadStoreFloat) {
1193 mopEncode = size == k16BitSize ? 0x7c000000 : (size == k32BitSize ? 0xbc000000 : 0xfc000000);
1194 } else {
1195 mopEncode = size == k16BitSize ? 0x78000000 : (size == k32BitSize ? 0xb8000000 : 0xf8000000);
1196 }
1197 }
1198 binInsn =
1199 GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) | (GetOpndMachineValue(*baseReg) << kShiftFive);
1200 return binInsn | mopEncode | (offsetValue << kShiftTwelve);
1201 }
1202 return binInsn | GenLoadStoreModeBOi(insn);
1203 } else if (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOrX) {
1204 return binInsn | GenLoadStoreModeBOrX(insn);
1205 }
1206 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeLo12Li, "support kAddrModeLo12Li only!");
1207 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1208 binInsn |= GetLo12LitrealOpndValue(insn.GetMachineOpcode(), memOpnd, objFuncEmitInfo) << kShiftTen;
1209 uint32 specialOpCode = 0x1;
1210 binInsn |= specialOpCode << kShiftTwentyFour;
1211
1212 return binInsn;
1213 }
1214
GenLoadStoreARInsn(const Insn & insn) const1215 uint32 AArch64ObjEmitter::GenLoadStoreARInsn(const Insn &insn) const
1216 {
1217 /* Rt */
1218 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1219 /* Mem */
1220 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1221 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1222 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1223 Operand *baseReg = memOpnd.GetBaseRegister();
1224 /* Rn */
1225 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1226 return opnd;
1227 }
1228
GenLoadExclusiveInsn(const Insn & insn) const1229 uint32 AArch64ObjEmitter::GenLoadExclusiveInsn(const Insn &insn) const
1230 {
1231 /* Rt */
1232 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1233 /* Mem */
1234 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1235 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1236 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1237 Operand *baseReg = memOpnd.GetBaseRegister();
1238 /* Rn */
1239 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1240 return opnd;
1241 }
1242
GenLoadExclusivePairInsn(const Insn & insn) const1243 uint32 AArch64ObjEmitter::GenLoadExclusivePairInsn(const Insn &insn) const
1244 {
1245 /* Rt */
1246 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1247 /* Rt2 */
1248 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1249 /* Mem */
1250 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1251 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1252 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1253 Operand *baseReg = memOpnd.GetBaseRegister();
1254 /* Rn */
1255 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1256 return opnd;
1257 }
1258
GenStoreExclusiveInsn(const Insn & insn) const1259 uint32 AArch64ObjEmitter::GenStoreExclusiveInsn(const Insn &insn) const
1260 {
1261 /* Rs */
1262 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftSixteen;
1263 /* Rt */
1264 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd));
1265 /* Mem */
1266 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1267 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1268 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1269 Operand *baseReg = memOpnd.GetBaseRegister();
1270 /* Rn */
1271 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1272 return opnd;
1273 }
1274
GenStoreExclusivePairInsn(const Insn & insn) const1275 uint32 AArch64ObjEmitter::GenStoreExclusivePairInsn(const Insn &insn) const
1276 {
1277 /* Rs */
1278 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftSixteen;
1279 /* Rt */
1280 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd));
1281 /* Rt2 */
1282 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftTen;
1283 /* Mem */
1284 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnFourthOpnd));
1285 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1286 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1287 Operand *baseReg = memOpnd.GetBaseRegister();
1288 /* Rn */
1289 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1290 return opnd;
1291 }
1292
GenLoadPairInsn(const Insn & insn) const1293 uint32 AArch64ObjEmitter::GenLoadPairInsn(const Insn &insn) const
1294 {
1295 /* Rt */
1296 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1297 /* Rt2 */
1298 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1299 /* Mem */
1300 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1301 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1302 /* Rn */
1303 Operand *baseReg = memOpnd.GetBaseRegister();
1304 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1305 /* Imm */
1306 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1307 int32 offsetValue = ofstOpnd->GetOffsetValue();
1308 uint32 divisor = 0;
1309 if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
1310 divisor = k8ByteSize;
1311 } else {
1312 divisor = k4ByteSize;
1313 }
1314 uint32 imm7Mask = 0x7f;
1315 opnd |= (((static_cast<uint32>(offsetValue / divisor) & imm7Mask)) << kShiftFifteen);
1316
1317 uint32 specialOpCode = 0;
1318 if (memOpnd.IsPostIndexed()) {
1319 specialOpCode = 0x3;
1320 } else if (memOpnd.IsPreIndexed()) {
1321 specialOpCode = 0x7;
1322 } else {
1323 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1324 specialOpCode = 0x5;
1325 }
1326 opnd |= specialOpCode << kShiftTwentyTwo;
1327 return opnd;
1328 }
1329
GenStorePairInsn(const Insn & insn) const1330 uint32 AArch64ObjEmitter::GenStorePairInsn(const Insn &insn) const
1331 {
1332 /* Rt */
1333 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1334 /* Rt2 */
1335 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1336 /* Mem */
1337 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1338 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1339 /* Rn */
1340 Operand *baseReg = memOpnd.GetBaseRegister();
1341 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1342 /* Imm */
1343 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1344 int32 offsetValue = ofstOpnd->GetOffsetValue();
1345 uint32 divisor = 0;
1346 if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
1347 divisor = k8ByteSize;
1348 } else {
1349 divisor = k4ByteSize;
1350 }
1351 uint32 imm7Mask = 0x7f;
1352 opnd |= ((static_cast<uint32>(offsetValue / divisor) & imm7Mask) << kShiftFifteen);
1353
1354 uint32 specialOpCode = 0;
1355 if (memOpnd.IsPostIndexed()) {
1356 specialOpCode = 0x2;
1357 } else if (memOpnd.IsPreIndexed()) {
1358 specialOpCode = 0x6;
1359 } else {
1360 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1361 specialOpCode = 0x4;
1362 }
1363 opnd |= specialOpCode << kShiftTwentyTwo;
1364 return opnd;
1365 }
1366
GenLoadStoreFloatInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1367 uint32 AArch64ObjEmitter::GenLoadStoreFloatInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1368 {
1369 return GenLoadStoreRegInsn(insn, objFuncEmitInfo);
1370 }
1371
GenLoadPairFloatInsn(const Insn & insn) const1372 uint32 AArch64ObjEmitter::GenLoadPairFloatInsn(const Insn &insn) const
1373 {
1374 /* Rt */
1375 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1376 /* Rt2 */
1377 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1378 /* Mem */
1379 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1380 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1381 /* Rn */
1382 Operand *baseReg = memOpnd.GetBaseRegister();
1383 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1384 /* Imm */
1385 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1386 int32 offsetValue = ofstOpnd->GetOffsetValue();
1387 uint32 divisor = 0;
1388 if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
1389 divisor = k8ByteSize;
1390 } else {
1391 divisor = k4ByteSize;
1392 }
1393 uint32 imm7Mask = 0x7f;
1394 opnd |= (static_cast<uint32>(static_cast<uint32>(offsetValue) / divisor) & imm7Mask) << kShiftFifteen;
1395
1396 uint32 specialOpCode = 0;
1397 if (memOpnd.IsPostIndexed()) {
1398 specialOpCode = 0x3;
1399 } else if (memOpnd.IsPreIndexed()) {
1400 specialOpCode = 0x7;
1401 } else {
1402 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1403 specialOpCode = 0x5;
1404 }
1405 opnd |= specialOpCode << kShiftTwentyTwo;
1406 return opnd;
1407 }
1408
GenStorePairFloatInsn(const Insn & insn) const1409 uint32 AArch64ObjEmitter::GenStorePairFloatInsn(const Insn &insn) const
1410 {
1411 /* Rt */
1412 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1413 /* Rt2 */
1414 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1415 /* Mem */
1416 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1417 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1418 /* Rn */
1419 Operand *baseReg = memOpnd.GetBaseRegister();
1420 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1421 /* Imm */
1422 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1423 int32 offsetValue = ofstOpnd->GetOffsetValue();
1424 uint32 divisor = 0;
1425 if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
1426 divisor = k8ByteSize;
1427 } else {
1428 divisor = k4ByteSize;
1429 }
1430 uint32 imm7Mask = 0x7f;
1431 opnd |= (static_cast<uint32>(static_cast<uint32>(offsetValue) / divisor) & imm7Mask) << kShiftFifteen;
1432
1433 uint32 specialOpCode = 0;
1434 if (memOpnd.IsPostIndexed()) {
1435 specialOpCode = 0x2;
1436 } else if (memOpnd.IsPreIndexed()) {
1437 specialOpCode = 0x6;
1438 } else {
1439 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1440 specialOpCode = 0x4;
1441 }
1442 opnd |= specialOpCode << kShiftTwentyTwo;
1443 return opnd;
1444 }
1445
GenLoadLiteralRegInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1446 uint32 AArch64ObjEmitter::GenLoadLiteralRegInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1447 {
1448 /* Rt */
1449 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1450 /* Imm */
1451 opnd |= GetLoadLiteralOpndValue(insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
1452 return opnd;
1453 }
1454
GenLogicalRegInsn(const Insn & insn) const1455 uint32 AArch64ObjEmitter::GenLogicalRegInsn(const Insn &insn) const
1456 {
1457 /* Rd */
1458 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1459
1460 uint32 operandSize = k2ByteSize; // mvn insn
1461 if (insn.GetOperandSize() == operandSize) {
1462 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftFive;
1463 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
1464 return opnd;
1465 }
1466
1467 /* Rn */
1468 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1469 /* Rm */
1470 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1471
1472 operandSize = k4ByteSize;
1473 if (insn.GetOperandSize() == operandSize) {
1474 BitShiftOperand &bitShiftOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
1475 uint32 shift = 0;
1476 switch (bitShiftOpnd.GetShiftOp()) {
1477 case BitShiftOperand::kLSL:
1478 shift = kShiftLSL;
1479 break;
1480 case BitShiftOperand::kLSR:
1481 shift = kShiftLSR;
1482 break;
1483 case BitShiftOperand::kASR:
1484 shift = kShiftASR;
1485 break;
1486 default:
1487 break;
1488 }
1489 /* Shift */
1490 opnd |= shift << kShiftTwentyTwo;
1491 /* Imm */
1492 opnd |= bitShiftOpnd.GetShiftAmount() << kShiftTen;
1493 }
1494 return opnd;
1495 }
1496
GenLogicalImmInsn(const Insn & insn) const1497 uint32 AArch64ObjEmitter::GenLogicalImmInsn(const Insn &insn) const
1498 {
1499 /* Rd */
1500 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1501 if (insn.GetMachineOpcode() == MOP_wmovri32 || insn.GetMachineOpcode() == MOP_xmovri64) {
1502 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1503 return opnd;
1504 }
1505
1506 // tst insn
1507 if (insn.GetMachineOpcode() == MOP_wtstri32 || insn.GetMachineOpcode() == MOP_xtstri64) {
1508 // Rn
1509 uint32 opndValue = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1510 // Imm
1511 ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
1512 uint64 value = static_cast<uint64>(immOpnd.GetValue());
1513 uint32 size = insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize();
1514 opndValue |= EncodeLogicaImm(value, size) << kShiftTen;
1515 return opndValue;
1516 }
1517
1518 /* Rn */
1519 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1520 /* Imm */
1521 ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
1522 uint64 value = static_cast<uint64>(immOpnd.GetValue());
1523 uint32 size = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
1524 opnd |= EncodeLogicaImm(value, size) << kShiftTen;
1525 return opnd;
1526 }
1527
GenMoveWideInsn(const Insn & insn) const1528 uint32 AArch64ObjEmitter::GenMoveWideInsn(const Insn &insn) const
1529 {
1530 /* Rd */
1531 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1532 /* Imm */
1533 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1534
1535 BitShiftOperand &lslOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnThirdOpnd));
1536 uint32 mod = 16; /* 16 from Armv8 Manual C5.6.128 */
1537 uint32 shift = lslOpnd.GetShiftAmount() / mod;
1538 /* Shift */
1539 opnd |= shift << kShiftTwentyOne;
1540 return opnd;
1541 }
1542
GenPCRelAddrInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1543 uint32 AArch64ObjEmitter::GenPCRelAddrInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1544 {
1545 /* Rd */
1546 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1547 /* Imm */
1548 opnd |= GetAdrLabelOpndValue(insn, insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
1549 return opnd;
1550 }
1551
GenSystemInsn(const Insn & insn) const1552 uint32 AArch64ObjEmitter::GenSystemInsn(const Insn &insn) const
1553 {
1554 (void)insn;
1555 return 0;
1556 }
1557
GenTestBranchInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1558 uint32 AArch64ObjEmitter::GenTestBranchInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1559 {
1560 /* Rt */
1561 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1562 /* b40 */
1563 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftNineteen;
1564 /* Imm */
1565 opnd |= GetTestBranchOpndValue(insn.GetOperand(kInsnThirdOpnd), objFuncEmitInfo) << kShiftFive;
1566 return opnd;
1567 }
1568
GenCondBranchInsn(const Insn & insn,ObjFuncEmitInfo & objFuncEmitInfo) const1569 uint32 AArch64ObjEmitter::GenCondBranchInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1570 {
1571 /* Imm */
1572 uint32 opnd = GetCondBranchOpndValue(insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
1573 return opnd;
1574 }
1575
/* Scan basic blocks and insns in reverse to find the function's last machine
 * instruction; if that instruction is a call, insert a nop right after it.
 * The scan stops at the first machine instruction found from the end,
 * whether or not it is a call. */
void AArch64ObjEmitter::InsertNopInsn(ObjFuncEmitInfo &objFuncEmitInfo) const
{
    AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc &>(objFuncEmitInfo.GetCGFunc());
    bool found = false;
    /* iterate basic blocks from last to first */
    FOR_ALL_BB_REV(bb, &cgFunc)
    {
        /* iterate insns of this block from last to first */
        FOR_BB_INSNS_REV(insn, bb)
        {
            if (insn->IsMachineInstruction()) {
                if (insn->IsCall()) {
                    /* function ends with a call: pad with a trailing nop */
                    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_nop);
                    bb->InsertInsnAfter(*insn, newInsn);
                }
                found = true;
                break;
            }
        }
        if (found) {
            break;
        }
    }
}
1598
/* Encode a logical immediate into the N:immr:imms fields of an AArch64
 * logical instruction (inverse of the manual's DecodeBitMasks).
 * @param imm   the immediate; assumed to already be a valid logical
 *              immediate (a rotated, repeated run of ones) -- validity is
 *              NOT checked here, TODO confirm callers guarantee it.
 * @param size  operand size in bits (element search starts here).
 * @return (N << 12) | (immr << 6) | imms, ready for shifting into the insn.
 */
uint32 AArch64ObjEmitter::EncodeLogicaImm(uint64 imm, uint32 size) const
{
    /* find the smallest repeating element size: keep halving while the two
     * halves of the current element are identical */
    uint32 elementSize = size;
    while (elementSize > k2ByteSize) {
        elementSize >>= 1;
        uint64 mask = (1ULL << elementSize) - 1;
        if ((imm & mask) != ((imm >> elementSize) & mask)) {
            /* halves differ: the previous (doubled) size is the element */
            elementSize <<= 1;
            break;
        }
    }

    /* keep only one element's worth of bits (no-op for 64-bit elements) */
    if (elementSize != k64BitSize) {
        imm &= ((1ULL << elementSize) - 1);
    }
    std::bitset<k64BitSize> bitValue(imm);
    /* trailCount = length of the run of bits equal to bit 0 at the bottom
     * of the element (stays 0 if all element bits are equal to bit 0) */
    uint32 trailCount = 0;
    for (uint32 i = 1; i < elementSize; ++i) {
        if (bitValue[i] ^ bitValue[0]) {
            trailCount = i;
            break;
        }
    }

    /* immr = rotation needed to move the run of ones down to bit 0 */
    uint32 immr = 0;
    uint32 oneNum = bitValue.count();
    if (bitValue.test(0)) { /* for 1+0+1+ pattern */
        DEBUG_ASSERT(oneNum >= trailCount, "value overflow");
        immr = oneNum - trailCount;
    } else { /* for 0+1+0+ pattern */
        immr = elementSize - trailCount;
    }
    CHECK_FATAL(elementSize >= 1, "value overflow");
    DEBUG_ASSERT(oneNum >= 1, "value overflow");
    /* imms: high bits encode the element size (11110x, 1110xx, ... after the
     * final 6-bit mask), low bits hold (number of ones - 1) */
    uint32 imms = ~(elementSize - 1) << 1;
    imms |= oneNum - 1u;
    /* N is set only for a full 64-bit element */
    uint32 n = (elementSize == k64BitSize) ? 1 : 0;
    return (n << kShiftTwelve) | (immr << kShiftSix) | (imms & 0x3f);
}
1639
EmitAdrpLabel(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo)1640 void AArch64ObjEmitter::EmitAdrpLabel(const Insn &insn, const std::vector<uint32> &label2Offset,
1641 ObjFuncEmitInfo &objFuncEmitInfo)
1642 {
1643 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1644 uint32 binInsn = AArch64CG::kMd[MOP_xadrp].GetMopEncode();
1645 binInsn |= opnd;
1646 objFuncEmitInfo.AppendTextData(binInsn, k4ByteSize);
1647 binInsn = AArch64CG::kMd[MOP_xaddrri12].GetMopEncode();
1648 binInsn |= opnd | (opnd << kShiftFive);
1649 objFuncEmitInfo.AppendTextData(binInsn, k4ByteSize);
1650 return;
1651 }
1652
EmitGetHeapConstTable(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo)1653 void AArch64ObjEmitter::EmitGetHeapConstTable(const Insn &insn, const std::vector<uint32> &label2Offset,
1654 ObjFuncEmitInfo &objFuncEmitInfo)
1655 {
1656 AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
1657 RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
1658 RegOperand &jsFuncReg = static_cast<RegOperand&>(insn.GetOperand(kInsnSecondOpnd));
1659 ImmOperand &machineCodeOffset = static_cast<ImmOperand&>(insn.GetOperand(kInsnThirdOpnd));
1660 ImmOperand &constTableOffset = static_cast<ImmOperand&>(insn.GetOperand(kInsnFourthOpnd));
1661 Operand &machineCodeMemOpnd = cgFunc.CreateMemOpnd(jsFuncReg, machineCodeOffset.GetValue(), k64BitSize);
1662 Insn &ldrMachineCodeInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, machineCodeMemOpnd);
1663 EncodeInstruction(ldrMachineCodeInsn, label2Offset, objFuncEmitInfo);
1664 Operand &constTableMemOpnd = cgFunc.CreateMemOpnd(destReg, constTableOffset.GetValue(), k64BitSize);
1665 Insn &ldrConstantTableInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, constTableMemOpnd);
1666 EncodeInstruction(ldrConstantTableInsn, label2Offset, objFuncEmitInfo);
1667 return;
1668 }
1669
EmitHeapConst(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo)1670 void AArch64ObjEmitter::EmitHeapConst(const Insn &insn, const std::vector<uint32> &label2Offset,
1671 ObjFuncEmitInfo &objFuncEmitInfo)
1672 {
1673 AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
1674 RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
1675 RegOperand &constTableStart = static_cast<RegOperand&>(insn.GetOperand(kInsnSecondOpnd));
1676 ImmOperand &constSlotIndex = static_cast<ImmOperand&>(insn.GetOperand(kInsnThirdOpnd));
1677 Operand &constSlotMem = cgFunc.CreateMemOpnd(
1678 constTableStart, constSlotIndex.GetValue() * k8ByteSize, k64BitSize);
1679 Insn &ldrConstantInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, constSlotMem);
1680 EncodeInstruction(ldrConstantInsn, label2Offset, objFuncEmitInfo);
1681 return;
1682 }
1683
EmitTaggedIsHeapObject(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo)1684 void AArch64ObjEmitter::EmitTaggedIsHeapObject(const Insn &insn, const std::vector<uint32> &label2Offset,
1685 ObjFuncEmitInfo &objFuncEmitInfo)
1686 {
1687 AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
1688 RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
1689 RegOperand &srcReg = static_cast<RegOperand&>(insn.GetOperand(kInsnSecondOpnd));
1690 RegOperand &heapObjectTagMask = static_cast<RegOperand&>(insn.GetOperand(kInsnThirdOpnd));
1691 Insn &insn3 = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xandrrr, destReg, srcReg, heapObjectTagMask);
1692 EncodeInstruction(insn3, label2Offset, objFuncEmitInfo);
1693 Operand &rflag = cgFunc.GetOrCreateRflag();
1694 ImmOperand &immValueZero = cgFunc.CreateImmOperand(0, k16BitSize, false);
1695 Insn &insn4 = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcmpri, rflag, destReg, immValueZero);
1696 EncodeInstruction(insn4, label2Offset, objFuncEmitInfo);
1697
1698 CondOperand &condOpnd = cgFunc.GetCondOperand(CC_EQ);
1699 Insn &insn5 = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcsetrc, destReg, condOpnd, rflag);
1700 EncodeInstruction(insn5, label2Offset, objFuncEmitInfo);
1701 return;
1702 }
1703
EmitIsStableElements(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo)1704 void AArch64ObjEmitter::EmitIsStableElements(const Insn &insn, const std::vector<uint32> &label2Offset,
1705 ObjFuncEmitInfo &objFuncEmitInfo)
1706 {
1707 AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
1708 RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
1709 RegOperand &srcReg = static_cast<RegOperand&>(insn.GetOperand(kInsnSecondOpnd));
1710 int64 bitFieldOffset = static_cast<ImmOperand&>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
1711 CHECK_FATAL(bitFieldOffset % k8ByteSize == 0, "unsupported encoding offset");
1712 // ldr dest, [src, #8], get JSHClass BitField
1713 Operand &bitFieldMem = cgFunc.CreateMemOpnd(srcReg, bitFieldOffset, k32BitSize);
1714 Insn &ldrBitField = cgFunc.GetInsnBuilder()->BuildInsn(MOP_wldr, destReg, bitFieldMem);
1715 EncodeInstruction(ldrBitField, label2Offset, objFuncEmitInfo);
1716
1717 // ubfx dest, dest, 19, 1, get bit JSHClass::IsStableElementsBit::START_BIT
1718 ImmOperand &stableElementsBit = static_cast<ImmOperand&>(insn.GetOperand(kInsnFourthOpnd));
1719 ImmOperand &bitsLength = cgFunc.CreateImmOperand(1, k64BitSize, false);
1720 Insn &getBitInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_wubfxrri5i5, destReg, destReg,
1721 stableElementsBit, bitsLength);
1722 EncodeInstruction(getBitInsn, label2Offset, objFuncEmitInfo);
1723 return;
1724 }
1725
EmitHasPendingException(const Insn & insn,const std::vector<uint32> & label2Offset,ObjFuncEmitInfo & objFuncEmitInfo)1726 void AArch64ObjEmitter::EmitHasPendingException(const Insn &insn, const std::vector<uint32> &label2Offset,
1727 ObjFuncEmitInfo &objFuncEmitInfo)
1728 {
1729 AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
1730 RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
1731 RegOperand &srcReg = static_cast<RegOperand&>(insn.GetOperand(kInsnSecondOpnd));
1732 DEBUG_ASSERT(insn.GetOperand(kInsnThirdOpnd).IsImmediate(), "wrong operand type");
1733 ImmOperand &offsetOpnd = static_cast<ImmOperand&>(insn.GetOperand(kInsnThirdOpnd));
1734 ImmOperand &holeValue = static_cast<ImmOperand&>(insn.GetOperand(kInsnFourthOpnd));
1735 CHECK_FATAL(holeValue.GetValue() == 5, "unexpected tagged hole value"); // 5: VALUE_HOLE
1736 // ExceptionOffset: 0xf70(3952)
1737 int64 offset = offsetOpnd.GetValue();
1738 Operand &exceptionMem = cgFunc.CreateMemOpnd(srcReg, offset, k64BitSize);
1739 Insn &ldrExceptionInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, exceptionMem);
1740 EncodeInstruction(ldrExceptionInsn, label2Offset, objFuncEmitInfo);
1741
1742 // HOLE : 5
1743 ImmOperand &taggedHole = cgFunc.CreateImmOperand(holeValue.GetValue(), k16BitSize, false);
1744 Operand &rflag = cgFunc.GetOrCreateRflag();
1745 Insn &cmpInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcmpri, rflag, destReg, taggedHole);
1746 EncodeInstruction(cmpInsn, label2Offset, objFuncEmitInfo);
1747
1748 CondOperand &condOpnd = cgFunc.GetCondOperand(CC_NE);
1749 Insn &csetInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcsetrc, destReg, condOpnd, rflag);
1750 EncodeInstruction(csetInsn, label2Offset, objFuncEmitInfo);
1751 return;
1752 }
1753
/* Emit the inline "tagged object is a string" type-range check:
 *   ldr  dest, [obj]                     ; load the object's hclass
 *   ldrb dest, [dest, #bitFieldOffset]   ; load the type byte
 *   sub  dest, dest, #stringFirst
 *   cmp  dest, #(stringLast - stringFirst)
 *   cset dest, ls                        ; 1 iff first <= type <= last
 */
void AArch64ObjEmitter::EmitTaggedObjectIsString(const Insn &insn, const std::vector<uint32> &label2Offset,
                                                 ObjFuncEmitInfo &objFuncEmitInfo)
{
    AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
    RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
    // glue is not used.
    RegOperand &srcReg = static_cast<RegOperand&>(insn.GetOperand(kInsnThirdOpnd));
    /* hclass pointer sits at offset 0 of the object */
    Operand &hclass = cgFunc.CreateMemOpnd(srcReg, 0, k64BitSize);
    Insn &ldrObj = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, hclass);
    EncodeInstruction(ldrObj, label2Offset, objFuncEmitInfo);
    int64 bitFieldOffset = static_cast<ImmOperand&>(insn.GetOperand(kInsnFourthOpnd)).GetValue();
    CHECK_FATAL(bitFieldOffset % k8ByteSize == 0, "unsupported encoding offset");
    /* single-byte load of the type field inside the hclass */
    Operand &bitField = cgFunc.CreateMemOpnd(destReg, bitFieldOffset, k8BitSize);
    Insn &ldrBitField = cgFunc.GetInsnBuilder()->BuildInsn(MOP_wldrb, destReg, bitField);
    EncodeInstruction(ldrBitField, label2Offset, objFuncEmitInfo);
    /* rebase the type so the string range starts at 0 */
    int64 stringFirstValue = static_cast<ImmOperand&>(insn.GetOperand(kInsnFifthOpnd)).GetValue();
    ImmOperand &stringFirst = cgFunc.CreateImmOperand(stringFirstValue, k16BitSize, false);
    Insn &subInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xsubrri12, destReg, destReg, stringFirst);
    EncodeInstruction(subInsn, label2Offset, objFuncEmitInfo);

    /* unsigned compare against the width of the string type range */
    Operand &rflag = cgFunc.GetOrCreateRflag();
    int64 stringLastValue = static_cast<ImmOperand&>(insn.GetOperand(kInsnSixthOpnd)).GetValue();
    ImmOperand &toStringLastDis = cgFunc.CreateImmOperand(stringLastValue - stringFirstValue,
                                                          k16BitSize, false);
    Insn &cmpInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcmpri, rflag, destReg, toStringLastDis);
    EncodeInstruction(cmpInsn, label2Offset, objFuncEmitInfo);

    /* cset with LS (unsigned lower-or-same) implements the range test */
    CondOperand &condOpnd = cgFunc.GetCondOperand(CC_LS);
    Insn &csetInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcsetrc, destReg, condOpnd, rflag);
    EncodeInstruction(csetInsn, label2Offset, objFuncEmitInfo);
    return;
}
1786
/* Emit the inline "is copy-on-write array" type-range check:
 *   ldr  dest, [obj, #elementsOffset]    ; load the elements object
 *   ldr  dest, [dest]                    ; load its hclass
 *   ldrb dest, [dest, #bitFieldOffset]   ; load the type byte
 *   sub  dest, dest, #cowFirst
 *   cmp  dest, #(cowLast - cowFirst)
 *   cset dest, ls                        ; 1 iff first <= type <= last
 */
void AArch64ObjEmitter::EmitIsCowArray(const Insn &insn, const std::vector<uint32> &label2Offset,
                                       ObjFuncEmitInfo &objFuncEmitInfo)
{
    AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc&>(objFuncEmitInfo.GetCGFunc());
    RegOperand &destReg = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
    // glue is not used.
    RegOperand &srcReg = static_cast<RegOperand&>(insn.GetOperand(kInsnThirdOpnd));
    /* load the elements object out of the array */
    int64 elementsOffset = static_cast<ImmOperand&>(insn.GetOperand(kInsnFourthOpnd)).GetValue();
    Operand &elements = cgFunc.CreateMemOpnd(srcReg, elementsOffset, k64BitSize);
    Insn &ldrElements = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, elements);
    EncodeInstruction(ldrElements, label2Offset, objFuncEmitInfo);

    /* hclass pointer sits at offset 0 of the elements object */
    Operand &hclass = cgFunc.CreateMemOpnd(destReg, 0, k64BitSize);
    Insn &ldrObj = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xldr, destReg, hclass);
    EncodeInstruction(ldrObj, label2Offset, objFuncEmitInfo);

    /* single-byte load of the type field inside the hclass */
    int64 bitFieldOffset = static_cast<ImmOperand&>(insn.GetOperand(kInsnFifthOpnd)).GetValue();
    CHECK_FATAL(bitFieldOffset % k8ByteSize == 0, "unsupported encoding offset");
    Operand &bitField = cgFunc.CreateMemOpnd(destReg, bitFieldOffset, k8BitSize);
    Insn &ldrBitField = cgFunc.GetInsnBuilder()->BuildInsn(MOP_wldrb, destReg, bitField);
    EncodeInstruction(ldrBitField, label2Offset, objFuncEmitInfo);

    /* rebase the type so the COW range starts at 0 */
    int64 cowFirstValue = static_cast<ImmOperand&>(insn.GetOperand(kInsnSixthOpnd)).GetValue();
    ImmOperand &cowFirst = cgFunc.CreateImmOperand(cowFirstValue, k16BitSize, false);
    Insn &subInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xsubrri12, destReg, destReg, cowFirst);
    EncodeInstruction(subInsn, label2Offset, objFuncEmitInfo);

    /* unsigned compare against the width of the COW type range */
    Operand &rflag = cgFunc.GetOrCreateRflag();
    int64 cowLastValue = static_cast<ImmOperand&>(insn.GetOperand(kInsnSeventhOpnd)).GetValue();
    ImmOperand &toCowLastDis = cgFunc.CreateImmOperand(cowLastValue - cowFirstValue, k16BitSize, false);
    Insn &cmpInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcmpri, rflag, destReg, toCowLastDis);
    EncodeInstruction(cmpInsn, label2Offset, objFuncEmitInfo);

    /* cset with LS (unsigned lower-or-same) implements the range test */
    CondOperand &condOpnd = cgFunc.GetCondOperand(CC_LS);
    Insn &csetInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_xcsetrc, destReg, condOpnd, rflag);
    EncodeInstruction(csetInsn, label2Offset, objFuncEmitInfo);
    return;
}
1825
/* Dispatch a pseudo (intrinsic) mop to the dedicated emitter that expands it
 * into one or more real AArch64 instructions; unknown mops are fatal. */
void AArch64ObjEmitter::EmitIntrinsicInsn(const Insn &insn, const std::vector<uint32> &label2Offset,
                                          ObjFuncEmitInfo &objFuncEmitInfo)
{
    switch (insn.GetMachineOpcode()) {
        // adrp xd, label
        // add xd, xd, #:lo12:label
        case MOP_adrp_label:
            EmitAdrpLabel(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_get_heap_const_table:
            EmitGetHeapConstTable(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_heap_const:
            EmitHeapConst(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_tagged_is_heapobject:
            EmitTaggedIsHeapObject(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_is_stable_elements:
            EmitIsStableElements(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_has_pending_exception:
            EmitHasPendingException(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_tagged_object_is_string:
            EmitTaggedObjectIsString(insn, label2Offset, objFuncEmitInfo);
            break;
        case MOP_is_cow_array:
            EmitIsCowArray(insn, label2Offset, objFuncEmitInfo);
            break;
        default:
            CHECK_FATAL(false, "unsupport mop in EmitIntrinsicInsn!\n");
    }
}
1860
/* Emit intrinsic mops whose expansion is appended as raw instruction words
 * rather than going through the regular encoder. */
void AArch64ObjEmitter::EmitSpinIntrinsicInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo)
{
    switch (insn.GetMachineOpcode()) {
        case MOP_tls_desc_rel: {
            /* two raw 64-bit "add x0, x0, #imm" words (0x91400000 carries the
             * lsl-#12 shift bit, 0x91000000 the unshifted form) with zero
             * immediates; presumably placeholders patched later by TLS
             * relocations -- TODO confirm against the relocation emitter. */
            objFuncEmitInfo.AppendTextData(0x91400000, k4ByteSize);
            objFuncEmitInfo.AppendTextData(0x91000000, k4ByteSize);
            break;
        }
        default:
            CHECK_FATAL(false, "unsupport mop in EmitSpinIntrinsicInsn!\n");
    }
}
1873 } /* namespace maplebe */
1874