1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "aarch64_obj_emitter.h"
17 #include "aarch64_isa.h"
18
19 namespace {
20 enum ShiftNumber : maple::uint8 {
21 kShiftFour = 4,
22 kShiftFive = 5,
23 kShiftSix = 6,
24 kShiftEight = 8,
25 kShiftTen = 10,
26 kShiftTwelve = 12,
27 kShiftThirteen = 13,
28 kShiftFifteen = 15,
29 kShiftSixteen = 16,
30 kShiftNineteen = 19,
31 kShiftTwenty = 20,
32 kShiftTwentyOne = 21,
33 kShiftTwentyTwo = 22,
34 kShiftTwentyFour = 24,
35 kShiftTwentyNine = 29,
36 };
37
38 enum ShiftTypeValue : maple::uint32 {
39 kShiftLSL = 0,
40 kShiftLSR = 1,
41 kShiftASR = 2,
42 };
43
44 /* from armv8 manual C1.2.3 */
45 maple::uint8 ccEncode[maplebe::kCcLast] = {
46 #define CONDCODE(a, encode) (encode),
47 #include "aarch64_cc.def"
48 #undef CONDCODE
49 };
50 }; // namespace
51
52 namespace maplebe {
53 /* fix up local branches: b .label, b.cond .label, and ldr label insns */
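/*
 * A sketch of the patching scheme used below (based on the ARMv8-A encodings):
 * the emitted instruction word already has its immediate field zeroed, so the
 * fixup simply ORs in the PC-relative *word* offset (byte distance >> 2):
 *   - imm26 (b / bl)                     -> bits [25:0]
 *   - imm19 (b.cond / cbz / ldr literal) -> bits [23:5]
 *   - imm14 (tbz / tbnz)                 -> bits [18:5]
 * e.g. a b.cond at text offset 0x10 targeting 0x28 gets imm19 = (0x28 - 0x10) >> 2 = 6,
 * i.e. the value 6 << 5 is ORed into the word at offset 0x10.
 */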
54 void AArch64ObjFuncEmitInfo::HandleLocalBranchFixup(const std::vector<uint32> &label2Offset,
55 const std::vector<uint32> &symbol2Offset)
56 {
57 for (auto *fixup : localFixups) {
58 uint32 useOffset = fixup->GetOffset();
59 uint32 useLabelIndex = fixup->GetLabelIndex();
60 uint32 defOffset = label2Offset[useLabelIndex];
61
62 FixupKind fixupKind = fixup->GetFixupKind();
63 if (defOffset == 0xFFFFFFFFULL) {
64 CHECK_FATAL(static_cast<AArch64FixupKind>(fixupKind) == kAArch64LoadPCRelImm19, "fixup is not local");
65 }
66 if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64CondBranchPCRelImm19 ||
67 static_cast<AArch64FixupKind>(fixupKind) == kAArch64CompareBranchPCRelImm19) {
68 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
69 uint32 mask = 0x7FFFF;
70 #ifdef EMIT_DEBUG
71 LogInfo::MapleLogger() << "contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
72 #endif
73 CHECK_FATAL(useOffset < textData.size(), "out of range");
74 uint32 newValue = GetTextDataElem32(useOffset) | ((pcRelImm & mask) << kShiftFive);
75 SwapTextData(&newValue, useOffset, sizeof(uint32));
76 #ifdef EMIT_DEBUG
77 LogInfo::MapleLogger() << "after contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
78 #endif
79 } else if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64UnCondBranchPCRelImm26) {
80 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
81 uint32 mask = 0x3FFFFFF;
82 #ifdef EMIT_DEBUG
83 LogInfo::MapleLogger() << "contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
84 #endif
85 CHECK_FATAL(useOffset < textData.size(), "out of vector size!");
86 uint32 newValue = GetTextDataElem32(useOffset) | (pcRelImm & mask);
87 SwapTextData(&newValue, useOffset, sizeof(uint32));
88 #ifdef EMIT_DEBUG
89 LogInfo::MapleLogger() << "after contents: " << std::hex << GetTextDataElem32(useOffset) << "\n";
90 #endif
91 } else if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64TestBranchPCRelImm14) {
92 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
93 uint32 mask = 0x3FFF;
94 CHECK_FATAL(useOffset < textData.size(), "out of vector size");
95 uint32 newValue = GetTextDataElem32(useOffset) | ((pcRelImm & mask) << kShiftFive);
96 SwapTextData(&newValue, useOffset, sizeof(uint32));
97 } else if (static_cast<AArch64FixupKind>(fixupKind) == kAArch64LoadPCRelImm19) {
98 defOffset = symbol2Offset[useLabelIndex];
99 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
100 uint32 mask = 0x7FFFF;
101 uint32 newValue = GetTextDataElem32(useOffset) | ((pcRelImm & mask) << kShiftFive);
102 SwapTextData(&newValue, useOffset, sizeof(uint32));
103 }
104 }
105 localFixups.clear();
106 }
107
108 void AArch64ObjEmitter::HandleTextSectionGlobalFixup()
109 {
110 for (auto *content : contents) {
111 if (content == nullptr) {
112 continue;
113 }
114 for (auto *fixup : content->GetGlobalFixups()) {
115 if (fixup->GetFixupKind() == kLSDAFixup) {
116 HandleLSDAFixup(*content, *fixup);
117 continue;
118 }
119 switch (static_cast<AArch64FixupKind>(fixup->GetFixupKind())) {
120 case kAArch64CallPCRelImm26: {
121 HandleCallFixup(*content, *fixup);
122 break;
123 }
124 case kAArch64PCRelAdrImm21: {
125 HandleAdrFixup(*content, *fixup);
126 break;
127 }
128 default:
129 DEBUG_ASSERT(false, "unsupported FixupKind");
130 break;
131 }
132 }
133 }
134 }
135
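/*
 * Emits .rela.text entries for fixups that cannot be resolved inside this
 * object. Note the r_info packing below follows the standard ELF64 layout,
 * ELF64_R_INFO(sym, type) == (sym << 32) + (type & 0xffffffff).
 */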
136 void AArch64ObjEmitter::HandleTextSectionFixup()
137 {
138 relaSection = memPool->New<RelaSection>(".rela.text", SHT_RELA, SHF_INFO_LINK, textSection->GetIndex(), k8ByteSize,
139 *symbolTabSection, *this, *memPool);
140 for (auto *content : contents) {
141 if (content == nullptr) {
142 continue;
143 }
144 for (auto *fixup : content->GetGlobalFixups()) {
145 switch (static_cast<AArch64FixupKind>(fixup->GetFixupKind())) {
146 case kAArch64CallPCRelImm26: {
147 auto nameIndex = strTabSection->AddString(fixup->GetLabel());
148 symbolTabSection->AppendSymbol({static_cast<Word>(nameIndex),
149 static_cast<uint8>((STB_GLOBAL << kShiftFour) + (STT_NOTYPE & 0xf)),
150 0, 0, 0, 0});
151 symbolTabSection->AppendIdxInSymbols(0); // 0: temporary placeholder index
152 uint32 relOffset = fixup->GetRelOffset();
153 uint32 offset = fixup->GetOffset();
154 uint64 type = R_AARCH64_CALL26;
155 relaSection->AppendRela(
156 {offset, static_cast<Xword>((symbolTabSection->GetIdxInSymbols(0) << 32) + (type & 0xffffffff)),
157 relOffset});
158 break;
159 }
160 case kAArch64PCRelAdrpImm21: {
161 uint32 relOffset = fixup->GetRelOffset();
162 uint32 offset = fixup->GetOffset();
163 uint64 type = R_AARCH64_ADR_PREL_PG_HI21;
164 int64 rodataSecSymIdx = ~rodataSection->GetIndex() + 1;
165 relaSection->AppendRela(
166 {offset,
167 static_cast<Xword>((symbolTabSection->GetIdxInSymbols(rodataSecSymIdx) << 32) +
168 (type & 0xffffffff)),
169 relOffset});
170 break;
171 }
172 case kAArch64PCRelAdrImm21: {
173 break;
174 }
175 case kAArch64LdrPCRelLo12:
176 case kAArch64AddPCRelLo12: {
177 int32 relOffset = static_cast<int32>(fixup->GetRelOffset());
178 uint32 offset = fixup->GetOffset();
179 uint64 type = R_AARCH64_ADD_ABS_LO12_NC;
180 int64 rodataSecSymIdx = ~rodataSection->GetIndex() + 1;
181 relaSection->AppendRela(
182 {offset,
183 static_cast<Xword>((symbolTabSection->GetIdxInSymbols(rodataSecSymIdx) << 32) +
184 (type & 0xffffffff)),
185 relOffset});
186 break;
187 }
188 default:
189 DEBUG_ASSERT(false, "unsupported FixupKind");
190 break;
191 }
192 }
193 }
194 }
195
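/*
 * Direct calls (bl) can be resolved here only when the callee has an entry in
 * globalLabel2Offset, i.e. it was emitted into the same text image; the imm26
 * field then holds (calleeOffset - callSiteOffset) >> 2. References that are
 * not resolved here are presumably covered by the R_AARCH64_CALL26 relocations
 * emitted in HandleTextSectionFixup().
 */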
196 void AArch64ObjEmitter::HandleCallFixup(ObjFuncEmitInfo &funcEmitInfo, const Fixup &fixup)
197 {
198 AArch64ObjFuncEmitInfo &objFuncEmitInfo = static_cast<AArch64ObjFuncEmitInfo &>(funcEmitInfo);
199 uint32 useOffset = objFuncEmitInfo.GetStartOffset() + fixup.GetOffset();
200 const std::string &funcName = fixup.GetLabel();
201 auto str2objSymbolItr = globalLabel2Offset.find(funcName);
202 if (str2objSymbolItr != globalLabel2Offset.end()) {
203 uint32 defOffset = str2objSymbolItr->second.offset;
204 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
205 uint32 newValue = objFuncEmitInfo.GetTextDataElem32(fixup.GetOffset()) | (pcRelImm & 0x3FFFFFF);
206 objFuncEmitInfo.SwapTextData(&newValue, fixup.GetOffset(), sizeof(uint32));
207 }
208 }
209
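/*
 * adr encodes a signed 21-bit byte offset split across two fields:
 * immlo = offset[1:0] at bits [30:29] and immhi = offset[20:2] at bits [23:5],
 * which is what the kShiftTwentyNine / kShiftFive shifts below implement.
 * e.g. an adr at 0x100 addressing 0x10B (offset 0xB = 0b1011) stores
 * immlo = 0b11 and immhi = 0b10.
 */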
210 void AArch64ObjEmitter::HandleAdrFixup(ObjFuncEmitInfo &funcEmitInfo, const Fixup &fixup)
211 {
212 AArch64ObjFuncEmitInfo &objFuncEmitInfo = static_cast<AArch64ObjFuncEmitInfo &>(funcEmitInfo);
213 uint32 useOffset = objFuncEmitInfo.GetStartOffset() + fixup.GetOffset();
214 const std::string &label = fixup.GetLabel();
215 auto str2objSymbolItr = globalLabel2Offset.find(label);
216 if (str2objSymbolItr != globalLabel2Offset.end()) {
217 uint32 defOffset = str2objSymbolItr->second.offset + fixup.GetRelOffset();
218 uint32 pcRelImm = defOffset - useOffset;
219 uint32 immLow = (pcRelImm & 0x3) << kShiftTwentyNine;
220 uint32 immHigh = ((pcRelImm >> k2BitSize) & 0x7FFFF) << kShiftFive;
221 uint32 newValue = objFuncEmitInfo.GetTextDataElem32(fixup.GetOffset()) | immLow | immHigh;
222 objFuncEmitInfo.SwapTextData(&newValue, fixup.GetOffset(), sizeof(uint32));
223 }
224 }
225
226 void AArch64ObjEmitter::HandleLSDAFixup(ObjFuncEmitInfo &funcEmitInfo, const Fixup &fixup)
227 {
228 AArch64ObjFuncEmitInfo &objFuncEmitInfo = static_cast<AArch64ObjFuncEmitInfo &>(funcEmitInfo);
229 uint32 value = objFuncEmitInfo.GetExceptStartOffset() - objFuncEmitInfo.GetStartOffset();
230 objFuncEmitInfo.SwapTextData(&value, fixup.GetOffset(), sizeof(uint32));
231 }
232
233 void AArch64ObjEmitter::AppendTextSectionData()
234 {
235 auto &contents = GetContents();
236 for (auto *content : contents) {
237 if (content == nullptr) {
238 continue;
239 }
240 MapleVector<uint8> funcTextData = content->GetTextData();
241 textSection->AppendData(funcTextData);
242 }
243 }
244
245 void AArch64ObjEmitter::AppendGlobalLabel()
246 {
247 uint32 lastModulePc = cg->GetMIRModule()->GetLastModulePC();
248 auto &contents = GetContents();
249 uint32 offset = lastModulePc;
250 for (size_t i = 0; i < contents.size(); i++) {
251 auto *content = contents[i];
252 if (content == nullptr) {
253 continue;
254 }
255 content->SetStartOffset(offset);
256 ObjLabel objLabel = {offset, content->GetTextDataSize()};
257 std::string funcName(content->GetFuncName().c_str());
258 const auto &emitMemoryManager = CGOptions::GetInstance().GetEmitMemoryManager();
259 if (emitMemoryManager.funcAddressSaver != nullptr) {
260 emitMemoryManager.funcAddressSaver(emitMemoryManager.codeSpace, funcName, offset);
261 }
262 if (emitMemoryManager.codeSpace != nullptr) {
263 auto &offset2StackMapInfo = content->GetOffset2StackMapInfo();
264 for (const auto &elem : offset2StackMapInfo) {
265 const auto &stackMapInfo = elem.second;
266 emitMemoryManager.pc2CallSiteInfoSaver(
267 emitMemoryManager.codeSpace, content->GetStartOffset() + elem.first, stackMapInfo.referenceMap);
268 emitMemoryManager.pc2DeoptInfoSaver(emitMemoryManager.codeSpace, content->GetStartOffset() + elem.first,
269 stackMapInfo.deoptInfo);
270 }
271 offset2StackMapInfo.clear();
272 }
273
274 offset += content->GetTextDataSize();
275 cg->GetMIRModule()->SetCurModulePC(offset);
276 RegisterGlobalLabel(funcName, objLabel);
277 /* register the start offset of each switch table */
278 const MapleMap<MapleString, uint32> &switchTableOffset = content->GetSwitchTableOffset();
279 for (auto &elem : switchTableOffset) {
280 ObjLabel switchTableLabel = {elem.second + content->GetStartOffset(), 0};
281 RegisterGlobalLabel(elem.first.c_str(), switchTableLabel);
282 }
283 }
284 }
285
286 void AArch64ObjEmitter::AppendSymsToSymTabSec()
287 {
288 Address offset = 0;
289 auto &contents = GetContents();
290 for (auto *content : contents) {
291 if (content == nullptr) {
292 continue;
293 }
294 // func symbol
295 AddFuncSymbol(content->GetFuncName(), content->GetTextData().size(), offset);
296 offset += content->GetTextData().size();
297 }
298 }
299
300 void AArch64ObjEmitter::InitSections()
301 {
302 (void)memPool->New<DataSection>(" ", SHT_NULL, 0, 0, *this, *memPool);
303 textSection =
304 memPool->New<DataSection>(".text", SHT_PROGBITS, SHF_ALLOC | SHF_EXECINSTR, k4ByteSize, *this, *memPool);
305 dataSection = memPool->New<DataSection>(".data", SHT_PROGBITS, SHF_WRITE | SHF_ALLOC, k8ByteSize, *this, *memPool);
306 strTabSection = memPool->New<StringSection>(".strtab", SHT_STRTAB, 0, 1, *this, *memPool);
307 symbolTabSection =
308 memPool->New<SymbolSection>(".symtab", SHT_SYMTAB, 0, sizeof(Symbol), *this, *memPool, *strTabSection);
309 shStrSection = memPool->New<StringSection>(".shstrtab", SHT_STRTAB, 0, 1, *this, *memPool);
310 }
311
312 void AArch64ObjEmitter::LayoutSections()
313 {
314 /* Init elf file header */
315 InitELFHeader();
316 globalOffset = sizeof(FileHeader);
317 globalOffset = Alignment::Align<Offset>(globalOffset, k8ByteSize);
318
319 globalAddr = globalOffset;
320
321 for (auto *section : sections) {
322 section->SetSectionHeaderNameIndex(static_cast<Word>(shStrSection->AddString(section->GetName())));
323 }
324
325 for (auto *section : sections) {
326 globalOffset = Alignment::Align<Offset>(globalOffset, section->GetAlign());
327 globalAddr = Alignment::Align<Address>(globalAddr, section->GetAlign());
328 section->Layout();
329 }
330
331 globalOffset = Alignment::Align<Offset>(globalOffset, k8ByteSize);
332 header.e_shoff = globalOffset;
333 header.e_phnum = 0;
334 header.e_shnum = sections.size();
335 }
336
337 void AArch64ObjEmitter::UpdateMachineAndFlags(FileHeader &header)
338 {
339 header.e_machine = EM_AARCH64;
340 header.e_flags = 0;
341 }
342
343 /* given an insn, return its binary encoding */
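/*
 * md.GetMopEncode() supplies the fixed opcode bits for the machine opcode; the
 * Gen* helpers below only compute the variable register / immediate fields and
 * OR them into that template, so each case returns "opcode | operand fields".
 */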
344 uint32 AArch64ObjEmitter::GetBinaryCodeForInsn(const Insn &insn, const std::vector<uint32> &label2Offset,
345 ObjFuncEmitInfo &objFuncEmitInfo) const
346 {
347 const InsnDesc &md = AArch64CG::kMd[insn.GetMachineOpcode()];
348 uint32 binInsn = md.GetMopEncode();
349 switch (md.GetEncodeType()) {
350 case kMovReg:
351 return GenMovReg(insn);
352
353 case kMovImm:
354 return GenMovImm(insn);
355
356 case kAddSubExtendReg:
357 return binInsn | GenAddSubExtendRegInsn(insn);
358
359 case kAddSubImm:
360 return binInsn | GenAddSubImmInsn(insn);
361
362 case kAddSubShiftImm:
363 return binInsn | GenAddSubShiftImmInsn(insn);
364
365 case kAddSubReg:
366 return binInsn | GenAddSubRegInsn(insn);
367
368 case kAddSubShiftReg:
369 return binInsn | GenAddSubShiftRegInsn(insn);
370
371 case kBitfield: {
372 if (insn.GetMachineOpcode() == MOP_xuxtw64) {
373 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
374 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
375 opnd |= (0b11111 << kShiftFive) | AArch64CG::kMd[MOP_wiorrrr].GetMopEncode();
376 return opnd;
377 }
378 return binInsn | GenBitfieldInsn(insn);
379 }
380
381 case kExtract:
382 return binInsn | GenExtractInsn(insn);
383
384 case kBranchImm:
385 return binInsn | GenBranchImmInsn(insn, label2Offset, objFuncEmitInfo);
386
387 case kBranchReg:
388 return binInsn | GenBranchRegInsn(insn);
389
390 case kCompareBranch:
391 return binInsn | GenCompareBranchInsn(insn, objFuncEmitInfo);
392
393 case kCondCompareImm:
394 return binInsn | GenCondCompareImmInsn(insn);
395
396 case kCondCompareReg:
397 return binInsn | GenCondCompareRegInsn(insn);
398
399 case kConditionalSelect:
400 return binInsn | GenConditionalSelectInsn(insn);
401
402 case kDataProcess1Src:
403 return binInsn | GenDataProcess1SrcInsn(insn);
404
405 case kDataProcess2Src:
406 return binInsn | GenDataProcess2SrcInsn(insn);
407
408 case kDataProcess3Src:
409 return binInsn | GenDataProcess3SrcInsn(insn);
410
411 case kFloatIntConversions:
412 return binInsn | GenFloatIntConversionsInsn(insn);
413
414 case kFloatCompare:
415 return binInsn | GenFloatCompareInsn(insn);
416
417 case kFloatDataProcessing1:
418 return binInsn | GenFloatDataProcessing1Insn(insn);
419
420 case kFloatDataProcessing2:
421 return binInsn | GenFloatDataProcessing2Insn(insn);
422
423 case kFloatImm:
424 return binInsn | GenFloatImmInsn(insn);
425
426 case kFloatCondSelect:
427 return binInsn | GenFloatCondSelectInsn(insn);
428
429 case kLoadStoreReg:
430 return GenLoadStoreRegInsn(insn, objFuncEmitInfo);
431
432 case kLoadStoreAR:
433 return binInsn | GenLoadStoreARInsn(insn);
434
435 case kLoadExclusive:
436 return binInsn | GenLoadExclusiveInsn(insn);
437
438 case kLoadExclusivePair:
439 return binInsn | GenLoadExclusivePairInsn(insn);
440
441 case kStoreExclusive:
442 return binInsn | GenStoreExclusiveInsn(insn);
443
444 case kStoreExclusivePair:
445 return binInsn | GenStoreExclusivePairInsn(insn);
446
447 case kLoadPair:
448 return binInsn | GenLoadPairInsn(insn);
449
450 case kStorePair:
451 return binInsn | GenStorePairInsn(insn);
452
453 case kLoadStoreFloat:
454 return GenLoadStoreFloatInsn(insn, objFuncEmitInfo);
455
456 case kLoadPairFloat:
457 return binInsn | GenLoadPairFloatInsn(insn);
458
459 case kStorePairFloat:
460 return binInsn | GenStorePairFloatInsn(insn);
461
462 case kLoadLiteralReg:
463 return binInsn | GenLoadLiteralRegInsn(insn, objFuncEmitInfo);
464
465 case kLogicalReg:
466 return binInsn | GenLogicalRegInsn(insn);
467
468 case kLogicalImm:
469 return binInsn | GenLogicalImmInsn(insn);
470
471 case kMoveWide:
472 return binInsn | GenMoveWideInsn(insn);
473
474 case kPCRelAddr:
475 return binInsn | GenPCRelAddrInsn(insn, objFuncEmitInfo);
476
477 case kAddPCRelAddr:
478 return binInsn | GenAddPCRelAddrInsn(insn, objFuncEmitInfo);
479
480 case kSystemInsn:
481 return binInsn | GenSystemInsn(insn);
482
483 case kTestBranch:
484 return binInsn | GenTestBranchInsn(insn, objFuncEmitInfo);
485
486 case kCondBranch:
487 return binInsn | GenCondBranchInsn(insn, objFuncEmitInfo);
488
489 case kUnknownEncodeType:
490 break;
491 case kBrkInsn:
492 return binInsn | ((GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) & 0xFFFF) << kShiftFive);
493 default:
494 break;
495 }
496 return binInsn;
497 }
498
499 /* get binary code of operand */
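/*
 * Integer registers map to encodings 0..30; both SP and ZR share encoding 31 in
 * the ISA. The "- 1" / "- kRegNum2" adjustments below presumably compensate for
 * RSP and RZR being modelled as extra pseudo register numbers after R0..R30 in
 * the maple register enum, so that both end up as 31.
 */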
500 uint32 AArch64ObjEmitter::GetOpndMachineValue(const Operand &opnd) const
501 {
502 if (opnd.IsRegister()) {
503 const RegOperand ®Opnd = static_cast<const RegOperand &>(opnd);
504 uint32 regNO = regOpnd.GetRegisterNumber();
505 if (regNO == kRFLAG) {
506 return 0;
507 }
508 if (regOpnd.IsOfIntClass()) {
509 if (regOpnd.GetRegisterNumber() == RZR) {
510 return regNO - R0 - kRegNum2;
511 }
512 if (regOpnd.GetRegisterNumber() == RSP) {
513 return regNO - R0 - 1;
514 }
515 DEBUG_ASSERT(regNO >= R0, "value overflow");
516 return regNO - R0;
517 }
518 return regNO - V0;
519 } else if (opnd.IsImmediate()) {
520 return static_cast<uint32>(static_cast<const ImmOperand &>(opnd).GetValue());
521 } else if (opnd.IsConditionCode()) {
522 const CondOperand &condOpnd = static_cast<const CondOperand &>(opnd);
523 return static_cast<uint32>(ccEncode[condOpnd.GetCode()]);
524 } else if (opnd.IsOpdExtend()) {
525 const ExtendShiftOperand &exendOpnd = static_cast<const ExtendShiftOperand &>(opnd);
526 uint32 shift = exendOpnd.GetShiftAmount();
527 DEBUG_ASSERT(exendOpnd.GetExtendOp() == ExtendShiftOperand::kSXTW, "support kSXTW only!");
528 uint32 option = 0x30;
529 return option | shift;
530 } else {
531 CHECK_FATAL(false, "not supported operand type currently");
532 }
533 }
534
535 uint32 AArch64ObjEmitter::GetAdrLabelOpndValue(const Insn &insn, const Operand &opnd,
536 ObjFuncEmitInfo &objFuncEmitInfo) const
537 {
538 FixupKind fixupKind =
539 (insn.GetMachineOpcode() == MOP_xadrp) ? FixupKind(kAArch64PCRelAdrpImm21) : FixupKind(kAArch64PCRelAdrImm21);
540 if (opnd.IsMemoryAccessOperand()) {
541 const MemOperand &memOpnd = static_cast<const MemOperand &>(opnd);
542 Fixup *fixup = memPool->New<Fixup>(memOpnd.GetSymbolName(), 0, objFuncEmitInfo.GetTextDataSize(), fixupKind);
543 objFuncEmitInfo.AppendGlobalFixups(*fixup);
544 } else if (opnd.IsStImmediate()) {
545 const StImmOperand &stOpnd = static_cast<const StImmOperand &>(opnd);
546 Fixup *fixup =
547 memPool->New<Fixup>(stOpnd.GetName(), stOpnd.GetOffset(), objFuncEmitInfo.GetTextDataSize(), fixupKind);
548 objFuncEmitInfo.AppendGlobalFixups(*fixup);
549 } else {
550 CHECK_FATAL(opnd.IsImmediate(), "check kind failed");
551 }
552 return 0;
553 }
554
555 uint32 AArch64ObjEmitter::GetLoadLiteralOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
556 {
557 FixupKind fixupKind = FixupKind(kAArch64LoadPCRelImm19);
558 CHECK_FATAL(opnd.IsLabelOpnd(), "check literal kind failed");
559 const LabelOperand &label = static_cast<const LabelOperand &>(opnd);
560 LocalFixup *fixup = memPool->New<LocalFixup>(label.GetLabelIndex(), objFuncEmitInfo.GetTextDataSize(), fixupKind);
561 objFuncEmitInfo.AppendLocalFixups(*fixup);
562 return 0;
563 }
564
565 uint32 AArch64ObjEmitter::GetCondBranchOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
566 {
567 FixupKind fixupKind = FixupKind(kAArch64CondBranchPCRelImm19);
568 uint32 labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
569 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
570 objFuncEmitInfo.AppendLocalFixups(*fixup);
571 return 0;
572 }
573
574 uint32 AArch64ObjEmitter::GetUnCondBranchOpndValue(const Operand &opnd, const std::vector<uint32> &label2Offset,
575 ObjFuncEmitInfo &objFuncEmitInfo) const
576 {
577 auto labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
578 CHECK_FATAL(labelIndex < label2Offset.size(), "labelIndex is out of range");
579 uint32 defOffset = label2Offset[labelIndex];
580 if (defOffset != 0xFFFFFFFFULL) {
581 uint32 useOffset = objFuncEmitInfo.GetTextDataSize();
582 uint32 pcRelImm = (defOffset - useOffset) >> k2BitSize;
583 return (pcRelImm & 0x3FFFFFF);
584 }
585
586 FixupKind fixupKind = FixupKind(kAArch64UnCondBranchPCRelImm26);
587 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
588 objFuncEmitInfo.AppendLocalFixups(*fixup);
589 return 0;
590 }
591
592 uint32 AArch64ObjEmitter::GetCallFuncOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
593 {
594 const FuncNameOperand &funcNameOpnd = static_cast<const FuncNameOperand &>(opnd);
595 const MIRSymbol *funcSymbol = funcNameOpnd.GetFunctionSymbol();
596 FixupKind fixupKind = FixupKind(kAArch64CallPCRelImm26);
597
598 Fixup *fixup = memPool->New<Fixup>(funcNameOpnd.GetName(), 0, objFuncEmitInfo.GetTextDataSize(), fixupKind);
599 if (funcSymbol->IsGlobal()) {
600 objFuncEmitInfo.AppendGlobalFixups(*fixup);
601 }
602 return 0;
603 }
604
605 uint32 AArch64ObjEmitter::GetCompareBranchOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
606 {
607 FixupKind fixupKind = FixupKind(kAArch64CompareBranchPCRelImm19);
608 uint32 labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
609 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
610 objFuncEmitInfo.AppendLocalFixups(*fixup);
611 return 0;
612 }
613
614 uint32 AArch64ObjEmitter::GetTestBranchOpndValue(const Operand &opnd, ObjFuncEmitInfo &objFuncEmitInfo) const
615 {
616 FixupKind fixupKind = FixupKind(kAArch64TestBranchPCRelImm14);
617 uint32 labelIndex = static_cast<const LabelOperand &>(opnd).GetLabelIndex();
618 LocalFixup *fixup = memPool->New<LocalFixup>(labelIndex, objFuncEmitInfo.GetTextDataSize(), fixupKind);
619 objFuncEmitInfo.AppendLocalFixups(*fixup);
620 return 0;
621 }
622
623 uint32 AArch64ObjEmitter::GetLo12LitrealOpndValue(MOperator mOp, const Operand &opnd,
624 ObjFuncEmitInfo &objFuncEmitInfo) const
625 {
626 FixupKind fixupKind = (mOp == MOP_xadrpl12) ? FixupKind(kAArch64AddPCRelLo12) : FixupKind(kAArch64LdrPCRelLo12);
627 if (opnd.IsMemoryAccessOperand()) {
628 const MemOperand &memOpnd = static_cast<const MemOperand &>(opnd);
629 uint32 offset = 0;
630 if (memOpnd.GetOffsetImmediate() != nullptr) {
631 offset = static_cast<uint32>(memOpnd.GetOffsetImmediate()->GetOffsetValue());
632 }
633 Fixup *fixup =
634 memPool->New<Fixup>(memOpnd.GetSymbolName(), offset, objFuncEmitInfo.GetTextDataSize(), fixupKind);
635 objFuncEmitInfo.AppendGlobalFixups(*fixup);
636 } else {
637 CHECK_FATAL(opnd.IsStImmediate(), "check opnd kind");
638 const StImmOperand &stOpnd = static_cast<const StImmOperand &>(opnd);
639 Fixup *fixup =
640 memPool->New<Fixup>(stOpnd.GetName(), stOpnd.GetOffset(), objFuncEmitInfo.GetTextDataSize(), fixupKind);
641 objFuncEmitInfo.AppendGlobalFixups(*fixup);
642 }
643 return 0;
644 }
645
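/*
 * mov between general registers has no encoding of its own: when SP is involved
 * it is emitted as "add Rd, Rn, #0" (SP is not addressable via orr), otherwise
 * as "orr Rd, zr, Rm". This mirrors the standard MOV aliases in the ISA.
 */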
646 uint32 AArch64ObjEmitter::GenMovReg(const Insn &insn) const
647 {
648 Operand &opnd1 = insn.GetOperand(kInsnFirstOpnd);
649 Operand &opnd2 = insn.GetOperand(kInsnSecondOpnd);
650 DEBUG_ASSERT(opnd1.IsRegister(), "opnd1 must be a register");
651 DEBUG_ASSERT(opnd2.IsRegister(), "opnd2 must be a register");
652 uint32 opCode = 0;
653 if (static_cast<RegOperand &>(opnd1).GetRegisterNumber() == RSP ||
654 static_cast<RegOperand &>(opnd2).GetRegisterNumber() == RSP) {
655 if (insn.GetMachineOpcode() == MOP_xmovrr) {
656 const InsnDesc &md = AArch64CG::kMd[MOP_xaddrri12];
657 opCode = md.GetMopEncode();
658 } else {
659 DEBUG_ASSERT(insn.GetMachineOpcode() == MOP_wmovrr, "support MOP_wmovrr Currently!");
660 const InsnDesc &md = AArch64CG::kMd[MOP_waddrri12];
661 opCode = md.GetMopEncode();
662 }
663 /* Rd */
664 uint32 opnd = opCode | GetOpndMachineValue(opnd1);
665 /* Rn */
666 opnd |= GetOpndMachineValue(opnd2) << kShiftFive;
667 return opnd;
668 } else {
669 if (insn.GetMachineOpcode() == MOP_xmovrr) {
670 const InsnDesc &md = AArch64CG::kMd[MOP_xiorrrr];
671 opCode = md.GetMopEncode();
672 } else {
673 DEBUG_ASSERT(insn.GetMachineOpcode() == MOP_wmovrr, "support MOP_wmovrr Currently!");
674 const InsnDesc &md = AArch64CG::kMd[MOP_wiorrrr];
675 opCode = md.GetMopEncode();
676 }
677 /* Rd */
678 uint32 opnd = opCode | GetOpndMachineValue(opnd1);
679 /* Rn */
680 opnd |= GetOpndMachineValue(opnd2) << kShiftSixteen;
681 /* Rm */
682 uint32 zr = 0x1f; /* xzr / wzr */
683 opnd |= zr << kShiftFive;
684 return opnd;
685 }
686 }
687
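/*
 * Immediate moves are emitted as movz (the value is a shifted 16-bit chunk),
 * movn (its complement is) or, failing that, as "orr Rd, zr, #imm" with a
 * bitmask immediate. The hw field at bits [22:21] selects which 16-bit
 * half-word imm16 lands in, e.g. mov x0, #0x20000 becomes
 * movz x0, #0x2, lsl #16 (hw = 1).
 */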
688 uint32 AArch64ObjEmitter::GenMovImm(const Insn &insn) const
689 {
690 /* Rd */
691 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
692 ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
693 uint32 immSize = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
694 uint64 immValue = static_cast<uint64>(immOpnd.GetValue());
695 bool isMovz = IsMoveWidableImmediate(immValue, immSize);
696 bool isMovn = IsMoveWidableImmediate(~immValue, immSize);
697 if (isMovz || isMovn) {
698 if (!isMovz) {
699 immValue = ~immValue;
700 }
701 uint32 hwFlag = 0;
702 if (immSize == k32BitSize) {
703 auto &md = isMovz ? AArch64CG::kMd[MOP_wmovzri16] : AArch64CG::kMd[MOP_wmovnri16];
704 opnd |= md.GetMopEncode();
705 immValue = static_cast<uint32>(immValue);
706 uint32 bitFieldValue = 0xFFFF;
707 if (((static_cast<uint32>(immValue)) & (bitFieldValue << k16BitSize)) != 0) {
708 hwFlag = 1;
709 }
710 } else {
711 DEBUG_ASSERT(immSize == k64BitSize, "support 64 bit only!");
712 auto &md = isMovz ? AArch64CG::kMd[MOP_xmovzri16] : AArch64CG::kMd[MOP_xmovnri16];
713 opnd |= md.GetMopEncode();
714 uint64 bitFieldValue = 0xFFFF;
715 for (hwFlag = 0; hwFlag <= 3; ++hwFlag) { // hwFlag is just from 0(00b) to 3(11b)
716 if (immValue & (bitFieldValue << (k16BitSize * hwFlag))) {
717 break;
718 }
719 }
720 }
721 opnd |= ((static_cast<uint32>(immValue >> (hwFlag * k16BitSize))) << kShiftFive);
722 opnd |= (hwFlag << kShiftTwentyOne);
723 } else {
724 if (immSize == k32BitSize) {
725 auto &md = AArch64CG::kMd[MOP_wiorrri12];
726 opnd |= md.GetMopEncode();
727 } else {
728 DEBUG_ASSERT(immSize == k64BitSize, "support 64 bit only!");
729 auto &md = AArch64CG::kMd[MOP_xiorrri13];
730 opnd |= md.GetMopEncode();
731 }
732 uint64 value = static_cast<uint64>(immOpnd.GetValue());
733 uint32 size = (immSize == k32BitSize) ? k32BitSize : k64BitSize;
734 opnd |= EncodeLogicaImm(value, size) << kShiftTen;
735 opnd |= (0x1FU << kShiftFive);
736 }
737
738 return opnd;
739 }
740
741 uint32 AArch64ObjEmitter::GenAddSubExtendRegInsn(const Insn &insn) const
742 {
743 /* Rd */
744 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
745 /* Rn */
746 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
747 /* Rm */
748 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
749 /* Extend */
750 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTen;
751 return opnd;
752 }
753
754 uint32 AArch64ObjEmitter::GenAddPCRelAddrInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
755 {
756 /* Rd */
757 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
758 /* Rn */
759 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
760 /* Imm */
761 opnd |= GetLo12LitrealOpndValue(insn.GetMachineOpcode(), insn.GetOperand(kInsnThirdOpnd), objFuncEmitInfo)
762 << kShiftTen;
763 return opnd;
764 }
765
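/*
 * add/sub immediates are 12-bit values optionally shifted left by 12: when the
 * low 12 bits are zero but bits [23:12] are not, the sh bit (bit 22) is set and
 * the immediate is emitted as imm >> 12. e.g. add x0, x1, #0x3000 encodes
 * imm12 = 3 with sh = 1.
 */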
766 uint32 AArch64ObjEmitter::GenAddSubImmInsn(const Insn &insn) const
767 {
768 uint32 operandSize = 4; // subs insn
769 int32 index = insn.GetOperandSize() == operandSize ? 1 : 0;
770 /* Rd */
771 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd + index));
772 /* Rn */
773 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd + index)) << kShiftFive;
774 /* Imm */
775 uint32 immValue = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd + index));
776 if ((immValue & (0xFFFU)) == 0 && ((immValue & (0xFFFU << kShiftTwelve))) != 0) {
777 opnd |= (1U << kShiftTwentyTwo);
778 immValue >>= kShiftTwelve;
779 }
780 opnd |= (immValue << kShiftTen);
781 return opnd;
782 }
783
784 uint32 AArch64ObjEmitter::GenAddSubShiftImmInsn(const Insn &insn) const
785 {
786 uint32 operandSize = 5; // subs insn
787 int32 index = insn.GetOperandSize() == operandSize ? 1 : 0;
788 /* Rd */
789 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd + index));
790 /* Rn */
791 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd + index)) << kShiftFive;
792 /* Imm */
793 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd + index)) << kShiftTen;
794 /* Shift */
795 BitShiftOperand &lslOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd + index));
796 if (lslOpnd.GetShiftAmount() > 0) {
797 uint32 shift = 0x1;
798 opnd |= shift << kShiftTwentyTwo;
799 }
800 return opnd;
801 }
802
803 uint32 AArch64ObjEmitter::GenAddSubRegInsn(const Insn &insn) const
804 {
805 int32 index = insn.GetOperandSize() == k4ByteSize ? 1 : 0; // subs insn
806 /* Rd */
807 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd + index));
808 if (insn.GetOperandSize() == k2ByteSize) { // neg, cmp or cmn insn
809 /* Rm */
810 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
811 return opnd;
812 }
813 /* Rn */
814 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd + index)) << kShiftFive;
815 /* Rm */
816 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd + index)) << kShiftSixteen;
817
818 RegOperand &rd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd + index));
819 RegOperand &rn = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd + index));
820 // SP register can only be used with LSL or Extend
821 if (rd.GetRegisterNumber() == RSP || rn.GetRegisterNumber() == RSP) {
822 uint32 regSize = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
823 opnd |= 1 << kShiftTwentyOne;
824 opnd |= ((regSize == k64BitSize ? 0b11 : 0b10) << kShiftThirteen); // option
825 }
826 return opnd;
827 }
828
829 uint32 AArch64ObjEmitter::GenAddSubShiftRegInsn(const Insn &insn) const
830 {
831 /* Rd */
832 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
833
834 BitShiftOperand *bitShiftOpnd = nullptr;
835
836 uint32 operandSize = 3;
837 if (insn.GetOperandSize() == operandSize) {
838 /* Rm */
839 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
840 bitShiftOpnd = static_cast<BitShiftOperand *>(&insn.GetOperand(kInsnThirdOpnd));
841 } else {
842 /* Rn */
843 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
844 /* Rm */
845 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
846 bitShiftOpnd = static_cast<BitShiftOperand *>(&insn.GetOperand(kInsnFourthOpnd));
847 }
848 uint32 shift = 0;
849 switch (bitShiftOpnd->GetShiftOp()) {
850 case BitShiftOperand::kLSL:
851 shift = kShiftLSL;
852 break;
853 case BitShiftOperand::kLSR:
854 shift = kShiftLSR;
855 break;
856 case BitShiftOperand::kASR:
857 shift = kShiftASR;
858 break;
859 default:
860 break;
861 }
862 /* Shift */
863 opnd |= shift << kShiftTwentyTwo;
864 /* Imm */
865 opnd |= bitShiftOpnd->GetShiftAmount() << kShiftTen;
866 return opnd;
867 }
868
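/*
 * The shift/insert opcodes handled here are ubfm/sbfm/bfm aliases, so immr/imms
 * are derived as in the ISA alias definitions:
 *   lsl #s            -> immr = (-s) mod datasize,   imms = datasize - 1 - s
 *   lsr/asr #s        -> immr = s,                   imms = datasize - 1
 *   ubfiz/bfi lsb, w  -> immr = (-lsb) mod datasize, imms = w - 1
 * The "-shift % mod" expressions rely on unsigned wrap-around: for a 32/64-bit
 * datasize, (2^32 - shift) % mod equals (datasize - shift) % datasize.
 */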
869 uint32 AArch64ObjEmitter::GenBitfieldInsn(const Insn &insn) const
870 {
871 /* Rd */
872 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
873 /* Rn */
874 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
875
876 uint32 operandSize = 4;
877 if (insn.GetMachineOpcode() == MOP_wubfizrri5i5 || insn.GetMachineOpcode() == MOP_xubfizrri6i6 ||
878 insn.GetMachineOpcode() == MOP_wbfirri5i5 || insn.GetMachineOpcode() == MOP_xbfirri6i6) {
879 uint32 mod = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize(); /* 64 & 32 from ARMv8 manual C5.6.114 */
880 CHECK_FATAL(mod == 64 || mod == 32, "mod must be 64/32");
881 uint32 shift = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
882 uint32 immr = -shift % mod;
883 opnd |= immr << kShiftSixteen;
884 uint32 width = GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
885 CHECK_FATAL(width >= 1, "value overflow");
886 uint32 imms = width - 1;
887 opnd |= imms << kShiftTen;
888 } else if (insn.GetOperandSize() == operandSize) {
889 uint32 lab = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
890 opnd |= lab << kShiftSixteen;
891 uint32 width = GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
892 CHECK_FATAL(lab < UINT64_MAX - width, "value overflow");
893 CHECK_FATAL(lab + width >= 1, "value overflow");
894 opnd |= (lab + width - 1) << kShiftTen;
895 } else if (insn.GetMachineOpcode() == MOP_xlslrri6 || insn.GetMachineOpcode() == MOP_wlslrri5) {
896 uint32 mod = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize(); /* 64 & 32 from ARMv8 manual C5.6.114 */
897 CHECK_FATAL(mod == 64 || mod == 32, "mod must be 64/32");
898 uint32 shift = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
899 uint32 immr = -shift % mod;
900 opnd |= immr << kShiftSixteen;
901 uint32 imms = mod - 1 - shift;
902 opnd |= imms << kShiftTen;
903 } else if (insn.GetMachineOpcode() == MOP_xlsrrri6 || insn.GetMachineOpcode() == MOP_wlsrrri5 ||
904 insn.GetMachineOpcode() == MOP_xasrrri6 || insn.GetMachineOpcode() == MOP_wasrrri5) {
905 uint32 mod = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize(); /* 64 & 32 from ARMv8 manual C5.6.114 */
906 CHECK_FATAL(mod == 64 || mod == 32, "mod must be 64/32");
907 uint32 immr = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
908 opnd |= immr << kShiftSixteen;
909 uint32 imms = mod - 1;
910 opnd |= imms << kShiftTen;
911 }
912 return opnd;
913 }
914
915 uint32 AArch64ObjEmitter::GenExtractInsn(const Insn &insn) const
916 {
917 /* Rd */
918 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
919 /* Rn */
920 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
921 /* Imm */
922 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftTen;
923 /* Rm */
924 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
925
926 return opnd;
927 }
928
929 uint32 AArch64ObjEmitter::GenBranchImmInsn(const Insn &insn, const std::vector<uint32> &label2Offset,
930 ObjFuncEmitInfo &objFuncEmitInfo) const
931 {
932 /* Imm */
933 if (insn.IsCall()) {
934 return GetCallFuncOpndValue(insn.GetOperand(kInsnFirstOpnd), objFuncEmitInfo);
935 } else {
936 return GetUnCondBranchOpndValue(insn.GetOperand(kInsnFirstOpnd), label2Offset, objFuncEmitInfo);
937 }
938 }
939
940 uint32 AArch64ObjEmitter::GenBranchRegInsn(const Insn &insn) const
941 {
942 if (insn.GetMachineOpcode() == MOP_xret || insn.GetMachineOpcode() == MOP_clrex) {
943 return 0;
944 }
945 /* Rn */
946 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftFive;
947 return opnd;
948 }
949
950 uint32 AArch64ObjEmitter::GenCompareBranchInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
951 {
952 /* Rt */
953 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
954 /* Imm */
955 opnd |= GetCompareBranchOpndValue(insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
956 return opnd;
957 }
958
959 uint32 AArch64ObjEmitter::GenCondCompareImmInsn(const Insn &insn) const
960 {
961 /* Rn */
962 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
963 /* Imm */
964 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
965 /* Nzcv */
966 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
967 /* Cond */
968 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFifthOpnd)) << kShiftTwelve;
969 return opnd;
970 }
971
972 uint32 AArch64ObjEmitter::GenCondCompareRegInsn(const Insn &insn) const
973 {
974 /* Rn */
975 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
976 /* Rm */
977 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
978 /* Nzcv */
979 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd));
980 /* Cond */
981 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFifthOpnd)) << kShiftTwelve;
982 return opnd;
983 }
984
985 uint32 AArch64ObjEmitter::GenConditionalSelectInsn(const Insn &insn) const
986 {
987 /* Rd */
988 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
989 uint32 operandSize = 5;
990 if (insn.GetOperandSize() == operandSize) {
991 /* Rn */
992 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
993 /* Rm */
994 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
995 /* Cond */
996 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTwelve;
997 } else if (insn.GetMachineOpcode() == MOP_wcnegrrrc || insn.GetMachineOpcode() == MOP_xcnegrrrc) {
998 /* Rn */
999 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1000 /* Rm Rn==Rm */
1001 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
1002 /* Cond */
1003 uint8 cond = GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd));
1004 /* invert cond */
1005 opnd |= ((cond ^ 1u) & 0xfu) << kShiftTwelve;
1006 } else {
1007 /* Cond */
1008 uint8 cond = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd));
1009 /* invert cond */
1010 opnd |= ((cond ^ 1u) & 0xfu) << kShiftTwelve;
1011 }
1012 return opnd;
1013 }
1014
1015 uint32 AArch64ObjEmitter::GenDataProcess1SrcInsn(const Insn &insn) const
1016 {
1017 /* Rd */
1018 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1019 /* Rn */
1020 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1021 return opnd;
1022 }
1023
1024 uint32 AArch64ObjEmitter::GenDataProcess2SrcInsn(const Insn &insn) const
1025 {
1026 /* Rd */
1027 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1028 /* Rn */
1029 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1030 /* Rm */
1031 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1032 return opnd;
1033 }
1034
1035 uint32 AArch64ObjEmitter::GenDataProcess3SrcInsn(const Insn &insn) const
1036 {
1037 /* Rd */
1038 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1039 /* Rn */
1040 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1041 /* Rm */
1042 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1043 /* Ra */
1044 uint32 operandSize = 4;
1045 if (insn.GetOperandSize() == operandSize) {
1046 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTen;
1047 }
1048 return opnd;
1049 }
1050
1051 uint32 AArch64ObjEmitter::GenFloatIntConversionsInsn(const Insn &insn) const
1052 {
1053 /* Rd */
1054 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1055 /* Rn */
1056 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1057 return opnd;
1058 }
1059
1060 uint32 AArch64ObjEmitter::GenFloatCompareInsn(const Insn &insn) const
1061 {
1062 /* Rn */
1063 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1064 if (insn.GetOperand(kInsnThirdOpnd).IsRegister()) {
1065 /* Rm */
1066 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1067 }
1068 return opnd;
1069 }
1070
1071 uint32 AArch64ObjEmitter::GenFloatDataProcessing1Insn(const Insn &insn) const
1072 {
1073 /* Rd */
1074 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1075 /* Rn */
1076 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1077 return opnd;
1078 }
1079
1080 uint32 AArch64ObjEmitter::GenFloatDataProcessing2Insn(const Insn &insn) const
1081 {
1082 /* Rd */
1083 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1084 /* Rn */
1085 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1086 /* Rm */
1087 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1088 return opnd;
1089 }
1090
1091 uint32 AArch64ObjEmitter::GenFloatImmInsn(const Insn &insn) const
1092 {
1093 /* Rd */
1094 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1095 /* Imm */
1096 opnd |= (GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) & 0xff) << kShiftThirteen;
1097 return opnd;
1098 }
1099
1100 uint32 AArch64ObjEmitter::GenFloatCondSelectInsn(const Insn &insn) const
1101 {
1102 /* Rd */
1103 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1104 /* Rn */
1105 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
1106 /* Rm */
1107 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;
1108 /* Cond */
1109 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFourthOpnd)) << kShiftTwelve;
1110 return opnd;
1111 }
1112
1113 uint32 AArch64ObjEmitter::GenLoadStoreModeLiteral(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1114 {
1115 /* Rt */
1116 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1117 /* Mem */
1118 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1119 FixupKind fixupKind = FixupKind(kAArch64LoadPCRelImm19);
1120 LocalFixup *fixup =
1121 memPool->New<LocalFixup>(objFuncEmitInfo.GetCGFunc().GetLocalSymLabelIndex(*memOpnd.GetSymbol()),
1122 objFuncEmitInfo.GetTextDataSize(), fixupKind);
1123 objFuncEmitInfo.AppendLocalFixups(*fixup);
1124 MOperator mOp = insn.GetMachineOpcode();
1125 if (mOp == MOP_sldr) {
1126 mOp = MOP_sldli;
1127 } else if (mOp == MOP_dldr) {
1128 mOp = MOP_dldli;
1129 } else if (mOp == MOP_xldr) {
1130 mOp = MOP_xldli;
1131 } else if (mOp == MOP_wldr) {
1132 mOp = MOP_wldli;
1133 } else {
1134 CHECK_FATAL(false, "unsupported mOp");
1135 }
1136 auto &md = AArch64CG::kMd[mOp];
1137 return md.GetMopEncode() | opnd;
1138 }
1139
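/*
 * Base + immediate addressing: pre/post-index forms use the signed 9-bit imm9
 * at bits [20:12] (index-mode bits at [11:10]), while the plain "intact" form
 * uses the unsigned imm12 at bits [21:10], scaled down by the access size
 * (hence the divisor below), with bit 24 set to select the unsigned-offset form.
 */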
1140 uint32 AArch64ObjEmitter::GenLoadStoreModeBOi(const Insn &insn) const
1141 {
1142 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1143 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1144 /* Imm */
1145 int32 offsetValue = ofstOpnd->GetOffsetValue();
1146 uint32 imm9Mask = 0x1ff;
1147 uint32 opnd = 0U;
1148 if (memOpnd.IsPostIndexed()) {
1149 opnd |= (static_cast<uint32>(offsetValue) & imm9Mask) << kShiftTwelve;
1150 uint32 specialOpCode = 0x1;
1151 opnd |= specialOpCode << kShiftTen;
1152 } else if (memOpnd.IsPreIndexed()) {
1153 opnd |= (static_cast<uint32>(offsetValue) & imm9Mask) << kShiftTwelve;
1154 uint32 specialOpCode = 0x3;
1155 opnd |= specialOpCode << kShiftTen;
1156 } else {
1157 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1158 uint32 specialOpCode = 0x1;
1159 opnd |= specialOpCode << kShiftTwentyFour;
1160 uint32 divisor = 1;
1161 MOperator mOp = insn.GetMachineOpcode();
1162 if ((mOp == MOP_xldr) || (mOp == MOP_xstr) || (mOp == MOP_dldr) || (mOp == MOP_dstr)) {
1163 divisor = k8BitSize;
1164 } else if ((mOp == MOP_wldr) || (mOp == MOP_wstr) || (mOp == MOP_sstr) || (mOp == MOP_sldr)) {
1165 divisor = k4BitSize;
1166 } else if (mOp == MOP_hldr) {
1167 divisor = k2BitSize;
1168 }
1169 uint32 shiftRightNum = 0;
1170 if ((mOp == MOP_wldrsh) || (mOp == MOP_wldrh) || (mOp == MOP_wstrh)) {
1171 shiftRightNum = 1;
1172 }
1173 opnd |= ((static_cast<uint32>(offsetValue) >> shiftRightNum) / divisor) << kShiftTen;
1174 }
1175 return opnd;
1176 }
1177
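/*
 * Base + register-offset addressing: Rm goes to bits [20:16], the extend option
 * to bits [15:13] (UXTW = 0b010, LSL = 0b011, SXTW = 0b110, SXTX = 0b111) and
 * the S bit at bit 12 says whether the offset is scaled by the access size.
 */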
1178 uint32 AArch64ObjEmitter::GenLoadStoreModeBOrX(const Insn &insn) const
1179 {
1180 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1181 uint32 opnd = 0;
1182 opnd |= 0x1 << kShiftTwentyOne;
1183 opnd |= 0x2 << kShiftTen;
1184 RegOperand *offsetReg = memOpnd.GetIndexRegister();
1185 opnd |= GetOpndMachineValue(*offsetReg) << kShiftSixteen;
1186 std::string extend = memOpnd.GetExtendAsString();
1187 uint32 shift = memOpnd.ShiftAmount();
1188 uint32 option = 0;
1189 if (extend == "UXTW") {
1190 option = 0x2;
1191 } else if (extend == "LSL") {
1192 option = 0x3;
1193 uint32 regSize = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize();
1194 // the LSL shift amount of a register-offset ldr/str is limited to 0, or to 1, 2, 3 or 4
1195 // for a 16-, 32-, 64- or 128-bit destination operand respectively
1196 CHECK_FATAL((shift == k0BitSize) || (regSize == k16BitSize && shift == k1BitSize) ||
1197 (regSize == k32BitSize && shift == k2BitSize) ||
1198 (regSize == k64BitSize && shift == k3BitSize) || (regSize == k128BitSize && shift == k4BitSize),
1199 "unsupport LSL amount");
1200 } else if (extend == "SXTW") {
1201 option = 0x6;
1202 } else {
1203 DEBUG_ASSERT(extend == "SXTX", "must be SXTX!");
1204 option = 0x7;
1205 }
1206 opnd |= option << kShiftThirteen;
1207 uint32 s = (shift > 0) ? 1 : 0;
1208 opnd |= s << kShiftTwelve;
1209 return opnd;
1210 }
1211
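/*
 * Besides the regular addressing modes, this handler falls back to the unscaled
 * ldur/stur forms when a kAddrModeBOi offset is not a multiple of the access
 * size but still fits the signed 9-bit range [-256, 256); the mopEncode
 * constants below are the corresponding base opcodes (a local convention here,
 * not taken from the insn description table).
 */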
1212 uint32 AArch64ObjEmitter::GenLoadStoreRegInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
1213 {
1214 /* Mem */
1215 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1216 if (memOpnd.GetAddrMode() == MemOperand::kAddrModeLiteral) {
1217 return GenLoadStoreModeLiteral(insn, objFuncEmitInfo);
1218 }
1219
1220 MOperator mOp = insn.GetMachineOpcode();
1221 #ifdef USE_32BIT_REF
1222 if (((mOp == MOP_xstr) || (mOp == MOP_xldr)) &&
1223 static_cast<AArch64RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).IsRefField()) {
1224 mOp = (mOp == MOP_xstr) ? MOP_wstr : MOP_wldr;
1225 }
1226 #endif
1227 auto &md = AArch64CG::kMd[mOp];
1228 uint32 binInsn = md.GetMopEncode();
1229 // invalid insn generated by the eval node
1230 if (static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber() == RZR) {
1231 if (mOp == MOP_sldr) {
1232 binInsn = AArch64CG::kMd[MOP_wldr].GetMopEncode();
1233 } else if (mOp == MOP_dldr) {
1234 binInsn = AArch64CG::kMd[MOP_xldr].GetMopEncode();
1235 } else if (mOp == MOP_sstr) {
1236 binInsn = AArch64CG::kMd[MOP_wstr].GetMopEncode();
1237 } else if (mOp == MOP_dstr) {
1238 binInsn = AArch64CG::kMd[MOP_xstr].GetMopEncode();
1239 }
1240 }
1241 /* Rt */
1242 binInsn |= GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1243 /* Rn */
1244 Operand *baseReg = memOpnd.GetBaseRegister();
1245 binInsn |= GetOpndMachineValue(*baseReg) << kShiftFive;
1246
1247 if (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi) {
1248 uint32 size = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
1249 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1250 /* Imm */
1251 int32 offsetValue = ofstOpnd != nullptr ? ofstOpnd->GetOffsetValue() : 0;
1252 if ((((size == k16BitSize) && (offsetValue % k2BitSize) != 0) ||
1253 ((size == k32BitSize) && (offsetValue % k4BitSize) != 0) ||
1254 ((size == k64BitSize) && (offsetValue % k8BitSize) != 0)) &&
1255 ((offsetValue < k256BitSizeInt) && (offsetValue >= kNegative256BitSize))) {
1256 uint32 mopEncode = 0;
1257 // ldur, ldurh, ldurb
1258 if (insn.IsLoad()) {
1259 if (insn.GetDesc()->GetEncodeType() == kLoadStoreFloat) {
1260 mopEncode = size == k16BitSize ? 0x7c400000 : (size == k32BitSize ? 0xbc400000 : 0xfc400000);
1261 } else {
1262 mopEncode = size == k16BitSize ? 0x78400000 : (size == k32BitSize ? 0xb8400000 : 0xf8400000);
1263 }
1264 } else { // stur, sturh, sturb
1265 if (insn.GetDesc()->GetEncodeType() == kLoadStoreFloat) {
1266 mopEncode = size == k16BitSize ? 0x7c000000 : (size == k32BitSize ? 0xbc000000 : 0xfc000000);
1267 } else {
1268 mopEncode = size == k16BitSize ? 0x78000000 : (size == k32BitSize ? 0xb8000000 : 0xf8000000);
1269 }
1270 }
1271 binInsn =
1272 GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) | (GetOpndMachineValue(*baseReg) << kShiftFive);
1273 return binInsn | mopEncode | ((static_cast<uint32>(offsetValue) & 0x1ff) << kShiftTwelve); // mask to imm9
1274 }
1275 return binInsn | GenLoadStoreModeBOi(insn);
1276 } else if (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOrX) {
1277 return binInsn | GenLoadStoreModeBOrX(insn);
1278 }
1279 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeLo12Li, "support kAddrModeLo12Li only!");
1280 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1281 binInsn |= GetLo12LitrealOpndValue(insn.GetMachineOpcode(), memOpnd, objFuncEmitInfo) << kShiftTen;
1282 uint32 specialOpCode = 0x1;
1283 binInsn |= specialOpCode << kShiftTwentyFour;
1284
1285 return binInsn;
1286 }
1287
1288 uint32 AArch64ObjEmitter::GenLoadStoreARInsn(const Insn &insn) const
1289 {
1290 /* Rt */
1291 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1292 /* Mem */
1293 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1294 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1295 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1296 Operand *baseReg = memOpnd.GetBaseRegister();
1297 /* Rn */
1298 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1299 return opnd;
1300 }
1301
1302 uint32 AArch64ObjEmitter::GenLoadExclusiveInsn(const Insn &insn) const
1303 {
1304 /* Rt */
1305 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1306 /* Mem */
1307 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
1308 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1309 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1310 Operand *baseReg = memOpnd.GetBaseRegister();
1311 /* Rn */
1312 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1313 return opnd;
1314 }
1315
1316 uint32 AArch64ObjEmitter::GenLoadExclusivePairInsn(const Insn &insn) const
1317 {
1318 /* Rt */
1319 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1320 /* Rt2 */
1321 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1322 /* Mem */
1323 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1324 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1325 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1326 Operand *baseReg = memOpnd.GetBaseRegister();
1327 /* Rn */
1328 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1329 return opnd;
1330 }
1331
1332 uint32 AArch64ObjEmitter::GenStoreExclusiveInsn(const Insn &insn) const
1333 {
1334 /* Rs */
1335 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftSixteen;
1336 /* Rt */
1337 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd));
1338 /* Mem */
1339 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1340 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1341 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1342 Operand *baseReg = memOpnd.GetBaseRegister();
1343 /* Rn */
1344 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1345 return opnd;
1346 }
1347
1348 uint32 AArch64ObjEmitter::GenStoreExclusivePairInsn(const Insn &insn) const
1349 {
1350 /* Rs */
1351 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftSixteen;
1352 /* Rt */
1353 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd));
1354 /* Rt2 */
1355 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftTen;
1356 /* Mem */
1357 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnFourthOpnd));
1358 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1359 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1360 Operand *baseReg = memOpnd.GetBaseRegister();
1361 /* Rn */
1362 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1363 return opnd;
1364 }
1365
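/*
 * ldp/stp encode the offset as a signed imm7 at bits [21:15], scaled by the
 * access size (8 for 64-bit, 4 for 32-bit registers). Bits [24:22] of the
 * "specialOpCode" values used below combine the addressing form (post-index,
 * pre-index or signed offset) with the L bit (1 for loads, 0 for stores).
 */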
1366 uint32 AArch64ObjEmitter::GenLoadPairInsn(const Insn &insn) const
1367 {
1368 /* Rt */
1369 uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
1370 /* Rt2 */
1371 opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
1372 /* Mem */
1373 MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
1374 DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
1375 /* Rn */
1376 Operand *baseReg = memOpnd.GetBaseRegister();
1377 opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
1378 /* Imm */
1379 OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
1380 int32 offsetValue = ofstOpnd->GetOffsetValue();
1381 uint32 divisor = 0;
1382 if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
1383 divisor = k8ByteSize;
1384 } else {
1385 divisor = k4ByteSize;
1386 }
1387 uint32 imm7Mask = 0x7f;
1388 opnd |= (((static_cast<uint32>(offsetValue / divisor) & imm7Mask)) << kShiftFifteen);
1389
1390 uint32 specialOpCode = 0;
1391 if (memOpnd.IsPostIndexed()) {
1392 specialOpCode = 0x3;
1393 } else if (memOpnd.IsPreIndexed()) {
1394 specialOpCode = 0x7;
1395 } else {
1396 DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
1397 specialOpCode = 0x5;
1398 }
1399 opnd |= specialOpCode << kShiftTwentyTwo;
1400 return opnd;
1401 }
1402
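/* stp operand fields: same layout as ldp, but with the load flag (bit 22) clear the
 * addressing forms become 0x2 post-index, 0x6 pre-index and 0x4 signed offset */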
uint32 AArch64ObjEmitter::GenStorePairInsn(const Insn &insn) const
{
    /* Rt */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* Rt2 */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
    /* Mem */
    MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
    DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
    /* Rn */
    Operand *baseReg = memOpnd.GetBaseRegister();
    opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
    /* Imm */
    OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
    int32 offsetValue = ofstOpnd->GetOffsetValue();
    uint32 divisor = 0;
    if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
        divisor = k8ByteSize;
    } else {
        divisor = k4ByteSize;
    }
    uint32 imm7Mask = 0x7f;
    opnd |= ((static_cast<uint32>(offsetValue / divisor) & imm7Mask) << kShiftFifteen);

    uint32 specialOpCode = 0;
    if (memOpnd.IsPostIndexed()) {
        specialOpCode = 0x2;
    } else if (memOpnd.IsPreIndexed()) {
        specialOpCode = 0x6;
    } else {
        DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
        specialOpCode = 0x4;
    }
    opnd |= specialOpCode << kShiftTwentyTwo;
    return opnd;
}

uint32 AArch64ObjEmitter::GenLoadStoreFloatInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
{
    return GenLoadStoreRegInsn(insn, objFuncEmitInfo);
}

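/* SIMD&FP ldp variant; the operand field layout is the same as in GenLoadPairInsn */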
uint32 AArch64ObjEmitter::GenLoadPairFloatInsn(const Insn &insn) const
{
    /* Rt */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* Rt2 */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
    /* Mem */
    MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
    DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
    /* Rn */
    Operand *baseReg = memOpnd.GetBaseRegister();
    opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
    /* Imm */
    OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
    int32 offsetValue = ofstOpnd->GetOffsetValue();
    uint32 divisor = 0;
    if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
        divisor = k8ByteSize;
    } else {
        divisor = k4ByteSize;
    }
    uint32 imm7Mask = 0x7f;
    opnd |= (static_cast<uint32>(static_cast<uint32>(offsetValue) / divisor) & imm7Mask) << kShiftFifteen;

    uint32 specialOpCode = 0;
    if (memOpnd.IsPostIndexed()) {
        specialOpCode = 0x3;
    } else if (memOpnd.IsPreIndexed()) {
        specialOpCode = 0x7;
    } else {
        DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
        specialOpCode = 0x5;
    }
    opnd |= specialOpCode << kShiftTwentyTwo;
    return opnd;
}

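/* SIMD&FP stp variant; the operand field layout is the same as in GenStorePairInsn */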
uint32 AArch64ObjEmitter::GenStorePairFloatInsn(const Insn &insn) const
{
    /* Rt */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* Rt2 */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
    /* Mem */
    MemOperand &memOpnd = static_cast<MemOperand &>(insn.GetOperand(kInsnThirdOpnd));
    DEBUG_ASSERT(memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi, "support kAddrModeBOi only!");
    /* Rn */
    Operand *baseReg = memOpnd.GetBaseRegister();
    opnd |= GetOpndMachineValue(*baseReg) << kShiftFive;
    /* Imm */
    OfstOperand *ofstOpnd = static_cast<OfstOperand *>(memOpnd.GetOffsetImmediate());
    int32 offsetValue = ofstOpnd->GetOffsetValue();
    uint32 divisor = 0;
    if (insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize() == k64BitSize) {
        divisor = k8ByteSize;
    } else {
        divisor = k4ByteSize;
    }
    uint32 imm7Mask = 0x7f;
    opnd |= (static_cast<uint32>(static_cast<uint32>(offsetValue) / divisor) & imm7Mask) << kShiftFifteen;

    uint32 specialOpCode = 0;
    if (memOpnd.IsPostIndexed()) {
        specialOpCode = 0x2;
    } else if (memOpnd.IsPreIndexed()) {
        specialOpCode = 0x6;
    } else {
        DEBUG_ASSERT(memOpnd.IsIntactIndexed(), "must be kIntact!");
        specialOpCode = 0x4;
    }
    opnd |= specialOpCode << kShiftTwentyTwo;
    return opnd;
}

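/* ldr (literal) operand fields: Rt -> bits [4:0], pc-relative imm19 -> bits [23:5] */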
uint32 AArch64ObjEmitter::GenLoadLiteralRegInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
{
    /* Rt */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* Imm */
    opnd |= GetLoadLiteralOpndValue(insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
    return opnd;
}

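/* logical (shifted register) operand fields: Rd -> [4:0], Rn -> [9:5], Rm -> [20:16];
 * an optional shift operand supplies the shift type in bits [23:22] and the amount in bits [15:10] */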
uint32 AArch64ObjEmitter::GenLogicalRegInsn(const Insn &insn) const
{
    /* Rd */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));

    uint32 operandSize = k2ByteSize; // two-operand form, e.g. mvn
    if (insn.GetOperandSize() == operandSize) {
        opnd |= GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd)) << kShiftFive;
        opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftSixteen;
        return opnd;
    }

    /* Rn */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
    /* Rm */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnThirdOpnd)) << kShiftSixteen;

    operandSize = k4ByteSize;
    if (insn.GetOperandSize() == operandSize) {
        BitShiftOperand &bitShiftOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
        uint32 shift = 0;
        switch (bitShiftOpnd.GetShiftOp()) {
            case BitShiftOperand::kLSL:
                shift = kShiftLSL;
                break;
            case BitShiftOperand::kLSR:
                shift = kShiftLSR;
                break;
            case BitShiftOperand::kASR:
                shift = kShiftASR;
                break;
            default:
                break;
        }
        /* Shift */
        opnd |= shift << kShiftTwentyTwo;
        /* Imm */
        opnd |= bitShiftOpnd.GetShiftAmount() << kShiftTen;
    }
    return opnd;
}

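/* logical (immediate) operand fields: Rd -> [4:0], Rn -> [9:5], and the bitmask immediate
 * as N:immr:imms in bits [22:10] (see EncodeLogicaImm); mov-immediate and tst are special-cased */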
uint32 AArch64ObjEmitter::GenLogicalImmInsn(const Insn &insn) const
{
    /* Rd */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    if (insn.GetMachineOpcode() == MOP_wmovri32 || insn.GetMachineOpcode() == MOP_xmovri64) {
        opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftTen;
        return opnd;
    }

    // tst insn
    if (insn.GetMachineOpcode() == MOP_wtstri32 || insn.GetMachineOpcode() == MOP_xtstri64) {
        // Rn
        uint32 opndValue = GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
        // Imm
        ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
        uint64 value = static_cast<uint64>(immOpnd.GetValue());
        uint32 size = insn.GetDesc()->GetOpndDes(kInsnThirdOpnd)->GetSize();
        opndValue |= EncodeLogicaImm(value, size) << kShiftTen;
        return opndValue;
    }

    /* Rn */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;
    /* Imm */
    ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
    uint64 value = static_cast<uint64>(immOpnd.GetValue());
    uint32 size = insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize();
    opnd |= EncodeLogicaImm(value, size) << kShiftTen;
    return opnd;
}

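/* movz/movn/movk operand fields: Rd -> [4:0], imm16 -> [20:5], hw (shift amount / 16) -> bits [22:21] */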
uint32 AArch64ObjEmitter::GenMoveWideInsn(const Insn &insn) const
{
    /* Rd */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* Imm */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftFive;

    BitShiftOperand &lslOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnThirdOpnd));
    uint32 mod = 16; /* 16 from Armv8 Manual C5.6.128 */
    uint32 shift = lslOpnd.GetShiftAmount() / mod;
    /* Shift */
    opnd |= shift << kShiftTwentyOne;
    return opnd;
}

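/* adr/adrp operand fields: Rd -> bits [4:0]; the label immediate computed by
 * GetAdrLabelOpndValue is placed from bit 5 upwards */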
uint32 AArch64ObjEmitter::GenPCRelAddrInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
{
    /* Rd */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* Imm */
    opnd |= GetAdrLabelOpndValue(insn, insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
    return opnd;
}

uint32 AArch64ObjEmitter::GenSystemInsn(const Insn &insn) const
{
    (void)insn;
    return 0;
}

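/* tbz/tbnz operand fields: Rt -> [4:0], tested bit number -> bits [23:19], pc-relative imm14 -> bits [18:5] */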
uint32 AArch64ObjEmitter::GenTestBranchInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
{
    /* Rt */
    uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
    /* b40 */
    opnd |= GetOpndMachineValue(insn.GetOperand(kInsnSecondOpnd)) << kShiftNineteen;
    /* Imm */
    opnd |= GetTestBranchOpndValue(insn.GetOperand(kInsnThirdOpnd), objFuncEmitInfo) << kShiftFive;
    return opnd;
}

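/* b.cond: only the pc-relative imm19 is filled in here, at bits [23:5] */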
uint32 AArch64ObjEmitter::GenCondBranchInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo) const
{
    /* Imm */
    uint32 opnd = GetCondBranchOpndValue(insn.GetOperand(kInsnSecondOpnd), objFuncEmitInfo) << kShiftFive;
    return opnd;
}

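/* If the last machine instruction of the function is a call, append a nop after it,
 * presumably so that the return address still falls inside the function body. */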
void AArch64ObjEmitter::InsertNopInsn(ObjFuncEmitInfo &objFuncEmitInfo) const
{
    AArch64CGFunc &cgFunc = static_cast<AArch64CGFunc &>(objFuncEmitInfo.GetCGFunc());
    bool found = false;
    FOR_ALL_BB_REV(bb, &cgFunc)
    {
        FOR_BB_INSNS_REV(insn, bb)
        {
            if (insn->IsMachineInstruction()) {
                if (insn->IsCall()) {
                    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_nop);
                    bb->InsertInsnAfter(*insn, newInsn);
                }
                found = true;
                break;
            }
        }
        if (found) {
            break;
        }
    }
}

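/* Encodes a bitmask immediate into the N:immr:imms form used by logical (immediate)
 * instructions: the smallest repeating element size is determined first, immr gives the
 * rotation and imms encodes the element size together with the number of set bits
 * (the inverse of DecodeBitMasks in the Armv8 manual). */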
uint32 AArch64ObjEmitter::EncodeLogicaImm(uint64 imm, uint32 size) const
{
    /* the element size */
    uint32 elementSize = size;
    while (elementSize > k2ByteSize) {
        elementSize >>= 1;
        uint64 mask = (1ULL << elementSize) - 1;
        if ((imm & mask) != ((imm >> elementSize) & mask)) {
            elementSize <<= 1;
            break;
        }
    }

    if (elementSize != k64BitSize) {
        imm &= ((1ULL << elementSize) - 1);
    }
    std::bitset<k64BitSize> bitValue(imm);
    uint32 trailCount = 0;
    for (uint32 i = 1; i < elementSize; ++i) {
        if (bitValue[i] ^ bitValue[0]) {
            trailCount = i;
            break;
        }
    }

    uint32 immr = 0;
    uint32 oneNum = bitValue.count();
    if (bitValue.test(0)) { /* for 1+0+1+ pattern */
        DEBUG_ASSERT(oneNum >= trailCount, "value overflow");
        immr = oneNum - trailCount;
    } else { /* for 0+1+0+ pattern */
        immr = elementSize - trailCount;
    }
    CHECK_FATAL(elementSize >= 1, "value overflow");
    DEBUG_ASSERT(oneNum >= 1, "value overflow");
    uint32 imms = ~(elementSize - 1) << 1;
    imms |= oneNum - 1u;
    uint32 n = (elementSize == k64BitSize) ? 1 : 0;
    return (n << kShiftTwelve) | (immr << kShiftSix) | (imms & 0x3f);
}

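/* Expands intrinsic pseudo ops that produce more than one machine instruction;
 * currently only MOP_adrp_label, which emits an adrp followed by an add on the
 * same destination register. */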
void AArch64ObjEmitter::EmitIntrinsicInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo)
{
    switch (insn.GetMachineOpcode()) {
        // adrp xd, label
        // add xd, xd, #:lo12:label
        case MOP_adrp_label: {
            uint32 opnd = GetOpndMachineValue(insn.GetOperand(kInsnFirstOpnd));
            uint32 binInsn = AArch64CG::kMd[MOP_xadrp].GetMopEncode();
            binInsn |= opnd;
            objFuncEmitInfo.AppendTextData(binInsn, k4ByteSize);
            binInsn = AArch64CG::kMd[MOP_xaddrri12].GetMopEncode();
            binInsn |= opnd | (opnd << kShiftFive);
            objFuncEmitInfo.AppendTextData(binInsn, k4ByteSize);
            break;
        }
        default:
            CHECK_FATAL(false, "unsupported mop in EmitIntrinsicInsn!\n");
    }
}

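/* MOP_tls_desc_rel emits two add-immediate templates (they disassemble to
 * add x0, x0, #0, lsl #12 and add x0, x0, #0); their zero immediates are
 * presumably filled in later by TLS relocations. */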
void AArch64ObjEmitter::EmitSpinIntrinsicInsn(const Insn &insn, ObjFuncEmitInfo &objFuncEmitInfo)
{
    switch (insn.GetMachineOpcode()) {
        case MOP_tls_desc_rel: {
            objFuncEmitInfo.AppendTextData(0x91400000, k4ByteSize);
            objFuncEmitInfo.AppendTextData(0x91000000, k4ByteSize);
            break;
        }
        default:
            CHECK_FATAL(false, "unsupported mop in EmitSpinIntrinsicInsn!\n");
    }
}
} /* namespace maplebe */