/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "aarch64_dependence.h"
#include "aarch64_cg.h"
#include "aarch64_operand.h"
#include "pressure.h"

/* For building the dependence graph; the entry is AArch64DepAnalysis::Run. */
namespace maplebe {
/* constructor */
AArch64DepAnalysis::AArch64DepAnalysis(CGFunc &func, MemPool &mp, MAD &mad, bool beforeRA)
    : DepAnalysis(func, mp, mad, beforeRA),
      stackUses(alloc.Adapter()),
      stackDefs(alloc.Adapter()),
      heapUses(alloc.Adapter()),
      heapDefs(alloc.Adapter()),
      mayThrows(alloc.Adapter()),
      ambiInsns(alloc.Adapter()),
      ehInRegs(alloc.Adapter())
{
    uint32 maxRegNum;
    if (beforeRA) {
        maxRegNum = cgFunc.GetMaxVReg();
    } else {
        maxRegNum = kAllRegNum;
    }
    regDefs = memPool.NewArray<Insn *>(maxRegNum);
    regUses = memPool.NewArray<RegList *>(maxRegNum);
}

/* print dep node information */
void AArch64DepAnalysis::DumpDepNode(DepNode &node) const
{
    node.GetInsn()->Dump();
    uint32 num = node.GetUnitNum();
    LogInfo::MapleLogger() << "unit num : " << num << ", ";
    for (uint32 i = 0; i < num; ++i) {
        const Unit *unit = node.GetUnitByIndex(i);
        if (unit != nullptr) {
            PRINT_VAL(unit->GetName());
        } else {
            PRINT_VAL("none");
        }
    }
    LogInfo::MapleLogger() << '\n';
    node.DumpSchedInfo();
    if (beforeRA) {
        node.DumpRegPressure();
    }
}

/* print dep link information */
void AArch64DepAnalysis::DumpDepLink(DepLink &link, const DepNode *node) const
{
    PRINT_VAL(GetDepTypeName(link.GetDepType()));
    PRINT_STR_VAL("Latency: ", link.GetLatency());
    if (node != nullptr) {
        node->GetInsn()->Dump();
        return;
    }
    LogInfo::MapleLogger() << "from : ";
    link.GetFrom().GetInsn()->Dump();
    LogInfo::MapleLogger() << "to : ";
    link.GetTo().GetInsn()->Dump();
}

/* Append use register to the list. */
void AArch64DepAnalysis::AppendRegUseList(Insn &insn, regno_t regNO)
{
    RegList *regList = memPool.New<RegList>();
    regList->insn = &insn;
    regList->next = nullptr;
    if (regUses[regNO] == nullptr) {
        regUses[regNO] = regList;
        if (beforeRA) {
            Insn *defInsn = regDefs[regNO];
            if (defInsn == nullptr) {
                return;
            }
            DepNode *defNode = defInsn->GetDepNode();
            defNode->SetRegDefs(regNO, regList);
        }
        return;
    }
    RegList *lastRegList = regUses[regNO];
    while (lastRegList->next != nullptr) {
        lastRegList = lastRegList->next;
    }
    lastRegList->next = regList;
}
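
/*
 * An illustrative sketch (hypothetical insns): after a register's last
 * definition, each read of it is appended to regUses[regNO] in program order:
 *   add x2, x1, #1    // regUses[x1]: [add]
 *   sub x3, x1, #2    // regUses[x1]: [add, sub]
 * so a later redefinition of x1 can build anti dependences on both readers.
 */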

/*
 * Add a dependence edge.
 * Two dependence nodes have at most one edge between them.
 * A true dependence overwrites other dependence types.
 */
void AArch64DepAnalysis::AddDependence(DepNode &fromNode, DepNode &toNode, DepType depType)
{
    /* Cannot build a self-loop dependence. */
    if (&fromNode == &toNode) {
        return;
    }
    /* Check whether the edge already exists. */
    if (!fromNode.GetSuccs().empty()) {
        DepLink *depLink = fromNode.GetSuccs().back();
        if (&(depLink->GetTo()) == &toNode) {
            if (depLink->GetDepType() != kDependenceTypeTrue) {
                if (depType == kDependenceTypeTrue) {
                    /* The edge already exists; upgrade it to a true dependence. */
                    depLink->SetDepType(kDependenceTypeTrue);
                    depLink->SetLatency(mad.GetLatency(*fromNode.GetInsn(), *toNode.GetInsn()));
                }
            }
            return;
        }
    }
    DepLink *depLink = memPool.New<DepLink>(fromNode, toNode, depType);
    if (depType == kDependenceTypeTrue) {
        depLink->SetLatency(mad.GetLatency(*fromNode.GetInsn(), *toNode.GetInsn()));
    }
    fromNode.AddSucc(*depLink);
    toNode.AddPred(*depLink);
}
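
/*
 * A sketch of the three register-dependence kinds built via AddDependence
 * (hypothetical insns):
 *   add x1, x0, #4  ->  ldr x2, [x1]    : true (read-after-write) on x1
 *   ldr x2, [x1]    ->  mov x1, #0      : anti (write-after-read) on x1
 *   mov x3, #0      ->  mov x3, #1      : output (write-after-write) on x3
 */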

void AArch64DepAnalysis::AddDependence4InsnInVectorByType(MapleVector<Insn *> &insns, Insn &insn, const DepType &type)
{
    for (auto anyInsn : insns) {
        AddDependence(*anyInsn->GetDepNode(), *insn.GetDepNode(), type);
    }
}

void AArch64DepAnalysis::AddDependence4InsnInVectorByTypeAndCmp(MapleVector<Insn *> &insns, Insn &insn,
                                                                const DepType &type)
{
    for (auto anyInsn : insns) {
        if (anyInsn != &insn) {
            AddDependence(*anyInsn->GetDepNode(), *insn.GetDepNode(), type);
        }
    }
}

/* Remove self dependence (self loop) in dependence graph. */
void AArch64DepAnalysis::RemoveSelfDeps(Insn &insn)
{
    DepNode *node = insn.GetDepNode();
    DEBUG_ASSERT(node->GetSuccs().back()->GetTo().GetInsn() == &insn, "Is not a self dependence.");
    DEBUG_ASSERT(node->GetPreds().back()->GetFrom().GetInsn() == &insn, "Is not a self dependence.");
    node->RemoveSucc();
    node->RemovePred();
}

/* Build dependences of source register operand. */
void AArch64DepAnalysis::BuildDepsUseReg(Insn &insn, regno_t regNO)
{
    DepNode *node = insn.GetDepNode();
    node->AddUseReg(regNO);
    if (regDefs[regNO] != nullptr) {
        /* Build true dependences. */
        AddDependence(*regDefs[regNO]->GetDepNode(), *insn.GetDepNode(), kDependenceTypeTrue);
    }
}

/* Build dependences of destination register operand. */
void AArch64DepAnalysis::BuildDepsDefReg(Insn &insn, regno_t regNO)
{
    DepNode *node = insn.GetDepNode();
    node->AddDefReg(regNO);
    /* Build anti dependences. */
    RegList *regList = regUses[regNO];
    while (regList != nullptr) {
        CHECK_NULL_FATAL(regList->insn);
        AddDependence(*regList->insn->GetDepNode(), *node, kDependenceTypeAnti);
        regList = regList->next;
    }
    /* Build output dependence. */
    if (regDefs[regNO] != nullptr) {
        AddDependence(*regDefs[regNO]->GetDepNode(), *node, kDependenceTypeOutput);
    }
}

void AArch64DepAnalysis::ReplaceDepNodeWithNewInsn(DepNode &firstNode, DepNode &secondNode, Insn &newInsn,
                                                   bool isFromClinit) const
{
    if (isFromClinit) {
        firstNode.AddClinitInsn(*firstNode.GetInsn());
        firstNode.AddClinitInsn(*secondNode.GetInsn());
        firstNode.SetCfiInsns(secondNode.GetCfiInsns());
    } else {
        for (Insn *insn : secondNode.GetCfiInsns()) {
            firstNode.AddCfiInsn(*insn);
        }
        for (Insn *insn : secondNode.GetComments()) {
            firstNode.AddComments(*insn);
        }
        secondNode.ClearComments();
    }
    firstNode.SetInsn(newInsn);
    Reservation *rev = mad.FindReservation(newInsn);
    CHECK_FATAL(rev != nullptr, "reservation is nullptr.");
    firstNode.SetReservation(*rev);
    firstNode.SetUnits(rev->GetUnit());
    firstNode.SetUnitNum(rev->GetUnitNum());
    newInsn.SetDepNode(firstNode);
}

void AArch64DepAnalysis::ClearDepNodeInfo(DepNode &depNode) const
{
    Insn &insn = cgFunc.GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_pseudo_none);
    insn.SetDepNode(depNode);
    Reservation *seRev = mad.FindReservation(insn);
    depNode.SetInsn(insn);
    depNode.SetType(kNodeTypeEmpty);
    DEBUG_ASSERT(seRev != nullptr, "seRev should not be nullptr");
    depNode.SetReservation(*seRev);
    depNode.SetUnitNum(0);
    depNode.ClearCfiInsns();
    depNode.SetUnits(nullptr);
}

/* Combine adrp_ldr & clinit_tail into clinit. */
void AArch64DepAnalysis::CombineClinit(DepNode &firstNode, DepNode &secondNode, bool isAcrossSeparator)
{
    DEBUG_ASSERT(firstNode.GetInsn()->GetMachineOpcode() == MOP_adrp_ldr, "first insn should be adrpldr");
    DEBUG_ASSERT(secondNode.GetInsn()->GetMachineOpcode() == MOP_clinit_tail, "second insn should be clinit_tail");
    DEBUG_ASSERT(firstNode.GetCfiInsns().empty(), "There should not be any comment/cfi instructions between clinit.");
    DEBUG_ASSERT(secondNode.GetComments().empty(), "There should not be any comment/cfi instructions between clinit.");
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_clinit, firstNode.GetInsn()->GetOperand(0),
                                                       firstNode.GetInsn()->GetOperand(1));
    newInsn.SetId(firstNode.GetInsn()->GetId());
    /* Replace first node with new insn. */
    ReplaceDepNodeWithNewInsn(firstNode, secondNode, newInsn, true);
    /* Clear second node information. */
    ClearDepNodeInfo(secondNode);
    CombineDependence(firstNode, secondNode, isAcrossSeparator);
}
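
/*
 * A sketch of the fusion (operands elided): the MOP_adrp_ldr insn and the
 * following MOP_clinit_tail insn collapse into one MOP_clinit insn that
 * reuses the first insn's two operands; the second node becomes an empty
 * placeholder via ClearDepNodeInfo.
 */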

/*
 * Combine memory access pair:
 * 1. ldr to ldp.
 * 2. str to stp.
 */
void AArch64DepAnalysis::CombineMemoryAccessPair(DepNode &firstNode, DepNode &secondNode, bool useFirstOffset)
{
    DEBUG_ASSERT(firstNode.GetInsn(), "the insn of first Node should not be nullptr");
    DEBUG_ASSERT(secondNode.GetInsn(), "the insn of second Node should not be nullptr");
    MOperator thisMop = firstNode.GetInsn()->GetMachineOpcode();
    MOperator mopPair = GetMopPair(thisMop, false);
    DEBUG_ASSERT(mopPair != 0, "mopPair should not be zero");
    Operand *opnd0 = nullptr;
    Operand *opnd1 = nullptr;
    Operand *opnd2 = nullptr;
    if (useFirstOffset) {
        opnd0 = &(firstNode.GetInsn()->GetOperand(0));
        opnd1 = &(secondNode.GetInsn()->GetOperand(0));
        opnd2 = &(firstNode.GetInsn()->GetOperand(1));
    } else {
        opnd0 = &(secondNode.GetInsn()->GetOperand(0));
        opnd1 = &(firstNode.GetInsn()->GetOperand(0));
        opnd2 = &(secondNode.GetInsn()->GetOperand(1));
    }
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopPair, *opnd0, *opnd1, *opnd2);
    newInsn.SetId(firstNode.GetInsn()->GetId());
    std::string newComment;
    const MapleString &comment = firstNode.GetInsn()->GetComment();
    if (comment.c_str() != nullptr) {
        newComment += comment.c_str();
    }
    const MapleString &secondComment = secondNode.GetInsn()->GetComment();
    if (secondComment.c_str() != nullptr) {
        newComment += " ";
        newComment += secondComment.c_str();
    }
    if (!newComment.empty()) {
        newInsn.SetComment(newComment);
    }
    /* Replace first node with new insn. */
    ReplaceDepNodeWithNewInsn(firstNode, secondNode, newInsn, false);
    /* Clear second node information. */
    ClearDepNodeInfo(secondNode);
    CombineDependence(firstNode, secondNode, false, true);
}
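
/*
 * A sketch of the pairing (hypothetical offsets):
 *   ldr w0, [sp, #8]  +  ldr w1, [sp, #12]  =>  ldp w0, w1, [sp, #8]
 * When useFirstOffset is set, the first insn's memory operand supplies the
 * pair's base offset; otherwise the second insn's memory operand is used.
 */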

/* Combine two dependence nodes to one */
void AArch64DepAnalysis::CombineDependence(DepNode &firstNode, DepNode &secondNode, bool isAcrossSeparator,
                                           bool isMemCombine)
{
    if (isAcrossSeparator) {
        /* Clear all latency of the second node. */
        for (auto predLink : secondNode.GetPreds()) {
            predLink->SetLatency(0);
        }
        for (auto succLink : secondNode.GetSuccs()) {
            succLink->SetLatency(0);
        }
        return;
    }
    std::set<DepNode *> uniqueNodes;

    for (auto predLink : firstNode.GetPreds()) {
        if (predLink->GetDepType() == kDependenceTypeTrue) {
            predLink->SetLatency(mad.GetLatency(*predLink->GetFrom().GetInsn(), *firstNode.GetInsn()));
        }
        (void)uniqueNodes.insert(&predLink->GetFrom());
    }
    for (auto predLink : secondNode.GetPreds()) {
        if (&predLink->GetFrom() != &firstNode) {
            if (uniqueNodes.insert(&(predLink->GetFrom())).second) {
                AddDependence(predLink->GetFrom(), firstNode, predLink->GetDepType());
            }
        }
        predLink->SetLatency(0);
    }
    uniqueNodes.clear();
    for (auto succLink : firstNode.GetSuccs()) {
        if (succLink->GetDepType() == kDependenceTypeTrue) {
            succLink->SetLatency(mad.GetLatency(*succLink->GetFrom().GetInsn(), *firstNode.GetInsn()));
        }
        (void)uniqueNodes.insert(&(succLink->GetTo()));
    }
    for (auto succLink : secondNode.GetSuccs()) {
        if (uniqueNodes.insert(&(succLink->GetTo())).second) {
            AddDependence(firstNode, succLink->GetTo(), succLink->GetDepType());
            if (isMemCombine) {
                succLink->GetTo().IncreaseValidPredsSize();
            }
        }
        succLink->SetLatency(0);
    }
}

/*
 * Build dependences of an ambiguous instruction.
 * ambiguous instruction: an instruction that cannot be moved across may-throw instructions.
 */
void AArch64DepAnalysis::BuildDepsAmbiInsn(Insn &insn)
{
    AddDependence4InsnInVectorByType(mayThrows, insn, kDependenceTypeThrow);
    ambiInsns.emplace_back(&insn);
}

/* Build dependences of may throw instructions. */
void AArch64DepAnalysis::BuildDepsMayThrowInsn(Insn &insn)
{
    AddDependence4InsnInVectorByType(ambiInsns, insn, kDependenceTypeThrow);
}

bool AArch64DepAnalysis::IsFrameReg(const RegOperand &opnd) const
{
    return (opnd.GetRegisterNumber() == RFP) || (opnd.GetRegisterNumber() == RSP);
}

MemOperand *AArch64DepAnalysis::BuildNextMemOperandByByteSize(const MemOperand &aarchMemOpnd, uint32 byteSize) const
{
    MemOperand *nextMemOpnd = aarchMemOpnd.Clone(memPool);
    Operand *nextOfstOpnd = nextMemOpnd->GetOffsetImmediate()->Clone(memPool);
    OfstOperand *aarchNextOfstOpnd = static_cast<OfstOperand *>(nextOfstOpnd);
    CHECK_NULL_FATAL(aarchNextOfstOpnd);
    int32 offsetVal = static_cast<int32>(aarchNextOfstOpnd->GetOffsetValue());
    aarchNextOfstOpnd->SetOffsetValue(offsetVal + byteSize);
    nextMemOpnd->SetOffsetOperand(*aarchNextOfstOpnd);
    return nextMemOpnd;
}

/* Get the second memory access operand of stp/ldp instructions. */
MemOperand *AArch64DepAnalysis::GetNextMemOperand(const Insn &insn, const MemOperand &aarchMemOpnd) const
{
    MemOperand *nextMemOpnd = nullptr;
    switch (insn.GetMachineOpcode()) {
        case MOP_wldp:
        case MOP_sldp:
        case MOP_xldpsw:
        case MOP_wstp:
        case MOP_sstp: {
            nextMemOpnd = BuildNextMemOperandByByteSize(aarchMemOpnd, k4ByteSize);
            break;
        }
        case MOP_xldp:
        case MOP_dldp:
        case MOP_xstp:
        case MOP_dstp: {
            nextMemOpnd = BuildNextMemOperandByByteSize(aarchMemOpnd, k8ByteSize);
            break;
        }
        default:
            break;
    }

    return nextMemOpnd;
}
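
/*
 * For example, for the pair "ldp x0, x1, [sp, #16]" the first access reads
 * from #16 and the second (the operand built here) reads from #16 + 8 = #24.
 */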

/*
 * Build dependences of symbol memory access.
 * A memory access with a symbol must be a heap memory access.
 */
void AArch64DepAnalysis::BuildDepsAccessStImmMem(Insn &insn, bool isDest)
{
    if (isDest) {
        /*
         * Heap memory
         * Build anti dependences.
         */
        AddDependence4InsnInVectorByType(heapUses, insn, kDependenceTypeAnti);
        /* Build output dependence. */
        AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeOutput);
        heapDefs.emplace_back(&insn);
    } else {
        /* Heap memory */
        AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeTrue);
        heapUses.emplace_back(&insn);
    }
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
}

/* Build dependences of stack memory and heap memory uses. */
void AArch64DepAnalysis::BuildDepsUseMem(Insn &insn, MemOperand &aarchMemOpnd)
{
    RegOperand *baseRegister = aarchMemOpnd.GetBaseRegister();
    MemOperand *nextMemOpnd = GetNextMemOperand(insn, aarchMemOpnd);

    aarchMemOpnd.SetAccessSize(insn.GetMemoryByteSize());
    /* Stack memory address */
    for (auto defInsn : stackDefs) {
        if (defInsn->IsCall() || NeedBuildDepsMem(aarchMemOpnd, nextMemOpnd, *defInsn)) {
            AddDependence(*defInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeTrue);
        }
    }
    /* Heap memory */
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeTrue);
    if (((baseRegister != nullptr) && IsFrameReg(*baseRegister)) || aarchMemOpnd.IsStackMem()) {
        stackUses.emplace_back(&insn);
    } else {
        heapUses.emplace_back(&insn);
    }
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
}

static bool NoAlias(const MemOperand &leftOpnd, const MemOperand &rightOpnd)
{
    if (leftOpnd.GetAddrMode() == MemOperand::kAddrModeBOi && rightOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
        leftOpnd.GetIndexOpt() == MemOperand::kIntact && rightOpnd.GetIndexOpt() == MemOperand::kIntact) {
        if (leftOpnd.GetBaseRegister()->GetRegisterNumber() == RFP ||
            rightOpnd.GetBaseRegister()->GetRegisterNumber() == RFP) {
            Operand *ofstOpnd = leftOpnd.GetOffsetOperand();
            Operand *rofstOpnd = rightOpnd.GetOffsetOperand();
            DEBUG_ASSERT(ofstOpnd != nullptr, "offset operand should not be null.");
            DEBUG_ASSERT(rofstOpnd != nullptr, "offset operand should not be null.");
            ImmOperand *ofst = static_cast<ImmOperand *>(ofstOpnd);
            ImmOperand *rofst = static_cast<ImmOperand *>(rofstOpnd);
            DEBUG_ASSERT(ofst != nullptr, "CG internal error, invalid type.");
            DEBUG_ASSERT(rofst != nullptr, "CG internal error, invalid type.");
            return (!ofst->ValueEquals(*rofst));
        }
    }
    return false;
}
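
/*
 * E.g. two base+offset accesses where at least one base is the FP are
 * treated as non-aliasing when their constant offsets differ:
 *   ldr x0, [fp, #16]  vs.  str x1, [fp, #24]  ->  NoAlias == true
 * Note this check compares offsets only; NoOverlap below is the
 * size-aware disjointness check.
 */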

static bool NoOverlap(const MemOperand &leftOpnd, const MemOperand &rightOpnd)
{
    if (leftOpnd.GetAddrMode() != MemOperand::kAddrModeBOi || rightOpnd.GetAddrMode() != MemOperand::kAddrModeBOi ||
        leftOpnd.GetIndexOpt() != MemOperand::kIntact || rightOpnd.GetIndexOpt() != MemOperand::kIntact) {
        return false;
    }
    if (leftOpnd.GetBaseRegister()->GetRegisterNumber() != RFP ||
        rightOpnd.GetBaseRegister()->GetRegisterNumber() != RFP) {
        return false;
    }
    int64 offset1 = leftOpnd.GetOffsetOperand()->GetValue();
    int64 offset2 = rightOpnd.GetOffsetOperand()->GetValue();
    if (offset1 < offset2) {
        return ((offset1 + leftOpnd.GetAccessSize()) <= offset2);
    } else {
        return ((offset2 + rightOpnd.GetAccessSize()) <= offset1);
    }
}
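
/*
 * Size-aware disjointness, e.g. assuming two 8-byte accesses:
 *   [fp, #16] touches bytes [16, 24) and [fp, #24] touches [24, 32);
 *   16 + 8 <= 24, so the ranges are disjoint and NoOverlap returns true.
 */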

/* Return true if memInsn's memOpnd may alias with memOpnd or nextMemOpnd. */
bool AArch64DepAnalysis::NeedBuildDepsMem(const MemOperand &memOpnd, const MemOperand *nextMemOpnd,
                                          const Insn &memInsn) const
{
    auto *memOpndOfmemInsn = static_cast<MemOperand *>(memInsn.GetMemOpnd());
    CHECK_NULL_FATAL(memOpndOfmemInsn);
    if (!NoAlias(memOpnd, *memOpndOfmemInsn) ||
        ((nextMemOpnd != nullptr) && !NoAlias(*nextMemOpnd, *memOpndOfmemInsn))) {
        return true;
    }
    if (cgFunc.GetMirModule().GetSrcLang() == kSrcLangC && !memInsn.IsCall()) {
        static_cast<MemOperand *>(memInsn.GetMemOpnd())->SetAccessSize(memInsn.GetMemoryByteSize());
        return (!NoOverlap(memOpnd, *memOpndOfmemInsn));
    }
    MemOperand *nextMemOpndOfmemInsn = GetNextMemOperand(memInsn, *memOpndOfmemInsn);
    if (nextMemOpndOfmemInsn != nullptr) {
        if (!NoAlias(memOpnd, *nextMemOpndOfmemInsn) ||
            ((nextMemOpnd != nullptr) && !NoAlias(*nextMemOpnd, *nextMemOpndOfmemInsn))) {
            return true;
        }
    }
    return false;
}

/*
 * Build anti dependences between insn and other insns that use stack memory.
 * insn : the instruction that defines stack memory.
 * memOpnd : insn's memOpnd.
 * nextMemOpnd : memory pair instructions (like ldp/stp) access two memory locations.
 */
void AArch64DepAnalysis::BuildAntiDepsDefStackMem(Insn &insn, MemOperand &memOpnd, const MemOperand *nextMemOpnd)
{
    memOpnd.SetAccessSize(insn.GetMemoryByteSize());
    for (auto *useInsn : stackUses) {
        if (NeedBuildDepsMem(memOpnd, nextMemOpnd, *useInsn)) {
            AddDependence(*useInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeAnti);
        }
    }
}

/*
 * Build output dependences between insn and other insns that define stack memory.
 * insn : the instruction that defines stack memory.
 * memOpnd : insn's memOpnd.
 * nextMemOpnd : memory pair instructions (like ldp/stp) access two memory locations.
 */
void AArch64DepAnalysis::BuildOutputDepsDefStackMem(Insn &insn, MemOperand &memOpnd, const MemOperand *nextMemOpnd)
{
    memOpnd.SetAccessSize(insn.GetMemoryByteSize());
    for (auto defInsn : stackDefs) {
        if (defInsn->IsCall() || NeedBuildDepsMem(memOpnd, nextMemOpnd, *defInsn)) {
            AddDependence(*defInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeOutput);
        }
    }
}

/* Build dependences of memory barrier instructions. */
void AArch64DepAnalysis::BuildDepsMemBar(Insn &insn)
{
    AddDependence4InsnInVectorByTypeAndCmp(stackUses, insn, kDependenceTypeMembar);
    AddDependence4InsnInVectorByTypeAndCmp(heapUses, insn, kDependenceTypeMembar);
    AddDependence4InsnInVectorByTypeAndCmp(stackDefs, insn, kDependenceTypeMembar);
    AddDependence4InsnInVectorByTypeAndCmp(heapDefs, insn, kDependenceTypeMembar);
    memBarInsn = &insn;
}

/* A pseudo separator node depends on all the other nodes. */
void AArch64DepAnalysis::BuildDepsSeparator(DepNode &newSepNode, MapleVector<DepNode *> &nodes)
{
    CHECK_FATAL(nodes.size() >= 1, "value overflow");
    uint32 nextSepIndex = (separatorIndex + kMaxDependenceNum) < nodes.size() ? (separatorIndex + kMaxDependenceNum)
                                                                              : static_cast<uint32>(nodes.size() - 1);
    newSepNode.ReservePreds(nextSepIndex - separatorIndex);
    newSepNode.ReserveSuccs(nextSepIndex - separatorIndex);
    for (uint32 i = separatorIndex; i < nextSepIndex; ++i) {
        AddDependence(*nodes[i], newSepNode, kDependenceTypeSeparator);
    }
}

/* Build control dependence for branch/ret instructions. */
void AArch64DepAnalysis::BuildDepsControlAll(DepNode &depNode, const MapleVector<DepNode *> &nodes)
{
    for (uint32 i = separatorIndex; i < depNode.GetIndex(); ++i) {
        AddDependence(*nodes[i], depNode, kDependenceTypeControl);
    }
}

/*
 * Build dependences of call instructions.
 * Caller-saved physical registers will be defined by a call instruction.
 * Also the condition register may be modified by a call.
 */
void AArch64DepAnalysis::BuildCallerSavedDeps(Insn &insn)
{
    /* Build anti dependence and output dependence. */
    for (uint32 i = R0; i <= R7; ++i) {
        BuildDepsDefReg(insn, i);
    }
    for (uint32 i = V0; i <= V7; ++i) {
        BuildDepsDefReg(insn, i);
    }
    if (!beforeRA) {
        for (uint32 i = R8; i <= R18; ++i) {
            BuildDepsDefReg(insn, i);
        }
        for (uint32 i = RLR; i <= RSP; ++i) {
            BuildDepsUseReg(insn, i);
        }
        for (uint32 i = V16; i <= V31; ++i) {
            BuildDepsDefReg(insn, i);
        }
    }
    /* For condition operand, such as NE, EQ, and so on. */
    if (cgFunc.GetRflag() != nullptr) {
        BuildDepsDefReg(insn, kRFLAG);
    }
}
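
/*
 * A sketch (after RA, hypothetical insns): for "bl foo" the caller-saved
 * registers above are treated as defined by the call, so an earlier
 * "mov x1, #1" gets an output dependence on the bl and a later
 * "mov x2, x0" gets a true dependence on it.
 */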

/*
 * Build dependence between a control register and the last call instruction.
 * insn : an instruction with a control register operand.
 * isDest : whether the control register operand is a destination operand.
 */
void AArch64DepAnalysis::BuildDepsBetweenControlRegAndCall(Insn &insn, bool isDest)
{
    if (lastCallInsn == nullptr) {
        return;
    }
    if (isDest) {
        AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeOutput);
        return;
    }
    AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeAnti);
}

/*
 * Build dependence between stack-defining instructions that set up a call's
 * outgoing arguments and the call instruction itself.
 * insn : a call instruction (call/tail-call).
 */
void AArch64DepAnalysis::BuildStackPassArgsDeps(Insn &insn)
{
    for (auto stackDefInsn : stackDefs) {
        if (stackDefInsn->IsCall()) {
            continue;
        }
        Operand *opnd = stackDefInsn->GetMemOpnd();
        DEBUG_ASSERT(opnd->IsMemoryAccessOperand(), "make sure opnd is memOpnd");
        MemOperand *memOpnd = static_cast<MemOperand *>(opnd);
        DEBUG_ASSERT(memOpnd != nullptr, "memOpnd should not be nullptr");
        RegOperand *baseReg = memOpnd->GetBaseRegister();
        if ((baseReg != nullptr) && (baseReg->GetRegisterNumber() == RSP)) {
            AddDependence(*stackDefInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
        }
    }
}
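
/*
 * E.g. (hypothetical): an argument passed on the stack must stay before the call:
 *   str x0, [sp, #0]   // outgoing stack argument
 *   bl  foo            // control dependence keeps the store above the bl
 */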

/* Some insns may dirty all stack memory, such as "bl MCC_InitializeLocalStackRef". */
void AArch64DepAnalysis::BuildDepsDirtyStack(Insn &insn)
{
    /* Build anti dependences. */
    AddDependence4InsnInVectorByType(stackUses, insn, kDependenceTypeAnti);
    /* Build output dependence. */
    AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeOutput);
    stackDefs.emplace_back(&insn);
}

/* Some call insns may use all stack memory, such as "bl MCC_CleanupLocalStackRef_NaiveRCFast". */
void AArch64DepAnalysis::BuildDepsUseStack(Insn &insn)
{
    /* Build true dependences. */
    AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeTrue);
}

/* Some insns may dirty all heap memory, such as a call insn. */
void AArch64DepAnalysis::BuildDepsDirtyHeap(Insn &insn)
{
    /* Build anti dependences. */
    AddDependence4InsnInVectorByType(heapUses, insn, kDependenceTypeAnti);
    /* Build output dependence. */
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeOutput);
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
    heapDefs.emplace_back(&insn);
}

/* Build a pseudo node to separate the dependence graph. */
DepNode *AArch64DepAnalysis::BuildSeparatorNode()
{
    Insn &pseudoSepInsn = cgFunc.GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_pseudo_dependence_seperator);
    DepNode *separatorNode = memPool.New<DepNode>(pseudoSepInsn, alloc);
    separatorNode->SetType(kNodeTypeSeparator);
    pseudoSepInsn.SetDepNode(*separatorNode);
    if (beforeRA) {
        RegPressure *regPressure = memPool.New<RegPressure>(alloc);
        separatorNode->SetRegPressure(*regPressure);
        separatorNode->InitPressure();
    }
    return separatorNode;
}

/* Initialize the dependence analysis data structures. */
void AArch64DepAnalysis::Init(BB &bb, MapleVector<DepNode *> &nodes)
{
    curBB = &bb;
    ClearAllDepData();
    lastComments.clear();
    /* Clear all dependence nodes and push the first separator node. */
    nodes.clear();
    DepNode *pseudoSepNode = BuildSeparatorNode();
    nodes.emplace_back(pseudoSepNode);
    separatorIndex = 0;

    if (beforeRA) {
        /* Assume the first pseudo separator insn of the current bb defines the live-in registers. */
        Insn *pseudoSepInsn = pseudoSepNode->GetInsn();
        for (auto &regNO : bb.GetLiveInRegNO()) {
            regDefs[regNO] = pseudoSepInsn;
            pseudoSepNode->AddDefReg(regNO);
            pseudoSepNode->SetRegDefs(pseudoSepNode->GetDefRegnos().size(), nullptr);
        }
    }
}

/* When a separator is built, what follows is treated like a new basic block. */
void AArch64DepAnalysis::ClearAllDepData()
{
    uint32 maxRegNum;
    if (beforeRA) {
        maxRegNum = cgFunc.GetMaxVReg();
    } else {
        maxRegNum = kAllRegNum;
    }
    errno_t ret = memset_s(regDefs, sizeof(Insn *) * maxRegNum, 0, sizeof(Insn *) * maxRegNum);
    CHECK_FATAL(ret == EOK, "call memset_s failed in Unit");
    ret = memset_s(regUses, sizeof(RegList *) * maxRegNum, 0, sizeof(RegList *) * maxRegNum);
    CHECK_FATAL(ret == EOK, "call memset_s failed in Unit");
    memBarInsn = nullptr;
    lastCallInsn = nullptr;
    lastFrameDef = nullptr;

    stackUses.clear();
    stackDefs.clear();
    heapUses.clear();
    heapDefs.clear();
    mayThrows.clear();
    ambiInsns.clear();
}

/* Check if regNO is in ehInRegs. */
bool AArch64DepAnalysis::IfInAmbiRegs(regno_t regNO) const
{
    if (!hasAmbiRegs) {
        return false;
    }
    return ehInRegs.find(regNO) != ehInRegs.end();
}

static bool IsYieldPoint(Insn &insn)
{
    /*
     * It is a yieldpoint if loading from a dedicated
     * register holding polling page address:
     * ldr wzr, [RYP]
     */
    if (insn.IsLoad() && !insn.IsLoadLabel()) {
        auto mem = static_cast<MemOperand *>(insn.GetMemOpnd());
        return (mem != nullptr && mem->GetBaseRegister() != nullptr &&
                mem->GetBaseRegister()->GetRegisterNumber() == RYP);
    }
    return false;
}

/*
 * Build dependences of a memory operand.
 * insn : an instruction with a memory access operand.
 * opnd : the memory access operand.
 * regProp : operand property of the memory access operand.
 */
void AArch64DepAnalysis::BuildMemOpndDependency(Insn &insn, Operand &opnd, const OpndDesc &regProp)
{
    DEBUG_ASSERT(opnd.IsMemoryAccessOperand(), "opnd must be memory Operand");
    MemOperand *memOpnd = static_cast<MemOperand *>(&opnd);
    RegOperand *baseRegister = memOpnd->GetBaseRegister();
    if (baseRegister != nullptr) {
        regno_t regNO = baseRegister->GetRegisterNumber();
        BuildDepsUseReg(insn, regNO);
        if ((memOpnd->GetAddrMode() == MemOperand::kAddrModeBOi) &&
            (memOpnd->IsPostIndexed() || memOpnd->IsPreIndexed())) {
            /* Base operand has changed. */
            BuildDepsDefReg(insn, regNO);
        }
    }
    RegOperand *indexRegister = memOpnd->GetIndexRegister();
    if (indexRegister != nullptr) {
        regno_t regNO = indexRegister->GetRegisterNumber();
        BuildDepsUseReg(insn, regNO);
    }
    if (regProp.IsUse()) {
        BuildDepsUseMem(insn, *memOpnd);
    } else {
        BuildDepsAmbiInsn(insn);
    }
    if (IsYieldPoint(insn)) {
        BuildDepsMemBar(insn);
        BuildDepsDefReg(insn, kRFLAG);
    }
}
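
/*
 * E.g. a post-indexed load both uses and defines its base register:
 *   ldr x0, [x1], #8    // uses x1 as the address, then defines x1 (x1 += 8)
 * so a true dependence (on the last def of x1) and anti/output dependences
 * (against other readers/writers of x1) are all built for it.
 */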

/* Build Dependency for each Operand of insn */
void AArch64DepAnalysis::BuildOpndDependency(Insn &insn)
{
    const InsnDesc *md = insn.GetDesc();
    MOperator mOp = insn.GetMachineOpcode();
    uint32 opndNum = insn.GetOperandSize();
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = insn.GetOperand(i);
        const OpndDesc *regProp = md->opndMD[i];
        if (opnd.IsMemoryAccessOperand()) {
            BuildMemOpndDependency(insn, opnd, *regProp);
        } else if (opnd.IsStImmediate()) {
            if (mOp != MOP_xadrpl12) {
                BuildDepsAccessStImmMem(insn, false);
            }
        } else if (opnd.IsRegister()) {
            RegOperand &regOpnd = static_cast<RegOperand &>(opnd);
            regno_t regNO = regOpnd.GetRegisterNumber();

            if (regProp->IsUse()) {
                BuildDepsUseReg(insn, regNO);
            }

            if (regProp->IsDef()) {
                BuildDepsDefReg(insn, regNO);
            }
        } else if (opnd.IsConditionCode()) {
            /* For condition operand, such as NE, EQ, and so on. */
            if (regProp->IsUse()) {
                BuildDepsUseReg(insn, kRFLAG);
                BuildDepsBetweenControlRegAndCall(insn, false);
            }

            if (regProp->IsDef()) {
                BuildDepsDefReg(insn, kRFLAG);
                BuildDepsBetweenControlRegAndCall(insn, true);
            }
        } else if (opnd.IsList()) {
            ListOperand &listOpnd = static_cast<ListOperand &>(opnd);
            /* Build true dependences */
            for (auto lst : listOpnd.GetOperands()) {
                regno_t regNO = lst->GetRegisterNumber();
                BuildDepsUseReg(insn, regNO);
            }
        }
    }
}

static bool IsLazyLoad(MOperator op)
{
    return (op == MOP_lazy_ldr) || (op == MOP_lazy_ldr_static) || (op == MOP_lazy_tail);
}

/*
 * Build dependences for special cases (stack/heap/throw/clinit/lazy binding/control flow).
 * insn : an instruction.
 * depNode : insn's depNode.
 * nodes : the dependence nodes, including insn's depNode.
 */
void AArch64DepAnalysis::BuildSpecialInsnDependency(Insn &insn, DepNode &depNode, const MapleVector<DepNode *> &nodes)
{
    const InsnDesc *md = insn.GetDesc();
    MOperator mOp = insn.GetMachineOpcode();
    if (insn.IsCall() || insn.IsTailCall()) {
        /* Caller saved registers. */
        BuildCallerSavedDeps(insn);
        BuildStackPassArgsDeps(insn);

        if (mOp == MOP_xbl) {
            FuncNameOperand &target = static_cast<FuncNameOperand &>(insn.GetOperand(0));
            if ((target.GetName() == "MCC_InitializeLocalStackRef") || (target.GetName() == "MCC_ClearLocalStackRef") ||
                (target.GetName() == "MCC_DecRefResetPair")) {
                /* Write stack memory. */
                BuildDepsDirtyStack(insn);
            } else if ((target.GetName() == "MCC_CleanupLocalStackRef_NaiveRCFast") ||
                       (target.GetName() == "MCC_CleanupLocalStackRefSkip_NaiveRCFast") ||
                       (target.GetName() == "MCC_CleanupLocalStackRefSkip")) {
                /* Use stack memory. */
                BuildDepsUseStack(insn);
            } else if (cgFunc.GetMirModule().GetSrcLang() == kSrcLangC) {
                /* Potential C aliasing. */
                BuildDepsDirtyStack(insn);
            }
        }
        BuildDepsDirtyHeap(insn);
        BuildDepsAmbiInsn(insn);
        if (lastCallInsn != nullptr) {
            AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
        }
        lastCallInsn = &insn;
    } else if (insn.IsClinit() || IsLazyLoad(insn.GetMachineOpcode()) ||
               insn.GetMachineOpcode() == MOP_arrayclass_cache_ldr) {
        BuildDepsDirtyHeap(insn);
        BuildDepsDefReg(insn, kRFLAG);
        if (insn.GetMachineOpcode() != MOP_adrp_ldr) {
            BuildDepsDefReg(insn, R16);
            BuildDepsDefReg(insn, R17);
        }
    } else if ((mOp == MOP_xret) || md->IsBranch()) {
        BuildDepsControlAll(depNode, nodes);
    } else if (insn.IsMemAccessBar()) {
        BuildDepsMemBar(insn);
    } else if (insn.IsSpecialIntrinsic()) {
        BuildDepsDirtyHeap(insn);
    }
}

/*
 * If the number of instructions in the current basic block exceeds kMaxDependenceNum,
 * insert pseudo separator nodes to split the basic block.
 */
void AArch64DepAnalysis::SeperateDependenceGraph(MapleVector<DepNode *> &nodes, uint32 &nodeSum)
{
    if ((nodeSum > 0) && ((nodeSum % kMaxDependenceNum) == 0)) {
        DEBUG_ASSERT(nodeSum == nodes.size(), "CG internal error, nodeSum should equal to nodes.size.");
        /* Add a pseudo node to separate the dependence graph. */
        DepNode *separatorNode = BuildSeparatorNode();
        separatorNode->SetIndex(nodeSum);
        nodes.emplace_back(separatorNode);
        BuildDepsSeparator(*separatorNode, nodes);

        if (beforeRA) {
            /* for all live-out registers of the current bb */
            for (auto &regNO : curBB->GetLiveOutRegNO()) {
                if (regDefs[regNO] != nullptr) {
                    AppendRegUseList(*(separatorNode->GetInsn()), regNO);
                    separatorNode->AddUseReg(regNO);
                    separatorNode->SetRegUses(*regUses[regNO]);
                }
            }
        }
        ClearAllDepData();
        separatorIndex = nodeSum++;
    }
}

/*
 * Generate a depNode.
 * insn : create a depNode for the instruction.
 * nodes : a vector to store depNodes.
 * nodeSum : the new depNode's index.
 * comments : the comment insns between the previous non-comment insn and this insn.
 */
DepNode *AArch64DepAnalysis::GenerateDepNode(Insn &insn, MapleVector<DepNode *> &nodes, int32 nodeSum,
                                             const MapleVector<Insn *> &comments)
{
    DepNode *depNode = nullptr;
    Reservation *rev = mad.FindReservation(insn);
    DEBUG_ASSERT(rev != nullptr, "rev is nullptr");
    depNode = memPool.New<DepNode>(insn, alloc, rev->GetUnit(), rev->GetUnitNum(), *rev);
    if (beforeRA) {
        RegPressure *regPressure = memPool.New<RegPressure>(alloc);
        depNode->SetRegPressure(*regPressure);
        depNode->InitPressure();
    }
    depNode->SetIndex(nodeSum);
    nodes.emplace_back(depNode);
    insn.SetDepNode(*depNode);

    constexpr size_t vectorSize = 5;
    depNode->ReservePreds(vectorSize);
    depNode->ReserveSuccs(vectorSize);

    if (!comments.empty()) {
        depNode->SetComments(comments);
    }
    return depNode;
}

void AArch64DepAnalysis::BuildAmbiInsnDependency(Insn &insn)
{
    const auto &defRegnos = insn.GetDepNode()->GetDefRegnos();
    for (const auto &regNO : defRegnos) {
        if (IfInAmbiRegs(regNO)) {
            BuildDepsAmbiInsn(insn);
            break;
        }
    }
}

void AArch64DepAnalysis::BuildMayThrowInsnDependency(Insn &insn)
{
    /* Build dependency for may-throw insns. */
    if (insn.MayThrow()) {
        BuildDepsMayThrowInsn(insn);
        if (lastFrameDef != nullptr) {
            AddDependence(*lastFrameDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeThrow);
        }
    }
}

void AArch64DepAnalysis::UpdateRegUseAndDef(Insn &insn, const DepNode &depNode, MapleVector<DepNode *> &nodes)
{
    const auto &useRegnos = depNode.GetUseRegnos();
    if (beforeRA) {
        depNode.InitRegUsesSize(useRegnos.size());
    }
    for (auto regNO : useRegnos) {
        AppendRegUseList(insn, regNO);
        if (beforeRA) {
            depNode.SetRegUses(*regUses[regNO]);
            if (regDefs[regNO] == nullptr) {
                regDefs[regNO] = nodes[separatorIndex]->GetInsn();
                nodes[separatorIndex]->AddDefReg(regNO);
                nodes[separatorIndex]->SetRegDefs(nodes[separatorIndex]->GetDefRegnos().size(), regUses[regNO]);
            }
        }
    }

    const auto &defRegnos = depNode.GetDefRegnos();
    size_t i = 0;
    if (beforeRA) {
        depNode.InitRegDefsSize(defRegnos.size());
    }
    for (const auto regNO : defRegnos) {
        regDefs[regNO] = &insn;
        regUses[regNO] = nullptr;
        if (beforeRA) {
            depNode.SetRegDefs(i, nullptr);
            if (regNO >= R0 && regNO <= R3) {
                depNode.SetHasPreg(true);
            } else if (regNO == R8) {
                depNode.SetHasNativeCallRegister(true);
            }
        }
        ++i;
    }
}

/* Update stack and heap dependency */
void AArch64DepAnalysis::UpdateStackAndHeapDependency(DepNode &depNode, Insn &insn, const Insn &locInsn)
{
    if (!insn.MayThrow()) {
        return;
    }
    depNode.SetLocInsn(locInsn);
    mayThrows.emplace_back(&insn);
    AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeThrow);
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeThrow);
}

/*
 * Add a separator node to the end of nodes.
 * Before RA: add all live-out registers to this separator node's uses.
 */
void AArch64DepAnalysis::AddEndSeparatorNode(MapleVector<DepNode *> &nodes)
{
    DepNode *separatorNode = BuildSeparatorNode();
    nodes.emplace_back(separatorNode);
    BuildDepsSeparator(*separatorNode, nodes);

    if (beforeRA) {
        /* for all live-out registers of the current bb */
        for (auto &regNO : curBB->GetLiveOutRegNO()) {
            if (regDefs[regNO] != nullptr) {
                AppendRegUseList(*(separatorNode->GetInsn()), regNO);
                separatorNode->AddUseReg(regNO);
                separatorNode->SetRegUses(*regUses[regNO]);
            }
        }
    }
}

/*
 * Build the dependence graph.
 * 1: Build dependence nodes.
 * 2: Build edges between dependence nodes. Edges are:
 *    2.1) True dependences
 *    2.2) Anti dependences
 *    2.3) Output dependences
 *    2.4) Barrier dependences
 */
void AArch64DepAnalysis::Run(BB &bb, MapleVector<DepNode *> &nodes)
{
    /* Initialize internal data. */
    Init(bb, nodes);
    uint32 nodeSum = 1;
    MapleVector<Insn *> comments(alloc.Adapter());
    const Insn *locInsn = bb.GetFirstLoc();
    FOR_BB_INSNS(insn, (&bb))
    {
        if (!insn->IsMachineInstruction()) {
            if (insn->IsImmaterialInsn()) {
                if (!insn->IsComment()) {
                    locInsn = insn;
                } else {
                    comments.emplace_back(insn);
                }
            } else if (insn->IsCfiInsn()) {
                if (!nodes.empty()) {
                    nodes.back()->AddCfiInsn(*insn);
                }
            }
            continue;
        }
        /* Add a pseudo node to separate the dependence graph when appropriate */
        SeperateDependenceGraph(nodes, nodeSum);
        /* generate a DepNode */
        DepNode *depNode = GenerateDepNode(*insn, nodes, nodeSum, comments);
        ++nodeSum;
        comments.clear();
        /* Build dependency for may-throw insns */
        BuildMayThrowInsnDependency(*insn);
        /* Build dependency for each operand of insn */
        BuildOpndDependency(*insn);
        /* Build dependency for special insns */
        BuildSpecialInsnDependency(*insn, *depNode, nodes);
        /* Build dependency for ambiguous insns if needed */
        BuildAmbiInsnDependency(*insn);
        /* Update stack and heap dependency */
        UpdateStackAndHeapDependency(*depNode, *insn, *locInsn);
        if (insn->IsFrameDef()) {
            lastFrameDef = insn;
        }
        /* A separator always exists. */
        AddDependence(*nodes[separatorIndex], *insn->GetDepNode(), kDependenceTypeSeparator);
        /* Update register use and register def */
        UpdateRegUseAndDef(*insn, *depNode, nodes);
    }

    AddEndSeparatorNode(nodes);

    if (!comments.empty()) {
        lastComments = comments;
    }
    comments.clear();
}

/* return dependence type name */
const std::string &AArch64DepAnalysis::GetDepTypeName(DepType depType) const
{
    DEBUG_ASSERT(depType <= kDependenceTypeNone, "array boundary check failed");
    return kDepTypeName[depType];
}
} /* namespace maplebe */