/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "aarch64_dependence.h"
#include "aarch64_cg.h"
#include "aarch64_operand.h"
#include "pressure.h"

/* For building the dependence graph; the entry point is AArch64DepAnalysis::Run. */
namespace maplebe {
/* constructor */
AArch64DepAnalysis::AArch64DepAnalysis(CGFunc &func, MemPool &mp, MAD &mad, bool beforeRA)
    : DepAnalysis(func, mp, mad, beforeRA),
      stackUses(alloc.Adapter()),
      stackDefs(alloc.Adapter()),
      heapUses(alloc.Adapter()),
      heapDefs(alloc.Adapter()),
      mayThrows(alloc.Adapter()),
      ambiInsns(alloc.Adapter()),
      ehInRegs(alloc.Adapter())
{
    uint32 maxRegNum;
    if (beforeRA) {
        maxRegNum = cgFunc.GetMaxVReg();
    } else {
        maxRegNum = kAllRegNum;
    }
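    /*
     * Per-register bookkeeping: regDefs[i] caches the most recent insn that
     * defines register i, and regUses[i] chains the insns that have used it
     * since that definition; both arrays are reset at every separator
     * boundary (see ClearAllDepData).
     */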
    regDefs = memPool.NewArray<Insn *>(maxRegNum);
    regUses = memPool.NewArray<RegList *>(maxRegNum);
}

/* print dep node information */
void AArch64DepAnalysis::DumpDepNode(DepNode &node) const
{
    node.GetInsn()->Dump();
    uint32 num = node.GetUnitNum();
    LogInfo::MapleLogger() << "unit num : " << num << ", ";
    for (uint32 i = 0; i < num; ++i) {
        const Unit *unit = node.GetUnitByIndex(i);
        if (unit != nullptr) {
            PRINT_VAL(unit->GetName());
        } else {
            PRINT_VAL("none");
        }
    }
    LogInfo::MapleLogger() << '\n';
    node.DumpSchedInfo();
    if (beforeRA) {
        node.DumpRegPressure();
    }
}

/* print dep link information */
void AArch64DepAnalysis::DumpDepLink(DepLink &link, const DepNode *node) const
{
    PRINT_VAL(GetDepTypeName(link.GetDepType()));
    PRINT_STR_VAL("Latency: ", link.GetLatency());
    if (node != nullptr) {
        node->GetInsn()->Dump();
        return;
    }
    LogInfo::MapleLogger() << "from : ";
    link.GetFrom().GetInsn()->Dump();
    LogInfo::MapleLogger() << "to : ";
    link.GetTo().GetInsn()->Dump();
}

/* Append use register to the list. */
void AArch64DepAnalysis::AppendRegUseList(Insn &insn, regno_t regNO)
{
    RegList *regList = memPool.New<RegList>();
    regList->insn = &insn;
    regList->next = nullptr;
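    /*
     * First use of regNO since its last definition: start a new use list and,
     * before RA, let the defining node reference it via SetRegDefs.
     */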
    if (regUses[regNO] == nullptr) {
        regUses[regNO] = regList;
        if (beforeRA) {
            Insn *defInsn = regDefs[regNO];
            if (defInsn == nullptr) {
                return;
            }
            DepNode *defNode = defInsn->GetDepNode();
            defNode->SetRegDefs(regNO, regList);
        }
        return;
    }
    RegList *lastRegList = regUses[regNO];
    while (lastRegList->next != nullptr) {
        lastRegList = lastRegList->next;
    }
    lastRegList->next = regList;
}

/*
 * Add a dependence edge.
 * Any two dependence nodes have at most one edge between them.
 * A true dependence overwrites other dependence types.
 */
void AArch64DepAnalysis::AddDependence(DepNode &fromNode, DepNode &toNode, DepType depType)
{
    /* Cannot build a self-loop dependence. */
    if (&fromNode == &toNode) {
        return;
    }
    /* Check whether an edge already exists; only the most recently added successor edge is inspected. */
    if (!fromNode.GetSuccs().empty()) {
        DepLink *depLink = fromNode.GetSuccs().back();
        if (&(depLink->GetTo()) == &toNode) {
            if (depLink->GetDepType() != kDependenceTypeTrue) {
                if (depType == kDependenceTypeTrue) {
                    /* An edge already exists; upgrade it to a true dependence. */
                    depLink->SetDepType(kDependenceTypeTrue);
                    depLink->SetLatency(mad.GetLatency(*fromNode.GetInsn(), *toNode.GetInsn()));
                }
            }
            return;
        }
    }
    DepLink *depLink = memPool.New<DepLink>(fromNode, toNode, depType);
    if (depType == kDependenceTypeTrue) {
        depLink->SetLatency(mad.GetLatency(*fromNode.GetInsn(), *toNode.GetInsn()));
    }
    fromNode.AddSucc(*depLink);
    toNode.AddPred(*depLink);
}

void AArch64DepAnalysis::AddDependence4InsnInVectorByType(MapleVector<Insn *> &insns, Insn &insn, const DepType &type)
{
    for (auto anyInsn : insns) {
        AddDependence(*anyInsn->GetDepNode(), *insn.GetDepNode(), type);
    }
}

void AArch64DepAnalysis::AddDependence4InsnInVectorByTypeAndCmp(MapleVector<Insn *> &insns, Insn &insn,
                                                                const DepType &type)
{
    for (auto anyInsn : insns) {
        if (anyInsn != &insn) {
            AddDependence(*anyInsn->GetDepNode(), *insn.GetDepNode(), type);
        }
    }
}

/* Remove self dependence (self loop) in dependence graph. */
void AArch64DepAnalysis::RemoveSelfDeps(Insn &insn)
{
    DepNode *node = insn.GetDepNode();
    DEBUG_ASSERT(node->GetSuccs().back()->GetTo().GetInsn() == &insn, "Is not a self dependence.");
    DEBUG_ASSERT(node->GetPreds().back()->GetFrom().GetInsn() == &insn, "Is not a self dependence.");
    node->RemoveSucc();
    node->RemovePred();
}

/* Build dependences of source register operand. */
void AArch64DepAnalysis::BuildDepsUseReg(Insn &insn, regno_t regNO)
{
    DepNode *node = insn.GetDepNode();
    node->AddUseReg(regNO);
    if (regDefs[regNO] != nullptr) {
        /* Build true dependences. */
        AddDependence(*regDefs[regNO]->GetDepNode(), *insn.GetDepNode(), kDependenceTypeTrue);
    }
}

/* Build dependences of destination register operand. */
void AArch64DepAnalysis::BuildDepsDefReg(Insn &insn, regno_t regNO)
{
    DepNode *node = insn.GetDepNode();
    node->AddDefReg(regNO);
    /* Build anti dependences. */
    RegList *regList = regUses[regNO];
    while (regList != nullptr) {
        CHECK_NULL_FATAL(regList->insn);
        AddDependence(*regList->insn->GetDepNode(), *node, kDependenceTypeAnti);
        regList = regList->next;
    }
    /* Build output dependence. */
    if (regDefs[regNO] != nullptr) {
        AddDependence(*regDefs[regNO]->GetDepNode(), *node, kDependenceTypeOutput);
    }
}

void AArch64DepAnalysis::ReplaceDepNodeWithNewInsn(DepNode &firstNode, DepNode &secondNode, Insn &newInsn,
                                                   bool isFromClinit) const
{
    if (isFromClinit) {
        firstNode.AddClinitInsn(*firstNode.GetInsn());
        firstNode.AddClinitInsn(*secondNode.GetInsn());
        firstNode.SetCfiInsns(secondNode.GetCfiInsns());
    } else {
        for (Insn *insn : secondNode.GetCfiInsns()) {
            firstNode.AddCfiInsn(*insn);
        }
        for (Insn *insn : secondNode.GetComments()) {
            firstNode.AddComments(*insn);
        }
        secondNode.ClearComments();
    }
    firstNode.SetInsn(newInsn);
    Reservation *rev = mad.FindReservation(newInsn);
    CHECK_FATAL(rev != nullptr, "reservation is nullptr.");
    firstNode.SetReservation(*rev);
    firstNode.SetUnits(rev->GetUnit());
    firstNode.SetUnitNum(rev->GetUnitNum());
    newInsn.SetDepNode(firstNode);
}

void AArch64DepAnalysis::ClearDepNodeInfo(DepNode &depNode) const
{
    Insn &insn = cgFunc.GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_pseudo_none);
    insn.SetDepNode(depNode);
    Reservation *seRev = mad.FindReservation(insn);
    depNode.SetInsn(insn);
    depNode.SetType(kNodeTypeEmpty);
    depNode.SetReservation(*seRev);
    depNode.SetUnitNum(0);
    depNode.ClearCfiInsns();
    depNode.SetUnits(nullptr);
}

/* Combine adrpldr & clinit_tail into clinit. */
void AArch64DepAnalysis::CombineClinit(DepNode &firstNode, DepNode &secondNode, bool isAcrossSeparator)
{
    DEBUG_ASSERT(firstNode.GetInsn()->GetMachineOpcode() == MOP_adrp_ldr, "first insn should be adrpldr");
    DEBUG_ASSERT(secondNode.GetInsn()->GetMachineOpcode() == MOP_clinit_tail, "second insn should be clinit_tail");
    DEBUG_ASSERT(firstNode.GetCfiInsns().empty(), "There should not be any comment/cfi instructions between clinit.");
    DEBUG_ASSERT(secondNode.GetComments().empty(), "There should not be any comment/cfi instructions between clinit.");
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_clinit, firstNode.GetInsn()->GetOperand(0),
                                                       firstNode.GetInsn()->GetOperand(1));
    newInsn.SetId(firstNode.GetInsn()->GetId());
    /* Replace first node with new insn. */
    ReplaceDepNodeWithNewInsn(firstNode, secondNode, newInsn, true);
    /* Clear second node information. */
    ClearDepNodeInfo(secondNode);
    CombineDependence(firstNode, secondNode, isAcrossSeparator);
}

/*
 * Combine memory access pair:
 *  1. ldr to ldp.
 *  2. str to stp.
 */
void AArch64DepAnalysis::CombineMemoryAccessPair(DepNode &firstNode, DepNode &secondNode, bool useFirstOffset)
{
    DEBUG_ASSERT(firstNode.GetInsn(), "the insn of first Node should not be nullptr");
    DEBUG_ASSERT(secondNode.GetInsn(), "the insn of second Node should not be nullptr");
    MOperator thisMop = firstNode.GetInsn()->GetMachineOpcode();
    MOperator mopPair = GetMopPair(thisMop);
    DEBUG_ASSERT(mopPair != 0, "mopPair should not be zero");
    Operand *opnd0 = nullptr;
    Operand *opnd1 = nullptr;
    Operand *opnd2 = nullptr;
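    /*
     * useFirstOffset selects which insn's memory operand the combined pair
     * instruction keeps as its address; the two data registers are ordered
     * accordingly.
     */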
    if (useFirstOffset) {
        opnd0 = &(firstNode.GetInsn()->GetOperand(0));
        opnd1 = &(secondNode.GetInsn()->GetOperand(0));
        opnd2 = &(firstNode.GetInsn()->GetOperand(1));
    } else {
        opnd0 = &(secondNode.GetInsn()->GetOperand(0));
        opnd1 = &(firstNode.GetInsn()->GetOperand(0));
        opnd2 = &(secondNode.GetInsn()->GetOperand(1));
    }
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopPair, *opnd0, *opnd1, *opnd2);
    newInsn.SetId(firstNode.GetInsn()->GetId());
    std::string newComment;
    const MapleString &comment = firstNode.GetInsn()->GetComment();
    if (comment.c_str() != nullptr) {
        newComment += comment.c_str();
    }
    const MapleString &secondComment = secondNode.GetInsn()->GetComment();
    if (secondComment.c_str() != nullptr) {
        newComment += "  ";
        newComment += secondComment.c_str();
    }
    if ((newComment.c_str() != nullptr) && (strlen(newComment.c_str()) > 0)) {
        newInsn.SetComment(newComment);
    }
    /* Replace first node with new insn. */
    ReplaceDepNodeWithNewInsn(firstNode, secondNode, newInsn, false);
    /* Clear second node information. */
    ClearDepNodeInfo(secondNode);
    CombineDependence(firstNode, secondNode, false, true);
}

/* Combine two dependence nodes into one. */
void AArch64DepAnalysis::CombineDependence(DepNode &firstNode, DepNode &secondNode, bool isAcrossSeparator,
                                           bool isMemCombine)
{
    if (isAcrossSeparator) {
        /* Clear all latency of the second node. */
        for (auto predLink : secondNode.GetPreds()) {
            predLink->SetLatency(0);
        }
        for (auto succLink : secondNode.GetSuccs()) {
            succLink->SetLatency(0);
        }
        return;
    }
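    /* Merge secondNode's edges into firstNode, skipping targets firstNode
     * already connects to, and zero the latencies of the merged-away links. */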
    std::set<DepNode *> uniqueNodes;

    for (auto predLink : firstNode.GetPreds()) {
        if (predLink->GetDepType() == kDependenceTypeTrue) {
            predLink->SetLatency(mad.GetLatency(*predLink->GetFrom().GetInsn(), *firstNode.GetInsn()));
        }
        (void)uniqueNodes.insert(&predLink->GetFrom());
    }
    for (auto predLink : secondNode.GetPreds()) {
        if (&predLink->GetFrom() != &firstNode) {
            if (uniqueNodes.insert(&(predLink->GetFrom())).second) {
                AddDependence(predLink->GetFrom(), firstNode, predLink->GetDepType());
            }
        }
        predLink->SetLatency(0);
    }
    uniqueNodes.clear();
    for (auto succLink : firstNode.GetSuccs()) {
        if (succLink->GetDepType() == kDependenceTypeTrue) {
            succLink->SetLatency(mad.GetLatency(*succLink->GetFrom().GetInsn(), *firstNode.GetInsn()));
        }
        (void)uniqueNodes.insert(&(succLink->GetTo()));
    }
    for (auto succLink : secondNode.GetSuccs()) {
        if (uniqueNodes.insert(&(succLink->GetTo())).second) {
            AddDependence(firstNode, succLink->GetTo(), succLink->GetDepType());
            if (isMemCombine) {
                succLink->GetTo().IncreaseValidPredsSize();
            }
        }
        succLink->SetLatency(0);
    }
}

/*
 * Build dependences of an ambiguous instruction.
 * Ambiguous instruction: an instruction that cannot be moved across may-throw instructions.
 */
void AArch64DepAnalysis::BuildDepsAmbiInsn(Insn &insn)
{
    AddDependence4InsnInVectorByType(mayThrows, insn, kDependenceTypeThrow);
    ambiInsns.emplace_back(&insn);
}

/* Build dependences of may-throw instructions. */
void AArch64DepAnalysis::BuildDepsMayThrowInsn(Insn &insn)
{
    AddDependence4InsnInVectorByType(ambiInsns, insn, kDependenceTypeThrow);
}

bool AArch64DepAnalysis::IsFrameReg(const RegOperand &opnd) const
{
    return (opnd.GetRegisterNumber() == RFP) || (opnd.GetRegisterNumber() == RSP);
}

MemOperand *AArch64DepAnalysis::BuildNextMemOperandByByteSize(const MemOperand &aarchMemOpnd, uint32 byteSize) const
{
    MemOperand *nextMemOpnd = aarchMemOpnd.Clone(memPool);
    Operand *nextOfstOpnd = nextMemOpnd->GetOffsetImmediate()->Clone(memPool);
    OfstOperand *aarchNextOfstOpnd = static_cast<OfstOperand *>(nextOfstOpnd);
    CHECK_NULL_FATAL(aarchNextOfstOpnd);
    int32 offsetVal = static_cast<int32>(aarchNextOfstOpnd->GetOffsetValue());
    aarchNextOfstOpnd->SetOffsetValue(offsetVal + byteSize);
    nextMemOpnd->SetOffsetOperand(*aarchNextOfstOpnd);
    return nextMemOpnd;
}

/* Get the second memory access operand of stp/ldp instructions. */
MemOperand *AArch64DepAnalysis::GetNextMemOperand(const Insn &insn, const MemOperand &aarchMemOpnd) const
{
    MemOperand *nextMemOpnd = nullptr;
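    /* The second slot of a pair access sits one element past the first:
     * 4 bytes for 32-bit pairs, 8 bytes for 64-bit pairs. */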
    switch (insn.GetMachineOpcode()) {
        case MOP_wldp:
        case MOP_sldp:
        case MOP_xldpsw:
        case MOP_wstp:
        case MOP_sstp: {
            nextMemOpnd = BuildNextMemOperandByByteSize(aarchMemOpnd, k4ByteSize);
            break;
        }
        case MOP_xldp:
        case MOP_dldp:
        case MOP_xstp:
        case MOP_dstp: {
            nextMemOpnd = BuildNextMemOperandByByteSize(aarchMemOpnd, k8ByteSize);
            break;
        }
        default:
            break;
    }

    return nextMemOpnd;
}

/*
 * Build dependences of symbol memory access.
 * Memory access with a symbol must be a heap memory access.
 */
void AArch64DepAnalysis::BuildDepsAccessStImmMem(Insn &insn, bool isDest)
{
    if (isDest) {
        /*
         * Heap memory
         * Build anti dependences.
         */
        AddDependence4InsnInVectorByType(heapUses, insn, kDependenceTypeAnti);
        /* Build output dependence. */
        AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeOutput);
        heapDefs.emplace_back(&insn);
    } else {
        /* Heap memory */
        AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeTrue);
        heapUses.emplace_back(&insn);
    }
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
}

/* Build dependences of stack memory and heap memory uses. */
void AArch64DepAnalysis::BuildDepsUseMem(Insn &insn, MemOperand &aarchMemOpnd)
{
    RegOperand *baseRegister = aarchMemOpnd.GetBaseRegister();
    MemOperand *nextMemOpnd = GetNextMemOperand(insn, aarchMemOpnd);

    aarchMemOpnd.SetAccessSize(insn.GetMemoryByteSize());
    /* Stack memory address */
    for (auto defInsn : stackDefs) {
        if (defInsn->IsCall() || NeedBuildDepsMem(aarchMemOpnd, nextMemOpnd, *defInsn)) {
            AddDependence(*defInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeTrue);
        }
    }
    /* Heap memory */
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeTrue);
    if (((baseRegister != nullptr) && IsFrameReg(*baseRegister)) || aarchMemOpnd.IsStackMem()) {
        stackUses.emplace_back(&insn);
    } else {
        heapUses.emplace_back(&insn);
    }
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
}

static bool NoAlias(const MemOperand &leftOpnd, const MemOperand &rightOpnd)
{
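    /* Conservative disambiguation: only base+offset (BOi) accesses with an
     * intact index mode, at least one of them FP-based, are compared; distinct
     * constant offsets imply no alias. Everything else is assumed to alias. */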
    if (leftOpnd.GetAddrMode() == MemOperand::kAddrModeBOi && rightOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
        leftOpnd.GetIndexOpt() == MemOperand::kIntact && rightOpnd.GetIndexOpt() == MemOperand::kIntact) {
        if (leftOpnd.GetBaseRegister()->GetRegisterNumber() == RFP ||
            rightOpnd.GetBaseRegister()->GetRegisterNumber() == RFP) {
            Operand *ofstOpnd = leftOpnd.GetOffsetOperand();
            Operand *rofstOpnd = rightOpnd.GetOffsetOperand();
            DEBUG_ASSERT(ofstOpnd != nullptr, "offset operand should not be null.");
            DEBUG_ASSERT(rofstOpnd != nullptr, "offset operand should not be null.");
            ImmOperand *ofst = static_cast<ImmOperand *>(ofstOpnd);
            ImmOperand *rofst = static_cast<ImmOperand *>(rofstOpnd);
            DEBUG_ASSERT(ofst != nullptr, "CG internal error, invalid type.");
            DEBUG_ASSERT(rofst != nullptr, "CG internal error, invalid type.");
            return (!ofst->ValueEquals(*rofst));
        }
    }
    return false;
}

static bool NoOverlap(const MemOperand &leftOpnd, const MemOperand &rightOpnd)
{
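    /* Two FP-based accesses do not overlap when their
     * [offset, offset + accessSize) intervals are disjoint. */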
    if (leftOpnd.GetAddrMode() != MemOperand::kAddrModeBOi || rightOpnd.GetAddrMode() != MemOperand::kAddrModeBOi ||
        leftOpnd.GetIndexOpt() != MemOperand::kIntact || rightOpnd.GetIndexOpt() != MemOperand::kIntact) {
        return false;
    }
    if (leftOpnd.GetBaseRegister()->GetRegisterNumber() != RFP ||
        rightOpnd.GetBaseRegister()->GetRegisterNumber() != RFP) {
        return false;
    }
    int64 offset1 = leftOpnd.GetOffsetOperand()->GetValue();
    int64 offset2 = rightOpnd.GetOffsetOperand()->GetValue();
    if (offset1 < offset2) {
        return ((offset1 + leftOpnd.GetAccessSize()) <= offset2);
    } else {
        return ((offset2 + rightOpnd.GetAccessSize()) <= offset1);
    }
}

/* Return true if memInsn's memOpnd may alias with memOpnd or nextMemOpnd. */
bool AArch64DepAnalysis::NeedBuildDepsMem(const MemOperand &memOpnd, const MemOperand *nextMemOpnd,
                                          const Insn &memInsn) const
{
    auto *memOpndOfmemInsn = static_cast<MemOperand *>(memInsn.GetMemOpnd());
    if (!NoAlias(memOpnd, *memOpndOfmemInsn) ||
        ((nextMemOpnd != nullptr) && !NoAlias(*nextMemOpnd, *memOpndOfmemInsn))) {
        return true;
    }
    if (cgFunc.GetMirModule().GetSrcLang() == kSrcLangC && !memInsn.IsCall()) {
        static_cast<MemOperand *>(memInsn.GetMemOpnd())->SetAccessSize(memInsn.GetMemoryByteSize());
        return (!NoOverlap(memOpnd, *memOpndOfmemInsn));
    }
    MemOperand *nextMemOpndOfmemInsn = GetNextMemOperand(memInsn, *memOpndOfmemInsn);
    if (nextMemOpndOfmemInsn != nullptr) {
        if (!NoAlias(memOpnd, *nextMemOpndOfmemInsn) ||
            ((nextMemOpnd != nullptr) && !NoAlias(*nextMemOpnd, *nextMemOpndOfmemInsn))) {
            return true;
        }
    }
    return false;
}

/*
 * Build anti dependences between insn and other insns that use stack memory.
 * insn        : the instruction that defines stack memory.
 * memOpnd     : insn's memOpnd.
 * nextMemOpnd : the second memory location accessed by a memory-pair instruction (like ldp/stp).
 */
void AArch64DepAnalysis::BuildAntiDepsDefStackMem(Insn &insn, MemOperand &memOpnd, const MemOperand *nextMemOpnd)
{
    memOpnd.SetAccessSize(insn.GetMemoryByteSize());
    for (auto *useInsn : stackUses) {
        if (NeedBuildDepsMem(memOpnd, nextMemOpnd, *useInsn)) {
            AddDependence(*useInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeAnti);
        }
    }
}

/*
 * Build output dependences between insn and other insns that define stack memory.
 * insn        : the instruction that defines stack memory.
 * memOpnd     : insn's memOpnd.
 * nextMemOpnd : the second memory location accessed by a memory-pair instruction (like ldp/stp).
 */
void AArch64DepAnalysis::BuildOutputDepsDefStackMem(Insn &insn, MemOperand &memOpnd, const MemOperand *nextMemOpnd)
{
    memOpnd.SetAccessSize(insn.GetMemoryByteSize());
    for (auto defInsn : stackDefs) {
        if (defInsn->IsCall() || NeedBuildDepsMem(memOpnd, nextMemOpnd, *defInsn)) {
            AddDependence(*defInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeOutput);
        }
    }
}

/* Build dependences of stack memory and heap memory definitions. */
void AArch64DepAnalysis::BuildDepsDefMem(Insn &insn, MemOperand &aarchMemOpnd)
{
    RegOperand *baseRegister = aarchMemOpnd.GetBaseRegister();
    MemOperand *nextMemOpnd = GetNextMemOperand(insn, aarchMemOpnd);

    /* Build anti dependences. */
    BuildAntiDepsDefStackMem(insn, aarchMemOpnd, nextMemOpnd);
    /* Build output dependence. */
    BuildOutputDepsDefStackMem(insn, aarchMemOpnd, nextMemOpnd);
    if (lastCallInsn != nullptr) {
        /* Build a dependence between stack-passed arguments and the call. */
        DEBUG_ASSERT(baseRegister != nullptr, "baseRegister shouldn't be null here");
        if (baseRegister->GetRegisterNumber() == RSP) {
            AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
        }
    }

    /* Heap memory
     * Build anti dependences.
     */
    AddDependence4InsnInVectorByType(heapUses, insn, kDependenceTypeAnti);
    /* Build output dependence. */
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeOutput);

    if (((baseRegister != nullptr) && IsFrameReg(*baseRegister)) || aarchMemOpnd.IsStackMem()) {
        stackDefs.emplace_back(&insn);
    } else {
        heapDefs.emplace_back(&insn);
    }
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
    /* Memory definitions cannot be moved across may-throw insns. */
    AddDependence4InsnInVectorByType(mayThrows, insn, kDependenceTypeThrow);
}

/* Build dependences of memory barrier instructions. */
void AArch64DepAnalysis::BuildDepsMemBar(Insn &insn)
{
    AddDependence4InsnInVectorByTypeAndCmp(stackUses, insn, kDependenceTypeMembar);
    AddDependence4InsnInVectorByTypeAndCmp(heapUses, insn, kDependenceTypeMembar);
    AddDependence4InsnInVectorByTypeAndCmp(stackDefs, insn, kDependenceTypeMembar);
    AddDependence4InsnInVectorByTypeAndCmp(heapDefs, insn, kDependenceTypeMembar);
    memBarInsn = &insn;
}

/* A pseudo separator node depends on all the other nodes. */
void AArch64DepAnalysis::BuildDepsSeparator(DepNode &newSepNode, MapleVector<DepNode *> &nodes)
{
    uint32 nextSepIndex = (separatorIndex + kMaxDependenceNum) < nodes.size() ? (separatorIndex + kMaxDependenceNum)
                                                                              : static_cast<uint32>(nodes.size() - 1);
    newSepNode.ReservePreds(nextSepIndex - separatorIndex);
    newSepNode.ReserveSuccs(nextSepIndex - separatorIndex);
    for (uint32 i = separatorIndex; i < nextSepIndex; ++i) {
        AddDependence(*nodes[i], newSepNode, kDependenceTypeSeparator);
    }
}

/* Build control dependences for branch/ret instructions. */
void AArch64DepAnalysis::BuildDepsControlAll(DepNode &depNode, const MapleVector<DepNode *> &nodes)
{
    for (uint32 i = separatorIndex; i < depNode.GetIndex(); ++i) {
        AddDependence(*nodes[i], depNode, kDependenceTypeControl);
    }
}

/*
 * Build dependences of call instructions.
 * Caller-saved physical registers are defined by a call instruction.
 * The condition register may also be modified by a call.
 */
void AArch64DepAnalysis::BuildCallerSavedDeps(Insn &insn)
{
    /* Build anti dependences and output dependences. */
    for (uint32 i = R0; i <= R7; ++i) {
        BuildDepsDefReg(insn, i);
    }
    for (uint32 i = V0; i <= V7; ++i) {
        BuildDepsDefReg(insn, i);
    }
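    /* After RA, the remaining caller-saved physical registers (R8-R18,
     * V16-V31) are also treated as defined, and LR through SP as used,
     * by the call. */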
    if (!beforeRA) {
        for (uint32 i = R8; i <= R18; ++i) {
            BuildDepsDefReg(insn, i);
        }
        for (uint32 i = RLR; i <= RSP; ++i) {
            BuildDepsUseReg(insn, i);
        }
        for (uint32 i = V16; i <= V31; ++i) {
            BuildDepsDefReg(insn, i);
        }
    }
    /* For condition operand, such as NE, EQ, and so on. */
    if (cgFunc.GetRflag() != nullptr) {
        BuildDepsDefReg(insn, kRFLAG);
    }
}

/*
 * Build a dependence between the control register and the last call instruction.
 * insn   : an instruction with a control register operand.
 * isDest : whether the control register operand is a destination operand.
 */
void AArch64DepAnalysis::BuildDepsBetweenControlRegAndCall(Insn &insn, bool isDest)
{
    if (lastCallInsn == nullptr) {
        return;
    }
    if (isDest) {
        AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeOutput);
        return;
    }
    AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeAnti);
}

/*
 * Build dependences between the stack-defining instructions that set up a call's arguments
 * and the call instruction itself.
 * insn : a call instruction (call/tail-call).
 */
void AArch64DepAnalysis::BuildStackPassArgsDeps(Insn &insn)
{
    for (auto stackDefInsn : stackDefs) {
        if (stackDefInsn->IsCall()) {
            continue;
        }
        Operand *opnd = stackDefInsn->GetMemOpnd();
        DEBUG_ASSERT(opnd->IsMemoryAccessOperand(), "make sure opnd is memOpnd");
        MemOperand *memOpnd = static_cast<MemOperand *>(opnd);
        RegOperand *baseReg = memOpnd->GetBaseRegister();
        if ((baseReg != nullptr) && (baseReg->GetRegisterNumber() == RSP)) {
            AddDependence(*stackDefInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
        }
    }
}

/* Some insns may dirty all stack memory, such as "bl MCC_InitializeLocalStackRef". */
void AArch64DepAnalysis::BuildDepsDirtyStack(Insn &insn)
{
    /* Build anti dependences. */
    AddDependence4InsnInVectorByType(stackUses, insn, kDependenceTypeAnti);
    /* Build output dependence. */
    AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeOutput);
    stackDefs.emplace_back(&insn);
}

/* Some call insns may use all stack memory, such as "bl MCC_CleanupLocalStackRef_NaiveRCFast". */
void AArch64DepAnalysis::BuildDepsUseStack(Insn &insn)
{
    /* Build true dependences. */
    AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeTrue);
}

/* Some insns may dirty all heap memory, such as a call insn. */
void AArch64DepAnalysis::BuildDepsDirtyHeap(Insn &insn)
{
    /* Build anti dependences. */
    AddDependence4InsnInVectorByType(heapUses, insn, kDependenceTypeAnti);
    /* Build output dependence. */
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeOutput);
    if (memBarInsn != nullptr) {
        AddDependence(*memBarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
    heapDefs.emplace_back(&insn);
}

/* Build a pseudo node to separate the dependence graph. */
DepNode *AArch64DepAnalysis::BuildSeparatorNode()
{
    Insn &pseudoSepInsn = cgFunc.GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_pseudo_dependence_seperator);
    DepNode *separatorNode = memPool.New<DepNode>(pseudoSepInsn, alloc);
    separatorNode->SetType(kNodeTypeSeparator);
    pseudoSepInsn.SetDepNode(*separatorNode);
    if (beforeRA) {
        RegPressure *regPressure = memPool.New<RegPressure>(alloc);
        separatorNode->SetRegPressure(*regPressure);
        separatorNode->InitPressure();
    }
    return separatorNode;
}

/* Initialize depAnalysis data structures. */
void AArch64DepAnalysis::Init(BB &bb, MapleVector<DepNode *> &nodes)
{
    curBB = &bb;
    ClearAllDepData();
    lastComments.clear();
    /* Analyze live-in registers in catch BBs. */
    AnalysisAmbiInsns(bb);
    /* Clear all dependence nodes and push the first separator node. */
    nodes.clear();
    DepNode *pseudoSepNode = BuildSeparatorNode();
    nodes.emplace_back(pseudoSepNode);
    separatorIndex = 0;

    if (beforeRA) {
        /* Assume the first pseudo_dependence_seperator insn of the current bb defines the live-in registers. */
        Insn *pseudoSepInsn = pseudoSepNode->GetInsn();
        for (auto &regNO : bb.GetLiveInRegNO()) {
            regDefs[regNO] = pseudoSepInsn;
            pseudoSepNode->AddDefReg(regNO);
            pseudoSepNode->SetRegDefs(pseudoSepNode->GetDefRegnos().size(), nullptr);
        }
    }
}

/* When a separator is built, the state is reset as if a new basic block begins. */
void AArch64DepAnalysis::ClearAllDepData()
{
    uint32 maxRegNum;
    if (beforeRA) {
        maxRegNum = cgFunc.GetMaxVReg();
    } else {
        maxRegNum = kAllRegNum;
    }
    errno_t ret = memset_s(regDefs, sizeof(Insn *) * maxRegNum, 0, sizeof(Insn *) * maxRegNum);
    CHECK_FATAL(ret == EOK, "call memset_s failed in Unit");
    ret = memset_s(regUses, sizeof(RegList *) * maxRegNum, 0, sizeof(RegList *) * maxRegNum);
    CHECK_FATAL(ret == EOK, "call memset_s failed in Unit");
    memBarInsn = nullptr;
    lastCallInsn = nullptr;
    lastFrameDef = nullptr;

    stackUses.clear();
    stackDefs.clear();
    heapUses.clear();
    heapDefs.clear();
    mayThrows.clear();
    ambiInsns.clear();
}

/* Analyze live-in registers in catch bbs and the cleanup bb. */
void AArch64DepAnalysis::AnalysisAmbiInsns(BB &bb)
{
    hasAmbiRegs = false;
    if (bb.GetEhSuccs().empty()) {
        return;
    }

    /* Union all catch bbs. */
    for (auto succBB : bb.GetEhSuccs()) {
        const MapleSet<regno_t> &liveInRegSet = succBB->GetLiveInRegNO();
        set_union(liveInRegSet.begin(), liveInRegSet.end(), ehInRegs.begin(), ehInRegs.end(),
                  inserter(ehInRegs, ehInRegs.begin()));
    }

    /* Union the cleanup entry bb. */
    const MapleSet<regno_t> &regNOSet = cgFunc.GetCleanupEntryBB()->GetLiveInRegNO();
    std::set_union(regNOSet.begin(), regNOSet.end(), ehInRegs.begin(), ehInRegs.end(),
                   inserter(ehInRegs, ehInRegs.begin()));

    /* Subtract R0 and R1, which are defined by the eh runtime. */
    ehInRegs.erase(R0);
    ehInRegs.erase(R1);
    if (ehInRegs.empty()) {
        return;
    }
    hasAmbiRegs = true;
}

/* Check if regNO is in ehInRegs. */
bool AArch64DepAnalysis::IfInAmbiRegs(regno_t regNO) const
{
    if (!hasAmbiRegs) {
        return false;
    }
    if (ehInRegs.find(regNO) != ehInRegs.end()) {
        return true;
    }
    return false;
}

static bool IsYieldPoint(Insn &insn)
{
    /*
     * It is a yieldpoint if loading from a dedicated
     * register holding the polling page address:
     * ldr  wzr, [RYP]
     */
    if (insn.IsLoad() && !insn.IsLoadLabel()) {
        auto mem = static_cast<MemOperand *>(insn.GetMemOpnd());
        return (mem != nullptr && mem->GetBaseRegister() != nullptr &&
                mem->GetBaseRegister()->GetRegisterNumber() == RYP);
    }
    return false;
}

/*
 * Build dependences of a memory operand.
 * insn    : an instruction with a memory access operand.
 * opnd    : the memory access operand.
 * regProp : the operand descriptor of the memory access operand.
 */
void AArch64DepAnalysis::BuildMemOpndDependency(Insn &insn, Operand &opnd, const OpndDesc &regProp)
{
    DEBUG_ASSERT(opnd.IsMemoryAccessOperand(), "opnd must be memory Operand");
    MemOperand *memOpnd = static_cast<MemOperand *>(&opnd);
    RegOperand *baseRegister = memOpnd->GetBaseRegister();
    if (baseRegister != nullptr) {
        regno_t regNO = baseRegister->GetRegisterNumber();
        BuildDepsUseReg(insn, regNO);
        if ((memOpnd->GetAddrMode() == MemOperand::kAddrModeBOi) &&
            (memOpnd->IsPostIndexed() || memOpnd->IsPreIndexed())) {
            /* The base register is written back. */
            BuildDepsDefReg(insn, regNO);
        }
    }
    RegOperand *indexRegister = memOpnd->GetIndexRegister();
    if (indexRegister != nullptr) {
        regno_t regNO = indexRegister->GetRegisterNumber();
        BuildDepsUseReg(insn, regNO);
    }
    if (regProp.IsUse()) {
        BuildDepsUseMem(insn, *memOpnd);
    } else {
        BuildDepsDefMem(insn, *memOpnd);
        BuildDepsAmbiInsn(insn);
    }
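    /* A yieldpoint also acts as a memory barrier and defines the condition flags. */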
    if (IsYieldPoint(insn)) {
        BuildDepsMemBar(insn);
        BuildDepsDefReg(insn, kRFLAG);
    }
}

/* Build dependences for each operand of insn. */
void AArch64DepAnalysis::BuildOpndDependency(Insn &insn)
{
    const InsnDesc *md = insn.GetDesc();
    MOperator mOp = insn.GetMachineOpcode();
    uint32 opndNum = insn.GetOperandSize();
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = insn.GetOperand(i);
        const OpndDesc *regProp = md->opndMD[i];
        if (opnd.IsMemoryAccessOperand()) {
            BuildMemOpndDependency(insn, opnd, *regProp);
        } else if (opnd.IsStImmediate()) {
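            /* MOP_xadrpl12 only materializes an address and accesses no memory,
             * so it needs no memory dependences. */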
            if (mOp != MOP_xadrpl12) {
                BuildDepsAccessStImmMem(insn, false);
            }
        } else if (opnd.IsRegister()) {
            RegOperand &regOpnd = static_cast<RegOperand &>(opnd);
            regno_t regNO = regOpnd.GetRegisterNumber();

            if (regProp->IsUse()) {
                BuildDepsUseReg(insn, regNO);
            }

            if (regProp->IsDef()) {
                BuildDepsDefReg(insn, regNO);
            }
        } else if (opnd.IsConditionCode()) {
            /* For condition operand, such as NE, EQ, and so on. */
            if (regProp->IsUse()) {
                BuildDepsUseReg(insn, kRFLAG);
                BuildDepsBetweenControlRegAndCall(insn, false);
            }

            if (regProp->IsDef()) {
                BuildDepsDefReg(insn, kRFLAG);
                BuildDepsBetweenControlRegAndCall(insn, true);
            }
        } else if (opnd.IsList()) {
            ListOperand &listOpnd = static_cast<ListOperand &>(opnd);
            /* Build true dependences. */
            for (auto lst : listOpnd.GetOperands()) {
                regno_t regNO = lst->GetRegisterNumber();
                BuildDepsUseReg(insn, regNO);
            }
        }
    }
}

static bool IsLazyLoad(MOperator op)
{
    return (op == MOP_lazy_ldr) || (op == MOP_lazy_ldr_static) || (op == MOP_lazy_tail);
}

/*
 * Build dependences for special cases (stack/heap/throw/clinit/lazy binding/control flow).
 * insn    : an instruction.
 * depNode : insn's depNode.
 * nodes   : the dependence nodes, including insn's depNode.
 */
void AArch64DepAnalysis::BuildSpecialInsnDependency(Insn &insn, DepNode &depNode, const MapleVector<DepNode *> &nodes)
{
    const InsnDesc *md = insn.GetDesc();
    MOperator mOp = insn.GetMachineOpcode();
    if (insn.IsCall() || insn.IsTailCall()) {
        /* Caller-saved registers. */
        BuildCallerSavedDeps(insn);
        BuildStackPassArgsDeps(insn);

        if (mOp == MOP_xbl) {
            FuncNameOperand &target = static_cast<FuncNameOperand &>(insn.GetOperand(0));
            if ((target.GetName() == "MCC_InitializeLocalStackRef") || (target.GetName() == "MCC_ClearLocalStackRef") ||
                (target.GetName() == "MCC_DecRefResetPair")) {
                /* Writes stack memory. */
                BuildDepsDirtyStack(insn);
            } else if ((target.GetName() == "MCC_CleanupLocalStackRef_NaiveRCFast") ||
                       (target.GetName() == "MCC_CleanupLocalStackRefSkip_NaiveRCFast") ||
                       (target.GetName() == "MCC_CleanupLocalStackRefSkip")) {
                /* Uses stack memory. */
                BuildDepsUseStack(insn);
            } else if (cgFunc.GetMirModule().GetSrcLang() == kSrcLangC) {
                /* Potential C aliasing. */
                BuildDepsDirtyStack(insn);
            }
        }
        BuildDepsDirtyHeap(insn);
        BuildDepsAmbiInsn(insn);
        if (lastCallInsn != nullptr) {
            AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
        }
        lastCallInsn = &insn;
    } else if (insn.IsClinit() || IsLazyLoad(insn.GetMachineOpcode()) ||
               insn.GetMachineOpcode() == MOP_arrayclass_cache_ldr) {
        BuildDepsDirtyHeap(insn);
        BuildDepsDefReg(insn, kRFLAG);
        if (insn.GetMachineOpcode() != MOP_adrp_ldr) {
            BuildDepsDefReg(insn, R16);
            BuildDepsDefReg(insn, R17);
        }
    } else if ((mOp == MOP_xret) || md->IsBranch()) {
        BuildDepsControlAll(depNode, nodes);
    } else if (insn.IsMemAccessBar()) {
        BuildDepsMemBar(insn);
    } else if (insn.IsSpecialIntrinsic()) {
        BuildDepsDirtyHeap(insn);
    }
}

/*
 * If the number of instructions in the current basic block exceeds kMaxDependenceNum,
 * insert pseudo separator nodes to split the basic block.
 */
void AArch64DepAnalysis::SeperateDependenceGraph(MapleVector<DepNode *> &nodes, uint32 &nodeSum)
{
    if ((nodeSum > 0) && ((nodeSum % kMaxDependenceNum) == 0)) {
        DEBUG_ASSERT(nodeSum == nodes.size(), "CG internal error, nodeSum should equal to nodes.size.");
        /* Add a pseudo node to separate the dependence graph. */
        DepNode *separatorNode = BuildSeparatorNode();
        separatorNode->SetIndex(nodeSum);
        nodes.emplace_back(separatorNode);
        BuildDepsSeparator(*separatorNode, nodes);

        if (beforeRA) {
            /* For all live-out registers of the current bb. */
            for (auto &regNO : curBB->GetLiveOutRegNO()) {
                if (regDefs[regNO] != nullptr) {
                    AppendRegUseList(*(separatorNode->GetInsn()), regNO);
                    separatorNode->AddUseReg(regNO);
                    separatorNode->SetRegUses(*regUses[regNO]);
                }
            }
        }
        ClearAllDepData();
        separatorIndex = nodeSum++;
    }
}

/*
 * Generate a depNode.
 * insn     : the instruction to create a depNode for.
 * nodes    : a vector to store depNodes.
 * nodeSum  : the new depNode's index.
 * comments : the comment insns between the previous non-comment insn and insn.
 */
DepNode *AArch64DepAnalysis::GenerateDepNode(Insn &insn, MapleVector<DepNode *> &nodes, int32 nodeSum,
                                             const MapleVector<Insn *> &comments)
{
    DepNode *depNode = nullptr;
    Reservation *rev = mad.FindReservation(insn);
    DEBUG_ASSERT(rev != nullptr, "rev is nullptr");
    depNode = memPool.New<DepNode>(insn, alloc, rev->GetUnit(), rev->GetUnitNum(), *rev);
    if (beforeRA) {
        RegPressure *regPressure = memPool.New<RegPressure>(alloc);
        depNode->SetRegPressure(*regPressure);
        depNode->InitPressure();
    }
    depNode->SetIndex(nodeSum);
    nodes.emplace_back(depNode);
    insn.SetDepNode(*depNode);

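    /* Heuristic initial capacity for the pred/succ edge vectors. */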
    constexpr size_t vectorSize = 5;
    depNode->ReservePreds(vectorSize);
    depNode->ReserveSuccs(vectorSize);

    if (!comments.empty()) {
        depNode->SetComments(comments);
    }
    return depNode;
}

void AArch64DepAnalysis::BuildAmbiInsnDependency(Insn &insn)
{
    const auto &defRegnos = insn.GetDepNode()->GetDefRegnos();
    for (const auto &regNO : defRegnos) {
        if (IfInAmbiRegs(regNO)) {
            BuildDepsAmbiInsn(insn);
            break;
        }
    }
}

void AArch64DepAnalysis::BuildMayThrowInsnDependency(Insn &insn)
{
    /* Build dependences for a may-throw insn. */
    if (insn.MayThrow()) {
        BuildDepsMayThrowInsn(insn);
        if (lastFrameDef != nullptr) {
            AddDependence(*lastFrameDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeThrow);
        }
    }
}

void AArch64DepAnalysis::UpdateRegUseAndDef(Insn &insn, const DepNode &depNode, MapleVector<DepNode *> &nodes)
{
    const auto &useRegnos = depNode.GetUseRegnos();
    if (beforeRA) {
        depNode.InitRegUsesSize(useRegnos.size());
    }
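    /* Before RA, a use with no prior definition in this region is attributed
     * to the leading separator node, which stands in for live-in definitions. */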
    for (auto regNO : useRegnos) {
        AppendRegUseList(insn, regNO);
        if (beforeRA) {
            depNode.SetRegUses(*regUses[regNO]);
            if (regDefs[regNO] == nullptr) {
                regDefs[regNO] = nodes[separatorIndex]->GetInsn();
                nodes[separatorIndex]->AddDefReg(regNO);
                nodes[separatorIndex]->SetRegDefs(nodes[separatorIndex]->GetDefRegnos().size(), regUses[regNO]);
            }
        }
    }

    const auto &defRegnos = depNode.GetDefRegnos();
    size_t i = 0;
    if (beforeRA) {
        depNode.InitRegDefsSize(defRegnos.size());
    }
    for (const auto regNO : defRegnos) {
        regDefs[regNO] = &insn;
        regUses[regNO] = nullptr;
        if (beforeRA) {
            depNode.SetRegDefs(i, nullptr);
            if (regNO >= R0 && regNO <= R3) {
                depNode.SetHasPreg(true);
            } else if (regNO == R8) {
                depNode.SetHasNativeCallRegister(true);
            }
        }
        ++i;
    }
}

/* Update stack and heap dependences. */
void AArch64DepAnalysis::UpdateStackAndHeapDependency(DepNode &depNode, Insn &insn, const Insn &locInsn)
{
    if (!insn.MayThrow()) {
        return;
    }
    depNode.SetLocInsn(locInsn);
    mayThrows.emplace_back(&insn);
    AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeThrow);
    AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeThrow);
}

/*
 * Add a separatorNode to the end of nodes.
 * Before RA: add all live-out registers to this separatorNode's uses.
 */
void AArch64DepAnalysis::AddEndSeparatorNode(MapleVector<DepNode *> &nodes)
{
    DepNode *separatorNode = BuildSeparatorNode();
    nodes.emplace_back(separatorNode);
    BuildDepsSeparator(*separatorNode, nodes);

    if (beforeRA) {
        /* For all live-out registers of the current bb. */
        for (auto &regNO : curBB->GetLiveOutRegNO()) {
            if (regDefs[regNO] != nullptr) {
                AppendRegUseList(*(separatorNode->GetInsn()), regNO);
                separatorNode->AddUseReg(regNO);
                separatorNode->SetRegUses(*regUses[regNO]);
            }
        }
    }
}

/*
 * Build the dependence graph.
 * 1: Build dependence nodes.
 * 2: Build edges between dependence nodes. Edges are:
 *   2.1) True dependences
 *   2.2) Anti dependences
 *   2.3) Output dependences
 *   2.4) Barrier dependences
 */
void AArch64DepAnalysis::Run(BB &bb, MapleVector<DepNode *> &nodes)
{
    /* Initialize internal data. */
    Init(bb, nodes);
    uint32 nodeSum = 1;
    MapleVector<Insn *> comments(alloc.Adapter());
    const Insn *locInsn = bb.GetFirstLoc();
    FOR_BB_INSNS(insn, (&bb)) {
        if (!insn->IsMachineInstruction()) {
            if (insn->IsImmaterialInsn()) {
                if (!insn->IsComment()) {
                    locInsn = insn;
                } else {
                    comments.emplace_back(insn);
                }
            } else if (insn->IsCfiInsn()) {
                if (!nodes.empty()) {
                    nodes.back()->AddCfiInsn(*insn);
                }
            }
            continue;
        }
        /* Add a pseudo node to separate the dependence graph when appropriate. */
        SeperateDependenceGraph(nodes, nodeSum);
        /* Generate a depNode. */
        DepNode *depNode = GenerateDepNode(*insn, nodes, nodeSum, comments);
        ++nodeSum;
        comments.clear();
        /* Build dependences for a may-throw insn. */
        BuildMayThrowInsnDependency(*insn);
        /* Build dependences for each operand of insn. */
        BuildOpndDependency(*insn);
        /* Build dependences for special insns. */
        BuildSpecialInsnDependency(*insn, *depNode, nodes);
        /* Build dependences for ambiguous insns if needed. */
        BuildAmbiInsnDependency(*insn);
        /* Update stack and heap dependences. */
        UpdateStackAndHeapDependency(*depNode, *insn, *locInsn);
        if (insn->IsFrameDef()) {
            lastFrameDef = insn;
        }
        /* A separator node always exists. */
        AddDependence(*nodes[separatorIndex], *insn->GetDepNode(), kDependenceTypeSeparator);
        /* Update register uses and defs. */
        UpdateRegUseAndDef(*insn, *depNode, nodes);
    }

    AddEndSeparatorNode(nodes);

    if (!comments.empty()) {
        lastComments = comments;
    }
    comments.clear();
}

/* Return the dependence type name. */
const std::string &AArch64DepAnalysis::GetDepTypeName(DepType depType) const
{
    DEBUG_ASSERT(depType <= kDependenceTypeNone, "array boundary check failed");
    return kDepTypeName[depType];
}
} /* namespace maplebe */