1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15 #include "aarch64_cg.h"
16 #ifndef ONLY_C
17 #include "pressure.h"
18 #endif
19 #include "cg_irbuilder.h"
20 #include "aarch64_mem_reference.h"
21 #include "aarch64_data_dep_base.h"
22
23 namespace maplebe {
24 /** It is a yield point if loading from a dedicated
25 * register holding polling page address:
26 * ldr wzr, [RYP]
27 */
28
IsLazyLoad(MOperator op)29 static bool IsLazyLoad(MOperator op)
30 {
31 return (op == MOP_lazy_ldr) || (op == MOP_lazy_ldr_static) || (op == MOP_lazy_tail);
32 }
33
IsFrameReg(const RegOperand & opnd) const34 bool AArch64DataDepBase::IsFrameReg(const RegOperand &opnd) const
35 {
36 return (opnd.GetRegisterNumber() == RFP) || (opnd.GetRegisterNumber() == RSP);
37 }
38
InitCDGNodeDataInfo(MemPool & mp,MapleAllocator & alloc,CDGNode & cdgNode)39 void AArch64DataDepBase::InitCDGNodeDataInfo(MemPool &mp, MapleAllocator &alloc, CDGNode &cdgNode)
40 {
41 uint32 maxRegNum = (cgFunc.IsAfterRegAlloc() ? AArch64reg::kAllRegNum : cgFunc.GetMaxVReg());
42 cdgNode.InitDataDepInfo(mp, alloc, maxRegNum);
43 }
44
// Returns the latest insn (recorded in the current CDGNode) that defines the
// base register of memInsn's memory operand, or nullptr when memInsn has no
// memory operand or the operand has no base register.
Insn *AArch64DataDepBase::GetMemBaseDefInsn(const Insn &memInsn) const
{
    auto *memOpnd = static_cast<MemOperand *>(memInsn.GetMemOpnd());
    if (memOpnd == nullptr) {
        return nullptr;
    }
    RegOperand *base = memOpnd->GetBaseRegister();
    return (base == nullptr) ? nullptr : curCDGNode->GetLatestDefInsn(base->GetRegisterNumber());
}
57
BuildDepsForMemDefCommon(Insn & insn,CDGNode & cdgNode)58 void AArch64DataDepBase::BuildDepsForMemDefCommon(Insn &insn, CDGNode &cdgNode)
59 {
60 // Stack memory
61 // Build anti dependency
62 MapleVector<Insn *> &stackUses = cdgNode.GetStackUseInsns();
63 for (auto *stackUse : stackUses) {
64 // the insn may be stack memory or heap use memory
65 if (AArch64MemReference::NeedBuildMemoryDependency(*stackUse, insn, GetMemBaseDefInsn(*stackUse),
66 GetMemBaseDefInsn(insn), kDependenceTypeAnti)) {
67 AddDependence(*stackUse->GetDepNode(), *insn.GetDepNode(), kDependenceTypeAnti);
68 }
69 }
70 // Build output dependency
71 MapleVector<Insn *> &stackDefs = cdgNode.GetStackDefInsns();
72 for (auto *stackDef : stackDefs) {
73 // the insn may be stack memory or heap use memory
74 if (AArch64MemReference::NeedBuildMemoryDependency(*stackDef, insn, GetMemBaseDefInsn(*stackDef),
75 GetMemBaseDefInsn(insn), kDependenceTypeOutput)) {
76 AddDependence(*stackDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeOutput);
77 }
78 }
79 // Heap memory
80 // Build anti dependency
81 MapleVector<Insn *> &heapUses = cdgNode.GetHeapUseInsns();
82 for (auto *heapUse : heapUses) {
83 if (AArch64MemReference::NeedBuildMemoryDependency(*heapUse, insn, GetMemBaseDefInsn(*heapUse),
84 GetMemBaseDefInsn(insn), kDependenceTypeAnti)) {
85 AddDependence(*heapUse->GetDepNode(), *insn.GetDepNode(), kDependenceTypeAnti);
86 }
87 }
88 // Build output dependency
89 MapleVector<Insn *> &heapDefs = cdgNode.GetHeapDefInsns();
90 for (auto *heapDef : heapDefs) {
91 if (AArch64MemReference::NeedBuildMemoryDependency(*heapDef, insn, GetMemBaseDefInsn(*heapDef),
92 GetMemBaseDefInsn(insn), kDependenceTypeOutput)) {
93 AddDependence(*heapDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeOutput);
94 }
95 }
96 }
97
BuildDepsForMemUseCommon(Insn & insn,CDGNode & cdgNode)98 void AArch64DataDepBase::BuildDepsForMemUseCommon(Insn &insn, CDGNode &cdgNode)
99 {
100 // Build dependency for stack memory access
101 MapleVector<Insn *> &stackDefs = cdgNode.GetStackDefInsns();
102 for (auto *stackDef : stackDefs) {
103 // The insn may be stack memory or heap memory
104 if ((stackDef->IsCall() && stackDef->GetMachineOpcode() != MOP_tls_desc_call) ||
105 AArch64MemReference::NeedBuildMemoryDependency(*stackDef, insn, GetMemBaseDefInsn(*stackDef),
106 GetMemBaseDefInsn(insn), kDependenceTypeTrue)) {
107 AddDependence(*stackDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeTrue);
108 }
109 }
110 // Build dependency for heap memory access
111 MapleVector<Insn *> &heapDefs = cdgNode.GetHeapDefInsns();
112 for (auto *heapDef : heapDefs) {
113 if (AArch64MemReference::NeedBuildMemoryDependency(*heapDef, insn, GetMemBaseDefInsn(*heapDef),
114 GetMemBaseDefInsn(insn), kDependenceTypeTrue)) {
115 AddDependence(*heapDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeTrue);
116 }
117 }
118 }
119
// Build data dependence of symbol memory access.
// Memory accesses with symbol must be a heap memory access.
void AArch64DataDepBase::BuildDepsAccessStImmMem(Insn &insn)
{
    // Add a mem-access edge against every conflicting earlier heap def.
    // NOTE(review): the conflict filter is queried with kDependenceTypeTrue
    // while the edge added is kDependenceTypeMemAccess — presumably intentional
    // (the edge marks the access rather than a value flow); confirm upstream.
    for (auto *heapDef : curCDGNode->GetHeapDefInsns()) {
        if (AArch64MemReference::NeedBuildMemoryDependency(*heapDef, insn, GetMemBaseDefInsn(*heapDef),
                                                           GetMemBaseDefInsn(insn), kDependenceTypeTrue)) {
            AddDependence(*heapDef->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMemAccess);
        }
    }

    // Record this insn as a heap memory use for later insns
    curCDGNode->AddHeapUseInsn(&insn);

    // Build dependency for membar insn
    Insn *membarInsn = curCDGNode->GetMembarInsn();
    if (membarInsn != nullptr) {
        AddDependence(*membarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    }
}
139
140 // Build data dependence of memory bars instructions
BuildDepsMemBar(Insn & insn)141 void AArch64DataDepBase::BuildDepsMemBar(Insn &insn)
142 {
143 if (isIntra || curRegion->GetRegionNodeSize() == 1 || curRegion->GetRegionRoot() == curCDGNode) {
144 AddDependence4InsnInVectorByTypeAndCmp(curCDGNode->GetStackUseInsns(), insn, kDependenceTypeMembar);
145 AddDependence4InsnInVectorByTypeAndCmp(curCDGNode->GetHeapUseInsns(), insn, kDependenceTypeMembar);
146 AddDependence4InsnInVectorByTypeAndCmp(curCDGNode->GetStackDefInsns(), insn, kDependenceTypeMembar);
147 AddDependence4InsnInVectorByTypeAndCmp(curCDGNode->GetHeapDefInsns(), insn, kDependenceTypeMembar);
148 } else if (curRegion->GetRegionRoot() != curCDGNode) {
149 BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), true, kDependenceTypeMembar, kStackUses);
150 BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), true, kDependenceTypeMembar, kHeapUses);
151 BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), true, kDependenceTypeMembar, kStackDefs);
152 BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), true, kDependenceTypeMembar, kHeapDefs);
153 }
154 curCDGNode->SetMembarInsn(&insn);
155 }
156
157 // Build data dependence of stack memory and heap memory read:
158 // for memOpnd, do not build the true dependency, and identify it by a special mem dependency.
BuildDepsUseMem(Insn & insn,MemOperand & memOpnd)159 void AArch64DataDepBase::BuildDepsUseMem(Insn &insn, MemOperand &memOpnd)
160 {
161 memOpnd.SetAccessSize(insn.GetMemoryByteSize());
162
163 if (isIntra || curRegion->GetRegionNodeSize() == 1 || curRegion->GetRegionRoot() == curCDGNode) {
164 BuildDepsForMemUseCommon(insn, *curCDGNode);
165 } else if (curRegion->GetRegionRoot() != curCDGNode) {
166 BuildInterBlockMemDefUseDependency(*insn.GetDepNode(), false);
167 }
168
169 // Record mem insn
170 RegOperand *baseRegister = memOpnd.GetBaseRegister();
171 if ((baseRegister != nullptr && IsFrameReg(*baseRegister)) || memOpnd.IsStackMem()) {
172 curCDGNode->AddStackUseInsn(&insn);
173 } else {
174 curCDGNode->AddHeapUseInsn(&insn);
175 }
176
177 // Build dependency for membar insn
178 Insn *membarInsn = curCDGNode->GetMembarInsn();
179 if (membarInsn != nullptr) {
180 AddDependence(*membarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
181 } else if (!isIntra && curRegion->GetRegionRoot() != curCDGNode) {
182 BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), false, kDependenceTypeMembar, kMembar);
183 }
184 }
185
// Build data dependency of stack memory and heap memory definitions
// We do not need build output dependence for write-write, because of transitivity:
// e.g.
//    write1 [mem1] ---
//      | access     |
//    read   [mem1]  X (transitivity)
//      | anti       |
//    write2 [mem1] ---
void AArch64DataDepBase::BuildDepsDefMem(Insn &insn, MemOperand &memOpnd)
{
    RegOperand *baseRegister = memOpnd.GetBaseRegister();
    ASSERT_NOT_NULL(baseRegister);
    memOpnd.SetAccessSize(insn.GetMemoryByteSize());

    // Anti/output dependencies on earlier memory accesses: search only the
    // current node, or walk the region's predecessor blocks for a non-root node.
    if (isIntra || curRegion->GetRegionNodeSize() == 1 || curRegion->GetRegionRoot() == curCDGNode) {
        BuildDepsForMemDefCommon(insn, *curCDGNode);
    } else if (curRegion->GetRegionRoot() != curCDGNode) {
        BuildInterBlockMemDefUseDependency(*insn.GetDepNode(), true);
    }

    // A store through SP may be writing stack-passed arguments of a call
    if (baseRegister->GetRegisterNumber() == RSP) {
        Insn *lastCallInsn = curCDGNode->GetLastCallInsn();
        if (lastCallInsn != nullptr && lastCallInsn->GetMachineOpcode() != MOP_tls_desc_call) {
            // Build a dependence between stack passed arguments and call
            AddDependence(*lastCallInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
        } else if (!isIntra && curRegion->GetRegionRoot() != curCDGNode) {
            // No local call seen: search the last call across the region
            BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), false, kDependenceTypeControl, kLastCall);
        }
    }

    // Build membar dependence
    Insn *membarInsn = curCDGNode->GetMembarInsn();
    if (membarInsn != nullptr) {
        AddDependence(*membarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
    } else if (!isIntra && curRegion->GetRegionRoot() != curCDGNode) {
        BuildInterBlockSpecialDataInfoDependency(*insn.GetDepNode(), false, kDependenceTypeMembar, kMembar);
    }

    // Update cur cdgNode info of def-memory insn
    if (IsFrameReg(*baseRegister) || memOpnd.IsStackMem()) {
        curCDGNode->AddStackDefInsn(&insn);
    } else {
        curCDGNode->AddHeapDefInsn(&insn);
    }
}
231
// Build dependence of call instructions.
// caller-saved physical registers will be defined by a call instruction.
// also a conditional register may be modified by a call.
void AArch64DataDepBase::BuildCallerSavedDeps(Insn &insn)
{
    // Build anti dependence and output dependence for the integer and
    // FP/SIMD argument registers, which a call may clobber.
    for (uint32 i = R0; i <= R9; ++i) {
        BuildDepsDefReg(insn, i);
    }
    for (uint32 i = V0; i <= V7; ++i) {
        BuildDepsDefReg(insn, i);
    }
    if (!beforeRA) {
        // After register allocation, also cover the remaining caller-saved
        // physical registers.
        // NOTE(review): this range starts at R9, which overlaps the R0..R9
        // loop above — presumably harmless duplication; confirm intended.
        for (uint32 i = R9; i <= R18; ++i) {
            BuildDepsDefReg(insn, i);
        }
        // LR and SP are read by the call sequence
        for (uint32 i = RLR; i <= RSP; ++i) {
            BuildDepsUseReg(insn, i);
        }
        for (uint32 i = V16; i <= V31; ++i) {
            BuildDepsDefReg(insn, i);
        }
    }
    /* For condition operand, such as NE, EQ, and so on. */
    if (cgFunc.GetRflag() != nullptr) {
        BuildDepsDefReg(insn, kRFLAG);
    }
}
260
261 // Some insns may dirty all stack memory, such as "bl MCC_InitializeLocalStackRef"
BuildDepsDirtyStack(Insn & insn)262 void AArch64DataDepBase::BuildDepsDirtyStack(Insn &insn)
263 {
264 /* Build anti dependence */
265 MapleVector<Insn *> &stackUses = curCDGNode->GetStackUseInsns();
266 AddDependence4InsnInVectorByType(stackUses, insn, kDependenceTypeAnti);
267 /* Build output dependence */
268 MapleVector<Insn *> &stackDefs = curCDGNode->GetStackDefInsns();
269 AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeOutput);
270 curCDGNode->AddStackDefInsn(&insn);
271 }
272
273 // Some call insns may use all stack memory, such as "bl MCC_CleanupLocalStackRef_NaiveRCFast"
BuildDepsUseStack(Insn & insn)274 void AArch64DataDepBase::BuildDepsUseStack(Insn &insn)
275 {
276 /* Build true dependence */
277 MapleVector<Insn *> &stackDefs = curCDGNode->GetStackDefInsns();
278 AddDependence4InsnInVectorByType(stackDefs, insn, kDependenceTypeTrue);
279 }
280
281 // Some insns may dirty all heap memory, such as a call insn
BuildDepsDirtyHeap(Insn & insn)282 void AArch64DataDepBase::BuildDepsDirtyHeap(Insn &insn)
283 {
284 // Build anti dependence
285 MapleVector<Insn *> &heapUses = curCDGNode->GetHeapUseInsns();
286 AddDependence4InsnInVectorByType(heapUses, insn, kDependenceTypeAnti);
287 // Build output dependence
288 MapleVector<Insn *> &heapDefs = curCDGNode->GetHeapDefInsns();
289 AddDependence4InsnInVectorByType(heapDefs, insn, kDependenceTypeOutput);
290
291 Insn *membarInsn = curCDGNode->GetMembarInsn();
292 if (membarInsn != nullptr) {
293 AddDependence(*membarInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeMembar);
294 }
295 curCDGNode->AddHeapDefInsn(&insn);
296 }
297
298 // Build data dependence of memory operand.
299 // insn : an instruction with the memory access operand.
300 // opnd : the memory access operand.
301 // regProp : operand property of the memory access operand.
BuildMemOpndDependency(Insn & insn,Operand & opnd,const OpndDesc & regProp)302 void AArch64DataDepBase::BuildMemOpndDependency(Insn &insn, Operand &opnd, const OpndDesc ®Prop)
303 {
304 DEBUG_ASSERT(opnd.IsMemoryAccessOperand(), "opnd must be memory Operand");
305 auto *memOpnd = static_cast<MemOperand *>(&opnd);
306
307 // Build dependency for register of memOpnd
308 RegOperand *baseRegister = memOpnd->GetBaseRegister();
309 if (baseRegister != nullptr) {
310 regno_t regNO = baseRegister->GetRegisterNumber();
311 BuildDepsUseReg(insn, regNO);
312 if (memOpnd->IsPostIndexed() || memOpnd->IsPreIndexed()) {
313 // Base operand has redefined
314 BuildDepsDefReg(insn, regNO);
315 }
316 }
317 RegOperand *indexRegister = memOpnd->GetIndexRegister();
318 if (indexRegister != nullptr) {
319 regno_t regNO = indexRegister->GetRegisterNumber();
320 BuildDepsUseReg(insn, regNO);
321 }
322
323 // Build dependency for mem access
324 if (regProp.IsUse()) {
325 BuildDepsUseMem(insn, *memOpnd);
326 } else {
327 BuildDepsDefMem(insn, *memOpnd);
328 }
329 }
330
331 // Build Dependency for each operand of insn
BuildOpndDependency(Insn & insn)332 void AArch64DataDepBase::BuildOpndDependency(Insn &insn)
333 {
334 const InsnDesc *md = insn.GetDesc();
335 MOperator mOp = insn.GetMachineOpcode();
336 uint32 opndNum = insn.GetOperandSize();
337 for (uint32 i = 0; i < opndNum; ++i) {
338 Operand &opnd = insn.GetOperand(i);
339 const OpndDesc *regProp = md->opndMD[i];
340 if (opnd.IsMemoryAccessOperand()) {
341 BuildMemOpndDependency(insn, opnd, *regProp);
342 } else if (opnd.IsStImmediate() && mOp != MOP_xadrpl12) {
343 BuildDepsAccessStImmMem(insn);
344 } else if (opnd.IsRegister()) {
345 auto ®Opnd = static_cast<RegOperand &>(opnd);
346 regno_t regNO = regOpnd.GetRegisterNumber();
347 if (regProp->IsUse()) {
348 BuildDepsUseReg(insn, regNO);
349 }
350 if (regProp->IsDef()) {
351 BuildDepsDefReg(insn, regNO);
352 }
353 } else if (opnd.IsConditionCode()) {
354 // For condition operand, such as NE, EQ, and so on.
355 if (regProp->IsUse()) {
356 BuildDepsUseReg(insn, kRFLAG);
357 BuildDepsBetweenControlRegAndCall(insn, false);
358 }
359 if (regProp->IsDef()) {
360 BuildDepsDefReg(insn, kRFLAG);
361 BuildDepsBetweenControlRegAndCall(insn, true);
362 }
363 } else if (opnd.IsList()) {
364 auto &listOpnd = static_cast<const ListOperand &>(opnd);
365 for (auto &lst : listOpnd.GetOperands()) {
366 regno_t regNO = lst->GetRegisterNumber();
367 BuildDepsUseReg(insn, regNO);
368 }
369 }
370 }
371 }
372
373 // Build dependencies for call insns which do not obey standard call procedure
BuildSpecialCallDeps(Insn & insn)374 void AArch64DataDepBase::BuildSpecialCallDeps(Insn &insn)
375 {
376 if (insn.IsSpecialCall()) {
377 // The runtime model uses to implement this originates in the IA-64 processor-specific ABI
378 // It is not available everywhere !!!
379 // for tls_desc_call, which clobber r0, r1, cc reg according to call convention rules,
380 // and the blr will write the LR reg.
381 if (insn.GetMachineOpcode() == MOP_tls_desc_call) {
382 BuildDepsDefReg(insn, RLR);
383 BuildDepsDefReg(insn, kRFLAG);
384 }
385 }
386 }
387
// Build dependencies in some special cases (stack/heap/throw/clinit/lazy binding/control flow)
void AArch64DataDepBase::BuildSpecialInsnDependency(Insn &insn, const MapleVector<DepNode *> &nodes)
{
    const InsnDesc *md = insn.GetDesc();
    MOperator mOp = insn.GetMachineOpcode();
    if (insn.IsCall() || insn.IsTailCall()) {
        // Build caller saved registers dependency
        BuildCallerSavedDeps(insn);
        // A call conservatively dirties all stack and heap memory
        BuildDepsDirtyStack(insn);
        BuildDepsDirtyHeap(insn);
        BuildDepsLastCallInsn(insn);
    } else if (insn.IsClinit() || IsLazyLoad(insn.GetMachineOpcode()) ||
               insn.GetMachineOpcode() == MOP_arrayclass_cache_ldr) {
        // Class-init, lazy-binding and array-class cache loads touch heap
        // memory and the condition flags
        BuildDepsDirtyHeap(insn);
        BuildDepsDefReg(insn, kRFLAG);
        // NOTE(review): these pseudo insns appear to expand to sequences that
        // clobber the scratch registers R16/R17, except MOP_adrp_ldr — confirm.
        if (insn.GetMachineOpcode() != MOP_adrp_ldr) {
            BuildDepsDefReg(insn, R16);
            BuildDepsDefReg(insn, R17);
        }
    } else if (mOp == MOP_xret || md->IsBranch()) {
        // Return or branch: control dependence on all nodes
        BuildDepsControlAll(insn, nodes);
    } else if (insn.IsMemAccessBar()) {
        BuildDepsMemBar(insn);
    } else if (insn.IsSpecialIntrinsic()) {
        BuildDepsDirtyHeap(insn);
    }
}
415
// Build dependencies around inline-asm insns: order every insn after the last
// asm insn, and build register def/use deps from the asm operand lists.
void AArch64DataDepBase::BuildAsmInsnDependency(Insn &insn)
{
    Insn *asmInsn = curCDGNode->GetLastInlineAsmInsn();
    if (asmInsn != nullptr) {
        // Due to the possible undefined behavior of users, we conservatively restrict
        // the instructions under the asm-insn to be moved above this instruction,
        // by building dependency edges on asm-insn and all subsequent instructions.
        // e.g.
        //  asm volatile ( "mov x2, %[a]\n\t"
        //                 "sub x2, x2, %[a]\n\t"
        //                 "orr x3, x2, %[a]\n\t"
        //                 :
        //                 : [a] "I" (1)
        //                 : "x2"
        //  It only identifies that clobber x2.
        AddDependence(*asmInsn->GetDepNode(), *insn.GetDepNode(), kDependenceTypeControl);
    }

    if (insn.IsAsmInsn()) {
        // Asm insn operand layout: 2nd = output list, 3rd = clobber list,
        // 4th = input list
        DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsList(), "invalid opnd of asm insn");
        DEBUG_ASSERT(insn.GetOperand(kInsnThirdOpnd).IsList(), "invalid opnd of asm insn");
        DEBUG_ASSERT(insn.GetOperand(kInsnFourthOpnd).IsList(), "invalid opnd of asm insn");
        auto &outputList = static_cast<ListOperand &>(insn.GetOperand(kInsnSecondOpnd));
        auto &clobberList = static_cast<ListOperand &>(insn.GetOperand(kInsnThirdOpnd));
        auto &inputList = static_cast<ListOperand &>(insn.GetOperand(kInsnFourthOpnd));
        // Outputs and clobbers are register defs
        for (auto *defOpnd : outputList.GetOperands()) {
            if (defOpnd == nullptr) {
                continue;
            }
            BuildDepsDefReg(insn, defOpnd->GetRegisterNumber());
        }
        for (auto *defOpnd : clobberList.GetOperands()) {
            if (defOpnd == nullptr) {
                continue;
            }
            BuildDepsDefReg(insn, defOpnd->GetRegisterNumber());
        }
        // Inputs are register uses
        for (auto *useOpnd : inputList.GetOperands()) {
            if (useOpnd == nullptr) {
                continue;
            }
            BuildDepsUseReg(insn, useOpnd->GetRegisterNumber());
        }
        // Remember this asm insn so later insns get ordered after it
        curCDGNode->SetLastInlineAsmInsn(&insn);
    }
}
462
BuildInterBlockMemDefUseDependency(DepNode & depNode,bool isMemDef)463 void AArch64DataDepBase::BuildInterBlockMemDefUseDependency(DepNode &depNode, bool isMemDef)
464 {
465 CHECK_FATAL(!isIntra, "must be inter block data dependence analysis");
466 CHECK_FATAL(curRegion->GetRegionRoot() != curCDGNode, "for the root node, cross-BB search is not required");
467 BB *curBB = curCDGNode->GetBB();
468 CHECK_FATAL(curBB != nullptr, "get bb from cdgNode failed");
469 std::vector<bool> visited(curRegion->GetMaxBBIdInRegion() + 1, false);
470 if (isMemDef) {
471 BuildPredPathMemDefDependencyDFS(*curBB, visited, depNode);
472 } else {
473 BuildPredPathMemUseDependencyDFS(*curBB, visited, depNode);
474 }
475 }
476
BuildPredPathMemDefDependencyDFS(BB & curBB,std::vector<bool> & visited,DepNode & depNode)477 void AArch64DataDepBase::BuildPredPathMemDefDependencyDFS(BB &curBB, std::vector<bool> &visited, DepNode &depNode)
478 {
479 if (visited[curBB.GetId()]) {
480 return;
481 }
482 CDGNode *cdgNode = curBB.GetCDGNode();
483 CHECK_FATAL(cdgNode != nullptr, "get cdgNode from bb failed");
484 CDGRegion *region = cdgNode->GetRegion();
485 CHECK_FATAL(region != nullptr, "get region from cdgNode failed");
486 if (region->GetRegionId() != curRegion->GetRegionId()) {
487 return;
488 }
489
490 visited[curBB.GetId()] = true;
491
492 BuildDepsForMemDefCommon(*depNode.GetInsn(), *cdgNode);
493
494 // Ignore back-edge
495 if (cdgNode == curRegion->GetRegionRoot()) {
496 return;
497 }
498
499 for (auto predIt = curBB.GetPredsBegin(); predIt != curBB.GetPredsEnd(); ++predIt) {
500 // Ignore back-edge of self-loop
501 if (*predIt != &curBB) {
502 BuildPredPathMemDefDependencyDFS(**predIt, visited, depNode);
503 }
504 }
505 }
506
BuildPredPathMemUseDependencyDFS(BB & curBB,std::vector<bool> & visited,DepNode & depNode)507 void AArch64DataDepBase::BuildPredPathMemUseDependencyDFS(BB &curBB, std::vector<bool> &visited, DepNode &depNode)
508 {
509 if (visited[curBB.GetId()]) {
510 return;
511 }
512 CDGNode *cdgNode = curBB.GetCDGNode();
513 CHECK_FATAL(cdgNode != nullptr, "get cdgNode from bb failed");
514 CDGRegion *region = cdgNode->GetRegion();
515 CHECK_FATAL(region != nullptr, "get region from cdgNode failed");
516 if (region->GetRegionId() != curRegion->GetRegionId()) {
517 return;
518 }
519 visited[curBB.GetId()] = true;
520
521 BuildDepsForMemUseCommon(*depNode.GetInsn(), *cdgNode);
522
523 // Ignore back-edge
524 if (cdgNode == curRegion->GetRegionRoot()) {
525 return;
526 }
527 for (auto predIt = curBB.GetPredsBegin(); predIt != curBB.GetPredsEnd(); ++predIt) {
528 // Ignore back-edge of self-loop
529 if (*predIt != &curBB) {
530 BuildPredPathMemUseDependencyDFS(**predIt, visited, depNode);
531 }
532 }
533 }
534
DumpNodeStyleInDot(std::ofstream & file,DepNode & depNode)535 void AArch64DataDepBase::DumpNodeStyleInDot(std::ofstream &file, DepNode &depNode)
536 {
537 MOperator mOp = depNode.GetInsn()->GetMachineOpcode();
538 const InsnDesc *md = &AArch64CG::kMd[mOp];
539 file << " insn_" << depNode.GetInsn() << "[";
540 file << "label = \"" << depNode.GetInsn()->GetId() << ":\n";
541 file << "{ " << md->name << "}\"];\n";
542 }
543 } // namespace maplebe
544