//===- subzero/src/IceOperand.cpp - High-level operand implementation -----===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Implements the Operand class and its target-independent subclasses,
/// primarily for the methods of the Variable class.
///
//===----------------------------------------------------------------------===//

#include "IceOperand.h"

#include "IceCfg.h"
#include "IceCfgNode.h"
#include "IceInst.h"
#include "IceInstVarIter.h"
#include "IceMemory.h"
#include "IceTargetLowering.h" // dumping stack/frame pointer register

namespace Ice {

void Constant::initShouldBePooled() {
  ShouldBePooled = TargetLowering::shouldBePooled(this);
}

bool operator==(const RelocatableTuple &A, const RelocatableTuple &B) {
  // A and B are the same if:
  // (1) they have the same name; and
  // (2) they have the same offset.
  //
  // (1) is trivial to check, but (2) requires some care.
  //
  // For (2):
  // if A and B have known offsets (i.e., no symbolic references), then
  //   A == B -> A.Offset == B.Offset.
  // else each element of A.OffsetExpr must be the same (or have the same
  //   value) as the corresponding element of B.OffsetExpr.
  if (A.Name != B.Name) {
    return false;
  }

  bool BothHaveKnownOffsets = true;
  RelocOffsetT OffsetA = A.Offset;
  RelocOffsetT OffsetB = B.Offset;
  for (SizeT i = 0; i < A.OffsetExpr.size() && BothHaveKnownOffsets; ++i) {
    BothHaveKnownOffsets = A.OffsetExpr[i]->hasOffset();
    if (BothHaveKnownOffsets) {
      OffsetA += A.OffsetExpr[i]->getOffset();
    }
  }
  for (SizeT i = 0; i < B.OffsetExpr.size() && BothHaveKnownOffsets; ++i) {
    BothHaveKnownOffsets = B.OffsetExpr[i]->hasOffset();
    if (BothHaveKnownOffsets) {
      OffsetB += B.OffsetExpr[i]->getOffset();
    }
  }
  if (BothHaveKnownOffsets) {
    // Both have known offsets (i.e., no unresolved symbolic references), so
    // A == B -> A.Offset == B.Offset.
    return OffsetA == OffsetB;
  }

  // Otherwise, A and B are not the same if their OffsetExprs have different
  // sizes.
  if (A.OffsetExpr.size() != B.OffsetExpr.size()) {
    return false;
  }

  // If the OffsetExprs' sizes are the same, then
  // for each i in OffsetExprSize:
  for (SizeT i = 0; i < A.OffsetExpr.size(); ++i) {
    const auto *const RelocOffsetA = A.OffsetExpr[i];
    const auto *const RelocOffsetB = B.OffsetExpr[i];
    if (RelocOffsetA->hasOffset() && RelocOffsetB->hasOffset()) {
      // A.OffsetExpr[i].Offset == B.OffsetExpr[i].Offset iff they are both
      // defined;
      if (RelocOffsetA->getOffset() != RelocOffsetB->getOffset()) {
        return false;
      }
    } else if (RelocOffsetA != RelocOffsetB) {
      // or, if either is undefined, then the RelocOffsets themselves must be
      // the same object.
      return false;
    }
  }

  return true;
}

RegNumT::BaseType RegNumT::Limit = 0;

bool operator<(const RegWeight &A, const RegWeight &B) {
  return A.getWeight() < B.getWeight();
}
bool operator<=(const RegWeight &A, const RegWeight &B) { return !(B < A); }
bool operator==(const RegWeight &A, const RegWeight &B) {
  return !(B < A) && !(A < B);
}

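// Illustrative sketch (not part of the original source): with global variable
// splitting disabled, addSegment(1, 5) followed by addSegment(5, 10) merges
// into the single segment [1,10), whereas a later addSegment(12, 14) appends
// a separate segment.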
void LiveRange::addSegment(InstNumberT Start, InstNumberT End, CfgNode *Node) {
  if (getFlags().getSplitGlobalVars()) {
    // Disable merging to make sure a live range 'segment' has a single node.
    // Might be possible to enable when the target segment has the same node.
    assert(NodeMap.find(Start) == NodeMap.end());
    NodeMap[Start] = Node;
  } else {
    if (!Range.empty()) {
      // Check for merge opportunity.
      InstNumberT CurrentEnd = Range.back().second;
      assert(Start >= CurrentEnd);
      if (Start == CurrentEnd) {
        Range.back().second = End;
        return;
      }
    }
  }
  Range.push_back(RangeElementType(Start, End));
}

// Returns true if this live range ends before Other's live range starts. This
// means that the highest instruction number in this live range is less than or
// equal to the lowest instruction number of the Other live range.
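// For example (illustrative): [1,10) ends before [10,20) starts, because
// segments are half-open intervals.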
bool LiveRange::endsBefore(const LiveRange &Other) const {
  // Neither range should be empty, but let's be graceful.
  if (Range.empty() || Other.Range.empty())
    return true;
  InstNumberT MyEnd = (*Range.rbegin()).second;
  InstNumberT OtherStart = (*Other.Range.begin()).first;
  return MyEnd <= OtherStart;
}

// Returns true if there is any overlap between the two live ranges.
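// Illustrative example: [1,5) and [5,10) do not overlap because the segments
// are half-open, while [1,6) and [5,10) do.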
bool LiveRange::overlaps(const LiveRange &Other, bool UseTrimmed) const {
  // Do a two-finger walk through the two sorted lists of segments.
  auto I1 = (UseTrimmed ? TrimmedBegin : Range.begin()),
       I2 = (UseTrimmed ? Other.TrimmedBegin : Other.Range.begin());
  auto E1 = Range.end(), E2 = Other.Range.end();
  while (I1 != E1 && I2 != E2) {
    if (I1->second <= I2->first) {
      ++I1;
      continue;
    }
    if (I2->second <= I1->first) {
      ++I2;
      continue;
    }
    return true;
  }
  return false;
}

bool LiveRange::overlapsInst(InstNumberT OtherBegin, bool UseTrimmed) const {
  bool Result = false;
  for (auto I = (UseTrimmed ? TrimmedBegin : Range.begin()), E = Range.end();
       I != E; ++I) {
    if (OtherBegin < I->first) {
      Result = false;
      break;
    }
    if (OtherBegin < I->second) {
      Result = true;
      break;
    }
  }
  // This is an equivalent but less efficient implementation. It's expensive
  // enough that we wouldn't want to run it in normal builds, but it could be
  // enabled if e.g. the LiveRange implementation changes and extra testing is
  // needed.
  if (BuildDefs::extraValidation()) {
    LiveRange Temp;
    Temp.addSegment(OtherBegin, OtherBegin + 1);
    bool Validation = overlaps(Temp);
    (void)Validation;
    assert(Result == Validation);
  }
  return Result;
}

// Returns true if the live range contains the given instruction number. This
// is only used for validating the live range calculation. The IsDest argument
// indicates whether the Variable being tested is used in the Dest position (as
// opposed to a Src position).
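// Illustrative example: for a segment [10,20), instruction number 20 is
// contained when the variable appears as a Src, but not when it appears as
// the Dest.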
bool LiveRange::containsValue(InstNumberT Value, bool IsDest) const {
  for (const RangeElementType &I : Range) {
    if (I.first <= Value &&
        (Value < I.second || (!IsDest && Value == I.second)))
      return true;
  }
  return false;
}

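// Illustrative sketch: after trim(15), TrimmedBegin has advanced past any
// leading segments, e.g. [5,10) or [12,15), whose end is at or below 15.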
void LiveRange::trim(InstNumberT Lower) {
  while (TrimmedBegin != Range.end() && TrimmedBegin->second <= Lower)
    ++TrimmedBegin;
}

const Variable *Variable::asType(const Cfg *Func, Type Ty,
                                 RegNumT NewRegNum) const {
  // Note: This returns a Variable, even if the "this" object is a subclass of
  // Variable.
  if (!BuildDefs::dump() || getType() == Ty)
    return this;
  static constexpr SizeT One = 1;
  auto *V = new (CfgLocalAllocator<Variable>().allocate(One))
      Variable(Func, kVariable, Ty, Number);
  V->Name = Name;
  V->RegNum = NewRegNum.hasValue() ? NewRegNum : RegNum;
  V->StackOffset = StackOffset;
  V->LinkedTo = LinkedTo;
  return V;
}

RegWeight Variable::getWeight(const Cfg *Func) const {
  if (mustHaveReg())
    return RegWeight(RegWeight::Inf);
  if (mustNotHaveReg())
    return RegWeight(RegWeight::Zero);
  return Func->getVMetadata()->getUseWeight(this);
}

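// Illustrative sketch (hypothetical values): a rematerializable variable based
// on the frame register with a stack offset of 8 yields a displacement of
// 8 + getFrameFixedAllocaOffset(); one based on the stack register yields just
// 8. Any other base register is a fatal error.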
int32_t
Variable::getRematerializableOffset(const ::Ice::TargetLowering *Target) {
  int32_t Disp = getStackOffset();
  const auto RegNum = getRegNum();
  if (RegNum == Target->getFrameReg()) {
    Disp += Target->getFrameFixedAllocaOffset();
  } else if (RegNum != Target->getStackReg()) {
    llvm::report_fatal_error("Unexpected rematerializable register type");
  }
  return Disp;
}

void VariableTracking::markUse(MetadataKind TrackingKind, const Inst *Instr,
                               CfgNode *Node, bool IsImplicit) {
  (void)TrackingKind;

  // Increment the use weight depending on the loop nest depth. The weight is
  // exponential in the nest depth as inner loops are expected to be executed
  // an exponentially greater number of times.
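  // For example (illustrative): with a log2 trip-count estimate of 2, a use at
  // loop nest depth 3 adds a weight of 1 << (3 * 2) == 64, versus 1 for a use
  // outside any loop.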
  constexpr uint32_t LogLoopTripCountEstimate = 2; // 2^2 = 4
  constexpr SizeT MaxShift = sizeof(uint32_t) * CHAR_BIT - 1;
  constexpr SizeT MaxLoopNestDepth = MaxShift / LogLoopTripCountEstimate;
  const uint32_t LoopNestDepth =
      std::min(Node->getLoopNestDepth(), MaxLoopNestDepth);
  const uint32_t ThisUseWeight = uint32_t(1)
                                 << LoopNestDepth * LogLoopTripCountEstimate;
  UseWeight.addWeight(ThisUseWeight);

  if (MultiBlock == MBS_MultiBlock)
    return;
  // TODO(stichnot): If the use occurs as a source operand in the first
  // instruction of the block, and its definition is in this block's only
  // predecessor, we might consider not marking this as a separate use. This
  // may also apply if it's the first instruction of the block that actually
  // uses a Variable.
  assert(Node);
  bool MakeMulti = false;
  if (IsImplicit)
    MakeMulti = true;
  // A phi source variable conservatively needs to be marked as multi-block,
  // even if its definition is in the same block. This is because there can be
  // additional control flow before branching back to this node, and the
  // variable is live throughout those nodes.
  if (Instr && llvm::isa<InstPhi>(Instr))
    MakeMulti = true;

  if (!MakeMulti) {
    switch (MultiBlock) {
    case MBS_Unknown:
    case MBS_NoUses:
      MultiBlock = MBS_SingleBlock;
      SingleUseNode = Node;
      break;
    case MBS_SingleBlock:
      if (SingleUseNode != Node)
        MakeMulti = true;
      break;
    case MBS_MultiBlock:
      break;
    }
  }

  if (MakeMulti) {
    MultiBlock = MBS_MultiBlock;
    SingleUseNode = nullptr;
  }
}

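// MultiDef is a small state machine: the first definition moves the state from
// MDS_Unknown to MDS_SingleDef; further definitions move it to
// MDS_MultiDefSingleBlock while they stay in the same node, or to
// MDS_MultiDefMultiBlock (a terminal state) once a definition appears in a
// different node.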
void VariableTracking::markDef(MetadataKind TrackingKind, const Inst *Instr,
                               CfgNode *Node) {
  // TODO(stichnot): If the definition occurs in the last instruction of the
  // block, consider not marking this as a separate use. But be careful not to
  // omit all uses of the variable if markDef() and markUse() both use this
  // optimization.
  assert(Node);
  // Verify that instructions are added in increasing order.
  if (BuildDefs::asserts()) {
    if (TrackingKind == VMK_All) {
      const Inst *LastInstruction =
          Definitions.empty() ? FirstOrSingleDefinition : Definitions.back();
      (void)LastInstruction;
      assert(LastInstruction == nullptr ||
             Instr->getNumber() >= LastInstruction->getNumber());
    }
  }
  constexpr bool IsImplicit = false;
  markUse(TrackingKind, Instr, Node, IsImplicit);
  if (TrackingKind == VMK_Uses)
    return;
  if (FirstOrSingleDefinition == nullptr)
    FirstOrSingleDefinition = Instr;
  else if (TrackingKind == VMK_All)
    Definitions.push_back(Instr);
  switch (MultiDef) {
  case MDS_Unknown:
    assert(SingleDefNode == nullptr);
    MultiDef = MDS_SingleDef;
    SingleDefNode = Node;
    break;
  case MDS_SingleDef:
    assert(SingleDefNode);
    if (Node == SingleDefNode) {
      MultiDef = MDS_MultiDefSingleBlock;
    } else {
      MultiDef = MDS_MultiDefMultiBlock;
      SingleDefNode = nullptr;
    }
    break;
  case MDS_MultiDefSingleBlock:
    assert(SingleDefNode);
    if (Node != SingleDefNode) {
      MultiDef = MDS_MultiDefMultiBlock;
      SingleDefNode = nullptr;
    }
    break;
  case MDS_MultiDefMultiBlock:
    assert(SingleDefNode == nullptr);
    break;
  }
}

const Inst *VariableTracking::getFirstDefinitionSingleBlock() const {
  switch (MultiDef) {
  case MDS_Unknown:
  case MDS_MultiDefMultiBlock:
    return nullptr;
  case MDS_SingleDef:
  case MDS_MultiDefSingleBlock:
    assert(FirstOrSingleDefinition);
    return FirstOrSingleDefinition;
  }
  return nullptr;
}

const Inst *VariableTracking::getSingleDefinition() const {
  switch (MultiDef) {
  case MDS_Unknown:
  case MDS_MultiDefMultiBlock:
  case MDS_MultiDefSingleBlock:
    return nullptr;
  case MDS_SingleDef:
    assert(FirstOrSingleDefinition);
    return FirstOrSingleDefinition;
  }
  return nullptr;
}

const Inst *VariableTracking::getFirstDefinition() const {
  switch (MultiDef) {
  case MDS_Unknown:
    return nullptr;
  case MDS_MultiDefMultiBlock:
  case MDS_SingleDef:
  case MDS_MultiDefSingleBlock:
    assert(FirstOrSingleDefinition);
    return FirstOrSingleDefinition;
  }
  return nullptr;
}

void VariablesMetadata::init(MetadataKind TrackingKind) {
  TimerMarker T(TimerStack::TT_vmetadata, Func);
  Kind = TrackingKind;
  Metadata.clear();
  Metadata.resize(Func->getNumVariables(), VariableTracking::MBS_NoUses);

  // Mark implicit args as being used in the entry node.
  for (Variable *Var : Func->getImplicitArgs()) {
    constexpr Inst *NoInst = nullptr;
    CfgNode *EntryNode = Func->getEntryNode();
    constexpr bool IsImplicit = true;
    Metadata[Var->getIndex()].markUse(Kind, NoInst, EntryNode, IsImplicit);
  }

  for (CfgNode *Node : Func->getNodes())
    addNode(Node);
}

void VariablesMetadata::addNode(CfgNode *Node) {
  if (Func->getNumVariables() > Metadata.size())
    Metadata.resize(Func->getNumVariables());

  for (Inst &I : Node->getPhis()) {
    if (I.isDeleted())
      continue;
    if (Variable *Dest = I.getDest()) {
      SizeT DestNum = Dest->getIndex();
      assert(DestNum < Metadata.size());
      Metadata[DestNum].markDef(Kind, &I, Node);
    }
    for (SizeT SrcNum = 0; SrcNum < I.getSrcSize(); ++SrcNum) {
      if (auto *Var = llvm::dyn_cast<Variable>(I.getSrc(SrcNum))) {
        SizeT VarNum = Var->getIndex();
        assert(VarNum < Metadata.size());
        constexpr bool IsImplicit = false;
        Metadata[VarNum].markUse(Kind, &I, Node, IsImplicit);
      }
    }
  }

  for (Inst &I : Node->getInsts()) {
    if (I.isDeleted())
      continue;
    // Note: The implicit definitions (and uses) from InstFakeKill are
    // deliberately ignored.
    if (Variable *Dest = I.getDest()) {
      SizeT DestNum = Dest->getIndex();
      assert(DestNum < Metadata.size());
      Metadata[DestNum].markDef(Kind, &I, Node);
    }
    FOREACH_VAR_IN_INST(Var, I) {
      SizeT VarNum = Var->getIndex();
      assert(VarNum < Metadata.size());
      constexpr bool IsImplicit = false;
      Metadata[VarNum].markUse(Kind, &I, Node, IsImplicit);
    }
  }
}

bool VariablesMetadata::isMultiDef(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (Var->getIsArg())
    return false;
  if (!isTracked(Var))
    return true; // conservative answer
  SizeT VarNum = Var->getIndex();
  // Conservatively return true if the state is unknown.
  return Metadata[VarNum].getMultiDef() != VariableTracking::MDS_SingleDef;
}

bool VariablesMetadata::isMultiBlock(const Variable *Var) const {
  if (Var->getIsArg())
    return true;
  if (Var->isRematerializable())
    return false;
  if (!isTracked(Var))
    return true; // conservative answer
  SizeT VarNum = Var->getIndex();
  switch (Metadata[VarNum].getMultiBlock()) {
  case VariableTracking::MBS_NoUses:
  case VariableTracking::MBS_SingleBlock:
    return false;
  // Conservatively return true if the state is unknown.
  case VariableTracking::MBS_Unknown:
  case VariableTracking::MBS_MultiBlock:
    return true;
  }
  assert(0);
  return true;
}

bool VariablesMetadata::isSingleBlock(const Variable *Var) const {
  if (Var->getIsArg())
    return false;
  if (Var->isRematerializable())
    return false;
  if (!isTracked(Var))
    return false; // conservative answer
  SizeT VarNum = Var->getIndex();
  switch (Metadata[VarNum].getMultiBlock()) {
  case VariableTracking::MBS_SingleBlock:
    return true;
  case VariableTracking::MBS_Unknown:
  case VariableTracking::MBS_NoUses:
  case VariableTracking::MBS_MultiBlock:
    return false;
  }
  assert(0);
  return false;
}

const Inst *
VariablesMetadata::getFirstDefinitionSingleBlock(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getFirstDefinitionSingleBlock();
}

const Inst *VariablesMetadata::getSingleDefinition(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getSingleDefinition();
}

const Inst *VariablesMetadata::getFirstDefinition(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getFirstDefinition();
}

const InstDefList &
VariablesMetadata::getLatterDefinitions(const Variable *Var) const {
  assert(Kind == VMK_All);
  if (!isTracked(Var)) {
    // NoDefinitions has to be initialized after we've had a chance to set the
    // CfgAllocator, so it can't be a static global object. Also, while C++11
    // guarantees the initialization of static local objects to be thread-safe,
    // we use a pointer to it so we can avoid frequent mutex locking overhead.
    if (NoDefinitions == nullptr) {
      static const InstDefList NoDefinitionsInstance;
      NoDefinitions = &NoDefinitionsInstance;
    }
    return *NoDefinitions;
  }
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getLatterDefinitions();
}

CfgNode *VariablesMetadata::getLocalUseNode(const Variable *Var) const {
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getNode();
}

RegWeight VariablesMetadata::getUseWeight(const Variable *Var) const {
  if (!isTracked(Var))
    return RegWeight(1); // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getUseWeight();
}

const InstDefList *VariablesMetadata::NoDefinitions = nullptr;

// ======================== dump routines ======================== //

void Variable::emit(const Cfg *Func) const {
  if (BuildDefs::dump())
    Func->getTarget()->emitVariable(this);
}

void Variable::dump(const Cfg *Func, Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  if (Func == nullptr) {
    Str << "%" << getName();
    return;
  }
  if (Func->isVerbose(IceV_RegOrigins) ||
      (!hasReg() && !Func->getTarget()->hasComputedFrame())) {
    Str << "%" << getName();
    for (Variable *Link = getLinkedTo(); Link != nullptr;
         Link = Link->getLinkedTo()) {
      Str << ":%" << Link->getName();
    }
  }
  if (hasReg()) {
    if (Func->isVerbose(IceV_RegOrigins))
      Str << ":";
    Str << Func->getTarget()->getRegName(RegNum, getType());
  } else if (Func->getTarget()->hasComputedFrame()) {
    if (Func->isVerbose(IceV_RegOrigins))
      Str << ":";
    const auto BaseRegisterNumber =
        hasReg() ? getBaseRegNum() : Func->getTarget()->getFrameOrStackReg();
    Str << "["
        << Func->getTarget()->getRegName(BaseRegisterNumber, IceType_i32);
    if (hasKnownStackOffset()) {
      int32_t Offset = getStackOffset();
      if (Offset) {
        if (Offset > 0)
          Str << "+";
        Str << Offset;
      }
    }
    Str << "]";
  }
}

template <> void ConstantInteger32::emit(TargetLowering *Target) const {
  Target->emit(this);
}

template <> void ConstantInteger64::emit(TargetLowering *Target) const {
  Target->emit(this);
}

template <> void ConstantFloat::emit(TargetLowering *Target) const {
  Target->emit(this);
}

template <> void ConstantDouble::emit(TargetLowering *Target) const {
  Target->emit(this);
}

void ConstantRelocatable::emit(TargetLowering *Target) const {
  Target->emit(this);
}

void ConstantRelocatable::emitWithoutPrefix(const TargetLowering *Target,
                                            const char *Suffix) const {
  Target->emitWithoutPrefix(this, Suffix);
}

void ConstantRelocatable::dump(const Cfg *, Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  if (!EmitString.empty()) {
    Str << EmitString;
    return;
  }
  Str << "@" << (Name.hasStdString() ? Name.toString() : "<Unnamed>");
  const RelocOffsetT Offset = getOffset();
  if (Offset) {
    if (Offset >= 0) {
      Str << "+";
    }
    Str << Offset;
  }
}

void ConstantUndef::emit(TargetLowering *Target) const { Target->emit(this); }

void LiveRange::dump(Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  bool First = true;
  for (const RangeElementType &I : Range) {
    if (!First)
      Str << ", ";
    First = false;
    Str << "[" << I.first << ":" << I.second << ")";
  }
}

Ostream &operator<<(Ostream &Str, const LiveRange &L) {
  if (!BuildDefs::dump())
    return Str;
  L.dump(Str);
  return Str;
}

Ostream &operator<<(Ostream &Str, const RegWeight &W) {
  if (!BuildDefs::dump())
    return Str;
  if (W.getWeight() == RegWeight::Inf)
    Str << "Inf";
  else
    Str << W.getWeight();
  return Str;
}

} // end of namespace Ice