//===---------------------------- StackMaps.cpp ---------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCSectionMachO.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOpcodes.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(1),
    cl::desc("Specify the stackmap encoding version (default = 1)"));

const char *StackMaps::WSMP = "Stack Maps: ";

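/// Cache whether the patchpoint defines a value (an explicit, non-implicit
/// def in operand 0) and whether it uses the anyregcc calling convention.
/// In debug builds, also check that the explicit definitions end exactly
/// where the metadata operands begin.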
PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()),
      IsAnyReg(MI->getOperand(getMetaIdx(CCPos)).getImm() ==
               CallingConv::AnyReg) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

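/// Return the index of the next scratch register operand at or after
/// StartIdx. Scratch registers are the implicit-def, early-clobber register
/// operands appended to the patchpoint; a StartIdx of zero starts the search
/// at the first variadic operand.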
unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 1)
    llvm_unreachable("Unsupported stackmap version!");
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

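/// Parse a single location starting at operand MOI and append it to Locs.
/// Direct, indirect, and constant locations span several machine operands;
/// a register-mask operand is instead decoded into LiveOuts. Returns an
/// iterator past the operands that were consumed.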
MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      unsigned Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, RC->getSize(), DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

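/// Print a human-readable dump of the recorded call sites, their locations,
/// and their live-out registers, prefixed with WSMP for easy grepping.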
void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << " has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << TRI->getName(Loc.Reg);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte " << Loc.Size
         << ", .short " << Loc.Reg << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << TRI->getName(LO.Reg);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getMinimalPhysRegClass(Reg)->getSize();
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> Reg % 32) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  std::sort(LiveOuts.begin(), LiveOuts.end(),
            [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
              // Only sort by the dwarf register number.
              return LHS.DwarfRegNum < RHS.DwarfRegNum;
            });

  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      std::remove_if(LiveOuts.begin(), LiveOuts.end(),
                     [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

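/// Record the locations and live-outs for a stackmap, patchpoint, or
/// statepoint instruction MI with the given ID. Operands in [MOI, MOE) are
/// parsed into Location records; if recordResult is true, the instruction's
/// explicit definition is recorded as the first location. Constants that do
/// not fit in 32 bits are moved into the constant pool, and the enclosing
/// function's stack size is recorded for the frame record.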
void StackMaps::recordStackMapOpers(const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCSymbol *MILabel = OutContext.createTempSymbol();
  AP.OutStreamer->EmitLabel(MILabel);

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  while (MOI != MOE) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
  }

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function.
  const MachineFrameInfo *MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
  bool HasDynamicFrameSize =
      MFI->hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  FnStackSize[AP.CurrentFnSym] =
      HasDynamicFrameSize ? UINT64_MAX : MFI->getStackSize();
}

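/// Record a stack map at the given STACKMAP instruction. The instruction
/// carries its ID in operand 0 and the number of shadow bytes in operand 1;
/// the live values to record start at operand 2.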
void StackMaps::recordStackMap(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  int64_t ID = MI.getOperand(0).getImm();
  recordStackMapOpers(MI, ID, std::next(MI.operands_begin(), 2),
                      MI.operands_end());
}

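/// Record a stack map at the given PATCHPOINT instruction. The ID, patch
/// size, call target, argument count, and calling convention are metadata
/// operands; the recorded locations start at the index returned by
/// getStackMapStartIdx(). Under the anyregcc convention the call's result
/// (if any) is recorded as well.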
void StackMaps::recordPatchPoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  int64_t ID = opers.getMetaOper(PatchPointOpers::IDPos).getImm();

  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getMetaOper(PatchPointOpers::NArgPos).getImm();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  // Record all the deopt and gc operands (they're contiguous and run from the
  // initial index to the end of the operand list)
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 1)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.EmitIntValue(StackMapVersion, 1); // Version.
  OS.EmitIntValue(0, 1); // Reserved.
  OS.EmitIntValue(0, 2); // Reserved.

  // Num functions.
  DEBUG(dbgs() << WSMP << "#functions = " << FnStackSize.size() << '\n');
  OS.EmitIntValue(FnStackSize.size(), 4);
  // Num constants.
  DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.EmitIntValue(ConstPool.size(), 4);
  // Num callsites.
  DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.EmitIntValue(CSInfos.size(), 4);
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnStackSize) {
    DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                 << " frame size: " << FR.second);
    OS.EmitSymbolValue(FR.first, 8);
    OS.EmitIntValue(FR.second, 8);
  }
}

/// Emit the constant pool.
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.EmitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg (value in register)
///   0x2, Direct, Reg + Offset (frame index)
///   0x3, Indirect, [Reg + Offset] (spilled value)
///   0x4, Constant, Offset (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.EmitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.EmitValue(CSI.CSOffsetExpr, 4);
      OS.EmitIntValue(0, 2); // Reserved.
      OS.EmitIntValue(0, 2); // 0 locations.
      OS.EmitIntValue(0, 2); // padding.
      OS.EmitIntValue(0, 2); // 0 live-out registers.
      OS.EmitIntValue(0, 4); // padding.
      continue;
    }

    OS.EmitIntValue(CSI.ID, 8);
    OS.EmitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(CSLocs.size(), 2);

    for (const auto &Loc : CSLocs) {
      OS.EmitIntValue(Loc.Type, 1);
      OS.EmitIntValue(Loc.Size, 1);
      OS.EmitIntValue(Loc.Reg, 2);
      OS.EmitIntValue(Loc.Offset, 4);
    }

    // Num live-out registers and padding to align to 4 byte.
    OS.EmitIntValue(0, 2);
    OS.EmitIntValue(LiveOuts.size(), 2);

    for (const auto &LO : LiveOuts) {
      OS.EmitIntValue(LO.DwarfRegNum, 2);
      OS.EmitIntValue(0, 1);
      OS.EmitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.EmitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnStackSize.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.EmitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}