//===- StackMaps.cpp ------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackMaps.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stackmaps"

static cl::opt<int> StackMapVersion(
    "stackmap-version", cl::init(3), cl::Hidden,
    cl::desc("Specify the stackmap encoding version (default = 3)"));

const char *StackMaps::WSMP = "Stack Maps: ";

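// Read the payload of a ConstantOp meta operand: the marker immediate lives at
// Idx and the constant value itself at Idx + 1.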
static uint64_t getConstMetaVal(const MachineInstr &MI, unsigned Idx) {
  assert(MI.getOperand(Idx).isImm() &&
         MI.getOperand(Idx).getImm() == StackMaps::ConstantOp);
  const auto &MO = MI.getOperand(Idx + 1);
  assert(MO.isImm());
  return MO.getImm();
}

StackMapOpers::StackMapOpers(const MachineInstr *MI)
  : MI(MI) {
  assert(getVarIdx() <= MI->getNumOperands() &&
         "invalid stackmap definition");
}

PatchPointOpers::PatchPointOpers(const MachineInstr *MI)
    : MI(MI), HasDef(MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
                     !MI->getOperand(0).isImplicit()) {
#ifndef NDEBUG
  unsigned CheckStartIdx = 0, e = MI->getNumOperands();
  while (CheckStartIdx < e && MI->getOperand(CheckStartIdx).isReg() &&
         MI->getOperand(CheckStartIdx).isDef() &&
         !MI->getOperand(CheckStartIdx).isImplicit())
    ++CheckStartIdx;

  assert(getMetaIdx() == CheckStartIdx &&
         "Unexpected additional definition in Patchpoint intrinsic.");
#endif
}

unsigned PatchPointOpers::getNextScratchIdx(unsigned StartIdx) const {
  if (!StartIdx)
    StartIdx = getVarIdx();

  // Find the next scratch register (implicit def and early clobber)
  unsigned ScratchIdx = StartIdx, e = MI->getNumOperands();
  while (ScratchIdx < e &&
         !(MI->getOperand(ScratchIdx).isReg() &&
           MI->getOperand(ScratchIdx).isDef() &&
           MI->getOperand(ScratchIdx).isImplicit() &&
           MI->getOperand(ScratchIdx).isEarlyClobber()))
    ++ScratchIdx;

  assert(ScratchIdx != e && "No scratch register available");
  return ScratchIdx;
}

int StatepointOpers::getFirstGCPtrIdx() {
  unsigned NumDeoptsIdx = getNumDeoptArgsIdx();
  unsigned NumDeoptArgs = MI->getOperand(NumDeoptsIdx).getImm();

  unsigned CurIdx = NumDeoptsIdx + 1;
  while (NumDeoptArgs--) {
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);
  }
  ++CurIdx; // <StackMaps::ConstantOp>
  unsigned NumGCPtrs = MI->getOperand(CurIdx).getImm();
  if (NumGCPtrs == 0)
    return -1;
  ++CurIdx; // <num gc ptrs>
  assert(CurIdx < MI->getNumOperands() && "Index points past operand list");
  return (int)CurIdx;
}

unsigned StatepointOpers::getGCPointerMap(
    SmallVectorImpl<std::pair<unsigned, unsigned>> &GCMap) {
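  // Skip over the GC pointer and gc alloca sections to reach the trailing
  // list of (base index, derived index) pairs that forms the GC pointer map.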
  int FirstGCIdx = getFirstGCPtrIdx();
  if (FirstGCIdx == -1)
    return 0;
  unsigned NumGCPtr = getConstMetaVal(*MI, (unsigned)FirstGCIdx - 2);
  unsigned CurIdx = (unsigned)FirstGCIdx;
  while (NumGCPtr--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);

  unsigned NumAllocas = getConstMetaVal(*MI, CurIdx);
  CurIdx += 2;
  while (NumAllocas--)
    CurIdx = StackMaps::getNextMetaArgIdx(MI, CurIdx);

  assert(CurIdx < MI->getNumOperands());
  unsigned GCMapSize = getConstMetaVal(*MI, CurIdx);
  CurIdx += 2;
  for (unsigned N = 0; N < GCMapSize; ++N) {
    unsigned B = MI->getOperand(CurIdx++).getImm();
    unsigned D = MI->getOperand(CurIdx++).getImm();
    GCMap.push_back(std::make_pair(B, D));
  }

  return GCMapSize;
}

StackMaps::StackMaps(AsmPrinter &AP) : AP(AP) {
  if (StackMapVersion != 3)
    llvm_unreachable("Unsupported stackmap version!");
}

unsigned StackMaps::getNextMetaArgIdx(const MachineInstr *MI, unsigned CurIdx) {
  assert(CurIdx < MI->getNumOperands() && "Bad meta arg index");
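  // Each meta argument is a single operand unless it begins with a marker
  // immediate: DirectMemRefOp is followed by <reg, offset>, IndirectMemRefOp
  // by <size, reg, offset>, and ConstantOp by one immediate value.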
  const auto &MO = MI->getOperand(CurIdx);
  if (MO.isImm()) {
    switch (MO.getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp:
      CurIdx += 2;
      break;
    case StackMaps::IndirectMemRefOp:
      CurIdx += 3;
      break;
    case StackMaps::ConstantOp:
      ++CurIdx;
      break;
    }
  }
  ++CurIdx;
  assert(CurIdx < MI->getNumOperands() && "points past operand list");
  return CurIdx;
}

/// Go up the super-register chain until we hit a valid dwarf register number.
static unsigned getDwarfRegNum(unsigned Reg, const TargetRegisterInfo *TRI) {
  int RegNum = TRI->getDwarfRegNum(Reg, false);
  for (MCSuperRegIterator SR(Reg, TRI); SR.isValid() && RegNum < 0; ++SR)
    RegNum = TRI->getDwarfRegNum(*SR, false);

  assert(RegNum >= 0 && "Invalid Dwarf register number.");
  return (unsigned)RegNum;
}

MachineInstr::const_mop_iterator
StackMaps::parseOperand(MachineInstr::const_mop_iterator MOI,
                        MachineInstr::const_mop_iterator MOE, LocationVec &Locs,
                        LiveOutVec &LiveOuts) const {
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  if (MOI->isImm()) {
    switch (MOI->getImm()) {
    default:
      llvm_unreachable("Unrecognized operand type.");
    case StackMaps::DirectMemRefOp: {
      auto &DL = AP.MF->getDataLayout();

      unsigned Size = DL.getPointerSizeInBits();
      assert((Size % 8) == 0 && "Need pointer size in bytes.");
      Size /= 8;
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Direct, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::IndirectMemRefOp: {
      int64_t Size = (++MOI)->getImm();
      assert(Size > 0 && "Need a valid size for indirect memory locations.");
      Register Reg = (++MOI)->getReg();
      int64_t Imm = (++MOI)->getImm();
      Locs.emplace_back(StackMaps::Location::Indirect, Size,
                        getDwarfRegNum(Reg, TRI), Imm);
      break;
    }
    case StackMaps::ConstantOp: {
      ++MOI;
      assert(MOI->isImm() && "Expected constant operand.");
      int64_t Imm = MOI->getImm();
      Locs.emplace_back(Location::Constant, sizeof(int64_t), 0, Imm);
      break;
    }
    }
    return ++MOI;
  }

  // The physical register number will ultimately be encoded as a DWARF regno.
  // The stack map also records the size of a spill slot that can hold the
  // register content. (The runtime can track the actual size of the data type
  // if it needs to.)
  if (MOI->isReg()) {
    // Skip implicit registers (this includes our scratch registers)
    if (MOI->isImplicit())
      return ++MOI;

    assert(Register::isPhysicalRegister(MOI->getReg()) &&
           "Virtreg operands should have been rewritten before now.");
    const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(MOI->getReg());
    assert(!MOI->getSubReg() && "Physical subreg still around.");

    unsigned Offset = 0;
    unsigned DwarfRegNum = getDwarfRegNum(MOI->getReg(), TRI);
    unsigned LLVMRegNum = *TRI->getLLVMRegNum(DwarfRegNum, false);
    unsigned SubRegIdx = TRI->getSubRegIndex(LLVMRegNum, MOI->getReg());
    if (SubRegIdx)
      Offset = TRI->getSubRegIdxOffset(SubRegIdx);

    Locs.emplace_back(Location::Register, TRI->getSpillSize(*RC),
                      DwarfRegNum, Offset);
    return ++MOI;
  }

  if (MOI->isRegLiveOut())
    LiveOuts = parseRegisterLiveOutMask(MOI->getRegLiveOut());

  return ++MOI;
}

void StackMaps::print(raw_ostream &OS) {
  const TargetRegisterInfo *TRI =
      AP.MF ? AP.MF->getSubtarget().getRegisterInfo() : nullptr;
  OS << WSMP << "callsites:\n";
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    OS << WSMP << "callsite " << CSI.ID << "\n";
    OS << WSMP << " has " << CSLocs.size() << " locations\n";

    unsigned Idx = 0;
    for (const auto &Loc : CSLocs) {
      OS << WSMP << "\t\tLoc " << Idx << ": ";
      switch (Loc.Type) {
      case Location::Unprocessed:
        OS << "<Unprocessed operand>";
        break;
      case Location::Register:
        OS << "Register ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        break;
      case Location::Direct:
        OS << "Direct ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        if (Loc.Offset)
          OS << " + " << Loc.Offset;
        break;
      case Location::Indirect:
        OS << "Indirect ";
        if (TRI)
          OS << printReg(Loc.Reg, TRI);
        else
          OS << Loc.Reg;
        OS << "+" << Loc.Offset;
        break;
      case Location::Constant:
        OS << "Constant " << Loc.Offset;
        break;
      case Location::ConstantIndex:
        OS << "Constant Index " << Loc.Offset;
        break;
      }
      OS << "\t[encoding: .byte " << Loc.Type << ", .byte 0"
         << ", .short " << Loc.Size << ", .short " << Loc.Reg << ", .short 0"
         << ", .int " << Loc.Offset << "]\n";
      Idx++;
    }

    OS << WSMP << "\thas " << LiveOuts.size() << " live-out registers\n";

    Idx = 0;
    for (const auto &LO : LiveOuts) {
      OS << WSMP << "\t\tLO " << Idx << ": ";
      if (TRI)
        OS << printReg(LO.Reg, TRI);
      else
        OS << LO.Reg;
      OS << "\t[encoding: .short " << LO.DwarfRegNum << ", .byte 0, .byte "
         << LO.Size << "]\n";
      Idx++;
    }
  }
}

/// Create a live-out register record for the given register Reg.
StackMaps::LiveOutReg
StackMaps::createLiveOutReg(unsigned Reg, const TargetRegisterInfo *TRI) const {
  unsigned DwarfRegNum = getDwarfRegNum(Reg, TRI);
  unsigned Size = TRI->getSpillSize(*TRI->getMinimalPhysRegClass(Reg));
  return LiveOutReg(Reg, DwarfRegNum, Size);
}

/// Parse the register live-out mask and return a vector of live-out registers
/// that need to be recorded in the stackmap.
StackMaps::LiveOutVec
StackMaps::parseRegisterLiveOutMask(const uint32_t *Mask) const {
  assert(Mask && "No register mask specified");
  const TargetRegisterInfo *TRI = AP.MF->getSubtarget().getRegisterInfo();
  LiveOutVec LiveOuts;

  // Create a LiveOutReg for each bit that is set in the register mask.
  for (unsigned Reg = 0, NumRegs = TRI->getNumRegs(); Reg != NumRegs; ++Reg)
    if ((Mask[Reg / 32] >> (Reg % 32)) & 1)
      LiveOuts.push_back(createLiveOutReg(Reg, TRI));

  // We don't need to keep track of a register if its super-register is already
  // in the list. Merge entries that refer to the same dwarf register and use
  // the maximum size that needs to be spilled.

  llvm::sort(LiveOuts, [](const LiveOutReg &LHS, const LiveOutReg &RHS) {
    // Only sort by the dwarf register number.
    return LHS.DwarfRegNum < RHS.DwarfRegNum;
  });

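  // After sorting, entries that share a dwarf register number are adjacent;
  // fold each such group into its first entry and mark the rest for removal.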
  for (auto I = LiveOuts.begin(), E = LiveOuts.end(); I != E; ++I) {
    for (auto II = std::next(I); II != E; ++II) {
      if (I->DwarfRegNum != II->DwarfRegNum) {
        // Skip all the now invalid entries.
        I = --II;
        break;
      }
      I->Size = std::max(I->Size, II->Size);
      if (TRI->isSuperRegister(I->Reg, II->Reg))
        I->Reg = II->Reg;
      II->Reg = 0; // mark for deletion.
    }
  }

  LiveOuts.erase(
      llvm::remove_if(LiveOuts,
                      [](const LiveOutReg &LO) { return LO.Reg == 0; }),
      LiveOuts.end());

  return LiveOuts;
}

// See statepoint MI format description in StatepointOpers' class comment
// in include/llvm/CodeGen/StackMaps.h
void StackMaps::parseStatepointOpers(const MachineInstr &MI,
                                     MachineInstr::const_mop_iterator MOI,
                                     MachineInstr::const_mop_iterator MOE,
                                     LocationVec &Locations,
                                     LiveOutVec &LiveOuts) {
  LLVM_DEBUG(dbgs() << "record statepoint : " << MI << "\n");
  StatepointOpers SO(&MI);
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // CC
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Flags
  MOI = parseOperand(MOI, MOE, Locations, LiveOuts); // Num Deopts

  // Record Deopt Args.
  unsigned NumDeoptArgs = Locations.back().Offset;
  assert(Locations.back().Type == Location::Constant);
  assert(NumDeoptArgs == SO.getNumDeoptArgs());

  while (NumDeoptArgs--)
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Record gc base/derived pairs
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  assert(MOI->isImm());
  unsigned NumGCPointers = MOI->getImm();
  ++MOI;
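  // MOI is now positioned on the first GC pointer operand (if any); the
  // base/derived pairs recorded below index into this block of operands.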
  if (NumGCPointers) {
    // Map logical index of GC ptr to MI operand index.
    SmallVector<unsigned, 8> GCPtrIndices;
    unsigned GCPtrIdx = (unsigned)SO.getFirstGCPtrIdx();
    assert((int)GCPtrIdx != -1);
    assert(MOI - MI.operands_begin() == GCPtrIdx + 0LL);
    while (NumGCPointers--) {
      GCPtrIndices.push_back(GCPtrIdx);
      GCPtrIdx = StackMaps::getNextMetaArgIdx(&MI, GCPtrIdx);
    }

    SmallVector<std::pair<unsigned, unsigned>, 8> GCPairs;
    unsigned NumGCPairs = SO.getGCPointerMap(GCPairs);
    (void)NumGCPairs;
    LLVM_DEBUG(dbgs() << "NumGCPairs = " << NumGCPairs << "\n");

    auto MOB = MI.operands_begin();
    for (auto &P : GCPairs) {
      assert(P.first < GCPtrIndices.size() && "base pointer index not found");
      assert(P.second < GCPtrIndices.size() &&
             "derived pointer index not found");
      unsigned BaseIdx = GCPtrIndices[P.first];
      unsigned DerivedIdx = GCPtrIndices[P.second];
      LLVM_DEBUG(dbgs() << "Base : " << BaseIdx << " Derived : " << DerivedIdx
                        << "\n");
      (void)parseOperand(MOB + BaseIdx, MOE, Locations, LiveOuts);
      (void)parseOperand(MOB + DerivedIdx, MOE, Locations, LiveOuts);
    }

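    // GCPtrIdx was advanced one past the last GC pointer above, so this moves
    // the main cursor to the ConstantOp that starts the gc alloca section.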
    MOI = MOB + GCPtrIdx;
  }

  // Record gc allocas
  assert(MOI < MOE);
  assert(MOI->isImm() && MOI->getImm() == StackMaps::ConstantOp);
  ++MOI;
  unsigned NumAllocas = MOI->getImm();
  ++MOI;
  while (NumAllocas--) {
    MOI = parseOperand(MOI, MOE, Locations, LiveOuts);
    assert(MOI < MOE);
  }
}

void StackMaps::recordStackMapOpers(const MCSymbol &MILabel,
                                    const MachineInstr &MI, uint64_t ID,
                                    MachineInstr::const_mop_iterator MOI,
                                    MachineInstr::const_mop_iterator MOE,
                                    bool recordResult) {
  MCContext &OutContext = AP.OutStreamer->getContext();

  LocationVec Locations;
  LiveOutVec LiveOuts;

  if (recordResult) {
    assert(PatchPointOpers(&MI).hasDef() && "Stackmap has no return value.");
    parseOperand(MI.operands_begin(), std::next(MI.operands_begin()), Locations,
                 LiveOuts);
  }

  // Parse operands.
  if (MI.getOpcode() == TargetOpcode::STATEPOINT)
    parseStatepointOpers(MI, MOI, MOE, Locations, LiveOuts);
  else
    while (MOI != MOE)
      MOI = parseOperand(MOI, MOE, Locations, LiveOuts);

  // Move large constants into the constant pool.
  for (auto &Loc : Locations) {
    // Constants are encoded as sign-extended integers.
    // -1 is directly encoded as .long 0xFFFFFFFF with no constant pool.
    if (Loc.Type == Location::Constant && !isInt<32>(Loc.Offset)) {
      Loc.Type = Location::ConstantIndex;
      // ConstPool is intentionally a MapVector of 'uint64_t's (as
      // opposed to 'int64_t's). We should never be in a situation
      // where we have to insert either the tombstone or the empty
      // keys into a map, and for a DenseMap<uint64_t, T> these are
      // (uint64_t)0 and (uint64_t)-1. They can be and are
      // represented using 32 bit integers.
      assert((uint64_t)Loc.Offset != DenseMapInfo<uint64_t>::getEmptyKey() &&
             (uint64_t)Loc.Offset !=
                 DenseMapInfo<uint64_t>::getTombstoneKey() &&
             "empty and tombstone keys should fit in 32 bits!");
      auto Result = ConstPool.insert(std::make_pair(Loc.Offset, Loc.Offset));
      Loc.Offset = Result.first - ConstPool.begin();
    }
  }

  // Create an expression to calculate the offset of the callsite from function
  // entry.
  const MCExpr *CSOffsetExpr = MCBinaryExpr::createSub(
      MCSymbolRefExpr::create(&MILabel, OutContext),
      MCSymbolRefExpr::create(AP.CurrentFnSymForSize, OutContext), OutContext);

  CSInfos.emplace_back(CSOffsetExpr, ID, std::move(Locations),
                       std::move(LiveOuts));

  // Record the stack size of the current function and update callsite count.
  const MachineFrameInfo &MFI = AP.MF->getFrameInfo();
  const TargetRegisterInfo *RegInfo = AP.MF->getSubtarget().getRegisterInfo();
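  // Functions with variable-sized objects or stack realignment do not have a
  // compile-time-constant frame size; report UINT64_MAX for them.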
  bool HasDynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(*(AP.MF));
  uint64_t FrameSize = HasDynamicFrameSize ? UINT64_MAX : MFI.getStackSize();

  auto CurrentIt = FnInfos.find(AP.CurrentFnSym);
  if (CurrentIt != FnInfos.end())
    CurrentIt->second.RecordCount++;
  else
    FnInfos.insert(std::make_pair(AP.CurrentFnSym, FunctionInfo(FrameSize)));
}

void StackMaps::recordStackMap(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");

  StackMapOpers opers(&MI);
  const int64_t ID = MI.getOperand(PatchPointOpers::IDPos).getImm();
  recordStackMapOpers(L, MI, ID, std::next(MI.operands_begin(),
                                           opers.getVarIdx()),
                      MI.operands_end());
}

void StackMaps::recordPatchPoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");

  PatchPointOpers opers(&MI);
  const int64_t ID = opers.getID();
  auto MOI = std::next(MI.operands_begin(), opers.getStackMapStartIdx());
  recordStackMapOpers(L, MI, ID, MOI, MI.operands_end(),
                      opers.isAnyReg() && opers.hasDef());

#ifndef NDEBUG
  // verify anyregcc
  auto &Locations = CSInfos.back().Locations;
  if (opers.isAnyReg()) {
    unsigned NArgs = opers.getNumCallArgs();
    for (unsigned i = 0, e = (opers.hasDef() ? NArgs + 1 : NArgs); i != e; ++i)
      assert(Locations[i].Type == Location::Register &&
             "anyreg arg must be in reg.");
  }
#endif
}

void StackMaps::recordStatepoint(const MCSymbol &L, const MachineInstr &MI) {
  assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");

  StatepointOpers opers(&MI);
  const unsigned StartIdx = opers.getVarIdx();
  recordStackMapOpers(L, MI, opers.getID(), MI.operands_begin() + StartIdx,
                      MI.operands_end(), false);
}

/// Emit the stackmap header.
///
/// Header {
///   uint8  : Stack Map Version (currently 3)
///   uint8  : Reserved (expected to be 0)
///   uint16 : Reserved (expected to be 0)
/// }
/// uint32 : NumFunctions
/// uint32 : NumConstants
/// uint32 : NumRecords
void StackMaps::emitStackmapHeader(MCStreamer &OS) {
  // Header.
  OS.emitIntValue(StackMapVersion, 1); // Version.
  OS.emitIntValue(0, 1);               // Reserved.
  OS.emitInt16(0);                     // Reserved.

  // Num functions.
  LLVM_DEBUG(dbgs() << WSMP << "#functions = " << FnInfos.size() << '\n');
  OS.emitInt32(FnInfos.size());
  // Num constants.
  LLVM_DEBUG(dbgs() << WSMP << "#constants = " << ConstPool.size() << '\n');
  OS.emitInt32(ConstPool.size());
  // Num callsites.
  LLVM_DEBUG(dbgs() << WSMP << "#callsites = " << CSInfos.size() << '\n');
  OS.emitInt32(CSInfos.size());
}

/// Emit the function frame record for each function.
///
/// StkSizeRecord[NumFunctions] {
///   uint64 : Function Address
///   uint64 : Stack Size
///   uint64 : Record Count
/// }
void StackMaps::emitFunctionFrameRecords(MCStreamer &OS) {
  // Function Frame records.
  LLVM_DEBUG(dbgs() << WSMP << "functions:\n");
  for (auto const &FR : FnInfos) {
    LLVM_DEBUG(dbgs() << WSMP << "function addr: " << FR.first
                      << " frame size: " << FR.second.StackSize
                      << " callsite count: " << FR.second.RecordCount << '\n');
    OS.emitSymbolValue(FR.first, 8);
    OS.emitIntValue(FR.second.StackSize, 8);
    OS.emitIntValue(FR.second.RecordCount, 8);
  }
}

/// Emit the constant pool.
///
/// int64 : Constants[NumConstants]
void StackMaps::emitConstantPoolEntries(MCStreamer &OS) {
  // Constant pool entries.
  LLVM_DEBUG(dbgs() << WSMP << "constants:\n");
  for (const auto &ConstEntry : ConstPool) {
    LLVM_DEBUG(dbgs() << WSMP << ConstEntry.second << '\n');
    OS.emitIntValue(ConstEntry.second, 8);
  }
}

/// Emit the callsite info for each callsite.
///
/// StkMapRecord[NumRecords] {
///   uint64 : PatchPoint ID
///   uint32 : Instruction Offset
///   uint16 : Reserved (record flags)
///   uint16 : NumLocations
///   Location[NumLocations] {
///     uint8  : Register | Direct | Indirect | Constant | ConstantIndex
///     uint8  : Size in Bytes
///     uint16 : Dwarf RegNum
///     int32  : Offset
///   }
///   uint16 : Padding
///   uint16 : NumLiveOuts
///   LiveOuts[NumLiveOuts] {
///     uint16 : Dwarf RegNum
///     uint8  : Reserved
///     uint8  : Size in Bytes
///   }
///   uint32 : Padding (only if required to align to 8 byte)
/// }
///
/// Location Encoding, Type, Value:
///   0x1, Register, Reg                 (value in register)
///   0x2, Direct, Reg + Offset          (frame index)
///   0x3, Indirect, [Reg + Offset]      (spilled value)
///   0x4, Constant, Offset              (small constant)
///   0x5, ConstIndex, Constants[Offset] (large constant)
void StackMaps::emitCallsiteEntries(MCStreamer &OS) {
  LLVM_DEBUG(print(dbgs()));
  // Callsite entries.
  for (const auto &CSI : CSInfos) {
    const LocationVec &CSLocs = CSI.Locations;
    const LiveOutVec &LiveOuts = CSI.LiveOuts;

    // Verify stack map entry. It's better to communicate a problem to the
    // runtime than crash in case of in-process compilation. Currently, we do
    // simple overflow checks, but we may eventually communicate other
    // compilation errors this way.
    if (CSLocs.size() > UINT16_MAX || LiveOuts.size() > UINT16_MAX) {
      OS.emitIntValue(UINT64_MAX, 8); // Invalid ID.
      OS.emitValue(CSI.CSOffsetExpr, 4);
      OS.emitInt16(0); // Reserved.
      OS.emitInt16(0); // 0 locations.
      OS.emitInt16(0); // padding.
      OS.emitInt16(0); // 0 live-out registers.
      OS.emitInt32(0); // padding.
      continue;
    }

    OS.emitIntValue(CSI.ID, 8);
    OS.emitValue(CSI.CSOffsetExpr, 4);

    // Reserved for flags.
    OS.emitInt16(0);
    OS.emitInt16(CSLocs.size());

    for (const auto &Loc : CSLocs) {
      OS.emitIntValue(Loc.Type, 1);
      OS.emitIntValue(0, 1); // Reserved
      OS.emitInt16(Loc.Size);
      OS.emitInt16(Loc.Reg);
      OS.emitInt16(0); // Reserved
      OS.emitInt32(Loc.Offset);
    }

    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);

    // Num live-out registers and padding to align to 4 byte.
    OS.emitInt16(0);
    OS.emitInt16(LiveOuts.size());

    for (const auto &LO : LiveOuts) {
      OS.emitInt16(LO.DwarfRegNum);
      OS.emitIntValue(0, 1);
      OS.emitIntValue(LO.Size, 1);
    }
    // Emit alignment to 8 byte.
    OS.emitValueToAlignment(8);
  }
}

/// Serialize the stackmap data.
void StackMaps::serializeToStackMapSection() {
  (void)WSMP;
  // Bail out if there's no stack map data.
  assert((!CSInfos.empty() || ConstPool.empty()) &&
         "Expected empty constant pool too!");
  assert((!CSInfos.empty() || FnInfos.empty()) &&
         "Expected empty function record too!");
  if (CSInfos.empty())
    return;

  MCContext &OutContext = AP.OutStreamer->getContext();
  MCStreamer &OS = *AP.OutStreamer;

  // Create the section.
  MCSection *StackMapSection =
      OutContext.getObjectFileInfo()->getStackMapSection();
  OS.SwitchSection(StackMapSection);

  // Emit a dummy symbol to force section inclusion.
  OS.emitLabel(OutContext.getOrCreateSymbol(Twine("__LLVM_StackMaps")));

  // Serialize data.
  LLVM_DEBUG(dbgs() << "********** Stack Map Output **********\n");
  emitStackmapHeader(OS);
  emitFunctionFrameRecords(OS);
  emitConstantPoolEntries(OS);
  emitCallsiteEntries(OS);
  OS.AddBlankLine();

  // Clean up.
  CSInfos.clear();
  ConstPool.clear();
}