#include "llvm/Config/llvm-config.h"

#define DEBUG_TYPE "livedebugvalues"

STATISTIC(NumInserted, "Number of DBG_VALUE instructions inserted");
  using u32_location_t = uint32_t;
  using u32_index_t = uint32_t;

  u32_location_t Location;
  u32_index_t Index;

  static constexpr u32_location_t kUniversalLocation = 0;
  static constexpr u32_location_t kFirstRegLocation = 1;
  static constexpr u32_location_t kFirstInvalidRegLocation = 1 << 30;
  static constexpr u32_location_t kSpillLocation = kFirstInvalidRegLocation;
  static constexpr u32_location_t kEntryValueBackupLocation =
      kFirstInvalidRegLocation + 1;
  static constexpr u32_location_t kWasmLocation = kFirstInvalidRegLocation + 2;
  static constexpr u32_location_t kFirstVirtualRegLocation = 1 << 31;

  LocIndex(u32_location_t Location, u32_index_t Index)
      : Location(Location), Index(Index) {}

  uint64_t getAsRawInteger() const {
    return (static_cast<uint64_t>(Location) << 32) | Index;
  }

  template <typename IntT> static LocIndex fromRawInteger(IntT ID) {
    static_assert(std::is_unsigned_v<IntT> && sizeof(ID) == sizeof(uint64_t),
                  "Cannot convert raw integer to LocIndex");
    return {static_cast<u32_location_t>(ID >> 32),
            static_cast<u32_index_t>(ID)};
  }

  /// Return the raw integer value of a LocIndex whose Location is \p Reg and
  /// whose Index is 0.
  static uint64_t rawIndexForReg(Register Reg) {
    return LocIndex(Reg, 0).getAsRawInteger();
  }

  /// Return a range covering all set indices in the interval reserved for
  /// \p Location in \p Set.
  static auto indexRangeForLocation(const VarLocSet &Set,
                                    u32_location_t Location) {
    uint64_t Start = LocIndex(Location, 0).getAsRawInteger();
    uint64_t End = LocIndex(Location + 1, 0).getAsRawInteger();
    return Set.half_open_range(Start, End);
  }
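
  // Worked example, derived from getAsRawInteger()/fromRawInteger() above
  // (the concrete values are hypothetical): a LocIndex packs its 32-bit
  // Location into the upper half of a uint64_t and its 32-bit Index into the
  // lower half, so
  //   LocIndex(/*Location=*/5, /*Index=*/7).getAsRawInteger()
  //     == (uint64_t(5) << 32) | 7 == 0x0000000500000007,
  // and fromRawInteger() reverses the packing. Consequently,
  // indexRangeForLocation(Set, L) covers exactly the raw IDs in the half-open
  // interval [L << 32, (L + 1) << 32), i.e. every ID whose Location is L.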
class VarLocBasedLDV : public LDVImpl {
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  const TargetFrameLowering *TFI;
  bool ShouldEmitDebugEntryValues;
  BitVector CalleeSavedRegs;

  VarLocSet::Allocator Alloc;

  const MachineInstr *LastNonDbgMI;

  enum struct TransferKind { TransferCopy, TransferSpill, TransferRestore };
  using OptFragmentInfo = std::optional<DIExpression::FragmentInfo>;

  /// A pair of base register and offset identifying a stack slot.
  struct SpillLoc {
    unsigned SpillBase;
    StackOffset SpillOffset;
    bool operator==(const SpillLoc &Other) const {
      return SpillBase == Other.SpillBase && SpillOffset == Other.SpillOffset;
    }
    bool operator!=(const SpillLoc &Other) const {
      return !(*this == Other);
    }
  };

  /// Identity of a WebAssembly target-index location.
  struct WasmLoc {
    int Index;
    int64_t Offset;
    bool operator==(const WasmLoc &Other) const {
      return Index == Other.Index && Offset == Other.Offset;
    }
  };
    const DebugVariable Var;
    const DIExpression *Expr;
    const MachineInstr &MI;

    enum class MachineLocKind {
      InvalidKind = 0,
      RegisterKind,
      SpillLocKind,
      ImmediateKind,
      WasmLocKind
    };

    enum class EntryValueLocKind {
      NonEntryValueKind = 0,
      EntryValueKind,
      EntryValueBackupKind,
      EntryValueCopyBackupKind
    } EVKind = EntryValueLocKind::NonEntryValueKind;

    union MachineLocValue {
      uint64_t RegNo;
      SpillLoc SpillLocation;
      uint64_t Hash;
      int64_t Immediate;
      const ConstantFP *FPImm;
      const ConstantInt *CImm;
      WasmLoc WasmLocation;
      MachineLocValue() : Hash(0) {}
    };

    struct MachineLoc {
      MachineLocKind Kind;
      MachineLocValue Value;

      bool operator==(const MachineLoc &Other) const {
        if (Kind != Other.Kind)
          return false;
        switch (Kind) {
        case MachineLocKind::SpillLocKind:
          return Value.SpillLocation == Other.Value.SpillLocation;
        case MachineLocKind::WasmLocKind:
          return Value.WasmLocation == Other.Value.WasmLocation;
        case MachineLocKind::RegisterKind:
        case MachineLocKind::ImmediateKind:
          return Value.Hash == Other.Value.Hash;
        case MachineLocKind::InvalidKind:
          llvm_unreachable("Invalid MachineLocKind");
        }
      }

      bool operator<(const MachineLoc &Other) const {
        switch (Kind) {
        case MachineLocKind::SpillLocKind:
          return std::make_tuple(
                     Kind, Value.SpillLocation.SpillBase,
                     Value.SpillLocation.SpillOffset.getFixed(),
                     Value.SpillLocation.SpillOffset.getScalable()) <
                 std::make_tuple(
                     Other.Kind, Other.Value.SpillLocation.SpillBase,
                     Other.Value.SpillLocation.SpillOffset.getFixed(),
                     Other.Value.SpillLocation.SpillOffset.getScalable());
        case MachineLocKind::WasmLocKind:
          return std::make_tuple(Kind, Value.WasmLocation.Index,
                                 Value.WasmLocation.Offset) <
                 std::make_tuple(Other.Kind, Other.Value.WasmLocation.Index,
                                 Other.Value.WasmLocation.Offset);
        case MachineLocKind::RegisterKind:
        case MachineLocKind::ImmediateKind:
          return std::tie(Kind, Value.Hash) <
                 std::tie(Other.Kind, Other.Value.Hash);
        case MachineLocKind::InvalidKind:
          llvm_unreachable("Invalid MachineLocKind");
        }
      }
    };

    SmallVector<unsigned, 8> OrigLocMap;
    VarLoc(const MachineInstr &MI)
        : Var(MI.getDebugVariable(), MI.getDebugExpression(),
              MI.getDebugLoc()->getInlinedAt()),
          Expr(MI.getDebugExpression()), MI(MI) {
      assert(MI.isDebugValue() && "not a DBG_VALUE");
      assert((MI.isDebugValueList() || MI.getNumOperands() == 4) &&
             "malformed DBG_VALUE");
      for (const MachineOperand &Op : MI.debug_operands()) {
        MachineLoc ML = GetLocForOp(Op);
        auto It = find(Locs, ML);
        if (It == Locs.end()) {
          Locs.push_back(ML);
          OrigLocMap.push_back(MI.getDebugOperandIndex(&Op));
        }
        // Index of the location after insertion, and of an equivalent
        // pre-existing location if this operand duplicates one.
        unsigned OpIdx = Locs.size();
        unsigned DuplicatingIdx = std::distance(Locs.begin(), It);
      }
      assert(EVKind != EntryValueLocKind::EntryValueKind &&
             !isEntryBackupLoc());
    }
    static MachineLoc GetLocForOp(const MachineOperand &Op) {
      MachineLocKind Kind;
      MachineLocValue Loc;
      if (Op.isReg()) {
        Kind = MachineLocKind::RegisterKind;
        Loc.RegNo = Op.getReg();
      } else if (Op.isImm()) {
        Kind = MachineLocKind::ImmediateKind;
        Loc.Immediate = Op.getImm();
      } else if (Op.isFPImm()) {
        Kind = MachineLocKind::ImmediateKind;
        Loc.FPImm = Op.getFPImm();
      } else if (Op.isCImm()) {
        Kind = MachineLocKind::ImmediateKind;
        Loc.CImm = Op.getCImm();
      } else if (Op.isTargetIndex()) {
        Kind = MachineLocKind::WasmLocKind;
        Loc.WasmLocation = {Op.getIndex(), Op.getOffset()};
      } else
        llvm_unreachable("Invalid Op kind for MachineLoc.");
      return {Kind, Loc};
    }
    /// Take the variable and machine location in DBG_VALUE MI, and build an
    /// entry location using the given expression.
    static VarLoc CreateEntryLoc(const MachineInstr &MI,
                                 const DIExpression *EntryExpr, Register Reg) {
      VarLoc VL(MI);
      assert(VL.Locs.size() == 1 &&
             VL.Locs[0].Kind == MachineLocKind::RegisterKind);
      VL.EVKind = EntryValueLocKind::EntryValueKind;
      VL.Expr = EntryExpr;
      VL.Locs[0].Value.RegNo = Reg;
      return VL;
    }
    /// Take the variable and machine location from the DBG_VALUE, and build
    /// the backup location for the entry value.
    static VarLoc CreateEntryBackupLoc(const MachineInstr &MI,
                                       const DIExpression *EntryExpr) {
      VarLoc VL(MI);
      assert(VL.Locs.size() == 1 &&
             VL.Locs[0].Kind == MachineLocKind::RegisterKind);
      VL.EVKind = EntryValueLocKind::EntryValueBackupKind;
      VL.Expr = EntryExpr;
      return VL;
    }
    /// Take the variable and machine location from the DBG_VALUE, and build
    /// the backup location for the entry value, copied into \p NewReg.
    static VarLoc CreateEntryCopyBackupLoc(const MachineInstr &MI,
                                            const DIExpression *EntryExpr,
                                            Register NewReg) {
      VarLoc VL(MI);
      assert(VL.Locs.size() == 1 &&
             VL.Locs[0].Kind == MachineLocKind::RegisterKind);
      VL.EVKind = EntryValueLocKind::EntryValueCopyBackupKind;
      VL.Expr = EntryExpr;
      VL.Locs[0].Value.RegNo = NewReg;
      return VL;
    }
    /// Copy the register location in \p OldML, replacing its register with
    /// \p NewReg.
    static VarLoc CreateCopyLoc(const VarLoc &OldVL, const MachineLoc &OldML,
                                Register NewReg) {
      VarLoc VL = OldVL;
      for (MachineLoc &ML : VL.Locs)
        if (ML == OldML) {
          ML.Kind = MachineLocKind::RegisterKind;
          ML.Value.RegNo = NewReg;
          return VL;
        }
      llvm_unreachable("Should have found OldML in new VarLoc.");
    }
    /// Take the location in \p OldML and rewrite it as a spill location.
    static VarLoc CreateSpillLoc(const VarLoc &OldVL, const MachineLoc &OldML,
                                 unsigned SpillBase, StackOffset SpillOffset) {
      VarLoc VL = OldVL;
      for (MachineLoc &ML : VL.Locs)
        if (ML == OldML) {
          ML.Kind = MachineLocKind::SpillLocKind;
          ML.Value.SpillLocation = {SpillBase, SpillOffset};
          return VL;
        }
      llvm_unreachable("Should have found OldML in new VarLoc.");
    }
    /// Create a DBG_VALUE representing this VarLoc in the given function.
    MachineInstr *BuildDbgValue(MachineFunction &MF) const {
      assert(!isEntryBackupLoc() &&
             "Tried to produce DBG_VALUE for backup VarLoc");
      const DebugLoc &DbgLoc = MI.getDebugLoc();
      bool Indirect = MI.isIndirectDebugValue();
      const auto &IID = MI.getDesc();
      const DILocalVariable *Var = MI.getDebugVariable();
      SmallVector<MachineOperand, 8> MOs;

      const DIExpression *DIExpr = Expr;

      for (unsigned I = 0, E = Locs.size(); I < E; ++I) {
        MachineLocKind LocKind = Locs[I].Kind;
        MachineLocValue Loc = Locs[I].Value;
        const MachineOperand &Orig = MI.getDebugOperand(OrigLocMap[I]);
        switch (LocKind) {
        case MachineLocKind::RegisterKind:
          // An entry value keeps the register of the original DBG_VALUE
          // operand; any other register location uses the new register.
          MOs.push_back(MachineOperand::CreateReg(
              EVKind == EntryValueLocKind::EntryValueKind
                  ? Orig.getReg()
                  : Register(Loc.RegNo),
              false));
          break;
        case MachineLocKind::SpillLocKind: {
          // Spills are indirect locations: emit the base register and fold
          // the offset into the expression.
          unsigned Base = Loc.SpillLocation.SpillBase;
          if (MI.isNonListDebugValue()) {
            DIExpr = TRI->prependOffsetExpression(
                DIExpr, DIExpression::ApplyOffset | DIExpression::DerefAfter,
                Loc.SpillLocation.SpillOffset);
          } else {
            SmallVector<uint64_t, 4> Ops;
            TRI->getOffsetOpcodes(Loc.SpillLocation.SpillOffset, Ops);
            Ops.push_back(dwarf::DW_OP_deref);
            DIExpr = DIExpression::appendOpsToArg(DIExpr, Ops, I);
          }
          MOs.push_back(MachineOperand::CreateReg(Base, false));
          break;
        }
        case MachineLocKind::ImmediateKind: {
          MOs.push_back(Orig);
          break;
        }
        case MachineLocKind::WasmLocKind: {
          MOs.push_back(Orig);
          break;
        }
        case MachineLocKind::InvalidKind:
          llvm_unreachable("Tried to produce DBG_VALUE for invalid VarLoc");
        }
      }
      return BuildMI(MF, DbgLoc, IID, Indirect, MOs, Var, DIExpr);
    }
    /// Is the location of the given kind a constant value?
    bool isConstant(MachineLocKind Kind) const {
      return Kind == MachineLocKind::ImmediateKind;
    }

    /// Is the VarLoc an entry-value backup location?
    bool isEntryBackupLoc() const {
      return EVKind == EntryValueLocKind::EntryValueBackupKind ||
             EVKind == EntryValueLocKind::EntryValueCopyBackupKind;
    }

    /// Is this VarLoc the entry-value backup held in \p Reg?
    bool isEntryValueBackupReg(Register Reg) const {
      return EVKind == EntryValueLocKind::EntryValueBackupKind && usesReg(Reg);
    }

    /// Is this VarLoc a copy of the entry-value backup held in \p Reg?
    bool isEntryValueCopyBackupReg(Register Reg) const {
      return EVKind == EntryValueLocKind::EntryValueCopyBackupKind &&
             usesReg(Reg);
    }

    /// Does this VarLoc refer to \p Reg?
    bool usesReg(Register Reg) const {
      MachineLoc RegML;
      RegML.Kind = MachineLocKind::RegisterKind;
      RegML.Value.RegNo = Reg;
      return is_contained(Locs, RegML);
    }

    /// Return the index of the register location matching \p Reg.
    unsigned getRegIdx(Register Reg) const {
      for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
        if (Locs[Idx].Kind == MachineLocKind::RegisterKind &&
            Register{static_cast<unsigned>(Locs[Idx].Value.RegNo)} == Reg)
          return Idx;
      llvm_unreachable("Could not find given Reg in Locs");
    }

    /// Append the registers referenced by this VarLoc to \p Regs; return true
    /// if any were appended.
    bool getDescribingRegs(SmallVectorImpl<uint32_t> &Regs) const {
      bool AnyRegs = false;
      for (const auto &Loc : Locs)
        if (Loc.Kind == MachineLocKind::RegisterKind) {
          Regs.push_back(Loc.Value.RegNo);
          AnyRegs = true;
        }
      return AnyRegs;
    }

    bool containsSpillLocs() const {
      return any_of(Locs, [](VarLoc::MachineLoc ML) {
        return ML.Kind == VarLoc::MachineLocKind::SpillLocKind;
      });
    }

    bool usesSpillLoc(SpillLoc SpillLocation) const {
      MachineLoc SpillML;
      SpillML.Kind = MachineLocKind::SpillLocKind;
      SpillML.Value.SpillLocation = SpillLocation;
      return is_contained(Locs, SpillML);
    }

    unsigned getSpillLocIdx(SpillLoc SpillLocation) const {
      for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
        if (Locs[Idx].Kind == MachineLocKind::SpillLocKind &&
            Locs[Idx].Value.SpillLocation == SpillLocation)
          return Idx;
      llvm_unreachable("Could not find given SpillLoc in Locs");
    }

    bool containsWasmLocs() const {
      return any_of(Locs, [](VarLoc::MachineLoc ML) {
        return ML.Kind == VarLoc::MachineLocKind::WasmLocKind;
      });
    }

    bool usesWasmLoc(WasmLoc WasmLocation) const {
      MachineLoc WasmML;
      WasmML.Kind = MachineLocKind::WasmLocKind;
      WasmML.Value.WasmLocation = WasmLocation;
      return is_contained(Locs, WasmML);
    }

    /// Determine whether the lexical scope of this value extends to \p MBB.
    bool dominates(LexicalScopes &LS, MachineBasicBlock &MBB) const {
      return LS.dominates(MI.getDebugLoc().get(), &MBB);
    }
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
    // TRI and TII can be null.
    void dump(const TargetRegisterInfo *TRI, const TargetInstrInfo *TII,
              raw_ostream &Out = dbgs()) const {
      Out << "VarLoc(";
      for (const MachineLoc &MLoc : Locs) {
        if (Locs.begin() != &MLoc)
          Out << ", ";
        switch (MLoc.Kind) {
        case MachineLocKind::RegisterKind:
          Out << printReg(MLoc.Value.RegNo, TRI);
          break;
        case MachineLocKind::SpillLocKind:
          Out << printReg(MLoc.Value.SpillLocation.SpillBase, TRI);
          Out << "[" << MLoc.Value.SpillLocation.SpillOffset.getFixed() << " + "
              << MLoc.Value.SpillLocation.SpillOffset.getScalable() << "]";
          break;
        case MachineLocKind::ImmediateKind:
          Out << MLoc.Value.Immediate;
          break;
        case MachineLocKind::WasmLocKind: {
          auto Indices = TII->getSerializableTargetIndices();
          auto Found =
              find_if(Indices, [&](const std::pair<int, const char *> &I) {
                return I.first == MLoc.Value.WasmLocation.Index;
              });
          assert(Found != Indices.end());
          Out << Found->second;
          if (MLoc.Value.WasmLocation.Offset > 0)
            Out << " + " << MLoc.Value.WasmLocation.Offset;
          break;
        }
        case MachineLocKind::InvalidKind:
          llvm_unreachable("Invalid VarLoc in dump method");
        }
      }

      Out << ", \"" << Var.getVariable()->getName() << "\", " << *Expr << ", ";
      if (Var.getInlinedAt())
        Out << "!" << Var.getInlinedAt()->getMetadataID() << ")\n";

      if (isEntryBackupLoc())
        Out << " (backup loc)\n";
    }
#endif
    bool operator==(const VarLoc &Other) const {
      return std::tie(EVKind, Var, Expr, Locs) ==
             std::tie(Other.EVKind, Other.Var, Other.Expr, Other.Locs);
    }

    /// This operator guarantees that VarLocs are sorted by Variable first.
    bool operator<(const VarLoc &Other) const {
      return std::tie(Var, EVKind, Locs, Expr) <
             std::tie(Other.Var, Other.EVKind, Other.Locs, Other.Expr);
    }
  };
  using VarVec = SmallVector<VarLoc, 32>;

  class VarLocMap {
    /// Map a VarLoc to the LocIndices assigned to it.
    std::map<VarLoc, LocIndices> Var2Indices;

    /// Map a location number to the vector of VarLocs tracked in that
    /// location.
    SmallDenseMap<LocIndex::u32_location_t, std::vector<VarLoc>> Loc2Vars;
  public:
    /// Add \p VL to the map, returning the LocIndices assigned to it.
    LocIndices insert(const VarLoc &VL) {
      LocIndices &Indices = Var2Indices[VL];
      // If Indices is not empty, VL is already in the map.
      if (!Indices.empty())
        return Indices;
      SmallVector<LocIndex::u32_location_t, 4> Locations;
      // Each register gets its own location bucket; spill and Wasm locations
      // share one bucket each; entry-value backups use the backup bucket.
      if (VL.EVKind == VarLoc::EntryValueLocKind::NonEntryValueKind) {
        VL.getDescribingRegs(Locations);
        assert(all_of(Locations,
                      [](auto RegNo) {
                        return (RegNo < LocIndex::kFirstInvalidRegLocation) ||
                               (LocIndex::kFirstVirtualRegLocation <= RegNo);
                      }) &&
               "Physical or virtual register out of range?");
        if (VL.containsSpillLocs())
          Locations.push_back(LocIndex::kSpillLocation);
        if (VL.containsWasmLocs())
          Locations.push_back(LocIndex::kWasmLocation);
      } else if (VL.EVKind != VarLoc::EntryValueLocKind::EntryValueKind) {
        LocIndex::u32_location_t Loc = LocIndex::kEntryValueBackupLocation;
        Locations.push_back(Loc);
      }
      Locations.push_back(LocIndex::kUniversalLocation);
      for (LocIndex::u32_location_t Location : Locations) {
        auto &Vars = Loc2Vars[Location];
        Indices.push_back(
            {Location, static_cast<LocIndex::u32_index_t>(Vars.size())});
        Vars.push_back(VL);
      }
      return Indices;
    }
    LocIndices getAllIndices(const VarLoc &VL) const {
      auto IndIt = Var2Indices.find(VL);
      assert(IndIt != Var2Indices.end() && "VarLoc not tracked");
      return IndIt->second;
    }

    /// Retrieve the VarLoc associated with \p ID.
    const VarLoc &operator[](LocIndex ID) const {
      auto LocIt = Loc2Vars.find(ID.Location);
      assert(LocIt != Loc2Vars.end() && "Location not tracked");
      return LocIt->second[ID.Index];
    }
  };
  using VarLocInMBB =
      SmallDenseMap<const MachineBasicBlock *, std::unique_ptr<VarLocSet>>;

  struct TransferDebugPair {
    MachineInstr *TransferInst;
    LocIndex LocationID;
  };
  using TransferMap = SmallVector<TransferDebugPair, 4>;

  /// Map responsible MI to the entry-value DBG_VALUE attached to it.
  using InstToEntryLocMap = std::multimap<const MachineInstr *, LocIndex>;
  /// Map a register to the last instruction that defined it.
  using RegDefToInstMap = DenseMap<Register, MachineInstr *>;

  using FragmentOfVar =
      std::pair<const DILocalVariable *, DIExpression::FragmentInfo>;
  using OverlapMap =
      DenseMap<FragmentOfVar, SmallVector<DIExpression::FragmentInfo, 1>>;

  using VarToFragments =
      DenseMap<const DILocalVariable *, SmallSet<FragmentInfo, 4>>;
  static void collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
                                const VarLocSet &CollectFrom,
                                const VarLocMap &VarLocIDs);

  void getUsedRegs(const VarLocSet &CollectFrom,
                   SmallVectorImpl<Register> &UsedRegs) const;
  /// The set of open ranges: variable locations that are live at the point
  /// currently being processed.
  class OpenRangesSet {
    VarLocSet::Allocator &Alloc;
    VarLocSet VarLocs;
    /// Map each DebugVariable to its most recent primary location IDs.
    SmallDenseMap<DebugVariable, LocIndices, 8> Vars;
    /// Map each DebugVariable to its most recent backup location IDs.
    SmallDenseMap<DebugVariable, LocIndices, 8> EntryValuesBackupVars;
    OverlapMap &OverlappingFragments;

  public:
    OpenRangesSet(VarLocSet::Allocator &Alloc, OverlapMap &_OLapMap)
        : Alloc(Alloc), VarLocs(Alloc), OverlappingFragments(_OLapMap) {}

    const VarLocSet &getVarLocs() const { return VarLocs; }

    /// Fill \p Collected with every unique VarLoc in this set.
    void getUniqueVarLocs(SmallVectorImpl<VarLoc> &Collected,
                          const VarLocMap &VarLocIDs) const {
      collectAllVarLocs(Collected, VarLocs, VarLocIDs);
    }

    /// Terminate all open ranges of VL.Var by removing it from the set.
    void erase(const VarLoc &VL);

    /// Terminate all open ranges of the VarLocs in \p KillSet under the given
    /// location bucket.
    void erase(const VarLocsInRange &KillSet, const VarLocMap &VarLocIDs,
               LocIndex::u32_location_t Location);

    /// Insert a new range into the set.
    void insert(LocIndices VarLocIDs, const VarLoc &VL);

    /// Insert a set of ranges.
    void insertFromLocSet(const VarLocSet &ToLoad, const VarLocMap &Map);

    std::optional<LocIndices> getEntryValueBackup(DebugVariable Var);

    /// Empty the set.
    void clear() {
      VarLocs.clear();
      Vars.clear();
      EntryValuesBackupVars.clear();
    }

    /// Return whether the set is empty or not.
    bool empty() const {
      assert(Vars.empty() == EntryValuesBackupVars.empty() &&
             Vars.empty() == VarLocs.empty() &&
             "open ranges are inconsistent");
      return VarLocs.empty();
    }

    /// Get an empty range of VarLoc IDs.
    auto getEmptyVarLocRange() const {
      return iterator_range<VarLocSet::const_iterator>(getVarLocs().end(),
                                                       getVarLocs().end());
    }

    /// Get all set IDs for VarLocs tracked in register \p Reg.
    auto getRegisterVarLocs(Register Reg) const {
      return LocIndex::indexRangeForLocation(getVarLocs(), Reg);
    }

    /// Get all set IDs for VarLocs with spill locations.
    auto getSpillVarLocs() const {
      return LocIndex::indexRangeForLocation(getVarLocs(),
                                             LocIndex::kSpillLocation);
    }

    /// Get all set IDs for entry-value backup VarLocs.
    auto getEntryValueBackupVarLocs() const {
      return LocIndex::indexRangeForLocation(
          getVarLocs(), LocIndex::kEntryValueBackupLocation);
    }

    /// Get all set IDs for VarLocs with Wasm locations.
    auto getWasmVarLocs() const {
      return LocIndex::indexRangeForLocation(getVarLocs(),
                                             LocIndex::kWasmLocation);
    }
  };
  static void collectIDsForRegs(VarLocsInRange &Collected,
                                const DefinedRegsSet &Regs,
                                const VarLocSet &CollectFrom,
                                const VarLocMap &VarLocIDs);
  VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, VarLocInMBB &Locs) {
    std::unique_ptr<VarLocSet> &VLS = Locs[MBB];
    if (!VLS)
      VLS = std::make_unique<VarLocSet>(Alloc);
    return *VLS;
  }

  const VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB,
                                   const VarLocInMBB &Locs) const {
    auto It = Locs.find(MBB);
    assert(It != Locs.end() && "MBB not in map");
    return *It->second;
  }
  bool isSpillInstruction(const MachineInstr &MI, MachineFunction *MF);

  bool isLocationSpill(const MachineInstr &MI, MachineFunction *MF,
                       Register &Reg);

  bool isEntryValueCandidate(const MachineInstr &MI,
                             const DefinedRegsSet &Regs) const;

  std::optional<VarLoc::SpillLoc> isRestoreInstruction(const MachineInstr &MI,
                                                       MachineFunction *MF,
                                                       Register &Reg);

  VarLoc::SpillLoc extractSpillBaseRegAndOffset(const MachineInstr &MI);
  void insertTransferDebugPair(MachineInstr &MI, OpenRangesSet &OpenRanges,
                               TransferMap &Transfers, VarLocMap &VarLocIDs,
                               LocIndex OldVarID, TransferKind Kind,
                               const VarLoc::MachineLoc &OldLoc,
                               Register NewReg);

  void transferDebugValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
                          VarLocMap &VarLocIDs,
                          InstToEntryLocMap &EntryValTransfers,
                          RegDefToInstMap &RegSetInstrs);
  void transferSpillOrRestoreInst(MachineInstr &MI, OpenRangesSet &OpenRanges,
                                  VarLocMap &VarLocIDs, TransferMap &Transfers);
  void cleanupEntryValueTransfers(const MachineInstr *MI,
                                  OpenRangesSet &OpenRanges,
                                  VarLocMap &VarLocIDs, const VarLoc &EntryVL,
                                  InstToEntryLocMap &EntryValTransfers);
  void removeEntryValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
                        VarLocMap &VarLocIDs, const VarLoc &EntryVL,
                        InstToEntryLocMap &EntryValTransfers,
                        RegDefToInstMap &RegSetInstrs);
  void emitEntryValues(MachineInstr &MI, OpenRangesSet &OpenRanges,
                       VarLocMap &VarLocIDs,
                       InstToEntryLocMap &EntryValTransfers,
                       VarLocsInRange &KillSet);
  void recordEntryValue(const MachineInstr &MI,
                        const DefinedRegsSet &DefinedRegs,
                        OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs);
  void transferRegisterCopy(MachineInstr &MI, OpenRangesSet &OpenRanges,
                            VarLocMap &VarLocIDs, TransferMap &Transfers);
  void transferRegisterDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
                           VarLocMap &VarLocIDs,
                           InstToEntryLocMap &EntryValTransfers,
                           RegDefToInstMap &RegSetInstrs);
  void transferWasmDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
                       VarLocMap &VarLocIDs);
  bool transferTerminator(MachineBasicBlock *MBB, OpenRangesSet &OpenRanges,
                          VarLocInMBB &OutLocs, const VarLocMap &VarLocIDs);

  void process(MachineInstr &MI, OpenRangesSet &OpenRanges,
               VarLocMap &VarLocIDs, TransferMap &Transfers,
               InstToEntryLocMap &EntryValTransfers,
               RegDefToInstMap &RegSetInstrs);

  void accumulateFragmentMap(MachineInstr &MI, VarToFragments &SeenFragments,
                             OverlapMap &OLapMap);

  bool join(MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
            const VarLocMap &VarLocIDs,
            SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
            SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks);

  void flushPendingLocs(VarLocInMBB &PendingInLocs, VarLocMap &VarLocIDs);

  bool ExtendRanges(MachineFunction &MF, MachineDominatorTree *DomTree,
                    bool ShouldEmitDebugEntryValues, unsigned InputBBLimit,
                    unsigned InputDbgValLimit) override;

  void printVarLocInMBB(const MachineFunction &MF, const VarLocInMBB &V,
                        const VarLocMap &VarLocIDs, const char *msg,
                        raw_ostream &Out) const;

public:
  VarLocBasedLDV();
  ~VarLocBasedLDV();
};
VarLocBasedLDV::VarLocBasedLDV() = default;

VarLocBasedLDV::~VarLocBasedLDV() = default;
void VarLocBasedLDV::OpenRangesSet::erase(const VarLoc &VL) {
  // Erasure helper.
  auto DoErase = [&VL, this](DebugVariable VarToErase) {
    auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
    auto It = EraseFrom->find(VarToErase);
    if (It != EraseFrom->end()) {
      LocIndices IDs = It->second;
      for (LocIndex ID : IDs)
        VarLocs.reset(ID.getAsRawInteger());
      EraseFrom->erase(It);
    }
  };

  DebugVariable Var = VL.Var;

  // Erase the variable/fragment that ends here.
  DoErase(Var);

  // Extract the fragment. Interpret an empty fragment as one that covers all
  // possible bits.
  FragmentInfo ThisFragment = Var.getFragmentOrDefault();

  // There may be fragments that overlap the designated fragment. Look them up
  // in the pre-computed overlap map, and erase them too.
  auto MapIt = OverlappingFragments.find({Var.getVariable(), ThisFragment});
  if (MapIt != OverlappingFragments.end()) {
    for (auto Fragment : MapIt->second) {
      VarLocBasedLDV::OptFragmentInfo FragmentHolder;
      if (!DebugVariable::isDefaultFragment(Fragment))
        FragmentHolder = VarLocBasedLDV::OptFragmentInfo(Fragment);
      DoErase({Var.getVariable(), FragmentHolder, Var.getInlinedAt()});
    }
  }
}
void VarLocBasedLDV::OpenRangesSet::erase(const VarLocsInRange &KillSet,
                                          const VarLocMap &VarLocIDs,
                                          LocIndex::u32_location_t Location) {
  VarLocSet RemoveSet(Alloc);
  for (LocIndex::u32_index_t ID : KillSet) {
    const VarLoc &VL = VarLocIDs[LocIndex(Location, ID)];
    auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
    EraseFrom->erase(VL.Var);
    LocIndices VLI = VarLocIDs.getAllIndices(VL);
    for (LocIndex ID : VLI)
      RemoveSet.set(ID.getAsRawInteger());
  }
  VarLocs.intersectWithComplement(RemoveSet);
}
void VarLocBasedLDV::OpenRangesSet::insertFromLocSet(const VarLocSet &ToLoad,
                                                     const VarLocMap &Map) {
  VarLocsInRange UniqueVarLocIDs;
  DefinedRegsSet Regs;
  Regs.insert(LocIndex::kUniversalLocation);
  collectIDsForRegs(UniqueVarLocIDs, Regs, ToLoad, Map);
  for (uint64_t ID : UniqueVarLocIDs) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VarL = Map[Idx];
    const LocIndices Indices = Map.getAllIndices(VarL);
    insert(Indices, VarL);
  }
}
void VarLocBasedLDV::OpenRangesSet::insert(LocIndices VarLocIDs,
                                           const VarLoc &VL) {
  auto *InsertInto = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
  for (LocIndex ID : VarLocIDs)
    VarLocs.set(ID.getAsRawInteger());
  InsertInto->insert({VL.Var, VarLocIDs});
}
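
// Worked example, derived from insert()/erase() above (the concrete numbers
// are hypothetical): if a VarLoc VL was assigned LocIndices
// {(Location=5, Index=0), (kUniversalLocation, Index=3)}, then insert() sets
// the raw bits (5 << 32) | 0 and (0 << 32) | 3 in VarLocs and records those
// indices under Vars[VL.Var]; a later erase(VL) resets exactly those bits and
// removes the Vars entry, keeping the bitvector and the per-variable map in
// sync.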
std::optional<LocIndices>
VarLocBasedLDV::OpenRangesSet::getEntryValueBackup(DebugVariable Var) {
  auto It = EntryValuesBackupVars.find(Var);
  if (It != EntryValuesBackupVars.end())
    return It->second;

  return std::nullopt;
}
void VarLocBasedLDV::collectIDsForRegs(VarLocsInRange &Collected,
                                       const DefinedRegsSet &Regs,
                                       const VarLocSet &CollectFrom,
                                       const VarLocMap &VarLocIDs) {
  assert(!Regs.empty() && "Nothing to collect");
  SmallVector<Register, 32> SortedRegs;
  append_range(SortedRegs, Regs);
  array_pod_sort(SortedRegs.begin(), SortedRegs.end());
  auto It = CollectFrom.find(LocIndex::rawIndexForReg(SortedRegs.front()));
  auto End = CollectFrom.end();
  for (Register Reg : SortedRegs) {
    // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains
    // all possible VarLoc IDs for VarLocs with MLs that use Reg.
    uint64_t FirstIndexForReg = LocIndex::rawIndexForReg(Reg);
    uint64_t FirstInvalidIndex = LocIndex::rawIndexForReg(Reg + 1);
    It.advanceToLowerBound(FirstIndexForReg);

    // Iterate through that half-open interval and collect all the set IDs.
    for (; It != End && *It < FirstInvalidIndex; ++It) {
      LocIndex ItIdx = LocIndex::fromRawInteger(*It);
      const VarLoc &VL = VarLocIDs[ItIdx];
      LocIndices LI = VarLocIDs.getAllIndices(VL);
      // For now, the back index is always the universal location index.
      assert(LI.back().Location == LocIndex::kUniversalLocation &&
             "Unexpected order of LocIndices for VarLoc; was it inserted into "
             "the VarLocMap correctly?");
      Collected.insert(LI.back().Index);
    }

    if (It == End)
      return;
  }
}
void VarLocBasedLDV::getUsedRegs(const VarLocSet &CollectFrom,
                                 SmallVectorImpl<Register> &UsedRegs) const {
  // All register-based VarLocs are assigned indices greater than or equal to
  // FirstRegIndex.
  uint64_t FirstRegIndex =
      LocIndex::rawIndexForReg(LocIndex::kFirstRegLocation);
  uint64_t FirstInvalidIndex =
      LocIndex::rawIndexForReg(LocIndex::kFirstInvalidRegLocation);
  uint64_t FirstVirtualRegIndex =
      LocIndex::rawIndexForReg(LocIndex::kFirstVirtualRegLocation);
  auto doGetUsedRegs = [&](VarLocSet::const_iterator &It) {
    // We found a VarLoc ID for a VarLoc that lives in a register. Figure out
    // which register and add it to UsedRegs.
    uint32_t FoundReg = LocIndex::fromRawInteger(*It).Location;
    assert((UsedRegs.empty() || FoundReg != UsedRegs.back()) &&
           "Duplicate used reg");
    UsedRegs.push_back(FoundReg);

    // Skip to the next set register; advanceToLowerBound finds the next set
    // bit even if the next register has no VarLocs.
    uint64_t NextRegIndex = LocIndex::rawIndexForReg(FoundReg + 1);
    It.advanceToLowerBound(NextRegIndex);
  };
  for (auto It = CollectFrom.find(FirstRegIndex),
            End = CollectFrom.find(FirstInvalidIndex);
       It != End;)
    doGetUsedRegs(It);
  for (auto It = CollectFrom.find(FirstVirtualRegIndex),
            End = CollectFrom.end();
       It != End;)
    doGetUsedRegs(It);
}
void VarLocBasedLDV::printVarLocInMBB(const MachineFunction &MF,
                                      const VarLocInMBB &V,
                                      const VarLocMap &VarLocIDs,
                                      const char *msg,
                                      raw_ostream &Out) const {
  Out << '\n' << msg << '\n';
  for (const MachineBasicBlock &BB : MF) {
    if (!V.count(&BB))
      continue;
    const VarLocSet &L = getVarLocsInMBB(&BB, V);
    if (L.empty())
      continue;
    SmallVector<VarLoc, 32> VarLocs;
    collectAllVarLocs(VarLocs, L, VarLocIDs);
    Out << "MBB: " << BB.getNumber() << ":\n";
    for (const VarLoc &VL : VarLocs) {
      Out << " Var: " << VL.Var.getVariable()->getName();
      Out << " MI: ";
      VL.dump(TRI, TII, Out);
    }
  }
  Out << "\n";
}
VarLocBasedLDV::VarLoc::SpillLoc
VarLocBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) {
  assert(MI.hasOneMemOperand() &&
         "Spill instruction does not have exactly one memory operand?");
  auto MMOI = MI.memoperands_begin();
  const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue();
  assert(PVal->kind() == PseudoSourceValue::FixedStack &&
         "Inconsistent memory operand in spill instruction");
  int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex();
  const MachineBasicBlock *MBB = MI.getParent();
  Register Reg;
  StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg);
  return {Reg, Offset};
}
void VarLocBasedLDV::cleanupEntryValueTransfers(
    const MachineInstr *TRInst, OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs,
    const VarLoc &EntryVL, InstToEntryLocMap &EntryValTransfers) {
  if (EntryValTransfers.empty() || TRInst == nullptr)
    return;

  auto TransRange = EntryValTransfers.equal_range(TRInst);
  for (auto &TDPair : make_range(TransRange.first, TransRange.second)) {
    const VarLoc &EmittedEV = VarLocIDs[TDPair.second];
    if (std::tie(EntryVL.Var, EntryVL.Locs[0].Value.RegNo, EntryVL.Expr) ==
        std::tie(EmittedEV.Var, EmittedEV.Locs[0].Value.RegNo,
                 EmittedEV.Expr)) {
      OpenRanges.erase(EmittedEV);
      EntryValTransfers.erase(TRInst);
      break;
    }
  }
}
void VarLocBasedLDV::removeEntryValue(const MachineInstr &MI,
                                      OpenRangesSet &OpenRanges,
                                      VarLocMap &VarLocIDs,
                                      const VarLoc &EntryVL,
                                      InstToEntryLocMap &EntryValTransfers,
                                      RegDefToInstMap &RegSetInstrs) {
  // The entry-value DBG_VALUE itself does not clobber the entry value.
  if (&MI == &EntryVL.MI)
    return;

  // Only a register DBG_VALUE can silently end the entry value's range.
  if (!MI.getDebugOperand(0).isReg())
    return;

  // Find the instruction (if any) that most recently defined the register
  // this DBG_VALUE refers to.
  Register Reg = MI.getDebugOperand(0).getReg();
  const MachineInstr *TransferInst = nullptr;
  if (auto It = RegSetInstrs.find(Reg); It != RegSetInstrs.end())
    TransferInst = It->second;

  // A DBG_VALUE in the entry block before any defining instruction still
  // describes the entry value.
  if (!TransferInst && !LastNonDbgMI && MI.getParent()->isEntryBlock())
    return;

  // If the parameter's value was simply copied into another register, the
  // entry-value backup is still valid.
  if (MI.getDebugExpression()->getNumElements() == 0 && TransferInst) {
    auto DestSrc = TII->isCopyLikeInstr(*TransferInst);
    if (DestSrc) {
      const MachineOperand *SrcRegOp, *DestRegOp;
      SrcRegOp = DestSrc->Source;
      DestRegOp = DestSrc->Destination;
      for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
        const VarLoc &VL = VarLocIDs[LocIndex::fromRawInteger(ID)];
        if (VL.isEntryValueCopyBackupReg(Reg) &&
            // Entry Values should not be variadic.
            VL.MI.getDebugOperand(0).getReg() == SrcRegOp->getReg())
          return;
      }
    }
  }

  // Otherwise the entry value is clobbered: remove any entry-value DBG_VALUEs
  // already emitted for it and close its range.
  cleanupEntryValueTransfers(TransferInst, OpenRanges, VarLocIDs, EntryVL,
                             EntryValTransfers);
  OpenRanges.erase(EntryVL);
}
void VarLocBasedLDV::transferDebugValue(const MachineInstr &MI,
                                        OpenRangesSet &OpenRanges,
                                        VarLocMap &VarLocIDs,
                                        InstToEntryLocMap &EntryValTransfers,
                                        RegDefToInstMap &RegSetInstrs) {
  if (!MI.isDebugValue())
    return;
  const DILocalVariable *Var = MI.getDebugVariable();
  const DIExpression *Expr = MI.getDebugExpression();
  const DILocation *DebugLoc = MI.getDebugLoc();
  const DILocation *InlinedAt = DebugLoc->getInlinedAt();
  assert(Var->isValidLocationForIntrinsic(DebugLoc) &&
         "Expected inlined-at fields to agree");

  DebugVariable V(Var, Expr, InlinedAt);

  // Check if this DBG_VALUE indicates a parameter's value changing.
  // If that is the case, we should stop tracking its entry value.
  auto EntryValBackupID = OpenRanges.getEntryValueBackup(V);
  if (Var->isParameter() && EntryValBackupID) {
    const VarLoc &EntryVL = VarLocIDs[EntryValBackupID->back()];
    removeEntryValue(MI, OpenRanges, VarLocIDs, EntryVL, EntryValTransfers,
                     RegSetInstrs);
  }

  if (all_of(MI.debug_operands(), [](const MachineOperand &MO) {
        return (MO.isReg() && MO.getReg()) || MO.isImm() || MO.isFPImm() ||
               MO.isCImm() || MO.isTargetIndex();
      })) {
    // Use the normal VarLoc constructor for registers and immediates.
    VarLoc VL(MI);
    // End all previous ranges of VL.Var.
    OpenRanges.erase(VL);

    LocIndices IDs = VarLocIDs.insert(VL);
    // Add the VarLoc to OpenRanges from this DBG_VALUE.
    OpenRanges.insert(IDs, VL);
  } else if (MI.memoperands().size() > 0) {
    llvm_unreachable("DBG_VALUE with mem operand encountered after regalloc?");
  } else {
    // This must be an undefined fragment.
    assert(MI.isUndefDebugValue() &&
           "Unexpected non-undef DBG_VALUE encountered");
    VarLoc VL(MI);
    OpenRanges.erase(VL);
  }
}
void VarLocBasedLDV::collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
                                       const VarLocSet &CollectFrom,
                                       const VarLocMap &VarLocIDs) {
  // The half-open interval [FirstIndex, FirstInvalidIndex) contains all
  // possible VarLoc IDs under the universal location.
  uint64_t FirstIndex = LocIndex::rawIndexForReg(LocIndex::kUniversalLocation);
  uint64_t FirstInvalidIndex =
      LocIndex::rawIndexForReg(LocIndex::kUniversalLocation + 1);
  // Iterate through that half-open interval and collect all the VarLocs.
  for (auto It = CollectFrom.find(FirstIndex), End = CollectFrom.end();
       It != End && *It < FirstInvalidIndex; ++It) {
    LocIndex RegIdx = LocIndex::fromRawInteger(*It);
    Collected.push_back(VarLocIDs[RegIdx]);
  }
}
void VarLocBasedLDV::emitEntryValues(MachineInstr &MI,
                                     OpenRangesSet &OpenRanges,
                                     VarLocMap &VarLocIDs,
                                     InstToEntryLocMap &EntryValTransfers,
                                     VarLocsInRange &KillSet) {
  // Do not insert entry value locations after a terminator.
  if (MI.isTerminator())
    return;

  for (uint32_t ID : KillSet) {
    // The KillSet IDs are indices into the universal location bucket.
    LocIndex Idx = LocIndex(LocIndex::kUniversalLocation, ID);
    const VarLoc &VL = VarLocIDs[Idx];
    if (!VL.Var.getVariable()->isParameter())
      continue;

    auto DebugVar = VL.Var;
    std::optional<LocIndices> EntryValBackupIDs =
        OpenRanges.getEntryValueBackup(DebugVar);

    // If the parameter has no entry value backup, we cannot use its entry
    // value.
    if (!EntryValBackupIDs)
      continue;

    const VarLoc &EntryVL = VarLocIDs[EntryValBackupIDs->back()];
    VarLoc EntryLoc = VarLoc::CreateEntryLoc(EntryVL.MI, EntryVL.Expr,
                                             EntryVL.Locs[0].Value.RegNo);
    LocIndices EntryValueIDs = VarLocIDs.insert(EntryLoc);
    assert(EntryValueIDs.size() == 1 &&
           "EntryValue loc should not be variadic");
    EntryValTransfers.insert({&MI, EntryValueIDs.back()});
    OpenRanges.insert(EntryValueIDs, EntryLoc);
  }
}
void VarLocBasedLDV::insertTransferDebugPair(
    MachineInstr &MI, OpenRangesSet &OpenRanges, TransferMap &Transfers,
    VarLocMap &VarLocIDs, LocIndex OldVarID, TransferKind Kind,
    const VarLoc::MachineLoc &OldLoc, Register NewReg) {
  const VarLoc &OldVarLoc = VarLocIDs[OldVarID];

  auto ProcessVarLoc = [&MI, &OpenRanges, &Transfers, &VarLocIDs](VarLoc &VL) {
    LocIndices LocIds = VarLocIDs.insert(VL);

    // Close this variable's previous location range.
    OpenRanges.erase(VL);

    // Record the new location as an open range, and a postponed transfer
    // inserting a DBG_VALUE for this location.
    OpenRanges.insert(LocIds, VL);
    assert(!MI.isTerminator() && "Cannot insert DBG_VALUE after terminator");
    TransferDebugPair MIP = {&MI, LocIds.back()};
    Transfers.push_back(MIP);
  };

  // End all previous ranges of VL.Var.
  OpenRanges.erase(VarLocIDs[OldVarID]);
  switch (Kind) {
  case TransferKind::TransferCopy: {
    assert(NewReg &&
           "No register supplied when handling a copy of a debug value");
    // Create a DBG_VALUE instruction to describe the Var in its new
    // register location.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for register copy:";
      VL.dump(TRI, TII);
    });
    return;
  }
  case TransferKind::TransferSpill: {
    // Create a DBG_VALUE instruction to describe the Var in its spilled
    // location.
    VarLoc::SpillLoc SpillLocation = extractSpillBaseRegAndOffset(MI);
    VarLoc VL = VarLoc::CreateSpillLoc(
        OldVarLoc, OldLoc, SpillLocation.SpillBase, SpillLocation.SpillOffset);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for spill:";
      VL.dump(TRI, TII);
    });
    return;
  }
  case TransferKind::TransferRestore: {
    assert(NewReg &&
           "No register supplied when handling a restore of a debug value");
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for restore:";
      VL.dump(TRI, TII);
    });
    return;
  }
  }
  llvm_unreachable("Invalid transfer kind");
}
void VarLocBasedLDV::transferRegisterDef(MachineInstr &MI,
                                         OpenRangesSet &OpenRanges,
                                         VarLocMap &VarLocIDs,
                                         InstToEntryLocMap &EntryValTransfers,
                                         RegDefToInstMap &RegSetInstrs) {
  // Meta instructions do not affect the debug liveness of any register they
  // define.
  if (MI.isMetaInstruction())
    return;

  MachineFunction *MF = MI.getMF();
  const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  Register SP = TLI->getStackPointerRegisterToSaveRestore();

  // Find the regs killed by MI, and find regmasks of preserved regs.
  DefinedRegsSet DeadRegs;
  SmallVector<const uint32_t *, 4> RegMasks;
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical() &&
        !(MI.isCall() && MO.getReg() == SP)) {
      // Remove ranges of all aliased registers.
      for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
        DeadRegs.insert((*RAI).id());
      RegSetInstrs.erase(MO.getReg());
      RegSetInstrs.insert({MO.getReg(), &MI});
    } else if (MO.isRegMask()) {
      RegMasks.push_back(MO.getRegMask());
    }
  }

  // Erase VarLocs destroyed by this instruction's regmasks.
  if (!RegMasks.empty()) {
    SmallVector<Register, 32> UsedRegs;
    getUsedRegs(OpenRanges.getVarLocs(), UsedRegs);
    for (Register Reg : UsedRegs) {
      if (Reg == SP)
        continue;
      bool AnyRegMaskKillsReg =
          any_of(RegMasks, [Reg](const uint32_t *RegMask) {
            return MachineOperand::clobbersPhysReg(RegMask, Reg);
          });
      if (AnyRegMaskKillsReg)
        DeadRegs.insert(Reg);
      if (AnyRegMaskKillsReg) {
        RegSetInstrs.erase(Reg);
        RegSetInstrs.insert({Reg, &MI});
      }
    }
  }

  if (DeadRegs.empty())
    return;

  VarLocsInRange KillSet;
  collectIDsForRegs(KillSet, DeadRegs, OpenRanges.getVarLocs(), VarLocIDs);
  OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kUniversalLocation);

  if (ShouldEmitDebugEntryValues)
    emitEntryValues(MI, OpenRanges, VarLocIDs, EntryValTransfers, KillSet);
}
void VarLocBasedLDV::transferWasmDef(MachineInstr &MI,
                                     OpenRangesSet &OpenRanges,
                                     VarLocMap &VarLocIDs) {
  // If this is not a Wasm local.set or local.tee, which sets local values,
  // return.
  int Index;
  int64_t Offset;
  if (!TII->isExplicitTargetIndexDef(MI, Index, Offset))
    return;

  // Find the target indices killed by MI, and delete those variable locations
  // from the open range.
  VarLocsInRange KillSet;
  VarLoc::WasmLoc Loc{Index, Offset};
  for (uint64_t ID : OpenRanges.getWasmVarLocs()) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VL = VarLocIDs[Idx];
    assert(VL.containsWasmLocs() && "Broken VarLocSet?");
    if (VL.usesWasmLoc(Loc))
      KillSet.insert(Idx.Index);
  }
  OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kWasmLocation);
}
bool VarLocBasedLDV::isSpillInstruction(const MachineInstr &MI,
                                        MachineFunction *MF) {
  // TODO: Handle multiple stores folded into one.
  if (!MI.hasOneMemOperand())
    return false;

  // This is not a spill instruction if no valid size was returned from either
  // function.
  if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII))
    return false;

  return true;
}
bool VarLocBasedLDV::isLocationSpill(const MachineInstr &MI,
                                     MachineFunction *MF, Register &Reg) {
  if (!isSpillInstruction(MI, MF))
    return false;

  auto isKilledReg = [&](const MachineOperand MO, Register &Reg) {
    if (!MO.isReg() || !MO.isUse()) {
      Reg = 0;
      return false;
    }
    Reg = MO.getReg();
    return MO.isKill();
  };

  for (const MachineOperand &MO : MI.operands()) {
    // In a spill instruction generated by the InlineSpiller the spilled
    // register has its kill flag set.
    if (isKilledReg(MO, Reg))
      return true;
    if (Reg != 0) {
      // Check whether the next instruction kills the spilled register.
      auto NextI = std::next(MI.getIterator());
      // Skip next instruction that points to basic block end iterator.
      if (MI.getParent()->end() == NextI)
        continue;
      Register RegNext;
      for (const MachineOperand &MONext : NextI->operands()) {
        // Return true if we came across the register from the next
        // instruction that we need.
        if (isKilledReg(MONext, RegNext) && RegNext == Reg)
          return true;
      }
    }
  }
  // Return false if we didn't find a spilled register.
  return false;
}
std::optional<VarLocBasedLDV::VarLoc::SpillLoc>
VarLocBasedLDV::isRestoreInstruction(const MachineInstr &MI,
                                     MachineFunction *MF, Register &Reg) {
  if (!MI.hasOneMemOperand())
    return std::nullopt;

  // FIXME: Handle folded restore instructions with more than one memory
  // operand.
  if (MI.getRestoreSize(TII)) {
    Reg = MI.getOperand(0).getReg();
    return extractSpillBaseRegAndOffset(MI);
  }
  return std::nullopt;
}
void VarLocBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI,
                                                OpenRangesSet &OpenRanges,
                                                VarLocMap &VarLocIDs,
                                                TransferMap &Transfers) {
  MachineFunction *MF = MI.getMF();
  TransferKind TKind;
  Register Reg;
  std::optional<VarLoc::SpillLoc> Loc;

  // First, if there are any DBG_VALUEs pointing at a spill slot that is
  // written to, terminate those locations: the value in memory has changed.
  VarLocsInRange KillSet;
  if (isSpillInstruction(MI, MF)) {
    Loc = extractSpillBaseRegAndOffset(MI);
    for (uint64_t ID : OpenRanges.getSpillVarLocs()) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      const VarLoc &VL = VarLocIDs[Idx];
      assert(VL.containsSpillLocs() && "Broken VarLocSet?");
      if (VL.usesSpillLoc(*Loc)) {
        // This location is overwritten by the current instruction: close the
        // open range and record an undef DBG_VALUE for the overwritten slot.
        KillSet.insert(ID);
        unsigned SpillLocIdx = VL.getSpillLocIdx(*Loc);
        VarLoc::MachineLoc OldLoc = VL.Locs[SpillLocIdx];
        VarLoc UndefVL = VarLoc::CreateCopyLoc(VL, OldLoc, 0);
        LocIndices UndefLocIDs = VarLocIDs.insert(UndefVL);
        Transfers.push_back({&MI, UndefLocIDs.back()});
      }
    }
    OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kSpillLocation);
  }

  // Try to recognise spill and restore instructions that may transfer a value.
  if (isLocationSpill(MI, MF, Reg)) {
    TKind = TransferKind::TransferSpill;
  } else {
    if (!(Loc = isRestoreInstruction(MI, MF, Reg)))
      return;
    TKind = TransferKind::TransferRestore;
  }

  // Check whether the register or spill location is the location of a debug
  // value.
  auto TransferCandidates = OpenRanges.getEmptyVarLocRange();
  if (TKind == TransferKind::TransferSpill)
    TransferCandidates = OpenRanges.getRegisterVarLocs(Reg);
  else if (TKind == TransferKind::TransferRestore)
    TransferCandidates = OpenRanges.getSpillVarLocs();
  for (uint64_t ID : TransferCandidates) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VL = VarLocIDs[Idx];
    unsigned LocIdx;
    if (TKind == TransferKind::TransferSpill) {
      assert(VL.usesReg(Reg) && "Broken VarLocSet?");
      LLVM_DEBUG(dbgs() << "Spilling Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getRegIdx(Reg);
    } else {
      assert(TKind == TransferKind::TransferRestore && VL.containsSpillLocs() &&
             "Broken VarLocSet?");
      if (!VL.usesSpillLoc(*Loc))
        // The spill location is not the location of a debug value.
        continue;
      LLVM_DEBUG(dbgs() << "Restoring Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getSpillLocIdx(*Loc);
    }
    VarLoc::MachineLoc MLoc = VL.Locs[LocIdx];
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx, TKind,
                            MLoc, Reg);
    return;
  }
}
void VarLocBasedLDV::transferRegisterCopy(MachineInstr &MI,
                                          OpenRangesSet &OpenRanges,
                                          VarLocMap &VarLocIDs,
                                          TransferMap &Transfers) {
  auto DestSrc = TII->isCopyLikeInstr(MI);
  if (!DestSrc)
    return;

  const MachineOperand *DestRegOp = DestSrc->Destination;
  const MachineOperand *SrcRegOp = DestSrc->Source;

  if (!DestRegOp->isDef())
    return;

  auto isCalleeSavedReg = [&](Register Reg) {
    for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
      if (CalleeSavedRegs.test((*RAI).id()))
        return true;
    return false;
  };

  Register SrcReg = SrcRegOp->getReg();
  Register DestReg = DestRegOp->getReg();

  // We are only interested in copies whose destination is a callee-saved
  // register, since such copies preserve values across calls.
  if (!isCalleeSavedReg(DestReg))
    return;

  // If the copied register holds an entry-value backup, start tracking the
  // backup in the new (callee-saved) register instead.
  for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VL = VarLocIDs[Idx];
    if (VL.isEntryValueBackupReg(SrcReg)) {
      VarLoc EntryValLocCopyBackup =
          VarLoc::CreateEntryCopyBackupLoc(VL.MI, VL.Expr, DestReg);
      // Stop tracking the original entry-value backup.
      OpenRanges.erase(VL);

      // Start tracking the entry-value copy.
      LocIndices EntryValCopyLocIDs = VarLocIDs.insert(EntryValLocCopyBackup);
      OpenRanges.insert(EntryValCopyLocIDs, EntryValLocCopyBackup);
      break;
    }
  }

  // Transfer any open register locations from the source register to the
  // destination register.
  for (uint64_t ID : OpenRanges.getRegisterVarLocs(SrcReg)) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    assert(VarLocIDs[Idx].usesReg(SrcReg) && "Broken VarLocSet?");
    VarLoc::MachineLocValue Loc;
    Loc.RegNo = SrcReg;
    VarLoc::MachineLoc MLoc{VarLoc::MachineLocKind::RegisterKind, Loc};
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx,
                            TransferKind::TransferCopy, MLoc, DestReg);
    return;
  }
}
bool VarLocBasedLDV::transferTerminator(MachineBasicBlock *CurMBB,
                                        OpenRangesSet &OpenRanges,
                                        VarLocInMBB &OutLocs,
                                        const VarLocMap &VarLocIDs) {
  bool Changed = false;
  LLVM_DEBUG({
    VarVec VarLocs;
    OpenRanges.getUniqueVarLocs(VarLocs, VarLocIDs);
    for (VarLoc &VL : VarLocs) {
      // Copy OpenRanges to OutLocs, if not already present.
      dbgs() << "Add to OutLocs in MBB #" << CurMBB->getNumber() << ": ";
      VL.dump(TRI, TII);
    }
  });
  VarLocSet &VLS = getVarLocsInMBB(CurMBB, OutLocs);
  Changed = VLS != OpenRanges.getVarLocs();
  // The new OutLocs set may be different due to spill, restore or register
  // copy instruction processing.
  if (Changed)
    VLS = OpenRanges.getVarLocs();
  OpenRanges.clear();
  return Changed;
}
void VarLocBasedLDV::accumulateFragmentMap(MachineInstr &MI,
                                           VarToFragments &SeenFragments,
                                           OverlapMap &OverlappingFragments) {
  DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
  FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();

  // If this is the first sighting of this variable, then we are guaranteed
  // there are currently no overlapping fragments either. Initialize the set
  // of seen fragments, record no overlaps for the current one, and return.
  auto [SeenIt, Inserted] = SeenFragments.try_emplace(MIVar.getVariable());
  if (Inserted) {
    SeenIt->second.insert(ThisFragment);

    OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
    return;
  }

  // If this particular Variable/Fragment pair already exists in the overlap
  // map, it has already been accounted for.
  auto IsInOLapMap =
      OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
  if (!IsInOLapMap.second)
    return;

  auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
  auto &AllSeenFragments = SeenIt->second;

  // Otherwise, examine all other seen fragments for this variable, with "this"
  // fragment being a previously unseen fragment. Record any pair of
  // overlapping fragments.
  for (const auto &ASeenFragment : AllSeenFragments) {
    // Does this previously seen fragment overlap?
    if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) {
      // Yes: mark the current fragment as being overlapped.
      ThisFragmentsOverlaps.push_back(ASeenFragment);
      // Mark the previously seen fragment as being overlapped by the current
      // one.
      auto ASeenFragmentsOverlaps =
          OverlappingFragments.find({MIVar.getVariable(), ASeenFragment});
      assert(ASeenFragmentsOverlaps != OverlappingFragments.end() &&
             "Previously seen var fragment has no vector of overlaps");
      ASeenFragmentsOverlaps->second.push_back(ThisFragment);
    }
  }

  AllSeenFragments.insert(ThisFragment);
}
void VarLocBasedLDV::process(MachineInstr &MI, OpenRangesSet &OpenRanges,
                             VarLocMap &VarLocIDs, TransferMap &Transfers,
                             InstToEntryLocMap &EntryValTransfers,
                             RegDefToInstMap &RegSetInstrs) {
  if (!MI.isDebugInstr())
    LastNonDbgMI = &MI;
  transferDebugValue(MI, OpenRanges, VarLocIDs, EntryValTransfers,
                     RegSetInstrs);
  transferRegisterDef(MI, OpenRanges, VarLocIDs, EntryValTransfers,
                      RegSetInstrs);
  transferWasmDef(MI, OpenRanges, VarLocIDs);
  transferRegisterCopy(MI, OpenRanges, VarLocIDs, Transfers);
  transferSpillOrRestoreInst(MI, OpenRanges, VarLocIDs, Transfers);
}
bool VarLocBasedLDV::join(
    MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
    const VarLocMap &VarLocIDs,
    SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
    SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks) {
  VarLocSet InLocsT(Alloc); // Temporary incoming locations.

  // For all predecessors of this MBB, find the set of VarLocs that can be
  // joined.
  int NumVisited = 0;
  for (auto *p : MBB.predecessors()) {
    // Ignore backedges if we have not visited the predecessor yet. As the
    // predecessor hasn't yet had locations propagated into it, most locations
    // will not yet be valid, so treat them as all being uninitialized and
    // potentially valid. If a location guessed to be correct here is
    // invalidated later, we will remove it when we revisit this block.
    if (!Visited.count(p)) {
      LLVM_DEBUG(dbgs() << "  ignoring unvisited pred MBB: " << p->getNumber()
                        << "\n");
      continue;
    }
    auto OL = OutLocs.find(p);
    // Join is null in case of empty OutLocs from any of the pred.
    if (OL == OutLocs.end())
      return false;

    // Just copy over the Out locs to incoming locs for the first visited
    // predecessor, and for all other predecessors join the Out locs.
    VarLocSet &OutLocVLS = *OL->second;
    if (!NumVisited)
      InLocsT = OutLocVLS;
    else
      InLocsT &= OutLocVLS;

    LLVM_DEBUG({
      if (!InLocsT.empty()) {
        VarVec VarLocs;
        collectAllVarLocs(VarLocs, InLocsT, VarLocIDs);
        for (const VarLoc &VL : VarLocs)
          dbgs() << "  gathered candidate incoming var: "
                 << VL.Var.getVariable()->getName() << "\n";
      }
    });

    NumVisited++;
  }

  // Filter out DBG_VALUES that are out of scope.
  VarLocSet KillSet(Alloc);
  bool IsArtificial = ArtificialBlocks.count(&MBB);
  if (!IsArtificial) {
    for (uint64_t ID : InLocsT) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      if (!VarLocIDs[Idx].dominates(LS, MBB)) {
        KillSet.set(ID);
        LLVM_DEBUG({
          auto Name = VarLocIDs[Idx].Var.getVariable()->getName();
          dbgs() << "  killing " << Name << ", it doesn't dominate MBB\n";
        });
      }
    }
  }
  InLocsT.intersectWithComplement(KillSet);

  // As we are processing blocks in reverse post-order we should have processed
  // at least one predecessor, unless this is the entry block which has no
  // predecessor.
  assert((NumVisited || MBB.pred_empty()) &&
         "Should have processed at least one predecessor");

  VarLocSet &ILS = getVarLocsInMBB(&MBB, InLocs);
  bool Changed = false;
  if (ILS != InLocsT) {
    ILS = InLocsT;
    Changed = true;
  }

  return Changed;
}
void VarLocBasedLDV::flushPendingLocs(VarLocInMBB &PendingInLocs,
                                      VarLocMap &VarLocIDs) {
  // PendingInLocs records all locations propagated into blocks, which have
  // not had DBG_VALUE insts created. Go through and create those insts now.
  for (auto &Iter : PendingInLocs) {
    // The map is keyed on a constant pointer, unwrap it so we can insert
    // instructions.
    auto &MBB = const_cast<MachineBasicBlock &>(*Iter.first);
    VarLocSet &Pending = *Iter.second;

    SmallVector<VarLoc, 32> VarLocs;
    collectAllVarLocs(VarLocs, Pending, VarLocIDs);

    for (VarLoc DiffIt : VarLocs) {
      // The ID location is live-in to MBB -- work out what kind of machine
      // location it is and create a DBG_VALUE.
      if (DiffIt.isEntryBackupLoc())
        continue;
      MachineInstr *MI = DiffIt.BuildDbgValue(*MBB.getParent());
      MBB.insert(MBB.instr_begin(), MI);
    }
  }
}
bool VarLocBasedLDV::isEntryValueCandidate(
    const MachineInstr &MI, const DefinedRegsSet &DefinedRegs) const {
  assert(MI.isDebugValue() && "This must be DBG_VALUE.");

  // Only consider function parameters.
  auto *DIVar = MI.getDebugVariable();
  if (!DIVar->isParameter())
    return false;

  // Do not consider parameters that belong to an inlined function.
  if (MI.getDebugLoc()->getInlinedAt())
    return false;

  // If a parameter's value has been propagated from the caller, then the
  // parameter's DBG_VALUE may be described using a register defined by some
  // instruction in the entry block, in which case we shouldn't create an
  // entry value.
  if (DefinedRegs.count(MI.getDebugOperand(0).getReg()))
    return false;

  // A simple deref expression is handled; other pre-existing expressions
  // (e.g. fragments) are not currently supported.
  const DIExpression *Expr = MI.getDebugExpression();
  if (Expr->getNumElements() > 0 && !Expr->isDeref())
    return false;

  return true;
}
/// Collect all register defines (including aliases) for the given instruction.
static void collectRegDefs(const MachineInstr &MI, DefinedRegsSet &Regs,
                           const TargetRegisterInfo *TRI) {
  for (const MachineOperand &MO : MI.operands())
    if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical()) {
      Regs.insert(MO.getReg());
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Regs.insert((*AI).id());
    }
}
void VarLocBasedLDV::recordEntryValue(const MachineInstr &MI,
                                      const DefinedRegsSet &DefinedRegs,
                                      OpenRangesSet &OpenRanges,
                                      VarLocMap &VarLocIDs) {
  if (!ShouldEmitDebugEntryValues)
    return;

  DebugVariable V(MI.getDebugVariable(), MI.getDebugExpression(),
                  MI.getDebugLoc()->getInlinedAt());

  if (!isEntryValueCandidate(MI, DefinedRegs) ||
      OpenRanges.getEntryValueBackup(V))
    return;

  // Create the entry value and use it as a backup location until it is
  // valid. It is valid until the parameter is modified.
  DIExpression *NewExpr =
      DIExpression::prepend(MI.getDebugExpression(), DIExpression::EntryValue);
  VarLoc EntryValLocAsBackup = VarLoc::CreateEntryBackupLoc(MI, NewExpr);
  LocIndices EntryValLocIDs = VarLocIDs.insert(EntryValLocAsBackup);
  OpenRanges.insert(EntryValLocIDs, EntryValLocAsBackup);
}
bool VarLocBasedLDV::ExtendRanges(MachineFunction &MF,
                                  MachineDominatorTree *DomTree,
                                  bool ShouldEmitDebugEntryValues,
                                  unsigned InputBBLimit,
                                  unsigned InputDbgValLimit) {
  this->ShouldEmitDebugEntryValues = ShouldEmitDebugEntryValues;

  bool Changed = false;
  bool OLChanged = false;
  bool MBBJoined = false;

  VarLocMap VarLocIDs;         // Map VarLoc<>unique ID for use in bitvectors.
  OverlapMap OverlapFragments; // Map of overlapping variable fragments.
  OpenRangesSet OpenRanges(Alloc, OverlapFragments);
                               // Ranges that are open until end of bb.
  VarLocInMBB OutLocs;         // Ranges that exist beyond bb.
  VarLocInMBB InLocs;          // Ranges that are incoming after joining.
  TransferMap Transfers;       // DBG_VALUEs associated with transfers (such as
                               // spills, copies and restores).
  // Map responsible MI to the transfer emitted from a backup entry value.
  InstToEntryLocMap EntryValTransfers;
  // Map a Register to the last MI which clobbered it.
  RegDefToInstMap RegSetInstrs;

  VarToFragments SeenFragments;

  // Blocks which are artificial, i.e. blocks which exclusively contain
  // instructions without locations, or with line 0 locations.
  SmallPtrSet<const MachineBasicBlock *, 16> ArtificialBlocks;

  DenseMap<unsigned int, MachineBasicBlock *> OrderToBB;
  DenseMap<MachineBasicBlock *, unsigned int> BBToOrder;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;

  // Set of register defines that are seen when traversing the entry block
  // looking for debug entry value candidates.
  DefinedRegsSet DefinedRegs;
  // Only in the case of the entry MBB collect DBG_VALUEs representing
  // function parameters in order to generate debug entry values for them.
  MachineBasicBlock &First_MBB = *(MF.begin());
  for (auto &MI : First_MBB) {
    collectRegDefs(MI, DefinedRegs, TRI);
    if (MI.isDebugValue())
      recordEntryValue(MI, DefinedRegs, OpenRanges, VarLocIDs);
  }

  // Scan all blocks for DBG_VALUEs and record any fragment overlaps.
  for (auto &MBB : MF)
    for (auto &MI : MBB)
      if (MI.isDebugValue())
        accumulateFragmentMap(MI, SeenFragments, OverlapFragments);
  auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool {
    if (const DebugLoc &DL = MI.getDebugLoc())
      return DL.getLine() != 0;
    return false;
  };
  for (auto &MBB : MF)
    if (none_of(MBB.instrs(), hasNonArtificialLocation))
      ArtificialBlocks.insert(&MBB);

  LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
                              "OutLocs after initialization", dbgs()));
  ReversePostOrderTraversal<MachineFunction *> RPOT(&MF);
  unsigned int RPONumber = 0;
  for (MachineBasicBlock *MBB : RPOT) {
    OrderToBB[RPONumber] = MBB;
    BBToOrder[MBB] = RPONumber;
    Worklist.push(RPONumber);
    ++RPONumber;
  }
  // If the function is too big, give up on extending ranges.
  if (RPONumber > InputBBLimit) {
    unsigned NumInputDbgValues = 0;
    for (auto &MBB : MF)
      for (auto &MI : MBB)
        if (MI.isDebugValue())
          ++NumInputDbgValues;
    if (NumInputDbgValues > InputDbgValLimit) {
      LLVM_DEBUG(dbgs() << "Disabling VarLocBasedLDV: " << MF.getName()
                        << " has " << RPONumber << " basic blocks and "
                        << NumInputDbgValues
                        << " input DBG_VALUEs, exceeding limits.\n");
      return false;
    }
  }
  // This is a standard "union of predecessor outs" dataflow problem. To solve
  // it, we perform join() and process() using the two worklist method until
  // the ranges converge.
  SmallPtrSet<const MachineBasicBlock *, 16> Visited;
  while (!Worklist.empty() || !Pending.empty()) {
    // We track what is on the pending worklist to avoid inserting the same
    // thing twice.
    SmallPtrSet<MachineBasicBlock *, 16> OnPending;
    while (!Worklist.empty()) {
      MachineBasicBlock *MBB = OrderToBB[Worklist.top()];
      Worklist.pop();
      MBBJoined = join(*MBB, OutLocs, InLocs, VarLocIDs, Visited,
                       ArtificialBlocks);
      MBBJoined |= Visited.insert(MBB).second;
      if (MBBJoined) {
        MBBJoined = false;
        Changed = true;
        // Now that we have started to extend ranges across BBs we need to
        // examine spill, copy and restore instructions to see whether they
        // operate with registers that correspond to user variables.
        // First load any pending inlocs.
        OpenRanges.insertFromLocSet(getVarLocsInMBB(MBB, InLocs), VarLocIDs);
        LastNonDbgMI = nullptr;
        RegSetInstrs.clear();
        for (auto &MI : *MBB)
          process(MI, OpenRanges, VarLocIDs, Transfers, EntryValTransfers,
                  RegSetInstrs);
        OLChanged |= transferTerminator(MBB, OpenRanges, OutLocs, VarLocIDs);

        LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
                                    "OutLocs after propagating", dbgs()));
        LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs,
                                    "InLocs after propagating", dbgs()));

        if (OLChanged) {
          OLChanged = false;
          for (auto *s : MBB->successors())
            if (OnPending.insert(s).second) {
              Pending.push(BBToOrder[s]);
            }
        }
      }
    }
    Worklist.swap(Pending);
    // At this point, pending must be empty, since it was just swapped with the
    // now-empty worklist.
    assert(Pending.empty() && "Pending should be empty");
  }
  // Add any DBG_VALUE instructions created by location transfers.
  for (auto &TR : Transfers) {
    assert(!TR.TransferInst->isTerminator() &&
           "Cannot insert DBG_VALUE after terminator");
    MachineBasicBlock *MBB = TR.TransferInst->getParent();
    const VarLoc &VL = VarLocIDs[TR.LocationID];
    MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TR.TransferInst->getIterator(), MI);
  }
  Transfers.clear();

  // Add DBG_VALUEs created using backup entry-value locations.
  for (auto &TR : EntryValTransfers) {
    MachineInstr *TRInst = const_cast<MachineInstr *>(TR.first);
    assert(!TRInst->isTerminator() &&
           "Cannot insert DBG_VALUE after terminator");
    MachineBasicBlock *MBB = TRInst->getParent();
    const VarLoc &VL = VarLocIDs[TR.second];
    MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TRInst->getIterator(), MI);
  }
  EntryValTransfers.clear();

  // Deferred inlocs will not have had any DBG_VALUE insts created; do that
  // now.
  flushPendingLocs(InLocs, VarLocIDs);

  LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs, "Final OutLocs", dbgs()));
  LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs, "Final InLocs", dbgs()));
  return Changed;
}
LDVImpl *makeVarLocBasedLiveDebugValues() {
  return new VarLocBasedLDV();
}
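
// Minimal usage sketch (hedged: ownership and the concrete argument values
// are assumptions; the real driver lives outside this file):
//   std::unique_ptr<LDVImpl> Impl(makeVarLocBasedLiveDebugValues());
//   bool Changed = Impl->ExtendRanges(MF, DomTree, ShouldEmitDebugEntryValues,
//                                     InputBBLimit, InputDbgValLimit);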