}
};
-struct VFPtrInfo {
+/// Holds information about the inheritance path to a virtual base or function
+/// table pointer. A record may contain as many vfptrs or vbptrs as there are
+/// base subobjects.
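+///
+/// For illustration: given
+///   struct A { virtual void f(); };
+///   struct B { virtual void g(); };
+///   struct C : A, B {};
+/// C contains two vfptrs, one per base subobject, so two VPtrInfo entries
+/// (and hence two vftables) are produced for C.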
+struct VPtrInfo {
typedef SmallVector<const CXXRecordDecl *, 1> BasePath;
- // Don't pass the PathToMangle as it should be calculated later.
- VFPtrInfo(CharUnits VFPtrOffset, const BasePath &PathToBaseWithVFPtr)
- : VBTableIndex(0), LastVBase(0), VFPtrOffset(VFPtrOffset),
- PathToBaseWithVFPtr(PathToBaseWithVFPtr), VFPtrFullOffset(VFPtrOffset) {
- }
-
- // Don't pass the PathToMangle as it should be calculated later.
- VFPtrInfo(uint64_t VBTableIndex, const CXXRecordDecl *LastVBase,
- CharUnits VFPtrOffset, const BasePath &PathToBaseWithVFPtr,
- CharUnits VFPtrFullOffset)
- : VBTableIndex(VBTableIndex), LastVBase(LastVBase),
- VFPtrOffset(VFPtrOffset), PathToBaseWithVFPtr(PathToBaseWithVFPtr),
- VFPtrFullOffset(VFPtrFullOffset) {
- assert(VBTableIndex && "The full constructor should only be used "
- "for vfptrs in virtual bases");
- assert(LastVBase);
- }
-
- /// If nonzero, holds the vbtable index of the virtual base with the vfptr.
- uint64_t VBTableIndex;
-
- /// Stores the last vbase on the path from the complete type to the vfptr.
- const CXXRecordDecl *LastVBase;
-
- /// This is the offset of the vfptr from the start of the last vbase,
- /// or the complete type if there are no virtual bases.
- CharUnits VFPtrOffset;
-
- /// This holds the base classes path from the complete type to the first base
- /// with the given vfptr offset, in the base-to-derived order.
- BasePath PathToBaseWithVFPtr;
-
- /// This holds the subset of records that need to be mangled into the vftable
- /// symbol name in order to get a unique name, in the derived-to-base order.
- BasePath PathToMangle;
-
- /// This is the full offset of the vfptr from the start of the complete type.
- CharUnits VFPtrFullOffset;
-};
-
-/// Holds information for a virtual base table for a single subobject. A record
-/// may contain as many vbptrs as there are base subobjects.
-struct VBTableInfo {
- VBTableInfo(const CXXRecordDecl *RD)
- : ReusingBase(RD), BaseWithVBPtr(RD), NextBaseToMangle(RD) {}
+ VPtrInfo(const CXXRecordDecl *RD)
+ : ReusingBase(RD), BaseWithVPtr(RD), NextBaseToMangle(RD) {}
// Copy constructor.
// FIXME: Uncomment when we've moved to C++11.
- //VBTableInfo(const VBTableInfo &) = default;
+ // VPtrInfo(const VPtrInfo &) = default;
- /// The vbtable will hold all of the virtual bases of ReusingBase. This may
- /// or may not be the same class as VBPtrSubobject.Base. A derived class will
- /// reuse the vbptr of the first non-virtual base subobject that has one.
+ /// The vtable will hold all of the virtual bases or virtual methods of
+ /// ReusingBase. This may or may not be the same class as BaseWithVPtr.
+ /// A derived class will reuse the vptr of the first non-virtual base
+ /// subobject that has one.
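+ /// For illustration: given
+ ///   struct A { virtual void f(); };
+ ///   struct B : A { virtual void g(); };
+ /// B reuses the vfptr introduced by A, so B's vftable path ends up with
+ /// ReusingBase == B while BaseWithVPtr == A.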
const CXXRecordDecl *ReusingBase;
- /// BaseWithVBPtr is at this offset from its containing complete object or
+ /// BaseWithVPtr is at this offset from its containing complete object or
/// virtual base.
CharUnits NonVirtualOffset;
- /// The vbptr is stored inside this subobject.
- const CXXRecordDecl *BaseWithVBPtr;
+ /// The vptr is stored inside this subobject.
+ const CXXRecordDecl *BaseWithVPtr;
/// The bases from the inheritance path that got used to mangle the vbtable
/// name. This is not really a full path like a CXXBasePath. It holds the
/// subset of records that need to be mangled into the vbtable symbol name in
/// order to get a unique name.
- SmallVector<const CXXRecordDecl *, 1> MangledPath;
+ BasePath MangledPath;
/// The next base to push onto the mangled path if this path is ambiguous in a
/// derived class. If it's null, then it's already been pushed onto the path.
/// The set of possibly indirect vbases that contain this vbtable. When a
/// derived class indirectly inherits from the same vbase twice, we only keep
- /// vbtables and their paths from the first instance.
- SmallVector<const CXXRecordDecl *, 1> ContainingVBases;
+ /// vtables and their paths from the first instance.
+ BasePath ContainingVBases;
+
+ /// This holds the base classes path from the complete type to the first base
+ /// with the given vfptr offset, in the base-to-derived order. Only used for
+ /// vftables.
+ BasePath PathToBaseWithVPtr;
+
+ /// Static offset from the top of the most derived class to this vfptr,
+ /// including any virtual base offset. Only used for vftables.
+ CharUnits FullOffsetInMDC;
- /// The vbptr is stored inside the non-virtual component of this virtual base.
- const CXXRecordDecl *getVBaseWithVBPtr() const {
+ /// The vptr is stored inside the non-virtual component of this virtual base.
+ const CXXRecordDecl *getVBaseWithVPtr() const {
return ContainingVBases.empty() ? 0 : ContainingVBases.front();
}
};
-typedef SmallVector<VBTableInfo *, 2> VBTableVector;
+typedef SmallVector<VPtrInfo *, 2> VPtrInfoVector;
/// All virtual base related information about a given record decl. Includes
/// information on all virtual base tables and the path components that are used
/// to mangle them.
struct VirtualBaseInfo {
- ~VirtualBaseInfo() { llvm::DeleteContainerPointers(VBTables); }
+ ~VirtualBaseInfo() { llvm::DeleteContainerPointers(VBPtrPaths); }
/// A map from virtual base to vbtable index for doing a conversion from the
/// derived class to a base.
/// Information on all virtual base tables used when this record is the most
/// derived class.
- VBTableVector VBTables;
+ VPtrInfoVector VBPtrPaths;
};
class MicrosoftVTableContext : public VTableContextBase {
}
};
- typedef SmallVector<VFPtrInfo, 1> VFPtrListTy;
-
private:
ASTContext &Context;
MethodVFTableLocationsTy;
MethodVFTableLocationsTy MethodVFTableLocations;
- typedef llvm::DenseMap<const CXXRecordDecl *, VFPtrListTy>
+ typedef llvm::DenseMap<const CXXRecordDecl *, VPtrInfoVector>
VFPtrLocationsMapTy;
VFPtrLocationsMapTy VFPtrLocations;
llvm::DenseMap<const CXXRecordDecl *, VirtualBaseInfo *> VBaseInfo;
- typedef llvm::SmallSetVector<const CXXRecordDecl *, 8> BasesSetVectorTy;
- void enumerateVFPtrs(const CXXRecordDecl *MostDerivedClass,
- const ASTRecordLayout &MostDerivedClassLayout,
- BaseSubobject Base, const CXXRecordDecl *LastVBase,
- const VFPtrInfo::BasePath &PathFromCompleteClass,
- BasesSetVectorTy &VisitedVBases,
- MicrosoftVTableContext::VFPtrListTy &Result);
-
- void enumerateVFPtrs(const CXXRecordDecl *ForClass,
- MicrosoftVTableContext::VFPtrListTy &Result);
+ void enumerateVFPtrs(const CXXRecordDecl *ForClass, VPtrInfoVector &Result);
void computeVTableRelatedInformation(const CXXRecordDecl *RD);
const VirtualBaseInfo *
computeVBTableRelatedInformation(const CXXRecordDecl *RD);
- void computeVBTablePaths(const CXXRecordDecl *RD, VBTableVector &Paths);
+ void computeVTablePaths(bool ForVBTables, const CXXRecordDecl *RD,
+ VPtrInfoVector &Paths);
public:
MicrosoftVTableContext(ASTContext &Context)
~MicrosoftVTableContext();
- const VFPtrListTy &getVFPtrOffsets(const CXXRecordDecl *RD);
+ const VPtrInfoVector &getVFPtrOffsets(const CXXRecordDecl *RD);
const VTableLayout &getVFTableLayout(const CXXRecordDecl *RD,
CharUnits VFPtrOffset);
unsigned getVBTableIndex(const CXXRecordDecl *Derived,
const CXXRecordDecl *VBase);
- const VBTableVector &enumerateVBTables(const CXXRecordDecl *RD);
+ const VPtrInfoVector &enumerateVBTables(const CXXRecordDecl *RD);
static bool classof(const VTableContextBase *VT) { return VT->isMicrosoft(); }
};
const ASTRecordLayout &MostDerivedClassLayout;
- VFPtrInfo WhichVFPtr;
+ const VPtrInfo &WhichVFPtr;
/// FinalOverriders - The final overriders of the most derived class.
const FinalOverriders Overriders;
// and the entries shadowed by return adjusting thunks.
if (MD->getParent() != MostDerivedClass || MI.Shadowed)
continue;
- MethodVFTableLocation Loc(MI.VBTableIndex, WhichVFPtr.LastVBase,
- WhichVFPtr.VFPtrOffset, MI.VFTableIndex);
+ MethodVFTableLocation Loc(MI.VBTableIndex, WhichVFPtr.getVBaseWithVPtr(),
+ WhichVFPtr.NonVirtualOffset, MI.VFTableIndex);
if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(MD)) {
MethodVFTableLocations[GlobalDecl(DD, Dtor_Deleting)] = Loc;
} else {
public:
VFTableBuilder(MicrosoftVTableContext &VTables,
- const CXXRecordDecl *MostDerivedClass, VFPtrInfo Which)
+ const CXXRecordDecl *MostDerivedClass, const VPtrInfo *Which)
: VTables(VTables),
Context(MostDerivedClass->getASTContext()),
MostDerivedClass(MostDerivedClass),
MostDerivedClassLayout(Context.getASTRecordLayout(MostDerivedClass)),
- WhichVFPtr(Which),
+ WhichVFPtr(*Which),
Overriders(MostDerivedClass, CharUnits(), MostDerivedClass) {
LayoutVFTable();
const ASTRecordLayout::VBaseOffsetsMapTy &VBaseMap =
MostDerivedClassLayout.getVBaseOffsetsMap();
const ASTRecordLayout::VBaseOffsetsMapTy::const_iterator &VBaseMapEntry =
- VBaseMap.find(WhichVFPtr.LastVBase);
+ VBaseMap.find(WhichVFPtr.getVBaseWithVPtr());
assert(VBaseMapEntry != VBaseMap.end());
// Check if we need a vtordisp adjustment at all.
CharUnits VFPtrVBaseOffset = VBaseMapEntry->second.VBaseOffset;
// The implicit vtordisp field is located right before the vbase.
TA.Virtual.Microsoft.VtordispOffset =
- (VFPtrVBaseOffset - WhichVFPtr.VFPtrFullOffset).getQuantity() - 4;
+ (VFPtrVBaseOffset - WhichVFPtr.FullOffsetInMDC).getQuantity() - 4;
// If the final overrider is defined in either:
// - the most derived class or its non-virtual base or
const CXXRecordDecl *OverriderVBase =
ComputeBaseOffset(Context, OverriderRD, MostDerivedClass).VirtualBase;
- if (!OverriderVBase || OverriderVBase == WhichVFPtr.LastVBase)
+ if (!OverriderVBase || OverriderVBase == WhichVFPtr.getVBaseWithVPtr())
return;
// Otherwise, we need to use the dynamic offset of the final overrider
// in order to get "this" adjustment right.
TA.Virtual.Microsoft.VBPtrOffset =
- (VFPtrVBaseOffset + WhichVFPtr.VFPtrOffset -
+ (VFPtrVBaseOffset + WhichVFPtr.NonVirtualOffset -
MostDerivedClassLayout.getVBPtrOffset()).getQuantity();
TA.Virtual.Microsoft.VBOffsetOffset =
Context.getTypeSizeInChars(Context.IntTy).getQuantity() *
// the one defined by the vfptr base path or the primary base of the current class.
const CXXRecordDecl *NextBase = 0, *NextLastVBase = LastVBase;
CharUnits NextBaseOffset;
- if (BaseDepth < WhichVFPtr.PathToBaseWithVFPtr.size()) {
- NextBase = WhichVFPtr.PathToBaseWithVFPtr[BaseDepth];
+ if (BaseDepth < WhichVFPtr.PathToBaseWithVPtr.size()) {
+ NextBase = WhichVFPtr.PathToBaseWithVPtr[BaseDepth];
if (Layout.getVBaseOffsetsMap().count(NextBase)) {
NextLastVBase = NextBase;
NextBaseOffset = MostDerivedClassLayout.getVBaseClassOffset(NextBase);
// Create a this-adjusting thunk if needed.
CharUnits TI = ComputeThisOffset(MD, Base, Overrider);
- if (TI != WhichVFPtr.VFPtrFullOffset) {
+ if (TI != WhichVFPtr.FullOffsetInMDC) {
ThisAdjustmentOffset.NonVirtual =
- (TI - WhichVFPtr.VFPtrFullOffset).getQuantity();
+ (TI - WhichVFPtr.FullOffsetInMDC).getQuantity();
}
- if (WhichVFPtr.LastVBase)
+ if (WhichVFPtr.getVBaseWithVPtr())
CalculateVtordispAdjustment(Overrider, TI, ThisAdjustmentOffset);
if (!ThisAdjustmentOffset.isEmpty()) {
AddThunk(MD, VTableThunks[SubOverrideMI.VFTableIndex]);
}
}
- } else if (Base.getBaseOffset() != WhichVFPtr.VFPtrFullOffset ||
+ } else if (Base.getBaseOffset() != WhichVFPtr.FullOffsetInMDC ||
MD->size_overridden_methods()) {
// Skip methods that don't belong to the vftable of the current class,
// e.g. each method that wasn't seen in any of the visited sub-bases
}
}
-static void PrintBasePath(const VFPtrInfo::BasePath &Path, raw_ostream &Out) {
- for (VFPtrInfo::BasePath::const_reverse_iterator I = Path.rbegin(),
+static void PrintBasePath(const VPtrInfo::BasePath &Path, raw_ostream &Out) {
+ for (VPtrInfo::BasePath::const_reverse_iterator I = Path.rbegin(),
E = Path.rend(); I != E; ++I) {
Out << "'";
(*I)->printQualifiedName(Out);
void VFTableBuilder::dumpLayout(raw_ostream &Out) {
Out << "VFTable for ";
- PrintBasePath(WhichVFPtr.PathToBaseWithVFPtr, Out);
+ PrintBasePath(WhichVFPtr.PathToBaseWithVPtr, Out);
Out << "'";
MostDerivedClass->printQualifiedName(Out);
Out << "' (" << Components.size() << " entries).\n";
return false;
}
-static bool rebucketPaths(VBTableVector &Paths);
+static bool rebucketPaths(VPtrInfoVector &Paths);
/// Produces MSVC-compatible vbtable data. The symbols produced by this
/// algorithm match those produced by MSVC 2012 and newer, which is different
/// to produce an unambiguous set of paths.
///
/// TODO: Presumably vftables use the same algorithm.
-void
-MicrosoftVTableContext::computeVBTablePaths(const CXXRecordDecl *RD,
- VBTableVector &Paths) {
+void MicrosoftVTableContext::computeVTablePaths(bool ForVBTables,
+ const CXXRecordDecl *RD,
+ VPtrInfoVector &Paths) {
assert(Paths.empty());
const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
- // Base case: this subobject has its own vbptr.
- if (Layout.hasOwnVBPtr())
- Paths.push_back(new VBTableInfo(RD));
+ // Base case: this subobject has its own vptr.
+ if (ForVBTables ? Layout.hasOwnVBPtr() : Layout.hasOwnVFPtr())
+ Paths.push_back(new VPtrInfo(RD));
// Recursive case: get all the vbtables from our bases and remove anything
// that shares a virtual base.
if (I->isVirtual() && VBasesSeen.count(Base))
continue;
- const VBTableVector &BasePaths = enumerateVBTables(Base);
+ if (!Base->isDynamicClass())
+ continue;
+
+ const VPtrInfoVector &BasePaths =
+ ForVBTables ? enumerateVBTables(Base) : getVFPtrOffsets(Base);
- for (VBTableVector::const_iterator II = BasePaths.begin(),
- EE = BasePaths.end();
+ for (VPtrInfoVector::const_iterator II = BasePaths.begin(),
+ EE = BasePaths.end();
II != EE; ++II) {
- VBTableInfo *BasePath = *II;
+ VPtrInfo *BaseInfo = *II;
// Don't include the path if it goes through a virtual base that we've
// already included.
- if (setsIntersect(VBasesSeen, BasePath->ContainingVBases))
+ if (setsIntersect(VBasesSeen, BaseInfo->ContainingVBases))
continue;
// Copy the path and adjust it as necessary.
- VBTableInfo *P = new VBTableInfo(*BasePath);
+ VPtrInfo *P = new VPtrInfo(*BaseInfo);
// We mangle Base into the path if the path would've been ambiguous and it
// wasn't already extended with Base.
if (P->MangledPath.empty() || P->MangledPath.back() != Base)
P->NextBaseToMangle = Base;
- // Keep track of which derived class ultimately uses the vbtable, and what
- // the full adjustment is from the MDC to this vbtable. The adjustment is
+ // Keep track of the full path.
+ // FIXME: Why do we need this?
+ P->PathToBaseWithVPtr.insert(P->PathToBaseWithVPtr.begin(), Base);
+
+ // Keep track of which derived class ultimately uses the vtable, and what
+ // the full adjustment is from the MDC to this vtable. The adjustment is
// captured by an optional vbase and a non-virtual offset.
- if (Base == Layout.getBaseSharingVBPtr())
+ if (Base == (ForVBTables ? Layout.getBaseSharingVBPtr()
+ : Layout.getPrimaryBase()))
P->ReusingBase = RD;
if (I->isVirtual())
P->ContainingVBases.push_back(Base);
else if (P->ContainingVBases.empty())
P->NonVirtualOffset += Layout.getBaseClassOffset(Base);
+ // Update the full offset in the MDC.
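+ // That is, the non-virtual offset plus, if the path runs through a virtual
+ // base, that vbase's offset in this class's layout.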
+ P->FullOffsetInMDC = P->NonVirtualOffset;
+ if (const CXXRecordDecl *VB = P->getVBaseWithVPtr())
+ P->FullOffsetInMDC += Layout.getVBaseClassOffset(VB);
+
Paths.push_back(P);
}
Changed = rebucketPaths(Paths);
}
-static bool pathCompare(const VBTableInfo *LHS, const VBTableInfo *RHS) {
+static bool pathCompare(const VPtrInfo *LHS, const VPtrInfo *RHS) {
return LHS->MangledPath < RHS->MangledPath;
}
-static bool extendPath(VBTableInfo *P) {
+static bool extendPath(VPtrInfo *P) {
if (P->NextBaseToMangle) {
P->MangledPath.push_back(P->NextBaseToMangle);
P->NextBaseToMangle = 0; // Prevent the path from being extended twice.
return false;
}
-static bool rebucketPaths(VBTableVector &Paths) {
+static bool rebucketPaths(VPtrInfoVector &Paths) {
// What we're essentially doing here is bucketing together ambiguous paths.
// Any bucket with more than one path in it gets extended by NextBase, which
// is usually the direct base that inherited the vbptr. This code uses a
// sorted vector to implement a multiset to form the buckets. Note that the
// ordering is based on pointers, but it doesn't change our output order. The
// current algorithm is designed to match MSVC 2012's names.
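+ // For example, if two paths have produced the same mangled sequence so far,
+ // each is extended by its own NextBaseToMangle so that the resulting names
+ // can diverge on the next iteration.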
- VBTableVector PathsSorted(Paths);
+ VPtrInfoVector PathsSorted(Paths);
std::sort(PathsSorted.begin(), PathsSorted.end(), pathCompare);
bool Changed = false;
for (size_t I = 0, E = PathsSorted.size(); I != E;) {
llvm::DeleteContainerSeconds(VBaseInfo);
}
-void MicrosoftVTableContext::enumerateVFPtrs(
- const CXXRecordDecl *MostDerivedClass,
- const ASTRecordLayout &MostDerivedClassLayout, BaseSubobject Base,
- const CXXRecordDecl *LastVBase,
- const VFPtrInfo::BasePath &PathFromCompleteClass,
- BasesSetVectorTy &VisitedVBases,
- VFPtrListTy &Result) {
- const CXXRecordDecl *CurrentClass = Base.getBase();
- CharUnits OffsetInCompleteClass = Base.getBaseOffset();
- const ASTRecordLayout &CurrentClassLayout =
- Context.getASTRecordLayout(CurrentClass);
-
- if (CurrentClassLayout.hasOwnVFPtr()) {
- if (LastVBase) {
- uint64_t VBIndex = getVBTableIndex(MostDerivedClass, LastVBase);
- assert(VBIndex > 0 && "vbases must have vbindex!");
- CharUnits VFPtrOffset =
- OffsetInCompleteClass -
- MostDerivedClassLayout.getVBaseClassOffset(LastVBase);
- Result.push_back(VFPtrInfo(VBIndex, LastVBase, VFPtrOffset,
- PathFromCompleteClass, OffsetInCompleteClass));
- } else {
- Result.push_back(VFPtrInfo(OffsetInCompleteClass, PathFromCompleteClass));
- }
- }
-
- for (CXXRecordDecl::base_class_const_iterator I = CurrentClass->bases_begin(),
- E = CurrentClass->bases_end(); I != E; ++I) {
- const CXXRecordDecl *BaseDecl = I->getType()->getAsCXXRecordDecl();
-
- CharUnits NextBaseOffset;
- const CXXRecordDecl *NextLastVBase;
- if (I->isVirtual()) {
- if (!VisitedVBases.insert(BaseDecl))
- continue;
- NextBaseOffset = MostDerivedClassLayout.getVBaseClassOffset(BaseDecl);
- NextLastVBase = BaseDecl;
- } else {
- NextBaseOffset = OffsetInCompleteClass +
- CurrentClassLayout.getBaseClassOffset(BaseDecl);
- NextLastVBase = LastVBase;
- }
-
- VFPtrInfo::BasePath NewPath = PathFromCompleteClass;
- NewPath.push_back(BaseDecl);
- BaseSubobject NextBase(BaseDecl, NextBaseOffset);
-
- enumerateVFPtrs(MostDerivedClass, MostDerivedClassLayout, NextBase,
- NextLastVBase, NewPath, VisitedVBases, Result);
- }
-}
-
-/// CalculatePathToMangle - Calculate the subset of records that should be used
-/// to mangle the vftable for the given vfptr.
-/// Should only be called if a class has multiple vftables.
-static void
-CalculatePathToMangle(const CXXRecordDecl *RD, VFPtrInfo &VFPtr) {
- // FIXME: In some rare cases this code produces a slightly incorrect mangling.
- // It's very likely that the vbtable mangling code can be adjusted to mangle
- // both vftables and vbtables correctly.
-
- VFPtrInfo::BasePath &FullPath = VFPtr.PathToBaseWithVFPtr;
- if (FullPath.empty()) {
- // Mangle the class's own vftable.
- assert(RD->getNumVBases() &&
- "Something's wrong: if the most derived "
- "class has more than one vftable, it can only have its own "
- "vftable if it has vbases");
- VFPtr.PathToMangle.push_back(RD);
- return;
- }
-
- unsigned Begin = 0;
-
- // First, skip all the bases before the vbase.
- if (VFPtr.LastVBase) {
- while (FullPath[Begin] != VFPtr.LastVBase) {
- Begin++;
- assert(Begin < FullPath.size());
- }
- }
-
- // Then, put the rest of the base path in the reverse order.
- for (unsigned I = FullPath.size(); I != Begin; --I) {
- const CXXRecordDecl *CurBase = FullPath[I - 1],
- *ItsBase = (I == 1) ? RD : FullPath[I - 2];
- bool BaseIsVirtual = false;
- for (CXXRecordDecl::base_class_const_iterator J = ItsBase->bases_begin(),
- F = ItsBase->bases_end(); J != F; ++J) {
- if (J->getType()->getAsCXXRecordDecl() == CurBase) {
- BaseIsVirtual = J->isVirtual();
- break;
- }
- }
-
- // Should skip the current base if it is a non-virtual base with no siblings.
- if (BaseIsVirtual || ItsBase->getNumBases() != 1)
- VFPtr.PathToMangle.push_back(CurBase);
- }
-}
-
-void MicrosoftVTableContext::enumerateVFPtrs(
- const CXXRecordDecl *ForClass,
- MicrosoftVTableContext::VFPtrListTy &Result) {
- Result.clear();
- const ASTRecordLayout &ClassLayout = Context.getASTRecordLayout(ForClass);
- BasesSetVectorTy VisitedVBases;
- enumerateVFPtrs(ForClass, ClassLayout,
- BaseSubobject(ForClass, CharUnits::Zero()), 0,
- VFPtrInfo::BasePath(), VisitedVBases, Result);
- if (Result.size() > 1) {
- for (unsigned I = 0, E = Result.size(); I != E; ++I)
- CalculatePathToMangle(ForClass, Result[I]);
- }
-}
-
void MicrosoftVTableContext::computeVTableRelatedInformation(
const CXXRecordDecl *RD) {
assert(RD->isDynamicClass());
const VTableLayout::AddressPointsMapTy EmptyAddressPointsMap;
- VFPtrListTy &VFPtrs = VFPtrLocations[RD];
- enumerateVFPtrs(RD, VFPtrs);
+ VPtrInfoVector &VFPtrs = VFPtrLocations[RD];
+ computeVTablePaths(/*ForVBTables=*/false, RD, VFPtrs);
MethodVFTableLocationsTy NewMethodLocations;
- for (VFPtrListTy::iterator I = VFPtrs.begin(), E = VFPtrs.end();
+ for (VPtrInfoVector::iterator I = VFPtrs.begin(), E = VFPtrs.end();
I != E; ++I) {
VFTableBuilder Builder(*this, RD, *I);
- VFTableIdTy id(RD, I->VFPtrFullOffset);
+ VFTableIdTy id(RD, (*I)->FullOffsetInMDC);
assert(VFTableLayouts.count(id) == 0);
SmallVector<VTableLayout::VTableThunkTy, 1> VTableThunks(
Builder.vtable_thunks_begin(), Builder.vtable_thunks_end());
Entry = VBI = new VirtualBaseInfo();
}
- computeVBTablePaths(RD, VBI->VBTables);
+ computeVTablePaths(/*ForVBTables=*/true, RD, VBI->VBPtrPaths);
// First, see if the Derived class shared the vbptr with a non-virtual base.
const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
return VBInfo->VBTableIndices.find(VBase)->second;
}
-const VBTableVector &
+const VPtrInfoVector &
MicrosoftVTableContext::enumerateVBTables(const CXXRecordDecl *RD) {
- return computeVBTableRelatedInformation(RD)->VBTables;
+ return computeVBTableRelatedInformation(RD)->VBPtrPaths;
}
-const MicrosoftVTableContext::VFPtrListTy &
+const VPtrInfoVector &
MicrosoftVTableContext::getVFPtrOffsets(const CXXRecordDecl *RD) {
computeVTableRelatedInformation(RD);
/// Holds all the vbtable globals for a given class.
struct VBTableGlobals {
- const VBTableVector *VBTables;
+ const VPtrInfoVector *VBTables;
SmallVector<llvm::GlobalVariable *, 2> Globals;
};
void emitVirtualInheritanceTables(const CXXRecordDecl *RD);
llvm::GlobalVariable *
- getAddrOfVBTable(const VBTableInfo &VBT, const CXXRecordDecl *RD,
+ getAddrOfVBTable(const VPtrInfo &VBT, const CXXRecordDecl *RD,
llvm::GlobalVariable::LinkageTypes Linkage);
- void emitVBTableDefinition(const VBTableInfo &VBT, const CXXRecordDecl *RD,
+ void emitVBTableDefinition(const VPtrInfo &VBT, const CXXRecordDecl *RD,
llvm::GlobalVariable *GV) const;
void setThunkLinkage(llvm::Function *Thunk, bool ForVTable) {
const VBTableGlobals &VBGlobals = enumerateVBTables(RD);
for (unsigned I = 0, E = VBGlobals.VBTables->size(); I != E; ++I) {
- const VBTableInfo *VBT = (*VBGlobals.VBTables)[I];
+ const VPtrInfo *VBT = (*VBGlobals.VBTables)[I];
llvm::GlobalVariable *GV = VBGlobals.Globals[I];
const ASTRecordLayout &SubobjectLayout =
- CGM.getContext().getASTRecordLayout(VBT->BaseWithVBPtr);
+ CGM.getContext().getASTRecordLayout(VBT->BaseWithVPtr);
CharUnits Offs = VBT->NonVirtualOffset;
Offs += SubobjectLayout.getVBPtrOffset();
- if (VBT->getVBaseWithVBPtr())
- Offs += Layout.getVBaseClassOffset(VBT->getVBaseWithVBPtr());
+ if (VBT->getVBaseWithVPtr())
+ Offs += Layout.getVBaseClassOffset(VBT->getVBaseWithVPtr());
llvm::Value *VBPtr =
CGF.Builder.CreateConstInBoundsGEP1_64(ThisInt8Ptr, Offs.getQuantity());
VBPtr = CGF.Builder.CreateBitCast(VBPtr, GV->getType()->getPointerTo(0),
void MicrosoftCXXABI::emitVTableDefinitions(CodeGenVTables &CGVT,
const CXXRecordDecl *RD) {
MicrosoftVTableContext &VFTContext = CGM.getMicrosoftVTableContext();
- MicrosoftVTableContext::VFPtrListTy VFPtrs = VFTContext.getVFPtrOffsets(RD);
+ VPtrInfoVector VFPtrs = VFTContext.getVFPtrOffsets(RD);
llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
- for (MicrosoftVTableContext::VFPtrListTy::iterator I = VFPtrs.begin(),
- E = VFPtrs.end(); I != E; ++I) {
- llvm::GlobalVariable *VTable = getAddrOfVTable(RD, I->VFPtrFullOffset);
+ for (VPtrInfoVector::iterator I = VFPtrs.begin(), E = VFPtrs.end(); I != E;
+ ++I) {
+ llvm::GlobalVariable *VTable = getAddrOfVTable(RD, (*I)->FullOffsetInMDC);
if (VTable->hasInitializer())
continue;
const VTableLayout &VTLayout =
- VFTContext.getVFTableLayout(RD, I->VFPtrFullOffset);
+ VFTContext.getVFTableLayout(RD, (*I)->FullOffsetInMDC);
llvm::Constant *Init = CGVT.CreateVTableInitializer(
RD, VTLayout.vtable_component_begin(),
VTLayout.getNumVTableComponents(), VTLayout.vtable_thunk_begin(),
}
static void mangleVFTableName(MicrosoftMangleContext &MangleContext,
- const CXXRecordDecl *RD, const VFPtrInfo &VFPtr,
+ const CXXRecordDecl *RD, const VPtrInfo *VFPtr,
SmallString<256> &Name) {
llvm::raw_svector_ostream Out(Name);
- MangleContext.mangleCXXVFTable(RD, VFPtr.PathToMangle, Out);
+ MangleContext.mangleCXXVFTable(RD, VFPtr->MangledPath, Out);
}
llvm::Constant *MicrosoftCXXABI::getVTableAddressPointForConstExpr(
llvm::GlobalVariable *&VTable = I->second;
MicrosoftVTableContext &VTContext = CGM.getMicrosoftVTableContext();
- const MicrosoftVTableContext::VFPtrListTy &VFPtrs =
- VTContext.getVFPtrOffsets(RD);
+ const VPtrInfoVector &VFPtrs = VTContext.getVFPtrOffsets(RD);
if (DeferredVFTables.insert(RD)) {
// We haven't processed this record type before.
}
for (size_t J = 0, F = VFPtrs.size(); J != F; ++J) {
- if (VFPtrs[J].VFPtrFullOffset != VPtrOffset)
+ if (VFPtrs[J]->FullOffsetInMDC != VPtrOffset)
continue;
llvm::ArrayType *ArrayType = llvm::ArrayType::get(
CGM.Int8PtrTy,
- VTContext.getVFTableLayout(RD, VFPtrs[J].VFPtrFullOffset)
+ VTContext.getVFTableLayout(RD, VFPtrs[J]->FullOffsetInMDC)
.getNumVTableComponents());
SmallString<256> Name;
// Cache the globals for all vbtables so we don't have to recompute the
// mangled names.
llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
- for (VBTableVector::const_iterator I = VBGlobals.VBTables->begin(),
- E = VBGlobals.VBTables->end();
+ for (VPtrInfoVector::const_iterator I = VBGlobals.VBTables->begin(),
+ E = VBGlobals.VBTables->end();
I != E; ++I) {
VBGlobals.Globals.push_back(getAddrOfVBTable(**I, RD, Linkage));
}
void MicrosoftCXXABI::emitVirtualInheritanceTables(const CXXRecordDecl *RD) {
const VBTableGlobals &VBGlobals = enumerateVBTables(RD);
for (unsigned I = 0, E = VBGlobals.VBTables->size(); I != E; ++I) {
- const VBTableInfo *VBT = (*VBGlobals.VBTables)[I];
+ const VPtrInfo *VBT = (*VBGlobals.VBTables)[I];
llvm::GlobalVariable *GV = VBGlobals.Globals[I];
emitVBTableDefinition(*VBT, RD, GV);
}
}
llvm::GlobalVariable *
-MicrosoftCXXABI::getAddrOfVBTable(const VBTableInfo &VBT,
- const CXXRecordDecl *RD,
+MicrosoftCXXABI::getAddrOfVBTable(const VPtrInfo &VBT, const CXXRecordDecl *RD,
llvm::GlobalVariable::LinkageTypes Linkage) {
SmallString<256> OutName;
llvm::raw_svector_ostream Out(OutName);
return GV;
}
-void MicrosoftCXXABI::emitVBTableDefinition(const VBTableInfo &VBT,
+void MicrosoftCXXABI::emitVBTableDefinition(const VPtrInfo &VBT,
const CXXRecordDecl *RD,
llvm::GlobalVariable *GV) const {
const CXXRecordDecl *ReusingBase = VBT.ReusingBase;
"should only emit vbtables for classes with vbtables");
const ASTRecordLayout &BaseLayout =
- CGM.getContext().getASTRecordLayout(VBT.BaseWithVBPtr);
+ CGM.getContext().getASTRecordLayout(VBT.BaseWithVPtr);
const ASTRecordLayout &DerivedLayout =
CGM.getContext().getASTRecordLayout(RD);
// Make it relative to the subobject vbptr.
CharUnits CompleteVBPtrOffset = VBT.NonVirtualOffset + VBPtrOffset;
- if (VBT.getVBaseWithVBPtr())
+ if (VBT.getVBaseWithVPtr())
CompleteVBPtrOffset +=
- DerivedLayout.getVBaseClassOffset(VBT.getVBaseWithVBPtr());
+ DerivedLayout.getVBaseClassOffset(VBT.getVBaseWithVPtr());
Offset -= CompleteVBPtrOffset;
unsigned VBIndex = Context.getVBTableIndex(ReusingBase, VBase);
struct O { virtual A *f(); };
struct P : O { virtual B *f(); };
P p;
-// CHECK-O: VFTable for 'O' in 'P' (1 entries)
-// CHECK-O-NEXT: 0 | B *P::f()
-
// CHECK-O: VFTable for 'O' (1 entries)
// CHECK-O-NEXT: 0 | A *O::f()
+// CHECK-O: VFTable for 'O' in 'P' (1 entries)
+// CHECK-O-NEXT: 0 | B *P::f()
+
struct Q {
// CHECK-Q: VFTable for 'Q' (2 entries)
// CHECK-Q-NEXT: 0 | void Q::foo(int)
// MANGLING-DAG: @"\01??_7Z@Test9@@6BX@1@@"
// MANGLING-DAG: @"\01??_7Z@Test9@@6BY@1@@"
- // FIXME this one is wrong:
- // INCORRECT MANGLING-DAG: @"\01??_7Z@Test9@@6BB@@@"
- // MANGLING-DAG-SHOULD-BE: @"\01??_7Z@Test9@@6B@"
+ // MANGLING-DAG: @"\01??_7Z@Test9@@6B@"
};
Z z;
// MANGLING-DAG: @"\01??_7W@Test9@@6BD@@@"
// MANGLING-DAG: @"\01??_7W@Test9@@6BX@1@@"
- // FIXME: these two are wrong:
- // INCORRECT MANGLING-DAG: @"\01??_7W@Test9@@6BB@@@"
- // MANGLING-DAG-SHOULD-BE: @"\01??_7W@Test9@@6B@"
- // INCORRECT MANGLING-DAG: @"\01??_7W@Test9@@6BY@1@Z@1@@"
- // MANGLING-DAG-SHOULD-BE: @"\01??_7W@Test9@@6BY@1@@"
+ // MANGLING-DAG: @"\01??_7W@Test9@@6B@"
+ // MANGLING-DAG: @"\01??_7W@Test9@@6BY@1@@"
};
W w;
// MANGLING-DAG: @"\01??_7T@Test9@@6BD@@@"
// MANGLING-DAG: @"\01??_7T@Test9@@6BX@1@@"
- // FIXME: these two are wrong:
- // INCORRECT MANGLING-DAG: @"\01??_7T@Test9@@6BB@@@"
- // MANGLING-DAG-SHOULD-BE: @"\01??_7T@Test9@@6B@"
- // INCORRECT MANGLING-DAG: @"\01??_7T@Test9@@6BY@1@Z@1@@"
- // MANGLING-DAG-SHOULD-BE: @"\01??_7T@Test9@@6BY@1@@"
+ // MANGLING-DAG: @"\01??_7T@Test9@@6B@"
+ // MANGLING-DAG: @"\01??_7T@Test9@@6BY@1@@"
virtual void f();
virtual void g();
struct Z : virtual X, Y {
// MANGLING-DAG: @"\01??_7Z@Test11@@6BY@1@@"
-
- // FIXME this one is wrong:
- // MANGLING-DAG-SHOULD-BE: @"\01??_7Z@Test11@@6BX@1@@"
- // INCORRECT MANGLING-DAG: @"\01??_7Z@Test11@@6BA@@@"
+ // MANGLING-DAG: @"\01??_7Z@Test11@@6BX@1@@"
};
Z z;
V v;
}
+
+namespace pr17748 {
+struct A {
+ virtual void f() {}
+};
+
+struct B : virtual A {
+ B() {}
+};
+
+struct C : virtual B, A {
+ C() {}
+};
+C c;
+
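+// C has two vfptrs: one in its non-virtual A base and one in the shared
+// virtual A (inherited through B). The two vftables emitted for C are
+// disambiguated by mangling the A and B base paths below.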
+// MANGLING-DAG: @"\01??_7A@pr17748@@6B@"
+// MANGLING-DAG: @"\01??_7B@pr17748@@6B@"
+// MANGLING-DAG: @"\01??_7C@pr17748@@6BA@1@@"
+// MANGLING-DAG: @"\01??_7C@pr17748@@6BB@1@@"
+}