const uint64_t *fieldoffsets, unsigned fieldcount,
uint64_t nonvirtualsize, unsigned nonvirtualalign,
const CXXRecordDecl *PB, bool PBVirtual,
- const CXXRecordDecl **bases, const uint64_t *baseoffsets,
- unsigned basecount, const CXXRecordDecl **vbases,
- const uint64_t *vbaseoffsets, unsigned vbasecount)
+ const std::pair<const CXXRecordDecl *, uint64_t> *bases,
+ unsigned numbases,
+ const std::pair<const CXXRecordDecl *, uint64_t> *vbases,
+ unsigned numvbases)
: Size(size), DataSize(datasize), FieldOffsets(0), Alignment(alignment),
FieldCount(fieldcount), CXXInfo(new CXXRecordLayoutInfo) {
  if (FieldCount > 0) {
    FieldOffsets = new uint64_t[FieldCount];
    for (unsigned i = 0; i != FieldCount; ++i)
      FieldOffsets[i] = fieldoffsets[i];
  }

  CXXInfo->PrimaryBase = PB;
  CXXInfo->PrimaryBaseWasVirtual = PBVirtual;
  CXXInfo->NonVirtualSize = nonvirtualsize;
  CXXInfo->NonVirtualAlign = nonvirtualalign;
- for (unsigned i = 0; i != basecount; ++i)
- CXXInfo->BaseOffsets[bases[i]] = baseoffsets[i];
- for (unsigned i = 0; i != vbasecount; ++i)
- CXXInfo->VBaseOffsets[vbases[i]] = vbaseoffsets[i];
+ for (unsigned i = 0; i != numbases; ++i)
+ CXXInfo->BaseOffsets[bases[i].first] = bases[i].second;
+ for (unsigned i = 0; i != numvbases; ++i)
+ CXXInfo->VBaseOffsets[vbases[i].first] = vbases[i].second;
}
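// Illustrative sketch (editorial, not part of the patch): with the pair-based
// parameters a caller fills one vector of (decl, offset) pairs per base kind
// and passes data()/size() straight through. BaseDecl and BaseOffset below
// are placeholder names.
//
//   llvm::SmallVector<std::pair<const CXXRecordDecl *, uint64_t>, 4> Bases;
//   Bases.push_back(std::make_pair(BaseDecl, BaseOffset));
//   // ... likewise for VBases ...
//   new ASTRecordLayout(/* sizes, alignment, field offsets, ... */
//                       PrimaryBase, PrimaryBaseWasVirtual,
//                       Bases.data(), Bases.size(),
//                       VBases.data(), VBases.size());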
~ASTRecordLayout() {
// Mark it so we don't lay it out twice.
mark.insert(Base);
assert (IndirectPrimary.count(Base) && "IndirectPrimary was wrong");
- VBases.push_back(Base);
- VBaseOffsets.push_back(Offset);
+ VBases.push_back(std::make_pair(Base, Offset));
} else if (IndirectPrimary.count(Base)) {
// Someone else will eventually lay this out.
;
// Mark it so we don't lay it out twice.
mark.insert(Base);
LayoutVirtualBase(Base);
- BaseOffset = *(VBaseOffsets.end()-1);
+ BaseOffset = VBases.back().second;
}
}
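// Illustrative note (editorial, not in the patch): LayoutVirtualBase(Base) is
// assumed to push Base's (decl, offset) pair onto VBases, so
// VBases.back().second reads the offset that was just recorded; it is the same
// value the removed code fetched through *(VBaseOffsets.end()-1) on the
// parallel offsets vector.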
if (Base->getNumVBases()) {
Size = (Size + (BaseAlign-1)) & ~(BaseAlign-1);
// Add base class offsets.
- if (IsVirtualBase) {
- VBases.push_back(RD);
- VBaseOffsets.push_back(Size);
- } else {
- Bases.push_back(RD);
- BaseOffsets.push_back(Size);
- }
+ if (IsVirtualBase)
+ VBases.push_back(std::make_pair(RD, Size));
+ else
+ Bases.push_back(std::make_pair(RD, Size));
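// Illustrative note (editorial, not in the patch): a single push_back records
// the base and its offset as one element, so the base list and the offsets
// can no longer drift out of sync by construction.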
#if 0
// And now add offsets for all our primary virtual bases as well, so
// FIXME: IsPODForThePurposeOfLayout should be stored in the record layout.
bool IsPODForThePurposeOfLayout = cast<CXXRecordDecl>(D)->isPOD();
- assert(Builder.Bases.size() == Builder.BaseOffsets.size() &&
- "Base offsets vector must be same size as bases vector!");
- assert(Builder.VBases.size() == Builder.VBaseOffsets.size() &&
- "Base offsets vector must be same size as bases vector!");
-
// FIXME: This should be done in FinalizeLayout.
uint64_t DataSize =
IsPODForThePurposeOfLayout ? Builder.Size : Builder.NextOffset;
Builder.PrimaryBase,
Builder.PrimaryBaseWasVirtual,
Builder.Bases.data(),
- Builder.BaseOffsets.data(),
Builder.Bases.size(),
Builder.VBases.data(),
- Builder.VBaseOffsets.data(),
Builder.VBases.size());
}
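// Illustrative note (editorial, not in the patch): Bases and VBases are now
// SmallVectors of std::pair<const CXXRecordDecl *, uint64_t>, so data()
// yields a pointer that matches the new constructor parameters directly; the
// separate offset arrays, and the size asserts removed above, are no longer
// needed.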
const CXXRecordDecl *PrimaryBase;
bool PrimaryBaseWasVirtual;
- llvm::SmallVector<const CXXRecordDecl *, 4> Bases;
- llvm::SmallVector<uint64_t, 4> BaseOffsets;
-
- llvm::SmallVector<const CXXRecordDecl *, 4> VBases;
- llvm::SmallVector<uint64_t, 4> VBaseOffsets;
+ typedef llvm::SmallVector<std::pair<const CXXRecordDecl *,
+ uint64_t>, 4> BaseOffsetsTy;
+
+ BaseOffsetsTy Bases;
+ BaseOffsetsTy VBases;
ASTRecordLayoutBuilder(ASTContext &Ctx);