From: Mike Stump
Date: Sun, 16 Aug 2009 01:46:26 +0000 (+0000)
Subject: Cleanups and fixups for calculating the virtual base offsets. WIP.
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=276b9f1d814f4f6551cc3000590759a34185d6da;p=clang

Cleanups and fixups for calculating the virtual base offsets. WIP.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@79156 91177308-0d34-0410-b5e6-96231b3b80d8
---

diff --git a/lib/AST/RecordLayoutBuilder.cpp b/lib/AST/RecordLayoutBuilder.cpp
index a53c16fba1..30682efbb8 100644
--- a/lib/AST/RecordLayoutBuilder.cpp
+++ b/lib/AST/RecordLayoutBuilder.cpp
@@ -168,18 +168,41 @@ void ASTRecordLayoutBuilder::LayoutVirtualBase(const CXXRecordDecl *RD) {
 }
 
 void ASTRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
+                                                int64_t Offset,
+                        llvm::SmallSet<const CXXRecordDecl *, 32> &mark,
                         llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary) {
   for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
          e = RD->bases_end(); i != e; ++i) {
     const CXXRecordDecl *Base =
       cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
-    if (i->isVirtual() && !IndirectPrimary.count(Base)) {
-      // Mark it so we don't output it twice.
-      IndirectPrimary.insert(Base);
-      LayoutVirtualBase(Base);
+#if 0
+    const ASTRecordLayout &L = Ctx.getASTRecordLayout(Base);
+    const CXXRecordDecl *PB = L.getPrimaryBase();
+    if (PB && L.getPrimaryBaseWasVirtual()
+        && IndirectPrimary.count(PB)) {
+      int64_t BaseOffset;
+      // FIXME: calculate this.
+      BaseOffset = (1<<63) | (1<<31);
+      VBases.push_back(PB);
+      VBaseOffsets.push_back(BaseOffset);
+    }
+#endif
+    if (i->isVirtual()) {
+      // Mark it so we don't lay it out twice.
+      if (mark.count(Base))
+        continue;
+      if (IndirectPrimary.count(Base)) {
+        int64_t BaseOffset;
+        // FIXME: audit
+        BaseOffset = Offset;
+        // BaseOffset = (1<<63) | (1<<31);
+        VBases.push_back(Base);
+        VBaseOffsets.push_back(BaseOffset);
+      } else
+        LayoutVirtualBase(Base);
     }
     if (Base->getNumVBases())
-      LayoutVirtualBases(Base, IndirectPrimary);
+      LayoutVirtualBases(Base, Offset, mark, IndirectPrimary);
   }
 }
 
@@ -195,7 +218,7 @@ void ASTRecordLayoutBuilder::LayoutBaseNonVirtually(const CXXRecordDecl *RD,
   // Round up the current record size to the base's alignment boundary.
   Size = (Size + (BaseAlign-1)) & ~(BaseAlign-1);
 
-  // Add base class offsets. 
+  // Add base class offsets.
   if (IsVirtualBase) {
     VBases.push_back(RD);
     VBaseOffsets.push_back(Size);
@@ -204,6 +227,20 @@ void ASTRecordLayoutBuilder::LayoutBaseNonVirtually(const CXXRecordDecl *RD,
     BaseOffsets.push_back(Size);
   }
 
+  // And now add offsets for all our primary virtual bases as well, so
+  // they all have offsets.
+  const ASTRecordLayout *L = &BaseInfo;
+  const CXXRecordDecl *PB = L->getPrimaryBase();
+  while (PB) {
+    if (L->getPrimaryBaseWasVirtual()) {
+      VBases.push_back(PB);
+      VBaseOffsets.push_back(Size);
+    }
+    PB = L->getPrimaryBase();
+    if (PB)
+      L = &Ctx.getASTRecordLayout(PB);
+  }
+
   // Reserve space for this base.
   Size += BaseSize;
 
@@ -228,7 +265,7 @@ void ASTRecordLayoutBuilder::Layout(const RecordDecl *D) {
 
   llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
 
-  // If this is a C++ class, lay out the nonvirtual bases.
+  // If this is a C++ class, lay out the vtable and the non-virtual bases.
   const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D);
   if (RD) {
     LayoutVtable(RD, IndirectPrimary);
@@ -246,8 +283,10 @@ void ASTRecordLayoutBuilder::Layout(const RecordDecl *D) {
   NonVirtualSize = Size;
   NonVirtualAlignment = Alignment;
 
-  if (RD)
-    LayoutVirtualBases(RD, IndirectPrimary);
+  if (RD) {
+    llvm::SmallSet<const CXXRecordDecl *, 32> mark;
+    LayoutVirtualBases(RD, 0, mark, IndirectPrimary);
+  }
 
   // Finally, round the size of the total struct up to the alignment of the
   // struct itself.
diff --git a/lib/AST/RecordLayoutBuilder.h b/lib/AST/RecordLayoutBuilder.h
index 4447fff781..cdd077403c 100644
--- a/lib/AST/RecordLayoutBuilder.h
+++ b/lib/AST/RecordLayoutBuilder.h
@@ -74,6 +74,8 @@ class ASTRecordLayoutBuilder {
   void LayoutBaseNonVirtually(const CXXRecordDecl *RD, bool IsVBase);
   void LayoutVirtualBase(const CXXRecordDecl *RD);
   void LayoutVirtualBases(const CXXRecordDecl *RD,
+                          int64_t Offset,
+                          llvm::SmallSet<const CXXRecordDecl *, 32> &mark,
                           llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary);
 
   /// FinishLayout - Finalize record layout. Adjust record size based on the
diff --git a/lib/CodeGen/CGCXX.cpp b/lib/CodeGen/CGCXX.cpp
index f2e58af403..1c9c5634ab 100644
--- a/lib/CodeGen/CGCXX.cpp
+++ b/lib/CodeGen/CGCXX.cpp
@@ -680,6 +680,7 @@ void CodeGenFunction::GenerateMethods(std::vector<llvm::Constant *> &methods,
 }
 
 void CodeGenFunction::GenerateVtableForVBases(const CXXRecordDecl *RD,
+                                              const CXXRecordDecl *Class,
                                               llvm::Constant *rtti,
                                               std::vector<llvm::Constant *> &methods,
                         llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary) {
@@ -690,19 +691,40 @@ void CodeGenFunction::GenerateVtableForVBases(const CXXRecordDecl *RD,
     if (i->isVirtual() && !IndirectPrimary.count(Base)) {
       // Mark it so we don't output it twice.
       IndirectPrimary.insert(Base);
-      GenerateVtableForBase(Base, RD, rtti, methods, false, true,
+      GenerateVtableForBase(Base, true, 0, Class, rtti, methods, true,
                             IndirectPrimary);
     }
     if (Base->getNumVBases())
-      GenerateVtableForVBases(Base, rtti, methods, IndirectPrimary);
+      GenerateVtableForVBases(Base, Class, rtti, methods, IndirectPrimary);
+  }
+}
+
+void CodeGenFunction::GenerateVBaseOffsets(
+    std::vector<llvm::Constant *> &methods, const CXXRecordDecl *RD,
+    llvm::SmallSet<const CXXRecordDecl *, 32> &SeenVBase,
+    uint64_t Offset, const ASTRecordLayout &BLayout, llvm::Type *Ptr8Ty) {
+  for (CXXRecordDecl::base_class_const_iterator i =RD->bases_begin(),
+         e = RD->bases_end(); i != e; ++i) {
+    const CXXRecordDecl *Base =
+      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+    if (i->isVirtual() && !SeenVBase.count(Base)) {
+      SeenVBase.insert(Base);
+      int64_t BaseOffset = Offset/8 + BLayout.getVBaseClassOffset(Base) / 8;
+      llvm::Constant *m;
+      m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
+      m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
+      methods.push_back(m);
+    }
+    GenerateVBaseOffsets(methods, Base, SeenVBase, Offset, BLayout, Ptr8Ty);
   }
 }
 
 void CodeGenFunction::GenerateVtableForBase(const CXXRecordDecl *RD,
+                                            bool forPrimary,
+                                            int64_t Offset,
                                             const CXXRecordDecl *Class,
                                             llvm::Constant *rtti,
                                             std::vector<llvm::Constant *> &methods,
-                                            bool isPrimary,
                                             bool ForVirtualBase,
                         llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary) {
   llvm::Type *Ptr8Ty;
@@ -712,69 +734,70 @@ void CodeGenFunction::GenerateVtableForBase(const CXXRecordDecl *RD,
   if (RD && !RD->isDynamicClass())
     return;
 
-  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Class);
+  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
+  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
+  const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
 
-  if (isPrimary) {
-    // The virtual base offsets come first...
-    // FIXME: audit
-    for (CXXRecordDecl::reverse_base_class_const_iterator i
-           = Class->bases_rbegin(),
-         e = Class->bases_rend(); i != e; ++i) {
-      if (!i->isVirtual())
-        continue;
-      const CXXRecordDecl *Base =
-        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
-      int64_t BaseOffset = Layout.getVBaseClassOffset(Base) / 8;
-      llvm::Constant *m;
-      m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
-      m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
-      methods.push_back(m);
-    }
+  // The virtual base offsets come first...
+  // FIXME: Audit, is this right?
+  if (forPrimary || !PrimaryBaseWasVirtual) {
+    llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
+    std::vector<llvm::Constant *> offsets;
+    GenerateVBaseOffsets(offsets, RD, SeenVBase, Offset, Layout, Ptr8Ty);
+    for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
+           e = offsets.rend(); i != e; ++i)
+      methods.push_back(*i);
   }
 
-  // then comes the the vcall offsets for all our functions...
-  if (isPrimary && ForVirtualBase)
-    GenerateVcalls(methods, Class, Ptr8Ty);
-
-  bool TopPrimary = true;
-  // Primary tables are composed from the chain of primaries.
-  if (isPrimary) {
-    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
-    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
-    if (PrimaryBase) {
-      if (PrimaryBaseWasVirtual)
-        IndirectPrimary.insert(PrimaryBase);
-      TopPrimary = false;
-      GenerateVtableForBase(0, PrimaryBase, rtti, methods, true,
-                            PrimaryBaseWasVirtual, IndirectPrimary);
-    }
+  if (forPrimary || ForVirtualBase) {
+    // then come the vcall offsets for all our functions...
+    GenerateVcalls(methods, RD, Ptr8Ty);
+  }
+
+  bool Top = true;
+
+  // vtables are composed from the chain of primaries.
+  if (PrimaryBase) {
+    if (PrimaryBaseWasVirtual)
+      IndirectPrimary.insert(PrimaryBase);
+    Top = false;
+    GenerateVtableForBase(PrimaryBase, true, Offset, Class, rtti, methods,
+                          PrimaryBaseWasVirtual, IndirectPrimary);
   }
+
   // then come the vcall offsets for all our virtual bases.
-  if (!isPrimary && RD && ForVirtualBase)
+  if (!1 && ForVirtualBase)
     GenerateVcalls(methods, RD, Ptr8Ty);
 
-  if (TopPrimary) {
-    if (RD) {
-      int64_t BaseOffset;
-      if (ForVirtualBase)
-        BaseOffset = -(Layout.getVBaseClassOffset(RD) / 8);
-      else
-        BaseOffset = -(Layout.getBaseClassOffset(RD) / 8);
-      m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
-      m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
-    }
+  if (Top) {
+    int64_t BaseOffset;
+    if (ForVirtualBase) {
+      const ASTRecordLayout &BLayout = getContext().getASTRecordLayout(Class);
+      BaseOffset = -(BLayout.getVBaseClassOffset(RD) / 8);
+    } else
+      BaseOffset = -Offset/8;
+    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
+    m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
     methods.push_back(m);
     methods.push_back(rtti);
   }
 
-  if (!isPrimary) {
-    if (RD)
-      GenerateMethods(methods, RD, Ptr8Ty);
-    return;
-  }
-
-  // And add the virtuals for the class to the primary vtable.
-  GenerateMethods(methods, Class, Ptr8Ty);
+  GenerateMethods(methods, RD, Ptr8Ty);
+
+  // and then the non-virtual bases.
+  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
+         e = RD->bases_end(); i != e; ++i) {
+    if (i->isVirtual())
+      continue;
+    const CXXRecordDecl *Base =
+      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+    if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
+      uint64_t o = Offset + Layout.getBaseClassOffset(Base);
+      GenerateVtableForBase(Base, true, o, Class, rtti, methods, false,
+                            IndirectPrimary);
+    }
+  }
 }
 
 llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
@@ -787,36 +810,20 @@ llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
   llvm::GlobalVariable::LinkageTypes linktype;
   linktype = llvm::GlobalValue::WeakAnyLinkage;
   std::vector<llvm::Constant *> methods;
-  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
+  llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
   int64_t Offset = 0;
   llvm::Constant *rtti = GenerateRtti(RD);
   Offset += LLVMPointerWidth;
   Offset += LLVMPointerWidth;
 
-  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
-  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
-  const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
   llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
 
-  // The primary base comes first.
-  GenerateVtableForBase(PrimaryBase, RD, rtti, methods, true,
-                        PrimaryBaseWasVirtual, IndirectPrimary);
-
-  // Then come the non-virtual bases.
-  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
-       e = RD->bases_end(); i != e; ++i) {
-    if (i->isVirtual())
-      continue;
-    const CXXRecordDecl *Base =
-      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
-    if (Base != PrimaryBase || PrimaryBaseWasVirtual)
-      GenerateVtableForBase(Base, RD, rtti, methods, false, false,
-                            IndirectPrimary);
-  }
+  // First come the vtables for all the non-virtual bases...
+  GenerateVtableForBase(RD, true, 0, RD, rtti, methods, false, IndirectPrimary);
 
-  // Then come the vtables for all the virtual bases.
-  GenerateVtableForVBases(RD, rtti, methods, IndirectPrimary);
+  // then the vtables for all the virtual bases.
+  GenerateVtableForVBases(RD, RD, rtti, methods, IndirectPrimary);
 
   llvm::Constant *C;
   llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
@@ -825,7 +832,7 @@ llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
                                      linktype, C, Name);
   vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
   vtable = Builder.CreateGEP(vtable,
-                             llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), 
+                             llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                                     Offset/8));
   return vtable;
 }
diff --git a/lib/CodeGen/CodeGenFunction.h b/lib/CodeGen/CodeGenFunction.h
index b4469c503b..3110ac00af 100644
--- a/lib/CodeGen/CodeGenFunction.h
+++ b/lib/CodeGen/CodeGenFunction.h
@@ -362,19 +362,26 @@ public:
   void FinishFunction(SourceLocation EndLoc=SourceLocation());
 
   llvm::Constant *GenerateRtti(const CXXRecordDecl *RD);
+  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &methods,
+                            const CXXRecordDecl *RD,
+                            llvm::SmallSet<const CXXRecordDecl *, 32> &SeenVBase,
+                            uint64_t Offset,
+                            const ASTRecordLayout &Layout, llvm::Type *Ptr8Ty);
   void GenerateVcalls(std::vector<llvm::Constant *> &methods,
                       const CXXRecordDecl *RD, llvm::Type *Ptr8Ty);
   void GenerateMethods(std::vector<llvm::Constant *> &methods,
                        const CXXRecordDecl *RD, llvm::Type *Ptr8Ty);
-void GenerateVtableForVBases(const CXXRecordDecl *RD,
-                             llvm::Constant *rtti,
-                             std::vector<llvm::Constant *> &methods,
+  void GenerateVtableForVBases(const CXXRecordDecl *RD,
+                               const CXXRecordDecl *Class,
+                               llvm::Constant *rtti,
+                               std::vector<llvm::Constant *> &methods,
                         llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary);
   void GenerateVtableForBase(const CXXRecordDecl *RD,
+                             bool ForPrimary,
+                             int64_t Offset,
                              const CXXRecordDecl *Class,
                              llvm::Constant *rtti,
                              std::vector<llvm::Constant *> &methods,
-                             bool isPrimary,
                              bool ForVirtualBase,
                         llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary);
   llvm::Value *GenerateVtable(const CXXRecordDecl *RD);
diff --git a/test/CodeGenCXX/virt.cpp b/test/CodeGenCXX/virt.cpp
index 6e537184e0..fb2e1383f0 100644
--- a/test/CodeGenCXX/virt.cpp
+++ b/test/CodeGenCXX/virt.cpp
@@ -204,12 +204,12 @@ struct test5_D : virtual test5_B1, virtual test5_B21, virtual test5_B31 {
 };
 
 // CHECK-LP32:__ZTV7test5_D:
-// CHECK-LP32 .long 16
-// CHECK-LP32 .long 12
-// CHECK-LP32 .long 8
+// CHECK-LP32: .long 16
+// CHECK-LP32: .long 12
+// CHECK-LP32: .long 8
 // CHECK-LP32 .long 8
 // CHECK-LP32 .long 8
-// CHECK-LP32 .long 4
+// CHECK-LP32: .long 4
 // CHECK-LP32 .long 4
 // CHECK-LP32 .long 4
 // CHECK-LP32: .space 4
@@ -224,44 +224,44 @@ struct test5_D : virtual test5_B1, virtual test5_B21, virtual test5_B31 {
 // CHECK-LP32: .long __ZN8test5_B26funcB2Ev
 // CHECK-LP32: .long __ZN8test5_B16funcB1Ev
 // CHECK-LP32: .long __ZN7test5_D5funcDEv
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
 // CHECK-LP32: .space 4
-// CHECK-LP32: .long 4294967292
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32 .long 4294967292
 // CHECK-LP32: .long __ZTI7test5_D
 // CHECK-LP32: .long __ZN9test5_B237funcB23Ev
-// CHECK-LP32 .long __ZN9test5_B227funcB22Ev
-// CHECK-LP32 .long __ZN9test5_B217funcB21Ev
-// CHECK-LP32 .space 4
-// CHECK-LP32 .long 8
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
-// CHECK-LP32 .long 4
+// CHECK-LP32: .long __ZN9test5_B227funcB22Ev
+// CHECK-LP32: .long __ZN9test5_B217funcB21Ev
 // CHECK-LP32 .space 4
+// CHECK-LP32: .long 8
 // CHECK-LP32: .space 4
-// CHECK-LP32: .long 4294967288
-// CHECK-LP32 .long __ZTI7test5_D
-// CHECK-LP32 .long __ZN9test5_B337funcB33Ev
-// CHECK-LP32 .long __ZN9test5_B327funcB32Ev
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32 .long 4294967288
+// CHECK-LP32: .long __ZTI7test5_D
+// CHECK-LP32: .long __ZN9test5_B337funcB33Ev
+// CHECK-LP32: .long __ZN9test5_B327funcB32Ev
 // CHECK-LP32: .long __ZN9test5_B317funcB31Ev
 // CHECK-LP32: .space 4
-// CHECK-LP32 .long -12
+// CHECK-LP32: .long 4294967284
 // CHECK-LP32: .long __ZTI7test5_D
 // CHECK-LP32: .long __ZN4B2328funcB232Ev
 // CHECK-LP32: .space 4
-// CHECK-LP32 .long -16
+// CHECK-LP32:.long 4294967280
 // CHECK-LP32: .long __ZTI7test5_D
 // CHECK-LP32: .long __ZN4B2318funcB231Ev
 
 // CHECK-LP64:__ZTV7test5_D:
-// CHECK-LP64 .quad 32
-// CHECK-LP64 .quad 24
-// CHECK-LP64 .quad 16
+// CHECK-LP64: .quad 32
+// CHECK-LP64: .quad 24
+// CHECK-LP64: .quad 16
 // CHECK-LP64 .quad 16
 // CHECK-LP64 .quad 16
-// CHECK-LP64 .quad 8
+// CHECK-LP64: .quad 8
 // CHECK-LP64 .quad 8
 // CHECK-LP64 .quad 8
 // CHECK-LP64: .space 8
@@ -276,37 +276,116 @@ struct test5_D : virtual test5_B1, virtual test5_B21, virtual test5_B31 {
 // CHECK-LP64: .quad __ZN8test5_B26funcB2Ev
 // CHECK-LP64: .quad __ZN8test5_B16funcB1Ev
 // CHECK-LP64: .quad __ZN7test5_D5funcDEv
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
 // CHECK-LP64: .space 8
-// CHECK-LP64:.quad 18446744073709551608
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64 .quad 18446744073709551608
 // CHECK-LP64: .quad __ZTI7test5_D
 // CHECK-LP64: .quad __ZN9test5_B237funcB23Ev
-// CHECK-LP64 .quad __ZN9test5_B227funcB22Ev
-// CHECK-LP64 .quad __ZN9test5_B217funcB21Ev
-// CHECK-LP64 .space 8
-// CHECK-LP64 .quad 16
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
-// CHECK-LP64 .quad 8
+// CHECK-LP64: .quad __ZN9test5_B227funcB22Ev
+// CHECK-LP64: .quad __ZN9test5_B217funcB21Ev
 // CHECK-LP64 .space 8
+// CHECK-LP64: .quad 16
 // CHECK-LP64: .space 8
-// CHECK-LP64: .quad 18446744073709551600
-// CHECK-LP64 .quad __ZTI7test5_D
-// CHECK-LP64 .quad __ZN9test5_B337funcB33Ev
-// CHECK-LP64 .quad __ZN9test5_B327funcB32Ev
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64 .quad 18446744073709551600
+// CHECK-LP64: .quad __ZTI7test5_D
+// CHECK-LP64: .quad __ZN9test5_B337funcB33Ev
+// CHECK-LP64: .quad __ZN9test5_B327funcB32Ev
 // CHECK-LP64: .quad __ZN9test5_B317funcB31Ev
 // CHECK-LP64: .space 8
-// CHECK-LP64 .quad 18446744073709551592
+// CHECK-LP64: .quad 18446744073709551592
 // CHECK-LP64: .quad __ZTI7test5_D
 // CHECK-LP64: .quad __ZN4B2328funcB232Ev
 // CHECK-LP64: .space 8
-// CHECK-LP64 .quad 18446744073709551584
+// CHECK-LP64:.quad 18446744073709551584
 // CHECK-LP64: .quad __ZTI7test5_D
 // CHECK-LP64: .quad __ZN4B2318funcB231Ev
 
+struct test8_B1 {
+  virtual void ftest8_B1() { }
+};
+struct test8_B2aa {
+  virtual void ftest8_B2aa() { }
+  int i;
+};
+struct test8_B2ab {
+  virtual void ftest8_B2ab() { }
+  int i;
+};
+struct test8_B2a : virtual test8_B2aa, virtual test8_B2ab {
+  virtual void ftest8_B2a() { }
+};
+struct test8_B2b {
+  virtual void ftest8_B2b() { }
+};
+struct test8_B2 : test8_B2a, test8_B2b {
+  virtual void ftest8_B2() { }
+};
+struct test8_B3 {
+  virtual void ftest8_B3() { }
+};
+class test8_D : test8_B1, test8_B2, test8_B3 {
+} d8;
+
+// CHECK-LP32:__ZTV7test8_D:
+// CHECK-LP32: .long 24
+// CHECK-LP32: .long 16
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN8test8_B19ftest8_B1Ev
+// CHECK-LP32: .long 20
+// CHECK-LP32: .long 12
+// CHECK-LP32: .long 4294967292
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN9test8_B2a10ftest8_B2aEv
+// CHECK-LP32: .long __ZN8test8_B29ftest8_B2Ev
+// CHECK-LP32: .long 4294967288
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN9test8_B2b10ftest8_B2bEv
+// CHECK-LP32: .long 4294967284
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN8test8_B39ftest8_B3Ev
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long 4294967280
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN10test8_B2aa11ftest8_B2aaEv
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long 4294967272
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN10test8_B2ab11ftest8_B2abEv
+
+// CHECK-LP64:__ZTV7test8_D:
+// CHECK-LP64: .quad 48
+// CHECK-LP64: .quad 32
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN8test8_B19ftest8_B1Ev
+// CHECK-LP64: .quad 40
+// CHECK-LP64: .quad 24
+// CHECK-LP64: .quad 18446744073709551608
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN9test8_B2a10ftest8_B2aEv
+// CHECK-LP64: .quad __ZN8test8_B29ftest8_B2Ev
+// CHECK-LP64: .quad 18446744073709551600
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN9test8_B2b10ftest8_B2bEv
+// CHECK-LP64: .quad 18446744073709551592
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN8test8_B39ftest8_B3Ev
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad 18446744073709551584
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN10test8_B2aa11ftest8_B2aaEv
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad 18446744073709551568
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN10test8_B2ab11ftest8_B2abEv
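
---

Editor's note, not part of the commit: for orientation, the vtable group these
tests check follows the Itanium C++ ABI shape that this code targets: the
virtual-base offsets come first (at negative indices from the address point),
then any vcall offsets, then offset-to-top, the RTTI pointer, and finally the
virtual function pointers. Below is a minimal sketch, assuming an LP64 target,
annotating the first CHECK-LP64 entries of __ZTV7test8_D above against that
shape; the per-entry interpretations are the editor's reading, not text from
the commit.

// Annotated reading of the leading __ZTV7test8_D entries (LP64, Itanium ABI):
//   .quad 48                         ; vbase offset: test8_B2ab lies 48 bytes into test8_D
//   .quad 32                         ; vbase offset: test8_B2aa lies 32 bytes into test8_D
//   .space 8                         ; offset-to-top, zero for the primary vtable
//   .quad __ZTI7test8_D              ; RTTI pointer for the complete object
//   .quad __ZN8test8_B19ftest8_B1Ev  ; virtual functions, primary-base chain (test8_B1) first

The later groups in the same symbol are the secondary vtables for the other
bases; their .quad 18446744073709551608 etc. entries are the negative
offset-to-top values (e.g. -8) printed as unsigned 64-bit integers.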