void ASTRecordLayoutBuilder::SelectPrimaryForBase(const CXXRecordDecl *RD,
llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
+ const ASTRecordLayout &Layout = Ctx.getASTRecordLayout(RD);
+ const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
+ const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
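+  // If RD's own primary base is a virtual base, then it acts as an indirect
+  // primary virtual base for any class that uses RD as a base, so record it.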
+ if (PrimaryBaseWasVirtual) {
+ IndirectPrimary.insert(PrimaryBase);
+ }
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
e = RD->bases_end(); i != e; ++i) {
- if (!i->isVirtual()) {
- const CXXRecordDecl *Base =
- cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
- // Only bases with virtual bases participate in computing the
- // indirect primary base classes.
- // FIXME: audit indirect virtual bases
- if (Base->getNumVBases() == 0)
- return;
- // FIXME: This information is recomputed a whole lot, cache it instead.
- SelectPrimaryBase(Base);
- IndirectPrimary.insert(PrimaryBase);
- SelectPrimaryForBase(Base, IndirectPrimary);
- }
+ const CXXRecordDecl *Base =
+ cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+ // Only bases with virtual bases participate in computing the
+ // indirect primary virtual base classes.
+ // FIXME: audit indirect virtual bases
+ if (Base->getNumVBases() == 0)
+ continue;
+ SelectPrimaryForBase(Base, IndirectPrimary);
}
}
/// SelectPrimaryBase - Selects the primary base for the given class and
/// records that with setPrimaryBase.
void ASTRecordLayoutBuilder::SelectPrimaryBase(const CXXRecordDecl *RD) {
// The primary base is the first non-virtual indirect or direct base class,
// if one exists.
}
// Otherwise, it is the first nearly empty virtual base that is not an
- // indirect primary base class, if one exists.
+ // indirect primary virtual base class, if one exists.
// If we have no virtual bases at this point, bail out as the searching below
// is expensive.
return;
}
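A minimal illustration of the selection rule above (illustrative only, not part
of the patch; the test3_* hierarchy added to the test below exercises the same
pattern):

  struct E  { virtual void f(); };               // nearly empty: only a vptr
  struct C1 : virtual E  { virtual void g(); };  // E becomes C1's primary (virtual) base
  struct C2 : virtual C1 { virtual void h(); };  // E is now an indirect primary, so C2
                                                 // selects C1 as its primary base instead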
- // First, we compute all the primary bases for all of our direct and indirect
- // non-virtual bases, and record all their primary base classes.
+ // First, we compute all the primary virtual bases for all of our direct and
+ // indirect bases, and record all their primary virtual base classes.
const CXXRecordDecl *FirstPrimary = 0;
llvm::SmallSet<const CXXRecordDecl*, 32> IndirectPrimary;
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
e = RD->bases_end(); i != e; ++i) {
- if (!i->isVirtual()) {
- const CXXRecordDecl *Base =
- cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
- SelectPrimaryForBase(Base, IndirectPrimary);
- }
+ const CXXRecordDecl *Base =
+ cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+ SelectPrimaryForBase(Base, IndirectPrimary);
}
// Then we can search for the first nearly empty virtual base itself.
- // FIXME: audit indirect virtual bases
+ // FIXME: audit indirect virtual bases and order (backwards?)
for (CXXRecordDecl::base_class_const_iterator i = RD->vbases_begin(),
e = RD->vbases_end(); i != e; ++i) {
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
if (RD && !RD->isDynamicClass())
return;
- if (RD && ForVirtualBase)
- for (meth_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
- ++mi) {
+ const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Class);
+
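+  // Summary of what is emitted below, in order: the virtual base offsets,
+  // then the vcall offsets, then the offset-to-top and the RTTI pointer, and
+  // finally the virtual function pointers.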
+ // The virtual base offsets come first...
+ for (CXXRecordDecl::reverse_base_class_const_iterator i
+ = Class->bases_rbegin(),
+ e = Class->bases_rend(); i != e; ++i) {
+ if (!i->isVirtual())
+ continue;
+ const CXXRecordDecl *Base =
+ cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+ int64_t BaseOffset = Layout.getBaseClassOffset(Base) / 8;
+ llvm::Constant *m;
+ m = llvm::ConstantInt::get(llvm::Type::Int64Ty, BaseOffset);
+ m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
+ methods.push_back(m);
+ }
+
+  // then come the vcall offsets for all our functions...
+ if (isPrimary && ForVirtualBase)
+ for (meth_iter mi = Class->method_begin(),
+ me = Class->method_end(); mi != me; ++mi) {
if (mi->isVirtual()) {
// FIXME: vcall: offset for virtual base for this function
m = llvm::Constant::getNullValue(Ptr8Ty);
methods.push_back(m);
}
}
- if (isPrimary && ForVirtualBase)
- for (meth_iter mi = Class->method_begin(),
- me = Class->method_end(); mi != me; ++mi) {
+ bool TopPrimary = true;
+ // Primary tables are composed from the chain of primaries.
+ if (isPrimary) {
+ const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
+ const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
+ if (PrimaryBase) {
+ TopPrimary = false;
+ GenerateVtableForBase(0, PrimaryBase, rtti, methods, true,
+ PrimaryBaseWasVirtual);
+ }
+ }
+ // then come the vcall offsets for all our virtual bases.
+ if (!isPrimary && RD && ForVirtualBase)
+ for (meth_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
+ ++mi) {
if (mi->isVirtual()) {
// FIXME: vcall: offset for virtual base for this function
m = llvm::Constant::getNullValue(Ptr8Ty);
}
}
- if (RD) {
- const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Class);
- int64_t BaseOffset = -(Layout.getBaseClassOffset(RD) / 8);
- m = llvm::ConstantInt::get(llvm::Type::Int64Ty, BaseOffset);
- m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
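+  // The offset-to-top and RTTI slots are emitted only once per vtable, by
+  // whichever call along the primary chain still has TopPrimary set.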
+ if (TopPrimary) {
+ if (RD) {
+ int64_t BaseOffset = -(Layout.getBaseClassOffset(RD) / 8);
+ m = llvm::ConstantInt::get(llvm::Type::Int64Ty, BaseOffset);
+ m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
+ }
+ methods.push_back(m);
+ methods.push_back(rtti);
}
- methods.push_back(m);
- methods.push_back(rtti);
- if (RD)
+ if (!isPrimary && RD)
for (meth_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
++mi) {
if (mi->isVirtual()) {
const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
- // The virtual base offsets come first.
- for (CXXRecordDecl::reverse_base_class_const_iterator i = RD->vbases_rbegin(),
- e = RD->vbases_rend(); i != e; ++i) {
- const CXXRecordDecl *Base =
- cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
- int64_t BaseOffset = Layout.getBaseClassOffset(Base) / 8;
- llvm::Constant *m;
- m = llvm::ConstantInt::get(llvm::Type::Int64Ty, BaseOffset);
- m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
- methods.push_back(m);
- }
-
// The primary base comes first.
GenerateVtableForBase(PrimaryBase, RD, rtti, methods, true,
PrimaryBaseWasVirtual);
-// RUN: clang-cc -triple x86_64-apple-darwin -std=c++0x -O3 -S %s -o %t-64.s &&
+// RUN: clang-cc -triple x86_64-apple-darwin -std=c++0x -O0 -S %s -o %t-64.s &&
// RUN: FileCheck -check-prefix LP64 --input-file=%t-64.s %s &&
-// RUN: clang-cc -triple i386-apple-darwin -std=c++0x -O3 -S %s -o %t-32.s &&
+// RUN: clang-cc -triple i386-apple-darwin -std=c++0x -O0 -S %s -o %t-32.s &&
// RUN: FileCheck -check-prefix LP32 -input-file=%t-32.s %s &&
+
+// RUN: clang-cc -triple x86_64-apple-darwin -std=c++0x -O3 -S %s -o %t-O3-64.s &&
+// RUN: FileCheck -check-prefix LPOPT64 --input-file=%t-O3-64.s %s &&
+// RUN: clang-cc -triple i386-apple-darwin -std=c++0x -O3 -S %s -o %t-O3-32.s &&
+// RUN: FileCheck -check-prefix LPOPT32 -input-file=%t-O3-32.s %s &&
+
// RUN: true
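+// The -O0 runs above drive the CHECK-LP32/CHECK-LP64 vtable layout checks;
+// the -O3 runs drive the CHECK-LPOPT32/CHECK-LPOPT64 constant-folding checks.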
struct B {
j = sz;
  // FIXME: These should be folded to frontend constants rather than emitted
  // as runtime initializers.
- // CHECK-LP32: movl $4, __ZZ5test2vE2sz
- // CHECK-LP64: movl $8, __ZZ5test2vE2sz(%rip)
+ // CHECK-LPOPT32: movl $4, __ZZ5test2vE2sz
+ // CHECK-LPOPT64: movl $8, __ZZ5test2vE2sz(%rip)
}
static_assert(sizeof(F) == sizeof(void*)*4, "invalid vbase size");
// CHECK-LP64: movl $1, 12(%rax)
// CHECK-LP64: movl $2, 8(%rax)
+
+struct test3_B3 { virtual void funcB3(); };
+struct test3_B2 : virtual test3_B3 { virtual void funcB2(); };
+struct test3_B1 : virtual test3_B2 { virtual void funcB1(); };
+
+struct test3_D : virtual test3_B1 {
+ virtual void funcD() { }
+};
+
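+// The vtable for test3_D should begin with a run of zero slots (the virtual
+// base offsets, the still-null vcall offsets, and the offset to top, all of
+// which are zero here), followed by the RTTI pointer, the virtual functions
+// picked up along the primary chain (funcB3, funcB2, funcB1), and finally
+// test3_D's own funcD.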
+// CHECK-LP32:__ZTV7test3_D:
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long __ZTI7test3_D
+// CHECK-LP32: .long __ZN8test3_B36funcB3Ev
+// CHECK-LP32: .long __ZN8test3_B26funcB2Ev
+// CHECK-LP32: .long __ZN8test3_B16funcB1Ev
+// CHECK-LP32: .long __ZN7test3_D5funcDEv
+
+// CHECK-LP64:__ZTV7test3_D:
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad __ZTI7test3_D
+// CHECK-LP64: .quad __ZN8test3_B36funcB3Ev
+// CHECK-LP64: .quad __ZN8test3_B26funcB2Ev
+// CHECK-LP64: .quad __ZN8test3_B16funcB1Ev
+// CHECK-LP64: .quad __ZN7test3_D5funcDEv
+
// CHECK-LP64: __ZTV1B:
// CHECK-LP64: .space 8
// CHECK-LP64: .quad __ZTI1B
// CHECK-LP64: .quad __ZN2D14bar3Ev
// CHECK-LP64: .quad __ZN2D14bar4Ev
// CHECK-LP64: .quad __ZN2D14bar5Ev
+
+
+test3_D d;
+