}
void ASTRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
+ const CXXRecordDecl *PB,
int64_t Offset,
llvm::SmallSet<const CXXRecordDecl*, 32> &mark,
llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
VBaseOffsets.push_back(BaseOffset);
}
#endif
+ int64_t BaseOffset = Offset;
+ // FIXME: Calculate BaseOffset.
if (i->isVirtual()) {
- // Mark it so we don't lay it out twice.
- if (mark.count(Base))
- continue;
- if (IndirectPrimary.count(Base)) {
- int64_t BaseOffset;
- // FIXME: audit
- BaseOffset = Offset;
- // BaseOffset = (1<<63) | (1<<31);
+ if (Base == PB) {
+ // Only lay things out once.
+ if (mark.count(Base))
+ continue;
+ // Mark it so we don't lay it out twice.
+ mark.insert(Base);
+ assert(IndirectPrimary.count(Base) && "IndirectPrimary was wrong");
VBases.push_back(Base);
- VBaseOffsets.push_back(BaseOffset);
- } else
+ VBaseOffsets.push_back(Offset);
+ } else if (IndirectPrimary.count(Base)) {
+ // Someone else will eventually lay this out.
+ ;
+ } else {
+ // Only lay things out once.
+ if (mark.count(Base))
+ continue;
+ // Mark it so we don't lay it out twice.
+ mark.insert(Base);
LayoutVirtualBase(Base);
+ BaseOffset = VBaseOffsets.back();
+ }
+ }
+ if (Base->getNumVBases()) {
+ const ASTRecordLayout &L = Ctx.getASTRecordLayout(Base);
+ const CXXRecordDecl *BasePB = L.getPrimaryBase();
+ LayoutVirtualBases(Base, BasePB, BaseOffset, mark, IndirectPrimary);
}
- if (Base->getNumVBases())
- LayoutVirtualBases(Base, Offset, mark, IndirectPrimary);
}
}
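For context, the new PB parameter and the IndirectPrimary set cover hierarchies in which a nearly empty virtual base also serves as the primary base of another base, so it must not be given its own allocation a second time. A minimal sketch of such a hierarchy (hypothetical class names, not taken from the patch or its test):

    struct A { virtual void a(); };             // nearly empty: vptr only
    struct B : virtual A { virtual void b(); }; // A is chosen as B's primary base
    struct D : B, virtual A { virtual void d(); };
    // When D's virtual bases are laid out, A is an indirect primary base:
    // it already sits at offset 0 inside B, so D must not lay it out again.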
BaseOffsets.push_back(Size);
}
+#if 0
// And now add offsets for all our primary virtual bases as well, so
// they all have offsets.
const ASTRecordLayout *L = &BaseInfo;
if (PB)
L = &Ctx.getASTRecordLayout(PB);
}
+#endif
// Reserve space for this base.
Size += BaseSize;
if (RD) {
llvm::SmallSet<const CXXRecordDecl*, 32> mark;
- LayoutVirtualBases(RD, 0, mark, IndirectPrimary);
+ LayoutVirtualBases(RD, PrimaryBase, 0, mark, IndirectPrimary);
}
// Finally, round the size of the total struct up to the alignment of the
// CHECK-LP32:__ZTV7test5_D:
// CHECK-LP32-NEXT: .long 16
// CHECK-LP32-NEXT: .long 12
-// CHECK-LP32: .long 8
-// CHECK-LP32 .long 8
-// CHECK-LP32 .long 8
-// CHECK-LP32: .long 4
-// CHECK-LP32 .long 4
-// CHECK-LP32 .long 4
-// CHECK-LP32: .space 4
+// CHECK-LP32-NEXT: .long 8
+// CHECK-LP32-NEXT: .long 8
+// CHECK-LP32-NEXT: .long 8
+// CHECK-LP32-NEXT: .long 4
+// CHECK-LP32-NEXT: .long 4
+// CHECK-LP32-NEXT: .long 4
+// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .long __ZN8test5_B26funcB2Ev
// CHECK-LP32-NEXT: .long __ZN8test5_B16funcB1Ev
// CHECK-LP32-NEXT: .long __ZN7test5_D5funcDEv
-// CHECK-LP32: .space 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
-// CHECK-LP32 .long 4294967292
-// CHECK-LP32: .long __ZTI7test5_D
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32: .long 4294967292
+// CHECK-LP32-NEXT: .long __ZTI7test5_D
// CHECK-LP32-NEXT: .long __ZN9test5_B237funcB23Ev
// CHECK-LP32-NEXT: .long __ZN9test5_B227funcB22Ev
// CHECK-LP32-NEXT: .long __ZN9test5_B217funcB21Ev
// CHECK-LP32: .long 4
// CHECK-LP32-NEXT: .space 4
// CHECK-LP32-NEXT: .space 4
-// CHECK-LP32 .long 4294967288
-// CHECK-LP32: .long __ZTI7test5_D
+// CHECK-LP32: .long 4294967288
+// CHECK-LP32-NEXT: .long __ZTI7test5_D
// CHECK-LP32-NEXT: .long __ZN9test5_B337funcB33Ev
// CHECK-LP32-NEXT: .long __ZN9test5_B327funcB32Ev
// CHECK-LP32-NEXT: .long __ZN9test5_B317funcB31Ev
// CHECK-LP64:__ZTV7test5_D:
// CHECK-LP64-NEXT: .quad 32
// CHECK-LP64-NEXT: .quad 24
-// CHECK-LP64: .quad 16
-// CHECK-LP64 .quad 16
-// CHECK-LP64 .quad 16
-// CHECK-LP64: .quad 8
-// CHECK-LP64 .quad 8
-// CHECK-LP64 .quad 8
-// CHECK-LP64: .space 8
+// CHECK-LP64-NEXT: .quad 16
+// CHECK-LP64-NEXT: .quad 16
+// CHECK-LP64-NEXT: .quad 16
+// CHECK-LP64-NEXT: .quad 8
+// CHECK-LP64-NEXT: .quad 8
+// CHECK-LP64-NEXT: .quad 8
+// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .quad __ZN8test5_B26funcB2Ev
// CHECK-LP64-NEXT: .quad __ZN8test5_B16funcB1Ev
// CHECK-LP64-NEXT: .quad __ZN7test5_D5funcDEv
-// CHECK-LP64: .space 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
-// CHECK-LP64 .quad 18446744073709551608
-// CHECK-LP64: .quad __ZTI7test5_D
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64: .quad 18446744073709551608
+// CHECK-LP64-NEXT: .quad __ZTI7test5_D
// CHECK-LP64-NEXT: .quad __ZN9test5_B237funcB23Ev
// CHECK-LP64-NEXT: .quad __ZN9test5_B227funcB22Ev
// CHECK-LP64-NEXT: .quad __ZN9test5_B217funcB21Ev
// CHECK-LP64: .quad 8
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .space 8
-// CHECK-LP64 .quad 18446744073709551600
-// CHECK-LP64: .quad __ZTI7test5_D
+// CHECK-LP64: .quad 18446744073709551600
+// CHECK-LP64-NEXT: .quad __ZTI7test5_D
// CHECK-LP64-NEXT: .quad __ZN9test5_B337funcB33Ev
// CHECK-LP64-NEXT: .quad __ZN9test5_B327funcB32Ev
// CHECK-LP64-NEXT: .quad __ZN9test5_B317funcB31Ev
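The large unsigned constants checked just before each __ZTI7test5_D reference are two's-complement encodings of small negative offsets (the slots that precede the RTTI pointer in the secondary vtables). A quick way to confirm the arithmetic, as an illustrative snippet rather than part of the test:

    #include <cstdint>
    #include <cstdio>
    int main() {
      // 32-bit entries: 2^32 - 4 and 2^32 - 8.
      printf("%d %d\n", (int32_t)4294967292u, (int32_t)4294967288u);        // -4 -8
      // 64-bit entries: 2^64 - 8 and 2^64 - 16.
      printf("%lld %lld\n", (long long)(int64_t)18446744073709551608ull,
                            (long long)(int64_t)18446744073709551600ull);   // -8 -16
      return 0;
    }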