  const TagType *TT = Ty->getAs<TagType>();
  if (TT == 0) return true;
+
  // If it's a tagged type, but is a forward decl, we can't convert it.
  if (!TT->getDecl()->isDefinition())
    return false;
return;
}
+  // If we completed a RecordDecl that we previously used and converted to an
+  // anonymous type, then go ahead and complete it now.
  const RecordDecl *RD = cast<RecordDecl>(TD);
-  if (!RD->isDependentType())
+  if (RD->isDependentType()) return;
+
+  // Only complete it if we converted it already. If we haven't converted it
+  // yet, we'll just do it lazily.
+  if (RecordDeclTypes.count(Context.getTagDeclType(RD).getTypePtr()))
    ConvertRecordDeclType(RD);
}
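(A hedged aside, not part of the patch: a minimal sketch of the source pattern
the lazy completion above serves, with invented names:

  struct S;                   // S is incomplete here
  struct T { struct S *p; }; // converting T only needs an opaque type for S
  struct S { int x; };        // on completion, the code above fills in S's IR
                              // body only if S was already converted;
                              // otherwise conversion stays lazy

The f19 test added below exercises a harder variant of this pattern.)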
    // First, check whether we can build the full function type. If the
    // function type depends on an incomplete type (e.g. a struct or enum), we
    // cannot lower the function type.
-    if (!isFuncTypeConvertible(cast<FunctionType>(Ty))) {
+    if (RecursionState == RS_StructPointer ||
+        !isFuncTypeConvertible(cast<FunctionType>(Ty))) {
      // This function's type depends on an incomplete tag type.
      // Return a placeholder type.
      ResultType = llvm::StructType::get(getLLVMContext());
      break;
    }
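+    // (Illustration, not in the original patch: given "struct S;", a
+    // function type like "void (struct S)" mentions the incomplete S by
+    // value, so it is lowered to the empty placeholder type "{}" until S
+    // is defined.)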
+    // While we're converting the argument types for a function, we don't want
+    // to recursively convert any pointed-to structs. Converting directly-used
+    // structs is ok though.
+    RecursionStateTy SavedRecursionState = RecursionState;
+    RecursionState = RS_Struct;
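+    // (Illustration, not in the original patch: for "void f(struct A a,
+    // struct B *b)", A is still converted while building f's type, but B
+    // is only named through a pointer and can be deferred.)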
+
    // The function type can be built; call the appropriate routines to
    // build it.
    const CGFunctionInfo *FI;
    bool isVariadic;
    if (const FunctionProtoType *FPT = dyn_cast<FunctionProtoType>(Ty)) {
      FI = &getFunctionInfo(
                CanQual<FunctionProtoType>::CreateUnsafe(QualType(FPT, 0)));
      isVariadic = FPT->isVariadic();
    } else {
      const FunctionNoProtoType *FNPT = cast<FunctionNoProtoType>(Ty);
      FI = &getFunctionInfo(
                CanQual<FunctionNoProtoType>::CreateUnsafe(QualType(FNPT, 0)));
      isVariadic = true;
    }
-
+
    ResultType = GetFunctionType(*FI, isVariadic);
+
+    // Restore our recursion state.
+    RecursionState = SavedRecursionState;
+
+    if (RecursionState == RS_Normal)
+      while (!DeferredRecords.empty())
+        ConvertRecordDeclType(DeferredRecords.pop_back_val());
    break;
  }
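(A hedged illustration of the deferral, with invented names, not part of the
patch:

  struct S;
  void (*fp)(struct S *);   // building fp's function type names S only
                            // through a pointer, so S is deferred rather
                            // than converted recursively
  struct S { int x; };      // S is converted when the outermost conversion
                            // drains DeferredRecords back in RS_Normal)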
  CGRecordLayout *Layout = ComputeRecordLayout(RD, Ty);
  CGRecordLayouts[Key] = Layout;
+  // If this struct blocked a FunctionType conversion, then recompute whatever
+  // was derived from that.
+  // FIXME: This is hugely overconservative.
+  TypeCache.clear();
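+  // (Illustration, not in the original patch: completing "struct S" may make
+  // a function type such as "void (struct S)" convertible where it
+  // previously got the "{}" placeholder, so every cached type that might
+  // derive from it is dropped and recomputed on demand.)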
+
  // Restore our recursion state. If we're done converting the outer-most
  // record, then convert any deferred structs as well.
  RecursionState = SavedRecursionState;
+
  if (RecursionState == RS_Normal)
    while (!DeferredRecords.empty())
      ConvertRecordDeclType(DeferredRecords.pop_back_val());
  rangepair rp;
  return (rp = f18_ext()).range1;
}
+
+// Complex forward reference of a struct.
+struct f19S;
+extern struct f19T {
+  struct f19S (*p)(void);
+} t;
+struct f19S { int i; };
+void f19(void) {
+  t.p();
+}
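+// (Reader's note, not in the original test: when 't' is declared, f19S is
+// still incomplete, so the type of t.p is initially lowered against the
+// placeholder; the call in f19 checks that IRGen still emits a valid call
+// once f19S has been completed.)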
+
// CHECK-NEXT: [[UNION:%.*]] = getelementptr inbounds {{.*}} [[THIS]], i32 0, i32 0
// CHECK-NEXT: [[STRUCT:%.*]] = bitcast {{.*}}* [[UNION]] to
// CHECK-NEXT: [[CALLBACK:%.*]] = getelementptr inbounds {{.*}} [[STRUCT]], i32 0, i32 0
- // CHECK-NEXT: store void (i8*)* null, void (i8*)** [[CALLBACK]]
+ // CHECK-NEXT: store {{.*}}* null, {{.*}}** [[CALLBACK]]
// CHECK-NEXT: [[UNION:%.*]] = getelementptr inbounds {{.*}} [[THIS]], i32 0, i32 0
// CHECK-NEXT: [[STRUCT:%.*]] = bitcast {{.*}}* [[UNION]] to
// CHECK-NEXT: [[CVALUE:%.*]] = getelementptr inbounds {{.*}} [[STRUCT]], i32 0, i32 1
// CHECK-LP64-NEXT: [[T1:%.*]] = load i8** [[X]]
// CHECK-LP64-NEXT: [[T2:%.*]] = call i8* @objc_retain(i8* [[T1]])
// CHECK-LP64-NEXT: store i8* [[T2]], i8** [[T0]]
-// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to void ()*
-// CHECK-LP64-NEXT: call void @use_block(void ()* [[T1]])
+// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to
+// CHECK-LP64-NEXT: call void @use_block({{.*}}* [[T1]])
// CHECK-LP64-NEXT: [[T1:%.*]] = load i8** [[T0]]
// CHECK-LP64-NEXT: call void @objc_release(i8* [[T1]])
// CHECK-LP64: [[T0:%.*]] = getelementptr inbounds [[BLOCK_T]]* [[BLOCK]], i32 0, i32 5
// CHECK-LP64-NEXT: [[T1:%.*]] = call i8* @objc_loadWeak(i8** [[X]])
// CHECK-LP64-NEXT: call i8* @objc_initWeak(i8** [[T0]], i8* [[T1]])
-// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to void ()*
-// CHECK-LP64-NEXT: call void @use_block(void ()* [[T1]])
+// CHECK-LP64-NEXT: [[T1:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to
+// CHECK-LP64-NEXT: call void @use_block({{.*}} [[T1]])
// CHECK-LP64-NEXT: call void @objc_destroyWeak(i8** [[T0]])
// CHECK-LP64-NEXT: call void @objc_destroyWeak(i8** [[X]])
// CHECK: [[CAPTURE:%.*]] = getelementptr inbounds [[BLOCK_T]]* [[BLOCK]], i32 0, i32 5
// CHECK-NEXT: [[T0:%.*]] = load i8** [[VAR]]
// CHECK-NEXT: store i8* [[T0]], i8** [[CAPTURE]]
- // CHECK-NEXT: [[T0:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to void ()*
- // CHECK-NEXT: call void @test39_helper(void ()* [[T0]])
+ // CHECK-NEXT: [[T0:%.*]] = bitcast [[BLOCK_T]]* [[BLOCK]] to
+ // CHECK-NEXT: call void @test39_helper({{.*}} [[T0]])
// CHECK-NEXT: ret void
}
// CHECK-NEXT: [[T3:%.*]] = call i8* @objc_retain(i8* [[T2]])
// CHECK-NEXT: [[T4:%.*]] = bitcast i8* [[T3]] to [[TEST42]]*
// CHECK-NEXT: store [[TEST42]]* [[T4]], [[TEST42]]** [[T0]]
-// CHECK-NEXT: bitcast [[BLOCK_T]]* [[BLOCK]] to void ()*
+// CHECK-NEXT: bitcast [[BLOCK_T]]* [[BLOCK]] to
// CHECK-NEXT: call void @test42_helper(
// CHECK-NEXT: [[T1:%.*]] = load [[TEST42]]** [[T0]]
// CHECK-NEXT: [[T2:%.*]] = bitcast [[TEST42]]* [[T1]] to i8*
// CHECK: define linkonce_odr {{%.*}}* @_ZN15ObjCBlockMemberaSERKS_(
// CHECK: [[T0:%.*]] = call i8* @objc_retainBlock(
// CHECK-NEXT: [[T1:%.*]] = bitcast i8* [[T0]] to i32 (i32)*
-// CHECK-NEXT: [[T2:%.*]] = load i32 (i32)** [[SLOT:%.*]],
-// CHECK-NEXT: store i32 (i32)* [[T1]], i32 (i32)** [[SLOT]]
-// CHECK-NEXT: [[T3:%.*]] = bitcast i32 (i32)* [[T2]] to i8*
+// CHECK-NEXT: [[T2:%.*]] = load {{.*}} [[SLOT:%.*]],
+// CHECK: store
+// CHECK-NEXT: [[T3:%.*]] = bitcast
// CHECK-NEXT: call void @objc_release(i8* [[T3]])
// CHECK-NEXT: ret
// Implicitly-generated default constructor for ObjCBlockMember
// CHECK: define linkonce_odr void @_ZN15ObjCBlockMemberC2Ev
-// CHECK: store i32 (i32)* null,
+// CHECK: store {{.*}} null,
// CHECK-NEXT: ret void
// Implicitly-generated copy constructor for ObjCArrayMember