case Builtin::BI__builtin_truncl:
return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::trunc));
+ case Builtin::BIlround:
+ case Builtin::BIlroundf:
+ case Builtin::BIlroundl:
+ case Builtin::BI__builtin_lround:
+ case Builtin::BI__builtin_lroundf:
+ case Builtin::BI__builtin_lroundl: {
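+ // 'long' is 32 or 64 bits depending on the target, so llvm.lround is
+ // overloaded on both the result and the source type (e.g. llvm.lround.i64.f32).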
+ llvm::Type *ResultType = ConvertType(E->getType());
+ llvm::Value *Src = EmitScalarExpr(E->getArg(0));
+ llvm::Function *F = CGM.getIntrinsic(Intrinsic::lround,
+                                      {ResultType, Src->getType()});
+ return RValue::get(Builder.CreateCall(F, Src));
+ }
+
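+ // llround returns 'long long', which is at least 64 bits on every target,
+ // so the intrinsic is only overloaded on the source type (e.g. llvm.llround.f32).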
+ case Builtin::BIllround:
+ case Builtin::BIllroundf:
+ case Builtin::BIllroundl:
+ case Builtin::BI__builtin_llround:
+ case Builtin::BI__builtin_llroundf:
+ case Builtin::BI__builtin_llroundl:
+ return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::llround));
+
default:
break;
}
volatile float resf;
volatile double resd;
volatile long double resld;
+ volatile long int resli;
+ volatile long long int reslli;
resf = __builtin_fmodf(F,F);
// CHECK: frem float
resld = __builtin_roundl(LD);
// CHECK: call x86_fp80 @llvm.round.f80
+ resli = __builtin_lroundf(F);
+ // CHECK: call i64 @llvm.lround.i64.f32
+
+ resli = __builtin_lround(D);
+ // CHECK: call i64 @llvm.lround.i64.f64
+
+ resli = __builtin_lroundl(LD);
+ // CHECK: call i64 @llvm.lround.i64.f80
}
// __builtin_longjmp isn't supported on all platforms, so only test it on X86.
__builtin_llround(f); __builtin_llroundf(f); __builtin_llroundl(f);
-// NO__ERRNO: declare i64 @llround(double) [[READNONE]]
-// NO__ERRNO: declare i64 @llroundf(float) [[READNONE]]
-// NO__ERRNO: declare i64 @llroundl(x86_fp80) [[READNONE]]
+// NO__ERRNO: declare i64 @llvm.llround.f64(double) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.llround.f32(float) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.llround.f80(x86_fp80) [[READNONE_INTRINSIC]]
// HAS_ERRNO: declare i64 @llround(double) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @llroundf(float) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @llroundl(x86_fp80) [[NOT_READNONE]]
__builtin_lround(f); __builtin_lroundf(f); __builtin_lroundl(f);
-// NO__ERRNO: declare i64 @lround(double) [[READNONE]]
-// NO__ERRNO: declare i64 @lroundf(float) [[READNONE]]
-// NO__ERRNO: declare i64 @lroundl(x86_fp80) [[READNONE]]
+// NO__ERRNO: declare i64 @llvm.lround.i64.f64(double) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.lround.i64.f32(float) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.lround.i64.f80(x86_fp80) [[READNONE_INTRINSIC]]
// HAS_ERRNO: declare i64 @lround(double) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @lroundf(float) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @lroundl(x86_fp80) [[NOT_READNONE]]
llround(f); llroundf(f); llroundl(f);
-// NO__ERRNO: declare i64 @llround(double) [[READNONE]]
-// NO__ERRNO: declare i64 @llroundf(float) [[READNONE]]
-// NO__ERRNO: declare i64 @llroundl(x86_fp80) [[READNONE]]
+// NO__ERRNO: declare i64 @llvm.llround.f64(double) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.llround.f32(float) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.llround.f80(x86_fp80) [[READNONE_INTRINSIC]]
// HAS_ERRNO: declare i64 @llround(double) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @llroundf(float) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @llroundl(x86_fp80) [[NOT_READNONE]]
lround(f); lroundf(f); lroundl(f);
-// NO__ERRNO: declare i64 @lround(double) [[READNONE]]
-// NO__ERRNO: declare i64 @lroundf(float) [[READNONE]]
-// NO__ERRNO: declare i64 @lroundl(x86_fp80) [[READNONE]]
+// NO__ERRNO: declare i64 @llvm.lround.i64.f64(double) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.lround.i64.f32(float) [[READNONE_INTRINSIC]]
+// NO__ERRNO: declare i64 @llvm.lround.i64.f80(x86_fp80) [[READNONE_INTRINSIC]]
// HAS_ERRNO: declare i64 @lround(double) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @lroundf(float) [[NOT_READNONE]]
// HAS_ERRNO: declare i64 @lroundl(x86_fp80) [[NOT_READNONE]]