r360889 added new llround builtin functions. This patch adds their
signatures for the WebAssembly backend.
It also adds wasm32 support to utils/update_llc_test_checks.py, since
that's the script other targets use for their test cases for this
feature.
Differential Revision: https://reviews.llvm.org/D62207
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@361327 91177308-0d34-0410-b5e6-96231b3b80d8
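For context, llround implements the C semantics: round to the nearest
integer, with halfway cases rounded away from zero, and return the result
as a long long. A quick Python model of that rule (the helper is
illustrative only, not part of the patch, and glosses over
floating-point edge cases):

import math

def llround(x):
    # Round to nearest; ties go away from zero, as in C's llround.
    return int(math.floor(x + 0.5)) if x >= 0 else int(math.ceil(x - 0.5))

assert llround(2.5) == 3
assert llround(-2.5) == -3
assert llround(2.4) == 2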
func_iPTR_i64_i64_i64_i64_i64_i64,
i32_func_i64_i64,
i32_func_i64_i64_i64_i64,
+ iPTR_func_f32,
+ iPTR_func_f64,
+ iPTR_func_i64_i64,
unsupported
};
Table[RTLIB::ROUND_F32] = f32_func_f32;
Table[RTLIB::ROUND_F64] = f64_func_f64;
Table[RTLIB::ROUND_F128] = func_iPTR_i64_i64;
+ Table[RTLIB::LROUND_F32] = iPTR_func_f32;
+ Table[RTLIB::LROUND_F64] = iPTR_func_f64;
+ Table[RTLIB::LROUND_F128] = iPTR_func_i64_i64;
+ Table[RTLIB::LLROUND_F32] = i64_func_f32;
+ Table[RTLIB::LLROUND_F64] = i64_func_f64;
+ Table[RTLIB::LLROUND_F128] = i64_func_i64_i64;
Table[RTLIB::FLOOR_F32] = f32_func_f32;
Table[RTLIB::FLOOR_F64] = f64_func_f64;
Table[RTLIB::FLOOR_F128] = func_iPTR_i64_i64;
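The split between the lround and llround entries reflects their C return
types: lround returns long, which is pointer-sized (i32 on wasm32), hence
the new iPTR_func_* signatures, while llround returns long long and reuses
the existing i64_func_* signatures; f128 arguments are lowered as two i64
halves. A rough Python model of these table entries (the names are
simplified stand-ins for the C++ enums, not the actual data structure):

# Sketch of the signature table above: libcall -> (return type, params).
LIBCALL_SIGNATURES = {
    'LROUND_F32':   ('iPTR', ['f32']),          # long lroundf(float)
    'LROUND_F64':   ('iPTR', ['f64']),          # long lround(double)
    'LROUND_F128':  ('iPTR', ['i64', 'i64']),   # f128 split into two i64s
    'LLROUND_F32':  ('i64',  ['f32']),          # long long llroundf(float)
    'LLROUND_F64':  ('i64',  ['f64']),          # long long llround(double)
    'LLROUND_F128': ('i64',  ['i64', 'i64']),
}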
Params.push_back(wasm::ValType::I64);
Params.push_back(wasm::ValType::I64);
break;
+ case iPTR_func_f32:
+ Rets.push_back(PtrTy);
+ Params.push_back(wasm::ValType::F32);
+ break;
+ case iPTR_func_f64:
+ Rets.push_back(PtrTy);
+ Params.push_back(wasm::ValType::F64);
+ break;
+ case iPTR_func_i64_i64:
+ Rets.push_back(PtrTy);
+ Params.push_back(wasm::ValType::I64);
+ Params.push_back(wasm::ValType::I64);
+ break;
case unsupported:
llvm_unreachable("unsupported runtime library signature");
}
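The switch above expands each signature enumerator into explicit return
and parameter type lists. A standalone Python sketch of that expansion
for the three new cases (PtrTy modeled as i32, wasm32's pointer type;
the function name and table are mine, for illustration):

def expand_signature(sig, ptr_ty='i32'):
    # Mirrors the new switch cases: signature -> (rets, params).
    table = {
        'iPTR_func_f32':     ([ptr_ty], ['f32']),
        'iPTR_func_f64':     ([ptr_ty], ['f64']),
        'iPTR_func_i64_i64': ([ptr_ty], ['i64', 'i64']),
    }
    if sig not in table:
        raise ValueError('unsupported runtime library signature')
    return table[sig]

assert expand_signature('iPTR_func_f32') == (['i32'], ['f32'])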
--- /dev/null
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc < %s -mtriple=wasm32 | FileCheck %s
+
+define i64 @testmsxs_builtin(float %x) {
+; CHECK-LABEL: testmsxs_builtin:
+; CHECK: .functype testmsxs_builtin (f32) -> (i64)
+; CHECK-NEXT: # %bb.0: # %entry
+; CHECK-NEXT: local.get 0
+; CHECK-NEXT: i64.call llroundf
+; CHECK-NEXT: # fallthrough-return-value
+; CHECK-NEXT: end_function
+entry:
+ %0 = tail call i64 @llvm.llround.f32(float %x)
+ ret i64 %0
+}
+
+define i64 @testmsxd_builtin(double %x) {
+; CHECK-LABEL: testmsxd_builtin:
+; CHECK: .functype testmsxd_builtin (f64) -> (i64)
+; CHECK-NEXT: # %bb.0: # %entry
+; CHECK-NEXT: local.get 0
+; CHECK-NEXT: i64.call llround
+; CHECK-NEXT: # fallthrough-return-value
+; CHECK-NEXT: end_function
+entry:
+ %0 = tail call i64 @llvm.llround.f64(double %x)
+ ret i64 %0
+}
+
+declare i64 @llvm.llround.f32(float) nounwind readnone
+declare i64 @llvm.llround.f64(double) nounwind readnone
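The CHECK lines confirm the intended lowering: llvm.llround.f32 becomes an
i64-returning call to llroundf and llvm.llround.f64 a call to llround, the
standard libm entry points. On a host with libm available, the expected
semantics can be sanity-checked from Python via ctypes (a sketch; the
library lookup is platform-dependent and unrelated to the patch):

import ctypes, ctypes.util

libm = ctypes.CDLL(ctypes.util.find_library('m'))
libm.llround.argtypes, libm.llround.restype = [ctypes.c_double], ctypes.c_longlong
libm.llroundf.argtypes, libm.llroundf.restype = [ctypes.c_float], ctypes.c_longlong

assert libm.llround(2.5) == 3 and libm.llround(-2.5) == -3
assert libm.llroundf(0.5) == 1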
r'.Lfunc_end[0-9]+:\n',
flags=(re.M | re.S))
+ASM_FUNCTION_WASM32_RE = re.compile(
+ r'^_?(?P<func>[^:]+):[ \t]*#+[ \t]*@(?P=func)\n'
+ r'(?P<body>.*?)\n'
+ r'.Lfunc_end[0-9]+:\n',
+ flags=(re.M | re.S))
+
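The new regex keys off the label-and-comment pair llc emits for wasm32
functions ("name: # @name") and captures everything up to the matching
.Lfunc_end label. A small demonstration against output shaped like the
test above (the sample string is hand-written for illustration):

import re

ASM_FUNCTION_WASM32_RE = re.compile(
    r'^_?(?P<func>[^:]+):[ \t]*#+[ \t]*@(?P=func)\n'
    r'(?P<body>.*?)\n'
    r'.Lfunc_end[0-9]+:\n',
    flags=(re.M | re.S))

sample = ('testmsxs_builtin:                       # @testmsxs_builtin\n'
          '\tlocal.get\t0\n'
          '\ti64.call\tllroundf\n'
          '\tend_function\n'
          '.Lfunc_end0:\n')

m = ASM_FUNCTION_WASM32_RE.search(sample)
assert m.group('func') == 'testmsxs_builtin'
assert 'i64.call' in m.group('body')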
SCRUB_LOOP_COMMENT_RE = re.compile(
r'# =>This Inner Loop Header:.*|# in Loop:.*', flags=re.M)
asm = common.SCRUB_TRAILING_WHITESPACE_RE.sub(r'', asm)
return asm
+def scrub_asm_wasm32(asm, args):
+ # Scrub runs of whitespace out of the assembly, but leave the leading
+ # whitespace in place.
+ asm = common.SCRUB_WHITESPACE_RE.sub(r' ', asm)
+ # Expand the tabs used for indentation.
+ asm = string.expandtabs(asm, 2)
+ # Strip trailing whitespace.
+ asm = common.SCRUB_TRAILING_WHITESPACE_RE.sub(r'', asm)
+ return asm
+
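scrub_asm_wasm32 follows the same pipeline as the other per-target
scrubbers: collapse interior whitespace runs, expand tabs to two-space
indentation, and strip trailing whitespace (string.expandtabs(asm, 2) is
the Python 2 spelling; Python 3 would use asm.expandtabs(2)). A
self-contained Python 3 sketch, with stand-in patterns assumed for the
shared regexes whose real definitions live in UpdateTestChecks/common.py:

import re

# Stand-ins for common.SCRUB_WHITESPACE_RE and
# common.SCRUB_TRAILING_WHITESPACE_RE; assumptions, not the real definitions.
SCRUB_WHITESPACE_RE = re.compile(r'(?!^(|  \w))[ \t]+', flags=re.M)
SCRUB_TRAILING_WHITESPACE_RE = re.compile(r'[ \t]+$', flags=re.M)

def scrub_asm_wasm32(asm):
    asm = SCRUB_WHITESPACE_RE.sub(' ', asm)  # keep leading indentation
    asm = asm.expandtabs(2)
    return SCRUB_TRAILING_WHITESPACE_RE.sub('', asm)

print(scrub_asm_wasm32('\tlocal.get\t0\n\ti64.call\tllroundf\n'))
# prints:
#   local.get 0
#   i64.call llroundf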
def get_triple_from_march(march):
triples = {
'amdgcn': 'amdgcn',
'sparc': (scrub_asm_sparc, ASM_FUNCTION_SPARC_RE),
'sparcv9': (scrub_asm_sparc, ASM_FUNCTION_SPARC_RE),
's390x': (scrub_asm_systemz, ASM_FUNCTION_SYSTEMZ_RE),
+ 'wasm32': (scrub_asm_wasm32, ASM_FUNCTION_WASM32_RE),
}
handlers = None
for prefix, s in target_handlers.items():
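The handler table is keyed by triple prefix, so the loop above resolves
'wasm32' as well as fuller triples such as 'wasm32-unknown-unknown' to the
new (scrubber, regex) pair. A minimal Python sketch of that prefix
dispatch (the stub table entries stand in for the real handlers):

def handlers_for(triple, target_handlers):
    # Mirrors the prefix-matching loop above.
    for prefix, s in target_handlers.items():
        if triple.startswith(prefix):
            return s
    return None

table = {'wasm32': ('scrub_asm_wasm32', 'ASM_FUNCTION_WASM32_RE')}
assert handlers_for('wasm32-unknown-unknown', table) == table['wasm32']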