}
#endif
+/* 8.6 Memory prefetch intrinsics */
+/* 8.6.1 Data prefetch */
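+/* __pld(addr) prefetches addr for a read access into the innermost data
+ * cache with the temporal (retained) policy. __pldx exposes the full ACLE
+ * encoding: access_kind (0 = read, 1 = write), cache_level (0 = L1, 1 = L2,
+ * 2 = L3) and retention_policy (0 = temporal, 1 = streaming). The AArch32
+ * builtin only encodes the access kind, so the cache level and retention
+ * policy are dropped there. */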
+#define __pld(addr) __pldx(0, 0, 0, addr)
+
+#if __ARM_32BIT_STATE
+#define __pldx(access_kind, cache_level, retention_policy, addr) \
+ __builtin_arm_prefetch(addr, access_kind, 1)
+#else
+#define __pldx(access_kind, cache_level, retention_policy, addr) \
+ __builtin_arm_prefetch(addr, access_kind, cache_level, retention_policy, 1)
+#endif
+
+/* 8.6.2 Instruction prefetch */
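+/* __pli(addr) prefetches the instructions at addr into the innermost cache
+ * with the temporal policy; __plix takes the same cache_level and
+ * retention_policy encodings as __pldx. The trailing builtin argument
+ * selects a data (1) or instruction (0) prefetch. */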
+#define __pli(addr) __plix(0, 0, addr)
+
+#if __ARM_32BIT_STATE
+#define __plix(cache_level, retention_policy, addr) \
+ __builtin_arm_prefetch(addr, 0, 0)
+#else
+#define __plix(cache_level, retention_policy, addr) \
+ __builtin_arm_prefetch(addr, 0, cache_level, retention_policy, 0)
+#endif
+
/* 8.7 NOP */
static __inline__ void __attribute__((always_inline, nodebug)) __nop(void) {
__builtin_arm_nop();
}
+/* 8.6 Memory prefetch intrinsics */
+/* 8.6.1 Data prefetch */
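+// The llvm.prefetch operands are (address, rw, locality, cache type).
+// AArch32 always lowers to the maximum locality of 3; AArch64 derives the
+// locality from the requested cache level and retention policy.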
+// ARM-LABEL: test_pld
+// ARM: call void @llvm.prefetch(i8* null, i32 0, i32 3, i32 1)
+void test_pld() {
+ __pld(0);
+}
+
+// ARM-LABEL: test_pldx
+// AArch32: call void @llvm.prefetch(i8* null, i32 1, i32 3, i32 1)
+// AArch64: call void @llvm.prefetch(i8* null, i32 1, i32 1, i32 1)
+void test_pldx() {
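+ // access_kind = 1 (write), cache_level = 2 (L3), retention_policy = 0 (temporal).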
+ __pldx(1, 2, 0, 0);
+}
+
+/* 8.6.2 Instruction prefetch */
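+// The final llvm.prefetch operand is the cache type: 0 requests an
+// instruction prefetch, 1 a data prefetch.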
+// ARM-LABEL: test_pli
+// ARM: call void @llvm.prefetch(i8* null, i32 0, i32 3, i32 0)
+void test_pli() {
+ __pli(0);
+}
+
+// ARM-LABEL: test_plix
+// AArch32: call void @llvm.prefetch(i8* null, i32 0, i32 3, i32 0)
+// AArch64: call void @llvm.prefetch(i8* null, i32 0, i32 1, i32 0)
+void test_plix() {
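+ // cache_level = 2 (L3), retention_policy = 0 (temporal), instruction prefetch.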
+ __plix(2, 0, 0);
+}
+
/* 8.7 NOP */
// ARM-LABEL: test_nop
// AArch32: call void @llvm.arm.hint(i32 0)
int32_t test_usat_const_diag(int32_t t, const int32_t v) {
return __usat(t, v); // expected-error-re {{argument to {{.*}} must be a constant integer}}
}
+
+/*
+ * Prefetch intrinsics
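+ * The access kind argument must be a compile-time constant, otherwise an
+ * error should be raised.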
+ */
+void test_pldx_const_diag(int32_t i) {
+ __pldx(i, 0, 0, 0); // expected-error-re {{argument to {{.*}} must be a constant integer}}
+}