def mno_rdseed : Flag<["-"], "mno-rdseed">, Group<m_x86_Features_Group>;
def mno_adx : Flag<["-"], "mno-adx">, Group<m_x86_Features_Group>;
def mno_sha : Flag<["-"], "mno-sha">, Group<m_x86_Features_Group>;
+def mno_xsave : Flag<["-"], "mno-xsave">, Group<m_x86_Features_Group>;
+def mno_xsaveopt : Flag<["-"], "mno-xsaveopt">, Group<m_x86_Features_Group>;
+def mno_xsavec : Flag<["-"], "mno-xsavec">, Group<m_x86_Features_Group>;
+def mno_xsaves : Flag<["-"], "mno-xsaves">, Group<m_x86_Features_Group>;
def munaligned_access : Flag<["-"], "munaligned-access">, Group<m_arm_Features_Group>,
HelpText<"Allow memory accesses to be unaligned (AArch32/AArch64 only)">;
def madx : Flag<["-"], "madx">, Group<m_x86_Features_Group>;
def msha : Flag<["-"], "msha">, Group<m_x86_Features_Group>;
def mcx16 : Flag<["-"], "mcx16">, Group<m_x86_Features_Group>;
+def mxsave : Flag<["-"], "mxsave">, Group<m_x86_Features_Group>;
+def mxsaveopt : Flag<["-"], "mxsaveopt">, Group<m_x86_Features_Group>;
+def mxsavec : Flag<["-"], "mxsavec">, Group<m_x86_Features_Group>;
+def mxsaves : Flag<["-"], "mxsaves">, Group<m_x86_Features_Group>;
def mips16 : Flag<["-"], "mips16">, Group<m_Group>;
def mno_mips16 : Flag<["-"], "mno-mips16">, Group<m_Group>;
def mmicromips : Flag<["-"], "mmicromips">, Group<m_Group>;
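A hedged aside, not part of the diff: because the new defs sit in m_x86_Features_Group, the driver forwards them to cc1 as target features, which could be exercised in the usual x86 feature-flag test style (the check prefix and RUN line below are illustrative only, not lines from this patch).

// RUN: %clang -target x86_64-unknown-linux -mxsavec %s -### -o %t.o 2>&1 | FileCheck -check-prefix=XSAVEC-FLAG %s
// XSAVEC-FLAG: "-target-feature" "+xsavec"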
bool HasAVX512VL = false;
bool HasSHA = false;
bool HasCX16 = false;
+ bool HasXSAVE = false;
+ bool HasXSAVEOPT = false;
+ bool HasXSAVEC = false;
+ bool HasXSAVES = false;
/// \brief Enumeration of all of the X86 CPUs supported by Clang.
///
setFeatureEnabledImpl(Features, "avx512dq", true);
setFeatureEnabledImpl(Features, "avx512bw", true);
setFeatureEnabledImpl(Features, "avx512vl", true);
+ setFeatureEnabledImpl(Features, "xsavec", true);
+ setFeatureEnabledImpl(Features, "xsaves", true);
// FALLTHROUGH
case CK_Broadwell:
setFeatureEnabledImpl(Features, "rdseed", true);
// FALLTHROUGH
case CK_SandyBridge:
setFeatureEnabledImpl(Features, "avx", true);
+ setFeatureEnabledImpl(Features, "xsave", true);
+ setFeatureEnabledImpl(Features, "xsaveopt", true);
// FALLTHROUGH
case CK_Westmere:
case CK_Silvermont:
setFeatureEnabledImpl(Features, "aes", true);
setFeatureEnabledImpl(Features, "pclmul", true);
setFeatureEnabledImpl(Features, "cx16", true);
+ setFeatureEnabledImpl(Features, "xsaveopt", true);
+ setFeatureEnabledImpl(Features, "xsave", true);
break;
case CK_K6_2:
case CK_K6_3:
setFeatureEnabledImpl(Features, "pclmul", true);
setFeatureEnabledImpl(Features, "bmi", true);
setFeatureEnabledImpl(Features, "f16c", true);
+ setFeatureEnabledImpl(Features, "xsaveopt", true);
// FALLTHROUGH
case CK_BTVER1:
setFeatureEnabledImpl(Features, "ssse3", true);
setFeatureEnabledImpl(Features, "popcnt", true);
setFeatureEnabledImpl(Features, "prfchw", true);
setFeatureEnabledImpl(Features, "cx16", true);
+ setFeatureEnabledImpl(Features, "xsave", true);
break;
case CK_BDVER4:
setFeatureEnabledImpl(Features, "avx2", true);
// FALLTHROUGH
case CK_BDVER3:
setFeatureEnabledImpl(Features, "fsgsbase", true);
+ setFeatureEnabledImpl(Features, "xsaveopt", true);
// FALLTHROUGH
case CK_BDVER2:
setFeatureEnabledImpl(Features, "bmi", true);
setFeatureEnabledImpl(Features, "pclmul", true);
setFeatureEnabledImpl(Features, "prfchw", true);
setFeatureEnabledImpl(Features, "cx16", true);
+ setFeatureEnabledImpl(Features, "xsave", true);
break;
}
if (!TargetInfo::initFeatureMap(Features, Diags, CPU, FeaturesVec))
Features["avx2"] = true;
case AVX:
Features["avx"] = true;
+ Features["xsave"] = true;
case SSE42:
Features["sse4.2"] = true;
case SSE41:
case SSE42:
Features["sse4.2"] = false;
case AVX:
- Features["fma"] = Features["avx"] = Features["f16c"] = false;
+ Features["fma"] = Features["avx"] = Features["f16c"] = Features["xsave"] =
+ Features["xsaveopt"] = false;
setXOPLevel(Features, FMA4, false);
case AVX2:
Features["avx2"] = false;
setSSELevel(Features, SSE42, Enabled);
else
setSSELevel(Features, SSE41, Enabled);
+ } else if (Name == "xsave") {
+ if (Enabled)
+ setSSELevel(Features, AVX, Enabled);
+ else
+ Features["xsaveopt"] = false;
+ } else if (Name == "xsaveopt" || Name == "xsavec" || Name == "xsaves") {
+ if (Enabled) {
+ Features["xsave"] = true;
+ setSSELevel(Features, AVX, Enabled);
+ }
}
}
HasSHA = true;
} else if (Feature == "+cx16") {
HasCX16 = true;
+ } else if (Feature == "+xsave") {
+ HasXSAVE = true;
+ } else if (Feature == "+xsaveopt") {
+ HasXSAVEOPT = true;
+ } else if (Feature == "+xsavec") {
+ HasXSAVEC = true;
+ } else if (Feature == "+xsaves") {
+ HasXSAVES = true;
}
X86SSEEnum Level = llvm::StringSwitch<X86SSEEnum>(Feature)
if (HasSHA)
Builder.defineMacro("__SHA__");
+ if (HasXSAVE)
+ Builder.defineMacro("__XSAVE__");
+ if (HasXSAVEOPT)
+ Builder.defineMacro("__XSAVEOPT__");
+ if (HasXSAVEC)
+ Builder.defineMacro("__XSAVEC__");
+ if (HasXSAVES)
+ Builder.defineMacro("__XSAVES__");
+
if (HasCX16)
Builder.defineMacro("__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16");
.Case("x86_32", getTriple().getArch() == llvm::Triple::x86)
.Case("x86_64", getTriple().getArch() == llvm::Triple::x86_64)
.Case("xop", XOPLevel >= XOP)
+ .Case("xsave", HasXSAVE)
+ .Case("xsavec", HasXSAVEC)
+ .Case("xsaves", HasXSAVES)
+ .Case("xsaveopt", HasXSAVEOPT)
.Default(false);
}
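Another hedged sketch: with "xsave" and friends now reported by hasFeature() and handled by setFeatureEnabledImpl(), the per-function target attribute (what the target-attribute checks below verify) can request the feature for a single function. The function names here are illustrative, not taken from the patch.

/* Assumes a baseline where xsave is off (e.g. plain x86-64). */
__attribute__((target("xsave")))
static void with_xsave(void) {
  /* Compiled with "+xsave" in this function's "target-features". */
}

static void without_xsave(void) {
  /* Keeps the translation unit's default feature set. */
}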
// CHECK: qax{{.*}} #4
// CHECK: qq{{.*}} #5
// CHECK: #0 = {{.*}}"target-cpu"="x86-64" "target-features"="+mmx,+sse,+sse2"
-// CHECK: #1 = {{.*}}"target-cpu"="ivybridge" "target-features"="+aes,+avx,+cx16,+f16c,+fsgsbase,+mmx,+pclmul,+popcnt,+rdrnd,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3"
-// CHECK: #2 = {{.*}}"target-cpu"="x86-64" "target-features"="+mmx,+sse,-aes,-avx,-avx2,-avx512bw,-avx512cd,-avx512dq,-avx512er,-avx512f,-avx512pf,-avx512vl,-f16c,-fma,-fma4,-pclmul,-sha,-sse2,-sse3,-sse4.1,-sse4.2,-sse4a,-ssse3,-xop"
+// CHECK: #1 = {{.*}}"target-cpu"="ivybridge" "target-features"="+aes,+avx,+cx16,+f16c,+fsgsbase,+mmx,+pclmul,+popcnt,+rdrnd,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+xsave,+xsaveopt"
+// CHECK: #2 = {{.*}}"target-cpu"="x86-64" "target-features"="+mmx,+sse,-aes,-avx,-avx2,-avx512bw,-avx512cd,-avx512dq,-avx512er,-avx512f,-avx512pf,-avx512vl,-f16c,-fma,-fma4,-pclmul,-sha,-sse2,-sse3,-sse4.1,-sse4.2,-sse4a,-ssse3,-xop,-xsave,-xsaveopt"
// CHECK: #3 = {{.*}}"target-cpu"="x86-64" "target-features"="+mmx,+popcnt,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3"
-// CHECK: #4 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cx16,+f16c,+fsgsbase,+mmx,+pclmul,+popcnt,+rdrnd,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,-aes"
+// CHECK: #4 = {{.*}}"target-cpu"="ivybridge" "target-features"="+avx,+cx16,+f16c,+fsgsbase,+mmx,+pclmul,+popcnt,+rdrnd,+sse,+sse2,+sse3,+sse4.1,+sse4.2,+ssse3,+xsave,+xsaveopt,-aes"
// CHECK: #5 = {{.*}}"target-cpu"="x86-64" "target-features"="+sse,+sse2,-3dnow,-3dnowa,-mmx"
// CHECK_COREI7_AVX_M32: #define __SSE4_2__ 1
// CHECK_COREI7_AVX_M32: #define __SSE__ 1
// CHECK_COREI7_AVX_M32: #define __SSSE3__ 1
+// CHECK_COREI7_AVX_M32: #define __XSAVEOPT__ 1
+// CHECK_COREI7_AVX_M32: #define __XSAVE__ 1
// CHECK_COREI7_AVX_M32: #define __corei7 1
// CHECK_COREI7_AVX_M32: #define __corei7__ 1
// CHECK_COREI7_AVX_M32: #define __i386 1
// CHECK_COREI7_AVX_M64: #define __SSE_MATH__ 1
// CHECK_COREI7_AVX_M64: #define __SSE__ 1
// CHECK_COREI7_AVX_M64: #define __SSSE3__ 1
+// CHECK_COREI7_AVX_M64: #define __XSAVEOPT__ 1
+// CHECK_COREI7_AVX_M64: #define __XSAVE__ 1
// CHECK_COREI7_AVX_M64: #define __amd64 1
// CHECK_COREI7_AVX_M64: #define __amd64__ 1
// CHECK_COREI7_AVX_M64: #define __corei7 1
// CHECK_CORE_AVX_I_M32: #define __SSE4_2__ 1
// CHECK_CORE_AVX_I_M32: #define __SSE__ 1
// CHECK_CORE_AVX_I_M32: #define __SSSE3__ 1
+// CHECK_CORE_AVX_I_M32: #define __XSAVEOPT__ 1
+// CHECK_CORE_AVX_I_M32: #define __XSAVE__ 1
// CHECK_CORE_AVX_I_M32: #define __corei7 1
// CHECK_CORE_AVX_I_M32: #define __corei7__ 1
// CHECK_CORE_AVX_I_M32: #define __i386 1
// CHECK_CORE_AVX_I_M64: #define __SSE_MATH__ 1
// CHECK_CORE_AVX_I_M64: #define __SSE__ 1
// CHECK_CORE_AVX_I_M64: #define __SSSE3__ 1
+// CHECK_CORE_AVX_I_M64: #define __XSAVEOPT__ 1
+// CHECK_CORE_AVX_I_M64: #define __XSAVE__ 1
// CHECK_CORE_AVX_I_M64: #define __amd64 1
// CHECK_CORE_AVX_I_M64: #define __amd64__ 1
// CHECK_CORE_AVX_I_M64: #define __corei7 1
// CHECK_CORE_AVX2_M32: #define __SSE4_2__ 1
// CHECK_CORE_AVX2_M32: #define __SSE__ 1
// CHECK_CORE_AVX2_M32: #define __SSSE3__ 1
+// CHECK_CORE_AVX2_M32: #define __XSAVEOPT__ 1
+// CHECK_CORE_AVX2_M32: #define __XSAVE__ 1
// CHECK_CORE_AVX2_M32: #define __corei7 1
// CHECK_CORE_AVX2_M32: #define __corei7__ 1
// CHECK_CORE_AVX2_M32: #define __i386 1
// CHECK_CORE_AVX2_M64: #define __SSE_MATH__ 1
// CHECK_CORE_AVX2_M64: #define __SSE__ 1
// CHECK_CORE_AVX2_M64: #define __SSSE3__ 1
+// CHECK_CORE_AVX2_M64: #define __XSAVEOPT__ 1
+// CHECK_CORE_AVX2_M64: #define __XSAVE__ 1
// CHECK_CORE_AVX2_M64: #define __amd64 1
// CHECK_CORE_AVX2_M64: #define __amd64__ 1
// CHECK_CORE_AVX2_M64: #define __corei7 1
// CHECK_BROADWELL_M32: #define __SSE4_2__ 1
// CHECK_BROADWELL_M32: #define __SSE__ 1
// CHECK_BROADWELL_M32: #define __SSSE3__ 1
+// CHECK_BROADWELL_M32: #define __XSAVEOPT__ 1
+// CHECK_BROADWELL_M32: #define __XSAVE__ 1
// CHECK_BROADWELL_M32: #define __corei7 1
// CHECK_BROADWELL_M32: #define __corei7__ 1
// CHECK_BROADWELL_M32: #define __i386 1
// CHECK_BROADWELL_M64: #define __SSE_MATH__ 1
// CHECK_BROADWELL_M64: #define __SSE__ 1
// CHECK_BROADWELL_M64: #define __SSSE3__ 1
+// CHECK_BROADWELL_M64: #define __XSAVEOPT__ 1
+// CHECK_BROADWELL_M64: #define __XSAVE__ 1
// CHECK_BROADWELL_M64: #define __amd64 1
// CHECK_BROADWELL_M64: #define __amd64__ 1
// CHECK_BROADWELL_M64: #define __corei7 1
// CHECK_KNL_M32: #define __SSE4_2__ 1
// CHECK_KNL_M32: #define __SSE__ 1
// CHECK_KNL_M32: #define __SSSE3__ 1
+// CHECK_KNL_M32: #define __XSAVEOPT__ 1
+// CHECK_KNL_M32: #define __XSAVE__ 1
// CHECK_KNL_M32: #define __i386 1
// CHECK_KNL_M32: #define __i386__ 1
// CHECK_KNL_M32: #define __knl 1
// CHECK_KNL_M64: #define __SSE_MATH__ 1
// CHECK_KNL_M64: #define __SSE__ 1
// CHECK_KNL_M64: #define __SSSE3__ 1
+// CHECK_KNL_M64: #define __XSAVEOPT__ 1
+// CHECK_KNL_M64: #define __XSAVE__ 1
// CHECK_KNL_M64: #define __amd64 1
// CHECK_KNL_M64: #define __amd64__ 1
// CHECK_KNL_M64: #define __knl 1
// CHECK_SKX_M32: #define __SSE4_2__ 1
// CHECK_SKX_M32: #define __SSE__ 1
// CHECK_SKX_M32: #define __SSSE3__ 1
+// CHECK_SKX_M32: #define __XSAVEC__ 1
+// CHECK_SKX_M32: #define __XSAVEOPT__ 1
+// CHECK_SKX_M32: #define __XSAVES__ 1
+// CHECK_SKX_M32: #define __XSAVE__ 1
// CHECK_SKX_M32: #define __i386 1
// CHECK_SKX_M32: #define __i386__ 1
// CHECK_SKX_M32: #define __skx 1
// CHECK_SKX_M64: #define __SSE_MATH__ 1
// CHECK_SKX_M64: #define __SSE__ 1
// CHECK_SKX_M64: #define __SSSE3__ 1
+// CHECK_SKX_M64: #define __XSAVEC__ 1
+// CHECK_SKX_M64: #define __XSAVEOPT__ 1
+// CHECK_SKX_M64: #define __XSAVES__ 1
+// CHECK_SKX_M64: #define __XSAVE__ 1
// CHECK_SKX_M64: #define __amd64 1
// CHECK_SKX_M64: #define __amd64__ 1
// CHECK_SKX_M64: #define __skx 1
// CHECK_BTVER1_M32: #define __SSE_MATH__ 1
// CHECK_BTVER1_M32: #define __SSE__ 1
// CHECK_BTVER1_M32: #define __SSSE3__ 1
+// CHECK_BTVER1_M32: #define __XSAVE__ 1
// CHECK_BTVER1_M32: #define __btver1 1
// CHECK_BTVER1_M32: #define __btver1__ 1
// CHECK_BTVER1_M32: #define __i386 1
// CHECK_BTVER1_M64: #define __SSE_MATH__ 1
// CHECK_BTVER1_M64: #define __SSE__ 1
// CHECK_BTVER1_M64: #define __SSSE3__ 1
+// CHECK_BTVER1_M64: #define __XSAVE__ 1
// CHECK_BTVER1_M64: #define __amd64 1
// CHECK_BTVER1_M64: #define __amd64__ 1
// CHECK_BTVER1_M64: #define __btver1 1
// CHECK_BTVER2_M32: #define __SSE_MATH__ 1
// CHECK_BTVER2_M32: #define __SSE__ 1
// CHECK_BTVER2_M32: #define __SSSE3__ 1
+// CHECK_BTVER2_M32: #define __XSAVEOPT__ 1
+// CHECK_BTVER2_M32: #define __XSAVE__ 1
// CHECK_BTVER2_M32: #define __btver2 1
// CHECK_BTVER2_M32: #define __btver2__ 1
// CHECK_BTVER2_M32: #define __i386 1
// CHECK_BTVER2_M64: #define __SSE_MATH__ 1
// CHECK_BTVER2_M64: #define __SSE__ 1
// CHECK_BTVER2_M64: #define __SSSE3__ 1
+// CHECK_BTVER2_M64: #define __XSAVEOPT__ 1
+// CHECK_BTVER2_M64: #define __XSAVE__ 1
// CHECK_BTVER2_M64: #define __amd64 1
// CHECK_BTVER2_M64: #define __amd64__ 1
// CHECK_BTVER2_M64: #define __btver2 1
// CHECK_BDVER1_M32: #define __SSE__ 1
// CHECK_BDVER1_M32: #define __SSSE3__ 1
// CHECK_BDVER1_M32: #define __XOP__ 1
+// CHECK_BDVER1_M32: #define __XSAVE__ 1
// CHECK_BDVER1_M32: #define __bdver1 1
// CHECK_BDVER1_M32: #define __bdver1__ 1
// CHECK_BDVER1_M32: #define __i386 1
// CHECK_BDVER1_M64: #define __SSE__ 1
// CHECK_BDVER1_M64: #define __SSSE3__ 1
// CHECK_BDVER1_M64: #define __XOP__ 1
+// CHECK_BDVER1_M64: #define __XSAVE__ 1
// CHECK_BDVER1_M64: #define __amd64 1
// CHECK_BDVER1_M64: #define __amd64__ 1
// CHECK_BDVER1_M64: #define __bdver1 1
// CHECK_BDVER2_M32: #define __SSSE3__ 1
// CHECK_BDVER2_M32: #define __TBM__ 1
// CHECK_BDVER2_M32: #define __XOP__ 1
+// CHECK_BDVER2_M32: #define __XSAVE__ 1
// CHECK_BDVER2_M32: #define __bdver2 1
// CHECK_BDVER2_M32: #define __bdver2__ 1
// CHECK_BDVER2_M32: #define __i386 1
// CHECK_BDVER2_M64: #define __SSSE3__ 1
// CHECK_BDVER2_M64: #define __TBM__ 1
// CHECK_BDVER2_M64: #define __XOP__ 1
+// CHECK_BDVER2_M64: #define __XSAVE__ 1
// CHECK_BDVER2_M64: #define __amd64 1
// CHECK_BDVER2_M64: #define __amd64__ 1
// CHECK_BDVER2_M64: #define __bdver2 1
// CHECK_BDVER3_M32: #define __SSSE3__ 1
// CHECK_BDVER3_M32: #define __TBM__ 1
// CHECK_BDVER3_M32: #define __XOP__ 1
+// CHECK_BDVER3_M32: #define __XSAVEOPT__ 1
+// CHECK_BDVER3_M32: #define __XSAVE__ 1
// CHECK_BDVER3_M32: #define __bdver3 1
// CHECK_BDVER3_M32: #define __bdver3__ 1
// CHECK_BDVER3_M32: #define __i386 1
// CHECK_BDVER3_M64: #define __SSSE3__ 1
// CHECK_BDVER3_M64: #define __TBM__ 1
// CHECK_BDVER3_M64: #define __XOP__ 1
+// CHECK_BDVER3_M64: #define __XSAVEOPT__ 1
+// CHECK_BDVER3_M64: #define __XSAVE__ 1
// CHECK_BDVER3_M64: #define __amd64 1
// CHECK_BDVER3_M64: #define __amd64__ 1
// CHECK_BDVER3_M64: #define __bdver3 1
// CHECK_BDVER4_M32: #define __SSSE3__ 1
// CHECK_BDVER4_M32: #define __TBM__ 1
// CHECK_BDVER4_M32: #define __XOP__ 1
+// CHECK_BDVER4_M32: #define __XSAVEOPT__ 1
+// CHECK_BDVER4_M32: #define __XSAVE__ 1
// CHECK_BDVER4_M32: #define __bdver4 1
// CHECK_BDVER4_M32: #define __bdver4__ 1
// CHECK_BDVER4_M32: #define __i386 1
// CHECK_BDVER4_M64: #define __SSSE3__ 1
// CHECK_BDVER4_M64: #define __TBM__ 1
// CHECK_BDVER4_M64: #define __XOP__ 1
+// CHECK_BDVER4_M64: #define __XSAVEOPT__ 1
+// CHECK_BDVER4_M64: #define __XSAVE__ 1
// CHECK_BDVER4_M64: #define __amd64 1
// CHECK_BDVER4_M64: #define __amd64__ 1
// CHECK_BDVER4_M64: #define __bdver4 1
// RDSEED: #define __RDSEED__ 1
+// RUN: %clang -target i386-unknown-unknown -march=atom -mxsave -x c -E -dM -o - %s | FileCheck --check-prefix=XSAVE %s
+
+// XSAVE: #define __XSAVE__ 1
+
+// RUN: %clang -target i386-unknown-unknown -march=atom -mxsaveopt -x c -E -dM -o - %s | FileCheck --check-prefix=XSAVEOPT %s
+
+// XSAVEOPT: #define __XSAVEOPT__ 1
+// XSAVEOPT: #define __XSAVE__ 1
+
+// RUN: %clang -target i386-unknown-unknown -march=atom -mxsavec -x c -E -dM -o - %s | FileCheck --check-prefix=XSAVEC %s
+
+// XSAVEC: #define __XSAVEC__ 1
+// XSAVEC: #define __XSAVE__ 1
+
+// RUN: %clang -target i386-unknown-unknown -march=atom -mxsaves -x c -E -dM -o - %s | FileCheck --check-prefix=XSAVES %s
+
+// XSAVES: #define __XSAVES__ 1
+// XSAVES: #define __XSAVE__ 1
+
+// RUN: %clang -target i386-unknown-unknown -march=atom -mxsaveopt -mno-xsave -x c -E -dM -o - %s | FileCheck --check-prefix=NOXSAVE %s
+
+// NOXSAVE-NOT: #define __XSAVEOPT__ 1
+// NOXSAVE-NOT: #define __XSAVE__ 1