BUILTIN(__builtin_ia32_wrgsbase32, "vUi", "")
BUILTIN(__builtin_ia32_wrgsbase64, "vULLi", "")
+// FXSR
+BUILTIN(__builtin_ia32_fxrstor, "vv*", "")
+BUILTIN(__builtin_ia32_fxrstor64, "vv*", "")
+BUILTIN(__builtin_ia32_fxsave, "vv*", "")
+BUILTIN(__builtin_ia32_fxsave64, "vv*", "")
+
// ADX
BUILTIN(__builtin_ia32_addcarryx_u32, "UcUcUiUiUi*", "")
BUILTIN(__builtin_ia32_addcarryx_u64, "UcUcULLiULLiULLi*", "")
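For reference, the prototype strings above follow the Builtins.def encoding ('v' is void, 'Uc' unsigned char, 'Ui' unsigned int, 'ULLi' unsigned long long, and a trailing '*' makes the preceding type a pointer), so the four new FXSR builtins all decode to the same C-level shape:

    void __builtin_ia32_fxsave(void *);     /* "vv*" */
    void __builtin_ia32_fxsave64(void *);   /* "vv*" */
    void __builtin_ia32_fxrstor(void *);    /* "vv*" */
    void __builtin_ia32_fxrstor64(void *);  /* "vv*" */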
float.h
fma4intrin.h
fmaintrin.h
+ fxsrintrin.h
htmintrin.h
htmxlintrin.h
ia32intrin.h
unsigned short __cdecl _byteswap_ushort(unsigned short);
void __cdecl _disable(void);
void __cdecl _enable(void);
-void __cdecl _fxrstor(void const *);
-void __cdecl _fxsave(void *);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
-void __cdecl _fxrstor64(void const *);
-void __cdecl _fxsave64(void *);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
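Removing the _fxsave/_fxrstor declarations here should be transparent to Microsoft-mode code, since in this tree Intrin.h pulls in x86intrin.h (and thus immintrin.h), which now supplies the inline definitions from fxsrintrin.h. A minimal sketch (illustrative names; assumes an x86 target compiled with MS extensions enabled):

    #include <Intrin.h>

    void save_state(void *buf) {
      _fxsave(buf);  /* now resolves to the inline definition in fxsrintrin.h */
    }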
--- /dev/null
+/*===---- fxsrintrin.h - FXSR intrinsics -----------------------------------===
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ *
+ *===-----------------------------------------------------------------------===
+ */
+
+#ifndef __IMMINTRIN_H
+#error "Never use <fxsrintrin.h> directly; include <immintrin.h> instead."
+#endif
+
+#ifndef __FXSRINTRIN_H
+#define __FXSRINTRIN_H
+
+#define DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))
+
+/* Save the x87 FPU, MMX, and SSE state into the 512-byte region at __p.
+   The region must be 16-byte aligned. */
+static __inline__ void DEFAULT_FN_ATTRS
+_fxsave(void *__p) {
+  __builtin_ia32_fxsave(__p);
+}
+
+/* REX.W form (FXSAVE64); records the FPU instruction and operand pointers
+   as full 64-bit addresses. */
+static __inline__ void DEFAULT_FN_ATTRS
+_fxsave64(void *__p) {
+  __builtin_ia32_fxsave64(__p);
+}
+
+/* Restore the x87 FPU, MMX, and SSE state from the 512-byte region at __p. */
+static __inline__ void DEFAULT_FN_ATTRS
+_fxrstor(void *__p) {
+  __builtin_ia32_fxrstor(__p);
+}
+
+/* REX.W form (FXRSTOR64) of _fxrstor. */
+static __inline__ void DEFAULT_FN_ATTRS
+_fxrstor64(void *__p) {
+  __builtin_ia32_fxrstor64(__p);
+}
+
+#undef DEFAULT_FN_ATTRS
+
+#endif /* __FXSRINTRIN_H */
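A minimal usage sketch of the new intrinsics (illustrative names; assumes a CPU with FXSR support; per the Intel SDM the FXSAVE area is 512 bytes and must be 16-byte aligned):

    #include <immintrin.h>

    static char __attribute__((aligned(16))) fx_area[512];

    void checkpoint_fpu(void) {
      _fxsave(fx_area);   /* snapshot the x87/MMX/SSE state into fx_area */
    }

    void restore_fpu(void) {
      _fxrstor(fx_area);  /* reload the snapshot saved above */
    }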
#include <shaintrin.h>
+#include <fxsrintrin.h>
+
/* Some intrinsics inside adxintrin.h are available only on processors with ADX,
* whereas others are also available at all times. */
#include <adxintrin.h>
(void) __builtin_ia32_ldmxcsr(tmp_Ui);
tmp_Ui = __builtin_ia32_stmxcsr();
+ (void) __builtin_ia32_fxsave(tmp_vp);
+ (void) __builtin_ia32_fxsave64(tmp_vp);
+ (void) __builtin_ia32_fxrstor(tmp_vp);
+ (void) __builtin_ia32_fxrstor64(tmp_vp);
tmp_V4f = __builtin_ia32_cvtpi2ps(tmp_V4f, tmp_V2i);
tmp_V2i = __builtin_ia32_cvtps2pi(tmp_V4f);
tmp_i = __builtin_ia32_cvtss2si(tmp_V4f);