 #if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
 # if defined(__GNUC__) && __GNUC__>=2
 #  if defined(__x86_64) || defined(__x86_64__)
-#   define BSWAP8(x) ({ u64 ret=(x);                   \
+#   define BSWAP8(x) ({ u64 ret_=(x);                  \
                         asm ("bswapq %0"               \
-                        : "+r"(ret));   ret;          })
-#   define BSWAP4(x) ({ u32 ret=(x);                   \
+                        : "+r"(ret_));  ret_;         })
+#   define BSWAP4(x) ({ u32 ret_=(x);                  \
                         asm ("bswapl %0"               \
-                        : "+r"(ret));   ret;          })
+                        : "+r"(ret_));  ret_;         })
 #  elif (defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)
-#   define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);    \
+#   define BSWAP8(x) ({ u32 lo_=(u64)(x)>>32,hi_=(x);  \
                         asm ("bswapl %0; bswapl %1"    \
-                        : "+r"(hi),"+r"(lo));          \
-                        (u64)hi<<32|lo;               })
-#   define BSWAP4(x) ({ u32 ret=(x);                   \
+                        : "+r"(hi_),"+r"(lo_));        \
+                        (u64)hi_<<32|lo_;             })
+#   define BSWAP4(x) ({ u32 ret_=(x);                  \
                         asm ("bswapl %0"               \
-                        : "+r"(ret));   ret;          })
+                        : "+r"(ret_));  ret_;         })
 #  elif defined(__aarch64__)
-#   define BSWAP8(x) ({ u64 ret;                       \
+#   define BSWAP8(x) ({ u64 ret_;                      \
                         asm ("rev %0,%1"               \
-                        : "=r"(ret) : "r"(x)); ret;   })
-#   define BSWAP4(x) ({ u32 ret;                       \
+                        : "=r"(ret_) : "r"(x)); ret_; })
+#   define BSWAP4(x) ({ u32 ret_;                      \
                         asm ("rev %w0,%w1"             \
-                        : "=r"(ret) : "r"(x)); ret;   })
+                        : "=r"(ret_) : "r"(x)); ret_; })
 #  elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
-#   define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);    \
+#   define BSWAP8(x) ({ u32 lo_=(u64)(x)>>32,hi_=(x);  \
                         asm ("rev %0,%0; rev %1,%1"    \
-                        : "+r"(hi),"+r"(lo));          \
-                        (u64)hi<<32|lo;               })
-#   define BSWAP4(x) ({ u32 ret;                       \
+                        : "+r"(hi_),"+r"(lo_));        \
+                        (u64)hi_<<32|lo_;             })
+#   define BSWAP4(x) ({ u32 ret_;                      \
                         asm ("rev %0,%1"               \
-                        : "=r"(ret) : "r"((u32)(x))); \
-                        ret;                          })
+                        : "=r"(ret_) : "r"((u32)(x))); \
+                        ret_;                         })
 #  endif
 # elif defined(_MSC_VER)
 #  if _MSC_VER>=1300
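
For context, not as part of the patch: these macros are GNU C statement expressions, and a statement expression opens a new scope. With the old names, an argument that mentions a caller variable named ret (or hi/lo) is captured by the macro's own local, so the initializer u32 ret=(x) ends up reading the freshly declared, still-indeterminate local rather than the caller's value. The trailing underscore makes such a collision unlikely while staying outside the leading-underscore identifier space reserved for the implementation. The following is a minimal standalone sketch of that hazard; BSWAP4_OLD, BSWAP4_NEW, and the driver are illustrative names only, and it assumes an x86 target built with GCC or Clang.

/*
 * Sketch of the shadowing hazard the rename avoids; not OpenSSL code.
 * Assumes an x86-64 or i386 target compiled with GCC or Clang.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;

/* Pre-patch spelling: the statement-expression local is named ret. */
#define BSWAP4_OLD(x) ({ u32 ret = (x);                 \
                         asm ("bswapl %0" : "+r"(ret)); \
                         ret; })

/* Post-patch spelling: the local is ret_, unlikely to clash. */
#define BSWAP4_NEW(x) ({ u32 ret_ = (x);                  \
                         asm ("bswapl %0" : "+r"(ret_));  \
                         ret_; })

int main(void)
{
    u32 ret = 0x11223344;

    /*
     * Expands to ({ u32 ret = (ret); ... }): in C, an identifier is in
     * scope from the end of its own declarator, so the initializer
     * reads the brand-new local, not the caller's ret. The value byte-
     * swapped here is indeterminate.
     */
    printf("old: %08x\n", BSWAP4_OLD(ret));

    /*
     * Expands to ({ u32 ret_ = (ret); ... }): ret now resolves to the
     * caller's variable, so this reliably prints 44332211.
     */
    printf("new: %08x\n", BSWAP4_NEW(ret));
    return 0;
}

A trailing rather than leading underscore is presumably deliberate: identifiers beginning with an underscore are reserved for the implementation in various contexts, while a trailing underscore keeps the name in user space and still makes an accidental match in a macro argument improbable.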