|.section code, cold_code, jmp_table
-#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0xffffffff)
+#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0x7fffffff)
#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && (((intptr_t)(val)) >= (-2147483647 - 1)))
|.macro LOAD_ADDR, reg, addr
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mov reg, ((ptrdiff_t)addr) // 0x48 0xc7 0xc0 <imm-32-bit>
|| } else {
| mov64 reg, ((ptrdiff_t)addr) // 0x48 0xb8 <imm-64-bit>
|.macro ADDR_OP1, addr_ins, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| addr_ins ((ptrdiff_t)addr)
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
|.macro ADDR_OP2_2, addr_ins, op1, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| addr_ins op1, ((ptrdiff_t)addr)
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
|.macro MEM_OP1, mem_ins, prefix, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins prefix [addr]
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
|.macro MEM_OP2_1, mem_ins, prefix, addr, op2, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins prefix [addr], op2
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
|.macro MEM_OP2_2, mem_ins, op1, prefix, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins op1, prefix [addr]
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
|.macro MEM_OP3_3, mem_ins, op1, op2, prefix, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins op1, op2, prefix [addr]
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
|.macro SSE_AVX_OP, sse_ins, avx_ins, reg, addr
|| if (Z_MODE(addr) == IS_CONST_ZVAL) {
| .if X64
-|| if (IS_32BIT(Z_ZV(addr))) {
+|| if (IS_SIGNED_32BIT(Z_ZV(addr))) {
| SSE_AVX_INS sse_ins, avx_ins, xmm(reg-ZREG_XMM0), qword [Z_ZV(addr)]
|| } else {
| LOAD_ADDR r0, Z_ZV(addr)
|| if (Z_MODE(addr) != IS_REG || reg != Z_REG(addr)) {
|| if (Z_MODE(addr) == IS_CONST_ZVAL) {
| .if X64
-|| if (IS_32BIT(Z_ZV(addr))) {
+|| if (IS_SIGNED_32BIT(Z_ZV(addr))) {
| SSE_AVX_INS movsd, vmovsd, xmm(reg-ZREG_XMM0), qword [Z_ZV(addr)]
|| } else {
| LOAD_ADDR r0, Z_ZV(addr)
| xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| }
| .if X64
-|| } else if (!IS_32BIT(zv)) {
+|| } else if (!IS_SIGNED_32BIT(zv)) {
| mov64 Ra(tmp_reg), ((uintptr_t)zv)
| SSE_AVX_INS movsd, vmovsd, xmm(dst_reg-ZREG_XMM0), qword [Ra(tmp_reg)]
| .endif
| xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| }
| .if X64
-|| } else if (!IS_32BIT(zv)) {
+|| } else if (!IS_SIGNED_32BIT(zv)) {
| mov64 Ra(tmp_reg), ((uintptr_t)zv)
| SSE_AVX_INS movsd, vmovsd, xmm(dst_reg-ZREG_XMM0), qword [Ra(tmp_reg)]
| .endif
#if ZTS
if (1) {
#else
- if ((sizeof(void*) == 8 && !IS_32BIT(&EG(jit_trace_num)))) {
+ if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(jit_trace_num)))) {
#endif
 /* assignment to EG(jit_trace_num) shouldn't clobber the CPU register used by the deoptimizer */
if (parent) {
zval *zv = Z_ZV(val_addr);
if (Z_TYPE_P(zv) == IS_DOUBLE) {
- if (Z_DVAL_P(zv) == 0 || IS_32BIT(zv)) {
+ if (Z_DVAL_P(zv) == 0 || IS_SIGNED_32BIT(zv)) {
keep_gc = 1;
}
} else if (IS_SIGNED_32BIT(Z_LVAL_P(zv))) {
|.if X64
|| if (op_type == IS_CONST) {
|| zval *zv = RT_CONSTANT(opline, op);
-|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_32BIT(zv)) {
+|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_SIGNED_32BIT(zv)) {
|| return 1;
|| } else if (Z_TYPE_P(zv) == IS_LONG && !IS_SIGNED_32BIT(Z_LVAL_P(zv))) {
|| return 1;
#if ZTS
ZEND_REGSET_INCL(regset, ZREG_R0);
#else
- if ((sizeof(void*) == 8 && !IS_32BIT(&EG(vm_interrupt)))) {
+ if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) {
ZEND_REGSET_INCL(regset, ZREG_R0);
}
#endif
#if ZTS
ZEND_REGSET_INCL(regset, ZREG_R0);
#else
- if ((sizeof(void*) == 8 && !IS_32BIT(&EG(vm_interrupt)))) {
+ if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) {
ZEND_REGSET_INCL(regset, ZREG_R0);
}
#endif