#define BP_JIT_IS 6
+#define CAN_USE_AVX() (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX)
+
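For illustration only, not part of the patch: a minimal, self-contained C sketch of the dispatch pattern that CAN_USE_AVX() centralizes. A bitmask of requested optimization flags is intersected with the flags the host CPU allows, and the result selects between the AVX and the SSE encoding of an operation. The names jit_opt_flags, allowed_flags, CPU_AVX and emit_zero_xmm below are stand-ins, not the real JIT_G(opt_flags), allowed_opt_flags or ZEND_JIT_CPU_AVX symbols.

#include <stdint.h>
#include <stdio.h>

#define CPU_AVX (1u << 0)

static uint32_t jit_opt_flags = CPU_AVX;  /* flags requested by configuration (stand-in) */
static uint32_t allowed_flags = CPU_AVX;  /* flags the running CPU supports (stand-in) */

#define CAN_USE_AVX() (jit_opt_flags & allowed_flags & CPU_AVX)

/* Emit either the AVX or the SSE form of "zero an XMM register". */
static void emit_zero_xmm(int reg)
{
	if (CAN_USE_AVX()) {
		printf("vxorps xmm%d, xmm%d, xmm%d\n", reg, reg, reg);
	} else {
		printf("xorps xmm%d, xmm%d\n", reg, reg);
	}
}

int main(void)
{
	emit_zero_xmm(0);
	return 0;
}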
|.macro LOAD_ADDR, reg, addr
| .if X64
|| if (IS_32BIT(addr)) {
|.endmacro
|.macro SSE_AVX_INS, sse_ins, avx_ins, op1, op2
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| avx_ins op1, op2
|| } else {
| sse_ins op1, op2
|.macro SSE_GET_LONG, reg, lval
|| if (lval == 0) {
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| vxorps xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0)
|| } else {
| xorps xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0)
|.else
| mov r0, lval
|.endif
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| vxorps xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0)
| vcvtsi2sd xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), r0
|| } else {
|| if (Z_MODE(addr) == IS_CONST_ZVAL) {
| SSE_GET_LONG reg, Z_LVAL_P(Z_ZV(addr))
|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) {
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| vxorps xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0)
| vcvtsi2sd xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), aword [Ra(Z_REG(addr))+Z_OFFSET(addr)]
|| } else {
| cvtsi2sd xmm(reg-ZREG_XMM0), aword [Ra(Z_REG(addr))+Z_OFFSET(addr)]
|| }
|| } else if (Z_MODE(addr) == IS_REG) {
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| vxorps xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0)
| vcvtsi2sd xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), Ra(Z_REG(addr))
|| } else {
|| if (Z_TYPE_P(zv) == IS_DOUBLE) {
|| zend_reg dst_reg = (Z_MODE(dst_addr) == IS_REG) ? Z_REG(dst_addr) : ZREG_XMM0;
|| if (Z_DVAL_P(zv) == 0.0 && !is_signed(Z_DVAL_P(zv))) {
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| vxorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| } else {
| xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| zend_reg dst_reg = (Z_MODE(dst_addr) == IS_REG) ?
|| Z_REG(dst_addr) : ((Z_MODE(res_addr) == IS_REG) ? Z_REG(res_addr) : ZREG_XMM0);
|| if (Z_DVAL_P(zv) == 0.0 && !is_signed(Z_DVAL_P(zv))) {
-|| if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+|| if (CAN_USE_AVX()) {
| vxorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| } else {
| xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
}
| SSE_GET_ZVAL_DVAL tmp_reg, op1_addr
if (opline->opcode == ZEND_PRE_INC || opline->opcode == ZEND_POST_INC) {
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| vaddsd xmm(tmp_reg-ZREG_XMM0), xmm(tmp_reg-ZREG_XMM0), qword [->one]
} else {
| addsd xmm(tmp_reg-ZREG_XMM0), qword [->one]
}
} else {
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| vsubsd xmm(tmp_reg-ZREG_XMM0), xmm(tmp_reg-ZREG_XMM0), qword [->one]
} else {
| subsd xmm(tmp_reg-ZREG_XMM0), qword [->one]
| SSE_GET_ZVAL_LVAL tmp_reg1, op1_addr
| SSE_GET_ZVAL_LVAL tmp_reg2, op2_addr
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| AVX_MATH_REG opcode, tmp_reg1, tmp_reg1, tmp_reg2
} else {
| SSE_MATH_REG opcode, tmp_reg1, tmp_reg2
if (Z_MODE(res_addr) == IS_MEM_ZVAL && Z_REG(res_addr) == ZREG_R0) {
/* ASSIGN_DIM_OP */
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| AVX_MATH opcode, result_reg, result_reg, op2_addr, r1
} else {
| SSE_MATH opcode, result_reg, op2_addr, r1
}
} else {
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| AVX_MATH opcode, result_reg, result_reg, op2_addr, r0
} else {
| SSE_MATH opcode, result_reg, op2_addr, r0
| SSE_GET_ZVAL_LVAL result_reg, op2_addr
if (Z_MODE(res_addr) == IS_MEM_ZVAL && Z_REG(res_addr) == ZREG_R0) {
/* ASSIGN_DIM_OP */
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| AVX_MATH opcode, result_reg, result_reg, op1_addr, r1
} else {
| SSE_MATH opcode, result_reg, op1_addr, r1
}
} else {
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| AVX_MATH opcode, result_reg, result_reg, op1_addr, r0
} else {
| SSE_MATH opcode, result_reg, op1_addr, r0
result_reg = ZREG_XMM0;
tmp_reg = ZREG_XMM1;
}
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
zend_reg op1_reg;
if (Z_MODE(op1_addr) == IS_REG) {
result_reg = ZREG_XMM0;
}
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
zend_reg op1_reg;
zend_jit_addr val_addr;
}
if ((op1_info & MAY_BE_ANY) == MAY_BE_DOUBLE) {
- if (JIT_G(opt_flags) & allowed_opt_flags & ZEND_JIT_CPU_AVX) {
+ if (CAN_USE_AVX()) {
| vxorps xmm0, xmm0, xmm0
} else {
| xorps xmm0, xmm0