ast->kind = ZEND_AST_ZVAL;
ast->attr = attr;
ZVAL_COPY_VALUE(&ast->val, zv);
- ast->val.u2.lineno = lineno;
+ Z_LINENO(ast->val) = lineno;
return (zend_ast *) ast;
}
ast->kind = ZEND_AST_CONSTANT;
ast->attr = attr;
ZVAL_STR(&ast->val, name);
- ast->val.u2.lineno = CG(zend_lineno);
+ Z_LINENO(ast->val) = CG(zend_lineno);
return (zend_ast *) ast;
}
static zend_always_inline uint32_t zend_ast_get_lineno(zend_ast *ast) {
if (ast->kind == ZEND_AST_ZVAL) {
zval *zv = zend_ast_get_zval(ast);
- return zv->u2.lineno;
+ return Z_LINENO_P(zv);
} else {
return ast->lineno;
}
fast_call = ZEND_CALL_VAR(ex, ex->func->op_array.opcodes[finally_op_end].op1.var);
Z_OBJ_P(fast_call) = EG(exception);
EG(exception) = NULL;
- fast_call->u2.lineno = (uint32_t)-1;
+ Z_OPLINE_NUM_P(fast_call) = (uint32_t)-1;
ex->opline = &ex->func->op_array.opcodes[finally_op_num];
generator->flags |= ZEND_GENERATOR_FORCED_CLOSE;
/* hash values are always pre-calculated here */
(EXPECTED(ZSTR_H(str) == ZSTR_H(member)) &&
EXPECTED(zend_string_equal_content(str, member)))) {
- return &zv->u2.property_guard;
- } else if (EXPECTED(zv->u2.property_guard == 0)) {
+ return &Z_PROPERTY_GUARD_P(zv);
+ } else if (EXPECTED(Z_PROPERTY_GUARD_P(zv) == 0)) {
zend_string_release(Z_STR_P(zv));
ZVAL_STR_COPY(zv, member);
- return &zv->u2.property_guard;
+ return &Z_PROPERTY_GUARD_P(zv);
} else {
ALLOC_HASHTABLE(guards);
zend_hash_init(guards, 8, NULL, zend_property_guard_dtor, 0);
/* mark pointer as "special" by setting its low bit */
zend_hash_add_new_ptr(guards, str,
- (void*)(((zend_uintptr_t)&zv->u2.property_guard) | 1));
+ (void*)(((zend_uintptr_t)&Z_PROPERTY_GUARD_P(zv)) | 1));
zend_string_release(Z_STR_P(zv));
ZVAL_ARR(zv, guards);
}
ZEND_ASSERT(Z_TYPE_P(zv) == IS_UNDEF);
OBJ_FLAGS(zobj) |= IS_OBJ_HAS_GUARDS;
ZVAL_STR_COPY(zv, member);
- zv->u2.property_guard = 0;
- return &zv->u2.property_guard;
+ Z_PROPERTY_GUARD_P(zv) = 0;
+ return &Z_PROPERTY_GUARD_P(zv);
}
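The "low bit" marking above is a standard pointer-tagging trick: because the uint32_t guard slot sits at an aligned address, bit 0 of its address is always zero and can be borrowed as a flag, provided the bit is masked off again before the pointer is dereferenced. A minimal standalone sketch of that tag/check/untag pattern follows; the names (tag_ptr, untag_ptr, is_tagged) are illustrative only, not engine APIs.

/* Standalone sketch of low-bit pointer tagging (simplified, not the engine's guard code). */
#include <stdint.h>
#include <stdio.h>

static void *tag_ptr(uint32_t *p)      { return (void *)((uintptr_t)p | 1); }
static int is_tagged(const void *p)    { return ((uintptr_t)p & 1) != 0; }
static uint32_t *untag_ptr(void *p)    { return (uint32_t *)((uintptr_t)p & ~(uintptr_t)1); }

int main(void) {
	uint32_t guard = 0;              /* stands in for the zval's u2.property_guard slot */
	void *stored = tag_ptr(&guard);  /* the tagged pointer that would go into the guards table */

	if (is_tagged(stored)) {         /* bit 0 set: this entry points at an inline guard slot */
		*untag_ptr(stored) = 42;     /* mask the bit off before dereferencing */
	}
	printf("guard = %u\n", guard);   /* prints "guard = 42" */
	return 0;
}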
/* we have to allocate uint32_t separately because ht->arData may be reallocated */
ptr = (uint32_t*)emalloc(sizeof(uint32_t));
union {
uint32_t next; /* hash collision chain */
uint32_t cache_slot; /* literal cache slot */
+ uint32_t opline_num; /* opline number (for FAST_CALL) */
uint32_t lineno; /* line number (for ast nodes) */
uint32_t num_args; /* arguments number for EX(This) */
uint32_t fe_pos; /* foreach position */
#define Z_CACHE_SLOT(zval) (zval).u2.cache_slot
#define Z_CACHE_SLOT_P(zval_p) Z_CACHE_SLOT(*(zval_p))
+#define Z_LINENO(zval) (zval).u2.lineno
+#define Z_LINENO_P(zval_p) Z_LINENO(*(zval_p))
+
+#define Z_OPLINE_NUM(zval) (zval).u2.opline_num
+#define Z_OPLINE_NUM_P(zval_p) Z_OPLINE_NUM(*(zval_p))
+
#define Z_FE_POS(zval) (zval).u2.fe_pos
#define Z_FE_POS_P(zval_p) Z_FE_POS(*(zval_p))
#define Z_ACCESS_FLAGS(zval) (zval).u2.access_flags
#define Z_ACCESS_FLAGS_P(zval_p) Z_ACCESS_FLAGS(*(zval_p))
+#define Z_PROPERTY_GUARD(zval) (zval).u2.property_guard
+#define Z_PROPERTY_GUARD_P(zval_p) Z_PROPERTY_GUARD(*(zval_p))
+
#define Z_EXTRA(zval) (zval).u2.extra
#define Z_EXTRA_P(zval_p) Z_EXTRA(*(zval_p))
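The new Z_LINENO, Z_OPLINE_NUM and Z_PROPERTY_GUARD macros follow the same pattern as the existing Z_CACHE_SLOT/Z_FE_POS accessors: each is a plain lvalue alias for one member of the zval's u2 union, so switching a call site from zv->u2.lineno to Z_OPLINE_NUM_P(zv) only changes which name documents the intent, not the storage being read or written. A minimal standalone sketch of the pattern, using simplified stand-in types (demo_zval and DEMO_OPLINE_NUM are illustrative, not the real zend_types.h definitions):

/* Standalone sketch of the u2 accessor-macro pattern (simplified types). */
#include <stdint.h>
#include <stdio.h>

typedef struct _demo_zval {
	union {
		uint32_t lineno;         /* line number (for ast nodes) */
		uint32_t opline_num;     /* opline number (for FAST_CALL) */
		uint32_t property_guard; /* single property guard */
	} u2;
} demo_zval;

#define DEMO_OPLINE_NUM(zval)       (zval).u2.opline_num
#define DEMO_OPLINE_NUM_P(zval_p)   DEMO_OPLINE_NUM(*(zval_p))

int main(void) {
	demo_zval zv;
	DEMO_OPLINE_NUM_P(&zv) = (uint32_t)-1;   /* usable as an lvalue, just like zv.u2.opline_num */
	printf("%u\n", DEMO_OPLINE_NUM(zv));     /* prints 4294967295 */
	return 0;
}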
cleanup_live_vars(execute_data, op_num, try_catch->finally_op);
Z_OBJ_P(fast_call) = EG(exception);
EG(exception) = NULL;
- fast_call->u2.lineno = (uint32_t)-1;
+ Z_OPLINE_NUM_P(fast_call) = (uint32_t)-1;
ZEND_VM_SET_OPCODE(&EX(func)->op_array.opcodes[try_catch->finally_op]);
ZEND_VM_CONTINUE();
zval *fast_call = EX_VAR(EX(func)->op_array.opcodes[try_catch->finally_end].op1.var);
/* cleanup incomplete RETURN statement */
- if (fast_call->u2.lineno != (uint32_t)-1
- && (EX(func)->op_array.opcodes[fast_call->u2.lineno].op2_type & (IS_TMP_VAR | IS_VAR))) {
- zval *return_value = EX_VAR(EX(func)->op_array.opcodes[fast_call->u2.lineno].op2.var);
+ if (Z_OPLINE_NUM_P(fast_call) != (uint32_t)-1
+ && (EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2_type & (IS_TMP_VAR | IS_VAR))) {
+ zval *return_value = EX_VAR(EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2.var);
zval_ptr_dtor(return_value);
}
SAVE_OPLINE();
/* cleanup incomplete RETURN statement */
- if (fast_call->u2.lineno != (uint32_t)-1
- && (EX(func)->op_array.opcodes[fast_call->u2.lineno].op2_type & (IS_TMP_VAR | IS_VAR))) {
- zval *return_value = EX_VAR(EX(func)->op_array.opcodes[fast_call->u2.lineno].op2.var);
+ if (Z_OPLINE_NUM_P(fast_call) != (uint32_t)-1
+ && (EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2_type & (IS_TMP_VAR | IS_VAR))) {
+ zval *return_value = EX_VAR(EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2.var);
zval_ptr_dtor(return_value);
}
Z_OBJ_P(fast_call) = NULL;
/* set return address */
- fast_call->u2.lineno = opline - EX(func)->op_array.opcodes;
+ Z_OPLINE_NUM_P(fast_call) = opline - EX(func)->op_array.opcodes;
ZEND_VM_SET_OPCODE(OP_JMP_ADDR(opline, opline->op1));
ZEND_VM_CONTINUE();
}
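For context on the FAST_CALL/FAST_RET hunks above and below: FAST_CALL records the index of its own opline in the temporary's u2 slot (now named opline_num rather than lineno) before jumping into the finally block, and FAST_RET resumes at the opline after that saved index; (uint32_t)-1 in the slot means no return address was saved, i.e. the finally block was entered on the exception or forced-close path. A minimal standalone model of that return-address protocol follows; the opcode names and dispatch loop are illustrative, not the engine's dispatcher.

/* Standalone model of the FAST_CALL / FAST_RET return-address protocol (simplified). */
#include <stdint.h>
#include <stdio.h>

enum { OP_FAST_CALL, OP_ECHO, OP_FAST_RET, OP_RETURN };

int main(void) {
	int opcodes[] = { OP_ECHO, OP_FAST_CALL, OP_RETURN, OP_ECHO, OP_FAST_RET };
	uint32_t finally_op = 3;          /* first opline of the finally block */
	uint32_t saved = (uint32_t)-1;    /* the fast_call temporary's opline_num slot */
	uint32_t ip = 0;

	while (1) {
		switch (opcodes[ip]) {
		case OP_FAST_CALL:
			saved = ip;               /* set return address */
			ip = finally_op;          /* jump into finally */
			continue;
		case OP_FAST_RET:
			if (saved != (uint32_t)-1) {
				ip = saved + 1;       /* resume right after the FAST_CALL */
				saved = (uint32_t)-1;
				continue;
			}
			return 1;                 /* no return address: exception / forced-close path */
		case OP_ECHO:
			printf("echo at %u\n", ip);
			ip++;
			continue;
		case OP_RETURN:
			return 0;
		}
	}
}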
zval *fast_call = EX_VAR(opline->op1.var);
uint32_t current_try_catch_offset, current_op_num;
- if (fast_call->u2.lineno != (uint32_t)-1) {
- const zend_op *fast_ret = EX(func)->op_array.opcodes + fast_call->u2.lineno;
+ if (Z_OPLINE_NUM_P(fast_call) != (uint32_t)-1) {
+ const zend_op *fast_ret = EX(func)->op_array.opcodes + Z_OPLINE_NUM_P(fast_call);
ZEND_VM_SET_OPCODE(fast_ret + 1);
ZEND_VM_CONTINUE();
cleanup_live_vars(execute_data, op_num, try_catch->finally_op);
Z_OBJ_P(fast_call) = EG(exception);
EG(exception) = NULL;
- fast_call->u2.lineno = (uint32_t)-1;
+ Z_OPLINE_NUM_P(fast_call) = (uint32_t)-1;
ZEND_VM_SET_OPCODE(&EX(func)->op_array.opcodes[try_catch->finally_op]);
ZEND_VM_CONTINUE();
zval *fast_call = EX_VAR(EX(func)->op_array.opcodes[try_catch->finally_end].op1.var);
/* cleanup incomplete RETURN statement */
- if (fast_call->u2.lineno != (uint32_t)-1
- && (EX(func)->op_array.opcodes[fast_call->u2.lineno].op2_type & (IS_TMP_VAR | IS_VAR))) {
- zval *return_value = EX_VAR(EX(func)->op_array.opcodes[fast_call->u2.lineno].op2.var);
+ if (Z_OPLINE_NUM_P(fast_call) != (uint32_t)-1
+ && (EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2_type & (IS_TMP_VAR | IS_VAR))) {
+ zval *return_value = EX_VAR(EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2.var);
zval_ptr_dtor(return_value);
}
SAVE_OPLINE();
/* cleanup incomplete RETURN statement */
- if (fast_call->u2.lineno != (uint32_t)-1
- && (EX(func)->op_array.opcodes[fast_call->u2.lineno].op2_type & (IS_TMP_VAR | IS_VAR))) {
- zval *return_value = EX_VAR(EX(func)->op_array.opcodes[fast_call->u2.lineno].op2.var);
+ if (Z_OPLINE_NUM_P(fast_call) != (uint32_t)-1
+ && (EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2_type & (IS_TMP_VAR | IS_VAR))) {
+ zval *return_value = EX_VAR(EX(func)->op_array.opcodes[Z_OPLINE_NUM_P(fast_call)].op2.var);
zval_ptr_dtor(return_value);
}
Z_OBJ_P(fast_call) = NULL;
/* set return address */
- fast_call->u2.lineno = opline - EX(func)->op_array.opcodes;
+ Z_OPLINE_NUM_P(fast_call) = opline - EX(func)->op_array.opcodes;
ZEND_VM_SET_OPCODE(OP_JMP_ADDR(opline, opline->op1));
ZEND_VM_CONTINUE();
}
zval *fast_call = EX_VAR(opline->op1.var);
uint32_t current_try_catch_offset, current_op_num;
- if (fast_call->u2.lineno != (uint32_t)-1) {
- const zend_op *fast_ret = EX(func)->op_array.opcodes + fast_call->u2.lineno;
+ if (Z_OPLINE_NUM_P(fast_call) != (uint32_t)-1) {
+ const zend_op *fast_ret = EX(func)->op_array.opcodes + Z_OPLINE_NUM_P(fast_call);
ZEND_VM_SET_OPCODE(fast_ret + 1);
ZEND_VM_CONTINUE();