. Fixed integer overflows. (Joshua Rogers)
- Core:
+ . Removed IS_TYPE_IMMUTABLE (it's the same as COPYABLE & !REFCOUNTED; sketched below). (Dmitry)
. Removed the sql.safe_mode directive. (Kalle)
. Removed support for Netware. (Kalle)
. Fixed bug #54535 (WSA cleanup executes before MSHUTDOWN). (Kalle)
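
A minimal sketch (not part of the patch) of the equivalence mentioned in the IS_TYPE_IMMUTABLE entry above; the helper was_immutable() is hypothetical and only illustrates how the old flag maps onto the two remaining flags, and therefore how the checks in the hunks below are rewritten. It assumes the Z_* macros from Zend/zend_types.h.

/* An "immutable" zval (interned string, shared array in opcache SHM) was one
 * that could be copied by value but was never refcounted, i.e. exactly
 * COPYABLE && !REFCOUNTED, so the dedicated flag is redundant. */
static zend_always_inline int was_immutable(zval *zv) /* hypothetical helper */
{
	return Z_COPYABLE_P(zv) && !Z_REFCOUNTED_P(zv);
}
/* Hence the mechanical rewrites below, valid where the zval is already known
 * to be COPYABLE (e.g. arrays):
 *   Z_IMMUTABLE_P(zv)   ->  !Z_REFCOUNTED_P(zv)
 *   !Z_IMMUTABLE_P(zv)  ->   Z_REFCOUNTED_P(zv)
 */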
zval *zv = zend_ast_get_zval(ast);
if (Z_OPT_CONSTANT_P(zv)) {
- if (!(Z_TYPE_FLAGS_P(zv) & IS_TYPE_IMMUTABLE)) {
+ if (Z_TYPE_FLAGS_P(zv) & IS_TYPE_REFCOUNTED) {
if (UNEXPECTED(zval_update_constant_ex(zv, scope) != SUCCESS)) {
ret = FAILURE;
break;
ZVAL_DEREF(val);
if (Z_REFCOUNTED_P(val)) {
if (Z_TYPE_P(val) == IS_ARRAY) {
- if (!Z_IMMUTABLE_P(val)) {
+ if (Z_REFCOUNTED_P(val)) {
if (Z_ARRVAL_P(val)->u.v.nApplyCount > 0) {
zend_error(E_WARNING, "Constants cannot be recursive arrays");
ret = 0;
new_val = zend_hash_index_add_new(Z_ARRVAL_P(dst), idx, val);
}
if (Z_TYPE_P(val) == IS_ARRAY) {
- if (!Z_IMMUTABLE_P(val)) {
+ if (Z_REFCOUNTED_P(val)) {
copy_constant_array(new_val, val);
}
} else if (Z_REFCOUNTED_P(val)) {
val = &val_free;
break;
case IS_ARRAY:
- if (!Z_IMMUTABLE_P(val)) {
+ if (Z_REFCOUNTED_P(val)) {
if (!validate_constant_array(Z_ARRVAL_P(val))) {
RETURN_FALSE;
} else {
zend_throw_error(NULL, "Cannot declare self-referencing constant '%s'", Z_STRVAL_P(p));
return FAILURE;
}
- inline_change = (Z_TYPE_FLAGS_P(p) & IS_TYPE_IMMUTABLE) == 0;
+ inline_change = (Z_TYPE_FLAGS_P(p) & IS_TYPE_REFCOUNTED) != 0;
SEPARATE_ZVAL_NOREF(p);
MARK_CONSTANT_VISITED(p);
if (Z_CONST_FLAGS_P(p) & IS_CONSTANT_CLASS) {
} else if (Z_TYPE_P(p) == IS_CONSTANT_AST) {
zval tmp;
- inline_change = (Z_TYPE_FLAGS_P(p) & IS_TYPE_IMMUTABLE) == 0;
+ inline_change = (Z_TYPE_FLAGS_P(p) & IS_TYPE_REFCOUNTED) != 0;
if (UNEXPECTED(zend_ast_evaluate(&tmp, Z_ASTVAL_P(p), scope) != SUCCESS)) {
return FAILURE;
}
zend_call_method_with_0_params(object, ce, &ce->__debugInfo, ZEND_DEBUGINFO_FUNC_NAME, &retval);
if (Z_TYPE(retval) == IS_ARRAY) {
- if (Z_IMMUTABLE(retval)) {
+ if (!Z_REFCOUNTED(retval)) {
*is_temp = 1;
return zend_array_dup(Z_ARRVAL(retval));
} else if (Z_REFCOUNT(retval) <= 1) {
#define Z_GC_INFO(zval) GC_INFO(Z_COUNTED(zval))
#define Z_GC_INFO_P(zval_p) Z_GC_INFO(*(zval_p))
-
#define Z_GC_TYPE_INFO(zval) GC_TYPE_INFO(Z_COUNTED(zval))
#define Z_GC_TYPE_INFO_P(zval_p) Z_GC_TYPE_INFO(*(zval_p))
/* zval.u1.v.type_flags */
#define IS_TYPE_CONSTANT (1<<0)
-#define IS_TYPE_IMMUTABLE (1<<1)
#define IS_TYPE_REFCOUNTED (1<<2)
#define IS_TYPE_COPYABLE (1<<4)
#define IS_STR_CONSTANT_UNQUALIFIED (1<<4) /* the same as IS_CONSTANT_UNQUALIFIED */
/* array flags */
-#define IS_ARRAY_IMMUTABLE (1<<1) /* the same as IS_TYPE_IMMUTABLE */
+#define IS_ARRAY_IMMUTABLE (1<<1)
/* object flags (zval.value->gc.u.flags) */
#define IS_OBJ_APPLY_COUNT 0x07
#define Z_COPYABLE(zval) ((Z_TYPE_FLAGS(zval) & IS_TYPE_COPYABLE) != 0)
#define Z_COPYABLE_P(zval_p) Z_COPYABLE(*(zval_p))
-#define Z_IMMUTABLE(zval) ((Z_TYPE_FLAGS(zval) & IS_TYPE_IMMUTABLE) != 0)
-#define Z_IMMUTABLE_P(zval_p) Z_IMMUTABLE(*(zval_p))
-
/* the following Z_OPT_* macros make better code when Z_TYPE_INFO accessed before */
#define Z_OPT_TYPE(zval) (Z_TYPE_INFO(zval) & Z_TYPE_MASK)
#define Z_OPT_TYPE_P(zval_p) Z_OPT_TYPE(*(zval_p))
#define Z_OPT_COPYABLE(zval) ((Z_TYPE_INFO(zval) & (IS_TYPE_COPYABLE << Z_TYPE_FLAGS_SHIFT)) != 0)
#define Z_OPT_COPYABLE_P(zval_p) Z_OPT_COPYABLE(*(zval_p))
-#define Z_OPT_IMMUTABLE(zval) ((Z_TYPE_INFO(zval) & (IS_TYPE_IMMUTABLE << Z_TYPE_FLAGS_SHIFT)) != 0)
-#define Z_OPT_IMMUTABLE_P(zval_p) Z_OPT_IMMUTABLE(*(zval_p))
-
#define Z_OPT_ISREF(zval) (Z_OPT_TYPE(zval) == IS_REFERENCE)
#define Z_OPT_ISREF_P(zval_p) Z_OPT_ISREF(*(zval_p))
#define Z_TRY_DELREF(z) Z_TRY_DELREF_P(&(z))
static zend_always_inline uint32_t zval_refcount_p(zval* pz) {
- ZEND_ASSERT(Z_REFCOUNTED_P(pz) || Z_IMMUTABLE_P(pz));
+ ZEND_ASSERT(Z_REFCOUNTED_P(pz) || Z_COPYABLE_P(pz));
return GC_REFCOUNT(Z_COUNTED_P(pz));
}
zend_refcounted *_gc = Z_COUNTED_P(_z2); \
uint32_t _t = Z_TYPE_INFO_P(_z2); \
ZVAL_COPY_VALUE_EX(_z1, _z2, _gc, _t); \
- if ((_t & ((IS_TYPE_REFCOUNTED|IS_TYPE_IMMUTABLE) << Z_TYPE_FLAGS_SHIFT)) != 0) { \
- if ((_t & ((IS_TYPE_COPYABLE|IS_TYPE_IMMUTABLE) << Z_TYPE_FLAGS_SHIFT)) != 0) { \
+ if ((_t & ((IS_TYPE_REFCOUNTED|IS_TYPE_COPYABLE) << Z_TYPE_FLAGS_SHIFT)) != 0) { \
+ if ((_t & (IS_TYPE_COPYABLE << Z_TYPE_FLAGS_SHIFT)) != 0) { \
_zval_copy_ctor_func(_z1 ZEND_FILE_LINE_CC); \
} else { \
GC_REFCOUNT(_gc)++; \
zval *_zv = (zv); \
zend_array *_arr = Z_ARR_P(_zv); \
if (UNEXPECTED(GC_REFCOUNT(_arr) > 1)) { \
- if (!Z_IMMUTABLE_P(_zv)) { \
+ if (Z_REFCOUNTED_P(_zv)) { \
GC_REFCOUNT(_arr)--; \
} \
ZVAL_ARR(_zv, zend_array_dup(_arr)); \
#define SEPARATE_ZVAL_NOREF(zv) do { \
zval *_zv = (zv); \
ZEND_ASSERT(Z_TYPE_P(_zv) != IS_REFERENCE); \
- if (Z_COPYABLE_P(_zv) || \
- Z_IMMUTABLE_P(_zv)) { \
+ if (Z_COPYABLE_P(_zv)) { \
if (Z_REFCOUNT_P(_zv) > 1) { \
- if (!Z_IMMUTABLE_P(_zv)) { \
+ if (Z_REFCOUNTED_P(_zv)) { \
Z_DELREF_P(_zv); \
} \
zval_copy_ctor_func(_zv); \
#define SEPARATE_ZVAL(zv) do { \
zval *_zv = (zv); \
if (Z_REFCOUNTED_P(_zv) || \
- Z_IMMUTABLE_P(_zv)) { \
+ Z_COPYABLE_P(_zv)) { \
if (Z_REFCOUNT_P(_zv) > 1) { \
- if (Z_COPYABLE_P(_zv) || \
- Z_IMMUTABLE_P(_zv)) { \
- if (!Z_IMMUTABLE_P(_zv)) { \
+ if (Z_COPYABLE_P(_zv)) { \
+ if (Z_REFCOUNTED_P(_zv)) { \
Z_DELREF_P(_zv); \
} \
zval_copy_ctor_func(_zv); \
#define SEPARATE_ZVAL_IF_NOT_REF(zv) do { \
zval *_zv = (zv); \
- if (Z_COPYABLE_P(_zv) || \
- Z_IMMUTABLE_P(_zv)) { \
+ if (Z_COPYABLE_P(_zv)) { \
if (Z_REFCOUNT_P(_zv) > 1) { \
- if (!Z_IMMUTABLE_P(_zv)) { \
+ if (Z_REFCOUNTED_P(_zv)) { \
Z_DELREF_P(_zv); \
} \
zval_copy_ctor_func(_zv); \
static zend_always_inline void _zval_copy_ctor(zval *zvalue ZEND_FILE_LINE_DC)
{
- if (Z_REFCOUNTED_P(zvalue) || Z_IMMUTABLE_P(zvalue)) {
- if (Z_COPYABLE_P(zvalue) || Z_IMMUTABLE_P(zvalue)) {
- _zval_copy_ctor_func(zvalue ZEND_FILE_LINE_RELAY_CC);
- } else {
- Z_ADDREF_P(zvalue);
- }
- }
-}
-
-static zend_always_inline void _zval_opt_copy_ctor(zval *zvalue ZEND_FILE_LINE_DC)
-{
- if (Z_OPT_REFCOUNTED_P(zvalue) || Z_OPT_IMMUTABLE_P(zvalue)) {
- if (Z_OPT_COPYABLE_P(zvalue) || Z_OPT_IMMUTABLE_P(zvalue)) {
- _zval_copy_ctor_func(zvalue ZEND_FILE_LINE_RELAY_CC);
- } else {
- Z_ADDREF_P(zvalue);
- }
- }
-}
-
-static zend_always_inline void _zval_copy_ctor_no_imm(zval *zvalue ZEND_FILE_LINE_DC)
-{
- if (Z_REFCOUNTED_P(zvalue)) {
+ if (Z_REFCOUNTED_P(zvalue) || Z_COPYABLE_P(zvalue)) {
if (Z_COPYABLE_P(zvalue)) {
_zval_copy_ctor_func(zvalue ZEND_FILE_LINE_RELAY_CC);
} else {
}
}
-static zend_always_inline void _zval_opt_copy_ctor_no_imm(zval *zvalue ZEND_FILE_LINE_DC)
+static zend_always_inline void _zval_opt_copy_ctor(zval *zvalue ZEND_FILE_LINE_DC)
{
- if (Z_OPT_REFCOUNTED_P(zvalue)) {
+ if (Z_OPT_REFCOUNTED_P(zvalue) || Z_OPT_COPYABLE_P(zvalue)) {
if (Z_OPT_COPYABLE_P(zvalue)) {
_zval_copy_ctor_func(zvalue ZEND_FILE_LINE_RELAY_CC);
} else {
ZEND_API void _zval_dtor_wrapper(zval *zvalue);
#define zval_copy_ctor(zvalue) _zval_copy_ctor((zvalue) ZEND_FILE_LINE_CC)
#define zval_opt_copy_ctor(zvalue) _zval_opt_copy_ctor((zvalue) ZEND_FILE_LINE_CC)
-#define zval_copy_ctor_no_imm(zvalue) _zval_copy_ctor_no_imm((zvalue) ZEND_FILE_LINE_CC)
-#define zval_opt_copy_ctor_no_imm(zvalue) _zval_opt_copy_ctor_no_imm((zvalue) ZEND_FILE_LINE_CC)
#define zval_dtor(zvalue) zval_ptr_dtor_nogc(zvalue)
#define zval_ptr_dtor(zval_ptr) _zval_ptr_dtor((zval_ptr) ZEND_FILE_LINE_CC)
#define zval_ptr_dtor_nogc(zval_ptr) _zval_ptr_dtor_nogc((zval_ptr) ZEND_FILE_LINE_CC)
target_hash = HASH_OF(var);
if (target_hash != NULL) {
while ((hash_entry = zend_hash_get_current_data(target_hash)) != NULL) {
- if (!Z_IMMUTABLE_P(var)) {
+ if (Z_REFCOUNTED_P(var)) {
if (++target_hash->u.v.nApplyCount > 1) {
--target_hash->u.v.nApplyCount;
recursion_error = 1;
}
if (recursion_error) {
while(stack_level-- && (var = &stack[stack_level])) {
- if (!Z_IMMUTABLE_P(var)) {
+ if (Z_REFCOUNTED_P(var)) {
if (HASH_OF(var)->u.v.nApplyCount > 1) {
HASH_OF(var)->u.v.nApplyCount--;
}
hash_entry = hash_entry_ptr;
ZVAL_DEREF(hash_entry);
if (Z_TYPE_P(hash_entry) == IS_ARRAY || Z_TYPE_P(hash_entry) == IS_OBJECT) {
- if (!Z_IMMUTABLE_P(hash_entry)) {
+ if (Z_REFCOUNTED_P(hash_entry)) {
if (++(HASH_OF(hash_entry)->u.v.nApplyCount) > 1) {
--(HASH_OF(hash_entry)->u.v.nApplyCount);
recursion_error = 1;
if (recursion_error) {
while(stack_level-- && (var = &stack[stack_level])) {
- if (!Z_IMMUTABLE_P(var)) {
+ if (Z_REFCOUNTED_P(var)) {
if (HASH_OF(var)->u.v.nApplyCount > 1) {
HASH_OF(var)->u.v.nApplyCount--;
}
flags = Z_GC_FLAGS_P(z) & ~ (IS_STR_PERSISTENT | IS_STR_INTERNED | IS_STR_PERMANENT);
zend_accel_store_interned_string(Z_STR_P(z));
Z_GC_FLAGS_P(z) |= flags;
- Z_TYPE_FLAGS_P(z) &= ~(IS_TYPE_REFCOUNTED | IS_TYPE_COPYABLE);
- if (Z_TYPE_P(z) == IS_CONSTANT) {
- Z_TYPE_FLAGS_P(z) |= IS_TYPE_IMMUTABLE;
- }
+ Z_TYPE_FLAGS_P(z) &= ~IS_TYPE_REFCOUNTED;
break;
case IS_ARRAY:
new_ptr = zend_shared_alloc_get_xlat_entry(Z_ARR_P(z));
if (new_ptr) {
Z_ARR_P(z) = new_ptr;
- Z_TYPE_FLAGS_P(z) = IS_TYPE_IMMUTABLE;
+ Z_TYPE_FLAGS_P(z) = IS_TYPE_COPYABLE;
} else {
- if (Z_IMMUTABLE_P(z)) {
+ if (!Z_REFCOUNTED_P(z)) {
Z_ARR_P(z) = zend_accel_memdup(Z_ARR_P(z), sizeof(zend_array));
zend_hash_persist_immutable(Z_ARRVAL_P(z));
} else {
zend_accel_store(Z_ARR_P(z), sizeof(zend_array));
zend_hash_persist(Z_ARRVAL_P(z), zend_persist_zval);
/* make immutable array */
- Z_TYPE_FLAGS_P(z) = IS_TYPE_IMMUTABLE;
+ Z_TYPE_FLAGS_P(z) = IS_TYPE_COPYABLE;
GC_REFCOUNT(Z_COUNTED_P(z)) = 2;
GC_FLAGS(Z_COUNTED_P(z)) |= IS_ARRAY_IMMUTABLE;
Z_ARRVAL_P(z)->u.flags |= HASH_FLAG_STATIC_KEYS;
new_ptr = zend_shared_alloc_get_xlat_entry(Z_AST_P(z));
if (new_ptr) {
Z_AST_P(z) = new_ptr;
- Z_TYPE_FLAGS_P(z) = IS_TYPE_CONSTANT | IS_TYPE_IMMUTABLE;
+ Z_TYPE_FLAGS_P(z) = IS_TYPE_CONSTANT | IS_TYPE_COPYABLE;
} else {
zend_accel_store(Z_AST_P(z), sizeof(zend_ast_ref));
Z_ASTVAL_P(z) = zend_persist_ast(Z_ASTVAL_P(z));
- Z_TYPE_FLAGS_P(z) = IS_TYPE_CONSTANT | IS_TYPE_IMMUTABLE;
+ Z_TYPE_FLAGS_P(z) = IS_TYPE_CONSTANT | IS_TYPE_COPYABLE;
GC_REFCOUNT(Z_COUNTED_P(z)) = 2;
}
break;
return;
}
- if (Z_IMMUTABLE_P(name_var)) {
+ if (!Z_REFCOUNTED_P(name_var)) {
ZEND_HASH_FOREACH_VAL(target_hash, val) {
php_wddx_add_var(packet, val);
} ZEND_HASH_FOREACH_END();