author    Dmitry Stogov <dmitry@zend.com>  2020-12-09 17:16:54 +0300
committer Dmitry Stogov <dmitry@zend.com>  2020-12-09 17:16:54 +0300
commit    8d268e844313bc18380da0b7ea3d74629fd7f86c (patch)
tree      9960568cc5781a68e48e5223c7405a76e105d235 /ext/opcache
parent    186b76636830a00e2533c7b0aaff4b02d9489376 (diff)
download  php-git-8d268e844313bc18380da0b7ea3d74629fd7f86c.tar.gz
Fixed IS_32BIT/IS_SIGNED_32BIT mess
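
Background (not part of the original commit message): on x86-64, 32-bit immediates and
displacements are sign-extended to 64 bits, so an address is only directly encodable when
it fits in the signed 32-bit range. The old IS_32BIT() accepted anything up to 0xffffffff,
which wrongly admitted addresses in the 2-4 GB range. Below is a minimal stand-alone
sketch of the distinction; it assumes a 64-bit build with the usual two's-complement
conversions, and OLD_IS_32BIT/NEW_IS_32BIT are illustrative names, not identifiers from
the source.

/* Minimal sketch, not part of the commit: why the unsigned check is wrong
 * for sign-extended x86-64 immediates/displacements. Assumes a 64-bit build. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define OLD_IS_32BIT(addr)   (((uintptr_t)(addr)) <= 0xffffffff)   /* before the fix */
#define NEW_IS_32BIT(addr)   (((uintptr_t)(addr)) <= 0x7fffffff)   /* after the fix  */
#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && \
                              (((intptr_t)(val)) >= (-2147483647 - 1)))

int main(void)
{
	/* hypothetical address in the 2..4 GB range */
	uintptr_t addr = 0x80000000u;

	/* The old check accepted it ... */
	assert(OLD_IS_32BIT(addr));
	/* ... but sign-extending its low 32 bits yields a different value,
	 * so it cannot be encoded as a 32-bit immediate or displacement. */
	assert((uintptr_t)(intptr_t)(int32_t)addr != addr);
	/* Both corrected predicates reject it. */
	assert(!NEW_IS_32BIT(addr));
	assert(!IS_SIGNED_32BIT(addr));

	puts("ok");
	return 0;
}
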
Diffstat (limited to 'ext/opcache')
-rw-r--r--  ext/opcache/jit/zend_jit_x86.dasc  34
1 file changed, 17 insertions, 17 deletions
diff --git a/ext/opcache/jit/zend_jit_x86.dasc b/ext/opcache/jit/zend_jit_x86.dasc
index c2e261df34..ff70f96ba5 100644
--- a/ext/opcache/jit/zend_jit_x86.dasc
+++ b/ext/opcache/jit/zend_jit_x86.dasc
@@ -165,7 +165,7 @@ static void* dasm_labels[zend_lb_MAX];
|.section code, cold_code, jmp_table
-#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0xffffffff)
+#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0x7fffffff)
#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && (((intptr_t)(val)) >= (-2147483647 - 1)))
@@ -188,7 +188,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro LOAD_ADDR, reg, addr
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mov reg, ((ptrdiff_t)addr) // 0x48 0xc7 0xc0 <imm-32-bit>
|| } else {
| mov64 reg, ((ptrdiff_t)addr) // 0x48 0xb8 <imm-64-bit>
@@ -249,7 +249,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro ADDR_OP1, addr_ins, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| addr_ins ((ptrdiff_t)addr)
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -262,7 +262,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro ADDR_OP2_2, addr_ins, op1, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| addr_ins op1, ((ptrdiff_t)addr)
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -289,7 +289,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro MEM_OP1, mem_ins, prefix, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins prefix [addr]
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -302,7 +302,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro MEM_OP2_1, mem_ins, prefix, addr, op2, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins prefix [addr], op2
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -315,7 +315,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro MEM_OP2_2, mem_ins, op1, prefix, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins op1, prefix [addr]
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -346,7 +346,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro MEM_OP3_3, mem_ins, op1, op2, prefix, addr, tmp_reg
| .if X64
-|| if (IS_32BIT(addr)) {
+|| if (IS_SIGNED_32BIT(addr)) {
| mem_ins op1, op2, prefix [addr]
|| } else {
| mov64 tmp_reg, ((ptrdiff_t)addr)
@@ -581,7 +581,7 @@ static void* dasm_labels[zend_lb_MAX];
|.macro SSE_AVX_OP, sse_ins, avx_ins, reg, addr
|| if (Z_MODE(addr) == IS_CONST_ZVAL) {
| .if X64
-|| if (IS_32BIT(Z_ZV(addr))) {
+|| if (IS_SIGNED_32BIT(Z_ZV(addr))) {
| SSE_AVX_INS sse_ins, avx_ins, xmm(reg-ZREG_XMM0), qword [Z_ZV(addr)]
|| } else {
| LOAD_ADDR r0, Z_ZV(addr)
@@ -654,7 +654,7 @@ static void* dasm_labels[zend_lb_MAX];
|| if (Z_MODE(addr) != IS_REG || reg != Z_REG(addr)) {
|| if (Z_MODE(addr) == IS_CONST_ZVAL) {
| .if X64
-|| if (IS_32BIT(Z_ZV(addr))) {
+|| if (IS_SIGNED_32BIT(Z_ZV(addr))) {
| SSE_AVX_INS movsd, vmovsd, xmm(reg-ZREG_XMM0), qword [Z_ZV(addr)]
|| } else {
| LOAD_ADDR r0, Z_ZV(addr)
@@ -924,7 +924,7 @@ static void* dasm_labels[zend_lb_MAX];
| xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| }
| .if X64
-|| } else if (!IS_32BIT(zv)) {
+|| } else if (!IS_SIGNED_32BIT(zv)) {
| mov64 Ra(tmp_reg), ((uintptr_t)zv)
| SSE_AVX_INS movsd, vmovsd, xmm(dst_reg-ZREG_XMM0), qword [Ra(tmp_reg)]
| .endif
@@ -978,7 +978,7 @@ static void* dasm_labels[zend_lb_MAX];
| xorps xmm(dst_reg-ZREG_XMM0), xmm(dst_reg-ZREG_XMM0)
|| }
| .if X64
-|| } else if (!IS_32BIT(zv)) {
+|| } else if (!IS_SIGNED_32BIT(zv)) {
| mov64 Ra(tmp_reg), ((uintptr_t)zv)
| SSE_AVX_INS movsd, vmovsd, xmm(dst_reg-ZREG_XMM0), qword [Ra(tmp_reg)]
| .endif
@@ -3130,7 +3130,7 @@ static int zend_jit_trace_begin(dasm_State **Dst, uint32_t trace_num, zend_jit_t
#if ZTS
if (1) {
#else
- if ((sizeof(void*) == 8 && !IS_32BIT(&EG(jit_trace_num)))) {
+ if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(jit_trace_num)))) {
#endif
/* assignment to EG(jit_trace_num) shouldn't clobber CPU register used by deoptimizer */
if (parent) {
@@ -6053,7 +6053,7 @@ static int zend_jit_assign_to_variable(dasm_State **Dst,
zval *zv = Z_ZV(val_addr);
if (Z_TYPE_P(zv) == IS_DOUBLE) {
- if (Z_DVAL_P(zv) == 0 || IS_32BIT(zv)) {
+ if (Z_DVAL_P(zv) == 0 || IS_SIGNED_32BIT(zv)) {
keep_gc = 1;
}
} else if (IS_SIGNED_32BIT(Z_LVAL_P(zv))) {
@@ -15331,7 +15331,7 @@ static zend_bool zend_needs_extra_reg_for_const(const zend_op *opline, zend_ucha
|.if X64
|| if (op_type == IS_CONST) {
|| zval *zv = RT_CONSTANT(opline, op);
-|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_32BIT(zv)) {
+|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_SIGNED_32BIT(zv)) {
|| return 1;
|| } else if (Z_TYPE_P(zv) == IS_LONG && !IS_SIGNED_32BIT(Z_LVAL_P(zv))) {
|| return 1;
@@ -15676,7 +15676,7 @@ static zend_regset zend_jit_get_scratch_regset(const zend_op *opline, const zend
#if ZTS
ZEND_REGSET_INCL(regset, ZREG_R0);
#else
- if ((sizeof(void*) == 8 && !IS_32BIT(&EG(vm_interrupt)))) {
+ if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) {
ZEND_REGSET_INCL(regset, ZREG_R0);
}
#endif
@@ -15689,7 +15689,7 @@ static zend_regset zend_jit_get_scratch_regset(const zend_op *opline, const zend
#if ZTS
ZEND_REGSET_INCL(regset, ZREG_R0);
#else
- if ((sizeof(void*) == 8 && !IS_32BIT(&EG(vm_interrupt)))) {
+ if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) {
ZEND_REGSET_INCL(regset, ZREG_R0);
}
#endif
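
For reference, a hedged sketch (not from the PHP sources; emit_load_rax is a hypothetical
helper) of the encoding choice the LOAD_ADDR macro above makes: the 7-byte sign-extended
"mov reg, imm32" form when IS_SIGNED_32BIT(addr) holds, otherwise the 10-byte mov64 form,
matching the 0x48 0xc7 0xc0 / 0x48 0xb8 byte sequences noted in the macro comments.

/* Hypothetical emit helper mirroring LOAD_ADDR's choice for rax. */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && \
                              (((intptr_t)(val)) >= (-2147483647 - 1)))

/* Emit "mov rax, addr" into buf, returning the encoded length. */
static size_t emit_load_rax(uint8_t *buf, uintptr_t addr)
{
	if (IS_SIGNED_32BIT(addr)) {
		/* 0x48 0xc7 0xc0 <imm32>: 7 bytes, imm sign-extended to 64 bits */
		int32_t imm = (int32_t)addr;
		buf[0] = 0x48; buf[1] = 0xc7; buf[2] = 0xc0;
		memcpy(buf + 3, &imm, sizeof(imm));
		return 7;
	} else {
		/* 0x48 0xb8 <imm64>: 10 bytes, full 64-bit immediate */
		uint64_t imm = (uint64_t)addr;
		buf[0] = 0x48; buf[1] = 0xb8;
		memcpy(buf + 2, &imm, sizeof(imm));
		return 10;
	}
}
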