diff --git a/Zend/zend_vm_opcodes.h b/Zend/zend_vm_opcodes.h index e7e40c8a853da..664b960dcf727 100644 --- a/Zend/zend_vm_opcodes.h +++ b/Zend/zend_vm_opcodes.h @@ -35,7 +35,8 @@ #endif #if (ZEND_VM_KIND == ZEND_VM_KIND_HYBRID) && !defined(__SANITIZE_ADDRESS__) -# if ((defined(i386) && !defined(__PIC__)) || defined(__x86_64__) || defined(_M_X64)) +# if ((defined(i386) && !defined(__PIC__)) || defined(__x86_64__) || \ + defined(_M_X64) || defined(__aarch64__)) # define ZEND_VM_HYBRID_JIT_RED_ZONE_SIZE 16 # endif #endif diff --git a/build/Makefile.global b/build/Makefile.global index 9a8779d56d092..46323a0018b89 100644 --- a/build/Makefile.global +++ b/build/Makefile.global @@ -125,6 +125,8 @@ distclean: clean rm -f scripts/man1/phpize.1 scripts/php-config scripts/man1/php-config.1 sapi/cli/php.1 sapi/cgi/php-cgi.1 sapi/phpdbg/phpdbg.1 ext/phar/phar.1 ext/phar/phar.phar.1 rm -f sapi/fpm/php-fpm.conf sapi/fpm/init.d.php-fpm sapi/fpm/php-fpm.service sapi/fpm/php-fpm.8 sapi/fpm/status.html rm -f ext/phar/phar.phar ext/phar/phar.php + rm -f ext/opcache/jit/zend_jit_x86.c + rm -f ext/opcache/jit/zend_jit_arm64.c if test "$(srcdir)" != "$(builddir)"; then \ rm -f ext/phar/phar/phar.inc; \ fi diff --git a/ext/opcache/config.m4 b/ext/opcache/config.m4 index 33c99c59c130f..95eee57284921 100644 --- a/ext/opcache/config.m4 +++ b/ext/opcache/config.m4 @@ -29,7 +29,7 @@ if test "$PHP_OPCACHE" != "no"; then if test "$PHP_OPCACHE_JIT" = "yes"; then case $host_cpu in - x86*) + x86*|aarch64) ;; *) AC_MSG_WARN([JIT not supported by host architecture]) @@ -59,18 +59,37 @@ if test "$PHP_OPCACHE" != "no"; then case $host_alias in *x86_64-*-darwin*) DASM_FLAGS="-D X64APPLE=1 -D X64=1" + DASM_ARCH="x86" ;; *x86_64*) DASM_FLAGS="-D X64=1" + DASM_ARCH="x86" + ;; + *aarch64*) + DASM_FLAGS="-D ARM64=1" + DASM_ARCH="arm64" ;; esac + else + DASM_ARCH="x86" fi if test "$PHP_THREAD_SAFETY" = "yes"; then DASM_FLAGS="$DASM_FLAGS -D ZTS=1" fi + if test "$DASM_ARCH" = "arm64"; then + PKG_CHECK_MODULES([CAPSTONE], [capstone >= 3.0.0], + [have_capstone="yes"], [have_capstone="no"]) + if test "$have_capstone" = "yes"; then + AC_DEFINE(HAVE_CAPSTONE, 1, [ ]) + PHP_EVAL_LIBLINE($CAPSTONE_LIBS, OPCACHE_SHARED_LIBADD) + PHP_EVAL_INCLINE($CAPSTONE_CFLAGS) + fi + fi + PHP_SUBST(DASM_FLAGS) + PHP_SUBST(DASM_ARCH) AC_MSG_CHECKING(for opagent in default path) for i in /usr/local /usr; do diff --git a/ext/opcache/config.w32 b/ext/opcache/config.w32 index a7f292ee7625f..764a2edaab146 100644 --- a/ext/opcache/config.w32 +++ b/ext/opcache/config.w32 @@ -25,6 +25,7 @@ if (PHP_OPCACHE != "no") { dasm_flags += " -D ZTS=1"; } DEFINE("DASM_FLAGS", dasm_flags); + DEFINE("DASM_ARCH", "x86"); AC_DEFINE('HAVE_JIT', 1, 'Define to enable JIT'); /* XXX read this dynamically */ diff --git a/ext/opcache/jit/Makefile.frag b/ext/opcache/jit/Makefile.frag index b3af5b290a7f3..8e291783cebb1 100644 --- a/ext/opcache/jit/Makefile.frag +++ b/ext/opcache/jit/Makefile.frag @@ -2,13 +2,13 @@ $(builddir)/minilua: $(srcdir)/jit/dynasm/minilua.c $(CC) $(srcdir)/jit/dynasm/minilua.c -lm -o $@ -$(builddir)/jit/zend_jit_x86.c: $(srcdir)/jit/zend_jit_x86.dasc $(srcdir)/jit/dynasm/*.lua $(builddir)/minilua - $(builddir)/minilua $(srcdir)/jit/dynasm/dynasm.lua $(DASM_FLAGS) -o $@ $(srcdir)/jit/zend_jit_x86.dasc +$(builddir)/jit/zend_jit_$(DASM_ARCH).c: $(srcdir)/jit/zend_jit_$(DASM_ARCH).dasc $(srcdir)/jit/dynasm/*.lua $(builddir)/minilua + $(builddir)/minilua $(srcdir)/jit/dynasm/dynasm.lua $(DASM_FLAGS) -o $@ $(srcdir)/jit/zend_jit_$(DASM_ARCH).dasc 
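Note on the capstone check added to ext/opcache/config.m4 above: when pkg-config reports capstone >= 3.0.0, HAVE_CAPSTONE is defined and CAPSTONE_LIBS/CAPSTONE_CFLAGS are folded into the opcache build, so the arm64 JIT's disassembler can link against the capstone C API. A minimal sketch of how such a guard is typically consumed on the C side (illustrative only, not code from this patch; the real wiring lives in zend_jit_disasm_arm64.c, which is not shown in this hunk, and the helper name here is hypothetical):

```c
#ifdef HAVE_CAPSTONE
# include <stdio.h>
# include <capstone/capstone.h>

/* Illustrative sketch: disassemble a buffer of AArch64 machine code with capstone. */
static void disasm_sketch(const uint8_t *code, size_t size, uint64_t vaddr)
{
	csh handle;
	cs_insn *insn;
	size_t i, count;

	if (cs_open(CS_ARCH_ARM64, CS_MODE_LITTLE_ENDIAN, &handle) != CS_ERR_OK) {
		return;
	}
	count = cs_disasm(handle, code, size, vaddr, 0, &insn);
	for (i = 0; i < count; i++) {
		printf("0x%llx:\t%s\t%s\n",
			(unsigned long long)insn[i].address, insn[i].mnemonic, insn[i].op_str);
	}
	if (count) {
		cs_free(insn, count);
	}
	cs_close(&handle);
}
#endif
```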
$(builddir)/jit/zend_jit.lo: \ - $(builddir)/jit/zend_jit_x86.c \ + $(builddir)/jit/zend_jit_$(DASM_ARCH).c \ $(srcdir)/jit/zend_jit_helpers.c \ - $(srcdir)/jit/zend_jit_disasm_x86.c \ + $(srcdir)/jit/zend_jit_disasm_$(DASM_ARCH).c \ $(srcdir)/jit/zend_jit_gdb.c \ $(srcdir)/jit/zend_jit_perf_dump.c \ $(srcdir)/jit/zend_jit_oprofile.c \ diff --git a/ext/opcache/jit/dynasm/dasm_arm64.h b/ext/opcache/jit/dynasm/dasm_arm64.h index d64e60a3e6e69..bfdf27fef7076 100644 --- a/ext/opcache/jit/dynasm/dasm_arm64.h +++ b/ext/opcache/jit/dynasm/dasm_arm64.h @@ -23,6 +23,7 @@ enum { /* The following actions also have an argument. */ DASM_REL_PC, DASM_LABEL_PC, DASM_IMM, DASM_IMM6, DASM_IMM12, DASM_IMM13W, DASM_IMM13X, DASM_IMML, + DASM_VREG, DASM__MAX }; @@ -39,6 +40,7 @@ enum { #define DASM_S_RANGE_LG 0x13000000 #define DASM_S_RANGE_PC 0x14000000 #define DASM_S_RANGE_REL 0x15000000 +#define DASM_S_RANGE_VREG 0x16000000 #define DASM_S_UNDEF_LG 0x21000000 #define DASM_S_UNDEF_PC 0x22000000 @@ -312,13 +314,17 @@ void dasm_put(Dst_DECL, int start, ...) } case DASM_IMML: { #ifdef DASM_CHECKS - int scale = (p[-2] >> 30); + int scale = (ins & 0x3); CK((!(n & ((1<<scale)-1)) && (unsigned int)(n>>scale) < 4096) || (unsigned int)(n+256) < 512, RANGE_I); #endif b[pos++] = n; break; } + case DASM_VREG: + CK(n < 32, RANGE_VREG); + b[pos++] = n; + break; } } } @@ -348,7 +354,7 @@ int dasm_link(Dst_DECL, size_t *szp) { /* Handle globals not defined in this translation unit. */ int idx; - for (idx = 20; idx*sizeof(int) < D->lgsize; idx++) { + for (idx = 10; idx*sizeof(int) < D->lgsize; idx++) { int n = D->lglabels[idx]; /* Undefined label: Collapse rel chain and replace with marker (< 0). */ while (n > 0) { int *pb = DASM_POS2PTR(D, n); n = *pb; *pb = -idx; } @@ -377,6 +383,7 @@ int dasm_link(Dst_DECL, size_t *szp) case DASM_IMM: case DASM_IMM6: case DASM_IMM12: case DASM_IMM13W: case DASM_IMML: pos++; break; case DASM_IMM13X: pos += 2; break; + case DASM_VREG: pos++; break; } } stop: (void)0; @@ -426,6 +433,10 @@ int dasm_encode(Dst_DECL, void *buffer) ins &= 255; while ((((char *)cp - base) & ins)) *cp++ = 0xe1a00000; break; case DASM_REL_LG: + if (n < 0) { /* Global label reference */ + n = (int)((ptrdiff_t)D->globals[-n] - (ptrdiff_t)cp + 4); + goto patchrel; + } CK(n >= 0, UNDEF_LG); case DASM_REL_PC: CK(n >= 0, UNDEF_PC); @@ -467,11 +478,14 @@ int dasm_encode(Dst_DECL, void *buffer) cp[-1] |= (dasm_imm13(n, *b++) << 10); break; case DASM_IMML: { - int scale = (p[-2] >> 30); + int scale = (ins & 0x3); cp[-1] |= (!(n & ((1<<scale)-1)) && (unsigned int)(n>>scale) < 4096) ? ((n << (10-scale)) | 0x01000000) : ((n & 511) << 12); break; } + case DASM_VREG: + cp[-1] |= (n & 0x1f) << (ins & 0x1f); + break; default: *cp++ = ins; break; } } diff --git a/ext/opcache/jit/dynasm/dasm_arm64.lua b/ext/opcache/jit/dynasm/dasm_arm64.lua index 4a7d8dfeeb54c..d9f4eca8d0bbb 100644 --- a/ext/opcache/jit/dynasm/dasm_arm64.lua +++ b/ext/opcache/jit/dynasm/dasm_arm64.lua @@ -40,6 +40,7 @@ local action_names = { "STOP", "SECTION", "ESC", "REL_EXT", "ALIGN", "REL_LG", "LABEL_LG", "REL_PC", "LABEL_PC", "IMM", "IMM6", "IMM12", "IMM13W", "IMM13X", "IMML", + "VREG" } -- Maximum number of section buffer positions for dasm_put().
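Note on the DASM_VREG action introduced in dasm_arm64.h/dasm_arm64.lua above: the Lua front end records the field position of a dynamic register (0 for Rd, 5 for Rn, 10 for Ra, 16 for Rm) as the action argument, dasm_put() range-checks the run-time register number against 32 (DASM_S_RANGE_VREG), and dasm_encode() ORs it into the already-emitted instruction word via `cp[-1] |= (n & 0x1f) << (ins & 0x1f)`. A standalone sketch of that final patching step (hypothetical helper, for illustration only, not code from this patch):

```c
#include <stdint.h>

/* Sketch of the DASM_VREG encode step: 'shift' is the field position recorded
 * by dasm_arm64.lua (ins & 0x1f), 'regno' is the run-time register number that
 * dasm_put() has already checked to be < 32. The template leaves the field zero,
 * so OR-ing the value in is enough. */
static inline void arm64_patch_vreg(uint32_t *insn, unsigned regno, unsigned shift)
{
	*insn |= (regno & 0x1f) << (shift & 0x1f);
}

/* Usage example: 0x8b000020 is "add x0, x1, x0" with the Rm field left zero;
 * patching register 7 into Rm (bit 16) yields 0x8b070020, i.e. "add x0, x1, x7". */
/* uint32_t word = 0x8b000020; arm64_patch_vreg(&word, 7, 16); */
```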
@@ -246,7 +247,7 @@ local map_cond = { local parse_reg_type -local function parse_reg(expr) +local function parse_reg(expr, shift) if not expr then werror("expected register name") end local tname, ovreg = match(expr, "^([%w_]+):(@?%l%d+)$") local tp = map_type[tname or expr] @@ -266,18 +267,29 @@ local function parse_reg(expr) elseif parse_reg_type ~= rt then werror("register size mismatch") end - return r, tp + return shl(r, shift or 0), tp end end + -- Allow Rx(...) for dynamic register names + local vrt, vreg = match(expr, "^R([xwqdshb])(%b())$") + if vreg then + if not parse_reg_type then + parse_reg_type = vrt + elseif parse_reg_type ~= vrt then + werror("register size mismatch") + end + if shift then waction("VREG", shift, vreg) end + return 0 + end werror("bad register name `"..expr.."'") end local function parse_reg_base(expr) if expr == "sp" then return 0x3e0 end - local base, tp = parse_reg(expr) + local base, tp = parse_reg(expr, 5) if parse_reg_type ~= "x" then werror("bad register type") end parse_reg_type = false - return shl(base, 5), tp + return base, tp end local parse_ctx = {} @@ -403,7 +415,7 @@ local function parse_imm_load(imm, scale) end werror("out of range immediate `"..imm.."'") else - waction("IMML", 0, imm) + waction("IMML", scale, imm) return 0 end end @@ -470,7 +482,7 @@ local function parse_load(params, nparams, n, op) if reg and tailr ~= "" then local base, tp = parse_reg_base(reg) if tp then - waction("IMML", 0, format(tp.ctypefmt, tailr)) + waction("IMML", shr(op, 30), format(tp.ctypefmt, tailr)) return op + base end end @@ -494,7 +506,7 @@ local function parse_load(params, nparams, n, op) op = op + parse_imm_load(imm, scale) else local p2b, p3b, p3s = match(p2a, "^,%s*([^,%s]*)%s*,?%s*(%S*)%s*(.*)$") - op = op + shl(parse_reg(p2b), 16) + 0x00200800 + op = op + parse_reg(p2b, 16) + 0x00200800 if parse_reg_type ~= "x" and parse_reg_type ~= "w" then werror("bad index register type") end @@ -891,15 +903,15 @@ local function parse_template(params, template, nparams, pos) for p in gmatch(sub(template, 9), ".") do local q = params[n] if p == "D" then - op = op + parse_reg(q); n = n + 1 + op = op + parse_reg(q, 0); n = n + 1 elseif p == "N" then - op = op + shl(parse_reg(q), 5); n = n + 1 + op = op + parse_reg(q, 5); n = n + 1 elseif p == "M" then - op = op + shl(parse_reg(q), 16); n = n + 1 + op = op + parse_reg(q, 16); n = n + 1 elseif p == "A" then - op = op + shl(parse_reg(q), 10); n = n + 1 + op = op + parse_reg(q, 10); n = n + 1 elseif p == "m" then - op = op + shl(parse_reg(params[n-1]), 16) + op = op + parse_reg(params[n-1], 16) elseif p == "p" then if q == "sp" then params[n] = "@x31" end diff --git a/ext/opcache/jit/zend_jit.c b/ext/opcache/jit/zend_jit.c index 097f4f6acc42e..48fdf52f23b80 100644 --- a/ext/opcache/jit/zend_jit.c +++ b/ext/opcache/jit/zend_jit.c @@ -39,7 +39,14 @@ #include "Optimizer/zend_call_graph.h" #include "Optimizer/zend_dump.h" +#if defined(__x86_64__) || defined(i386) #include "jit/zend_jit_x86.h" +#elif defined (__aarch64__) +#include "jit/zend_jit_arm64.h" +#else +#error "JIT not supported on this platform" +#endif + #include "jit/zend_jit_internal.h" #ifdef ZTS @@ -204,9 +211,18 @@ static bool zend_long_is_power_of_two(zend_long x) #define OP2_RANGE() OP_RANGE(ssa_op, op2) #define OP1_DATA_RANGE() OP_RANGE(ssa_op + 1, op1) +#if defined(__x86_64__) || defined(i386) #include "dynasm/dasm_x86.h" +#elif defined(__aarch64__) +#include "dynasm/dasm_arm64.h" +#endif + #include "jit/zend_jit_helpers.c" +#if defined(__x86_64__) || 
defined(i386) #include "jit/zend_jit_disasm_x86.c" +#elif defined(__aarch64__) +#include "jit/zend_jit_disasm_arm64.c" +#endif #ifndef _WIN32 #include "jit/zend_jit_gdb.c" #include "jit/zend_jit_perf_dump.c" @@ -216,7 +232,11 @@ static bool zend_long_is_power_of_two(zend_long x) #endif #include "jit/zend_jit_vtune.c" +#if defined(__x86_64__) || defined(i386) #include "jit/zend_jit_x86.c" +#elif defined(__aarch64__) +#include "jit/zend_jit_arm64.c" +#endif #if _WIN32 # include @@ -298,15 +318,32 @@ static void handle_dasm_error(int ret) { case DASM_S_RANGE_PC: fprintf(stderr, "DASM_S_RANGE_PC %d\n", ret & 0xffffffu); break; +#ifdef DASM_S_RANGE_VREG case DASM_S_RANGE_VREG: fprintf(stderr, "DASM_S_RANGE_VREG\n"); break; +#endif +#ifdef DASM_S_UNDEF_L case DASM_S_UNDEF_L: fprintf(stderr, "DASM_S_UNDEF_L\n"); break; +#endif +#ifdef DASM_S_UNDEF_LG + case DASM_S_UNDEF_LG: + fprintf(stderr, "DASM_S_UNDEF_LG\n"); + break; +#endif +#ifdef DASM_S_RANGE_REL + case DASM_S_RANGE_REL: + fprintf(stderr, "DASM_S_RANGE_REL\n"); + break; +#endif case DASM_S_UNDEF_PC: fprintf(stderr, "DASM_S_UNDEF_PC\n"); break; + default: + fprintf(stderr, "DASM_S_%0x\n", ret & 0xff000000u); + break; } ZEND_UNREACHABLE(); } @@ -391,6 +428,9 @@ static void *dasm_link_and_encode(dasm_State **dasm_state, entry = *dasm_ptr; *dasm_ptr = (void*)((char*)*dasm_ptr + ZEND_MM_ALIGNED_SIZE_EX(size, DASM_ALIGNMENT)); + /* flush the hardware I-cache */ + JIT_CACHE_FLUSH(entry, entry + size); + if (trace_num) { zend_jit_trace_add_code(entry, size); } diff --git a/ext/opcache/jit/zend_jit_arm64.dasc b/ext/opcache/jit/zend_jit_arm64.dasc new file mode 100644 index 0000000000000..50f81b5cab765 --- /dev/null +++ b/ext/opcache/jit/zend_jit_arm64.dasc @@ -0,0 +1,5725 @@ +/* + * +----------------------------------------------------------------------+ + * | Zend JIT | + * +----------------------------------------------------------------------+ + * | Copyright (c) The PHP Group | + * +----------------------------------------------------------------------+ + * | This source file is subject to version 3.01 of the PHP license, | + * | that is bundled with this package in the file LICENSE, and is | + * | available through the world-wide-web at the following url: | + * | http://www.php.net/license/3_01.txt | + * | If you did not receive a copy of the PHP license and are unable to | + * | obtain it through the world-wide-web, please send a note to | + * | license@php.net so we can mail you a copy immediately. 
| + * +----------------------------------------------------------------------+ + * | Authors: Dmitry Stogov | + * | Xinchen Hui | + * | Hao Sun | + * +----------------------------------------------------------------------+ + */ + +|.arch arm64 + +|.define FP, x27 +|.define IP, x28 +|.define IPl, w28 +|.define RX, x28 // the same as VM IP reused as a general purpose reg +|.define LR, x30 +|.define CARG1, x0 +|.define CARG2, x1 +|.define CARG3, x2 +|.define CARG4, x3 +|.define CARG5, x4 +|.define CARG6, x5 +|.define RETVALx, x0 +|.define RETVALw, w0 +|.define FCARG1x, x0 +|.define FCARG1w, w0 +|.define FCARG2x, x1 +|.define SPAD, #0x10 // padding for CPU stack alignment +|.define NR_SPAD, #0x30 // padding for CPU stack alignment +|.define T4, [sp, #0x20] // Used to store old value of LR (CALL VM only) +|.define T3, [sp, #0x18] // Used to store old value of IP (CALL VM only) +|.define T2, [sp, #0x10] // Used to store old value of FP (CALL VM only) +|.define T1, [sp] +|.define A4, [r4+0xC] // preallocated slots for arguments of "cdecl" functions (intersect with T1) +|.define A3, [r4+0x8] +|.define A2, [r4+0x4] +|.define A1, [r4] + +// Temporaries, not preserved across calls +|.define TMP1, x8 +|.define TMP1w, w8 +|.define TMP2, x9 +|.define TMP2w, w9 +|.define TMP3, x10 +|.define TMP3w, w10 +|.define TMP4, x11 +|.define TMP4w, w11 + +|.define HYBRID_SPAD, #16 // padding for stack alignment + +#define TMP_ZVAL_OFFSET 0 +#define DASM_ALIGNMENT 16 +#define MAX_IMM12 0xfff // maximum value for imm12 + +#include "Zend/zend_cpuinfo.h" +#include "jit/zend_jit_arm64.h" + +#ifdef HAVE_VALGRIND +# include +#endif + +/* The generated code may contain tautological comparisons, ignore them. */ +#if defined(__clang__) +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wtautological-compare" +# pragma clang diagnostic ignored "-Wstring-compare" +#endif + +const char* zend_reg_name[] = { + "x0", "x1", "x2", "x3", "x4", "x5", "x6", "x7", + "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", + "x16", "x17", "x18", "x19", "x20", "x21", "x22", "x23", + "x24", "x25", "x26", "x27", "x28", "x29", "x30", "sp", + "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", + "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", + "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", + "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31" +}; + +#ifdef HAVE_GCC_GLOBAL_REGS +# define GCC_GLOBAL_REGS 1 +#else +# define GCC_GLOBAL_REGS 0 +#endif + +#if ZTS +static size_t tsrm_ls_cache_tcb_offset = 0; +static size_t tsrm_tls_index; +static size_t tsrm_tls_offset; +#endif + +/* By default avoid JITing inline handlers if it does not seem profitable due to lack of + * type information. Disabling this option allows testing some JIT handlers in the + * presence of try/catch blocks, which prevent SSA construction. */ +#ifndef PROFITABILITY_CHECKS +# define PROFITABILITY_CHECKS 1 +#endif + +|.type EX, zend_execute_data, FP +|.type OP, zend_op +|.type ZVAL, zval + +|.actionlist dasm_actions + +|.globals zend_lb +static void* dasm_labels[zend_lb_MAX]; + +|.section code, cold_code, jmp_table + +#define IS_32BIT(addr) (((uintptr_t)(addr)) <= 0x7fffffff) + +#define IS_SIGNED_32BIT(val) ((((intptr_t)(val)) <= 0x7fffffff) && (((intptr_t)(val)) >= (-2147483647 - 1))) + +#define BP_JIT_IS 6 + +/* In x86/64, HYBRID_SPAD bytes are reserved on the stack only if flag ZEND_VM_HYBRID_JIT_RED_ZONE_SIZE + * is not defined, because the 16-byte redzone, allocated on the stack when the flag is defined, can be + * reused. 
In AArch64, it's safe that these bytes are always reserved because the stack layout might + * change along software evolution, making the redzone not reusable any longer. */ +|.macro ADD_HYBRID_SPAD +| add sp, sp, HYBRID_SPAD +|.endmacro + +|.macro SUB_HYBRID_SPAD +| sub sp, sp, HYBRID_SPAD +|.endmacro + +|.macro LOAD_ADDR, reg, addr +| // 48-bit virtual address +| mov reg, #((uintptr_t)(addr) & 0xffff) +| movk reg, #(((uintptr_t)(addr) >> 16) & 0xffff), lsl #16 +| movk reg, #(((uintptr_t)(addr) >> 32) & 0xffff), lsl #32 +|.endmacro + +// Type cast to unsigned is used to avoid undefined behavior. +|.macro LOAD_32BIT_VAL, reg, val +| mov reg, #((uint32_t)(val) & 0xffff) +| movk reg, #(((uint32_t)(val) >> 16) & 0xffff), lsl #16 +|.endmacro + +|.macro LOAD_64BIT_VAL, reg, val +| mov reg, #((uint64_t)(val) & 0xffff) +| movk reg, #(((uint64_t)(val) >> 16) & 0xffff), lsl #16 +| movk reg, #(((uint64_t)(val) >> 32) & 0xffff), lsl #32 +| movk reg, #(((uint64_t)(val) >> 48) & 0xffff), lsl #48 +|.endmacro + +// Safe memory load/store with an unsigned immediate offset. +// When using Z_OFFSET(addr), which is 24-bit long, as the unsigned offset to compute a memory address, +// we should firstly check whether it's greater than MAX_IMM12. +|.macro SAFE_MEM_ACC_WITH_UOFFSET, ldr_str_ins, op, base_reg, offset, tmp_reg +|| if (offset > MAX_IMM12) { +| LOAD_32BIT_VAL tmp_reg, offset +| ldr_str_ins op, [base_reg, tmp_reg] +|| } else { +| ldr_str_ins op, [base_reg, #(offset)] +|| } +|.endmacro + +|.macro LOAD_TSRM_CACHE, reg +| brk #0 // TODO +|.endmacro + +|.macro LOAD_ADDR_ZTS, reg, struct, field +| brk #0 // TODO +|.endmacro + +|.macro ADDR_OP1, addr_ins, addr, tmp_reg +| brk #0 // TODO +|.endmacro + +// Move the 48-bit address 'addr' into 'tmp_reg' and store it into the dest addr 'op1' +|.macro ADDR_STORE, op1, addr, tmp_reg +| LOAD_ADDR tmp_reg, addr +| str tmp_reg, op1 +|.endmacro + +// Move the 48-bit address 'addr' into 'tmp_reg1' and compare with the value inside address 'op1' +|.macro ADDR_CMP, op1, addr, tmp_reg1, tmp_reg2 +| LOAD_ADDR tmp_reg1, addr +| ldr tmp_reg2, op1 +| cmp tmp_reg2, tmp_reg1 +|.endmacro + +|.macro PUSH_ADDR, addr, tmp_reg +| ADDR_OP1 push, addr, tmp_reg +|.endmacro + +|.macro PUSH_ADDR_ZTS, struct, field, tmp_reg +| brk #0 // TODO +|.endmacro + +// Store the value from a register 'op' into memory 'addr' +|.macro MEM_STORE, str_ins, op, addr, tmp_reg +| LOAD_ADDR tmp_reg, addr +| str_ins op, [tmp_reg] +|.endmacro + +|.macro MEM_STORE_ZTS, str_ins, op, struct, field, tmp_reg +| .if ZTS +| brk #0 // TODO: test +| LOAD_TSRM_CACHE tmp_reg +| str_ins op, [tmp_reg, #(struct.._offset+offsetof(zend_..struct, field))] +| .else +| MEM_STORE str_ins, op, &struct.field, tmp_reg +| .endif +|.endmacro + +// Load the value from memory 'addr' into a register 'op' +|.macro MEM_LOAD, ldr_ins, op, addr, tmp_reg +| LOAD_ADDR tmp_reg, addr +| ldr_ins op, [tmp_reg] +|.endmacro + +|.macro MEM_LOAD_ZTS, ldr_ins, op, struct, field, tmp_reg +| .if ZTS +| brk #0 // TODO: test +| LOAD_TSRM_CACHE tmp_reg +| ldr_ins op, [tmp_reg, #(struct.._offset+offsetof(zend_..struct, field))] +| .else +| MEM_LOAD ldr_ins, op, &struct.field, tmp_reg +| .endif +|.endmacro + +// Load the value from memory 'addr' into a tmp register 'tmp_reg1', +// and conduct arithmetic operations with 'op'. +// Operations can be add/sub/div/mul, and the computation result is stored into 'op'. 
+|.macro MEM_LOAD_OP, mem_ins, ldr_ins, op, addr, tmp_reg1, tmp_reg2 +| MEM_LOAD ldr_ins, tmp_reg1, addr, tmp_reg2 +| mem_ins op, op, tmp_reg1 +|.endmacro + +|.macro MEM_LOAD_OP_ZTS, mem_ins, ldr_ins, op, struct, field, tmp_reg1, tmp_reg2 +| .if ZTS +| brk #0 // TODO: test +| LOAD_TSRM_CACHE tmp_reg1 +| ldr_ins tmp_reg2, [tmp_reg1, #(struct.._offset+offsetof(zend_..struct, field))] +| mem_ins op, op, tmp_reg2 +| .else +| MEM_LOAD_OP mem_ins, ldr_ins, op, &struct.field, tmp_reg1, tmp_reg2 +| .endif +|.endmacro + +// Similar to MEM_LOAD_OP/_ZTS, but operations are compare instructions. +// Note that 'op' can be imm12. +|.macro MEM_LOAD_CMP, ldr_ins, op, addr, tmp_reg1, tmp_reg2 +| MEM_LOAD ldr_ins, tmp_reg1, addr, tmp_reg2 +| cmp tmp_reg1, op +|.endmacro + +|.macro MEM_LOAD_CMP_ZTS, ldr_ins, op, struct, field, tmp_reg1, tmp_reg2 +| .if ZTS +| brk #0 // TODO: test +| LOAD_TSRM_CACHE tmp_reg1 +| ldr_ins tmp_reg2, [tmp_reg1, #(struct.._offset+offsetof(zend_..struct, field))] +| cmp tmp_reg2, op +| .else +| MEM_LOAD_CMP ldr_ins, op, &struct.field, tmp_reg1, tmp_reg2 +| .endif +|.endmacro + +// Load the value from memory 'addr' into a tmp register 'tmp_reg1' and conduct arithmetic operations with 'op'. +// The computation result is stored back to memory 'addr'. 'op' can be either imm12 or register. +// For constant case, it should be guaranteed that 'op' can be represented by imm12 before using this macro. +|.macro MEM_LOAD_OP_STORE, mem_ins, ldr_ins, str_ins, op, addr, tmp_reg1, tmp_reg2 +| MEM_LOAD ldr_ins, tmp_reg1, addr, tmp_reg2 +| mem_ins tmp_reg1, tmp_reg1, op +| str_ins tmp_reg1, [tmp_reg2] +|.endmacro + +|.macro MEM_LOAD_OP_STORE_ZTS, mem_ins, ldr_ins, str_ins, op, struct, field, tmp_reg1, tmp_reg2 +| .if ZTS +| brk #0 // TODO: test +| LOAD_TSRM_CACHE tmp_reg1 +| ldr_ins tmp_reg2, [tmp_reg1, #(struct.._offset+offsetof(zend_..struct, field))] +| mem_ins tmp_reg2, tmp_reg2, op +| str_ins tmp_reg2, [tmp_reg1, #(struct.._offset+offsetof(zend_..struct, field))] +| .else +| MEM_LOAD_OP_STORE mem_ins, ldr_ins, str_ins, op, &struct.field, tmp_reg1, tmp_reg2 +| .endif +|.endmacro + +|.macro MEM_OP3_3, mem_ins, op1, op2, prefix, addr, tmp_reg +| brk #0 // TODO +|.endmacro + +|.macro LOAD_BASE_ADDR, reg, base, offset +|| if (offset) { +|| if (offset > MAX_IMM12) { +| LOAD_32BIT_VAL reg, offset +| add reg, Rx(base), reg +|| } else { +| add reg, Rx(base), #offset +|| } +|| } else { +|| if (base == ZREG_RSP) { +| mov reg, sp +|| } else { +| mov reg, Rx(base) +|| } +|| } +|.endmacro + +|.macro PUSH_BASE_ADDR, base, offset, tmp_reg +| brk #0 // TODO +|.endmacro + +|.macro EXT_CALL, func, tmp_reg +| LOAD_ADDR tmp_reg, func +| blr tmp_reg +|.endmacro + +|.macro EXT_JMP, func, tmp_reg +| LOAD_ADDR tmp_reg, func +| br tmp_reg +|.endmacro + +|.macro SAVE_IP +|| if (GCC_GLOBAL_REGS) { +| str IP, EX->opline +|| } +|.endmacro + +|.macro LOAD_IP +|| if (GCC_GLOBAL_REGS) { +| ldr IP, EX->opline +|| } +|.endmacro + +|.macro LOAD_IP_ADDR, addr +|| if (GCC_GLOBAL_REGS) { +| LOAD_ADDR IP, addr +|| } else { +| ADDR_STORE EX->opline, addr, RX +|| } +|.endmacro + +|.macro LOAD_IP_ADDR_ZTS, struct, field +| brk #0 // TODO +|.endmacro + +|.macro GET_IP, reg +|| if (GCC_GLOBAL_REGS) { +| mov reg, IP +|| } else { +| ldr reg, EX->opline +|| } +|.endmacro + +// In x86 implementation, 'val' can be either a constant or a register. +// In AArch64, use ADD_IP for register case, +// and use ADD_IP_FROM_CST for constant case, where the value can be represented by imm12. 
+|.macro ADD_IP, val, tmp_reg +|| if (GCC_GLOBAL_REGS) { +| add IP, IP, val +|| } else { +| ldr tmp_reg, EX->opline +| add tmp_reg, tmp_reg, val +| str tmp_reg, EX->opline +|| } +|.endmacro + +|.macro ADD_IP_FROM_CST, val, tmp_reg +|| ZEND_ASSERT(val >=0 && val <= MAX_IMM12); +|| if (GCC_GLOBAL_REGS) { +| add IP, IP, #val +|| } else { +| ldr tmp_reg, EX->opline +| add tmp_reg, tmp_reg, #val +| str tmp_reg, EX->opline +|| } +|.endmacro + +|.macro JMP_IP +|| if (GCC_GLOBAL_REGS) { +| ldr TMP1, [IP] +| br TMP1 +|| } else { +| ldr TMP1, EX:CARG1->opline +| br TMP1 +|| } +|.endmacro + +|.macro CMP_IP, addr +| brk #0 // TODO +|.endmacro + +|.macro LOAD_ZVAL_ADDR, reg, addr +|| if (Z_MODE(addr) == IS_CONST_ZVAL) { +| LOAD_ADDR reg, Z_ZV(addr) +|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) { +| LOAD_BASE_ADDR reg, Z_REG(addr), Z_OFFSET(addr) +|| } else { +|| ZEND_UNREACHABLE(); +|| } +|.endmacro + +|.macro PUSH_ZVAL_ADDR, addr, tmp_reg +|| if (Z_MODE(addr) == IS_CONST_ZVAL) { +| PUSH_ADDR Z_ZV(addr), tmp_reg +|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) { +| PUSH_BASE_ADDR Z_REG(addr), Z_OFFSET(addr), tmp_reg +|| } else { +|| ZEND_UNREACHABLE(); +|| } +|.endmacro + +|.macro GET_Z_TYPE_INFO, reg, zv +| mov reg, dword [zv+offsetof(zval,u1.type_info)] +|.endmacro + +|.macro SET_Z_TYPE_INFO, zv, type, tmp_reg +| LOAD_32BIT_VAL tmp_reg, type +| str tmp_reg, [zv, #offsetof(zval,u1.type_info)] +|.endmacro + +|.macro GET_ZVAL_TYPE, reg, addr +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| mov reg, byte [Ra(Z_REG(addr))+Z_OFFSET(addr)+offsetof(zval,u1.v.type)] +|.endmacro + +|.macro GET_ZVAL_TYPE_INFO, reg, addr, tmp_reg +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| SAFE_MEM_ACC_WITH_UOFFSET ldr, reg, Rx(Z_REG(addr)), Z_OFFSET(addr)+offsetof(zval,u1.type_info), tmp_reg +|.endmacro + +|.macro SET_ZVAL_TYPE_INFO, addr, type, tmp_reg1, tmp_reg2 +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| LOAD_32BIT_VAL tmp_reg1, type +| SAFE_MEM_ACC_WITH_UOFFSET str, tmp_reg1, Rx(Z_REG(addr)), Z_OFFSET(addr)+offsetof(zval,u1.type_info), tmp_reg2 +|.endmacro + +|.macro SET_ZVAL_TYPE_INFO_FROM_REG, addr, type, tmp_reg +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| SAFE_MEM_ACC_WITH_UOFFSET str, type, Rx(Z_REG(addr)), Z_OFFSET(addr)+offsetof(zval,u1.type_info), tmp_reg +|.endmacro + +|.macro GET_Z_PTR, reg, zv +| mov reg, aword [zv] +|.endmacro + +|.macro SET_Z_PTR, zv, val +| mov aword [zv], val +|.endmacro + +|.macro GET_Z_W2, reg, zv +| mov reg, dword [zv+4] +|.endmacro + +|.macro SET_Z_W2, zv, reg +| mov dword [zv+4], reg +|.endmacro + +|.macro GET_ZVAL_PTR, reg, addr, tmp_reg +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| SAFE_MEM_ACC_WITH_UOFFSET ldr, reg, Rx(Z_REG(addr)), Z_OFFSET(addr), tmp_reg +|.endmacro + +|.macro SET_ZVAL_PTR, addr, val, tmp_reg +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| SAFE_MEM_ACC_WITH_UOFFSET str, val, Rx(Z_REG(addr)), Z_OFFSET(addr), tmp_reg +|.endmacro + +|.macro GET_ZVAL_W2, reg, addr +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| brk #0 // TODO +|.endmacro + +|.macro SET_ZVAL_W2, addr, val +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| brk #0 // TODO +|.endmacro + +|.macro UNDEF_OPLINE_RESULT +| brk #0 // TODO +|.endmacro + +|.macro SSE_GET_LONG, reg, lval, tmp_reg +|| if (lval == 0) { +| brk #0 // TODO: test +| // vxorps xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0), xmm(reg-ZREG_XMM0) +|| } else { +| LOAD_64BIT_VAL Rx(tmp_reg), lval +| scvtf Rd(reg-ZREG_XMM0), Rx(tmp_reg) +|| } +|.endmacro + +// Define DOUBLE_GET_ZVAL_LVAL to replace SSE_GET_ZVAL_LVAL in x86 implementation. 
+// Convert the LONG value in 'addr' into DOUBLE type, and move it into 'reg' +|.macro DOUBLE_GET_ZVAL_LVAL, reg, addr, tmp_reg1, tmp_reg2 +|| if (Z_MODE(addr) == IS_CONST_ZVAL) { +| SSE_GET_LONG reg, Z_LVAL_P(Z_ZV(addr)), tmp_reg1 +|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) { +| SAFE_MEM_ACC_WITH_UOFFSET ldr, Rx(tmp_reg1), Rx(Z_REG(addr)), Z_OFFSET(addr), Rx(tmp_reg2) +| scvtf Rd(reg-ZREG_XMM0), Rx(tmp_reg1) +|| } else if (Z_MODE(addr) == IS_REG) { +| brk #0 // TODO: test +| scvtf Rd(reg-ZREG_XMM0), Rx(Z_REG(addr)) +|| } else { +|| ZEND_UNREACHABLE(); +|| } +|.endmacro + +// Define DOUBLE_MATH_REG to replace AVX_MATH_REG in x86 implementation. +|.macro DOUBLE_MATH_REG, opcode, dst_reg, op1_reg, src_reg +|| switch (opcode) { +|| case ZEND_ADD: +| fadd Rd(dst_reg-ZREG_XMM0), Rd(op1_reg-ZREG_XMM0), Rd(src_reg-ZREG_XMM0) +|| break; +|| case ZEND_SUB: +| brk #0 // vsubsd xmm(dst_reg-ZREG_XMM0), xmm(op1_reg-ZREG_XMM0), xmm(src_reg-ZREG_XMM0) +|| break; +|| case ZEND_MUL: +| brk #0 // vmulsd xmm(dst_reg-ZREG_XMM0), xmm(op1_reg-ZREG_XMM0), xmm(src_reg-ZREG_XMM0) +|| break; +|| case ZEND_DIV: +| brk #0 // vdivsd xmm(dst_reg-ZREG_XMM0), xmm(op1_reg-ZREG_XMM0), xmm(src_reg-ZREG_XMM0) +|| break; +|| } +|.endmacro + +|.macro LONG_OP, long_ins, reg, addr, tmp_reg1, tmp_reg2 +|| if (Z_MODE(addr) == IS_CONST_ZVAL) { +|| if(Z_LVAL_P(Z_ZV(addr)) >= 0 && Z_LVAL_P(Z_ZV(addr)) <= MAX_IMM12) { +| long_ins Rx(reg), Rx(reg), #(Z_LVAL_P(Z_ZV(addr))) +|| } else { +| LOAD_64BIT_VAL tmp_reg1, Z_LVAL_P(Z_ZV(addr)) +| long_ins Rx(reg), Rx(reg), tmp_reg1 +|| } +|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) { +| brk #0 // TODO: test +| SAFE_MEM_ACC_WITH_UOFFSET ldr, tmp_reg1, Rx(Z_REG(addr)), Z_OFFSET(addr), tmp_reg2 +| long_ins Rx(reg), Rx(reg), tmp_reg1 +|| } else if (Z_MODE(addr) == IS_REG) { +| brk #0 // TODO: test +| long_ins Rx(reg), Rx(reg), Rx(Z_REG(addr)) +|| } else { +|| ZEND_UNREACHABLE(); +|| } +|.endmacro + +|.macro LONG_OP_WITH_32BIT_CONST, long_ins, op1_addr, lval +| brk #0 // TODO +|.endmacro + +|.macro LONG_OP_WITH_CONST, long_ins, op1_addr, lval +| brk #0 // TODO +|.endmacro + +|.macro GET_ZVAL_LVAL, reg, addr, tmp_reg +|| if (Z_MODE(addr) == IS_CONST_ZVAL) { +|| if (Z_LVAL_P(Z_ZV(addr)) == 0) { +| brk #0 // TODO: test +| mov Rx(reg), xzr +|| } else { +| brk #0 // TODO: test +| LOAD_64BIT_VAL Rx(reg), Z_LVAL_P(Z_ZV(addr)) +|| } +|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) { +| SAFE_MEM_ACC_WITH_UOFFSET ldr, Rx(reg), Rx(Z_REG(addr)), Z_OFFSET(addr), tmp_reg +|| } else if (Z_MODE(addr) == IS_REG) { +| brk #0 // TODO: test +|| if (reg != Z_REG(addr)) { +| brk #0 // TODO: test +| mov Rx(reg), Rx(Z_REG(addr)) +|| } +|| } else { +|| ZEND_UNREACHABLE(); +|| } +|.endmacro + +|.macro LONG_MATH, opcode, reg, addr, tmp_reg1, tmp_reg2 +|| switch (opcode) { +|| case ZEND_ADD: +| LONG_OP adds, reg, addr, tmp_reg1, tmp_reg2 +|| break; +|| case ZEND_SUB: +| brk #0 // LONG_OP sub, reg, addr +|| break; +|| case ZEND_MUL: +| brk #0 // LONG_OP imul, reg, addr +|| break; +|| case ZEND_BW_OR: +| brk #0 // LONG_OP or, reg, addr +|| break; +|| case ZEND_BW_AND: +| brk #0 // LONG_OP and, reg, addr +|| break; +|| case ZEND_BW_XOR: +| brk #0 // LONG_OP xor, reg, addr +|| break; +|| default: +|| ZEND_UNREACHABLE(); +|| } +|.endmacro + +|.macro LONG_MATH_REG, opcode, dst_reg, src_reg +| brk #0 // TODO +|.endmacro + +// In x86 implementation, argument 'lval' of SET_ZVAL_LVAL can be either a LONG constant +// or a register. 
Here, we separate it into two macros, SET_ZVAL_LVAL for the consant case, +// and SET_ZVAL_LVAL_FROM_REG for the register case. +|.macro SET_ZVAL_LVAL_FROM_REG, addr, reg, tmp_reg +|| if (Z_MODE(addr) == IS_REG) { +| mov Rx(Z_REG(addr)), reg +|| } else { +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| SAFE_MEM_ACC_WITH_UOFFSET str, reg, Rx(Z_REG(addr)), Z_OFFSET(addr), tmp_reg +|| } +|.endmacro + +|.macro SET_ZVAL_LVAL, addr, lval, tmp_reg1, tmp_reg2 +|| if (lval == 0) { +| SET_ZVAL_LVAL_FROM_REG addr, xzr, tmp_reg2 +|| } else { +| LOAD_64BIT_VAL tmp_reg1, lval +| SET_ZVAL_LVAL_FROM_REG addr, tmp_reg1, tmp_reg2 +|| } +|.endmacro + +// Define SET_ZVAL_DVAL to replace SSE_SET_ZVAL_DVAL in x86 implementation. +|.macro SET_ZVAL_DVAL, addr, reg, tmp_reg +|| if (Z_MODE(addr) == IS_REG) { +|| if (reg != Z_REG(addr)) { +| brk #0 // TODO: test +| fmov Rd(Z_REG(addr)-ZREG_XMM0), Rd(reg-ZREG_XMM0) +|| } +|| } else { +|| ZEND_ASSERT(Z_MODE(addr) == IS_MEM_ZVAL); +| SAFE_MEM_ACC_WITH_UOFFSET str, Rd(reg-ZREG_XMM0), Rx(Z_REG(addr)), Z_OFFSET(addr), Rx(tmp_reg) +|| } +|.endmacro + +// Define GET_ZVAL_DVAL to replace SSE_GET_ZVAL_DVAL in x86 implementation. +|.macro GET_ZVAL_DVAL, reg, addr, tmp_reg +| brk #0 // TODO: test +|| if (Z_MODE(addr) != IS_REG || reg != Z_REG(addr)) { +|| if (Z_MODE(addr) == IS_CONST_ZVAL) { +| brk #0 // TODO: test +| LOAD_ADDR Rx(tmp_reg), Z_ZV(addr) +| ldr Rd(reg-ZREG_XMM0), [Rx(tmp_reg)] +|| } else if (Z_MODE(addr) == IS_MEM_ZVAL) { +| brk #0 // TODO: test +| SAFE_MEM_ACC_WITH_UOFFSET ldr, Rd(reg-ZREG_XMM0), Rx(Z_REG(addr)), Z_OFFSET(addr), Rx(tmp_reg) +|| } else if (Z_MODE(addr) == IS_REG) { +| brk #0 // TODO: test +| fmov Rd(reg-ZREG_XMM0), Rd(Z_REG(addr)-ZREG_XMM0) +|| } else { +|| ZEND_UNREACHABLE(); +|| } +|| } +|.endmacro + +|.macro ZVAL_COPY_CONST, dst_addr, dst_info, dst_def_info, zv, tmp_reg1, tmp_reg2 +|| if (Z_TYPE_P(zv) > IS_TRUE) { +|| if (Z_TYPE_P(zv) == IS_DOUBLE) { +|| zend_reg dst_reg = (Z_MODE(dst_addr) == IS_REG) ? Z_REG(dst_addr) : ZREG_XMM0; +| LOAD_ADDR Rx(tmp_reg1), zv +| ldr Rd(dst_reg-ZREG_XMM0), [Rx(tmp_reg1)] +| SET_ZVAL_DVAL dst_addr, dst_reg, tmp_reg2 +|| } else if (Z_TYPE_P(zv) == IS_LONG && dst_def_info == MAY_BE_DOUBLE) { +|| zend_reg dst_reg = (Z_MODE(dst_addr) == IS_REG) ? Z_REG(dst_addr) : ZREG_XMM0; +| brk #0 // TODO: test +|| } else { +| // In x64, if the range of this LONG value can be represented via INT type, only move the low 32 bits into dst_addr. +| // Note that imm32 is signed extended to 64 bits during mov. +| // In aarch64, we choose to handle both cases in the same way. Even though 4 mov's are used for 64-bit value and 2 mov's are +| // needed for 32-bit value, an extra ext insn is needed for 32-bit vlaue. 
+| SET_ZVAL_LVAL dst_addr, Z_LVAL_P(zv), Rx(tmp_reg1), Rx(tmp_reg2) +|| } +|| } +|| if (Z_MODE(dst_addr) == IS_MEM_ZVAL) { +|| if (dst_def_info == MAY_BE_DOUBLE) { +| brk #0 // TODO: test +|| if ((dst_info & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_GUARD)) != MAY_BE_DOUBLE) { +| SET_ZVAL_TYPE_INFO dst_addr, IS_DOUBLE, Rw(tmp_reg1), Rx(tmp_reg2) +|| } +|| } else if (((dst_info & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_GUARD)) != (1<1 +|| } +| // brk #0 // TODO: test +| GC_ADDREF value_ptr_reg, tmp_reg +|1: +|| } +|.endmacro + +|.macro TRY_ADDREF_2, val_info, type_flags_reg, value_ptr_reg +|| if (val_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE)) { +|| if (val_info & (MAY_BE_ANY-(MAY_BE_OBJECT|MAY_BE_RESOURCE))) { +| IF_NOT_REFCOUNTED type_flags_reg, >1 +|| } +| add dword [value_ptr_reg], 2 +|1: +|| } +|.endmacro + +|.macro ZVAL_DEREF, reg, info +|| if (info & MAY_BE_REF) { +| IF_NOT_Z_TYPE, reg, IS_REFERENCE, >1 +| GET_Z_PTR reg, reg +| add reg, offsetof(zend_reference, val) +|1: +|| } +|.endmacro + +|.macro SET_EX_OPLINE, op, tmp_reg +|| if (op == last_valid_opline) { +|| zend_jit_use_last_valid_opline(); +| SAVE_IP +|| } else { +| ADDR_STORE EX->opline, op, tmp_reg +|| if (!GCC_GLOBAL_REGS) { +|| zend_jit_reset_last_valid_opline(); +|| } +|| } +|.endmacro + +// zval should be in FCARG1x +|.macro ZVAL_DTOR_FUNC, var_info, opline // arg1 must be in FCARG1x +|| do { +|| if (has_concrete_type((var_info) & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE|MAY_BE_INDIRECT))) { +| brk #0 // TODO: test +|| } +|| if (opline) { +| SET_EX_OPLINE opline, TMP1 +|| } +| EXT_CALL rc_dtor_func, TMP1 +|| } while(0); +|.endmacro + +// TMP1 is used inside. +|.macro ZVAL_PTR_DTOR, addr, op_info, gc, cold, opline, tmp_reg +|| if ((op_info) & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE|MAY_BE_REF)) { +|| if ((op_info) & ((MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_INDIRECT)-(MAY_BE_OBJECT|MAY_BE_RESOURCE))) { +| // if (Z_REFCOUNTED_P(cv)) { +|| if (cold) { +| IF_ZVAL_REFCOUNTED addr, >1, TMP1w, tmp_reg +|.cold_code +|1: +|| } else { +| brk #0 // TODO: test. 
+| IF_NOT_ZVAL_REFCOUNTED addr, >4, TMP1w, tmp_reg +|| } +|| } +| // if (!Z_DELREF_P(cv)) { +| GET_ZVAL_PTR FCARG1x, addr, tmp_reg +| GC_DELREF FCARG1x, TMP1w +|| if (RC_MAY_BE_1(op_info)) { +|| if (RC_MAY_BE_N(op_info)) { +|| if (gc && RC_MAY_BE_N(op_info) && ((op_info) & (MAY_BE_REF|MAY_BE_ARRAY|MAY_BE_OBJECT)) != 0) { +| bne >3 +|| } else { +| brk #0 // TODO: test +| bne >4 +|| } +|| } +| // zval_dtor_func(r); +| ZVAL_DTOR_FUNC op_info, opline +|| if (gc && RC_MAY_BE_N(op_info) && ((op_info) & (MAY_BE_REF|MAY_BE_ARRAY|MAY_BE_OBJECT)) != 0) { +| b >4 +|| } +|3: +|| } +|| if (gc && RC_MAY_BE_N(op_info) && ((op_info) & (MAY_BE_REF|MAY_BE_ARRAY|MAY_BE_OBJECT)) != 0) { +|| if ((op_info) & MAY_BE_REF) { +|| zend_jit_addr ref_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, offsetof(zend_reference, val)); +| IF_NOT_ZVAL_TYPE addr, IS_REFERENCE, >1, TMP1w, tmp_reg +| IF_NOT_ZVAL_COLLECTABLE ref_addr, >4, TMP1w, tmp_reg +| GET_ZVAL_PTR FCARG1x, ref_addr, tmp_reg +|1: +|| } +| IF_GC_MAY_NOT_LEAK FCARG1x, >4, TMP1w +| // gc_possible_root(Z_COUNTED_P(z)) +| EXT_CALL gc_possible_root, TMP1 +|| } +|| if (cold && ((op_info) & ((MAY_BE_ANY|MAY_BE_UNDEF)-(MAY_BE_OBJECT|MAY_BE_RESOURCE))) != 0) { +| b >4 +|.code +|| } +|4: +|| } +|.endmacro + +|.macro FREE_OP, op_type, op, op_info, cold, opline +|| if (op_type & (IS_VAR|IS_TMP_VAR)) { +| brk #0 // TODO: test +| // ZVAL_PTR_DTOR ZEND_ADDR_MEM_ZVAL(ZREG_FP, op.var), op_info, 0, cold, opline +|| } +|.endmacro + +|.macro SEPARATE_ARRAY, addr, op_info, cold +| brk #0 // TODO +|.endmacro + +|.macro EFREE_REG_REFERENCE +| brk #0 // TODO +|.endmacro + +|.macro EFREE_REFERENCE, ptr +| brk #0 // TODO +|.endmacro + +|.macro EMALLOC, size, op_array, opline +| brk #0 // TODO +|.endmacro + +|.macro OBJ_RELEASE, reg, exit_label +| brk #0 // TODO +|.endmacro + +|.macro UNDEFINED_OFFSET, opline +|| if (opline == last_valid_opline) { +|| zend_jit_use_last_valid_opline(); +| call ->undefined_offset_ex +|| } else { +| SET_EX_OPLINE opline, r0 +| call ->undefined_offset +|| } +|.endmacro + +|.macro UNDEFINED_INDEX, opline +|| if (opline == last_valid_opline) { +|| zend_jit_use_last_valid_opline(); +| call ->undefined_index_ex +|| } else { +| SET_EX_OPLINE opline, r0 +| call ->undefined_index +|| } +|.endmacro + +|.macro CANNOT_ADD_ELEMENT, opline +|| if (opline == last_valid_opline) { +|| zend_jit_use_last_valid_opline(); +| call ->cannot_add_element_ex +|| } else { +| SET_EX_OPLINE opline, r0 +| call ->cannot_add_element +|| } +|.endmacro + +static zend_bool reuse_ip = 0; +static zend_bool delayed_call_chain = 0; +static uint32_t delayed_call_level = 0; +static const zend_op *last_valid_opline = NULL; +static zend_bool use_last_vald_opline = 0; +static zend_bool track_last_valid_opline = 0; +static int jit_return_label = -1; +static uint32_t current_trace_num = 0; +static uint32_t allowed_opt_flags = 0; + +static void zend_jit_track_last_valid_opline(void) +{ + use_last_vald_opline = 0; + track_last_valid_opline = 1; +} + +static void zend_jit_use_last_valid_opline(void) +{ + if (track_last_valid_opline) { + use_last_vald_opline = 1; + track_last_valid_opline = 0; + } +} + +static zend_bool zend_jit_trace_uses_initial_ip(void) +{ + return use_last_vald_opline; +} + +static void zend_jit_set_last_valid_opline(const zend_op *target_opline) +{ + if (!reuse_ip) { + track_last_valid_opline = 0; + last_valid_opline = target_opline; + } +} + +static void zend_jit_reset_last_valid_opline(void) +{ + track_last_valid_opline = 0; + last_valid_opline = NULL; +} + +static void 
zend_jit_start_reuse_ip(void) +{ + zend_jit_reset_last_valid_opline(); + reuse_ip = 1; +} + +static int zend_jit_reuse_ip(dasm_State **Dst) +{ + if (!reuse_ip) { + zend_jit_start_reuse_ip(); + | // call = EX(call); + | ldr RX, EX->call + } + return 1; +} + +static void zend_jit_stop_reuse_ip(void) +{ + reuse_ip = 0; +} + +/* bit helpers */ + +/* from http://aggregate.org/MAGIC/ */ +static uint32_t ones32(uint32_t x) +{ + x -= ((x >> 1) & 0x55555555); + x = (((x >> 2) & 0x33333333) + (x & 0x33333333)); + x = (((x >> 4) + x) & 0x0f0f0f0f); + x += (x >> 8); + x += (x >> 16); + return x & 0x0000003f; +} + +static uint32_t floor_log2(uint32_t x) +{ + ZEND_ASSERT(x != 0); + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + return ones32(x) - 1; +} + +static zend_bool is_power_of_two(uint32_t x) +{ + return !(x & (x - 1)) && x != 0; +} + +static zend_bool has_concrete_type(uint32_t value_type) +{ + return is_power_of_two (value_type & (MAY_BE_ANY|MAY_BE_UNDEF)); +} + +static uint32_t concrete_type(uint32_t value_type) +{ + return floor_log2(value_type & (MAY_BE_ANY|MAY_BE_UNDEF)); +} + +static inline zend_bool is_signed(double d) +{ + return (((unsigned char*)&d)[sizeof(double)-1] & 0x80) != 0; +} + +static int zend_jit_interrupt_handler_stub(dasm_State **Dst) +{ + |->interrupt_handler: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_exception_handler_stub(dasm_State **Dst) +{ + |->exception_handler: + if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) { + const void *handler = zend_get_opcode_handler_func(EG(exception_op)); + + | ADD_HYBRID_SPAD + | EXT_CALL handler, TMP1 + | JMP_IP + } else { + const void *handler = EG(exception_op)->handler; + + if (GCC_GLOBAL_REGS) { + | add sp, sp, SPAD // stack alignment + | EXT_JMP handler, TMP1 + } else if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE) { + | brk #0 // TODO: test + } else { + | mov FCARG1x, FP + | ldp FP, RX, T2 // retore FP and IP + | ldr LR, T4 // retore LR + | add sp, sp, NR_SPAD // stack alignment + | EXT_JMP handler, TMP1 + } + } + + return 1; +} + +static int zend_jit_exception_handler_undef_stub(dasm_State **Dst) +{ + |->exception_handler_undef: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_leave_function_stub(dasm_State **Dst) +{ + |->leave_function_handler: + | brk #0 // TODO: test + + return 1; +} + +static int zend_jit_leave_throw_stub(dasm_State **Dst) +{ + |->leave_throw_handler: + | brk #0 // TODO: test + + return 1; +} + +static int zend_jit_icall_throw_stub(dasm_State **Dst) +{ + |->icall_throw_handler: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_throw_cannot_pass_by_ref_stub(dasm_State **Dst) +{ + |->throw_cannot_pass_by_ref: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_undefined_offset_ex_stub(dasm_State **Dst) +{ + |->undefined_offset_ex: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_undefined_offset_stub(dasm_State **Dst) +{ + |->undefined_offset: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_undefined_index_ex_stub(dasm_State **Dst) +{ + |->undefined_index_ex: + | SAVE_IP + | b ->undefined_index + + return 1; +} + +static int zend_jit_undefined_index_stub(dasm_State **Dst) +{ + |->undefined_index: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_cannot_add_element_ex_stub(dasm_State **Dst) +{ + |->cannot_add_element_ex: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_cannot_add_element_stub(dasm_State **Dst) +{ + |->cannot_add_element: + | brk #0 // TODO + + return 1; +} + +static int 
zend_jit_undefined_function_stub(dasm_State **Dst) +{ + |->undefined_function: + | brk #0 // TODO + return 1; +} + +static int zend_jit_negative_shift_stub(dasm_State **Dst) +{ + |->negative_shift: + | brk #0 // TODO + return 1; +} + +static int zend_jit_mod_by_zero_stub(dasm_State **Dst) +{ + |->mod_by_zero: + | brk #0 // TODO + return 1; +} + +static int zend_jit_invalid_this_stub(dasm_State **Dst) +{ + |->invalid_this: + | brk #0 // TODO + return 1; +} + +static int zend_jit_double_one_stub(dasm_State **Dst) +{ + |->one: + | brk #0 // TODO + return 1; +} + +static int zend_jit_hybrid_runtime_jit_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID) { + return 1; + } + + |->hybrid_runtime_jit: + | EXT_CALL zend_runtime_jit, TMP1 + | JMP_IP + return 1; +} + +static int zend_jit_hybrid_profile_jit_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID) { + return 1; + } + + |->hybrid_profile_jit: + | brk #0 // TODO + return 1; +} + +static int zend_jit_hybrid_hot_code_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID) { + return 1; + } + + |->hybrid_hot_code: + | brk #0 // TODO + return 1; +} + +/* + * This code is based Mike Pall's "Hashed profile counters" idea, implemented + * in LuaJIT. The full description may be found in "LuaJIT 2.0 intellectual + * property disclosure and research opportunities" email + * at http://lua-users.org/lists/lua-l/2009-11/msg00089.html + * + * In addition we use a variation of Knuth's multiplicative hash function + * described at https://code.i-harness.com/en/q/a21ce + * + * uint64_t hash(uint64_t x) { + * x = (x ^ (x >> 30)) * 0xbf58476d1ce4e5b9; + * x = (x ^ (x >> 27)) * 0x94d049bb133111eb; + * x = x ^ (x >> 31); + * return x; + * } + * + * uint_32_t hash(uint32_t x) { + * x = ((x >> 16) ^ x) * 0x45d9f3b; + * x = ((x >> 16) ^ x) * 0x45d9f3b; + * x = (x >> 16) ^ x; + * return x; + * } + * + */ +static int zend_jit_hybrid_hot_counter_stub(dasm_State **Dst, uint32_t cost) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_hybrid_func_hot_counter_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID || !JIT_G(hot_func)) { + return 1; + } + + |->hybrid_func_hot_counter: + + return zend_jit_hybrid_hot_counter_stub(Dst, + ((ZEND_JIT_COUNTER_INIT + JIT_G(hot_func) - 1) / JIT_G(hot_func))); +} + +static int zend_jit_hybrid_loop_hot_counter_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID || !JIT_G(hot_loop)) { + return 1; + } + + |->hybrid_loop_hot_counter: + + return zend_jit_hybrid_hot_counter_stub(Dst, + ((ZEND_JIT_COUNTER_INIT + JIT_G(hot_loop) - 1) / JIT_G(hot_loop))); +} + +static int zend_jit_hybrid_hot_trace_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID) { + return 1; + } + + // On entry from counter stub: + // TMP4 -> zend_op_trace_info.counter + + |->hybrid_hot_trace: + | mov TMP1w, #ZEND_JIT_COUNTER_INIT + | strh TMP1w, [TMP4] + | mov FCARG1x, FP + | GET_IP FCARG2x + | EXT_CALL zend_jit_trace_hot_root, TMP1 + | cmp RETVALw, #0 // Result is < 0 on failure. 
+ | blt >1 + | MEM_LOAD_ZTS ldr, FP, executor_globals, current_execute_data, TMP1 + | LOAD_IP + | JMP_IP + |1: + | EXT_JMP zend_jit_halt_op->handler, TMP1 + + return 1; +} + +static int zend_jit_hybrid_trace_counter_stub(dasm_State **Dst, uint32_t cost) +{ + // Need to preserve CARGx as these are passed through to the + // original opcode handler if the instruction is not hot enough + + | ldr TMP1, EX->func + | ldr TMP2, [TMP1, #offsetof(zend_op_array, reserved[zend_func_info_rid])] + | ldr TMP2, [TMP2, #offsetof(zend_jit_op_array_trace_extension, offset)] + | add TMP3, TMP2, IP + | ldr TMP4, [TMP3, #offsetof(zend_op_trace_info, counter)] + | ldrh TMP1w, [TMP4] + | LOAD_32BIT_VAL TMP2w, cost + | sub TMP1w, TMP1w, TMP2w + | strh TMP1w, [TMP4] + | cmp TMP1w, #0 + | ble ->hybrid_hot_trace + | ldr TMP1, [TMP3, #offsetof(zend_op_trace_info, orig_handler)] + | br TMP1 + + return 1; +} + +static int zend_jit_hybrid_func_trace_counter_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID || !JIT_G(hot_func)) { + return 1; + } + + |->hybrid_func_trace_counter: + + return zend_jit_hybrid_trace_counter_stub(Dst, + ((ZEND_JIT_COUNTER_INIT + JIT_G(hot_func) - 1) / JIT_G(hot_func))); +} + +static int zend_jit_hybrid_ret_trace_counter_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID || !JIT_G(hot_return)) { + return 1; + } + + |->hybrid_ret_trace_counter: + + return zend_jit_hybrid_trace_counter_stub(Dst, + ((ZEND_JIT_COUNTER_INIT + JIT_G(hot_return) - 1) / JIT_G(hot_return))); +} + +static int zend_jit_hybrid_loop_trace_counter_stub(dasm_State **Dst) +{ + if (zend_jit_vm_kind != ZEND_VM_KIND_HYBRID || !JIT_G(hot_loop)) { + return 1; + } + + |->hybrid_loop_trace_counter: + + return zend_jit_hybrid_trace_counter_stub(Dst, + ((ZEND_JIT_COUNTER_INIT + JIT_G(hot_loop) - 1) / JIT_G(hot_loop))); +} + +static int zend_jit_trace_halt_stub(dasm_State **Dst) +{ + |->trace_halt: + | brk #0 // TODO + return 1; +} + +static int zend_jit_trace_exit_stub(dasm_State **Dst) +{ + |->trace_exit: + | brk #0 // TODO + + return 1; +} + +static int zend_jit_trace_escape_stub(dasm_State **Dst) +{ + |->trace_escape: + | + | brk #0 // TODO + + return 1; +} + +/* Keep 32 exit points in a single code block */ +#define ZEND_JIT_EXIT_POINTS_SPACING 4 // push byte + short jmp = bytes +#define ZEND_JIT_EXIT_POINTS_PER_GROUP 32 // number of continuous exit points + +static int zend_jit_trace_exit_group_stub(dasm_State **Dst, uint32_t n) +{ + uint32_t i; + + | brk #0 // TODO + | b ->trace_exit + + return 1; +} + +#ifdef CONTEXT_THREADED_JIT +static int zend_jit_context_threaded_call_stub(dasm_State **Dst) +{ + |->context_threaded_call: + | brk #0 // TODO + return 1; +} +#endif + +static int zend_jit_assign_to_variable(dasm_State **Dst, + const zend_op *opline, + zend_jit_addr var_use_addr, + zend_jit_addr var_addr, + uint32_t var_info, + uint32_t var_def_info, + zend_uchar val_type, + zend_jit_addr val_addr, + uint32_t val_info, + zend_jit_addr res_addr, + zend_bool check_exception); + +static int zend_jit_assign_const_stub(dasm_State **Dst) +{ + zend_jit_addr var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0); + zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG2a, 0); + uint32_t val_info = MAY_BE_ANY|MAY_BE_RC1|MAY_BE_RCN; + + |->assign_const: + | brk #0 // TODO + if (!zend_jit_assign_to_variable( + Dst, NULL, + var_addr, var_addr, -1, -1, + IS_CONST, val_addr, val_info, + 0, 0)) { + return 0; + } + | ret + return 1; +} + +static int zend_jit_assign_tmp_stub(dasm_State **Dst) +{ + zend_jit_addr 
var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0); + zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG2a, 0); + uint32_t val_info = MAY_BE_ANY|MAY_BE_RC1|MAY_BE_RCN; + + |->assign_tmp: + | brk #0 // TODO + if (!zend_jit_assign_to_variable( + Dst, NULL, + var_addr, var_addr, -1, -1, + IS_TMP_VAR, val_addr, val_info, + 0, 0)) { + return 0; + } + | ret + return 1; +} + +static int zend_jit_assign_var_stub(dasm_State **Dst) +{ + zend_jit_addr var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0); + zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG2a, 0); + uint32_t val_info = MAY_BE_ANY|MAY_BE_RC1|MAY_BE_RCN|MAY_BE_REF; + + |->assign_var: + | brk #0 // TODOa + | ret + return 1; +} + +static int zend_jit_assign_cv_noref_stub(dasm_State **Dst) +{ + zend_jit_addr var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0); + zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG2a, 0); + uint32_t val_info = MAY_BE_ANY|MAY_BE_RC1|MAY_BE_RCN/*|MAY_BE_UNDEF*/; + + |->assign_cv_noref: + | brk #0 // TODO + | ret + return 1; +} + +static int zend_jit_assign_cv_stub(dasm_State **Dst) +{ + zend_jit_addr var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0); + zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG2a, 0); + uint32_t val_info = MAY_BE_ANY|MAY_BE_RC1|MAY_BE_RCN|MAY_BE_REF/*|MAY_BE_UNDEF*/; + + |->assign_cv: + | brk #0 // TODO + | ret + return 1; +} + +static const zend_jit_stub zend_jit_stubs[] = { + JIT_STUB(interrupt_handler), + JIT_STUB(exception_handler), + JIT_STUB(exception_handler_undef), + JIT_STUB(leave_function), + JIT_STUB(leave_throw), + JIT_STUB(icall_throw), + JIT_STUB(throw_cannot_pass_by_ref), + JIT_STUB(undefined_offset), + JIT_STUB(undefined_index), + JIT_STUB(cannot_add_element), + JIT_STUB(undefined_offset_ex), + JIT_STUB(undefined_index_ex), + JIT_STUB(cannot_add_element_ex), + JIT_STUB(undefined_function), + JIT_STUB(negative_shift), + JIT_STUB(mod_by_zero), + JIT_STUB(invalid_this), + JIT_STUB(trace_halt), + JIT_STUB(trace_exit), + JIT_STUB(trace_escape), + JIT_STUB(hybrid_runtime_jit), + JIT_STUB(hybrid_profile_jit), + JIT_STUB(hybrid_hot_code), + JIT_STUB(hybrid_func_hot_counter), + JIT_STUB(hybrid_loop_hot_counter), + JIT_STUB(hybrid_hot_trace), + JIT_STUB(hybrid_func_trace_counter), + JIT_STUB(hybrid_ret_trace_counter), + JIT_STUB(hybrid_loop_trace_counter), + JIT_STUB(assign_const), + JIT_STUB(assign_tmp), + JIT_STUB(assign_var), + JIT_STUB(assign_cv_noref), + JIT_STUB(assign_cv), + JIT_STUB(double_one), +#ifdef CONTEXT_THREADED_JIT + JIT_STUB(context_threaded_call), +#endif +}; + +#if ZTS && defined(ZEND_WIN32) +extern uint32_t _tls_index; +extern char *_tls_start; +extern char *_tls_end; +#endif + +static int zend_jit_setup(void) +{ + allowed_opt_flags = 0; + +#if ZTS +# ifdef _WIN64 + tsrm_tls_index = _tls_index * sizeof(void*); + + /* To find offset of "_tsrm_ls_cache" in TLS segment we perform a linear scan of local TLS memory */ + /* Probably, it might be better solution */ + do { + void ***tls_mem = ((void**)__readgsqword(0x58))[_tls_index]; + void *val = _tsrm_ls_cache; + size_t offset = 0; + size_t size = (char*)&_tls_end - (char*)&_tls_start; + + while (offset < size) { + if (*tls_mem == val) { + tsrm_tls_offset = offset; + break; + } + tls_mem++; + offset += sizeof(void*); + } + if (offset >= size) { + // TODO: error message ??? 
+ return FAILURE; + } + } while(0); +# elif ZEND_WIN32 + tsrm_tls_index = _tls_index * sizeof(void*); + + /* To find offset of "_tsrm_ls_cache" in TLS segment we perform a linear scan of local TLS memory */ + /* Probably, it might be better solution */ + do { + void ***tls_mem = ((void***)__readfsdword(0x2c))[_tls_index]; + void *val = _tsrm_ls_cache; + size_t offset = 0; + size_t size = (char*)&_tls_end - (char*)&_tls_start; + + while (offset < size) { + if (*tls_mem == val) { + tsrm_tls_offset = offset; + break; + } + tls_mem++; + offset += sizeof(void*); + } + if (offset >= size) { + // TODO: error message ??? + return FAILURE; + } + } while(0); +# elif defined(__APPLE__) && defined(__x86_64__) + tsrm_ls_cache_tcb_offset = tsrm_get_ls_cache_tcb_offset(); + if (tsrm_ls_cache_tcb_offset == 0) { + size_t *ti; + __asm__( + "leaq __tsrm_ls_cache(%%rip),%0" + : "=r" (ti)); + tsrm_tls_offset = ti[2]; + tsrm_tls_index = ti[1] * 8; + } +# elif defined(__GNUC__) && defined(__x86_64__) + tsrm_ls_cache_tcb_offset = tsrm_get_ls_cache_tcb_offset(); + if (tsrm_ls_cache_tcb_offset == 0) { +#if defined(__has_attribute) && __has_attribute(tls_model) + size_t ret; + + asm ("movq _tsrm_ls_cache@gottpoff(%%rip),%0" + : "=r" (ret)); + tsrm_ls_cache_tcb_offset = ret; +#else + size_t *ti; + + __asm__( + "leaq _tsrm_ls_cache@tlsgd(%%rip), %0\n" + : "=a" (ti)); + tsrm_tls_offset = ti[1]; + tsrm_tls_index = ti[0] * 16; +#endif + } +# elif defined(__GNUC__) && defined(__i386__) + tsrm_ls_cache_tcb_offset = tsrm_get_ls_cache_tcb_offset(); + if (tsrm_ls_cache_tcb_offset == 0) { +#if 1 + size_t ret; + + asm ("leal _tsrm_ls_cache@ntpoff,%0\n" + : "=a" (ret)); + tsrm_ls_cache_tcb_offset = ret; +#else + size_t *ti, _ebx, _ecx, _edx; + + __asm__( + "call 1f\n" + ".subsection 1\n" + "1:\tmovl (%%esp), %%ebx\n\t" + "ret\n" + ".previous\n\t" + "addl $_GLOBAL_OFFSET_TABLE_, %%ebx\n\t" + "leal _tsrm_ls_cache@tlsldm(%%ebx), %0\n\t" + "call ___tls_get_addr@plt\n\t" + "leal _tsrm_ls_cache@tlsldm(%%ebx), %0\n" + : "=a" (ti), "=&b" (_ebx), "=&c" (_ecx), "=&d" (_edx)); + tsrm_tls_offset = ti[1]; + tsrm_tls_index = ti[0] * 8; +#endif + } +# endif +#endif + + return SUCCESS; +} + +static ZEND_ATTRIBUTE_UNUSED int zend_jit_trap(dasm_State **Dst) +{ + | brk #0 + return 1; +} + +static int zend_jit_align_func(dasm_State **Dst) +{ + reuse_ip = 0; + delayed_call_chain = 0; + last_valid_opline = NULL; + use_last_vald_opline = 0; + track_last_valid_opline = 0; + jit_return_label = -1; + |.align 16 + return 1; +} + +static int zend_jit_prologue(dasm_State **Dst) +{ + if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) { + | SUB_HYBRID_SPAD + } else if (GCC_GLOBAL_REGS) { + | sub sp, sp, SPAD // stack alignment + } else { + | sub sp, sp, NR_SPAD // stack alignment + | stp FP, RX, T2 // save FP and IP + | str LR, T4 // save LR + | mov FP, FCARG1x + } + return 1; +} + +static int zend_jit_label(dasm_State **Dst, unsigned int label) +{ + |=>label: + return 1; +} + +static int zend_jit_save_call_chain(dasm_State **Dst, uint32_t call_level) +{ + | // call->prev_execute_data = EX(call); + if (call_level == 1) { + | str xzr, EX:RX->prev_execute_data + } else { + | brk #0 // TODO: test + | ldr TMP1, EX->call + | str TMP1, EX:RX->prev_execute_data + } + | // EX(call) = call; + | str RX, EX->call + + delayed_call_chain = 0; + + return 1; +} + +static int zend_jit_set_ip(dasm_State **Dst, const zend_op *opline) +{ + if (last_valid_opline == opline) { + zend_jit_use_last_valid_opline(); + } else if (GCC_GLOBAL_REGS && last_valid_opline) { + 
zend_jit_use_last_valid_opline(); + | brk #0 // TODO + } else { + | LOAD_IP_ADDR opline + } + zend_jit_set_last_valid_opline(opline); + + return 1; +} + +static int zend_jit_set_valid_ip(dasm_State **Dst, const zend_op *opline) +{ + if (delayed_call_chain) { + | brk #0 // TODO: test + if (!zend_jit_save_call_chain(Dst, delayed_call_level)) { + return 0; + } + } + if (!zend_jit_set_ip(Dst, opline)) { + return 0; + } + reuse_ip = 0; + return 1; +} + +static int zend_jit_check_timeout(dasm_State **Dst, const zend_op *opline, const void *exit_addr) +{ + // TODO: not implemented. + return 1; +} + +static int zend_jit_trace_end_loop(dasm_State **Dst, int loop_label, const void *timeout_exit_addr) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_check_exception(dasm_State **Dst) +{ + | MEM_LOAD_CMP_ZTS ldr, xzr, executor_globals, exception, TMP1, TMP2 + | bne ->exception_handler + return 1; +} + +static int zend_jit_check_exception_undef_result(dasm_State **Dst, const zend_op *opline) +{ + if (opline->result_type & (IS_TMP_VAR|IS_VAR)) { + | brk #0 // TODO + return 1; + } + return zend_jit_check_exception(Dst); +} + +static int zend_jit_trace_begin(dasm_State **Dst, uint32_t trace_num, zend_jit_trace_info *parent, uint32_t exit_num) +{ + zend_regset regset = ZEND_REGSET_SCRATCH; + + // In the x86 implementation, this clause would be conducted if ZTS is enabled or the addressing mode is 64-bit. + { + /* assignment to EG(jit_trace_num) shouldn't clober CPU register used by deoptimizer */ + if (parent) { + int i; + int parent_vars_count = parent->exit_info[exit_num].stack_size; + zend_jit_trace_stack *parent_stack = + parent->stack_map + + parent->exit_info[exit_num].stack_offset; + + for (i = 0; i < parent_vars_count; i++) { + if (STACK_REG(parent_stack, i) != ZREG_NONE) { + if (STACK_REG(parent_stack, i) < ZREG_NUM) { + ZEND_REGSET_EXCL(regset, STACK_REG(parent_stack, i)); + } else if (STACK_REG(parent_stack, i) == ZREG_ZVAL_COPY_R0) { + ZEND_REGSET_EXCL(regset, ZREG_R0); + } + } + } + } + } + + if (parent && parent->exit_info[exit_num].flags & ZEND_JIT_EXIT_METHOD_CALL) { + ZEND_REGSET_EXCL(regset, ZREG_R0); + } + + current_trace_num = trace_num; + + | // EG(jit_trace_num) = trace_num; + if (regset == ZEND_REGSET_EMPTY || ZEND_REGSET_IS_SINGLETON(regset)) { + | sub sp, sp, #16 + | stp TMP1, TMP2, [sp] // save TMP1 and TMP2 + | LOAD_32BIT_VAL TMP1w, trace_num + | MEM_STORE_ZTS str, TMP1w, executor_globals, jit_trace_num, TMP2 + | ldp TMP1, TMP2, [sp] // retore TMP1 and TMP2 + | add sp, sp, #16 + } else { + zend_reg tmp1 = ZEND_REGSET_FIRST(regset); + zend_reg tmp2 = ZEND_REGSET_SECOND(regset); + + | LOAD_32BIT_VAL Rw(tmp1), trace_num + | MEM_STORE_ZTS str, Rw(tmp1), executor_globals, jit_trace_num, Rx(tmp2) + (void)tmp1; + (void)tmp2; + } + + return 1; +} + +typedef ZEND_SET_ALIGNED(1, uint16_t unaligned_uint16_t); +typedef ZEND_SET_ALIGNED(1, int32_t unaligned_int32_t); + +static int zend_jit_patch(const void *code, size_t size, uint32_t jmp_table_size, const void *from_addr, const void *to_addr) +{ + int ret = 0; + uint8_t *p, *end; + + abort(); // TODO + return ret; +} + +static int zend_jit_link_side_trace(const void *code, size_t size, uint32_t jmp_table_size, uint32_t exit_num, const void *addr) +{ + return zend_jit_patch(code, size, jmp_table_size, zend_jit_trace_get_exit_addr(exit_num), addr); +} + +static int zend_jit_trace_link_to_root(dasm_State **Dst, zend_jit_trace_info *t, const void *timeout_exit_addr) +{ + const void *link_addr; + size_t prologue_size; + + | brk #0 // 
TODO + return 1; +} + +static int zend_jit_trace_return(dasm_State **Dst, zend_bool original_handler) +{ + if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) { + | ADD_HYBRID_SPAD + if (!original_handler) { + | JMP_IP + } else { + | brk #0 // TODO: test + | ldr TMP1, EX->func + | ldr TMP1, [TMP1, #offsetof(zend_op_array, reserved[zend_func_info_rid])] + | ldr TMP1, [TMP1, #offsetof(zend_jit_op_array_trace_extension, offset)] + | add TMP1, IP, TMP1 + | ldr TMP1, [TMP1] + | br TMP1 + } + } else if (GCC_GLOBAL_REGS) { + | add sp, sp, SPAD // stack alignment + if (!original_handler) { + | JMP_IP + } else { + | brk #0 // TODO: test + | ldr TMP1, EX->func + | ldr TMP1, [TMP1, #offsetof(zend_op_array, reserved[zend_func_info_rid])] + | ldr TMP1, [TMP1, #offsetof(zend_jit_op_array_trace_extension, offset)] + | add TMP1, IP, TMP1 + | ldr TMP1, [TMP1] + | br TMP1 + } + } else { + if (original_handler) { + | brk #0 // TODO: test + | mov FCARG1x, FP + | ldr TMP1, EX->func + | ldr TMP1, [TMP1, #offsetof(zend_op_array, reserved[zend_func_info_rid])] + | ldr TMP1, [TMP1, #offsetof(zend_jit_op_array_trace_extension, offset)] + | add TMP1, IP, TMP1 + | ldr TMP1, [TMP1] + | blr TMP1 + } + | ldp FP, RX, T2 // retore FP and IP + | ldr LR, T4 // retore LR + | add sp, sp, NR_SPAD // stack alignment + | mov RETVALx, #2 // ZEND_VM_LEAVE + | ret + } + return 1; +} + +static int zend_jit_type_guard(dasm_State **Dst, const zend_op *opline, uint32_t var, uint8_t type) +{ + int32_t exit_point = zend_jit_trace_get_exit_point(opline, 0); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + | brk #0 // TODO + + return 1; +} + +static int zend_jit_packed_guard(dasm_State **Dst, const zend_op *opline, uint32_t var, uint32_t op_info) +{ + int32_t exit_point = zend_jit_trace_get_exit_point(opline, ZEND_JIT_EXIT_PACKED_GUARD); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_trace_handler(dasm_State **Dst, const zend_op_array *op_array, const zend_op *opline, int may_throw, zend_jit_trace_rec *trace) +{ + zend_jit_op_array_trace_extension *jit_extension = + (zend_jit_op_array_trace_extension*)ZEND_FUNC_INFO(op_array); + size_t offset = jit_extension->offset; + const void *handler = + (zend_vm_opcode_handler_t)ZEND_OP_TRACE_INFO(opline, offset)->call_handler; + + if (!zend_jit_set_valid_ip(Dst, opline)) { + return 0; + } + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_handler(dasm_State **Dst, const zend_op *opline, int may_throw) +{ + const void *handler; + + if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) { + handler = zend_get_opcode_handler_func(opline); + } else { + handler = opline->handler; + } + + if (!zend_jit_set_valid_ip(Dst, opline)) { + return 0; + } + if (!GCC_GLOBAL_REGS) { + | brk #0 // TODO + } + | EXT_CALL handler, TMP1 + if (may_throw) { + zend_jit_check_exception(Dst); + } + + /* Skip the following OP_DATA */ + switch (opline->opcode) { + case ZEND_ASSIGN_DIM: + case ZEND_ASSIGN_OBJ: + case ZEND_ASSIGN_STATIC_PROP: + case ZEND_ASSIGN_DIM_OP: + case ZEND_ASSIGN_OBJ_OP: + case ZEND_ASSIGN_STATIC_PROP_OP: + case ZEND_ASSIGN_STATIC_PROP_REF: + case ZEND_ASSIGN_OBJ_REF: + zend_jit_set_last_valid_opline(opline + 2); + break; + default: + zend_jit_set_last_valid_opline(opline + 1); + break; + } + + return 1; +} + +static int zend_jit_tail_handler(dasm_State **Dst, const zend_op *opline) +{ + if (!zend_jit_set_valid_ip(Dst, opline)) { + 
return 0; + } + if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) { + if (opline->opcode == ZEND_DO_UCALL || + opline->opcode == ZEND_DO_FCALL_BY_NAME || + opline->opcode == ZEND_DO_FCALL || + opline->opcode == ZEND_RETURN) { + + /* Use inlined HYBRID VM handler */ + const void *handler = opline->handler; + + | ADD_HYBRID_SPAD + | EXT_JMP handler, TMP1 + } else { + const void *handler = zend_get_opcode_handler_func(opline); + + | brk #0 // TODO: test + } + } else { + const void *handler = opline->handler; + + if (GCC_GLOBAL_REGS) { + | add sp, sp, SPAD // stack alignment + } else { + | mov FCARG1x, FP + | ldp FP, RX, T2 // retore FP and IP + | ldr LR, T4 // retore LR + | add sp, sp, NR_SPAD // stack alignment + } + | EXT_JMP handler, TMP1 + } + zend_jit_reset_last_valid_opline(); + return 1; +} + +static int zend_jit_trace_opline_guard(dasm_State **Dst, const zend_op *opline) +{ + uint32_t exit_point = zend_jit_trace_get_exit_point(NULL, 0); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + | brk #0 // TODO + + zend_jit_set_last_valid_opline(opline); + + return 1; +} + +static int zend_jit_jmp(dasm_State **Dst, unsigned int target_label) +{ + | b =>target_label + return 1; +} + +static int zend_jit_cond_jmp(dasm_State **Dst, const zend_op *next_opline, unsigned int target_label) +{ + | brk #0 // TODO + + zend_jit_set_last_valid_opline(next_opline); + + return 1; +} + +#ifdef CONTEXT_THREADED_JIT +static int zend_jit_context_threaded_call(dasm_State **Dst, const zend_op *opline, unsigned int next_block) +{ + | brk #0 // TODO + return 1; +} +#endif + +static int zend_jit_call(dasm_State **Dst, const zend_op *opline, unsigned int next_block) +{ +#ifdef CONTEXT_THREADED_JIT + return zend_jit_context_threaded_call(Dst, opline, next_block); +#else + return zend_jit_tail_handler(Dst, opline); +#endif +} + +static int zend_jit_spill_store(dasm_State **Dst, zend_jit_addr src, zend_jit_addr dst, uint32_t info, zend_bool set_type) +{ + ZEND_ASSERT(Z_MODE(src) == IS_REG); + ZEND_ASSERT(Z_MODE(dst) == IS_MEM_ZVAL); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_load_reg(dasm_State **Dst, zend_jit_addr src, zend_jit_addr dst, uint32_t info) +{ + ZEND_ASSERT(Z_MODE(src) == IS_MEM_ZVAL); + ZEND_ASSERT(Z_MODE(dst) == IS_REG); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_store_var(dasm_State **Dst, uint32_t info, int var, zend_reg reg, zend_bool set_type) +{ + zend_jit_addr src = ZEND_ADDR_REG(reg); + zend_jit_addr dst = ZEND_ADDR_MEM_ZVAL(ZREG_FP, EX_NUM_TO_VAR(var)); + + return zend_jit_spill_store(Dst, src, dst, info, set_type); +} + +static int zend_jit_store_var_if_necessary(dasm_State **Dst, int var, zend_jit_addr src, uint32_t info) +{ + if (Z_MODE(src) == IS_REG && Z_STORE(src)) { + | brk #0 // TODO: test + zend_jit_addr dst = ZEND_ADDR_MEM_ZVAL(ZREG_FP, var); + return zend_jit_spill_store(Dst, src, dst, info, 1); + } + return 1; +} + +static int zend_jit_store_var_if_necessary_ex(dasm_State **Dst, int var, zend_jit_addr src, uint32_t info, zend_jit_addr old, uint32_t old_info) +{ + if (Z_MODE(src) == IS_REG && Z_STORE(src)) { + zend_jit_addr dst = ZEND_ADDR_MEM_ZVAL(ZREG_FP, var); + zend_bool set_type = 1; + + if ((info & (MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)) == + (old_info & (MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF))) { + if (Z_MODE(old) != IS_REG || Z_LOAD(old) || Z_STORE(old)) { + set_type = 0; + } + } + return zend_jit_spill_store(Dst, src, dst, info, set_type); + } + return 1; +} + +static int 
zend_jit_load_var(dasm_State **Dst, uint32_t info, int var, zend_reg reg) +{ + zend_jit_addr src = ZEND_ADDR_MEM_ZVAL(ZREG_FP, EX_NUM_TO_VAR(var)); + zend_jit_addr dst = ZEND_ADDR_REG(reg); + + return zend_jit_load_reg(Dst, src, dst, info); +} + +static int zend_jit_update_regs(dasm_State **Dst, uint32_t var, zend_jit_addr src, zend_jit_addr dst, uint32_t info) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_escape_if_undef_r0(dasm_State **Dst, int var, uint32_t flags, const zend_op *opline) +{ + zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_R0, 0); + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_store_const(dasm_State **Dst, int var, zend_reg reg) +{ + zend_jit_addr dst = ZEND_ADDR_MEM_ZVAL(ZREG_FP, EX_NUM_TO_VAR(var)); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_free_trampoline(dasm_State **Dst) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_inc_dec(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, uint32_t op1_def_info, zend_jit_addr op1_def_addr, uint32_t res_use_info, uint32_t res_info, zend_jit_addr res_addr, int may_overflow, int may_throw) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_opline_uses_reg(const zend_op *opline, int8_t reg) +{ + if ((opline+1)->opcode == ZEND_OP_DATA + && ((opline+1)->op1_type & (IS_VAR|IS_TMP_VAR|IS_CV)) + && JIT_G(current_frame)->stack[EX_VAR_TO_NUM((opline+1)->op1.var)].reg == reg) { + return 1; + } + return + ((opline->result_type & (IS_VAR|IS_TMP_VAR|IS_CV)) && + JIT_G(current_frame)->stack[EX_VAR_TO_NUM(opline->result.var)].reg == reg) || + ((opline->op1_type & (IS_VAR|IS_TMP_VAR|IS_CV)) && + JIT_G(current_frame)->stack[EX_VAR_TO_NUM(opline->op1.var)].reg == reg) || + ((opline->op2_type & (IS_VAR|IS_TMP_VAR|IS_CV)) && + JIT_G(current_frame)->stack[EX_VAR_TO_NUM(opline->op2.var)].reg == reg); +} + +static int zend_jit_math_long_long(dasm_State **Dst, + const zend_op *opline, + zend_uchar opcode, + zend_jit_addr op1_addr, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + uint32_t res_info, + uint32_t res_use_info, + int may_overflow) +{ + zend_bool same_ops = zend_jit_same_addr(op1_addr, op2_addr); + zend_reg result_reg; + zend_reg tmp_reg = ZREG_R0; + + if (Z_MODE(res_addr) == IS_REG && (res_info & MAY_BE_LONG)) { + | brk #0 // TODO: test + if (may_overflow && (res_info & MAY_BE_GUARD) + && JIT_G(current_frame) + && zend_jit_opline_uses_reg(opline, Z_REG(res_addr))) { + result_reg = ZREG_R0; + } else { + result_reg = Z_REG(res_addr); + } + } else if (Z_MODE(op1_addr) == IS_REG && Z_LAST_USE(op1_addr)) { + | brk #0 // TODO: test + result_reg = Z_REG(op1_addr); + } else if (Z_REG(res_addr) != ZREG_R0) { + result_reg = ZREG_R0; + } else { + | brk #0 // TODO: test + /* ASSIGN_DIM_OP */ + result_reg = ZREG_FCARG1x; + tmp_reg = ZREG_FCARG1x; + } + + if (opcode == ZEND_MUL && + ((Z_MODE(op2_addr) == IS_CONST_ZVAL && + IS_SIGNED_32BIT(Z_LVAL_P(Z_ZV(op2_addr))) && + is_power_of_two(Z_LVAL_P(Z_ZV(op2_addr)))) || + (Z_MODE(op1_addr) == IS_CONST_ZVAL && + IS_SIGNED_32BIT(Z_LVAL_P(Z_ZV(op1_addr))) && + is_power_of_two(Z_LVAL_P(Z_ZV(op1_addr)))))) { + | brk #0 // TODO: test + } else if (opcode == ZEND_DIV && + (Z_MODE(op2_addr) == IS_CONST_ZVAL && + is_power_of_two(Z_LVAL_P(Z_ZV(op2_addr))))) { + | brk #0 // TODO: test + } else if (opcode == ZEND_ADD && + !may_overflow && + Z_MODE(op1_addr) == IS_REG && + Z_MODE(op2_addr) == IS_CONST_ZVAL) { + | brk #0 // TODO: test + } else if (opcode == ZEND_ADD && + !may_overflow && + Z_MODE(op2_addr) == IS_REG && + 
Z_MODE(op1_addr) == IS_CONST_ZVAL) { + | brk #0 // TODO: test + } else if (opcode == ZEND_SUB && + !may_overflow && + Z_MODE(op1_addr) == IS_REG && + Z_MODE(op2_addr) == IS_CONST_ZVAL) { + | brk #0 // TODO: test + } else { + | GET_ZVAL_LVAL result_reg, op1_addr, TMP1 + if ((opcode == ZEND_ADD || opcode == ZEND_SUB) + && Z_MODE(op2_addr) == IS_CONST_ZVAL + && Z_LVAL_P(Z_ZV(op2_addr)) == 0) { + /* +/- 0 */ + may_overflow = 0; + } else if (same_ops && opcode != ZEND_DIV) { + | brk #0 // TODO: test + } else { + | LONG_MATH opcode, result_reg, op2_addr, TMP1, TMP2 + } + } + if (may_overflow) { + if (res_info & MAY_BE_GUARD) { + | brk #0 // TODO: test + } else { + if (res_info & MAY_BE_LONG) { + | bvs >1 + } else { + | brk #0 // TODO: test + } + } + } + + if (Z_MODE(res_addr) == IS_MEM_ZVAL && (res_info & MAY_BE_LONG)) { + | SET_ZVAL_LVAL_FROM_REG res_addr, Rx(result_reg), TMP1 + if (Z_MODE(op1_addr) != IS_MEM_ZVAL || Z_REG(op1_addr) != Z_REG(res_addr) || Z_OFFSET(op1_addr) != Z_OFFSET(res_addr)) { + if ((res_use_info & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_REF|MAY_BE_GUARD)) != MAY_BE_LONG) { + | SET_ZVAL_TYPE_INFO res_addr, IS_LONG, TMP1w, TMP2 + } + } + } + + if (may_overflow && (!(res_info & MAY_BE_GUARD) || (res_info & MAY_BE_ANY) == MAY_BE_DOUBLE)) { + zend_reg tmp_reg1 = ZREG_XMM0; + zend_reg tmp_reg2 = ZREG_XMM1; + + if (res_info & MAY_BE_LONG) { + |.cold_code + |1: + } + + do { + if ((Z_MODE(op1_addr) == IS_CONST_ZVAL && Z_LVAL_P(Z_ZV(op1_addr)) == 1) || + (Z_MODE(op2_addr) == IS_CONST_ZVAL && Z_LVAL_P(Z_ZV(op2_addr)) == 1)) { + if (opcode == ZEND_ADD) { + if (Z_MODE(res_addr) == IS_REG) { + | brk #0 // TODO: test + } else { + | SET_ZVAL_LVAL res_addr, 0x43e0000000000000, TMP1, TMP2 + } + break; + } else if (opcode == ZEND_SUB) { + | brk #0 // TODO: test + break; + } + } + + | DOUBLE_GET_ZVAL_LVAL tmp_reg1, op1_addr, tmp_reg, ZREG_R8 + | DOUBLE_GET_ZVAL_LVAL tmp_reg2, op2_addr, tmp_reg, ZREG_R8 + | DOUBLE_MATH_REG opcode, tmp_reg1, tmp_reg1, tmp_reg2 + | SET_ZVAL_DVAL res_addr, tmp_reg1, ZREG_R8 + } while (0); + + if (Z_MODE(res_addr) == IS_MEM_ZVAL + && (res_use_info & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_REF|MAY_BE_GUARD)) != MAY_BE_DOUBLE) { + | SET_ZVAL_TYPE_INFO res_addr, IS_DOUBLE, TMP1w, TMP2 + } + if (res_info & MAY_BE_LONG) { + | b >2 + |.code + } + |2: + } + + return 1; +} + +static int zend_jit_math_long_double(dasm_State **Dst, + zend_uchar opcode, + zend_jit_addr op1_addr, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + uint32_t res_use_info) +{ + zend_reg result_reg = + (Z_MODE(res_addr) == IS_REG) ? 
Z_REG(res_addr) : ZREG_XMM0; + zend_reg tmp_reg; + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_math_double_long(dasm_State **Dst, + zend_uchar opcode, + zend_jit_addr op1_addr, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + uint32_t res_use_info) +{ + zend_reg result_reg, tmp_reg; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_math_double_double(dasm_State **Dst, + zend_uchar opcode, + zend_jit_addr op1_addr, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + uint32_t res_use_info) +{ + zend_bool same_ops = zend_jit_same_addr(op1_addr, op2_addr); + zend_reg result_reg; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_math_helper(dasm_State **Dst, + const zend_op *opline, + zend_uchar opcode, + zend_uchar op1_type, + znode_op op1, + zend_jit_addr op1_addr, + uint32_t op1_info, + zend_uchar op2_type, + znode_op op2, + zend_jit_addr op2_addr, + uint32_t op2_info, + uint32_t res_var, + zend_jit_addr res_addr, + uint32_t res_info, + uint32_t res_use_info, + int may_overflow, + int may_throw) +/* Labels: 1,2,3,4,5,6 */ +{ + zend_bool same_ops = zend_jit_same_addr(op1_addr, op2_addr); + + if ((op1_info & MAY_BE_LONG) && (op2_info & MAY_BE_LONG)) { + if (op1_info & (MAY_BE_ANY-MAY_BE_LONG)) { + if (op1_info & MAY_BE_DOUBLE) { + | IF_NOT_ZVAL_TYPE op1_addr, IS_LONG, >3, TMP1w, TMP2 + } else { + | brk #0 // TODO: test + | IF_NOT_ZVAL_TYPE op1_addr, IS_LONG, >6, TMP1w, TMP2 + } + } + if (!same_ops && (op2_info & (MAY_BE_ANY-MAY_BE_LONG))) { + | brk #0 // TODO: test + if (op2_info & MAY_BE_DOUBLE) { + | IF_NOT_ZVAL_TYPE op2_addr, IS_LONG, >1, TMP1w, TMP2 + |.cold_code + |1: + if (op2_info & (MAY_BE_ANY-(MAY_BE_LONG|MAY_BE_DOUBLE))) { + | IF_NOT_ZVAL_TYPE op2_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + if (!zend_jit_math_long_double(Dst, opcode, op1_addr, op2_addr, res_addr, res_use_info)) { + return 0; + } + | b >5 + |.code + } else { + | IF_NOT_ZVAL_TYPE op2_addr, IS_LONG, >6, TMP1w, TMP2 + } + } + if (!zend_jit_math_long_long(Dst, opline, opcode, op1_addr, op2_addr, res_addr, res_info, res_use_info, may_overflow)) { + return 0; + } + if (op1_info & MAY_BE_DOUBLE) { + |.cold_code + |3: + if (op1_info & (MAY_BE_ANY-(MAY_BE_LONG|MAY_BE_DOUBLE))) { + | IF_NOT_ZVAL_TYPE op1_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + | brk #0 // TODO: test + if (op2_info & MAY_BE_DOUBLE) { + if (!same_ops && (op2_info & (MAY_BE_ANY-MAY_BE_DOUBLE))) { + if (!same_ops) { + | IF_NOT_ZVAL_TYPE, op2_addr, IS_DOUBLE, >1, TMP1w, TMP2 + } else { + | IF_NOT_ZVAL_TYPE, op2_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + } + if (!zend_jit_math_double_double(Dst, opcode, op1_addr, op2_addr, res_addr, res_use_info)) { + return 0; + } + | b >5 + } + if (!same_ops) { + |1: + if (op2_info & (MAY_BE_ANY-(MAY_BE_LONG|MAY_BE_DOUBLE))) { + | IF_NOT_ZVAL_TYPE op2_addr, IS_LONG, >6, TMP1w, TMP2 + } + if (!zend_jit_math_double_long(Dst, opcode, op1_addr, op2_addr, res_addr, res_use_info)) { + return 0; + } + | b >5 + } + |.code + } + } else if ((op1_info & MAY_BE_DOUBLE) && + !(op1_info & MAY_BE_LONG) && + (op2_info & (MAY_BE_LONG|MAY_BE_DOUBLE))) { + | brk #0 // TODO: test + if (op1_info & (MAY_BE_ANY-MAY_BE_DOUBLE)) { + | IF_NOT_ZVAL_TYPE op1_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + if (op2_info & MAY_BE_DOUBLE) { + if (!same_ops && (op2_info & (MAY_BE_ANY-MAY_BE_DOUBLE))) { + if (!same_ops && (op2_info & MAY_BE_LONG)) { + | IF_NOT_ZVAL_TYPE op2_addr, IS_DOUBLE, >1, TMP1w, TMP2 + } else { + | IF_NOT_ZVAL_TYPE op2_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + } + if (!zend_jit_math_double_double(Dst, opcode, op1_addr, 
op2_addr, res_addr, res_use_info)) { + return 0; + } + } + if (!same_ops && (op2_info & MAY_BE_LONG)) { + if (op2_info & MAY_BE_DOUBLE) { + |.cold_code + } + |1: + if (op2_info & (MAY_BE_ANY-(MAY_BE_DOUBLE|MAY_BE_LONG))) { + | IF_NOT_ZVAL_TYPE op2_addr, IS_LONG, >6, TMP1w, TMP2 + } + if (!zend_jit_math_double_long(Dst, opcode, op1_addr, op2_addr, res_addr, res_use_info)) { + return 0; + } + if (op2_info & MAY_BE_DOUBLE) { + | b >5 + |.code + } + } + } else if ((op2_info & MAY_BE_DOUBLE) && + !(op2_info & MAY_BE_LONG) && + (op1_info & (MAY_BE_LONG|MAY_BE_DOUBLE))) { + | brk #0 // TODO: test + if (op2_info & (MAY_BE_ANY-MAY_BE_DOUBLE)) { + | IF_NOT_ZVAL_TYPE op2_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + if (op1_info & MAY_BE_DOUBLE) { + if (!same_ops && (op1_info & (MAY_BE_ANY-MAY_BE_DOUBLE))) { + if (!same_ops && (op1_info & MAY_BE_LONG)) { + | IF_NOT_ZVAL_TYPE op1_addr, IS_DOUBLE, >1, TMP1w, TMP2 + } else { + | IF_NOT_ZVAL_TYPE op1_addr, IS_DOUBLE, >6, TMP1w, TMP2 + } + } + if (!zend_jit_math_double_double(Dst, opcode, op1_addr, op2_addr, res_addr, res_use_info)) { + return 0; + } + } + if (!same_ops && (op1_info & MAY_BE_LONG)) { + if (op1_info & MAY_BE_DOUBLE) { + |.cold_code + } + |1: + if (op1_info & (MAY_BE_ANY-(MAY_BE_DOUBLE|MAY_BE_LONG))) { + | IF_NOT_ZVAL_TYPE op1_addr, IS_LONG, >6, TMP1w, TMP2 + } + if (!zend_jit_math_long_double(Dst, opcode, op1_addr, op2_addr, res_addr, res_use_info)) { + return 0; + } + if (op1_info & MAY_BE_DOUBLE) { + | b >5 + |.code + } + } + } + + |5: + + if ((op1_info & ((MAY_BE_ANY|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE))) || + (op2_info & ((MAY_BE_ANY|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE)))) { + if ((op1_info & (MAY_BE_LONG|MAY_BE_DOUBLE)) && + (op2_info & (MAY_BE_LONG|MAY_BE_DOUBLE))) { + |.cold_code + } + |6: + if (Z_MODE(res_addr) == IS_REG) { + zend_jit_addr real_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, res_var); + | brk #0 // TODO: test + | LOAD_ZVAL_ADDR FCARG1x, real_addr + } else if (Z_REG(res_addr) != ZREG_FCARG1x || Z_OFFSET(res_addr) != 0) { + | LOAD_ZVAL_ADDR FCARG1x, res_addr + } + if (Z_MODE(op1_addr) == IS_REG) { + | brk #0 // TODO: test + } + | LOAD_ZVAL_ADDR FCARG2x, op1_addr + if (Z_MODE(op2_addr) == IS_REG) { + | brk #0 // TODO: test + } + | LOAD_ZVAL_ADDR CARG3, op2_addr + | SET_EX_OPLINE opline, TMP1 + if (opcode == ZEND_ADD) { + | EXT_CALL add_function, TMP1 + } else if (opcode == ZEND_SUB) { + | brk #0 // TODO: test + | EXT_CALL sub_function, TMP1 + } else if (opcode == ZEND_MUL) { + | brk #0 // TODO: test + | EXT_CALL mul_function, TMP1 + } else if (opcode == ZEND_DIV) { + | brk #0 // TODO: test + | EXT_CALL div_function, TMP1 + } else { + ZEND_UNREACHABLE(); + } + | FREE_OP op1_type, op1, op1_info, 0, opline + | FREE_OP op2_type, op2, op2_info, 0, opline + if (may_throw) { + zend_jit_check_exception(Dst); + } + if (Z_MODE(res_addr) == IS_REG) { + | brk #0 // TODO: test + } + if ((op1_info & (MAY_BE_LONG|MAY_BE_DOUBLE)) && + (op2_info & (MAY_BE_LONG|MAY_BE_DOUBLE))) { + | b <5 + |.code + } + } + + return 1; +} + +static int zend_jit_math(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, uint32_t op2_info, zend_jit_addr op2_addr, uint32_t res_use_info, uint32_t res_info, zend_jit_addr res_addr, int may_overflow, int may_throw) +{ + ZEND_ASSERT(!(op1_info & MAY_BE_UNDEF) && !(op2_info & MAY_BE_UNDEF)); + ZEND_ASSERT((op1_info & (MAY_BE_LONG|MAY_BE_DOUBLE)) && + (op2_info & (MAY_BE_LONG|MAY_BE_DOUBLE))); + + if (!zend_jit_math_helper(Dst, opline, opline->opcode, opline->op1_type, opline->op1, op1_addr, 
op1_info, opline->op2_type, opline->op2, op2_addr, op2_info, opline->result.var, res_addr, res_info, res_use_info, may_overflow, may_throw)) { + return 0; + } + if (!zend_jit_store_var_if_necessary(Dst, opline->result.var, res_addr, res_info)) { + return 0; + } + return 1; +} + +static int zend_jit_add_arrays(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, uint32_t op2_info, zend_jit_addr res_addr) +{ + zend_jit_addr op1_addr = OP1_ADDR(); + zend_jit_addr op2_addr = OP2_ADDR(); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_long_math_helper(dasm_State **Dst, + const zend_op *opline, + zend_uchar opcode, + zend_uchar op1_type, + znode_op op1, + zend_jit_addr op1_addr, + uint32_t op1_info, + zend_ssa_range *op1_range, + zend_uchar op2_type, + znode_op op2, + zend_jit_addr op2_addr, + uint32_t op2_info, + zend_ssa_range *op2_range, + uint32_t res_var, + zend_jit_addr res_addr, + uint32_t res_info, + uint32_t res_use_info, + int may_throw) +/* Labels: 6 */ +{ + zend_bool same_ops = zend_jit_same_addr(op1_addr, op2_addr); + zend_reg result_reg; + zval tmp; + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_long_math(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_ssa_range *op1_range, zend_jit_addr op1_addr, uint32_t op2_info, zend_ssa_range *op2_range, zend_jit_addr op2_addr, uint32_t res_use_info, uint32_t res_info, zend_jit_addr res_addr, int may_throw) +{ + ZEND_ASSERT(!(op1_info & MAY_BE_UNDEF) && !(op2_info & MAY_BE_UNDEF)); + ZEND_ASSERT((op1_info & MAY_BE_LONG) && (op2_info & MAY_BE_LONG)); + + if (!zend_jit_long_math_helper(Dst, opline, opline->opcode, + opline->op1_type, opline->op1, op1_addr, op1_info, op1_range, + opline->op2_type, opline->op2, op2_addr, op2_info, op2_range, + opline->result.var, res_addr, res_info, res_use_info, may_throw)) { + return 0; + } + if (!zend_jit_store_var_if_necessary(Dst, opline->result.var, res_addr, res_info)) { + return 0; + } + return 1; +} + +static int zend_jit_concat_helper(dasm_State **Dst, + const zend_op *opline, + zend_uchar op1_type, + znode_op op1, + zend_jit_addr op1_addr, + uint32_t op1_info, + zend_uchar op2_type, + znode_op op2, + zend_jit_addr op2_addr, + uint32_t op2_info, + zend_jit_addr res_addr, + int may_throw) +{ + | brk #0 // TODO + + return 1; +} + +static int zend_jit_concat(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, uint32_t op2_info, zend_jit_addr res_addr, int may_throw) +{ + zend_jit_addr op1_addr, op2_addr; + + ZEND_ASSERT(!(op1_info & MAY_BE_UNDEF) && !(op2_info & MAY_BE_UNDEF)); + ZEND_ASSERT((op1_info & MAY_BE_STRING) && (op2_info & MAY_BE_STRING)); + + op1_addr = OP1_ADDR(); + op2_addr = OP2_ADDR(); + + return zend_jit_concat_helper(Dst, opline, opline->op1_type, opline->op1, op1_addr, op1_info, opline->op2_type, opline->op2, op2_addr, op2_info, res_addr, may_throw); +} + +static int zend_jit_fetch_dimension_address_inner(dasm_State **Dst, const zend_op *opline, uint32_t type, uint32_t op1_info, uint32_t op2_info, const void *found_exit_addr, const void *not_found_exit_addr, const void *exit_addr) +/* Labels: 1,2,3,4,5 */ +{ + zend_jit_addr op2_addr = OP2_ADDR(); + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_simple_assign(dasm_State **Dst, + const zend_op *opline, + zend_jit_addr var_addr, + uint32_t var_info, + uint32_t var_def_info, + zend_uchar val_type, + zend_jit_addr val_addr, + uint32_t val_info, + zend_jit_addr res_addr, + int in_cold, + int save_r1) 
+/* Labels: 1,2,3 */ +{ + zend_reg tmp_reg; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_assign_to_typed_ref(dasm_State **Dst, + const zend_op *opline, + zend_uchar val_type, + zend_jit_addr val_addr, + zend_bool check_exception) +{ + | brk #0 // TODO + + return 1; +} + +static int zend_jit_assign_to_variable_call(dasm_State **Dst, + const zend_op *opline, + zend_jit_addr __var_use_addr, + zend_jit_addr var_addr, + uint32_t __var_info, + uint32_t __var_def_info, + zend_uchar val_type, + zend_jit_addr val_addr, + uint32_t val_info, + zend_jit_addr __res_addr, + zend_bool __check_exception) +{ + | brk #0 // TODO + + return 1; +} + +static int zend_jit_assign_to_variable(dasm_State **Dst, + const zend_op *opline, + zend_jit_addr var_use_addr, + zend_jit_addr var_addr, + uint32_t var_info, + uint32_t var_def_info, + zend_uchar val_type, + zend_jit_addr val_addr, + uint32_t val_info, + zend_jit_addr res_addr, + zend_bool check_exception) +/* Labels: 1,2,3,4,5,8 */ +{ + int done = 0; + zend_reg ref_reg, tmp_reg; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_assign_dim(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, uint32_t op2_info, uint32_t val_info, int may_throw) +{ + zend_jit_addr op2_addr, op3_addr, res_addr; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_assign_dim_op(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, uint32_t op1_def_info, zend_jit_addr op1_addr, uint32_t op2_info, uint32_t op1_data_info, zend_ssa_range *op1_data_range, int may_throw) +{ + zend_jit_addr op2_addr, op3_addr, var_addr; + + ZEND_ASSERT(opline->result_type == IS_UNUSED); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_assign_op(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, uint32_t op1_def_info, zend_ssa_range *op1_range, uint32_t op2_info, zend_ssa_range *op2_range, int may_overflow, int may_throw) +{ + zend_jit_addr op1_addr, op2_addr; + + ZEND_ASSERT(opline->op1_type == IS_CV && opline->result_type == IS_UNUSED); + ZEND_ASSERT(!(op1_info & MAY_BE_UNDEF) && !(op2_info & MAY_BE_UNDEF)); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_is_constant_cmp_long_long(const zend_op *opline, + zend_ssa_range *op1_range, + zend_jit_addr op1_addr, + zend_ssa_range *op2_range, + zend_jit_addr op2_addr, + zend_bool *result) +{ + zend_long op1_min; + zend_long op1_max; + zend_long op2_min; + zend_long op2_max; + + if (op1_range) { + op1_min = op1_range->min; + op1_max = op1_range->max; + } else if (Z_MODE(op1_addr) == IS_CONST_ZVAL) { + ZEND_ASSERT(Z_TYPE_P(Z_ZV(op1_addr)) == IS_LONG); + op1_min = op1_max = Z_LVAL_P(Z_ZV(op1_addr)); + } else { + return 0; + } + + if (op2_range) { + op2_min = op2_range->min; + op2_max = op2_range->max; + } else if (Z_MODE(op2_addr) == IS_CONST_ZVAL) { + ZEND_ASSERT(Z_TYPE_P(Z_ZV(op2_addr)) == IS_LONG); + op2_min = op2_max = Z_LVAL_P(Z_ZV(op2_addr)); + } else { + return 0; + } + + switch (opline->opcode) { + case ZEND_IS_EQUAL: + case ZEND_IS_IDENTICAL: + case ZEND_CASE: + case ZEND_CASE_STRICT: + if (op1_min == op1_max && op2_min == op2_max && op1_min == op2_min) { + *result = 1; + return 1; + } else if (op1_max < op2_min || op1_min > op2_max) { + *result = 0; + return 1; + } + return 0; + case ZEND_IS_NOT_EQUAL: + case ZEND_IS_NOT_IDENTICAL: + if (op1_min == op1_max && op2_min == op2_max && op1_min == op2_min) { + *result = 0; + return 1; + } else if (op1_max < op2_min || op1_min > op2_max) { + *result = 1; + return 1; + } + return 0; + case ZEND_IS_SMALLER: + if (op1_max 
< op2_min) { + *result = 1; + return 1; + } else if (op1_min >= op2_max) { + *result = 0; + return 1; + } + return 0; + case ZEND_IS_SMALLER_OR_EQUAL: + if (op1_max <= op2_min) { + *result = 1; + return 1; + } else if (op1_min > op2_max) { + *result = 0; + return 1; + } + return 0; + default: + ZEND_UNREACHABLE(); + } + return 0; +} + +static int zend_jit_cmp_long_long(dasm_State **Dst, + const zend_op *opline, + zend_ssa_range *op1_range, + zend_jit_addr op1_addr, + zend_ssa_range *op2_range, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + zend_uchar smart_branch_opcode, + uint32_t target_label, + uint32_t target_label2, + const void *exit_addr, + zend_bool skip_comparison) +{ + zend_bool swap = 0; + zend_bool result; + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_cmp_double_common(dasm_State **Dst, const zend_op *opline, zend_jit_addr res_addr, zend_bool swap, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + | brk #0 // TODO + + return 1; +} + +static int zend_jit_cmp_long_double(dasm_State **Dst, const zend_op *opline, zend_jit_addr op1_addr, zend_jit_addr op2_addr, zend_jit_addr res_addr, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + zend_reg tmp_reg = ZREG_XMM0; + + | brk #0 // TODO + + return zend_jit_cmp_double_common(Dst, opline, res_addr, 0, smart_branch_opcode, target_label, target_label2, exit_addr); +} + +static int zend_jit_cmp_double_long(dasm_State **Dst, const zend_op *opline, zend_jit_addr op1_addr, zend_jit_addr op2_addr, zend_jit_addr res_addr, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + zend_reg tmp_reg = ZREG_XMM0; + + | brk #0 // TODO + + return zend_jit_cmp_double_common(Dst, opline, res_addr, /* swap */ 1, smart_branch_opcode, target_label, target_label2, exit_addr); +} + +static int zend_jit_cmp_double_double(dasm_State **Dst, const zend_op *opline, zend_jit_addr op1_addr, zend_jit_addr op2_addr, zend_jit_addr res_addr, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + zend_bool swap = 0; + + | brk #0 // TODO + + return zend_jit_cmp_double_common(Dst, opline, res_addr, swap, smart_branch_opcode, target_label, target_label2, exit_addr); +} + +static int zend_jit_cmp_slow(dasm_State **Dst, const zend_op *opline, zend_jit_addr res_addr, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + | brk #0 // TODO + + return 1; +} + +static int zend_jit_cmp(dasm_State **Dst, + const zend_op *opline, + uint32_t op1_info, + zend_ssa_range *op1_range, + zend_jit_addr op1_addr, + uint32_t op2_info, + zend_ssa_range *op2_range, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + int may_throw, + zend_uchar smart_branch_opcode, + uint32_t target_label, + uint32_t target_label2, + const void *exit_addr, + zend_bool skip_comparison) +{ + zend_bool same_ops = (opline->op1_type == opline->op2_type) && (opline->op1.var == opline->op2.var); + zend_bool has_slow; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_identical(dasm_State **Dst, + const zend_op *opline, + uint32_t op1_info, + zend_ssa_range *op1_range, + zend_jit_addr op1_addr, + uint32_t op2_info, + zend_ssa_range *op2_range, + zend_jit_addr op2_addr, + zend_jit_addr res_addr, + int may_throw, + zend_uchar smart_branch_opcode, + uint32_t target_label, + uint32_t target_label2, + const void *exit_addr, + 
zend_bool skip_comparison) +{ + uint32_t identical_label = (uint32_t)-1; + uint32_t not_identical_label = (uint32_t)-1; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_bool_jmpznz(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, zend_jit_addr res_addr, uint32_t target_label, uint32_t target_label2, int may_throw, zend_uchar branch_opcode, const void *exit_addr) +{ + uint32_t true_label = -1; + uint32_t false_label = -1; + zend_bool set_bool = 0; + zend_bool set_bool_not = 0; + zend_bool set_delayed = 0; + zend_bool jmp_done = 0; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_qm_assign(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, zend_jit_addr op1_def_addr, uint32_t res_use_info, uint32_t res_info, zend_jit_addr res_addr) +{ + if (op1_addr != op1_def_addr) { + if (!zend_jit_update_regs(Dst, opline->op1.var, op1_addr, op1_def_addr, op1_info)) { + return 0; + } + if (Z_MODE(op1_def_addr) == IS_REG && Z_MODE(op1_addr) != IS_REG) { + op1_addr = op1_def_addr; + } + } + + if (!zend_jit_simple_assign(Dst, opline, res_addr, res_use_info, res_info, opline->op1_type, op1_addr, op1_info, 0, 0, 0)) { + return 0; + } + if (!zend_jit_store_var_if_necessary(Dst, opline->result.var, res_addr, res_info)) { + return 0; + } + return 1; +} + +static int zend_jit_assign(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_use_addr, uint32_t op1_def_info, zend_jit_addr op1_addr, uint32_t op2_info, zend_jit_addr op2_addr, zend_jit_addr op2_def_addr, uint32_t res_info, zend_jit_addr res_addr, int may_throw) +{ + ZEND_ASSERT(opline->op1_type == IS_CV); + + if (op2_addr != op2_def_addr) { + if (!zend_jit_update_regs(Dst, opline->op2.var, op2_addr, op2_def_addr, op2_info)) { + return 0; + } + if (Z_MODE(op2_def_addr) == IS_REG && Z_MODE(op2_addr) != IS_REG) { + op2_addr = op2_def_addr; + } + } + + if (Z_MODE(op1_addr) != IS_REG + && Z_MODE(op1_use_addr) == IS_REG + && !Z_LOAD(op1_use_addr) + && !Z_STORE(op1_use_addr)) { + /* Force type update */ + op1_info |= MAY_BE_UNDEF; + } + if (!zend_jit_assign_to_variable(Dst, opline, op1_use_addr, op1_addr, op1_info, op1_def_info, opline->op2_type, op2_addr, op2_info, res_addr, + may_throw)) { + return 0; + } + if (!zend_jit_store_var_if_necessary_ex(Dst, opline->op1.var, op1_addr, op1_def_info, op1_use_addr, op1_info)) { + return 0; + } + if (opline->result_type != IS_UNUSED) { + if (!zend_jit_store_var_if_necessary(Dst, opline->result.var, res_addr, res_info)) { + return 0; + } + } + + return 1; +} + +/* copy of hidden zend_closure */ +typedef struct _zend_closure { + zend_object std; + zend_function func; + zval this_ptr; + zend_class_entry *called_scope; + zif_handler orig_internal_handler; +} zend_closure; + +static int zend_jit_stack_check(dasm_State **Dst, const zend_op *opline, uint32_t used_stack) +{ + int32_t exit_point = zend_jit_trace_get_exit_point(opline, ZEND_JIT_EXIT_TO_VM); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_push_call_frame(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, zend_function *func, zend_bool is_closure, zend_bool use_this, zend_bool stack_check) +{ + uint32_t used_stack; + + // TMP1 -> zend_function + if (func) { + used_stack = zend_vm_calc_used_stack(opline->extended_value, func); + } else { + used_stack = (ZEND_CALL_FRAME_SLOT + opline->extended_value) * sizeof(zval); + + | 
// if (EXPECTED(ZEND_USER_CODE(func->type))) { + if (!is_closure) { + | LOAD_32BIT_VAL FCARG1w, used_stack + | // Check whether TMP1 is an internal function. + | ldrb TMP2w, [TMP1, #offsetof(zend_function, type)] + | tst TMP2w, #1 + | bne >1 + } else { + | brk #0 // TODO: test + | LOAD_32BIT_VAL FCARG1w, used_stack + } + | // used_stack += (func->op_array.last_var + func->op_array.T - MIN(func->op_array.num_args, num_args)) * sizeof(zval); + | LOAD_32BIT_VAL TMP2w, opline->extended_value + if (!is_closure) { + | ldr TMP3w, [TMP1, #offsetof(zend_function, op_array.num_args)] + | cmp TMP2w, TMP3w + | csel TMP2w, TMP2w, TMP3w, le + | ldr TMP3w, [TMP1, #offsetof(zend_function, op_array.last_var)] + | sub TMP2w, TMP2w, TMP3w + | ldr TMP3w, [TMP1, #offsetof(zend_function, op_array.T)] + | sub TMP2w, TMP2w, TMP3w + } else { + | brk #0 // TODO + } + | lsl TMP2w, TMP2w, #5 + | sxtw TMP2, TMP2w + | sub FCARG1x, FCARG1x, TMP2 + |1: + } + // FCARG1x -> used_stack + + zend_jit_start_reuse_ip(); + + | // if (UNEXPECTED(used_stack > (size_t)(((char*)EG(vm_stack_end)) - (char*)call))) { + | MEM_LOAD_ZTS ldr, RX, executor_globals, vm_stack_top, TMP2 + + if (stack_check) { + | // Check Stack Overflow + | MEM_LOAD_ZTS ldr, TMP2, executor_globals, vm_stack_end, TMP3 + | sub TMP2, TMP2, RX + if (func) { + || if (used_stack <= MAX_IMM12) { + | cmp TMP2, #used_stack + || } else { + | LOAD_32BIT_VAL TMP3, used_stack + | cmp TMP2, TMP3 + || } + } else { + | cmp TMP2, FCARG1x + } + + if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE) { + int32_t exit_point = zend_jit_trace_get_exit_point(opline, ZEND_JIT_EXIT_TO_VM); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + + | blt >1 + |.cold_code + |1: + | brk #0 // TODO: test. Cold. 
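For reference, the value that the stack check above compares against EG(vm_stack_end) - call is the quantity the engine computes in zend_vm_calc_used_stack() (which the func != NULL path calls directly). A rough C sketch, not part of the patch, of what the csel/sub sequence encodes when the callee is only known at run time; MIN(), ZEND_CALL_FRAME_SLOT and ZEND_USER_CODE() are the usual Zend macros:

/* Sketch only: mirrors zend_vm_calc_used_stack(); assumes the Zend headers. */
static uint32_t jit_used_stack(const zend_function *func, uint32_t num_args)
{
	uint32_t slots = ZEND_CALL_FRAME_SLOT + num_args;

	if (EXPECTED(ZEND_USER_CODE(func->type))) {
		/* CVs and TMPs also live on the VM stack; the declared arguments
		 * that were actually passed are counted twice, so subtract them */
		slots += func->op_array.last_var + func->op_array.T
			- MIN(func->op_array.num_args, num_args);
	}
	return slots * sizeof(zval);
}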
+ | EXT_JMP exit_addr, TMP3 + |.code + } else { + | blt >1 + | // EG(vm_stack_top) = (zval*)((char*)call + used_stack); + |.cold_code + |1: + if (func) { + | brk #0 // TODO + } + if (opline->opcode == ZEND_INIT_FCALL && func && func->type == ZEND_INTERNAL_FUNCTION) { + | brk #0 // TODO + } else { + if (!is_closure) { + | brk #0 // TODO + } else { + | brk #0 // TODO + } + | brk #0 // TODO + } + | brk #0 // TODO + |.code + } + } + + if (func) { + || if (used_stack <= MAX_IMM12) { + | MEM_LOAD_OP_STORE_ZTS add, ldr, str, #used_stack, executor_globals, vm_stack_top, TMP2, TMP3 + || } else { + | LOAD_32BIT_VAL TMP4, used_stack + | MEM_LOAD_OP_STORE_ZTS add, ldr, str, TMP4, executor_globals, vm_stack_top, TMP2, TMP3 + || } + } else { + | MEM_LOAD_OP_STORE_ZTS add, ldr, str, FCARG1x, executor_globals, vm_stack_top, TMP2, TMP3 + } + | // zend_vm_init_call_frame(call, call_info, func, num_args, called_scope, object); + if (JIT_G(trigger) != ZEND_JIT_ON_HOT_TRACE || opline->opcode != ZEND_INIT_METHOD_CALL) { + | // ZEND_SET_CALL_INFO(call, 0, call_info); + | LOAD_32BIT_VAL TMP2w, (IS_UNDEF | ZEND_CALL_NESTED_FUNCTION) + | str TMP2w, EX:RX->This.u1.type_info + } + if (opline->opcode == ZEND_INIT_FCALL && func && func->type == ZEND_INTERNAL_FUNCTION) { + | // call->func = func; + |1: + | ADDR_STORE EX:RX->func, func, TMP2 + } else { + if (!is_closure) { + | // call->func = func; + if (func + && op_array == &func->op_array + && (func->op_array.fn_flags & ZEND_ACC_IMMUTABLE) + && (sizeof(void*) != 8 || IS_SIGNED_32BIT(func))) { + | brk #0 // TODO + } else { + | str TMP1, EX:RX->func + } + } else { + | // call->func = &closure->func; + | brk #0 // TODO + } + |1: + } + if (opline->opcode == ZEND_INIT_METHOD_CALL) { + | // Z_PTR(call->This) = obj; + | brk #0 // TODO + } else if (!is_closure) { + | // Z_CE(call->This) = called_scope; + | str xzr, EX:RX->This.value.ptr + } else { + | brk #0 // TODO + } + | // ZEND_CALL_NUM_ARGS(call) = num_args; + | LOAD_32BIT_VAL TMP2w, opline->extended_value + | str TMP2w, EX:RX->This.u2.num_args + + return 1; +} + +static int zend_jit_needs_call_chain(zend_call_info *call_info, uint32_t b, const zend_op_array *op_array, zend_ssa *ssa, const zend_ssa_op *ssa_op, const zend_op *opline, zend_jit_trace_rec *trace) +{ + int skip; + + if (trace) { + zend_jit_trace_rec *p = trace; + + ssa_op++; + while (1) { + if (p->op == ZEND_JIT_TRACE_VM) { + switch (p->opline->opcode) { + case ZEND_SEND_ARRAY: + case ZEND_SEND_USER: + case ZEND_SEND_UNPACK: + case ZEND_INIT_FCALL: + case ZEND_INIT_METHOD_CALL: + case ZEND_INIT_STATIC_METHOD_CALL: + case ZEND_INIT_FCALL_BY_NAME: + case ZEND_INIT_NS_FCALL_BY_NAME: + case ZEND_INIT_DYNAMIC_CALL: + case ZEND_NEW: + case ZEND_INIT_USER_CALL: + case ZEND_FAST_CALL: + case ZEND_JMP: + case ZEND_JMPZNZ: + case ZEND_JMPZ: + case ZEND_JMPNZ: + case ZEND_JMPZ_EX: + case ZEND_JMPNZ_EX: + case ZEND_FE_RESET_R: + case ZEND_FE_RESET_RW: + case ZEND_JMP_SET: + case ZEND_COALESCE: + case ZEND_JMP_NULL: + case ZEND_ASSERT_CHECK: + case ZEND_CATCH: + case ZEND_DECLARE_ANON_CLASS: + case ZEND_FE_FETCH_R: + case ZEND_FE_FETCH_RW: + return 1; + case ZEND_DO_ICALL: + case ZEND_DO_UCALL: + case ZEND_DO_FCALL_BY_NAME: + case ZEND_DO_FCALL: + return 0; + case ZEND_SEND_VAL: + case ZEND_SEND_VAR: + case ZEND_SEND_VAL_EX: + case ZEND_SEND_VAR_EX: + case ZEND_SEND_FUNC_ARG: + case ZEND_SEND_REF: + case ZEND_SEND_VAR_NO_REF: + case ZEND_SEND_VAR_NO_REF_EX: + /* skip */ + break; + default: + if (zend_may_throw(opline, ssa_op, op_array, ssa)) { + return 1; + } + } + ssa_op 
+= zend_jit_trace_op_len(opline); + } else if (p->op == ZEND_JIT_TRACE_ENTER || + p->op == ZEND_JIT_TRACE_BACK || + p->op == ZEND_JIT_TRACE_END) { + return 1; + } + p++; + } + } + + if (!call_info) { + const zend_op *end = op_array->opcodes + op_array->last; + + opline++; + ssa_op++; + skip = 1; + while (opline != end) { + if (!skip) { + if (zend_may_throw(opline, ssa_op, op_array, ssa)) { + return 1; + } + } + switch (opline->opcode) { + case ZEND_SEND_VAL: + case ZEND_SEND_VAR: + case ZEND_SEND_VAL_EX: + case ZEND_SEND_VAR_EX: + case ZEND_SEND_FUNC_ARG: + case ZEND_SEND_REF: + case ZEND_SEND_VAR_NO_REF: + case ZEND_SEND_VAR_NO_REF_EX: + skip = 0; + break; + case ZEND_SEND_ARRAY: + case ZEND_SEND_USER: + case ZEND_SEND_UNPACK: + case ZEND_INIT_FCALL: + case ZEND_INIT_METHOD_CALL: + case ZEND_INIT_STATIC_METHOD_CALL: + case ZEND_INIT_FCALL_BY_NAME: + case ZEND_INIT_NS_FCALL_BY_NAME: + case ZEND_INIT_DYNAMIC_CALL: + case ZEND_NEW: + case ZEND_INIT_USER_CALL: + case ZEND_FAST_CALL: + case ZEND_JMP: + case ZEND_JMPZNZ: + case ZEND_JMPZ: + case ZEND_JMPNZ: + case ZEND_JMPZ_EX: + case ZEND_JMPNZ_EX: + case ZEND_FE_RESET_R: + case ZEND_FE_RESET_RW: + case ZEND_JMP_SET: + case ZEND_COALESCE: + case ZEND_JMP_NULL: + case ZEND_ASSERT_CHECK: + case ZEND_CATCH: + case ZEND_DECLARE_ANON_CLASS: + case ZEND_FE_FETCH_R: + case ZEND_FE_FETCH_RW: + return 1; + case ZEND_DO_ICALL: + case ZEND_DO_UCALL: + case ZEND_DO_FCALL_BY_NAME: + case ZEND_DO_FCALL: + end = opline; + if (end - op_array->opcodes >= ssa->cfg.blocks[b].start + ssa->cfg.blocks[b].len) { + /* INIT_FCALL and DO_FCALL in different BasicBlocks */ + return 1; + } + return 0; + } + opline++; + ssa_op++; + } + + return 1; + } else { + const zend_op *end = call_info->caller_call_opline; + + if (end - op_array->opcodes >= ssa->cfg.blocks[b].start + ssa->cfg.blocks[b].len) { + /* INIT_FCALL and DO_FCALL in different BasicBlocks */ + return 1; + } + + opline++; + ssa_op++; + skip = 1; + while (opline != end) { + if (skip) { + switch (opline->opcode) { + case ZEND_SEND_VAL: + case ZEND_SEND_VAR: + case ZEND_SEND_VAL_EX: + case ZEND_SEND_VAR_EX: + case ZEND_SEND_FUNC_ARG: + case ZEND_SEND_REF: + case ZEND_SEND_VAR_NO_REF: + case ZEND_SEND_VAR_NO_REF_EX: + skip = 0; + break; + case ZEND_SEND_ARRAY: + case ZEND_SEND_USER: + case ZEND_SEND_UNPACK: + return 1; + } + } else { + if (zend_may_throw(opline, ssa_op, op_array, ssa)) { + return 1; + } + } + opline++; + ssa_op++; + } + + return 0; + } +} + +static int zend_jit_init_fcall_guard(dasm_State **Dst, uint32_t level, const zend_function *func, const zend_op *to_opline) +{ + int32_t exit_point; + const void *exit_addr; + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_init_fcall(dasm_State **Dst, const zend_op *opline, uint32_t b, const zend_op_array *op_array, zend_ssa *ssa, const zend_ssa_op *ssa_op, int call_level, zend_jit_trace_rec *trace, zend_bool stack_check) +{ + zend_func_info *info = ZEND_FUNC_INFO(op_array); + zend_call_info *call_info = NULL; + zend_function *func = NULL; + + if (delayed_call_chain) { + | brk #0 // TODO + } + + if (info) { + call_info = info->callee_info; + while (call_info && call_info->caller_init_opline != opline) { + call_info = call_info->next_callee; + } + if (call_info && call_info->callee_func) { + func = call_info->callee_func; + } + } + + if (!func + && trace + && trace->op == ZEND_JIT_TRACE_INIT_CALL) { + | brk #0 // TODO: Tracing mode. ASLR? 
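The "else" branch a few lines below open-codes the run-time cache lookup for INIT_FCALL. In C terms its fast path corresponds roughly to the following sketch (not part of the patch; CACHED_PTR()/CACHE_PTR() are the engine's per-opline cache macros and zend_jit_find_func_helper() comes from jit/zend_jit_helpers.c):

/* Sketch only: what the ldr/ldr/cbz sequence below stands for. */
static zend_function *init_fcall_cached(zend_execute_data *execute_data, const zend_op *opline)
{
	/* fast path: the cache slot already holds the resolved function */
	zend_function *fbc = CACHED_PTR(opline->result.num);

	if (UNEXPECTED(fbc == NULL)) {
		/* slow path: look the name up and cache the result */
		zval *fname = RT_CONSTANT(opline, opline->op2);

		fbc = zend_jit_find_func_helper(Z_STR_P(fname));
		if (UNEXPECTED(fbc == NULL)) {
			return NULL; /* undefined function; the generated code raises the error */
		}
		CACHE_PTR(opline->result.num, fbc);
	}
	return fbc;
}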
+ } + + if (opline->opcode == ZEND_INIT_FCALL + && func + && func->type == ZEND_INTERNAL_FUNCTION) { + /* load constant address later */ + } else if (func && op_array == &func->op_array) { + /* recursive call */ + | brk #0 // TODO + } else { + | // if (CACHED_PTR(opline->result.num)) + | ldr TMP1, EX->run_time_cache + | ldr TMP1, [TMP1, #opline->result.num] + | cbz TMP1, >1 + |.cold_code + |1: + if (opline->opcode == ZEND_INIT_FCALL + && func + && func->type == ZEND_USER_FUNCTION + && (func->op_array.fn_flags & ZEND_ACC_IMMUTABLE)) { + | brk #0 // TODO + } else { + zval *zv = RT_CONSTANT(opline, opline->op2); + + if (opline->opcode == ZEND_INIT_FCALL) { + | LOAD_ADDR FCARG1x, Z_STR_P(zv); + | EXT_CALL zend_jit_find_func_helper, TMP1 + } else if (opline->opcode == ZEND_INIT_FCALL_BY_NAME) { + | brk #0 // TODO + } else if (opline->opcode == ZEND_INIT_NS_FCALL_BY_NAME) { + | brk #0 // TODO + } else { + ZEND_UNREACHABLE(); + } + | // CACHE_PTR(opline->result.num, fbc); + | ldr TMP2, EX->run_time_cache + | mov TMP1, RETVALx + | str TMP1, [TMP2, #opline->result.num] + if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE) { + | brk #0 // TODO. tracing mode. + } else { + | cbnz TMP1, >3 + | // SAVE_OPLINE(); + | brk #0 // TODO: invalid func address. + } + } + |.code + |3: + } + + if (!zend_jit_push_call_frame(Dst, opline, op_array, func, 0, 0, stack_check)) { + return 0; + } + + if (zend_jit_needs_call_chain(call_info, b, op_array, ssa, ssa_op, opline, trace)) { + if (!zend_jit_save_call_chain(Dst, call_level)) { + return 0; + } + } else { + delayed_call_chain = 1; + delayed_call_level = call_level; + } + + return 1; +} + +static int zend_jit_init_method_call(dasm_State **Dst, + const zend_op *opline, + uint32_t b, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + int call_level, + uint32_t op1_info, + zend_jit_addr op1_addr, + zend_class_entry *ce, + zend_bool ce_is_instanceof, + zend_bool use_this, + zend_class_entry *trace_ce, + zend_jit_trace_rec *trace, + zend_bool stack_check, + zend_bool polymorphic_side_trace) +{ + zend_func_info *info = ZEND_FUNC_INFO(op_array); + zend_call_info *call_info = NULL; + zend_function *func = NULL; + zval *function_name; + + ZEND_ASSERT(opline->op2_type == IS_CONST); + ZEND_ASSERT(op1_info & MAY_BE_OBJECT); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_init_closure_call(dasm_State **Dst, + const zend_op *opline, + uint32_t b, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + int call_level, + zend_jit_trace_rec *trace, + zend_bool stack_check) +{ + zend_function *func = NULL; + zend_jit_addr op2_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->op2.var); + + | brk #0 // TODO + return 1; +} + +static uint32_t skip_valid_arguments(const zend_op_array *op_array, zend_ssa *ssa, const zend_call_info *call_info) +{ + uint32_t num_args = 0; + zend_function *func = call_info->callee_func; + + while (num_args < call_info->num_args) { + zend_arg_info *arg_info = func->op_array.arg_info + num_args; + + if (ZEND_TYPE_IS_SET(arg_info->type)) { + if (ZEND_TYPE_IS_ONLY_MASK(arg_info->type)) { + zend_op *opline = call_info->arg_info[num_args].opline; + zend_ssa_op *ssa_op = &ssa->ops[opline - op_array->opcodes]; + uint32_t type_mask = ZEND_TYPE_PURE_MASK(arg_info->type); + if ((OP1_INFO() & (MAY_BE_ANY|MAY_BE_UNDEF)) & ~type_mask) { + break; + } + } else { + break; + } + } + num_args++; + } + return num_args; +} + +static int zend_jit_do_fcall(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, 
zend_ssa *ssa, int call_level, unsigned int next_block, zend_jit_trace_rec *trace) +{ + zend_func_info *info = ZEND_FUNC_INFO(op_array); + zend_call_info *call_info = NULL; + const zend_function *func = NULL; + uint32_t i; + zend_jit_addr res_addr; + uint32_t call_num_args = 0; + zend_bool unknown_num_args = 0; + const void *exit_addr = NULL; + const zend_op *prev_opline; + + if (RETURN_VALUE_USED(opline)) { + res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + } else { + /* CPU stack allocated temporary zval */ + res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_RSP, TMP_ZVAL_OFFSET); + } + + prev_opline = opline - 1; + while (prev_opline->opcode == ZEND_EXT_FCALL_BEGIN || prev_opline->opcode == ZEND_TICKS) { + | brk #0 // TODO + prev_opline--; + } + if (prev_opline->opcode == ZEND_SEND_UNPACK || prev_opline->opcode == ZEND_SEND_ARRAY || + prev_opline->opcode == ZEND_CHECK_UNDEF_ARGS) { + | brk #0 // TODO + unknown_num_args = 1; + } + + if (info) { + call_info = info->callee_info; + while (call_info && call_info->caller_call_opline != opline) { + call_info = call_info->next_callee; + } + if (call_info && call_info->callee_func) { + func = call_info->callee_func; + } + } + if (!func) { + /* resolve function at run time */ + } else if (func->type == ZEND_USER_FUNCTION) { + | brk #0 // TODO + ZEND_ASSERT(opline->opcode != ZEND_DO_ICALL); + call_num_args = call_info->num_args; + } else if (func->type == ZEND_INTERNAL_FUNCTION) { + ZEND_ASSERT(opline->opcode != ZEND_DO_UCALL); + call_num_args = call_info->num_args; + } else { + ZEND_UNREACHABLE(); + } + + if (trace && !func) { + | brk #0 // TODO + } + + bool may_have_extra_named_params = + opline->extended_value == ZEND_FCALL_MAY_HAVE_EXTRA_NAMED_PARAMS && + (!func || func->common.fn_flags & ZEND_ACC_VARIADIC); + + if (!reuse_ip) { + zend_jit_start_reuse_ip(); + | // call = EX(call); + | ldr RX, EX->call + } + zend_jit_stop_reuse_ip(); + + | // fbc = call->func; + | // mov r2, EX:RX->func ??? + | // SAVE_OPLINE(); + | SET_EX_OPLINE opline, TMP1 + + if (opline->opcode == ZEND_DO_FCALL) { + | brk #0 // TODO + } + + if (!delayed_call_chain) { + if (call_level == 1) { + | str xzr, EX->call + } else { + | //EX(call) = call->prev_execute_data; + | brk #0 // TODO: test + } + } + delayed_call_chain = 0; + + | //call->prev_execute_data = execute_data; + | str EX, EX:RX->prev_execute_data + + if (!func) { + | ldr TMP1, EX:RX->func + } + + if (opline->opcode == ZEND_DO_FCALL) { + | brk #0 // TODO + } + + if (!func + && opline->opcode != ZEND_DO_UCALL + && opline->opcode != ZEND_DO_ICALL) { + | brk #0 // TODO + } + + if ((!func || func->type == ZEND_USER_FUNCTION) + && opline->opcode != ZEND_DO_ICALL) { + | // EX(call) = NULL; + | str xzr, EX:RX->call + + if (RETURN_VALUE_USED(opline)) { + | // EX(return_value) = EX_VAR(opline->result.var); + | LOAD_ZVAL_ADDR TMP3, res_addr + | str TMP3, EX:RX->return_value + } else { + | // EX(return_value) = 0; + | str xzr, EX:RX->return_value + } + + //EX_LOAD_RUN_TIME_CACHE(op_array); + if (!func || func->op_array.cache_size) { + if (func && op_array == &func->op_array) { + /* recursive call */ + if (trace || func->op_array.cache_size > sizeof(void*)) { + | brk #0 // TODO + } + } else { + if (func) { + | brk #0 // TODO + } + | ldr TMP2, [TMP1, #offsetof(zend_op_array, run_time_cache__ptr)] +// Always defined as ZEND_MAP_PTR_KIND_PTR_OR_OFFSET. See Zend/zend_map_ptr.h. 
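For the PTR_OR_OFFSET kind handled in the #if block below, the tst/add/ldr sequence is the map_ptr dereference. A rough C equivalent follows (sketch only, not part of the patch; the exact offset encoding is defined by the ZEND_MAP_PTR_* macros in Zend/zend_map_ptr.h):

/* Sketch only: resolve op_array->run_time_cache__ptr through the map_ptr indirection. */
static void *jit_map_ptr_get(void *run_time_cache__ptr)
{
	uintptr_t p = (uintptr_t)run_time_cache__ptr;

	if (p & 1L) {
		/* odd value: an offset relative to CG(map_ptr_base) rather than a real pointer */
		p += (uintptr_t)CG(map_ptr_base);
	}
	/* either way, the slot itself stores the actual run-time cache pointer */
	return *(void**)p;
}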
+#if ZEND_MAP_PTR_KIND == ZEND_MAP_PTR_KIND_PTR + | ldr TMP2, [TMP2] +#elif ZEND_MAP_PTR_KIND == ZEND_MAP_PTR_KIND_PTR_OR_OFFSET + if (func && !(func->op_array.fn_flags & ZEND_ACC_CLOSURE)) { + | brk #0 // TODO + } else { + | tst TMP2, #1 + | beq >1 + | MEM_LOAD_OP_ZTS add, ldr, TMP2, compiler_globals, map_ptr_base, TMP3, TMP4 + |1: + | ldr TMP2, [TMP2] + } +#else +# error "Unknown ZEND_MAP_PTR_KIND" +#endif + | str TMP2, EX:RX->run_time_cache + } + } + + | // EG(current_execute_data) = execute_data; + | MEM_STORE_ZTS str, RX, executor_globals, current_execute_data, TMP2 + | mov FP, RX + + | // opline = op_array->opcodes; + if (func && !unknown_num_args) { + | brk #0 // TODO + } else { + | // opline = op_array->opcodes + if (func && zend_accel_in_shm(func->op_array.opcodes)) { + | brk #0 // TODO + } else if (GCC_GLOBAL_REGS) { + | ldr IP, [TMP1, #offsetof(zend_op_array, opcodes)] + } else { + | ldr FCARG1x, [TMP1, #offsetof(zend_op_array, opcodes)] + | str FCARG1x, EX->opline + } + if (func) { + | brk #0 // TODO + } else { + | // first_extra_arg = op_array->num_args; + | ldr TMP3w, [TMP1, #offsetof(zend_op_array, num_args)] + | // num_args = EX_NUM_ARGS(); + | ldr TMP2w, [FP, #offsetof(zend_execute_data, This.u2.num_args)] + | // if (UNEXPECTED(num_args > first_extra_arg)) + | cmp TMP2w, TMP3w + } + | bgt >1 + |.cold_code + |1: + | brk #0 // TDOO: test + |.code + if (!func || (func->op_array.fn_flags & ZEND_ACC_HAS_TYPE_HINTS) == 0) { + if (!func) { + | // if (EXPECTED((op_array->fn_flags & ZEND_ACC_HAS_TYPE_HINTS) == 0)) + | ldr TMP4w, [TMP1, #offsetof(zend_op_array, fn_flags)] + | tst TMP4w, #ZEND_ACC_HAS_TYPE_HINTS + | bne >1 + } + | // opline += num_args; + || ZEND_ASSERT(sizeof(zend_op) == 32); + | mov TMP3w, TMP2w + | lsl TMP3, TMP3, #5 + | ADD_IP TMP3, TMP4 + } + |1: + | // if (EXPECTED((int)num_args < op_array->last_var)) { + if (func) { + | brk #0 // TODO + } else { + | ldr TMP3w, [TMP1, #offsetof(zend_op_array, last_var)] + } + | sub TMP3w, TMP3w, TMP2w + | ble >3 + | brk #0 // TODO: test + |3: + } + + if (ZEND_OBSERVER_ENABLED) { + | brk #0 // TODO: test + | SAVE_IP + | mov FCARG1x, FP + | EXT_CALL zend_observer_fcall_begin, TMP1 + } + + if (trace) { + if (!func && (opline->opcode != ZEND_DO_UCALL)) { + | brk #0 // TODO + } + } else { +#ifdef CONTEXT_THREADED_JIT + | brk #0 // TODO: CONTEXT_THREADED_JIT is always undefined. 
+#else + if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) { + | ADD_HYBRID_SPAD + | JMP_IP + } else if (GCC_GLOBAL_REGS) { + | add sp, sp, SPAD // stack alignment + | JMP_IP + } else { + | ldp FP, RX, T2 // retore FP and IP + | ldr LR, T4 // retore LR + | add sp, sp, NR_SPAD // stack alignment + | mov RETVALx, #1 // ZEND_VM_ENTER + | ret + } + } +#endif + } + + if ((!func || func->type == ZEND_INTERNAL_FUNCTION) + && (opline->opcode != ZEND_DO_UCALL)) { + if (!func && (opline->opcode != ZEND_DO_ICALL)) { + |8: + } + if (opline->opcode == ZEND_DO_FCALL_BY_NAME) { + | brk #0 // TODO + } + + | // ZVAL_NULL(EX_VAR(opline->result.var)); + | LOAD_ZVAL_ADDR FCARG2x, res_addr + | SET_Z_TYPE_INFO FCARG2x, IS_NULL, TMP2w + + | // EG(current_execute_data) = execute_data; + | MEM_STORE_ZTS str, RX, executor_globals, current_execute_data, TMP2 + + zend_jit_reset_last_valid_opline(); + + | // fbc->internal_function.handler(call, ret); + | mov FCARG1x, RX + if (func) { + | EXT_CALL func->internal_function.handler, TMP1 + } else { + | ldr TMP2, [TMP1, #offsetof(zend_internal_function, handler)] + | blr TMP2 + } + + | // EG(current_execute_data) = execute_data; + | MEM_STORE_ZTS str, FP, executor_globals, current_execute_data, TMP1 + + | // zend_vm_stack_free_args(call); + if (func && !unknown_num_args) { + for (i = 0; i < call_num_args; i++ ) { + uint32_t offset = EX_NUM_TO_VAR(i); + zend_jit_addr arg_addr = ZEND_ADDR_MEM_ZVAL(ZREG_RX, offset); + | ZVAL_PTR_DTOR arg_addr, (MAY_BE_ANY|MAY_BE_RC1|MAY_BE_RCN), 0, 1, opline, TMP2 + } + } else { + | mov FCARG1x, RX + | EXT_CALL zend_jit_vm_stack_free_args_helper, TMP1 + } + if (may_have_extra_named_params) { + | brk #0 // TODO + } + + |8: + if (opline->opcode == ZEND_DO_FCALL) { + // TODO: optimize ??? + | brk #0 // TODO + } + + if (JIT_G(trigger) != ZEND_JIT_ON_HOT_TRACE || + !JIT_G(current_frame) || + !JIT_G(current_frame)->call || + !TRACE_FRAME_IS_NESTED(JIT_G(current_frame)->call) || + prev_opline->opcode == ZEND_SEND_UNPACK || + prev_opline->opcode == ZEND_SEND_ARRAY || + prev_opline->opcode == ZEND_CHECK_UNDEF_ARGS) { + + | // zend_vm_stack_free_call_frame(call); + | ldrb TMP1w, [RX, #(offsetof(zend_execute_data, This.u1.type_info) + 2)] + | tst TMP1w, #((ZEND_CALL_ALLOCATED >> 16) & 0xff) + | bne >1 // TODO: test. In current case, don't jump to cold-code. + |.cold_code + |1: + | brk #0 // TODO + | mov FCARG1x, RX + | EXT_CALL zend_jit_free_call_frame, TMP1 + | b >1 + |.code + } + | MEM_STORE_ZTS str, RX, executor_globals, vm_stack_top, TMP1 + |1: + + if (!RETURN_VALUE_USED(opline)) { + zend_class_entry *ce; + zend_bool ce_is_instanceof; + uint32_t func_info = call_info ? + zend_get_func_info(call_info, ssa, &ce, &ce_is_instanceof) : + (MAY_BE_ANY|MAY_BE_REF|MAY_BE_RC1|MAY_BE_RCN); + + /* If an exception is thrown, the return_value may stay at the + * original value of null. */ + func_info |= MAY_BE_NULL; + + if (func_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE|MAY_BE_REF)) { + | ZVAL_PTR_DTOR res_addr, func_info, 1, 1, opline, TMP2 + } + } + + | // if (UNEXPECTED(EG(exception) != NULL)) { + | MEM_LOAD_CMP_ZTS ldr, xzr, executor_globals, exception, TMP1, TMP2 + | bne ->icall_throw_handler + + // TODO: Can we avoid checking for interrupts after each call ??? 
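zend_jit_check_timeout() is still a stub in this port (see the TODO earlier in the file), but the check it has to emit is the interpreter's interrupt test, roughly as sketched below (not part of the patch; EG(vm_interrupt), EG(timed_out) and zend_interrupt_function are as in the PHP 8.x executor, and exact signatures vary by version):

/* Sketch only: what ZEND_VM_INTERRUPT_CHECK() amounts to after a call. */
static void jit_interrupt_check(zend_execute_data *execute_data)
{
	if (UNEXPECTED(EG(vm_interrupt))) {
		EG(vm_interrupt) = 0;
		if (EG(timed_out)) {
			zend_timeout(); /* "Maximum execution time ... exceeded" */
		} else if (zend_interrupt_function) {
			zend_interrupt_function(execute_data);
		}
	}
}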
+ if (trace && last_valid_opline != opline) { + int32_t exit_point = zend_jit_trace_get_exit_point(opline + 1, ZEND_JIT_EXIT_TO_VM); + + exit_addr = zend_jit_trace_get_exit_addr(exit_point); + if (!exit_addr) { + return 0; + } + } else { + exit_addr = NULL; + } + if (!zend_jit_check_timeout(Dst, opline + 1, exit_addr)) { + return 0; + } + + if ((!trace || !func) && opline->opcode != ZEND_DO_ICALL) { + | brk #0 // TODO + } else if (trace + && trace->op == ZEND_JIT_TRACE_END + && trace->stop == ZEND_JIT_TRACE_STOP_INTERPRETER) { + | brk #0 // TODO + } + } + + if (!func) { + |9: + } + + return 1; +} + +static int zend_jit_send_val(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr) +{ + uint32_t arg_num = opline->op2.num; + zend_jit_addr arg_addr; + + ZEND_ASSERT(opline->opcode == ZEND_SEND_VAL || arg_num <= MAX_ARG_FLAG_NUM); + + if (!zend_jit_reuse_ip(Dst)) { + return 0; + } + + if (opline->opcode == ZEND_SEND_VAL_EX) { + uint32_t mask = ZEND_SEND_BY_REF << ((arg_num + 3) * 2); + + ZEND_ASSERT(arg_num <= MAX_ARG_FLAG_NUM); + + if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE + && JIT_G(current_frame) + && JIT_G(current_frame)->call + && JIT_G(current_frame)->call->func) { + if (ARG_MUST_BE_SENT_BY_REF(JIT_G(current_frame)->call->func, arg_num)) { + /* Don't generate code that always throws exception */ + return 0; + } + } else if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE) { + int32_t exit_point = zend_jit_trace_get_exit_point(opline, ZEND_JIT_EXIT_TO_VM); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + if (!exit_addr) { + return 0; + } + | brk #0 // TODO + } else { + | brk #0 // TODO + } + } + + arg_addr = ZEND_ADDR_MEM_ZVAL(ZREG_RX, opline->result.var); + + if (opline->op1_type == IS_CONST) { + zval *zv = RT_CONSTANT(opline, opline->op1); + + | ZVAL_COPY_CONST arg_addr, MAY_BE_ANY, MAY_BE_ANY, zv, ZREG_R8, ZREG_R9 + if (Z_REFCOUNTED_P(zv)) { + | brk #0 // TODO: test + } + } else { + | brk #0 // TODO: test + } + + return 1; +} + +static int zend_jit_check_undef_args(dasm_State **Dst, const zend_op *opline) +{ + | brk #0 // TODO + + return 1; +} + +static int zend_jit_send_ref(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, uint32_t op1_info, int cold) +{ + zend_jit_addr op1_addr, arg_addr, ref_addr; + + op1_addr = OP1_ADDR(); + arg_addr = ZEND_ADDR_MEM_ZVAL(ZREG_RX, opline->result.var); + + | brk #0 // TODO + + return 1; +} + +static int zend_jit_send_var(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, uint32_t op1_info, zend_jit_addr op1_addr, zend_jit_addr op1_def_addr) +{ + uint32_t arg_num = opline->op2.num; + zend_jit_addr arg_addr; + + ZEND_ASSERT((opline->opcode != ZEND_SEND_VAR_EX && + opline->opcode != ZEND_SEND_VAR_NO_REF_EX) || + arg_num <= MAX_ARG_FLAG_NUM); + + arg_addr = ZEND_ADDR_MEM_ZVAL(ZREG_RX, opline->result.var); + + if (!zend_jit_reuse_ip(Dst)) { + return 0; + } + + if (opline->opcode == ZEND_SEND_VAR_EX) { + | brk #0 // TODO + } else if (opline->opcode == ZEND_SEND_VAR_NO_REF_EX) { + | brk #0 // TODO + } else if (opline->opcode == ZEND_SEND_FUNC_ARG) { + | brk #0 // TODO + } + + if (op1_info & MAY_BE_UNDEF) { + if (op1_info & (MAY_BE_ANY|MAY_BE_REF)) { + | IF_ZVAL_TYPE op1_addr, IS_UNDEF, >1, TMP1w, TMP2 + |.cold_code + |1: + } + + | brk #0 // TODO: test + | SET_EX_OPLINE opline, TMP1 + | LOAD_32BIT_VAL FCARG1w, opline->op1.var + | EXT_CALL zend_jit_undefined_op_helper, TMP1 + | SET_ZVAL_TYPE_INFO arg_addr, IS_NULL, TMP1w, TMP2 + | cbz RETVALx, ->exception_handler + + if 
(op1_info & (MAY_BE_ANY|MAY_BE_REF)) {
+            | brk #0 // TODO: test
+            | b >7
+            |.code
+        } else {
+            | brk #0 // TODO: test
+        }
+    }
+
+    if (opline->opcode == ZEND_SEND_VAR_NO_REF) {
+        | brk #0 // TODO: test
+    } else {
+        if (op1_info & MAY_BE_REF) {
+            if (opline->op1_type == IS_CV) {
+                zend_jit_addr val_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0);
+
+                | brk #0 // TODO: test
+            } else {
+                zend_jit_addr ref_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 8);
+
+                | IF_ZVAL_TYPE op1_addr, IS_REFERENCE, >1, TMP1w, TMP2
+                |.cold_code
+                |1:
+                | brk #0 // TODO: test. cold-code. not covered currently
+                |.code
+                | ZVAL_COPY_VALUE arg_addr, MAY_BE_ANY, op1_addr, op1_info, ZREG_R8, ZREG_R9, ZREG_R10, ZREG_R11
+                |2:
+            }
+        } else {
+            if (op1_addr != op1_def_addr) {
+                | brk #0 // TODO: test
+            }
+            | ZVAL_COPY_VALUE arg_addr, MAY_BE_ANY, op1_addr, op1_info, ZREG_R8, ZREG_R9, ZREG_R10, ZREG_R11
+            if (opline->op1_type == IS_CV) {
+                | // In the x86 implementation, the type flags and the value pointer are stored in eax and r2 respectively,
+                | // and then ah (bits 8 to 15) and r2 are used inside TRY_ADDREF.
+                | // In AArch64, we use TMP1w and TMP2 accordingly.
+                | // Note that bits 8 to 15 must be extracted, i.e. (TMP1w >> 8) & 0xff.
+                | lsr TMP1w, TMP1w, #8
+                | and TMP1w, TMP1w, #0xff
+                | TRY_ADDREF op1_info, TMP1w, TMP2, TMP3
+            }
+        }
+    }
+    |7:
+
+    return 1;
+}
+
+static int zend_jit_check_func_arg(dasm_State **Dst, const zend_op *opline)
+{
+    uint32_t arg_num = opline->op2.num;
+
+    | brk #0 // TODO
+
+    return 1;
+}
+
+static int zend_jit_smart_true(dasm_State **Dst, const zend_op *opline, int jmp, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2)
+{
+    | brk #0 // TODO
+
+    return 1;
+}
+
+static int zend_jit_smart_false(dasm_State **Dst, const zend_op *opline, int jmp, zend_uchar smart_branch_opcode, uint32_t target_label)
+{
+    | brk #0 // TODO
+
+    return 1;
+}
+
+static int zend_jit_defined(dasm_State **Dst, const zend_op *opline, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr)
+{
+    uint32_t defined_label = (uint32_t)-1;
+    uint32_t undefined_label = (uint32_t)-1;
+    zval *zv = RT_CONSTANT(opline, opline->op1);
+    zend_jit_addr res_addr = 0;
+
+    | brk #0 // TODO
+
+    return 1;
+}
+
+static int zend_jit_type_check(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr)
+{
+    uint32_t mask;
+    zend_uchar type;
+    zend_jit_addr op1_addr = OP1_ADDR();
+
+    // TODO: support for is_resource() ???
+    ZEND_ASSERT(opline->extended_value != MAY_BE_RESOURCE);
+
+    | brk #0 // TODO
+
+    return 1;
+}
+
+static uint32_t zend_ssa_cv_info(const zend_op_array *op_array, zend_ssa *ssa, uint32_t var)
+{
+    uint32_t j, info;
+
+    if (ssa->vars && ssa->var_info) {
+        info = ssa->var_info[var].type;
+        for (j = op_array->last_var; j < ssa->vars_count; j++) {
+            if (ssa->vars[j].var == var) {
+                info |= ssa->var_info[j].type;
+            }
+        }
+    } else {
+        info = MAY_BE_RC1 | MAY_BE_RCN | MAY_BE_REF | MAY_BE_ANY | MAY_BE_UNDEF |
+            MAY_BE_ARRAY_KEY_ANY | MAY_BE_ARRAY_OF_ANY | MAY_BE_ARRAY_OF_REF;
+    }
+
+#ifdef ZEND_JIT_USE_RC_INFERENCE
+    /* Refcount may be increased by RETURN opcode */
+    if ((info & MAY_BE_RC1) && !(info & MAY_BE_RCN)) {
+        for (j = 0; j < ssa->cfg.blocks_count; j++) {
+            if ((ssa->cfg.blocks[j].flags & ZEND_BB_REACHABLE) &&
+                ssa->cfg.blocks[j].len > 0) {
+                const zend_op *opline = op_array->opcodes + ssa->cfg.blocks[j].start + ssa->cfg.blocks[j].len - 1;
+
+                if (opline->opcode == ZEND_RETURN) {
+                    if (opline->op1_type == IS_CV && opline->op1.var == EX_NUM_TO_VAR(var)) {
+                        info |= MAY_BE_RCN;
+                        break;
+                    }
+                }
+            }
+        }
+    }
+#endif
+
+    return info;
+}
+
+static int zend_jit_leave_frame(dasm_State **Dst)
+{
+    | // EG(current_execute_data) = EX(prev_execute_data);
+    | ldr TMP1, EX->prev_execute_data
+    | MEM_STORE_ZTS str, TMP1, executor_globals, current_execute_data, TMP3
+    return 1;
+}
+
+static int zend_jit_free_cv(dasm_State **Dst, uint32_t info, uint32_t var)
+{
+    if (info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE|MAY_BE_REF)) {
+        uint32_t offset = EX_NUM_TO_VAR(var);
+        zend_jit_addr addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, offset);
+        | ZVAL_PTR_DTOR addr, info, 1, 1, NULL, TMP2
+    }
+    return 1;
+}
+
+static int zend_jit_free_op(dasm_State **Dst, const zend_op *opline, uint32_t info, uint32_t var_offset)
+{
+    if (info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE|MAY_BE_REF)) {
+        | brk #0 // TODO
+    }
+    return 1;
+}
+
+static int zend_jit_leave_func(dasm_State **Dst,
+                               const zend_op_array *op_array,
+                               const zend_op *opline,
+                               uint32_t op1_info,
+                               zend_bool left_frame,
+                               zend_jit_trace_rec *trace,
+                               zend_jit_trace_info *trace_info,
+                               int indirect_var_access,
+                               int may_throw)
+{
+    zend_bool may_be_top_frame =
+        JIT_G(trigger) != ZEND_JIT_ON_HOT_TRACE ||
+        !JIT_G(current_frame) ||
+        !TRACE_FRAME_IS_NESTED(JIT_G(current_frame));
+    zend_bool may_need_call_helper =
+        indirect_var_access || /* may have symbol table */
+        !op_array->function_name || /* may have symbol table */
+        may_be_top_frame ||
+        (op_array->fn_flags & ZEND_ACC_VARIADIC) || /* may have extra named args */
+        JIT_G(trigger) != ZEND_JIT_ON_HOT_TRACE ||
+        !JIT_G(current_frame) ||
+        TRACE_FRAME_NUM_ARGS(JIT_G(current_frame)) == -1 || /* unknown number of args */
+        (uint32_t)TRACE_FRAME_NUM_ARGS(JIT_G(current_frame)) > op_array->num_args; /* extra args */
+    zend_bool may_need_release_this =
+        !(op_array->fn_flags & ZEND_ACC_CLOSURE) &&
+        op_array->scope &&
+        !(op_array->fn_flags & ZEND_ACC_STATIC) &&
+        (JIT_G(trigger) != ZEND_JIT_ON_HOT_TRACE ||
+         !JIT_G(current_frame) ||
+         !TRACE_FRAME_NO_NEED_RELEASE_THIS(JIT_G(current_frame)));
+
+    if (may_need_call_helper || may_need_release_this) {
+        | ldr FCARG1w, [FP, #offsetof(zend_execute_data, This.u1.type_info)]
+    }
+    if (may_need_call_helper) {
+        if (!left_frame) {
+            left_frame = 1;
+            if (!zend_jit_leave_frame(Dst)) {
+                return 0;
+            }
+        }
+        /* ZEND_CALL_FAKE_CLOSURE handled on slow path to eliminate check for ZEND_CALL_CLOSURE on fast path */
+
+        | LOAD_32BIT_VAL TMP1w, (ZEND_CALL_TOP|ZEND_CALL_HAS_SYMBOL_TABLE|ZEND_CALL_FREE_EXTRA_ARGS|ZEND_CALL_ALLOCATED|ZEND_CALL_HAS_EXTRA_NAMED_PARAMS|ZEND_CALL_FAKE_CLOSURE)
+        | tst FCARG1w, TMP1w
+        if (trace && trace->op != ZEND_JIT_TRACE_END) {
+            | brk #0 // TODO: test
+        } else {
+            | bne ->leave_function_handler
+        }
+    }
+
+    if (op_array->fn_flags & ZEND_ACC_CLOSURE) {
+        if (!left_frame) {
+            left_frame = 1;
+            if (!zend_jit_leave_frame(Dst)) {
+                return 0;
+            }
+        }
+        | brk #0 // TODO: test
+    } else if (may_need_release_this) {
+        if (!left_frame) {
+            left_frame = 1;
+            if (!zend_jit_leave_frame(Dst)) {
+                return 0;
+            }
+        }
+        | brk #0 // TODO: test
+        // TODO: avoid EG(exception) check for $this->foo() calls
+        may_throw = 1;
+    }
+
+    | // EG(vm_stack_top) = (zval*)execute_data;
+    | MEM_STORE_ZTS str, FP, executor_globals, vm_stack_top, TMP1
+    | // execute_data = EX(prev_execute_data);
+    | ldr FP, EX->prev_execute_data
+
+    if (!left_frame) {
+        | brk #0 // TODO: test
+        | // EG(current_execute_data) = execute_data;
+        | MEM_STORE_ZTS str, FP, executor_globals, current_execute_data, TMP1
+    }
+
+    |9:
+    if (trace) {
+        if (trace->op != ZEND_JIT_TRACE_END
+         && (JIT_G(current_frame) && !TRACE_FRAME_IS_UNKNOWN_RETURN(JIT_G(current_frame)))) {
+            zend_jit_reset_last_valid_opline();
+        } else {
+            | LOAD_IP
+            | ADD_IP_FROM_CST sizeof(zend_op), TMP1
+        }
+
+        |8:
+
+        if (trace->op == ZEND_JIT_TRACE_BACK
+         && (!JIT_G(current_frame) || TRACE_FRAME_IS_UNKNOWN_RETURN(JIT_G(current_frame)))) {
+            const zend_op *next_opline = trace->opline;
+
+            | brk #0 // TODO: test
+
+            return 1;
+        } else if (may_throw ||
+                (((opline->op1_type & (IS_VAR|IS_TMP_VAR))
+                  && (op1_info & MAY_BE_RC1)
+                  && (op1_info & (MAY_BE_OBJECT|MAY_BE_RESOURCE|MAY_BE_ARRAY_OF_OBJECT|MAY_BE_ARRAY_OF_RESOURCE|MAY_BE_ARRAY_OF_ARRAY)))
+                 && (!JIT_G(current_frame) || TRACE_FRAME_IS_RETURN_VALUE_UNUSED(JIT_G(current_frame))))) {
+            | brk #0 // TODO: test
+        }
+
+        return 1;
+    } else {
+        | // if (EG(exception))
+        | MEM_LOAD_CMP_ZTS ldr, xzr, executor_globals, exception, TMP1, TMP2
+        | LOAD_IP
+        | bne ->leave_throw_handler
+        | // opline = EX(opline) + 1
+        | ADD_IP_FROM_CST sizeof(zend_op), TMP1
+    }
+
+    if (zend_jit_vm_kind == ZEND_VM_KIND_HYBRID) {
+        | ADD_HYBRID_SPAD
+#ifdef CONTEXT_THREADED_JIT
+        | brk #0 // TODO: CONTEXT_THREADED_JIT is always undefined
+#else
+        | JMP_IP
+#endif
+    } else if (GCC_GLOBAL_REGS) {
+        | add sp, sp, SPAD // stack alignment
+#ifdef CONTEXT_THREADED_JIT
+        | brk #0 // TODO
+#else
+        | JMP_IP
+#endif
+    } else {
+#ifdef CONTEXT_THREADED_JIT
+        ZEND_UNREACHABLE();
+        // TODO: context threading can't work without GLOBAL REGS because we have to change
+        // the value of execute_data in execute_ex()
+        | brk #0 // TODO
+#else
+        | ldp FP, RX, T2 // restore FP and IP
+        | ldr LR, T4 // restore LR
+        | add sp, sp, NR_SPAD // stack alignment
+        | mov RETVALx, #2 // ZEND_VM_LEAVE ????
+ | ret +#endif + } + + return 1; +} + +static int zend_jit_return(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, uint32_t op1_info, zend_jit_addr op1_addr) +{ + zend_jit_addr ret_addr; + int8_t return_value_used; + + ZEND_ASSERT(op_array->type != ZEND_EVAL_CODE && op_array->function_name); + ZEND_ASSERT(!(op1_info & MAY_BE_UNDEF)); + + if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE && JIT_G(current_frame)) { + if (TRACE_FRAME_IS_RETURN_VALUE_USED(JIT_G(current_frame))) { + return_value_used = 1; + } else if (TRACE_FRAME_IS_RETURN_VALUE_UNUSED(JIT_G(current_frame))) { + return_value_used = 0; + } else { + return_value_used = -1; + } + } else { + return_value_used = -1; + } + + // TODO: This macro is only used in four sites. We should design a test variant to cover it. + if (ZEND_OBSERVER_ENABLED) { + | brk #0 // TODO: test + } + + // if (!EX(return_value)) + if (Z_MODE(op1_addr) == IS_REG && Z_REG(op1_addr) == ZREG_R1) { + | brk #0 // TODO: test + if (return_value_used != 0) { + | ldr x2, EX->return_value + } + if (return_value_used == -1) { + | tst x2, x2 + } + ret_addr = ZEND_ADDR_MEM_ZVAL(ZREG_R2, 0); + } else { + if (return_value_used != 0) { + | ldr x1, EX->return_value + } + if (return_value_used == -1) { + | tst x1, x1 + } + ret_addr = ZEND_ADDR_MEM_ZVAL(ZREG_R1, 0); + } + + if ((opline->op1_type & (IS_VAR|IS_TMP_VAR)) && + (op1_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE))) { + | brk #0 // TODO: test + } else if (return_value_used == -1) { + if (jit_return_label >= 0) { + | brk #0 // TODO: test + } else { + | beq >9 + } + } + + if (return_value_used == 0) { + |9: + | brk #0 // TODO: test + return 1; + } + + if (opline->op1_type == IS_CONST) { + zval *zv = RT_CONSTANT(opline, opline->op1); + | ZVAL_COPY_CONST ret_addr, MAY_BE_ANY, MAY_BE_ANY, zv, ZREG_R8, ZREG_R9 + if (Z_REFCOUNTED_P(zv)) { + | brk #0 // TODO: test + } + } else if (opline->op1_type == IS_TMP_VAR) { + | brk #0 // TODO + } else if (opline->op1_type == IS_CV) { + | brk #0 // TODO + } else { + | brk #0 // TODO + } + + |9: + return 1; +} + +static int zend_jit_zval_copy_deref(dasm_State **Dst, zend_jit_addr res_addr, zend_jit_addr val_addr, zend_reg type_reg) +{ + ZEND_ASSERT(type_reg == ZREG_R2); + + | brk #0 // TODO + return 1; +} + +static zend_bool zend_jit_may_avoid_refcounting(const zend_op *opline) +{ + switch (opline->opcode) { + case ZEND_FETCH_OBJ_FUNC_ARG: + if (!JIT_G(current_frame) || + !JIT_G(current_frame)->call->func || + !TRACE_FRAME_IS_LAST_SEND_BY_VAL(JIT_G(current_frame)->call)) { + return 0; + } + /* break missing intentionally */ + case ZEND_FETCH_OBJ_R: + case ZEND_FETCH_OBJ_IS: + if (opline->op2_type == IS_CONST + && Z_TYPE_P(RT_CONSTANT(opline, opline->op2)) == IS_STRING + && Z_STRVAL_P(RT_CONSTANT(opline, opline->op2))[0] != '\0') { + return 1; + } + break; + case ZEND_FETCH_DIM_FUNC_ARG: + if (!JIT_G(current_frame) || + !JIT_G(current_frame)->call->func || + !TRACE_FRAME_IS_LAST_SEND_BY_VAL(JIT_G(current_frame)->call)) { + return 0; + } + /* break missing intentionally */ + case ZEND_FETCH_DIM_R: + case ZEND_FETCH_DIM_IS: + return 1; + case ZEND_ISSET_ISEMPTY_DIM_OBJ: + if (!(opline->extended_value & ZEND_ISEMPTY)) { + return 1; + } + break; + } + return 0; +} + +static int zend_jit_fetch_dim_read(dasm_State **Dst, + const zend_op *opline, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + uint32_t op1_info, + zend_jit_addr op1_addr, + zend_bool op1_avoid_refcounting, + uint32_t op2_info, + uint32_t res_info, + zend_jit_addr res_addr, + int may_throw) +{ 
+ zend_jit_addr orig_op1_addr, op2_addr; + const void *exit_addr = NULL; + const void *not_found_exit_addr = NULL; + const void *res_exit_addr = NULL; + zend_bool result_avoid_refcounting = 0; + uint32_t may_be_string = (opline->opcode != ZEND_FETCH_LIST_R) ? MAY_BE_STRING : 0; + + orig_op1_addr = OP1_ADDR(); + op2_addr = OP2_ADDR(); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_fetch_dim(dasm_State **Dst, + const zend_op *opline, + uint32_t op1_info, + zend_jit_addr op1_addr, + uint32_t op2_info, + zend_jit_addr res_addr, + int may_throw) +{ + zend_jit_addr op2_addr; + + op2_addr = (opline->op2_type != IS_UNUSED) ? OP2_ADDR() : 0; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_isset_isempty_dim(dasm_State **Dst, + const zend_op *opline, + uint32_t op1_info, + zend_jit_addr op1_addr, + zend_bool op1_avoid_refcounting, + uint32_t op2_info, + int may_throw, + zend_uchar smart_branch_opcode, + uint32_t target_label, + uint32_t target_label2, + const void *exit_addr) +{ + zend_jit_addr op2_addr, res_addr; + + // TODO: support for empty() ??? + ZEND_ASSERT(!(opline->extended_value & ZEND_ISEMPTY)); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_bind_global(dasm_State **Dst, const zend_op *opline, uint32_t op1_info) +{ + zend_jit_addr op1_addr = OP1_ADDR(); + zend_string *varname = Z_STR_P(RT_CONSTANT(opline, opline->op2)); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_verify_arg_type(dasm_State **Dst, const zend_op *opline, zend_arg_info *arg_info, zend_bool check_exception) +{ + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + zend_bool in_cold = 0; + uint32_t type_mask = ZEND_TYPE_PURE_MASK(arg_info->type) & MAY_BE_ANY; + zend_reg tmp_reg = (type_mask == 0 || is_power_of_two(type_mask)) ? ZREG_FCARG1x : ZREG_R0; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_recv(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array) +{ + uint32_t arg_num = opline->op1.num; + zend_arg_info *arg_info = NULL; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_recv_init(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, zend_bool is_last, int may_throw) +{ + uint32_t arg_num = opline->op1.num; + zval *zv = RT_CONSTANT(opline, opline->op2); + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + + | brk #0 // TODO + return 1; +} + +static zend_property_info* zend_get_known_property_info(zend_class_entry *ce, zend_string *member, zend_bool on_this, zend_string *filename) +{ + zend_property_info *info = NULL; + + if (!ce || + !(ce->ce_flags & ZEND_ACC_LINKED) || + (ce->ce_flags & ZEND_ACC_TRAIT) || + ce->create_object) { + return NULL; + } + + if (!(ce->ce_flags & ZEND_ACC_IMMUTABLE)) { + if (ce->info.user.filename != filename) { + /* class declaration might be changed independently */ + return NULL; + } + + if (ce->parent) { + zend_class_entry *parent = ce->parent; + + do { + if (parent->type == ZEND_INTERNAL_CLASS) { + break; + } else if (parent->info.user.filename != filename) { + /* some of parents class declarations might be changed independently */ + /* TODO: this check may be not enough, because even + * in the same it's possible to conditionally define + * few classes with the same name, and "parent" may + * change from request to request. 
+ */ + return NULL; + } + parent = parent->parent; + } while (parent); + } + } + + info = (zend_property_info*)zend_hash_find_ptr(&ce->properties_info, member); + if (info == NULL || + !IS_VALID_PROPERTY_OFFSET(info->offset) || + (info->flags & ZEND_ACC_STATIC)) { + return NULL; + } + + if (!(info->flags & ZEND_ACC_PUBLIC) && + (!on_this || info->ce != ce)) { + return NULL; + } + + return info; +} + +static zend_bool zend_may_be_dynamic_property(zend_class_entry *ce, zend_string *member, zend_bool on_this, zend_string *filename) +{ + zend_property_info *info; + + if (!ce || (ce->ce_flags & ZEND_ACC_TRAIT)) { + return 1; + } + + if (!(ce->ce_flags & ZEND_ACC_IMMUTABLE)) { + if (ce->info.user.filename != filename) { + /* class declaration might be changed independently */ + return 1; + } + } + + info = (zend_property_info*)zend_hash_find_ptr(&ce->properties_info, member); + if (info == NULL || + !IS_VALID_PROPERTY_OFFSET(info->offset) || + (info->flags & ZEND_ACC_STATIC)) { + return 1; + } + + if (!(info->flags & ZEND_ACC_PUBLIC) && + (!on_this || info->ce != ce)) { + return 1; + } + + return 0; +} + +static int zend_jit_class_guard(dasm_State **Dst, const zend_op *opline, zend_class_entry *ce) +{ + int32_t exit_point = zend_jit_trace_get_exit_point(opline, 0); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + + | brk #0 // TODO + return 1; +} + +static int zend_jit_fetch_obj(dasm_State **Dst, + const zend_op *opline, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + uint32_t op1_info, + zend_jit_addr op1_addr, + zend_bool op1_indirect, + zend_class_entry *ce, + zend_bool ce_is_instanceof, + zend_bool use_this, + zend_bool op1_avoid_refcounting, + zend_class_entry *trace_ce, + int may_throw) +{ + zval *member; + zend_property_info *prop_info; + zend_bool may_be_dynamic = 1; + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + zend_jit_addr this_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, offsetof(zend_execute_data, This)); + zend_jit_addr prop_addr; + uint32_t res_info = RES_INFO(); + + ZEND_ASSERT(opline->op2_type == IS_CONST); + ZEND_ASSERT(op1_info & MAY_BE_OBJECT); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_incdec_obj(dasm_State **Dst, + const zend_op *opline, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + uint32_t op1_info, + zend_jit_addr op1_addr, + zend_bool op1_indirect, + zend_class_entry *ce, + zend_bool ce_is_instanceof, + zend_bool use_this, + zend_class_entry *trace_ce, + int may_throw) +{ + zval *member; + zend_string *name; + zend_property_info *prop_info; + zend_jit_addr this_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, offsetof(zend_execute_data, This)); + zend_jit_addr res_addr = 0; + zend_jit_addr prop_addr; + zend_bool needs_slow_path = 0; + + ZEND_ASSERT(opline->op2_type == IS_CONST); + ZEND_ASSERT(op1_info & MAY_BE_OBJECT); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_assign_obj_op(dasm_State **Dst, + const zend_op *opline, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + uint32_t op1_info, + zend_jit_addr op1_addr, + uint32_t val_info, + zend_ssa_range *val_range, + zend_bool op1_indirect, + zend_class_entry *ce, + zend_bool ce_is_instanceof, + zend_bool use_this, + zend_class_entry *trace_ce, + int may_throw) +{ + zval *member; + zend_string *name; + zend_property_info *prop_info; + zend_jit_addr val_addr = OP1_DATA_ADDR(); + zend_jit_addr this_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, 
offsetof(zend_execute_data, This)); + zend_jit_addr prop_addr; + zend_bool needs_slow_path = 0; + binary_op_type binary_op = get_binary_op(opline->extended_value); + + ZEND_ASSERT(opline->op2_type == IS_CONST); + ZEND_ASSERT(op1_info & MAY_BE_OBJECT); + ZEND_ASSERT(opline->result_type == IS_UNUSED); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_assign_obj(dasm_State **Dst, + const zend_op *opline, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op, + uint32_t op1_info, + zend_jit_addr op1_addr, + uint32_t val_info, + zend_bool op1_indirect, + zend_class_entry *ce, + zend_bool ce_is_instanceof, + zend_bool use_this, + zend_class_entry *trace_ce, + int may_throw) +{ + zval *member; + zend_string *name; + zend_property_info *prop_info; + zend_jit_addr val_addr = OP1_DATA_ADDR(); + zend_jit_addr res_addr = 0; + zend_jit_addr this_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, offsetof(zend_execute_data, This)); + zend_jit_addr prop_addr; + zend_bool needs_slow_path = 0; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_free(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, int may_throw) +{ + zend_jit_addr op1_addr = OP1_ADDR(); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_echo(dasm_State **Dst, const zend_op *opline, uint32_t op1_info) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_strlen(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr) +{ + zend_jit_addr res_addr = RES_ADDR(); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_load_this(dasm_State **Dst, uint32_t var) +{ + zend_jit_addr var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, var); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_fetch_this(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, zend_bool check_only) +{ + | brk #0 // TODO + return 1; +} + +static int zend_jit_hash_jmp(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, zend_ssa *ssa, HashTable *jumptable, int default_b, const void *default_label, const zend_op *next_opline, zend_jit_trace_info *trace_info) +{ + uint32_t count; + Bucket *p; + const zend_op *target; + int b; + int32_t exit_point; + const void *exit_addr; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_switch(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, zend_ssa *ssa, zend_jit_trace_rec *trace, zend_jit_trace_info *trace_info) +{ + HashTable *jumptable = Z_ARRVAL_P(RT_CONSTANT(opline, opline->op2)); + const zend_op *next_opline = NULL; + + if (trace) { + ZEND_ASSERT(trace->op == ZEND_JIT_TRACE_VM || trace->op == ZEND_JIT_TRACE_END); + ZEND_ASSERT(trace->opline != NULL); + next_opline = trace->opline; + } + + | brk #0 // TODO + return 1; +} + +static zend_bool zend_jit_verify_return_type(dasm_State **Dst, const zend_op *opline, const zend_op_array *op_array, uint32_t op1_info) +{ + zend_arg_info *arg_info = &op_array->arg_info[-1]; + ZEND_ASSERT(ZEND_TYPE_IS_SET(arg_info->type)); + zend_jit_addr op1_addr = OP1_ADDR(); + zend_bool needs_slow_check = 1; + zend_bool slow_check_in_cold = 1; + uint32_t type_mask = ZEND_TYPE_PURE_MASK(arg_info->type) & MAY_BE_ANY; + + | brk #0 // TODO + return 1; +} + +static int zend_jit_isset_isempty_cv(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + + | brk #0 // TODO + 
return 1; +} + +static int zend_jit_fe_reset(dasm_State **Dst, const zend_op *opline, uint32_t op1_info) +{ + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_fe_fetch(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, uint32_t op2_info, unsigned int target_label, zend_uchar exit_opcode, const void *exit_addr) +{ + zend_jit_addr op1_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->op1.var); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_fetch_constant(dasm_State **Dst, + const zend_op *opline, + const zend_op_array *op_array, + zend_ssa *ssa, + const zend_ssa_op *ssa_op) +{ + zval *zv = RT_CONSTANT(opline, opline->op2) + 1; + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + zend_jit_addr const_addr = ZEND_ADDR_MEM_ZVAL(ZREG_R0, 0); + uint32_t res_info = RES_INFO(); + + | brk #0 // TODO + return 1; +} + +static int zend_jit_in_array(dasm_State **Dst, const zend_op *opline, uint32_t op1_info, zend_jit_addr op1_addr, zend_uchar smart_branch_opcode, uint32_t target_label, uint32_t target_label2, const void *exit_addr) +{ + HashTable *ht = Z_ARRVAL_P(RT_CONSTANT(opline, opline->op2)); + zend_jit_addr res_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var); + + ZEND_ASSERT(opline->op1_type != IS_VAR && opline->op1_type != IS_TMP_VAR); + ZEND_ASSERT((op1_info & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_REF)) == MAY_BE_STRING); + + | brk #0 // TODO + return 1; +} + +static zend_bool zend_jit_noref_guard(dasm_State **Dst, const zend_op *opline, zend_jit_addr var_addr) +{ + int32_t exit_point = zend_jit_trace_get_exit_point(opline, 0); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + | brk #0 // IF_ZVAL_TYPE var_addr, IS_REFERENCE, &exit_addr + + return 1; +} + +static zend_bool zend_jit_fetch_reference(dasm_State **Dst, const zend_op *opline, uint8_t var_type, uint32_t *var_info_ptr, zend_jit_addr *var_addr_ptr, zend_bool add_ref_guard, zend_bool add_type_guard) +{ + zend_jit_addr var_addr = *var_addr_ptr; + uint32_t var_info = *var_info_ptr; + const void *exit_addr = NULL; + + if (add_ref_guard || add_type_guard) { + int32_t exit_point = zend_jit_trace_get_exit_point(opline, 0); + + exit_addr = zend_jit_trace_get_exit_addr(exit_point); + if (!exit_addr) { + return 0; + } + } + + if (add_ref_guard) { + | brk #0 // TODO + } + if (opline->opcode == ZEND_INIT_METHOD_CALL && opline->op1_type == IS_VAR) { + /* Hack: Convert reference to regular value to simplify JIT code for INIT_METHOD_CALL */ + if (Z_REG(var_addr) != ZREG_FCARG1x || Z_OFFSET(var_addr) != 0) { + | LOAD_ZVAL_ADDR FCARG1x, var_addr + } + | EXT_CALL zend_jit_unref_helper, TMP1 + } else { + | brk #0 // GET_ZVAL_PTR FCARG1x, var_addr + var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, offsetof(zend_reference, val)); + *var_addr_ptr = var_addr; + } + + if (var_type != IS_UNKNOWN) { + var_type &= ~(IS_TRACE_REFERENCE|IS_TRACE_INDIRECT|IS_TRACE_PACKED); + } + if (add_type_guard + && var_type != IS_UNKNOWN + && (var_info & (MAY_BE_ANY|MAY_BE_UNDEF)) != (1 << var_type)) { + | brk #0 // TODO + + ZEND_ASSERT(var_info & (1 << var_type)); + if (var_type < IS_STRING) { + var_info = (1 << var_type); + } else if (var_type != IS_ARRAY) { + var_info = (1 << var_type) | (var_info & (MAY_BE_RC1|MAY_BE_RCN)); + } else { + var_info = MAY_BE_ARRAY | (var_info & (MAY_BE_ARRAY_OF_ANY|MAY_BE_ARRAY_OF_REF|MAY_BE_ARRAY_KEY_ANY|MAY_BE_RC1|MAY_BE_RCN)); + } + + *var_info_ptr = var_info; + } else { 
+ var_info &= ~MAY_BE_REF; + *var_info_ptr = var_info; + } + + return 1; +} + +static zend_bool zend_jit_fetch_indirect_var(dasm_State **Dst, const zend_op *opline, uint8_t var_type, uint32_t *var_info_ptr, zend_jit_addr *var_addr_ptr, zend_bool add_indirect_guard) +{ + zend_jit_addr var_addr = *var_addr_ptr; + uint32_t var_info = *var_info_ptr; + int32_t exit_point; + const void *exit_addr; + + if (add_indirect_guard) { + int32_t exit_point = zend_jit_trace_get_exit_point(opline, 0); + const void *exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + | brk #0 // TODO + } else { + /* May be already loaded into FCARG1a or RAX by previus FETCH_OBJ_W/DIM_W */ + if (opline->op1_type != IS_VAR || + (opline-1)->result_type != IS_VAR || + (opline-1)->result.var != opline->op1.var || + (opline-1)->op2_type == IS_VAR || + (opline-1)->op2_type == IS_TMP_VAR) { + | brk #0 // GET_ZVAL_PTR FCARG1x, var_addr + } else if ((opline-1)->opcode == ZEND_FETCH_DIM_W || (opline-1)->opcode == ZEND_FETCH_DIM_RW) { + | brk #0 // TODO + } + } + *var_info_ptr &= ~MAY_BE_INDIRECT; + var_addr = ZEND_ADDR_MEM_ZVAL(ZREG_FCARG1x, 0); + *var_addr_ptr = var_addr; + + if (var_type != IS_UNKNOWN) { + var_type &= ~(IS_TRACE_INDIRECT|IS_TRACE_PACKED); + } + if (!(var_type & IS_TRACE_REFERENCE) + && var_type != IS_UNKNOWN + && (var_info & (MAY_BE_ANY|MAY_BE_UNDEF)) != (1 << var_type)) { + exit_point = zend_jit_trace_get_exit_point(opline, 0); + exit_addr = zend_jit_trace_get_exit_addr(exit_point); + + if (!exit_addr) { + return 0; + } + + | brk #0 // TODO + + //var_info = zend_jit_trace_type_to_info_ex(var_type, var_info); + ZEND_ASSERT(var_info & (1 << var_type)); + if (var_type < IS_STRING) { + var_info = (1 << var_type); + } else if (var_type != IS_ARRAY) { + var_info = (1 << var_type) | (var_info & (MAY_BE_RC1|MAY_BE_RCN)); + } else { + var_info = MAY_BE_ARRAY | (var_info & (MAY_BE_ARRAY_OF_ANY|MAY_BE_ARRAY_OF_REF|MAY_BE_ARRAY_KEY_ANY|MAY_BE_RC1|MAY_BE_RCN)); + } + + *var_info_ptr = var_info; + } + + return 1; +} + +static zend_bool zend_jit_may_reuse_reg(const zend_op *opline, const zend_ssa_op *ssa_op, zend_ssa *ssa, int def_var, int use_var) +{ + if ((ssa->var_info[def_var].type & ~MAY_BE_GUARD) != (ssa->var_info[use_var].type & ~MAY_BE_GUARD)) { + return 0; + } + + switch (opline->opcode) { + case ZEND_QM_ASSIGN: + case ZEND_SEND_VAR: + case ZEND_ASSIGN: + case ZEND_PRE_INC: + case ZEND_PRE_DEC: + case ZEND_POST_INC: + case ZEND_POST_DEC: + return 1; + case ZEND_ADD: + case ZEND_SUB: + case ZEND_MUL: + case ZEND_BW_OR: + case ZEND_BW_AND: + case ZEND_BW_XOR: + if (def_var == ssa_op->result_def && + use_var == ssa_op->op1_use) { + return 1; + } + break; + default: + break; + } + return 0; +} + +static zend_bool zend_jit_opline_supports_reg(const zend_op_array *op_array, zend_ssa *ssa, const zend_op *opline, const zend_ssa_op *ssa_op, zend_jit_trace_rec *trace) +{ + uint32_t op1_info, op2_info; + + switch (opline->opcode) { + case ZEND_QM_ASSIGN: + case ZEND_SEND_VAR: + case ZEND_SEND_VAL: + case ZEND_SEND_VAL_EX: + case ZEND_IS_SMALLER: + case ZEND_IS_SMALLER_OR_EQUAL: + case ZEND_IS_EQUAL: + case ZEND_IS_NOT_EQUAL: + case ZEND_IS_IDENTICAL: + case ZEND_IS_NOT_IDENTICAL: + case ZEND_CASE: + case ZEND_RETURN: + return 1; + case ZEND_ASSIGN: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + return + opline->op1_type == IS_CV && + !(op1_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_RESOURCE|MAY_BE_REF)) && + !(op2_info & 
((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE))); + case ZEND_ADD: + case ZEND_SUB: + case ZEND_MUL: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + return !((op1_info | op2_info) & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF) - (MAY_BE_LONG|MAY_BE_DOUBLE))); + case ZEND_BW_OR: + case ZEND_BW_AND: + case ZEND_BW_XOR: + case ZEND_SL: + case ZEND_SR: + case ZEND_MOD: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + return !((op1_info | op2_info) & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF) - MAY_BE_LONG)); + case ZEND_PRE_INC: + case ZEND_PRE_DEC: + case ZEND_POST_INC: + case ZEND_POST_DEC: + op1_info = OP1_INFO(); + return opline->op1_type == IS_CV && !(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF) - MAY_BE_LONG)); + case ZEND_BOOL: + case ZEND_BOOL_NOT: + case ZEND_JMPZ: + case ZEND_JMPNZ: + case ZEND_JMPZNZ: + case ZEND_JMPZ_EX: + case ZEND_JMPNZ_EX: + return 1; + case ZEND_FETCH_DIM_R: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (trace + && trace->op1_type != IS_UNKNOWN + && (trace->op1_type & ~(IS_TRACE_REFERENCE|IS_TRACE_INDIRECT|IS_TRACE_PACKED)) == IS_ARRAY) { + op1_info &= ~((MAY_BE_ANY|MAY_BE_UNDEF) - MAY_BE_ARRAY); + } + return ((op1_info & (MAY_BE_ANY|MAY_BE_UNDEF)) == MAY_BE_ARRAY) && + (!(opline->op1_type & (IS_TMP_VAR|IS_VAR)) || !(op1_info & MAY_BE_RC1)) && + (((op2_info & (MAY_BE_ANY|MAY_BE_UNDEF)) == MAY_BE_LONG) || + (((op2_info & (MAY_BE_ANY|MAY_BE_UNDEF)) == MAY_BE_STRING) && + (!(opline->op2_type & (IS_TMP_VAR|IS_VAR)) || !(op2_info & MAY_BE_RC1)))); + } + return 0; +} + +static zend_bool zend_jit_var_supports_reg(zend_ssa *ssa, int var) +{ + if (ssa->vars[var].no_val) { + /* we don't need the value */ + return 0; + } + + if (!(JIT_G(opt_flags) & ZEND_JIT_REG_ALLOC_GLOBAL)) { + /* Disable global register allocation, + * register allocation for SSA variables connected through Phi functions + */ + if (ssa->vars[var].definition_phi) { + return 0; + } + if (ssa->vars[var].phi_use_chain) { + zend_ssa_phi *phi = ssa->vars[var].phi_use_chain; + do { + if (!ssa->vars[phi->ssa_var].no_val) { + return 0; + } + phi = zend_ssa_next_use_phi(ssa, var, phi); + } while (phi); + } + } + + if (((ssa->var_info[var].type & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_REF)) != MAY_BE_DOUBLE) && + ((ssa->var_info[var].type & (MAY_BE_ANY|MAY_BE_UNDEF|MAY_BE_REF)) != MAY_BE_LONG)) { + /* bad type */ + return 0; + } + + return 1; +} + +static zend_bool zend_jit_may_be_in_reg(const zend_op_array *op_array, zend_ssa *ssa, int var) +{ + if (!zend_jit_var_supports_reg(ssa, var)) { + return 0; + } + + if (ssa->vars[var].definition >= 0) { + uint32_t def = ssa->vars[var].definition; + if (!zend_jit_opline_supports_reg(op_array, ssa, op_array->opcodes + def, ssa->ops + def, NULL)) { + return 0; + } + } + + if (ssa->vars[var].use_chain >= 0) { + int use = ssa->vars[var].use_chain; + + do { + if (!zend_ssa_is_no_val_use(op_array->opcodes + use, ssa->ops + use, var) && + !zend_jit_opline_supports_reg(op_array, ssa, op_array->opcodes + use, ssa->ops + use, NULL)) { + return 0; + } + use = zend_ssa_next_use(ssa->ops, var, use); + } while (use >= 0); + } + + return 1; +} + +static zend_bool zend_needs_extra_reg_for_const(const zend_op *opline, zend_uchar op_type, znode_op op) +{ +|| if (op_type == IS_CONST) { +|| zval *zv = RT_CONSTANT(opline, op); +|| if (Z_TYPE_P(zv) == IS_DOUBLE && Z_DVAL_P(zv) != 0 && !IS_SIGNED_32BIT(zv)) { +|| return 1; +|| } else if (Z_TYPE_P(zv) == IS_LONG && !IS_SIGNED_32BIT(Z_LVAL_P(zv))) { +|| return 1; +|| } +|| } + return 0; +} + +static zend_regset 
zend_jit_get_def_scratch_regset(const zend_op *opline, const zend_ssa_op *ssa_op, const zend_op_array *op_array, zend_ssa *ssa, int current_var, zend_bool last_use) +{ + uint32_t op1_info, op2_info; + + switch (opline->opcode) { + case ZEND_FETCH_DIM_R: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (((opline->op1_type & (IS_TMP_VAR|IS_VAR)) && + (op1_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE))) || + ((opline->op2_type & (IS_TMP_VAR|IS_VAR)) && + (op2_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_OBJECT|MAY_BE_RESOURCE)))) { + return ZEND_REGSET(ZREG_FCARG1x); + } + break; + default: + break; + } + + return ZEND_REGSET_EMPTY; +} + +static zend_regset zend_jit_get_scratch_regset(const zend_op *opline, const zend_ssa_op *ssa_op, const zend_op_array *op_array, zend_ssa *ssa, int current_var, zend_bool last_use) +{ + uint32_t op1_info, op2_info, res_info; + zend_regset regset = ZEND_REGSET_SCRATCH; + + switch (opline->opcode) { + case ZEND_NOP: + case ZEND_OP_DATA: + case ZEND_JMP: + case ZEND_RETURN: + regset = ZEND_REGSET_EMPTY; + break; + case ZEND_QM_ASSIGN: + if (ssa_op->op1_def == current_var || + ssa_op->result_def == current_var) { + regset = ZEND_REGSET_EMPTY; + break; + } + /* break missing intentionally */ + case ZEND_SEND_VAL: + case ZEND_SEND_VAL_EX: + if (ssa_op->op1_use == current_var) { + regset = ZEND_REGSET(ZREG_R0); + break; + } + op1_info = OP1_INFO(); + if (!(op1_info & MAY_BE_UNDEF)) { + if ((op1_info & (MAY_BE_ANY|MAY_BE_REF)) == MAY_BE_DOUBLE) { + regset = ZEND_REGSET(ZREG_XMM0); + } else if ((op1_info & (MAY_BE_ANY|MAY_BE_REF)) == MAY_BE_LONG) { + regset = ZEND_REGSET(ZREG_R0); + } else { + regset = ZEND_REGSET_UNION(ZEND_REGSET(ZREG_R0), ZEND_REGSET(ZREG_R2)); + } + } + break; + case ZEND_SEND_VAR: + if (ssa_op->op1_use == current_var || + ssa_op->op1_def == current_var) { + regset = ZEND_REGSET_EMPTY; + break; + } + op1_info = OP1_INFO(); + if (!(op1_info & MAY_BE_UNDEF)) { + if ((op1_info & (MAY_BE_ANY|MAY_BE_REF)) == MAY_BE_DOUBLE) { + regset = ZEND_REGSET(ZREG_XMM0); + } else if ((op1_info & (MAY_BE_ANY|MAY_BE_REF)) == MAY_BE_LONG) { + } else { + regset = ZEND_REGSET_UNION(ZEND_REGSET(ZREG_R0), ZEND_REGSET(ZREG_R2)); + if (op1_info & MAY_BE_REF) { + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + } + break; + case ZEND_ASSIGN: + if (ssa_op->op2_use == current_var || + ssa_op->op2_def == current_var || + ssa_op->op1_def == current_var || + ssa_op->result_def == current_var) { + regset = ZEND_REGSET_EMPTY; + break; + } + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (opline->op1_type == IS_CV + && !(op2_info & MAY_BE_UNDEF) + && !(op1_info & (MAY_BE_STRING|MAY_BE_ARRAY|MAY_BE_RESOURCE|MAY_BE_REF))) { + if ((op2_info & (MAY_BE_ANY|MAY_BE_REF)) == MAY_BE_DOUBLE) { + regset = ZEND_REGSET(ZREG_XMM0); + } else if ((op2_info & (MAY_BE_ANY|MAY_BE_REF)) == MAY_BE_LONG) { + regset = ZEND_REGSET(ZREG_R0); + } else { + regset = ZEND_REGSET_UNION(ZEND_REGSET(ZREG_R0), ZEND_REGSET(ZREG_R2)); + } + } + break; + case ZEND_PRE_INC: + case ZEND_PRE_DEC: + case ZEND_POST_INC: + case ZEND_POST_DEC: + if (ssa_op->op1_use == current_var || + ssa_op->op1_def == current_var || + ssa_op->result_def == current_var) { + regset = ZEND_REGSET_EMPTY; + break; + } + op1_info = OP1_INFO(); + if (opline->op1_type == IS_CV + && (op1_info & MAY_BE_LONG) + && !(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE)))) { + regset = ZEND_REGSET_EMPTY; + if (op1_info & MAY_BE_DOUBLE) { + regset = ZEND_REGSET(ZREG_XMM0); + } + } + break; + case 
ZEND_ADD: + case ZEND_SUB: + case ZEND_MUL: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (!(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE))) && + !(op2_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE)))) { + + regset = ZEND_REGSET_EMPTY; + if ((op1_info & MAY_BE_LONG) && (op2_info & MAY_BE_LONG)) { + if (ssa_op->result_def != current_var && + (ssa_op->op1_use != current_var || !last_use)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + res_info = OP1_INFO(); + if (res_info & MAY_BE_DOUBLE) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + ZEND_REGSET_INCL(regset, ZREG_XMM1); + } + } + if ((op1_info & MAY_BE_LONG) && (op2_info & MAY_BE_DOUBLE)) { + if (ssa_op->result_def != current_var) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + } + if ((op1_info & MAY_BE_DOUBLE) && (op2_info & MAY_BE_LONG)) { + if (zend_is_commutative(opline->opcode)) { + if (ssa_op->result_def != current_var) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + } else { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + if (ssa_op->result_def != current_var && + (ssa_op->op1_use != current_var || !last_use)) { + ZEND_REGSET_INCL(regset, ZREG_XMM1); + } + } + } + if ((op1_info & MAY_BE_DOUBLE) && (op2_info & MAY_BE_DOUBLE)) { + if (ssa_op->result_def != current_var && + (ssa_op->op1_use != current_var || !last_use) && + (!zend_is_commutative(opline->opcode) || ssa_op->op2_use != current_var || !last_use)) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + } + if (zend_needs_extra_reg_for_const(opline, opline->op1_type, opline->op1) || + zend_needs_extra_reg_for_const(opline, opline->op2_type, opline->op2)) { + if (!ZEND_REGSET_IN(regset, ZREG_R0)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } else { + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + } + break; + case ZEND_BW_OR: + case ZEND_BW_AND: + case ZEND_BW_XOR: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (!(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-MAY_BE_LONG)) && + !(op2_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-MAY_BE_LONG))) { + regset = ZEND_REGSET_EMPTY; + if (ssa_op->result_def != current_var && + (ssa_op->op1_use != current_var || !last_use)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + if (zend_needs_extra_reg_for_const(opline, opline->op1_type, opline->op1) || + zend_needs_extra_reg_for_const(opline, opline->op2_type, opline->op2)) { + if (!ZEND_REGSET_IN(regset, ZREG_R0)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } else { + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + } + break; + case ZEND_SL: + case ZEND_SR: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (!(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-MAY_BE_LONG)) && + !(op2_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-MAY_BE_LONG))) { + regset = ZEND_REGSET_EMPTY; + if (ssa_op->result_def != current_var && + (ssa_op->op1_use != current_var || !last_use)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + if (opline->op2_type != IS_CONST && ssa_op->op2_use != current_var) { + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + break; + case ZEND_MOD: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (!(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-MAY_BE_LONG)) && + !(op2_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-MAY_BE_LONG))) { + regset = ZEND_REGSET_EMPTY; + if (opline->op2_type == IS_CONST && + Z_TYPE_P(RT_CONSTANT(opline, opline->op2)) == IS_LONG && + zend_long_is_power_of_two(Z_LVAL_P(RT_CONSTANT(opline, opline->op2))) && + OP1_HAS_RANGE() && + OP1_MIN_RANGE() >= 0) { + if (ssa_op->result_def != current_var && + (ssa_op->op1_use != 
current_var || !last_use)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + if (sizeof(void*) == 8 + && !IS_SIGNED_32BIT(Z_LVAL_P(RT_CONSTANT(opline, opline->op2)) - 1)) { + if (!ZEND_REGSET_IN(regset, ZREG_R0)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } else { + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + } else { + ZEND_REGSET_INCL(regset, ZREG_R0); + ZEND_REGSET_INCL(regset, ZREG_R2); + if (opline->op2_type == IS_CONST) { + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + } + break; + case ZEND_IS_SMALLER: + case ZEND_IS_SMALLER_OR_EQUAL: + case ZEND_IS_EQUAL: + case ZEND_IS_NOT_EQUAL: + case ZEND_IS_IDENTICAL: + case ZEND_IS_NOT_IDENTICAL: + case ZEND_CASE: + op1_info = OP1_INFO(); + op2_info = OP2_INFO(); + if (!(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE))) && + !(op2_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_LONG|MAY_BE_DOUBLE)))) { + regset = ZEND_REGSET_EMPTY; + if (!(opline->result_type & (IS_SMART_BRANCH_JMPZ|IS_SMART_BRANCH_JMPNZ))) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + if ((op1_info & MAY_BE_LONG) && (op2_info & MAY_BE_LONG) && + opline->op1_type != IS_CONST && opline->op2_type != IS_CONST) { + if (ssa_op->op1_use != current_var && + ssa_op->op2_use != current_var) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + } + if ((op1_info & MAY_BE_LONG) && (op2_info & MAY_BE_DOUBLE)) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + if ((op1_info & MAY_BE_DOUBLE) && (op2_info & MAY_BE_LONG)) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + if ((op1_info & MAY_BE_DOUBLE) && (op2_info & MAY_BE_DOUBLE)) { + if (ssa_op->op1_use != current_var && + ssa_op->op2_use != current_var) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + } + if (zend_needs_extra_reg_for_const(opline, opline->op1_type, opline->op1) || + zend_needs_extra_reg_for_const(opline, opline->op2_type, opline->op2)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + } + break; + case ZEND_BOOL: + case ZEND_BOOL_NOT: + case ZEND_JMPZ: + case ZEND_JMPNZ: + case ZEND_JMPZNZ: + case ZEND_JMPZ_EX: + case ZEND_JMPNZ_EX: + op1_info = OP1_INFO(); + if (!(op1_info & ((MAY_BE_ANY|MAY_BE_REF|MAY_BE_UNDEF)-(MAY_BE_NULL|MAY_BE_FALSE|MAY_BE_TRUE|MAY_BE_LONG|MAY_BE_DOUBLE)))) { + regset = ZEND_REGSET_EMPTY; + if (op1_info & MAY_BE_DOUBLE) { + ZEND_REGSET_INCL(regset, ZREG_XMM0); + } + if (opline->opcode == ZEND_BOOL || + opline->opcode == ZEND_BOOL_NOT || + opline->opcode == ZEND_JMPZ_EX || + opline->opcode == ZEND_JMPNZ_EX) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } + } + break; + case ZEND_DO_UCALL: + case ZEND_DO_FCALL: + case ZEND_DO_FCALL_BY_NAME: + case ZEND_INCLUDE_OR_EVAL: + case ZEND_GENERATOR_CREATE: + case ZEND_YIELD: + case ZEND_YIELD_FROM: + regset = ZEND_REGSET_UNION(ZEND_REGSET_GP, ZEND_REGSET_FP); + break; + default: + break; + } + + if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE) { + if (ssa_op == ssa->ops + && JIT_G(current_trace)[ZEND_JIT_TRACE_START_REC_SIZE].op == ZEND_JIT_TRACE_INIT_CALL + && (JIT_G(current_trace)[ZEND_JIT_TRACE_START_REC_SIZE].info & ZEND_JIT_TRACE_FAKE_INIT_CALL)) { + ZEND_REGSET_INCL(regset, ZREG_R0); + ZEND_REGSET_INCL(regset, ZREG_R1); + } + } + + /* %r0 is used to check EG(vm_interrupt) */ + if (JIT_G(trigger) == ZEND_JIT_ON_HOT_TRACE) { + if (ssa_op == ssa->ops + && (JIT_G(current_trace)->stop == ZEND_JIT_TRACE_STOP_LOOP || + JIT_G(current_trace)->stop == ZEND_JIT_TRACE_STOP_RECURSIVE_CALL)) { +#if ZTS + ZEND_REGSET_INCL(regset, ZREG_R0); +#else + if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } +#endif + } + } 
else { + uint32_t b = ssa->cfg.map[ssa_op - ssa->ops]; + + if ((ssa->cfg.blocks[b].flags & ZEND_BB_LOOP_HEADER) != 0 + && ssa->cfg.blocks[b].start == ssa_op - ssa->ops) { +#if ZTS + ZEND_REGSET_INCL(regset, ZREG_R0); +#else + if ((sizeof(void*) == 8 && !IS_SIGNED_32BIT(&EG(vm_interrupt)))) { + ZEND_REGSET_INCL(regset, ZREG_R0); + } +#endif + } + } + + return regset; +} + +#if defined(__clang__) +# pragma clang diagnostic pop +#endif + +/* + * Local variables: + * tab-width: 4 + * c-basic-offset: 4 + * indent-tabs-mode: t + * End: + */ diff --git a/ext/opcache/jit/zend_jit_arm64.h b/ext/opcache/jit/zend_jit_arm64.h new file mode 100644 index 0000000000000..703ecb41b55e7 --- /dev/null +++ b/ext/opcache/jit/zend_jit_arm64.h @@ -0,0 +1,325 @@ +/* + +----------------------------------------------------------------------+ + | Zend JIT | + +----------------------------------------------------------------------+ + | Copyright (c) The PHP Group | + +----------------------------------------------------------------------+ + | This source file is subject to version 3.01 of the PHP license, | + | that is bundled with this package in the file LICENSE, and is | + | available through the world-wide-web at the following url: | + | http://www.php.net/license/3_01.txt | + | If you did not receive a copy of the PHP license and are unable to | + | obtain it through the world-wide-web, please send a note to | + | license@php.net so we can mail you a copy immediately. | + +----------------------------------------------------------------------+ + | Authors: Dmitry Stogov | + | Hao Sun | + +----------------------------------------------------------------------+ +*/ + +#ifndef HAVE_JIT_ARM64_H +#define HAVE_JIT_ARM64_H + +// R# and XMM# are register idioms in x86 and we should use X# and V# here. +// However, ZREG_RO and ZREG_XMM0 are also used in zend_jit.c and zend_jit_trace.c. +// TODO: arch-indepedent register names should be used, such as +// GPR (general-purpose register) and FPR (floating-point register). +typedef enum _zend_reg { + ZREG_NONE = -1, + + ZREG_R0, + ZREG_R1, + ZREG_R2, + ZREG_R3, + ZREG_R4, + ZREG_R5, + ZREG_R6, + ZREG_R7, + ZREG_R8, + ZREG_R9, + ZREG_R10, + ZREG_R11, + ZREG_R12, + ZREG_R13, + ZREG_R14, + ZREG_R15, + ZREG_R16, + ZREG_R17, + ZREG_R18, + ZREG_R19, + ZREG_R20, + ZREG_R21, + ZREG_R22, + ZREG_R23, + ZREG_R24, + ZREG_R25, + ZREG_R26, + ZREG_R27, + ZREG_R28, + ZREG_R29, + ZREG_R30, + ZREG_R31, + + ZREG_XMM0, + ZREG_XMM1, + ZREG_XMM2, + ZREG_XMM3, + ZREG_XMM4, + ZREG_XMM5, + ZREG_XMM6, + ZREG_XMM7, + ZREG_XMM8, + ZREG_XMM9, + ZREG_XMM10, + ZREG_XMM11, + ZREG_XMM12, + ZREG_XMM13, + ZREG_XMM14, + ZREG_XMM15, + ZREG_XMM16, + ZREG_XMM17, + ZREG_XMM18, + ZREG_XMM19, + ZREG_XMM20, + ZREG_XMM21, + ZREG_XMM22, + ZREG_XMM23, + ZREG_XMM24, + ZREG_XMM25, + ZREG_XMM26, + ZREG_XMM27, + ZREG_XMM28, + ZREG_XMM29, + ZREG_XMM30, + ZREG_XMM31, + + ZREG_NUM, + + ZREG_THIS, /* used for delayed FETCH_THIS deoptimization */ + + /* pseudo constants used by deoptimizer */ + ZREG_LONG_MIN_MINUS_1, + ZREG_LONG_MIN, + ZREG_LONG_MAX, + ZREG_LONG_MAX_PLUS_1, + ZREG_NULL, + + ZREG_ZVAL_TRY_ADDREF, + ZREG_ZVAL_COPY_R0, +} zend_reg; + +// TODO: like ZREG_R# and ZREG_XMM#, `r` and `xmm` should be renamed as well. 
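+// Buffer for the saved register file: 31 general-purpose registers plus
+// 32 FP/SIMD registers, mirroring the x86 zend_jit_registers_buf that the
+// deoptimizer reads when reconstructing VM state at a side exit (cf. the
+// ZREG_* pseudo constants above).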
+typedef struct _zend_jit_registers_buf {
+    uint32_t r[31];
+    double xmm[32];
+} zend_jit_registers_buf;
+
+#define ZREG_RSP ZREG_R31
+#define ZREG_RLR ZREG_R30
+#define ZREG_RFP ZREG_R29
+#define ZREG_RPR ZREG_R18
+
+# define ZREG_FP ZREG_R27
+# define ZREG_IP ZREG_R28
+# define ZREG_RX ZREG_IP
+# define ZREG_FCARG1x ZREG_R0
+# define ZREG_FCARG2a ZREG_R1
+
+extern const char *zend_reg_name[];
+
+// Register set operation is arch-independent.
+// TODO: tiny change to the x86 definition. Might put it into a separate header.
+typedef uint64_t zend_regset;
+
+#define ZEND_REGSET_EMPTY 0
+
+#define ZEND_REGSET_IS_EMPTY(regset) \
+    (regset == ZEND_REGSET_EMPTY)
+
+#define ZEND_REGSET_IS_SINGLETON(regset) \
+    (regset && !(regset & (regset - 1)))
+
+#define ZEND_REGSET(reg) \
+    (1ull << (reg))
+
+#define ZEND_REGSET_INTERVAL(reg1, reg2) \
+    (((1ull << ((reg2) - (reg1) + 1)) - 1) << (reg1))
+
+#define ZEND_REGSET_IN(regset, reg) \
+    (((regset) & ZEND_REGSET(reg)) != 0)
+
+#define ZEND_REGSET_INCL(regset, reg) \
+    (regset) |= ZEND_REGSET(reg)
+
+#define ZEND_REGSET_EXCL(regset, reg) \
+    (regset) &= ~ZEND_REGSET(reg)
+
+#define ZEND_REGSET_UNION(set1, set2) \
+    ((set1) | (set2))
+
+#define ZEND_REGSET_INTERSECTION(set1, set2) \
+    ((set1) & (set2))
+
+#define ZEND_REGSET_DIFFERENCE(set1, set2) \
+    ((set1) & ~(set2))
+
+# define ZEND_REGSET_FIXED \
+    (ZEND_REGSET(ZREG_RSP) | ZEND_REGSET(ZREG_RLR) | ZEND_REGSET(ZREG_RFP) | \
+    ZEND_REGSET(ZREG_RPR) | ZEND_REGSET(ZREG_FP) | ZEND_REGSET(ZREG_IP))
+# define ZEND_REGSET_GP \
+    ZEND_REGSET_DIFFERENCE(ZEND_REGSET_INTERVAL(ZREG_R0, ZREG_R30), ZEND_REGSET_FIXED)
+# define ZEND_REGSET_FP \
+    ZEND_REGSET_DIFFERENCE(ZEND_REGSET_INTERVAL(ZREG_XMM0, ZREG_XMM31), ZEND_REGSET_FIXED)
+# define ZEND_REGSET_SCRATCH \
+    (ZEND_REGSET_INTERVAL(ZREG_R0, ZREG_R17) | ZEND_REGSET_FP)
+# define ZEND_REGSET_PRESERVED \
+    ZEND_REGSET_INTERVAL(ZREG_R19, ZREG_R26) // TODO: might treat v8-v15 as callee-saved when handling vector instructions.
+
+#ifndef _WIN32
+#define ZEND_REGSET_FIRST(set) ((zend_reg)__builtin_ctzll(set))
+#define ZEND_REGSET_SECOND(set) ((zend_reg)__builtin_ctzll(set ^ (1ull << ZEND_REGSET_FIRST(set))))
+#define ZEND_REGSET_LAST(set) ((zend_reg)(__builtin_clzll(set) ^ 63)) // TODO: Not used. Should remove.
+#else
+#include <intrin.h>
+uint32_t __inline __zend_jit_ctz(uint32_t value) {
+    DWORD trailing_zero = 0;
+    if (_BitScanForward(&trailing_zero, value)) {
+        return trailing_zero;
+    }
+    return 32;
+}
+uint32_t __inline __zend_jit_clz(uint32_t value) {
+    DWORD leading_zero = 0;
+    if (_BitScanReverse(&leading_zero, value)) {
+        return 31 - leading_zero;
+    }
+    return 32;
+}
+#define ZEND_REGSET_FIRST(set) ((zend_reg)__zend_jit_ctz(set))
+#define ZEND_REGSET_LAST(set) ((zend_reg)(__zend_jit_clz(set) ^ 31))
+#endif
+
+#define ZEND_REGSET_FOREACH(set, reg) \
+    do { \
+        zend_regset _tmp = (set); \
+        while (!ZEND_REGSET_IS_EMPTY(_tmp)) { \
+            zend_reg _reg = ZEND_REGSET_FIRST(_tmp); \
+            ZEND_REGSET_EXCL(_tmp, _reg); \
+            reg = _reg; \
+
+#define ZEND_REGSET_FOREACH_END() \
+        } \
+    } while (0)
+
+// Zend address decoding is arch-independent.
+// TODO: duplicate code with x86. Might put it into a separate header.
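+// A zend_jit_addr packs an operand location into a single uintptr_t:
+// the low two bits select the mode (IS_CONST_ZVAL, IS_MEM_ZVAL or IS_REG),
+// bits 2..7 hold the register number, and for IS_MEM_ZVAL the zval offset
+// is stored from bit 8 upwards (IS_REG instead reuses bits 8..10 for the
+// store/load/last-use flags). Illustration only:
+//
+//   zend_jit_addr a = ZEND_ADDR_MEM_ZVAL(ZREG_FP, opline->result.var);
+//   /* Z_MODE(a) == IS_MEM_ZVAL, Z_REG(a) == ZREG_FP,
+//      Z_OFFSET(a) == opline->result.var */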
+typedef uintptr_t zend_jit_addr; + +#define IS_CONST_ZVAL 0 +#define IS_MEM_ZVAL 1 +#define IS_REG 2 + +#define _ZEND_ADDR_MODE_MASK 0x3 +#define _ZEND_ADDR_REG_SHIFT 2 +#define _ZEND_ADDR_REG_MASK 0x3f +#define _ZEND_ADDR_OFFSET_SHIFT 8 +#define _ZEND_ADDR_REG_STORE_BIT 8 +#define _ZEND_ADDR_REG_LOAD_BIT 9 +#define _ZEND_ADDR_REG_LAST_USE_BIT 10 + +#define ZEND_ADDR_CONST_ZVAL(zv) \ + (((zend_jit_addr)(uintptr_t)(zv)) | IS_CONST_ZVAL) +#define ZEND_ADDR_MEM_ZVAL(reg, offset) \ + ((((zend_jit_addr)(uintptr_t)(offset)) << _ZEND_ADDR_OFFSET_SHIFT) | \ + (((zend_jit_addr)(uintptr_t)(reg)) << _ZEND_ADDR_REG_SHIFT) | \ + IS_MEM_ZVAL) +#define ZEND_ADDR_REG(reg) \ + ((((zend_jit_addr)(uintptr_t)(reg)) << _ZEND_ADDR_REG_SHIFT) | \ + IS_REG) + +#define Z_MODE(addr) (((addr) & _ZEND_ADDR_MODE_MASK)) +#define Z_ZV(addr) ((zval*)(addr)) +#define Z_OFFSET(addr) ((uint32_t)((addr)>>_ZEND_ADDR_OFFSET_SHIFT)) +#define Z_REG(addr) ((zend_reg)(((addr)>>_ZEND_ADDR_REG_SHIFT) & _ZEND_ADDR_REG_MASK)) +#define Z_STORE(addr) ((zend_reg)(((addr)>>_ZEND_ADDR_REG_STORE_BIT) & 1)) +#define Z_LOAD(addr) ((zend_reg)(((addr)>>_ZEND_ADDR_REG_LOAD_BIT) & 1)) +#define Z_LAST_USE(addr) ((zend_reg)(((addr)>>_ZEND_ADDR_REG_LAST_USE_BIT) & 1)) + +#define OP_REG_EX(reg, store, load, last_use) \ + ((reg) | \ + ((store) ? (1 << (_ZEND_ADDR_REG_STORE_BIT-_ZEND_ADDR_REG_SHIFT)) : 0) | \ + ((load) ? (1 << (_ZEND_ADDR_REG_LOAD_BIT-_ZEND_ADDR_REG_SHIFT)) : 0) | \ + ((last_use) ? (1 << (_ZEND_ADDR_REG_LAST_USE_BIT-_ZEND_ADDR_REG_SHIFT)) : 0) \ + ) + +#define OP_REG(ssa_op, op) \ + (ra && ssa_op->op >= 0 && ra[ssa_op->op] ? \ + OP_REG_EX(ra[ssa_op->op]->reg, \ + (ra[ssa_op->op]->flags & ZREG_STORE), \ + (ra[ssa_op->op]->flags & ZREG_LOAD), \ + zend_ival_is_last_use(ra[ssa_op->op], ssa_op - ssa->ops) \ + ) : ZREG_NONE) + +static zend_always_inline zend_jit_addr _zend_jit_decode_op(zend_uchar op_type, znode_op op, const zend_op *opline, zend_reg reg) +{ + if (op_type == IS_CONST) { +#if ZEND_USE_ABS_CONST_ADDR + return ZEND_ADDR_CONST_ZVAL(op.zv); +#else + return ZEND_ADDR_CONST_ZVAL(RT_CONSTANT(opline, op)); +#endif + } else { + ZEND_ASSERT(op_type & (IS_CV|IS_TMP_VAR|IS_VAR)); + if (reg != ZREG_NONE) { + return ZEND_ADDR_REG(reg); + } else { + return ZEND_ADDR_MEM_ZVAL(ZREG_FP, op.var); + } + } +} + +#define OP_ADDR(opline, type, op) \ + _zend_jit_decode_op((opline)->type, (opline)->op, opline, ZREG_NONE) + +#define OP1_ADDR() \ + OP_ADDR(opline, op1_type, op1) +#define OP2_ADDR() \ + OP_ADDR(opline, op2_type, op2) +#define RES_ADDR() \ + OP_ADDR(opline, result_type, result) +#define OP1_DATA_ADDR() \ + OP_ADDR(opline + 1, op1_type, op1) + +#define OP_REG_ADDR(opline, type, _op, _ssa_op) \ + _zend_jit_decode_op((opline)->type, (opline)->_op, opline, \ + OP_REG(ssa_op, _ssa_op)) + +#define OP1_REG_ADDR() \ + OP_REG_ADDR(opline, op1_type, op1, op1_use) +#define OP2_REG_ADDR() \ + OP_REG_ADDR(opline, op2_type, op2, op2_use) +#define RES_REG_ADDR() \ + OP_REG_ADDR(opline, result_type, result, result_def) +#define OP1_DATA_REG_ADDR() \ + OP_REG_ADDR(opline + 1, op1_type, op1, op1_use) + +#define OP1_DEF_REG_ADDR() \ + OP_REG_ADDR(opline, op1_type, op1, op1_def) +#define OP2_DEF_REG_ADDR() \ + OP_REG_ADDR(opline, op2_type, op2, op2_def) +#define RES_USE_REG_ADDR() \ + OP_REG_ADDR(opline, result_type, result, result_use) +#define OP1_DATA_DEF_REG_ADDR() \ + OP_REG_ADDR(opline + 1, op1_type, op1, op1_def) + +static zend_always_inline zend_bool zend_jit_same_addr(zend_jit_addr addr1, zend_jit_addr addr2) +{ + if (addr1 == addr2) { + return 1; 
+    } else if (Z_MODE(addr1) == IS_REG && Z_MODE(addr2) == IS_REG) {
+        return Z_REG(addr1) == Z_REG(addr2);
+    }
+    return 0;
+}
+
+#endif /* ZEND_JIT_ARM64_H */
diff --git a/ext/opcache/jit/zend_jit_disasm_arm64.c b/ext/opcache/jit/zend_jit_disasm_arm64.c
new file mode 100644
index 0000000000000..5c4d09b282f8a
--- /dev/null
+++ b/ext/opcache/jit/zend_jit_disasm_arm64.c
@@ -0,0 +1,542 @@
+/*
+   +----------------------------------------------------------------------+
+   | Zend JIT                                                             |
+   +----------------------------------------------------------------------+
+   | Copyright (c) The PHP Group                                          |
+   +----------------------------------------------------------------------+
+   | This source file is subject to version 3.01 of the PHP license,      |
+   | that is bundled with this package in the file LICENSE, and is        |
+   | available through the world-wide-web at the following url:           |
+   | http://www.php.net/license/3_01.txt                                  |
+   | If you did not receive a copy of the PHP license and are unable to   |
+   | obtain it through the world-wide-web, please send a note to          |
+   | license@php.net so we can mail you a copy immediately.               |
+   +----------------------------------------------------------------------+
+   | Authors: Dmitry Stogov                                               |
+   |          Xinchen Hui                                                 |
+   |          Hao Sun                                                     |
+   +----------------------------------------------------------------------+
+*/
+
+#ifdef HAVE_CAPSTONE
+
+#define HAVE_DISASM 1
+
+#include "zend_jit.h"
+#include "zend_sort.h"
+
+static void zend_jit_disasm_add_symbol(const char *name,
+                                       uint64_t addr,
+                                       uint64_t size);
+
+#ifndef _WIN32
+# include "jit/zend_elf.c"
+#endif
+
+#include "zend_sort.h"
+
+#ifndef _GNU_SOURCE
+# define _GNU_SOURCE
+#endif
+
+#ifndef _WIN32
+#include <dlfcn.h>
+#endif
+
+#include <capstone.h>
+
+static csh cs;
+
+struct _sym_node {
+    uint64_t addr;
+    uint64_t end;
+    struct _sym_node *parent;
+    struct _sym_node *child[2];
+    unsigned char info;
+    char name[1];
+};
+
+static void zend_syms_rotateleft(zend_sym_node *p) {
+    zend_sym_node *r = p->child[1];
+    p->child[1] = r->child[0];
+    if (r->child[0]) {
+        r->child[0]->parent = p;
+    }
+    r->parent = p->parent;
+    if (p->parent == NULL) {
+        JIT_G(symbols) = r;
+    } else if (p->parent->child[0] == p) {
+        p->parent->child[0] = r;
+    } else {
+        p->parent->child[1] = r;
+    }
+    r->child[0] = p;
+    p->parent = r;
+}
+
+static void zend_syms_rotateright(zend_sym_node *p) {
+    zend_sym_node *l = p->child[0];
+    p->child[0] = l->child[1];
+    if (l->child[1]) {
+        l->child[1]->parent = p;
+    }
+    l->parent = p->parent;
+    if (p->parent == NULL) {
+        JIT_G(symbols) = l;
+    } else if (p->parent->child[1] == p) {
+        p->parent->child[1] = l;
+    } else {
+        p->parent->child[0] = l;
+    }
+    l->child[1] = p;
+    p->parent = l;
+}
+
+static void zend_jit_disasm_add_symbol(const char *name,
+                                       uint64_t addr,
+                                       uint64_t size)
+{
+    zend_sym_node *sym;
+    size_t len = strlen(name);
+
+    sym = malloc(sizeof(zend_sym_node) + len + 1);
+    if (!sym) {
+        return;
+    }
+    sym->addr = addr;
+    sym->end = (addr + size - 1);
+    memcpy((char*)&sym->name, name, len + 1);
+    sym->parent = sym->child[0] = sym->child[1] = NULL;
+    sym->info = 1;
+    if (JIT_G(symbols)) {
+        zend_sym_node *node = JIT_G(symbols);
+
+        /* insert it into rbtree */
+        do {
+            if (sym->addr > node->addr) {
+                ZEND_ASSERT(sym->addr > (node->end));
+                if (node->child[1]) {
+                    node = node->child[1];
+                } else {
+                    node->child[1] = sym;
+                    sym->parent = node;
+                    break;
+                }
+            } else if (sym->addr < node->addr) {
+                if (node->child[0]) {
+                    node = node->child[0];
+                } else {
+                    node->child[0] = sym;
+                    sym->parent = node;
+                    break;
+                }
+            } else {
+                ZEND_ASSERT(sym->addr ==
node->addr); + if (strcmp(name, node->name) == 0 && sym->end < node->end) { + /* reduce size of the existing symbol */ + node->end = sym->end; + } + free(sym); + return; + } + } while (1); + + /* fix rbtree after instering */ + while (sym && sym != JIT_G(symbols) && sym->parent->info == 1) { + if (sym->parent == sym->parent->parent->child[0]) { + node = sym->parent->parent->child[1]; + if (node && node->info == 1) { + sym->parent->info = 0; + node->info = 0; + sym->parent->parent->info = 1; + sym = sym->parent->parent; + } else { + if (sym == sym->parent->child[1]) { + sym = sym->parent; + zend_syms_rotateleft(sym); + } + sym->parent->info = 0; + sym->parent->parent->info = 1; + zend_syms_rotateright(sym->parent->parent); + } + } else { + node = sym->parent->parent->child[0]; + if (node && node->info == 1) { + sym->parent->info = 0; + node->info = 0; + sym->parent->parent->info = 1; + sym = sym->parent->parent; + } else { + if (sym == sym->parent->child[0]) { + sym = sym->parent; + zend_syms_rotateright(sym); + } + sym->parent->info = 0; + sym->parent->parent->info = 1; + zend_syms_rotateleft(sym->parent->parent); + } + } + } + } else { + JIT_G(symbols) = sym; + } + JIT_G(symbols)->info = 0; +} + +static void zend_jit_disasm_destroy_symbols(zend_sym_node *n) { + if (n) { + if (n->child[0]) { + zend_jit_disasm_destroy_symbols(n->child[0]); + } + if (n->child[1]) { + zend_jit_disasm_destroy_symbols(n->child[1]); + } + free(n); + } +} + +static const char* zend_jit_disasm_find_symbol(uint64_t addr, + int64_t *offset) { + zend_sym_node *node = JIT_G(symbols); + while (node) { + if (addr < node->addr) { + node = node->child[0]; + } else if (addr > node->end) { + node = node->child[1]; + } else { + *offset = addr - node->addr; + return node->name; + } + } + return NULL; +} + +static int zend_jit_cmp_labels(Bucket *b1, Bucket *b2) +{ + return ((b1->h > b2->h) > 0) ? 1 : -1; +} + +static uint64_t zend_jit_disasm_branch_target(const cs_insn *insn) +{ + unsigned int i; + + if (cs_insn_group(cs, insn, ARM64_GRP_JUMP)) { + for (i = 0; i < insn->detail->arm64.op_count; i++) { + if (insn->detail->arm64.operands[i].type == ARM64_OP_IMM) + return insn->detail->arm64.operands[i].imm; + } + } + + return 0; +} + +static int zend_jit_disasm(const char *name, + const char *filename, + const zend_op_array *op_array, + zend_cfg *cfg, + const void *start, + size_t size) +{ + const void *end = (void *)((char *)start + size); + zval zv, *z; + zend_long n, m; + HashTable labels; + uint64_t addr; + int b, prefixlen; + cs_insn *insn; + size_t count, i; + const char *sym; + int64_t offset; + + if (name) { + fprintf(stderr, "%s: ; (%s)\n", name, filename ? 
filename : "unknown"); + } + + zend_hash_init(&labels, 8, NULL, NULL, 0); + if (op_array && cfg) { + ZVAL_FALSE(&zv); + for (b = 0; b < cfg->blocks_count; b++) { + if (cfg->blocks[b].flags & (ZEND_BB_ENTRY|ZEND_BB_RECV_ENTRY)) { + addr = (uint64_t)(uintptr_t)op_array->opcodes[cfg->blocks[b].start].handler; + if (addr >= (uint64_t)(uintptr_t)start && addr < (uint64_t)(uintptr_t)end) { + zend_hash_index_add(&labels, addr, &zv); + } + } + } + } + count = cs_disasm(cs, start, (uint8_t*)end - (uint8_t*)start, (uintptr_t)start, 0, &insn); + + ZVAL_TRUE(&zv); + for (i = 0; i < count; i++) { + if ((addr = zend_jit_disasm_branch_target(&(insn[i])))) { + if (addr >= (uint64_t)(uintptr_t)start && addr < (uint64_t)(uintptr_t)end) { + zend_hash_index_add(&labels, addr, &zv); + } + } + } + + zend_hash_sort(&labels, zend_jit_cmp_labels, 0); + + /* label numbering */ + n = 0; m = 0; + ZEND_HASH_FOREACH_VAL(&labels, z) { + if (Z_TYPE_P(z) == IS_FALSE) { + m--; + ZVAL_LONG(z, m); + } else { + n++; + ZVAL_LONG(z, n); + } + } ZEND_HASH_FOREACH_END(); + + for (i = 0; i < count; i++) { + z = zend_hash_index_find(&labels, insn[i].address); + if (z) { + if (Z_LVAL_P(z) < 0) { + fprintf(stderr, ".ENTRY" ZEND_LONG_FMT ":\n", -Z_LVAL_P(z)); + } else { + fprintf(stderr, ".L" ZEND_LONG_FMT ":\n", Z_LVAL_P(z)); + } + } + + fprintf(stderr, " "ZEND_XLONG_FMT":\t%s ", + insn[i].address, insn[i].mnemonic); + + /* Try to replace the target address with a symbol */ + if ((addr = zend_jit_disasm_branch_target(&(insn[i])))) { + /* Immediate value prefixed with '#' in operand string */ + prefixlen = strchrnul(insn[i].op_str, '#') - insn[i].op_str; + if (addr >= (uint64_t)(uintptr_t)start && addr < (uint64_t)(uintptr_t)end) { + if ((z = zend_hash_index_find(&labels, addr))) { + fprintf(stderr, "%.*s", prefixlen, insn[i].op_str); + if (Z_LVAL_P(z) < 0) { + fprintf(stderr, ".ENTRY" ZEND_LONG_FMT "\n", -Z_LVAL_P(z)); + } else { + fprintf(stderr, ".L" ZEND_LONG_FMT "\n", Z_LVAL_P(z)); + } + continue; + } + } else if ((sym = zend_jit_disasm_find_symbol(addr, &offset))) { + fprintf(stderr, "%.*s%s\n", prefixlen, insn[i].op_str, sym); + continue; + } + } + + fprintf(stderr, "%s\n", insn[i].op_str); + } + fprintf(stderr, "\n"); + + cs_free(insn, count); + zend_hash_destroy(&labels); + + return 1; +} + +static int zend_jit_disasm_init(void) +{ + if (cs_open(CS_ARCH_ARM64, CS_MODE_ARM, &cs) != CS_ERR_OK) + return 0; + + cs_option(cs, CS_OPT_DETAIL, CS_OPT_ON); + cs_option(cs, CS_OPT_SYNTAX, CS_OPT_SYNTAX_ATT); + +#ifndef ZTS +#define REGISTER_EG(n) \ + zend_jit_disasm_add_symbol("EG("#n")", \ + (uint64_t)(uintptr_t)&executor_globals.n, sizeof(executor_globals.n)) + REGISTER_EG(uninitialized_zval); + REGISTER_EG(exception); + REGISTER_EG(vm_interrupt); + REGISTER_EG(exception_op); + REGISTER_EG(timed_out); + REGISTER_EG(current_execute_data); + REGISTER_EG(vm_stack_top); + REGISTER_EG(vm_stack_end); + REGISTER_EG(symbol_table); + REGISTER_EG(jit_trace_num); +#undef REGISTER_EG +#endif + + /* Register JIT helper functions */ +#define REGISTER_HELPER(n) \ + zend_jit_disasm_add_symbol(#n, \ + (uint64_t)(uintptr_t)n, sizeof(void*)); + REGISTER_HELPER(memcmp); + REGISTER_HELPER(zend_jit_init_func_run_time_cache_helper); + REGISTER_HELPER(zend_jit_find_func_helper); + REGISTER_HELPER(zend_jit_find_ns_func_helper); + REGISTER_HELPER(zend_jit_find_method_helper); + REGISTER_HELPER(zend_jit_find_method_tmp_helper); + REGISTER_HELPER(zend_jit_push_static_metod_call_frame); + REGISTER_HELPER(zend_jit_push_static_metod_call_frame_tmp); + 
REGISTER_HELPER(zend_jit_invalid_method_call); + REGISTER_HELPER(zend_jit_invalid_method_call_tmp); + REGISTER_HELPER(zend_jit_unref_helper); + REGISTER_HELPER(zend_jit_extend_stack_helper); + REGISTER_HELPER(zend_jit_int_extend_stack_helper); + REGISTER_HELPER(zend_jit_leave_nested_func_helper); + REGISTER_HELPER(zend_jit_leave_top_func_helper); + REGISTER_HELPER(zend_jit_leave_func_helper); + REGISTER_HELPER(zend_jit_symtable_find); + REGISTER_HELPER(zend_jit_hash_index_lookup_rw); + REGISTER_HELPER(zend_jit_hash_index_lookup_w); + REGISTER_HELPER(zend_jit_hash_lookup_rw); + REGISTER_HELPER(zend_jit_hash_lookup_w); + REGISTER_HELPER(zend_jit_symtable_lookup_rw); + REGISTER_HELPER(zend_jit_symtable_lookup_w); + REGISTER_HELPER(zend_jit_undefined_op_helper); + REGISTER_HELPER(zend_jit_fetch_dim_r_helper); + REGISTER_HELPER(zend_jit_fetch_dim_is_helper); + REGISTER_HELPER(zend_jit_fetch_dim_isset_helper); + REGISTER_HELPER(zend_jit_fetch_dim_str_offset_r_helper); + REGISTER_HELPER(zend_jit_fetch_dim_str_r_helper); + REGISTER_HELPER(zend_jit_fetch_dim_str_is_helper); + REGISTER_HELPER(zend_jit_fetch_dim_obj_r_helper); + REGISTER_HELPER(zend_jit_fetch_dim_obj_is_helper); + REGISTER_HELPER(zend_jit_fetch_dim_rw_helper); + REGISTER_HELPER(zend_jit_fetch_dim_w_helper); + REGISTER_HELPER(zend_jit_fetch_dim_obj_rw_helper); + REGISTER_HELPER(zend_jit_fetch_dim_obj_w_helper); +// REGISTER_HELPER(zend_jit_fetch_dim_obj_unset_helper); + REGISTER_HELPER(zend_jit_assign_dim_helper); + REGISTER_HELPER(zend_jit_assign_dim_op_helper); + REGISTER_HELPER(zend_jit_fast_assign_concat_helper); + REGISTER_HELPER(zend_jit_fast_concat_helper); + REGISTER_HELPER(zend_jit_isset_dim_helper); + REGISTER_HELPER(zend_jit_free_call_frame); + REGISTER_HELPER(zend_jit_fetch_global_helper); + REGISTER_HELPER(zend_jit_verify_arg_slow); + REGISTER_HELPER(zend_jit_verify_return_slow); + REGISTER_HELPER(zend_jit_fetch_obj_r_slow); + REGISTER_HELPER(zend_jit_fetch_obj_r_dynamic); + REGISTER_HELPER(zend_jit_fetch_obj_is_slow); + REGISTER_HELPER(zend_jit_fetch_obj_is_dynamic); + REGISTER_HELPER(zend_jit_fetch_obj_w_slow); + REGISTER_HELPER(zend_jit_check_array_promotion); + REGISTER_HELPER(zend_jit_create_typed_ref); + REGISTER_HELPER(zend_jit_extract_helper); + REGISTER_HELPER(zend_jit_vm_stack_free_args_helper); + REGISTER_HELPER(zend_jit_copy_extra_args_helper); + REGISTER_HELPER(zend_jit_deprecated_helper); + REGISTER_HELPER(zend_jit_assign_const_to_typed_ref); + REGISTER_HELPER(zend_jit_assign_tmp_to_typed_ref); + REGISTER_HELPER(zend_jit_assign_var_to_typed_ref); + REGISTER_HELPER(zend_jit_assign_cv_to_typed_ref); + REGISTER_HELPER(zend_jit_pre_inc_typed_ref); + REGISTER_HELPER(zend_jit_pre_dec_typed_ref); + REGISTER_HELPER(zend_jit_post_inc_typed_ref); + REGISTER_HELPER(zend_jit_post_dec_typed_ref); + REGISTER_HELPER(zend_jit_assign_op_to_typed_ref); + REGISTER_HELPER(zend_jit_only_vars_by_reference); + REGISTER_HELPER(zend_jit_invalid_array_access); + REGISTER_HELPER(zend_jit_invalid_property_read); + REGISTER_HELPER(zend_jit_invalid_property_write); + REGISTER_HELPER(zend_jit_invalid_property_incdec); + REGISTER_HELPER(zend_jit_invalid_property_assign); + REGISTER_HELPER(zend_jit_invalid_property_assign_op); + REGISTER_HELPER(zend_jit_prepare_assign_dim_ref); + REGISTER_HELPER(zend_jit_pre_inc); + REGISTER_HELPER(zend_jit_pre_dec); + REGISTER_HELPER(zend_runtime_jit); + REGISTER_HELPER(zend_jit_hot_func); + REGISTER_HELPER(zend_jit_check_constant); + REGISTER_HELPER(zend_jit_get_constant); + 
REGISTER_HELPER(zend_jit_array_free); + REGISTER_HELPER(zend_jit_zval_array_dup); + REGISTER_HELPER(zend_jit_add_arrays_helper); + REGISTER_HELPER(zend_jit_assign_obj_helper); + REGISTER_HELPER(zend_jit_assign_obj_op_helper); + REGISTER_HELPER(zend_jit_assign_to_typed_prop); + REGISTER_HELPER(zend_jit_assign_op_to_typed_prop); + REGISTER_HELPER(zend_jit_inc_typed_prop); + REGISTER_HELPER(zend_jit_dec_typed_prop); + REGISTER_HELPER(zend_jit_pre_inc_typed_prop); + REGISTER_HELPER(zend_jit_pre_dec_typed_prop); + REGISTER_HELPER(zend_jit_post_inc_typed_prop); + REGISTER_HELPER(zend_jit_post_dec_typed_prop); + REGISTER_HELPER(zend_jit_pre_inc_obj_helper); + REGISTER_HELPER(zend_jit_pre_dec_obj_helper); + REGISTER_HELPER(zend_jit_post_inc_obj_helper); + REGISTER_HELPER(zend_jit_post_dec_obj_helper); +#if (PHP_VERSION_ID <= 80100) && (SIZEOF_SIZE_T == 4) + REGISTER_HELPER(zval_jit_update_constant_ex); +#endif + REGISTER_HELPER(zend_jit_free_trampoline_helper); +#undef REGISTER_HELPER + +#ifndef _WIN32 + zend_elf_load_symbols(); +#endif + + if (zend_vm_kind() == ZEND_VM_KIND_HYBRID) { + zend_op opline; + + memset(&opline, 0, sizeof(opline)); + + opline.opcode = ZEND_DO_UCALL; + opline.result_type = IS_UNUSED; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_DO_UCALL_SPEC_RETVAL_UNUSED_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_DO_UCALL; + opline.result_type = IS_VAR; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_DO_UCALL_SPEC_RETVAL_USED_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_DO_FCALL_BY_NAME; + opline.result_type = IS_UNUSED; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_DO_FCALL_BY_NAME_SPEC_RETVAL_UNUSED_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_DO_FCALL_BY_NAME; + opline.result_type = IS_VAR; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_DO_FCALL_BY_NAME_SPEC_RETVAL_USED_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_DO_FCALL; + opline.result_type = IS_UNUSED; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_DO_FCALL_SPEC_RETVAL_UNUSED_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_DO_FCALL; + opline.result_type = IS_VAR; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_DO_FCALL_SPEC_RETVAL_USED_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_RETURN; + opline.op1_type = IS_CONST; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_RETURN_SPEC_CONST_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_RETURN; + opline.op1_type = IS_TMP_VAR; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_RETURN_SPEC_TMP_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_RETURN; + opline.op1_type = IS_VAR; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_RETURN_SPEC_VAR_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + opline.opcode = ZEND_RETURN; + opline.op1_type = IS_CV; + zend_vm_set_opcode_handler(&opline); + zend_jit_disasm_add_symbol("ZEND_RETURN_SPEC_CV_LABEL", (uint64_t)(uintptr_t)opline.handler, sizeof(void*)); + + zend_jit_disasm_add_symbol("ZEND_HYBRID_HALT_LABEL", (uint64_t)(uintptr_t)zend_jit_halt_op->handler, sizeof(void*)); + } + + return 1; 
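+
+	/*
+	 * Note: everything registered above (the EG() fields, the C helper
+	 * functions, the ELF symbols loaded on non-Windows builds, and the
+	 * hybrid-VM handler labels) ends up in the red-black symbol tree, so
+	 * zend_jit_disasm() can print branch targets in the generated AArch64
+	 * code as symbolic names instead of raw addresses.
+	 */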
+} + +static void zend_jit_disasm_shutdown(void) +{ + if (JIT_G(symbols)) { + zend_jit_disasm_destroy_symbols(JIT_G(symbols)); + JIT_G(symbols) = NULL; + } + + cs_close(&cs); +} + +#endif /* HAVE_CAPSTONE */ diff --git a/ext/opcache/jit/zend_jit_gdb.c b/ext/opcache/jit/zend_jit_gdb.c index 0320776cd66c7..566a4e971b095 100644 --- a/ext/opcache/jit/zend_jit_gdb.c +++ b/ext/opcache/jit/zend_jit_gdb.c @@ -20,7 +20,12 @@ +----------------------------------------------------------------------+ */ + +#if defined(__x86_64__) || defined(i386) #define HAVE_GDB +#else +#warning Missing GDB JIT support on this platform +#endif #ifdef HAVE_GDB diff --git a/ext/opcache/jit/zend_jit_internal.h b/ext/opcache/jit/zend_jit_internal.h index 906262773f43d..430110bcbb59b 100644 --- a/ext/opcache/jit/zend_jit_internal.h +++ b/ext/opcache/jit/zend_jit_internal.h @@ -526,4 +526,17 @@ static zend_always_inline bool zend_jit_may_be_polymorphic_call(const zend_op *o } } +/* Instruction cache flush */ +#ifndef JIT_CACHE_FLUSH +# if defined (__aarch64__) +# if ((defined(__GNUC__) && ZEND_GCC_VERSION >= 4003) || __has_builtin(__builtin___clear_cache)) +# define JIT_CACHE_FLUSH(from, to) __builtin___clear_cache((char*)(from), (char*)(to)) +# else +# error "Missing builtin to flush instruction cache for AArch64" +# endif +# else /* Not required to implement on archs with unified caches */ +# define JIT_CACHE_FLUSH(from, to) +# endif +#endif /* !JIT_CACHE_FLUSH */ + #endif /* ZEND_JIT_INTERNAL_H */ diff --git a/ext/opcache/jit/zend_jit_perf_dump.c b/ext/opcache/jit/zend_jit_perf_dump.c index 8555be2b80ea4..e76f27e94d35f 100644 --- a/ext/opcache/jit/zend_jit_perf_dump.c +++ b/ext/opcache/jit/zend_jit_perf_dump.c @@ -22,6 +22,9 @@ #include #include #include +#include +#include +#include #if defined(__linux__) #include diff --git a/ext/opcache/jit/zend_jit_vm_helpers.c b/ext/opcache/jit/zend_jit_vm_helpers.c index 9a7b205340f02..2b8b5097da23e 100644 --- a/ext/opcache/jit/zend_jit_vm_helpers.c +++ b/ext/opcache/jit/zend_jit_vm_helpers.c @@ -28,7 +28,12 @@ #include "Optimizer/zend_func_info.h" #include "Optimizer/zend_call_graph.h" #include "zend_jit.h" +#if defined(__x86_64__) || defined(i386) #include "zend_jit_x86.h" +#elif defined(__aarch64__) +#include "zend_jit_arm64.h" +#endif + #include "zend_jit_internal.h" #ifdef HAVE_GCC_GLOBAL_REGS @@ -36,9 +41,12 @@ # if defined(__x86_64__) register zend_execute_data* volatile execute_data __asm__("%r14"); register const zend_op* volatile opline __asm__("%r15"); -# else +# elif defined(i386) register zend_execute_data* volatile execute_data __asm__("%esi"); register const zend_op* volatile opline __asm__("%edi"); +# elif defined(__aarch64__) +register zend_execute_data* volatile execute_data __asm__("x27"); +register const zend_op* volatile opline __asm__("x28"); # endif # pragma GCC diagnostic warning "-Wvolatile-register-var" #endif diff --git a/ext/opcache/tests/jit/arm64/add_001.phpt b/ext/opcache/tests/jit/arm64/add_001.phpt new file mode 100644 index 0000000000000..dc89e6d0216e2 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/add_001.phpt @@ -0,0 +1,20 @@ +--TEST-- +JIT ADD: 001 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +int(2) diff --git a/ext/opcache/tests/jit/arm64/add_002.phpt b/ext/opcache/tests/jit/arm64/add_002.phpt new file mode 100644 index 0000000000000..b575de26e895a --- /dev/null +++ b/ext/opcache/tests/jit/arm64/add_002.phpt @@ -0,0 
+1,20 @@ +--TEST-- +JIT ADD: 002 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +int(4097) diff --git a/ext/opcache/tests/jit/arm64/add_003.phpt b/ext/opcache/tests/jit/arm64/add_003.phpt new file mode 100644 index 0000000000000..102d07186992c --- /dev/null +++ b/ext/opcache/tests/jit/arm64/add_003.phpt @@ -0,0 +1,20 @@ +--TEST-- +JIT ADD: 003 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +float(9.223372036854776E+18) diff --git a/ext/opcache/tests/jit/arm64/add_004.phpt b/ext/opcache/tests/jit/arm64/add_004.phpt new file mode 100644 index 0000000000000..97daf4af7593f --- /dev/null +++ b/ext/opcache/tests/jit/arm64/add_004.phpt @@ -0,0 +1,20 @@ +--TEST-- +JIT ADD: 004 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +float(9.223372036854776E+18) diff --git a/ext/opcache/tests/jit/arm64/add_005.phpt b/ext/opcache/tests/jit/arm64/add_005.phpt new file mode 100644 index 0000000000000..14bc03e7a5e52 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/add_005.phpt @@ -0,0 +1,24 @@ +--TEST-- +JIT ADD: 005 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECTF-- +Fatal error: Uncaught TypeError: Unsupported operand types: string + int in %s:%d +Stack trace: +#0 %s(%d): foo('hello') +#1 {main} + thrown in %s on line %d diff --git a/ext/opcache/tests/jit/arm64/hot_func_001.phpt b/ext/opcache/tests/jit/arm64/hot_func_001.phpt new file mode 100644 index 0000000000000..306b8f3c684b3 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/hot_func_001.phpt @@ -0,0 +1,24 @@ +--TEST-- +JIT HOT_FUNC: 001 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +opcache.jit=1255 +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" +string(5) "hello" +string(5) "hello" diff --git a/ext/opcache/tests/jit/arm64/hot_func_002.phpt b/ext/opcache/tests/jit/arm64/hot_func_002.phpt new file mode 100644 index 0000000000000..c3e92b9d53c8a --- /dev/null +++ b/ext/opcache/tests/jit/arm64/hot_func_002.phpt @@ -0,0 +1,25 @@ +--TEST-- +JIT HOT_FUNC: 002 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +opcache.jit=1255 +opcache.jit_hot_func=2 +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" +string(5) "hello" +string(5) "hello" diff --git a/ext/opcache/tests/jit/arm64/icall_001.phpt b/ext/opcache/tests/jit/arm64/icall_001.phpt new file mode 100644 index 0000000000000..c0ea68a51f588 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/icall_001.phpt @@ -0,0 +1,24 @@ +--TEST-- +JIT ICALL: 001 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=1M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +bool(true) +int(0) +int(42) +int(-42) +float(0) +float(2) +string(5) "hello" +array(0) { +} diff --git a/ext/opcache/tests/jit/arm64/recv_001.phpt b/ext/opcache/tests/jit/arm64/recv_001.phpt new file mode 100644 index 0000000000000..7852101cf1223 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/recv_001.phpt @@ -0,0 
+1,26 @@ +--TEST-- +JIT RECV: 001 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +int(1) +float(1) +string(5) "hello" +array(0) { +} diff --git a/ext/opcache/tests/jit/arm64/ret_001.phpt b/ext/opcache/tests/jit/arm64/ret_001.phpt new file mode 100644 index 0000000000000..a750d5b2e98e3 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ret_001.phpt @@ -0,0 +1,20 @@ +--TEST-- +JIT RET: 001 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +int(1) diff --git a/ext/opcache/tests/jit/arm64/ret_002.phpt b/ext/opcache/tests/jit/arm64/ret_002.phpt new file mode 100644 index 0000000000000..4ae3efd7332b9 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ret_002.phpt @@ -0,0 +1,20 @@ +--TEST-- +JIT RET: 002 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +float(1) diff --git a/ext/opcache/tests/jit/arm64/ret_003.phpt b/ext/opcache/tests/jit/arm64/ret_003.phpt new file mode 100644 index 0000000000000..12bcaa9b76f59 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ret_003.phpt @@ -0,0 +1,20 @@ +--TEST-- +JIT RET: 003 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" diff --git a/ext/opcache/tests/jit/arm64/skipif.inc b/ext/opcache/tests/jit/arm64/skipif.inc new file mode 100644 index 0000000000000..c5a81810391b7 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/skipif.inc @@ -0,0 +1,3 @@ + diff --git a/ext/opcache/tests/jit/arm64/ucall_001.phpt b/ext/opcache/tests/jit/arm64/ucall_001.phpt new file mode 100644 index 0000000000000..c3fea8c9dafce --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ucall_001.phpt @@ -0,0 +1,19 @@ +--TEST-- +JIT UCALL: 001 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" diff --git a/ext/opcache/tests/jit/arm64/ucall_002.phpt b/ext/opcache/tests/jit/arm64/ucall_002.phpt new file mode 100644 index 0000000000000..519454afe1299 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ucall_002.phpt @@ -0,0 +1,21 @@ +--TEST-- +JIT UCALL: 002 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" +string(6) "world!" 
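
A note on the JIT_CACHE_FLUSH macro added to zend_jit_internal.h above: unlike x86, AArch64 does not keep the data and instruction caches coherent, so code that has just been written into the JIT buffer must be explicitly flushed before it is executed. On AArch64 the macro maps to __builtin___clear_cache(), it expands to nothing on the other architectures covered here (the patch notes this is fine for unified caches), and it stops the build with #error when no suitable builtin is available. The sketch below is not part of the patch; it only illustrates the emit/flush/call pattern the macro exists for, with made-up names, an AArch64-only hard-coded stub, and a deliberately simplified RWX mapping (a real JIT would map read-write and mprotect() to read-execute).

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

typedef int (*jit_fn)(void);

int main(void)
{
    /* mov w0, #42 ; ret  (two AArch64 instructions) */
    static const uint32_t code[] = { 0x52800540, 0xd65f03c0 };

    /* RWX mapping keeps the sketch short; may be refused on hardened systems. */
    void *buf = mmap(NULL, sizeof(code), PROT_READ | PROT_WRITE | PROT_EXEC,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (buf == MAP_FAILED) {
        return 1;
    }
    memcpy(buf, code, sizeof(code));

    /* The step JIT_CACHE_FLUSH(from, to) performs on AArch64: without it the
     * core may fetch stale instructions for this freshly written range. */
    __builtin___clear_cache((char *)buf, (char *)buf + sizeof(code));

    printf("%d\n", ((jit_fn)buf)());   /* prints 42 */
    return 0;
}

This is also why the #error branch exists: silently skipping the flush would produce JIT code that fails only intermittently, whenever stale instructions happen to survive in the instruction cache.
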
diff --git a/ext/opcache/tests/jit/arm64/ucall_003.phpt b/ext/opcache/tests/jit/arm64/ucall_003.phpt new file mode 100644 index 0000000000000..f02b91f72275b --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ucall_003.phpt @@ -0,0 +1,22 @@ +--TEST-- +JIT UCALL: 003 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" diff --git a/ext/opcache/tests/jit/arm64/ucall_004.phpt b/ext/opcache/tests/jit/arm64/ucall_004.phpt new file mode 100644 index 0000000000000..6cbb17ed26434 --- /dev/null +++ b/ext/opcache/tests/jit/arm64/ucall_004.phpt @@ -0,0 +1,23 @@ +--TEST-- +JIT UCALL: 004 +--INI-- +opcache.enable=1 +opcache.enable_cli=1 +opcache.file_update_protection=0 +opcache.jit_buffer_size=32M +;opcache.jit_debug=257 +--SKIPIF-- + +--FILE-- + +--EXPECT-- +string(5) "hello" +string(5) "hello" +string(5) "hello"
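
One last note, on the operand addressing that zend_jit_arm64.h introduces near the top of this patch: a zend_jit_addr packs an operand's location into a single uintptr_t. The low two bits select the mode (IS_CONST_ZVAL stores a zval pointer directly, IS_MEM_ZVAL means "zval at base register plus offset", IS_REG means a CPU register); bits 2-7 hold the register id; a memory operand's offset is stored from bit 8 upward; and register operands reuse bits 8-10 for the store/load/last-use flags read by Z_STORE(), Z_LOAD() and Z_LAST_USE(). The self-contained sketch below mirrors those constants to show the round trip. Every name in it is local to the example, and using x27 as the base register is only meant to echo the execute_data register reserved in zend_jit_vm_helpers.c above.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef uintptr_t jit_addr;

/* Constants mirror zend_jit_arm64.h; names are local to this example. */
#define MODE_MASK      0x3        /* 0 = const zval, 1 = mem zval, 2 = reg */
#define REG_SHIFT      2
#define REG_MASK       0x3f
#define OFFSET_SHIFT   8

#define MEM_ZVAL(reg, offset) \
    ((((jit_addr)(offset)) << OFFSET_SHIFT) | \
     (((jit_addr)(reg)) << REG_SHIFT) | 1 /* mem zval mode */)

#define ADDR_MODE(a)    ((a) & MODE_MASK)
#define ADDR_REG(a)     (((a) >> REG_SHIFT) & REG_MASK)
#define ADDR_OFFSET(a)  ((uint32_t)((a) >> OFFSET_SHIFT))

int main(void)
{
    /* "the zval 0x50 bytes above the VM frame pointer", with x27 (the
     * register this patch reserves for execute_data) as the base. */
    jit_addr a = MEM_ZVAL(27, 0x50);

    assert(ADDR_MODE(a) == 1);      /* mem zval */
    assert(ADDR_REG(a) == 27);
    assert(ADDR_OFFSET(a) == 0x50);
    printf("mode=%u reg=%u offset=0x%x\n",
           (unsigned)ADDR_MODE(a), (unsigned)ADDR_REG(a), ADDR_OFFSET(a));
    return 0;
}

Packing everything into one integer is what lets zend_jit_same_addr() compare two operand locations with a plain equality test before falling back to the register comparison.
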