Diffstat (limited to 'sljit/sljitNativeX86_64.c')
-rw-r--r--  sljit/sljitNativeX86_64.c  256
1 file changed, 114 insertions, 142 deletions
diff --git a/sljit/sljitNativeX86_64.c b/sljit/sljitNativeX86_64.c
index ac0fca2..58d460a 100644
--- a/sljit/sljitNativeX86_64.c
+++ b/sljit/sljitNativeX86_64.c
@@ -87,118 +87,94 @@ static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si
return code_ptr;
}
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler, sljit_si args, sljit_si scratches, sljit_si saveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
+ sljit_si args, sljit_si scratches, sljit_si saveds,
+ sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
{
- sljit_si size, pushed_size;
+ sljit_si i, tmp, size, allocated_size;
sljit_ub *inst;
CHECK_ERROR();
- check_sljit_emit_enter(compiler, args, scratches, saveds, local_size);
+ check_sljit_emit_enter(compiler, args, scratches, saveds, fscratches, fsaveds, local_size);
compiler->scratches = scratches;
compiler->saveds = saveds;
+ compiler->fscratches = fscratches;
+ compiler->fsaveds = fsaveds;
compiler->flags_saved = 0;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
compiler->logical_local_size = local_size;
#endif
- size = saveds;
/* Including the return address saved by the call instruction. */
- pushed_size = (saveds + 1) * sizeof(sljit_sw);
-#ifndef _WIN64
- if (saveds >= 2)
- size += saveds - 1;
-#else
- if (saveds >= 4)
- size += saveds - 3;
- if (scratches >= 5) {
- size += (5 - 4) * 2;
- pushed_size += sizeof(sljit_sw);
- }
-#endif
- size += args * 3;
- if (size > 0) {
+ allocated_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1);
+
+ tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
+ for (i = SLJIT_S0; i >= tmp; i--) {
+ size = reg_map[i] >= 8 ? 2 : 1;
inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
FAIL_IF(!inst);
-
INC_SIZE(size);
- if (saveds >= 5) {
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_EREG2] >= 8, saved_ereg2_is_hireg);
- *inst++ = REX_B;
- PUSH_REG(reg_lmap[SLJIT_SAVED_EREG2]);
- }
- if (saveds >= 4) {
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_EREG1] >= 8, saved_ereg1_is_hireg);
- *inst++ = REX_B;
- PUSH_REG(reg_lmap[SLJIT_SAVED_EREG1]);
- }
- if (saveds >= 3) {
-#ifndef _WIN64
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_REG3] >= 8, saved_reg3_is_hireg);
- *inst++ = REX_B;
-#else
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_REG3] < 8, saved_reg3_is_loreg);
-#endif
- PUSH_REG(reg_lmap[SLJIT_SAVED_REG3]);
- }
- if (saveds >= 2) {
-#ifndef _WIN64
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_REG2] >= 8, saved_reg2_is_hireg);
+ if (reg_map[i] >= 8)
*inst++ = REX_B;
-#else
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_REG2] < 8, saved_reg2_is_loreg);
-#endif
- PUSH_REG(reg_lmap[SLJIT_SAVED_REG2]);
- }
- if (saveds >= 1) {
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SAVED_REG1] < 8, saved_reg1_is_loreg);
- PUSH_REG(reg_lmap[SLJIT_SAVED_REG1]);
- }
-#ifdef _WIN64
- if (scratches >= 5) {
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SCRATCH_EREG2] >= 8, temporary_ereg2_is_hireg);
+ PUSH_REG(reg_lmap[i]);
+ }
+
+ for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
+ size = reg_map[i] >= 8 ? 2 : 1;
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+ INC_SIZE(size);
+ if (reg_map[i] >= 8)
*inst++ = REX_B;
- PUSH_REG(reg_lmap[SLJIT_SCRATCH_EREG2]);
- }
-#endif
+ PUSH_REG(reg_lmap[i]);
+ }
+
+ if (args > 0) {
+ size = args * 3;
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+
+ INC_SIZE(size);
#ifndef _WIN64
if (args > 0) {
*inst++ = REX_W;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_SAVED_REG1] << 3) | 0x7 /* rdi */;
+ *inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | 0x7 /* rdi */;
}
if (args > 1) {
*inst++ = REX_W | REX_R;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_lmap[SLJIT_SAVED_REG2] << 3) | 0x6 /* rsi */;
+ *inst++ = MOD_REG | (reg_lmap[SLJIT_S1] << 3) | 0x6 /* rsi */;
}
if (args > 2) {
*inst++ = REX_W | REX_R;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_lmap[SLJIT_SAVED_REG3] << 3) | 0x2 /* rdx */;
+ *inst++ = MOD_REG | (reg_lmap[SLJIT_S2] << 3) | 0x2 /* rdx */;
}
#else
if (args > 0) {
*inst++ = REX_W;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_SAVED_REG1] << 3) | 0x1 /* rcx */;
+ *inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | 0x1 /* rcx */;
}
if (args > 1) {
*inst++ = REX_W;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_SAVED_REG2] << 3) | 0x2 /* rdx */;
+ *inst++ = MOD_REG | (reg_map[SLJIT_S1] << 3) | 0x2 /* rdx */;
}
if (args > 2) {
*inst++ = REX_W | REX_B;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (reg_map[SLJIT_SAVED_REG3] << 3) | 0x0 /* r8 */;
+ *inst++ = MOD_REG | (reg_map[SLJIT_S2] << 3) | 0x0 /* r8 */;
}
#endif
}
- local_size = ((local_size + FIXED_LOCALS_OFFSET + pushed_size + 16 - 1) & ~(16 - 1)) - pushed_size;
+ local_size = ((local_size + FIXED_LOCALS_OFFSET + allocated_size + 16 - 1) & ~(16 - 1)) - allocated_size;
compiler->local_size = local_size;
+
#ifdef _WIN64
if (local_size > 1024) {
/* Allocate stack for the callback, which grows the stack. */
@@ -208,9 +184,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
*inst++ = REX_W;
*inst++ = GROUP_BINARY_83;
*inst++ = MOD_REG | SUB | 4;
- /* Pushed size must be divisible by 8. */
- SLJIT_ASSERT(!(pushed_size & 0x7));
- if (pushed_size & 0x8) {
+ /* Allocated size for registers must be divisible by 8. */
+ SLJIT_ASSERT(!(allocated_size & 0x7));
+ /* Aligned to 16 byte. */
+ if (allocated_size & 0x8) {
*inst++ = 5 * sizeof(sljit_sw);
local_size -= 5 * sizeof(sljit_sw);
} else {
@@ -218,10 +195,10 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
local_size -= 4 * sizeof(sljit_sw);
}
/* Second instruction */
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SCRATCH_REG1] < 8, temporary_reg1_is_loreg);
+ SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R0] < 8, temporary_reg1_is_loreg);
*inst++ = REX_W;
*inst++ = MOV_rm_i32;
- *inst++ = MOD_REG | reg_lmap[SLJIT_SCRATCH_REG1];
+ *inst++ = MOD_REG | reg_lmap[SLJIT_R0];
*(sljit_si*)inst = local_size;
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) || (defined SLJIT_DEBUG && SLJIT_DEBUG)
compiler->skip_checks = 1;
@@ -229,6 +206,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack)));
}
#endif
+
SLJIT_ASSERT(local_size > 0);
if (local_size <= 127) {
inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
@@ -249,43 +227,46 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compil
*(sljit_si*)inst = local_size;
inst += sizeof(sljit_si);
}
+
#ifdef _WIN64
- /* Save xmm6 with MOVAPS instruction. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
- FAIL_IF(!inst);
- INC_SIZE(5);
- *inst++ = GROUP_0F;
- *(sljit_si*)inst = 0x20247429;
+ /* Save xmm6 register: movaps [rsp + 0x20], xmm6 */
+ if (fscratches >= 6 || fsaveds >= 1) {
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ FAIL_IF(!inst);
+ INC_SIZE(5);
+ *inst++ = GROUP_0F;
+ *(sljit_si*)inst = 0x20247429;
+ }
#endif
return SLJIT_SUCCESS;
}
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_context(struct sljit_compiler *compiler, sljit_si args, sljit_si scratches, sljit_si saveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE void sljit_set_context(struct sljit_compiler *compiler,
+ sljit_si args, sljit_si scratches, sljit_si saveds,
+ sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
{
- sljit_si pushed_size;
+ sljit_si allocated_size;
CHECK_ERROR_VOID();
- check_sljit_set_context(compiler, args, scratches, saveds, local_size);
+ check_sljit_set_context(compiler, args, scratches, saveds, fscratches, fsaveds, local_size);
compiler->scratches = scratches;
compiler->saveds = saveds;
+ compiler->fscratches = fscratches;
+ compiler->fsaveds = fsaveds;
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
compiler->logical_local_size = local_size;
#endif
/* Including the return address saved by the call instruction. */
- pushed_size = (saveds + 1) * sizeof(sljit_sw);
-#ifdef _WIN64
- if (scratches >= 5)
- pushed_size += sizeof(sljit_sw);
-#endif
- compiler->local_size = ((local_size + FIXED_LOCALS_OFFSET + pushed_size + 16 - 1) & ~(16 - 1)) - pushed_size;
+ allocated_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1);
+ compiler->local_size = ((local_size + FIXED_LOCALS_OFFSET + allocated_size + 16 - 1) & ~(16 - 1)) - allocated_size;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
{
- sljit_si size;
+ sljit_si i, tmp, size;
sljit_ub *inst;
CHECK_ERROR();
@@ -295,13 +276,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
#ifdef _WIN64
- /* Restore xmm6 with MOVAPS instruction. */
- inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
- FAIL_IF(!inst);
- INC_SIZE(5);
- *inst++ = GROUP_0F;
- *(sljit_si*)inst = 0x20247428;
+ /* Restore xmm6 register: movaps xmm6, [rsp + 0x20] */
+ if (compiler->fscratches >= 6 || compiler->fsaveds >= 1) {
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
+ FAIL_IF(!inst);
+ INC_SIZE(5);
+ *inst++ = GROUP_0F;
+ *(sljit_si*)inst = 0x20247428;
+ }
#endif
+
SLJIT_ASSERT(compiler->local_size > 0);
if (compiler->local_size <= 127) {
inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
@@ -322,50 +306,31 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compi
*(sljit_si*)inst = compiler->local_size;
}
- size = 1 + compiler->saveds;
-#ifndef _WIN64
- if (compiler->saveds >= 2)
- size += compiler->saveds - 1;
-#else
- if (compiler->saveds >= 4)
- size += compiler->saveds - 3;
- if (compiler->scratches >= 5)
- size += (5 - 4) * 2;
-#endif
- inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
- FAIL_IF(!inst);
-
- INC_SIZE(size);
-
-#ifdef _WIN64
- if (compiler->scratches >= 5) {
- *inst++ = REX_B;
- POP_REG(reg_lmap[SLJIT_SCRATCH_EREG2]);
- }
-#endif
- if (compiler->saveds >= 1)
- POP_REG(reg_map[SLJIT_SAVED_REG1]);
- if (compiler->saveds >= 2) {
-#ifndef _WIN64
- *inst++ = REX_B;
-#endif
- POP_REG(reg_lmap[SLJIT_SAVED_REG2]);
- }
- if (compiler->saveds >= 3) {
-#ifndef _WIN64
- *inst++ = REX_B;
-#endif
- POP_REG(reg_lmap[SLJIT_SAVED_REG3]);
- }
- if (compiler->saveds >= 4) {
- *inst++ = REX_B;
- POP_REG(reg_lmap[SLJIT_SAVED_EREG1]);
+ tmp = compiler->scratches;
+ for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
+ size = reg_map[i] >= 8 ? 2 : 1;
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+ INC_SIZE(size);
+ if (reg_map[i] >= 8)
+ *inst++ = REX_B;
+ POP_REG(reg_lmap[i]);
}
- if (compiler->saveds >= 5) {
- *inst++ = REX_B;
- POP_REG(reg_lmap[SLJIT_SAVED_EREG2]);
+
+ tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
+ for (i = tmp; i <= SLJIT_S0; i++) {
+ size = reg_map[i] >= 8 ? 2 : 1;
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
+ FAIL_IF(!inst);
+ INC_SIZE(size);
+ if (reg_map[i] >= 8)
+ *inst++ = REX_B;
+ POP_REG(reg_lmap[i]);
}
+ inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
+ FAIL_IF(!inst);
+ INC_SIZE(1);
RET();
return SLJIT_SUCCESS;
}
@@ -442,7 +407,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
b |= TMP_REG3;
}
else if (reg_lmap[b & REG_MASK] == 4)
- b |= TO_OFFS_REG(SLJIT_LOCALS_REG);
+ b |= TO_OFFS_REG(SLJIT_SP);
}
if ((b & REG_MASK) == SLJIT_UNUSED)
@@ -450,13 +415,16 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
else {
if (reg_map[b & REG_MASK] >= 8)
rex |= REX_B;
- if (immb != 0 && (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_LOCALS_REG))) {
+
+ if (immb != 0 && (!(b & OFFS_REG_MASK) || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP))) {
/* Immediate operand. */
if (immb <= 127 && immb >= -128)
inst_size += sizeof(sljit_sb);
else
inst_size += sizeof(sljit_si);
}
+ else if (reg_lmap[b & REG_MASK] == 5)
+ inst_size += sizeof(sljit_sb);
if ((b & OFFS_REG_MASK) != SLJIT_UNUSED) {
inst_size += 1; /* SIB byte. */
@@ -540,8 +508,8 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
if (!(b & SLJIT_MEM))
*buf_ptr++ |= MOD_REG + ((!(flags & EX86_SSE2_OP2)) ? reg_lmap[b] : b);
else if ((b & REG_MASK) != SLJIT_UNUSED) {
- if ((b & OFFS_REG_MASK) == SLJIT_UNUSED || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_LOCALS_REG)) {
- if (immb != 0) {
+ if ((b & OFFS_REG_MASK) == SLJIT_UNUSED || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP)) {
+ if (immb != 0 || reg_lmap[b & REG_MASK] == 5) {
if (immb <= 127 && immb >= -128)
*buf_ptr |= 0x40;
else
@@ -555,7 +523,7 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
*buf_ptr++ = reg_lmap[b & REG_MASK] | (reg_lmap[OFFS_REG(b)] << 3);
}
- if (immb != 0) {
+ if (immb != 0 || reg_lmap[b & REG_MASK] == 5) {
if (immb <= 127 && immb >= -128)
*buf_ptr++ = immb; /* 8 bit displacement. */
else {
@@ -565,8 +533,12 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
}
}
else {
+ if (reg_lmap[b & REG_MASK] == 5)
+ *buf_ptr |= 0x40;
*buf_ptr++ |= 0x04;
*buf_ptr++ = reg_lmap[b & REG_MASK] | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6);
+ if (reg_lmap[b & REG_MASK] == 5)
+ *buf_ptr++ = 0;
}
}
else {
@@ -597,7 +569,7 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
sljit_ub *inst;
#ifndef _WIN64
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SCRATCH_REG2] == 6 && reg_map[SLJIT_SCRATCH_REG1] < 8 && reg_map[SLJIT_SCRATCH_REG3] < 8, args_registers);
+ SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);
inst = (sljit_ub*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
FAIL_IF(!inst);
@@ -605,13 +577,13 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
if (type >= SLJIT_CALL3) {
*inst++ = REX_W;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (0x2 /* rdx */ << 3) | reg_lmap[SLJIT_SCRATCH_REG3];
+ *inst++ = MOD_REG | (0x2 /* rdx */ << 3) | reg_lmap[SLJIT_R2];
}
*inst++ = REX_W;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (0x7 /* rdi */ << 3) | reg_lmap[SLJIT_SCRATCH_REG1];
+ *inst++ = MOD_REG | (0x7 /* rdi */ << 3) | reg_lmap[SLJIT_R0];
#else
- SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SCRATCH_REG2] == 2 && reg_map[SLJIT_SCRATCH_REG1] < 8 && reg_map[SLJIT_SCRATCH_REG3] < 8, args_registers);
+ SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);
inst = (sljit_ub*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
FAIL_IF(!inst);
@@ -619,11 +591,11 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
if (type >= SLJIT_CALL3) {
*inst++ = REX_W | REX_R;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (0x0 /* r8 */ << 3) | reg_lmap[SLJIT_SCRATCH_REG3];
+ *inst++ = MOD_REG | (0x0 /* r8 */ << 3) | reg_lmap[SLJIT_R2];
}
*inst++ = REX_W;
*inst++ = MOV_r_rm;
- *inst++ = MOD_REG | (0x1 /* rcx */ << 3) | reg_lmap[SLJIT_SCRATCH_REG1];
+ *inst++ = MOD_REG | (0x1 /* rcx */ << 3) | reg_lmap[SLJIT_R0];
#endif
return SLJIT_SUCCESS;
}
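
Note: the reworked sljit_emit_enter/sljit_emit_return above replace the per-register push/pop ladders with loops that size each instruction as `size = reg_map[i] >= 8 ? 2 : 1` and emit REX_B before PUSH/POP for high registers. The following standalone sketch illustrates that encoding rule only; the register table in it is a made-up example, not sljit's actual reg_map/reg_lmap, and the emit helper is hypothetical, not part of sljit.

/* Illustrative sketch: why PUSH of r8..r15 is 2 bytes (REX.B + opcode)
 * while PUSH of rax..rdi is 1 byte. Compile as ordinary C. */
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define EX_REX_B 0x41  /* REX prefix with the B bit set (extends the reg field) */

/* Hypothetical virtual-to-hardware register map, NOT sljit's tables. */
static const uint8_t example_map[] = { 0 /* rax */, 3 /* rbx */, 14 /* r14 */, 15 /* r15 */ };

/* Emit "push <hw_reg>" into out; returns the instruction length.
 * Registers 8..15 need a REX.B prefix, so the size is 2 instead of 1,
 * mirroring the size computation used by the new loops above. */
static size_t emit_push(uint8_t *out, uint8_t hw_reg)
{
    size_t len = 0;
    if (hw_reg >= 8)
        out[len++] = EX_REX_B;             /* extend the register encoding */
    out[len++] = 0x50 | (hw_reg & 0x7);    /* PUSH r64: opcode 0x50 + low 3 bits */
    return len;
}

int main(void)
{
    uint8_t buf[4];
    for (size_t i = 0; i < sizeof(example_map); i++) {
        size_t len = emit_push(buf, example_map[i]);
        printf("push map[%zu] (%u bytes):", i, (unsigned)len);
        for (size_t j = 0; j < len; j++)
            printf(" %02x", buf[j]);
        printf("\n");
    }
    return 0;
}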