pcre2: Update to 10.47

This commit is contained in:
Rémi Verschelde
2025-12-12 23:10:07 +01:00
parent 08e6cd181f
commit 36e7675d6d
69 changed files with 8548 additions and 3234 deletions
+284 -42
View File
@@ -113,7 +113,7 @@ static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
};
/* --------------------------------------------------------------------- */
/* Instrucion forms */
/* Instruction forms */
/* --------------------------------------------------------------------- */
#define D(d) ((sljit_ins)reg_map[d] << 21)
#define S(s) ((sljit_ins)reg_map[s] << 21)
@@ -504,6 +504,12 @@ static void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executab
#endif /* SLJIT_CONFIG_PPC_32 */
}
/* Rounds code_ptr down to the alignment recorded in the extended label.
   The label's data field holds the low address bits to clear. */
static SLJIT_INLINE sljit_ins *process_extended_label(sljit_ins *code_ptr, struct sljit_extended_label *ext_label)
{
	sljit_uw aligned_addr;

	/* Only aligned labels are stored in the extended form. */
	SLJIT_ASSERT(ext_label->label.u.index == SLJIT_LABEL_ALIGNED);

	aligned_addr = (sljit_uw)code_ptr & ~ext_label->data;
	return (sljit_ins*)aligned_addr;
}
static void reduce_code_size(struct sljit_compiler *compiler)
{
struct sljit_label *label;
@@ -619,18 +625,20 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
struct sljit_const *const_;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_generate_code(compiler));
CHECK_PTR(check_sljit_generate_code(compiler, options));
reduce_code_size(compiler);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
if (!(options & SLJIT_GENERATE_CODE_NO_CONTEXT)) {
/* add to compiler->size additional instruction space to hold the trampoline and padding */
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
compiler->size += (compiler->size & 0x1) + (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
#else
compiler->size += (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
#endif
#endif
compiler->size += (compiler->size & 0x1) + (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
#else /* !SLJIT_CONFIG_PPC_64 */
compiler->size += (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
#endif /* SLJIT_CONFIG_PPC_64 */
}
#endif /* SLJIT_INDIRECT_CALL */
code = (sljit_ins*)allocate_executable_memory(compiler->size * sizeof(sljit_ins), options, exec_allocator_data, &executable_offset);
PTR_FAIL_WITH_EXEC_IF(code);
@@ -657,6 +665,11 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
/* These structures are ordered by their address. */
if (next_min_addr == next_label_size) {
if (label->u.index >= SLJIT_LABEL_ALIGNED) {
code_ptr = process_extended_label(code_ptr, (struct sljit_extended_label*)label);
*code_ptr = buf_ptr[-1];
}
/* Just recording the address. */
label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
@@ -700,6 +713,9 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
if (label->u.index >= SLJIT_LABEL_ALIGNED)
code_ptr = process_extended_label(code_ptr, (struct sljit_extended_label*)label);
label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = (sljit_uw)(code_ptr - code);
label = label->next;
@@ -710,7 +726,8 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!const_);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)(compiler->size - (sizeof(struct sljit_function_context) / sizeof(sljit_ins))));
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)(compiler->size -
((options & SLJIT_GENERATE_CODE_NO_CONTEXT) ? 0 : (sizeof(struct sljit_function_context) / sizeof(sljit_ins)))));
#else
SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
#endif
@@ -727,12 +744,14 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
if (!(options & SLJIT_GENERATE_CODE_NO_CONTEXT)) {
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (((sljit_sw)code_ptr) & 0x4)
code_ptr++;
#endif
sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_uw)code, (void*)sljit_generate_code);
#endif
if (((sljit_sw)code_ptr) & 0x4)
code_ptr++;
#endif /* SLJIT_CONFIG_PPC_64 */
sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_uw)code, (void*)sljit_generate_code);
}
#endif /* SLJIT_INDIRECT_CALL */
code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
@@ -740,12 +759,14 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins) + sizeof(struct sljit_function_context);
return code_ptr;
#else
if (!(options & SLJIT_GENERATE_CODE_NO_CONTEXT)) {
compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins) + sizeof(struct sljit_function_context);
return code_ptr;
}
#endif /* SLJIT_INDIRECT_CALL */
compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
return code;
#endif
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
@@ -1683,7 +1704,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
op |= SLJIT_32;
#endif /* SLJIT_CONFIG_PPC_64 */
/* fallthrough */
SLJIT_FALLTHROUGH
case SLJIT_REV:
case SLJIT_REV_U16:
case SLJIT_REV_S16:
@@ -1925,7 +1946,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
if (src1 == SLJIT_IMM && src1w == -1) {
return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM4, dst, dstw, TMP_REG1, 0, src2, src2w);
}
/* fallthrough */
SLJIT_FALLTHROUGH
case SLJIT_AND:
case SLJIT_OR:
/* Commutative unsigned operations. */
@@ -2103,6 +2124,56 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *
return push_inst(compiler, OR | S(dst_reg) | A(dst_reg) | B(TMP_REG1));
}
/* Emits dst = src1 + (src2 << shift_arg) using an SLWI+ADD pair.
   shift_arg is an immediate shift amount, masked to the machine word
   bit width; a zero shift (including an immediate src2, which is
   pre-shifted at compile time) delegates to sljit_emit_op2.
   NOTE(review): op is reduced via GET_OPCODE on the fallback path;
   the emitted pair itself always performs an add — confirm callers
   only pass add-class ops here. */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2_shift(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w,
sljit_sw shift_arg)
{
sljit_s32 dst_r, tmp_r;
CHECK_ERROR();
CHECK(check_sljit_emit_op2_shift(compiler, op, dst, dstw, src1, src1w, src2, src2w, shift_arg));
ADJUST_LOCAL_OFFSET(dst, dstw);
ADJUST_LOCAL_OFFSET(src1, src1w);
ADJUST_LOCAL_OFFSET(src2, src2w);
/* Clamp the shift to the valid range for a machine word. */
shift_arg &= (sljit_sw)((sizeof(sljit_sw) * 8) - 1);
/* An immediate second operand can be shifted right now, at compile time. */
if (src2 == SLJIT_IMM) {
src2w = src2w << shift_arg;
shift_arg = 0;
}
/* Nothing left to shift: this is a plain two-operand operation. */
if (shift_arg == 0) {
SLJIT_SKIP_CHECKS(compiler);
return sljit_emit_op2(compiler, GET_OPCODE(op), dst, dstw, src1, src1w, src2, src2w);
}
/* Materialize src1 into TMP_REG1 when it is not already in a register. */
if (src1 == SLJIT_IMM) {
FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
src1 = TMP_REG1;
} else if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
src1 = TMP_REG1;
}
/* Pick a scratch register that does not alias src1. */
tmp_r = (src1 == TMP_REG1) ? TMP_REG2 : TMP_REG1;
if (src2 & SLJIT_MEM) {
FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, tmp_r, src2, src2w, tmp_r));
src2 = tmp_r;
}
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
/* tmp_r = src2 << shift_arg; dst_r = src1 + tmp_r. */
FAIL_IF(push_inst(compiler, SLWI_W(shift_arg) | S(src2) | A(tmp_r)));
FAIL_IF(push_inst(compiler, ADD | D(dst_r) | A(src1) | B(tmp_r)));
if (dst & SLJIT_MEM)
return emit_op_mem(compiler, WORD_DATA, dst_r, dst, dstw, TMP_REG1);
return SLJIT_SUCCESS;
}
static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
sljit_s32 src, sljit_sw srcw)
{
@@ -2322,7 +2393,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compil
FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(src)));
break;
}
/* Fall through. */
SLJIT_FALLTHROUGH
case SLJIT_MOV_F64:
if (src != dst_r) {
if (!(dst & SLJIT_MEM))
@@ -2447,13 +2518,67 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compi
return label;
}
/* Emits a label aligned to (1 << alignment) bytes and reserves NOP-filled
   space for the optional chain of read-only data buffers that follow it.
   For alignments at or below the instruction size a plain label is enough;
   larger alignments pad with NOPs and record the alignment mask in an
   extended label, which process_extended_label() applies at code
   generation time. */
SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_aligned_label(struct sljit_compiler *compiler,
sljit_s32 alignment, struct sljit_read_only_buffer *buffers)
{
sljit_uw mask, i;
struct sljit_label *label;
struct sljit_label *next_label;
struct sljit_extended_label *ext_label;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_aligned_label(compiler, alignment, buffers));
sljit_reset_read_only_buffers(buffers);
if (alignment <= SLJIT_LABEL_ALIGN_4) {
SLJIT_SKIP_CHECKS(compiler);
label = sljit_emit_label(compiler);
PTR_FAIL_IF(!label);
} else {
/* The used space is filled with NOPs. */
/* mask holds the low address bits to be cleared later; worst case
   needs (mask / sizeof(sljit_ins)) padding instructions. */
mask = ((sljit_uw)1 << alignment) - sizeof(sljit_ins);
for (i = (mask >> 2); i != 0; i--)
PTR_FAIL_IF(push_inst(compiler, NOP));
ext_label = (struct sljit_extended_label*)ensure_abuf(compiler, sizeof(struct sljit_extended_label));
PTR_FAIL_IF(!ext_label);
set_extended_label(ext_label, compiler, SLJIT_LABEL_ALIGNED, mask);
label = &ext_label->label;
}
if (buffers == NULL)
return label;
/* Bind each read-only buffer to a label and reserve its size, rounded
   up to whole instructions, as NOP padding; a fresh label separates
   consecutive buffers. */
next_label = label;
while (1) {
buffers->u.label = next_label;
for (i = (buffers->size + 3) >> 2; i > 0; i--)
PTR_FAIL_IF(push_inst(compiler, NOP));
buffers = buffers->next;
if (buffers == NULL)
break;
SLJIT_SKIP_CHECKS(compiler);
next_label = sljit_emit_label(compiler);
PTR_FAIL_IF(!next_label);
}
return label;
}
static sljit_ins get_bo_bi_flags(struct sljit_compiler *compiler, sljit_s32 type)
{
switch (type) {
case SLJIT_NOT_CARRY:
if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_SUB)
return (4 << 21) | (2 << 16);
/* fallthrough */
SLJIT_FALLTHROUGH
case SLJIT_EQUAL:
case SLJIT_ATOMIC_STORED:
@@ -2462,7 +2587,7 @@ static sljit_ins get_bo_bi_flags(struct sljit_compiler *compiler, sljit_s32 type
case SLJIT_CARRY:
if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_SUB)
return (12 << 21) | (2 << 16);
/* fallthrough */
SLJIT_FALLTHROUGH
case SLJIT_NOT_EQUAL:
case SLJIT_ATOMIC_NOT_STORED:
@@ -2835,6 +2960,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *comp
{
sljit_ins *ptr;
sljit_uw size;
sljit_s32 is_compare = (type & SLJIT_COMPARE_SELECT);
sljit_ins ins;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
sljit_s32 inp_flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
#else /* !SLJIT_CONFIG_PPC_64 */
@@ -2846,6 +2973,45 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *comp
ADJUST_LOCAL_OFFSET(src1, src1w);
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (src1 == SLJIT_IMM && (type & SLJIT_32))
src1w = (sljit_s32)src1w;
#endif /* SLJIT_CONFIG_PPC_64 */
type &= ~(SLJIT_32 | SLJIT_COMPARE_SELECT);
if (is_compare) {
ins = 0;
if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG1, src1, src1w, TMP_REG1));
src1 = TMP_REG1;
src1w = 0;
}
if (src1 == SLJIT_IMM) {
if (type >= SLJIT_LESS && type <= SLJIT_LESS_EQUAL && src1w >= 0 && src1w <= UIMM_MAX)
ins = CMPLI | CRD(0) | IMM(src1w);
else if (type >= SLJIT_SIG_LESS && type <= SLJIT_SIG_LESS_EQUAL && src1w >= SIMM_MIN && src1w <= SIMM_MAX)
ins = CMPI | CRD(0) | IMM(src1w);
else {
FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
src1 = TMP_REG1;
src1w = 0;
}
}
if (ins == 0)
ins = ((type >= SLJIT_LESS && type <= SLJIT_LESS_EQUAL) ? CMPL : CMP) | CRD(0) | B(src1);
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (inp_flags == (WORD_DATA | LOAD_DATA))
ins |= CRD(1);
#endif /* SLJIT_CONFIG_PPC_64 */
FAIL_IF(push_inst(compiler, ins | A(src2_reg)));
type ^= 0x1;
}
if (dst_reg != src2_reg) {
if (dst_reg == src1) {
src1 = src2_reg;
@@ -2866,7 +3032,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *comp
}
}
if (((type & ~SLJIT_32) | 0x1) == SLJIT_NOT_CARRY)
if ((type | 0x1) == SLJIT_NOT_CARRY)
FAIL_IF(push_inst(compiler, ADDE | RC(ALT_SET_FLAGS) | D(TMP_REG1) | A(TMP_ZERO) | B(TMP_ZERO)));
size = compiler->size;
@@ -2878,15 +3044,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_select(struct sljit_compiler *comp
if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem(compiler, inp_flags, dst_reg, src1, src1w, TMP_REG1));
} else if (src1 == SLJIT_IMM) {
#if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
if (type & SLJIT_32)
src1w = (sljit_s32)src1w;
#endif /* SLJIT_CONFIG_RISCV_64 */
FAIL_IF(load_immediate(compiler, dst_reg, src1w));
} else
FAIL_IF(push_inst(compiler, OR | S(src1) | A(dst_reg) | B(src1)));
*ptr = BCx | get_bo_bi_flags(compiler, (type ^ 0x1) & ~SLJIT_32) | (sljit_ins)((compiler->size - size) << 2);
*ptr = BCx | get_bo_bi_flags(compiler, type ^ 0x1) | (sljit_ins)((compiler->size - size) << 2);
return SLJIT_SUCCESS;
}
@@ -3162,7 +3324,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
ins = LDARX;
break;
#endif /* SLJIT_CONFIG_RISCV_64 */
#endif /* SLJIT_CONFIG_PPC_64 */
case SLJIT_MOV_U32:
case SLJIT_MOV32:
ins = LWARX;
@@ -3200,7 +3362,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
ins = STDCX | 0x1;
break;
#endif /* SLJIT_CONFIG_RISCV_64 */
#endif /* SLJIT_CONFIG_PPC_64 */
case SLJIT_MOV_U32:
case SLJIT_MOV32:
ins = STWCX | 0x1;
@@ -3216,13 +3378,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler
return push_inst(compiler, ins | D(src_reg) | B(mem_reg));
}
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_sw init_value)
{
struct sljit_const *const_;
sljit_s32 dst_r;
sljit_s32 mem_flags = WORD_DATA;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
CHECK_PTR(check_sljit_emit_const(compiler, op, dst, dstw, init_value));
ADJUST_LOCAL_OFFSET(dst, dstw);
const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
@@ -3230,42 +3395,119 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
set_const(const_, compiler);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));
switch (GET_OPCODE(op)) {
case SLJIT_MOV_U8:
if (init_value & 0x100)
init_value |= 0xff00;
else
init_value &= 0xff;
PTR_FAIL_IF(push_inst(compiler, ADDI | D(dst_r) | A(0) | IMM(init_value)));
mem_flags = BYTE_DATA;
break;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
case SLJIT_MOV32:
mem_flags = INT_DATA;
SLJIT_FALLTHROUGH
case SLJIT_MOV_S32:
PTR_FAIL_IF(push_inst(compiler, ADDIS | D(dst_r) | A(0) | IMM(init_value >> 16)));
PTR_FAIL_IF(push_inst(compiler, ORI | S(dst_r) | A(dst_r) | IMM(init_value)));
break;
#endif /* SLJIT_CONFIG_PPC_64 */
default:
PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));
break;
}
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, dst_r, dst, dstw, TMP_REG1));
PTR_FAIL_IF(emit_op_mem(compiler, mem_flags, dst_r, dst, dstw, TMP_REG1));
return const_;
}
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_op_addr(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw)
{
struct sljit_jump *jump;
sljit_s32 dst_r;
sljit_s32 dst_r, target_r;
SLJIT_UNUSED_ARG(op);
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
CHECK_PTR(check_sljit_emit_op_addr(compiler, op, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
if (op != SLJIT_ADD_ABS_ADDR)
target_r = dst_r;
else {
target_r = TMP_REG1;
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst_r, dst, dstw, TMP_REG1));
}
jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
PTR_FAIL_IF(!jump);
set_mov_addr(jump, compiler, 0);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
PTR_FAIL_IF(push_inst(compiler, (sljit_ins)target_r));
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
compiler->size++;
#else
compiler->size += 4;
#endif
if (op == SLJIT_ADD_ABS_ADDR)
PTR_FAIL_IF(push_inst(compiler, ADD | D(dst_r) | A(dst_r) | B(TMP_REG1)));
if (dst & SLJIT_MEM)
PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, dst_r, dst, dstw, TMP_REG1));
return jump;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_s32 op, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
sljit_ins *inst;
switch (GET_OPCODE(op)) {
case SLJIT_MOV_U8:
inst = (sljit_ins *)addr;
SLJIT_ASSERT((inst[0] & 0xfc1f0000) == ADDI);
if (new_constant & 0x100)
new_constant |= 0xff00;
else
new_constant &= 0xff;
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 0);
inst[0] = (inst[0] & 0xffff0000) | IMM(new_constant);
SLJIT_UPDATE_WX_FLAGS(inst, inst + 1, 1);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 1);
return;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
case SLJIT_MOV32:
case SLJIT_MOV_S32:
inst = (sljit_ins *)addr;
SLJIT_ASSERT((inst[0] & 0xfc1f0000) == ADDIS && (inst[1] & 0xfc000000) == ORI);
SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 0);
inst[0] = (inst[0] & 0xffff0000) | IMM(new_constant >> 16);
inst[1] = (inst[1] & 0xffff0000) | IMM(new_constant);
SLJIT_UPDATE_WX_FLAGS(inst, inst + 2, 1);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
return;
#endif /* SLJIT_CONFIG_PPC_64 */
default:
sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
return;
}
}