Update IR

IR commit: f7c0ddb1b4630e1287b0239f85d64a6965dfea29
This commit is contained in:
Dmitry Stogov 2024-02-14 23:57:22 +03:00
parent 1e770d12c2
commit ce96aa9188
9 changed files with 330 additions and 91 deletions

View File

@ -488,7 +488,7 @@ ir_ref ir_unique_const_addr(ir_ctx *ctx, uintptr_t addr)
return ref;
}
static IR_NEVER_INLINE ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
{
ir_insn *insn, *prev_insn;
ir_ref ref, prev;
@ -1499,7 +1499,7 @@ ir_ref ir_addrtab_find(const ir_hashtab *tab, uint64_t key)
return IR_INVALID_VAL;
}
bool ir_addrtab_add(ir_hashtab *tab, uint64_t key, ir_ref val)
void ir_addrtab_set(ir_hashtab *tab, uint64_t key, ir_ref val)
{
char *data = (char*)tab->data;
uint32_t pos = ((uint32_t*)data)[(int32_t)(key | tab->mask)];
@ -1508,7 +1508,8 @@ bool ir_addrtab_add(ir_hashtab *tab, uint64_t key, ir_ref val)
while (pos != IR_INVALID_IDX) {
p = (ir_addrtab_bucket*)(data + pos);
if (p->key == key) {
return p->val == val;
p->val = val;
return;
}
pos = p->next;
}
@ -1527,7 +1528,6 @@ bool ir_addrtab_add(ir_hashtab *tab, uint64_t key, ir_ref val)
key |= tab->mask;
p->next = ((uint32_t*)data)[(int32_t)key];
((uint32_t*)data)[(int32_t)key] = pos;
return 1;
}
/* Memory API */
@ -1977,6 +1977,18 @@ ir_ref _ir_END_LIST(ir_ctx *ctx, ir_ref list)
return ref;
}
/* Terminate the current control path with an END node that also carries a
 * per-path value for a future PHI. Like _ir_END_LIST(), the ENDs are chained
 * through op2; the value is stashed in op3 until _ir_PHI_LIST() collects it.
 * Returns the new list head (the emitted END ref). */
ir_ref _ir_END_PHI_LIST(ir_ctx *ctx, ir_ref list, ir_ref val)
{
ir_ref ref;
IR_ASSERT(ctx->control);
IR_ASSERT(!list || ctx->ir_base[list].op == IR_END);
/* create a linked list of END nodes with the same destination through END.op2 */
ref = ir_emit3(ctx, IR_END, ctx->control, list, val);
ctx->control = IR_UNUSED;
return ref;
}
void _ir_MERGE_LIST(ir_ctx *ctx, ir_ref list)
{
ir_ref ref = list;
@ -2016,6 +2028,41 @@ void _ir_MERGE_LIST(ir_ctx *ctx, ir_ref list)
}
}
/* Close a list of END nodes built by _ir_END_PHI_LIST(): merge the control
 * paths and create a PHI joining the per-path values stashed in each END.op3.
 * Returns the PHI ref, or IR_UNUSED when there is no value to join. */
ir_ref _ir_PHI_LIST(ir_ctx *ctx, ir_ref list)
{
ir_insn *merge, *end;
ir_ref phi, *ops, i;
ir_type type;
if (list == IR_UNUSED) {
return IR_UNUSED;
}
end = &ctx->ir_base[list];
if (!end->op2) {
/* single predecessor: no MERGE/PHI needed; take the stashed value and
 * continue straight from this END */
phi = end->op3;
end->op3 = IR_UNUSED;
_ir_BEGIN(ctx, list);
} else if (!end->op3) {
/* several predecessors but no value on this path: plain MERGE, no PHI */
_ir_MERGE_LIST(ctx, list);
phi = IR_UNUSED;
} else {
/* several predecessors with values: MERGE, then a PHI whose i-th value
 * operand comes from the i-th END's op3 */
type = ctx->ir_base[end->op3].type;
_ir_MERGE_LIST(ctx, list);
merge = &ctx->ir_base[ctx->control];
IR_ASSERT(merge->op == IR_MERGE);
phi = ir_emit_N(ctx, IR_OPT(IR_PHI, type), merge->inputs_count + 1);
/* ir_emit_N() may grow/reallocate ctx->ir_base — reload the MERGE pointer */
merge = &ctx->ir_base[ctx->control];
ops = merge->ops;
ir_set_op(ctx, phi, 1, ctx->control);
for (i = 0; i < merge->inputs_count; i++) {
end = &ctx->ir_base[ops[i + 1]];
ir_set_op(ctx, phi, i + 2, end->op3);
/* NOTE(review): op3 is overwritten with IR_END after the value is
 * consumed — presumably restoring the plain END node layout; confirm */
end->op3 = IR_END;
}
}
return phi;
}
ir_ref _ir_LOOP_BEGIN(ir_ctx *ctx, ir_ref src1)
{
IR_ASSERT(!ctx->control);

View File

@ -501,13 +501,14 @@ void ir_strtab_free(ir_strtab *strtab);
#define IR_GEN_ENDBR (1<<14)
#define IR_MERGE_EMPTY_ENTRIES (1<<15)
#define IR_OPT_FOLDING (1<<16)
#define IR_OPT_CFG (1<<17) /* merge BBs, by remove END->BEGIN nodes during CFG construction */
#define IR_OPT_CODEGEN (1<<18)
#define IR_GEN_NATIVE (1<<19)
#define IR_GEN_CODE (1<<20) /* C or LLVM */
#define IR_OPT_INLINE (1<<16)
#define IR_OPT_FOLDING (1<<17)
#define IR_OPT_CFG (1<<18) /* merge BBs, by remove END->BEGIN nodes during CFG construction */
#define IR_OPT_CODEGEN (1<<19)
#define IR_GEN_NATIVE (1<<20)
#define IR_GEN_CODE (1<<21) /* C or LLVM */
#define IR_GEN_CACHE_DEMOTE (1<<21) /* Demote the generated code from closest CPU caches */
#define IR_GEN_CACHE_DEMOTE (1<<22) /* Demote the generated code from closest CPU caches */
/* debug related */
#ifdef IR_DEBUG

View File

@ -561,7 +561,7 @@ int ir_get_target_constraints(ir_ctx *ctx, ir_ref ref, ir_target_constraints *co
n++;
}
}
flags = IR_USE_SHOULD_BE_IN_REG | IR_OP2_SHOULD_BE_IN_REG | IR_OP3_SHOULD_BE_IN_REG;
flags = IR_USE_SHOULD_BE_IN_REG | IR_OP2_MUST_BE_IN_REG | IR_OP3_SHOULD_BE_IN_REG;
break;
case IR_COND:
insn = &ctx->ir_base[ref];
@ -3953,6 +3953,10 @@ static void ir_emit_alloca(ir_ctx *ctx, ir_ref def, ir_insn *insn)
dasm_State **Dst = &data->dasm_state;
ir_reg def_reg = IR_REG_NUM(ctx->regs[def][0]);
if (ctx->use_lists[def].count == 1) {
/* dead alloca */
return;
}
if (IR_IS_CONST_REF(insn->op2)) {
ir_insn *val = &ctx->ir_base[insn->op2];
int32_t size = val->val.i32;
@ -4314,16 +4318,18 @@ static void ir_emit_switch(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *insn)
}
}
if (aarch64_may_encode_imm12(max.i64)) {
| ASM_REG_IMM_OP cmp, type, op2_reg, max.i64
} else {
ir_emit_load_imm_int(ctx, type, tmp_reg, max.i64);
| ASM_REG_REG_OP cmp, type, op2_reg, tmp_reg
}
if (IR_IS_TYPE_SIGNED(type)) {
| bgt =>default_label
} else {
| bhi =>default_label
if (default_label) {
if (aarch64_may_encode_imm12(max.i64)) {
| ASM_REG_IMM_OP cmp, type, op2_reg, max.i64
} else {
ir_emit_load_imm_int(ctx, type, tmp_reg, max.i64);
| ASM_REG_REG_OP cmp, type, op2_reg, tmp_reg
}
if (IR_IS_TYPE_SIGNED(type)) {
| bgt =>default_label
} else {
| bhi =>default_label
}
}
if (op1_reg == IR_REG_NONE) {
@ -4335,11 +4341,15 @@ static void ir_emit_switch(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *insn)
ir_emit_load_imm_int(ctx, type, tmp_reg, min.i64);
| ASM_REG_REG_REG_OP subs, type, op1_reg, op2_reg, tmp_reg
}
if (IR_IS_TYPE_SIGNED(type)) {
| blt =>default_label
} else {
| blo =>default_label
if (default_label) {
if (IR_IS_TYPE_SIGNED(type)) {
| blt =>default_label
} else {
| blo =>default_label
}
}
| adr Rx(tmp_reg), >1
| ldr Rx(tmp_reg), [Rx(tmp_reg), Rx(op1_reg), lsl #3]
| br Rx(tmp_reg)
@ -4352,25 +4362,29 @@ static void ir_emit_switch(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *insn)
|1:
for (i = 0; i <= (max.i64 - min.i64); i++) {
int b = labels[i];
ir_block *bb = &ctx->cfg_blocks[b];
ir_insn *insn = &ctx->ir_base[bb->end];
if (b) {
ir_block *bb = &ctx->cfg_blocks[b];
ir_insn *insn = &ctx->ir_base[bb->end];
if (insn->op == IR_IJMP && IR_IS_CONST_REF(insn->op2)) {
ir_ref prev = ctx->prev_ref[bb->end];
if (prev != bb->start && ctx->ir_base[prev].op == IR_SNAPSHOT) {
prev = ctx->prev_ref[prev];
}
if (prev == bb->start) {
void *addr = ir_jmp_addr(ctx, insn, &ctx->ir_base[insn->op2]);
| .addr &addr
if (ctx->ir_base[bb->start].op != IR_CASE_DEFAULT) {
bb->flags |= IR_BB_EMPTY;
if (insn->op == IR_IJMP && IR_IS_CONST_REF(insn->op2)) {
ir_ref prev = ctx->prev_ref[bb->end];
if (prev != bb->start && ctx->ir_base[prev].op == IR_SNAPSHOT) {
prev = ctx->prev_ref[prev];
}
if (prev == bb->start) {
void *addr = ir_jmp_addr(ctx, insn, &ctx->ir_base[insn->op2]);
| .addr &addr
if (ctx->ir_base[bb->start].op != IR_CASE_DEFAULT) {
bb->flags |= IR_BB_EMPTY;
}
continue;
}
continue;
}
| .addr =>b
} else {
| .addr 0
}
| .addr =>b
}
|.code
ir_mem_free(labels);
@ -5187,7 +5201,9 @@ static void ir_emit_load_params(ir_ctx *ctx)
dst_reg = IR_REG_NUM(ctx->regs[use][0]);
IR_ASSERT(src_reg != IR_REG_NONE || dst_reg != IR_REG_NONE ||
stack_offset == ctx->live_intervals[ctx->vregs[use]]->stack_spill_pos +
((ctx->flags & IR_USE_FRAME_POINTER) ? -ctx->stack_frame_size : ctx->call_stack_size));
((ctx->flags & IR_USE_FRAME_POINTER) ?
-(ctx->stack_frame_size - ctx->stack_frame_alignment) :
ctx->call_stack_size));
if (src_reg != dst_reg) {
ir_emit_param_move(ctx, insn->type, src_reg, dst_reg, use, stack_offset);
}

View File

@ -614,6 +614,9 @@ extern "C" {
#define ir_END_list(_list) do { _list = _ir_END_LIST(_ir_CTX, _list); } while (0)
#define ir_MERGE_list(_list) _ir_MERGE_LIST(_ir_CTX, (_list))
#define ir_END_PHI_list(_list, _val) do { _list = _ir_END_PHI_LIST(_ir_CTX, _list, _val); } while (0)
#define ir_PHI_list(_list) _ir_PHI_LIST(_ir_CTX, (_list))
#define ir_MERGE_WITH(_src2) do {ir_ref end = ir_END(); ir_MERGE_2(end, _src2);} while (0)
#define ir_MERGE_WITH_EMPTY_TRUE(_if) do {ir_ref end = ir_END(); ir_IF_TRUE(_if); ir_MERGE_2(end, ir_END());} while (0)
#define ir_MERGE_WITH_EMPTY_FALSE(_if) do {ir_ref end = ir_END(); ir_IF_FALSE(_if); ir_MERGE_2(end, ir_END());} while (0)
@ -655,6 +658,7 @@ void _ir_ENTRY(ir_ctx *ctx, ir_ref src, ir_ref num);
void _ir_BEGIN(ir_ctx *ctx, ir_ref src);
ir_ref _ir_END(ir_ctx *ctx);
ir_ref _ir_END_LIST(ir_ctx *ctx, ir_ref list);
ir_ref _ir_END_PHI_LIST(ir_ctx *ctx, ir_ref list, ir_ref val);
ir_ref _ir_IF(ir_ctx *ctx, ir_ref condition);
void _ir_IF_TRUE(ir_ctx *ctx, ir_ref if_ref);
void _ir_IF_TRUE_cold(ir_ctx *ctx, ir_ref if_ref);
@ -664,6 +668,7 @@ void _ir_MERGE_2(ir_ctx *ctx, ir_ref src1, ir_ref src2);
void _ir_MERGE_N(ir_ctx *ctx, ir_ref n, ir_ref *inputs);
void _ir_MERGE_SET_OP(ir_ctx *ctx, ir_ref merge, ir_ref pos, ir_ref src);
void _ir_MERGE_LIST(ir_ctx *ctx, ir_ref list);
ir_ref _ir_PHI_LIST(ir_ctx *ctx, ir_ref list);
ir_ref _ir_LOOP_BEGIN(ir_ctx *ctx, ir_ref src1);
ir_ref _ir_LOOP_END(ir_ctx *ctx);
ir_ref _ir_TLS(ir_ctx *ctx, ir_ref index, ir_ref offset);

View File

@ -664,7 +664,7 @@ static void ir_emit_dessa_move(ir_ctx *ctx, ir_type type, ir_ref to, ir_ref from
IR_ALWAYS_INLINE void ir_dessa_resolve_cycle(ir_ctx *ctx, int32_t *pred, int32_t *loc, ir_bitset todo, ir_type type, int32_t to, ir_reg tmp_reg, ir_reg tmp_fp_reg)
{
ir_reg from;
ir_ref from;
ir_mem tmp_spill_slot;
IR_MEM_VAL(tmp_spill_slot) = 0;

View File

@ -275,18 +275,53 @@ IR_FOLD(UGT(C_FLOAT, C_FLOAT))
}
IR_FOLD(ADD(C_U8, C_U8))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u8 + op2_insn->val.u8);
}
IR_FOLD(ADD(C_U16, C_U16))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u16 + op2_insn->val.u16);
}
IR_FOLD(ADD(C_U32, C_U32))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u32 + op2_insn->val.u32);
}
IR_FOLD(ADD(C_U64, C_U64))
IR_FOLD(ADD(C_ADDR, C_ADDR))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
}
IR_FOLD(ADD(C_ADDR, C_ADDR))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.addr + op2_insn->val.addr);
}
IR_FOLD(ADD(C_I8, C_I8))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i8 + op2_insn->val.i8);
}
IR_FOLD(ADD(C_I16, C_I16))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i16 + op2_insn->val.i16);
}
IR_FOLD(ADD(C_I32, C_I32))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i32 + op2_insn->val.i32);
}
IR_FOLD(ADD(C_I64, C_I64))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
@ -306,18 +341,53 @@ IR_FOLD(ADD(C_FLOAT, C_FLOAT))
}
IR_FOLD(SUB(C_U8, C_U8))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u8 - op2_insn->val.u8);
}
IR_FOLD(SUB(C_U16, C_U16))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u16 - op2_insn->val.u16);
}
IR_FOLD(SUB(C_U32, C_U32))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u32 - op2_insn->val.u32);
}
IR_FOLD(SUB(C_U64, C_U64))
IR_FOLD(SUB(C_ADDR, C_ADDR))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
}
IR_FOLD(SUB(C_ADDR, C_ADDR))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.addr - op2_insn->val.addr);
}
IR_FOLD(SUB(C_I8, C_I8))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i8 - op2_insn->val.i8);
}
IR_FOLD(SUB(C_I16, C_I16))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i16 - op2_insn->val.i16);
}
IR_FOLD(SUB(C_I32, C_I32))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i32 - op2_insn->val.i32);
}
IR_FOLD(SUB(C_I64, C_I64))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
@ -337,18 +407,53 @@ IR_FOLD(SUB(C_FLOAT, C_FLOAT))
}
IR_FOLD(MUL(C_U8, C_U8))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u8 * op2_insn->val.u8);
}
IR_FOLD(MUL(C_U16, C_U16))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u16 * op2_insn->val.u16);
}
IR_FOLD(MUL(C_U32, C_U32))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u32 * op2_insn->val.u32);
}
IR_FOLD(MUL(C_U64, C_U64))
IR_FOLD(MUL(C_ADDR, C_ADDR))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.u64 * op2_insn->val.u64);
}
IR_FOLD(MUL(C_ADDR, C_ADDR))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_U(op1_insn->val.addr * op2_insn->val.addr);
}
IR_FOLD(MUL(C_I8, C_I8))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i8 * op2_insn->val.i8);
}
IR_FOLD(MUL(C_I16, C_I16))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i16 * op2_insn->val.i16);
}
IR_FOLD(MUL(C_I32, C_I32))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
IR_FOLD_CONST_I(op1_insn->val.i32 * op2_insn->val.i32);
}
IR_FOLD(MUL(C_I64, C_I64))
{
IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
@ -1309,6 +1414,26 @@ IR_FOLD(BSWAP(BSWAP))
IR_FOLD_COPY(op1_insn->op1);
}
/* NOT over an integer comparison folds into the inverted comparison:
 * NOT(a == b) => a != b, NOT(a < b) => a >= b, etc. */
IR_FOLD(NOT(EQ))
IR_FOLD(NOT(NE))
IR_FOLD(NOT(LT))
IR_FOLD(NOT(GE))
IR_FOLD(NOT(LE))
IR_FOLD(NOT(GT))
IR_FOLD(NOT(ULT))
IR_FOLD(NOT(UGE))
IR_FOLD(NOT(ULE))
IR_FOLD(NOT(UGT))
{
if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
/* opt ^ 1 flips the comparison to its negation — assumes the opcode
 * table pairs each comparison with its inverse at adjacent numbers;
 * TODO(review): confirm this pairing holds for all ops listed above */
opt = op1_insn->opt ^ 1;
op1 = op1_insn->op1;
op2 = op1_insn->op2;
IR_FOLD_RESTART;
}
/* non-integer (FP) comparisons are not inverted here: NaN ordering makes
 * NOT(LT) != GE for floats */
IR_FOLD_NEXT;
}
IR_FOLD(ADD(_, C_U8))
IR_FOLD(ADD(_, C_U16))
IR_FOLD(ADD(_, C_U32))

View File

@ -756,7 +756,7 @@ typedef struct _ir_addrtab_bucket {
void ir_addrtab_init(ir_hashtab *tab, uint32_t size);
void ir_addrtab_free(ir_hashtab *tab);
ir_ref ir_addrtab_find(const ir_hashtab *tab, uint64_t key);
bool ir_addrtab_add(ir_hashtab *tab, uint64_t key, ir_ref val);
void ir_addrtab_set(ir_hashtab *tab, uint64_t key, ir_ref val);
/*** IR OP info ***/
extern const uint8_t ir_type_flags[IR_LAST_TYPE];
@ -770,6 +770,8 @@ extern const char *ir_op_name[IR_LAST_OP];
#define IR_IS_FOLDABLE_OP(op) ((op) <= IR_LAST_FOLDABLE_OP)
#define IR_IS_SYM_CONST(op) ((op) == IR_STR || (op) == IR_SYM || (op) == IR_FUNC)
ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx);
IR_ALWAYS_INLINE bool ir_const_is_true(const ir_insn *v)
{

View File

@ -603,14 +603,24 @@ static void ir_sccp_remove_unfeasible_merge_inputs(ir_ctx *ctx, ir_insn *_values
prev = input_insn->op1;
use_list = &ctx->use_lists[ref];
for (k = 0, p = &ctx->use_edges[use_list->refs]; k < use_list->count; k++, p++) {
use = *p;
use_insn = &ctx->ir_base[use];
IR_ASSERT((use_insn->op != IR_PHI) && "PHI must be already removed");
if (ir_op_flags[use_insn->op] & IR_OP_FLAG_CONTROL) {
next = use;
next_insn = use_insn;
break;
if (use_list->count == 1) {
next = ctx->use_edges[use_list->refs];
next_insn = &ctx->ir_base[next];
} else {
for (k = 0, p = &ctx->use_edges[use_list->refs]; k < use_list->count; k++, p++) {
use = *p;
use_insn = &ctx->ir_base[use];
IR_ASSERT((use_insn->op != IR_PHI) && "PHI must be already removed");
if (ir_op_flags[use_insn->op] & IR_OP_FLAG_CONTROL) {
IR_ASSERT(!next);
next = use;
next_insn = use_insn;
} else {
IR_ASSERT(use_insn->op1 == ref);
use_insn->op1 = prev;
ir_sccp_add_to_use_list(ctx, prev, use);
p = &ctx->use_edges[use_list->refs + k];
}
}
}
IR_ASSERT(prev && next);
@ -878,7 +888,8 @@ int ir_sccp(ir_ctx *ctx)
}
}
IR_MAKE_BOTTOM(i);
} else if ((flags & (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_MASK)) == (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_LOAD)
} else if (((flags & (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_MASK)) == (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_LOAD)
|| insn->op == IR_ALLOCA)
&& ctx->use_lists[i].count == 1) {
/* dead load */
_values[i].optx = IR_LOAD;
@ -928,7 +939,7 @@ int ir_sccp(ir_ctx *ctx)
#ifdef IR_DEBUG
if (ctx->flags & IR_DEBUG_SCCP) {
for (i = 1; i < ctx->insns_count; i++) {
if (IR_IS_CONST_OP(_values[i].op)) {
if (IR_IS_CONST_OP(_values[i].op) || IR_IS_SYM_CONST(_values[i].op)) {
fprintf(stderr, "%d. CONST(", i);
ir_print_const(ctx, &_values[i], stderr, true);
fprintf(stderr, ")\n");
@ -956,6 +967,10 @@ int ir_sccp(ir_ctx *ctx)
/* replace instruction by constant */
j = ir_const(ctx, value->val, value->type);
ir_sccp_replace_insn(ctx, _values, i, j, &worklist);
} else if (IR_IS_SYM_CONST(value->op)) {
/* replace instruction by constant */
j = ir_const_ex(ctx, value->val, value->type, value->optx);
ir_sccp_replace_insn(ctx, _values, i, j, &worklist);
#if IR_COMBO_COPY_PROPAGATION
} else if (value->op == IR_COPY) {
ir_sccp_replace_insn(ctx, _values, i, value->op1, &worklist);
@ -1008,7 +1023,8 @@ int ir_sccp(ir_ctx *ctx)
} else {
ir_sccp_fold2(ctx, _values, i, &worklist);
}
} else if ((ir_op_flags[insn->op] & (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_MASK)) == (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_LOAD)
} else if (((ir_op_flags[insn->op] & (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_MASK)) == (IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_LOAD)
|| insn->op == IR_ALLOCA)
&& ctx->use_lists[i].count == 1) {
/* dead load */
ir_ref next = ctx->use_edges[ctx->use_lists[i].refs];

View File

@ -2546,6 +2546,7 @@ store_int:
return insn->op;
case IR_SEXT:
case IR_ZEXT:
case IR_TRUNC:
case IR_BITCAST:
case IR_FP2INT:
case IR_FP2FP:
@ -6054,8 +6055,17 @@ static void ir_emit_trunc(ir_ctx *ctx, ir_ref def, ir_insn *insn)
if (op1_reg != def_reg) {
ir_emit_mov(ctx, dst_type, def_reg, op1_reg);
}
} else {
} else if (IR_IS_CONST_REF(insn->op1)) {
ir_emit_load(ctx, dst_type, def_reg, insn->op1);
} else {
ir_mem mem;
if (ir_rule(ctx, insn->op1) & IR_FUSED) {
mem = ir_fuse_load(ctx, def, insn->op1);
} else {
mem = ir_ref_spill_slot(ctx, insn->op1);
}
ir_emit_load_mem(ctx, dst_type, def_reg, mem);
}
if (IR_REG_SPILLED(ctx->regs[def][0])) {
ir_emit_store(ctx, dst_type, def, def_reg);
@ -6967,6 +6977,10 @@ static void ir_emit_alloca(ir_ctx *ctx, ir_ref def, ir_insn *insn)
dasm_State **Dst = &data->dasm_state;
ir_reg def_reg = IR_REG_NUM(ctx->regs[def][0]);
if (ctx->use_lists[def].count == 1) {
/* dead alloca */
return;
}
if (IR_IS_CONST_REF(insn->op2)) {
ir_insn *val = &ctx->ir_base[insn->op2];
int32_t size = val->val.i32;
@ -7336,26 +7350,30 @@ static void ir_emit_switch(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *insn)
}
}
if (IR_IS_32BIT(type, max)) {
| ASM_REG_IMM_OP cmp, type, op2_reg, max.i32
} else {
IR_ASSERT(ir_type_size[type] == 8);
IR_ASSERT(sizeof(void*) == 8);
if (default_label) {
if (IR_IS_32BIT(type, max)) {
| ASM_REG_IMM_OP cmp, type, op2_reg, max.i32
} else {
IR_ASSERT(ir_type_size[type] == 8);
IR_ASSERT(sizeof(void*) == 8);
|.if X64
| mov64 Rq(tmp_reg), max.i64
| cmp Rq(op2_reg), Rq(tmp_reg)
| mov64 Rq(tmp_reg), max.i64
| cmp Rq(op2_reg), Rq(tmp_reg)
|.endif
}
if (IR_IS_TYPE_SIGNED(type)) {
| jg =>default_label
} else {
| ja =>default_label
}
if (IR_IS_TYPE_SIGNED(type)) {
| jg =>default_label
} else {
| ja =>default_label
}
}
if (IR_IS_32BIT(type, min)) {
offset = -min.i64 * sizeof(void*);
if (IR_IS_SIGNED_32BIT(offset)) {
| ASM_REG_IMM_OP cmp, type, op2_reg, min.i32
if (default_label) {
| ASM_REG_IMM_OP cmp, type, op2_reg, min.i32
}
} else {
| ASM_REG_REG_OP sub, type, op2_reg, (int32_t)offset // TODO: reg clobbering
offset = 0;
@ -7368,10 +7386,13 @@ static void ir_emit_switch(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *insn)
offset = 0;
|.endif
}
if (IR_IS_TYPE_SIGNED(type)) {
| jl =>default_label
} else {
| jb =>default_label
if (default_label) {
if (IR_IS_TYPE_SIGNED(type)) {
| jl =>default_label
} else {
| jb =>default_label
}
}
if (sizeof(void*) == 8) {
|.if X64
@ -7441,25 +7462,29 @@ static void ir_emit_switch(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *insn)
|1:
for (i = 0; i <= (max.i64 - min.i64); i++) {
int b = labels[i];
ir_block *bb = &ctx->cfg_blocks[b];
ir_insn *insn = &ctx->ir_base[bb->end];
if (b) {
ir_block *bb = &ctx->cfg_blocks[b];
ir_insn *insn = &ctx->ir_base[bb->end];
if (insn->op == IR_IJMP && IR_IS_CONST_REF(insn->op2)) {
ir_ref prev = ctx->prev_ref[bb->end];
if (prev != bb->start && ctx->ir_base[prev].op == IR_SNAPSHOT) {
prev = ctx->prev_ref[prev];
}
if (prev == bb->start) {
void *addr = ir_jmp_addr(ctx, insn, &ctx->ir_base[insn->op2]);
| .aword &addr
if (ctx->ir_base[bb->start].op != IR_CASE_DEFAULT) {
bb->flags |= IR_BB_EMPTY;
if (insn->op == IR_IJMP && IR_IS_CONST_REF(insn->op2)) {
ir_ref prev = ctx->prev_ref[bb->end];
if (prev != bb->start && ctx->ir_base[prev].op == IR_SNAPSHOT) {
prev = ctx->prev_ref[prev];
}
if (prev == bb->start) {
void *addr = ir_jmp_addr(ctx, insn, &ctx->ir_base[insn->op2]);
| .aword &addr
if (ctx->ir_base[bb->start].op != IR_CASE_DEFAULT) {
bb->flags |= IR_BB_EMPTY;
}
continue;
}
continue;
}
| .aword =>b
} else {
| .aword 0
}
| .aword =>b
}
|.code
ir_mem_free(labels);
@ -8765,7 +8790,9 @@ static void ir_emit_load_params(ir_ctx *ctx)
dst_reg = IR_REG_NUM(ctx->regs[use][0]);
IR_ASSERT(src_reg != IR_REG_NONE || dst_reg != IR_REG_NONE ||
stack_offset == ctx->live_intervals[ctx->vregs[use]]->stack_spill_pos +
((ctx->flags & IR_USE_FRAME_POINTER) ? -ctx->stack_frame_size : ctx->call_stack_size));
((ctx->flags & IR_USE_FRAME_POINTER) ?
-(ctx->stack_frame_size - ctx->stack_frame_alignment) :
ctx->call_stack_size));
if (src_reg != dst_reg) {
ir_emit_param_move(ctx, insn->type, src_reg, dst_reg, use, stack_offset);
}