target/sparc: Move UMUL, SMUL to decodetree

Tested-by: Mark Cave-Ayland <mark.cave-ayland@ilande.co.uk>
Acked-by: Mark Cave-Ayland <mark.cave-ayland@ilande.co.uk>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Richard Henderson 2023-10-01 22:55:04 -07:00
parent 22188d7da8
commit b5372650e2
2 changed files with 6 additions and 18 deletions

target/sparc/insns.decode

@@ -167,6 +167,8 @@ XORN 10 ..... 0.0111 ..... . ............. @r_r_ri_cc
 ADDC 10 ..... 0.1000 ..... . ............. @r_r_ri_cc
 MULX 10 ..... 001001 ..... . ............. @r_r_ri_cc0
+UMUL 10 ..... 0.1010 ..... . ............. @r_r_ri_cc
+SMUL 10 ..... 0.1011 ..... . ............. @r_r_ri_cc
 Tcc_r 10 0 cond:4 111010 rs1:5 0 cc:1 0000000 rs2:5
 {
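For readers new to decodetree: each pattern line above is compiled (by scripts/decodetree.py) into a decoder that pulls the dotted bit fields into an argument struct and calls a per-instruction trans_<NAME>() hook in translate.c. Below is a minimal sketch of the interface the two new UMUL/SMUL lines rely on; the struct layout and field names are assumptions inferred from the @r_r_ri_cc format, not a copy of the generated code.

    /* Hedged sketch only -- the real struct and decode() entry point are
     * generated from insns.decode; field names are assumed from @r_r_ri_cc. */
    typedef struct DisasContext DisasContext;

    typedef struct {
        int rd;          /* destination register, insn[29:25] */
        int rs1;         /* first source register, insn[18:14] */
        int imm;         /* i bit: 0 = register rs2, 1 = simm13 immediate */
        int rs2_or_imm;  /* rs2 number, or sign-extended simm13 when imm == 1 */
        int cc;          /* 1 for the UMULcc/SMULcc forms that update icc */
    } arg_r_r_ri_cc;

    /* The generated decoder dispatches to hooks like these: */
    bool trans_UMUL(DisasContext *dc, arg_r_r_ri_cc *a);
    bool trans_SMUL(DisasContext *dc, arg_r_r_ri_cc *a);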

target/sparc/translate.c

@@ -2888,6 +2888,7 @@ static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
 #ifdef TARGET_SPARC64
 # define avail_32(C) false
 # define avail_ASR17(C) false
+# define avail_MUL(C) true
 # define avail_POWERDOWN(C) false
 # define avail_64(C) true
 # define avail_GL(C) ((C)->def->features & CPU_FEATURE_GL)
@@ -2895,6 +2896,7 @@ static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
 #else
 # define avail_32(C) true
 # define avail_ASR17(C) ((C)->def->features & CPU_FEATURE_ASR17)
+# define avail_MUL(C) ((C)->def->features & CPU_FEATURE_MUL)
 # define avail_POWERDOWN(C) ((C)->def->features & CPU_FEATURE_POWERDOWN)
 # define avail_64(C) false
 # define avail_GL(C) false
@@ -4098,6 +4100,8 @@ TRANS(ORN, ALL, do_logic, a, tcg_gen_orc_tl, NULL)
 TRANS(XORN, ALL, do_logic, a, tcg_gen_eqv_tl, NULL)
 TRANS(MULX, 64, do_arith, a, -1, tcg_gen_mul_tl, tcg_gen_muli_tl, NULL)
+TRANS(UMUL, MUL, do_logic, a, gen_op_umul, NULL)
+TRANS(SMUL, MUL, do_logic, a, gen_op_smul, NULL)
 
 static bool trans_OR(DisasContext *dc, arg_r_r_ri_cc *a)
 {
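For context, each TRANS() line above expands into one of those trans_* hooks: the availability predicate is checked first, then the shared emitter runs. A hedged sketch of what the new UMUL line becomes, assuming TRANS keeps the usual shape it has earlier in this file:

    /* Hedged sketch, assuming TRANS(NAME, AVAIL, FUNC, ...) expands to a
     * trans_##NAME function of the form
     *     { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }
     */
    static bool trans_UMUL(DisasContext *dc, arg_UMUL *a)
    {
        /* avail_MUL() stands in for the old CHECK_IU_FEATURE(dc, MUL):
         * always true on sparc64, gated on CPU_FEATURE_MUL for 32-bit CPUs. */
        return avail_MUL(dc) && do_logic(dc, a, gen_op_umul, NULL);
    }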
@@ -4564,24 +4568,6 @@ static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
                 cpu_src1 = get_src1(dc, insn);
                 cpu_src2 = get_src2(dc, insn);
                 switch (xop & ~0x10) {
-                case 0xa: /* umul */
-                    CHECK_IU_FEATURE(dc, MUL);
-                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
-                    if (xop & 0x10) {
-                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
-                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
-                        dc->cc_op = CC_OP_LOGIC;
-                    }
-                    break;
-                case 0xb: /* smul */
-                    CHECK_IU_FEATURE(dc, MUL);
-                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
-                    if (xop & 0x10) {
-                        tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
-                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
-                        dc->cc_op = CC_OP_LOGIC;
-                    }
-                    break;
                 case 0xc: /* subx, V9 subc */
                     gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                     (xop & 0x10));
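Both the removed legacy cases and the new do_logic path funnel into gen_op_umul()/gen_op_smul(), so the emitted semantics are unchanged; the cc forms set the condition codes from the result like a logical operation, which is why do_logic and CC_OP_LOGIC are used. For reference, an illustrative plain-C rendering of the SPARC V8 UMUL/SMUL behaviour those helpers implement (32x32 -> 64-bit multiply, high half to %y); this is not the TCG code from translate.c:

    #include <stdint.h>

    /* Illustrative only: architectural UMUL/SMUL semantics.  The product's
     * low 32 bits (the full 64 bits on a V9 register) go to rd, and the
     * high 32 bits are written to the %y register. */
    static uint64_t sparc_umul(uint32_t rs1, uint32_t rs2, uint32_t *y)
    {
        uint64_t prod = (uint64_t)rs1 * (uint64_t)rs2;
        *y = (uint32_t)(prod >> 32);
        return prod;
    }

    static int64_t sparc_smul(int32_t rs1, int32_t rs2, uint32_t *y)
    {
        int64_t prod = (int64_t)rs1 * (int64_t)rs2;
        *y = (uint32_t)((uint64_t)prod >> 32);
        return prod;
    }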