From: Richard Henderson
Subject: [PULL 22/38] tcg/tci: Push opcode emit into each case
Date: Wed, 17 Mar 2021 09:34:28 -0600
We're about to split bytecode output out into helpers, but we can't
convert the cases one at a time while tcg_out_op_t is emitted outside
of the switch.  Push the opcode emit, together with the length-byte
fixup, into each case.
Reviewed-by: Philippe Mathieu-Daudé <f4bug@amsat.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tci/tcg-target.c.inc | 35 ++++++++++++++++++++++++++++++++---
1 file changed, 32 insertions(+), 3 deletions(-)
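For orientation, here is a sketch of where this refactoring is heading: once every case emits its own opcode and patches its own length byte, a case that writes two registers can later be folded into a small helper. This is an illustration only, not part of the patch; the helper name tcg_out_op_rr and its exact signature are assumptions, and it relies on the file's existing tcg_out_op_t()/tcg_out_r() helpers, with tcg_out_op_t() assumed to reserve the length byte at offset 1 (consistent with the old_code_ptr[1] fixups in the patch below).

/*
 * Sketch only, not part of this patch.  Assumes tcg_out_op_t()
 * writes the opcode and reserves the length byte at offset 1,
 * matching the old_code_ptr[1] fixup used in tcg_out_op() below.
 */
static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);      /* opcode + length placeholder */
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);

    /* Patch the total instruction length into byte 1. */
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

With such a helper in place, a two-register arm like CASE_32_64(neg) would reduce to a single tcg_out_op_rr(s, opc, args[0], args[1]) call.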
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index 7fb3b04eaf..c5b061fe76 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -385,40 +385,48 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
{
uint8_t *old_code_ptr = s->code_ptr;
- tcg_out_op_t(s, opc);
-
switch (opc) {
case INDEX_op_exit_tb:
+ tcg_out_op_t(s, opc);
tcg_out_i(s, args[0]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_goto_tb:
tcg_debug_assert(s->tb_jmp_insn_offset == 0);
/* indirect jump method. */
+ tcg_out_op_t(s, opc);
tcg_out_i(s, (uintptr_t)(s->tb_jmp_target_addr + args[0]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
set_jmp_reset_offset(s, args[0]);
break;
case INDEX_op_br:
+ tcg_out_op_t(s, opc);
tci_out_label(s, arg_label(args[0]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(setcond)
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out8(s, args[3]); /* condition */
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#if TCG_TARGET_REG_BITS == 32
case INDEX_op_setcond2_i32:
/* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
tcg_out_r(s, args[4]);
tcg_out8(s, args[5]); /* condition */
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#endif
@@ -436,10 +444,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
CASE_64(st32)
CASE_64(st)
stack_bounds_check(args[1], args[2]);
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_debug_assert(args[2] == (int32_t)args[2]);
tcg_out32(s, args[2]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(add)
@@ -462,12 +472,15 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
CASE_32_64(divu) /* Optional (TCG_TARGET_HAS_div_*). */
CASE_32_64(rem) /* Optional (TCG_TARGET_HAS_div_*). */
CASE_32_64(remu) /* Optional (TCG_TARGET_HAS_div_*). */
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(deposit) /* Optional (TCG_TARGET_HAS_deposit_*). */
+ tcg_out_op_t(s, opc);
{
TCGArg pos = args[3], len = args[4];
TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;
@@ -481,13 +494,16 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out8(s, pos);
tcg_out8(s, len);
}
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(brcond)
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out8(s, args[2]); /* condition */
tci_out_label(s, arg_label(args[3]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
@@ -503,48 +519,59 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
CASE_32_64(bswap16) /* Optional (TCG_TARGET_HAS_bswap16_*). */
CASE_32_64(bswap32) /* Optional (TCG_TARGET_HAS_bswap32_*). */
CASE_64(bswap64) /* Optional (TCG_TARGET_HAS_bswap64_i64). */
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#if TCG_TARGET_REG_BITS == 32
case INDEX_op_add2_i32:
case INDEX_op_sub2_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
tcg_out_r(s, args[4]);
tcg_out_r(s, args[5]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_brcond2_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
tcg_out8(s, args[4]); /* condition */
tci_out_label(s, arg_label(args[5]));
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_mulu2_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
tcg_out_r(s, args[2]);
tcg_out_r(s, args[3]);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
#endif
case INDEX_op_qemu_ld_i32:
case INDEX_op_qemu_st_i32:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, *args++);
tcg_out_r(s, *args++);
if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
tcg_out_r(s, *args++);
}
tcg_out32(s, *args++);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_qemu_ld_i64:
case INDEX_op_qemu_st_i64:
+ tcg_out_op_t(s, opc);
tcg_out_r(s, *args++);
if (TCG_TARGET_REG_BITS == 32) {
tcg_out_r(s, *args++);
@@ -554,9 +581,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out_r(s, *args++);
}
tcg_out32(s, *args++);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_mb:
+ tcg_out_op_t(s, opc);
+ old_code_ptr[1] = s->code_ptr - old_code_ptr;
break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
@@ -565,7 +595,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
default:
tcg_abort();
}
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
--
2.25.1