diff --git a/tcg/tcg.c b/tcg/tcg.c
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -133,6 +133,7 @@ static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg dest);
static void tcg_out_mb(TCGContext *s, unsigned bar);
static void tcg_out_br(TCGContext *s, TCGLabel *l);
static void tcg_out_set_carry(TCGContext *s);
@@ -1137,6 +1138,10 @@ static const TCGOutOpUnary outop_extrl_i64_i32 = {
};
#endif
+static const TCGOutOp outop_goto_ptr = {
+ .static_constraint = C_O0_I1(r),
+};
+
/*
* Register V as the TCGOutOp for O.
* This verifies that V is of type T, otherwise give a nice compiler error.
@@ -1198,6 +1203,8 @@ static const TCGOutOp * const all_outop[NB_OPS] = {
OUTOP(INDEX_op_subb1o, TCGOutOpAddSubCarry, outop_subbio),
OUTOP(INDEX_op_xor, TCGOutOpBinary, outop_xor),
+ [INDEX_op_goto_ptr] = &outop_goto_ptr,
+
#if TCG_TARGET_REG_BITS == 32
OUTOP(INDEX_op_brcond2_i32, TCGOutOpBrcond2, outop_brcond2),
OUTOP(INDEX_op_setcond2_i32, TCGOutOpSetcond2, outop_setcond2),
@@ -5784,6 +5791,11 @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
g_assert_not_reached();
#endif
+ case INDEX_op_goto_ptr:
+ tcg_debug_assert(!const_args[0]);
+ tcg_out_goto_ptr(s, new_args[0]);
+ break;
+
default:
if (def->flags & TCG_OPF_VECTOR) {
tcg_out_vec_op(s, op->opc, type - TCG_TYPE_V64,
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -1986,6 +1986,11 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
tcg_out_bti(s, BTI_J);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_insn(s, 3207, BR, a0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -2775,10 +2780,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType ext,
TCGArg a2 = args[2];
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out_insn(s, 3207, BR, a0);
- break;
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
tcg_out_ldst(s, I3312_LDRB, a0, a1, a2, 0);
@@ -3293,9 +3294,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -1801,6 +1801,11 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_b_reg(s, COND_AL, a0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -2530,10 +2535,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
const int const_args[TCG_MAX_OP_ARGS])
{
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out_b_reg(s, COND_AL, args[0]);
- break;
-
case INDEX_op_ld8u_i32:
tcg_out_ld8u(s, COND_AL, args[0], args[1], args[2]);
break;
@@ -2585,9 +2586,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -2593,6 +2593,12 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ /* Jump to the given host address (could be epilogue) */
+ tcg_out_modrm(s, OPC_GRP5, EXT5_JMPN_Ev, a0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -3437,10 +3443,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
rexw = type == TCG_TYPE_I32 ? 0 : P_REXW;
switch (opc) {
- case INDEX_op_goto_ptr:
- /* jmp to the given host address (could be epilogue) */
- tcg_out_modrm(s, OPC_GRP5, EXT5_JMPN_Ev, a0);
- break;
OP_32_64(ld8u):
/* Note that we can ignore REXW for the zero-extend to 64-bit. */
tcg_out_modrm_offset(s, OPC_MOVZBL, a0, a1, a2);
@@ -4093,9 +4095,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
case INDEX_op_ld8s_i32:
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -1295,6 +1295,11 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_opc_jirl(s, TCG_REG_ZERO, a0, 0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -1911,10 +1916,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
TCGArg a3 = args[3];
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out_opc_jirl(s, TCG_REG_ZERO, a0, 0);
- break;
-
case INDEX_op_ld8s_i32:
case INDEX_op_ld8s_i64:
tcg_out_ldst(s, OPC_LD_B, a0, a1, a2);
@@ -2485,9 +2486,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_st8_i32:
case INDEX_op_st8_i64:
case INDEX_op_st16_i32:
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -1571,6 +1571,16 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
}
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_opc_reg(s, OPC_JR, 0, a0, 0);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, a0);
+ } else {
+ tcg_out_nop(s);
+ }
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -2283,15 +2293,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
a2 = args[2];
switch (opc) {
- case INDEX_op_goto_ptr:
- /* jmp to the given host address (could be epilogue) */
- tcg_out_opc_reg(s, OPC_JR, 0, a0, 0);
- if (TCG_TARGET_REG_BITS == 64) {
- tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, a0);
- } else {
- tcg_out_nop(s);
- }
- break;
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
i1 = OPC_LBU;
@@ -2370,9 +2371,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -2843,6 +2843,13 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out32(s, MTSPR | RS(a0) | CTR);
+ tcg_out32(s, ADDI | TAI(TCG_REG_R3, 0, 0));
+ tcg_out32(s, BCCTR | BO_ALWAYS);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -3676,11 +3683,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
const int const_args[TCG_MAX_OP_ARGS])
{
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out32(s, MTSPR | RS(args[0]) | CTR);
- tcg_out32(s, ADDI | TAI(TCG_REG_R3, 0, 0));
- tcg_out32(s, BCCTR | BO_ALWAYS);
- break;
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]);
@@ -4371,9 +4373,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -1915,6 +1915,11 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, a0, 0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -2535,10 +2540,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
TCGArg a2 = args[2];
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, a0, 0);
- break;
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
tcg_out_ldst(s, OPC_LBU, a0, a1, a2);
@@ -2824,9 +2825,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -2213,6 +2213,11 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_insn(s, RR, BCR, S390_CC_ALWAYS, a0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -3033,14 +3038,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGArg a0;
-
switch (opc) {
- case INDEX_op_goto_ptr:
- a0 = args[0];
- tcg_out_insn(s, RR, BCR, S390_CC_ALWAYS, a0);
- break;
-
OP_32_64(ld8u):
/* ??? LLC (RXY format) is only present with the extended-immediate
facility, whereas LLGC is always present. */
@@ -3567,9 +3565,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
case INDEX_op_ld8s_i32:
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -1300,6 +1300,12 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
}
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_arithi(s, TCG_REG_G0, a0, 0, JMPL);
+ tcg_out_mov_delay(s, TCG_REG_TB, a0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -1963,11 +1969,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
a2 = args[2];
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out_arithi(s, TCG_REG_G0, a0, 0, JMPL);
- tcg_out_mov_delay(s, TCG_REG_TB, a0);
- break;
-
#define OP_32_64(x) \
glue(glue(case INDEX_op_, x), _i32): \
glue(glue(case INDEX_op_, x), _i64)
@@ -2034,9 +2035,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
case INDEX_op_ld8s_i32:
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -40,9 +40,6 @@ static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
switch (op) {
- case INDEX_op_goto_ptr:
- return C_O0_I1(r);
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
@@ -534,6 +531,11 @@ static void tcg_out_goto_tb(TCGContext *s, int which)
set_jmp_reset_offset(s, which);
}
+static void tcg_out_goto_ptr(TCGContext *s, TCGReg a0)
+{
+ tcg_out_op_r(s, INDEX_op_goto_ptr, a0);
+}
+
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
uintptr_t jmp_rx, uintptr_t jmp_rw)
{
@@ -1146,10 +1148,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
const int const_args[TCG_MAX_OP_ARGS])
{
switch (opc) {
- case INDEX_op_goto_ptr:
- tcg_out_op_r(s, opc, args[0]);
- break;
-
CASE_32_64(ld8u)
CASE_32_64(ld8s)
CASE_32_64(ld16u)

Split these functions out from tcg_out_op.
Define outop_goto_ptr generically.
Call tcg_out_goto_ptr from tcg_reg_alloc_op.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c                        | 12 ++++++++++++
 tcg/aarch64/tcg-target.c.inc     | 12 +++++-------
 tcg/arm/tcg-target.c.inc         | 12 +++++-------
 tcg/i386/tcg-target.c.inc        | 13 ++++++-------
 tcg/loongarch64/tcg-target.c.inc | 12 +++++-------
 tcg/mips/tcg-target.c.inc        | 22 ++++++++++------------
 tcg/ppc/tcg-target.c.inc         | 15 +++++++--------
 tcg/riscv/tcg-target.c.inc       | 12 +++++-------
 tcg/s390x/tcg-target.c.inc       | 15 +++++----------
 tcg/sparc64/tcg-target.c.inc     | 14 ++++++--------
 tcg/tci/tcg-target.c.inc         | 12 +++++-------
 11 files changed, 71 insertions(+), 80 deletions(-)
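For orientation, the sketch below is a small self-contained C program, separate from the patch, that models the table-driven shape described in the commit message: a generic per-opcode table entry publishes the operand constraint (the role played by C_O0_I1(r) in the patch), and the register allocator calls a single backend hook to emit the indirect branch. Every identifier in the sketch (HostReg, OutOp, reg_alloc_op, out_goto_ptr, and so on) is invented for illustration; the real counterparts are TCGOutOp, tcg_reg_alloc_op, and tcg_out_goto_ptr in the hunks above.

/*
 * Illustrative sketch only -- not QEMU code.  A generic table entry
 * carries the operand constraint, while the backend supplies a small
 * emitter hook that the register allocator invokes once the operand
 * is known to sit in a register.
 */
#include <assert.h>
#include <stdio.h>

typedef int HostReg;

enum { OP_GOTO_PTR, NB_OPS };

/* Generic description of one opcode: here just a constraint string. */
typedef struct {
    const char *static_constraint;
} OutOp;

static const OutOp outop_goto_ptr = {
    .static_constraint = "one register input, no output",
};

static const OutOp * const all_outop[NB_OPS] = {
    [OP_GOTO_PTR] = &outop_goto_ptr,
};

/* "Backend" hook: emit an indirect branch to the address held in reg. */
static void out_goto_ptr(HostReg reg)
{
    printf("emit: branch to address in host register %d\n", reg);
}

/* "Register allocator": consult the table, then call the backend hook. */
static void reg_alloc_op(int opc, HostReg reg, int op_is_const)
{
    printf("constraint for op %d: %s\n", opc, all_outop[opc]->static_constraint);
    switch (opc) {
    case OP_GOTO_PTR:
        assert(!op_is_const);   /* the branch target is never a constant */
        out_goto_ptr(reg);
        break;
    }
}

int main(void)
{
    reg_alloc_op(OP_GOTO_PTR, 5, 0);
    return 0;
}

The payoff visible in every backend hunk above is that tcg_out_op and tcg_target_op_def no longer need an INDEX_op_goto_ptr case of their own, which is where the 80 deleted lines in the diffstat come from.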