@@ -186,7 +186,6 @@ DEF(muls2_i64, 2, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_muls2_i64))
DEF(muluh_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_muluh_i64))
DEF(mulsh_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_mulsh_i64))
-#define TLADDR_ARGS (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? 1 : 2)
#define DATA64_ARGS (TCG_TARGET_REG_BITS == 64 ? 1 : 2)
/* QEMU specific */
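
Note: with the qemu_ld/st opcodes split by guest address size (next hunk),
each opcode's operand count is fixed at definition time, so TLADDR_ARGS is
no longer needed; DATA64_ARGS (the number of host registers that carry a
64-bit value) remains. In the new DEFs the input sums read data-operands +
address-operands. A worked expansion, illustration only:

    /* Assuming a 32-bit host, so DATA64_ARGS == 2:
     * DEF(qemu_st_a64_i64, 0, DATA64_ARGS + DATA64_ARGS, 1, ...)
     *   -> 0 outputs, 4 inputs (data lo/hi + addr lo/hi), 1 MemOpIdx arg.
     * On a 64-bit host the same op has 2 inputs (data + addr).
     */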
@@ -199,25 +198,44 @@ DEF(goto_ptr, 0, 1, 0, TCG_OPF_BB_EXIT | TCG_OPF_BB_END)
DEF(plugin_cb_start, 0, 0, 3, TCG_OPF_NOT_PRESENT)
DEF(plugin_cb_end, 0, 0, 0, TCG_OPF_NOT_PRESENT)
-DEF(qemu_ld_i32, 1, TLADDR_ARGS, 1,
+/* Replicate ld/st ops for 32 and 64-bit guest addresses. */
+DEF(qemu_ld_a32_i32, 1, 1, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
-DEF(qemu_st_i32, 0, TLADDR_ARGS + 1, 1,
+DEF(qemu_st_a32_i32, 0, 1 + 1, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
-DEF(qemu_ld_i64, DATA64_ARGS, TLADDR_ARGS, 1,
+DEF(qemu_ld_a32_i64, DATA64_ARGS, 1, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
-DEF(qemu_st_i64, 0, TLADDR_ARGS + DATA64_ARGS, 1,
+DEF(qemu_st_a32_i64, 0, DATA64_ARGS + 1, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
+
+DEF(qemu_ld_a64_i32, 1, DATA64_ARGS, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
+DEF(qemu_st_a64_i32, 0, 1 + DATA64_ARGS, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
+DEF(qemu_ld_a64_i64, DATA64_ARGS, DATA64_ARGS, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
+DEF(qemu_st_a64_i64, 0, DATA64_ARGS + DATA64_ARGS, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
/* Only used by i386 to cope with stupid register constraints. */
-DEF(qemu_st8_i32, 0, TLADDR_ARGS + 1, 1,
+DEF(qemu_st8_a32_i32, 0, 1 + 1, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS |
+ IMPL(TCG_TARGET_HAS_qemu_st8_i32))
+DEF(qemu_st8_a64_i32, 0, 1 + DATA64_ARGS, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS |
IMPL(TCG_TARGET_HAS_qemu_st8_i32))
/* Only for 64-bit hosts at the moment. */
-DEF(qemu_ld_i128, 2, 1, 1,
+DEF(qemu_ld_a32_i128, 2, 1, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT |
IMPL(TCG_TARGET_HAS_qemu_ldst_i128))
-DEF(qemu_st_i128, 0, 3, 1,
+DEF(qemu_ld_a64_i128, 2, 1, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT |
+ IMPL(TCG_TARGET_HAS_qemu_ldst_i128))
+DEF(qemu_st_a32_i128, 0, 3, 1,
+ TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT |
+ IMPL(TCG_TARGET_HAS_qemu_ldst_i128))
+DEF(qemu_st_a64_i128, 0, 3, 1,
TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT |
IMPL(TCG_TARGET_HAS_qemu_ldst_i128))
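
The replacement opcodes form a matrix over {ld,st} x {a32,a64} x
{i32,i64,i128}, plus the i386-only st8 pair. The naming convention,
spelled out (illustration, not code from the patch):

    /* INDEX_op_qemu_{ld,st}[8]_{a32,a64}_{i32,i64,i128}
     *   a32/a64      - guest address width
     *   i32/i64/i128 - data width
     * e.g. a 128-bit load at a 64-bit guest address is
     * INDEX_op_qemu_ld_a64_i128.
     */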
@@ -291,7 +309,6 @@ DEF(tci_movi, 1, 0, 1, TCG_OPF_NOT_PRESENT)
DEF(tci_movl, 1, 0, 1, TCG_OPF_NOT_PRESENT)
#endif
-#undef TLADDR_ARGS
#undef DATA64_ARGS
#undef IMPL
#undef IMPL64
@@ -2184,13 +2184,22 @@ void tcg_optimize(TCGContext *s)
CASE_OP_32_64_VEC(orc):
done = fold_orc(&ctx, op);
break;
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
done = fold_qemu_ld(&ctx, op);
break;
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st8_i32:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st8_a32_i32:
+ case INDEX_op_qemu_st8_a64_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
done = fold_qemu_st(&ctx, op);
break;
CASE_OP_32_64(rem):
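
Every switch over these opcodes now lists twice as many cases. Should the
duplication grow tiresome, a pasting helper in the spirit of this file's
CASE_OP_32_64 macros could fold an address-size pair back into one label;
a hypothetical sketch, not part of the patch:

    #define CASE_QEMU_LDST(op, ty)          \
        case INDEX_op_qemu_##op##_a32_##ty: \
        case INDEX_op_qemu_##op##_a64_##ty

    /* usage: CASE_QEMU_LDST(ld, i32): ... */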
@@ -164,6 +164,7 @@ static void tcg_gen_qemu_ld_i32_int(TCGv_i32 val, TCGTemp *addr,
MemOp orig_memop;
MemOpIdx orig_oi, oi;
TCGv_i64 copy_addr;
+ TCGOpcode opc;
tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
orig_memop = memop = tcg_canonicalize_memop(memop, 0, 0);
@@ -179,7 +180,12 @@ static void tcg_gen_qemu_ld_i32_int(TCGv_i32 val, TCGTemp *addr,
}
copy_addr = plugin_maybe_preserve_addr(addr);
- gen_ldst(INDEX_op_qemu_ld_i32, tcgv_i32_temp(val), NULL, addr, oi);
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_ld_a32_i32;
+ } else {
+ opc = INDEX_op_qemu_ld_a64_i32;
+ }
+ gen_ldst(opc, tcgv_i32_temp(val), NULL, addr, oi);
plugin_gen_mem_callbacks(copy_addr, addr, orig_oi, QEMU_PLUGIN_MEM_R);
if ((orig_memop ^ memop) & MO_BSWAP) {
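
This if/else on tcg_ctx->addr_type is the selection pattern repeated for
every generator in this file. Factored out it would look roughly like the
sketch below (qemu_op_for_addr is illustrative, not a function in the
patch; the patch keeps the selection inline):

    static TCGOpcode qemu_op_for_addr(TCGOpcode a32, TCGOpcode a64)
    {
        return tcg_ctx->addr_type == TCG_TYPE_I32 ? a32 : a64;
    }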
@@ -235,9 +241,17 @@ static void tcg_gen_qemu_st_i32_int(TCGv_i32 val, TCGTemp *addr,
}
if (TCG_TARGET_HAS_qemu_st8_i32 && (memop & MO_SIZE) == MO_8) {
- opc = INDEX_op_qemu_st8_i32;
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_st8_a32_i32;
+ } else {
+ opc = INDEX_op_qemu_st8_a64_i32;
+ }
} else {
- opc = INDEX_op_qemu_st_i32;
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_st_a32_i32;
+ } else {
+ opc = INDEX_op_qemu_st_a64_i32;
+ }
}
gen_ldst(opc, tcgv_i32_temp(val), NULL, addr, oi);
plugin_gen_mem_callbacks(NULL, addr, orig_oi, QEMU_PLUGIN_MEM_W);
@@ -261,6 +275,7 @@ static void tcg_gen_qemu_ld_i64_int(TCGv_i64 val, TCGTemp *addr,
MemOp orig_memop;
MemOpIdx orig_oi, oi;
TCGv_i64 copy_addr;
+ TCGOpcode opc;
if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
tcg_gen_qemu_ld_i32_int(TCGV_LOW(val), addr, idx, memop);
@@ -286,7 +301,12 @@ static void tcg_gen_qemu_ld_i64_int(TCGv_i64 val, TCGTemp *addr,
}
copy_addr = plugin_maybe_preserve_addr(addr);
- gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, oi);
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_ld_a32_i64;
+ } else {
+ opc = INDEX_op_qemu_ld_a64_i64;
+ }
+ gen_ldst_i64(opc, val, addr, oi);
plugin_gen_mem_callbacks(copy_addr, addr, orig_oi, QEMU_PLUGIN_MEM_R);
if ((orig_memop ^ memop) & MO_BSWAP) {
@@ -322,6 +342,7 @@ static void tcg_gen_qemu_st_i64_int(TCGv_i64 val, TCGTemp *addr,
{
TCGv_i64 swap = NULL;
MemOpIdx orig_oi, oi;
+ TCGOpcode opc;
if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
tcg_gen_qemu_st_i32_int(TCGV_LOW(val), addr, idx, memop);
@@ -352,7 +373,12 @@ static void tcg_gen_qemu_st_i64_int(TCGv_i64 val, TCGTemp *addr,
oi = make_memop_idx(memop, idx);
}
- gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, oi);
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_st_a32_i64;
+ } else {
+ opc = INDEX_op_qemu_st_a64_i64;
+ }
+ gen_ldst_i64(opc, val, addr, oi);
plugin_gen_mem_callbacks(NULL, addr, orig_oi, QEMU_PLUGIN_MEM_W);
if (swap) {
@@ -465,6 +491,7 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
{
const MemOpIdx orig_oi = make_memop_idx(memop, idx);
TCGv_i64 ext_addr = NULL;
+ TCGOpcode opc;
tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
@@ -484,8 +511,12 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
hi = TCGV128_HIGH(val);
}
- gen_ldst(INDEX_op_qemu_ld_i128, tcgv_i64_temp(lo),
- tcgv_i64_temp(hi), addr, oi);
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_ld_a32_i128;
+ } else {
+ opc = INDEX_op_qemu_ld_a64_i128;
+ }
+ gen_ldst(opc, tcgv_i64_temp(lo), tcgv_i64_temp(hi), addr, oi);
if (need_bswap) {
tcg_gen_bswap64_i64(lo, lo);
@@ -501,6 +532,12 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
canonicalize_memop_i128_as_i64(mop, memop);
need_bswap = (mop[0] ^ memop) & MO_BSWAP;
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_ld_a32_i64;
+ } else {
+ opc = INDEX_op_qemu_ld_a64_i64;
+ }
+
/*
* Since there are no global TCGv_i128, there is no visible state
* changed if the second load faults. Load directly into the two
@@ -515,7 +552,7 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
}
oi = make_memop_idx(mop[0], idx);
- gen_ldst_i64(INDEX_op_qemu_ld_i64, x, addr, oi);
+ gen_ldst_i64(opc, x, addr, oi);
if (need_bswap) {
tcg_gen_bswap64_i64(x, x);
@@ -531,7 +568,7 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
addr_p8 = tcgv_i64_temp(t);
}
- gen_ldst_i64(INDEX_op_qemu_ld_i64, y, addr_p8, oi);
+ gen_ldst_i64(opc, y, addr_p8, oi);
tcg_temp_free_internal(addr_p8);
if (need_bswap) {
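
When TCG_TARGET_HAS_qemu_ldst_i128 is false, the 128-bit load is lowered
to two 64-bit loads: opc is chosen once from the address type and reused
for both halves, the second of which targets addr + 8. A sketch of the
address computation elided between the hunks above, consistent with the
visible "addr_p8 = tcgv_i64_temp(t)" tail (64-bit guest address case):

    TCGv_i64 t = tcg_temp_ebb_new_i64();
    tcg_gen_addi_i64(t, temp_tcgv_i64(addr), 8);
    addr_p8 = tcgv_i64_temp(t);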
@@ -564,6 +601,7 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
{
const MemOpIdx orig_oi = make_memop_idx(memop, idx);
TCGv_i64 ext_addr = NULL;
+ TCGOpcode opc;
tcg_gen_req_mo(TCG_MO_ST_LD | TCG_MO_ST_ST);
@@ -586,8 +624,12 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
hi = TCGV128_HIGH(val);
}
- gen_ldst(INDEX_op_qemu_st_i128, tcgv_i64_temp(lo),
- tcgv_i64_temp(hi), addr, oi);
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_st_a32_i128;
+ } else {
+ opc = INDEX_op_qemu_st_a64_i128;
+ }
+ gen_ldst(opc, tcgv_i64_temp(lo), tcgv_i64_temp(hi), addr, oi);
if (need_bswap) {
tcg_temp_free_i64(lo);
@@ -600,6 +642,12 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
canonicalize_memop_i128_as_i64(mop, memop);
+ if (tcg_ctx->addr_type == TCG_TYPE_I32) {
+ opc = INDEX_op_qemu_st_a32_i64;
+ } else {
+ opc = INDEX_op_qemu_st_a64_i64;
+ }
+
if ((memop & MO_BSWAP) == MO_LE) {
x = TCGV128_LOW(val);
y = TCGV128_HIGH(val);
@@ -613,8 +661,8 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
tcg_gen_bswap64_i64(b, x);
x = b;
}
- gen_ldst_i64(INDEX_op_qemu_st_i64, x, addr,
- make_memop_idx(mop[0], idx));
+
+ gen_ldst_i64(opc, x, addr, make_memop_idx(mop[0], idx));
if (tcg_ctx->addr_type == TCG_TYPE_I32) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
@@ -628,13 +676,10 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
if (b) {
tcg_gen_bswap64_i64(b, y);
- y = b;
- }
- gen_ldst_i64(INDEX_op_qemu_st_i64, y, addr_p8,
- make_memop_idx(mop[1], idx));
-
- if (b) {
+ gen_ldst_i64(opc, b, addr_p8, make_memop_idx(mop[1], idx));
tcg_temp_free_i64(b);
+ } else {
+ gen_ldst_i64(opc, y, addr_p8, make_memop_idx(mop[1], idx));
}
tcg_temp_free_internal(addr_p8);
} else {
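
The reshuffled bswap tail above is behavior-preserving; only the store
moves into the branch so the swapped temp is used directly. In outline,
with store() standing in for gen_ldst_i64(opc, ..., addr_p8, ...):

    /* before: if (b) { bswap64(b, y); y = b; }
     *         store(y); if (b) free(b);
     * after:  if (b) { bswap64(b, y); store(b); free(b); }
     *         else   { store(y); }
     */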
@@ -1839,17 +1839,24 @@ bool tcg_op_supported(TCGOpcode op)
case INDEX_op_exit_tb:
case INDEX_op_goto_tb:
case INDEX_op_goto_ptr:
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_ld_i64:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
return true;
- case INDEX_op_qemu_st8_i32:
+ case INDEX_op_qemu_st8_a32_i32:
+ case INDEX_op_qemu_st8_a64_i32:
return TCG_TARGET_HAS_qemu_st8_i32;
- case INDEX_op_qemu_ld_i128:
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
return TCG_TARGET_HAS_qemu_ldst_i128;
case INDEX_op_mov_i32:
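
Note that tcg_op_supported() answers identically for the two address-size
variants of each op, since every pair shares a case arm. A hypothetical
spot check, not part of the patch:

    tcg_debug_assert(tcg_op_supported(INDEX_op_qemu_st8_a32_i32) ==
                     tcg_op_supported(INDEX_op_qemu_st8_a64_i32));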
@@ -2464,13 +2471,20 @@ static void tcg_dump_ops(TCGContext *s, FILE *f, bool have_prefs)
}
i = 1;
break;
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st8_i32:
- case INDEX_op_qemu_ld_i64:
- case INDEX_op_qemu_st_i64:
- case INDEX_op_qemu_ld_i128:
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st8_a32_i32:
+ case INDEX_op_qemu_st8_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
{
const char *s_al, *s_op, *s_at;
MemOpIdx oi = op->args[k++];
@@ -922,7 +922,8 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
tb_ptr = ptr;
break;
- case INDEX_op_qemu_ld_i32:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
tci_args_rrm(insn, &r0, &r1, &oi);
taddr = regs[r1];
@@ -934,7 +935,8 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
regs[r0] = tmp32;
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
if (TCG_TARGET_REG_BITS == 64) {
tci_args_rrm(insn, &r0, &r1, &oi);
taddr = regs[r1];
@@ -954,7 +956,8 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
}
break;
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
tci_args_rrm(insn, &r0, &r1, &oi);
taddr = regs[r1];
@@ -966,7 +969,8 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
tci_qemu_st(env, taddr, tmp32, oi, tb_ptr);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
if (TCG_TARGET_REG_BITS == 64) {
tci_args_rrm(insn, &r0, &r1, &oi);
taddr = regs[r1];
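
In the interpreter the paired a32/a64 cases share one body: operand decode
still keys on TARGET_LONG_BITS and TCG_TARGET_REG_BITS, which stays
correct because the translator only emits the variant matching the guest
configuration. Stated as the assumed invariant:

    /* TARGET_LONG_BITS == 32  ->  only _a32_ opcodes are generated;
     * TARGET_LONG_BITS == 64  ->  only _a64_ opcodes are generated. */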
@@ -1251,15 +1255,21 @@ int print_insn_tci(bfd_vma addr, disassemble_info *info)
str_r(r3), str_r(r4), str_r(r5));
break;
- case INDEX_op_qemu_ld_i64:
- case INDEX_op_qemu_st_i64:
- len = DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ len = 1 + 1;
+ goto do_qemu_ldst;
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ len = 1 + DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
+ goto do_qemu_ldst;
+ case INDEX_op_qemu_ld_a64_i64:
+ case INDEX_op_qemu_st_a64_i64:
+ len = 2 * DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
goto do_qemu_ldst;
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_st_i32:
- len = 1;
do_qemu_ldst:
- len += DIV_ROUND_UP(TARGET_LONG_BITS, TCG_TARGET_REG_BITS);
switch (len) {
case 2:
tci_args_rrm(insn, &r0, &r1, &oi);
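
The disassembler now derives the operand count from the opcode rather than
from TARGET_LONG_BITS. Worked out (counting register operands ahead of the
MemOpIdx; illustration only):

    /* 32-bit host (TCG_TARGET_REG_BITS == 32):
     *   a32_i32:          1 + 1                     = 2
     *   a32_i64, a64_i32: 1 + DIV_ROUND_UP(64, 32)  = 3
     *   a64_i64:          2 * DIV_ROUND_UP(64, 32)  = 4
     * 64-bit host: all three formulas evaluate to 2.
     */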
@@ -2328,18 +2328,24 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_insn(s, 3506, CSEL, ext, a0, REG0(3), REG0(4), args[5]);
break;
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
tcg_out_qemu_ld(s, a0, a1, a2, ext);
break;
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
tcg_out_qemu_st(s, REG0(0), a1, a2, ext);
break;
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
tcg_out_qemu_ld128(s, a0, a1, a2, args[3]);
break;
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
tcg_out_qemu_st128(s, REG0(0), REG0(1), a2, args[3]);
break;
@@ -2976,15 +2982,21 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_movcond_i64:
return C_O1_I4(r, r, rA, rZ, rZ);
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
return C_O1_I1(r, l);
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
return C_O2_I1(r, r, l);
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
return C_O0_I2(lZ, l);
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
return C_O0_I3(lZ, lZ, l);
case INDEX_op_deposit_i32:
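
Reading aid for this and the following constraint hunks: C_Om_In(...)
lists m output constraints, then n input constraints. On aarch64, 'r' is
any general register, 'l' the register class usable as a qemu_ld/st
address, and a 'Z' suffix additionally accepts the constant zero (XZR).
For example:

    /* C_O1_I1(r, l)      - one output, one address input
     * C_O0_I3(lZ, lZ, l) - no outputs; 128-bit store data as two inputs
     *                      (each may be zero), plus the address input
     */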
@@ -1985,41 +1985,36 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
ARITH_MOV, args[0], 0, 0);
break;
- case INDEX_op_qemu_ld_i32:
- if (TARGET_LONG_BITS == 32) {
- tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
- args[2], TCG_TYPE_I32);
- } else {
- tcg_out_qemu_ld(s, args[0], -1, args[1], args[2],
- args[3], TCG_TYPE_I32);
- }
+ case INDEX_op_qemu_ld_a32_i32:
+ tcg_out_qemu_ld(s, args[0], -1, args[1], -1, args[2], TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
- if (TARGET_LONG_BITS == 32) {
- tcg_out_qemu_ld(s, args[0], args[1], args[2], -1,
- args[3], TCG_TYPE_I64);
- } else {
- tcg_out_qemu_ld(s, args[0], args[1], args[2], args[3],
- args[4], TCG_TYPE_I64);
- }
+ case INDEX_op_qemu_ld_a64_i32:
+ tcg_out_qemu_ld(s, args[0], -1, args[1], args[2],
+ args[3], TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i32:
- if (TARGET_LONG_BITS == 32) {
- tcg_out_qemu_st(s, args[0], -1, args[1], -1,
- args[2], TCG_TYPE_I32);
- } else {
- tcg_out_qemu_st(s, args[0], -1, args[1], args[2],
- args[3], TCG_TYPE_I32);
- }
+ case INDEX_op_qemu_ld_a32_i64:
+ tcg_out_qemu_ld(s, args[0], args[1], args[2], -1,
+ args[3], TCG_TYPE_I64);
break;
- case INDEX_op_qemu_st_i64:
- if (TARGET_LONG_BITS == 32) {
- tcg_out_qemu_st(s, args[0], args[1], args[2], -1,
- args[3], TCG_TYPE_I64);
- } else {
- tcg_out_qemu_st(s, args[0], args[1], args[2], args[3],
- args[4], TCG_TYPE_I64);
- }
+ case INDEX_op_qemu_ld_a64_i64:
+ tcg_out_qemu_ld(s, args[0], args[1], args[2], args[3],
+ args[4], TCG_TYPE_I64);
+ break;
+
+ case INDEX_op_qemu_st_a32_i32:
+ tcg_out_qemu_st(s, args[0], -1, args[1], -1, args[2], TCG_TYPE_I32);
+ break;
+ case INDEX_op_qemu_st_a64_i32:
+ tcg_out_qemu_st(s, args[0], -1, args[1], args[2],
+ args[3], TCG_TYPE_I32);
+ break;
+ case INDEX_op_qemu_st_a32_i64:
+ tcg_out_qemu_st(s, args[0], args[1], args[2], -1,
+ args[3], TCG_TYPE_I64);
+ break;
+ case INDEX_op_qemu_st_a64_i64:
+ tcg_out_qemu_st(s, args[0], args[1], args[2], args[3],
+ args[4], TCG_TYPE_I64);
break;
case INDEX_op_bswap16_i32:
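
Since arm is a 32-bit-only host, the old TARGET_LONG_BITS conditionals
unfold into distinct opcode cases with no residual runtime test: the
_a32_ cases pass a single address register, the _a64_ cases always a
pair. For qemu_ld_a64_i64 the operand layout is:

    /* args[0], args[1] - data lo/hi
     * args[2], args[3] - guest address lo/hi
     * args[4]          - MemOpIdx
     */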
@@ -2160,14 +2155,22 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_setcond2_i32:
return C_O1_I4(r, r, r, rI, rI);
- case INDEX_op_qemu_ld_i32:
- return TARGET_LONG_BITS == 32 ? C_O1_I1(r, q) : C_O1_I2(r, q, q);
- case INDEX_op_qemu_ld_i64:
- return TARGET_LONG_BITS == 32 ? C_O2_I1(e, p, q) : C_O2_I2(e, p, q, q);
- case INDEX_op_qemu_st_i32:
- return TARGET_LONG_BITS == 32 ? C_O0_I2(q, q) : C_O0_I3(q, q, q);
- case INDEX_op_qemu_st_i64:
- return TARGET_LONG_BITS == 32 ? C_O0_I3(Q, p, q) : C_O0_I4(Q, p, q, q);
+ case INDEX_op_qemu_ld_a32_i32:
+ return C_O1_I1(r, q);
+ case INDEX_op_qemu_ld_a64_i32:
+ return C_O1_I2(r, q, q);
+ case INDEX_op_qemu_ld_a32_i64:
+ return C_O2_I1(e, p, q);
+ case INDEX_op_qemu_ld_a64_i64:
+ return C_O2_I2(e, p, q, q);
+ case INDEX_op_qemu_st_a32_i32:
+ return C_O0_I2(q, q);
+ case INDEX_op_qemu_st_a64_i32:
+ return C_O0_I3(q, q, q);
+ case INDEX_op_qemu_st_a32_i64:
+ return C_O0_I3(Q, p, q);
+ case INDEX_op_qemu_st_a64_i64:
+ return C_O0_I4(Q, p, q, q);
case INDEX_op_st_vec:
return C_O0_I2(w, r);
@@ -2666,44 +2666,62 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NOT, a0);
break;
- case INDEX_op_qemu_ld_i32:
- if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
- tcg_out_qemu_ld(s, a0, -1, a1, -1, a2, TCG_TYPE_I32);
- } else {
+ case INDEX_op_qemu_ld_a64_i32:
+ if (TCG_TARGET_REG_BITS == 32) {
tcg_out_qemu_ld(s, a0, -1, a1, a2, args[3], TCG_TYPE_I32);
+ break;
}
+ /* fall through */
+ case INDEX_op_qemu_ld_a32_i32:
+ tcg_out_qemu_ld(s, a0, -1, a1, -1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_qemu_ld(s, a0, -1, a1, -1, a2, TCG_TYPE_I64);
- } else if (TARGET_LONG_BITS == 32) {
+ } else {
tcg_out_qemu_ld(s, a0, a1, a2, -1, args[3], TCG_TYPE_I64);
+ }
+ break;
+ case INDEX_op_qemu_ld_a64_i64:
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_qemu_ld(s, a0, -1, a1, -1, a2, TCG_TYPE_I64);
} else {
tcg_out_qemu_ld(s, a0, a1, a2, args[3], args[4], TCG_TYPE_I64);
}
break;
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
tcg_out_qemu_ld(s, a0, a1, a2, -1, args[3], TCG_TYPE_I128);
break;
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st8_i32:
- if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
- tcg_out_qemu_st(s, a0, -1, a1, -1, a2, TCG_TYPE_I32);
- } else {
+
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st8_a64_i32:
+ if (TCG_TARGET_REG_BITS == 32) {
tcg_out_qemu_st(s, a0, -1, a1, a2, args[3], TCG_TYPE_I32);
+ break;
}
+ /* fall through */
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st8_a32_i32:
+ tcg_out_qemu_st(s, a0, -1, a1, -1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_qemu_st(s, a0, -1, a1, -1, a2, TCG_TYPE_I64);
- } else if (TARGET_LONG_BITS == 32) {
+ } else {
tcg_out_qemu_st(s, a0, a1, a2, -1, args[3], TCG_TYPE_I64);
+ }
+ break;
+ case INDEX_op_qemu_st_a64_i64:
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_qemu_st(s, a0, -1, a1, -1, a2, TCG_TYPE_I64);
} else {
tcg_out_qemu_st(s, a0, a1, a2, args[3], args[4], TCG_TYPE_I64);
}
break;
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
tcg_out_qemu_st(s, a0, a1, a2, -1, args[3], TCG_TYPE_I128);
break;
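
i386 (like mips and ppc below) uses a fall-through idiom: the _a64_ case
first handles the 32-bit-host form, where the guest address needs two
registers, and breaks; on a 64-bit host it falls through to the _a32_
case because the whole address fits in one register. Schematically:

    switch (opc) {
    case INDEX_op_qemu_ld_a64_i32:
        if (TCG_TARGET_REG_BITS == 32) {
            /* address in two registers */
            break;
        }
        /* fall through - one register holds the full address */
    case INDEX_op_qemu_ld_a32_i32:
        /* address in one register */
        break;
    }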
@@ -3380,31 +3398,36 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_clz_i64:
return have_lzcnt ? C_N1_I2(r, r, rW) : C_N1_I2(r, r, r);
- case INDEX_op_qemu_ld_i32:
- return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
- ? C_O1_I1(r, L) : C_O1_I2(r, L, L));
+ case INDEX_op_qemu_ld_a32_i32:
+ return C_O1_I1(r, L);
+ case INDEX_op_qemu_ld_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, L) : C_O1_I2(r, L, L);
- case INDEX_op_qemu_st_i32:
- return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
- ? C_O0_I2(L, L) : C_O0_I3(L, L, L));
- case INDEX_op_qemu_st8_i32:
- return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
- ? C_O0_I2(s, L) : C_O0_I3(s, L, L));
+ case INDEX_op_qemu_st_a32_i32:
+ return C_O0_I2(L, L);
+ case INDEX_op_qemu_st_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(L, L) : C_O0_I3(L, L, L);
+ case INDEX_op_qemu_st8_a32_i32:
+ return C_O0_I2(s, L);
+ case INDEX_op_qemu_st8_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(s, L) : C_O0_I3(s, L, L);
- case INDEX_op_qemu_ld_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, L)
- : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, L)
- : C_O2_I2(r, r, L, L));
+ case INDEX_op_qemu_ld_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, L) : C_O2_I1(r, r, L);
+ case INDEX_op_qemu_ld_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, L) : C_O2_I2(r, r, L, L);
- case INDEX_op_qemu_st_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(L, L)
- : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(L, L, L)
- : C_O0_I4(L, L, L, L));
+ case INDEX_op_qemu_st_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(L, L) : C_O0_I3(L, L, L);
+ case INDEX_op_qemu_st_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(L, L) : C_O0_I4(L, L, L, L);
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
return C_O2_I1(r, r, L);
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
return C_O0_I3(L, L, L);
@@ -1443,16 +1443,20 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_ldst(s, OPC_ST_D, a0, a1, a2);
break;
- case INDEX_op_qemu_ld_i32:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
break;
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
break;
@@ -1492,8 +1496,10 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_st32_i64:
case INDEX_op_st_i32:
case INDEX_op_st_i64:
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
return C_O0_I2(rZ, r);
case INDEX_op_brcond_i32:
@@ -1535,8 +1541,10 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_ld32u_i64:
case INDEX_op_ld_i32:
case INDEX_op_ld_i64:
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
return C_O1_I1(r, r);
case INDEX_op_andc_i32:
@@ -1954,34 +1954,49 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_setcond2(s, args[5], a0, a1, a2, args[3], args[4]);
break;
- case INDEX_op_qemu_ld_i32:
- if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
- tcg_out_qemu_ld(s, a0, 0, a1, 0, a2, TCG_TYPE_I32);
- } else {
+ case INDEX_op_qemu_ld_a64_i32:
+ if (TCG_TARGET_REG_BITS == 32) {
tcg_out_qemu_ld(s, a0, 0, a1, a2, args[3], TCG_TYPE_I32);
+ break;
}
+ /* fall through */
+ case INDEX_op_qemu_ld_a32_i32:
+ tcg_out_qemu_ld(s, a0, 0, a1, 0, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_qemu_ld(s, a0, 0, a1, 0, a2, TCG_TYPE_I64);
- } else if (TARGET_LONG_BITS == 32) {
+ } else {
tcg_out_qemu_ld(s, a0, a1, a2, 0, args[3], TCG_TYPE_I64);
+ }
+ break;
+ case INDEX_op_qemu_ld_a64_i64:
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_qemu_ld(s, a0, 0, a1, 0, a2, TCG_TYPE_I64);
} else {
tcg_out_qemu_ld(s, a0, a1, a2, args[3], args[4], TCG_TYPE_I64);
}
break;
- case INDEX_op_qemu_st_i32:
- if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
- tcg_out_qemu_st(s, a0, 0, a1, 0, a2, TCG_TYPE_I32);
- } else {
+
+ case INDEX_op_qemu_st_a64_i32:
+ if (TCG_TARGET_REG_BITS == 32) {
tcg_out_qemu_st(s, a0, 0, a1, a2, args[3], TCG_TYPE_I32);
+ break;
}
+ /* fall through */
+ case INDEX_op_qemu_st_a32_i32:
+ tcg_out_qemu_st(s, a0, 0, a1, 0, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_qemu_st(s, a0, 0, a1, 0, a2, TCG_TYPE_I64);
- } else if (TARGET_LONG_BITS == 32) {
+ } else {
tcg_out_qemu_st(s, a0, a1, a2, 0, args[3], TCG_TYPE_I64);
+ }
+ break;
+ case INDEX_op_qemu_st_a64_i64:
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_qemu_st(s, a0, 0, a1, 0, a2, TCG_TYPE_I64);
} else {
tcg_out_qemu_st(s, a0, a1, a2, args[3], args[4], TCG_TYPE_I64);
}
@@ -2140,19 +2155,22 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_brcond2_i32:
return C_O0_I4(rZ, rZ, rZ, rZ);
- case INDEX_op_qemu_ld_i32:
- return (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 32
- ? C_O1_I1(r, r) : C_O1_I2(r, r, r));
- case INDEX_op_qemu_st_i32:
- return (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 32
- ? C_O0_I2(rZ, r) : C_O0_I3(rZ, r, r));
- case INDEX_op_qemu_ld_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
- : TARGET_LONG_BITS == 32 ? C_O2_I1(r, r, r)
- : C_O2_I2(r, r, r, r));
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ return C_O1_I1(r, r);
+ case INDEX_op_qemu_ld_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
+ case INDEX_op_qemu_st_a32_i32:
+ return C_O0_I2(rZ, r);
+ case INDEX_op_qemu_st_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(rZ, r) : C_O0_I3(rZ, r, r);
+ case INDEX_op_qemu_ld_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
+ case INDEX_op_qemu_ld_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
+ case INDEX_op_qemu_st_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(rZ, r) : C_O0_I3(rZ, rZ, r);
+ case INDEX_op_qemu_st_a64_i64:
return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(rZ, r)
- : TARGET_LONG_BITS == 32 ? C_O0_I3(rZ, rZ, r)
: C_O0_I4(rZ, rZ, r, r));
default:
@@ -2909,54 +2909,70 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out32(s, MODUD | TAB(args[0], args[1], args[2]));
break;
- case INDEX_op_qemu_ld_i32:
- if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
- tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
- args[2], TCG_TYPE_I32);
- } else {
+ case INDEX_op_qemu_ld_a64_i32:
+ if (TCG_TARGET_REG_BITS == 32) {
tcg_out_qemu_ld(s, args[0], -1, args[1], args[2],
args[3], TCG_TYPE_I32);
+ break;
}
+ /* fall through */
+ case INDEX_op_qemu_ld_a32_i32:
+ tcg_out_qemu_ld(s, args[0], -1, args[1], -1, args[2], TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
args[2], TCG_TYPE_I64);
- } else if (TARGET_LONG_BITS == 32) {
+ } else {
tcg_out_qemu_ld(s, args[0], args[1], args[2], -1,
args[3], TCG_TYPE_I64);
+ }
+ break;
+ case INDEX_op_qemu_ld_a64_i64:
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_qemu_ld(s, args[0], -1, args[1], -1,
+ args[2], TCG_TYPE_I64);
} else {
tcg_out_qemu_ld(s, args[0], args[1], args[2], args[3],
args[4], TCG_TYPE_I64);
}
break;
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], true);
break;
- case INDEX_op_qemu_st_i32:
- if (TCG_TARGET_REG_BITS >= TARGET_LONG_BITS) {
- tcg_out_qemu_st(s, args[0], -1, args[1], -1,
- args[2], TCG_TYPE_I32);
- } else {
+ case INDEX_op_qemu_st_a64_i32:
+ if (TCG_TARGET_REG_BITS == 32) {
tcg_out_qemu_st(s, args[0], -1, args[1], args[2],
args[3], TCG_TYPE_I32);
+ break;
}
+ /* fall through */
+ case INDEX_op_qemu_st_a32_i32:
+ tcg_out_qemu_st(s, args[0], -1, args[1], -1, args[2], TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_qemu_st(s, args[0], -1, args[1], -1,
args[2], TCG_TYPE_I64);
- } else if (TARGET_LONG_BITS == 32) {
+ } else {
tcg_out_qemu_st(s, args[0], args[1], args[2], -1,
args[3], TCG_TYPE_I64);
+ }
+ break;
+ case INDEX_op_qemu_st_a64_i64:
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_out_qemu_st(s, args[0], -1, args[1], -1,
+ args[2], TCG_TYPE_I64);
} else {
tcg_out_qemu_st(s, args[0], args[1], args[2], args[3],
args[4], TCG_TYPE_I64);
}
break;
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], false);
break;
@@ -3775,29 +3791,28 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_sub2_i32:
return C_O2_I4(r, r, rI, rZM, r, r);
- case INDEX_op_qemu_ld_i32:
- return (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 32
- ? C_O1_I1(r, r)
- : C_O1_I2(r, r, r));
-
- case INDEX_op_qemu_st_i32:
- return (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 32
- ? C_O0_I2(r, r)
- : C_O0_I3(r, r, r));
-
- case INDEX_op_qemu_ld_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
- : TARGET_LONG_BITS == 32 ? C_O2_I1(r, r, r)
- : C_O2_I2(r, r, r, r));
-
- case INDEX_op_qemu_st_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
- : TARGET_LONG_BITS == 32 ? C_O0_I3(r, r, r)
- : C_O0_I4(r, r, r, r));
-
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i32:
+ return C_O1_I1(r, r);
+ case INDEX_op_qemu_ld_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
+ case INDEX_op_qemu_ld_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
+ case INDEX_op_qemu_ld_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
return C_O2_I1(o, m, r);
- case INDEX_op_qemu_st_i128:
+
+ case INDEX_op_qemu_st_a32_i32:
+ return C_O0_I2(r, r);
+ case INDEX_op_qemu_st_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
+ case INDEX_op_qemu_st_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
+ case INDEX_op_qemu_st_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I4(r, r, r, r);
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
return C_O0_I3(o, m, r);
case INDEX_op_add_vec:
@@ -1382,16 +1382,20 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_setcond(s, args[3], a0, a1, a2);
break;
- case INDEX_op_qemu_ld_i32:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
break;
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
break;
@@ -1533,11 +1537,15 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_sub2_i64:
return C_O2_I4(r, r, rZ, rZ, rM, rM);
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
return C_O1_I1(r, r);
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
return C_O0_I2(rZ, r);
default:
@@ -2297,22 +2297,28 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
args[2], const_args[2], args[3], const_args[3], args[4]);
break;
- case INDEX_op_qemu_ld_i32:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
tcg_out_qemu_ld(s, args[0], args[1], args[2], TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
tcg_out_qemu_ld(s, args[0], args[1], args[2], TCG_TYPE_I64);
break;
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
tcg_out_qemu_st(s, args[0], args[1], args[2], TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
tcg_out_qemu_st(s, args[0], args[1], args[2], TCG_TYPE_I64);
break;
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], true);
break;
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], false);
break;
@@ -3186,15 +3192,21 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_ctpop_i64:
return C_O1_I1(r, r);
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
return C_O1_I1(r, r);
- case INDEX_op_qemu_st_i64:
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
return C_O0_I2(r, r);
- case INDEX_op_qemu_ld_i128:
+ case INDEX_op_qemu_ld_a32_i128:
+ case INDEX_op_qemu_ld_a64_i128:
return C_O2_I1(o, m, r);
- case INDEX_op_qemu_st_i128:
+ case INDEX_op_qemu_st_a32_i128:
+ case INDEX_op_qemu_st_a64_i128:
return C_O0_I3(o, m, r);
case INDEX_op_deposit_i32:
@@ -1376,16 +1376,20 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_arithi(s, a1, a0, 32, SHIFT_SRLX);
break;
- case INDEX_op_qemu_ld_i32:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
break;
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
break;
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
break;
@@ -1507,8 +1511,10 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
case INDEX_op_extrh_i64_i32:
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_ld_i64:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
return C_O1_I1(r, r);
case INDEX_op_st8_i32:
@@ -1518,8 +1524,10 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_st_i32:
case INDEX_op_st32_i64:
case INDEX_op_st_i64:
- case INDEX_op_qemu_st_i32:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
return C_O0_I2(rZ, r);
case INDEX_op_add_i32:
@@ -156,22 +156,22 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_setcond2_i32:
return C_O1_I4(r, r, r, r, r);
- case INDEX_op_qemu_ld_i32:
- return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
- ? C_O1_I1(r, r)
- : C_O1_I2(r, r, r));
- case INDEX_op_qemu_ld_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
- : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
- : C_O2_I2(r, r, r, r));
- case INDEX_op_qemu_st_i32:
- return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
- ? C_O0_I2(r, r)
- : C_O0_I3(r, r, r));
- case INDEX_op_qemu_st_i64:
- return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
- : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
- : C_O0_I4(r, r, r, r));
+ case INDEX_op_qemu_ld_a32_i32:
+ return C_O1_I1(r, r);
+ case INDEX_op_qemu_ld_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
+ case INDEX_op_qemu_ld_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
+ case INDEX_op_qemu_ld_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
+ case INDEX_op_qemu_st_a32_i32:
+ return C_O0_I2(r, r);
+ case INDEX_op_qemu_st_a64_i32:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
+ case INDEX_op_qemu_st_a32_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
+ case INDEX_op_qemu_st_a64_i64:
+ return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I4(r, r, r, r);
default:
g_assert_not_reached();
@@ -849,8 +849,10 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
break;
- case INDEX_op_qemu_ld_i32:
- case INDEX_op_qemu_st_i32:
+ case INDEX_op_qemu_ld_a32_i32:
+ case INDEX_op_qemu_ld_a64_i32:
+ case INDEX_op_qemu_st_a32_i32:
+ case INDEX_op_qemu_st_a64_i32:
if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
} else {
@@ -858,8 +860,10 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;
- case INDEX_op_qemu_ld_i64:
- case INDEX_op_qemu_st_i64:
+ case INDEX_op_qemu_ld_a32_i64:
+ case INDEX_op_qemu_ld_a64_i64:
+ case INDEX_op_qemu_st_a32_i64:
+ case INDEX_op_qemu_st_a64_i64:
if (TCG_TARGET_REG_BITS == 64) {
tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
} else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
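
As in the interpreter, the TCI code generator still discriminates inside
the shared case bodies via TARGET_LONG_BITS and TCG_TARGET_REG_BITS. With
the opcodes now split, an obvious follow-up (hypothetical, not in this
patch) would be to key the encoding on the opcode itself, e.g.:

    case INDEX_op_qemu_ld_a32_i32:
    case INDEX_op_qemu_st_a32_i32:
        /* a 32-bit guest address fits one register on any host */
        tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        break;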