author     Richard Henderson <rth@twiddle.net>  2014-04-25 15:19:33 -0400
committer  Richard Henderson <rth@twiddle.net>  2014-05-12 11:13:13 -0700
commit     96d0ee7f0950e725ef7a4f7516e0af6a08d303e0 (patch)
tree       9fdb8dd5cdaccf7d86ebdfafb69c90a5dd146ead
parent     af3cbfbe8018ccc16fb3a0048e928f66f0d05e87 (diff)
tcg: Remove unreachable code in tcg_out_op and op_defs
The INDEX_op_call case has just been obsoleted; the mov and movi cases have not been reachable for years. Attempt to document this both in each tcg_out_op switch, and via TCG_OPF_NOT_PRESENT.

Because of the TCG_OPF_NOT_PRESENT change, this must be done for all targets in a single commit.

Signed-off-by: Richard Henderson <rth@twiddle.net>
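The change leaves every backend with the same tail in its tcg_out_op switch: the opcodes that are only ever synthesized through tcg_out_mov, tcg_out_movi and tcg_out_call fall through to the abort path, and their rows can be dropped from the per-target op_defs tables because TCG_OPF_NOT_PRESENT marks them as never appearing as generic ops. As a rough sketch only (no particular backend; the real target-specific cases elided, and the 32-bit-only backends omit the *_i64 labels), the common shape is:

    /* Schematic tcg_out_op tail after this commit -- an illustration,
       not code from any one tcg/<target>/tcg-target.c.  */
    static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                           const TCGArg *args, const int *const_args)
    {
        switch (opc) {
        /* ... target-specific opcode cases ... */

        case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
        case INDEX_op_mov_i64:
        case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi.  */
        case INDEX_op_movi_i64:
        case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
        default:
            /* These opcodes never reach tcg_out_op; anything else is
               an unimplemented opcode, so fail loudly.  */
            tcg_abort();
        }
    }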
-rw-r--r--  tcg/aarch64/tcg-target.c  22
-rw-r--r--  tcg/arm/tcg-target.c      32
-rw-r--r--  tcg/i386/tcg-target.c     24
-rw-r--r--  tcg/ia64/tcg-target.c     40
-rw-r--r--  tcg/mips/tcg-target.c     17
-rw-r--r--  tcg/ppc/tcg-target.c      34
-rw-r--r--  tcg/ppc64/tcg-target.c    33
-rw-r--r--  tcg/s390/tcg-target.c     35
-rw-r--r--  tcg/sparc/tcg-target.c    21
-rw-r--r--  tcg/tcg-opc.h             10
-rw-r--r--  tcg/tci/tcg-target.c      26
11 files changed, 49 insertions(+), 245 deletions(-)
diff --git a/tcg/aarch64/tcg-target.c b/tcg/aarch64/tcg-target.c
index 1a71df1a21..77bb6d97de 100644
--- a/tcg/aarch64/tcg-target.c
+++ b/tcg/aarch64/tcg-target.c
@@ -1268,14 +1268,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
s->tb_next_offset[a0] = tcg_current_code_size(s);
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (tcg_insn_unit *)(intptr_t)a0);
- } else {
- tcg_out_callr(s, a0);
- }
- break;
-
case INDEX_op_br:
tcg_out_goto_label(s, a0);
break;
@@ -1596,13 +1588,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_insn(s, 3508, SMULH, TCG_TYPE_I64, a0, a1, a2);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_mov_i32:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
case INDEX_op_movi_i64:
- case INDEX_op_movi_i32:
- /* Always implemented with tcg_out_mov/i, never with tcg_out_op. */
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
- /* Opcode not implemented. */
tcg_abort();
}
@@ -1612,15 +1603,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef aarch64_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_mov_i64, { "r", "r" } },
-
- { INDEX_op_movi_i32, { "r" } },
- { INDEX_op_movi_i64, { "r" } },
-
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
diff --git a/tcg/arm/tcg-target.c b/tcg/arm/tcg-target.c
index 5236a33a89..538ca2aed0 100644
--- a/tcg/arm/tcg-target.c
+++ b/tcg/arm/tcg-target.c
@@ -1038,17 +1038,6 @@ static void tcg_out_call(TCGContext *s, tcg_insn_unit *addr)
}
}
-static inline void tcg_out_callr(TCGContext *s, int cond, int arg)
-{
- if (use_armv5t_instructions) {
- tcg_out_blx(s, cond, arg);
- } else {
- tcg_out_dat_reg(s, cond, ARITH_MOV, TCG_REG_R14, 0,
- TCG_REG_PC, SHIFT_IMM_LSL(0));
- tcg_out_bx(s, cond, arg);
- }
-}
-
static inline void tcg_out_goto_label(TCGContext *s, int cond, int label_index)
{
TCGLabel *l = &s->labels[label_index];
@@ -1667,13 +1656,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
s->tb_next_offset[args[0]] = tcg_current_code_size(s);
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (void *)args[0]);
- } else {
- tcg_out_callr(s, COND_AL, args[0]);
- }
- break;
case INDEX_op_br:
tcg_out_goto_label(s, COND_AL, args[0]);
break;
@@ -1703,13 +1685,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_st32(s, COND_AL, args[0], args[1], args[2]);
break;
- case INDEX_op_mov_i32:
- tcg_out_dat_reg(s, COND_AL, ARITH_MOV,
- args[0], 0, args[1], SHIFT_IMM_LSL(0));
- break;
- case INDEX_op_movi_i32:
- tcg_out_movi32(s, COND_AL, args[0], args[1]);
- break;
case INDEX_op_movcond_i32:
/* Constraints mean that v2 is always in the same register as dest,
* so we only need to do "if condition passed, move v1 to dest".
@@ -1925,6 +1900,9 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_udiv(s, COND_AL, args[0], args[1], args[2]);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
}
@@ -1933,12 +1911,8 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef arm_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
-
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
diff --git a/tcg/i386/tcg-target.c b/tcg/i386/tcg-target.c
index 48a95f8da2..a373073ff8 100644
--- a/tcg/i386/tcg-target.c
+++ b/tcg/i386/tcg-target.c
@@ -1746,20 +1746,9 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
s->tb_next_offset[args[0]] = tcg_current_code_size(s);
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (tcg_insn_unit *)(uintptr_t)args[0]);
- } else {
- /* call *reg */
- tcg_out_modrm(s, OPC_GRP5, EXT5_CALLN_Ev, args[0]);
- }
- break;
case INDEX_op_br:
tcg_out_jxx(s, JCC_JMP, args[0], 0);
break;
- case INDEX_op_movi_i32:
- tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
- break;
OP_32_64(ld8u):
/* Note that we can ignore REXW for the zero-extend to 64-bit. */
tcg_out_modrm_offset(s, OPC_MOVZBL, args[0], args[1], args[2]);
@@ -2009,9 +1998,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_setcond2(s, args, const_args);
break;
#else /* TCG_TARGET_REG_BITS == 64 */
- case INDEX_op_movi_i64:
- tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
- break;
case INDEX_op_ld32s_i64:
tcg_out_modrm_offset(s, OPC_MOVSLQ, args[0], args[1], args[2]);
break;
@@ -2068,6 +2054,11 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_mov_i64:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_movi_i64:
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
}
@@ -2078,10 +2069,7 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef x86_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
@@ -2135,8 +2123,6 @@ static const TCGTargetOpDef x86_op_defs[] = {
{ INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },
{ INDEX_op_setcond2_i32, { "r", "r", "r", "ri", "ri" } },
#else
- { INDEX_op_mov_i64, { "r", "r" } },
- { INDEX_op_movi_i64, { "r" } },
{ INDEX_op_ld8u_i64, { "r", "r" } },
{ INDEX_op_ld8s_i64, { "r", "r" } },
{ INDEX_op_ld16u_i64, { "r", "r" } },
diff --git a/tcg/ia64/tcg-target.c b/tcg/ia64/tcg-target.c
index 90dd9cd36a..6bc9924641 100644
--- a/tcg/ia64/tcg-target.c
+++ b/tcg/ia64/tcg-target.c
@@ -865,20 +865,6 @@ static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
TCG_REG_B0, disp));
}
-static inline void tcg_out_callr(TCGContext *s, TCGReg addr)
-{
- tcg_out_bundle(s, MmI,
- tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, addr),
- tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4, TCG_REG_R3, 8, addr),
- tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
- TCG_REG_B6, TCG_REG_R2, 0));
- tcg_out_bundle(s, mmB,
- tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R1, TCG_REG_R3),
- INSN_NOP_M,
- tcg_opc_b5 (TCG_REG_P0, OPC_BR_CALL_SPTK_MANY_B5,
- TCG_REG_B0, TCG_REG_B6));
-}
-
static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
{
uint64_t imm, opc1;
@@ -2009,24 +1995,10 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_br:
tcg_out_br(s, args[0]);
break;
- case INDEX_op_call:
- if (likely(const_args[0])) {
- tcg_out_call(s, (tcg_insn_unit *)(intptr_t)args[0]);
- } else {
- tcg_out_callr(s, args[0]);
- }
- break;
case INDEX_op_goto_tb:
tcg_out_goto_tb(s, args[0]);
break;
- case INDEX_op_movi_i32:
- tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
- break;
- case INDEX_op_movi_i64:
- tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
- break;
-
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
@@ -2236,6 +2208,11 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_mov_i64:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_movi_i64:
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
}
@@ -2243,13 +2220,9 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef ia64_op_defs[] = {
{ INDEX_op_br, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
-
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
@@ -2291,9 +2264,6 @@ static const TCGTargetOpDef ia64_op_defs[] = {
{ INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
{ INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },
- { INDEX_op_mov_i64, { "r", "r" } },
- { INDEX_op_movi_i64, { "r" } },
-
{ INDEX_op_ld8u_i64, { "r", "r" } },
{ INDEX_op_ld8s_i64, { "r", "r" } },
{ INDEX_op_ld16u_i64, { "r", "r" } },
diff --git a/tcg/mips/tcg-target.c b/tcg/mips/tcg-target.c
index 65acc8677b..0ae495c586 100644
--- a/tcg/mips/tcg-target.c
+++ b/tcg/mips/tcg-target.c
@@ -1278,21 +1278,10 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_nop(s);
s->tb_next_offset[args[0]] = tcg_current_code_size(s);
break;
- case INDEX_op_call:
- assert(const_args[0]);
- tcg_out_call(s, (tcg_insn_unit *)(intptr_t)args[0]);
- break;
case INDEX_op_br:
tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
break;
- case INDEX_op_mov_i32:
- tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
- break;
- case INDEX_op_movi_i32:
- tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
- break;
-
case INDEX_op_ld8u_i32:
tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
break;
@@ -1540,6 +1529,9 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, 3);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
tcg_abort();
}
@@ -1548,11 +1540,8 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef mips_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "i" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
diff --git a/tcg/ppc/tcg-target.c b/tcg/ppc/tcg-target.c
index 9062898885..436b65bffb 100644
--- a/tcg/ppc/tcg-target.c
+++ b/tcg/ppc/tcg-target.c
@@ -489,23 +489,14 @@ static void tcg_out_b(TCGContext *s, int mask, tcg_insn_unit *target)
}
}
-static void tcg_out_callr(TCGContext *s, TCGReg reg, int lk)
+static void tcg_out_call1(TCGContext *s, tcg_insn_unit *target, int lk)
{
#ifdef _CALL_AIX
+ tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, (uintptr_t)target);
tcg_out32(s, LWZ | RT(TCG_REG_R0) | RA(reg));
tcg_out32(s, MTSPR | RA(TCG_REG_R0) | CTR);
tcg_out32(s, LWZ | RT(TCG_REG_R2) | RA(reg) | 4);
-#else
- tcg_out32(s, MTSPR | RS(reg) | CTR);
-#endif
tcg_out32(s, BCCTR | BO_ALWAYS | lk);
-}
-
-static void tcg_out_call1(TCGContext *s, tcg_insn_unit *target, int lk)
-{
-#ifdef _CALL_AIX
- tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, (uintptr_t)target);
- tcg_out_callr(s, TCG_REG_R2, lk);
#else
tcg_out_b(s, lk, target);
#endif
@@ -880,7 +871,7 @@ static void tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
#endif
#ifdef CONFIG_SOFTMMU
-static void emit_ldst_trampoline(TCGContext *s, void *ptr)
+static void emit_ldst_trampoline(TCGContext *s, tcg_insn_unit *ptr)
{
tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_R3, TCG_AREG0);
tcg_out_call1(s, ptr, 0);
@@ -1390,16 +1381,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
}
}
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (void *)(uintptr_t)args[0]);
- } else {
- tcg_out_callr(s, args[0], LK);
- }
- break;
- case INDEX_op_movi_i32:
- tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
- break;
case INDEX_op_ld8u_i32:
tcg_out_ldst (s, args[0], args[1], args[2], LBZ, LBZX);
break;
@@ -1835,20 +1816,19 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
const_args[2]);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
- tcg_dump_ops (s);
- tcg_abort ();
+ tcg_abort();
}
}
static const TCGTargetOpDef ppc_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
diff --git a/tcg/ppc64/tcg-target.c b/tcg/ppc64/tcg-target.c
index d80f2d997d..c90ddcd03a 100644
--- a/tcg/ppc64/tcg-target.c
+++ b/tcg/ppc64/tcg-target.c
@@ -1502,27 +1502,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
}
}
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (void *)(uintptr_t)arg);
- } else {
-#ifdef __APPLE__
- tcg_out32(s, MTSPR | RS(arg) | LR);
- tcg_out32(s, BCLR | BO_ALWAYS | LK);
-#else
- tcg_out32(s, LD | TAI(TCG_REG_R0, arg, 0));
- tcg_out32(s, MTSPR | RA(TCG_REG_R0) | CTR);
- tcg_out32(s, LD | TAI(TCG_REG_R2, arg, 8));
- tcg_out32(s, BCCTR | BO_ALWAYS | LK);
-#endif
- }
- break;
- case INDEX_op_movi_i32:
- tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
- break;
- case INDEX_op_movi_i64:
- tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
- break;
case INDEX_op_ld8u_i32:
case INDEX_op_ld8u_i64:
tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]);
@@ -2003,8 +1982,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out32(s, MULHD | TAB(args[0], args[1], args[2]));
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_mov_i64:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_movi_i64:
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
- tcg_dump_ops(s);
tcg_abort();
}
}
@@ -2012,14 +1995,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
static const TCGTargetOpDef ppc_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_mov_i64, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
- { INDEX_op_movi_i64, { "r" } },
-
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
diff --git a/tcg/s390/tcg-target.c b/tcg/s390/tcg-target.c
index 0ae40e1598..ebdd0743cf 100644
--- a/tcg/s390/tcg-target.c
+++ b/tcg/s390/tcg-target.c
@@ -1612,21 +1612,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
s->tb_next_offset[args[0]] = tcg_current_code_size(s);
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (tcg_insn_unit *)(intptr_t)args[0]);
- } else {
- tcg_out_insn(s, RR, BASR, TCG_REG_R14, args[0]);
- }
- break;
-
- case INDEX_op_mov_i32:
- tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
- break;
- case INDEX_op_movi_i32:
- tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
- break;
-
OP_32_64(ld8u):
/* ??? LLC (RXY format) is only present with the extended-immediate
facility, whereas LLGC is always present. */
@@ -1859,13 +1844,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, LD_UINT64);
break;
- case INDEX_op_mov_i64:
- tcg_out_mov(s, TCG_TYPE_I64, args[0], args[1]);
- break;
- case INDEX_op_movi_i64:
- tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
- break;
-
case INDEX_op_ld16s_i64:
tcg_out_mem(s, 0, RXY_LGH, args[0], args[1], TCG_REG_NONE, args[2]);
break;
@@ -2070,8 +2048,12 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
tgen_deposit(s, args[0], args[2], args[3], args[4]);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_mov_i64:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_movi_i64:
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
- fprintf(stderr,"unimplemented opc 0x%x\n",opc);
tcg_abort();
}
}
@@ -2079,12 +2061,8 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef s390_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
-
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
@@ -2142,9 +2120,6 @@ static const TCGTargetOpDef s390_op_defs[] = {
{ INDEX_op_qemu_st32, { "L", "L" } },
{ INDEX_op_qemu_st64, { "L", "L" } },
- { INDEX_op_mov_i64, { "r", "r" } },
- { INDEX_op_movi_i64, { "r" } },
-
{ INDEX_op_ld8u_i64, { "r", "r" } },
{ INDEX_op_ld8s_i64, { "r", "r" } },
{ INDEX_op_ld16u_i64, { "r", "r" } },
diff --git a/tcg/sparc/tcg-target.c b/tcg/sparc/tcg-target.c
index 4f8de1e0fe..17ff5773ad 100644
--- a/tcg/sparc/tcg-target.c
+++ b/tcg/sparc/tcg-target.c
@@ -1167,15 +1167,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_nop(s);
s->tb_next_offset[a0] = tcg_current_code_size(s);
break;
- case INDEX_op_call:
- if (const_args[0]) {
- tcg_out_call(s, (void *)(uintptr_t)a0);
- } else {
- tcg_out_arithi(s, TCG_REG_O7, a0, 0, JMPL);
- /* delay slot */
- tcg_out_nop(s);
- }
- break;
case INDEX_op_br:
tcg_out_bpcc(s, COND_A, BPCC_PT, a0);
tcg_out_nop(s);
@@ -1369,13 +1360,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_arithc(s, a0, TCG_REG_G0, a1, const_args[1], c);
break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
- case INDEX_op_mov_i32:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
case INDEX_op_movi_i64:
- case INDEX_op_movi_i32:
- /* Always implemented with tcg_out_mov/i, never with tcg_out_op. */
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
- /* Opcode not implemented. */
tcg_abort();
}
}
@@ -1383,11 +1373,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
static const TCGTargetOpDef sparc_op_defs[] = {
{ INDEX_op_exit_tb, { } },
{ INDEX_op_goto_tb, { } },
- { INDEX_op_call, { "ri" } },
{ INDEX_op_br, { } },
- { INDEX_op_mov_i32, { "r", "r" } },
- { INDEX_op_movi_i32, { "r" } },
{ INDEX_op_ld8u_i32, { "r", "r" } },
{ INDEX_op_ld8s_i32, { "r", "r" } },
{ INDEX_op_ld16u_i32, { "r", "r" } },
@@ -1424,8 +1411,6 @@ static const TCGTargetOpDef sparc_op_defs[] = {
{ INDEX_op_mulu2_i32, { "r", "r", "rZ", "rJ" } },
{ INDEX_op_muls2_i32, { "r", "r", "rZ", "rJ" } },
- { INDEX_op_mov_i64, { "R", "R" } },
- { INDEX_op_movi_i64, { "R" } },
{ INDEX_op_ld8u_i64, { "R", "r" } },
{ INDEX_op_ld8s_i64, { "R", "r" } },
{ INDEX_op_ld16u_i64, { "R", "r" } },
diff --git a/tcg/tcg-opc.h b/tcg/tcg-opc.h
index 17168480b0..71ba64ac16 100644
--- a/tcg/tcg-opc.h
+++ b/tcg/tcg-opc.h
@@ -40,7 +40,7 @@ DEF(discard, 1, 0, 0, TCG_OPF_NOT_PRESENT)
DEF(set_label, 0, 0, 1, TCG_OPF_BB_END | TCG_OPF_NOT_PRESENT)
/* variable number of parameters */
-DEF(call, 0, 0, 3, TCG_OPF_CALL_CLOBBER)
+DEF(call, 0, 0, 3, TCG_OPF_CALL_CLOBBER | TCG_OPF_NOT_PRESENT)
DEF(br, 0, 0, 1, TCG_OPF_BB_END)
@@ -51,8 +51,8 @@ DEF(br, 0, 0, 1, TCG_OPF_BB_END)
# define IMPL64 TCG_OPF_64BIT
#endif
-DEF(mov_i32, 1, 1, 0, 0)
-DEF(movi_i32, 1, 0, 1, 0)
+DEF(mov_i32, 1, 1, 0, TCG_OPF_NOT_PRESENT)
+DEF(movi_i32, 1, 0, 1, TCG_OPF_NOT_PRESENT)
DEF(setcond_i32, 1, 2, 1, 0)
DEF(movcond_i32, 1, 4, 1, IMPL(TCG_TARGET_HAS_movcond_i32))
/* load/store */
@@ -110,8 +110,8 @@ DEF(eqv_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_eqv_i32))
DEF(nand_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_nand_i32))
DEF(nor_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_nor_i32))
-DEF(mov_i64, 1, 1, 0, IMPL64)
-DEF(movi_i64, 1, 0, 1, IMPL64)
+DEF(mov_i64, 1, 1, 0, TCG_OPF_64BIT | TCG_OPF_NOT_PRESENT)
+DEF(movi_i64, 1, 0, 1, TCG_OPF_64BIT | TCG_OPF_NOT_PRESENT)
DEF(setcond_i64, 1, 2, 1, IMPL64)
DEF(movcond_i64, 1, 4, 1, IMPL64 | IMPL(TCG_TARGET_HAS_movcond_i64))
/* load/store */
diff --git a/tcg/tci/tcg-target.c b/tcg/tci/tcg-target.c
index 7425e73df5..9b39231c15 100644
--- a/tcg/tci/tcg-target.c
+++ b/tcg/tci/tcg-target.c
@@ -59,12 +59,8 @@
static const TCGTargetOpDef tcg_target_op_defs[] = {
{ INDEX_op_exit_tb, { NULL } },
{ INDEX_op_goto_tb, { NULL } },
- { INDEX_op_call, { RI } },
{ INDEX_op_br, { NULL } },
- { INDEX_op_mov_i32, { R, R } },
- { INDEX_op_movi_i32, { R } },
-
{ INDEX_op_ld8u_i32, { R, R } },
{ INDEX_op_ld8s_i32, { R, R } },
{ INDEX_op_ld16u_i32, { R, R } },
@@ -141,9 +137,6 @@ static const TCGTargetOpDef tcg_target_op_defs[] = {
#endif
#if TCG_TARGET_REG_BITS == 64
- { INDEX_op_mov_i64, { R, R } },
- { INDEX_op_movi_i64, { R } },
-
{ INDEX_op_ld8u_i64, { R, R } },
{ INDEX_op_ld8s_i64, { R, R } },
{ INDEX_op_ld16u_i64, { R, R } },
@@ -581,9 +574,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
case INDEX_op_br:
tci_out_label(s, args[0]);
break;
- case INDEX_op_call:
- tcg_out_ri(s, const_args[0], args[0]);
- break;
case INDEX_op_setcond_i32:
tcg_out_r(s, args[0]);
tcg_out_r(s, args[1]);
@@ -608,9 +598,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out8(s, args[3]); /* condition */
break;
#endif
- case INDEX_op_movi_i32:
- TODO(); /* Handled by tcg_out_movi? */
- break;
case INDEX_op_ld8u_i32:
case INDEX_op_ld8s_i32:
case INDEX_op_ld16u_i32:
@@ -666,10 +653,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
break;
#if TCG_TARGET_REG_BITS == 64
- case INDEX_op_mov_i64:
- case INDEX_op_movi_i64:
- TODO();
- break;
case INDEX_op_add_i64:
case INDEX_op_sub_i64:
case INDEX_op_mul_i64:
@@ -837,11 +820,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
tcg_out_i(s, *args);
#endif
break;
- case INDEX_op_end:
- TODO();
- break;
+ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
+ case INDEX_op_mov_i64:
+ case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
+ case INDEX_op_movi_i64:
+ case INDEX_op_call: /* Always emitted via tcg_out_call. */
default:
- fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
tcg_abort();
}
old_code_ptr[1] = s->code_ptr - old_code_ptr;