-rw-r--r--  accel/tcg/translate-all.c      17
-rw-r--r--  include/hw/core/tcg-cpu-ops.h  10
2 files changed, 23 insertions, 4 deletions
diff --git a/accel/tcg/translate-all.c b/accel/tcg/translate-all.c
index 2c34adccce..99ca6f36b9 100644
--- a/accel/tcg/translate-all.c
+++ b/accel/tcg/translate-all.c
@@ -60,6 +60,7 @@
#include "sysemu/cpu-timers.h"
#include "sysemu/tcg.h"
#include "qapi/error.h"
+#include "hw/core/tcg-cpu-ops.h"
#include "internal.h"
/* #define DEBUG_TB_INVALIDATE */
@@ -2421,6 +2422,7 @@ void cpu_io_recompile(CPUState *cpu, uintptr_t retaddr)
CPUArchState *env = cpu->env_ptr;
#endif
TranslationBlock *tb;
+ CPUClass *cc;
uint32_t n;

tb = tcg_tb_lookup(retaddr);
@@ -2430,11 +2432,18 @@ void cpu_io_recompile(CPUState *cpu, uintptr_t retaddr)
}
cpu_restore_state_from_tb(cpu, tb, retaddr, true);

- /* On MIPS and SH, delay slot instructions can only be restarted if
- they were already the first instruction in the TB. If this is not
- the first instruction in a TB then re-execute the preceding
- branch. */
+ /*
+ * Some guests must re-execute the branch when re-executing a delay
+ * slot instruction. When this is the case, adjust icount and N
+ * to account for the re-execution of the branch.
+ */
n = 1;
+ cc = CPU_GET_CLASS(cpu);
+ if (cc->tcg_ops->io_recompile_replay_branch &&
+ cc->tcg_ops->io_recompile_replay_branch(cpu, tb)) {
+ cpu_neg(cpu)->icount_decr.u16.low++;
+ n = 2;
+ }
#if defined(TARGET_MIPS)
if ((env->hflags & MIPS_HFLAG_BMASK) != 0
&& env->active_tc.PC != tb->pc) {
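For reference, the TARGET_MIPS block kept above could later be recast as an implementation of the new hook. The following is an illustrative sketch only; the function name mips_io_recompile_replay_branch and its placement in target code are assumptions, not part of this patch.

static bool mips_io_recompile_replay_branch(CPUState *cs,
                                            const TranslationBlock *tb)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    /*
     * The faulting instruction is in a delay slot and is not the first
     * instruction of the TB: wind PC back onto the branch and clear the
     * branch state, then return true so that the caller
     * (cpu_io_recompile) bumps icount and sets n = 2.
     */
    if ((env->hflags & MIPS_HFLAG_BMASK) != 0
        && env->active_tc.PC != tb->pc) {
        env->active_tc.PC -= (env->hflags & MIPS_HFLAG_B16 ? 2 : 4);
        env->hflags &= ~MIPS_HFLAG_BMASK;
        return true;
    }
    return false;
}

Once the affected targets provide such hooks, the #if defined(TARGET_MIPS) block above, and the SH variant the old comment refers to, become redundant.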
diff --git a/include/hw/core/tcg-cpu-ops.h b/include/hw/core/tcg-cpu-ops.h
index ac3bb051f2..72d791438c 100644
--- a/include/hw/core/tcg-cpu-ops.h
+++ b/include/hw/core/tcg-cpu-ops.h
@@ -88,6 +88,16 @@ struct TCGCPUOps {
*/
bool (*debug_check_watchpoint)(CPUState *cpu, CPUWatchpoint *wp);

+ /**
+ * @io_recompile_replay_branch: Callback for cpu_io_recompile.
+ *
+ * The cpu has been stopped, and cpu_restore_state_from_tb has been
+ * called. If the faulting instruction is in a delay slot, and the
+ * target architecture requires re-execution of the branch, then
+ * adjust the cpu state as required and return true.
+ */
+ bool (*io_recompile_replay_branch)(CPUState *cpu,
+ const TranslationBlock *tb);
#endif /* CONFIG_SOFTMMU */
#endif /* NEED_CPU_H */
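A target opts in by filling the new member of its TCGCPUOps table; cpu_io_recompile checks the pointer before calling it, so targets without delay slots simply leave it NULL. A hypothetical wiring for the sketch above might look like the fragment below; the neighbouring members shown are assumptions about the target's existing ops, not part of this patch.

static const struct TCGCPUOps mips_tcg_ops = {
    .initialize = mips_tcg_init,
    .synchronize_from_tb = mips_cpu_synchronize_from_tb,
#ifndef CONFIG_USER_ONLY
    /* Declared only under CONFIG_SOFTMMU in tcg-cpu-ops.h. */
    .io_recompile_replay_branch = mips_io_recompile_replay_branch,
#endif
};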