-rw-r--r--   target-ppc/translate.c          | 1068
-rw-r--r--   target-ppc/translate/vmx-impl.c |  829
-rw-r--r--   target-ppc/translate/vmx-ops.c  |  245
3 files changed, 1077 insertions, 1065 deletions
diff --git a/target-ppc/translate.c b/target-ppc/translate.c
index dac91f2c24..210152a68c 100644
--- a/target-ppc/translate.c
+++ b/target-ppc/translate.c
@@ -5241,6 +5241,8 @@ static void gen_rfsvc(DisasContext *ctx)
#include "translate/fp-impl.c"
+#include "translate/vmx-impl.c"
+
/* svc is not implemented for now */
/* BookE specific instructions */
@@ -6005,702 +6007,6 @@ static void gen_msgsnd(DisasContext *ctx)
#endif /* defined(CONFIG_USER_ONLY) */
}
-/*** Altivec vector extension ***/
-/* Altivec register moves */
-
-static inline TCGv_ptr gen_avr_ptr(int reg)
-{
- TCGv_ptr r = tcg_temp_new_ptr();
- tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
- return r;
-}
-
-#define GEN_VR_LDX(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
-{ \
- TCGv EA; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- gen_set_access_type(ctx, ACCESS_INT); \
- EA = tcg_temp_new(); \
- gen_addr_reg_index(ctx, EA); \
- tcg_gen_andi_tl(EA, EA, ~0xf); \
- /* We only need to swap high and low halves. gen_qemu_ld64 does necessary \
- 64-bit byteswap already. */ \
- if (ctx->le_mode) { \
- gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
- tcg_gen_addi_tl(EA, EA, 8); \
- gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
- } else { \
- gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
- tcg_gen_addi_tl(EA, EA, 8); \
- gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
- } \
- tcg_temp_free(EA); \
-}
-
-#define GEN_VR_STX(name, opc2, opc3) \
-static void gen_st##name(DisasContext *ctx) \
-{ \
- TCGv EA; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- gen_set_access_type(ctx, ACCESS_INT); \
- EA = tcg_temp_new(); \
- gen_addr_reg_index(ctx, EA); \
- tcg_gen_andi_tl(EA, EA, ~0xf); \
- /* We only need to swap high and low halves. gen_qemu_st64 does necessary \
- 64-bit byteswap already. */ \
- if (ctx->le_mode) { \
- gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
- tcg_gen_addi_tl(EA, EA, 8); \
- gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
- } else { \
- gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
- tcg_gen_addi_tl(EA, EA, 8); \
- gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
- } \
- tcg_temp_free(EA); \
-}
-
-#define GEN_VR_LVE(name, opc2, opc3, size) \
-static void gen_lve##name(DisasContext *ctx) \
- { \
- TCGv EA; \
- TCGv_ptr rs; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- gen_set_access_type(ctx, ACCESS_INT); \
- EA = tcg_temp_new(); \
- gen_addr_reg_index(ctx, EA); \
- if (size > 1) { \
- tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
- } \
- rs = gen_avr_ptr(rS(ctx->opcode)); \
- gen_helper_lve##name(cpu_env, rs, EA); \
- tcg_temp_free(EA); \
- tcg_temp_free_ptr(rs); \
- }
-
-#define GEN_VR_STVE(name, opc2, opc3, size) \
-static void gen_stve##name(DisasContext *ctx) \
- { \
- TCGv EA; \
- TCGv_ptr rs; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- gen_set_access_type(ctx, ACCESS_INT); \
- EA = tcg_temp_new(); \
- gen_addr_reg_index(ctx, EA); \
- if (size > 1) { \
- tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
- } \
- rs = gen_avr_ptr(rS(ctx->opcode)); \
- gen_helper_stve##name(cpu_env, rs, EA); \
- tcg_temp_free(EA); \
- tcg_temp_free_ptr(rs); \
- }
-
-GEN_VR_LDX(lvx, 0x07, 0x03);
-/* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
-GEN_VR_LDX(lvxl, 0x07, 0x0B);
-
-GEN_VR_LVE(bx, 0x07, 0x00, 1);
-GEN_VR_LVE(hx, 0x07, 0x01, 2);
-GEN_VR_LVE(wx, 0x07, 0x02, 4);
-
-GEN_VR_STX(svx, 0x07, 0x07);
-/* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
-GEN_VR_STX(svxl, 0x07, 0x0F);
-
-GEN_VR_STVE(bx, 0x07, 0x04, 1);
-GEN_VR_STVE(hx, 0x07, 0x05, 2);
-GEN_VR_STVE(wx, 0x07, 0x06, 4);
-
-static void gen_lvsl(DisasContext *ctx)
-{
- TCGv_ptr rd;
- TCGv EA;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- EA = tcg_temp_new();
- gen_addr_reg_index(ctx, EA);
- rd = gen_avr_ptr(rD(ctx->opcode));
- gen_helper_lvsl(rd, EA);
- tcg_temp_free(EA);
- tcg_temp_free_ptr(rd);
-}
-
-static void gen_lvsr(DisasContext *ctx)
-{
- TCGv_ptr rd;
- TCGv EA;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- EA = tcg_temp_new();
- gen_addr_reg_index(ctx, EA);
- rd = gen_avr_ptr(rD(ctx->opcode));
- gen_helper_lvsr(rd, EA);
- tcg_temp_free(EA);
- tcg_temp_free_ptr(rd);
-}
-
-static void gen_mfvscr(DisasContext *ctx)
-{
- TCGv_i32 t;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
- t = tcg_temp_new_i32();
- tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
- tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
- tcg_temp_free_i32(t);
-}
-
-static void gen_mtvscr(DisasContext *ctx)
-{
- TCGv_ptr p;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- p = gen_avr_ptr(rB(ctx->opcode));
- gen_helper_mtvscr(cpu_env, p);
- tcg_temp_free_ptr(p);
-}
-
-/* Logical operations */
-#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
-{ \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
- tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
-}
-
-GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
-GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
-GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
-GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
-GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
-GEN_VX_LOGICAL(veqv, tcg_gen_eqv_i64, 2, 26);
-GEN_VX_LOGICAL(vnand, tcg_gen_nand_i64, 2, 22);
-GEN_VX_LOGICAL(vorc, tcg_gen_orc_i64, 2, 21);
-
-#define GEN_VXFORM(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
-{ \
- TCGv_ptr ra, rb, rd; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name (rd, ra, rb); \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
-}
-
-#define GEN_VXFORM_ENV(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
-{ \
- TCGv_ptr ra, rb, rd; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name(cpu_env, rd, ra, rb); \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
-}
-
-#define GEN_VXFORM3(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
-{ \
- TCGv_ptr ra, rb, rc, rd; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rc = gen_avr_ptr(rC(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name(rd, ra, rb, rc); \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rc); \
- tcg_temp_free_ptr(rd); \
-}
-
-/*
- * Support for Altivec instruction pairs that use bit 31 (Rc) as
- * an opcode bit. In general, these pairs come from different
- * versions of the ISA, so we must also support a pair of flags for
- * each instruction.
- */
-#define GEN_VXFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
-static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
-{ \
- if ((Rc(ctx->opcode) == 0) && \
- ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
- gen_##name0(ctx); \
- } else if ((Rc(ctx->opcode) == 1) && \
- ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
- gen_##name1(ctx); \
- } else { \
- gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
- } \
-}
-
-GEN_VXFORM(vaddubm, 0, 0);
-GEN_VXFORM(vadduhm, 0, 1);
-GEN_VXFORM(vadduwm, 0, 2);
-GEN_VXFORM(vaddudm, 0, 3);
-GEN_VXFORM(vsububm, 0, 16);
-GEN_VXFORM(vsubuhm, 0, 17);
-GEN_VXFORM(vsubuwm, 0, 18);
-GEN_VXFORM(vsubudm, 0, 19);
-GEN_VXFORM(vmaxub, 1, 0);
-GEN_VXFORM(vmaxuh, 1, 1);
-GEN_VXFORM(vmaxuw, 1, 2);
-GEN_VXFORM(vmaxud, 1, 3);
-GEN_VXFORM(vmaxsb, 1, 4);
-GEN_VXFORM(vmaxsh, 1, 5);
-GEN_VXFORM(vmaxsw, 1, 6);
-GEN_VXFORM(vmaxsd, 1, 7);
-GEN_VXFORM(vminub, 1, 8);
-GEN_VXFORM(vminuh, 1, 9);
-GEN_VXFORM(vminuw, 1, 10);
-GEN_VXFORM(vminud, 1, 11);
-GEN_VXFORM(vminsb, 1, 12);
-GEN_VXFORM(vminsh, 1, 13);
-GEN_VXFORM(vminsw, 1, 14);
-GEN_VXFORM(vminsd, 1, 15);
-GEN_VXFORM(vavgub, 1, 16);
-GEN_VXFORM(vavguh, 1, 17);
-GEN_VXFORM(vavguw, 1, 18);
-GEN_VXFORM(vavgsb, 1, 20);
-GEN_VXFORM(vavgsh, 1, 21);
-GEN_VXFORM(vavgsw, 1, 22);
-GEN_VXFORM(vmrghb, 6, 0);
-GEN_VXFORM(vmrghh, 6, 1);
-GEN_VXFORM(vmrghw, 6, 2);
-GEN_VXFORM(vmrglb, 6, 4);
-GEN_VXFORM(vmrglh, 6, 5);
-GEN_VXFORM(vmrglw, 6, 6);
-
-static void gen_vmrgew(DisasContext *ctx)
-{
- TCGv_i64 tmp;
- int VT, VA, VB;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- VT = rD(ctx->opcode);
- VA = rA(ctx->opcode);
- VB = rB(ctx->opcode);
- tmp = tcg_temp_new_i64();
- tcg_gen_shri_i64(tmp, cpu_avrh[VB], 32);
- tcg_gen_deposit_i64(cpu_avrh[VT], cpu_avrh[VA], tmp, 0, 32);
- tcg_gen_shri_i64(tmp, cpu_avrl[VB], 32);
- tcg_gen_deposit_i64(cpu_avrl[VT], cpu_avrl[VA], tmp, 0, 32);
- tcg_temp_free_i64(tmp);
-}
-
-static void gen_vmrgow(DisasContext *ctx)
-{
- int VT, VA, VB;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- VT = rD(ctx->opcode);
- VA = rA(ctx->opcode);
- VB = rB(ctx->opcode);
-
- tcg_gen_deposit_i64(cpu_avrh[VT], cpu_avrh[VB], cpu_avrh[VA], 32, 32);
- tcg_gen_deposit_i64(cpu_avrl[VT], cpu_avrl[VB], cpu_avrl[VA], 32, 32);
-}
-
-GEN_VXFORM(vmuloub, 4, 0);
-GEN_VXFORM(vmulouh, 4, 1);
-GEN_VXFORM(vmulouw, 4, 2);
-GEN_VXFORM(vmuluwm, 4, 2);
-GEN_VXFORM_DUAL(vmulouw, PPC_ALTIVEC, PPC_NONE,
- vmuluwm, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM(vmulosb, 4, 4);
-GEN_VXFORM(vmulosh, 4, 5);
-GEN_VXFORM(vmulosw, 4, 6);
-GEN_VXFORM(vmuleub, 4, 8);
-GEN_VXFORM(vmuleuh, 4, 9);
-GEN_VXFORM(vmuleuw, 4, 10);
-GEN_VXFORM(vmulesb, 4, 12);
-GEN_VXFORM(vmulesh, 4, 13);
-GEN_VXFORM(vmulesw, 4, 14);
-GEN_VXFORM(vslb, 2, 4);
-GEN_VXFORM(vslh, 2, 5);
-GEN_VXFORM(vslw, 2, 6);
-GEN_VXFORM(vsld, 2, 23);
-GEN_VXFORM(vsrb, 2, 8);
-GEN_VXFORM(vsrh, 2, 9);
-GEN_VXFORM(vsrw, 2, 10);
-GEN_VXFORM(vsrd, 2, 27);
-GEN_VXFORM(vsrab, 2, 12);
-GEN_VXFORM(vsrah, 2, 13);
-GEN_VXFORM(vsraw, 2, 14);
-GEN_VXFORM(vsrad, 2, 15);
-GEN_VXFORM(vslo, 6, 16);
-GEN_VXFORM(vsro, 6, 17);
-GEN_VXFORM(vaddcuw, 0, 6);
-GEN_VXFORM(vsubcuw, 0, 22);
-GEN_VXFORM_ENV(vaddubs, 0, 8);
-GEN_VXFORM_ENV(vadduhs, 0, 9);
-GEN_VXFORM_ENV(vadduws, 0, 10);
-GEN_VXFORM_ENV(vaddsbs, 0, 12);
-GEN_VXFORM_ENV(vaddshs, 0, 13);
-GEN_VXFORM_ENV(vaddsws, 0, 14);
-GEN_VXFORM_ENV(vsububs, 0, 24);
-GEN_VXFORM_ENV(vsubuhs, 0, 25);
-GEN_VXFORM_ENV(vsubuws, 0, 26);
-GEN_VXFORM_ENV(vsubsbs, 0, 28);
-GEN_VXFORM_ENV(vsubshs, 0, 29);
-GEN_VXFORM_ENV(vsubsws, 0, 30);
-GEN_VXFORM(vadduqm, 0, 4);
-GEN_VXFORM(vaddcuq, 0, 5);
-GEN_VXFORM3(vaddeuqm, 30, 0);
-GEN_VXFORM3(vaddecuq, 30, 0);
-GEN_VXFORM_DUAL(vaddeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
- vaddecuq, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM(vsubuqm, 0, 20);
-GEN_VXFORM(vsubcuq, 0, 21);
-GEN_VXFORM3(vsubeuqm, 31, 0);
-GEN_VXFORM3(vsubecuq, 31, 0);
-GEN_VXFORM_DUAL(vsubeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
- vsubecuq, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM(vrlb, 2, 0);
-GEN_VXFORM(vrlh, 2, 1);
-GEN_VXFORM(vrlw, 2, 2);
-GEN_VXFORM(vrld, 2, 3);
-GEN_VXFORM(vsl, 2, 7);
-GEN_VXFORM(vsr, 2, 11);
-GEN_VXFORM_ENV(vpkuhum, 7, 0);
-GEN_VXFORM_ENV(vpkuwum, 7, 1);
-GEN_VXFORM_ENV(vpkudum, 7, 17);
-GEN_VXFORM_ENV(vpkuhus, 7, 2);
-GEN_VXFORM_ENV(vpkuwus, 7, 3);
-GEN_VXFORM_ENV(vpkudus, 7, 19);
-GEN_VXFORM_ENV(vpkshus, 7, 4);
-GEN_VXFORM_ENV(vpkswus, 7, 5);
-GEN_VXFORM_ENV(vpksdus, 7, 21);
-GEN_VXFORM_ENV(vpkshss, 7, 6);
-GEN_VXFORM_ENV(vpkswss, 7, 7);
-GEN_VXFORM_ENV(vpksdss, 7, 23);
-GEN_VXFORM(vpkpx, 7, 12);
-GEN_VXFORM_ENV(vsum4ubs, 4, 24);
-GEN_VXFORM_ENV(vsum4sbs, 4, 28);
-GEN_VXFORM_ENV(vsum4shs, 4, 25);
-GEN_VXFORM_ENV(vsum2sws, 4, 26);
-GEN_VXFORM_ENV(vsumsws, 4, 30);
-GEN_VXFORM_ENV(vaddfp, 5, 0);
-GEN_VXFORM_ENV(vsubfp, 5, 1);
-GEN_VXFORM_ENV(vmaxfp, 5, 16);
-GEN_VXFORM_ENV(vminfp, 5, 17);
-
-#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr ra, rb, rd; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##opname(cpu_env, rd, ra, rb); \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
- }
-
-#define GEN_VXRFORM(name, opc2, opc3) \
- GEN_VXRFORM1(name, name, #name, opc2, opc3) \
- GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
-
-/*
- * Support for Altivec instructions that use bit 31 (Rc) as an opcode
- * bit but also use bit 21 as an actual Rc bit. In general, these pairs
- * come from different versions of the ISA, so we must also support a
- * pair of flags for each instruction.
- */
-#define GEN_VXRFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
-static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
-{ \
- if ((Rc(ctx->opcode) == 0) && \
- ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
- if (Rc21(ctx->opcode) == 0) { \
- gen_##name0(ctx); \
- } else { \
- gen_##name0##_(ctx); \
- } \
- } else if ((Rc(ctx->opcode) == 1) && \
- ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
- if (Rc21(ctx->opcode) == 0) { \
- gen_##name1(ctx); \
- } else { \
- gen_##name1##_(ctx); \
- } \
- } else { \
- gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
- } \
-}
-
-GEN_VXRFORM(vcmpequb, 3, 0)
-GEN_VXRFORM(vcmpequh, 3, 1)
-GEN_VXRFORM(vcmpequw, 3, 2)
-GEN_VXRFORM(vcmpequd, 3, 3)
-GEN_VXRFORM(vcmpgtsb, 3, 12)
-GEN_VXRFORM(vcmpgtsh, 3, 13)
-GEN_VXRFORM(vcmpgtsw, 3, 14)
-GEN_VXRFORM(vcmpgtsd, 3, 15)
-GEN_VXRFORM(vcmpgtub, 3, 8)
-GEN_VXRFORM(vcmpgtuh, 3, 9)
-GEN_VXRFORM(vcmpgtuw, 3, 10)
-GEN_VXRFORM(vcmpgtud, 3, 11)
-GEN_VXRFORM(vcmpeqfp, 3, 3)
-GEN_VXRFORM(vcmpgefp, 3, 7)
-GEN_VXRFORM(vcmpgtfp, 3, 11)
-GEN_VXRFORM(vcmpbfp, 3, 15)
-
-GEN_VXRFORM_DUAL(vcmpeqfp, PPC_ALTIVEC, PPC_NONE, \
- vcmpequd, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXRFORM_DUAL(vcmpbfp, PPC_ALTIVEC, PPC_NONE, \
- vcmpgtsd, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXRFORM_DUAL(vcmpgtfp, PPC_ALTIVEC, PPC_NONE, \
- vcmpgtud, PPC_NONE, PPC2_ALTIVEC_207)
-
-#define GEN_VXFORM_SIMM(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr rd; \
- TCGv_i32 simm; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- simm = tcg_const_i32(SIMM5(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name (rd, simm); \
- tcg_temp_free_i32(simm); \
- tcg_temp_free_ptr(rd); \
- }
-
-GEN_VXFORM_SIMM(vspltisb, 6, 12);
-GEN_VXFORM_SIMM(vspltish, 6, 13);
-GEN_VXFORM_SIMM(vspltisw, 6, 14);
-
-#define GEN_VXFORM_NOA(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr rb, rd; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name (rd, rb); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
- }
-
-#define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr rb, rd; \
- \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name(cpu_env, rd, rb); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
- }
-
-GEN_VXFORM_NOA(vupkhsb, 7, 8);
-GEN_VXFORM_NOA(vupkhsh, 7, 9);
-GEN_VXFORM_NOA(vupkhsw, 7, 25);
-GEN_VXFORM_NOA(vupklsb, 7, 10);
-GEN_VXFORM_NOA(vupklsh, 7, 11);
-GEN_VXFORM_NOA(vupklsw, 7, 27);
-GEN_VXFORM_NOA(vupkhpx, 7, 13);
-GEN_VXFORM_NOA(vupklpx, 7, 15);
-GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
-GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
-GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
-GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
-GEN_VXFORM_NOA_ENV(vrfim, 5, 11);
-GEN_VXFORM_NOA_ENV(vrfin, 5, 8);
-GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
-GEN_VXFORM_NOA_ENV(vrfiz, 5, 9);
-
-#define GEN_VXFORM_SIMM(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr rd; \
- TCGv_i32 simm; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- simm = tcg_const_i32(SIMM5(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name (rd, simm); \
- tcg_temp_free_i32(simm); \
- tcg_temp_free_ptr(rd); \
- }
-
-#define GEN_VXFORM_UIMM(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr rb, rd; \
- TCGv_i32 uimm; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name (rd, rb, uimm); \
- tcg_temp_free_i32(uimm); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
- }
-
-#define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
-static void glue(gen_, name)(DisasContext *ctx) \
- { \
- TCGv_ptr rb, rd; \
- TCGv_i32 uimm; \
- \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- gen_helper_##name(cpu_env, rd, rb, uimm); \
- tcg_temp_free_i32(uimm); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
- }
-
-GEN_VXFORM_UIMM(vspltb, 6, 8);
-GEN_VXFORM_UIMM(vsplth, 6, 9);
-GEN_VXFORM_UIMM(vspltw, 6, 10);
-GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
-GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
-GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
-GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
-
-static void gen_vsldoi(DisasContext *ctx)
-{
- TCGv_ptr ra, rb, rd;
- TCGv_i32 sh;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- ra = gen_avr_ptr(rA(ctx->opcode));
- rb = gen_avr_ptr(rB(ctx->opcode));
- rd = gen_avr_ptr(rD(ctx->opcode));
- sh = tcg_const_i32(VSH(ctx->opcode));
- gen_helper_vsldoi (rd, ra, rb, sh);
- tcg_temp_free_ptr(ra);
- tcg_temp_free_ptr(rb);
- tcg_temp_free_ptr(rd);
- tcg_temp_free_i32(sh);
-}
-
-#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
-static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
- { \
- TCGv_ptr ra, rb, rc, rd; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rc = gen_avr_ptr(rC(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- if (Rc(ctx->opcode)) { \
- gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
- } else { \
- gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
- } \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rc); \
- tcg_temp_free_ptr(rd); \
- }
-
-GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
-
-static void gen_vmladduhm(DisasContext *ctx)
-{
- TCGv_ptr ra, rb, rc, rd;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- ra = gen_avr_ptr(rA(ctx->opcode));
- rb = gen_avr_ptr(rB(ctx->opcode));
- rc = gen_avr_ptr(rC(ctx->opcode));
- rd = gen_avr_ptr(rD(ctx->opcode));
- gen_helper_vmladduhm(rd, ra, rb, rc);
- tcg_temp_free_ptr(ra);
- tcg_temp_free_ptr(rb);
- tcg_temp_free_ptr(rc);
- tcg_temp_free_ptr(rd);
-}
-
-GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
-GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
-GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
-GEN_VAFORM_PAIRED(vsel, vperm, 21)
-GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
#if defined(TARGET_PPC64)
static void gen_maddld(DisasContext *ctx)
@@ -6736,115 +6042,6 @@ static void gen_maddhd_maddhdu(DisasContext *ctx)
}
#endif /* defined(TARGET_PPC64) */
-GEN_VXFORM_NOA(vclzb, 1, 28)
-GEN_VXFORM_NOA(vclzh, 1, 29)
-GEN_VXFORM_NOA(vclzw, 1, 30)
-GEN_VXFORM_NOA(vclzd, 1, 31)
-GEN_VXFORM_NOA(vpopcntb, 1, 28)
-GEN_VXFORM_NOA(vpopcnth, 1, 29)
-GEN_VXFORM_NOA(vpopcntw, 1, 30)
-GEN_VXFORM_NOA(vpopcntd, 1, 31)
-GEN_VXFORM_DUAL(vclzb, PPC_NONE, PPC2_ALTIVEC_207, \
- vpopcntb, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vclzh, PPC_NONE, PPC2_ALTIVEC_207, \
- vpopcnth, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vclzw, PPC_NONE, PPC2_ALTIVEC_207, \
- vpopcntw, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vclzd, PPC_NONE, PPC2_ALTIVEC_207, \
- vpopcntd, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM(vbpermq, 6, 21);
-GEN_VXFORM_NOA(vgbbd, 6, 20);
-GEN_VXFORM(vpmsumb, 4, 16)
-GEN_VXFORM(vpmsumh, 4, 17)
-GEN_VXFORM(vpmsumw, 4, 18)
-GEN_VXFORM(vpmsumd, 4, 19)
-
-#define GEN_BCD(op) \
-static void gen_##op(DisasContext *ctx) \
-{ \
- TCGv_ptr ra, rb, rd; \
- TCGv_i32 ps; \
- \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rb = gen_avr_ptr(rB(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- \
- ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
- \
- gen_helper_##op(cpu_crf[6], rd, ra, rb, ps); \
- \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rb); \
- tcg_temp_free_ptr(rd); \
- tcg_temp_free_i32(ps); \
-}
-
-GEN_BCD(bcdadd)
-GEN_BCD(bcdsub)
-
-GEN_VXFORM_DUAL(vsububm, PPC_ALTIVEC, PPC_NONE, \
- bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vsububs, PPC_ALTIVEC, PPC_NONE, \
- bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vsubuhm, PPC_ALTIVEC, PPC_NONE, \
- bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vsubuhs, PPC_ALTIVEC, PPC_NONE, \
- bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
-
-static void gen_vsbox(DisasContext *ctx)
-{
- TCGv_ptr ra, rd;
- if (unlikely(!ctx->altivec_enabled)) {
- gen_exception(ctx, POWERPC_EXCP_VPU);
- return;
- }
- ra = gen_avr_ptr(rA(ctx->opcode));
- rd = gen_avr_ptr(rD(ctx->opcode));
- gen_helper_vsbox(rd, ra);
- tcg_temp_free_ptr(ra);
- tcg_temp_free_ptr(rd);
-}
-
-GEN_VXFORM(vcipher, 4, 20)
-GEN_VXFORM(vcipherlast, 4, 20)
-GEN_VXFORM(vncipher, 4, 21)
-GEN_VXFORM(vncipherlast, 4, 21)
-
-GEN_VXFORM_DUAL(vcipher, PPC_NONE, PPC2_ALTIVEC_207,
- vcipherlast, PPC_NONE, PPC2_ALTIVEC_207)
-GEN_VXFORM_DUAL(vncipher, PPC_NONE, PPC2_ALTIVEC_207,
- vncipherlast, PPC_NONE, PPC2_ALTIVEC_207)
-
-#define VSHASIGMA(op) \
-static void gen_##op(DisasContext *ctx) \
-{ \
- TCGv_ptr ra, rd; \
- TCGv_i32 st_six; \
- if (unlikely(!ctx->altivec_enabled)) { \
- gen_exception(ctx, POWERPC_EXCP_VPU); \
- return; \
- } \
- ra = gen_avr_ptr(rA(ctx->opcode)); \
- rd = gen_avr_ptr(rD(ctx->opcode)); \
- st_six = tcg_const_i32(rB(ctx->opcode)); \
- gen_helper_##op(rd, ra, st_six); \
- tcg_temp_free_ptr(ra); \
- tcg_temp_free_ptr(rd); \
- tcg_temp_free_i32(st_six); \
-}
-
-VSHASIGMA(vshasigmaw)
-VSHASIGMA(vshasigmad)
-
-GEN_VXFORM3(vpermxor, 22, 0xFF)
-GEN_VXFORM_DUAL(vsldoi, PPC_ALTIVEC, PPC_NONE,
- vpermxor, PPC_NONE, PPC2_ALTIVEC_207)
-
/*** VSX extension ***/
static inline TCGv_i64 cpu_vsrh(int n)
@@ -8165,266 +7362,7 @@ GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
#include "translate/fp-ops.c"
-#undef GEN_VR_LDX
-#undef GEN_VR_STX
-#undef GEN_VR_LVE
-#undef GEN_VR_STVE
-#define GEN_VR_LDX(name, opc2, opc3) \
-GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
-#define GEN_VR_STX(name, opc2, opc3) \
-GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
-#define GEN_VR_LVE(name, opc2, opc3) \
- GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
-#define GEN_VR_STVE(name, opc2, opc3) \
- GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
-GEN_VR_LDX(lvx, 0x07, 0x03),
-GEN_VR_LDX(lvxl, 0x07, 0x0B),
-GEN_VR_LVE(bx, 0x07, 0x00),
-GEN_VR_LVE(hx, 0x07, 0x01),
-GEN_VR_LVE(wx, 0x07, 0x02),
-GEN_VR_STX(svx, 0x07, 0x07),
-GEN_VR_STX(svxl, 0x07, 0x0F),
-GEN_VR_STVE(bx, 0x07, 0x04),
-GEN_VR_STVE(hx, 0x07, 0x05),
-GEN_VR_STVE(wx, 0x07, 0x06),
-
-#undef GEN_VX_LOGICAL
-#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
-GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
-
-#undef GEN_VX_LOGICAL_207
-#define GEN_VX_LOGICAL_207(name, tcg_op, opc2, opc3) \
-GEN_HANDLER_E(name, 0x04, opc2, opc3, 0x00000000, PPC_NONE, PPC2_ALTIVEC_207)
-
-GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
-GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
-GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
-GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
-GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
-GEN_VX_LOGICAL_207(veqv, tcg_gen_eqv_i64, 2, 26),
-GEN_VX_LOGICAL_207(vnand, tcg_gen_nand_i64, 2, 22),
-GEN_VX_LOGICAL_207(vorc, tcg_gen_orc_i64, 2, 21),
-
-#undef GEN_VXFORM
-#define GEN_VXFORM(name, opc2, opc3) \
-GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
-
-#undef GEN_VXFORM_207
-#define GEN_VXFORM_207(name, opc2, opc3) \
-GEN_HANDLER_E(name, 0x04, opc2, opc3, 0x00000000, PPC_NONE, PPC2_ALTIVEC_207)
-
-#undef GEN_VXFORM_DUAL
-#define GEN_VXFORM_DUAL(name0, name1, opc2, opc3, type0, type1) \
-GEN_HANDLER_E(name0##_##name1, 0x4, opc2, opc3, 0x00000000, type0, type1)
-
-#undef GEN_VXRFORM_DUAL
-#define GEN_VXRFORM_DUAL(name0, name1, opc2, opc3, tp0, tp1) \
-GEN_HANDLER_E(name0##_##name1, 0x4, opc2, opc3, 0x00000000, tp0, tp1), \
-GEN_HANDLER_E(name0##_##name1, 0x4, opc2, (opc3 | 0x10), 0x00000000, tp0, tp1),
-
-GEN_VXFORM(vaddubm, 0, 0),
-GEN_VXFORM(vadduhm, 0, 1),
-GEN_VXFORM(vadduwm, 0, 2),
-GEN_VXFORM_207(vaddudm, 0, 3),
-GEN_VXFORM_DUAL(vsububm, bcdadd, 0, 16, PPC_ALTIVEC, PPC_NONE),
-GEN_VXFORM_DUAL(vsubuhm, bcdsub, 0, 17, PPC_ALTIVEC, PPC_NONE),
-GEN_VXFORM(vsubuwm, 0, 18),
-GEN_VXFORM_207(vsubudm, 0, 19),
-GEN_VXFORM(vmaxub, 1, 0),
-GEN_VXFORM(vmaxuh, 1, 1),
-GEN_VXFORM(vmaxuw, 1, 2),
-GEN_VXFORM_207(vmaxud, 1, 3),
-GEN_VXFORM(vmaxsb, 1, 4),
-GEN_VXFORM(vmaxsh, 1, 5),
-GEN_VXFORM(vmaxsw, 1, 6),
-GEN_VXFORM_207(vmaxsd, 1, 7),
-GEN_VXFORM(vminub, 1, 8),
-GEN_VXFORM(vminuh, 1, 9),
-GEN_VXFORM(vminuw, 1, 10),
-GEN_VXFORM_207(vminud, 1, 11),
-GEN_VXFORM(vminsb, 1, 12),
-GEN_VXFORM(vminsh, 1, 13),
-GEN_VXFORM(vminsw, 1, 14),
-GEN_VXFORM_207(vminsd, 1, 15),
-GEN_VXFORM(vavgub, 1, 16),
-GEN_VXFORM(vavguh, 1, 17),
-GEN_VXFORM(vavguw, 1, 18),
-GEN_VXFORM(vavgsb, 1, 20),
-GEN_VXFORM(vavgsh, 1, 21),
-GEN_VXFORM(vavgsw, 1, 22),
-GEN_VXFORM(vmrghb, 6, 0),
-GEN_VXFORM(vmrghh, 6, 1),
-GEN_VXFORM(vmrghw, 6, 2),
-GEN_VXFORM(vmrglb, 6, 4),
-GEN_VXFORM(vmrglh, 6, 5),
-GEN_VXFORM(vmrglw, 6, 6),
-GEN_VXFORM_207(vmrgew, 6, 30),
-GEN_VXFORM_207(vmrgow, 6, 26),
-GEN_VXFORM(vmuloub, 4, 0),
-GEN_VXFORM(vmulouh, 4, 1),
-GEN_VXFORM_DUAL(vmulouw, vmuluwm, 4, 2, PPC_ALTIVEC, PPC_NONE),
-GEN_VXFORM(vmulosb, 4, 4),
-GEN_VXFORM(vmulosh, 4, 5),
-GEN_VXFORM_207(vmulosw, 4, 6),
-GEN_VXFORM(vmuleub, 4, 8),
-GEN_VXFORM(vmuleuh, 4, 9),
-GEN_VXFORM_207(vmuleuw, 4, 10),
-GEN_VXFORM(vmulesb, 4, 12),
-GEN_VXFORM(vmulesh, 4, 13),
-GEN_VXFORM_207(vmulesw, 4, 14),
-GEN_VXFORM(vslb, 2, 4),
-GEN_VXFORM(vslh, 2, 5),
-GEN_VXFORM(vslw, 2, 6),
-GEN_VXFORM_207(vsld, 2, 23),
-GEN_VXFORM(vsrb, 2, 8),
-GEN_VXFORM(vsrh, 2, 9),
-GEN_VXFORM(vsrw, 2, 10),
-GEN_VXFORM_207(vsrd, 2, 27),
-GEN_VXFORM(vsrab, 2, 12),
-GEN_VXFORM(vsrah, 2, 13),
-GEN_VXFORM(vsraw, 2, 14),
-GEN_VXFORM_207(vsrad, 2, 15),
-GEN_VXFORM(vslo, 6, 16),
-GEN_VXFORM(vsro, 6, 17),
-GEN_VXFORM(vaddcuw, 0, 6),
-GEN_VXFORM(vsubcuw, 0, 22),
-GEN_VXFORM(vaddubs, 0, 8),
-GEN_VXFORM(vadduhs, 0, 9),
-GEN_VXFORM(vadduws, 0, 10),
-GEN_VXFORM(vaddsbs, 0, 12),
-GEN_VXFORM(vaddshs, 0, 13),
-GEN_VXFORM(vaddsws, 0, 14),
-GEN_VXFORM_DUAL(vsububs, bcdadd, 0, 24, PPC_ALTIVEC, PPC_NONE),
-GEN_VXFORM_DUAL(vsubuhs, bcdsub, 0, 25, PPC_ALTIVEC, PPC_NONE),
-GEN_VXFORM(vsubuws, 0, 26),
-GEN_VXFORM(vsubsbs, 0, 28),
-GEN_VXFORM(vsubshs, 0, 29),
-GEN_VXFORM(vsubsws, 0, 30),
-GEN_VXFORM_207(vadduqm, 0, 4),
-GEN_VXFORM_207(vaddcuq, 0, 5),
-GEN_VXFORM_DUAL(vaddeuqm, vaddecuq, 30, 0xFF, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM_207(vsubuqm, 0, 20),
-GEN_VXFORM_207(vsubcuq, 0, 21),
-GEN_VXFORM_DUAL(vsubeuqm, vsubecuq, 31, 0xFF, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM(vrlb, 2, 0),
-GEN_VXFORM(vrlh, 2, 1),
-GEN_VXFORM(vrlw, 2, 2),
-GEN_VXFORM_207(vrld, 2, 3),
-GEN_VXFORM(vsl, 2, 7),
-GEN_VXFORM(vsr, 2, 11),
-GEN_VXFORM(vpkuhum, 7, 0),
-GEN_VXFORM(vpkuwum, 7, 1),
-GEN_VXFORM_207(vpkudum, 7, 17),
-GEN_VXFORM(vpkuhus, 7, 2),
-GEN_VXFORM(vpkuwus, 7, 3),
-GEN_VXFORM_207(vpkudus, 7, 19),
-GEN_VXFORM(vpkshus, 7, 4),
-GEN_VXFORM(vpkswus, 7, 5),
-GEN_VXFORM_207(vpksdus, 7, 21),
-GEN_VXFORM(vpkshss, 7, 6),
-GEN_VXFORM(vpkswss, 7, 7),
-GEN_VXFORM_207(vpksdss, 7, 23),
-GEN_VXFORM(vpkpx, 7, 12),
-GEN_VXFORM(vsum4ubs, 4, 24),
-GEN_VXFORM(vsum4sbs, 4, 28),
-GEN_VXFORM(vsum4shs, 4, 25),
-GEN_VXFORM(vsum2sws, 4, 26),
-GEN_VXFORM(vsumsws, 4, 30),
-GEN_VXFORM(vaddfp, 5, 0),
-GEN_VXFORM(vsubfp, 5, 1),
-GEN_VXFORM(vmaxfp, 5, 16),
-GEN_VXFORM(vminfp, 5, 17),
-
-#undef GEN_VXRFORM1
-#undef GEN_VXRFORM
-#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
- GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
-#define GEN_VXRFORM(name, opc2, opc3) \
- GEN_VXRFORM1(name, name, #name, opc2, opc3) \
- GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
-GEN_VXRFORM(vcmpequb, 3, 0)
-GEN_VXRFORM(vcmpequh, 3, 1)
-GEN_VXRFORM(vcmpequw, 3, 2)
-GEN_VXRFORM(vcmpgtsb, 3, 12)
-GEN_VXRFORM(vcmpgtsh, 3, 13)
-GEN_VXRFORM(vcmpgtsw, 3, 14)
-GEN_VXRFORM(vcmpgtub, 3, 8)
-GEN_VXRFORM(vcmpgtuh, 3, 9)
-GEN_VXRFORM(vcmpgtuw, 3, 10)
-GEN_VXRFORM_DUAL(vcmpeqfp, vcmpequd, 3, 3, PPC_ALTIVEC, PPC_NONE)
-GEN_VXRFORM(vcmpgefp, 3, 7)
-GEN_VXRFORM_DUAL(vcmpgtfp, vcmpgtud, 3, 11, PPC_ALTIVEC, PPC_NONE)
-GEN_VXRFORM_DUAL(vcmpbfp, vcmpgtsd, 3, 15, PPC_ALTIVEC, PPC_NONE)
-
-#undef GEN_VXFORM_SIMM
-#define GEN_VXFORM_SIMM(name, opc2, opc3) \
- GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
-GEN_VXFORM_SIMM(vspltisb, 6, 12),
-GEN_VXFORM_SIMM(vspltish, 6, 13),
-GEN_VXFORM_SIMM(vspltisw, 6, 14),
-
-#undef GEN_VXFORM_NOA
-#define GEN_VXFORM_NOA(name, opc2, opc3) \
- GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
-GEN_VXFORM_NOA(vupkhsb, 7, 8),
-GEN_VXFORM_NOA(vupkhsh, 7, 9),
-GEN_VXFORM_207(vupkhsw, 7, 25),
-GEN_VXFORM_NOA(vupklsb, 7, 10),
-GEN_VXFORM_NOA(vupklsh, 7, 11),
-GEN_VXFORM_207(vupklsw, 7, 27),
-GEN_VXFORM_NOA(vupkhpx, 7, 13),
-GEN_VXFORM_NOA(vupklpx, 7, 15),
-GEN_VXFORM_NOA(vrefp, 5, 4),
-GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
-GEN_VXFORM_NOA(vexptefp, 5, 6),
-GEN_VXFORM_NOA(vlogefp, 5, 7),
-GEN_VXFORM_NOA(vrfim, 5, 11),
-GEN_VXFORM_NOA(vrfin, 5, 8),
-GEN_VXFORM_NOA(vrfip, 5, 10),
-GEN_VXFORM_NOA(vrfiz, 5, 9),
-
-#undef GEN_VXFORM_UIMM
-#define GEN_VXFORM_UIMM(name, opc2, opc3) \
- GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
-GEN_VXFORM_UIMM(vspltb, 6, 8),
-GEN_VXFORM_UIMM(vsplth, 6, 9),
-GEN_VXFORM_UIMM(vspltw, 6, 10),
-GEN_VXFORM_UIMM(vcfux, 5, 12),
-GEN_VXFORM_UIMM(vcfsx, 5, 13),
-GEN_VXFORM_UIMM(vctuxs, 5, 14),
-GEN_VXFORM_UIMM(vctsxs, 5, 15),
-
-#undef GEN_VAFORM_PAIRED
-#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
- GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
-GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
-GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
-GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
-GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
-GEN_VAFORM_PAIRED(vsel, vperm, 21),
-GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
-
-GEN_VXFORM_DUAL(vclzb, vpopcntb, 1, 28, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM_DUAL(vclzh, vpopcnth, 1, 29, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM_DUAL(vclzw, vpopcntw, 1, 30, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM_DUAL(vclzd, vpopcntd, 1, 31, PPC_NONE, PPC2_ALTIVEC_207),
-
-GEN_VXFORM_207(vbpermq, 6, 21),
-GEN_VXFORM_207(vgbbd, 6, 20),
-GEN_VXFORM_207(vpmsumb, 4, 16),
-GEN_VXFORM_207(vpmsumh, 4, 17),
-GEN_VXFORM_207(vpmsumw, 4, 18),
-GEN_VXFORM_207(vpmsumd, 4, 19),
-
-GEN_VXFORM_207(vsbox, 4, 23),
-
-GEN_VXFORM_DUAL(vcipher, vcipherlast, 4, 20, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM_DUAL(vncipher, vncipherlast, 4, 21, PPC_NONE, PPC2_ALTIVEC_207),
-
-GEN_VXFORM_207(vshasigmaw, 1, 26),
-GEN_VXFORM_207(vshasigmad, 1, 27),
-
-GEN_VXFORM_DUAL(vsldoi, vpermxor, 22, 0xFF, PPC_ALTIVEC, PPC_NONE),
+#include "translate/vmx-ops.c"
GEN_HANDLER_E(lxsdx, 0x1F, 0x0C, 0x12, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxsiwax, 0x1F, 0x0C, 0x02, 0, PPC_NONE, PPC2_VSX207),
diff --git a/target-ppc/translate/vmx-impl.c b/target-ppc/translate/vmx-impl.c
new file mode 100644
index 0000000000..110e19c88b
--- /dev/null
+++ b/target-ppc/translate/vmx-impl.c
@@ -0,0 +1,829 @@
+/*
+ * translate/vmx-impl.c
+ *
+ * Altivec/VMX translation
+ */
+
+/*** Altivec vector extension ***/
+/* Altivec register moves */
+
+static inline TCGv_ptr gen_avr_ptr(int reg)
+{
+ TCGv_ptr r = tcg_temp_new_ptr();
+ tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
+ return r;
+}
+
+#define GEN_VR_LDX(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+{ \
+ TCGv EA; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ gen_set_access_type(ctx, ACCESS_INT); \
+ EA = tcg_temp_new(); \
+ gen_addr_reg_index(ctx, EA); \
+ tcg_gen_andi_tl(EA, EA, ~0xf); \
+ /* We only need to swap high and low halves. gen_qemu_ld64 does necessary \
+ 64-bit byteswap already. */ \
+ if (ctx->le_mode) { \
+ gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
+ tcg_gen_addi_tl(EA, EA, 8); \
+ gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
+ } else { \
+ gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
+ tcg_gen_addi_tl(EA, EA, 8); \
+ gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
+ } \
+ tcg_temp_free(EA); \
+}
+
+#define GEN_VR_STX(name, opc2, opc3) \
+static void gen_st##name(DisasContext *ctx) \
+{ \
+ TCGv EA; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ gen_set_access_type(ctx, ACCESS_INT); \
+ EA = tcg_temp_new(); \
+ gen_addr_reg_index(ctx, EA); \
+ tcg_gen_andi_tl(EA, EA, ~0xf); \
+ /* We only need to swap high and low halves. gen_qemu_st64 does necessary \
+ 64-bit byteswap already. */ \
+ if (ctx->le_mode) { \
+ gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
+ tcg_gen_addi_tl(EA, EA, 8); \
+ gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
+ } else { \
+ gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
+ tcg_gen_addi_tl(EA, EA, 8); \
+ gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
+ } \
+ tcg_temp_free(EA); \
+}
+
+#define GEN_VR_LVE(name, opc2, opc3, size) \
+static void gen_lve##name(DisasContext *ctx) \
+ { \
+ TCGv EA; \
+ TCGv_ptr rs; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ gen_set_access_type(ctx, ACCESS_INT); \
+ EA = tcg_temp_new(); \
+ gen_addr_reg_index(ctx, EA); \
+ if (size > 1) { \
+ tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
+ } \
+ rs = gen_avr_ptr(rS(ctx->opcode)); \
+ gen_helper_lve##name(cpu_env, rs, EA); \
+ tcg_temp_free(EA); \
+ tcg_temp_free_ptr(rs); \
+ }
+
+#define GEN_VR_STVE(name, opc2, opc3, size) \
+static void gen_stve##name(DisasContext *ctx) \
+ { \
+ TCGv EA; \
+ TCGv_ptr rs; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ gen_set_access_type(ctx, ACCESS_INT); \
+ EA = tcg_temp_new(); \
+ gen_addr_reg_index(ctx, EA); \
+ if (size > 1) { \
+ tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
+ } \
+ rs = gen_avr_ptr(rS(ctx->opcode)); \
+ gen_helper_stve##name(cpu_env, rs, EA); \
+ tcg_temp_free(EA); \
+ tcg_temp_free_ptr(rs); \
+ }
+
+GEN_VR_LDX(lvx, 0x07, 0x03);
+/* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
+GEN_VR_LDX(lvxl, 0x07, 0x0B);
+
+GEN_VR_LVE(bx, 0x07, 0x00, 1);
+GEN_VR_LVE(hx, 0x07, 0x01, 2);
+GEN_VR_LVE(wx, 0x07, 0x02, 4);
+
+GEN_VR_STX(svx, 0x07, 0x07);
+/* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
+GEN_VR_STX(svxl, 0x07, 0x0F);
+
+GEN_VR_STVE(bx, 0x07, 0x04, 1);
+GEN_VR_STVE(hx, 0x07, 0x05, 2);
+GEN_VR_STVE(wx, 0x07, 0x06, 4);
+
+static void gen_lvsl(DisasContext *ctx)
+{
+ TCGv_ptr rd;
+ TCGv EA;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ EA = tcg_temp_new();
+ gen_addr_reg_index(ctx, EA);
+ rd = gen_avr_ptr(rD(ctx->opcode));
+ gen_helper_lvsl(rd, EA);
+ tcg_temp_free(EA);
+ tcg_temp_free_ptr(rd);
+}
+
+static void gen_lvsr(DisasContext *ctx)
+{
+ TCGv_ptr rd;
+ TCGv EA;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ EA = tcg_temp_new();
+ gen_addr_reg_index(ctx, EA);
+ rd = gen_avr_ptr(rD(ctx->opcode));
+ gen_helper_lvsr(rd, EA);
+ tcg_temp_free(EA);
+ tcg_temp_free_ptr(rd);
+}
+
+static void gen_mfvscr(DisasContext *ctx)
+{
+ TCGv_i32 t;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
+ t = tcg_temp_new_i32();
+ tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
+ tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
+ tcg_temp_free_i32(t);
+}
+
+static void gen_mtvscr(DisasContext *ctx)
+{
+ TCGv_ptr p;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ p = gen_avr_ptr(rB(ctx->opcode));
+ gen_helper_mtvscr(cpu_env, p);
+ tcg_temp_free_ptr(p);
+}
+
+/* Logical operations */
+#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+{ \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
+ tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
+}
+
+GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
+GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
+GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
+GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
+GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
+GEN_VX_LOGICAL(veqv, tcg_gen_eqv_i64, 2, 26);
+GEN_VX_LOGICAL(vnand, tcg_gen_nand_i64, 2, 22);
+GEN_VX_LOGICAL(vorc, tcg_gen_orc_i64, 2, 21);
+
+#define GEN_VXFORM(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+{ \
+ TCGv_ptr ra, rb, rd; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name (rd, ra, rb); \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+}
+
+#define GEN_VXFORM_ENV(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+{ \
+ TCGv_ptr ra, rb, rd; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name(cpu_env, rd, ra, rb); \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+}
+
+#define GEN_VXFORM3(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+{ \
+ TCGv_ptr ra, rb, rc, rd; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rc = gen_avr_ptr(rC(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name(rd, ra, rb, rc); \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rc); \
+ tcg_temp_free_ptr(rd); \
+}
+
+/*
+ * Support for Altivec instruction pairs that use bit 31 (Rc) as
+ * an opcode bit. In general, these pairs come from different
+ * versions of the ISA, so we must also support a pair of flags for
+ * each instruction.
+ */
+#define GEN_VXFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
+static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
+{ \
+ if ((Rc(ctx->opcode) == 0) && \
+ ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
+ gen_##name0(ctx); \
+ } else if ((Rc(ctx->opcode) == 1) && \
+ ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
+ gen_##name1(ctx); \
+ } else { \
+ gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
+ } \
+}
+
+GEN_VXFORM(vaddubm, 0, 0);
+GEN_VXFORM(vadduhm, 0, 1);
+GEN_VXFORM(vadduwm, 0, 2);
+GEN_VXFORM(vaddudm, 0, 3);
+GEN_VXFORM(vsububm, 0, 16);
+GEN_VXFORM(vsubuhm, 0, 17);
+GEN_VXFORM(vsubuwm, 0, 18);
+GEN_VXFORM(vsubudm, 0, 19);
+GEN_VXFORM(vmaxub, 1, 0);
+GEN_VXFORM(vmaxuh, 1, 1);
+GEN_VXFORM(vmaxuw, 1, 2);
+GEN_VXFORM(vmaxud, 1, 3);
+GEN_VXFORM(vmaxsb, 1, 4);
+GEN_VXFORM(vmaxsh, 1, 5);
+GEN_VXFORM(vmaxsw, 1, 6);
+GEN_VXFORM(vmaxsd, 1, 7);
+GEN_VXFORM(vminub, 1, 8);
+GEN_VXFORM(vminuh, 1, 9);
+GEN_VXFORM(vminuw, 1, 10);
+GEN_VXFORM(vminud, 1, 11);
+GEN_VXFORM(vminsb, 1, 12);
+GEN_VXFORM(vminsh, 1, 13);
+GEN_VXFORM(vminsw, 1, 14);
+GEN_VXFORM(vminsd, 1, 15);
+GEN_VXFORM(vavgub, 1, 16);
+GEN_VXFORM(vavguh, 1, 17);
+GEN_VXFORM(vavguw, 1, 18);
+GEN_VXFORM(vavgsb, 1, 20);
+GEN_VXFORM(vavgsh, 1, 21);
+GEN_VXFORM(vavgsw, 1, 22);
+GEN_VXFORM(vmrghb, 6, 0);
+GEN_VXFORM(vmrghh, 6, 1);
+GEN_VXFORM(vmrghw, 6, 2);
+GEN_VXFORM(vmrglb, 6, 4);
+GEN_VXFORM(vmrglh, 6, 5);
+GEN_VXFORM(vmrglw, 6, 6);
+
+static void gen_vmrgew(DisasContext *ctx)
+{
+ TCGv_i64 tmp;
+ int VT, VA, VB;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ VT = rD(ctx->opcode);
+ VA = rA(ctx->opcode);
+ VB = rB(ctx->opcode);
+ tmp = tcg_temp_new_i64();
+ tcg_gen_shri_i64(tmp, cpu_avrh[VB], 32);
+ tcg_gen_deposit_i64(cpu_avrh[VT], cpu_avrh[VA], tmp, 0, 32);
+ tcg_gen_shri_i64(tmp, cpu_avrl[VB], 32);
+ tcg_gen_deposit_i64(cpu_avrl[VT], cpu_avrl[VA], tmp, 0, 32);
+ tcg_temp_free_i64(tmp);
+}
+
+static void gen_vmrgow(DisasContext *ctx)
+{
+ int VT, VA, VB;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ VT = rD(ctx->opcode);
+ VA = rA(ctx->opcode);
+ VB = rB(ctx->opcode);
+
+ tcg_gen_deposit_i64(cpu_avrh[VT], cpu_avrh[VB], cpu_avrh[VA], 32, 32);
+ tcg_gen_deposit_i64(cpu_avrl[VT], cpu_avrl[VB], cpu_avrl[VA], 32, 32);
+}
+
+GEN_VXFORM(vmuloub, 4, 0);
+GEN_VXFORM(vmulouh, 4, 1);
+GEN_VXFORM(vmulouw, 4, 2);
+GEN_VXFORM(vmuluwm, 4, 2);
+GEN_VXFORM_DUAL(vmulouw, PPC_ALTIVEC, PPC_NONE,
+ vmuluwm, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM(vmulosb, 4, 4);
+GEN_VXFORM(vmulosh, 4, 5);
+GEN_VXFORM(vmulosw, 4, 6);
+GEN_VXFORM(vmuleub, 4, 8);
+GEN_VXFORM(vmuleuh, 4, 9);
+GEN_VXFORM(vmuleuw, 4, 10);
+GEN_VXFORM(vmulesb, 4, 12);
+GEN_VXFORM(vmulesh, 4, 13);
+GEN_VXFORM(vmulesw, 4, 14);
+GEN_VXFORM(vslb, 2, 4);
+GEN_VXFORM(vslh, 2, 5);
+GEN_VXFORM(vslw, 2, 6);
+GEN_VXFORM(vsld, 2, 23);
+GEN_VXFORM(vsrb, 2, 8);
+GEN_VXFORM(vsrh, 2, 9);
+GEN_VXFORM(vsrw, 2, 10);
+GEN_VXFORM(vsrd, 2, 27);
+GEN_VXFORM(vsrab, 2, 12);
+GEN_VXFORM(vsrah, 2, 13);
+GEN_VXFORM(vsraw, 2, 14);
+GEN_VXFORM(vsrad, 2, 15);
+GEN_VXFORM(vslo, 6, 16);
+GEN_VXFORM(vsro, 6, 17);
+GEN_VXFORM(vaddcuw, 0, 6);
+GEN_VXFORM(vsubcuw, 0, 22);
+GEN_VXFORM_ENV(vaddubs, 0, 8);
+GEN_VXFORM_ENV(vadduhs, 0, 9);
+GEN_VXFORM_ENV(vadduws, 0, 10);
+GEN_VXFORM_ENV(vaddsbs, 0, 12);
+GEN_VXFORM_ENV(vaddshs, 0, 13);
+GEN_VXFORM_ENV(vaddsws, 0, 14);
+GEN_VXFORM_ENV(vsububs, 0, 24);
+GEN_VXFORM_ENV(vsubuhs, 0, 25);
+GEN_VXFORM_ENV(vsubuws, 0, 26);
+GEN_VXFORM_ENV(vsubsbs, 0, 28);
+GEN_VXFORM_ENV(vsubshs, 0, 29);
+GEN_VXFORM_ENV(vsubsws, 0, 30);
+GEN_VXFORM(vadduqm, 0, 4);
+GEN_VXFORM(vaddcuq, 0, 5);
+GEN_VXFORM3(vaddeuqm, 30, 0);
+GEN_VXFORM3(vaddecuq, 30, 0);
+GEN_VXFORM_DUAL(vaddeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
+ vaddecuq, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM(vsubuqm, 0, 20);
+GEN_VXFORM(vsubcuq, 0, 21);
+GEN_VXFORM3(vsubeuqm, 31, 0);
+GEN_VXFORM3(vsubecuq, 31, 0);
+GEN_VXFORM_DUAL(vsubeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
+ vsubecuq, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM(vrlb, 2, 0);
+GEN_VXFORM(vrlh, 2, 1);
+GEN_VXFORM(vrlw, 2, 2);
+GEN_VXFORM(vrld, 2, 3);
+GEN_VXFORM(vsl, 2, 7);
+GEN_VXFORM(vsr, 2, 11);
+GEN_VXFORM_ENV(vpkuhum, 7, 0);
+GEN_VXFORM_ENV(vpkuwum, 7, 1);
+GEN_VXFORM_ENV(vpkudum, 7, 17);
+GEN_VXFORM_ENV(vpkuhus, 7, 2);
+GEN_VXFORM_ENV(vpkuwus, 7, 3);
+GEN_VXFORM_ENV(vpkudus, 7, 19);
+GEN_VXFORM_ENV(vpkshus, 7, 4);
+GEN_VXFORM_ENV(vpkswus, 7, 5);
+GEN_VXFORM_ENV(vpksdus, 7, 21);
+GEN_VXFORM_ENV(vpkshss, 7, 6);
+GEN_VXFORM_ENV(vpkswss, 7, 7);
+GEN_VXFORM_ENV(vpksdss, 7, 23);
+GEN_VXFORM(vpkpx, 7, 12);
+GEN_VXFORM_ENV(vsum4ubs, 4, 24);
+GEN_VXFORM_ENV(vsum4sbs, 4, 28);
+GEN_VXFORM_ENV(vsum4shs, 4, 25);
+GEN_VXFORM_ENV(vsum2sws, 4, 26);
+GEN_VXFORM_ENV(vsumsws, 4, 30);
+GEN_VXFORM_ENV(vaddfp, 5, 0);
+GEN_VXFORM_ENV(vsubfp, 5, 1);
+GEN_VXFORM_ENV(vmaxfp, 5, 16);
+GEN_VXFORM_ENV(vminfp, 5, 17);
+
+#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr ra, rb, rd; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##opname(cpu_env, rd, ra, rb); \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+#define GEN_VXRFORM(name, opc2, opc3) \
+ GEN_VXRFORM1(name, name, #name, opc2, opc3) \
+ GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
+
+/*
+ * Support for Altivec instructions that use bit 31 (Rc) as an opcode
+ * bit but also use bit 21 as an actual Rc bit. In general, these pairs
+ * come from different versions of the ISA, so we must also support a
+ * pair of flags for each instruction.
+ */
+#define GEN_VXRFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
+static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
+{ \
+ if ((Rc(ctx->opcode) == 0) && \
+ ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
+ if (Rc21(ctx->opcode) == 0) { \
+ gen_##name0(ctx); \
+ } else { \
+ gen_##name0##_(ctx); \
+ } \
+ } else if ((Rc(ctx->opcode) == 1) && \
+ ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
+ if (Rc21(ctx->opcode) == 0) { \
+ gen_##name1(ctx); \
+ } else { \
+ gen_##name1##_(ctx); \
+ } \
+ } else { \
+ gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
+ } \
+}
+
+GEN_VXRFORM(vcmpequb, 3, 0)
+GEN_VXRFORM(vcmpequh, 3, 1)
+GEN_VXRFORM(vcmpequw, 3, 2)
+GEN_VXRFORM(vcmpequd, 3, 3)
+GEN_VXRFORM(vcmpgtsb, 3, 12)
+GEN_VXRFORM(vcmpgtsh, 3, 13)
+GEN_VXRFORM(vcmpgtsw, 3, 14)
+GEN_VXRFORM(vcmpgtsd, 3, 15)
+GEN_VXRFORM(vcmpgtub, 3, 8)
+GEN_VXRFORM(vcmpgtuh, 3, 9)
+GEN_VXRFORM(vcmpgtuw, 3, 10)
+GEN_VXRFORM(vcmpgtud, 3, 11)
+GEN_VXRFORM(vcmpeqfp, 3, 3)
+GEN_VXRFORM(vcmpgefp, 3, 7)
+GEN_VXRFORM(vcmpgtfp, 3, 11)
+GEN_VXRFORM(vcmpbfp, 3, 15)
+
+GEN_VXRFORM_DUAL(vcmpeqfp, PPC_ALTIVEC, PPC_NONE, \
+ vcmpequd, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXRFORM_DUAL(vcmpbfp, PPC_ALTIVEC, PPC_NONE, \
+ vcmpgtsd, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXRFORM_DUAL(vcmpgtfp, PPC_ALTIVEC, PPC_NONE, \
+ vcmpgtud, PPC_NONE, PPC2_ALTIVEC_207)
+
+#define GEN_VXFORM_SIMM(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr rd; \
+ TCGv_i32 simm; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ simm = tcg_const_i32(SIMM5(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name (rd, simm); \
+ tcg_temp_free_i32(simm); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+GEN_VXFORM_SIMM(vspltisb, 6, 12);
+GEN_VXFORM_SIMM(vspltish, 6, 13);
+GEN_VXFORM_SIMM(vspltisw, 6, 14);
+
+#define GEN_VXFORM_NOA(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr rb, rd; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name (rd, rb); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+#define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr rb, rd; \
+ \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name(cpu_env, rd, rb); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+GEN_VXFORM_NOA(vupkhsb, 7, 8);
+GEN_VXFORM_NOA(vupkhsh, 7, 9);
+GEN_VXFORM_NOA(vupkhsw, 7, 25);
+GEN_VXFORM_NOA(vupklsb, 7, 10);
+GEN_VXFORM_NOA(vupklsh, 7, 11);
+GEN_VXFORM_NOA(vupklsw, 7, 27);
+GEN_VXFORM_NOA(vupkhpx, 7, 13);
+GEN_VXFORM_NOA(vupklpx, 7, 15);
+GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
+GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
+GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
+GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
+GEN_VXFORM_NOA_ENV(vrfim, 5, 11);
+GEN_VXFORM_NOA_ENV(vrfin, 5, 8);
+GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
+GEN_VXFORM_NOA_ENV(vrfiz, 5, 9);
+
+#define GEN_VXFORM_SIMM(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr rd; \
+ TCGv_i32 simm; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ simm = tcg_const_i32(SIMM5(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name (rd, simm); \
+ tcg_temp_free_i32(simm); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+#define GEN_VXFORM_UIMM(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr rb, rd; \
+ TCGv_i32 uimm; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name (rd, rb, uimm); \
+ tcg_temp_free_i32(uimm); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+#define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
+static void glue(gen_, name)(DisasContext *ctx) \
+ { \
+ TCGv_ptr rb, rd; \
+ TCGv_i32 uimm; \
+ \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ gen_helper_##name(cpu_env, rd, rb, uimm); \
+ tcg_temp_free_i32(uimm); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+GEN_VXFORM_UIMM(vspltb, 6, 8);
+GEN_VXFORM_UIMM(vsplth, 6, 9);
+GEN_VXFORM_UIMM(vspltw, 6, 10);
+GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
+GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
+GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
+GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
+
+static void gen_vsldoi(DisasContext *ctx)
+{
+ TCGv_ptr ra, rb, rd;
+ TCGv_i32 sh;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ ra = gen_avr_ptr(rA(ctx->opcode));
+ rb = gen_avr_ptr(rB(ctx->opcode));
+ rd = gen_avr_ptr(rD(ctx->opcode));
+ sh = tcg_const_i32(VSH(ctx->opcode));
+ gen_helper_vsldoi (rd, ra, rb, sh);
+ tcg_temp_free_ptr(ra);
+ tcg_temp_free_ptr(rb);
+ tcg_temp_free_ptr(rd);
+ tcg_temp_free_i32(sh);
+}
+
+#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
+static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
+ { \
+ TCGv_ptr ra, rb, rc, rd; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rc = gen_avr_ptr(rC(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ if (Rc(ctx->opcode)) { \
+ gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
+ } else { \
+ gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
+ } \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rc); \
+ tcg_temp_free_ptr(rd); \
+ }
+
+GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
+
+static void gen_vmladduhm(DisasContext *ctx)
+{
+ TCGv_ptr ra, rb, rc, rd;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ ra = gen_avr_ptr(rA(ctx->opcode));
+ rb = gen_avr_ptr(rB(ctx->opcode));
+ rc = gen_avr_ptr(rC(ctx->opcode));
+ rd = gen_avr_ptr(rD(ctx->opcode));
+ gen_helper_vmladduhm(rd, ra, rb, rc);
+ tcg_temp_free_ptr(ra);
+ tcg_temp_free_ptr(rb);
+ tcg_temp_free_ptr(rc);
+ tcg_temp_free_ptr(rd);
+}
+
+GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
+GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
+GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
+GEN_VAFORM_PAIRED(vsel, vperm, 21)
+GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
+
+GEN_VXFORM_NOA(vclzb, 1, 28)
+GEN_VXFORM_NOA(vclzh, 1, 29)
+GEN_VXFORM_NOA(vclzw, 1, 30)
+GEN_VXFORM_NOA(vclzd, 1, 31)
+GEN_VXFORM_NOA(vpopcntb, 1, 28)
+GEN_VXFORM_NOA(vpopcnth, 1, 29)
+GEN_VXFORM_NOA(vpopcntw, 1, 30)
+GEN_VXFORM_NOA(vpopcntd, 1, 31)
+GEN_VXFORM_DUAL(vclzb, PPC_NONE, PPC2_ALTIVEC_207, \
+ vpopcntb, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vclzh, PPC_NONE, PPC2_ALTIVEC_207, \
+ vpopcnth, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vclzw, PPC_NONE, PPC2_ALTIVEC_207, \
+ vpopcntw, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vclzd, PPC_NONE, PPC2_ALTIVEC_207, \
+ vpopcntd, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM(vbpermq, 6, 21);
+GEN_VXFORM_NOA(vgbbd, 6, 20);
+GEN_VXFORM(vpmsumb, 4, 16)
+GEN_VXFORM(vpmsumh, 4, 17)
+GEN_VXFORM(vpmsumw, 4, 18)
+GEN_VXFORM(vpmsumd, 4, 19)
+
+#define GEN_BCD(op) \
+static void gen_##op(DisasContext *ctx) \
+{ \
+ TCGv_ptr ra, rb, rd; \
+ TCGv_i32 ps; \
+ \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rb = gen_avr_ptr(rB(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ \
+ ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
+ \
+ gen_helper_##op(cpu_crf[6], rd, ra, rb, ps); \
+ \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rb); \
+ tcg_temp_free_ptr(rd); \
+ tcg_temp_free_i32(ps); \
+}
+
+GEN_BCD(bcdadd)
+GEN_BCD(bcdsub)
+
+GEN_VXFORM_DUAL(vsububm, PPC_ALTIVEC, PPC_NONE, \
+ bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vsububs, PPC_ALTIVEC, PPC_NONE, \
+ bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vsubuhm, PPC_ALTIVEC, PPC_NONE, \
+ bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vsubuhs, PPC_ALTIVEC, PPC_NONE, \
+ bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
+
+static void gen_vsbox(DisasContext *ctx)
+{
+ TCGv_ptr ra, rd;
+ if (unlikely(!ctx->altivec_enabled)) {
+ gen_exception(ctx, POWERPC_EXCP_VPU);
+ return;
+ }
+ ra = gen_avr_ptr(rA(ctx->opcode));
+ rd = gen_avr_ptr(rD(ctx->opcode));
+ gen_helper_vsbox(rd, ra);
+ tcg_temp_free_ptr(ra);
+ tcg_temp_free_ptr(rd);
+}
+
+GEN_VXFORM(vcipher, 4, 20)
+GEN_VXFORM(vcipherlast, 4, 20)
+GEN_VXFORM(vncipher, 4, 21)
+GEN_VXFORM(vncipherlast, 4, 21)
+
+GEN_VXFORM_DUAL(vcipher, PPC_NONE, PPC2_ALTIVEC_207,
+ vcipherlast, PPC_NONE, PPC2_ALTIVEC_207)
+GEN_VXFORM_DUAL(vncipher, PPC_NONE, PPC2_ALTIVEC_207,
+ vncipherlast, PPC_NONE, PPC2_ALTIVEC_207)
+
+#define VSHASIGMA(op) \
+static void gen_##op(DisasContext *ctx) \
+{ \
+ TCGv_ptr ra, rd; \
+ TCGv_i32 st_six; \
+ if (unlikely(!ctx->altivec_enabled)) { \
+ gen_exception(ctx, POWERPC_EXCP_VPU); \
+ return; \
+ } \
+ ra = gen_avr_ptr(rA(ctx->opcode)); \
+ rd = gen_avr_ptr(rD(ctx->opcode)); \
+ st_six = tcg_const_i32(rB(ctx->opcode)); \
+ gen_helper_##op(rd, ra, st_six); \
+ tcg_temp_free_ptr(ra); \
+ tcg_temp_free_ptr(rd); \
+ tcg_temp_free_i32(st_six); \
+}
+
+VSHASIGMA(vshasigmaw)
+VSHASIGMA(vshasigmad)
+
+GEN_VXFORM3(vpermxor, 22, 0xFF)
+GEN_VXFORM_DUAL(vsldoi, PPC_ALTIVEC, PPC_NONE,
+ vpermxor, PPC_NONE, PPC2_ALTIVEC_207)
+
+#undef GEN_VR_LDX
+#undef GEN_VR_STX
+#undef GEN_VR_LVE
+#undef GEN_VR_STVE
+
+#undef GEN_VX_LOGICAL
+#undef GEN_VX_LOGICAL_207
+#undef GEN_VXFORM
+#undef GEN_VXFORM_207
+#undef GEN_VXFORM_DUAL
+#undef GEN_VXRFORM_DUAL
+#undef GEN_VXRFORM1
+#undef GEN_VXRFORM
+#undef GEN_VXFORM_SIMM
+#undef GEN_VXFORM_NOA
+#undef GEN_VXFORM_UIMM
+#undef GEN_VAFORM_PAIRED
diff --git a/target-ppc/translate/vmx-ops.c b/target-ppc/translate/vmx-ops.c
new file mode 100644
index 0000000000..185dc71fb9
--- /dev/null
+++ b/target-ppc/translate/vmx-ops.c
@@ -0,0 +1,245 @@
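+/*
+ * translate/vmx-ops.c
+ *
+ * Altivec/VMX opcode table entries
+ */
+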
+#define GEN_VR_LDX(name, opc2, opc3) \
+GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
+#define GEN_VR_STX(name, opc2, opc3) \
+GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
+#define GEN_VR_LVE(name, opc2, opc3) \
+ GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
+#define GEN_VR_STVE(name, opc2, opc3) \
+ GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
+GEN_VR_LDX(lvx, 0x07, 0x03),
+GEN_VR_LDX(lvxl, 0x07, 0x0B),
+GEN_VR_LVE(bx, 0x07, 0x00),
+GEN_VR_LVE(hx, 0x07, 0x01),
+GEN_VR_LVE(wx, 0x07, 0x02),
+GEN_VR_STX(svx, 0x07, 0x07),
+GEN_VR_STX(svxl, 0x07, 0x0F),
+GEN_VR_STVE(bx, 0x07, 0x04),
+GEN_VR_STVE(hx, 0x07, 0x05),
+GEN_VR_STVE(wx, 0x07, 0x06),
+
+#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
+GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
+
+#define GEN_VX_LOGICAL_207(name, tcg_op, opc2, opc3) \
+GEN_HANDLER_E(name, 0x04, opc2, opc3, 0x00000000, PPC_NONE, PPC2_ALTIVEC_207)
+
+GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
+GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
+GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
+GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
+GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
+GEN_VX_LOGICAL_207(veqv, tcg_gen_eqv_i64, 2, 26),
+GEN_VX_LOGICAL_207(vnand, tcg_gen_nand_i64, 2, 22),
+GEN_VX_LOGICAL_207(vorc, tcg_gen_orc_i64, 2, 21),
+
+#define GEN_VXFORM(name, opc2, opc3) \
+GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
+
+#define GEN_VXFORM_207(name, opc2, opc3) \
+GEN_HANDLER_E(name, 0x04, opc2, opc3, 0x00000000, PPC_NONE, PPC2_ALTIVEC_207)
+
+#define GEN_VXFORM_DUAL(name0, name1, opc2, opc3, type0, type1) \
+GEN_HANDLER_E(name0##_##name1, 0x4, opc2, opc3, 0x00000000, type0, type1)
+
+#define GEN_VXRFORM_DUAL(name0, name1, opc2, opc3, tp0, tp1) \
+GEN_HANDLER_E(name0##_##name1, 0x4, opc2, opc3, 0x00000000, tp0, tp1), \
+GEN_HANDLER_E(name0##_##name1, 0x4, opc2, (opc3 | 0x10), 0x00000000, tp0, tp1),
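+/*
+ * The *_DUAL entries register one opcode slot shared by two instructions;
+ * the combined handler generated in vmx-impl.c picks the variant to emit
+ * based on the guest CPU's instruction-set flags.  GEN_VXRFORM_DUAL also
+ * registers the corresponding record-form slot at opc3 | 0x10.
+ */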
+
+GEN_VXFORM(vaddubm, 0, 0),
+GEN_VXFORM(vadduhm, 0, 1),
+GEN_VXFORM(vadduwm, 0, 2),
+GEN_VXFORM_207(vaddudm, 0, 3),
+GEN_VXFORM_DUAL(vsububm, bcdadd, 0, 16, PPC_ALTIVEC, PPC_NONE),
+GEN_VXFORM_DUAL(vsubuhm, bcdsub, 0, 17, PPC_ALTIVEC, PPC_NONE),
+GEN_VXFORM(vsubuwm, 0, 18),
+GEN_VXFORM_207(vsubudm, 0, 19),
+GEN_VXFORM(vmaxub, 1, 0),
+GEN_VXFORM(vmaxuh, 1, 1),
+GEN_VXFORM(vmaxuw, 1, 2),
+GEN_VXFORM_207(vmaxud, 1, 3),
+GEN_VXFORM(vmaxsb, 1, 4),
+GEN_VXFORM(vmaxsh, 1, 5),
+GEN_VXFORM(vmaxsw, 1, 6),
+GEN_VXFORM_207(vmaxsd, 1, 7),
+GEN_VXFORM(vminub, 1, 8),
+GEN_VXFORM(vminuh, 1, 9),
+GEN_VXFORM(vminuw, 1, 10),
+GEN_VXFORM_207(vminud, 1, 11),
+GEN_VXFORM(vminsb, 1, 12),
+GEN_VXFORM(vminsh, 1, 13),
+GEN_VXFORM(vminsw, 1, 14),
+GEN_VXFORM_207(vminsd, 1, 15),
+GEN_VXFORM(vavgub, 1, 16),
+GEN_VXFORM(vavguh, 1, 17),
+GEN_VXFORM(vavguw, 1, 18),
+GEN_VXFORM(vavgsb, 1, 20),
+GEN_VXFORM(vavgsh, 1, 21),
+GEN_VXFORM(vavgsw, 1, 22),
+GEN_VXFORM(vmrghb, 6, 0),
+GEN_VXFORM(vmrghh, 6, 1),
+GEN_VXFORM(vmrghw, 6, 2),
+GEN_VXFORM(vmrglb, 6, 4),
+GEN_VXFORM(vmrglh, 6, 5),
+GEN_VXFORM(vmrglw, 6, 6),
+GEN_VXFORM_207(vmrgew, 6, 30),
+GEN_VXFORM_207(vmrgow, 6, 26),
+GEN_VXFORM(vmuloub, 4, 0),
+GEN_VXFORM(vmulouh, 4, 1),
+GEN_VXFORM_DUAL(vmulouw, vmuluwm, 4, 2, PPC_ALTIVEC, PPC_NONE),
+GEN_VXFORM(vmulosb, 4, 4),
+GEN_VXFORM(vmulosh, 4, 5),
+GEN_VXFORM_207(vmulosw, 4, 6),
+GEN_VXFORM(vmuleub, 4, 8),
+GEN_VXFORM(vmuleuh, 4, 9),
+GEN_VXFORM_207(vmuleuw, 4, 10),
+GEN_VXFORM(vmulesb, 4, 12),
+GEN_VXFORM(vmulesh, 4, 13),
+GEN_VXFORM_207(vmulesw, 4, 14),
+GEN_VXFORM(vslb, 2, 4),
+GEN_VXFORM(vslh, 2, 5),
+GEN_VXFORM(vslw, 2, 6),
+GEN_VXFORM_207(vsld, 2, 23),
+GEN_VXFORM(vsrb, 2, 8),
+GEN_VXFORM(vsrh, 2, 9),
+GEN_VXFORM(vsrw, 2, 10),
+GEN_VXFORM_207(vsrd, 2, 27),
+GEN_VXFORM(vsrab, 2, 12),
+GEN_VXFORM(vsrah, 2, 13),
+GEN_VXFORM(vsraw, 2, 14),
+GEN_VXFORM_207(vsrad, 2, 15),
+GEN_VXFORM(vslo, 6, 16),
+GEN_VXFORM(vsro, 6, 17),
+GEN_VXFORM(vaddcuw, 0, 6),
+GEN_VXFORM(vsubcuw, 0, 22),
+GEN_VXFORM(vaddubs, 0, 8),
+GEN_VXFORM(vadduhs, 0, 9),
+GEN_VXFORM(vadduws, 0, 10),
+GEN_VXFORM(vaddsbs, 0, 12),
+GEN_VXFORM(vaddshs, 0, 13),
+GEN_VXFORM(vaddsws, 0, 14),
+GEN_VXFORM_DUAL(vsububs, bcdadd, 0, 24, PPC_ALTIVEC, PPC_NONE),
+GEN_VXFORM_DUAL(vsubuhs, bcdsub, 0, 25, PPC_ALTIVEC, PPC_NONE),
+GEN_VXFORM(vsubuws, 0, 26),
+GEN_VXFORM(vsubsbs, 0, 28),
+GEN_VXFORM(vsubshs, 0, 29),
+GEN_VXFORM(vsubsws, 0, 30),
+GEN_VXFORM_207(vadduqm, 0, 4),
+GEN_VXFORM_207(vaddcuq, 0, 5),
+GEN_VXFORM_DUAL(vaddeuqm, vaddecuq, 30, 0xFF, PPC_NONE, PPC2_ALTIVEC_207),
+GEN_VXFORM_207(vsubuqm, 0, 20),
+GEN_VXFORM_207(vsubcuq, 0, 21),
+GEN_VXFORM_DUAL(vsubeuqm, vsubecuq, 31, 0xFF, PPC_NONE, PPC2_ALTIVEC_207),
+GEN_VXFORM(vrlb, 2, 0),
+GEN_VXFORM(vrlh, 2, 1),
+GEN_VXFORM(vrlw, 2, 2),
+GEN_VXFORM_207(vrld, 2, 3),
+GEN_VXFORM(vsl, 2, 7),
+GEN_VXFORM(vsr, 2, 11),
+GEN_VXFORM(vpkuhum, 7, 0),
+GEN_VXFORM(vpkuwum, 7, 1),
+GEN_VXFORM_207(vpkudum, 7, 17),
+GEN_VXFORM(vpkuhus, 7, 2),
+GEN_VXFORM(vpkuwus, 7, 3),
+GEN_VXFORM_207(vpkudus, 7, 19),
+GEN_VXFORM(vpkshus, 7, 4),
+GEN_VXFORM(vpkswus, 7, 5),
+GEN_VXFORM_207(vpksdus, 7, 21),
+GEN_VXFORM(vpkshss, 7, 6),
+GEN_VXFORM(vpkswss, 7, 7),
+GEN_VXFORM_207(vpksdss, 7, 23),
+GEN_VXFORM(vpkpx, 7, 12),
+GEN_VXFORM(vsum4ubs, 4, 24),
+GEN_VXFORM(vsum4sbs, 4, 28),
+GEN_VXFORM(vsum4shs, 4, 25),
+GEN_VXFORM(vsum2sws, 4, 26),
+GEN_VXFORM(vsumsws, 4, 30),
+GEN_VXFORM(vaddfp, 5, 0),
+GEN_VXFORM(vsubfp, 5, 1),
+GEN_VXFORM(vmaxfp, 5, 16),
+GEN_VXFORM(vminfp, 5, 17),
+
+#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
+ GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
+#define GEN_VXRFORM(name, opc2, opc3) \
+ GEN_VXRFORM1(name, name, #name, opc2, opc3) \
+ GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
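+/*
+ * Each VXR compare is registered twice: the plain form and the record (".")
+ * form, the latter at opc3 | 0x10.
+ */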
+GEN_VXRFORM(vcmpequb, 3, 0)
+GEN_VXRFORM(vcmpequh, 3, 1)
+GEN_VXRFORM(vcmpequw, 3, 2)
+GEN_VXRFORM(vcmpgtsb, 3, 12)
+GEN_VXRFORM(vcmpgtsh, 3, 13)
+GEN_VXRFORM(vcmpgtsw, 3, 14)
+GEN_VXRFORM(vcmpgtub, 3, 8)
+GEN_VXRFORM(vcmpgtuh, 3, 9)
+GEN_VXRFORM(vcmpgtuw, 3, 10)
+GEN_VXRFORM_DUAL(vcmpeqfp, vcmpequd, 3, 3, PPC_ALTIVEC, PPC_NONE)
+GEN_VXRFORM(vcmpgefp, 3, 7)
+GEN_VXRFORM_DUAL(vcmpgtfp, vcmpgtud, 3, 11, PPC_ALTIVEC, PPC_NONE)
+GEN_VXRFORM_DUAL(vcmpbfp, vcmpgtsd, 3, 15, PPC_ALTIVEC, PPC_NONE)
+
+#define GEN_VXFORM_SIMM(name, opc2, opc3) \
+ GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
+GEN_VXFORM_SIMM(vspltisb, 6, 12),
+GEN_VXFORM_SIMM(vspltish, 6, 13),
+GEN_VXFORM_SIMM(vspltisw, 6, 14),
+
+#define GEN_VXFORM_NOA(name, opc2, opc3) \
+ GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
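+/*
+ * The 0x001f0000 invalid-bits mask requires the unused rA field to be zero
+ * for these one-operand forms.
+ */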
+GEN_VXFORM_NOA(vupkhsb, 7, 8),
+GEN_VXFORM_NOA(vupkhsh, 7, 9),
+GEN_VXFORM_207(vupkhsw, 7, 25),
+GEN_VXFORM_NOA(vupklsb, 7, 10),
+GEN_VXFORM_NOA(vupklsh, 7, 11),
+GEN_VXFORM_207(vupklsw, 7, 27),
+GEN_VXFORM_NOA(vupkhpx, 7, 13),
+GEN_VXFORM_NOA(vupklpx, 7, 15),
+GEN_VXFORM_NOA(vrefp, 5, 4),
+GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
+GEN_VXFORM_NOA(vexptefp, 5, 6),
+GEN_VXFORM_NOA(vlogefp, 5, 7),
+GEN_VXFORM_NOA(vrfim, 5, 11),
+GEN_VXFORM_NOA(vrfin, 5, 8),
+GEN_VXFORM_NOA(vrfip, 5, 10),
+GEN_VXFORM_NOA(vrfiz, 5, 9),
+
+#define GEN_VXFORM_UIMM(name, opc2, opc3) \
+ GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
+GEN_VXFORM_UIMM(vspltb, 6, 8),
+GEN_VXFORM_UIMM(vsplth, 6, 9),
+GEN_VXFORM_UIMM(vspltw, 6, 10),
+GEN_VXFORM_UIMM(vcfux, 5, 12),
+GEN_VXFORM_UIMM(vcfsx, 5, 13),
+GEN_VXFORM_UIMM(vctuxs, 5, 14),
+GEN_VXFORM_UIMM(vctsxs, 5, 15),
+
+#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
+ GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
+GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
+GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
+GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
+GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
+GEN_VAFORM_PAIRED(vsel, vperm, 21),
+GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
+
+GEN_VXFORM_DUAL(vclzb, vpopcntb, 1, 28, PPC_NONE, PPC2_ALTIVEC_207),
+GEN_VXFORM_DUAL(vclzh, vpopcnth, 1, 29, PPC_NONE, PPC2_ALTIVEC_207),
+GEN_VXFORM_DUAL(vclzw, vpopcntw, 1, 30, PPC_NONE, PPC2_ALTIVEC_207),
+GEN_VXFORM_DUAL(vclzd, vpopcntd, 1, 31, PPC_NONE, PPC2_ALTIVEC_207),
+
+GEN_VXFORM_207(vbpermq, 6, 21),
+GEN_VXFORM_207(vgbbd, 6, 20),
+GEN_VXFORM_207(vpmsumb, 4, 16),
+GEN_VXFORM_207(vpmsumh, 4, 17),
+GEN_VXFORM_207(vpmsumw, 4, 18),
+GEN_VXFORM_207(vpmsumd, 4, 19),
+
+GEN_VXFORM_207(vsbox, 4, 23),
+
+GEN_VXFORM_DUAL(vcipher, vcipherlast, 4, 20, PPC_NONE, PPC2_ALTIVEC_207),
+GEN_VXFORM_DUAL(vncipher, vncipherlast, 4, 21, PPC_NONE, PPC2_ALTIVEC_207),
+
+GEN_VXFORM_207(vshasigmaw, 1, 26),
+GEN_VXFORM_207(vshasigmad, 1, 27),
+
+GEN_VXFORM_DUAL(vsldoi, vpermxor, 22, 0xFF, PPC_ALTIVEC, PPC_NONE),