author     Richard Henderson <rth@twiddle.net>  2016-10-14 17:35:42 -0500
committer  Richard Henderson <rth@twiddle.net>  2017-01-10 08:06:10 -0800
commit     f49f1ae73be521f6118ea6c2c2ac3ab5e6c5918f (patch)
tree       1572820e2337322cc52be720937de206cbd35bf8 /target/alpha
parent     752b1be94757de906b9c24ebc8f5e6aa54b96b23 (diff)
target-alpha: Use deposit and extract ops
Signed-off-by: Richard Henderson <rth@twiddle.net>
Diffstat (limited to 'target/alpha')
-rw-r--r--  target/alpha/translate.c | 67
1 file changed, 42 insertions, 25 deletions
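
For reference, the patch replaces the literal-operand "shift, then gen_zapnoti" sequences with single tcg_gen_extract_i64 / tcg_gen_deposit_z_i64 ops. The stand-alone C sketch below is not part of the commit: byte_zap(), extract64_model() and deposit_z64_model() are local stand-ins for gen_zapnoti() and the documented semantics of the two TCG ops, used only to check that the old and new computations agree for EXTxL- and INSxL-style literal cases.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Keep only the bytes selected by byte_mask (models gen_zapnoti, i.e. ZAPNOT). */
static uint64_t byte_zap(uint64_t x, uint8_t byte_mask)
{
    uint64_t keep = 0;
    for (int i = 0; i < 8; i++) {
        if (byte_mask & (1u << i)) {
            keep |= 0xffull << (i * 8);
        }
    }
    return x & keep;
}

/* Bits [pos, pos+len) of x, zero-extended; assumes 0 < len && pos + len <= 64,
   the documented constraint of tcg_gen_extract_i64.  */
static uint64_t extract64_model(uint64_t x, int pos, int len)
{
    return (x >> pos) & (~0ull >> (64 - len));
}

/* Low len bits of x deposited at pos over zero, as tcg_gen_deposit_z_i64 does.  */
static uint64_t deposit_z64_model(uint64_t x, int pos, int len)
{
    return (x & (~0ull >> (64 - len))) << pos;
}

int main(void)
{
    uint64_t va = 0x0123456789abcdefull;

    /* EXTWL-style literal case: old = zapnot(va >> pos), new = one extract.  */
    for (int lit = 0; lit < 8; lit++) {
        uint8_t byte_mask = 0x03;        /* two low bytes, as for EXTWL */
        int pos = (lit & 7) * 8;
        int len = 2 * 8;                 /* cto32(byte_mask) * 8 */
        if (pos + len > 64) {
            len = 64 - pos;
        }
        assert(byte_zap(va >> pos, byte_mask) == extract64_model(va, pos, len));
    }

    /* INSLL-style literal case: old = zapnot(va) << pos, new = one deposit.  */
    for (int lit = 0; lit < 8; lit++) {
        uint8_t byte_mask = 0x0f;        /* four low bytes, as for INSLL */
        int pos = (lit & 7) * 8;
        int len = 4 * 8;                 /* cto32(byte_mask) * 8 */
        if (pos + len > 64) {
            len = 64 - pos;
        }
        assert((byte_zap(va, byte_mask) << pos) == deposit_z64_model(va, pos, len));
    }

    puts("shift+zapnot and extract/deposit sequences agree");
    return 0;
}
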
diff --git a/target/alpha/translate.c b/target/alpha/translate.c
index 114927b751..5ac227700a 100644
--- a/target/alpha/translate.c
+++ b/target/alpha/translate.c
@@ -949,7 +949,13 @@ static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
uint8_t lit, uint8_t byte_mask)
{
if (islit) {
- tcg_gen_shli_i64(vc, va, (64 - lit * 8) & 0x3f);
+ int pos = (64 - lit * 8) & 0x3f;
+ int len = cto32(byte_mask) * 8;
+ if (pos < len) {
+ tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
+ } else {
+ tcg_gen_movi_i64(vc, 0);
+ }
} else {
TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
@@ -966,38 +972,44 @@ static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
uint8_t lit, uint8_t byte_mask)
{
if (islit) {
- tcg_gen_shri_i64(vc, va, (lit & 7) * 8);
+ int pos = (lit & 7) * 8;
+ int len = cto32(byte_mask) * 8;
+ if (pos + len >= 64) {
+ len = 64 - pos;
+ }
+ tcg_gen_extract_i64(vc, va, pos, len);
} else {
TCGv tmp = tcg_temp_new();
tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
tcg_gen_shli_i64(tmp, tmp, 3);
tcg_gen_shr_i64(vc, va, tmp);
tcg_temp_free(tmp);
+ gen_zapnoti(vc, vc, byte_mask);
}
- gen_zapnoti(vc, vc, byte_mask);
}
/* INSWH, INSLH, INSQH */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
uint8_t lit, uint8_t byte_mask)
{
- TCGv tmp = tcg_temp_new();
-
- /* The instruction description has us left-shift the byte mask and extract
- bits <15:8> and apply that zap at the end. This is equivalent to simply
- performing the zap first and shifting afterward. */
- gen_zapnoti(tmp, va, byte_mask);
-
if (islit) {
- lit &= 7;
- if (unlikely(lit == 0)) {
- tcg_gen_movi_i64(vc, 0);
+ int pos = 64 - (lit & 7) * 8;
+ int len = cto32(byte_mask) * 8;
+ if (pos < len) {
+ tcg_gen_extract_i64(vc, va, pos, len - pos);
} else {
- tcg_gen_shri_i64(vc, tmp, 64 - lit * 8);
+ tcg_gen_movi_i64(vc, 0);
}
} else {
+ TCGv tmp = tcg_temp_new();
TCGv shift = tcg_temp_new();
+ /* The instruction description has us left-shift the byte mask
+ and extract bits <15:8> and apply that zap at the end. This
+ is equivalent to simply performing the zap first and shifting
+ afterward. */
+ gen_zapnoti(tmp, va, byte_mask);
+
/* If (B & 7) == 0, we need to shift by 64 and leave a zero. Do this
portably by splitting the shift into two parts: shift_count-1 and 1.
Arrange for the -1 by using ones-complement instead of
@@ -1010,32 +1022,37 @@ static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
tcg_gen_shr_i64(vc, tmp, shift);
tcg_gen_shri_i64(vc, vc, 1);
tcg_temp_free(shift);
+ tcg_temp_free(tmp);
}
- tcg_temp_free(tmp);
}
/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
uint8_t lit, uint8_t byte_mask)
{
- TCGv tmp = tcg_temp_new();
-
- /* The instruction description has us left-shift the byte mask
- the same number of byte slots as the data and apply the zap
- at the end. This is equivalent to simply performing the zap
- first and shifting afterward. */
- gen_zapnoti(tmp, va, byte_mask);
-
if (islit) {
- tcg_gen_shli_i64(vc, tmp, (lit & 7) * 8);
+ int pos = (lit & 7) * 8;
+ int len = cto32(byte_mask) * 8;
+ if (pos + len > 64) {
+ len = 64 - pos;
+ }
+ tcg_gen_deposit_z_i64(vc, va, pos, len);
} else {
+ TCGv tmp = tcg_temp_new();
TCGv shift = tcg_temp_new();
+
+ /* The instruction description has us left-shift the byte mask
+ the same number of byte slots as the data and apply the zap
+ at the end. This is equivalent to simply performing the zap
+ first and shifting afterward. */
+ gen_zapnoti(tmp, va, byte_mask);
+
tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
tcg_gen_shli_i64(shift, shift, 3);
tcg_gen_shl_i64(vc, tmp, shift);
tcg_temp_free(shift);
+ tcg_temp_free(tmp);
}
- tcg_temp_free(tmp);
}
/* MSKWH, MSKLH, MSKQH */