Diffstat (limited to 'tcg/s390x/tcg-target.c.inc')
 -rw-r--r--  tcg/s390x/tcg-target.c.inc | 102
 1 file changed, 102 insertions, 0 deletions
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index 21007f94ad..bab2d679c2 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -181,8 +181,18 @@ typedef enum S390Opcode {
     RRFa_MGRK   = 0xb9ec,
     RRFa_MSRKC  = 0xb9fd,
     RRFa_MSGRKC = 0xb9ed,
+    RRFa_NCRK   = 0xb9f5,
+    RRFa_NCGRK  = 0xb9e5,
+    RRFa_NNRK   = 0xb974,
+    RRFa_NNGRK  = 0xb964,
+    RRFa_NORK   = 0xb976,
+    RRFa_NOGRK  = 0xb966,
     RRFa_NRK    = 0xb9f4,
     RRFa_NGRK   = 0xb9e4,
+    RRFa_NXRK   = 0xb977,
+    RRFa_NXGRK  = 0xb967,
+    RRFa_OCRK   = 0xb975,
+    RRFa_OCGRK  = 0xb965,
     RRFa_ORK    = 0xb9f6,
     RRFa_OGRK   = 0xb9e6,
     RRFa_SRK    = 0xb9f9,
@@ -2007,9 +2017,46 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
 
+    case INDEX_op_andc_i32:
+        a0 = args[0], a1 = args[1], a2 = (uint32_t)args[2];
+        if (const_args[2]) {
+            tcg_out_mov(s, TCG_TYPE_I32, a0, a1);
+            tgen_andi(s, TCG_TYPE_I32, a0, (uint32_t)~a2);
+        } else {
+            tcg_out_insn(s, RRFa, NCRK, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_orc_i32:
+        a0 = args[0], a1 = args[1], a2 = (uint32_t)args[2];
+        if (const_args[2]) {
+            tcg_out_mov(s, TCG_TYPE_I32, a0, a1);
+            tgen_ori(s, a0, (uint32_t)~a2);
+        } else {
+            tcg_out_insn(s, RRFa, OCRK, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_eqv_i32:
+        a0 = args[0], a1 = args[1], a2 = (uint32_t)args[2];
+        if (const_args[2]) {
+            tcg_out_mov(s, TCG_TYPE_I32, a0, a1);
+            tcg_out_insn(s, RIL, XILF, a0, ~a2);
+        } else {
+            tcg_out_insn(s, RRFa, NXRK, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_nand_i32:
+        tcg_out_insn(s, RRFa, NNRK, args[0], args[1], args[2]);
+        break;
+    case INDEX_op_nor_i32:
+        tcg_out_insn(s, RRFa, NORK, args[0], args[1], args[2]);
+        break;
+
     case INDEX_op_neg_i32:
         tcg_out_insn(s, RR, LCR, args[0], args[1]);
         break;
+    case INDEX_op_not_i32:
+        tcg_out_insn(s, RRFa, NORK, args[0], args[1], args[1]);
+        break;
 
     case INDEX_op_mul_i32:
         a0 = args[0], a1 = args[1], a2 = (int32_t)args[2];
@@ -2265,9 +2312,46 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
 
+    case INDEX_op_andc_i64:
+        a0 = args[0], a1 = args[1], a2 = args[2];
+        if (const_args[2]) {
+            tcg_out_mov(s, TCG_TYPE_I64, a0, a1);
+            tgen_andi(s, TCG_TYPE_I64, a0, ~a2);
+        } else {
+            tcg_out_insn(s, RRFa, NCGRK, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_orc_i64:
+        a0 = args[0], a1 = args[1], a2 = args[2];
+        if (const_args[2]) {
+            tcg_out_mov(s, TCG_TYPE_I64, a0, a1);
+            tgen_ori(s, a0, ~a2);
+        } else {
+            tcg_out_insn(s, RRFa, OCGRK, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_eqv_i64:
+        a0 = args[0], a1 = args[1], a2 = args[2];
+        if (const_args[2]) {
+            tcg_out_mov(s, TCG_TYPE_I64, a0, a1);
+            tgen_xori(s, a0, ~a2);
+        } else {
+            tcg_out_insn(s, RRFa, NXGRK, a0, a1, a2);
+        }
+        break;
+    case INDEX_op_nand_i64:
+        tcg_out_insn(s, RRFa, NNGRK, args[0], args[1], args[2]);
+        break;
+    case INDEX_op_nor_i64:
+        tcg_out_insn(s, RRFa, NOGRK, args[0], args[1], args[2]);
+        break;
+
     case INDEX_op_neg_i64:
         tcg_out_insn(s, RRE, LCGR, args[0], args[1]);
         break;
+    case INDEX_op_not_i64:
+        tcg_out_insn(s, RRFa, NOGRK, args[0], args[1], args[1]);
+        break;
     case INDEX_op_bswap64_i64:
         tcg_out_insn(s, RRE, LRVGR, args[0], args[1]);
         break;
@@ -2945,6 +3029,22 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
     case INDEX_op_xor_i64:
         return C_O1_I2(r, r, rK);
 
+    case INDEX_op_andc_i32:
+    case INDEX_op_orc_i32:
+    case INDEX_op_eqv_i32:
+        return C_O1_I2(r, r, ri);
+    case INDEX_op_andc_i64:
+        return C_O1_I2(r, r, rKR);
+    case INDEX_op_orc_i64:
+    case INDEX_op_eqv_i64:
+        return C_O1_I2(r, r, rNK);
+
+    case INDEX_op_nand_i32:
+    case INDEX_op_nand_i64:
+    case INDEX_op_nor_i32:
+    case INDEX_op_nor_i64:
+        return C_O1_I2(r, r, r);
+
     case INDEX_op_mul_i32:
         return (HAVE_FACILITY(MISC_INSN_EXT2)
                 ? C_O1_I2(r, r, ri)
@@ -2970,6 +3070,8 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
     case INDEX_op_bswap64_i64:
     case INDEX_op_neg_i32:
     case INDEX_op_neg_i64:
+    case INDEX_op_not_i32:
+    case INDEX_op_not_i64:
     case INDEX_op_ext8s_i32:
     case INDEX_op_ext8s_i64:
     case INDEX_op_ext8u_i32:
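
Note (not part of the patch): the constant-operand paths above fold the complement into the immediate, relying on the identities andc(a, c) = a & ~c, orc(a, c) = a | ~c and eqv(a, c) = a ^ ~c, while not_i32/not_i64 are emitted as nor of a register with itself. A minimal standalone C sketch checking those identities; the ref_* helpers are illustrative only and do not exist in QEMU:

#include <assert.h>
#include <stdint.h>

/* Reference semantics of the TCG ops handled by the patch. */
static uint32_t ref_andc(uint32_t a, uint32_t b) { return a & ~b; }   /* NCRK */
static uint32_t ref_orc(uint32_t a, uint32_t b)  { return a | ~b; }   /* OCRK */
static uint32_t ref_eqv(uint32_t a, uint32_t b)  { return ~(a ^ b); } /* NXRK */
static uint32_t ref_nor(uint32_t a, uint32_t b)  { return ~(a | b); } /* NORK */

int main(void)
{
    const uint32_t a = 0x12345678u, c = 0xdeadbeefu;

    /* Constant paths fold the complement into the immediate (~a2). */
    assert(ref_andc(a, c) == (a & ~c)); /* tgen_andi(s, TCG_TYPE_I32, a0, ~a2) */
    assert(ref_orc(a, c)  == (a | ~c)); /* tgen_ori(s, a0, ~a2) */
    assert(ref_eqv(a, c)  == (a ^ ~c)); /* XILF / tgen_xori with ~a2 */

    /* not is emitted as NORK/NOGRK of a register with itself. */
    assert(ref_nor(a, a) == (uint32_t)~a);

    return 0;
}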