4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
/* NOTE(review): elided paste — each line still carries its original line
   number and interior lines (#else, #endif, other #includes) are missing.
   Kept byte-identical; comments only.  */
/* Compile-time debug switches.  LOG_DISAS expands to qemu_log only when
   verbose disas logging is enabled; otherwise it is a no-op statement.  */
21 /* #define DEBUG_INLINE_BRANCHES */
22 #define S390X_DEBUG_DISAS
23 /* #define S390X_DEBUG_DISAS_VERBOSE */
25 #ifdef S390X_DEBUG_DISAS_VERBOSE
26 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
28 # define LOG_DISAS(...) do { } while (0)
32 #include "disas/disas.h"
35 #include "qemu/host-utils.h"
37 /* global register indexes */
/* Pointer to the CPU env structure, fixed in a host register (TCG_AREG0). */
38 static TCGv_ptr cpu_env;
40 #include "exec/gen-icount.h"
/* NOTE(review): elided paste — the bodies of struct DisasContext and the
   DisasCompare type are only partially visible here.  Kept byte-identical. */
46 /* Information that (most) every instruction needs to manipulate. */
47 typedef struct DisasContext DisasContext;
48 typedef struct DisasInsn DisasInsn;
49 typedef struct DisasFields DisasFields;
/* Visible DisasContext members: the TB being translated, the current
   decode-table entry, and the single-step flag.  Other members (pc,
   next_pc, cc_op, fields) are in the elided lines.  */
52 struct TranslationBlock *tb;
53 const DisasInsn *insn;
57 bool singlestep_enabled;
61 /* Information carried about a condition to be evaluated. */
/* Operand pair for the comparison, 64- or 32-bit — part of a union in the
   (elided) DisasCompare definition.  */
68 struct { TCGv_i64 a, b; } s64;
69 struct { TCGv_i32 a, b; } s32;
/* Forward declaration: materializes the pending cc into the cc_op global. */
75 static void gen_op_calc_cc(DisasContext *s);
/* Per-cc-op counters of branches translated inline vs. via helper call. */
77 #ifdef DEBUG_INLINE_BRANCHES
78 static uint64_t inline_branch_hit[CC_OP_MAX];
79 static uint64_t inline_branch_miss[CC_OP_MAX];
82 static inline void debug_insn(uint64_t insn)
84 LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
87 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
89 if (!(s->tb->flags & FLAG_MASK_64)) {
90 if (s->tb->flags & FLAG_MASK_32) {
91 return pc | 0x80000000;
/* NOTE(review): elided paste — locals, braces, and the FPSC/flag lines are
   missing.  Kept byte-identical; comments only.
   Dumps the architected CPU state (PSW, cc, GPRs, FPRs, control registers,
   and optionally the inline-branch statistics) to stream F.  */
97 void cpu_dump_state(CPUS390XState *env, FILE *f, fprintf_function cpu_fprintf,
/* cc_op > 3 means the cc is still symbolic (a pending computation); print
   its name rather than a numeric value.  */
102 if (env->cc_op > 3) {
103 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
104 env->psw.mask, env->psw.addr, cc_name(env->cc_op));
106 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
107 env->psw.mask, env->psw.addr, env->cc_op);
/* 16 general registers, presumably 4 per output line (line-break logic
   is in the elided lines).  */
110 for (i = 0; i < 16; i++) {
111 cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
113 cpu_fprintf(f, "\n");
119 for (i = 0; i < 16; i++) {
120 cpu_fprintf(f, "F%02d=%016" PRIx64, i, env->fregs[i].ll);
122 cpu_fprintf(f, "\n");
/* Control registers exist only in system emulation.  */
128 #ifndef CONFIG_USER_ONLY
129 for (i = 0; i < 16; i++) {
130 cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
132 cpu_fprintf(f, "\n");
139 #ifdef DEBUG_INLINE_BRANCHES
140 for (i = 0; i < CC_OP_MAX; i++) {
/* FIXME(review): "%10ld" formats uint64_t counters — should be PRIu64
   for portability to 32-bit hosts.  */
141 cpu_fprintf(f, " %15s = %10ld\t%10ld\n", cc_name(i),
142 inline_branch_miss[i], inline_branch_hit[i]);
146 cpu_fprintf(f, "\n");
/* TCG global variables mapping the architectural state kept in CPUS390XState;
   initialized in s390x_translate_init below.  (Elided paste, kept as-is.)  */
/* Program-status word: current address and mask.  */
149 static TCGv_i64 psw_addr;
150 static TCGv_i64 psw_mask;
/* Condition-code machinery: the symbolic operation plus up to three
   saved operands for deferred cc computation.  */
152 static TCGv_i32 cc_op;
153 static TCGv_i64 cc_src;
154 static TCGv_i64 cc_dst;
155 static TCGv_i64 cc_vr;
/* Names "r0".."r15" and "f0".."f15" for the 32 register globals below.  */
157 static char cpu_reg_names[32][4];
158 static TCGv_i64 regs[16];
159 static TCGv_i64 fregs[16];
/* Per-opcode record of cc_op used when restoring state from a TB.  */
161 static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
/* NOTE(review): elided paste — the name-string arguments and closing braces
   are in the missing lines.  Kept byte-identical; comments only.
   One-time registration of all TCG globals against their offsets in
   CPUS390XState.  */
163 void s390x_translate_init(void)
167 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
168 psw_addr = tcg_global_mem_new_i64(TCG_AREG0,
169 offsetof(CPUS390XState, psw.addr),
171 psw_mask = tcg_global_mem_new_i64(TCG_AREG0,
172 offsetof(CPUS390XState, psw.mask),
175 cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUS390XState, cc_op),
177 cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_src),
179 cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_dst),
181 cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_vr),
/* General registers r0..r15, named via cpu_reg_names[0..15].  */
184 for (i = 0; i < 16; i++) {
185 snprintf(cpu_reg_names[i], sizeof(cpu_reg_names[0]), "r%d", i);
186 regs[i] = tcg_global_mem_new(TCG_AREG0,
187 offsetof(CPUS390XState, regs[i]),
/* Floating-point registers f0..f15, named via cpu_reg_names[16..31].  */
191 for (i = 0; i < 16; i++) {
192 snprintf(cpu_reg_names[i + 16], sizeof(cpu_reg_names[0]), "f%d", i);
193 fregs[i] = tcg_global_mem_new(TCG_AREG0,
194 offsetof(CPUS390XState, fregs[i].d),
195 cpu_reg_names[i + 16]);
203 static inline TCGv_i64 load_reg(int reg)
205 TCGv_i64 r = tcg_temp_new_i64();
206 tcg_gen_mov_i64(r, regs[reg]);
210 static inline TCGv_i64 load_freg(int reg)
212 TCGv_i64 r = tcg_temp_new_i64();
213 tcg_gen_mov_i64(r, fregs[reg]);
217 static inline TCGv_i32 load_freg32(int reg)
219 TCGv_i32 r = tcg_temp_new_i32();
220 #if HOST_LONG_BITS == 32
221 tcg_gen_mov_i32(r, TCGV_HIGH(fregs[reg]));
223 tcg_gen_shri_i64(MAKE_TCGV_I64(GET_TCGV_I32(r)), fregs[reg], 32);
228 static inline TCGv_i64 load_freg32_i64(int reg)
230 TCGv_i64 r = tcg_temp_new_i64();
231 tcg_gen_shri_i64(r, fregs[reg], 32);
235 static inline TCGv_i32 load_reg32(int reg)
237 TCGv_i32 r = tcg_temp_new_i32();
238 tcg_gen_trunc_i64_i32(r, regs[reg]);
242 static inline TCGv_i64 load_reg32_i64(int reg)
244 TCGv_i64 r = tcg_temp_new_i64();
245 tcg_gen_ext32s_i64(r, regs[reg]);
249 static inline void store_reg(int reg, TCGv_i64 v)
251 tcg_gen_mov_i64(regs[reg], v);
254 static inline void store_freg(int reg, TCGv_i64 v)
256 tcg_gen_mov_i64(fregs[reg], v);
259 static inline void store_reg32(int reg, TCGv_i32 v)
261 /* 32 bit register writes keep the upper half */
262 #if HOST_LONG_BITS == 32
263 tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
265 tcg_gen_deposit_i64(regs[reg], regs[reg],
266 MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 32);
270 static inline void store_reg32_i64(int reg, TCGv_i64 v)
272 /* 32 bit register writes keep the upper half */
273 tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
276 static inline void store_reg32h_i64(int reg, TCGv_i64 v)
278 tcg_gen_deposit_i64(regs[reg], regs[reg], v, 32, 32);
281 static inline void store_freg32(int reg, TCGv_i32 v)
283 /* 32 bit register writes keep the lower half */
284 #if HOST_LONG_BITS == 32
285 tcg_gen_mov_i32(TCGV_HIGH(fregs[reg]), v);
287 tcg_gen_deposit_i64(fregs[reg], fregs[reg],
288 MAKE_TCGV_I64(GET_TCGV_I32(v)), 32, 32);
292 static inline void store_freg32_i64(int reg, TCGv_i64 v)
294 tcg_gen_deposit_i64(fregs[reg], fregs[reg], v, 32, 32);
297 static inline void return_low128(TCGv_i64 dest)
299 tcg_gen_ld_i64(dest, cpu_env, offsetof(CPUS390XState, retxl));
302 static inline void update_psw_addr(DisasContext *s)
305 tcg_gen_movi_i64(psw_addr, s->pc);
308 static inline void potential_page_fault(DisasContext *s)
310 #ifndef CONFIG_USER_ONLY
316 static inline uint64_t ld_code2(CPUS390XState *env, uint64_t pc)
318 return (uint64_t)cpu_lduw_code(env, pc);
321 static inline uint64_t ld_code4(CPUS390XState *env, uint64_t pc)
323 return (uint64_t)(uint32_t)cpu_ldl_code(env, pc);
326 static inline uint64_t ld_code6(CPUS390XState *env, uint64_t pc)
328 return (ld_code2(env, pc) << 32) | ld_code4(env, pc + 2);
331 static inline int get_mem_index(DisasContext *s)
333 switch (s->tb->flags & FLAG_MASK_ASC) {
334 case PSW_ASC_PRIMARY >> 32:
336 case PSW_ASC_SECONDARY >> 32:
338 case PSW_ASC_HOME >> 32:
346 static void gen_exception(int excp)
348 TCGv_i32 tmp = tcg_const_i32(excp);
349 gen_helper_exception(cpu_env, tmp);
350 tcg_temp_free_i32(tmp);
353 static void gen_program_exception(DisasContext *s, int code)
357 /* Remember what pgm exeption this was. */
358 tmp = tcg_const_i32(code);
359 tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_code));
360 tcg_temp_free_i32(tmp);
362 tmp = tcg_const_i32(s->next_pc - s->pc);
363 tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_ilen));
364 tcg_temp_free_i32(tmp);
366 /* Advance past instruction. */
373 /* Trigger exception. */
374 gen_exception(EXCP_PGM);
377 s->is_jmp = DISAS_EXCP;
380 static inline void gen_illegal_opcode(DisasContext *s)
382 gen_program_exception(s, PGM_SPECIFICATION);
385 static inline void check_privileged(DisasContext *s)
387 if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
388 gen_program_exception(s, PGM_PRIVILEGED);
392 static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
396 /* 31-bitify the immediate part; register contents are dealt with below */
397 if (!(s->tb->flags & FLAG_MASK_64)) {
403 tmp = tcg_const_i64(d2);
404 tcg_gen_add_i64(tmp, tmp, regs[x2]);
409 tcg_gen_add_i64(tmp, tmp, regs[b2]);
413 tmp = tcg_const_i64(d2);
414 tcg_gen_add_i64(tmp, tmp, regs[b2]);
419 tmp = tcg_const_i64(d2);
422 /* 31-bit mode mask if there are values loaded from registers */
423 if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
424 tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
430 static inline void gen_op_movi_cc(DisasContext *s, uint32_t val)
432 s->cc_op = CC_OP_CONST0 + val;
435 static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
437 tcg_gen_discard_i64(cc_src);
438 tcg_gen_mov_i64(cc_dst, dst);
439 tcg_gen_discard_i64(cc_vr);
443 static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
445 tcg_gen_discard_i64(cc_src);
446 tcg_gen_extu_i32_i64(cc_dst, dst);
447 tcg_gen_discard_i64(cc_vr);
451 static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
454 tcg_gen_mov_i64(cc_src, src);
455 tcg_gen_mov_i64(cc_dst, dst);
456 tcg_gen_discard_i64(cc_vr);
460 static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
463 tcg_gen_extu_i32_i64(cc_src, src);
464 tcg_gen_extu_i32_i64(cc_dst, dst);
465 tcg_gen_discard_i64(cc_vr);
469 static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
470 TCGv_i64 dst, TCGv_i64 vr)
472 tcg_gen_mov_i64(cc_src, src);
473 tcg_gen_mov_i64(cc_dst, dst);
474 tcg_gen_mov_i64(cc_vr, vr);
478 static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
480 gen_op_update1_cc_i32(s, CC_OP_NZ, val);
483 static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
485 gen_op_update1_cc_i64(s, CC_OP_NZ, val);
488 static inline void gen_set_cc_nz_f32(DisasContext *s, TCGv_i64 val)
490 gen_op_update1_cc_i64(s, CC_OP_NZ_F32, val);
493 static inline void gen_set_cc_nz_f64(DisasContext *s, TCGv_i64 val)
495 gen_op_update1_cc_i64(s, CC_OP_NZ_F64, val);
498 static inline void gen_set_cc_nz_f128(DisasContext *s, TCGv_i64 vh, TCGv_i64 vl)
500 gen_op_update2_cc_i64(s, CC_OP_NZ_F128, vh, vl);
503 static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
506 gen_op_update2_cc_i32(s, cond, v1, v2);
509 static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
512 gen_op_update2_cc_i64(s, cond, v1, v2);
515 static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
517 cmp_32(s, v1, v2, CC_OP_LTGT_32);
520 static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
522 cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
525 static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
527 /* XXX optimize for the constant? put it in s? */
528 TCGv_i32 tmp = tcg_const_i32(v2);
529 cmp_32(s, v1, tmp, CC_OP_LTGT_32);
530 tcg_temp_free_i32(tmp);
533 static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
535 TCGv_i32 tmp = tcg_const_i32(v2);
536 cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
537 tcg_temp_free_i32(tmp);
540 static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
542 cmp_64(s, v1, v2, CC_OP_LTGT_64);
545 static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
547 cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
550 static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
552 TCGv_i64 tmp = tcg_const_i64(v2);
554 tcg_temp_free_i64(tmp);
557 static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
559 TCGv_i64 tmp = tcg_const_i64(v2);
561 tcg_temp_free_i64(tmp);
564 static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
566 gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
569 static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
571 gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
574 /* CC value is in env->cc_op */
575 static inline void set_cc_static(DisasContext *s)
577 tcg_gen_discard_i64(cc_src);
578 tcg_gen_discard_i64(cc_dst);
579 tcg_gen_discard_i64(cc_vr);
580 s->cc_op = CC_OP_STATIC;
583 static inline void gen_op_set_cc_op(DisasContext *s)
585 if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
586 tcg_gen_movi_i32(cc_op, s->cc_op);
590 static inline void gen_update_cc_op(DisasContext *s)
/* NOTE(review): elided paste — the switch skeleton, case labels, and
   closing braces are largely missing.  Kept byte-identical; comments only.
   Materializes the pending cc described by s->cc_op into the cc_op global,
   dispatching on operand arity: const, 0-arg, 1-arg (cc_dst), 2-arg
   (cc_src/cc_dst), or 3-arg (cc_src/cc_dst/cc_vr).  */
595 /* calculates cc into cc_op */
596 static void gen_op_calc_cc(DisasContext *s)
598 TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
599 TCGv_i64 dummy = tcg_const_i64(0);
606 /* s->cc_op is the cc value */
607 tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
610 /* env->cc_op already is the cc value */
/* 1-argument forms: only cc_dst is meaningful, pad with dummy.  */
625 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, dummy, cc_dst, dummy);
630 case CC_OP_LTUGTU_32:
631 case CC_OP_LTUGTU_64:
/* 2-argument forms: cc_src and cc_dst.  */
638 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, dummy);
/* 3-argument forms: cc_src, cc_dst, and cc_vr.  */
653 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, cc_vr);
656 /* unknown operation - assume 3 arguments and cc_op in env */
657 gen_helper_calc_cc(cc_op, cpu_env, cc_op, cc_src, cc_dst, cc_vr);
663 tcg_temp_free_i32(local_cc_op);
664 tcg_temp_free_i64(dummy);
666 /* We now have cc in cc_op as constant */
670 static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
674 *r1 = (insn >> 4) & 0xf;
678 static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
679 int *x2, int *b2, int *d2)
683 *r1 = (insn >> 20) & 0xf;
684 *x2 = (insn >> 16) & 0xf;
685 *b2 = (insn >> 12) & 0xf;
688 return get_address(s, *x2, *b2, *d2);
691 static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
696 *r1 = (insn >> 20) & 0xf;
698 *r3 = (insn >> 16) & 0xf;
699 *b2 = (insn >> 12) & 0xf;
703 static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
708 *i2 = (insn >> 16) & 0xff;
709 *b1 = (insn >> 12) & 0xf;
712 return get_address(s, 0, *b1, *d1);
715 static int use_goto_tb(DisasContext *s, uint64_t dest)
717 /* NOTE: we handle the case where the TB spans two pages here */
718 return (((dest & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK)
719 || (dest & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))
720 && !s->singlestep_enabled
721 && !(s->tb->cflags & CF_LAST_IO));
724 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
728 if (use_goto_tb(s, pc)) {
729 tcg_gen_goto_tb(tb_num);
730 tcg_gen_movi_i64(psw_addr, pc);
731 tcg_gen_exit_tb((tcg_target_long)s->tb + tb_num);
733 /* jump to another page: currently not optimized */
734 tcg_gen_movi_i64(psw_addr, pc);
739 static inline void account_noninline_branch(DisasContext *s, int cc_op)
741 #ifdef DEBUG_INLINE_BRANCHES
742 inline_branch_miss[cc_op]++;
746 static inline void account_inline_branch(DisasContext *s, int cc_op)
748 #ifdef DEBUG_INLINE_BRANCHES
749 inline_branch_hit[cc_op]++;
753 /* Table of mask values to comparison codes, given a comparison as input.
754 For a true comparison CC=3 will never be set, but we treat this
755 conservatively for possible use when CC=3 indicates overflow. */
756 static const TCGCond ltgt_cond[16] = {
757 TCG_COND_NEVER, TCG_COND_NEVER, /* | | | x */
758 TCG_COND_GT, TCG_COND_NEVER, /* | | GT | x */
759 TCG_COND_LT, TCG_COND_NEVER, /* | LT | | x */
760 TCG_COND_NE, TCG_COND_NEVER, /* | LT | GT | x */
761 TCG_COND_EQ, TCG_COND_NEVER, /* EQ | | | x */
762 TCG_COND_GE, TCG_COND_NEVER, /* EQ | | GT | x */
763 TCG_COND_LE, TCG_COND_NEVER, /* EQ | LT | | x */
764 TCG_COND_ALWAYS, TCG_COND_ALWAYS, /* EQ | LT | GT | x */
767 /* Table of mask values to comparison codes, given a logic op as input.
768 For such, only CC=0 and CC=1 should be possible. */
769 static const TCGCond nz_cond[16] = {
771 TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER,
773 TCG_COND_NE, TCG_COND_NE, TCG_COND_NE, TCG_COND_NE,
775 TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ,
776 /* EQ | NE | x | x */
777 TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS,
/* NOTE(review): elided paste — the switch skeleton, many case labels,
   braces, and assignments are missing.  Kept byte-identical; comments only
   (mojibake "||Â" in two comments repaired — comment text only).  */
780 /* Interpret MASK in terms of S->CC_OP, and fill in C with all the
781 details required to generate a TCG comparison. */
782 static void disas_jcc(DisasContext *s, DisasCompare *c, uint32_t mask)
785 enum cc_op old_cc_op = s->cc_op;
/* Trivial masks: branch always (15) or never (0) — no operands needed,
   mark both slots "global" so free_compare won't free anything.  */
787 if (mask == 15 || mask == 0) {
788 c->cond = (mask ? TCG_COND_ALWAYS : TCG_COND_NEVER);
791 c->g1 = c->g2 = true;
796 /* Find the TCG condition for the mask + cc op. */
/* Signed comparison pending: table lookup; fall back to the generic
   path when the mask has no inline equivalent.  */
802 cond = ltgt_cond[mask];
803 if (cond == TCG_COND_NEVER) {
806 account_inline_branch(s, old_cc_op);
/* Unsigned comparisons use the same table made unsigned.  */
809 case CC_OP_LTUGTU_32:
810 case CC_OP_LTUGTU_64:
811 cond = tcg_unsigned_cond(ltgt_cond[mask]);
812 if (cond == TCG_COND_NEVER) {
815 account_inline_branch(s, old_cc_op);
/* Logic-op result: only zero/non-zero is distinguishable.  */
819 cond = nz_cond[mask];
820 if (cond == TCG_COND_NEVER) {
823 account_inline_branch(s, old_cc_op);
838 account_inline_branch(s, old_cc_op);
853 account_inline_branch(s, old_cc_op);
/* Test-under-mask style: cc 0 = no bit found, cc 2-ish = bit found.  */
857 switch (mask & 0xa) {
858 case 8: /* src == 0 -> no one bit found */
861 case 2: /* src != 0 -> one bit found */
867 account_inline_branch(s, old_cc_op);
872 /* Calculate cc value. */
877 /* Jump based on CC. We'll load up the real cond below;
878 the assignment here merely avoids a compiler warning. */
879 account_noninline_branch(s, old_cc_op);
880 old_cc_op = CC_OP_STATIC;
881 cond = TCG_COND_NEVER;
885 /* Load up the arguments of the comparison. */
887 c->g1 = c->g2 = false;
/* 32-bit single-operand forms compare cc_dst against zero.  */
891 c->u.s32.a = tcg_temp_new_i32();
892 tcg_gen_trunc_i64_i32(c->u.s32.a, cc_dst);
893 c->u.s32.b = tcg_const_i32(0);
/* 32-bit two-operand forms compare truncated cc_src vs cc_dst.  */
896 case CC_OP_LTUGTU_32:
898 c->u.s32.a = tcg_temp_new_i32();
899 tcg_gen_trunc_i64_i32(c->u.s32.a, cc_src);
900 c->u.s32.b = tcg_temp_new_i32();
901 tcg_gen_trunc_i64_i32(c->u.s32.b, cc_dst);
908 c->u.s64.b = tcg_const_i64(0);
/* 64-bit two-operand forms can use the cc globals directly (g1/g2).  */
912 case CC_OP_LTUGTU_64:
915 c->g1 = c->g2 = true;
/* Test-under-mask: AND the operands, compare against zero.  */
921 c->u.s64.a = tcg_temp_new_i64();
922 c->u.s64.b = tcg_const_i64(0);
923 tcg_gen_and_i64(c->u.s64.a, cc_src, cc_dst);
/* Static cc (0..3) in the cc_op global: pick a direct comparison for
   each of the 14 non-trivial masks where one exists.  */
931 case 0x8 | 0x4 | 0x2: /* cc != 3 */
933 c->u.s32.b = tcg_const_i32(3);
935 case 0x8 | 0x4 | 0x1: /* cc != 2 */
937 c->u.s32.b = tcg_const_i32(2);
939 case 0x8 | 0x2 | 0x1: /* cc != 1 */
941 c->u.s32.b = tcg_const_i32(1);
943 case 0x8 | 0x2: /* cc == 0 || cc == 2 => (cc & 1) == 0 */
946 c->u.s32.a = tcg_temp_new_i32();
947 c->u.s32.b = tcg_const_i32(0);
948 tcg_gen_andi_i32(c->u.s32.a, cc_op, 1);
950 case 0x8 | 0x4: /* cc < 2 */
952 c->u.s32.b = tcg_const_i32(2);
954 case 0x8: /* cc == 0 */
956 c->u.s32.b = tcg_const_i32(0);
958 case 0x4 | 0x2 | 0x1: /* cc != 0 */
960 c->u.s32.b = tcg_const_i32(0);
962 case 0x4 | 0x1: /* cc == 1 || cc == 3 => (cc & 1) != 0 */
965 c->u.s32.a = tcg_temp_new_i32();
966 c->u.s32.b = tcg_const_i32(0);
967 tcg_gen_andi_i32(c->u.s32.a, cc_op, 1);
969 case 0x4: /* cc == 1 */
971 c->u.s32.b = tcg_const_i32(1);
973 case 0x2 | 0x1: /* cc > 1 */
975 c->u.s32.b = tcg_const_i32(1);
977 case 0x2: /* cc == 2 */
979 c->u.s32.b = tcg_const_i32(2);
981 case 0x1: /* cc == 3 */
983 c->u.s32.b = tcg_const_i32(3);
986 /* CC is masked by something else: (8 >> cc) & mask. */
989 c->u.s32.a = tcg_const_i32(8);
990 c->u.s32.b = tcg_const_i32(0);
991 tcg_gen_shr_i32(c->u.s32.a, c->u.s32.a, cc_op);
992 tcg_gen_andi_i32(c->u.s32.a, c->u.s32.a, mask);
1003 static void free_compare(DisasCompare *c)
1007 tcg_temp_free_i64(c->u.s64.a);
1009 tcg_temp_free_i32(c->u.s32.a);
1014 tcg_temp_free_i64(c->u.s64.b);
1016 tcg_temp_free_i32(c->u.s32.b);
/* NOTE(review): elided paste — the dispatch switches of both functions are
   almost entirely missing.  Kept byte-identical; comments only.
   disas_b2: legacy decoder for the 0xb2xx two-byte-opcode group; anything
   not handled falls through to an illegal-opcode exception.  */
1021 static void disas_b2(CPUS390XState *env, DisasContext *s, int op,
1024 #ifndef CONFIG_USER_ONLY
1028 LOG_DISAS("illegal b2 operation 0x%x\n", op);
1029 gen_illegal_opcode(s);
1030 #ifndef CONFIG_USER_ONLY
/* Legacy top-level decoder: reads the first opcode byte at s->pc and
   dispatches; only the 0xb2 path is visible in this fragment.  */
1036 static void disas_s390_insn(CPUS390XState *env, DisasContext *s)
1042 opc = cpu_ldub_code(env, s->pc);
1043 LOG_DISAS("opc 0x%x\n", opc);
1047 insn = ld_code4(env, s->pc);
1048 op = (insn >> 16) & 0xff;
1049 disas_b2(env, s, op, insn);
/* Unknown opcodes are logged as unimplemented and raise an exception.  */
1052 qemu_log_mask(LOG_UNIMP, "unimplemented opcode 0x%x\n", opc);
1053 gen_illegal_opcode(s);
/* NOTE(review): elided paste — enum bodies, #undef lines, and closing
   braces are missing.  Kept byte-identical; comments only.  */
1058 /* ====================================================================== */
1059 /* Define the insn format enumeration. */
/* Each Fn(NAME, ...) expands to an enumerator FMT_NAME; the field lists
   X1..X5 are ignored at this point and reused later for format_info.  */
1060 #define F0(N) FMT_##N,
1061 #define F1(N, X1) F0(N)
1062 #define F2(N, X1, X2) F0(N)
1063 #define F3(N, X1, X2, X3) F0(N)
1064 #define F4(N, X1, X2, X3, X4) F0(N)
1065 #define F5(N, X1, X2, X3, X4, X5) F0(N)
1068 #include "insn-format.def"
1078 /* Define a structure to hold the decoded fields. We'll store each inside
1079 an array indexed by an enum. In order to conserve memory, we'll arrange
1080 for fields that do not exist at the same time to overlap, thus the "C"
1081 for compact. For checking purposes there is an "O" for original index
1082 as well that will be applied to availability bitmaps. */
1084 enum DisasFieldIndexO {
1107 enum DisasFieldIndexC {
/* presentC/presentO are bitmaps of which compact/original fields were
   decoded; the storage array itself is in the elided lines.  */
1138 struct DisasFields {
1141 unsigned presentC:16;
1142 unsigned int presentO;
1146 /* This is the way fields are to be accessed out of DisasFields. */
1147 #define have_field(S, F) have_field1((S), FLD_O_##F)
1148 #define get_field(S, F) get_field1((S), FLD_O_##F, FLD_C_##F)
1150 static bool have_field1(const DisasFields *f, enum DisasFieldIndexO c)
1152 return (f->presentO >> c) & 1;
1155 static int get_field1(const DisasFields *f, enum DisasFieldIndexO o,
1156 enum DisasFieldIndexC c)
1158 assert(have_field1(f, o));
/* NOTE(review): elided paste — struct/array closers and the #undef lines
   are missing.  Kept byte-identical; comments only.  */
1162 /* Describe the layout of each field in each format. */
/* One field: bit position (elided first member), SIZE in bits, TYPE
   (0 = unsigned, 1 = signed immediate, 2 = displacement-scaled per the R/I/
   BD... macros below), and the compact/original indices it maps to.  */
1163 typedef struct DisasField {
1165 unsigned int size:8;
1166 unsigned int type:2;
1167 unsigned int indexC:6;
1168 enum DisasFieldIndexO indexO:8;
1171 typedef struct DisasFormatInfo {
1172 DisasField op[NUM_C_FIELD];
/* Field-layout builder macros: R/M are 4-bit register/mask fields, BD a
   base+12-bit displacement pair, BXD adds an index register, BDL/BXDL use
   20-bit (long) displacements, I/L are immediates and lengths.  */
1175 #define R(N, B) { B, 4, 0, FLD_C_r##N, FLD_O_r##N }
1176 #define M(N, B) { B, 4, 0, FLD_C_m##N, FLD_O_m##N }
1177 #define BD(N, BB, BD) { BB, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
1178 { BD, 12, 0, FLD_C_d##N, FLD_O_d##N }
1179 #define BXD(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
1180 { 12, 4, 0, FLD_C_x##N, FLD_O_x##N }, \
1181 { 20, 12, 0, FLD_C_d##N, FLD_O_d##N }
1182 #define BDL(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
1183 { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
1184 #define BXDL(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
1185 { 12, 4, 0, FLD_C_x##N, FLD_O_x##N }, \
1186 { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
1187 #define I(N, B, S) { B, S, 1, FLD_C_i##N, FLD_O_i##N }
1188 #define L(N, B, S) { B, S, 0, FLD_C_l##N, FLD_O_l##N }
/* Re-expand Fn() to initializers so insn-format.def now builds the table. */
1190 #define F0(N) { { } },
1191 #define F1(N, X1) { { X1 } },
1192 #define F2(N, X1, X2) { { X1, X2 } },
1193 #define F3(N, X1, X2, X3) { { X1, X2, X3 } },
1194 #define F4(N, X1, X2, X3, X4) { { X1, X2, X3, X4 } },
1195 #define F5(N, X1, X2, X3, X4, X5) { { X1, X2, X3, X4, X5 } },
1197 static const DisasFormatInfo format_info[] = {
1198 #include "insn-format.def"
/* NOTE(review): elided paste — the DisasOps struct header, the ExitStatus
   enumerators, and most of struct DisasInsn are missing.  Kept
   byte-identical; comments only.  */
1216 /* Generally, we'll extract operands into this structures, operate upon
1217 them, and store them back. See the "in1", "in2", "prep", "wout" sets
1218 of routines below for more details. */
/* g_* flags mark the corresponding TCGv as a global that must not be
   freed after the operation (mirrors DisasCompare's g1/g2).  */
1220 bool g_out, g_out2, g_in1, g_in2;
1221 TCGv_i64 out, out2, in1, in2;
1225 /* Return values from translate_one, indicating the state of the TB. */
1227 /* Continue the TB. */
1229 /* We have emitted one or more goto_tb. No fixup required. */
1231 /* We are not using a goto_tb (for whatever reason), but have updated
1232 the PC (for whatever reason), so there's no need to do it again on
1235 /* We are exiting the TB, but have neither emitted a goto_tb, nor
1236 updated the PC for the next instruction to be executed. */
1238 /* We are ending the TB with a noreturn function call, e.g. longjmp.
1239 No following code will be executed. */
/* Facility bits gating each instruction: an insn is only decoded when the
   guest CPU model provides the corresponding facility.  */
1243 typedef enum DisasFacility {
1244 FAC_Z, /* zarch (default) */
1245 FAC_CASS, /* compare and swap and store */
1246 FAC_CASS2, /* compare and swap and store 2*/
1247 FAC_DFP, /* decimal floating point */
1248 FAC_DFPR, /* decimal floating point rounding */
1249 FAC_DO, /* distinct operands */
1250 FAC_EE, /* execute extensions */
1251 FAC_EI, /* extended immediate */
1252 FAC_FPE, /* floating point extension */
1253 FAC_FPSSH, /* floating point support sign handling */
1254 FAC_FPRGR, /* FPR-GR transfer */
1255 FAC_GIE, /* general instructions extension */
1256 FAC_HFP_MA, /* HFP multiply-and-add/subtract */
1257 FAC_HW, /* high-word */
1258 FAC_IEEEE_SIM, /* IEEE exception simulation */
1259 FAC_LOC, /* load/store on condition */
1260 FAC_LD, /* long displacement */
1261 FAC_PC, /* population count */
1262 FAC_SCF, /* store clock fast */
1263 FAC_SFLE, /* store facility list extended */
/* Per-instruction decode-table entry: facility gate plus the pipeline of
   helper callbacks (load inputs, prepare output, do the op, write back,
   compute cc).  */
1269 DisasFacility fac:6;
1273 void (*help_in1)(DisasContext *, DisasFields *, DisasOps *);
1274 void (*help_in2)(DisasContext *, DisasFields *, DisasOps *);
1275 void (*help_prep)(DisasContext *, DisasFields *, DisasOps *);
1276 void (*help_wout)(DisasContext *, DisasFields *, DisasOps *);
1277 void (*help_cout)(DisasContext *, DisasOps *);
1278 ExitStatus (*help_op)(DisasContext *, DisasOps *);
1283 /* ====================================================================== */
1284 /* Miscelaneous helpers, used by several operations. */
1286 static void help_l2_shift(DisasContext *s, DisasFields *f,
1287 DisasOps *o, int mask)
1289 int b2 = get_field(f, b2);
1290 int d2 = get_field(f, d2);
1293 o->in2 = tcg_const_i64(d2 & mask);
1295 o->in2 = get_address(s, 0, b2, d2);
1296 tcg_gen_andi_i64(o->in2, o->in2, mask);
1300 static ExitStatus help_goto_direct(DisasContext *s, uint64_t dest)
1302 if (dest == s->next_pc) {
1305 if (use_goto_tb(s, dest)) {
1306 gen_update_cc_op(s);
1308 tcg_gen_movi_i64(psw_addr, dest);
1309 tcg_gen_exit_tb((tcg_target_long)s->tb);
1310 return EXIT_GOTO_TB;
1312 tcg_gen_movi_i64(psw_addr, dest);
1313 return EXIT_PC_UPDATED;
/* NOTE(review): elided paste — labels, gotos, the is_64 branches' partner
   arms, and the cleanup tail are missing.  Kept byte-identical.
   Central conditional-branch emitter: C is the comparison from disas_jcc,
   the target is either PC-relative immediate IMM (is_imm) or register
   value CDEST.  Chooses between two goto_tb exits, one goto_tb + indirect,
   or a movcond-selected PSW update.  */
1317 static ExitStatus help_branch(DisasContext *s, DisasCompare *c,
1318 bool is_imm, int imm, TCGv_i64 cdest)
1321 uint64_t dest = s->pc + 2 * imm;
1324 /* Take care of the special cases first. */
1325 if (c->cond == TCG_COND_NEVER) {
1330 if (dest == s->next_pc) {
1331 /* Branch to next. */
1335 if (c->cond == TCG_COND_ALWAYS) {
1336 ret = help_goto_direct(s, dest);
/* Register branch with r0 means "no branch" on s390x.  */
1340 if (TCGV_IS_UNUSED_I64(cdest)) {
1341 /* E.g. bcr %r0 -> no branch. */
1345 if (c->cond == TCG_COND_ALWAYS) {
1346 tcg_gen_mov_i64(psw_addr, cdest);
1347 ret = EXIT_PC_UPDATED;
/* General case 1: both fallthrough and target are in range of goto_tb.  */
1352 if (use_goto_tb(s, s->next_pc)) {
1353 if (is_imm && use_goto_tb(s, dest)) {
1354 /* Both exits can use goto_tb. */
1355 gen_update_cc_op(s);
1357 lab = gen_new_label();
1359 tcg_gen_brcond_i64(c->cond, c->u.s64.a, c->u.s64.b, lab);
1361 tcg_gen_brcond_i32(c->cond, c->u.s32.a, c->u.s32.b, lab);
1364 /* Branch not taken. */
1366 tcg_gen_movi_i64(psw_addr, s->next_pc);
1367 tcg_gen_exit_tb((tcg_target_long)s->tb + 0);
1372 tcg_gen_movi_i64(psw_addr, dest);
1373 tcg_gen_exit_tb((tcg_target_long)s->tb + 1);
1377 /* Fallthru can use goto_tb, but taken branch cannot. */
1378 /* Store taken branch destination before the brcond. This
1379 avoids having to allocate a new local temp to hold it.
1380 We'll overwrite this in the not taken case anyway. */
1382 tcg_gen_mov_i64(psw_addr, cdest);
1385 lab = gen_new_label();
1387 tcg_gen_brcond_i64(c->cond, c->u.s64.a, c->u.s64.b, lab);
1389 tcg_gen_brcond_i32(c->cond, c->u.s32.a, c->u.s32.b, lab);
1392 /* Branch not taken. */
1393 gen_update_cc_op(s);
1395 tcg_gen_movi_i64(psw_addr, s->next_pc);
1396 tcg_gen_exit_tb((tcg_target_long)s->tb + 0);
1400 tcg_gen_movi_i64(psw_addr, dest);
1402 ret = EXIT_PC_UPDATED;
1405 /* Fallthru cannot use goto_tb. This by itself is vanishingly rare.
1406 Most commonly we're single-stepping or some other condition that
1407 disables all use of goto_tb. Just update the PC and exit. */
1409 TCGv_i64 next = tcg_const_i64(s->next_pc);
1411 cdest = tcg_const_i64(dest);
/* 64-bit compare: select taken/fallthru address directly.  */
1415 tcg_gen_movcond_i64(c->cond, psw_addr, c->u.s64.a, c->u.s64.b,
/* 32-bit compare: widen the setcond result, then movcond on != 0.  */
1418 TCGv_i32 t0 = tcg_temp_new_i32();
1419 TCGv_i64 t1 = tcg_temp_new_i64();
1420 TCGv_i64 z = tcg_const_i64(0);
1421 tcg_gen_setcond_i32(c->cond, t0, c->u.s32.a, c->u.s32.b);
1422 tcg_gen_extu_i32_i64(t1, t0);
1423 tcg_temp_free_i32(t0);
1424 tcg_gen_movcond_i64(TCG_COND_NE, psw_addr, t1, z, cdest, next);
1425 tcg_temp_free_i64(t1);
1426 tcg_temp_free_i64(z);
1430 tcg_temp_free_i64(cdest);
1432 tcg_temp_free_i64(next);
1434 ret = EXIT_PC_UPDATED;
1442 /* ====================================================================== */
1443 /* The operations. These perform the bulk of the work for any insn,
1444 usually after the operands have been loaded and output initialized. */
1446 static ExitStatus op_abs(DisasContext *s, DisasOps *o)
1448 gen_helper_abs_i64(o->out, o->in2);
1452 static ExitStatus op_absf32(DisasContext *s, DisasOps *o)
1454 tcg_gen_andi_i64(o->out, o->in2, 0x7fffffffull);
1458 static ExitStatus op_absf64(DisasContext *s, DisasOps *o)
1460 tcg_gen_andi_i64(o->out, o->in2, 0x7fffffffffffffffull);
1464 static ExitStatus op_absf128(DisasContext *s, DisasOps *o)
1466 tcg_gen_andi_i64(o->out, o->in1, 0x7fffffffffffffffull);
1467 tcg_gen_mov_i64(o->out2, o->in2);
1471 static ExitStatus op_add(DisasContext *s, DisasOps *o)
1473 tcg_gen_add_i64(o->out, o->in1, o->in2);
1477 static ExitStatus op_addc(DisasContext *s, DisasOps *o)
1481 tcg_gen_add_i64(o->out, o->in1, o->in2);
1483 /* XXX possible optimization point */
1485 cc = tcg_temp_new_i64();
1486 tcg_gen_extu_i32_i64(cc, cc_op);
1487 tcg_gen_shri_i64(cc, cc, 1);
1489 tcg_gen_add_i64(o->out, o->out, cc);
1490 tcg_temp_free_i64(cc);
1494 static ExitStatus op_aeb(DisasContext *s, DisasOps *o)
1496 gen_helper_aeb(o->out, cpu_env, o->in1, o->in2);
1500 static ExitStatus op_adb(DisasContext *s, DisasOps *o)
1502 gen_helper_adb(o->out, cpu_env, o->in1, o->in2);
1506 static ExitStatus op_axb(DisasContext *s, DisasOps *o)
1508 gen_helper_axb(o->out, cpu_env, o->out, o->out2, o->in1, o->in2);
1509 return_low128(o->out2);
1513 static ExitStatus op_and(DisasContext *s, DisasOps *o)
1515 tcg_gen_and_i64(o->out, o->in1, o->in2);
1519 static ExitStatus op_andi(DisasContext *s, DisasOps *o)
1521 int shift = s->insn->data & 0xff;
1522 int size = s->insn->data >> 8;
1523 uint64_t mask = ((1ull << size) - 1) << shift;
1526 tcg_gen_shli_i64(o->in2, o->in2, shift);
1527 tcg_gen_ori_i64(o->in2, o->in2, ~mask);
1528 tcg_gen_and_i64(o->out, o->in1, o->in2);
1530 /* Produce the CC from only the bits manipulated. */
1531 tcg_gen_andi_i64(cc_dst, o->out, mask);
1532 set_cc_nz_u64(s, cc_dst);
1536 static ExitStatus op_bas(DisasContext *s, DisasOps *o)
1538 tcg_gen_movi_i64(o->out, pc_to_link_info(s, s->next_pc));
1539 if (!TCGV_IS_UNUSED_I64(o->in2)) {
1540 tcg_gen_mov_i64(psw_addr, o->in2);
1541 return EXIT_PC_UPDATED;
1547 static ExitStatus op_basi(DisasContext *s, DisasOps *o)
1549 tcg_gen_movi_i64(o->out, pc_to_link_info(s, s->next_pc));
1550 return help_goto_direct(s, s->pc + 2 * get_field(s->fields, i2));
1553 static ExitStatus op_bc(DisasContext *s, DisasOps *o)
1555 int m1 = get_field(s->fields, m1);
1556 bool is_imm = have_field(s->fields, i2);
1557 int imm = is_imm ? get_field(s->fields, i2) : 0;
1560 disas_jcc(s, &c, m1);
1561 return help_branch(s, &c, is_imm, imm, o->in2);
1564 static ExitStatus op_bct32(DisasContext *s, DisasOps *o)
1566 int r1 = get_field(s->fields, r1);
1567 bool is_imm = have_field(s->fields, i2);
1568 int imm = is_imm ? get_field(s->fields, i2) : 0;
1572 c.cond = TCG_COND_NE;
1577 t = tcg_temp_new_i64();
1578 tcg_gen_subi_i64(t, regs[r1], 1);
1579 store_reg32_i64(r1, t);
1580 c.u.s32.a = tcg_temp_new_i32();
1581 c.u.s32.b = tcg_const_i32(0);
1582 tcg_gen_trunc_i64_i32(c.u.s32.a, t);
1583 tcg_temp_free_i64(t);
1585 return help_branch(s, &c, is_imm, imm, o->in2);
1588 static ExitStatus op_bct64(DisasContext *s, DisasOps *o)
1590 int r1 = get_field(s->fields, r1);
1591 bool is_imm = have_field(s->fields, i2);
1592 int imm = is_imm ? get_field(s->fields, i2) : 0;
1595 c.cond = TCG_COND_NE;
1600 tcg_gen_subi_i64(regs[r1], regs[r1], 1);
1601 c.u.s64.a = regs[r1];
1602 c.u.s64.b = tcg_const_i64(0);
1604 return help_branch(s, &c, is_imm, imm, o->in2);
/* Floating-point COMPARE (short/long/extended BFP): the helper computes
   the condition code into cc_op.  The extended form passes the 128-bit
   operand as two 64-bit halves. */
1607 static ExitStatus op_ceb(DisasContext *s, DisasOps *o)
1609 gen_helper_ceb(cc_op, cpu_env, o->in1, o->in2);
1614 static ExitStatus op_cdb(DisasContext *s, DisasOps *o)
1616 gen_helper_cdb(cc_op, cpu_env, o->in1, o->in2);
1621 static ExitStatus op_cxb(DisasContext *s, DisasOps *o)
1623 gen_helper_cxb(cc_op, cpu_env, o->out, o->out2, o->in1, o->in2);
/* CONVERT TO FIXED (BFP -> 32-bit int): m3 carries the rounding mode for
   the helper; the CC is then derived from the *source* FP value. */
1628 static ExitStatus op_cfeb(DisasContext *s, DisasOps *o)
1630 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1631 gen_helper_cfeb(o->out, cpu_env, o->in2, m3);
1632 tcg_temp_free_i32(m3);
1633 gen_set_cc_nz_f32(s, o->in2);
1637 static ExitStatus op_cfdb(DisasContext *s, DisasOps *o)
1639 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1640 gen_helper_cfdb(o->out, cpu_env, o->in2, m3);
1641 tcg_temp_free_i32(m3);
1642 gen_set_cc_nz_f64(s, o->in2);
1646 static ExitStatus op_cfxb(DisasContext *s, DisasOps *o)
1648 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1649 gen_helper_cfxb(o->out, cpu_env, o->in1, o->in2, m3);
1650 tcg_temp_free_i32(m3);
1651 gen_set_cc_nz_f128(s, o->in1, o->in2);
/* CONVERT TO FIXED (BFP -> 64-bit int): same pattern as the 32-bit
   converts above, with the CGEB/CGDB/CGXB helpers. */
1655 static ExitStatus op_cgeb(DisasContext *s, DisasOps *o)
1657 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1658 gen_helper_cgeb(o->out, cpu_env, o->in2, m3);
1659 tcg_temp_free_i32(m3);
1660 gen_set_cc_nz_f32(s, o->in2);
1664 static ExitStatus op_cgdb(DisasContext *s, DisasOps *o)
1666 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1667 gen_helper_cgdb(o->out, cpu_env, o->in2, m3);
1668 tcg_temp_free_i32(m3);
1669 gen_set_cc_nz_f64(s, o->in2);
1673 static ExitStatus op_cgxb(DisasContext *s, DisasOps *o)
1675 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1676 gen_helper_cgxb(o->out, cpu_env, o->in1, o->in2, m3);
1677 tcg_temp_free_i32(m3);
1678 gen_set_cc_nz_f128(s, o->in1, o->in2);
/* CONVERT FROM FIXED (64-bit int -> BFP): exact conversion for the short
   and long forms apart from rounding (m3); no CC update here.  The
   extended result comes back as a 128-bit pair via return_low128. */
1682 static ExitStatus op_cegb(DisasContext *s, DisasOps *o)
1684 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1685 gen_helper_cegb(o->out, cpu_env, o->in2, m3);
1686 tcg_temp_free_i32(m3);
1690 static ExitStatus op_cdgb(DisasContext *s, DisasOps *o)
1692 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1693 gen_helper_cdgb(o->out, cpu_env, o->in2, m3);
1694 tcg_temp_free_i32(m3);
1698 static ExitStatus op_cxgb(DisasContext *s, DisasOps *o)
1700 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1701 gen_helper_cxgb(o->out, cpu_env, o->in2, m3);
1702 tcg_temp_free_i32(m3);
1703 return_low128(o->out2);
/* CHECKSUM: the helper consumes the buffer described by R2/R2+1 and
   returns the number of bytes processed; advance the address register
   and shrink the length register accordingly. */
1707 static ExitStatus op_cksm(DisasContext *s, DisasOps *o)
1709 int r2 = get_field(s->fields, r2);
1710 TCGv_i64 len = tcg_temp_new_i64();
1712 potential_page_fault(s);
1713 gen_helper_cksm(len, cpu_env, o->in1, o->in2, regs[r2 + 1]);
1715 return_low128(o->out);
1717 tcg_gen_add_i64(regs[r2], regs[r2], len);
1718 tcg_gen_sub_i64(regs[r2 + 1], regs[r2 + 1], len);
1719 tcg_temp_free_i64(len);
/* COMPARE LOGICAL (storage-storage): for operand lengths of 1/2/4/8
   bytes, inline two loads and set the CC with an unsigned compare;
   otherwise fall back to the byte-loop helper.  NOTE(review): the
   switch scaffolding around the cases is elided from this view. */
1724 static ExitStatus op_clc(DisasContext *s, DisasOps *o)
1726 int l = get_field(s->fields, l1);
1731 tcg_gen_qemu_ld8u(cc_src, o->addr1, get_mem_index(s));
1732 tcg_gen_qemu_ld8u(cc_dst, o->in2, get_mem_index(s));
1735 tcg_gen_qemu_ld16u(cc_src, o->addr1, get_mem_index(s));
1736 tcg_gen_qemu_ld16u(cc_dst, o->in2, get_mem_index(s));
1739 tcg_gen_qemu_ld32u(cc_src, o->addr1, get_mem_index(s));
1740 tcg_gen_qemu_ld32u(cc_dst, o->in2, get_mem_index(s));
1743 tcg_gen_qemu_ld64(cc_src, o->addr1, get_mem_index(s));
1744 tcg_gen_qemu_ld64(cc_dst, o->in2, get_mem_index(s));
/* Generic length: helper computes the CC directly. */
1747 potential_page_fault(s);
1748 vl = tcg_const_i32(l);
1749 gen_helper_clc(cc_op, cpu_env, vl, o->addr1, o->in2);
1750 tcg_temp_free_i32(vl);
/* Fast path: unsigned 64-bit compare of the two loaded values. */
1754 gen_op_update2_cc_i64(s, CC_OP_LTUGTU_64, cc_src, cc_dst);
/* COMPARE LOGICAL LONG EXTENDED: entirely in the helper, which sets
   the CC and updates the register pairs. */
1758 static ExitStatus op_clcle(DisasContext *s, DisasOps *o)
1760 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
1761 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
1762 potential_page_fault(s);
1763 gen_helper_clcle(cc_op, cpu_env, r1, o->in2, r3);
1764 tcg_temp_free_i32(r1);
1765 tcg_temp_free_i32(r3);
/* COMPARE LOGICAL CHARACTERS UNDER MASK: pass the low 32 bits of R1 and
   the mask M3 to the helper, which sets the CC. */
1770 static ExitStatus op_clm(DisasContext *s, DisasOps *o)
1772 TCGv_i32 m3 = tcg_const_i32(get_field(s->fields, m3));
1773 TCGv_i32 t1 = tcg_temp_new_i32();
1774 tcg_gen_trunc_i64_i32(t1, o->in1);
1775 potential_page_fault(s);
1776 gen_helper_clm(cc_op, cpu_env, t1, m3, o->in2);
1778 tcg_temp_free_i32(t1);
1779 tcg_temp_free_i32(m3);
/* COMPARE LOGICAL STRING: helper scans using the terminator in regs[0]
   and returns the updated addresses (second via return_low128). */
1783 static ExitStatus op_clst(DisasContext *s, DisasOps *o)
1785 potential_page_fault(s);
1786 gen_helper_clst(o->in1, cpu_env, regs[0], o->in1, o->in2);
1788 return_low128(o->in2);
/* COMPARE AND SWAP (32-bit): helper performs the atomic-style
   compare-and-swap against storage with R3 as the replacement value. */
1792 static ExitStatus op_cs(DisasContext *s, DisasOps *o)
1794 int r3 = get_field(s->fields, r3);
1795 potential_page_fault(s);
1796 gen_helper_cs(o->out, cpu_env, o->in1, o->in2, regs[r3]);
/* COMPARE AND SWAP (64-bit): same as above via the csg helper. */
1801 static ExitStatus op_csg(DisasContext *s, DisasOps *o)
1803 int r3 = get_field(s->fields, r3);
1804 potential_page_fault(s);
1805 gen_helper_csg(o->out, cpu_env, o->in1, o->in2, regs[r3]);
1810 #ifndef CONFIG_USER_ONLY
/* COMPARE AND SWAP AND PURGE: privileged; helper sets the CC. */
1811 static ExitStatus op_csp(DisasContext *s, DisasOps *o)
1813 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
1814 check_privileged(s);
1815 gen_helper_csp(cc_op, cpu_env, r1, o->in2);
1816 tcg_temp_free_i32(r1);
/* COMPARE DOUBLE AND SWAP (32-bit pair): glue R3 (high) and R3+1 (low)
   into one 64-bit value and reuse the 64-bit csg helper. */
1822 static ExitStatus op_cds(DisasContext *s, DisasOps *o)
1824 int r3 = get_field(s->fields, r3);
1825 TCGv_i64 in3 = tcg_temp_new_i64();
1826 tcg_gen_deposit_i64(in3, regs[r3 + 1], regs[r3], 32, 32);
1827 potential_page_fault(s);
1828 gen_helper_csg(o->out, cpu_env, o->in1, o->in2, in3);
1829 tcg_temp_free_i64(in3);
/* COMPARE DOUBLE AND SWAP (64-bit pair, CDSG): helper-only for now. */
1834 static ExitStatus op_cdsg(DisasContext *s, DisasOps *o)
1836 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
1837 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
1838 potential_page_fault(s);
1839 /* XXX rewrite in tcg */
1840 gen_helper_cdsg(cc_op, cpu_env, r1, o->in2, r3);
/* CONVERT TO DECIMAL: convert the low 32 bits of R1 to packed decimal
   in the helper, then store the 8-byte result at the second operand. */
1845 static ExitStatus op_cvd(DisasContext *s, DisasOps *o)
1847 TCGv_i64 t1 = tcg_temp_new_i64();
1848 TCGv_i32 t2 = tcg_temp_new_i32();
1849 tcg_gen_trunc_i64_i32(t2, o->in1);
1850 gen_helper_cvd(t1, t2);
1851 tcg_temp_free_i32(t2);
1852 tcg_gen_qemu_st64(t1, o->in2, get_mem_index(s));
1853 tcg_temp_free_i64(t1);
1857 #ifndef CONFIG_USER_ONLY
/* DIAGNOSE: privileged hypercall-style interface; only the low 12 bits
   of D2 select the function. */
1858 static ExitStatus op_diag(DisasContext *s, DisasOps *o)
1862 check_privileged(s);
1863 potential_page_fault(s);
1865 /* We pretend the format is RX_a so that D2 is the field we want. */
1866 tmp = tcg_const_i32(get_field(s->fields, d2) & 0xfff);
1867 gen_helper_diag(regs[2], cpu_env, tmp, regs[2], regs[1]);
1868 tcg_temp_free_i32(tmp);
/* Integer divides: each helper produces the remainder/quotient pair;
   the second half of the 128-bit result arrives via return_low128. */
1873 static ExitStatus op_divs32(DisasContext *s, DisasOps *o)
1875 gen_helper_divs32(o->out2, cpu_env, o->in1, o->in2);
1876 return_low128(o->out);
1880 static ExitStatus op_divu32(DisasContext *s, DisasOps *o)
1882 gen_helper_divu32(o->out2, cpu_env, o->in1, o->in2);
1883 return_low128(o->out);
1887 static ExitStatus op_divs64(DisasContext *s, DisasOps *o)
1889 gen_helper_divs64(o->out2, cpu_env, o->in1, o->in2);
1890 return_low128(o->out);
/* DLGR/DLG: the 128-bit dividend is passed as the out/out2 pair. */
1894 static ExitStatus op_divu64(DisasContext *s, DisasOps *o)
1896 gen_helper_divu64(o->out2, cpu_env, o->out, o->out2, o->in2);
1897 return_low128(o->out);
/* BFP divides (short/long/extended). */
1901 static ExitStatus op_deb(DisasContext *s, DisasOps *o)
1903 gen_helper_deb(o->out, cpu_env, o->in1, o->in2);
1907 static ExitStatus op_ddb(DisasContext *s, DisasOps *o)
1909 gen_helper_ddb(o->out, cpu_env, o->in1, o->in2);
1913 static ExitStatus op_dxb(DisasContext *s, DisasOps *o)
1915 gen_helper_dxb(o->out, cpu_env, o->out, o->out2, o->in1, o->in2);
1916 return_low128(o->out2);
/* EXTRACT ACCESS REGISTER: read the 32-bit access register R2. */
1920 static ExitStatus op_ear(DisasContext *s, DisasOps *o)
1922 int r2 = get_field(s->fields, r2);
1923 tcg_gen_ld32u_i64(o->out, cpu_env, offsetof(CPUS390XState, aregs[r2]));
/* EXTRACT FPC: read the floating-point control register. */
1927 static ExitStatus op_efpc(DisasContext *s, DisasOps *o)
1929 tcg_gen_ld32u_i64(o->out, cpu_env, offsetof(CPUS390XState, fpc));
/* EXECUTE: currently implemented entirely in a helper. */
1933 static ExitStatus op_ex(DisasContext *s, DisasOps *o)
1935 /* ??? Perhaps a better way to implement EXECUTE is to set a bit in
1936 tb->flags, (ab)use the tb->cs_base field as the address of
1937 the template in memory, and grab 8 bits of tb->flags/cflags for
1938 the contents of the register. We would then recognize all this
1939 in gen_intermediate_code_internal, generating code for exactly
1940 one instruction. This new TB then gets executed normally.
1942 On the other hand, this seems to be mostly used for modifying
1943 MVC inside of memcpy, which needs a helper call anyway. So
1944 perhaps this doesn't bear thinking about any further. */
1951 tmp = tcg_const_i64(s->next_pc);
1952 gen_helper_ex(cc_op, cpu_env, cc_op, o->in1, o->in2, tmp);
1953 tcg_temp_free_i64(tmp);
/* FIND LEFTMOST ONE: R1 = count of leading zeros (64 if input is 0),
   R1+1 = input with the found bit cleared.  cc_dst doubles as a copy of
   the original input for both the CC and the second result. */
1959 static ExitStatus op_flogr(DisasContext *s, DisasOps *o)
1961 /* We'll use the original input for cc computation, since we get to
1962 compare that against 0, which ought to be better than comparing
1963 the real output against 64. It also lets cc_dst be a convenient
1964 temporary during our computation. */
1965 gen_op_update1_cc_i64(s, CC_OP_FLOGR, o->in2);
1967 /* R1 = IN ? CLZ(IN) : 64. */
1968 gen_helper_clz(o->out, o->in2);
1970 /* R1+1 = IN & ~(found bit). Note that we may attempt to shift this
1971 value by 64, which is undefined. But since the shift is 64 iff the
1972 input is zero, we still get the correct result after and'ing. */
1973 tcg_gen_movi_i64(o->out2, 0x8000000000000000ull);
1974 tcg_gen_shr_i64(o->out2, o->out2, o->out);
1975 tcg_gen_andc_i64(o->out2, cc_dst, o->out2);
/* INSERT CHARACTERS UNDER MASK: for contiguous masks do one wide load
   and a single deposit; otherwise loop over set mask bits one byte at a
   time.  base (from insn->data) selects low vs. high register half.
   NOTE(review): the switch/loop scaffolding is elided from this view. */
1979 static ExitStatus op_icm(DisasContext *s, DisasOps *o)
1981 int m3 = get_field(s->fields, m3);
1982 int pos, len, base = s->insn->data;
1983 TCGv_i64 tmp = tcg_temp_new_i64();
1988 /* Effectively a 32-bit load. */
1989 tcg_gen_qemu_ld32u(tmp, o->in2, get_mem_index(s));
1996 /* Effectively a 16-bit load. */
1997 tcg_gen_qemu_ld16u(tmp, o->in2, get_mem_index(s));
2005 /* Effectively an 8-bit load. */
2006 tcg_gen_qemu_ld8u(tmp, o->in2, get_mem_index(s));
/* Fast path: one deposit at the position implied by the mask. */
2011 pos = base + ctz32(m3) * 8;
2012 tcg_gen_deposit_i64(o->out, o->out, tmp, pos, len);
2013 ccm = ((1ull << len) - 1) << pos;
2017 /* This is going to be a sequence of loads and inserts. */
2018 pos = base + 32 - 8;
2022 tcg_gen_qemu_ld8u(tmp, o->in2, get_mem_index(s));
2023 tcg_gen_addi_i64(o->in2, o->in2, 1);
2024 tcg_gen_deposit_i64(o->out, o->out, tmp, pos, 8);
2027 m3 = (m3 << 1) & 0xf;
/* CC is computed from the inserted bits (ccm) and the result. */
2033 tcg_gen_movi_i64(tmp, ccm);
2034 gen_op_update2_cc_i64(s, CC_OP_ICM, tmp, o->out);
2035 tcg_temp_free_i64(tmp);
/* Generic insert-immediate: insn->data packs (size << 8) | shift for the
   deposit of in2 into in1. */
2039 static ExitStatus op_insi(DisasContext *s, DisasOps *o)
2041 int shift = s->insn->data & 0xff;
2042 int size = s->insn->data >> 8;
2043 tcg_gen_deposit_i64(o->out, o->in1, o->in2, shift, size);
/* INSERT PROGRAM MASK: rebuild bits 24-31 of R1 from the program mask
   (psw_mask bits) and the current condition code. */
2047 static ExitStatus op_ipm(DisasContext *s, DisasOps *o)
2052 tcg_gen_andi_i64(o->out, o->out, ~0xff000000ull);
2054 t1 = tcg_temp_new_i64();
2055 tcg_gen_shli_i64(t1, psw_mask, 20);
2056 tcg_gen_shri_i64(t1, t1, 36);
2057 tcg_gen_or_i64(o->out, o->out, t1);
2059 tcg_gen_extu_i32_i64(t1, cc_op);
2060 tcg_gen_shli_i64(t1, t1, 28);
2061 tcg_gen_or_i64(o->out, o->out, t1);
2062 tcg_temp_free_i64(t1);
2066 #ifndef CONFIG_USER_ONLY
/* INVALIDATE PAGE TABLE ENTRY: privileged, helper-only. */
2067 static ExitStatus op_ipte(DisasContext *s, DisasOps *o)
2069 check_privileged(s);
2070 gen_helper_ipte(cpu_env, o->in1, o->in2);
/* INSERT STORAGE KEY EXTENDED: privileged, helper-only. */
2074 static ExitStatus op_iske(DisasContext *s, DisasOps *o)
2076 check_privileged(s);
2077 gen_helper_iske(o->out, cpu_env, o->in2);
/* BFP LOAD LENGTHENED / LOAD ROUNDED conversions between short, long
   and extended formats; extended results use return_low128. */
2082 static ExitStatus op_ldeb(DisasContext *s, DisasOps *o)
2084 gen_helper_ldeb(o->out, cpu_env, o->in2);
2088 static ExitStatus op_ledb(DisasContext *s, DisasOps *o)
2090 gen_helper_ledb(o->out, cpu_env, o->in2);
2094 static ExitStatus op_ldxb(DisasContext *s, DisasOps *o)
2096 gen_helper_ldxb(o->out, cpu_env, o->in1, o->in2);
2100 static ExitStatus op_lexb(DisasContext *s, DisasOps *o)
2102 gen_helper_lexb(o->out, cpu_env, o->in1, o->in2);
2106 static ExitStatus op_lxdb(DisasContext *s, DisasOps *o)
2108 gen_helper_lxdb(o->out, cpu_env, o->in2);
2109 return_low128(o->out2);
2113 static ExitStatus op_lxeb(DisasContext *s, DisasOps *o)
2115 gen_helper_lxeb(o->out, cpu_env, o->in2);
2116 return_low128(o->out2);
/* LOAD LOGICAL THIRTY ONE BITS: keep only the low 31 bits. */
2120 static ExitStatus op_llgt(DisasContext *s, DisasOps *o)
2122 tcg_gen_andi_i64(o->out, o->in2, 0x7fffffff);
/* Plain memory loads of each width/signedness into the output. */
2126 static ExitStatus op_ld8s(DisasContext *s, DisasOps *o)
2128 tcg_gen_qemu_ld8s(o->out, o->in2, get_mem_index(s));
2132 static ExitStatus op_ld8u(DisasContext *s, DisasOps *o)
2134 tcg_gen_qemu_ld8u(o->out, o->in2, get_mem_index(s));
2138 static ExitStatus op_ld16s(DisasContext *s, DisasOps *o)
2140 tcg_gen_qemu_ld16s(o->out, o->in2, get_mem_index(s));
2144 static ExitStatus op_ld16u(DisasContext *s, DisasOps *o)
2146 tcg_gen_qemu_ld16u(o->out, o->in2, get_mem_index(s));
2150 static ExitStatus op_ld32s(DisasContext *s, DisasOps *o)
2152 tcg_gen_qemu_ld32s(o->out, o->in2, get_mem_index(s));
2156 static ExitStatus op_ld32u(DisasContext *s, DisasOps *o)
2158 tcg_gen_qemu_ld32u(o->out, o->in2, get_mem_index(s));
2162 static ExitStatus op_ld64(DisasContext *s, DisasOps *o)
2164 tcg_gen_qemu_ld64(o->out, o->in2, get_mem_index(s));
2168 #ifndef CONFIG_USER_ONLY
/* LOAD CONTROL (32-bit): privileged; helper loads control registers
   R1..R3 from storage. */
2169 static ExitStatus op_lctl(DisasContext *s, DisasOps *o)
2171 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2172 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2173 check_privileged(s);
2174 potential_page_fault(s);
2175 gen_helper_lctl(cpu_env, r1, o->in2, r3);
2176 tcg_temp_free_i32(r1);
2177 tcg_temp_free_i32(r3);
/* LOAD CONTROL (64-bit, LCTLG): same as above with 64-bit values. */
2181 static ExitStatus op_lctlg(DisasContext *s, DisasOps *o)
2183 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2184 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2185 check_privileged(s);
2186 potential_page_fault(s);
2187 gen_helper_lctlg(cpu_env, r1, o->in2, r3);
2188 tcg_temp_free_i32(r1);
2189 tcg_temp_free_i32(r3);
/* LOAD REAL ADDRESS: privileged; helper translates and sets the CC. */
2192 static ExitStatus op_lra(DisasContext *s, DisasOps *o)
2194 check_privileged(s);
2195 potential_page_fault(s);
2196 gen_helper_lra(o->out, cpu_env, o->in2);
/* LOAD PSW (short, 8-byte format): read mask and address words, widen
   the 32-bit mask into the 64-bit internal PSW mask, then let the
   helper install the new PSW.  Control never returns to this TB. */
2201 static ExitStatus op_lpsw(DisasContext *s, DisasOps *o)
2205 check_privileged(s);
2207 t1 = tcg_temp_new_i64();
2208 t2 = tcg_temp_new_i64();
2209 tcg_gen_qemu_ld32u(t1, o->in2, get_mem_index(s));
2210 tcg_gen_addi_i64(o->in2, o->in2, 4);
2211 tcg_gen_qemu_ld32u(t2, o->in2, get_mem_index(s));
2212 /* Convert the 32-bit PSW_MASK into the 64-bit PSW_MASK. */
2213 tcg_gen_shli_i64(t1, t1, 32);
2214 gen_helper_load_psw(cpu_env, t1, t2);
2215 tcg_temp_free_i64(t1);
2216 tcg_temp_free_i64(t2);
2217 return EXIT_NORETURN;
/* LOAD PSW EXTENDED (16-byte format): both halves are full 64-bit. */
2220 static ExitStatus op_lpswe(DisasContext *s, DisasOps *o)
2224 check_privileged(s);
2226 t1 = tcg_temp_new_i64();
2227 t2 = tcg_temp_new_i64();
2228 tcg_gen_qemu_ld64(t1, o->in2, get_mem_index(s));
2229 tcg_gen_addi_i64(o->in2, o->in2, 8);
2230 tcg_gen_qemu_ld64(t2, o->in2, get_mem_index(s));
2231 gen_helper_load_psw(cpu_env, t1, t2);
2232 tcg_temp_free_i64(t1);
2233 tcg_temp_free_i64(t2);
2234 return EXIT_NORETURN;
/* LOAD ACCESS MULTIPLE: helper loads access registers R1..R3. */
2238 static ExitStatus op_lam(DisasContext *s, DisasOps *o)
2240 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2241 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2242 potential_page_fault(s);
2243 gen_helper_lam(cpu_env, r1, o->in2, r3);
2244 tcg_temp_free_i32(r1);
2245 tcg_temp_free_i32(r3);
/* LOAD MULTIPLE (32-bit): load registers R1..R3 (low halves) from
   consecutive 4-byte slots.  NOTE(review): the loop scaffolding over
   r1..r3 is elided from this view. */
2249 static ExitStatus op_lm32(DisasContext *s, DisasOps *o)
2251 int r1 = get_field(s->fields, r1);
2252 int r3 = get_field(s->fields, r3);
2253 TCGv_i64 t = tcg_temp_new_i64();
2254 TCGv_i64 t4 = tcg_const_i64(4);
2257 tcg_gen_qemu_ld32u(t, o->in2, get_mem_index(s));
2258 store_reg32_i64(r1, t);
2262 tcg_gen_add_i64(o->in2, o->in2, t4);
2266 tcg_temp_free_i64(t);
2267 tcg_temp_free_i64(t4);
/* LOAD MULTIPLE HIGH: same walk, but each word goes into the high half
   of the register (store_reg32h_i64). */
2271 static ExitStatus op_lmh(DisasContext *s, DisasOps *o)
2273 int r1 = get_field(s->fields, r1);
2274 int r3 = get_field(s->fields, r3);
2275 TCGv_i64 t = tcg_temp_new_i64();
2276 TCGv_i64 t4 = tcg_const_i64(4);
2279 tcg_gen_qemu_ld32u(t, o->in2, get_mem_index(s));
2280 store_reg32h_i64(r1, t);
2284 tcg_gen_add_i64(o->in2, o->in2, t4);
2288 tcg_temp_free_i64(t);
2289 tcg_temp_free_i64(t4);
/* LOAD MULTIPLE (64-bit, LMG): full 8-byte loads straight into regs. */
2293 static ExitStatus op_lm64(DisasContext *s, DisasOps *o)
2295 int r1 = get_field(s->fields, r1);
2296 int r3 = get_field(s->fields, r3);
2297 TCGv_i64 t8 = tcg_const_i64(8);
2300 tcg_gen_qemu_ld64(regs[r1], o->in2, get_mem_index(s));
2304 tcg_gen_add_i64(o->in2, o->in2, t8);
2308 tcg_temp_free_i64(t8);
/* Generic register move: alias in2 as the output and transfer the
   "global" ownership flag so the output hook stores it correctly. */
2312 static ExitStatus op_mov2(DisasContext *s, DisasOps *o)
2315 o->g_out = o->g_in2;
2316 TCGV_UNUSED_I64(o->in2);
/* 128-bit move: alias both input halves as the output pair, again
   transferring ownership flags. */
2321 static ExitStatus op_movx(DisasContext *s, DisasOps *o)
2325 o->g_out = o->g_in1;
2326 o->g_out2 = o->g_in2;
2327 TCGV_UNUSED_I64(o->in1);
2328 TCGV_UNUSED_I64(o->in2);
2329 o->g_in1 = o->g_in2 = false;
/* MOVE CHARACTERS: storage-to-storage copy of L1+1 bytes via helper. */
2333 static ExitStatus op_mvc(DisasContext *s, DisasOps *o)
2335 TCGv_i32 l = tcg_const_i32(get_field(s->fields, l1));
2336 potential_page_fault(s);
2337 gen_helper_mvc(cpu_env, l, o->addr1, o->in2);
2338 tcg_temp_free_i32(l);
/* MOVE LONG: helper drives the register pairs and sets the CC. */
2342 static ExitStatus op_mvcl(DisasContext *s, DisasOps *o)
2344 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2345 TCGv_i32 r2 = tcg_const_i32(get_field(s->fields, r2));
2346 potential_page_fault(s);
2347 gen_helper_mvcl(cc_op, cpu_env, r1, r2);
2348 tcg_temp_free_i32(r1);
2349 tcg_temp_free_i32(r2);
/* MOVE LONG EXTENDED: like MVCL with an extra operand register. */
2354 static ExitStatus op_mvcle(DisasContext *s, DisasOps *o)
2356 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2357 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2358 potential_page_fault(s);
2359 gen_helper_mvcle(cc_op, cpu_env, r1, o->in2, r3);
2360 tcg_temp_free_i32(r1);
2361 tcg_temp_free_i32(r3);
2366 #ifndef CONFIG_USER_ONLY
/* MOVE TO PRIMARY / MOVE TO SECONDARY: privileged cross-address-space
   moves; the length lives in the register named by the L1 field. */
2367 static ExitStatus op_mvcp(DisasContext *s, DisasOps *o)
2369 int r1 = get_field(s->fields, l1);
2370 check_privileged(s);
2371 potential_page_fault(s);
2372 gen_helper_mvcp(cc_op, cpu_env, regs[r1], o->addr1, o->in2);
2377 static ExitStatus op_mvcs(DisasContext *s, DisasOps *o)
2379 int r1 = get_field(s->fields, l1);
2380 check_privileged(s);
2381 potential_page_fault(s);
2382 gen_helper_mvcs(cc_op, cpu_env, regs[r1], o->addr1, o->in2);
/* MOVE PAGE: helper-only; regs[0] carries the option bits. */
2388 static ExitStatus op_mvpg(DisasContext *s, DisasOps *o)
2390 potential_page_fault(s);
2391 gen_helper_mvpg(cpu_env, regs[0], o->in1, o->in2);
/* MOVE STRING: helper copies up to the terminator in regs[0] and
   returns the updated addresses (second via return_low128). */
2396 static ExitStatus op_mvst(DisasContext *s, DisasOps *o)
2398 potential_page_fault(s);
2399 gen_helper_mvst(o->in1, cpu_env, regs[0], o->in1, o->in2);
2401 return_low128(o->in2);
/* Plain 64-bit multiply (low half of the product). */
2405 static ExitStatus op_mul(DisasContext *s, DisasOps *o)
2407 tcg_gen_mul_i64(o->out, o->in1, o->in2);
/* 64x64 -> 128 multiply: high half in out, low half via return_low128. */
2411 static ExitStatus op_mul128(DisasContext *s, DisasOps *o)
2413 gen_helper_mul128(o->out, cpu_env, o->in1, o->in2);
2414 return_low128(o->out2);
/* BFP multiplies (short/long/extended and long->extended widening). */
2418 static ExitStatus op_meeb(DisasContext *s, DisasOps *o)
2420 gen_helper_meeb(o->out, cpu_env, o->in1, o->in2);
2424 static ExitStatus op_mdeb(DisasContext *s, DisasOps *o)
2426 gen_helper_mdeb(o->out, cpu_env, o->in1, o->in2);
2430 static ExitStatus op_mdb(DisasContext *s, DisasOps *o)
2432 gen_helper_mdb(o->out, cpu_env, o->in1, o->in2);
2436 static ExitStatus op_mxb(DisasContext *s, DisasOps *o)
2438 gen_helper_mxb(o->out, cpu_env, o->out, o->out2, o->in1, o->in2);
2439 return_low128(o->out2);
2443 static ExitStatus op_mxdb(DisasContext *s, DisasOps *o)
2445 gen_helper_mxdb(o->out, cpu_env, o->out, o->out2, o->in2);
2446 return_low128(o->out2);
/* BFP multiply-and-add / multiply-and-subtract: the short forms load R3
   as a temporary (freed here); the long forms pass fregs[r3] directly. */
2450 static ExitStatus op_maeb(DisasContext *s, DisasOps *o)
2452 TCGv_i64 r3 = load_freg32_i64(get_field(s->fields, r3));
2453 gen_helper_maeb(o->out, cpu_env, o->in1, o->in2, r3);
2454 tcg_temp_free_i64(r3);
2458 static ExitStatus op_madb(DisasContext *s, DisasOps *o)
2460 int r3 = get_field(s->fields, r3);
2461 gen_helper_madb(o->out, cpu_env, o->in1, o->in2, fregs[r3]);
2465 static ExitStatus op_mseb(DisasContext *s, DisasOps *o)
2467 TCGv_i64 r3 = load_freg32_i64(get_field(s->fields, r3));
2468 gen_helper_mseb(o->out, cpu_env, o->in1, o->in2, r3);
2469 tcg_temp_free_i64(r3);
2473 static ExitStatus op_msdb(DisasContext *s, DisasOps *o)
2475 int r3 = get_field(s->fields, r3);
2476 gen_helper_msdb(o->out, cpu_env, o->in1, o->in2, fregs[r3]);
/* LOAD NEGATIVE (integer): helper forces the result negative. */
2480 static ExitStatus op_nabs(DisasContext *s, DisasOps *o)
2482 gen_helper_nabs_i64(o->out, o->in2);
/* FP LOAD NEGATIVE: set the sign bit of the relevant format directly;
   the 128-bit form only touches the high word's sign. */
2486 static ExitStatus op_nabsf32(DisasContext *s, DisasOps *o)
2488 tcg_gen_ori_i64(o->out, o->in2, 0x80000000ull);
2492 static ExitStatus op_nabsf64(DisasContext *s, DisasOps *o)
2494 tcg_gen_ori_i64(o->out, o->in2, 0x8000000000000000ull);
2498 static ExitStatus op_nabsf128(DisasContext *s, DisasOps *o)
2500 tcg_gen_ori_i64(o->out, o->in1, 0x8000000000000000ull);
2501 tcg_gen_mov_i64(o->out2, o->in2);
/* AND (storage-storage, NC): helper computes and sets the CC. */
2505 static ExitStatus op_nc(DisasContext *s, DisasOps *o)
2507 TCGv_i32 l = tcg_const_i32(get_field(s->fields, l1));
2508 potential_page_fault(s);
2509 gen_helper_nc(cc_op, cpu_env, l, o->addr1, o->in2);
2510 tcg_temp_free_i32(l);
/* LOAD COMPLEMENT (integer two's complement negate). */
2515 static ExitStatus op_neg(DisasContext *s, DisasOps *o)
2517 tcg_gen_neg_i64(o->out, o->in2);
/* FP LOAD COMPLEMENT: flip the sign bit of the relevant format. */
2521 static ExitStatus op_negf32(DisasContext *s, DisasOps *o)
2523 tcg_gen_xori_i64(o->out, o->in2, 0x80000000ull);
2527 static ExitStatus op_negf64(DisasContext *s, DisasOps *o)
2529 tcg_gen_xori_i64(o->out, o->in2, 0x8000000000000000ull);
2533 static ExitStatus op_negf128(DisasContext *s, DisasOps *o)
2535 tcg_gen_xori_i64(o->out, o->in1, 0x8000000000000000ull);
2536 tcg_gen_mov_i64(o->out2, o->in2);
/* OR (storage-storage, OC): helper computes and sets the CC. */
2540 static ExitStatus op_oc(DisasContext *s, DisasOps *o)
2542 TCGv_i32 l = tcg_const_i32(get_field(s->fields, l1));
2543 potential_page_fault(s);
2544 gen_helper_oc(cc_op, cpu_env, l, o->addr1, o->in2);
2545 tcg_temp_free_i32(l);
/* Register OR. */
2550 static ExitStatus op_or(DisasContext *s, DisasOps *o)
2552 tcg_gen_or_i64(o->out, o->in1, o->in2);
/* OR immediate (OIHH etc.): insn->data packs (size << 8) | shift; the
   immediate is shifted into position before the OR, and the CC comes
   from only the touched bits. */
2556 static ExitStatus op_ori(DisasContext *s, DisasOps *o)
2558 int shift = s->insn->data & 0xff;
2559 int size = s->insn->data >> 8;
2560 uint64_t mask = ((1ull << size) - 1) << shift;
2563 tcg_gen_shli_i64(o->in2, o->in2, shift);
2564 tcg_gen_or_i64(o->out, o->in1, o->in2);
2566 /* Produce the CC from only the bits manipulated. */
2567 tcg_gen_andi_i64(cc_dst, o->out, mask);
2568 set_cc_nz_u64(s, cc_dst);
2572 #ifndef CONFIG_USER_ONLY
/* PURGE TLB: privileged, helper-only. */
2573 static ExitStatus op_ptlb(DisasContext *s, DisasOps *o)
2575 check_privileged(s);
2576 gen_helper_ptlb(cpu_env);
/* Byte-reversal loads/moves at each width. */
2581 static ExitStatus op_rev16(DisasContext *s, DisasOps *o)
2583 tcg_gen_bswap16_i64(o->out, o->in2);
2587 static ExitStatus op_rev32(DisasContext *s, DisasOps *o)
2589 tcg_gen_bswap32_i64(o->out, o->in2);
2593 static ExitStatus op_rev64(DisasContext *s, DisasOps *o)
2595 tcg_gen_bswap64_i64(o->out, o->in2);
/* ROTATE LEFT (32-bit): perform the rotate in 32-bit temporaries and
   zero-extend the result back to 64 bits. */
2599 static ExitStatus op_rll32(DisasContext *s, DisasOps *o)
2601 TCGv_i32 t1 = tcg_temp_new_i32();
2602 TCGv_i32 t2 = tcg_temp_new_i32();
2603 TCGv_i32 to = tcg_temp_new_i32();
2604 tcg_gen_trunc_i64_i32(t1, o->in1);
2605 tcg_gen_trunc_i64_i32(t2, o->in2);
2606 tcg_gen_rotl_i32(to, t1, t2);
2607 tcg_gen_extu_i32_i64(o->out, to);
2608 tcg_temp_free_i32(t1);
2609 tcg_temp_free_i32(t2);
2610 tcg_temp_free_i32(to);
/* ROTATE LEFT (64-bit). */
2614 static ExitStatus op_rll64(DisasContext *s, DisasOps *o)
2616 tcg_gen_rotl_i64(o->out, o->in1, o->in2);
2620 #ifndef CONFIG_USER_ONLY
/* RESET REFERENCE BIT EXTENDED: privileged; helper sets the CC. */
2621 static ExitStatus op_rrbe(DisasContext *s, DisasOps *o)
2623 check_privileged(s);
2624 gen_helper_rrbe(cc_op, cpu_env, o->in2);
/* SET ADDRESS SPACE CONTROL FAST: privileged; ends the TB because the
   addressing mode changes. */
2629 static ExitStatus op_sacf(DisasContext *s, DisasOps *o)
2631 check_privileged(s);
2632 gen_helper_sacf(cpu_env, o->in2);
2633 /* Addressing mode has changed, so end the block. */
2634 return EXIT_PC_STALE;
/* SET ACCESS REGISTER: write the low 32 bits of in2 into AR r1. */
2638 static ExitStatus op_sar(DisasContext *s, DisasOps *o)
2640 int r1 = get_field(s->fields, r1);
2641 tcg_gen_st32_i64(o->in2, cpu_env, offsetof(CPUS390XState, aregs[r1]));
/* BFP subtract (short/long/extended). */
2645 static ExitStatus op_seb(DisasContext *s, DisasOps *o)
2647 gen_helper_seb(o->out, cpu_env, o->in1, o->in2);
2651 static ExitStatus op_sdb(DisasContext *s, DisasOps *o)
2653 gen_helper_sdb(o->out, cpu_env, o->in1, o->in2);
2657 static ExitStatus op_sxb(DisasContext *s, DisasOps *o)
2659 gen_helper_sxb(o->out, cpu_env, o->out, o->out2, o->in1, o->in2);
2660 return_low128(o->out2);
/* BFP square root (short/long/extended). */
2664 static ExitStatus op_sqeb(DisasContext *s, DisasOps *o)
2666 gen_helper_sqeb(o->out, cpu_env, o->in2);
2670 static ExitStatus op_sqdb(DisasContext *s, DisasOps *o)
2672 gen_helper_sqdb(o->out, cpu_env, o->in2);
2676 static ExitStatus op_sqxb(DisasContext *s, DisasOps *o)
2678 gen_helper_sqxb(o->out, cpu_env, o->in1, o->in2);
2679 return_low128(o->out2);
2683 #ifndef CONFIG_USER_ONLY
/* SERVICE CALL (SCLP): privileged; helper sets the CC. */
2684 static ExitStatus op_servc(DisasContext *s, DisasOps *o)
2686 check_privileged(s);
2687 potential_page_fault(s);
2688 gen_helper_servc(cc_op, cpu_env, o->in2, o->in1);
/* SIGNAL PROCESSOR: privileged; helper sets the CC. */
2693 static ExitStatus op_sigp(DisasContext *s, DisasOps *o)
2695 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2696 check_privileged(s);
2697 potential_page_fault(s);
2698 gen_helper_sigp(cc_op, cpu_env, o->in2, r1, o->in1);
2699 tcg_temp_free_i32(r1);
/* SHIFT LEFT SINGLE (arithmetic): shift, then restore the untouched
   sign bit from the source; insn->data (31 or 63) names the sign bit
   and selects the 32- vs 64-bit SLA CC computation. */
2704 static ExitStatus op_sla(DisasContext *s, DisasOps *o)
2706 uint64_t sign = 1ull << s->insn->data;
2707 enum cc_op cco = s->insn->data == 31 ? CC_OP_SLA_32 : CC_OP_SLA_64;
2708 gen_op_update2_cc_i64(s, cco, o->in1, o->in2);
2709 tcg_gen_shl_i64(o->out, o->in1, o->in2);
2710 /* The arithmetic left shift is curious in that it does not affect
2711 the sign bit. Copy that over from the source unchanged. */
2712 tcg_gen_andi_i64(o->out, o->out, ~sign);
2713 tcg_gen_andi_i64(o->in1, o->in1, sign);
2714 tcg_gen_or_i64(o->out, o->out, o->in1);
/* Logical/arithmetic shifts on the 64-bit operands. */
2718 static ExitStatus op_sll(DisasContext *s, DisasOps *o)
2720 tcg_gen_shl_i64(o->out, o->in1, o->in2);
2724 static ExitStatus op_sra(DisasContext *s, DisasOps *o)
2726 tcg_gen_sar_i64(o->out, o->in1, o->in2);
2730 static ExitStatus op_srl(DisasContext *s, DisasOps *o)
2732 tcg_gen_shr_i64(o->out, o->in1, o->in2);
/* SET FPC: helper validates and installs the FP control register. */
2736 static ExitStatus op_sfpc(DisasContext *s, DisasOps *o)
2738 gen_helper_sfpc(cpu_env, o->in2);
2742 #ifndef CONFIG_USER_ONLY
/* SET PSW KEY FROM ADDRESS: privileged; deposit the 4-bit key from the
   operand address into the PSW mask. */
2743 static ExitStatus op_spka(DisasContext *s, DisasOps *o)
2745 check_privileged(s);
2746 tcg_gen_shri_i64(o->in2, o->in2, 4);
2747 tcg_gen_deposit_i64(psw_mask, psw_mask, o->in2, PSW_SHIFT_KEY - 4, 4);
/* SET STORAGE KEY EXTENDED: privileged, helper-only. */
2751 static ExitStatus op_sske(DisasContext *s, DisasOps *o)
2753 check_privileged(s);
2754 gen_helper_sske(cpu_env, o->in1, o->in2);
/* SET SYSTEM MASK: privileged; replace the top byte of the PSW mask. */
2758 static ExitStatus op_ssm(DisasContext *s, DisasOps *o)
2760 check_privileged(s);
2761 tcg_gen_deposit_i64(psw_mask, psw_mask, o->in2, 56, 8);
/* STORE CPU ADDRESS: privileged; currently reads cpu_num (see the
   existing caveat below). */
2765 static ExitStatus op_stap(DisasContext *s, DisasOps *o)
2767 check_privileged(s);
2768 /* ??? Surely cpu address != cpu number. In any case the previous
2769 version of this stored more than the required half-word, so it
2770 is unlikely this has ever been tested. */
2771 tcg_gen_ld32u_i64(o->out, cpu_env, offsetof(CPUS390XState, cpu_num));
/* STORE CLOCK: helper reads the TOD clock; clock states unmodeled, so
   the CC is always 0. */
2775 static ExitStatus op_stck(DisasContext *s, DisasOps *o)
2777 gen_helper_stck(o->out, cpu_env);
2778 /* ??? We don't implement clock states. */
2779 gen_op_movi_cc(s, 0);
/* STORE CLOCK EXTENDED: widen the 64-bit clock into the 104-bit
   format (zero epoch index, low bits forced non-zero) and store the
   two 8-byte halves. */
2783 static ExitStatus op_stcke(DisasContext *s, DisasOps *o)
2785 TCGv_i64 c1 = tcg_temp_new_i64();
2786 TCGv_i64 c2 = tcg_temp_new_i64();
2787 gen_helper_stck(c1, cpu_env);
2788 /* Shift the 64-bit value into its place as a zero-extended
2789 104-bit value. Note that "bit positions 64-103 are always
2790 non-zero so that they compare differently to STCK"; we set
2791 the least significant bit to 1. */
2792 tcg_gen_shli_i64(c2, c1, 56);
2793 tcg_gen_shri_i64(c1, c1, 8);
2794 tcg_gen_ori_i64(c2, c2, 0x10000);
2795 tcg_gen_qemu_st64(c1, o->in2, get_mem_index(s));
2796 tcg_gen_addi_i64(o->in2, o->in2, 8);
2797 tcg_gen_qemu_st64(c2, o->in2, get_mem_index(s));
2798 tcg_temp_free_i64(c1);
2799 tcg_temp_free_i64(c2);
2800 /* ??? We don't implement clock states. */
2801 gen_op_movi_cc(s, 0);
/* SET CLOCK COMPARATOR / STORE CLOCK COMPARATOR: privileged. */
2805 static ExitStatus op_sckc(DisasContext *s, DisasOps *o)
2807 check_privileged(s);
2808 gen_helper_sckc(cpu_env, o->in2);
2812 static ExitStatus op_stckc(DisasContext *s, DisasOps *o)
2814 check_privileged(s);
2815 gen_helper_stckc(o->out, cpu_env);
/* STORE CONTROL (64-bit then 32-bit forms): privileged helper stores
   control registers R1..R3. */
2819 static ExitStatus op_stctg(DisasContext *s, DisasOps *o)
2821 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2822 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2823 check_privileged(s);
2824 potential_page_fault(s);
2825 gen_helper_stctg(cpu_env, r1, o->in2, r3);
2826 tcg_temp_free_i32(r1);
2827 tcg_temp_free_i32(r3);
2831 static ExitStatus op_stctl(DisasContext *s, DisasOps *o)
2833 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2834 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2835 check_privileged(s);
2836 potential_page_fault(s);
2837 gen_helper_stctl(cpu_env, r1, o->in2, r3);
2838 tcg_temp_free_i32(r1);
2839 tcg_temp_free_i32(r3);
/* STORE CPU ID: privileged; reads cpu_num as a stand-in. */
2843 static ExitStatus op_stidp(DisasContext *s, DisasOps *o)
2845 check_privileged(s);
2846 tcg_gen_ld32u_i64(o->out, cpu_env, offsetof(CPUS390XState, cpu_num));
/* SET CPU TIMER: privileged, helper-only. */
2850 static ExitStatus op_spt(DisasContext *s, DisasOps *o)
2852 check_privileged(s);
2853 gen_helper_spt(cpu_env, o->in2);
/* STORE FACILITY LIST: privileged; stores a hard-coded facility word at
   real address 200 (the architected STFL save area). */
2857 static ExitStatus op_stfl(DisasContext *s, DisasOps *o)
2860 /* We really ought to have more complete indication of facilities
2861 that we implement. Address this when STFLE is implemented. */
2862 check_privileged(s);
2863 f = tcg_const_i64(0xc0000000);
2864 a = tcg_const_i64(200);
2865 tcg_gen_qemu_st32(f, a, get_mem_index(s));
2866 tcg_temp_free_i64(f);
2867 tcg_temp_free_i64(a);
/* STORE CPU TIMER: privileged; helper reads the timer value. */
2871 static ExitStatus op_stpt(DisasContext *s, DisasOps *o)
2873 check_privileged(s);
2874 gen_helper_stpt(o->out, cpu_env);
/* STORE SYSTEM INFORMATION: privileged; helper sets the CC. */
2878 static ExitStatus op_stsi(DisasContext *s, DisasOps *o)
2880 check_privileged(s);
2881 potential_page_fault(s);
2882 gen_helper_stsi(cc_op, cpu_env, o->in2, regs[0], regs[1]);
/* SET PREFIX: privileged, helper-only. */
2887 static ExitStatus op_spx(DisasContext *s, DisasOps *o)
2889 check_privileged(s);
2890 gen_helper_spx(cpu_env, o->in2);
/* I/O-subsystem instructions: unimplemented, report "not operational"
   via CC 3. */
2894 static ExitStatus op_subchannel(DisasContext *s, DisasOps *o)
2896 check_privileged(s);
2897 /* Not operational. */
2898 gen_op_movi_cc(s, 3);
/* STORE PREFIX: privileged; read psa and mask to the architected
   prefix alignment. */
2902 static ExitStatus op_stpx(DisasContext *s, DisasOps *o)
2904 check_privileged(s);
2905 tcg_gen_ld_i64(o->out, cpu_env, offsetof(CPUS390XState, psa));
2906 tcg_gen_andi_i64(o->out, o->out, 0x7fffe000);
/* STNSM/STOSM: store the current system-mask byte first, then AND
   (op 0xac = STNSM) or OR (STOSM) the immediate into the PSW mask. */
2910 static ExitStatus op_stnosm(DisasContext *s, DisasOps *o)
2912 uint64_t i2 = get_field(s->fields, i2);
2915 check_privileged(s);
2917 /* It is important to do what the instruction name says: STORE THEN.
2918 If we let the output hook perform the store then if we fault and
2919 restart, we'll have the wrong SYSTEM MASK in place. */
2920 t = tcg_temp_new_i64();
2921 tcg_gen_shri_i64(t, psw_mask, 56);
2922 tcg_gen_qemu_st8(t, o->addr1, get_mem_index(s));
2923 tcg_temp_free_i64(t);
2925 if (s->fields->op == 0xac) {
2926 tcg_gen_andi_i64(psw_mask, psw_mask,
2927 (i2 << 56) | 0x00ffffffffffffffull);
2929 tcg_gen_ori_i64(psw_mask, psw_mask, i2 << 56);
/* STORE USING REAL ADDRESS: privileged, helper-only. */
2934 static ExitStatus op_stura(DisasContext *s, DisasOps *o)
2936 check_privileged(s);
2937 potential_page_fault(s);
2938 gen_helper_stura(cpu_env, o->in2, o->in1);
/* Plain memory stores of each width. */
2943 static ExitStatus op_st8(DisasContext *s, DisasOps *o)
2945 tcg_gen_qemu_st8(o->in1, o->in2, get_mem_index(s));
2949 static ExitStatus op_st16(DisasContext *s, DisasOps *o)
2951 tcg_gen_qemu_st16(o->in1, o->in2, get_mem_index(s));
2955 static ExitStatus op_st32(DisasContext *s, DisasOps *o)
2957 tcg_gen_qemu_st32(o->in1, o->in2, get_mem_index(s));
2961 static ExitStatus op_st64(DisasContext *s, DisasOps *o)
2963 tcg_gen_qemu_st64(o->in1, o->in2, get_mem_index(s));
/* STORE ACCESS MULTIPLE: helper stores access registers R1..R3. */
2967 static ExitStatus op_stam(DisasContext *s, DisasOps *o)
2969 TCGv_i32 r1 = tcg_const_i32(get_field(s->fields, r1));
2970 TCGv_i32 r3 = tcg_const_i32(get_field(s->fields, r3));
2971 potential_page_fault(s);
2972 gen_helper_stam(cpu_env, r1, o->in2, r3);
2973 tcg_temp_free_i32(r1);
2974 tcg_temp_free_i32(r3);
/* STORE CHARACTERS UNDER MASK: for contiguous masks do one shift plus a
   single wide store; otherwise loop over set mask bits storing one byte
   at a time.  base (from insn->data) selects low vs. high register
   half.  NOTE(review): the switch/loop scaffolding is elided from this
   view. */
2978 static ExitStatus op_stcm(DisasContext *s, DisasOps *o)
2980 int m3 = get_field(s->fields, m3);
2981 int pos, base = s->insn->data;
2982 TCGv_i64 tmp = tcg_temp_new_i64();
2984 pos = base + ctz32(m3) * 8;
2987 /* Effectively a 32-bit store. */
2988 tcg_gen_shri_i64(tmp, o->in1, pos);
2989 tcg_gen_qemu_st32(tmp, o->in2, get_mem_index(s));
2995 /* Effectively a 16-bit store. */
2996 tcg_gen_shri_i64(tmp, o->in1, pos);
2997 tcg_gen_qemu_st16(tmp, o->in2, get_mem_index(s));
3004 /* Effectively an 8-bit store. */
3005 tcg_gen_shri_i64(tmp, o->in1, pos);
3006 tcg_gen_qemu_st8(tmp, o->in2, get_mem_index(s));
3010 /* This is going to be a sequence of shifts and stores. */
3011 pos = base + 32 - 8;
3014 tcg_gen_shri_i64(tmp, o->in1, pos);
3015 tcg_gen_qemu_st8(tmp, o->in2, get_mem_index(s));
3016 tcg_gen_addi_i64(o->in2, o->in2, 1);
3018 m3 = (m3 << 1) & 0xf;
3023 tcg_temp_free_i64(tmp);
/* STORE MULTIPLE: store registers R1..R3 at the element size given by
   insn->data (4 or 8 bytes), advancing the address each iteration.
   NOTE(review): the loop scaffolding is elided from this view. */
3027 static ExitStatus op_stm(DisasContext *s, DisasOps *o)
3029 int r1 = get_field(s->fields, r1);
3030 int r3 = get_field(s->fields, r3);
3031 int size = s->insn->data;
3032 TCGv_i64 tsize = tcg_const_i64(size);
3036 tcg_gen_qemu_st64(regs[r1], o->in2, get_mem_index(s));
3038 tcg_gen_qemu_st32(regs[r1], o->in2, get_mem_index(s));
3043 tcg_gen_add_i64(o->in2, o->in2, tsize);
3047 tcg_temp_free_i64(tsize);
/* STORE MULTIPLE HIGH: shift each register's high half down (via a
   left shift into the store's significant bits) and store 4 bytes. */
3051 static ExitStatus op_stmh(DisasContext *s, DisasOps *o)
3053 int r1 = get_field(s->fields, r1);
3054 int r3 = get_field(s->fields, r3);
3055 TCGv_i64 t = tcg_temp_new_i64();
3056 TCGv_i64 t4 = tcg_const_i64(4);
3057 TCGv_i64 t32 = tcg_const_i64(32);
3060 tcg_gen_shl_i64(t, regs[r1], t32);
3061 tcg_gen_qemu_st32(t, o->in2, get_mem_index(s));
3065 tcg_gen_add_i64(o->in2, o->in2, t4);
3069 tcg_temp_free_i64(t);
3070 tcg_temp_free_i64(t4);
3071 tcg_temp_free_i64(t32);
/* SEARCH STRING: helper scans memory for the character in regs[0].
   The primary result (found/end address) comes back in o->in1; the
   secondary 64-bit result is fetched with return_low128 into o->in2.
   The helper touches guest memory, hence the potential page fault.  */
3075 static ExitStatus op_srst(DisasContext *s, DisasOps *o)
3077 potential_page_fault(s);
3078 gen_helper_srst(o->in1, cpu_env, regs[0], o->in1, o->in2);
3080 return_low128(o->in2);
/* Plain 64-bit subtraction: out = in1 - in2.  Condition-code setup is
   handled by the per-insn "cout" generator, not here.  */
3084 static ExitStatus op_sub(DisasContext *s, DisasOps *o)
3086 tcg_gen_sub_i64(o->out, o->in1, o->in2);
/* SUBTRACT WITH BORROW: computed as out = in1 + ~in2 + carry, where
   the incoming carry is bit 1 of the current condition code (cc_op is
   widened to 64 bits and shifted right by 1 to extract it).  */
3090 static ExitStatus op_subb(DisasContext *s, DisasOps *o)
3095 tcg_gen_not_i64(o->in2, o->in2);
3096 tcg_gen_add_i64(o->out, o->in1, o->in2);
3098 /* XXX possible optimization point */
3100 cc = tcg_temp_new_i64();
3101 tcg_gen_extu_i32_i64(cc, cc_op);
3102 tcg_gen_shri_i64(cc, cc, 1);
3103 tcg_gen_add_i64(o->out, o->out, cc);
3104 tcg_temp_free_i64(cc);
/* SUPERVISOR CALL: record the SVC number (low byte of the i1 field)
   and the instruction length in the CPU state, then raise EXCP_SVC.
   Control never returns to this TB, so report EXIT_NORETURN.  */
3108 static ExitStatus op_svc(DisasContext *s, DisasOps *o)
3115 t = tcg_const_i32(get_field(s->fields, i1) & 0xff);
3116 tcg_gen_st_i32(t, cpu_env, offsetof(CPUS390XState, int_svc_code));
3117 tcg_temp_free_i32(t);
/* ILEN is derived from how far the decoder advanced (next_pc - pc).  */
3119 t = tcg_const_i32(s->next_pc - s->pc);
3120 tcg_gen_st_i32(t, cpu_env, offsetof(CPUS390XState, int_svc_ilen));
3121 tcg_temp_free_i32(t);
3123 gen_exception(EXCP_SVC);
3124 return EXIT_NORETURN;
/* TEST DATA CLASS (short/long/extended BFP): the helper classifies the
   float operand against the mask in in2 and writes the result directly
   into the cc_op global.  The 128-bit variant takes both halves.  */
3127 static ExitStatus op_tceb(DisasContext *s, DisasOps *o)
3129 gen_helper_tceb(cc_op, o->in1, o->in2);
3134 static ExitStatus op_tcdb(DisasContext *s, DisasOps *o)
3136 gen_helper_tcdb(cc_op, o->in1, o->in2);
3141 static ExitStatus op_tcxb(DisasContext *s, DisasOps *o)
3143 gen_helper_tcxb(cc_op, o->out, o->out2, o->in2);
3148 #ifndef CONFIG_USER_ONLY
/* TEST PROTECTION (system-mode only): helper probes the address in
   addr1 with the key in in2 and sets the condition code.  */
3149 static ExitStatus op_tprot(DisasContext *s, DisasOps *o)
3151 potential_page_fault(s);
3152 gen_helper_tprot(cc_op, o->addr1, o->in2);
/* TRANSLATE: helper rewrites l1+1 bytes at addr1 through the table at
   in2.  Memory access in the helper may fault.  */
3158 static ExitStatus op_tr(DisasContext *s, DisasOps *o)
3160 TCGv_i32 l = tcg_const_i32(get_field(s->fields, l1));
3161 potential_page_fault(s);
3162 gen_helper_tr(cpu_env, l, o->addr1, o->in2);
3163 tcg_temp_free_i32(l);
/* UNPACK: helper converts the packed field at in2 into zoned format at
   addr1; l1 is the length field from the instruction.  */
3168 static ExitStatus op_unpk(DisasContext *s, DisasOps *o)
3170 TCGv_i32 l = tcg_const_i32(get_field(s->fields, l1));
3171 potential_page_fault(s);
3172 gen_helper_unpk(cpu_env, l, o->addr1, o->in2);
3173 tcg_temp_free_i32(l);
/* EXCLUSIVE OR (character): helper XORs l1+1 bytes at in2 into addr1
   and returns the resulting condition code in cc_op.  */
3177 static ExitStatus op_xc(DisasContext *s, DisasOps *o)
3179 TCGv_i32 l = tcg_const_i32(get_field(s->fields, l1));
3180 potential_page_fault(s);
3181 gen_helper_xc(cc_op, cpu_env, l, o->addr1, o->in2);
3182 tcg_temp_free_i32(l);
/* Bitwise XOR of the two operands; CC handled by the cout generator.  */
3187 static ExitStatus op_xor(DisasContext *s, DisasOps *o)
3189 tcg_gen_xor_i64(o->out, o->in1, o->in2);
/* XOR IMMEDIATE (XIHF/XILF family): insn->data packs (size << 8) |
   shift.  The immediate is moved to its field position, XORed in, and
   the condition code is computed from only the affected bits.  */
3193 static ExitStatus op_xori(DisasContext *s, DisasOps *o)
3195 int shift = s->insn->data & 0xff;
3196 int size = s->insn->data >> 8;
3197 uint64_t mask = ((1ull << size) - 1) << shift;
3200 tcg_gen_shli_i64(o->in2, o->in2, shift);
3201 tcg_gen_xor_i64(o->out, o->in1, o->in2);
3203 /* Produce the CC from only the bits manipulated.  */
3204 tcg_gen_andi_i64(cc_dst, o->out, mask);
3205 set_cc_nz_u64(s, cc_dst);
/* Produce constant-zero outputs (used e.g. to clear register halves or
   pairs); op_zero2 presumably also zeroes out2 -- the extra line is not
   visible here.  */
3209 static ExitStatus op_zero(DisasContext *s, DisasOps *o)
3211 o->out = tcg_const_i64(0);
3215 static ExitStatus op_zero2(DisasContext *s, DisasOps *o)
3217 o->out = tcg_const_i64(0);
3223 /* ====================================================================== */
3224 /* The "Cc OUTput" generators.  Given the generated output (and in some cases
3225 the original inputs), update the various cc data structures in order to
3226 be able to compute the new condition code. */
/* Absolute value / negated absolute value of the result.  */
3228 static void cout_abs32(DisasContext *s, DisasOps *o)
3230 gen_op_update1_cc_i64(s, CC_OP_ABS_32, o->out);
3233 static void cout_abs64(DisasContext *s, DisasOps *o)
3235 gen_op_update1_cc_i64(s, CC_OP_ABS_64, o->out);
/* Signed / unsigned / carry-in additions: record both inputs and the
   result so the CC helper can derive overflow and carry.  */
3238 static void cout_adds32(DisasContext *s, DisasOps *o)
3240 gen_op_update3_cc_i64(s, CC_OP_ADD_32, o->in1, o->in2, o->out);
3243 static void cout_adds64(DisasContext *s, DisasOps *o)
3245 gen_op_update3_cc_i64(s, CC_OP_ADD_64, o->in1, o->in2, o->out);
3248 static void cout_addu32(DisasContext *s, DisasOps *o)
3250 gen_op_update3_cc_i64(s, CC_OP_ADDU_32, o->in1, o->in2, o->out);
3253 static void cout_addu64(DisasContext *s, DisasOps *o)
3255 gen_op_update3_cc_i64(s, CC_OP_ADDU_64, o->in1, o->in2, o->out);
3258 static void cout_addc32(DisasContext *s, DisasOps *o)
3260 gen_op_update3_cc_i64(s, CC_OP_ADDC_32, o->in1, o->in2, o->out);
3263 static void cout_addc64(DisasContext *s, DisasOps *o)
3265 gen_op_update3_cc_i64(s, CC_OP_ADDC_64, o->in1, o->in2, o->out);
/* Signed and unsigned comparisons use only the two inputs.  */
3268 static void cout_cmps32(DisasContext *s, DisasOps *o)
3270 gen_op_update2_cc_i64(s, CC_OP_LTGT_32, o->in1, o->in2);
3273 static void cout_cmps64(DisasContext *s, DisasOps *o)
3275 gen_op_update2_cc_i64(s, CC_OP_LTGT_64, o->in1, o->in2);
3278 static void cout_cmpu32(DisasContext *s, DisasOps *o)
3280 gen_op_update2_cc_i64(s, CC_OP_LTUGTU_32, o->in1, o->in2);
3283 static void cout_cmpu64(DisasContext *s, DisasOps *o)
3285 gen_op_update2_cc_i64(s, CC_OP_LTUGTU_64, o->in1, o->in2);
/* Float results: CC from sign/zero-ness of the value (both halves for
   the 128-bit format).  */
3288 static void cout_f32(DisasContext *s, DisasOps *o)
3290 gen_op_update1_cc_i64(s, CC_OP_NZ_F32, o->out);
3293 static void cout_f64(DisasContext *s, DisasOps *o)
3295 gen_op_update1_cc_i64(s, CC_OP_NZ_F64, o->out);
3298 static void cout_f128(DisasContext *s, DisasOps *o)
3300 gen_op_update2_cc_i64(s, CC_OP_NZ_F128, o->out, o->out2);
3303 static void cout_nabs32(DisasContext *s, DisasOps *o)
3305 gen_op_update1_cc_i64(s, CC_OP_NABS_32, o->out);
3308 static void cout_nabs64(DisasContext *s, DisasOps *o)
3310 gen_op_update1_cc_i64(s, CC_OP_NABS_64, o->out);
3313 static void cout_neg32(DisasContext *s, DisasOps *o)
3315 gen_op_update1_cc_i64(s, CC_OP_COMP_32, o->out);
3318 static void cout_neg64(DisasContext *s, DisasOps *o)
3320 gen_op_update1_cc_i64(s, CC_OP_COMP_64, o->out);
/* Non-zero tests; the 32-bit form zero-extends into cc_dst first so the
   high garbage bits of the 64-bit temp do not affect the result.  */
3323 static void cout_nz32(DisasContext *s, DisasOps *o)
3325 tcg_gen_ext32u_i64(cc_dst, o->out);
3326 gen_op_update1_cc_i64(s, CC_OP_NZ, cc_dst);
3329 static void cout_nz64(DisasContext *s, DisasOps *o)
3331 gen_op_update1_cc_i64(s, CC_OP_NZ, o->out);
/* Sign tests (compare result against zero).  */
3334 static void cout_s32(DisasContext *s, DisasOps *o)
3336 gen_op_update1_cc_i64(s, CC_OP_LTGT0_32, o->out);
3339 static void cout_s64(DisasContext *s, DisasOps *o)
3341 gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, o->out);
/* Signed / unsigned / borrow-in subtractions, mirroring the adds.  */
3344 static void cout_subs32(DisasContext *s, DisasOps *o)
3346 gen_op_update3_cc_i64(s, CC_OP_SUB_32, o->in1, o->in2, o->out);
3349 static void cout_subs64(DisasContext *s, DisasOps *o)
3351 gen_op_update3_cc_i64(s, CC_OP_SUB_64, o->in1, o->in2, o->out);
3354 static void cout_subu32(DisasContext *s, DisasOps *o)
3356 gen_op_update3_cc_i64(s, CC_OP_SUBU_32, o->in1, o->in2, o->out);
3359 static void cout_subu64(DisasContext *s, DisasOps *o)
3361 gen_op_update3_cc_i64(s, CC_OP_SUBU_64, o->in1, o->in2, o->out);
3364 static void cout_subb32(DisasContext *s, DisasOps *o)
3366 gen_op_update3_cc_i64(s, CC_OP_SUBB_32, o->in1, o->in2, o->out);
3369 static void cout_subb64(DisasContext *s, DisasOps *o)
3371 gen_op_update3_cc_i64(s, CC_OP_SUBB_64, o->in1, o->in2, o->out);
/* TEST UNDER MASK: CC derived from value-under-mask.  */
3374 static void cout_tm32(DisasContext *s, DisasOps *o)
3376 gen_op_update2_cc_i64(s, CC_OP_TM_32, o->in1, o->in2);
3379 static void cout_tm64(DisasContext *s, DisasOps *o)
3381 gen_op_update2_cc_i64(s, CC_OP_TM_64, o->in1, o->in2);
3384 /* ====================================================================== */
3385 /* The "PREPeration" generators. These initialize the DisasOps.OUT fields
3386 with the TCG register to which we will write. Used in combination with
3387 the "wout" generators, in some cases we need a new temporary, and in
3388 some cases we can write to a TCG global. */
/* Fresh temporary destination(s).  */
3390 static void prep_new(DisasContext *s, DisasFields *f, DisasOps *o)
3392 o->out = tcg_temp_new_i64();
3395 static void prep_new_P(DisasContext *s, DisasFields *f, DisasOps *o)
3397 o->out = tcg_temp_new_i64();
3398 o->out2 = tcg_temp_new_i64();
/* Write directly into the r1 GPR global.  */
3401 static void prep_r1(DisasContext *s, DisasFields *f, DisasOps *o)
3403 o->out = regs[get_field(f, r1)];
/* Even/odd GPR pair r1,r1+1; g_out/g_out2 mark the outputs as globals
   so translate_one does not free them.  */
3407 static void prep_r1_P(DisasContext *s, DisasFields *f, DisasOps *o)
3409 /* ??? Specification exception: r1 must be even. */
3410 int r1 = get_field(f, r1);
3412 o->out2 = regs[(r1 + 1) & 15];
3413 o->g_out = o->g_out2 = true;
/* FPR destination, and the 128-bit FPR pair f1,f1+2.  */
3416 static void prep_f1(DisasContext *s, DisasFields *f, DisasOps *o)
3418 o->out = fregs[get_field(f, r1)];
3422 static void prep_x1(DisasContext *s, DisasFields *f, DisasOps *o)
3424 /* ??? Specification exception: r1 must be < 14. */
3425 int r1 = get_field(f, r1);
3427 o->out2 = fregs[(r1 + 2) & 15];
3428 o->g_out = o->g_out2 = true;
3431 /* ====================================================================== */
3432 /* The "Write OUTput" generators. These generally perform some non-trivial
3433 copy of data to TCG globals, or to main memory. The trivial cases are
3434 generally handled by having a "prep" generator install the TCG global
3435 as the destination of the operation. */
/* Full 64-bit GPR write.  */
3437 static void wout_r1(DisasContext *s, DisasFields *f, DisasOps *o)
3439 store_reg(get_field(f, r1), o->out);
/* Insert only the low 8 / 16 bits of the result into r1, preserving the
   rest of the register.  */
3442 static void wout_r1_8(DisasContext *s, DisasFields *f, DisasOps *o)
3444 int r1 = get_field(f, r1);
3445 tcg_gen_deposit_i64(regs[r1], regs[r1], o->out, 0, 8);
3448 static void wout_r1_16(DisasContext *s, DisasFields *f, DisasOps *o)
3450 int r1 = get_field(f, r1);
3451 tcg_gen_deposit_i64(regs[r1], regs[r1], o->out, 0, 16);
/* 32-bit write to the low half of r1.  */
3454 static void wout_r1_32(DisasContext *s, DisasFields *f, DisasOps *o)
3456 store_reg32_i64(get_field(f, r1), o->out);
/* Two 32-bit results to the even/odd pair r1,r1+1.  */
3459 static void wout_r1_P32(DisasContext *s, DisasFields *f, DisasOps *o)
3461 /* ??? Specification exception: r1 must be even. */
3462 int r1 = get_field(f, r1);
3463 store_reg32_i64(r1, o->out)
3464 store_reg32_i64((r1 + 1) & 15, o->out2);
/* One 64-bit result split across the pair: low half to r1+1, high half
   to r1.  Note this clobbers o->out with the shifted value.  */
3467 static void wout_r1_D32(DisasContext *s, DisasFields *f, DisasOps *o)
3469 /* ??? Specification exception: r1 must be even. */
3470 int r1 = get_field(f, r1);
3471 store_reg32_i64((r1 + 1) & 15, o->out);
3472 tcg_gen_shri_i64(o->out, o->out, 32);
3473 store_reg32_i64(r1, o->out);
/* Short (32-bit) and long (64-bit) float register writes.  */
3476 static void wout_e1(DisasContext *s, DisasFields *f, DisasOps *o)
3478 store_freg32_i64(get_field(f, r1), o->out);
3481 static void wout_f1(DisasContext *s, DisasFields *f, DisasOps *o)
3483 store_freg(get_field(f, r1), o->out);
3486 static void wout_x1(DisasContext *s, DisasFields *f, DisasOps *o)
3488 /* ??? Specification exception: r1 must be < 14. */
3489 int f1 = get_field(s->fields, r1);
3490 store_freg(f1, o->out);
3491 store_freg((f1 + 2) & 15, o->out2);
/* Conditional writes: skip the store entirely when r1 == r2, i.e. when
   the operand registers coincide (used by insns defined as no-ops in
   that case).  */
3494 static void wout_cond_r1r2_32(DisasContext *s, DisasFields *f, DisasOps *o)
3496 if (get_field(f, r1) != get_field(f, r2)) {
3497 store_reg32_i64(get_field(f, r1), o->out);
3501 static void wout_cond_e1e2(DisasContext *s, DisasFields *f, DisasOps *o)
3503 if (get_field(f, r1) != get_field(f, r2)) {
3504 store_freg32_i64(get_field(f, r1), o->out);
/* Memory writes of the result: to the first-operand address (addr1) at
   various widths, or to the second operand address (in2) for m2_32.  */
3508 static void wout_m1_8(DisasContext *s, DisasFields *f, DisasOps *o)
3510 tcg_gen_qemu_st8(o->out, o->addr1, get_mem_index(s));
3513 static void wout_m1_16(DisasContext *s, DisasFields *f, DisasOps *o)
3515 tcg_gen_qemu_st16(o->out, o->addr1, get_mem_index(s));
3518 static void wout_m1_32(DisasContext *s, DisasFields *f, DisasOps *o)
3520 tcg_gen_qemu_st32(o->out, o->addr1, get_mem_index(s));
3523 static void wout_m1_64(DisasContext *s, DisasFields *f, DisasOps *o)
3525 tcg_gen_qemu_st64(o->out, o->addr1, get_mem_index(s));
3528 static void wout_m2_32(DisasContext *s, DisasFields *f, DisasOps *o)
3530 tcg_gen_qemu_st32(o->out, o->in2, get_mem_index(s));
3533 /* ====================================================================== */
3534 /* The "INput 1" generators. These load the first operand to an insn. */
/* Copy of r1 into a fresh temp (safe to clobber).  */
3536 static void in1_r1(DisasContext *s, DisasFields *f, DisasOps *o)
3538 o->in1 = load_reg(get_field(f, r1));
/* Alias the r1 global directly ("_o" = original, not copied; the
   corresponding g_in flag line is not visible here).  */
3541 static void in1_r1_o(DisasContext *s, DisasFields *f, DisasOps *o)
3543 o->in1 = regs[get_field(f, r1)];
/* Sign-/zero-extended low 32 bits, or the high 32 bits, of r1.  */
3547 static void in1_r1_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3549 o->in1 = tcg_temp_new_i64();
3550 tcg_gen_ext32s_i64(o->in1, regs[get_field(f, r1)]);
3553 static void in1_r1_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3555 o->in1 = tcg_temp_new_i64();
3556 tcg_gen_ext32u_i64(o->in1, regs[get_field(f, r1)]);
3559 static void in1_r1_sr32(DisasContext *s, DisasFields *f, DisasOps *o)
3561 o->in1 = tcg_temp_new_i64();
3562 tcg_gen_shri_i64(o->in1, regs[get_field(f, r1)], 32);
/* The odd register of the r1 pair (r1+1), in several flavours.  */
3565 static void in1_r1p1(DisasContext *s, DisasFields *f, DisasOps *o)
3567 /* ??? Specification exception: r1 must be even. */
3568 int r1 = get_field(f, r1);
3569 o->in1 = load_reg((r1 + 1) & 15);
3572 static void in1_r1p1_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3574 /* ??? Specification exception: r1 must be even. */
3575 int r1 = get_field(f, r1);
3576 o->in1 = tcg_temp_new_i64();
3577 tcg_gen_ext32s_i64(o->in1, regs[(r1 + 1) & 15]);
3580 static void in1_r1p1_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3582 /* ??? Specification exception: r1 must be even. */
3583 int r1 = get_field(f, r1);
3584 o->in1 = tcg_temp_new_i64();
3585 tcg_gen_ext32u_i64(o->in1, regs[(r1 + 1) & 15]);
/* 64-bit value assembled from the 32-bit pair: r1 is the high half,
   r1+1 the low half.  */
3588 static void in1_r1_D32(DisasContext *s, DisasFields *f, DisasOps *o)
3590 /* ??? Specification exception: r1 must be even. */
3591 int r1 = get_field(f, r1);
3592 o->in1 = tcg_temp_new_i64();
3593 tcg_gen_concat32_i64(o->in1, regs[r1 + 1], regs[r1]);
/* r2 / r3 variants of the above.  */
3596 static void in1_r2(DisasContext *s, DisasFields *f, DisasOps *o)
3598 o->in1 = load_reg(get_field(f, r2));
3601 static void in1_r3(DisasContext *s, DisasFields *f, DisasOps *o)
3603 o->in1 = load_reg(get_field(f, r3));
3606 static void in1_r3_o(DisasContext *s, DisasFields *f, DisasOps *o)
3608 o->in1 = regs[get_field(f, r3)];
3612 static void in1_r3_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3614 o->in1 = tcg_temp_new_i64();
3615 tcg_gen_ext32s_i64(o->in1, regs[get_field(f, r3)]);
3618 static void in1_r3_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3620 o->in1 = tcg_temp_new_i64();
3621 tcg_gen_ext32u_i64(o->in1, regs[get_field(f, r3)]);
/* Float register inputs: short float copy, long float alias, and the
   128-bit pair alias (placed in out/out2 for in-place float ops).  */
3624 static void in1_e1(DisasContext *s, DisasFields *f, DisasOps *o)
3626 o->in1 = load_freg32_i64(get_field(f, r1));
3629 static void in1_f1_o(DisasContext *s, DisasFields *f, DisasOps *o)
3631 o->in1 = fregs[get_field(f, r1)];
3635 static void in1_x1_o(DisasContext *s, DisasFields *f, DisasOps *o)
3637 /* ??? Specification exception: r1 must be < 14. */
3638 int r1 = get_field(f, r1);
3640 o->out2 = fregs[(r1 + 2) & 15];
3641 o->g_out = o->g_out2 = true;
/* Effective-address computations placed in addr1.  */
3644 static void in1_la1(DisasContext *s, DisasFields *f, DisasOps *o)
3646 o->addr1 = get_address(s, 0, get_field(f, b1), get_field(f, d1));
3649 static void in1_la2(DisasContext *s, DisasFields *f, DisasOps *o)
3651 int x2 = have_field(f, x2) ? get_field(f, x2) : 0;
3652 o->addr1 = get_address(s, x2, get_field(f, b2), get_field(f, d2));
/* Memory loads from the first-operand address at each width and
   extension (presumably each calls in1_la1 first; that line is not
   visible here).  */
3655 static void in1_m1_8u(DisasContext *s, DisasFields *f, DisasOps *o)
3658 o->in1 = tcg_temp_new_i64();
3659 tcg_gen_qemu_ld8u(o->in1, o->addr1, get_mem_index(s));
3662 static void in1_m1_16s(DisasContext *s, DisasFields *f, DisasOps *o)
3665 o->in1 = tcg_temp_new_i64();
3666 tcg_gen_qemu_ld16s(o->in1, o->addr1, get_mem_index(s));
3669 static void in1_m1_16u(DisasContext *s, DisasFields *f, DisasOps *o)
3672 o->in1 = tcg_temp_new_i64();
3673 tcg_gen_qemu_ld16u(o->in1, o->addr1, get_mem_index(s));
3676 static void in1_m1_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3679 o->in1 = tcg_temp_new_i64();
3680 tcg_gen_qemu_ld32s(o->in1, o->addr1, get_mem_index(s));
3683 static void in1_m1_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3686 o->in1 = tcg_temp_new_i64();
3687 tcg_gen_qemu_ld32u(o->in1, o->addr1, get_mem_index(s));
3690 static void in1_m1_64(DisasContext *s, DisasFields *f, DisasOps *o)
3693 o->in1 = tcg_temp_new_i64();
3694 tcg_gen_qemu_ld64(o->in1, o->addr1, get_mem_index(s));
3697 /* ====================================================================== */
3698 /* The "INput 2" generators. These load the second operand to an insn. */
/* Register aliases and extended register reads, mirroring the in1_*
   family but sourcing r1/r2/r3 into in2.  */
3700 static void in2_r1_o(DisasContext *s, DisasFields *f, DisasOps *o)
3702 o->in2 = regs[get_field(f, r1)];
3706 static void in2_r1_16u(DisasContext *s, DisasFields *f, DisasOps *o)
3708 o->in2 = tcg_temp_new_i64();
3709 tcg_gen_ext16u_i64(o->in2, regs[get_field(f, r1)]);
3712 static void in2_r1_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3714 o->in2 = tcg_temp_new_i64();
3715 tcg_gen_ext32u_i64(o->in2, regs[get_field(f, r1)]);
3718 static void in2_r2(DisasContext *s, DisasFields *f, DisasOps *o)
3720 o->in2 = load_reg(get_field(f, r2));
3723 static void in2_r2_o(DisasContext *s, DisasFields *f, DisasOps *o)
3725 o->in2 = regs[get_field(f, r2)];
/* r2, but only when r2 != 0 (r0 means "no operand" for these insns;
   the r2 == 0 branch is not visible here).  */
3729 static void in2_r2_nz(DisasContext *s, DisasFields *f, DisasOps *o)
3731 int r2 = get_field(f, r2);
3733 o->in2 = load_reg(r2);
3737 static void in2_r2_8s(DisasContext *s, DisasFields *f, DisasOps *o)
3739 o->in2 = tcg_temp_new_i64();
3740 tcg_gen_ext8s_i64(o->in2, regs[get_field(f, r2)]);
3743 static void in2_r2_8u(DisasContext *s, DisasFields *f, DisasOps *o)
3745 o->in2 = tcg_temp_new_i64();
3746 tcg_gen_ext8u_i64(o->in2, regs[get_field(f, r2)]);
3749 static void in2_r2_16s(DisasContext *s, DisasFields *f, DisasOps *o)
3751 o->in2 = tcg_temp_new_i64();
3752 tcg_gen_ext16s_i64(o->in2, regs[get_field(f, r2)]);
3755 static void in2_r2_16u(DisasContext *s, DisasFields *f, DisasOps *o)
3757 o->in2 = tcg_temp_new_i64();
3758 tcg_gen_ext16u_i64(o->in2, regs[get_field(f, r2)]);
3761 static void in2_r3(DisasContext *s, DisasFields *f, DisasOps *o)
3763 o->in2 = load_reg(get_field(f, r3));
3766 static void in2_r2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3768 o->in2 = tcg_temp_new_i64();
3769 tcg_gen_ext32s_i64(o->in2, regs[get_field(f, r2)]);
3772 static void in2_r2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3774 o->in2 = tcg_temp_new_i64();
3775 tcg_gen_ext32u_i64(o->in2, regs[get_field(f, r2)]);
/* Float second operands: short copy, long alias, 128-bit pair alias.  */
3778 static void in2_e2(DisasContext *s, DisasFields *f, DisasOps *o)
3780 o->in2 = load_freg32_i64(get_field(f, r2));
3783 static void in2_f2_o(DisasContext *s, DisasFields *f, DisasOps *o)
3785 o->in2 = fregs[get_field(f, r2)];
3789 static void in2_x2_o(DisasContext *s, DisasFields *f, DisasOps *o)
3791 /* ??? Specification exception: r2 must be < 14. */
3792 int r2 = get_field(f, r2);
3794 o->in2 = fregs[(r2 + 2) & 15];
3795 o->g_in1 = o->g_in2 = true;
/* Effective addresses: register-indirect (base = r2, no index/disp),
   and the full x2/b2/d2 form; ri2 is a PC-relative target.  */
3798 static void in2_ra2(DisasContext *s, DisasFields *f, DisasOps *o)
3800 o->in2 = get_address(s, 0, get_field(f, r2), 0);
3803 static void in2_a2(DisasContext *s, DisasFields *f, DisasOps *o)
3805 int x2 = have_field(f, x2) ? get_field(f, x2) : 0;
3806 o->in2 = get_address(s, x2, get_field(f, b2), get_field(f, d2));
3809 static void in2_ri2(DisasContext *s, DisasFields *f, DisasOps *o)
3811 o->in2 = tcg_const_i64(s->pc + (int64_t)get_field(f, i2) * 2);
/* Shift amounts, masked to the operand width.  */
3814 static void in2_sh32(DisasContext *s, DisasFields *f, DisasOps *o)
3816 help_l2_shift(s, f, o, 31);
3819 static void in2_sh64(DisasContext *s, DisasFields *f, DisasOps *o)
3821 help_l2_shift(s, f, o, 63);
/* Memory loads of the second operand (m2 = via computed address, mri2 =
   via the PC-relative address).  Note these load in-place over in2.  */
3824 static void in2_m2_8u(DisasContext *s, DisasFields *f, DisasOps *o)
3827 tcg_gen_qemu_ld8u(o->in2, o->in2, get_mem_index(s));
3830 static void in2_m2_16s(DisasContext *s, DisasFields *f, DisasOps *o)
3833 tcg_gen_qemu_ld16s(o->in2, o->in2, get_mem_index(s));
3836 static void in2_m2_16u(DisasContext *s, DisasFields *f, DisasOps *o)
3839 tcg_gen_qemu_ld16u(o->in2, o->in2, get_mem_index(s));
3842 static void in2_m2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3845 tcg_gen_qemu_ld32s(o->in2, o->in2, get_mem_index(s));
3848 static void in2_m2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3851 tcg_gen_qemu_ld32u(o->in2, o->in2, get_mem_index(s));
3854 static void in2_m2_64(DisasContext *s, DisasFields *f, DisasOps *o)
3857 tcg_gen_qemu_ld64(o->in2, o->in2, get_mem_index(s));
3860 static void in2_mri2_16u(DisasContext *s, DisasFields *f, DisasOps *o)
3863 tcg_gen_qemu_ld16u(o->in2, o->in2, get_mem_index(s));
3866 static void in2_mri2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
3869 tcg_gen_qemu_ld32s(o->in2, o->in2, get_mem_index(s));
3872 static void in2_mri2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3875 tcg_gen_qemu_ld32u(o->in2, o->in2, get_mem_index(s));
3878 static void in2_mri2_64(DisasContext *s, DisasFields *f, DisasOps *o)
3881 tcg_gen_qemu_ld64(o->in2, o->in2, get_mem_index(s));
/* Immediate second operands, with the usual extensions; the _shl forms
   position the immediate per insn->data (e.g. for the high-half
   immediate insns).  */
3884 static void in2_i2(DisasContext *s, DisasFields *f, DisasOps *o)
3886 o->in2 = tcg_const_i64(get_field(f, i2));
3889 static void in2_i2_8u(DisasContext *s, DisasFields *f, DisasOps *o)
3891 o->in2 = tcg_const_i64((uint8_t)get_field(f, i2));
3894 static void in2_i2_16u(DisasContext *s, DisasFields *f, DisasOps *o)
3896 o->in2 = tcg_const_i64((uint16_t)get_field(f, i2));
3899 static void in2_i2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
3901 o->in2 = tcg_const_i64((uint32_t)get_field(f, i2));
3904 static void in2_i2_16u_shl(DisasContext *s, DisasFields *f, DisasOps *o)
3906 uint64_t i2 = (uint16_t)get_field(f, i2);
3907 o->in2 = tcg_const_i64(i2 << s->insn->data);
3910 static void in2_i2_32u_shl(DisasContext *s, DisasFields *f, DisasOps *o)
3912 uint64_t i2 = (uint32_t)get_field(f, i2);
3913 o->in2 = tcg_const_i64(i2 << s->insn->data);
3916 /* ====================================================================== */
3918 /* Find opc within the table of insns. This is formulated as a switch
3919 statement so that (1) we get compile-time notice of cut-paste errors
3920 for duplicated opcodes, and (2) the compiler generates the binary
3921 search tree, rather than us having to post-process the table. */
/* C() is D() with a zero data field.  */
3923 #define C(OPC, NM, FT, FC, I1, I2, P, W, OP, CC) \
3924 D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, 0)
/* First expansion of insn-data.def: an enum of insn_<NAME> indices.  */
3926 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) insn_ ## NM,
3928 enum DisasInsnEnum {
3929 #include "insn-data.def"
/* Second expansion: the DisasInsn table itself, wiring each name to its
   in1/in2/prep/wout/cout/op generator functions.  */
3933 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) { \
3938 .help_in1 = in1_##I1, \
3939 .help_in2 = in2_##I2, \
3940 .help_prep = prep_##P, \
3941 .help_wout = wout_##W, \
3942 .help_cout = cout_##CC, \
3943 .help_op = op_##OP, \
3947 /* Allow 0 to be used for NULL in the table below. */
3955 static const DisasInsn insn_info[] = {
3956 #include "insn-data.def"
/* Third expansion: a switch mapping 16-bit opcode to table entry.  */
3960 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) \
3961 case OPC: return &insn_info[insn_ ## NM];
3963 static const DisasInsn *lookup_opc(uint16_t opc)
3966 #include "insn-data.def"
3975 /* Extract a field from the insn. The INSN should be left-aligned in
3976 the uint64_t so that we can more easily utilize the big-bit-endian
3977 definitions we extract from the Principles of Operation. */
3979 static void extract_field(DisasFields *o, const DisasField *f, uint64_t insn)
3987 /* Zero extract the field from the insn. */
3988 r = (insn << f->beg) >> (64 - f->size);
3990 /* Sign-extend, or un-swap the field as necessary. */
3992 case 0: /* unsigned */
3994 case 1: /* signed */
3995 assert(f->size <= 32);
/* m is the sign bit of the extracted field; the sign-extension
   arithmetic using it is not visible in this excerpt.  */
3996 m = 1u << (f->size - 1);
3999 case 2: /* dl+dh split, signed 20 bit. */
/* Recombine the DH (high 8, sign-carrying) and DL (low 12) pieces of a
   20-bit displacement into one signed value.  */
4000 r = ((int8_t)r << 12) | (r >> 8);
4006 /* Validate that the "compressed" encoding we selected above is valid.
4007 I.e. we haven't made two different original fields overlap. */
4008 assert(((o->presentC >> f->indexC) & 1) == 0);
4009 o->presentC |= 1 << f->indexC;
4010 o->presentO |= 1 << f->indexO;
4012 o->c[f->indexC] = r;
4015 /* Lookup the insn at the current PC, extracting the operands into O and
4016 returning the info struct for the insn. Returns NULL for invalid insn. */
4018 static const DisasInsn *extract_insn(CPUS390XState *env, DisasContext *s,
4021 uint64_t insn, pc = s->pc;
4023 const DisasInsn *info;
/* The first halfword determines the total instruction length (2/4/6
   bytes, encoded in the top two bits of the first opcode byte).  */
4025 insn = ld_code2(env, pc);
4026 op = (insn >> 8) & 0xff;
4027 ilen = get_ilen(op);
4028 s->next_pc = s->pc + ilen;
/* Left-align the full instruction in the 64-bit INSN so field
   definitions can use big-bit-endian offsets (see extract_field).  */
4035 insn = ld_code4(env, pc) << 32;
4038 insn = (insn << 48) | (ld_code4(env, pc + 2) << 16);
4044 /* We can't actually determine the insn format until we've looked up
4045 the full insn opcode. Which we can't do without locating the
4046 secondary opcode. Assume by default that OP2 is at bit 40; for
4047 those smaller insns that don't actually have a secondary opcode
4048 this will correctly result in OP2 = 0. */
4054 case 0xb2: /* S, RRF, RRE */
4055 case 0xb3: /* RRE, RRD, RRF */
4056 case 0xb9: /* RRE, RRF */
4057 case 0xe5: /* SSE, SIL */
/* Secondary opcode in the second byte (bits 8-15 of the insn).  */
4058 op2 = (insn << 8) >> 56;
4062 case 0xc0: /* RIL */
4063 case 0xc2: /* RIL */
4064 case 0xc4: /* RIL */
4065 case 0xc6: /* RIL */
4066 case 0xc8: /* SSF */
4067 case 0xcc: /* RIL */
/* Secondary opcode is the 4-bit field at bits 12-15.  */
4068 op2 = (insn << 12) >> 60;
4070 case 0xd0 ... 0xdf: /* SS */
4076 case 0xee ... 0xf3: /* SS */
4077 case 0xf8 ... 0xfd: /* SS */
/* SS-format insns have no secondary opcode; default case reads the
   byte at bit 40.  */
4081 op2 = (insn << 40) >> 56;
4085 memset(f, 0, sizeof(*f));
4089 /* Lookup the instruction. */
4090 info = lookup_opc(op << 8 | op2);
4092 /* If we found it, extract the operands. */
4094 DisasFormat fmt = info->fmt;
4097 for (i = 0; i < NUM_C_FIELD; ++i) {
4098 extract_field(f, &format_info[fmt].op[i], insn);
/* Decode and emit TCG for exactly one instruction, driving the
   in1/in2/prep/op/wout/cout generator pipeline.  Returns the exit
   status that tells the main loop whether to continue the TB.  */
4104 static ExitStatus translate_one(CPUS390XState *env, DisasContext *s)
4106 const DisasInsn *insn;
4107 ExitStatus ret = NO_EXIT;
4111 insn = extract_insn(env, s, &f);
4113 /* If not found, try the old interpreter. This includes ILLOPC. */
4115 disas_s390_insn(env, s);
/* Map the legacy interpreter's is_jmp result onto ExitStatus.  */
4116 switch (s->is_jmp) {
4124 ret = EXIT_PC_UPDATED;
4127 ret = EXIT_NORETURN;
4137 /* Set up the structures we use to communicate with the helpers. */
4140 o.g_out = o.g_out2 = o.g_in1 = o.g_in2 = false;
4141 TCGV_UNUSED_I64(o.out);
4142 TCGV_UNUSED_I64(o.out2);
4143 TCGV_UNUSED_I64(o.in1);
4144 TCGV_UNUSED_I64(o.in2);
4145 TCGV_UNUSED_I64(o.addr1);
4147 /* Implement the instruction. */
4148 if (insn->help_in1) {
4149 insn->help_in1(s, &f, &o);
4151 if (insn->help_in2) {
4152 insn->help_in2(s, &f, &o);
4154 if (insn->help_prep) {
4155 insn->help_prep(s, &f, &o);
4157 if (insn->help_op) {
4158 ret = insn->help_op(s, &o);
4160 if (insn->help_wout) {
4161 insn->help_wout(s, &f, &o);
4163 if (insn->help_cout) {
4164 insn->help_cout(s, &o);
4167 /* Free any temporaries created by the helpers.  The g_* flags mark
   operands that alias TCG globals (e.g. regs[]/fregs[]) and therefore
   must not be freed. */
4168 if (!TCGV_IS_UNUSED_I64(o.out) && !o.g_out) {
4169 tcg_temp_free_i64(o.out);
4171 if (!TCGV_IS_UNUSED_I64(o.out2) && !o.g_out2) {
4172 tcg_temp_free_i64(o.out2);
4174 if (!TCGV_IS_UNUSED_I64(o.in1) && !o.g_in1) {
4175 tcg_temp_free_i64(o.in1);
4177 if (!TCGV_IS_UNUSED_I64(o.in2) && !o.g_in2) {
4178 tcg_temp_free_i64(o.in2);
4180 if (!TCGV_IS_UNUSED_I64(o.addr1)) {
4181 tcg_temp_free_i64(o.addr1);
4184 /* Advance to the next instruction. */
/* Translate a whole translation block: repeatedly call translate_one
   until a branch/exception/page boundary/insn budget stops us, then
   emit the TB epilogue.  The second entry point (search_pc variant)
   additionally records per-op PC and cc_op so the TB can be
   re-synchronized after a fault.  */
4189 static inline void gen_intermediate_code_internal(CPUS390XState *env,
4190 TranslationBlock *tb,
4194 target_ulong pc_start;
4195 uint64_t next_page_start;
4196 uint16_t *gen_opc_end;
4198 int num_insns, max_insns;
/* In 31-bit (non-64) mode addresses wrap at bit 31.  */
4206 if (!(tb->flags & FLAG_MASK_64)) {
4207 pc_start &= 0x7fffffff;
4212 dc.cc_op = CC_OP_DYNAMIC;
4213 do_debug = dc.singlestep_enabled = env->singlestep_enabled;
4214 dc.is_jmp = DISAS_NEXT;
4216 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
4218 next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
4221 max_insns = tb->cflags & CF_COUNT_MASK;
4222 if (max_insns == 0) {
4223 max_insns = CF_COUNT_MASK;
/* search_pc bookkeeping: pad skipped opc slots, then record the guest
   PC, cc_op and icount for the op about to be generated.  */
4230 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
4234 tcg_ctx.gen_opc_instr_start[lj++] = 0;
4237 tcg_ctx.gen_opc_pc[lj] = dc.pc;
4238 gen_opc_cc_op[lj] = dc.cc_op;
4239 tcg_ctx.gen_opc_instr_start[lj] = 1;
4240 tcg_ctx.gen_opc_icount[lj] = num_insns;
4242 if (++num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
4246 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
4247 tcg_gen_debug_insn_start(dc.pc);
/* Stop at breakpoints so the debug exception is raised with a clean
   PSW (EXIT_PC_STALE forces the PC to be written back).  */
4251 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4252 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4253 if (bp->pc == dc.pc) {
4254 status = EXIT_PC_STALE;
4260 if (status == NO_EXIT) {
4261 status = translate_one(env, &dc);
4264 /* If we reach a page boundary, are single stepping,
4265 or exhaust instruction count, stop generation. */
4266 if (status == NO_EXIT
4267 && (dc.pc >= next_page_start
4268 || tcg_ctx.gen_opc_ptr >= gen_opc_end
4269 || num_insns >= max_insns
4271 || env->singlestep_enabled)) {
4272 status = EXIT_PC_STALE;
4274 } while (status == NO_EXIT);
4276 if (tb->cflags & CF_LAST_IO) {
/* Epilogue: write back PSW address / cc state as required by how the
   loop ended, then raise EXCP_DEBUG when single-stepping.  */
4285 update_psw_addr(&dc);
4287 case EXIT_PC_UPDATED:
4288 if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
4289 gen_op_calc_cc(&dc);
4291 /* Next TB starts off with CC_OP_DYNAMIC,
4292 so make sure the cc op type is in env */
4293 gen_op_set_cc_op(&dc);
4296 gen_exception(EXCP_DEBUG);
4298 /* Generate the return instruction */
4306 gen_icount_end(tb, num_insns);
4307 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
4309 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
4312 tcg_ctx.gen_opc_instr_start[lj++] = 0;
4315 tb->size = dc.pc - pc_start;
4316 tb->icount = num_insns;
4319 #if defined(S390X_DEBUG_DISAS)
4320 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4321 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4322 log_target_disas(env, pc_start, dc.pc - pc_start, 1);
/* Public entry points: plain translation, and the search_pc variant
   (flag = 1) used when re-translating to recover a faulting PC.  */
4328 void gen_intermediate_code (CPUS390XState *env, struct TranslationBlock *tb)
4330 gen_intermediate_code_internal(env, tb, 0);
4333 void gen_intermediate_code_pc (CPUS390XState *env, struct TranslationBlock *tb)
4335 gen_intermediate_code_internal(env, tb, 1);
4338 void restore_state_to_opc(CPUS390XState *env, TranslationBlock *tb, int pc_pos)
4341 env->psw.addr = tcg_ctx.gen_opc_pc[pc_pos];
4342 cc_op = gen_opc_cc_op[pc_pos];
4343 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {