target-sh4/translate.c
1 /*
2  *  SH4 translation
3  *
4  *  Copyright (c) 2005 Samuel Tardieu
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19
20 #define DEBUG_DISAS
21 #define SH4_DEBUG_DISAS
22 //#define SH4_SINGLE_STEP
23
24 #include "cpu.h"
25 #include "disas.h"
26 #include "tcg-op.h"
27
28 #include "helper.h"
29 #define GEN_HELPER 1
30 #include "helper.h"
31
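/* Translation state for the TB being translated.  'sr' and 'fpscr' are
   translate-time copies of the guest SR and FPSCR, used by the REG()/FREG()
   macros below to resolve register banking and FP precision/size modes;
   'memidx' is the MMU index passed to the qemu_ld/st ops; 'delayed_pc'
   holds a statically known delayed-branch target, or (uint32_t)-1 when the
   target is only known at run time; 'bstate' is one of the BS_* codes
   describing how the TB ends. */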
32 typedef struct DisasContext {
33     struct TranslationBlock *tb;
34     target_ulong pc;
35     uint32_t sr;
36     uint32_t fpscr;
37     uint16_t opcode;
38     uint32_t flags;
39     int bstate;
40     int memidx;
41     uint32_t delayed_pc;
42     int singlestep_enabled;
43     uint32_t features;
44     int has_movcal;
45 } DisasContext;
46
47 #if defined(CONFIG_USER_ONLY)
48 #define IS_USER(ctx) 1
49 #else
50 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
51 #endif
52
53 enum {
54     BS_NONE     = 0, /* The TB ends without reaching a branch or an
55                       * exception condition
56                       */
57     BS_STOP     = 1, /* We want to stop translation for any reason */
58     BS_BRANCH   = 2, /* We reached a branch condition     */
59     BS_EXCP     = 3, /* We reached an exception condition */
60 };
61
62 /* global register indexes */
63 static TCGv_ptr cpu_env;
64 static TCGv cpu_gregs[24];
65 static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
66 static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
67 static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
68 static TCGv cpu_fregs[32];
69
70 /* internal register indexes */
71 static TCGv cpu_flags, cpu_delayed_pc;
72
73 static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
74
75 #include "gen-icount.h"
76
77 static void sh4_translate_init(void)
78 {
79     int i;
80     static int done_init = 0;
81     static const char * const gregnames[24] = {
82         "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
83         "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
84         "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
85         "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
86         "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
87     };
88     static const char * const fregnames[32] = {
89          "FPR0_BANK0",  "FPR1_BANK0",  "FPR2_BANK0",  "FPR3_BANK0",
90          "FPR4_BANK0",  "FPR5_BANK0",  "FPR6_BANK0",  "FPR7_BANK0",
91          "FPR8_BANK0",  "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
92         "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
93          "FPR0_BANK1",  "FPR1_BANK1",  "FPR2_BANK1",  "FPR3_BANK1",
94          "FPR4_BANK1",  "FPR5_BANK1",  "FPR6_BANK1",  "FPR7_BANK1",
95          "FPR8_BANK1",  "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
96         "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
97     };
98
99     if (done_init)
100         return;
101
102     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
103
104     for (i = 0; i < 24; i++)
105         cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
106                                               offsetof(CPUSH4State, gregs[i]),
107                                               gregnames[i]);
108
109     cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
110                                     offsetof(CPUSH4State, pc), "PC");
111     cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
112                                     offsetof(CPUSH4State, sr), "SR");
113     cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
114                                      offsetof(CPUSH4State, ssr), "SSR");
115     cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
116                                      offsetof(CPUSH4State, spc), "SPC");
117     cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
118                                      offsetof(CPUSH4State, gbr), "GBR");
119     cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
120                                      offsetof(CPUSH4State, vbr), "VBR");
121     cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
122                                      offsetof(CPUSH4State, sgr), "SGR");
123     cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
124                                      offsetof(CPUSH4State, dbr), "DBR");
125     cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
126                                       offsetof(CPUSH4State, mach), "MACH");
127     cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
128                                       offsetof(CPUSH4State, macl), "MACL");
129     cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
130                                     offsetof(CPUSH4State, pr), "PR");
131     cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
132                                        offsetof(CPUSH4State, fpscr), "FPSCR");
133     cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
134                                       offsetof(CPUSH4State, fpul), "FPUL");
135
136     cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
137                                        offsetof(CPUSH4State, flags), "_flags_");
138     cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
139                                             offsetof(CPUSH4State, delayed_pc),
140                                             "_delayed_pc_");
141     cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
142                                       offsetof(CPUSH4State, ldst), "_ldst_");
143
144     for (i = 0; i < 32; i++)
145         cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
146                                               offsetof(CPUSH4State, fregs[i]),
147                                               fregnames[i]);
148
149     /* register helpers */
150 #define GEN_HELPER 2
151 #include "helper.h"
152
153     done_init = 1;
154 }
155
156 void cpu_dump_state(CPUSH4State * env, FILE * f,
157                     int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
158                     int flags)
159 {
160     int i;
161     cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
162                 env->pc, env->sr, env->pr, env->fpscr);
163     cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
164                 env->spc, env->ssr, env->gbr, env->vbr);
165     cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
166                 env->sgr, env->dbr, env->delayed_pc, env->fpul);
167     for (i = 0; i < 24; i += 4) {
168         cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
169                     i, env->gregs[i], i + 1, env->gregs[i + 1],
170                     i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
171     }
172     if (env->flags & DELAY_SLOT) {
173         cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
174                     env->delayed_pc);
175     } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
176         cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
177                     env->delayed_pc);
178     }
179 }
180
181 typedef struct {
182     const char *name;
183     int id;
184     uint32_t pvr;
185     uint32_t prr;
186     uint32_t cvr;
187     uint32_t features;
188 } sh4_def_t;
189
190 static sh4_def_t sh4_defs[] = {
191     {
192         .name = "SH7750R",
193         .id = SH_CPU_SH7750R,
194         .pvr = 0x00050000,
195         .prr = 0x00000100,
196         .cvr = 0x00110000,
197         .features = SH_FEATURE_BCR3_AND_BCR4,
198     }, {
199         .name = "SH7751R",
200         .id = SH_CPU_SH7751R,
201         .pvr = 0x04050005,
202         .prr = 0x00000113,
203         .cvr = 0x00110000,      /* Neutered caches, should be 0x20480000 */
204         .features = SH_FEATURE_BCR3_AND_BCR4,
205     }, {
206         .name = "SH7785",
207         .id = SH_CPU_SH7785,
208         .pvr = 0x10300700,
209         .prr = 0x00000200,
210         .cvr = 0x71440211,
211         .features = SH_FEATURE_SH4A,
212      },
213 };
214
215 static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
216 {
217     int i;
218
219     if (strcasecmp(name, "any") == 0)
220         return &sh4_defs[0];
221
222     for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
223         if (strcasecmp(name, sh4_defs[i].name) == 0)
224             return &sh4_defs[i];
225
226     return NULL;
227 }
228
229 void sh4_cpu_list(FILE *f, fprintf_function cpu_fprintf)
230 {
231     int i;
232
233     for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
234         (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
235 }
236
237 static void cpu_register(CPUSH4State *env, const sh4_def_t *def)
238 {
239     env->pvr = def->pvr;
240     env->prr = def->prr;
241     env->cvr = def->cvr;
242     env->id = def->id;
243 }
244
245 SuperHCPU *cpu_sh4_init(const char *cpu_model)
246 {
247     SuperHCPU *cpu;
248     CPUSH4State *env;
249     const sh4_def_t *def;
250
251     def = cpu_sh4_find_by_name(cpu_model);
252     if (!def)
253         return NULL;
254     cpu = SUPERH_CPU(object_new(TYPE_SUPERH_CPU));
255     env = &cpu->env;
256     env->features = def->features;
257     sh4_translate_init();
258     env->cpu_model_str = cpu_model;
259     cpu_reset(CPU(cpu));
260     cpu_register(env, def);
261     qemu_init_vcpu(env);
262     return cpu;
263 }
264
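/* End the TB with a jump to 'dest'.  If the target lies in the same guest
   page and we are not single-stepping, the goto_tb/exit_tb(tb + n) pair lets
   the generated code be patched later so that TBs chain to each other
   directly; otherwise PC is set and control returns to the main execution
   loop (exit_tb(0)), calling the debug helper first when single-stepping so
   the debugger regains control. */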
265 static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
266 {
267     TranslationBlock *tb;
268     tb = ctx->tb;
269
270     if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
271         !ctx->singlestep_enabled) {
272         /* Use a direct jump if in same page and singlestep not enabled */
273         tcg_gen_goto_tb(n);
274         tcg_gen_movi_i32(cpu_pc, dest);
275         tcg_gen_exit_tb((tcg_target_long)tb + n);
276     } else {
277         tcg_gen_movi_i32(cpu_pc, dest);
278         if (ctx->singlestep_enabled)
279             gen_helper_debug();
280         tcg_gen_exit_tb(0);
281     }
282 }
283
284 static void gen_jump(DisasContext * ctx)
285 {
286     if (ctx->delayed_pc == (uint32_t) - 1) {
287         /* The target is not statically known; it necessarily comes from a
288            delayed jump, since immediate jumps are conditional jumps */
289         tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
290         if (ctx->singlestep_enabled)
291             gen_helper_debug();
292         tcg_gen_exit_tb(0);
293     } else {
294         gen_goto_tb(ctx, 0, ctx->delayed_pc);
295     }
296 }
297
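/* Conditional delayed branches (bt/s, bf/s) are handled in two steps:
   gen_branch_slot() below records the target in cpu_delayed_pc and sets
   DELAY_SLOT_TRUE in cpu_flags when the T bit matches the branch sense;
   once the delay-slot instruction has been translated,
   gen_delayed_conditional_jump() tests that flag and either falls through
   or jumps to the recorded target. */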
298 static inline void gen_branch_slot(uint32_t delayed_pc, int t)
299 {
300     TCGv sr;
301     int label = gen_new_label();
302     tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
303     sr = tcg_temp_new();
304     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
305     tcg_gen_brcondi_i32(t ? TCG_COND_EQ:TCG_COND_NE, sr, 0, label);
306     tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
307     gen_set_label(label);
308 }
309
310 /* Immediate conditional jump (bt or bf) */
311 static void gen_conditional_jump(DisasContext * ctx,
312                                  target_ulong ift, target_ulong ifnott)
313 {
314     int l1;
315     TCGv sr;
316
317     l1 = gen_new_label();
318     sr = tcg_temp_new();
319     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
320     tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
321     gen_goto_tb(ctx, 0, ifnott);
322     gen_set_label(l1);
323     gen_goto_tb(ctx, 1, ift);
324 }
325
326 /* Delayed conditional jump (bt or bf) */
327 static void gen_delayed_conditional_jump(DisasContext * ctx)
328 {
329     int l1;
330     TCGv ds;
331
332     l1 = gen_new_label();
333     ds = tcg_temp_new();
334     tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
335     tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
336     gen_goto_tb(ctx, 1, ctx->pc + 2);
337     gen_set_label(l1);
338     tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
339     gen_jump(ctx);
340 }
341
342 static inline void gen_set_t(void)
343 {
344     tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
345 }
346
347 static inline void gen_clr_t(void)
348 {
349     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
350 }
351
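/* The comparison helpers below clear SR.T and then OR in the 0/1 result of
   a setcond.  This works because T is bit 0 of SR (the div0s case further
   down likewise copies into bit 0 for SR_T). */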
352 static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
353 {
354     TCGv t;
355
356     t = tcg_temp_new();
357     tcg_gen_setcond_i32(cond, t, t1, t0);
358     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
359     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
360
361     tcg_temp_free(t);
362 }
363
364 static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
365 {
366     TCGv t;
367
368     t = tcg_temp_new();
369     tcg_gen_setcondi_i32(cond, t, t0, imm);
370     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
371     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
372
373     tcg_temp_free(t);
374 }
375
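/* Save the translate-time branch/delay-slot flags into cpu_flags, keeping
   only DELAY_SLOT_TRUE, which is computed at run time. */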
376 static inline void gen_store_flags(uint32_t flags)
377 {
378     tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
379     tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
380 }
381
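/* Copy bit p1 of t1 into bit p0 of t0, clearing the destination bit first;
   used below to assemble SR.Q, SR.M and SR.T for div0s. */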
382 static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
383 {
384     TCGv tmp = tcg_temp_new();
385
386     p0 &= 0x1f;
387     p1 &= 0x1f;
388
389     tcg_gen_andi_i32(tmp, t1, (1 << p1));
390     tcg_gen_andi_i32(t0, t0, ~(1 << p0));
391     if (p0 < p1)
392         tcg_gen_shri_i32(tmp, tmp, p1 - p0);
393     else if (p0 > p1)
394         tcg_gen_shli_i32(tmp, tmp, p0 - p1);
395     tcg_gen_or_i32(t0, t0, tmp);
396
397     tcg_temp_free(tmp);
398 }
399
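/* A double-precision value DRn lives in the even/odd pair FRn:FRn+1, with
   FRn holding the high 32 bits, so the load concatenates fregs[reg + 1]
   (low word) with fregs[reg] (high word) and the store splits them back. */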
400 static inline void gen_load_fpr64(TCGv_i64 t, int reg)
401 {
402     tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
403 }
404
405 static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
406 {
407     TCGv_i32 tmp = tcg_temp_new_i32();
408     tcg_gen_trunc_i64_i32(tmp, t);
409     tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
410     tcg_gen_shri_i64(t, t, 32);
411     tcg_gen_trunc_i64_i32(tmp, t);
412     tcg_gen_mov_i32(cpu_fregs[reg], tmp);
413     tcg_temp_free_i32(tmp);
414 }
415
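/* Field extraction macros for the 16-bit opcode in ctx->opcode; the suffix
   names the bit range, e.g. B11_8 is bits 11..8.  As an illustration,
   opcode 0x1234 decodes as B15_12 = 1, B11_8 = 2, B7_4 = 3, B3_0 = 4, which
   the 0x1000 case below turns into "mov.l R3,@(16,R2)" (disp = B3_0 * 4). */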
416 #define B3_0 (ctx->opcode & 0xf)
417 #define B6_4 ((ctx->opcode >> 4) & 0x7)
418 #define B7_4 ((ctx->opcode >> 4) & 0xf)
419 #define B7_0 (ctx->opcode & 0xff)
420 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
421 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
422   (ctx->opcode & 0xfff))
423 #define B11_8 ((ctx->opcode >> 8) & 0xf)
424 #define B15_12 ((ctx->opcode >> 12) & 0xf)
425
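/* General register access: when SR.MD and SR.RB are both set, R0-R7 come
   from bank 1 (gregs[16..23]), otherwise from bank 0.  REG() selects the
   currently active bank, ALTREG() the inactive one (used by the
   ldc/stc Rm_BANK instructions further down). */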
426 #define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
427                 (cpu_gregs[x + 16]) : (cpu_gregs[x]))
428
429 #define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
430                 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
431
432 #define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
433 #define XHACK(x) ((((x) & 1) << 4) | ((x) & 0xe))
434 #define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
435 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
436
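/* The CHECK_* macros raise the appropriate exception and return from
   _decode_opc(), so they must be placed before any code is generated for
   the instruction being translated. */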
437 #define CHECK_NOT_DELAY_SLOT \
438   if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
439   {                                                           \
440       gen_helper_raise_slot_illegal_instruction();            \
441       ctx->bstate = BS_EXCP;                                  \
442       return;                                                 \
443   }
444
445 #define CHECK_PRIVILEGED                                        \
446   if (IS_USER(ctx)) {                                           \
447       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
448          gen_helper_raise_slot_illegal_instruction();           \
449       } else {                                                  \
450          gen_helper_raise_illegal_instruction();                \
451       }                                                         \
452       ctx->bstate = BS_EXCP;                                    \
453       return;                                                   \
454   }
455
456 #define CHECK_FPU_ENABLED                                       \
457   if (ctx->flags & SR_FD) {                                     \
458       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
459           gen_helper_raise_slot_fpu_disable();                  \
460       } else {                                                  \
461           gen_helper_raise_fpu_disable();                       \
462       }                                                         \
463       ctx->bstate = BS_EXCP;                                    \
464       return;                                                   \
465   }
466
467 static void _decode_opc(DisasContext * ctx)
468 {
469     /* This code tries to make movca.l emulation sufficiently
470        accurate for Linux purposes.  This instruction writes
471        memory, and prior to that, always allocates a cache line.
472        It is used in two contexts:
473        - in memcpy, where data is copied in blocks, the first write
474        to a block uses movca.l for performance.
475        - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is used
476        to flush the cache. Here, the data written by movca.l is never
477        meant to reach memory, and the value written is just bogus.
478
479        To simulate this, when we emulate movca.l we store the value to memory
480        but also remember the previous content. If we then see ocbi, we check
481        whether a movca.l to that address was done previously. If so, the write
482        should not have hit memory, so we restore the previous content.
483        When we see an instruction that is neither movca.l
484        nor ocbi, the previous content is discarded.
485
486        To optimize, we only try to flush stores when we're at the start of
487        the TB, or if we already saw a movca.l in this TB and have not flushed
488        stores yet.  */
489     if (ctx->has_movcal)
490         {
491           int opcode = ctx->opcode & 0xf0ff;
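          /* Masking with 0xf0ff drops the Rn field, so the comparison
             matches "ocbi @Rn" (0x0093) and "movca.l R0,@Rn" (0x00c3)
             for any Rn. */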
492           if (opcode != 0x0093 /* ocbi */
493               && opcode != 0x00c3 /* movca.l */)
494               {
495                   gen_helper_discard_movcal_backup ();
496                   ctx->has_movcal = 0;
497               }
498         }
499
500 #if 0
501     fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
502 #endif
503
504     switch (ctx->opcode) {
505     case 0x0019:                /* div0u */
506         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
507         return;
508     case 0x000b:                /* rts */
509         CHECK_NOT_DELAY_SLOT
510         tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
511         ctx->flags |= DELAY_SLOT;
512         ctx->delayed_pc = (uint32_t) - 1;
513         return;
514     case 0x0028:                /* clrmac */
515         tcg_gen_movi_i32(cpu_mach, 0);
516         tcg_gen_movi_i32(cpu_macl, 0);
517         return;
518     case 0x0048:                /* clrs */
519         tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
520         return;
521     case 0x0008:                /* clrt */
522         gen_clr_t();
523         return;
524     case 0x0038:                /* ldtlb */
525         CHECK_PRIVILEGED
526         gen_helper_ldtlb();
527         return;
528     case 0x002b:                /* rte */
529         CHECK_PRIVILEGED
530         CHECK_NOT_DELAY_SLOT
531         tcg_gen_mov_i32(cpu_sr, cpu_ssr);
532         tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
533         ctx->flags |= DELAY_SLOT;
534         ctx->delayed_pc = (uint32_t) - 1;
535         return;
536     case 0x0058:                /* sets */
537         tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
538         return;
539     case 0x0018:                /* sett */
540         gen_set_t();
541         return;
542     case 0xfbfd:                /* frchg */
543         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
544         ctx->bstate = BS_STOP;
545         return;
546     case 0xf3fd:                /* fschg */
547         tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
548         ctx->bstate = BS_STOP;
549         return;
550     case 0x0009:                /* nop */
551         return;
552     case 0x001b:                /* sleep */
553         CHECK_PRIVILEGED
554         gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
555         return;
556     }
557
558     switch (ctx->opcode & 0xf000) {
559     case 0x1000:                /* mov.l Rm,@(disp,Rn) */
560         {
561             TCGv addr = tcg_temp_new();
562             tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
563             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
564             tcg_temp_free(addr);
565         }
566         return;
567     case 0x5000:                /* mov.l @(disp,Rm),Rn */
568         {
569             TCGv addr = tcg_temp_new();
570             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
571             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
572             tcg_temp_free(addr);
573         }
574         return;
575     case 0xe000:                /* mov #imm,Rn */
576         tcg_gen_movi_i32(REG(B11_8), B7_0s);
577         return;
578     case 0x9000:                /* mov.w @(disp,PC),Rn */
579         {
580             TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
581             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
582             tcg_temp_free(addr);
583         }
584         return;
585     case 0xd000:                /* mov.l @(disp,PC),Rn */
586         {
587             TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
588             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
589             tcg_temp_free(addr);
590         }
591         return;
592     case 0x7000:                /* add #imm,Rn */
593         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
594         return;
595     case 0xa000:                /* bra disp */
596         CHECK_NOT_DELAY_SLOT
597         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
598         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
599         ctx->flags |= DELAY_SLOT;
600         return;
601     case 0xb000:                /* bsr disp */
602         CHECK_NOT_DELAY_SLOT
603         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
604         ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
605         tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
606         ctx->flags |= DELAY_SLOT;
607         return;
608     }
609
610     switch (ctx->opcode & 0xf00f) {
611     case 0x6003:                /* mov Rm,Rn */
612         tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
613         return;
614     case 0x2000:                /* mov.b Rm,@Rn */
615         tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
616         return;
617     case 0x2001:                /* mov.w Rm,@Rn */
618         tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
619         return;
620     case 0x2002:                /* mov.l Rm,@Rn */
621         tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
622         return;
623     case 0x6000:                /* mov.b @Rm,Rn */
624         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
625         return;
626     case 0x6001:                /* mov.w @Rm,Rn */
627         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
628         return;
629     case 0x6002:                /* mov.l @Rm,Rn */
630         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
631         return;
632     case 0x2004:                /* mov.b Rm,@-Rn */
633         {
634             TCGv addr = tcg_temp_new();
635             tcg_gen_subi_i32(addr, REG(B11_8), 1);
636             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);     /* might cause re-execution */
637             tcg_gen_mov_i32(REG(B11_8), addr);                  /* modify register status */
638             tcg_temp_free(addr);
639         }
640         return;
641     case 0x2005:                /* mov.w Rm,@-Rn */
642         {
643             TCGv addr = tcg_temp_new();
644             tcg_gen_subi_i32(addr, REG(B11_8), 2);
645             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
646             tcg_gen_mov_i32(REG(B11_8), addr);
647             tcg_temp_free(addr);
648         }
649         return;
650     case 0x2006:                /* mov.l Rm,@-Rn */
651         {
652             TCGv addr = tcg_temp_new();
653             tcg_gen_subi_i32(addr, REG(B11_8), 4);
654             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
655             tcg_gen_mov_i32(REG(B11_8), addr);
656         }
657         return;
658     case 0x6004:                /* mov.b @Rm+,Rn */
659         tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
660         if (B11_8 != B7_4)
661                 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
662         return;
663     case 0x6005:                /* mov.w @Rm+,Rn */
664         tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
665         if (B11_8 != B7_4)
666                 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
667         return;
668     case 0x6006:                /* mov.l @Rm+,Rn */
669         tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
670         if (B11_8 != B7_4)
671                 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
672         return;
673     case 0x0004:                /* mov.b Rm,@(R0,Rn) */
674         {
675             TCGv addr = tcg_temp_new();
676             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
677             tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
678             tcg_temp_free(addr);
679         }
680         return;
681     case 0x0005:                /* mov.w Rm,@(R0,Rn) */
682         {
683             TCGv addr = tcg_temp_new();
684             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
685             tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
686             tcg_temp_free(addr);
687         }
688         return;
689     case 0x0006:                /* mov.l Rm,@(R0,Rn) */
690         {
691             TCGv addr = tcg_temp_new();
692             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
693             tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
694             tcg_temp_free(addr);
695         }
696         return;
697     case 0x000c:                /* mov.b @(R0,Rm),Rn */
698         {
699             TCGv addr = tcg_temp_new();
700             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
701             tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
702             tcg_temp_free(addr);
703         }
704         return;
705     case 0x000d:                /* mov.w @(R0,Rm),Rn */
706         {
707             TCGv addr = tcg_temp_new();
708             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
709             tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
710             tcg_temp_free(addr);
711         }
712         return;
713     case 0x000e:                /* mov.l @(R0,Rm),Rn */
714         {
715             TCGv addr = tcg_temp_new();
716             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
717             tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
718             tcg_temp_free(addr);
719         }
720         return;
721     case 0x6008:                /* swap.b Rm,Rn */
722         {
723             TCGv high, low;
724             high = tcg_temp_new();
725             tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
726             low = tcg_temp_new();
727             tcg_gen_ext16u_i32(low, REG(B7_4));
728             tcg_gen_bswap16_i32(low, low);
729             tcg_gen_or_i32(REG(B11_8), high, low);
730             tcg_temp_free(low);
731             tcg_temp_free(high);
732         }
733         return;
734     case 0x6009:                /* swap.w Rm,Rn */
735         {
736             TCGv high, low;
737             high = tcg_temp_new();
738             tcg_gen_shli_i32(high, REG(B7_4), 16);
739             low = tcg_temp_new();
740             tcg_gen_shri_i32(low, REG(B7_4), 16);
741             tcg_gen_ext16u_i32(low, low);
742             tcg_gen_or_i32(REG(B11_8), high, low);
743             tcg_temp_free(low);
744             tcg_temp_free(high);
745         }
746         return;
747     case 0x200d:                /* xtrct Rm,Rn */
748         {
749             TCGv high, low;
750             high = tcg_temp_new();
751             tcg_gen_shli_i32(high, REG(B7_4), 16);
752             low = tcg_temp_new();
753             tcg_gen_shri_i32(low, REG(B11_8), 16);
754             tcg_gen_ext16u_i32(low, low);
755             tcg_gen_or_i32(REG(B11_8), high, low);
756             tcg_temp_free(low);
757             tcg_temp_free(high);
758         }
759         return;
760     case 0x300c:                /* add Rm,Rn */
761         tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
762         return;
763     case 0x300e:                /* addc Rm,Rn */
764         gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
765         return;
766     case 0x300f:                /* addv Rm,Rn */
767         gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
768         return;
769     case 0x2009:                /* and Rm,Rn */
770         tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
771         return;
772     case 0x3000:                /* cmp/eq Rm,Rn */
773         gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
774         return;
775     case 0x3003:                /* cmp/ge Rm,Rn */
776         gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
777         return;
778     case 0x3007:                /* cmp/gt Rm,Rn */
779         gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
780         return;
781     case 0x3006:                /* cmp/hi Rm,Rn */
782         gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
783         return;
784     case 0x3002:                /* cmp/hs Rm,Rn */
785         gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
786         return;
787     case 0x200c:                /* cmp/str Rm,Rn */
788         {
789             TCGv cmp1 = tcg_temp_new();
790             TCGv cmp2 = tcg_temp_new();
791             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
792             tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
793             tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
794             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
795             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
796             tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
797             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
798             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
799             tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
800             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
801             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
802             tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
803             tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
804             tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
805             tcg_temp_free(cmp2);
806             tcg_temp_free(cmp1);
807         }
808         return;
809     case 0x2007:                /* div0s Rm,Rn */
810         {
811             gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31);        /* SR_Q */
812             gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);         /* SR_M */
813             TCGv val = tcg_temp_new();
814             tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
815             gen_copy_bit_i32(cpu_sr, 0, val, 31);               /* SR_T */
816             tcg_temp_free(val);
817         }
818         return;
819     case 0x3004:                /* div1 Rm,Rn */
820         gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
821         return;
822     case 0x300d:                /* dmuls.l Rm,Rn */
823         {
824             TCGv_i64 tmp1 = tcg_temp_new_i64();
825             TCGv_i64 tmp2 = tcg_temp_new_i64();
826
827             tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
828             tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
829             tcg_gen_mul_i64(tmp1, tmp1, tmp2);
830             tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
831             tcg_gen_shri_i64(tmp1, tmp1, 32);
832             tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
833
834             tcg_temp_free_i64(tmp2);
835             tcg_temp_free_i64(tmp1);
836         }
837         return;
838     case 0x3005:                /* dmulu.l Rm,Rn */
839         {
840             TCGv_i64 tmp1 = tcg_temp_new_i64();
841             TCGv_i64 tmp2 = tcg_temp_new_i64();
842
843             tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
844             tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
845             tcg_gen_mul_i64(tmp1, tmp1, tmp2);
846             tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
847             tcg_gen_shri_i64(tmp1, tmp1, 32);
848             tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
849
850             tcg_temp_free_i64(tmp2);
851             tcg_temp_free_i64(tmp1);
852         }
853         return;
854     case 0x600e:                /* exts.b Rm,Rn */
855         tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
856         return;
857     case 0x600f:                /* exts.w Rm,Rn */
858         tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
859         return;
860     case 0x600c:                /* extu.b Rm,Rn */
861         tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
862         return;
863     case 0x600d:                /* extu.w Rm,Rn */
864         tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
865         return;
866     case 0x000f:                /* mac.l @Rm+,@Rn+ */
867         {
868             TCGv arg0, arg1;
869             arg0 = tcg_temp_new();
870             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
871             arg1 = tcg_temp_new();
872             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
873             gen_helper_macl(arg0, arg1);
874             tcg_temp_free(arg1);
875             tcg_temp_free(arg0);
876             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
877             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
878         }
879         return;
880     case 0x400f:                /* mac.w @Rm+,@Rn+ */
881         {
882             TCGv arg0, arg1;
883             arg0 = tcg_temp_new();
884             tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
885             arg1 = tcg_temp_new();
886             tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
887             gen_helper_macw(arg0, arg1);
888             tcg_temp_free(arg1);
889             tcg_temp_free(arg0);
890             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
891             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
892         }
893         return;
894     case 0x0007:                /* mul.l Rm,Rn */
895         tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
896         return;
897     case 0x200f:                /* muls.w Rm,Rn */
898         {
899             TCGv arg0, arg1;
900             arg0 = tcg_temp_new();
901             tcg_gen_ext16s_i32(arg0, REG(B7_4));
902             arg1 = tcg_temp_new();
903             tcg_gen_ext16s_i32(arg1, REG(B11_8));
904             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
905             tcg_temp_free(arg1);
906             tcg_temp_free(arg0);
907         }
908         return;
909     case 0x200e:                /* mulu.w Rm,Rn */
910         {
911             TCGv arg0, arg1;
912             arg0 = tcg_temp_new();
913             tcg_gen_ext16u_i32(arg0, REG(B7_4));
914             arg1 = tcg_temp_new();
915             tcg_gen_ext16u_i32(arg1, REG(B11_8));
916             tcg_gen_mul_i32(cpu_macl, arg0, arg1);
917             tcg_temp_free(arg1);
918             tcg_temp_free(arg0);
919         }
920         return;
921     case 0x600b:                /* neg Rm,Rn */
922         tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
923         return;
924     case 0x600a:                /* negc Rm,Rn */
925         {
926             TCGv t0, t1;
927             t0 = tcg_temp_new();
928             tcg_gen_neg_i32(t0, REG(B7_4));
929             t1 = tcg_temp_new();
930             tcg_gen_andi_i32(t1, cpu_sr, SR_T);
931             tcg_gen_sub_i32(REG(B11_8), t0, t1);
932             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
933             tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
934             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
935             tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
936             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
937             tcg_temp_free(t0);
938             tcg_temp_free(t1);
939         }
940         return;
941     case 0x6007:                /* not Rm,Rn */
942         tcg_gen_not_i32(REG(B11_8), REG(B7_4));
943         return;
944     case 0x200b:                /* or Rm,Rn */
945         tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
946         return;
947     case 0x400c:                /* shad Rm,Rn */
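        /* Arithmetic dynamic shift: a non-negative Rm shifts Rn left by
           Rm & 0x1f; a negative Rm shifts Rn right arithmetically by
           ((~Rm & 0x1f) + 1), i.e. 32 - (Rm & 0x1f).  A negative Rm whose
           low five bits are zero is a full 32-bit right shift, yielding 0
           or 0xffffffff depending on the sign of Rn. */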
948         {
949             int label1 = gen_new_label();
950             int label2 = gen_new_label();
951             int label3 = gen_new_label();
952             int label4 = gen_new_label();
953             TCGv shift;
954             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
955             /* Rm positive, shift to the left */
956             shift = tcg_temp_new();
957             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
958             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
959             tcg_temp_free(shift);
960             tcg_gen_br(label4);
961             /* Rm negative, shift to the right */
962             gen_set_label(label1);
963             shift = tcg_temp_new();
964             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
965             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
966             tcg_gen_not_i32(shift, REG(B7_4));
967             tcg_gen_andi_i32(shift, shift, 0x1f);
968             tcg_gen_addi_i32(shift, shift, 1);
969             tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
970             tcg_temp_free(shift);
971             tcg_gen_br(label4);
972             /* Rm = -32 */
973             gen_set_label(label2);
974             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
975             tcg_gen_movi_i32(REG(B11_8), 0);
976             tcg_gen_br(label4);
977             gen_set_label(label3);
978             tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
979             gen_set_label(label4);
980         }
981         return;
982     case 0x400d:                /* shld Rm,Rn */
983         {
984             int label1 = gen_new_label();
985             int label2 = gen_new_label();
986             int label3 = gen_new_label();
987             TCGv shift;
988             tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
989             /* Rm positive, shift to the left */
990             shift = tcg_temp_new();
991             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
992             tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
993             tcg_temp_free(shift);
994             tcg_gen_br(label3);
995             /* Rm negative, shift to the right */
996             gen_set_label(label1);
997             shift = tcg_temp_new();
998             tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
999             tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
1000             tcg_gen_not_i32(shift, REG(B7_4));
1001             tcg_gen_andi_i32(shift, shift, 0x1f);
1002             tcg_gen_addi_i32(shift, shift, 1);
1003             tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
1004             tcg_temp_free(shift);
1005             tcg_gen_br(label3);
1006             /* Rm = -32 */
1007             gen_set_label(label2);
1008             tcg_gen_movi_i32(REG(B11_8), 0);
1009             gen_set_label(label3);
1010         }
1011         return;
1012     case 0x3008:                /* sub Rm,Rn */
1013         tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1014         return;
1015     case 0x300a:                /* subc Rm,Rn */
1016         gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
1017         return;
1018     case 0x300b:                /* subv Rm,Rn */
1019         gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
1020         return;
1021     case 0x2008:                /* tst Rm,Rn */
1022         {
1023             TCGv val = tcg_temp_new();
1024             tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
1025             gen_cmp_imm(TCG_COND_EQ, val, 0);
1026             tcg_temp_free(val);
1027         }
1028         return;
1029     case 0x200a:                /* xor Rm,Rn */
1030         tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1031         return;
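    /* In the fmov variants below, FPSCR.SZ selects the transfer size: with
       SZ set a single fmov moves a 64-bit register pair (DR/XD), which the
       translator implements as two 32-bit accesses, with XREG() mapping the
       encoded register number onto the correct pair. */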
1032     case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1033         CHECK_FPU_ENABLED
1034         if (ctx->fpscr & FPSCR_SZ) {
1035             TCGv_i64 fp = tcg_temp_new_i64();
1036             gen_load_fpr64(fp, XREG(B7_4));
1037             gen_store_fpr64(fp, XREG(B11_8));
1038             tcg_temp_free_i64(fp);
1039         } else {
1040             tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1041         }
1042         return;
1043     case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1044         CHECK_FPU_ENABLED
1045         if (ctx->fpscr & FPSCR_SZ) {
1046             TCGv addr_hi = tcg_temp_new();
1047             int fr = XREG(B7_4);
1048             tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
1049             tcg_gen_qemu_st32(cpu_fregs[fr  ], REG(B11_8), ctx->memidx);
1050             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi,    ctx->memidx);
1051             tcg_temp_free(addr_hi);
1052         } else {
1053             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
1054         }
1055         return;
1056     case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1057         CHECK_FPU_ENABLED
1058         if (ctx->fpscr & FPSCR_SZ) {
1059             TCGv addr_hi = tcg_temp_new();
1060             int fr = XREG(B11_8);
1061             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1062             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1063             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1064             tcg_temp_free(addr_hi);
1065         } else {
1066             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1067         }
1068         return;
1069     case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1070         CHECK_FPU_ENABLED
1071         if (ctx->fpscr & FPSCR_SZ) {
1072             TCGv addr_hi = tcg_temp_new();
1073             int fr = XREG(B11_8);
1074             tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1075             tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1076             tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1077             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1078             tcg_temp_free(addr_hi);
1079         } else {
1080             tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1081             tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1082         }
1083         return;
1084     case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1085         CHECK_FPU_ENABLED
1086         if (ctx->fpscr & FPSCR_SZ) {
1087             TCGv addr = tcg_temp_new_i32();
1088             int fr = XREG(B7_4);
1089             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1090             tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1091             tcg_gen_subi_i32(addr, addr, 4);
1092             tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1093             tcg_gen_mov_i32(REG(B11_8), addr);
1094             tcg_temp_free(addr);
1095         } else {
1096             TCGv addr;
1097             addr = tcg_temp_new_i32();
1098             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1099             tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1100             tcg_gen_mov_i32(REG(B11_8), addr);
1101             tcg_temp_free(addr);
1102         }
1103         return;
1104     case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rn - FPSCR: Nothing */
1105         CHECK_FPU_ENABLED
1106         {
1107             TCGv addr = tcg_temp_new_i32();
1108             tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1109             if (ctx->fpscr & FPSCR_SZ) {
1110                 int fr = XREG(B11_8);
1111                 tcg_gen_qemu_ld32u(cpu_fregs[fr  ], addr, ctx->memidx);
1112                 tcg_gen_addi_i32(addr, addr, 4);
1113                 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1114             } else {
1115                 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1116             }
1117             tcg_temp_free(addr);
1118         }
1119         return;
1120     case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
1121         CHECK_FPU_ENABLED
1122         {
1123             TCGv addr = tcg_temp_new();
1124             tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1125             if (ctx->fpscr & FPSCR_SZ) {
1126                 int fr = XREG(B7_4);
1127                 tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1128                 tcg_gen_addi_i32(addr, addr, 4);
1129                 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1130             } else {
1131                 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1132             }
1133             tcg_temp_free(addr);
1134         }
1135         return;
1136     case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1137     case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1138     case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1139     case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1140     case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1141     case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1142         {
1143             CHECK_FPU_ENABLED
1144             if (ctx->fpscr & FPSCR_PR) {
1145                 TCGv_i64 fp0, fp1;
1146
1147                 if (ctx->opcode & 0x0110)
1148                     break; /* illegal instruction */
1149                 fp0 = tcg_temp_new_i64();
1150                 fp1 = tcg_temp_new_i64();
1151                 gen_load_fpr64(fp0, DREG(B11_8));
1152                 gen_load_fpr64(fp1, DREG(B7_4));
1153                 switch (ctx->opcode & 0xf00f) {
1154                 case 0xf000:            /* fadd Rm,Rn */
1155                     gen_helper_fadd_DT(fp0, fp0, fp1);
1156                     break;
1157                 case 0xf001:            /* fsub Rm,Rn */
1158                     gen_helper_fsub_DT(fp0, fp0, fp1);
1159                     break;
1160                 case 0xf002:            /* fmul Rm,Rn */
1161                     gen_helper_fmul_DT(fp0, fp0, fp1);
1162                     break;
1163                 case 0xf003:            /* fdiv Rm,Rn */
1164                     gen_helper_fdiv_DT(fp0, fp0, fp1);
1165                     break;
1166                 case 0xf004:            /* fcmp/eq Rm,Rn */
1167                     gen_helper_fcmp_eq_DT(fp0, fp1);
1168                     return;
1169                 case 0xf005:            /* fcmp/gt Rm,Rn */
1170                     gen_helper_fcmp_gt_DT(fp0, fp1);
1171                     return;
1172                 }
1173                 gen_store_fpr64(fp0, DREG(B11_8));
1174                 tcg_temp_free_i64(fp0);
1175                 tcg_temp_free_i64(fp1);
1176             } else {
1177                 switch (ctx->opcode & 0xf00f) {
1178                 case 0xf000:            /* fadd Rm,Rn */
1179                     gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1180                     break;
1181                 case 0xf001:            /* fsub Rm,Rn */
1182                     gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1183                     break;
1184                 case 0xf002:            /* fmul Rm,Rn */
1185                     gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1186                     break;
1187                 case 0xf003:            /* fdiv Rm,Rn */
1188                     gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1189                     break;
1190                 case 0xf004:            /* fcmp/eq Rm,Rn */
1191                     gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1192                     return;
1193                 case 0xf005:            /* fcmp/gt Rm,Rn */
1194                     gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1195                     return;
1196                 }
1197             }
1198         }
1199         return;
1200     case 0xf00e: /* fmac FR0,Rm,Rn */
1201         {
1202             CHECK_FPU_ENABLED
1203             if (ctx->fpscr & FPSCR_PR) {
1204                 break; /* illegal instruction */
1205             } else {
1206                 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
1207                                    cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], cpu_fregs[FREG(B11_8)]);
1208                 return;
1209             }
1210         }
1211     }
1212
1213     switch (ctx->opcode & 0xff00) {
1214     case 0xc900:                /* and #imm,R0 */
1215         tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1216         return;
1217     case 0xcd00:                /* and.b #imm,@(R0,GBR) */
1218         {
1219             TCGv addr, val;
1220             addr = tcg_temp_new();
1221             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1222             val = tcg_temp_new();
1223             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1224             tcg_gen_andi_i32(val, val, B7_0);
1225             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1226             tcg_temp_free(val);
1227             tcg_temp_free(addr);
1228         }
1229         return;
1230     case 0x8b00:                /* bf label */
1231         CHECK_NOT_DELAY_SLOT
1232             gen_conditional_jump(ctx, ctx->pc + 2,
1233                                  ctx->pc + 4 + B7_0s * 2);
1234         ctx->bstate = BS_BRANCH;
1235         return;
1236     case 0x8f00:                /* bf/s label */
1237         CHECK_NOT_DELAY_SLOT
1238         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1239         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1240         return;
1241     case 0x8900:                /* bt label */
1242         CHECK_NOT_DELAY_SLOT
1243             gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1244                                  ctx->pc + 2);
1245         ctx->bstate = BS_BRANCH;
1246         return;
1247     case 0x8d00:                /* bt/s label */
1248         CHECK_NOT_DELAY_SLOT
1249         gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1250         ctx->flags |= DELAY_SLOT_CONDITIONAL;
1251         return;
1252     case 0x8800:                /* cmp/eq #imm,R0 */
1253         gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1254         return;
1255     case 0xc400:                /* mov.b @(disp,GBR),R0 */
1256         {
1257             TCGv addr = tcg_temp_new();
1258             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1259             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1260             tcg_temp_free(addr);
1261         }
1262         return;
1263     case 0xc500:                /* mov.w @(disp,GBR),R0 */
1264         {
1265             TCGv addr = tcg_temp_new();
1266             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1267             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1268             tcg_temp_free(addr);
1269         }
1270         return;
1271     case 0xc600:                /* mov.l @(disp,GBR),R0 */
1272         {
1273             TCGv addr = tcg_temp_new();
1274             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1275             tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1276             tcg_temp_free(addr);
1277         }
1278         return;
1279     case 0xc000:                /* mov.b R0,@(disp,GBR) */
1280         {
1281             TCGv addr = tcg_temp_new();
1282             tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1283             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1284             tcg_temp_free(addr);
1285         }
1286         return;
1287     case 0xc100:                /* mov.w R0,@(disp,GBR) */
1288         {
1289             TCGv addr = tcg_temp_new();
1290             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1291             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1292             tcg_temp_free(addr);
1293         }
1294         return;
1295     case 0xc200:                /* mov.l R0,@(disp,GBR) */
1296         {
1297             TCGv addr = tcg_temp_new();
1298             tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1299             tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1300             tcg_temp_free(addr);
1301         }
1302         return;
1303     case 0x8000:                /* mov.b R0,@(disp,Rn) */
1304         {
1305             TCGv addr = tcg_temp_new();
1306             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1307             tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1308             tcg_temp_free(addr);
1309         }
1310         return;
1311     case 0x8100:                /* mov.w R0,@(disp,Rn) */
1312         {
1313             TCGv addr = tcg_temp_new();
1314             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1315             tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1316             tcg_temp_free(addr);
1317         }
1318         return;
1319     case 0x8400:                /* mov.b @(disp,Rn),R0 */
1320         {
1321             TCGv addr = tcg_temp_new();
1322             tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1323             tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1324             tcg_temp_free(addr);
1325         }
1326         return;
1327     case 0x8500:                /* mov.w @(disp,Rn),R0 */
1328         {
1329             TCGv addr = tcg_temp_new();
1330             tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1331             tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1332             tcg_temp_free(addr);
1333         }
1334         return;
1335     case 0xc700:                /* mova @(disp,PC),R0 */
1336         tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1337         return;
1338     case 0xcb00:                /* or #imm,R0 */
1339         tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1340         return;
1341     case 0xcf00:                /* or.b #imm,@(R0,GBR) */
1342         {
1343             TCGv addr, val;
1344             addr = tcg_temp_new();
1345             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1346             val = tcg_temp_new();
1347             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1348             tcg_gen_ori_i32(val, val, B7_0);
1349             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1350             tcg_temp_free(val);
1351             tcg_temp_free(addr);
1352         }
1353         return;
1354     case 0xc300:                /* trapa #imm */
1355         {
1356             TCGv imm;
1357             CHECK_NOT_DELAY_SLOT
1358             imm = tcg_const_i32(B7_0);
1359             gen_helper_trapa(imm);
1360             tcg_temp_free(imm);
1361             ctx->bstate = BS_BRANCH;
1362         }
1363         return;
1364     case 0xc800:                /* tst #imm,R0 */
1365         {
1366             TCGv val = tcg_temp_new();
1367             tcg_gen_andi_i32(val, REG(0), B7_0);
1368             gen_cmp_imm(TCG_COND_EQ, val, 0);
1369             tcg_temp_free(val);
1370         }
1371         return;
1372     case 0xcc00:                /* tst.b #imm,@(R0,GBR) */
1373         {
1374             TCGv val = tcg_temp_new();
1375             tcg_gen_add_i32(val, REG(0), cpu_gbr);
1376             tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1377             tcg_gen_andi_i32(val, val, B7_0);
1378             gen_cmp_imm(TCG_COND_EQ, val, 0);
1379             tcg_temp_free(val);
1380         }
1381         return;
1382     case 0xca00:                /* xor #imm,R0 */
1383         tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1384         return;
1385     case 0xce00:                /* xor.b #imm,@(R0,GBR) */
1386         {
1387             TCGv addr, val;
1388             addr = tcg_temp_new();
1389             tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1390             val = tcg_temp_new();
1391             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1392             tcg_gen_xori_i32(val, val, B7_0);
1393             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1394             tcg_temp_free(val);
1395             tcg_temp_free(addr);
1396         }
1397         return;
1398     }
1399
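     /* Opcodes matched with mask 0xf08f encode a banked general-register
      * number in bits 6..4 (B6_4); ALTREG() resolves that number to the
      * register in the bank which is not currently selected.
      */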
1400     switch (ctx->opcode & 0xf08f) {
1401     case 0x408e:                /* ldc Rm,Rn_BANK */
1402         CHECK_PRIVILEGED
1403         tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1404         return;
1405     case 0x4087:                /* ldc.l @Rm+,Rn_BANK */
1406         CHECK_PRIVILEGED
1407         tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1408         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1409         return;
1410     case 0x0082:                /* stc Rm_BANK,Rn */
1411         CHECK_PRIVILEGED
1412         tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1413         return;
1414     case 0x4083:                /* stc.l Rm_BANK,@-Rn */
1415         CHECK_PRIVILEGED
1416         {
1417             TCGv addr = tcg_temp_new();
1418             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1419             tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1420             tcg_gen_mov_i32(REG(B11_8), addr);
1421             tcg_temp_free(addr);
1422         }
1423         return;
1424     }
1425
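     /* Opcodes below leave only bits 11..8 free; those bits usually name
      * Rn/FRn (fipr and ftrv instead pack 2-bit vector register numbers
      * into them), hence the 0xf0ff mask.
      */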
1426     switch (ctx->opcode & 0xf0ff) {
1427     case 0x0023:                /* braf Rn */
1428         CHECK_NOT_DELAY_SLOT
1429         tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1430         ctx->flags |= DELAY_SLOT;
1431         ctx->delayed_pc = (uint32_t) - 1;
1432         return;
1433     case 0x0003:                /* bsrf Rn */
1434         CHECK_NOT_DELAY_SLOT
1435         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1436         tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1437         ctx->flags |= DELAY_SLOT;
1438         ctx->delayed_pc = (uint32_t) - 1;
1439         return;
1440     case 0x4015:                /* cmp/pl Rn */
1441         gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1442         return;
1443     case 0x4011:                /* cmp/pz Rn */
1444         gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1445         return;
1446     case 0x4010:                /* dt Rn */
1447         tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1448         gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1449         return;
1450     case 0x402b:                /* jmp @Rn */
1451         CHECK_NOT_DELAY_SLOT
1452         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1453         ctx->flags |= DELAY_SLOT;
1454         ctx->delayed_pc = (uint32_t) - 1;
1455         return;
1456     case 0x400b:                /* jsr @Rn */
1457         CHECK_NOT_DELAY_SLOT
1458         tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1459         tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1460         ctx->flags |= DELAY_SLOT;
1461         ctx->delayed_pc = (uint32_t) - 1;
1462         return;
1463     case 0x400e:                /* ldc Rm,SR */
1464         CHECK_PRIVILEGED
1465         tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1466         ctx->bstate = BS_STOP;
1467         return;
1468     case 0x4007:                /* ldc.l @Rm+,SR */
1469         CHECK_PRIVILEGED
1470         {
1471             TCGv val = tcg_temp_new();
1472             tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1473             tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1474             tcg_temp_free(val);
1475             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1476             ctx->bstate = BS_STOP;
1477         }
1478         return;
1479     case 0x0002:                /* stc SR,Rn */
1480         CHECK_PRIVILEGED
1481         tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1482         return;
1483     case 0x4003:                /* stc SR,@-Rn */
1484         CHECK_PRIVILEGED
1485         {
1486             TCGv addr = tcg_temp_new();
1487             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1488             tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1489             tcg_gen_mov_i32(REG(B11_8), addr);
1490             tcg_temp_free(addr);
1491         }
1492         return;
1493 #define LD(reg,ldnum,ldpnum,prechk)             \
1494   case ldnum:                                                   \
1495     prechk                                                      \
1496     tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));                    \
1497     return;                                                     \
1498   case ldpnum:                                                  \
1499     prechk                                                      \
1500     tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);    \
1501     tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);                \
1502     return;
1503 #define ST(reg,stnum,stpnum,prechk)             \
1504   case stnum:                                                   \
1505     prechk                                                      \
1506     tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);                    \
1507     return;                                                     \
1508   case stpnum:                                                  \
1509     prechk                                                      \
1510     {                                                           \
1511         TCGv addr = tcg_temp_new();                             \
1512         tcg_gen_subi_i32(addr, REG(B11_8), 4);                  \
1513         tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);       \
1514         tcg_gen_mov_i32(REG(B11_8), addr);                      \
1515         tcg_temp_free(addr);                                    \
1516     }                                                           \
1517     return;
1518 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)              \
1519         LD(reg,ldnum,ldpnum,prechk)                             \
1520         ST(reg,stnum,stpnum,prechk)
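         /* Example: LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {}) expands to
          * the four GBR transfer cases:
          *   0x401e  ldc    Rm,GBR      GBR = Rm
          *   0x4017  ldc.l  @Rm+,GBR    GBR = *Rm, Rm += 4
          *   0x0012  stc    GBR,Rn      Rn = GBR
          *   0x4013  stc.l  GBR,@-Rn    Rn -= 4, *Rn = GBR
          */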
1521         LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
1522         LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1523         LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1524         LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
1525         ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
1526         LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
1527         LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1528         LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1529         LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1530         LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
1531         LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
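     /* FPSCR writes go through gen_helper_ld_fpscr() (changing the FR bit
      * swaps the FP register banks) and force BS_STOP, because ctx->fpscr
      * is only a translation-time copy and would otherwise go stale.
      */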
1532     case 0x406a:                /* lds Rm,FPSCR */
1533         CHECK_FPU_ENABLED
1534         gen_helper_ld_fpscr(REG(B11_8));
1535         ctx->bstate = BS_STOP;
1536         return;
1537     case 0x4066:                /* lds.l @Rm+,FPSCR */
1538         CHECK_FPU_ENABLED
1539         {
1540             TCGv addr = tcg_temp_new();
1541             tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
1542             tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1543             gen_helper_ld_fpscr(addr);
1544             tcg_temp_free(addr);
1545             ctx->bstate = BS_STOP;
1546         }
1547         return;
1548     case 0x006a:                /* sts FPSCR,Rn */
1549         CHECK_FPU_ENABLED
1550         tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1551         return;
1552     case 0x4062:                /* sts FPSCR,@-Rn */
1553         CHECK_FPU_ENABLED
1554         {
1555             TCGv addr, val;
1556             val = tcg_temp_new();
1557             tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1558             addr = tcg_temp_new();
1559             tcg_gen_subi_i32(addr, REG(B11_8), 4);
1560             tcg_gen_qemu_st32(val, addr, ctx->memidx);
1561             tcg_gen_mov_i32(REG(B11_8), addr);
1562             tcg_temp_free(addr);
1563             tcg_temp_free(val);
1564         }
1565         return;
1566     case 0x00c3:                /* movca.l R0,@Rm */
1567         {
1568             TCGv val = tcg_temp_new();
1569             tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
1570             gen_helper_movcal(REG(B11_8), val);
1571             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
                 tcg_temp_free(val);
1572         }
1573         ctx->has_movcal = 1;
1574         return;
1575     case 0x40a9:
1576         /* MOVUA.L @Rm,R0 (Rm) -> R0
1577            Load non-boundary-aligned data */
1578         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1579         return;
1580     case 0x40e9:
1581         /* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
1582            Load non-boundary-aligned data */
1583         tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1584         tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1585         return;
1586     case 0x0029:                /* movt Rn */
1587         tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1588         return;
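     /* MOVLI.L/MOVCO.L (SH4A) form a load-linked/store-conditional pair.
      * cpu_ldst is the link flag: movli sets it after its load, and movco
      * copies it into SR.T, performs the store only if the flag was still
      * set, then clears the flag.
      */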
1589     case 0x0073:
1590         /* MOVCO.L
1591                LDST -> T
1592                If (T == 1) R0 -> (Rn)
1593                0 -> LDST
1594         */
1595         if (ctx->features & SH_FEATURE_SH4A) {
1596             int label = gen_new_label();
1597             gen_clr_t();
1598             tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
1599             tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1600             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1601             gen_set_label(label);
1602             tcg_gen_movi_i32(cpu_ldst, 0);
1603             return;
1604         } else
1605             break;
1606     case 0x0063:
1607         /* MOVLI.L @Rm,R0
1608                1 -> LDST
1609                (Rm) -> R0
1610                When interrupt/exception
1611                occurred 0 -> LDST
1612         */
1613         if (ctx->features & SH_FEATURE_SH4A) {
1614             tcg_gen_movi_i32(cpu_ldst, 0);
1615             tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
1616             tcg_gen_movi_i32(cpu_ldst, 1);
1617             return;
1618         } else
1619             break;
1620     case 0x0093:                /* ocbi @Rn */
1621         {
1622             gen_helper_ocbi (REG(B11_8));
1623         }
1624         return;
1625     case 0x00a3:                /* ocbp @Rn */
1626     case 0x00b3:                /* ocbwb @Rn */
1627         /* These instructions are supposed to do nothing in case of
1628            a cache miss. Given that we only partially emulate caches
1629            it is safe to simply ignore them. */
1630         return;
1631     case 0x0083:                /* pref @Rn */
1632         return;
1633     case 0x00d3:                /* prefi @Rn */
1634         if (ctx->features & SH_FEATURE_SH4A)
1635             return;
1636         else
1637             break;
1638     case 0x00e3:                /* icbi @Rn */
1639         if (ctx->features & SH_FEATURE_SH4A)
1640             return;
1641         else
1642             break;
1643     case 0x00ab:                /* synco */
1644         if (ctx->features & SH_FEATURE_SH4A)
1645             return;
1646         else
1647             break;
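     /* Single-bit rotate/shift cases below keep SR.T equal to the bit
      * rotated or shifted out via gen_copy_bit_i32(); the shll2/8/16 and
      * shlr2/8/16 forms leave T untouched.
      */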
1648     case 0x4024:                /* rotcl Rn */
1649         {
1650             TCGv tmp = tcg_temp_new();
1651             tcg_gen_mov_i32(tmp, cpu_sr);
1652             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1653             tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1654             gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1655             tcg_temp_free(tmp);
1656         }
1657         return;
1658     case 0x4025:                /* rotcr Rn */
1659         {
1660             TCGv tmp = tcg_temp_new();
1661             tcg_gen_mov_i32(tmp, cpu_sr);
1662             gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1663             tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1664             gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1665             tcg_temp_free(tmp);
1666         }
1667         return;
1668     case 0x4004:                /* rotl Rn */
1669         tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
1670         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1671         return;
1672     case 0x4005:                /* rotr Rn */
1673         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1674         tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
1675         return;
1676     case 0x4000:                /* shll Rn */
1677     case 0x4020:                /* shal Rn */
1678         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1679         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1680         return;
1681     case 0x4021:                /* shar Rn */
1682         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1683         tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1684         return;
1685     case 0x4001:                /* shlr Rn */
1686         gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1687         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1688         return;
1689     case 0x4008:                /* shll2 Rn */
1690         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1691         return;
1692     case 0x4018:                /* shll8 Rn */
1693         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1694         return;
1695     case 0x4028:                /* shll16 Rn */
1696         tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1697         return;
1698     case 0x4009:                /* shlr2 Rn */
1699         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1700         return;
1701     case 0x4019:                /* shlr8 Rn */
1702         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1703         return;
1704     case 0x4029:                /* shlr16 Rn */
1705         tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1706         return;
1707     case 0x401b:                /* tas.b @Rn */
1708         {
1709             TCGv addr, val;
1710             addr = tcg_temp_local_new();
1711             tcg_gen_mov_i32(addr, REG(B11_8));
1712             val = tcg_temp_local_new();
1713             tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1714             gen_cmp_imm(TCG_COND_EQ, val, 0);
1715             tcg_gen_ori_i32(val, val, 0x80);
1716             tcg_gen_qemu_st8(val, addr, ctx->memidx);
1717             tcg_temp_free(val);
1718             tcg_temp_free(addr);
1719         }
1720         return;
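     /* Floating-point cases from here on.  CHECK_FPU_ENABLED raises an
      * FPU-disable exception when SR.FD is set; FPSCR.PR (cached in
      * ctx->fpscr) selects the double-precision variants.
      */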
1721     case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1722         CHECK_FPU_ENABLED
1723         tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1724         return;
1725     case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1726         CHECK_FPU_ENABLED
1727         tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1728         return;
1729     case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1730         CHECK_FPU_ENABLED
1731         if (ctx->fpscr & FPSCR_PR) {
1732             TCGv_i64 fp;
1733             if (ctx->opcode & 0x0100)
1734                 break; /* illegal instruction */
1735             fp = tcg_temp_new_i64();
1736             gen_helper_float_DT(fp, cpu_fpul);
1737             gen_store_fpr64(fp, DREG(B11_8));
1738             tcg_temp_free_i64(fp);
1739         }
1740         else {
1741             gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_fpul);
1742         }
1743         return;
1744     case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1745         CHECK_FPU_ENABLED
1746         if (ctx->fpscr & FPSCR_PR) {
1747             TCGv_i64 fp;
1748             if (ctx->opcode & 0x0100)
1749                 break; /* illegal instruction */
1750             fp = tcg_temp_new_i64();
1751             gen_load_fpr64(fp, DREG(B11_8));
1752             gen_helper_ftrc_DT(cpu_fpul, fp);
1753             tcg_temp_free_i64(fp);
1754         }
1755         else {
1756             gen_helper_ftrc_FT(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1757         }
1758         return;
1759     case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1760         CHECK_FPU_ENABLED
1761         {
1762             gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1763         }
1764         return;
1765     case 0xf05d: /* fabs FRn/DRn */
1766         CHECK_FPU_ENABLED
1767         if (ctx->fpscr & FPSCR_PR) {
1768             if (ctx->opcode & 0x0100)
1769                 break; /* illegal instruction */
1770             TCGv_i64 fp = tcg_temp_new_i64();
1771             gen_load_fpr64(fp, DREG(B11_8));
1772             gen_helper_fabs_DT(fp, fp);
1773             gen_store_fpr64(fp, DREG(B11_8));
1774             tcg_temp_free_i64(fp);
1775         } else {
1776             gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1777         }
1778         return;
1779     case 0xf06d: /* fsqrt FRn/DRn */
1780         CHECK_FPU_ENABLED
1781         if (ctx->fpscr & FPSCR_PR) {
1782             if (ctx->opcode & 0x0100)
1783                 break; /* illegal instruction */
1784             TCGv_i64 fp = tcg_temp_new_i64();
1785             gen_load_fpr64(fp, DREG(B11_8));
1786             gen_helper_fsqrt_DT(fp, fp);
1787             gen_store_fpr64(fp, DREG(B11_8));
1788             tcg_temp_free_i64(fp);
1789         } else {
1790             gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1791         }
1792         return;
1793     case 0xf07d: /* fsrra FRn */
1794         CHECK_FPU_ENABLED
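             /* fsrra is not implemented; breaking out of the switch takes
                the illegal-instruction path below. */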
1795         break;
1796     case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1797         CHECK_FPU_ENABLED
1798         if (!(ctx->fpscr & FPSCR_PR)) {
1799             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1800         }
1801         return;
1802     case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1803         CHECK_FPU_ENABLED
1804         if (!(ctx->fpscr & FPSCR_PR)) {
1805             tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1806         }
1807         return;
1808     case 0xf0ad: /* fcnvsd FPUL,DRn */
1809         CHECK_FPU_ENABLED
1810         {
1811             TCGv_i64 fp = tcg_temp_new_i64();
1812             gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
1813             gen_store_fpr64(fp, DREG(B11_8));
1814             tcg_temp_free_i64(fp);
1815         }
1816         return;
1817     case 0xf0bd: /* fcnvds DRn,FPUL */
1818         CHECK_FPU_ENABLED
1819         {
1820             TCGv_i64 fp = tcg_temp_new_i64();
1821             gen_load_fpr64(fp, DREG(B11_8));
1822             gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
1823             tcg_temp_free_i64(fp);
1824         }
1825         return;
1826     case 0xf0ed: /* fipr FVm,FVn */
1827         CHECK_FPU_ENABLED
1828         if ((ctx->fpscr & FPSCR_PR) == 0) {
1829             TCGv m, n;
1830             m = tcg_const_i32((ctx->opcode >> 8) & 3);
1831             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1832             gen_helper_fipr(m, n);
1833             tcg_temp_free(m);
1834             tcg_temp_free(n);
1835             return;
1836         }
1837         break;
1838     case 0xf0fd: /* ftrv XMTRX,FVn */
1839         CHECK_FPU_ENABLED
1840         if ((ctx->opcode & 0x0300) == 0x0100 &&
1841             (ctx->fpscr & FPSCR_PR) == 0) {
1842             TCGv n;
1843             n = tcg_const_i32((ctx->opcode >> 10) & 3);
1844             gen_helper_ftrv(n);
1845             tcg_temp_free(n);
1846             return;
1847         }
1848         break;
1849     }
1850 #if 0
1851     fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1852             ctx->opcode, ctx->pc);
1853     fflush(stderr);
1854 #endif
1855     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1856        gen_helper_raise_slot_illegal_instruction();
1857     } else {
1858        gen_helper_raise_illegal_instruction();
1859     }
1860     ctx->bstate = BS_EXCP;
1861 }
1862
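 /* Translate one instruction and, if the previous instruction opened a
  * delay slot, emit the delayed (possibly conditional) branch right after
  * the slot instruction and mark the TB as ending in a branch.
  */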
1863 static void decode_opc(DisasContext * ctx)
1864 {
1865     uint32_t old_flags = ctx->flags;
1866
1867     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
1868         tcg_gen_debug_insn_start(ctx->pc);
1869     }
1870
1871     _decode_opc(ctx);
1872
1873     if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1874         if (ctx->flags & DELAY_SLOT_CLEARME) {
1875             gen_store_flags(0);
1876         } else {
1877             /* go out of the delay slot */
1878             uint32_t new_flags = ctx->flags;
1879             new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
1880             gen_store_flags(new_flags);
1881         }
1882         ctx->flags = 0;
1883         ctx->bstate = BS_BRANCH;
1884         if (old_flags & DELAY_SLOT_CONDITIONAL) {
1885             gen_delayed_conditional_jump(ctx);
1886         } else if (old_flags & DELAY_SLOT) {
1887             gen_jump(ctx);
1888         }
1889
1890     }
1891
1892     /* go into a delay slot */
1893     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
1894         gen_store_flags(ctx->flags);
1895 }
1896
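 /* Main translation loop: instructions are translated into one TB until a
  * branch or exception changes ctx.bstate, a page boundary is crossed, the
  * instruction budget (max_insns/icount) is exhausted, or single-stepping
  * stops translation early.
  */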
1897 static inline void
1898 gen_intermediate_code_internal(CPUSH4State * env, TranslationBlock * tb,
1899                                int search_pc)
1900 {
1901     DisasContext ctx;
1902     target_ulong pc_start;
1903     uint16_t *gen_opc_end;
1904     CPUBreakpoint *bp;
1905     int i, ii;
1906     int num_insns;
1907     int max_insns;
1908
1909     pc_start = tb->pc;
1910     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1911     ctx.pc = pc_start;
1912     ctx.flags = (uint32_t)tb->flags;
1913     ctx.bstate = BS_NONE;
1914     ctx.sr = env->sr;
1915     ctx.fpscr = env->fpscr;
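     /* Softmmu/TLB index for memory accesses: 0 in privileged mode,
        1 in user mode. */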
1916     ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
1917     /* We don't know if the delayed pc came from a dynamic or static branch,
1918        so assume it is a dynamic branch.  */
1919     ctx.delayed_pc = -1; /* use delayed pc from env pointer */
1920     ctx.tb = tb;
1921     ctx.singlestep_enabled = env->singlestep_enabled;
1922     ctx.features = env->features;
1923     ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);
1924
1925     ii = -1;
1926     num_insns = 0;
1927     max_insns = tb->cflags & CF_COUNT_MASK;
1928     if (max_insns == 0)
1929         max_insns = CF_COUNT_MASK;
1930     gen_icount_start();
1931     while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
1932         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
1933             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
1934                 if (ctx.pc == bp->pc) {
1935                     /* We have hit a breakpoint - make sure PC is up-to-date */
1936                     tcg_gen_movi_i32(cpu_pc, ctx.pc);
1937                     gen_helper_debug();
1938                     ctx.bstate = BS_EXCP;
1939                     break;
1940                 }
1941             }
1942         }
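         /* In search_pc mode, remember the guest PC, flags and icount at
            each opcode-buffer position so the state at a faulting
            instruction can be reconstructed later. */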
1943         if (search_pc) {
1944             i = gen_opc_ptr - gen_opc_buf;
1945             if (ii < i) {
1946                 ii++;
1947                 while (ii < i)
1948                     gen_opc_instr_start[ii++] = 0;
1949             }
1950             gen_opc_pc[ii] = ctx.pc;
1951             gen_opc_hflags[ii] = ctx.flags;
1952             gen_opc_instr_start[ii] = 1;
1953             gen_opc_icount[ii] = num_insns;
1954         }
1955         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1956             gen_io_start();
1957 #if 0
1958         fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
1959         fflush(stderr);
1960 #endif
1961         ctx.opcode = lduw_code(ctx.pc);
1962         decode_opc(&ctx);
1963         num_insns++;
1964         ctx.pc += 2;
1965         if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
1966             break;
1967         if (env->singlestep_enabled)
1968             break;
1969         if (num_insns >= max_insns)
1970             break;
1971         if (singlestep)
1972             break;
1973     }
1974     if (tb->cflags & CF_LAST_IO)
1975         gen_io_end();
1976     if (env->singlestep_enabled) {
1977         tcg_gen_movi_i32(cpu_pc, ctx.pc);
1978         gen_helper_debug();
1979     } else {
1980         switch (ctx.bstate) {
1981         case BS_STOP:
1982             /* gen_op_interrupt_restart(); */
1983             /* fall through */
1984         case BS_NONE:
1985             if (ctx.flags) {
1986                 gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
1987             }
1988             gen_goto_tb(&ctx, 0, ctx.pc);
1989             break;
1990         case BS_EXCP:
1991             /* gen_op_interrupt_restart(); */
1992             tcg_gen_exit_tb(0);
1993             break;
1994         case BS_BRANCH:
1995         default:
1996             break;
1997         }
1998     }
1999
2000     gen_icount_end(tb, num_insns);
2001     *gen_opc_ptr = INDEX_op_end;
2002     if (search_pc) {
2003         i = gen_opc_ptr - gen_opc_buf;
2004         ii++;
2005         while (ii <= i)
2006             gen_opc_instr_start[ii++] = 0;
2007     } else {
2008         tb->size = ctx.pc - pc_start;
2009         tb->icount = num_insns;
2010     }
2011
2012 #ifdef DEBUG_DISAS
2013 #ifdef SH4_DEBUG_DISAS
2014     qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
2015 #endif
2016     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
2017         qemu_log("IN: %s\n", lookup_symbol(pc_start));
2018         log_target_disas(pc_start, ctx.pc - pc_start, 0);
2019         qemu_log("\n");
2020     }
2021 #endif
2022 }
2023
2024 void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
2025 {
2026     gen_intermediate_code_internal(env, tb, 0);
2027 }
2028
2029 void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
2030 {
2031     gen_intermediate_code_internal(env, tb, 1);
2032 }
2033
2034 void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
2035 {
2036     env->pc = gen_opc_pc[pc_pos];
2037     env->flags = gen_opc_hflags[pc_pos];
2038 }