/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec/exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
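/* Helper calls receive their arguments in fixed registers.  On 32-bit
 * hosts each (up to 64-bit) argument occupies a register pair, so twice
 * as many parameters are passed. */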
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

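/* A register index equal to TCG_CONST marks an inline constant: the actual
 * value follows in the bytecode stream instead of being read from a
 * register. */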
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

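/* Guest memory access: with CONFIG_SOFTMMU the accesses go through the TLB
 * helpers; otherwise the guest address is translated with g2h() and the
 * host memory is accessed directly. */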
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif

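/* Each bytecode instruction starts with the opcode and the total size of
 * the instruction in bytes, followed by its operands (register indexes,
 * constants, labels). */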
/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
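        /* Helper call: the arguments come from fixed registers; the (up to
           64-bit) return value is written to R0 (and R1 on 32-bit hosts). */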
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
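        /* deposit: insert the low tmp8 bits of t2 into t1 at bit position
           tmp16; the field position and length are encoded as immediates. */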
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
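        /* Guest loads/stores: the TCGMemOpIdx operand encodes the access
           size, signedness and endianness together with the mmu index. */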
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}