qemu_ld8s t0, t1, flags
qemu_ld16u t0, t1, flags
qemu_ld16s t0, t1, flags
+qemu_ld32 t0, t1, flags
qemu_ld32u t0, t1, flags
qemu_ld32s t0, t1, flags
qemu_ld64 t0, t1, flags
-Load data at the QEMU CPU address t1 into t0. t1 has the QEMU CPU
-address type. 'flags' contains the QEMU memory index (selects user or
-kernel access) for example.
+Load data at the QEMU CPU address t1 into t0. t1 has the QEMU CPU address
+type. 'flags' contains the QEMU memory index (which selects, for example,
+user or kernel access).
+
+Note that "qemu_ld32" implies a 32-bit result, while "qemu_ld32u" and
+"qemu_ld32s" imply a 64-bit result appropriately extended from 32 bits.
* qemu_st8 t0, t1, flags
qemu_st16 t0, t1, flags
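As a reading aid for the note added above (editorial, not part of the patch): the difference between the three 32-bit loads can be written out in plain C. This is only a sketch of the result semantics; the actual accesses go through QEMU's softmmu/user-mode load paths.

```c
#include <stdint.h>

/* Result semantics only; the 32-bit word is assumed to have been
 * fetched from guest memory already. */
uint32_t qemu_ld32_result(uint32_t v)  { return v; }            /* 32-bit result */
uint64_t qemu_ld32u_result(uint32_t v) { return v; }            /* zero-extended to 64 bits */
int64_t  qemu_ld32s_result(uint32_t v) { return (int32_t)v; }   /* sign-extended to 64 bits */
```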
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, COND_AL, args, 1 | 4);
break;
- case INDEX_op_qemu_ld32u:
+ case INDEX_op_qemu_ld32:
tcg_out_qemu_ld(s, COND_AL, args, 2);
break;
case INDEX_op_qemu_ld64:
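Across the backend hunks that follow, the third argument of tcg_out_qemu_ld encodes the access, and the same pattern repeats: judging from the visible cases, bits 0-1 appear to hold log2 of the access size and bit 2 a sign-extension request, so `1 | 4` is a signed 16-bit load and `2` a 32-bit load. A standalone decoding sketch under that assumption (the helper below is illustrative, not a QEMU function):

```c
#include <stdio.h>

/* Decode the size/sign value passed to tcg_out_qemu_ld in the hunks
 * above (an inference from the visible cases, not a quote from the tree). */
static void describe_qemu_ld(int opc)
{
    int bits = 8 << (opc & 3);              /* 0 -> 8, 1 -> 16, 2 -> 32, 3 -> 64 */
    printf("%d-bit %s load\n", bits, (opc & 4) ? "sign-extending" : "plain");
}

int main(void)
{
    describe_qemu_ld(1 | 4);                /* INDEX_op_qemu_ld16s */
    describe_qemu_ld(2);                    /* INDEX_op_qemu_ld32  */
    return 0;
}
```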
{ INDEX_op_qemu_ld8s, { "r", "x", "X" } },
{ INDEX_op_qemu_ld16u, { "r", "x", "X" } },
{ INDEX_op_qemu_ld16s, { "r", "x", "X" } },
- { INDEX_op_qemu_ld32u, { "r", "x", "X" } },
+ { INDEX_op_qemu_ld32, { "r", "x", "X" } },
{ INDEX_op_qemu_ld64, { "d", "r", "x", "X" } },
{ INDEX_op_qemu_st8, { "x", "x", "X" } },
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, args, 1 | 4);
break;
- case INDEX_op_qemu_ld32u:
+ case INDEX_op_qemu_ld32:
tcg_out_qemu_ld(s, args, 2);
break;
{ INDEX_op_qemu_ld8s, { "r", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L" } },
{ INDEX_op_qemu_ld64, { "r", "r", "L" } },
{ INDEX_op_qemu_st8, { "L", "L" } },
{ INDEX_op_qemu_ld8s, { "r", "L", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L", "L" } },
{ INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },
{ INDEX_op_qemu_st8, { "L", "L", "L" } },
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, args, 1 | 4);
break;
- case INDEX_op_qemu_ld32u:
+ case INDEX_op_qemu_ld32:
tcg_out_qemu_ld(s, args, 2);
break;
case INDEX_op_qemu_ld64:
{ INDEX_op_qemu_ld8s, { "r", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L" } },
{ INDEX_op_qemu_ld64, { "r", "r", "L" } },
{ INDEX_op_qemu_st8, { "cb", "L" } },
{ INDEX_op_qemu_ld8s, { "r", "L", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L", "L" } },
{ INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },
{ INDEX_op_qemu_st8, { "cb", "L", "L" } },
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, args, 1 | 4);
break;
- case INDEX_op_qemu_ld32u:
+ case INDEX_op_qemu_ld32:
tcg_out_qemu_ld(s, args, 2);
break;
case INDEX_op_qemu_ld64:
{ INDEX_op_qemu_ld8s, { "L", "lZ" } },
{ INDEX_op_qemu_ld16u, { "L", "lZ" } },
{ INDEX_op_qemu_ld16s, { "L", "lZ" } },
- { INDEX_op_qemu_ld32u, { "L", "lZ" } },
+ { INDEX_op_qemu_ld32, { "L", "lZ" } },
{ INDEX_op_qemu_ld64, { "L", "L", "lZ" } },
{ INDEX_op_qemu_st8, { "SZ", "SZ" } },
{ INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
{ INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
{ INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
- { INDEX_op_qemu_ld32u, { "L", "lZ", "lZ" } },
+ { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
{ INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },
{ INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, args, 1 | 4);
break;
- case INDEX_op_qemu_ld32u:
+ case INDEX_op_qemu_ld32:
tcg_out_qemu_ld(s, args, 2);
break;
case INDEX_op_qemu_ld64:
{ INDEX_op_qemu_ld8s, { "r", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L" } },
{ INDEX_op_qemu_ld64, { "r", "r", "L" } },
{ INDEX_op_qemu_st8, { "K", "K" } },
{ INDEX_op_qemu_ld8s, { "r", "L", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L", "L" } },
{ INDEX_op_qemu_ld64, { "r", "L", "L", "L" } },
{ INDEX_op_qemu_st8, { "K", "K", "K" } },
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld (s, args, 1 | 4);
break;
+ case INDEX_op_qemu_ld32:
case INDEX_op_qemu_ld32u:
tcg_out_qemu_ld (s, args, 2);
break;
{ INDEX_op_qemu_ld8s, { "r", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L" } },
{ INDEX_op_qemu_ld32u, { "r", "L" } },
{ INDEX_op_qemu_ld32s, { "r", "L" } },
{ INDEX_op_qemu_ld64, { "r", "L" } },
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, args, 1 | 4);
break;
+ case INDEX_op_qemu_ld32:
+#if TCG_TARGET_REG_BITS == 64
case INDEX_op_qemu_ld32u:
+#endif
tcg_out_qemu_ld(s, args, 2);
break;
#if TCG_TARGET_REG_BITS == 64
{ INDEX_op_qemu_ld8s, { "r", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L" } },
- { INDEX_op_qemu_ld32u, { "r", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L" } },
#if TCG_TARGET_REG_BITS == 64
+ { INDEX_op_qemu_ld32u, { "r", "L" } },
{ INDEX_op_qemu_ld32s, { "r", "L" } },
#endif
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i_i32(INDEX_op_qemu_ld32u, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
- tcg_gen_op4i_i32(INDEX_op_qemu_ld32u, TCGV_LOW(ret), TCGV_LOW(addr),
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i_i32(INDEX_op_qemu_ld32u, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
- tcg_gen_op4i_i32(INDEX_op_qemu_ld32u, TCGV_LOW(ret), TCGV_LOW(addr),
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
TCGV_HIGH(addr), mem_index);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
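On a 32-bit host, the tcg-op.h change above makes the extension explicit in the generated ops: a single qemu_ld32 fills the low half, and the high half is then set by tcg_gen_movi_i32 (zero) or tcg_gen_sari_i32 (sign). A minimal C sketch of the resulting value, assuming the 32-bit word has already been loaded (the struct is illustrative, not a QEMU type):

```c
#include <stdint.h>

/* High/low register pair as used for 64-bit values on a 32-bit host. */
struct pair32 { uint32_t low, high; };

static struct pair32 ld32u_expansion(uint32_t loaded)
{
    /* qemu_ld32 result, then tcg_gen_movi_i32(high, 0) */
    return (struct pair32){ .low = loaded, .high = 0 };
}

static struct pair32 ld32s_expansion(uint32_t loaded)
{
    /* qemu_ld32 result, then tcg_gen_sari_i32(high, low, 31) */
    return (struct pair32){ .low  = loaded,
                            .high = (uint32_t)((int32_t)loaded >> 31) };
}
```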
DEF2(qemu_ld16s, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
#endif
#if TARGET_LONG_BITS == 32
-DEF2(qemu_ld32u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
+DEF2(qemu_ld32, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
#else
-DEF2(qemu_ld32u, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
+DEF2(qemu_ld32, 1, 2, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
#endif
#if TARGET_LONG_BITS == 32
DEF2(qemu_ld64, 2, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
DEF2(qemu_ld8s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
DEF2(qemu_ld16u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
DEF2(qemu_ld16s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
+DEF2(qemu_ld32, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
DEF2(qemu_ld32u, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
DEF2(qemu_ld32s, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
DEF2(qemu_ld64, 1, 1, 1, TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS)
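The DEF2 lines above differ only in their argument counts. Assuming the macro's numeric fields are output-, input-, and constant-argument counts (an inference from the surrounding entries, not a quote from tcg-opc.h), the new qemu_ld32 definitions read as: one output value, one address input when the guest address fits a single host register (TARGET_LONG_BITS == 32), two address inputs otherwise, plus one constant memory index. A small sanity-check sketch:

```c
#include <stdio.h>

struct arity { const char *op; int oargs, iargs, cargs; };

int main(void)
{
    /* Mirrors DEF2(qemu_ld32, 1, 1, 1, ...) and DEF2(qemu_ld32, 1, 2, 1, ...). */
    struct arity narrow = { "qemu_ld32 (TARGET_LONG_BITS == 32)", 1, 1, 1 };
    struct arity wide   = { "qemu_ld32 (64-bit guest address)",   1, 2, 1 };
    printf("%s: %d args total\n", narrow.op,
           narrow.oargs + narrow.iargs + narrow.cargs);
    printf("%s: %d args total\n", wide.op,
           wide.oargs + wide.iargs + wide.cargs);
    return 0;
}
```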
case INDEX_op_qemu_ld16s:
tcg_out_qemu_ld(s, args, 1 | 4);
break;
+ case INDEX_op_qemu_ld32:
case INDEX_op_qemu_ld32u:
tcg_out_qemu_ld(s, args, 2);
break;
{ INDEX_op_qemu_ld8s, { "r", "L" } },
{ INDEX_op_qemu_ld16u, { "r", "L" } },
{ INDEX_op_qemu_ld16s, { "r", "L" } },
+ { INDEX_op_qemu_ld32, { "r", "L" } },
{ INDEX_op_qemu_ld32u, { "r", "L" } },
{ INDEX_op_qemu_ld32s, { "r", "L" } },
{ INDEX_op_qemu_ld64, { "r", "L" } },