NEON_GET_REG(T0, rm, pass * 2);
NEON_GET_REG(T1, rm, pass * 2 + 1);
switch (size) {
- case 0: tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]); break;
+ case 0: tcg_gen_bswap32_i32(cpu_T[0], cpu_T[0]); break;
case 1: gen_swap_half(cpu_T[0]); break;
case 2: /* no-op */ break;
default: abort();
} else {
gen_op_movl_T0_T1();
switch (size) {
- case 0: tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]); break;
+ case 0: tcg_gen_bswap32_i32(cpu_T[0], cpu_T[0]); break;
case 1: gen_swap_half(cpu_T[0]); break;
default: abort();
}
switch (op) {
case 1: /* VREV32 */
switch (size) {
- case 0: tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]); break;
+ case 0: tcg_gen_bswap32_i32(cpu_T[0], cpu_T[0]); break;
case 1: gen_swap_half(cpu_T[0]); break;
default: return 1;
}
if (insn & (1 << 7))
gen_rev16(tmp);
else
- tcg_gen_bswap_i32(tmp, tmp);
+ tcg_gen_bswap32_i32(tmp, tmp);
}
store_reg(s, rd, tmp);
} else {
gen_helper_rbit(tmp, tmp);
break;
case 0x08: /* rev */
- tcg_gen_bswap_i32(tmp, tmp);
+ tcg_gen_bswap32_i32(tmp, tmp);
break;
case 0x09: /* rev16 */
gen_rev16(tmp);
rd = insn & 0x7;
tmp = load_reg(s, rn);
switch ((insn >> 6) & 3) {
- case 0: tcg_gen_bswap_i32(tmp, tmp); break;
+ case 0: tcg_gen_bswap32_i32(tmp, tmp); break;
case 1: gen_rev16(tmp); break;
case 3: gen_revsh(tmp); break;
default: goto illegal_op;
#ifdef TARGET_X86_64
if (dflag == 2) {
gen_op_mov_TN_reg(OT_QUAD, 0, reg);
- tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
+ tcg_gen_bswap64_i64(cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(OT_QUAD, reg);
} else
{
tmp0 = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
- tcg_gen_bswap_i32(tmp0, tmp0);
+ tcg_gen_bswap32_i32(tmp0, tmp0);
tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
gen_op_mov_reg_T0(OT_LONG, reg);
}
#else
{
gen_op_mov_TN_reg(OT_LONG, 0, reg);
- tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
+ tcg_gen_bswap32_i32(cpu_T[0], cpu_T[0]);
gen_op_mov_reg_T0(OT_LONG, reg);
}
#endif
TCGv reg;
reg = DREG(insn, 0);
- tcg_gen_bswap_i32(reg, reg);
+ tcg_gen_bswap32_i32(reg, reg);
}
DISAS_INSN(move)
#if defined(TARGET_PPC64)
TCGv_i32 t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg1);
- tcg_gen_bswap_i32(t0, t0);
+ tcg_gen_bswap32_i32(t0, t0);
tcg_gen_extu_i32_tl(arg1, t0);
tcg_temp_free_i32(t0);
#else
- tcg_gen_bswap_i32(arg1, arg1);
+ tcg_gen_bswap32_i32(arg1, arg1);
#endif
}
}
tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg1);
- tcg_gen_bswap_i32(t0, t0);
+ tcg_gen_bswap32_i32(t0, t0);
tcg_gen_ext_i32_tl(arg1, t0);
tcg_temp_free_i32(t0);
} else
{
tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
if (unlikely(ctx->le_mode)) {
- tcg_gen_bswap_i64(arg1, arg1);
+ tcg_gen_bswap64_i64(arg1, arg1);
}
}
TCGv t1;
t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg1);
- tcg_gen_bswap_i32(t0, t0);
+ tcg_gen_bswap32_i32(t0, t0);
t1 = tcg_temp_new();
tcg_gen_extu_i32_tl(t1, t0);
tcg_temp_free_i32(t0);
tcg_temp_free(t1);
#else
TCGv t0 = tcg_temp_new_i32();
- tcg_gen_bswap_i32(t0, arg1);
+ tcg_gen_bswap32_i32(t0, arg1);
tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
tcg_temp_free(t0);
#endif
{
if (unlikely(ctx->le_mode)) {
TCGv_i64 t0 = tcg_temp_new_i64();
- tcg_gen_bswap_i64(t0, arg1);
+ tcg_gen_bswap64_i64(t0, arg1);
tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
tcg_temp_free_i64(t0);
} else
#if defined(TARGET_PPC64)
TCGv_i32 t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg1);
- tcg_gen_bswap_i32(t0, t0);
+ tcg_gen_bswap32_i32(t0, t0);
tcg_gen_extu_i32_tl(arg1, t0);
tcg_temp_free_i32(t0);
#else
- tcg_gen_bswap_i32(arg1, arg1);
+ tcg_gen_bswap32_i32(arg1, arg1);
#endif
}
}
TCGv t1;
t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg1);
- tcg_gen_bswap_i32(t0, t0);
+ tcg_gen_bswap32_i32(t0, t0);
t1 = tcg_temp_new();
tcg_gen_extu_i32_tl(t1, t0);
tcg_temp_free_i32(t0);
tcg_temp_free(t1);
#else
TCGv t0 = tcg_temp_new_i32();
- tcg_gen_bswap_i32(t0, arg1);
+ tcg_gen_bswap32_i32(t0, arg1);
tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
tcg_temp_free(t0);
#endif
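
(Aside, not part of the patch: judging from the target-ppc context above, the swap only happens under ctx->le_mode, and on 64-bit targets the value is truncated to 32 bits, byte-swapped, then zero-extended (extu_i32_tl) or sign-extended (ext_i32_tl) back to target-long width. A minimal plain-C model of that fixup; the function names here are invented for the sketch and are not QEMU APIs.)

    #include <assert.h>
    #include <stdint.h>

    /* Reverse the byte order of a 32-bit word, as bswap32_i32 requests. */
    static uint32_t swap32(uint32_t x)
    {
        return ((x & 0x000000ffu) << 24) | ((x & 0x0000ff00u) << 8) |
               ((x & 0x00ff0000u) >> 8)  | ((x & 0xff000000u) >> 24);
    }

    /* trunc_tl_i32, bswap32_i32, extu_i32_tl: zero-extend back to 64 bits. */
    static uint64_t le_ld32u_fixup(uint64_t reg)
    {
        return (uint64_t)swap32((uint32_t)reg);
    }

    /* trunc_tl_i32, bswap32_i32, ext_i32_tl: sign-extend back to 64 bits. */
    static int64_t le_ld32s_fixup(uint64_t reg)
    {
        return (int64_t)(int32_t)swap32((uint32_t)reg);
    }

    int main(void)
    {
        assert(le_ld32u_fixup(0x00000000804020ffull) == 0x00000000ff204080ull);
        assert(le_ld32s_fixup(0x00000000804020ffull) == (int64_t)0xffffffffff204080ull);
        return 0;
    }
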
#undef TCG_TARGET_WORDS_BIGENDIAN
#undef TCG_TARGET_HAS_div_i32
#undef TCG_TARGET_HAS_div_i64
-#undef TCG_TARGET_HAS_bswap_i32
+#undef TCG_TARGET_HAS_bswap32_i32
#define TCG_TARGET_HAS_ext8s_i32
#define TCG_TARGET_HAS_ext16s_i32
#define TCG_TARGET_HAS_neg_i32
//#define TCG_TARGET_HAS_ext8s_i32
//#define TCG_TARGET_HAS_ext16s_i32
//#define TCG_TARGET_HAS_bswap16_i32
-//#define TCG_TARGET_HAS_bswap_i32
+//#define TCG_TARGET_HAS_bswap32_i32
/* Note: must be synced with dyngen-exec.h */
#define TCG_AREG0 TCG_REG_R17
tcg_out_brcond2(s, args, const_args);
break;
- case INDEX_op_bswap_i32:
+ case INDEX_op_bswap32_i32:
tcg_out_opc(s, (0xc8 + args[0]) | P_EXT);
break;
{ INDEX_op_sub2_i32, { "r", "r", "0", "1", "ri", "ri" } },
{ INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },
- { INDEX_op_bswap_i32, { "r", "0" } },
+ { INDEX_op_bswap32_i32, { "r", "0" } },
{ INDEX_op_neg_i32, { "r", "0" } },
#define TCG_TARGET_CALL_STACK_OFFSET 0
/* optional instructions */
-#define TCG_TARGET_HAS_bswap_i32
+#define TCG_TARGET_HAS_bswap32_i32
#define TCG_TARGET_HAS_neg_i32
#define TCG_TARGET_HAS_not_i32
#define TCG_TARGET_HAS_ext8s_i32
#endif
/* optional instructions */
-//#define TCG_TARGET_HAS_bswap_i32
-//#define TCG_TARGET_HAS_bswap_i64
+//#define TCG_TARGET_HAS_bswap32_i32
+//#define TCG_TARGET_HAS_bswap64_i64
//#define TCG_TARGET_HAS_neg_i32
//#define TCG_TARGET_HAS_neg_i64
#endif
}
-static inline void tcg_gen_bswap_i32(TCGv_i32 ret, TCGv_i32 arg)
+static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_bswap_i32
- tcg_gen_op2_i32(INDEX_op_bswap_i32, ret, arg);
+#ifdef TCG_TARGET_HAS_bswap32_i32
+ tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
#else
TCGv_i32 t0, t1;
t0 = tcg_temp_new_i32();
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
+static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
TCGv_i32 t0, t1;
t0 = tcg_temp_new_i32();
t1 = tcg_temp_new_i32();
- tcg_gen_bswap_i32(t0, TCGV_LOW(arg));
- tcg_gen_bswap_i32(t1, TCGV_HIGH(arg));
+ tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
+ tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
tcg_gen_mov_i32(TCGV_LOW(ret), t1);
tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
tcg_temp_free_i32(t0);
tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}
-static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
+static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_bswap_i64
- tcg_gen_op2_i64(INDEX_op_bswap_i64, ret, arg);
+#ifdef TCG_TARGET_HAS_bswap64_i64
+ tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
#else
TCGv_i32 t0, t1;
t0 = tcg_temp_new_i32();
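
(Aside, not part of the patch: the tcg-op.h hunks only rename the generators; the value semantics are unchanged. The 32-bit-host tcg_gen_bswap64_i64 shown above swaps each 32-bit half and then exchanges the halves. A short C cross-check of that decomposition, assuming the GCC/clang __builtin_bswap32/__builtin_bswap64 builtins are available on the build host.)

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
        /* Swap each half, then exchange the halves -- the same shape as the
         * two bswap32_i32 ops plus two movs in the expansion above.  The
         * builtins are used here only to verify the result. */
        uint64_t x  = 0x1122334455667788ull;
        uint32_t lo = (uint32_t)x;
        uint32_t hi = (uint32_t)(x >> 32);
        uint64_t r  = ((uint64_t)__builtin_bswap32(lo) << 32) | __builtin_bswap32(hi);

        assert(r == __builtin_bswap64(x));
        assert(r == 0x8877665544332211ull);
        return 0;
    }
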
#ifdef TCG_TARGET_HAS_ext16s_i32
DEF2(ext16s_i32, 1, 1, 0, 0)
#endif
-#ifdef TCG_TARGET_HAS_bswap_i32
-DEF2(bswap_i32, 1, 1, 0, 0)
+#ifdef TCG_TARGET_HAS_bswap32_i32
+DEF2(bswap32_i32, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_not_i32
DEF2(not_i32, 1, 1, 0, 0)
#ifdef TCG_TARGET_HAS_ext32s_i64
DEF2(ext32s_i64, 1, 1, 0, 0)
#endif
-#ifdef TCG_TARGET_HAS_bswap_i64
-DEF2(bswap_i64, 1, 1, 0, 0)
+#ifdef TCG_TARGET_HAS_bswap64_i64
+DEF2(bswap64_i64, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_not_i64
DEF2(not_i64, 1, 1, 0, 0)
args[3], P_REXW);
break;
- case INDEX_op_bswap_i32:
+ case INDEX_op_bswap32_i32:
tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
break;
- case INDEX_op_bswap_i64:
+ case INDEX_op_bswap64_i64:
tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
break;
{ INDEX_op_brcond_i64, { "r", "re" } },
- { INDEX_op_bswap_i32, { "r", "0" } },
- { INDEX_op_bswap_i64, { "r", "0" } },
+ { INDEX_op_bswap32_i32, { "r", "0" } },
+ { INDEX_op_bswap64_i64, { "r", "0" } },
{ INDEX_op_neg_i32, { "r", "0" } },
{ INDEX_op_neg_i64, { "r", "0" } },
#define TCG_TARGET_CALL_STACK_OFFSET 0
/* optional instructions */
-#define TCG_TARGET_HAS_bswap_i32
-#define TCG_TARGET_HAS_bswap_i64
+#define TCG_TARGET_HAS_bswap32_i32
+#define TCG_TARGET_HAS_bswap64_i64
#define TCG_TARGET_HAS_neg_i32
#define TCG_TARGET_HAS_neg_i64
#define TCG_TARGET_HAS_not_i32
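
(Aside, not part of the patch: in the i386/x86_64 backend hunks above, the renamed ops still map to the host BSWAP instruction, 0x0F (added by P_EXT) followed by 0xC8 plus the register number, with P_REXW selecting the 64-bit form; the { "r", "0" } constraint ties the input to the output register because BSWAP rewrites its operand in place. A small sketch of that in-place behaviour using GCC extended asm; it builds and runs only on an x86-64 host.)

    #include <assert.h>
    #include <stdint.h>

    /* Same operation the backend emits for bswap64_i64: BSWAP on a
     * 64-bit register, modifying it in place ("+r" constraint). */
    static uint64_t bswap64_host(uint64_t x)
    {
        __asm__("bswapq %0" : "+r"(x));
        return x;
    }

    int main(void)
    {
        assert(bswap64_host(0x1122334455667788ull) == 0x8877665544332211ull);
        return 0;
    }
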