#ifdef USE_KQEMU
if (kqemu_is_ok(env) && env->interrupt_request == 0) {
int ret;
- env->eflags = env->eflags | cc_table[CC_OP].compute_all() | (DF & DF_MASK);
+ env->eflags = env->eflags | helper_cc_compute_all(CC_OP) | (DF & DF_MASK);
ret = kqemu_cpu_exec(env);
/* put eflags in CPU temporary format */
CC_SRC = env->eflags & (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C);
/* restore flags in standard format */
regs_to_env();
#if defined(TARGET_I386)
- env->eflags = env->eflags | cc_table[CC_OP].compute_all() | (DF & DF_MASK);
+ env->eflags = env->eflags | helper_cc_compute_all(CC_OP) | (DF & DF_MASK);
cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
env->eflags &= ~(DF_MASK | CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C);
#elif defined(TARGET_ARM)
#if defined(TARGET_I386)
/* restore flags in standard format */
- env->eflags = env->eflags | cc_table[CC_OP].compute_all() | (DF & DF_MASK);
+ env->eflags = env->eflags | helper_cc_compute_all(CC_OP) | (DF & DF_MASK);
#elif defined(TARGET_ARM)
/* XXX: Save/restore host fpu exception state? */
#elif defined(TARGET_SPARC)
--- /dev/null
+/* Helper file for declaring TCG helper functions.
+ Should be included at the start and end of target-foo/helper.h.
+
+ Targets should use DEF_HELPER_N and DEF_HELPER_FLAGS_N to declare helper
+ functions. Names should be specified without the helper_ prefix, and
+ the return and argument types specified. 3 basic types are understood
+ (i32, i64 and ptr). Additional aliases are provided for convenience and
+ to match the types used by the C helper implementation.
+
+ The target helper.h should be included in all files that use/define
+ helper functions. This will ensure that function prototypes are
+ consistent. In addition it should be included an extra two times for
+ helper.c, defining:
+ GEN_HELPER 1 to produce op generation functions (gen_helper_*)
+ GEN_HELPER 2 to do runtime registration of helper functions.
+ */
+
+#ifndef DEF_HELPER_H
+#define DEF_HELPER_H 1
+
+#define HELPER(name) glue(helper_, name)
+
+#define GET_TCGV_i32 GET_TCGV_I32
+#define GET_TCGV_i64 GET_TCGV_I64
+#define GET_TCGV_ptr GET_TCGV_PTR
+
+/* Some types that make sense in C, but not for TCG. */
+#define dh_alias_i32 i32
+#define dh_alias_s32 i32
+#define dh_alias_int i32
+#define dh_alias_i64 i64
+#define dh_alias_s64 i64
+#define dh_alias_f32 i32
+#define dh_alias_f64 i64
+#if TARGET_LONG_BITS == 32
+#define dh_alias_tl i32
+#else
+#define dh_alias_tl i64
+#endif
+#define dh_alias_ptr ptr
+#define dh_alias_void void
+#define dh_alias_env ptr
+#define dh_alias(t) glue(dh_alias_, t)
+
+#define dh_ctype_i32 uint32_t
+#define dh_ctype_s32 int32_t
+#define dh_ctype_int int
+#define dh_ctype_i64 uint64_t
+#define dh_ctype_s64 int64_t
+#define dh_ctype_f32 float32
+#define dh_ctype_f64 float64
+#define dh_ctype_tl target_ulong
+#define dh_ctype_ptr void *
+#define dh_ctype_void void
+#define dh_ctype_env CPUState *
+#define dh_ctype(t) dh_ctype_##t
+
+/* We can't use glue() here because it falls foul of C preprocessor
+ recursive expansion rules. */
+#define dh_retvar_decl0_void void
+#define dh_retvar_decl0_i32 TCGv_i32 retval
+#define dh_retvar_decl0_i64 TCGv_i64 retval
+#define dh_retvar_decl0_ptr TCGv_ptr retval
+#define dh_retvar_decl0(t) glue(dh_retvar_decl0_, dh_alias(t))
+
+#define dh_retvar_decl_void
+#define dh_retvar_decl_i32 TCGv_i32 retval,
+#define dh_retvar_decl_i64 TCGv_i64 retval,
+#define dh_retvar_decl_ptr TCGv_ptr retval,
+#define dh_retvar_decl(t) glue(dh_retvar_decl_, dh_alias(t))
+
+#define dh_retvar_void TCG_CALL_DUMMY_ARG
+#define dh_retvar_i32 GET_TCGV_i32(retval)
+#define dh_retvar_i64 GET_TCGV_i64(retval)
+#define dh_retvar_ptr GET_TCGV_ptr(retval)
+#define dh_retvar(t) glue(dh_retvar_, dh_alias(t))
+
+#define dh_is_64bit_void 0
+#define dh_is_64bit_i32 0
+#define dh_is_64bit_i64 1
+#define dh_is_64bit_ptr (TCG_TARGET_REG_BITS == 64)
+#define dh_is_64bit(t) glue(dh_is_64bit_, dh_alias(t))
+
+#define dh_arg(t, n) \
+ args[n - 1] = glue(GET_TCGV_, dh_alias(t))(glue(arg, n)); \
+ sizemask |= dh_is_64bit(t) << n
+
+#define dh_arg_decl(t, n) glue(TCGv_, dh_alias(t)) glue(arg, n)
+
+
+#define DEF_HELPER_0(name, ret) \
+ DEF_HELPER_FLAGS_0(name, 0, ret)
+#define DEF_HELPER_1(name, ret, t1) \
+ DEF_HELPER_FLAGS_1(name, 0, ret, t1)
+#define DEF_HELPER_2(name, ret, t1, t2) \
+ DEF_HELPER_FLAGS_2(name, 0, ret, t1, t2)
+#define DEF_HELPER_3(name, ret, t1, t2, t3) \
+ DEF_HELPER_FLAGS_3(name, 0, ret, t1, t2, t3)
+#define DEF_HELPER_4(name, ret, t1, t2, t3, t4) \
+ DEF_HELPER_FLAGS_4(name, 0, ret, t1, t2, t3, t4)
+
+#endif /* DEF_HELPER_H */
+
+#ifndef GEN_HELPER
+/* Function prototypes. */
+
+#define DEF_HELPER_FLAGS_0(name, flags, ret) \
+dh_ctype(ret) HELPER(name) (void);
+
+#define DEF_HELPER_FLAGS_1(name, flags, ret, t1) \
+dh_ctype(ret) HELPER(name) (dh_ctype(t1));
+
+#define DEF_HELPER_FLAGS_2(name, flags, ret, t1, t2) \
+dh_ctype(ret) HELPER(name) (dh_ctype(t1), dh_ctype(t2));
+
+#define DEF_HELPER_FLAGS_3(name, flags, ret, t1, t2, t3) \
+dh_ctype(ret) HELPER(name) (dh_ctype(t1), dh_ctype(t2), dh_ctype(t3));
+
+#define DEF_HELPER_FLAGS_4(name, flags, ret, t1, t2, t3, t4) \
+dh_ctype(ret) HELPER(name) (dh_ctype(t1), dh_ctype(t2), dh_ctype(t3), \
+ dh_ctype(t4));
+
+#undef GEN_HELPER
+#define GEN_HELPER -1
+
+#elif GEN_HELPER == 1
+/* Gen functions. */
+
+#define DEF_HELPER_FLAGS_0(name, flags, ret) \
+static inline void glue(gen_helper_, name)(dh_retvar_decl0(ret)) \
+{ \
+ int sizemask; \
+ sizemask = dh_is_64bit(ret); \
+ tcg_gen_helperN(HELPER(name), flags, sizemask, dh_retvar(ret), 0, NULL); \
+}
+
+#define DEF_HELPER_FLAGS_1(name, flags, ret, t1) \
+static inline void glue(gen_helper_, name)(dh_retvar_decl(ret) dh_arg_decl(t1, 1)) \
+{ \
+ TCGArg args[1]; \
+ int sizemask; \
+ sizemask = dh_is_64bit(ret); \
+ dh_arg(t1, 1); \
+ tcg_gen_helperN(HELPER(name), flags, sizemask, dh_retvar(ret), 1, args); \
+}
+
+#define DEF_HELPER_FLAGS_2(name, flags, ret, t1, t2) \
+static inline void glue(gen_helper_, name)(dh_retvar_decl(ret) dh_arg_decl(t1, 1), \
+ dh_arg_decl(t2, 2)) \
+{ \
+ TCGArg args[2]; \
+ int sizemask; \
+ sizemask = dh_is_64bit(ret); \
+ dh_arg(t1, 1); \
+ dh_arg(t2, 2); \
+ tcg_gen_helperN(HELPER(name), flags, sizemask, dh_retvar(ret), 2, args); \
+}
+
+#define DEF_HELPER_FLAGS_3(name, flags, ret, t1, t2, t3) \
+static inline void glue(gen_helper_, name)(dh_retvar_decl(ret) dh_arg_decl(t1, 1), \
+ dh_arg_decl(t2, 2), dh_arg_decl(t3, 3)) \
+{ \
+ TCGArg args[3]; \
+ int sizemask; \
+ sizemask = dh_is_64bit(ret); \
+ dh_arg(t1, 1); \
+ dh_arg(t2, 2); \
+ dh_arg(t3, 3); \
+ tcg_gen_helperN(HELPER(name), flags, sizemask, dh_retvar(ret), 3, args); \
+}
+
+#define DEF_HELPER_FLAGS_4(name, flags, ret, t1, t2, t3, t4) \
+static inline void glue(gen_helper_, name)(dh_retvar_decl(ret) dh_arg_decl(t1, 1), \
+ dh_arg_decl(t2, 2), dh_arg_decl(t3, 3), dh_arg_decl(t4, 4)) \
+{ \
+ TCGArg args[4]; \
+ int sizemask; \
+ sizemask = dh_is_64bit(ret); \
+ dh_arg(t1, 1); \
+ dh_arg(t2, 2); \
+ dh_arg(t3, 3); \
+ dh_arg(t4, 4); \
+ tcg_gen_helperN(HELPER(name), flags, sizemask, dh_retvar(ret), 4, args); \
+}
+
+#undef GEN_HELPER
+#define GEN_HELPER -1
+
+#elif GEN_HELPER == 2
+/* Register helpers. */
+
+#define DEF_HELPER_FLAGS_0(name, flags, ret) \
+tcg_register_helper(HELPER(name), #name);
+
+#define DEF_HELPER_FLAGS_1(name, flags, ret, t1) \
+DEF_HELPER_FLAGS_0(name, flags, ret)
+
+#define DEF_HELPER_FLAGS_2(name, flags, ret, t1, t2) \
+DEF_HELPER_FLAGS_0(name, flags, ret)
+
+#define DEF_HELPER_FLAGS_3(name, flags, ret, t1, t2, t3) \
+DEF_HELPER_FLAGS_0(name, flags, ret)
+
+#define DEF_HELPER_FLAGS_4(name, flags, ret, t1, t2, t3, t4) \
+DEF_HELPER_FLAGS_0(name, flags, ret)
+
+#undef GEN_HELPER
+#define GEN_HELPER -1
+
+#elif GEN_HELPER == -1
+/* Undefine macros. */
+
+#undef DEF_HELPER_FLAGS_0
+#undef DEF_HELPER_FLAGS_1
+#undef DEF_HELPER_FLAGS_2
+#undef DEF_HELPER_FLAGS_3
+#undef DEF_HELPER_FLAGS_4
+#undef GEN_HELPER
+
+#endif
+
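
The comment at the top of def-helper.h describes the intended include pattern; below is a minimal sketch of that pattern for a hypothetical target-foo with a single helper. The names foo_add, op_helper.c and foo_translate_init are illustrative assumptions only and are not part of this patch:

/* target-foo/helper.h (hypothetical) */
#include "def-helper.h"

DEF_HELPER_2(foo_add, i32, i32, i32)

#include "def-helper.h"

/* target-foo/op_helper.c (hypothetical): the plain C implementation.
   The prototype comes from the declaration above; the usual target
   includes (exec.h etc.) are assumed. */
#include "helper.h"

uint32_t HELPER(foo_add)(uint32_t a, uint32_t b)
{
    return a + b;
}

/* target-foo/translate.c (hypothetical) */
#include "helper.h"             /* first pass: function prototypes */
#define GEN_HELPER 1
#include "helper.h"             /* second pass: gen_helper_foo_add() */

static void gen_foo_add(TCGv_i32 dst, TCGv_i32 a, TCGv_i32 b)
{
    gen_helper_foo_add(dst, a, b);      /* emits the TCG helper call */
}

static void foo_translate_init(void)
{
    /* third pass: register the helpers with TCG at startup */
#define GEN_HELPER 2
#include "helper.h"
}

One DEF_HELPER_2 line therefore yields the C prototype, the gen_helper_foo_add() wrapper and the tcg_register_helper() call, each produced by a different pass over helper.h as GEN_HELPER is redefined.
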
static inline void gen_icount_start(void)
{
- TCGv count;
+ TCGv_i32 count;
if (!use_icount)
return;
count needs to live over the conditional branch. To workaround this
we allow the target to supply a convenient register temporary. */
#ifndef ICOUNT_TEMP
- count = tcg_temp_local_new(TCG_TYPE_I32);
+ count = tcg_temp_local_new_i32();
#else
count = ICOUNT_TEMP;
#endif
tcg_gen_brcondi_i32(TCG_COND_LT, count, 0, icount_label);
tcg_gen_st16_i32(count, cpu_env, offsetof(CPUState, icount_decr.u16.low));
#ifndef ICOUNT_TEMP
- tcg_temp_free(count);
+ tcg_temp_free_i32(count);
#endif
}
static void inline gen_io_start(void)
{
- TCGv tmp = tcg_const_i32(1);
+ TCGv_i32 tmp = tcg_const_i32(1);
tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, can_do_io));
- tcg_temp_free(tmp);
+ tcg_temp_free_i32(tmp);
}
static inline void gen_io_end(void)
{
- TCGv tmp = tcg_const_i32(0);
+ TCGv_i32 tmp = tcg_const_i32(0);
tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, can_do_io));
- tcg_temp_free(tmp);
+ tcg_temp_free_i32(tmp);
}
-#ifndef DEF_HELPER
-#define DEF_HELPER(ret, name, params) ret name params;
-#endif
-
-DEF_HELPER(void, helper_tb_flush, (void))
-
-DEF_HELPER(void, helper_excp, (int, int))
-DEF_HELPER(uint64_t, helper_amask, (uint64_t))
-DEF_HELPER(uint64_t, helper_load_pcc, (void))
-DEF_HELPER(uint64_t, helper_load_implver, (void))
-DEF_HELPER(uint64_t, helper_rc, (void))
-DEF_HELPER(uint64_t, helper_rs, (void))
-
-DEF_HELPER(uint64_t, helper_addqv, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_addlv, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_subqv, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_sublv, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mullv, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mulqv, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_umulh, (uint64_t, uint64_t))
-
-DEF_HELPER(uint64_t, helper_ctpop, (uint64_t))
-DEF_HELPER(uint64_t, helper_ctlz, (uint64_t))
-DEF_HELPER(uint64_t, helper_cttz, (uint64_t))
-
-DEF_HELPER(uint64_t, helper_mskbl, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_insbl, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mskwl, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_inswl, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mskll, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_insll, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_zap, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_zapnot, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mskql, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_insql, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mskwh, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_inswh, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_msklh, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_inslh, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mskqh, (int64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_insqh, (int64_t, uint64_t))
-
-DEF_HELPER(uint64_t, helper_cmpbge, (uint64_t, uint64_t))
-
-DEF_HELPER(uint64_t, helper_load_fpcr, (void))
-DEF_HELPER(void, helper_store_fpcr, (uint64_t val))
-
-DEF_HELPER(uint32_t, helper_f_to_memory, (uint64_t s))
-DEF_HELPER(uint64_t, helper_memory_to_f, (uint32_t s))
-DEF_HELPER(uint64_t, helper_addf, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_subf, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mulf, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_divf, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_sqrtf, (uint64_t))
-
-DEF_HELPER(uint64_t, helper_g_to_memory, (uint64_t s))
-DEF_HELPER(uint64_t, helper_memory_to_g, (uint64_t s))
-DEF_HELPER(uint64_t, helper_addg, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_subg, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mulg, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_divg, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_sqrtg, (uint64_t))
-
-DEF_HELPER(uint32_t, helper_s_to_memory, (uint64_t s))
-DEF_HELPER(uint64_t, helper_memory_to_s, (uint32_t s))
-DEF_HELPER(uint64_t, helper_adds, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_subs, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_muls, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_divs, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_sqrts, (uint64_t))
-
-DEF_HELPER(uint64_t, helper_addt, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_subt, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mult, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_divt, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_sqrtt, (uint64_t))
-
-DEF_HELPER(uint64_t, helper_cmptun, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cmpteq, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cmptle, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cmptlt, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cmpgeq, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cmpgle, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cmpglt, (uint64_t, uint64_t))
-
-DEF_HELPER(uint64_t, helper_cmpfeq, (uint64_t))
-DEF_HELPER(uint64_t, helper_cmpfne, (uint64_t))
-DEF_HELPER(uint64_t, helper_cmpflt, (uint64_t))
-DEF_HELPER(uint64_t, helper_cmpfle, (uint64_t))
-DEF_HELPER(uint64_t, helper_cmpfgt, (uint64_t))
-DEF_HELPER(uint64_t, helper_cmpfge, (uint64_t))
-
-DEF_HELPER(uint64_t, helper_cpys, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cpysn, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_cpyse, (uint64_t, uint64_t))
-
-DEF_HELPER(uint64_t, helper_cvtts, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtst, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvttq, (uint64_t))
-DEF_HELPER(uint32_t, helper_cvtqs, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtqt, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtqf, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtgf, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtgq, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtqg, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtlq, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtql, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtqlv, (uint64_t))
-DEF_HELPER(uint64_t, helper_cvtqlsv, (uint64_t))
+#include "def-helper.h"
+
+DEF_HELPER_0(tb_flush, void)
+
+DEF_HELPER_2(excp, void, int, int)
+DEF_HELPER_1(amask, i64, i64)
+DEF_HELPER_0(load_pcc, i64)
+DEF_HELPER_0(load_implver, i64)
+DEF_HELPER_0(rc, i64)
+DEF_HELPER_0(rs, i64)
+
+DEF_HELPER_2(addqv, i64, i64, i64)
+DEF_HELPER_2(addlv, i64, i64, i64)
+DEF_HELPER_2(subqv, i64, i64, i64)
+DEF_HELPER_2(sublv, i64, i64, i64)
+DEF_HELPER_2(mullv, i64, i64, i64)
+DEF_HELPER_2(mulqv, i64, i64, i64)
+DEF_HELPER_2(umulh, i64, i64, i64)
+
+DEF_HELPER_1(ctpop, i64, i64)
+DEF_HELPER_1(ctlz, i64, i64)
+DEF_HELPER_1(cttz, i64, i64)
+
+DEF_HELPER_2(mskbl, i64, i64, i64)
+DEF_HELPER_2(insbl, i64, i64, i64)
+DEF_HELPER_2(mskwl, i64, i64, i64)
+DEF_HELPER_2(inswl, i64, i64, i64)
+DEF_HELPER_2(mskll, i64, i64, i64)
+DEF_HELPER_2(insll, i64, i64, i64)
+DEF_HELPER_2(zap, i64, i64, i64)
+DEF_HELPER_2(zapnot, i64, i64, i64)
+DEF_HELPER_2(mskql, i64, i64, i64)
+DEF_HELPER_2(insql, i64, i64, i64)
+DEF_HELPER_2(mskwh, i64, i64, i64)
+DEF_HELPER_2(inswh, i64, i64, i64)
+DEF_HELPER_2(msklh, i64, i64, i64)
+DEF_HELPER_2(inslh, i64, i64, i64)
+DEF_HELPER_2(mskqh, i64, i64, i64)
+DEF_HELPER_2(insqh, i64, i64, i64)
+
+DEF_HELPER_2(cmpbge, i64, i64, i64)
+
+DEF_HELPER_0(load_fpcr, i64)
+DEF_HELPER_1(store_fpcr, void, i64)
+
+DEF_HELPER_1(f_to_memory, i32, i64)
+DEF_HELPER_1(memory_to_f, i64, i32)
+DEF_HELPER_2(addf, i64, i64, i64)
+DEF_HELPER_2(subf, i64, i64, i64)
+DEF_HELPER_2(mulf, i64, i64, i64)
+DEF_HELPER_2(divf, i64, i64, i64)
+DEF_HELPER_1(sqrtf, i64, i64)
+
+DEF_HELPER_1(g_to_memory, i64, i64)
+DEF_HELPER_1(memory_to_g, i64, i64)
+DEF_HELPER_2(addg, i64, i64, i64)
+DEF_HELPER_2(subg, i64, i64, i64)
+DEF_HELPER_2(mulg, i64, i64, i64)
+DEF_HELPER_2(divg, i64, i64, i64)
+DEF_HELPER_1(sqrtg, i64, i64)
+
+DEF_HELPER_1(s_to_memory, i32, i64)
+DEF_HELPER_1(memory_to_s, i64, i32)
+DEF_HELPER_2(adds, i64, i64, i64)
+DEF_HELPER_2(subs, i64, i64, i64)
+DEF_HELPER_2(muls, i64, i64, i64)
+DEF_HELPER_2(divs, i64, i64, i64)
+DEF_HELPER_1(sqrts, i64, i64)
+
+DEF_HELPER_2(addt, i64, i64, i64)
+DEF_HELPER_2(subt, i64, i64, i64)
+DEF_HELPER_2(mult, i64, i64, i64)
+DEF_HELPER_2(divt, i64, i64, i64)
+DEF_HELPER_1(sqrtt, i64, i64)
+
+DEF_HELPER_2(cmptun, i64, i64, i64)
+DEF_HELPER_2(cmpteq, i64, i64, i64)
+DEF_HELPER_2(cmptle, i64, i64, i64)
+DEF_HELPER_2(cmptlt, i64, i64, i64)
+DEF_HELPER_2(cmpgeq, i64, i64, i64)
+DEF_HELPER_2(cmpgle, i64, i64, i64)
+DEF_HELPER_2(cmpglt, i64, i64, i64)
+
+DEF_HELPER_1(cmpfeq, i64, i64)
+DEF_HELPER_1(cmpfne, i64, i64)
+DEF_HELPER_1(cmpflt, i64, i64)
+DEF_HELPER_1(cmpfle, i64, i64)
+DEF_HELPER_1(cmpfgt, i64, i64)
+DEF_HELPER_1(cmpfge, i64, i64)
+
+DEF_HELPER_2(cpys, i64, i64, i64)
+DEF_HELPER_2(cpysn, i64, i64, i64)
+DEF_HELPER_2(cpyse, i64, i64, i64)
+
+DEF_HELPER_1(cvtts, i64, i64)
+DEF_HELPER_1(cvtst, i64, i64)
+DEF_HELPER_1(cvttq, i64, i64)
+DEF_HELPER_1(cvtqs, i64, i64)
+DEF_HELPER_1(cvtqt, i64, i64)
+DEF_HELPER_1(cvtqf, i64, i64)
+DEF_HELPER_1(cvtgf, i64, i64)
+DEF_HELPER_1(cvtgq, i64, i64)
+DEF_HELPER_1(cvtqg, i64, i64)
+DEF_HELPER_1(cvtlq, i64, i64)
+DEF_HELPER_1(cvtql, i64, i64)
+DEF_HELPER_1(cvtqlv, i64, i64)
+DEF_HELPER_1(cvtqlsv, i64, i64)
#if !defined (CONFIG_USER_ONLY)
-DEF_HELPER(void, helper_hw_rei, (void))
-DEF_HELPER(void, helper_hw_ret, (uint64_t))
-DEF_HELPER(uint64_t, helper_mfpr, (int, uint64_t))
-DEF_HELPER(void, helper_mtpr, (int, uint64_t))
-DEF_HELPER(void, helper_set_alt_mode, (void))
-DEF_HELPER(void, helper_restore_mode, (void))
-
-DEF_HELPER(uint64_t, helper_ld_virt_to_phys, (uint64_t))
-DEF_HELPER(uint64_t, helper_st_virt_to_phys, (uint64_t))
-DEF_HELPER(void, helper_ldl_raw, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldq_raw, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldl_l_raw, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldq_l_raw, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldl_kernel, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldq_kernel, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldl_data, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_ldq_data, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_stl_raw, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_stq_raw, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_stl_c_raw, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_stq_c_raw, (uint64_t, uint64_t))
+DEF_HELPER_0(hw_rei, void)
+DEF_HELPER_1(hw_ret, void, i64)
+DEF_HELPER_2(mfpr, i64, int, i64)
+DEF_HELPER_2(mtpr, void, int, i64)
+DEF_HELPER_0(set_alt_mode, void)
+DEF_HELPER_0(restore_mode, void)
+
+DEF_HELPER_1(ld_virt_to_phys, i64, i64)
+DEF_HELPER_1(st_virt_to_phys, i64, i64)
+DEF_HELPER_2(ldl_raw, void, i64, i64)
+DEF_HELPER_2(ldq_raw, void, i64, i64)
+DEF_HELPER_2(ldl_l_raw, void, i64, i64)
+DEF_HELPER_2(ldq_l_raw, void, i64, i64)
+DEF_HELPER_2(ldl_kernel, void, i64, i64)
+DEF_HELPER_2(ldq_kernel, void, i64, i64)
+DEF_HELPER_2(ldl_data, void, i64, i64)
+DEF_HELPER_2(ldq_data, void, i64, i64)
+DEF_HELPER_2(stl_raw, void, i64, i64)
+DEF_HELPER_2(stq_raw, void, i64, i64)
+DEF_HELPER_2(stl_c_raw, i64, i64, i64)
+DEF_HELPER_2(stq_c_raw, i64, i64, i64)
#endif
+
+#include "def-helper.h"
#include "exec.h"
#include "host-utils.h"
#include "softfloat.h"
+#include "helper.h"
void helper_tb_flush (void)
{
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
-#include "helper.h"
#include "tcg-op.h"
#include "qemu-common.h"
+#include "helper.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */
};
/* global register indexes */
-static TCGv cpu_env;
+static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
if (done_init)
return;
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
p = cpu_reg_names;
for (i = 0; i < 31; i++) {
sprintf(p, "ir%d", i);
- cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, ir[i]), p);
+ cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, ir[i]), p);
p += (i < 10) ? 4 : 5;
sprintf(p, "fir%d", i);
- cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, fir[i]), p);
+ cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, fir[i]), p);
p += (i < 10) ? 5 : 6;
}
- cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, pc), "pc");
+ cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, pc), "pc");
- cpu_lock = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, lock), "lock");
+ cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, lock), "lock");
/* register helpers */
-#undef DEF_HELPER
-#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+#define GEN_HELPER 2
#include "helper.h"
done_init = 1;
static always_inline void gen_excp (DisasContext *ctx,
int exception, int error_code)
{
- TCGv tmp1, tmp2;
+ TCGv_i32 tmp1, tmp2;
tcg_gen_movi_i64(cpu_pc, ctx->pc);
tmp1 = tcg_const_i32(exception);
tmp2 = tcg_const_i32(error_code);
- tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
- tcg_temp_free(tmp2);
- tcg_temp_free(tmp1);
+ gen_helper_excp(tmp1, tmp2);
+ tcg_temp_free_i32(tmp2);
+ tcg_temp_free_i32(tmp1);
}
static always_inline void gen_invalid (DisasContext *ctx)
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv tmp = tcg_temp_new();
+ TCGv_i32 tmp32 = tcg_temp_new_i32();
tcg_gen_qemu_ld32u(tmp, t1, flags);
- tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
+ tcg_gen_trunc_i64_i32(tmp32, tmp);
+ gen_helper_memory_to_f(t0, tmp32);
+ tcg_temp_free_i32(tmp32);
tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_qemu_ld64(tmp, t1, flags);
- tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
+ gen_helper_memory_to_g(t0, tmp);
tcg_temp_free(tmp);
}
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv tmp = tcg_temp_new();
+ TCGv_i32 tmp32 = tcg_temp_new_i32();
tcg_gen_qemu_ld32u(tmp, t1, flags);
- tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
+ tcg_gen_trunc_i64_i32(tmp32, tmp);
+ gen_helper_memory_to_s(t0, tmp32);
+ tcg_temp_free_i32(tmp32);
tcg_temp_free(tmp);
}
if (unlikely(ra == 31))
return;
- addr = tcg_temp_new(TCG_TYPE_I64);
+ addr = tcg_temp_new();
if (rb != 31) {
tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
if (clear)
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
- tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
+ TCGv_i32 tmp32 = tcg_temp_new_i32();
+ TCGv tmp = tcg_temp_new();
+ gen_helper_f_to_memory(tmp32, t0);
+ tcg_gen_extu_i32_i64(tmp, tmp32);
tcg_gen_qemu_st32(tmp, t1, flags);
tcg_temp_free(tmp);
+ tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
+ TCGv tmp = tcg_temp_new();
+ gen_helper_g_to_memory(tmp, t0);
tcg_gen_qemu_st64(tmp, t1, flags);
tcg_temp_free(tmp);
}
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
- tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
+ TCGv_i32 tmp32 = tcg_temp_new_i32();
+ TCGv tmp = tcg_temp_new();
+ gen_helper_s_to_memory(tmp32, t0);
+ tcg_gen_extu_i32_i64(tmp, tmp32);
tcg_gen_qemu_st32(tmp, t1, flags);
tcg_temp_free(tmp);
+ tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
TCGv addr;
if (local)
- addr = tcg_temp_local_new(TCG_TYPE_I64);
+ addr = tcg_temp_local_new();
else
- addr = tcg_temp_new(TCG_TYPE_I64);
+ addr = tcg_temp_new();
if (rb != 31) {
tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
if (clear)
l2 = gen_new_label();
if (likely(ra != 31)) {
if (mask) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
tcg_gen_brcondi_i64(cond, tmp, 0, l1);
tcg_temp_free(tmp);
gen_set_label(l2);
}
-static always_inline void gen_fbcond (DisasContext *ctx,
- void* func,
+static always_inline void gen_fbcond (DisasContext *ctx, int opc,
int ra, int32_t disp16)
{
int l1, l2;
TCGv tmp;
+ TCGv src;
l1 = gen_new_label();
l2 = gen_new_label();
if (ra != 31) {
- tmp = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
+ tmp = tcg_temp_new();
+ src = cpu_fir[ra];
} else {
tmp = tcg_const_i64(0);
- tcg_gen_helper_1_1(func, tmp, tmp);
+ src = tmp;
+ }
+ switch (opc) {
+ case 0x31: /* FBEQ */
+ gen_helper_cmpfeq(tmp, src);
+ break;
+ case 0x32: /* FBLT */
+ gen_helper_cmpflt(tmp, src);
+ break;
+ case 0x33: /* FBLE */
+ gen_helper_cmpfle(tmp, src);
+ break;
+ case 0x35: /* FBNE */
+ gen_helper_cmpfne(tmp, src);
+ break;
+ case 0x36: /* FBGE */
+ gen_helper_cmpfge(tmp, src);
+ break;
+ case 0x37: /* FBGT */
+ gen_helper_cmpfgt(tmp, src);
+ break;
+ default:
+ abort();
}
tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
tcg_gen_movi_i64(cpu_pc, ctx->pc);
if (ra != 31) {
if (mask) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
tcg_temp_free(tmp);
gen_set_label(l1);
}
-static always_inline void gen_farith2 (void *helper,
- int rb, int rc)
-{
- if (unlikely(rc == 31))
- return;
-
- if (rb != 31)
- tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
- else {
- TCGv tmp = tcg_const_i64(0);
- tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
- tcg_temp_free(tmp);
- }
+#define FARITH2(name) \
+static always_inline void glue(gen_f, name)(int rb, int rc) \
+{ \
+ if (unlikely(rc == 31)) \
+ return; \
+ \
+ if (rb != 31) \
+ gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]); \
+ else { \
+ TCGv tmp = tcg_const_i64(0); \
+ gen_helper_ ## name (cpu_fir[rc], tmp); \
+ tcg_temp_free(tmp); \
+ } \
}
-
-static always_inline void gen_farith3 (void *helper,
- int ra, int rb, int rc)
-{
- if (unlikely(rc == 31))
- return;
-
- if (ra != 31) {
- if (rb != 31)
- tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
- else {
- TCGv tmp = tcg_const_i64(0);
- tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
- tcg_temp_free(tmp);
- }
- } else {
- TCGv tmp = tcg_const_i64(0);
- if (rb != 31)
- tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
- else
- tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
- tcg_temp_free(tmp);
- }
+FARITH2(sqrts)
+FARITH2(sqrtf)
+FARITH2(sqrtg)
+FARITH2(sqrtt)
+FARITH2(cvtgf)
+FARITH2(cvtgq)
+FARITH2(cvtqf)
+FARITH2(cvtqg)
+FARITH2(cvtst)
+FARITH2(cvtts)
+FARITH2(cvttq)
+FARITH2(cvtqs)
+FARITH2(cvtqt)
+FARITH2(cvtlq)
+FARITH2(cvtql)
+FARITH2(cvtqlv)
+FARITH2(cvtqlsv)
+
+#define FARITH3(name) \
+static always_inline void glue(gen_f, name) (int ra, int rb, int rc) \
+{ \
+ if (unlikely(rc == 31)) \
+ return; \
+ \
+ if (ra != 31) { \
+ if (rb != 31) \
+ gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]); \
+ else { \
+ TCGv tmp = tcg_const_i64(0); \
+ gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp); \
+ tcg_temp_free(tmp); \
+ } \
+ } else { \
+ TCGv tmp = tcg_const_i64(0); \
+ if (rb != 31) \
+ gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]); \
+ else \
+ gen_helper_ ## name (cpu_fir[rc], tmp, tmp); \
+ tcg_temp_free(tmp); \
+ } \
}
-static always_inline void gen_fcmov (void *func,
- int ra, int rb, int rc)
-{
- int l1;
- TCGv tmp;
-
- if (unlikely(rc == 31))
- return;
-
- l1 = gen_new_label();
- tmp = tcg_temp_new(TCG_TYPE_I64);
- if (ra != 31) {
- tmp = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
- } else {
- tmp = tcg_const_i64(0);
- tcg_gen_helper_1_1(func, tmp, tmp);
- }
- tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
- if (rb != 31)
- tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
- else
- tcg_gen_movi_i64(cpu_fir[rc], 0);
- gen_set_label(l1);
+FARITH3(addf)
+FARITH3(subf)
+FARITH3(mulf)
+FARITH3(divf)
+FARITH3(addg)
+FARITH3(subg)
+FARITH3(mulg)
+FARITH3(divg)
+FARITH3(cmpgeq)
+FARITH3(cmpglt)
+FARITH3(cmpgle)
+FARITH3(adds)
+FARITH3(subs)
+FARITH3(muls)
+FARITH3(divs)
+FARITH3(addt)
+FARITH3(subt)
+FARITH3(mult)
+FARITH3(divt)
+FARITH3(cmptun)
+FARITH3(cmpteq)
+FARITH3(cmptlt)
+FARITH3(cmptle)
+FARITH3(cpys)
+FARITH3(cpysn)
+FARITH3(cpyse)
+
+#define FCMOV(name) \
+static always_inline void glue(gen_f, name) (int ra, int rb, int rc) \
+{ \
+ int l1; \
+ TCGv tmp; \
+ \
+ if (unlikely(rc == 31)) \
+ return; \
+ \
+ l1 = gen_new_label(); \
+ tmp = tcg_temp_new(); \
+ if (ra != 31) { \
+ tmp = tcg_temp_new(); \
+ gen_helper_ ## name (tmp, cpu_fir[ra]); \
+ } else { \
+ tmp = tcg_const_i64(0); \
+ gen_helper_ ## name (tmp, tmp); \
+ } \
+ tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1); \
+ if (rb != 31) \
+ tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]); \
+ else \
+ tcg_gen_movi_i64(cpu_fir[rc], 0); \
+ gen_set_label(l1); \
}
+FCMOV(cmpfeq)
+FCMOV(cmpfne)
+FCMOV(cmpflt)
+FCMOV(cmpfge)
+FCMOV(cmpfle)
+FCMOV(cmpfgt)
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
} else {
TCGv tmp1, tmp2;
- tmp1 = tcg_temp_new(TCG_TYPE_I64);
+ tmp1 = tcg_temp_new();
tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
tcg_gen_shli_i64(tmp1, tmp1, 3);
tmp2 = tcg_const_i64(64);
if (islit) {
tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
} else {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
tcg_gen_shli_i64(tmp, tmp, 3);
tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
}
/* Code to call arith3 helpers */
-static always_inline void gen_arith3 (void *helper,
- int ra, int rb, int rc,
- int islit, uint8_t lit)
-{
- if (unlikely(rc == 31))
- return;
-
- if (ra != 31) {
- if (islit) {
- TCGv tmp = tcg_const_i64(lit);
- tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
- tcg_temp_free(tmp);
- } else
- tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
- } else {
- TCGv tmp1 = tcg_const_i64(0);
- if (islit) {
- TCGv tmp2 = tcg_const_i64(lit);
- tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
- tcg_temp_free(tmp2);
- } else
- tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
- tcg_temp_free(tmp1);
- }
+#define ARITH3(name) \
+static always_inline void glue(gen_, name) (int ra, int rb, int rc, \
+ int islit, uint8_t lit) \
+{ \
+ if (unlikely(rc == 31)) \
+ return; \
+ \
+ if (ra != 31) { \
+ if (islit) { \
+ TCGv tmp = tcg_const_i64(lit); \
+ gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp); \
+ tcg_temp_free(tmp); \
+ } else \
+ gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
+ } else { \
+ TCGv tmp1 = tcg_const_i64(0); \
+ if (islit) { \
+ TCGv tmp2 = tcg_const_i64(lit); \
+ gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2); \
+ tcg_temp_free(tmp2); \
+ } else \
+ gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]); \
+ tcg_temp_free(tmp1); \
+ } \
}
+ARITH3(cmpbge)
+ARITH3(addlv)
+ARITH3(sublv)
+ARITH3(addqv)
+ARITH3(subqv)
+ARITH3(mskbl)
+ARITH3(insbl)
+ARITH3(mskwl)
+ARITH3(inswl)
+ARITH3(mskll)
+ARITH3(insll)
+ARITH3(zap)
+ARITH3(zapnot)
+ARITH3(mskql)
+ARITH3(insql)
+ARITH3(mskwh)
+ARITH3(inswh)
+ARITH3(msklh)
+ARITH3(inslh)
+ARITH3(mskqh)
+ARITH3(insqh)
+ARITH3(umulh)
+ARITH3(mullv)
+ARITH3(mulqv)
static always_inline void gen_cmp(TCGCond cond,
int ra, int rb, int rc,
l2 = gen_new_label();
if (ra != 31) {
- tmp = tcg_temp_new(TCG_TYPE_I64);
+ tmp = tcg_temp_new();
tcg_gen_mov_i64(tmp, cpu_ir[ra]);
} else
tmp = tcg_const_i64(0);
/* S4ADDL */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
if (islit)
tcg_gen_addi_i64(tmp, tmp, lit);
/* S4SUBL */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
if (islit)
tcg_gen_subi_i64(tmp, tmp, lit);
break;
case 0x0F:
/* CMPBGE */
- gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
+ gen_cmpbge(ra, rb, rc, islit, lit);
break;
case 0x12:
/* S8ADDL */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
if (islit)
tcg_gen_addi_i64(tmp, tmp, lit);
/* S8SUBL */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
if (islit)
tcg_gen_subi_i64(tmp, tmp, lit);
/* S4ADDQ */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
if (islit)
tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
/* S4SUBQ */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
if (islit)
tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
/* S8ADDQ */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
if (islit)
tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
/* S8SUBQ */
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv tmp = tcg_temp_new();
tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
if (islit)
tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
break;
case 0x40:
/* ADDL/V */
- gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
+ gen_addlv(ra, rb, rc, islit, lit);
break;
case 0x49:
/* SUBL/V */
- gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
+ gen_sublv(ra, rb, rc, islit, lit);
break;
case 0x4D:
/* CMPLT */
break;
case 0x60:
/* ADDQ/V */
- gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
+ gen_addqv(ra, rb, rc, islit, lit);
break;
case 0x69:
/* SUBQ/V */
- gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
+ gen_subqv(ra, rb, rc, islit, lit);
break;
case 0x6D:
/* CMPLE */
if (islit)
tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
else
- tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
+ gen_helper_amask(cpu_ir[rc], cpu_ir[rb]);
}
break;
case 0x64:
case 0x6C:
/* IMPLVER */
if (rc != 31)
- tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
+ gen_helper_load_implver(cpu_ir[rc]);
break;
default:
goto invalid_opc;
switch (fn7) {
case 0x02:
/* MSKBL */
- gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
+ gen_mskbl(ra, rb, rc, islit, lit);
break;
case 0x06:
/* EXTBL */
break;
case 0x0B:
/* INSBL */
- gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
+ gen_insbl(ra, rb, rc, islit, lit);
break;
case 0x12:
/* MSKWL */
- gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
+ gen_mskwl(ra, rb, rc, islit, lit);
break;
case 0x16:
/* EXTWL */
break;
case 0x1B:
/* INSWL */
- gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
+ gen_inswl(ra, rb, rc, islit, lit);
break;
case 0x22:
/* MSKLL */
- gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
+ gen_mskll(ra, rb, rc, islit, lit);
break;
case 0x26:
/* EXTLL */
break;
case 0x2B:
/* INSLL */
- gen_arith3(helper_insll, ra, rb, rc, islit, lit);
+ gen_insll(ra, rb, rc, islit, lit);
break;
case 0x30:
/* ZAP */
- gen_arith3(helper_zap, ra, rb, rc, islit, lit);
+ gen_zap(ra, rb, rc, islit, lit);
break;
case 0x31:
/* ZAPNOT */
- gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
+ gen_zapnot(ra, rb, rc, islit, lit);
break;
case 0x32:
/* MSKQL */
- gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
+ gen_mskql(ra, rb, rc, islit, lit);
break;
case 0x34:
/* SRL */
if (islit)
tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
else {
- TCGv shift = tcg_temp_new(TCG_TYPE_I64);
+ TCGv shift = tcg_temp_new();
tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
tcg_temp_free(shift);
if (islit)
tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
else {
- TCGv shift = tcg_temp_new(TCG_TYPE_I64);
+ TCGv shift = tcg_temp_new();
tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
tcg_temp_free(shift);
break;
case 0x3B:
/* INSQL */
- gen_arith3(helper_insql, ra, rb, rc, islit, lit);
+ gen_insql(ra, rb, rc, islit, lit);
break;
case 0x3C:
/* SRA */
if (islit)
tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
else {
- TCGv shift = tcg_temp_new(TCG_TYPE_I64);
+ TCGv shift = tcg_temp_new();
tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
tcg_temp_free(shift);
break;
case 0x52:
/* MSKWH */
- gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
+ gen_mskwh(ra, rb, rc, islit, lit);
break;
case 0x57:
/* INSWH */
- gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
+ gen_inswh(ra, rb, rc, islit, lit);
break;
case 0x5A:
/* EXTWH */
break;
case 0x62:
/* MSKLH */
- gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
+ gen_msklh(ra, rb, rc, islit, lit);
break;
case 0x67:
/* INSLH */
- gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
+ gen_inslh(ra, rb, rc, islit, lit);
break;
case 0x6A:
/* EXTLH */
break;
case 0x72:
/* MSKQH */
- gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
+ gen_mskqh(ra, rb, rc, islit, lit);
break;
case 0x77:
/* INSQH */
- gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
+ gen_insqh(ra, rb, rc, islit, lit);
break;
case 0x7A:
/* EXTQH */
break;
case 0x30:
/* UMULH */
- gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
+ gen_umulh(ra, rb, rc, islit, lit);
break;
case 0x40:
/* MULL/V */
- gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
+ gen_mullv(ra, rb, rc, islit, lit);
break;
case 0x60:
/* MULQ/V */
- gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
+ gen_mulqv(ra, rb, rc, islit, lit);
break;
default:
goto invalid_opc;
goto invalid_opc;
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 tmp = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
- tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
- tcg_temp_free(tmp);
+ gen_helper_memory_to_s(cpu_fir[rc], tmp);
+ tcg_temp_free_i32(tmp);
} else
tcg_gen_movi_i64(cpu_fir[rc], 0);
}
/* SQRTF */
if (!(ctx->amask & AMASK_FIX))
goto invalid_opc;
- gen_farith2(&helper_sqrtf, rb, rc);
+ gen_fsqrtf(rb, rc);
break;
case 0x0B:
/* SQRTS */
if (!(ctx->amask & AMASK_FIX))
goto invalid_opc;
- gen_farith2(&helper_sqrts, rb, rc);
+ gen_fsqrts(rb, rc);
break;
case 0x14:
/* ITOFF */
goto invalid_opc;
if (likely(rc != 31)) {
if (ra != 31) {
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 tmp = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
- tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
- tcg_temp_free(tmp);
+ gen_helper_memory_to_f(cpu_fir[rc], tmp);
+ tcg_temp_free_i32(tmp);
} else
tcg_gen_movi_i64(cpu_fir[rc], 0);
}
/* SQRTG */
if (!(ctx->amask & AMASK_FIX))
goto invalid_opc;
- gen_farith2(&helper_sqrtg, rb, rc);
+ gen_fsqrtg(rb, rc);
break;
case 0x02B:
/* SQRTT */
if (!(ctx->amask & AMASK_FIX))
goto invalid_opc;
- gen_farith2(&helper_sqrtt, rb, rc);
+ gen_fsqrtt(rb, rc);
break;
default:
goto invalid_opc;
switch (fpfn) { /* f11 & 0x3F */
case 0x00:
/* ADDF */
- gen_farith3(&helper_addf, ra, rb, rc);
+ gen_faddf(ra, rb, rc);
break;
case 0x01:
/* SUBF */
- gen_farith3(&helper_subf, ra, rb, rc);
+ gen_fsubf(ra, rb, rc);
break;
case 0x02:
/* MULF */
- gen_farith3(&helper_mulf, ra, rb, rc);
+ gen_fmulf(ra, rb, rc);
break;
case 0x03:
/* DIVF */
- gen_farith3(&helper_divf, ra, rb, rc);
+ gen_fdivf(ra, rb, rc);
break;
case 0x1E:
/* CVTDG */
#if 0 // TODO
- gen_farith2(&helper_cvtdg, rb, rc);
+ gen_fcvtdg(rb, rc);
#else
goto invalid_opc;
#endif
break;
case 0x20:
/* ADDG */
- gen_farith3(&helper_addg, ra, rb, rc);
+ gen_faddg(ra, rb, rc);
break;
case 0x21:
/* SUBG */
- gen_farith3(&helper_subg, ra, rb, rc);
+ gen_fsubg(ra, rb, rc);
break;
case 0x22:
/* MULG */
- gen_farith3(&helper_mulg, ra, rb, rc);
+ gen_fmulg(ra, rb, rc);
break;
case 0x23:
/* DIVG */
- gen_farith3(&helper_divg, ra, rb, rc);
+ gen_fdivg(ra, rb, rc);
break;
case 0x25:
/* CMPGEQ */
- gen_farith3(&helper_cmpgeq, ra, rb, rc);
+ gen_fcmpgeq(ra, rb, rc);
break;
case 0x26:
/* CMPGLT */
- gen_farith3(&helper_cmpglt, ra, rb, rc);
+ gen_fcmpglt(ra, rb, rc);
break;
case 0x27:
/* CMPGLE */
- gen_farith3(&helper_cmpgle, ra, rb, rc);
+ gen_fcmpgle(ra, rb, rc);
break;
case 0x2C:
/* CVTGF */
- gen_farith2(&helper_cvtgf, rb, rc);
+ gen_fcvtgf(rb, rc);
break;
case 0x2D:
/* CVTGD */
#if 0 // TODO
- gen_farith2(ctx, &helper_cvtgd, rb, rc);
+ gen_fcvtgd(rb, rc);
#else
goto invalid_opc;
#endif
break;
case 0x2F:
/* CVTGQ */
- gen_farith2(&helper_cvtgq, rb, rc);
+ gen_fcvtgq(rb, rc);
break;
case 0x3C:
/* CVTQF */
- gen_farith2(&helper_cvtqf, rb, rc);
+ gen_fcvtqf(rb, rc);
break;
case 0x3E:
/* CVTQG */
- gen_farith2(&helper_cvtqg, rb, rc);
+ gen_fcvtqg(rb, rc);
break;
default:
goto invalid_opc;
switch (fpfn) { /* f11 & 0x3F */
case 0x00:
/* ADDS */
- gen_farith3(&helper_adds, ra, rb, rc);
+ gen_fadds(ra, rb, rc);
break;
case 0x01:
/* SUBS */
- gen_farith3(&helper_subs, ra, rb, rc);
+ gen_fsubs(ra, rb, rc);
break;
case 0x02:
/* MULS */
- gen_farith3(&helper_muls, ra, rb, rc);
+ gen_fmuls(ra, rb, rc);
break;
case 0x03:
/* DIVS */
- gen_farith3(&helper_divs, ra, rb, rc);
+ gen_fdivs(ra, rb, rc);
break;
case 0x20:
/* ADDT */
- gen_farith3(&helper_addt, ra, rb, rc);
+ gen_faddt(ra, rb, rc);
break;
case 0x21:
/* SUBT */
- gen_farith3(&helper_subt, ra, rb, rc);
+ gen_fsubt(ra, rb, rc);
break;
case 0x22:
/* MULT */
- gen_farith3(&helper_mult, ra, rb, rc);
+ gen_fmult(ra, rb, rc);
break;
case 0x23:
/* DIVT */
- gen_farith3(&helper_divt, ra, rb, rc);
+ gen_fdivt(ra, rb, rc);
break;
case 0x24:
/* CMPTUN */
- gen_farith3(&helper_cmptun, ra, rb, rc);
+ gen_fcmptun(ra, rb, rc);
break;
case 0x25:
/* CMPTEQ */
- gen_farith3(&helper_cmpteq, ra, rb, rc);
+ gen_fcmpteq(ra, rb, rc);
break;
case 0x26:
/* CMPTLT */
- gen_farith3(&helper_cmptlt, ra, rb, rc);
+ gen_fcmptlt(ra, rb, rc);
break;
case 0x27:
/* CMPTLE */
- gen_farith3(&helper_cmptle, ra, rb, rc);
+ gen_fcmptle(ra, rb, rc);
break;
case 0x2C:
/* XXX: incorrect */
if (fn11 == 0x2AC || fn11 == 0x6AC) {
/* CVTST */
- gen_farith2(&helper_cvtst, rb, rc);
+ gen_fcvtst(rb, rc);
} else {
/* CVTTS */
- gen_farith2(&helper_cvtts, rb, rc);
+ gen_fcvtts(rb, rc);
}
break;
case 0x2F:
/* CVTTQ */
- gen_farith2(&helper_cvttq, rb, rc);
+ gen_fcvttq(rb, rc);
break;
case 0x3C:
/* CVTQS */
- gen_farith2(&helper_cvtqs, rb, rc);
+ gen_fcvtqs(rb, rc);
break;
case 0x3E:
/* CVTQT */
- gen_farith2(&helper_cvtqt, rb, rc);
+ gen_fcvtqt(rb, rc);
break;
default:
goto invalid_opc;
switch (fn11) {
case 0x010:
/* CVTLQ */
- gen_farith2(&helper_cvtlq, rb, rc);
+ gen_fcvtlq(rb, rc);
break;
case 0x020:
if (likely(rc != 31)) {
tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
else
/* CPYS */
- gen_farith3(&helper_cpys, ra, rb, rc);
+ gen_fcpys(ra, rb, rc);
}
break;
case 0x021:
/* CPYSN */
- gen_farith3(&helper_cpysn, ra, rb, rc);
+ gen_fcpysn(ra, rb, rc);
break;
case 0x022:
/* CPYSE */
- gen_farith3(&helper_cpyse, ra, rb, rc);
+ gen_fcpyse(ra, rb, rc);
break;
case 0x024:
/* MT_FPCR */
if (likely(ra != 31))
- tcg_gen_helper_0_1(helper_store_fpcr, cpu_fir[ra]);
+ gen_helper_store_fpcr(cpu_fir[ra]);
else {
TCGv tmp = tcg_const_i64(0);
- tcg_gen_helper_0_1(helper_store_fpcr, tmp);
+ gen_helper_store_fpcr(tmp);
tcg_temp_free(tmp);
}
break;
case 0x025:
/* MF_FPCR */
if (likely(ra != 31))
- tcg_gen_helper_1_0(helper_load_fpcr, cpu_fir[ra]);
+ gen_helper_load_fpcr(cpu_fir[ra]);
break;
case 0x02A:
/* FCMOVEQ */
- gen_fcmov(&helper_cmpfeq, ra, rb, rc);
+ gen_fcmpfeq(ra, rb, rc);
break;
case 0x02B:
/* FCMOVNE */
- gen_fcmov(&helper_cmpfne, ra, rb, rc);
+ gen_fcmpfne(ra, rb, rc);
break;
case 0x02C:
/* FCMOVLT */
- gen_fcmov(&helper_cmpflt, ra, rb, rc);
+ gen_fcmpflt(ra, rb, rc);
break;
case 0x02D:
/* FCMOVGE */
- gen_fcmov(&helper_cmpfge, ra, rb, rc);
+ gen_fcmpfge(ra, rb, rc);
break;
case 0x02E:
/* FCMOVLE */
- gen_fcmov(&helper_cmpfle, ra, rb, rc);
+ gen_fcmpfle(ra, rb, rc);
break;
case 0x02F:
/* FCMOVGT */
- gen_fcmov(&helper_cmpfgt, ra, rb, rc);
+ gen_fcmpfgt(ra, rb, rc);
break;
case 0x030:
/* CVTQL */
- gen_farith2(&helper_cvtql, rb, rc);
+ gen_fcvtql(rb, rc);
break;
case 0x130:
/* CVTQL/V */
- gen_farith2(&helper_cvtqlv, rb, rc);
+ gen_fcvtqlv(rb, rc);
break;
case 0x530:
/* CVTQL/SV */
- gen_farith2(&helper_cvtqlsv, rb, rc);
+ gen_fcvtqlsv(rb, rc);
break;
default:
goto invalid_opc;
case 0xC000:
/* RPCC */
if (ra != 31)
- tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
+ gen_helper_load_pcc(cpu_ir[ra]);
break;
case 0xE000:
/* RC */
if (ra != 31)
- tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
+ gen_helper_rc(cpu_ir[ra]);
break;
case 0xE800:
/* ECB */
case 0xF000:
/* RS */
if (ra != 31)
- tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
+ gen_helper_rs(cpu_ir[ra]);
break;
case 0xF800:
/* WH64 */
goto invalid_opc;
if (ra != 31) {
TCGv tmp = tcg_const_i32(insn & 0xFF);
- tcg_gen_helper_1_2(helper_mfpr, cpu_ir[ra], tmp, cpu_ir[ra]);
+ gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
tcg_temp_free(tmp);
}
break;
if (!ctx->pal_mode)
goto invalid_opc;
if (ra != 31) {
- TCGv addr = tcg_temp_new(TCG_TYPE_I64);
+ TCGv addr = tcg_temp_new();
if (rb != 31)
tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
else
switch ((insn >> 12) & 0xF) {
case 0x0:
/* Longword physical access */
- tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
+ gen_helper_ldl_raw(cpu_ir[ra], addr);
break;
case 0x1:
/* Quadword physical access */
- tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
+ gen_helper_ldq_raw(cpu_ir[ra], addr);
break;
case 0x2:
/* Longword physical access with lock */
- tcg_gen_helper_0_2(helper_ldl_l_raw, cpu_ir[ra], addr);
+ gen_helper_ldl_l_raw(cpu_ir[ra], addr);
break;
case 0x3:
/* Quadword physical access with lock */
- tcg_gen_helper_0_2(helper_ldq_l_raw, cpu_ir[ra], addr);
+ gen_helper_ldq_l_raw(cpu_ir[ra], addr);
break;
case 0x4:
/* Longword virtual PTE fetch */
- tcg_gen_helper_0_2(helper_ldl_kernel, cpu_ir[ra], addr);
+ gen_helper_ldl_kernel(cpu_ir[ra], addr);
break;
case 0x5:
/* Quadword virtual PTE fetch */
- tcg_gen_helper_0_2(helper_ldq_kernel, cpu_ir[ra], addr);
+ gen_helper_ldq_kernel(cpu_ir[ra], addr);
break;
case 0x6:
/* Invalid */
goto invalid_opc;
case 0x8:
/* Longword virtual access */
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_ldl_raw(cpu_ir[ra], addr);
break;
case 0x9:
/* Quadword virtual access */
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_ldq_raw(cpu_ir[ra], addr);
break;
case 0xA:
/* Longword virtual access with protection check */
break;
case 0xC:
/* Longword virtual access with alternate access mode */
- tcg_gen_helper_0_0(helper_set_alt_mode);
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
- tcg_gen_helper_0_0(helper_restore_mode);
+ gen_helper_set_alt_mode();
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_ldl_raw(cpu_ir[ra], addr);
+ gen_helper_restore_mode();
break;
case 0xD:
/* Quadword virtual access with alternate access mode */
- tcg_gen_helper_0_0(helper_set_alt_mode);
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
- tcg_gen_helper_0_0(helper_restore_mode);
+ gen_helper_set_alt_mode();
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_ldq_raw(cpu_ir[ra], addr);
+ gen_helper_restore_mode();
break;
case 0xE:
/* Longword virtual access with alternate access mode and
* protection checks
*/
- tcg_gen_helper_0_0(helper_set_alt_mode);
- tcg_gen_helper_0_2(helper_ldl_data, cpu_ir[ra], addr);
- tcg_gen_helper_0_0(helper_restore_mode);
+ gen_helper_set_alt_mode();
+ gen_helper_ldl_data(cpu_ir[ra], addr);
+ gen_helper_restore_mode();
break;
case 0xF:
/* Quadword virtual access with alternate access mode and
* protection checks
*/
- tcg_gen_helper_0_0(helper_set_alt_mode);
- tcg_gen_helper_0_2(helper_ldq_data, cpu_ir[ra], addr);
- tcg_gen_helper_0_0(helper_restore_mode);
+ gen_helper_set_alt_mode();
+ gen_helper_ldq_data(cpu_ir[ra], addr);
+ gen_helper_restore_mode();
break;
}
tcg_temp_free(addr);
if (islit)
tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
else
- tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
+ gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
}
break;
case 0x31:
if (islit)
tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
else
- tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
+ gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
}
break;
case 0x33:
if (islit)
tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
else
- tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
+ gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
}
break;
case 0x34:
if (!(ctx->amask & AMASK_FIX))
goto invalid_opc;
if (rc != 31) {
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 tmp1 = tcg_temp_new_i32();
if (ra != 31)
- tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
+ gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
else {
TCGv tmp2 = tcg_const_i64(0);
- tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
+ gen_helper_s_to_memory(tmp1, tmp2);
tcg_temp_free(tmp2);
}
tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
- tcg_temp_free(tmp1);
+ tcg_temp_free_i32(tmp1);
}
break;
default:
else {
TCGv tmp1 = tcg_const_i32(insn & 0xFF);
if (ra != 31)
- tcg_gen_helper(helper_mtpr, tmp1, cpu_ir[ra]);
+ gen_helper_mtpr(tmp1, cpu_ir[ra]);
else {
TCGv tmp2 = tcg_const_i64(0);
- tcg_gen_helper(helper_mtpr, tmp1, tmp2);
+ gen_helper_mtpr(tmp1, tmp2);
tcg_temp_free(tmp2);
}
tcg_temp_free(tmp1);
goto invalid_opc;
if (rb == 31) {
/* "Old" alpha */
- tcg_gen_helper_0_0(helper_hw_rei);
+ gen_helper_hw_rei();
} else {
TCGv tmp;
if (ra != 31) {
- tmp = tcg_temp_new(TCG_TYPE_I64);
+ tmp = tcg_temp_new();
tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
} else
tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
- tcg_gen_helper_0_1(helper_hw_ret, tmp);
+ gen_helper_hw_ret(tmp);
tcg_temp_free(tmp);
}
ret = 2;
goto invalid_opc;
else {
TCGv addr, val;
- addr = tcg_temp_new(TCG_TYPE_I64);
+ addr = tcg_temp_new();
if (rb != 31)
tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
else
if (ra != 31)
val = cpu_ir[ra];
else {
- val = tcg_temp_new(TCG_TYPE_I64);
+ val = tcg_temp_new();
tcg_gen_movi_i64(val, 0);
}
switch ((insn >> 12) & 0xF) {
case 0x0:
/* Longword physical access */
- tcg_gen_helper_0_2(helper_stl_raw, val, addr);
+ gen_helper_stl_raw(val, addr);
break;
case 0x1:
/* Quadword physical access */
- tcg_gen_helper_0_2(helper_stq_raw, val, addr);
+ gen_helper_stq_raw(val, addr);
break;
case 0x2:
/* Longword physical access with lock */
- tcg_gen_helper_1_2(helper_stl_c_raw, val, val, addr);
+ gen_helper_stl_c_raw(val, val, addr);
break;
case 0x3:
/* Quadword physical access with lock */
- tcg_gen_helper_1_2(helper_stq_c_raw, val, val, addr);
+ gen_helper_stq_c_raw(val, val, addr);
break;
case 0x4:
/* Longword virtual access */
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_stl_raw, val, addr);
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_stl_raw(val, addr);
break;
case 0x5:
/* Quadword virtual access */
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_stq_raw, val, addr);
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_stq_raw(val, addr);
break;
case 0x6:
/* Invalid */
goto invalid_opc;
case 0xC:
/* Longword virtual access with alternate access mode */
- tcg_gen_helper_0_0(helper_set_alt_mode);
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_stl_raw, val, addr);
- tcg_gen_helper_0_0(helper_restore_mode);
+ gen_helper_set_alt_mode();
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_stl_raw(val, addr);
+ gen_helper_restore_mode();
break;
case 0xD:
/* Quadword virtual access with alternate access mode */
- tcg_gen_helper_0_0(helper_set_alt_mode);
- tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
- tcg_gen_helper_0_2(helper_stl_raw, val, addr);
- tcg_gen_helper_0_0(helper_restore_mode);
+ gen_helper_set_alt_mode();
+ gen_helper_st_virt_to_phys(addr, addr);
+ gen_helper_stl_raw(val, addr);
+ gen_helper_restore_mode();
break;
case 0xE:
/* Invalid */
tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
ret = 1;
break;
- case 0x31:
- /* FBEQ */
- gen_fbcond(ctx, &helper_cmpfeq, ra, disp16);
- ret = 1;
- break;
- case 0x32:
- /* FBLT */
- gen_fbcond(ctx, &helper_cmpflt, ra, disp16);
- ret = 1;
- break;
- case 0x33:
- /* FBLE */
- gen_fbcond(ctx, &helper_cmpfle, ra, disp16);
+ case 0x31: /* FBEQ */
+ case 0x32: /* FBLT */
+ case 0x33: /* FBLE */
+ gen_fbcond(ctx, opc, ra, disp16);
ret = 1;
break;
case 0x34:
tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
ret = 1;
break;
- case 0x35:
- /* FBNE */
- gen_fbcond(ctx, &helper_cmpfne, ra, disp16);
- ret = 1;
- break;
- case 0x36:
- /* FBGE */
- gen_fbcond(ctx, &helper_cmpfge, ra, disp16);
- ret = 1;
- break;
- case 0x37:
- /* FBGT */
- gen_fbcond(ctx, &helper_cmpfgt, ra, disp16);
+ case 0x35: /* FBNE */
+ case 0x36: /* FBGE */
+ case 0x37: /* FBGT */
+ gen_fbcond(ctx, opc, ra, disp16);
ret = 1;
break;
case 0x38:
tcg_gen_movi_i64(cpu_pc, ctx.pc);
}
#if defined (DO_TB_FLUSH)
- tcg_gen_helper_0_0(helper_tb_flush);
+ gen_helper_tb_flush();
#endif
if (tb->cflags & CF_LAST_IO)
gen_io_end();
-#define DEF_HELPER(name, ret, args) ret glue(helper_,name) args;
-
-#ifdef GEN_HELPER
-#define DEF_HELPER_0_0(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(void) \
-{ \
- tcg_gen_helper_0_0(helper_##name); \
-}
-#define DEF_HELPER_0_1(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv arg1) \
-{ \
- tcg_gen_helper_0_1(helper_##name, arg1); \
-}
-#define DEF_HELPER_0_2(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv arg1, TCGv arg2) \
-{ \
- tcg_gen_helper_0_2(helper_##name, arg1, arg2); \
-}
-#define DEF_HELPER_0_3(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name( \
- TCGv arg1, TCGv arg2, TCGv arg3) \
-{ \
- tcg_gen_helper_0_3(helper_##name, arg1, arg2, arg3); \
-}
-#define DEF_HELPER_1_0(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret) \
-{ \
- tcg_gen_helper_1_0(helper_##name, ret); \
-}
-#define DEF_HELPER_1_1(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, TCGv arg1) \
-{ \
- tcg_gen_helper_1_1(helper_##name, ret, arg1); \
-}
-#define DEF_HELPER_1_2(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, TCGv arg1, TCGv arg2) \
-{ \
- tcg_gen_helper_1_2(helper_##name, ret, arg1, arg2); \
-}
-#define DEF_HELPER_1_3(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, \
- TCGv arg1, TCGv arg2, TCGv arg3) \
-{ \
- tcg_gen_helper_1_3(helper_##name, ret, arg1, arg2, arg3); \
-}
-#define DEF_HELPER_1_4(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, \
- TCGv arg1, TCGv arg2, TCGv arg3, TCGv arg4) \
-{ \
- tcg_gen_helper_1_4(helper_##name, ret, arg1, arg2, arg3, arg4); \
-}
-#else /* !GEN_HELPER */
-#define DEF_HELPER_0_0 DEF_HELPER
-#define DEF_HELPER_0_1 DEF_HELPER
-#define DEF_HELPER_0_2 DEF_HELPER
-#define DEF_HELPER_0_3 DEF_HELPER
-#define DEF_HELPER_1_0 DEF_HELPER
-#define DEF_HELPER_1_1 DEF_HELPER
-#define DEF_HELPER_1_2 DEF_HELPER
-#define DEF_HELPER_1_3 DEF_HELPER
-#define DEF_HELPER_1_4 DEF_HELPER
-#define HELPER(x) glue(helper_,x)
-#endif
-
-DEF_HELPER_1_1(clz, uint32_t, (uint32_t))
-DEF_HELPER_1_1(sxtb16, uint32_t, (uint32_t))
-DEF_HELPER_1_1(uxtb16, uint32_t, (uint32_t))
-
-DEF_HELPER_1_2(add_setq, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(add_saturate, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(sub_saturate, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(add_usaturate, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(sub_usaturate, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_1(double_saturate, uint32_t, (int32_t))
-DEF_HELPER_1_2(sdiv, int32_t, (int32_t, int32_t))
-DEF_HELPER_1_2(udiv, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_1(rbit, uint32_t, (uint32_t))
-DEF_HELPER_1_1(abs, uint32_t, (uint32_t))
+#include "def-helper.h"
+
+DEF_HELPER_1(clz, i32, i32)
+DEF_HELPER_1(sxtb16, i32, i32)
+DEF_HELPER_1(uxtb16, i32, i32)
+
+DEF_HELPER_2(add_setq, i32, i32, i32)
+DEF_HELPER_2(add_saturate, i32, i32, i32)
+DEF_HELPER_2(sub_saturate, i32, i32, i32)
+DEF_HELPER_2(add_usaturate, i32, i32, i32)
+DEF_HELPER_2(sub_usaturate, i32, i32, i32)
+DEF_HELPER_1(double_saturate, i32, s32)
+DEF_HELPER_2(sdiv, s32, s32, s32)
+DEF_HELPER_2(udiv, i32, i32, i32)
+DEF_HELPER_1(rbit, i32, i32)
+DEF_HELPER_1(abs, i32, i32)
#define PAS_OP(pfx) \
- DEF_HELPER_1_3(pfx ## add8, uint32_t, (uint32_t, uint32_t, uint32_t *)) \
- DEF_HELPER_1_3(pfx ## sub8, uint32_t, (uint32_t, uint32_t, uint32_t *)) \
- DEF_HELPER_1_3(pfx ## sub16, uint32_t, (uint32_t, uint32_t, uint32_t *)) \
- DEF_HELPER_1_3(pfx ## add16, uint32_t, (uint32_t, uint32_t, uint32_t *)) \
- DEF_HELPER_1_3(pfx ## addsubx, uint32_t, (uint32_t, uint32_t, uint32_t *)) \
- DEF_HELPER_1_3(pfx ## subaddx, uint32_t, (uint32_t, uint32_t, uint32_t *))
+ DEF_HELPER_3(pfx ## add8, i32, i32, i32, ptr) \
+ DEF_HELPER_3(pfx ## sub8, i32, i32, i32, ptr) \
+ DEF_HELPER_3(pfx ## sub16, i32, i32, i32, ptr) \
+ DEF_HELPER_3(pfx ## add16, i32, i32, i32, ptr) \
+ DEF_HELPER_3(pfx ## addsubx, i32, i32, i32, ptr) \
+ DEF_HELPER_3(pfx ## subaddx, i32, i32, i32, ptr)
PAS_OP(s)
PAS_OP(u)
#undef PAS_OP
#define PAS_OP(pfx) \
- DEF_HELPER_1_2(pfx ## add8, uint32_t, (uint32_t, uint32_t)) \
- DEF_HELPER_1_2(pfx ## sub8, uint32_t, (uint32_t, uint32_t)) \
- DEF_HELPER_1_2(pfx ## sub16, uint32_t, (uint32_t, uint32_t)) \
- DEF_HELPER_1_2(pfx ## add16, uint32_t, (uint32_t, uint32_t)) \
- DEF_HELPER_1_2(pfx ## addsubx, uint32_t, (uint32_t, uint32_t)) \
- DEF_HELPER_1_2(pfx ## subaddx, uint32_t, (uint32_t, uint32_t))
+ DEF_HELPER_2(pfx ## add8, i32, i32, i32) \
+ DEF_HELPER_2(pfx ## sub8, i32, i32, i32) \
+ DEF_HELPER_2(pfx ## sub16, i32, i32, i32) \
+ DEF_HELPER_2(pfx ## add16, i32, i32, i32) \
+ DEF_HELPER_2(pfx ## addsubx, i32, i32, i32) \
+ DEF_HELPER_2(pfx ## subaddx, i32, i32, i32)
PAS_OP(q)
PAS_OP(sh)
PAS_OP(uq)
PAS_OP(uh)
#undef PAS_OP
-DEF_HELPER_1_2(ssat, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(usat, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(ssat16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(usat16, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(usad8, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_1(logicq_cc, uint32_t, (uint64_t))
-
-DEF_HELPER_1_3(sel_flags, uint32_t, (uint32_t, uint32_t, uint32_t))
-DEF_HELPER_0_1(exception, void, (uint32_t))
-DEF_HELPER_0_0(wfi, void, (void))
-
-DEF_HELPER_0_2(cpsr_write, void, (uint32_t, uint32_t))
-DEF_HELPER_1_0(cpsr_read, uint32_t, (void))
-
-DEF_HELPER_0_3(v7m_msr, void, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_2(v7m_mrs, uint32_t, (CPUState *, uint32_t))
-
-DEF_HELPER_0_3(set_cp15, void, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_2(get_cp15, uint32_t, (CPUState *, uint32_t))
-
-DEF_HELPER_0_3(set_cp, void, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_2(get_cp, uint32_t, (CPUState *, uint32_t))
-
-DEF_HELPER_1_2(get_r13_banked, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_0_3(set_r13_banked, void, (CPUState *, uint32_t, uint32_t))
-
-DEF_HELPER_0_2(mark_exclusive, void, (CPUState *, uint32_t))
-DEF_HELPER_1_2(test_exclusive, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_0_1(clrex, void, (CPUState *))
-
-DEF_HELPER_1_1(get_user_reg, uint32_t, (uint32_t))
-DEF_HELPER_0_2(set_user_reg, void, (uint32_t, uint32_t))
-
-DEF_HELPER_1_1(vfp_get_fpscr, uint32_t, (CPUState *))
-DEF_HELPER_0_2(vfp_set_fpscr, void, (CPUState *, uint32_t))
-
-DEF_HELPER_1_3(vfp_adds, float32, (float32, float32, CPUState *))
-DEF_HELPER_1_3(vfp_addd, float64, (float64, float64, CPUState *))
-DEF_HELPER_1_3(vfp_subs, float32, (float32, float32, CPUState *))
-DEF_HELPER_1_3(vfp_subd, float64, (float64, float64, CPUState *))
-DEF_HELPER_1_3(vfp_muls, float32, (float32, float32, CPUState *))
-DEF_HELPER_1_3(vfp_muld, float64, (float64, float64, CPUState *))
-DEF_HELPER_1_3(vfp_divs, float32, (float32, float32, CPUState *))
-DEF_HELPER_1_3(vfp_divd, float64, (float64, float64, CPUState *))
-DEF_HELPER_1_1(vfp_negs, float32, (float32))
-DEF_HELPER_1_1(vfp_negd, float64, (float64))
-DEF_HELPER_1_1(vfp_abss, float32, (float32))
-DEF_HELPER_1_1(vfp_absd, float64, (float64))
-DEF_HELPER_1_2(vfp_sqrts, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_sqrtd, float64, (float64, CPUState *))
-DEF_HELPER_0_3(vfp_cmps, void, (float32, float32, CPUState *))
-DEF_HELPER_0_3(vfp_cmpd, void, (float64, float64, CPUState *))
-DEF_HELPER_0_3(vfp_cmpes, void, (float32, float32, CPUState *))
-DEF_HELPER_0_3(vfp_cmped, void, (float64, float64, CPUState *))
-
-DEF_HELPER_1_2(vfp_fcvtds, float64, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_fcvtsd, float32, (float64, CPUState *))
-
-DEF_HELPER_1_2(vfp_uitos, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_uitod, float64, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_sitos, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_sitod, float64, (float32, CPUState *))
-
-DEF_HELPER_1_2(vfp_touis, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_touid, float32, (float64, CPUState *))
-DEF_HELPER_1_2(vfp_touizs, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_touizd, float32, (float64, CPUState *))
-DEF_HELPER_1_2(vfp_tosis, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_tosid, float32, (float64, CPUState *))
-DEF_HELPER_1_2(vfp_tosizs, float32, (float32, CPUState *))
-DEF_HELPER_1_2(vfp_tosizd, float32, (float64, CPUState *))
-
-DEF_HELPER_1_3(vfp_toshs, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_tosls, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_touhs, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_touls, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_toshd, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_tosld, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_touhd, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_tould, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_shtos, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_sltos, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_uhtos, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_ultos, float32, (float32, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_shtod, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_sltod, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_uhtod, float64, (float64, uint32_t, CPUState *))
-DEF_HELPER_1_3(vfp_ultod, float64, (float64, uint32_t, CPUState *))
-
-DEF_HELPER_1_3(recps_f32, float32, (float32, float32, CPUState *))
-DEF_HELPER_1_3(rsqrts_f32, float32, (float32, float32, CPUState *))
-DEF_HELPER_1_2(recpe_f32, float32, (float32, CPUState *))
-DEF_HELPER_1_2(rsqrte_f32, float32, (float32, CPUState *))
-DEF_HELPER_1_2(recpe_u32, uint32_t, (uint32_t, CPUState *))
-DEF_HELPER_1_2(rsqrte_u32, uint32_t, (uint32_t, CPUState *))
-DEF_HELPER_1_4(neon_tbl, uint32_t, (uint32_t, uint32_t, uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_add_saturate_u64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_add_saturate_s64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_sub_saturate_u64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_sub_saturate_s64, uint64_t, (uint64_t, uint64_t))
-
-DEF_HELPER_1_2(add_cc, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(adc_cc, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(sub_cc, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(sbc_cc, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(shl, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(shr, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(sar, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(ror, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(shl_cc, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(shr_cc, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(sar_cc, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(ror_cc, uint32_t, (uint32_t, uint32_t))
+DEF_HELPER_2(ssat, i32, i32, i32)
+DEF_HELPER_2(usat, i32, i32, i32)
+DEF_HELPER_2(ssat16, i32, i32, i32)
+DEF_HELPER_2(usat16, i32, i32, i32)
+
+DEF_HELPER_2(usad8, i32, i32, i32)
+
+DEF_HELPER_1(logicq_cc, i32, i64)
+
+DEF_HELPER_3(sel_flags, i32, i32, i32, i32)
+DEF_HELPER_1(exception, void, i32)
+DEF_HELPER_0(wfi, void)
+
+DEF_HELPER_2(cpsr_write, void, i32, i32)
+DEF_HELPER_0(cpsr_read, i32)
+
+DEF_HELPER_3(v7m_msr, void, env, i32, i32)
+DEF_HELPER_2(v7m_mrs, i32, env, i32)
+
+DEF_HELPER_3(set_cp15, void, env, i32, i32)
+DEF_HELPER_2(get_cp15, i32, env, i32)
+
+DEF_HELPER_3(set_cp, void, env, i32, i32)
+DEF_HELPER_2(get_cp, i32, env, i32)
+
+DEF_HELPER_2(get_r13_banked, i32, env, i32)
+DEF_HELPER_3(set_r13_banked, void, env, i32, i32)
+
+DEF_HELPER_2(mark_exclusive, void, env, i32)
+DEF_HELPER_2(test_exclusive, i32, env, i32)
+DEF_HELPER_1(clrex, void, env)
+
+DEF_HELPER_1(get_user_reg, i32, i32)
+DEF_HELPER_2(set_user_reg, void, i32, i32)
+
+DEF_HELPER_1(vfp_get_fpscr, i32, env)
+DEF_HELPER_2(vfp_set_fpscr, void, env, i32)
+
+DEF_HELPER_3(vfp_adds, f32, f32, f32, env)
+DEF_HELPER_3(vfp_addd, f64, f64, f64, env)
+DEF_HELPER_3(vfp_subs, f32, f32, f32, env)
+DEF_HELPER_3(vfp_subd, f64, f64, f64, env)
+DEF_HELPER_3(vfp_muls, f32, f32, f32, env)
+DEF_HELPER_3(vfp_muld, f64, f64, f64, env)
+DEF_HELPER_3(vfp_divs, f32, f32, f32, env)
+DEF_HELPER_3(vfp_divd, f64, f64, f64, env)
+DEF_HELPER_1(vfp_negs, f32, f32)
+DEF_HELPER_1(vfp_negd, f64, f64)
+DEF_HELPER_1(vfp_abss, f32, f32)
+DEF_HELPER_1(vfp_absd, f64, f64)
+DEF_HELPER_2(vfp_sqrts, f32, f32, env)
+DEF_HELPER_2(vfp_sqrtd, f64, f64, env)
+DEF_HELPER_3(vfp_cmps, void, f32, f32, env)
+DEF_HELPER_3(vfp_cmpd, void, f64, f64, env)
+DEF_HELPER_3(vfp_cmpes, void, f32, f32, env)
+DEF_HELPER_3(vfp_cmped, void, f64, f64, env)
+
+DEF_HELPER_2(vfp_fcvtds, f64, f32, env)
+DEF_HELPER_2(vfp_fcvtsd, f32, f64, env)
+
+DEF_HELPER_2(vfp_uitos, f32, f32, env)
+DEF_HELPER_2(vfp_uitod, f64, f32, env)
+DEF_HELPER_2(vfp_sitos, f32, f32, env)
+DEF_HELPER_2(vfp_sitod, f64, f32, env)
+
+DEF_HELPER_2(vfp_touis, f32, f32, env)
+DEF_HELPER_2(vfp_touid, f32, f64, env)
+DEF_HELPER_2(vfp_touizs, f32, f32, env)
+DEF_HELPER_2(vfp_touizd, f32, f64, env)
+DEF_HELPER_2(vfp_tosis, f32, f32, env)
+DEF_HELPER_2(vfp_tosid, f32, f64, env)
+DEF_HELPER_2(vfp_tosizs, f32, f32, env)
+DEF_HELPER_2(vfp_tosizd, f32, f64, env)
+
+DEF_HELPER_3(vfp_toshs, f32, f32, i32, env)
+DEF_HELPER_3(vfp_tosls, f32, f32, i32, env)
+DEF_HELPER_3(vfp_touhs, f32, f32, i32, env)
+DEF_HELPER_3(vfp_touls, f32, f32, i32, env)
+DEF_HELPER_3(vfp_toshd, f64, f64, i32, env)
+DEF_HELPER_3(vfp_tosld, f64, f64, i32, env)
+DEF_HELPER_3(vfp_touhd, f64, f64, i32, env)
+DEF_HELPER_3(vfp_tould, f64, f64, i32, env)
+DEF_HELPER_3(vfp_shtos, f32, f32, i32, env)
+DEF_HELPER_3(vfp_sltos, f32, f32, i32, env)
+DEF_HELPER_3(vfp_uhtos, f32, f32, i32, env)
+DEF_HELPER_3(vfp_ultos, f32, f32, i32, env)
+DEF_HELPER_3(vfp_shtod, f64, f64, i32, env)
+DEF_HELPER_3(vfp_sltod, f64, f64, i32, env)
+DEF_HELPER_3(vfp_uhtod, f64, f64, i32, env)
+DEF_HELPER_3(vfp_ultod, f64, f64, i32, env)
+
+DEF_HELPER_3(recps_f32, f32, f32, f32, env)
+DEF_HELPER_3(rsqrts_f32, f32, f32, f32, env)
+DEF_HELPER_2(recpe_f32, f32, f32, env)
+DEF_HELPER_2(rsqrte_f32, f32, f32, env)
+DEF_HELPER_2(recpe_u32, i32, i32, env)
+DEF_HELPER_2(rsqrte_u32, i32, i32, env)
+DEF_HELPER_4(neon_tbl, i32, i32, i32, i32, i32)
+DEF_HELPER_2(neon_add_saturate_u64, i64, i64, i64)
+DEF_HELPER_2(neon_add_saturate_s64, i64, i64, i64)
+DEF_HELPER_2(neon_sub_saturate_u64, i64, i64, i64)
+DEF_HELPER_2(neon_sub_saturate_s64, i64, i64, i64)
+
+DEF_HELPER_2(add_cc, i32, i32, i32)
+DEF_HELPER_2(adc_cc, i32, i32, i32)
+DEF_HELPER_2(sub_cc, i32, i32, i32)
+DEF_HELPER_2(sbc_cc, i32, i32, i32)
+
+DEF_HELPER_2(shl, i32, i32, i32)
+DEF_HELPER_2(shr, i32, i32, i32)
+DEF_HELPER_2(sar, i32, i32, i32)
+DEF_HELPER_2(ror, i32, i32, i32)
+DEF_HELPER_2(shl_cc, i32, i32, i32)
+DEF_HELPER_2(shr_cc, i32, i32, i32)
+DEF_HELPER_2(sar_cc, i32, i32, i32)
+DEF_HELPER_2(ror_cc, i32, i32, i32)
/* neon_helper.c */
-DEF_HELPER_1_3(neon_qadd_u8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qadd_s8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qadd_u16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qadd_s16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qsub_u8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qsub_s8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qsub_u16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qsub_s16, uint32_t, (CPUState *, uint32_t, uint32_t))
-
-DEF_HELPER_1_2(neon_hadd_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hadd_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hadd_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hadd_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hadd_s32, int32_t, (int32_t, int32_t))
-DEF_HELPER_1_2(neon_hadd_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rhadd_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rhadd_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rhadd_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rhadd_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rhadd_s32, int32_t, (int32_t, int32_t))
-DEF_HELPER_1_2(neon_rhadd_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hsub_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hsub_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hsub_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hsub_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_hsub_s32, int32_t, (int32_t, int32_t))
-DEF_HELPER_1_2(neon_hsub_u32, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(neon_cgt_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cgt_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cgt_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cgt_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cgt_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cgt_s32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_s32, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(neon_min_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_min_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_min_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_min_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_min_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_min_s32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_s32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmin_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmin_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmin_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmin_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmin_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmin_s32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmax_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmax_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmax_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmax_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmax_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_pmax_s32, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(neon_abd_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abd_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abd_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abd_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abd_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abd_s32, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(neon_shl_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_shl_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_shl_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_shl_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_shl_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_shl_s32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_shl_u64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_shl_s64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_rshl_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rshl_s8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rshl_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rshl_s16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rshl_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rshl_s32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_rshl_u64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_rshl_s64, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_3(neon_qshl_u8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qshl_s8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qshl_u16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qshl_s16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qshl_u32, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qshl_s32, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qshl_u64, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(neon_qshl_s64, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(neon_qrshl_u8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrshl_s8, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrshl_u16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrshl_s16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrshl_u32, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrshl_s32, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrshl_u64, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(neon_qrshl_s64, uint64_t, (CPUState *, uint64_t, uint64_t))
-
-DEF_HELPER_1_2(neon_add_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_add_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_padd_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_padd_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_sub_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_sub_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mul_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mul_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mul_p8, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_2(neon_tst_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_tst_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_tst_u32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_ceq_u8, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_ceq_u16, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_ceq_u32, uint32_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_1(neon_abs_s8, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_abs_s16, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_clz_u8, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_clz_u16, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_cls_s8, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_cls_s16, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_cls_s32, uint32_t, (uint32_t))
-DEF_HELPER_1_1(neon_cnt_u8, uint32_t, (uint32_t))
-
-DEF_HELPER_1_3(neon_qdmulh_s16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrdmulh_s16, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qdmulh_s32, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(neon_qrdmulh_s32, uint32_t, (CPUState *, uint32_t, uint32_t))
-
-DEF_HELPER_1_1(neon_narrow_u8, uint32_t, (uint64_t))
-DEF_HELPER_1_1(neon_narrow_u16, uint32_t, (uint64_t))
-DEF_HELPER_1_2(neon_narrow_sat_u8, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(neon_narrow_sat_s8, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(neon_narrow_sat_u16, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(neon_narrow_sat_s16, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(neon_narrow_sat_u32, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(neon_narrow_sat_s32, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_1(neon_narrow_high_u8, uint32_t, (uint64_t))
-DEF_HELPER_1_1(neon_narrow_high_u16, uint32_t, (uint64_t))
-DEF_HELPER_1_1(neon_narrow_round_high_u8, uint32_t, (uint64_t))
-DEF_HELPER_1_1(neon_narrow_round_high_u16, uint32_t, (uint64_t))
-DEF_HELPER_1_1(neon_widen_u8, uint64_t, (uint32_t))
-DEF_HELPER_1_1(neon_widen_s8, uint64_t, (uint32_t))
-DEF_HELPER_1_1(neon_widen_u16, uint64_t, (uint32_t))
-DEF_HELPER_1_1(neon_widen_s16, uint64_t, (uint32_t))
-
-DEF_HELPER_1_2(neon_addl_u16, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_addl_u32, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_paddl_u16, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_paddl_u32, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_subl_u16, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_subl_u32, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_3(neon_addl_saturate_s32, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(neon_addl_saturate_s64, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_2(neon_abdl_u16, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abdl_s16, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abdl_u32, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abdl_s32, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abdl_u64, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abdl_s64, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mull_u8, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mull_s8, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mull_u16, uint64_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mull_s16, uint64_t, (uint32_t, uint32_t))
-
-DEF_HELPER_1_1(neon_negl_u16, uint64_t, (uint64_t))
-DEF_HELPER_1_1(neon_negl_u32, uint64_t, (uint64_t))
-DEF_HELPER_1_1(neon_negl_u64, uint64_t, (uint64_t))
-
-DEF_HELPER_1_2(neon_qabs_s8, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_1_2(neon_qabs_s16, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_1_2(neon_qabs_s32, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_1_2(neon_qneg_s8, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_1_2(neon_qneg_s16, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_1_2(neon_qneg_s32, uint32_t, (CPUState *, uint32_t))
-
-DEF_HELPER_0_0(neon_trn_u8, void, (void))
-DEF_HELPER_0_0(neon_trn_u16, void, (void))
-DEF_HELPER_0_0(neon_unzip_u8, void, (void))
-DEF_HELPER_0_0(neon_zip_u8, void, (void))
-DEF_HELPER_0_0(neon_zip_u16, void, (void))
-
-DEF_HELPER_1_2(neon_min_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_max_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_abd_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_add_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_sub_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_mul_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_ceq_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cge_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_cgt_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_acge_f32, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_1_2(neon_acgt_f32, uint32_t, (uint32_t, uint32_t))
+DEF_HELPER_3(neon_qadd_u8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qadd_s8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qadd_u16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qadd_s16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qsub_u8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qsub_s8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qsub_u16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qsub_s16, i32, env, i32, i32)
+
+DEF_HELPER_2(neon_hadd_s8, i32, i32, i32)
+DEF_HELPER_2(neon_hadd_u8, i32, i32, i32)
+DEF_HELPER_2(neon_hadd_s16, i32, i32, i32)
+DEF_HELPER_2(neon_hadd_u16, i32, i32, i32)
+DEF_HELPER_2(neon_hadd_s32, s32, s32, s32)
+DEF_HELPER_2(neon_hadd_u32, i32, i32, i32)
+DEF_HELPER_2(neon_rhadd_s8, i32, i32, i32)
+DEF_HELPER_2(neon_rhadd_u8, i32, i32, i32)
+DEF_HELPER_2(neon_rhadd_s16, i32, i32, i32)
+DEF_HELPER_2(neon_rhadd_u16, i32, i32, i32)
+DEF_HELPER_2(neon_rhadd_s32, s32, s32, s32)
+DEF_HELPER_2(neon_rhadd_u32, i32, i32, i32)
+DEF_HELPER_2(neon_hsub_s8, i32, i32, i32)
+DEF_HELPER_2(neon_hsub_u8, i32, i32, i32)
+DEF_HELPER_2(neon_hsub_s16, i32, i32, i32)
+DEF_HELPER_2(neon_hsub_u16, i32, i32, i32)
+DEF_HELPER_2(neon_hsub_s32, s32, s32, s32)
+DEF_HELPER_2(neon_hsub_u32, i32, i32, i32)
+
+DEF_HELPER_2(neon_cgt_u8, i32, i32, i32)
+DEF_HELPER_2(neon_cgt_s8, i32, i32, i32)
+DEF_HELPER_2(neon_cgt_u16, i32, i32, i32)
+DEF_HELPER_2(neon_cgt_s16, i32, i32, i32)
+DEF_HELPER_2(neon_cgt_u32, i32, i32, i32)
+DEF_HELPER_2(neon_cgt_s32, i32, i32, i32)
+DEF_HELPER_2(neon_cge_u8, i32, i32, i32)
+DEF_HELPER_2(neon_cge_s8, i32, i32, i32)
+DEF_HELPER_2(neon_cge_u16, i32, i32, i32)
+DEF_HELPER_2(neon_cge_s16, i32, i32, i32)
+DEF_HELPER_2(neon_cge_u32, i32, i32, i32)
+DEF_HELPER_2(neon_cge_s32, i32, i32, i32)
+
+DEF_HELPER_2(neon_min_u8, i32, i32, i32)
+DEF_HELPER_2(neon_min_s8, i32, i32, i32)
+DEF_HELPER_2(neon_min_u16, i32, i32, i32)
+DEF_HELPER_2(neon_min_s16, i32, i32, i32)
+DEF_HELPER_2(neon_min_u32, i32, i32, i32)
+DEF_HELPER_2(neon_min_s32, i32, i32, i32)
+DEF_HELPER_2(neon_max_u8, i32, i32, i32)
+DEF_HELPER_2(neon_max_s8, i32, i32, i32)
+DEF_HELPER_2(neon_max_u16, i32, i32, i32)
+DEF_HELPER_2(neon_max_s16, i32, i32, i32)
+DEF_HELPER_2(neon_max_u32, i32, i32, i32)
+DEF_HELPER_2(neon_max_s32, i32, i32, i32)
+DEF_HELPER_2(neon_pmin_u8, i32, i32, i32)
+DEF_HELPER_2(neon_pmin_s8, i32, i32, i32)
+DEF_HELPER_2(neon_pmin_u16, i32, i32, i32)
+DEF_HELPER_2(neon_pmin_s16, i32, i32, i32)
+DEF_HELPER_2(neon_pmax_u8, i32, i32, i32)
+DEF_HELPER_2(neon_pmax_s8, i32, i32, i32)
+DEF_HELPER_2(neon_pmax_u16, i32, i32, i32)
+DEF_HELPER_2(neon_pmax_s16, i32, i32, i32)
+
+DEF_HELPER_2(neon_abd_u8, i32, i32, i32)
+DEF_HELPER_2(neon_abd_s8, i32, i32, i32)
+DEF_HELPER_2(neon_abd_u16, i32, i32, i32)
+DEF_HELPER_2(neon_abd_s16, i32, i32, i32)
+DEF_HELPER_2(neon_abd_u32, i32, i32, i32)
+DEF_HELPER_2(neon_abd_s32, i32, i32, i32)
+
+DEF_HELPER_2(neon_shl_u8, i32, i32, i32)
+DEF_HELPER_2(neon_shl_s8, i32, i32, i32)
+DEF_HELPER_2(neon_shl_u16, i32, i32, i32)
+DEF_HELPER_2(neon_shl_s16, i32, i32, i32)
+DEF_HELPER_2(neon_shl_u32, i32, i32, i32)
+DEF_HELPER_2(neon_shl_s32, i32, i32, i32)
+DEF_HELPER_2(neon_shl_u64, i64, i64, i64)
+DEF_HELPER_2(neon_shl_s64, i64, i64, i64)
+DEF_HELPER_2(neon_rshl_u8, i32, i32, i32)
+DEF_HELPER_2(neon_rshl_s8, i32, i32, i32)
+DEF_HELPER_2(neon_rshl_u16, i32, i32, i32)
+DEF_HELPER_2(neon_rshl_s16, i32, i32, i32)
+DEF_HELPER_2(neon_rshl_u32, i32, i32, i32)
+DEF_HELPER_2(neon_rshl_s32, i32, i32, i32)
+DEF_HELPER_2(neon_rshl_u64, i64, i64, i64)
+DEF_HELPER_2(neon_rshl_s64, i64, i64, i64)
+DEF_HELPER_3(neon_qshl_u8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qshl_s8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qshl_u16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qshl_s16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qshl_u32, i32, env, i32, i32)
+DEF_HELPER_3(neon_qshl_s32, i32, env, i32, i32)
+DEF_HELPER_3(neon_qshl_u64, i64, env, i64, i64)
+DEF_HELPER_3(neon_qshl_s64, i64, env, i64, i64)
+DEF_HELPER_3(neon_qrshl_u8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrshl_s8, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrshl_u16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrshl_s16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrshl_u32, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrshl_s32, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrshl_u64, i64, env, i64, i64)
+DEF_HELPER_3(neon_qrshl_s64, i64, env, i64, i64)
+
+DEF_HELPER_2(neon_add_u8, i32, i32, i32)
+DEF_HELPER_2(neon_add_u16, i32, i32, i32)
+DEF_HELPER_2(neon_padd_u8, i32, i32, i32)
+DEF_HELPER_2(neon_padd_u16, i32, i32, i32)
+DEF_HELPER_2(neon_sub_u8, i32, i32, i32)
+DEF_HELPER_2(neon_sub_u16, i32, i32, i32)
+DEF_HELPER_2(neon_mul_u8, i32, i32, i32)
+DEF_HELPER_2(neon_mul_u16, i32, i32, i32)
+DEF_HELPER_2(neon_mul_p8, i32, i32, i32)
+
+DEF_HELPER_2(neon_tst_u8, i32, i32, i32)
+DEF_HELPER_2(neon_tst_u16, i32, i32, i32)
+DEF_HELPER_2(neon_tst_u32, i32, i32, i32)
+DEF_HELPER_2(neon_ceq_u8, i32, i32, i32)
+DEF_HELPER_2(neon_ceq_u16, i32, i32, i32)
+DEF_HELPER_2(neon_ceq_u32, i32, i32, i32)
+
+DEF_HELPER_1(neon_abs_s8, i32, i32)
+DEF_HELPER_1(neon_abs_s16, i32, i32)
+DEF_HELPER_1(neon_clz_u8, i32, i32)
+DEF_HELPER_1(neon_clz_u16, i32, i32)
+DEF_HELPER_1(neon_cls_s8, i32, i32)
+DEF_HELPER_1(neon_cls_s16, i32, i32)
+DEF_HELPER_1(neon_cls_s32, i32, i32)
+DEF_HELPER_1(neon_cnt_u8, i32, i32)
+
+DEF_HELPER_3(neon_qdmulh_s16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrdmulh_s16, i32, env, i32, i32)
+DEF_HELPER_3(neon_qdmulh_s32, i32, env, i32, i32)
+DEF_HELPER_3(neon_qrdmulh_s32, i32, env, i32, i32)
+
+DEF_HELPER_1(neon_narrow_u8, i32, i64)
+DEF_HELPER_1(neon_narrow_u16, i32, i64)
+DEF_HELPER_2(neon_narrow_sat_u8, i32, env, i64)
+DEF_HELPER_2(neon_narrow_sat_s8, i32, env, i64)
+DEF_HELPER_2(neon_narrow_sat_u16, i32, env, i64)
+DEF_HELPER_2(neon_narrow_sat_s16, i32, env, i64)
+DEF_HELPER_2(neon_narrow_sat_u32, i32, env, i64)
+DEF_HELPER_2(neon_narrow_sat_s32, i32, env, i64)
+DEF_HELPER_1(neon_narrow_high_u8, i32, i64)
+DEF_HELPER_1(neon_narrow_high_u16, i32, i64)
+DEF_HELPER_1(neon_narrow_round_high_u8, i32, i64)
+DEF_HELPER_1(neon_narrow_round_high_u16, i32, i64)
+DEF_HELPER_1(neon_widen_u8, i64, i32)
+DEF_HELPER_1(neon_widen_s8, i64, i32)
+DEF_HELPER_1(neon_widen_u16, i64, i32)
+DEF_HELPER_1(neon_widen_s16, i64, i32)
+
+DEF_HELPER_2(neon_addl_u16, i64, i64, i64)
+DEF_HELPER_2(neon_addl_u32, i64, i64, i64)
+DEF_HELPER_2(neon_paddl_u16, i64, i64, i64)
+DEF_HELPER_2(neon_paddl_u32, i64, i64, i64)
+DEF_HELPER_2(neon_subl_u16, i64, i64, i64)
+DEF_HELPER_2(neon_subl_u32, i64, i64, i64)
+DEF_HELPER_3(neon_addl_saturate_s32, i64, env, i64, i64)
+DEF_HELPER_3(neon_addl_saturate_s64, i64, env, i64, i64)
+DEF_HELPER_2(neon_abdl_u16, i64, i32, i32)
+DEF_HELPER_2(neon_abdl_s16, i64, i32, i32)
+DEF_HELPER_2(neon_abdl_u32, i64, i32, i32)
+DEF_HELPER_2(neon_abdl_s32, i64, i32, i32)
+DEF_HELPER_2(neon_abdl_u64, i64, i32, i32)
+DEF_HELPER_2(neon_abdl_s64, i64, i32, i32)
+DEF_HELPER_2(neon_mull_u8, i64, i32, i32)
+DEF_HELPER_2(neon_mull_s8, i64, i32, i32)
+DEF_HELPER_2(neon_mull_u16, i64, i32, i32)
+DEF_HELPER_2(neon_mull_s16, i64, i32, i32)
+
+DEF_HELPER_1(neon_negl_u16, i64, i64)
+DEF_HELPER_1(neon_negl_u32, i64, i64)
+DEF_HELPER_1(neon_negl_u64, i64, i64)
+
+DEF_HELPER_2(neon_qabs_s8, i32, env, i32)
+DEF_HELPER_2(neon_qabs_s16, i32, env, i32)
+DEF_HELPER_2(neon_qabs_s32, i32, env, i32)
+DEF_HELPER_2(neon_qneg_s8, i32, env, i32)
+DEF_HELPER_2(neon_qneg_s16, i32, env, i32)
+DEF_HELPER_2(neon_qneg_s32, i32, env, i32)
+
+DEF_HELPER_0(neon_trn_u8, void)
+DEF_HELPER_0(neon_trn_u16, void)
+DEF_HELPER_0(neon_unzip_u8, void)
+DEF_HELPER_0(neon_zip_u8, void)
+DEF_HELPER_0(neon_zip_u16, void)
+
+DEF_HELPER_2(neon_min_f32, i32, i32, i32)
+DEF_HELPER_2(neon_max_f32, i32, i32, i32)
+DEF_HELPER_2(neon_abd_f32, i32, i32, i32)
+DEF_HELPER_2(neon_add_f32, i32, i32, i32)
+DEF_HELPER_2(neon_sub_f32, i32, i32, i32)
+DEF_HELPER_2(neon_mul_f32, i32, i32, i32)
+DEF_HELPER_2(neon_ceq_f32, i32, i32, i32)
+DEF_HELPER_2(neon_cge_f32, i32, i32, i32)
+DEF_HELPER_2(neon_cgt_f32, i32, i32, i32)
+DEF_HELPER_2(neon_acge_f32, i32, i32, i32)
+DEF_HELPER_2(neon_acgt_f32, i32, i32, i32)
/* iwmmxt_helper.c */
-DEF_HELPER_1_2(iwmmxt_maddsq, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_madduq, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_sadb, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_sadw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_mulslw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_mulshw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_mululw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_muluhw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_macsw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_2(iwmmxt_macuw, uint64_t, (uint64_t, uint64_t))
-DEF_HELPER_1_1(iwmmxt_setpsr_nz, uint32_t, (uint64_t))
+DEF_HELPER_2(iwmmxt_maddsq, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_madduq, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_sadb, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_sadw, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_mulslw, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_mulshw, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_mululw, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_muluhw, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_macsw, i64, i64, i64)
+DEF_HELPER_2(iwmmxt_macuw, i64, i64, i64)
+DEF_HELPER_1(iwmmxt_setpsr_nz, i32, i64)
#define DEF_IWMMXT_HELPER_SIZE_ENV(name) \
-DEF_HELPER_1_3(iwmmxt_##name##b, uint64_t, (CPUState *, uint64_t, uint64_t)) \
-DEF_HELPER_1_3(iwmmxt_##name##w, uint64_t, (CPUState *, uint64_t, uint64_t)) \
-DEF_HELPER_1_3(iwmmxt_##name##l, uint64_t, (CPUState *, uint64_t, uint64_t)) \
+DEF_HELPER_3(iwmmxt_##name##b, i64, env, i64, i64) \
+DEF_HELPER_3(iwmmxt_##name##w, i64, env, i64, i64) \
+DEF_HELPER_3(iwmmxt_##name##l, i64, env, i64, i64) \
DEF_IWMMXT_HELPER_SIZE_ENV(unpackl)
DEF_IWMMXT_HELPER_SIZE_ENV(unpackh)
-DEF_HELPER_1_2(iwmmxt_unpacklub, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackluw, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpacklul, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackhub, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackhuw, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackhul, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpacklsb, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpacklsw, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpacklsl, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackhsb, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackhsw, uint64_t, (CPUState *, uint64_t))
-DEF_HELPER_1_2(iwmmxt_unpackhsl, uint64_t, (CPUState *, uint64_t))
+DEF_HELPER_2(iwmmxt_unpacklub, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackluw, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpacklul, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackhub, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackhuw, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackhul, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpacklsb, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpacklsw, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpacklsl, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackhsb, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackhsw, i64, env, i64)
+DEF_HELPER_2(iwmmxt_unpackhsl, i64, env, i64)
DEF_IWMMXT_HELPER_SIZE_ENV(cmpeq)
DEF_IWMMXT_HELPER_SIZE_ENV(cmpgtu)
DEF_IWMMXT_HELPER_SIZE_ENV(subs)
DEF_IWMMXT_HELPER_SIZE_ENV(adds)
-DEF_HELPER_1_3(iwmmxt_avgb0, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_avgb1, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_avgw0, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_avgw1, uint64_t, (CPUState *, uint64_t, uint64_t))
-
-DEF_HELPER_1_2(iwmmxt_msadb, uint64_t, (uint64_t, uint64_t))
-
-DEF_HELPER_1_3(iwmmxt_align, uint64_t, (uint64_t, uint64_t, uint32_t))
-DEF_HELPER_1_4(iwmmxt_insr, uint64_t, (uint64_t, uint32_t, uint32_t, uint32_t))
-
-DEF_HELPER_1_1(iwmmxt_bcstb, uint64_t, (uint32_t))
-DEF_HELPER_1_1(iwmmxt_bcstw, uint64_t, (uint32_t))
-DEF_HELPER_1_1(iwmmxt_bcstl, uint64_t, (uint32_t))
-
-DEF_HELPER_1_1(iwmmxt_addcb, uint64_t, (uint64_t))
-DEF_HELPER_1_1(iwmmxt_addcw, uint64_t, (uint64_t))
-DEF_HELPER_1_1(iwmmxt_addcl, uint64_t, (uint64_t))
-
-DEF_HELPER_1_1(iwmmxt_msbb, uint32_t, (uint64_t))
-DEF_HELPER_1_1(iwmmxt_msbw, uint32_t, (uint64_t))
-DEF_HELPER_1_1(iwmmxt_msbl, uint32_t, (uint64_t))
-
-DEF_HELPER_1_3(iwmmxt_srlw, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_srll, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_srlq, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_sllw, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_slll, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_sllq, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_sraw, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_sral, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_sraq, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_rorw, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_rorl, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_rorq, uint64_t, (CPUState *, uint64_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_shufh, uint64_t, (CPUState *, uint64_t, uint32_t))
-
-DEF_HELPER_1_3(iwmmxt_packuw, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_packul, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_packuq, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_packsw, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_packsl, uint64_t, (CPUState *, uint64_t, uint64_t))
-DEF_HELPER_1_3(iwmmxt_packsq, uint64_t, (CPUState *, uint64_t, uint64_t))
-
-DEF_HELPER_1_3(iwmmxt_muladdsl, uint64_t, (uint64_t, uint32_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_muladdsw, uint64_t, (uint64_t, uint32_t, uint32_t))
-DEF_HELPER_1_3(iwmmxt_muladdswl, uint64_t, (uint64_t, uint32_t, uint32_t))
-
-#undef DEF_HELPER
-#undef DEF_HELPER_0_0
-#undef DEF_HELPER_0_1
-#undef DEF_HELPER_0_2
-#undef DEF_HELPER_1_0
-#undef DEF_HELPER_1_1
-#undef DEF_HELPER_1_2
-#undef DEF_HELPER_1_3
-#undef GEN_HELPER
+DEF_HELPER_3(iwmmxt_avgb0, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_avgb1, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_avgw0, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_avgw1, i64, env, i64, i64)
+
+DEF_HELPER_2(iwmmxt_msadb, i64, i64, i64)
+
+DEF_HELPER_3(iwmmxt_align, i64, i64, i64, i32)
+DEF_HELPER_4(iwmmxt_insr, i64, i64, i32, i32, i32)
+
+DEF_HELPER_1(iwmmxt_bcstb, i64, i32)
+DEF_HELPER_1(iwmmxt_bcstw, i64, i32)
+DEF_HELPER_1(iwmmxt_bcstl, i64, i32)
+
+DEF_HELPER_1(iwmmxt_addcb, i64, i64)
+DEF_HELPER_1(iwmmxt_addcw, i64, i64)
+DEF_HELPER_1(iwmmxt_addcl, i64, i64)
+
+DEF_HELPER_1(iwmmxt_msbb, i32, i64)
+DEF_HELPER_1(iwmmxt_msbw, i32, i64)
+DEF_HELPER_1(iwmmxt_msbl, i32, i64)
+
+DEF_HELPER_3(iwmmxt_srlw, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_srll, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_srlq, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_sllw, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_slll, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_sllq, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_sraw, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_sral, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_sraq, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_rorw, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_rorl, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_rorq, i64, env, i64, i32)
+DEF_HELPER_3(iwmmxt_shufh, i64, env, i64, i32)
+
+DEF_HELPER_3(iwmmxt_packuw, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_packul, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_packuq, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_packsw, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_packsl, i64, env, i64, i64)
+DEF_HELPER_3(iwmmxt_packsq, i64, env, i64, i64)
+
+DEF_HELPER_3(iwmmxt_muladdsl, i64, i64, i32, i32)
+DEF_HELPER_3(iwmmxt_muladdsw, i64, i64, i32, i32)
+DEF_HELPER_3(iwmmxt_muladdswl, i64, i64, i32, i32)
+
+#include "def-helper.h"
*/
#ifdef ARITH_GE
-#define GE_ARG , uint32_t *gep
+#define GE_ARG , void *gep
#define DECLARE_GE uint32_t ge = 0
-#define SET_GE *gep = ge
+#define SET_GE *(uint32_t *)gep = ge
#else
#define GE_ARG
#define DECLARE_GE do{}while(0)
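For illustration, a hedged sketch of a helper built on these macros, assuming ARITH_GE is defined; the name helper_demo_add16 and the GE update rule are invented for the example and are not the real parallel add/sub templates:

uint32_t helper_demo_add16(uint32_t a, uint32_t b GE_ARG) /* GE_ARG appends ", void *gep" */
{
    DECLARE_GE;                        /* uint32_t ge = 0 */
    uint32_t lo = (a & 0xffff) + (b & 0xffff);
    uint32_t hi = (a >> 16) + (b >> 16);
    if (lo & 0x10000)                  /* illustrative carry-based update only */
        ge |= 0x3;
    SET_GE;                            /* *(uint32_t *)gep = ge; matches the "ptr" arg in helpers.h */
    return (lo & 0xffff) | (hi << 16);
}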
#include "tcg-op.h"
#include "qemu-log.h"
+#include "helpers.h"
#define GEN_HELPER 1
#include "helpers.h"
#define DISAS_WFI 4
#define DISAS_SWI 5
-static TCGv cpu_env;
+static TCGv_ptr cpu_env;
/* We reuse the same 64-bit temporaries for efficiency. */
-static TCGv cpu_V0, cpu_V1, cpu_M0;
+static TCGv_i64 cpu_V0, cpu_V1, cpu_M0;
/* FIXME: These should be removed. */
static TCGv cpu_T[2];
-static TCGv cpu_F0s, cpu_F1s, cpu_F0d, cpu_F1d;
+static TCGv cpu_F0s, cpu_F1s;
+static TCGv_i64 cpu_F0d, cpu_F1d;
#define ICOUNT_TEMP cpu_T[0]
#include "gen-icount.h"
/* initialize TCG globals. */
void arm_translate_init(void)
{
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
+
+ cpu_T[0] = tcg_global_reg_new_i32(TCG_AREG1, "T0");
+ cpu_T[1] = tcg_global_reg_new_i32(TCG_AREG2, "T1");
- cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I32, TCG_AREG1, "T0");
- cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I32, TCG_AREG2, "T1");
+#define GEN_HELPER 2
+#include "helpers.h"
}
/* The code generator doesn't like lots of temporaries, so maintain our own
static TCGv temps[MAX_TEMPS];
/* Allocate a temporary variable. */
-static TCGv new_tmp(void)
+static TCGv_i32 new_tmp(void)
{
TCGv tmp;
if (num_temps == MAX_TEMPS)
abort();
- if (GET_TCGV(temps[num_temps]))
+ if (GET_TCGV_I32(temps[num_temps]))
return temps[num_temps++];
- tmp = tcg_temp_new(TCG_TYPE_I32);
+ tmp = tcg_temp_new_i32();
temps[num_temps++] = tmp;
return tmp;
}
int i;
num_temps--;
i = num_temps;
- if (GET_TCGV(temps[i]) == GET_TCGV(tmp))
+ if (TCGV_EQUAL(temps[i], tmp))
return;
/* Shuffle this temp to the last slot. */
- while (GET_TCGV(temps[i]) != GET_TCGV(tmp))
+ while (!TCGV_EQUAL(temps[i], tmp))
i--;
while (i < num_temps) {
temps[i] = temps[i + 1];
/* FIXME: Most targets have native widening multiplication.
It would be good to use that instead of a full wide multiply. */
/* 32x32->64 multiply. Marks inputs as dead. */
-static TCGv gen_mulu_i64_i32(TCGv a, TCGv b)
+static TCGv_i64 gen_mulu_i64_i32(TCGv a, TCGv b)
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp1 = tcg_temp_new_i64();
+ TCGv_i64 tmp2 = tcg_temp_new_i64();
tcg_gen_extu_i32_i64(tmp1, a);
dead_tmp(a);
return tmp1;
}
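The middle of gen_mulu_i64_i32 is elided above; purely to illustrate the typed-temporary pattern the patch converts to (it mirrors the CRIS t_gen_mulu further down), a 32x32->64 unsigned multiply now reads roughly:

TCGv_i64 t0 = tcg_temp_new_i64();
TCGv_i64 t1 = tcg_temp_new_i64();
tcg_gen_extu_i32_i64(t0, a);           /* zero-extend the 32-bit operands */
tcg_gen_extu_i32_i64(t1, b);
tcg_gen_mul_i64(t0, t0, t1);           /* 64-bit product left in t0 */
tcg_temp_free_i64(t1);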
-static TCGv gen_muls_i64_i32(TCGv a, TCGv b)
+static TCGv_i64 gen_muls_i64_i32(TCGv a, TCGv b)
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp1 = tcg_temp_new_i64();
+ TCGv_i64 tmp2 = tcg_temp_new_i64();
tcg_gen_ext_i32_i64(tmp1, a);
dead_tmp(a);
/* Unsigned 32x32->64 multiply. */
static void gen_op_mull_T0_T1(void)
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp1 = tcg_temp_new_i64();
+ TCGv_i64 tmp2 = tcg_temp_new_i64();
tcg_gen_extu_i32_i64(tmp1, cpu_T[0]);
tcg_gen_extu_i32_i64(tmp2, cpu_T[1]);
/* Signed 32x32->64 multiply. */
static void gen_imull(TCGv a, TCGv b)
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp1 = tcg_temp_new_i64();
+ TCGv_i64 tmp2 = tcg_temp_new_i64();
tcg_gen_ext_i32_i64(tmp1, a);
tcg_gen_ext_i32_i64(tmp2, b);
}
static void gen_arm_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
{
- TCGv tmp;
+ TCGv_ptr tmp;
switch (op1) {
#define gen_pas_helper(name) glue(gen_helper_,name)(a, a, b, tmp)
case 1:
- tmp = tcg_temp_new(TCG_TYPE_PTR);
+ tmp = tcg_temp_new_ptr();
tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUState, GE));
PAS_OP(s)
break;
case 5:
- tmp = tcg_temp_new(TCG_TYPE_PTR);
+ tmp = tcg_temp_new_ptr();
tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUState, GE));
PAS_OP(u)
break;
}
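The TCGv_ptr built in this function is what arrives in the helper as its void *gep argument (see the GE_ARG/SET_GE change above). A condensed sketch of the flow, using the sadd16 helper generated by PAS_OP(s):

TCGv_ptr gep = tcg_temp_new_ptr();
tcg_gen_addi_ptr(gep, cpu_env, offsetof(CPUState, GE));  /* address of env->GE */
gen_helper_sadd16(a, a, b, gep);  /* result back into a; helper sees (a, b, &env->GE) */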
static void gen_thumb2_parallel_addsub(int op1, int op2, TCGv a, TCGv b)
{
- TCGv tmp;
+ TCGv_ptr tmp;
switch (op1) {
#define gen_pas_helper(name) glue(gen_helper_,name)(a, a, b, tmp)
case 0:
- tmp = tcg_temp_new(TCG_TYPE_PTR);
+ tmp = tcg_temp_new_ptr();
tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUState, GE));
PAS_OP(s)
break;
case 4:
- tmp = tcg_temp_new(TCG_TYPE_PTR);
+ tmp = tcg_temp_new_ptr();
tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUState, GE));
PAS_OP(u)
break;
dead_tmp(var);
}
-static inline void neon_load_reg64(TCGv var, int reg)
+static inline void neon_load_reg64(TCGv_i64 var, int reg)
{
tcg_gen_ld_i64(var, cpu_env, vfp_reg_offset(1, reg));
}
-static inline void neon_store_reg64(TCGv var, int reg)
+static inline void neon_store_reg64(TCGv_i64 var, int reg)
{
tcg_gen_st_i64(var, cpu_env, vfp_reg_offset(1, reg));
}
#define ARM_CP_RW_BIT (1 << 20)
-static inline void iwmmxt_load_reg(TCGv var, int reg)
+static inline void iwmmxt_load_reg(TCGv_i64 var, int reg)
{
tcg_gen_ld_i64(var, cpu_env, offsetof(CPUState, iwmmxt.regs[reg]));
}
-static inline void iwmmxt_store_reg(TCGv var, int reg)
+static inline void iwmmxt_store_reg(TCGv_i64 var, int reg)
{
tcg_gen_st_i64(var, cpu_env, offsetof(CPUState, iwmmxt.regs[reg]));
}
tcg_gen_or_i32(dest, t, f);
}
-static inline void gen_neon_narrow(int size, TCGv dest, TCGv src)
+static inline void gen_neon_narrow(int size, TCGv dest, TCGv_i64 src)
{
switch (size) {
case 0: gen_helper_neon_narrow_u8(dest, src); break;
}
}
-static inline void gen_neon_narrow_sats(int size, TCGv dest, TCGv src)
+static inline void gen_neon_narrow_sats(int size, TCGv dest, TCGv_i64 src)
{
switch (size) {
case 0: gen_helper_neon_narrow_sat_s8(dest, cpu_env, src); break;
}
}
-static inline void gen_neon_narrow_satu(int size, TCGv dest, TCGv src)
+static inline void gen_neon_narrow_satu(int size, TCGv dest, TCGv_i64 src)
{
switch (size) {
case 0: gen_helper_neon_narrow_sat_u8(dest, cpu_env, src); break;
}
}
-static inline void gen_neon_widen(TCGv dest, TCGv src, int size, int u)
+static inline void gen_neon_widen(TCGv_i64 dest, TCGv src, int size, int u)
{
if (u) {
switch (size) {
}
}
-static inline void gen_neon_negl(TCGv var, int size)
+static inline void gen_neon_negl(TCGv_i64 var, int size)
{
switch (size) {
case 0: gen_helper_neon_negl_u16(var, var); break;
}
}
-static inline void gen_neon_addl_saturate(TCGv op0, TCGv op1, int size)
+static inline void gen_neon_addl_saturate(TCGv_i64 op0, TCGv_i64 op1, int size)
{
switch (size) {
case 1: gen_helper_neon_addl_saturate_s32(op0, cpu_env, op0, op1); break;
}
}
-static inline void gen_neon_mull(TCGv dest, TCGv a, TCGv b, int size, int u)
+static inline void gen_neon_mull(TCGv_i64 dest, TCGv a, TCGv b, int size, int u)
{
- TCGv tmp;
+ TCGv_i64 tmp;
switch ((size << 1) | u) {
case 0: gen_helper_neon_mull_s8(dest, a, b); break;
TCGv tmp;
TCGv tmp2;
TCGv tmp3;
+ TCGv_i64 tmp64;
if (!vfp_enabled(env))
return 1;
imm = (uint16_t)shift;
imm |= imm << 16;
tmp2 = tcg_const_i32(imm);
+ TCGV_UNUSED_I64(tmp64);
break;
case 2:
imm = (uint32_t)shift;
tmp2 = tcg_const_i32(imm);
+ TCGV_UNUSED_I64(tmp64);
case 3:
- tmp2 = tcg_const_i64(shift);
+ tmp64 = tcg_const_i64(shift);
+ TCGV_UNUSED(tmp2);
break;
default:
abort();
neon_load_reg64(cpu_V0, rm + pass);
if (q) {
if (u)
- gen_helper_neon_rshl_u64(cpu_V0, cpu_V0, tmp2);
+ gen_helper_neon_rshl_u64(cpu_V0, cpu_V0, tmp64);
else
- gen_helper_neon_rshl_s64(cpu_V0, cpu_V0, tmp2);
+ gen_helper_neon_rshl_s64(cpu_V0, cpu_V0, tmp64);
} else {
if (u)
- gen_helper_neon_shl_u64(cpu_V0, cpu_V0, tmp2);
+ gen_helper_neon_shl_u64(cpu_V0, cpu_V0, tmp64);
else
- gen_helper_neon_shl_s64(cpu_V0, cpu_V0, tmp2);
+ gen_helper_neon_shl_s64(cpu_V0, cpu_V0, tmp64);
}
} else {
tmp = neon_load_reg(rm + pass, 0);
neon_load_reg64(cpu_V1, rm);
}
} else if (q) {
- tmp = tcg_temp_new(TCG_TYPE_I64);
+ tmp64 = tcg_temp_new_i64();
if (imm < 8) {
neon_load_reg64(cpu_V0, rn);
- neon_load_reg64(tmp, rn + 1);
+ neon_load_reg64(tmp64, rn + 1);
} else {
neon_load_reg64(cpu_V0, rn + 1);
- neon_load_reg64(tmp, rm);
+ neon_load_reg64(tmp64, rm);
}
tcg_gen_shri_i64(cpu_V0, cpu_V0, (imm & 7) * 8);
- tcg_gen_shli_i64(cpu_V1, tmp, 64 - ((imm & 7) * 8));
+ tcg_gen_shli_i64(cpu_V1, tmp64, 64 - ((imm & 7) * 8));
tcg_gen_or_i64(cpu_V0, cpu_V0, cpu_V1);
if (imm < 8) {
neon_load_reg64(cpu_V1, rm);
imm -= 8;
}
tcg_gen_shli_i64(cpu_V1, cpu_V1, 64 - (imm * 8));
- tcg_gen_shri_i64(tmp, tmp, imm * 8);
- tcg_gen_or_i64(cpu_V1, cpu_V1, tmp);
+ tcg_gen_shri_i64(tmp64, tmp64, imm * 8);
+ tcg_gen_or_i64(cpu_V1, cpu_V1, tmp64);
} else {
+ /* BUGFIX: cpu_V0 and cpu_V1 are 64-bit values; the old code shifted
+ them with the 32-bit shift ops. */
neon_load_reg64(cpu_V0, rn);
- tcg_gen_shri_i32(cpu_V0, cpu_V0, imm * 8);
+ tcg_gen_shri_i64(cpu_V0, cpu_V0, imm * 8);
neon_load_reg64(cpu_V1, rm);
- tcg_gen_shli_i32(cpu_V1, cpu_V1, 64 - (imm * 8));
+ tcg_gen_shli_i64(cpu_V1, cpu_V1, 64 - (imm * 8));
tcg_gen_or_i64(cpu_V0, cpu_V0, cpu_V1);
}
neon_store_reg64(cpu_V0, rd);
/* Store a 64-bit value to a register pair. Clobbers val. */
-static void gen_storeq_reg(DisasContext *s, int rlow, int rhigh, TCGv val)
+static void gen_storeq_reg(DisasContext *s, int rlow, int rhigh, TCGv_i64 val)
{
TCGv tmp;
tmp = new_tmp();
}
/* load a 32-bit value from a register and perform a 64-bit accumulate. */
-static void gen_addq_lo(DisasContext *s, TCGv val, int rlow)
+static void gen_addq_lo(DisasContext *s, TCGv_i64 val, int rlow)
{
- TCGv tmp;
+ TCGv_i64 tmp;
TCGv tmp2;
/* Load value and extend to 64 bits. */
- tmp = tcg_temp_new(TCG_TYPE_I64);
+ tmp = tcg_temp_new_i64();
tmp2 = load_reg(s, rlow);
tcg_gen_extu_i32_i64(tmp, tmp2);
dead_tmp(tmp2);
}
/* load and add a 64-bit value from a register pair. */
-static void gen_addq(DisasContext *s, TCGv val, int rlow, int rhigh)
+static void gen_addq(DisasContext *s, TCGv_i64 val, int rlow, int rhigh)
{
- TCGv tmp;
+ TCGv_i64 tmp;
TCGv tmpl;
TCGv tmph;
/* Load 64-bit value rd:rn. */
tmpl = load_reg(s, rlow);
tmph = load_reg(s, rhigh);
- tmp = tcg_temp_new(TCG_TYPE_I64);
+ tmp = tcg_temp_new_i64();
tcg_gen_concat_i32_i64(tmp, tmpl, tmph);
dead_tmp(tmpl);
dead_tmp(tmph);
}
/* Set N and Z flags from a 64-bit value. */
-static void gen_logicq_cc(TCGv val)
+static void gen_logicq_cc(TCGv_i64 val)
{
TCGv tmp = new_tmp();
gen_helper_logicq_cc(tmp, val);
TCGv tmp2;
TCGv tmp3;
TCGv addr;
+ TCGv_i64 tmp64;
insn = ldl_code(s->pc);
s->pc += 4;
tcg_gen_sari_i32(tmp2, tmp2, 16);
else
gen_sxth(tmp2);
- tmp2 = gen_muls_i64_i32(tmp, tmp2);
- tcg_gen_shri_i64(tmp2, tmp2, 16);
+ tmp64 = gen_muls_i64_i32(tmp, tmp2);
+ tcg_gen_shri_i64(tmp64, tmp64, 16);
tmp = new_tmp();
- tcg_gen_trunc_i64_i32(tmp, tmp2);
+ tcg_gen_trunc_i64_i32(tmp, tmp64);
if ((sh & 2) == 0) {
tmp2 = load_reg(s, rn);
gen_helper_add_setq(tmp, tmp, tmp2);
gen_mulxy(tmp, tmp2, sh & 2, sh & 4);
dead_tmp(tmp2);
if (op1 == 2) {
- tmp2 = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_ext_i32_i64(tmp2, tmp);
+ tmp64 = tcg_temp_new_i64();
+ tcg_gen_ext_i32_i64(tmp64, tmp);
dead_tmp(tmp);
- gen_addq(s, tmp2, rn, rd);
- gen_storeq_reg(s, rn, rd, tmp2);
+ gen_addq(s, tmp64, rn, rd);
+ gen_storeq_reg(s, rn, rd, tmp64);
} else {
if (op1 == 0) {
tmp2 = load_reg(s, rn);
tmp = load_reg(s, rs);
tmp2 = load_reg(s, rm);
if (insn & (1 << 22))
- tmp = gen_muls_i64_i32(tmp, tmp2);
+ tmp64 = gen_muls_i64_i32(tmp, tmp2);
else
- tmp = gen_mulu_i64_i32(tmp, tmp2);
+ tmp64 = gen_mulu_i64_i32(tmp, tmp2);
if (insn & (1 << 21)) /* mult accumulate */
- gen_addq(s, tmp, rn, rd);
+ gen_addq(s, tmp64, rn, rd);
if (!(insn & (1 << 23))) { /* double accumulate */
ARCH(6);
- gen_addq_lo(s, tmp, rn);
- gen_addq_lo(s, tmp, rd);
+ gen_addq_lo(s, tmp64, rn);
+ gen_addq_lo(s, tmp64, rd);
}
if (insn & (1 << 20))
- gen_logicq_cc(tmp);
- gen_storeq_reg(s, rn, rd, tmp);
+ gen_logicq_cc(tmp64);
+ gen_storeq_reg(s, rn, rd, tmp64);
break;
}
} else {
tmp2 = load_reg(s, rs);
if (insn & (1 << 20)) {
/* Signed multiply most significant [accumulate]. */
- tmp2 = gen_muls_i64_i32(tmp, tmp2);
+ tmp64 = gen_muls_i64_i32(tmp, tmp2);
if (insn & (1 << 5))
- tcg_gen_addi_i64(tmp2, tmp2, 0x80000000u);
- tcg_gen_shri_i64(tmp2, tmp2, 32);
+ tcg_gen_addi_i64(tmp64, tmp64, 0x80000000u);
+ tcg_gen_shri_i64(tmp64, tmp64, 32);
tmp = new_tmp();
- tcg_gen_trunc_i64_i32(tmp, tmp2);
+ tcg_gen_trunc_i64_i32(tmp, tmp64);
if (rn != 15) {
tmp2 = load_reg(s, rn);
if (insn & (1 << 6)) {
dead_tmp(tmp2);
if (insn & (1 << 22)) {
/* smlald, smlsld */
- tmp2 = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_ext_i32_i64(tmp2, tmp);
+ tmp64 = tcg_temp_new_i64();
+ tcg_gen_ext_i32_i64(tmp64, tmp);
dead_tmp(tmp);
- gen_addq(s, tmp2, rd, rn);
- gen_storeq_reg(s, rd, rn, tmp2);
+ gen_addq(s, tmp64, rd, rn);
+ gen_storeq_reg(s, rd, rn, tmp64);
} else {
/* smuad, smusd, smlad, smlsd */
if (rd != 15)
TCGv tmp2;
TCGv tmp3;
TCGv addr;
+ TCGv_i64 tmp64;
int op;
int shiftop;
int conds;
tcg_gen_sari_i32(tmp2, tmp2, 16);
else
gen_sxth(tmp2);
- tmp2 = gen_muls_i64_i32(tmp, tmp2);
- tcg_gen_shri_i64(tmp2, tmp2, 16);
+ tmp64 = gen_muls_i64_i32(tmp, tmp2);
+ tcg_gen_shri_i64(tmp64, tmp64, 16);
tmp = new_tmp();
- tcg_gen_trunc_i64_i32(tmp, tmp2);
+ tcg_gen_trunc_i64_i32(tmp, tmp64);
if (rs != 15)
{
tmp2 = load_reg(s, rs);
tcg_gen_add_i32(tmp, tmp, tmp2);
}
dead_tmp(tmp2);
- tmp2 = tcg_temp_new(TCG_TYPE_I64);
- gen_addq(s, tmp, rs, rd);
- gen_storeq_reg(s, rs, rd, tmp);
+ /* BUGFIX: extend the 32-bit result to 64 bits before the 64-bit
+ accumulate; the old code passed the 32-bit temp straight to gen_addq. */
+ tmp64 = tcg_temp_new_i64();
+ tcg_gen_ext_i32_i64(tmp64, tmp);
+ dead_tmp(tmp);
+ gen_addq(s, tmp64, rs, rd);
+ gen_storeq_reg(s, rs, rd, tmp64);
} else {
if (op & 0x20) {
/* Unsigned 64-bit multiply */
- tmp = gen_mulu_i64_i32(tmp, tmp2);
+ tmp64 = gen_mulu_i64_i32(tmp, tmp2);
} else {
if (op & 8) {
/* smlalxy */
gen_mulxy(tmp, tmp2, op & 2, op & 1);
dead_tmp(tmp2);
- tmp2 = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_ext_i32_i64(tmp2, tmp);
+ tmp64 = tcg_temp_new_i64();
+ tcg_gen_ext_i32_i64(tmp64, tmp);
dead_tmp(tmp);
- tmp = tmp2;
} else {
/* Signed 64-bit multiply */
- tmp = gen_muls_i64_i32(tmp, tmp2);
+ tmp64 = gen_muls_i64_i32(tmp, tmp2);
}
}
if (op & 4) {
/* umaal */
- gen_addq_lo(s, tmp, rs);
- gen_addq_lo(s, tmp, rd);
+ gen_addq_lo(s, tmp64, rs);
+ gen_addq_lo(s, tmp64, rd);
} else if (op & 0x40) {
/* 64-bit accumulate. */
- gen_addq(s, tmp, rs, rd);
+ gen_addq(s, tmp64, rs, rd);
}
- gen_storeq_reg(s, rs, rd, tmp);
+ gen_storeq_reg(s, rs, rd, tmp64);
}
break;
}
dc->user = (env->uncached_cpsr & 0x1f) == ARM_CPU_MODE_USR;
}
#endif
- cpu_F0s = tcg_temp_new(TCG_TYPE_I32);
- cpu_F1s = tcg_temp_new(TCG_TYPE_I32);
- cpu_F0d = tcg_temp_new(TCG_TYPE_I64);
- cpu_F1d = tcg_temp_new(TCG_TYPE_I64);
+ cpu_F0s = tcg_temp_new_i32();
+ cpu_F1s = tcg_temp_new_i32();
+ cpu_F0d = tcg_temp_new_i64();
+ cpu_F1d = tcg_temp_new_i64();
cpu_V0 = cpu_F0d;
cpu_V1 = cpu_F1d;
/* FIXME: cpu_M0 can probably be the same as cpu_V0. */
- cpu_M0 = tcg_temp_new(TCG_TYPE_I64);
+ cpu_M0 = tcg_temp_new_i64();
next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
lj = -1;
num_insns = 0;
-#define TCG_HELPER_PROTO
+#include "def-helper.h"
-void TCG_HELPER_PROTO helper_raise_exception(uint32_t index);
-void TCG_HELPER_PROTO helper_tlb_flush_pid(uint32_t pid);
-void TCG_HELPER_PROTO helper_spc_write(uint32_t pid);
-void TCG_HELPER_PROTO helper_dump(uint32_t a0, uint32_t a1, uint32_t a2);
-void TCG_HELPER_PROTO helper_rfe(void);
-void TCG_HELPER_PROTO helper_rfn(void);
+DEF_HELPER_1(raise_exception, void, i32)
+DEF_HELPER_1(tlb_flush_pid, void, i32)
+DEF_HELPER_1(spc_write, void, i32)
+DEF_HELPER_3(dump, void, i32, i32, i32)
+DEF_HELPER_0(rfe, void)
+DEF_HELPER_0(rfn, void)
-void TCG_HELPER_PROTO helper_movl_sreg_reg (uint32_t sreg, uint32_t reg);
-void TCG_HELPER_PROTO helper_movl_reg_sreg (uint32_t reg, uint32_t sreg);
+DEF_HELPER_2(movl_sreg_reg, void, i32, i32)
+DEF_HELPER_2(movl_reg_sreg, void, i32, i32)
-void TCG_HELPER_PROTO helper_evaluate_flags_muls(void);
-void TCG_HELPER_PROTO helper_evaluate_flags_mulu(void);
-void TCG_HELPER_PROTO helper_evaluate_flags_mcp(void);
-void TCG_HELPER_PROTO helper_evaluate_flags_alu_4(void);
-void TCG_HELPER_PROTO helper_evaluate_flags_move_4 (void);
-void TCG_HELPER_PROTO helper_evaluate_flags_move_2 (void);
-void TCG_HELPER_PROTO helper_evaluate_flags (void);
-void TCG_HELPER_PROTO helper_top_evaluate_flags(void);
+DEF_HELPER_0(evaluate_flags_muls, void)
+DEF_HELPER_0(evaluate_flags_mulu, void)
+DEF_HELPER_0(evaluate_flags_mcp, void)
+DEF_HELPER_0(evaluate_flags_alu_4, void)
+DEF_HELPER_0(evaluate_flags_move_4, void)
+DEF_HELPER_0(evaluate_flags_move_2, void)
+DEF_HELPER_0(evaluate_flags, void)
+DEF_HELPER_0(top_evaluate_flags, void)
+
+#include "def-helper.h"
#include "crisv32-decode.h"
#include "qemu-common.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
#define DISAS_CRIS 0
#if DISAS_CRIS
#define DIS(x) if (loglevel & CPU_LOG_TB_IN_ASM) x
#define CC_MASK_NZVC 0xf
#define CC_MASK_RNZV 0x10e
-static TCGv cpu_env;
+static TCGv_ptr cpu_env;
static TCGv cpu_R[16];
static TCGv cpu_PR[16];
static TCGv cc_x;
tcg_gen_andi_tl(cpu_PR[r], tn, 3);
else {
if (r == PR_PID)
- tcg_gen_helper_0_1(helper_tlb_flush_pid, tn);
+ gen_helper_tlb_flush_pid(tn);
if (dc->tb_flags & S_FLAG && r == PR_SPC)
- tcg_gen_helper_0_1(helper_spc_write, tn);
+ gen_helper_spc_write(tn);
else if (r == PR_CCS)
dc->cpustate_changed = 1;
tcg_gen_mov_tl(cpu_PR[r], tn);
static inline void t_gen_raise_exception(uint32_t index)
{
- tcg_gen_helper_0_1(helper_raise_exception, tcg_const_tl(index));
+ TCGv_i32 tmp = tcg_const_i32(index);
+ gen_helper_raise_exception(tmp);
+ tcg_temp_free_i32(tmp);
}
static void t_gen_lsl(TCGv d, TCGv a, TCGv b)
{
TCGv t0, t_31;
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
t_31 = tcg_const_tl(31);
tcg_gen_shl_tl(d, a, b);
{
TCGv t0, t_31;
- t0 = tcg_temp_new(TCG_TYPE_TL);
- t_31 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
+ t_31 = tcg_temp_new();
tcg_gen_shr_tl(d, a, b);
tcg_gen_movi_tl(t_31, 31);
{
TCGv t0, t_31;
- t0 = tcg_temp_new(TCG_TYPE_TL);
- t_31 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
+ t_31 = tcg_temp_new();
tcg_gen_sar_tl(d, a, b);
tcg_gen_movi_tl(t_31, 31);
/* 64-bit signed mul, lower result in d and upper in d2. */
static void t_gen_muls(TCGv d, TCGv d2, TCGv a, TCGv b)
{
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
- tcg_gen_ext32s_i64(t0, a);
- tcg_gen_ext32s_i64(t1, b);
+ tcg_gen_ext_i32_i64(t0, a);
+ tcg_gen_ext_i32_i64(t1, b);
tcg_gen_mul_i64(t0, t0, t1);
tcg_gen_trunc_i64_i32(d, t0);
tcg_gen_shri_i64(t0, t0, 32);
tcg_gen_trunc_i64_i32(d2, t0);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
}
/* 64-bit unsigned mul, lower result in d and upper in d2. */
static void t_gen_mulu(TCGv d, TCGv d2, TCGv a, TCGv b)
{
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
tcg_gen_extu_i32_i64(t0, a);
tcg_gen_extu_i32_i64(t1, b);
tcg_gen_shri_i64(t0, t0, 32);
tcg_gen_trunc_i64_i32(d2, t0);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
}
/* 32-bit branch-free binary search for counting leading zeros. */
static void t_gen_lz_i32(TCGv d, TCGv x)
{
- TCGv y, m, n;
+ TCGv_i32 y, m, n;
- y = tcg_temp_new(TCG_TYPE_I32);
- m = tcg_temp_new(TCG_TYPE_I32);
- n = tcg_temp_new(TCG_TYPE_I32);
+ y = tcg_temp_new_i32();
+ m = tcg_temp_new_i32();
+ n = tcg_temp_new_i32();
/* y = -(x >> 16) */
tcg_gen_shri_i32(y, x, 16);
*/
l1 = gen_new_label();
- sbit = tcg_temp_new(TCG_TYPE_TL);
- bset = tcg_temp_new(TCG_TYPE_TL);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ sbit = tcg_temp_new();
+ bset = tcg_temp_new();
+ t0 = tcg_temp_new();
/* Compute bset and sbit. */
tcg_gen_andi_tl(sbit, b, 31);
{
TCGv c;
- c = tcg_temp_new(TCG_TYPE_TL);
+ c = tcg_temp_new();
t_gen_mov_TN_preg(c, PR_CCS);
/* Propagate carry into d. */
tcg_gen_andi_tl(c, c, 1 << flag);
if (dc->flags_x) {
TCGv c;
- c = tcg_temp_new(TCG_TYPE_TL);
+ c = tcg_temp_new();
t_gen_mov_TN_preg(c, PR_CCS);
/* C flag is already at bit 0. */
tcg_gen_andi_tl(c, c, C_FLAG);
} else {
TCGv x, c;
- x = tcg_temp_new(TCG_TYPE_TL);
- c = tcg_temp_new(TCG_TYPE_TL);
+ x = tcg_temp_new();
+ c = tcg_temp_new();
t_gen_mov_TN_preg(x, PR_CCS);
tcg_gen_mov_tl(c, x);
if (dc->flags_x) {
TCGv c;
- c = tcg_temp_new(TCG_TYPE_TL);
+ c = tcg_temp_new();
t_gen_mov_TN_preg(c, PR_CCS);
/* C flag is already at bit 0. */
tcg_gen_andi_tl(c, c, C_FLAG);
} else {
TCGv x, c;
- x = tcg_temp_new(TCG_TYPE_TL);
- c = tcg_temp_new(TCG_TYPE_TL);
+ x = tcg_temp_new();
+ c = tcg_temp_new();
t_gen_mov_TN_preg(x, PR_CCS);
tcg_gen_mov_tl(c, x);
{
TCGv t, org_s;
- t = tcg_temp_new(TCG_TYPE_TL);
- org_s = tcg_temp_new(TCG_TYPE_TL);
+ t = tcg_temp_new();
+ org_s = tcg_temp_new();
/* d and s may refer to the same object. */
tcg_gen_mov_tl(org_s, s);
{
TCGv t;
/* d and s refer to the same object. */
- t = tcg_temp_new(TCG_TYPE_TL);
+ t = tcg_temp_new();
tcg_gen_mov_tl(t, s);
tcg_gen_shli_tl(d, t, 16);
tcg_gen_shri_tl(t, t, 16);
TCGv t, org_s;
/* d and s refer to the same object. */
- t = tcg_temp_new(TCG_TYPE_TL);
- org_s = tcg_temp_new(TCG_TYPE_TL);
+ t = tcg_temp_new();
+ org_s = tcg_temp_new();
tcg_gen_mov_tl(org_s, s);
tcg_gen_shli_tl(t, org_s, bitrev[0].shift);
int l1;
l1 = gen_new_label();
- btaken = tcg_temp_new(TCG_TYPE_TL);
+ btaken = tcg_temp_new();
/* Conditional jmp. */
tcg_gen_mov_tl(btaken, env_btaken);
switch (dc->cc_op)
{
case CC_OP_MCP:
- tcg_gen_helper_0_0(helper_evaluate_flags_mcp);
+ gen_helper_evaluate_flags_mcp();
break;
case CC_OP_MULS:
- tcg_gen_helper_0_0(helper_evaluate_flags_muls);
+ gen_helper_evaluate_flags_muls();
break;
case CC_OP_MULU:
- tcg_gen_helper_0_0(helper_evaluate_flags_mulu);
+ gen_helper_evaluate_flags_mulu();
break;
case CC_OP_MOVE:
case CC_OP_AND:
switch (dc->cc_size)
{
case 4:
- tcg_gen_helper_0_0(helper_evaluate_flags_move_4);
+ gen_helper_evaluate_flags_move_4();
break;
case 2:
- tcg_gen_helper_0_0(helper_evaluate_flags_move_2);
+ gen_helper_evaluate_flags_move_2();
break;
default:
- tcg_gen_helper_0_0(helper_evaluate_flags);
+ gen_helper_evaluate_flags();
break;
}
break;
switch (dc->cc_size)
{
case 4:
- tcg_gen_helper_0_0(helper_evaluate_flags_alu_4);
+ gen_helper_evaluate_flags_alu_4();
break;
default:
- tcg_gen_helper_0_0(helper_evaluate_flags);
+ gen_helper_evaluate_flags();
break;
}
}
writeback = 1;
if (op == CC_OP_BOUND || op == CC_OP_BTST)
- tmp = tcg_temp_local_new(TCG_TYPE_TL);
+ tmp = tcg_temp_local_new();
if (op == CC_OP_CMP) {
- tmp = tcg_temp_new(TCG_TYPE_TL);
+ tmp = tcg_temp_new();
writeback = 0;
} else if (size == 4) {
tmp = d;
writeback = 0;
} else
- tmp = tcg_temp_new(TCG_TYPE_TL);
+ tmp = tcg_temp_new();
cris_pre_alu_update_cc(dc, op, op_a, op_b, size);
tcg_gen_andi_tl(d, d, ~0xffff);
tcg_gen_or_tl(d, d, tmp);
}
- if (GET_TCGV(tmp) != GET_TCGV(d))
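+ /* TCGV_EQUAL() replaces the raw GET_TCGV() comparison for checking
+ whether two TCG values name the same temporary. */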
+ if (!TCGV_EQUAL(tmp, d))
tcg_temp_free(tmp);
}
{
TCGv tmp;
- tmp = tcg_temp_new(TCG_TYPE_TL);
+ tmp = tcg_temp_new();
tcg_gen_xori_tl(tmp, cpu_PR[PR_CCS],
C_FLAG | Z_FLAG);
/* Overlay the C flag on top of the Z. */
{
TCGv n, z;
- n = tcg_temp_new(TCG_TYPE_TL);
- z = tcg_temp_new(TCG_TYPE_TL);
+ n = tcg_temp_new();
+ z = tcg_temp_new();
/* To avoid a shift we overlay everything on
the V flag. */
{
TCGv n, z;
- n = tcg_temp_new(TCG_TYPE_TL);
- z = tcg_temp_new(TCG_TYPE_TL);
+ n = tcg_temp_new();
+ z = tcg_temp_new();
/* To avoid a shift we overlay everything on
the V flag. */
tcg_gen_movi_tl(env_btaken, 1);
}
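+/* TCGv and TCGv_i64 are distinct types after the conversion, so 64-bit
+ memory loads get a dedicated routine; gen_load() below now only
+ handles sizes up to 4 bytes and aborts otherwise. */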
+static void gen_load64(DisasContext *dc, TCGv_i64 dst, TCGv addr)
+{
+ int mem_index = cpu_mmu_index(dc->env);
+
+ /* If we get a fault in a delay slot we must keep the jmp state in
+ the CPU state to be able to re-execute the jmp. */
+ if (dc->delayed_branch == 1)
+ cris_store_direct_jmp(dc);
+
+ tcg_gen_qemu_ld64(dst, addr, mem_index);
+}
+
static void gen_load(DisasContext *dc, TCGv dst, TCGv addr,
unsigned int size, int sign)
{
else if (size == 4) {
tcg_gen_qemu_ld32u(dst, addr, mem_index);
}
- else if (size == 8) {
- tcg_gen_qemu_ld64(dst, addr, mem_index);
+ else {
+ abort();
}
}
tcg_gen_ext8s_i32(d, s);
else if (size == 2)
tcg_gen_ext16s_i32(d, s);
- else if(GET_TCGV(d) != GET_TCGV(s))
+ else if(!TCGV_EQUAL(d, s))
tcg_gen_mov_tl(d, s);
}
tcg_gen_ext8u_i32(d, s);
else if (size == 2)
tcg_gen_ext16u_i32(d, s);
- else if (GET_TCGV(d) != GET_TCGV(s))
+ else if (!TCGV_EQUAL(d, s))
tcg_gen_mov_tl(d, s);
}
DIS(fprintf (logfile, "btstq %u, $r%d\n", dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- l0 = tcg_temp_local_new(TCG_TYPE_TL);
+ l0 = tcg_temp_local_new();
cris_alu(dc, CC_OP_BTST,
l0, cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
cris_update_cc_op(dc, CC_OP_FLAGS, 4);
else {
TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
cris_alu(dc, CC_OP_MOVE,
cpu_R[dc->op2],
t[0] = cpu_R[dc->op2];
t[1] = cpu_R[dc->op1];
} else {
- t[0] = tcg_temp_new(TCG_TYPE_TL);
- t[1] = tcg_temp_new(TCG_TYPE_TL);
+ t[0] = tcg_temp_new();
+ t[1] = tcg_temp_new();
}
}
DIS(fprintf (logfile, "lz $r%u, $r%u\n",
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
dec_prep_alu_r(dc, dc->op1, dc->op2, 4, 0, cpu_R[dc->op2], t0);
cris_alu(dc, CC_OP_LZ, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
tcg_temp_free(t0);
DIS(fprintf (logfile, "bound.%c $r%u, $r%u\n",
memsize_char(size), dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- l0 = tcg_temp_local_new(TCG_TYPE_TL);
+ l0 = tcg_temp_local_new();
dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, l0);
cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], cpu_R[dc->op2], l0, 4);
tcg_temp_free(l0);
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
tcg_gen_sari_tl(t0, cpu_R[dc->op1], 31);
tcg_gen_xor_tl(cpu_R[dc->op2], cpu_R[dc->op1], t0);
tcg_gen_sub_tl(cpu_R[dc->op2], cpu_R[dc->op2], t0);
swapmode_name(dc->op2, modename), dc->op1));
cris_cc_mask(dc, CC_MASK_NZ);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
t_gen_mov_TN_reg(t0, dc->op1);
if (dc->op2 & 8)
tcg_gen_not_tl(t0, t0);
DIS(fprintf (logfile, "addi.%c $r%u, $r%u\n",
memsize_char(memsize_zz(dc)), dc->op2, dc->op1));
cris_cc_mask(dc, 0);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
tcg_gen_shl_tl(t0, cpu_R[dc->op2], tcg_const_tl(dc->zzsize));
tcg_gen_add_tl(cpu_R[dc->op1], cpu_R[dc->op1], t0);
tcg_temp_free(t0);
DIS(fprintf (logfile, "addi.%c $r%u, $r%u, $acr\n",
memsize_char(memsize_zz(dc)), dc->op2, dc->op1));
cris_cc_mask(dc, 0);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
tcg_gen_shl_tl(t0, cpu_R[dc->op2], tcg_const_tl(dc->zzsize));
tcg_gen_add_tl(cpu_R[R_ACR], cpu_R[dc->op1], t0);
tcg_temp_free(t0);
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- l0 = tcg_temp_local_new(TCG_TYPE_TL);
+ l0 = tcg_temp_local_new();
cris_alu(dc, CC_OP_BTST, l0, cpu_R[dc->op2], cpu_R[dc->op1], 4);
cris_update_cc_op(dc, CC_OP_FLAGS, 4);
t_gen_mov_preg_TN(dc, PR_CCS, l0);
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
tcg_temp_free(t0);
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZ);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
/* Size can only be qi or hi. */
t_gen_sext(t0, cpu_R[dc->op1], size);
cris_alu(dc, CC_OP_MOVE,
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZVC);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
/* Size can only be qi or hi. */
t_gen_zext(t0, cpu_R[dc->op1], size);
cris_alu(dc, CC_OP_ADD,
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZVC);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
/* Size can only be qi or hi. */
t_gen_sext(t0, cpu_R[dc->op1], size);
cris_alu(dc, CC_OP_ADD,
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZVC);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
/* Size can only be qi or hi. */
t_gen_zext(t0, cpu_R[dc->op1], size);
cris_alu(dc, CC_OP_SUB,
dc->op1, dc->op2));
cris_cc_mask(dc, CC_MASK_NZVC);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
/* Size can only be qi or hi. */
t_gen_sext(t0, cpu_R[dc->op1], size);
cris_alu(dc, CC_OP_SUB,
{
DIS(fprintf (logfile, "move $r%u, $s%u\n", dc->op1, dc->op2));
cris_cc_mask(dc, 0);
- tcg_gen_helper_0_2(helper_movl_sreg_reg,
- tcg_const_tl(dc->op2), tcg_const_tl(dc->op1));
+ gen_helper_movl_sreg_reg(tcg_const_tl(dc->op2), tcg_const_tl(dc->op1));
return 2;
}
static unsigned int dec_move_sr(DisasContext *dc)
{
DIS(fprintf (logfile, "move $s%u, $r%u\n", dc->op2, dc->op1));
cris_cc_mask(dc, 0);
- tcg_gen_helper_0_2(helper_movl_reg_sreg,
- tcg_const_tl(dc->op1), tcg_const_tl(dc->op2));
+ gen_helper_movl_reg_sreg(tcg_const_tl(dc->op1), tcg_const_tl(dc->op2));
return 2;
}
DIS(fprintf (logfile, "move $r%u, $p%u\n", dc->op1, dc->op2));
cris_cc_mask(dc, 0);
- t[0] = tcg_temp_new(TCG_TYPE_TL);
+ t[0] = tcg_temp_new();
if (dc->op2 == PR_CCS) {
cris_evaluate_flags(dc);
t_gen_mov_TN_reg(t[0], dc->op1);
if (dc->tb_flags & U_FLAG) {
- t[1] = tcg_temp_new(TCG_TYPE_TL);
+ t[1] = tcg_temp_new();
/* User space is not allowed to touch all flags. */
tcg_gen_andi_tl(t[0], t[0], 0x39f);
tcg_gen_andi_tl(t[1], cpu_PR[PR_CCS], ~0x39f);
if (dc->op2 == PR_CCS)
cris_evaluate_flags(dc);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
t_gen_mov_TN_preg(t0, dc->op2);
cris_alu(dc, CC_OP_MOVE,
cpu_R[dc->op1], cpu_R[dc->op1], t0, preg_sizes[dc->op2]);
else {
TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
insn_len = dec_prep_move_m(dc, 0, memsize, t0);
cris_cc_mask(dc, CC_MASK_NZ);
cris_alu(dc, CC_OP_MOVE,
static inline void cris_alu_m_alloc_temps(TCGv *t)
{
- t[0] = tcg_temp_new(TCG_TYPE_TL);
- t[1] = tcg_temp_new(TCG_TYPE_TL);
+ t[0] = tcg_temp_new();
+ t[1] = tcg_temp_new();
}
static inline void cris_alu_m_free_temps(TCGv *t)
dc->op1, dc->postinc ? "+]" : "]",
dc->op2));
- l[0] = tcg_temp_local_new(TCG_TYPE_TL);
- l[1] = tcg_temp_local_new(TCG_TYPE_TL);
+ l[0] = tcg_temp_local_new();
+ l[1] = tcg_temp_local_new();
insn_len = dec_prep_alu_m(dc, 0, memsize, l[0], l[1]);
cris_cc_mask(dc, CC_MASK_NZ);
cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], l[0], l[1], 4);
/* prepare store. Address in T0, value in T1. */
if (dc->op2 == PR_CCS)
cris_evaluate_flags(dc);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
t_gen_mov_TN_preg(t0, dc->op2);
cris_flush_cc_state(dc);
gen_store(dc, cpu_R[dc->op1], t0, memsize);
static unsigned int dec_movem_mr(DisasContext *dc)
{
- TCGv tmp[16];
+ TCGv_i64 tmp[16];
+ TCGv tmp32;
TCGv addr;
int i;
int nr = dc->op2 + 1;
DIS(fprintf (logfile, "movem [$r%u%s, $r%u\n", dc->op1,
dc->postinc ? "+]" : "]", dc->op2));
- addr = tcg_temp_new(TCG_TYPE_TL);
+ addr = tcg_temp_new();
/* There are probably better ways of doing this. */
cris_flush_cc_state(dc);
for (i = 0; i < (nr >> 1); i++) {
- tmp[i] = tcg_temp_new(TCG_TYPE_I64);
+ tmp[i] = tcg_temp_new_i64();
tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
- gen_load(dc, tmp[i], addr, 8, 0);
+ gen_load64(dc, tmp[i], addr);
}
if (nr & 1) {
- tmp[i] = tcg_temp_new(TCG_TYPE_I32);
+ tmp32 = tcg_temp_new_i32();
tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
- gen_load(dc, tmp[i], addr, 4, 0);
+ gen_load(dc, tmp32, addr, 4, 0);
}
tcg_temp_free(addr);
tcg_gen_trunc_i64_i32(cpu_R[i * 2], tmp[i]);
tcg_gen_shri_i64(tmp[i], tmp[i], 32);
tcg_gen_trunc_i64_i32(cpu_R[i * 2 + 1], tmp[i]);
- tcg_temp_free(tmp[i]);
+ tcg_temp_free_i64(tmp[i]);
}
if (nr & 1) {
- tcg_gen_mov_tl(cpu_R[dc->op2], tmp[i]);
- tcg_temp_free(tmp[i]);
+ tcg_gen_mov_tl(cpu_R[dc->op2], tmp32);
+ tcg_temp_free(tmp32);
}
/* write back the updated pointer value. */
cris_flush_cc_state(dc);
- tmp = tcg_temp_new(TCG_TYPE_TL);
- addr = tcg_temp_new(TCG_TYPE_TL);
+ tmp = tcg_temp_new();
+ addr = tcg_temp_new();
tcg_gen_movi_tl(tmp, 4);
tcg_gen_mov_tl(addr, cpu_R[dc->op1]);
for (i = 0; i <= dc->op2; i++) {
/* rfe. */
DIS(fprintf(logfile, "rfe\n"));
cris_evaluate_flags(dc);
- tcg_gen_helper_0_0(helper_rfe);
+ gen_helper_rfe();
dc->is_jmp = DISAS_UPDATE;
break;
case 5:
/* rfn. */
DIS(fprintf(logfile, "rfn\n"));
cris_evaluate_flags(dc);
- tcg_gen_helper_0_0(helper_rfn);
+ gen_helper_rfn();
dc->is_jmp = DISAS_UPDATE;
break;
case 6:
tcg_initialized = 1;
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
- cc_x = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
+ cc_x = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_x), "cc_x");
- cc_src = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cc_src = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_src), "cc_src");
- cc_dest = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cc_dest = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_dest),
"cc_dest");
- cc_result = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cc_result = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_result),
"cc_result");
- cc_op = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cc_op = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_op), "cc_op");
- cc_size = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cc_size = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_size),
"cc_size");
- cc_mask = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cc_mask = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_mask),
"cc_mask");
- env_pc = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ env_pc = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, pc),
"pc");
- env_btarget = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ env_btarget = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, btarget),
"btarget");
- env_btaken = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ env_btaken = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, btaken),
"btaken");
for (i = 0; i < 16; i++) {
- cpu_R[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_R[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, regs[i]),
regnames[i]);
}
for (i = 0; i < 16; i++) {
- cpu_PR[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_PR[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, pregs[i]),
pregnames[i]);
}
- TCG_HELPER(helper_raise_exception);
- TCG_HELPER(helper_dump);
-
- TCG_HELPER(helper_tlb_flush_pid);
- TCG_HELPER(helper_movl_sreg_reg);
- TCG_HELPER(helper_movl_reg_sreg);
- TCG_HELPER(helper_rfe);
- TCG_HELPER(helper_rfn);
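+/* Second pass: including helper.h with GEN_HELPER set to 2 emits the
+ runtime registration for every declared helper, replacing the
+ hand-written TCG_HELPER() list removed here. */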
+#define GEN_HELPER 2
+#include "helper.h"
- TCG_HELPER(helper_evaluate_flags_muls);
- TCG_HELPER(helper_evaluate_flags_mulu);
- TCG_HELPER(helper_evaluate_flags_mcp);
- TCG_HELPER(helper_evaluate_flags_alu_4);
- TCG_HELPER(helper_evaluate_flags_move_4);
- TCG_HELPER(helper_evaluate_flags_move_2);
- TCG_HELPER(helper_evaluate_flags);
- TCG_HELPER(helper_top_evaluate_flags);
return env;
}
int (*compute_c)(void); /* return the C flag */
} CCTable;
-extern CCTable cc_table[];
-
#if defined(CONFIG_USER_ONLY)
static inline void cpu_clone_regs(CPUState *env, target_ulong newsp)
{
static inline uint32_t compute_eflags(void)
{
- return env->eflags | cc_table[CC_OP].compute_all() | (DF & DF_MASK);
+ return env->eflags | helper_cc_compute_all(CC_OP) | (DF & DF_MASK);
}
/* NOTE: CC_OP must be modified manually to CC_OP_EFLAGS */
#include "svm.h"
#include "qemu-common.h"
#include "kvm.h"
+#include "helper.h"
//#define DEBUG_MMU
-#ifndef DEF_HELPER
-#define DEF_HELPER(ret, name, params) ret name params;
-#endif
+#include "def-helper.h"
+
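+/* TCG_CALL_PURE marks the flag-computation helpers as free of side
+ effects (they only read the CC_* / eflags state), so TCG is allowed
+ to optimize calls to them, e.g. drop a call whose result is unused. */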
+DEF_HELPER_FLAGS_1(cc_compute_all, TCG_CALL_PURE, i32, int)
+DEF_HELPER_FLAGS_1(cc_compute_c, TCG_CALL_PURE, i32, int)
-DEF_HELPER(void, helper_lock, (void))
-DEF_HELPER(void, helper_unlock, (void))
-DEF_HELPER(void, helper_write_eflags, (target_ulong t0, uint32_t update_mask))
-DEF_HELPER(target_ulong, helper_read_eflags, (void))
-DEF_HELPER(void, helper_divb_AL, (target_ulong t0))
-DEF_HELPER(void, helper_idivb_AL, (target_ulong t0))
-DEF_HELPER(void, helper_divw_AX, (target_ulong t0))
-DEF_HELPER(void, helper_idivw_AX, (target_ulong t0))
-DEF_HELPER(void, helper_divl_EAX, (target_ulong t0))
-DEF_HELPER(void, helper_idivl_EAX, (target_ulong t0))
+DEF_HELPER_0(lock, void)
+DEF_HELPER_0(unlock, void)
+DEF_HELPER_2(write_eflags, void, tl, i32)
+DEF_HELPER_0(read_eflags, tl)
+DEF_HELPER_1(divb_AL, void, tl)
+DEF_HELPER_1(idivb_AL, void, tl)
+DEF_HELPER_1(divw_AX, void, tl)
+DEF_HELPER_1(idivw_AX, void, tl)
+DEF_HELPER_1(divl_EAX, void, tl)
+DEF_HELPER_1(idivl_EAX, void, tl)
#ifdef TARGET_X86_64
-DEF_HELPER(void, helper_mulq_EAX_T0, (target_ulong t0))
-DEF_HELPER(void, helper_imulq_EAX_T0, (target_ulong t0))
-DEF_HELPER(target_ulong, helper_imulq_T0_T1, (target_ulong t0, target_ulong t1))
-DEF_HELPER(void, helper_divq_EAX, (target_ulong t0))
-DEF_HELPER(void, helper_idivq_EAX, (target_ulong t0))
+DEF_HELPER_1(mulq_EAX_T0, void, tl)
+DEF_HELPER_1(imulq_EAX_T0, void, tl)
+DEF_HELPER_2(imulq_T0_T1, tl, tl, tl)
+DEF_HELPER_1(divq_EAX, void, tl)
+DEF_HELPER_1(idivq_EAX, void, tl)
#endif
-DEF_HELPER(void, helper_aam, (int base))
-DEF_HELPER(void, helper_aad, (int base))
-DEF_HELPER(void, helper_aaa, (void))
-DEF_HELPER(void, helper_aas, (void))
-DEF_HELPER(void, helper_daa, (void))
-DEF_HELPER(void, helper_das, (void))
+DEF_HELPER_1(aam, void, int)
+DEF_HELPER_1(aad, void, int)
+DEF_HELPER_0(aaa, void)
+DEF_HELPER_0(aas, void)
+DEF_HELPER_0(daa, void)
+DEF_HELPER_0(das, void)
-DEF_HELPER(target_ulong, helper_lsl, (target_ulong selector1))
-DEF_HELPER(target_ulong, helper_lar, (target_ulong selector1))
-DEF_HELPER(void, helper_verr, (target_ulong selector1))
-DEF_HELPER(void, helper_verw, (target_ulong selector1))
-DEF_HELPER(void, helper_lldt, (int selector))
-DEF_HELPER(void, helper_ltr, (int selector))
-DEF_HELPER(void, helper_load_seg, (int seg_reg, int selector))
-DEF_HELPER(void, helper_ljmp_protected, (int new_cs, target_ulong new_eip,
- int next_eip_addend))
-DEF_HELPER(void, helper_lcall_real, (int new_cs, target_ulong new_eip1,
- int shift, int next_eip))
-DEF_HELPER(void, helper_lcall_protected, (int new_cs, target_ulong new_eip,
- int shift, int next_eip_addend))
-DEF_HELPER(void, helper_iret_real, (int shift))
-DEF_HELPER(void, helper_iret_protected, (int shift, int next_eip))
-DEF_HELPER(void, helper_lret_protected, (int shift, int addend))
-DEF_HELPER(target_ulong, helper_read_crN, (int reg))
-DEF_HELPER(void, helper_write_crN, (int reg, target_ulong t0))
-DEF_HELPER(void, helper_lmsw, (target_ulong t0))
-DEF_HELPER(void, helper_clts, (void))
-DEF_HELPER(void, helper_movl_drN_T0, (int reg, target_ulong t0))
-DEF_HELPER(void, helper_invlpg, (target_ulong addr))
+DEF_HELPER_1(lsl, tl, tl)
+DEF_HELPER_1(lar, tl, tl)
+DEF_HELPER_1(verr, void, tl)
+DEF_HELPER_1(verw, void, tl)
+DEF_HELPER_1(lldt, void, int)
+DEF_HELPER_1(ltr, void, int)
+DEF_HELPER_2(load_seg, void, int, int)
+DEF_HELPER_3(ljmp_protected, void, int, tl, int)
+DEF_HELPER_4(lcall_real, void, int, tl, int, int)
+DEF_HELPER_4(lcall_protected, void, int, tl, int, int)
+DEF_HELPER_1(iret_real, void, int)
+DEF_HELPER_2(iret_protected, void, int, int)
+DEF_HELPER_2(lret_protected, void, int, int)
+DEF_HELPER_1(read_crN, tl, int)
+DEF_HELPER_2(write_crN, void, int, tl)
+DEF_HELPER_1(lmsw, void, tl)
+DEF_HELPER_0(clts, void)
+DEF_HELPER_2(movl_drN_T0, void, int, tl)
+DEF_HELPER_1(invlpg, void, tl)
-DEF_HELPER(void, helper_enter_level, (int level, int data32, target_ulong t1))
+DEF_HELPER_3(enter_level, void, int, int, tl)
#ifdef TARGET_X86_64
-DEF_HELPER(void, helper_enter64_level, (int level, int data64, target_ulong t1))
+DEF_HELPER_3(enter64_level, void, int, int, tl)
#endif
-DEF_HELPER(void, helper_sysenter, (void))
-DEF_HELPER(void, helper_sysexit, (int dflag))
+DEF_HELPER_0(sysenter, void)
+DEF_HELPER_1(sysexit, void, int)
#ifdef TARGET_X86_64
-DEF_HELPER(void, helper_syscall, (int next_eip_addend))
-DEF_HELPER(void, helper_sysret, (int dflag))
+DEF_HELPER_1(syscall, void, int)
+DEF_HELPER_1(sysret, void, int)
#endif
-DEF_HELPER(void, helper_hlt, (int next_eip_addend))
-DEF_HELPER(void, helper_monitor, (target_ulong ptr))
-DEF_HELPER(void, helper_mwait, (int next_eip_addend))
-DEF_HELPER(void, helper_debug, (void))
-DEF_HELPER(void, helper_raise_interrupt, (int intno, int next_eip_addend))
-DEF_HELPER(void, helper_raise_exception, (int exception_index))
-DEF_HELPER(void, helper_cli, (void))
-DEF_HELPER(void, helper_sti, (void))
-DEF_HELPER(void, helper_set_inhibit_irq, (void))
-DEF_HELPER(void, helper_reset_inhibit_irq, (void))
-DEF_HELPER(void, helper_boundw, (target_ulong a0, int v))
-DEF_HELPER(void, helper_boundl, (target_ulong a0, int v))
-DEF_HELPER(void, helper_rsm, (void))
-DEF_HELPER(void, helper_into, (int next_eip_addend))
-DEF_HELPER(void, helper_cmpxchg8b, (target_ulong a0))
+DEF_HELPER_1(hlt, void, int)
+DEF_HELPER_1(monitor, void, tl)
+DEF_HELPER_1(mwait, void, int)
+DEF_HELPER_0(debug, void)
+DEF_HELPER_2(raise_interrupt, void, int, int)
+DEF_HELPER_1(raise_exception, void, int)
+DEF_HELPER_0(cli, void)
+DEF_HELPER_0(sti, void)
+DEF_HELPER_0(set_inhibit_irq, void)
+DEF_HELPER_0(reset_inhibit_irq, void)
+DEF_HELPER_2(boundw, void, tl, int)
+DEF_HELPER_2(boundl, void, tl, int)
+DEF_HELPER_0(rsm, void)
+DEF_HELPER_1(into, void, int)
+DEF_HELPER_1(cmpxchg8b, void, tl)
#ifdef TARGET_X86_64
-DEF_HELPER(void, helper_cmpxchg16b, (target_ulong a0))
+DEF_HELPER_1(cmpxchg16b, void, tl)
#endif
-DEF_HELPER(void, helper_single_step, (void))
-DEF_HELPER(void, helper_cpuid, (void))
-DEF_HELPER(void, helper_rdtsc, (void))
-DEF_HELPER(void, helper_rdpmc, (void))
-DEF_HELPER(void, helper_rdmsr, (void))
-DEF_HELPER(void, helper_wrmsr, (void))
+DEF_HELPER_0(single_step, void)
+DEF_HELPER_0(cpuid, void)
+DEF_HELPER_0(rdtsc, void)
+DEF_HELPER_0(rdpmc, void)
+DEF_HELPER_0(rdmsr, void)
+DEF_HELPER_0(wrmsr, void)
-DEF_HELPER(void, helper_check_iob, (uint32_t t0))
-DEF_HELPER(void, helper_check_iow, (uint32_t t0))
-DEF_HELPER(void, helper_check_iol, (uint32_t t0))
-DEF_HELPER(void, helper_outb, (uint32_t port, uint32_t data))
-DEF_HELPER(target_ulong, helper_inb, (uint32_t port))
-DEF_HELPER(void, helper_outw, (uint32_t port, uint32_t data))
-DEF_HELPER(target_ulong, helper_inw, (uint32_t port))
-DEF_HELPER(void, helper_outl, (uint32_t port, uint32_t data))
-DEF_HELPER(target_ulong, helper_inl, (uint32_t port))
+DEF_HELPER_1(check_iob, void, i32)
+DEF_HELPER_1(check_iow, void, i32)
+DEF_HELPER_1(check_iol, void, i32)
+DEF_HELPER_2(outb, void, i32, i32)
+DEF_HELPER_1(inb, tl, i32)
+DEF_HELPER_2(outw, void, i32, i32)
+DEF_HELPER_1(inw, tl, i32)
+DEF_HELPER_2(outl, void, i32, i32)
+DEF_HELPER_1(inl, tl, i32)
-DEF_HELPER(void, helper_svm_check_intercept_param, (uint32_t type, uint64_t param))
-DEF_HELPER(void, helper_vmexit, (uint32_t exit_code, uint64_t exit_info_1))
-DEF_HELPER(void, helper_svm_check_io, (uint32_t port, uint32_t param,
- uint32_t next_eip_addend))
-DEF_HELPER(void, helper_vmrun, (int aflag, int next_eip_addend))
-DEF_HELPER(void, helper_vmmcall, (void))
-DEF_HELPER(void, helper_vmload, (int aflag))
-DEF_HELPER(void, helper_vmsave, (int aflag))
-DEF_HELPER(void, helper_stgi, (void))
-DEF_HELPER(void, helper_clgi, (void))
-DEF_HELPER(void, helper_skinit, (void))
-DEF_HELPER(void, helper_invlpga, (int aflag))
+DEF_HELPER_2(svm_check_intercept_param, void, i32, i64)
+DEF_HELPER_2(vmexit, void, i32, i64)
+DEF_HELPER_3(svm_check_io, void, i32, i32, i32)
+DEF_HELPER_2(vmrun, void, int, int)
+DEF_HELPER_0(vmmcall, void)
+DEF_HELPER_1(vmload, void, int)
+DEF_HELPER_1(vmsave, void, int)
+DEF_HELPER_0(stgi, void)
+DEF_HELPER_0(clgi, void)
+DEF_HELPER_0(skinit, void)
+DEF_HELPER_1(invlpga, void, int)
/* x86 FPU */
-DEF_HELPER(void, helper_flds_FT0, (uint32_t val))
-DEF_HELPER(void, helper_fldl_FT0, (uint64_t val))
-DEF_HELPER(void, helper_fildl_FT0, (int32_t val))
-DEF_HELPER(void, helper_flds_ST0, (uint32_t val))
-DEF_HELPER(void, helper_fldl_ST0, (uint64_t val))
-DEF_HELPER(void, helper_fildl_ST0, (int32_t val))
-DEF_HELPER(void, helper_fildll_ST0, (int64_t val))
-DEF_HELPER(uint32_t, helper_fsts_ST0, (void))
-DEF_HELPER(uint64_t, helper_fstl_ST0, (void))
-DEF_HELPER(int32_t, helper_fist_ST0, (void))
-DEF_HELPER(int32_t, helper_fistl_ST0, (void))
-DEF_HELPER(int64_t, helper_fistll_ST0, (void))
-DEF_HELPER(int32_t, helper_fistt_ST0, (void))
-DEF_HELPER(int32_t, helper_fisttl_ST0, (void))
-DEF_HELPER(int64_t, helper_fisttll_ST0, (void))
-DEF_HELPER(void, helper_fldt_ST0, (target_ulong ptr))
-DEF_HELPER(void, helper_fstt_ST0, (target_ulong ptr))
-DEF_HELPER(void, helper_fpush, (void))
-DEF_HELPER(void, helper_fpop, (void))
-DEF_HELPER(void, helper_fdecstp, (void))
-DEF_HELPER(void, helper_fincstp, (void))
-DEF_HELPER(void, helper_ffree_STN, (int st_index))
-DEF_HELPER(void, helper_fmov_ST0_FT0, (void))
-DEF_HELPER(void, helper_fmov_FT0_STN, (int st_index))
-DEF_HELPER(void, helper_fmov_ST0_STN, (int st_index))
-DEF_HELPER(void, helper_fmov_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fxchg_ST0_STN, (int st_index))
-DEF_HELPER(void, helper_fcom_ST0_FT0, (void))
-DEF_HELPER(void, helper_fucom_ST0_FT0, (void))
-DEF_HELPER(void, helper_fcomi_ST0_FT0, (void))
-DEF_HELPER(void, helper_fucomi_ST0_FT0, (void))
-DEF_HELPER(void, helper_fadd_ST0_FT0, (void))
-DEF_HELPER(void, helper_fmul_ST0_FT0, (void))
-DEF_HELPER(void, helper_fsub_ST0_FT0, (void))
-DEF_HELPER(void, helper_fsubr_ST0_FT0, (void))
-DEF_HELPER(void, helper_fdiv_ST0_FT0, (void))
-DEF_HELPER(void, helper_fdivr_ST0_FT0, (void))
-DEF_HELPER(void, helper_fadd_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fmul_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fsub_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fsubr_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fdiv_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fdivr_STN_ST0, (int st_index))
-DEF_HELPER(void, helper_fchs_ST0, (void))
-DEF_HELPER(void, helper_fabs_ST0, (void))
-DEF_HELPER(void, helper_fxam_ST0, (void))
-DEF_HELPER(void, helper_fld1_ST0, (void))
-DEF_HELPER(void, helper_fldl2t_ST0, (void))
-DEF_HELPER(void, helper_fldl2e_ST0, (void))
-DEF_HELPER(void, helper_fldpi_ST0, (void))
-DEF_HELPER(void, helper_fldlg2_ST0, (void))
-DEF_HELPER(void, helper_fldln2_ST0, (void))
-DEF_HELPER(void, helper_fldz_ST0, (void))
-DEF_HELPER(void, helper_fldz_FT0, (void))
-DEF_HELPER(uint32_t, helper_fnstsw, (void))
-DEF_HELPER(uint32_t, helper_fnstcw, (void))
-DEF_HELPER(void, helper_fldcw, (uint32_t val))
-DEF_HELPER(void, helper_fclex, (void))
-DEF_HELPER(void, helper_fwait, (void))
-DEF_HELPER(void, helper_fninit, (void))
-DEF_HELPER(void, helper_fbld_ST0, (target_ulong ptr))
-DEF_HELPER(void, helper_fbst_ST0, (target_ulong ptr))
-DEF_HELPER(void, helper_f2xm1, (void))
-DEF_HELPER(void, helper_fyl2x, (void))
-DEF_HELPER(void, helper_fptan, (void))
-DEF_HELPER(void, helper_fpatan, (void))
-DEF_HELPER(void, helper_fxtract, (void))
-DEF_HELPER(void, helper_fprem1, (void))
-DEF_HELPER(void, helper_fprem, (void))
-DEF_HELPER(void, helper_fyl2xp1, (void))
-DEF_HELPER(void, helper_fsqrt, (void))
-DEF_HELPER(void, helper_fsincos, (void))
-DEF_HELPER(void, helper_frndint, (void))
-DEF_HELPER(void, helper_fscale, (void))
-DEF_HELPER(void, helper_fsin, (void))
-DEF_HELPER(void, helper_fcos, (void))
-DEF_HELPER(void, helper_fstenv, (target_ulong ptr, int data32))
-DEF_HELPER(void, helper_fldenv, (target_ulong ptr, int data32))
-DEF_HELPER(void, helper_fsave, (target_ulong ptr, int data32))
-DEF_HELPER(void, helper_frstor, (target_ulong ptr, int data32))
-DEF_HELPER(void, helper_fxsave, (target_ulong ptr, int data64))
-DEF_HELPER(void, helper_fxrstor, (target_ulong ptr, int data64))
-DEF_HELPER(target_ulong, helper_bsf, (target_ulong t0))
-DEF_HELPER(target_ulong, helper_bsr, (target_ulong t0))
+DEF_HELPER_1(flds_FT0, void, i32)
+DEF_HELPER_1(fldl_FT0, void, i64)
+DEF_HELPER_1(fildl_FT0, void, s32)
+DEF_HELPER_1(flds_ST0, void, i32)
+DEF_HELPER_1(fldl_ST0, void, i64)
+DEF_HELPER_1(fildl_ST0, void, s32)
+DEF_HELPER_1(fildll_ST0, void, s64)
+DEF_HELPER_0(fsts_ST0, i32)
+DEF_HELPER_0(fstl_ST0, i64)
+DEF_HELPER_0(fist_ST0, s32)
+DEF_HELPER_0(fistl_ST0, s32)
+DEF_HELPER_0(fistll_ST0, s64)
+DEF_HELPER_0(fistt_ST0, s32)
+DEF_HELPER_0(fisttl_ST0, s32)
+DEF_HELPER_0(fisttll_ST0, s64)
+DEF_HELPER_1(fldt_ST0, void, tl)
+DEF_HELPER_1(fstt_ST0, void, tl)
+DEF_HELPER_0(fpush, void)
+DEF_HELPER_0(fpop, void)
+DEF_HELPER_0(fdecstp, void)
+DEF_HELPER_0(fincstp, void)
+DEF_HELPER_1(ffree_STN, void, int)
+DEF_HELPER_0(fmov_ST0_FT0, void)
+DEF_HELPER_1(fmov_FT0_STN, void, int)
+DEF_HELPER_1(fmov_ST0_STN, void, int)
+DEF_HELPER_1(fmov_STN_ST0, void, int)
+DEF_HELPER_1(fxchg_ST0_STN, void, int)
+DEF_HELPER_0(fcom_ST0_FT0, void)
+DEF_HELPER_0(fucom_ST0_FT0, void)
+DEF_HELPER_0(fcomi_ST0_FT0, void)
+DEF_HELPER_0(fucomi_ST0_FT0, void)
+DEF_HELPER_0(fadd_ST0_FT0, void)
+DEF_HELPER_0(fmul_ST0_FT0, void)
+DEF_HELPER_0(fsub_ST0_FT0, void)
+DEF_HELPER_0(fsubr_ST0_FT0, void)
+DEF_HELPER_0(fdiv_ST0_FT0, void)
+DEF_HELPER_0(fdivr_ST0_FT0, void)
+DEF_HELPER_1(fadd_STN_ST0, void, int)
+DEF_HELPER_1(fmul_STN_ST0, void, int)
+DEF_HELPER_1(fsub_STN_ST0, void, int)
+DEF_HELPER_1(fsubr_STN_ST0, void, int)
+DEF_HELPER_1(fdiv_STN_ST0, void, int)
+DEF_HELPER_1(fdivr_STN_ST0, void, int)
+DEF_HELPER_0(fchs_ST0, void)
+DEF_HELPER_0(fabs_ST0, void)
+DEF_HELPER_0(fxam_ST0, void)
+DEF_HELPER_0(fld1_ST0, void)
+DEF_HELPER_0(fldl2t_ST0, void)
+DEF_HELPER_0(fldl2e_ST0, void)
+DEF_HELPER_0(fldpi_ST0, void)
+DEF_HELPER_0(fldlg2_ST0, void)
+DEF_HELPER_0(fldln2_ST0, void)
+DEF_HELPER_0(fldz_ST0, void)
+DEF_HELPER_0(fldz_FT0, void)
+DEF_HELPER_0(fnstsw, i32)
+DEF_HELPER_0(fnstcw, i32)
+DEF_HELPER_1(fldcw, void, i32)
+DEF_HELPER_0(fclex, void)
+DEF_HELPER_0(fwait, void)
+DEF_HELPER_0(fninit, void)
+DEF_HELPER_1(fbld_ST0, void, tl)
+DEF_HELPER_1(fbst_ST0, void, tl)
+DEF_HELPER_0(f2xm1, void)
+DEF_HELPER_0(fyl2x, void)
+DEF_HELPER_0(fptan, void)
+DEF_HELPER_0(fpatan, void)
+DEF_HELPER_0(fxtract, void)
+DEF_HELPER_0(fprem1, void)
+DEF_HELPER_0(fprem, void)
+DEF_HELPER_0(fyl2xp1, void)
+DEF_HELPER_0(fsqrt, void)
+DEF_HELPER_0(fsincos, void)
+DEF_HELPER_0(frndint, void)
+DEF_HELPER_0(fscale, void)
+DEF_HELPER_0(fsin, void)
+DEF_HELPER_0(fcos, void)
+DEF_HELPER_2(fstenv, void, tl, int)
+DEF_HELPER_2(fldenv, void, tl, int)
+DEF_HELPER_2(fsave, void, tl, int)
+DEF_HELPER_2(frstor, void, tl, int)
+DEF_HELPER_2(fxsave, void, tl, int)
+DEF_HELPER_2(fxrstor, void, tl, int)
+DEF_HELPER_1(bsf, tl, tl)
+DEF_HELPER_1(bsr, tl, tl)
/* MMX/SSE */
-DEF_HELPER(void, helper_enter_mmx, (void))
-DEF_HELPER(void, helper_emms, (void))
-DEF_HELPER(void, helper_movq, (uint64_t *d, uint64_t *s))
+DEF_HELPER_0(enter_mmx, void)
+DEF_HELPER_0(emms, void)
+DEF_HELPER_2(movq, void, ptr, ptr)
#define SHIFT 0
#include "ops_sse_header.h"
#define SHIFT 1
#include "ops_sse_header.h"
-DEF_HELPER(target_ulong, helper_rclb, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, helper_rclw, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, helper_rcll, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, helper_rcrb, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, helper_rcrw, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, helper_rcrl, (target_ulong t0, target_ulong t1))
+DEF_HELPER_2(rclb, tl, tl, tl)
+DEF_HELPER_2(rclw, tl, tl, tl)
+DEF_HELPER_2(rcll, tl, tl, tl)
+DEF_HELPER_2(rcrb, tl, tl, tl)
+DEF_HELPER_2(rcrw, tl, tl, tl)
+DEF_HELPER_2(rcrl, tl, tl, tl)
#ifdef TARGET_X86_64
-DEF_HELPER(target_ulong, helper_rclq, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, helper_rcrq, (target_ulong t0, target_ulong t1))
+DEF_HELPER_2(rclq, tl, tl, tl)
+DEF_HELPER_2(rcrq, tl, tl, tl)
#endif
-#undef DEF_HELPER
+#include "def-helper.h"
count = rclb_table[count];
#endif
if (count) {
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
t0 &= DATA_MASK;
src = t0;
res = (t0 << count) | ((target_ulong)(eflags & CC_C) << (count - 1));
count = rclb_table[count];
#endif
if (count) {
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
t0 &= DATA_MASK;
src = t0;
res = (t0 >> count) | ((target_ulong)(eflags & CC_C) << (DATA_BITS - count));
target_ulong helper_read_eflags(void)
{
uint32_t eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
eflags |= (DF & DF_MASK);
eflags |= env->eflags & ~(VM_MASK | RF_MASK);
return eflags;
int al, ah, af;
int eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
af = eflags & CC_A;
al = EAX & 0xff;
ah = (EAX >> 8) & 0xff;
int al, ah, af;
int eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
af = eflags & CC_A;
al = EAX & 0xff;
ah = (EAX >> 8) & 0xff;
int al, af, cf;
int eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
cf = eflags & CC_C;
af = eflags & CC_A;
al = EAX & 0xff;
int al, al1, af, cf;
int eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
cf = eflags & CC_C;
af = eflags & CC_A;
al = EAX & 0xff;
void helper_into(int next_eip_addend)
{
int eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
if (eflags & CC_O) {
raise_interrupt(EXCP04_INTO, 1, 0, next_eip_addend);
}
uint64_t d;
int eflags;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
d = ldq(a0);
if (d == (((uint64_t)EDX << 32) | (uint32_t)EAX)) {
stq(a0, ((uint64_t)ECX << 32) | (uint32_t)EBX);
if ((a0 & 0xf) != 0)
raise_exception(EXCP0D_GPF);
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
d0 = ldq(a0);
d1 = ldq(a0 + 8);
if (d0 == EAX && d1 == EDX) {
int rpl, dpl, cpl, type;
selector = selector1 & 0xffff;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
if (load_segment(&e1, &e2, selector) != 0)
goto fail;
rpl = selector & 3;
int rpl, dpl, cpl, type;
selector = selector1 & 0xffff;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
if ((selector & 0xfffc) == 0)
goto fail;
if (load_segment(&e1, &e2, selector) != 0)
int rpl, dpl, cpl;
selector = selector1 & 0xffff;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
if ((selector & 0xfffc) == 0)
goto fail;
if (load_segment(&e1, &e2, selector) != 0)
int rpl, dpl, cpl;
selector = selector1 & 0xffff;
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
if ((selector & 0xfffc) == 0)
goto fail;
if (load_segment(&e1, &e2, selector) != 0)
int ret;
ret = floatx_compare(ST0, FT0, &env->fp_status);
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
eflags = (eflags & ~(CC_Z | CC_P | CC_C)) | fcomi_ccval[ret + 1];
CC_SRC = eflags;
FORCE_RET();
int ret;
ret = floatx_compare_quiet(ST0, FT0, &env->fp_status);
- eflags = cc_table[CC_OP].compute_all();
+ eflags = helper_cc_compute_all(CC_OP);
eflags = (eflags & ~(CC_Z | CC_P | CC_C)) | fcomi_ccval[ret + 1];
CC_SRC = eflags;
FORCE_RET();
}
/* XXX: suppress */
-void helper_movq(uint64_t *d, uint64_t *s)
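+/* movq is now declared with generic ptr arguments (see
+ DEF_HELPER_2(movq, void, ptr, ptr) earlier), so the 64-bit access is
+ done through casts inside the helper. */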
+void helper_movq(void *d, void *s)
{
- *d = *s;
+ *(uint64_t *)d = *(uint64_t *)s;
}
#define SHIFT 0
return CC_SRC & CC_C;
}
-CCTable cc_table[CC_OP_NB] = {
- [CC_OP_DYNAMIC] = { /* should never happen */ },
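+/* The cc_table[] of {compute_all, compute_c} function pointers is
+ replaced by two plain switch-based helpers; the same functions now
+ serve both TCG-generated code and the direct
+ helper_cc_compute_all(CC_OP) call sites seen above. */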
+uint32_t helper_cc_compute_all(int op)
+{
+ switch (op) {
+ default: /* should never happen */ return 0;
- [CC_OP_EFLAGS] = { compute_all_eflags, compute_c_eflags },
+ case CC_OP_EFLAGS: return compute_all_eflags();
- [CC_OP_MULB] = { compute_all_mulb, compute_c_mull },
- [CC_OP_MULW] = { compute_all_mulw, compute_c_mull },
- [CC_OP_MULL] = { compute_all_mull, compute_c_mull },
+ case CC_OP_MULB: return compute_all_mulb();
+ case CC_OP_MULW: return compute_all_mulw();
+ case CC_OP_MULL: return compute_all_mull();
- [CC_OP_ADDB] = { compute_all_addb, compute_c_addb },
- [CC_OP_ADDW] = { compute_all_addw, compute_c_addw },
- [CC_OP_ADDL] = { compute_all_addl, compute_c_addl },
+ case CC_OP_ADDB: return compute_all_addb();
+ case CC_OP_ADDW: return compute_all_addw();
+ case CC_OP_ADDL: return compute_all_addl();
- [CC_OP_ADCB] = { compute_all_adcb, compute_c_adcb },
- [CC_OP_ADCW] = { compute_all_adcw, compute_c_adcw },
- [CC_OP_ADCL] = { compute_all_adcl, compute_c_adcl },
+ case CC_OP_ADCB: return compute_all_adcb();
+ case CC_OP_ADCW: return compute_all_adcw();
+ case CC_OP_ADCL: return compute_all_adcl();
- [CC_OP_SUBB] = { compute_all_subb, compute_c_subb },
- [CC_OP_SUBW] = { compute_all_subw, compute_c_subw },
- [CC_OP_SUBL] = { compute_all_subl, compute_c_subl },
+ case CC_OP_SUBB: return compute_all_subb();
+ case CC_OP_SUBW: return compute_all_subw();
+ case CC_OP_SUBL: return compute_all_subl();
- [CC_OP_SBBB] = { compute_all_sbbb, compute_c_sbbb },
- [CC_OP_SBBW] = { compute_all_sbbw, compute_c_sbbw },
- [CC_OP_SBBL] = { compute_all_sbbl, compute_c_sbbl },
+ case CC_OP_SBBB: return compute_all_sbbb();
+ case CC_OP_SBBW: return compute_all_sbbw();
+ case CC_OP_SBBL: return compute_all_sbbl();
- [CC_OP_LOGICB] = { compute_all_logicb, compute_c_logicb },
- [CC_OP_LOGICW] = { compute_all_logicw, compute_c_logicw },
- [CC_OP_LOGICL] = { compute_all_logicl, compute_c_logicl },
+ case CC_OP_LOGICB: return compute_all_logicb();
+ case CC_OP_LOGICW: return compute_all_logicw();
+ case CC_OP_LOGICL: return compute_all_logicl();
- [CC_OP_INCB] = { compute_all_incb, compute_c_incl },
- [CC_OP_INCW] = { compute_all_incw, compute_c_incl },
- [CC_OP_INCL] = { compute_all_incl, compute_c_incl },
+ case CC_OP_INCB: return compute_all_incb();
+ case CC_OP_INCW: return compute_all_incw();
+ case CC_OP_INCL: return compute_all_incl();
- [CC_OP_DECB] = { compute_all_decb, compute_c_incl },
- [CC_OP_DECW] = { compute_all_decw, compute_c_incl },
- [CC_OP_DECL] = { compute_all_decl, compute_c_incl },
+ case CC_OP_DECB: return compute_all_decb();
+ case CC_OP_DECW: return compute_all_decw();
+ case CC_OP_DECL: return compute_all_decl();
- [CC_OP_SHLB] = { compute_all_shlb, compute_c_shlb },
- [CC_OP_SHLW] = { compute_all_shlw, compute_c_shlw },
- [CC_OP_SHLL] = { compute_all_shll, compute_c_shll },
+ case CC_OP_SHLB: return compute_all_shlb();
+ case CC_OP_SHLW: return compute_all_shlw();
+ case CC_OP_SHLL: return compute_all_shll();
- [CC_OP_SARB] = { compute_all_sarb, compute_c_sarl },
- [CC_OP_SARW] = { compute_all_sarw, compute_c_sarl },
- [CC_OP_SARL] = { compute_all_sarl, compute_c_sarl },
+ case CC_OP_SARB: return compute_all_sarb();
+ case CC_OP_SARW: return compute_all_sarw();
+ case CC_OP_SARL: return compute_all_sarl();
#ifdef TARGET_X86_64
- [CC_OP_MULQ] = { compute_all_mulq, compute_c_mull },
+ case CC_OP_MULQ: return compute_all_mulq();
- [CC_OP_ADDQ] = { compute_all_addq, compute_c_addq },
+ case CC_OP_ADDQ: return compute_all_addq();
- [CC_OP_ADCQ] = { compute_all_adcq, compute_c_adcq },
+ case CC_OP_ADCQ: return compute_all_adcq();
- [CC_OP_SUBQ] = { compute_all_subq, compute_c_subq },
+ case CC_OP_SUBQ: return compute_all_subq();
- [CC_OP_SBBQ] = { compute_all_sbbq, compute_c_sbbq },
+ case CC_OP_SBBQ: return compute_all_sbbq();
- [CC_OP_LOGICQ] = { compute_all_logicq, compute_c_logicq },
+ case CC_OP_LOGICQ: return compute_all_logicq();
- [CC_OP_INCQ] = { compute_all_incq, compute_c_incl },
+ case CC_OP_INCQ: return compute_all_incq();
- [CC_OP_DECQ] = { compute_all_decq, compute_c_incl },
+ case CC_OP_DECQ: return compute_all_decq();
- [CC_OP_SHLQ] = { compute_all_shlq, compute_c_shlq },
+ case CC_OP_SHLQ: return compute_all_shlq();
- [CC_OP_SARQ] = { compute_all_sarq, compute_c_sarl },
+ case CC_OP_SARQ: return compute_all_sarq();
#endif
-};
+ }
+}
+
+uint32_t helper_cc_compute_c(int op)
+{
+ switch (op) {
+ default: /* should never happen */ return 0;
+
+ case CC_OP_EFLAGS: return compute_c_eflags();
+
+ case CC_OP_MULB: return compute_c_mull();
+ case CC_OP_MULW: return compute_c_mull();
+ case CC_OP_MULL: return compute_c_mull();
+
+ case CC_OP_ADDB: return compute_c_addb();
+ case CC_OP_ADDW: return compute_c_addw();
+ case CC_OP_ADDL: return compute_c_addl();
+
+ case CC_OP_ADCB: return compute_c_adcb();
+ case CC_OP_ADCW: return compute_c_adcw();
+ case CC_OP_ADCL: return compute_c_adcl();
+
+ case CC_OP_SUBB: return compute_c_subb();
+ case CC_OP_SUBW: return compute_c_subw();
+ case CC_OP_SUBL: return compute_c_subl();
+
+ case CC_OP_SBBB: return compute_c_sbbb();
+ case CC_OP_SBBW: return compute_c_sbbw();
+ case CC_OP_SBBL: return compute_c_sbbl();
+
+ case CC_OP_LOGICB: return compute_c_logicb();
+ case CC_OP_LOGICW: return compute_c_logicw();
+ case CC_OP_LOGICL: return compute_c_logicl();
+
+ case CC_OP_INCB: return compute_c_incl();
+ case CC_OP_INCW: return compute_c_incl();
+ case CC_OP_INCL: return compute_c_incl();
+
+ case CC_OP_DECB: return compute_c_incl();
+ case CC_OP_DECW: return compute_c_incl();
+ case CC_OP_DECL: return compute_c_incl();
+ case CC_OP_SHLB: return compute_c_shlb();
+ case CC_OP_SHLW: return compute_c_shlw();
+ case CC_OP_SHLL: return compute_c_shll();
+
+ case CC_OP_SARB: return compute_c_sarl();
+ case CC_OP_SARW: return compute_c_sarl();
+ case CC_OP_SARL: return compute_c_sarl();
+
+#ifdef TARGET_X86_64
+ case CC_OP_MULQ: return compute_c_mull();
+
+ case CC_OP_ADDQ: return compute_c_addq();
+
+ case CC_OP_ADCQ: return compute_c_adcq();
+
+ case CC_OP_SUBQ: return compute_c_subq();
+
+ case CC_OP_SBBQ: return compute_c_sbbq();
+
+ case CC_OP_LOGICQ: return compute_c_logicq();
+
+ case CC_OP_INCQ: return compute_c_incl();
+
+ case CC_OP_DECQ: return compute_c_incl();
+
+ case CC_OP_SHLQ: return compute_c_shlq();
+
+ case CC_OP_SARQ: return compute_c_sarl();
+#endif
+ }
+}
#define SUFFIX _xmm
#endif
-DEF_HELPER(void, glue(helper_psrlw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psraw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psllw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psrld, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psrad, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pslld, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psrlq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psllq, SUFFIX), (Reg *d, Reg *s))
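+/* Teach def-helper.h about the vector register types: Reg, XMMReg and
+ MMXReg are passed as host pointers, so alias them to ptr and give
+ their C types for the declarations below. */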
+#define dh_alias_Reg ptr
+#define dh_alias_XMMReg ptr
+#define dh_alias_MMXReg ptr
+#define dh_ctype_Reg Reg *
+#define dh_ctype_XMMReg XMMReg *
+#define dh_ctype_MMXReg MMXReg *
+
+DEF_HELPER_2(glue(psrlw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psraw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psllw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psrld, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psrad, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pslld, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psrlq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psllq, SUFFIX), void, Reg, Reg)
#if SHIFT == 1
-DEF_HELPER(void, glue(helper_psrldq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pslldq, SUFFIX), (Reg *d, Reg *s))
+DEF_HELPER_2(glue(psrldq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pslldq, SUFFIX), void, Reg, Reg)
#endif
#define SSE_HELPER_B(name, F)\
- DEF_HELPER(void, glue(name, SUFFIX), (Reg *d, Reg *s))
+ DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg)
#define SSE_HELPER_W(name, F)\
- DEF_HELPER(void, glue(name, SUFFIX), (Reg *d, Reg *s))
+ DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg)
#define SSE_HELPER_L(name, F)\
- DEF_HELPER(void, glue(name, SUFFIX), (Reg *d, Reg *s))
+ DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg)
#define SSE_HELPER_Q(name, F)\
- DEF_HELPER(void, glue(name, SUFFIX), (Reg *d, Reg *s))
+ DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg)
-SSE_HELPER_B(helper_paddb, FADD)
-SSE_HELPER_W(helper_paddw, FADD)
-SSE_HELPER_L(helper_paddl, FADD)
-SSE_HELPER_Q(helper_paddq, FADD)
+SSE_HELPER_B(paddb, FADD)
+SSE_HELPER_W(paddw, FADD)
+SSE_HELPER_L(paddl, FADD)
+SSE_HELPER_Q(paddq, FADD)
-SSE_HELPER_B(helper_psubb, FSUB)
-SSE_HELPER_W(helper_psubw, FSUB)
-SSE_HELPER_L(helper_psubl, FSUB)
-SSE_HELPER_Q(helper_psubq, FSUB)
+SSE_HELPER_B(psubb, FSUB)
+SSE_HELPER_W(psubw, FSUB)
+SSE_HELPER_L(psubl, FSUB)
+SSE_HELPER_Q(psubq, FSUB)
-SSE_HELPER_B(helper_paddusb, FADDUB)
-SSE_HELPER_B(helper_paddsb, FADDSB)
-SSE_HELPER_B(helper_psubusb, FSUBUB)
-SSE_HELPER_B(helper_psubsb, FSUBSB)
+SSE_HELPER_B(paddusb, FADDUB)
+SSE_HELPER_B(paddsb, FADDSB)
+SSE_HELPER_B(psubusb, FSUBUB)
+SSE_HELPER_B(psubsb, FSUBSB)
-SSE_HELPER_W(helper_paddusw, FADDUW)
-SSE_HELPER_W(helper_paddsw, FADDSW)
-SSE_HELPER_W(helper_psubusw, FSUBUW)
-SSE_HELPER_W(helper_psubsw, FSUBSW)
+SSE_HELPER_W(paddusw, FADDUW)
+SSE_HELPER_W(paddsw, FADDSW)
+SSE_HELPER_W(psubusw, FSUBUW)
+SSE_HELPER_W(psubsw, FSUBSW)
-SSE_HELPER_B(helper_pminub, FMINUB)
-SSE_HELPER_B(helper_pmaxub, FMAXUB)
+SSE_HELPER_B(pminub, FMINUB)
+SSE_HELPER_B(pmaxub, FMAXUB)
-SSE_HELPER_W(helper_pminsw, FMINSW)
-SSE_HELPER_W(helper_pmaxsw, FMAXSW)
+SSE_HELPER_W(pminsw, FMINSW)
+SSE_HELPER_W(pmaxsw, FMAXSW)
-SSE_HELPER_Q(helper_pand, FAND)
-SSE_HELPER_Q(helper_pandn, FANDN)
-SSE_HELPER_Q(helper_por, FOR)
-SSE_HELPER_Q(helper_pxor, FXOR)
+SSE_HELPER_Q(pand, FAND)
+SSE_HELPER_Q(pandn, FANDN)
+SSE_HELPER_Q(por, FOR)
+SSE_HELPER_Q(pxor, FXOR)
-SSE_HELPER_B(helper_pcmpgtb, FCMPGTB)
-SSE_HELPER_W(helper_pcmpgtw, FCMPGTW)
-SSE_HELPER_L(helper_pcmpgtl, FCMPGTL)
+SSE_HELPER_B(pcmpgtb, FCMPGTB)
+SSE_HELPER_W(pcmpgtw, FCMPGTW)
+SSE_HELPER_L(pcmpgtl, FCMPGTL)
-SSE_HELPER_B(helper_pcmpeqb, FCMPEQ)
-SSE_HELPER_W(helper_pcmpeqw, FCMPEQ)
-SSE_HELPER_L(helper_pcmpeql, FCMPEQ)
+SSE_HELPER_B(pcmpeqb, FCMPEQ)
+SSE_HELPER_W(pcmpeqw, FCMPEQ)
+SSE_HELPER_L(pcmpeql, FCMPEQ)
-SSE_HELPER_W(helper_pmullw, FMULLW)
+SSE_HELPER_W(pmullw, FMULLW)
#if SHIFT == 0
-SSE_HELPER_W(helper_pmulhrw, FMULHRW)
+SSE_HELPER_W(pmulhrw, FMULHRW)
#endif
-SSE_HELPER_W(helper_pmulhuw, FMULHUW)
-SSE_HELPER_W(helper_pmulhw, FMULHW)
+SSE_HELPER_W(pmulhuw, FMULHUW)
+SSE_HELPER_W(pmulhw, FMULHW)
-SSE_HELPER_B(helper_pavgb, FAVG)
-SSE_HELPER_W(helper_pavgw, FAVG)
+SSE_HELPER_B(pavgb, FAVG)
+SSE_HELPER_W(pavgw, FAVG)
-DEF_HELPER(void, glue(helper_pmuludq, SUFFIX) , (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmaddwd, SUFFIX) , (Reg *d, Reg *s))
+DEF_HELPER_2(glue(pmuludq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmaddwd, SUFFIX), void, Reg, Reg)
-DEF_HELPER(void, glue(helper_psadbw, SUFFIX) , (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_maskmov, SUFFIX) , (Reg *d, Reg *s, target_ulong a0))
-DEF_HELPER(void, glue(helper_movl_mm_T0, SUFFIX) , (Reg *d, uint32_t val))
+DEF_HELPER_2(glue(psadbw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_3(glue(maskmov, SUFFIX), void, Reg, Reg, tl)
+DEF_HELPER_2(glue(movl_mm_T0, SUFFIX), void, Reg, i32)
#ifdef TARGET_X86_64
-DEF_HELPER(void, glue(helper_movq_mm_T0, SUFFIX) , (Reg *d, uint64_t val))
+DEF_HELPER_2(glue(movq_mm_T0, SUFFIX), void, Reg, i64)
#endif
#if SHIFT == 0
-DEF_HELPER(void, glue(helper_pshufw, SUFFIX) , (Reg *d, Reg *s, int order))
+DEF_HELPER_3(glue(pshufw, SUFFIX), void, Reg, Reg, int)
#else
-DEF_HELPER(void, helper_shufps, (Reg *d, Reg *s, int order))
-DEF_HELPER(void, helper_shufpd, (Reg *d, Reg *s, int order))
-DEF_HELPER(void, glue(helper_pshufd, SUFFIX) , (Reg *d, Reg *s, int order))
-DEF_HELPER(void, glue(helper_pshuflw, SUFFIX) , (Reg *d, Reg *s, int order))
-DEF_HELPER(void, glue(helper_pshufhw, SUFFIX) , (Reg *d, Reg *s, int order))
+DEF_HELPER_3(shufps, void, Reg, Reg, int)
+DEF_HELPER_3(shufpd, void, Reg, Reg, int)
+DEF_HELPER_3(glue(pshufd, SUFFIX), void, Reg, Reg, int)
+DEF_HELPER_3(glue(pshuflw, SUFFIX), void, Reg, Reg, int)
+DEF_HELPER_3(glue(pshufhw, SUFFIX), void, Reg, Reg, int)
#endif
#if SHIFT == 1
/* XXX: not accurate */
#define SSE_HELPER_S(name, F)\
- DEF_HELPER(void, helper_ ## name ## ps , (Reg *d, Reg *s)) \
- DEF_HELPER(void, helper_ ## name ## ss , (Reg *d, Reg *s)) \
- DEF_HELPER(void, helper_ ## name ## pd , (Reg *d, Reg *s)) \
- DEF_HELPER(void, helper_ ## name ## sd , (Reg *d, Reg *s))
+ DEF_HELPER_2(name ## ps , void, Reg, Reg) \
+ DEF_HELPER_2(name ## ss , void, Reg, Reg) \
+ DEF_HELPER_2(name ## pd , void, Reg, Reg) \
+ DEF_HELPER_2(name ## sd , void, Reg, Reg)
SSE_HELPER_S(add, FPU_ADD)
SSE_HELPER_S(sub, FPU_SUB)
SSE_HELPER_S(sqrt, FPU_SQRT)
-DEF_HELPER(void, helper_cvtps2pd, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_cvtpd2ps, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_cvtss2sd, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_cvtsd2ss, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_cvtdq2ps, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_cvtdq2pd, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_cvtpi2ps, (XMMReg *d, MMXReg *s))
-DEF_HELPER(void, helper_cvtpi2pd, (XMMReg *d, MMXReg *s))
-DEF_HELPER(void, helper_cvtsi2ss, (XMMReg *d, uint32_t val))
-DEF_HELPER(void, helper_cvtsi2sd, (XMMReg *d, uint32_t val))
+DEF_HELPER_2(cvtps2pd, void, Reg, Reg)
+DEF_HELPER_2(cvtpd2ps, void, Reg, Reg)
+DEF_HELPER_2(cvtss2sd, void, Reg, Reg)
+DEF_HELPER_2(cvtsd2ss, void, Reg, Reg)
+DEF_HELPER_2(cvtdq2ps, void, Reg, Reg)
+DEF_HELPER_2(cvtdq2pd, void, Reg, Reg)
+DEF_HELPER_2(cvtpi2ps, void, XMMReg, MMXReg)
+DEF_HELPER_2(cvtpi2pd, void, XMMReg, MMXReg)
+DEF_HELPER_2(cvtsi2ss, void, XMMReg, i32)
+DEF_HELPER_2(cvtsi2sd, void, XMMReg, i32)
#ifdef TARGET_X86_64
-DEF_HELPER(void, helper_cvtsq2ss, (XMMReg *d, uint64_t val))
-DEF_HELPER(void, helper_cvtsq2sd, (XMMReg *d, uint64_t val))
+DEF_HELPER_2(cvtsq2ss, void, XMMReg, i64)
+DEF_HELPER_2(cvtsq2sd, void, XMMReg, i64)
#endif
-DEF_HELPER(void, helper_cvtps2dq, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_cvtpd2dq, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_cvtps2pi, (MMXReg *d, XMMReg *s))
-DEF_HELPER(void, helper_cvtpd2pi, (MMXReg *d, XMMReg *s))
-DEF_HELPER(int32_t, helper_cvtss2si, (XMMReg *s))
-DEF_HELPER(int32_t, helper_cvtsd2si, (XMMReg *s))
+DEF_HELPER_2(cvtps2dq, void, XMMReg, XMMReg)
+DEF_HELPER_2(cvtpd2dq, void, XMMReg, XMMReg)
+DEF_HELPER_2(cvtps2pi, void, MMXReg, XMMReg)
+DEF_HELPER_2(cvtpd2pi, void, MMXReg, XMMReg)
+DEF_HELPER_1(cvtss2si, s32, XMMReg)
+DEF_HELPER_1(cvtsd2si, s32, XMMReg)
#ifdef TARGET_X86_64
-DEF_HELPER(int64_t, helper_cvtss2sq, (XMMReg *s))
-DEF_HELPER(int64_t, helper_cvtsd2sq, (XMMReg *s))
+DEF_HELPER_1(cvtss2sq, s64, XMMReg)
+DEF_HELPER_1(cvtsd2sq, s64, XMMReg)
#endif
-DEF_HELPER(void, helper_cvttps2dq, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_cvttpd2dq, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_cvttps2pi, (MMXReg *d, XMMReg *s))
-DEF_HELPER(void, helper_cvttpd2pi, (MMXReg *d, XMMReg *s))
-DEF_HELPER(int32_t, helper_cvttss2si, (XMMReg *s))
-DEF_HELPER(int32_t, helper_cvttsd2si, (XMMReg *s))
+DEF_HELPER_2(cvttps2dq, void, XMMReg, XMMReg)
+DEF_HELPER_2(cvttpd2dq, void, XMMReg, XMMReg)
+DEF_HELPER_2(cvttps2pi, void, MMXReg, XMMReg)
+DEF_HELPER_2(cvttpd2pi, void, MMXReg, XMMReg)
+DEF_HELPER_1(cvttss2si, s32, XMMReg)
+DEF_HELPER_1(cvttsd2si, s32, XMMReg)
#ifdef TARGET_X86_64
-DEF_HELPER(int64_t, helper_cvttss2sq, (XMMReg *s))
-DEF_HELPER(int64_t, helper_cvttsd2sq, (XMMReg *s))
+DEF_HELPER_1(cvttss2sq, s64, XMMReg)
+DEF_HELPER_1(cvttsd2sq, s64, XMMReg)
#endif
-DEF_HELPER(void, helper_rsqrtps, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_rsqrtss, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_rcpps, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_rcpss, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_haddps, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_haddpd, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_hsubps, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_hsubpd, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_addsubps, (XMMReg *d, XMMReg *s))
-DEF_HELPER(void, helper_addsubpd, (XMMReg *d, XMMReg *s))
+DEF_HELPER_2(rsqrtps, void, XMMReg, XMMReg)
+DEF_HELPER_2(rsqrtss, void, XMMReg, XMMReg)
+DEF_HELPER_2(rcpps, void, XMMReg, XMMReg)
+DEF_HELPER_2(rcpss, void, XMMReg, XMMReg)
+DEF_HELPER_2(haddps, void, XMMReg, XMMReg)
+DEF_HELPER_2(haddpd, void, XMMReg, XMMReg)
+DEF_HELPER_2(hsubps, void, XMMReg, XMMReg)
+DEF_HELPER_2(hsubpd, void, XMMReg, XMMReg)
+DEF_HELPER_2(addsubps, void, XMMReg, XMMReg)
+DEF_HELPER_2(addsubpd, void, XMMReg, XMMReg)
#define SSE_HELPER_CMP(name, F)\
- DEF_HELPER(void, helper_ ## name ## ps , (Reg *d, Reg *s)) \
- DEF_HELPER(void, helper_ ## name ## ss , (Reg *d, Reg *s)) \
- DEF_HELPER(void, helper_ ## name ## pd , (Reg *d, Reg *s)) \
- DEF_HELPER(void, helper_ ## name ## sd , (Reg *d, Reg *s))
+ DEF_HELPER_2( name ## ps , void, Reg, Reg) \
+ DEF_HELPER_2( name ## ss , void, Reg, Reg) \
+ DEF_HELPER_2( name ## pd , void, Reg, Reg) \
+ DEF_HELPER_2( name ## sd , void, Reg, Reg)
SSE_HELPER_CMP(cmpeq, FPU_CMPEQ)
SSE_HELPER_CMP(cmplt, FPU_CMPLT)
SSE_HELPER_CMP(cmpnle, FPU_CMPNLE)
SSE_HELPER_CMP(cmpord, FPU_CMPORD)
-DEF_HELPER(void, helper_ucomiss, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_comiss, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_ucomisd, (Reg *d, Reg *s))
-DEF_HELPER(void, helper_comisd, (Reg *d, Reg *s))
-DEF_HELPER(uint32_t, helper_movmskps, (Reg *s))
-DEF_HELPER(uint32_t, helper_movmskpd, (Reg *s))
+DEF_HELPER_2(ucomiss, void, Reg, Reg)
+DEF_HELPER_2(comiss, void, Reg, Reg)
+DEF_HELPER_2(ucomisd, void, Reg, Reg)
+DEF_HELPER_2(comisd, void, Reg, Reg)
+DEF_HELPER_1(movmskps, i32, Reg)
+DEF_HELPER_1(movmskpd, i32, Reg)
#endif
-DEF_HELPER(uint32_t, glue(helper_pmovmskb, SUFFIX), (Reg *s))
-DEF_HELPER(void, glue(helper_packsswb, SUFFIX) , (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_packuswb, SUFFIX) , (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_packssdw, SUFFIX) , (Reg *d, Reg *s))
+DEF_HELPER_1(glue(pmovmskb, SUFFIX), i32, Reg)
+DEF_HELPER_2(glue(packsswb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(packuswb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(packssdw, SUFFIX), void, Reg, Reg)
#define UNPCK_OP(base_name, base) \
- DEF_HELPER(void, glue(helper_punpck ## base_name ## bw, SUFFIX) , (Reg *d, Reg *s)) \
- DEF_HELPER(void, glue(helper_punpck ## base_name ## wd, SUFFIX) , (Reg *d, Reg *s)) \
- DEF_HELPER(void, glue(helper_punpck ## base_name ## dq, SUFFIX) , (Reg *d, Reg *s))
+ DEF_HELPER_2(glue(punpck ## base_name ## bw, SUFFIX) , void, Reg, Reg) \
+ DEF_HELPER_2(glue(punpck ## base_name ## wd, SUFFIX) , void, Reg, Reg) \
+ DEF_HELPER_2(glue(punpck ## base_name ## dq, SUFFIX) , void, Reg, Reg)
UNPCK_OP(l, 0)
UNPCK_OP(h, 1)
#if SHIFT == 1
-DEF_HELPER(void, glue(helper_punpcklqdq, SUFFIX) , (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_punpckhqdq, SUFFIX) , (Reg *d, Reg *s))
+DEF_HELPER_2(glue(punpcklqdq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(punpckhqdq, SUFFIX), void, Reg, Reg)
#endif
/* 3DNow! float ops */
#if SHIFT == 0
-DEF_HELPER(void, helper_pi2fd, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pi2fw, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pf2id, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pf2iw, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfacc, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfadd, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfcmpeq, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfcmpge, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfcmpgt, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfmax, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfmin, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfmul, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfnacc, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfpnacc, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfrcp, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfrsqrt, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfsub, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pfsubr, (MMXReg *d, MMXReg *s))
-DEF_HELPER(void, helper_pswapd, (MMXReg *d, MMXReg *s))
+DEF_HELPER_2(pi2fd, void, MMXReg, MMXReg)
+DEF_HELPER_2(pi2fw, void, MMXReg, MMXReg)
+DEF_HELPER_2(pf2id, void, MMXReg, MMXReg)
+DEF_HELPER_2(pf2iw, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfacc, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfadd, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfcmpeq, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfcmpge, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfcmpgt, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfmax, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfmin, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfmul, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfnacc, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfpnacc, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfrcp, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfrsqrt, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfsub, void, MMXReg, MMXReg)
+DEF_HELPER_2(pfsubr, void, MMXReg, MMXReg)
+DEF_HELPER_2(pswapd, void, MMXReg, MMXReg)
#endif
/* SSSE3 op helpers */
-DEF_HELPER(void, glue(helper_phaddw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_phaddd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_phaddsw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_phsubw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_phsubd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_phsubsw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pabsb, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pabsw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pabsd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmaddubsw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmulhrsw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pshufb, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psignb, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psignw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_psignd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_palignr, SUFFIX), (Reg *d, Reg *s, int32_t shift))
+DEF_HELPER_2(glue(phaddw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(phaddd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(phaddsw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(phsubw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(phsubd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(phsubsw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pabsb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pabsw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pabsd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmaddubsw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmulhrsw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pshufb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psignb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psignw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(psignd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_3(glue(palignr, SUFFIX), void, Reg, Reg, s32)
/* SSE4.1 op helpers */
#if SHIFT == 1
-DEF_HELPER(void, glue(helper_pblendvb, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_blendvps, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_blendvpd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_ptest, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovsxbw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovsxbd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovsxbq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovsxwd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovsxwq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovsxdq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovzxbw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovzxbd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovzxbq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovzxwd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovzxwq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmovzxdq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmuldq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pcmpeqq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_packusdw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pminsb, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pminsd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pminuw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pminud, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmaxsb, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmaxsd, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmaxuw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmaxud, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pmulld, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_phminposuw, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_roundps, SUFFIX), (Reg *d, Reg *s, uint32_t mode))
-DEF_HELPER(void, glue(helper_roundpd, SUFFIX), (Reg *d, Reg *s, uint32_t mode))
-DEF_HELPER(void, glue(helper_roundss, SUFFIX), (Reg *d, Reg *s, uint32_t mode))
-DEF_HELPER(void, glue(helper_roundsd, SUFFIX), (Reg *d, Reg *s, uint32_t mode))
-DEF_HELPER(void, glue(helper_blendps, SUFFIX), (Reg *d, Reg *s, uint32_t imm))
-DEF_HELPER(void, glue(helper_blendpd, SUFFIX), (Reg *d, Reg *s, uint32_t imm))
-DEF_HELPER(void, glue(helper_pblendw, SUFFIX), (Reg *d, Reg *s, uint32_t imm))
-DEF_HELPER(void, glue(helper_dpps, SUFFIX), (Reg *d, Reg *s, uint32_t mask))
-DEF_HELPER(void, glue(helper_dppd, SUFFIX), (Reg *d, Reg *s, uint32_t mask))
-DEF_HELPER(void, glue(helper_mpsadbw, SUFFIX), (Reg *d, Reg *s, uint32_t off))
+DEF_HELPER_2(glue(pblendvb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(blendvps, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(blendvpd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(ptest, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovsxbw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovsxbd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovsxbq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovsxwd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovsxwq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovsxdq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovzxbw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovzxbd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovzxbq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovzxwd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovzxwq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmovzxdq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmuldq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pcmpeqq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(packusdw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pminsb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pminsd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pminuw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pminud, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmaxsb, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmaxsd, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmaxuw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmaxud, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(pmulld, SUFFIX), void, Reg, Reg)
+DEF_HELPER_2(glue(phminposuw, SUFFIX), void, Reg, Reg)
+DEF_HELPER_3(glue(roundps, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(roundpd, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(roundss, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(roundsd, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(blendps, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(blendpd, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(pblendw, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(dpps, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(dppd, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(mpsadbw, SUFFIX), void, Reg, Reg, i32)
#endif
/* SSE4.2 op helpers */
#if SHIFT == 1
-DEF_HELPER(void, glue(helper_pcmpgtq, SUFFIX), (Reg *d, Reg *s))
-DEF_HELPER(void, glue(helper_pcmpestri, SUFFIX), (Reg *d, Reg *s, uint32_t ctl))
-DEF_HELPER(void, glue(helper_pcmpestrm, SUFFIX), (Reg *d, Reg *s, uint32_t ctl))
-DEF_HELPER(void, glue(helper_pcmpistri, SUFFIX), (Reg *d, Reg *s, uint32_t ctl))
-DEF_HELPER(void, glue(helper_pcmpistrm, SUFFIX), (Reg *d, Reg *s, uint32_t ctl))
-DEF_HELPER(target_ulong, helper_crc32,
- (uint32_t crc1, target_ulong msg, uint32_t len))
-DEF_HELPER(target_ulong, helper_popcnt, (target_ulong n, uint32_t type))
+DEF_HELPER_2(glue(pcmpgtq, SUFFIX), void, Reg, Reg)
+DEF_HELPER_3(glue(pcmpestri, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(pcmpestrm, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(pcmpistri, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(glue(pcmpistrm, SUFFIX), void, Reg, Reg, i32)
+DEF_HELPER_3(crc32, tl, i32, tl, i32)
+DEF_HELPER_2(popcnt, tl, tl, i32)
#endif
#undef SHIFT
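The header above is written to be instantiated once per register width; judging from the #if SHIFT == 0 / SHIFT == 1 blocks and the _mmx/_xmm helper names used in the translator hunks below, the target's helper.h presumably pulls it in twice, along these lines (file name and placement are an assumption, not shown in this excerpt):

    #define SHIFT 0              /* MMX variants: SUFFIX _mmx, Reg is MMXReg */
    #include "ops_sse_header.h"
    #define SHIFT 1              /* SSE variants: SUFFIX _xmm, Reg is XMMReg */
    #include "ops_sse_header.h"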
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
-#include "helper.h"
#include "tcg-op.h"
+#include "helper.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
//#define MACRO_TEST 1
/* global register indexes */
-static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
+static TCGv_ptr cpu_env;
+static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
+static TCGv_i32 cpu_cc_op;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
-static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
+static TCGv cpu_tmp0, cpu_tmp4;
+static TCGv_ptr cpu_ptr0, cpu_ptr1;
+static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32;
+static TCGv_i64 cpu_tmp1_i64;
static TCGv cpu_tmp5, cpu_tmp6;
#include "gen-icount.h"
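The declarations above now distinguish TCG value types (TCGv_ptr, TCGv_i32, TCGv_i64) instead of declaring everything as a single TCGv kind. A minimal sketch of what that implies for temporaries, using the allocators that appear in the hunks below (the _i32 form is assumed by analogy and does not occur in this excerpt):

    TCGv_i64 q = tcg_temp_new_i64();     /* replaces tcg_temp_new(TCG_TYPE_I64) */
    TCGv_i32 w = tcg_temp_new_i32();     /* assumed 32-bit counterpart */
    TCGv     t = tcg_temp_local_new();   /* target-long sized, as used for t0/t1/t2/a0 below */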
break;
default:
case 3:
+ /* Should never happen on 32-bit targets. */
+#ifdef TARGET_X86_64
tcg_gen_qemu_ld64(t0, a0, mem_index);
+#endif
break;
}
}
break;
default:
case 3:
+ /* Should never happen on 32-bit targets. */
+#ifdef TARGET_X86_64
tcg_gen_qemu_st64(t0, a0, mem_index);
+#endif
break;
}
}
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
-static void *helper_in_func[3] = {
- helper_inb,
- helper_inw,
- helper_inl,
-};
+static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n)
+{
+ switch (ot) {
+ case 0: gen_helper_inb(v, n); break;
+ case 1: gen_helper_inw(v, n); break;
+ case 2: gen_helper_inl(v, n); break;
+ }
-static void *helper_out_func[3] = {
- helper_outb,
- helper_outw,
- helper_outl,
-};
+}
-static void *gen_check_io_func[3] = {
- helper_check_iob,
- helper_check_iow,
- helper_check_iol,
-};
+static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n)
+{
+ switch (ot) {
+ case 0: gen_helper_outb(v, n); break;
+ case 1: gen_helper_outw(v, n); break;
+ case 2: gen_helper_outl(v, n); break;
+ }
+}
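Presumably because the generic tcg_gen_helper_N_M entry points are retired in favour of per-helper typed wrappers, the void * function-pointer tables for the size-dependent I/O helpers are replaced by the small dispatch functions above; callers now pass the operand size instead of indexing a table, e.g. (as in the in/out hunks later in this patch):

    gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);   /* was tcg_gen_helper_1_1(helper_in_func[ot], ...) */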
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
uint32_t svm_flags)
gen_jmp_im(cur_eip);
state_saved = 1;
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(gen_check_io_func[ot],
- cpu_tmp2_i32);
+ switch (ot) {
+ case 0: gen_helper_check_iob(cpu_tmp2_i32); break;
+ case 1: gen_helper_check_iow(cpu_tmp2_i32); break;
+ case 2: gen_helper_check_iol(cpu_tmp2_i32); break;
+ }
}
if(s->flags & HF_SVMI_MASK) {
if (!state_saved) {
svm_flags |= (1 << (4 + ot));
next_eip = s->pc - s->cs_base;
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_3(helper_svm_check_io,
- cpu_tmp2_i32,
- tcg_const_i32(svm_flags),
- tcg_const_i32(next_eip - cur_eip));
+ gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags),
+ tcg_const_i32(next_eip - cur_eip));
}
}
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
-#if TCG_TARGET_REG_BITS == 32
- tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
- tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
- (long)cc_table + offsetof(CCTable, compute_c));
- tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
- tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
- 1, &cpu_tmp2_i32, 0, NULL);
-#else
- tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
- tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
- tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
- (long)cc_table + offsetof(CCTable, compute_c));
- tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
- tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
- 1, &cpu_tmp2_i32, 0, NULL);
-#endif
+ gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op);
tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
-#if TCG_TARGET_REG_BITS == 32
- tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
- tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
- (long)cc_table + offsetof(CCTable, compute_all));
- tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
- tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
- 1, &cpu_tmp2_i32, 0, NULL);
-#else
- tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
- tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
- tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
- (long)cc_table + offsetof(CCTable, compute_all));
- tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
- tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
- 1, &cpu_tmp2_i32, 0, NULL);
-#endif
+ gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op);
tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
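The hand-built indirect call through cc_table, with separate 32-bit and 64-bit host paths, collapses into a single typed helper call above. The matching declarations are presumably along these lines in the target's helper.h, which is not part of this excerpt (TCG_CALL_PURE mirrors the flag the removed code passed to tcg_gen_call):

    DEF_HELPER_FLAGS_1(cc_compute_all, TCG_CALL_PURE, i32, int)
    DEF_HELPER_FLAGS_1(cc_compute_c, TCG_CALL_PURE, i32, int)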
gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
- tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
+ gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(ot + s->mem_index);
gen_op_movl_T0_Dshift(ot);
gen_op_add_reg_T0(s->aflag, R_EDI);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
- tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
+ gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
gen_op_movl_T0_Dshift(ot);
gen_op_add_reg_T0(s->aflag, R_ESI);
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
-static void *helper_fp_arith_ST0_FT0[8] = {
- helper_fadd_ST0_FT0,
- helper_fmul_ST0_FT0,
- helper_fcom_ST0_FT0,
- helper_fcom_ST0_FT0,
- helper_fsub_ST0_FT0,
- helper_fsubr_ST0_FT0,
- helper_fdiv_ST0_FT0,
- helper_fdivr_ST0_FT0,
-};
+static void gen_helper_fp_arith_ST0_FT0(int op)
+{
+ switch (op) {
+ case 0: gen_helper_fadd_ST0_FT0(); break;
+ case 1: gen_helper_fmul_ST0_FT0(); break;
+ case 2: gen_helper_fcom_ST0_FT0(); break;
+ case 3: gen_helper_fcom_ST0_FT0(); break;
+ case 4: gen_helper_fsub_ST0_FT0(); break;
+ case 5: gen_helper_fsubr_ST0_FT0(); break;
+ case 6: gen_helper_fdiv_ST0_FT0(); break;
+ case 7: gen_helper_fdivr_ST0_FT0(); break;
+ }
+}
/* NOTE the exception in "r" op ordering */
-static void *helper_fp_arith_STN_ST0[8] = {
- helper_fadd_STN_ST0,
- helper_fmul_STN_ST0,
- NULL,
- NULL,
- helper_fsubr_STN_ST0,
- helper_fsub_STN_ST0,
- helper_fdivr_STN_ST0,
- helper_fdiv_STN_ST0,
-};
+static void gen_helper_fp_arith_STN_ST0(int op, int opreg)
+{
+ TCGv_i32 tmp = tcg_const_i32(opreg);
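+ /* cases 2 and 3 had NULL entries in the old helper_fp_arith_STN_ST0
+    table, so they are deliberately left unhandled here */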
+ switch (op) {
+ case 0: gen_helper_fadd_STN_ST0(tmp); break;
+ case 1: gen_helper_fmul_STN_ST0(tmp); break;
+ case 4: gen_helper_fsubr_STN_ST0(tmp); break;
+ case 5: gen_helper_fsub_STN_ST0(tmp); break;
+ case 6: gen_helper_fdivr_STN_ST0(tmp); break;
+ case 7: gen_helper_fdiv_STN_ST0(tmp); break;
+ }
+}
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
gen_op_set_cc_op(s->cc_op);
/* XXX: inefficient */
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
tcg_gen_mov_tl(t0, cpu_T[0]);
tcg_gen_mov_tl(t1, cpu_T3);
TCGv t0, t1, t2, a0;
/* XXX: inefficient, but we must use local temps */
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
- t2 = tcg_temp_local_new(TCG_TYPE_TL);
- a0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
+ t2 = tcg_temp_local_new();
+ a0 = tcg_temp_local_new();
if (ot == OT_QUAD)
mask = 0x3f;
tcg_temp_free(a0);
}
-static void *helper_rotc[8] = {
- helper_rclb,
- helper_rclw,
- helper_rcll,
- X86_64_ONLY(helper_rclq),
- helper_rcrb,
- helper_rcrw,
- helper_rcrl,
- X86_64_ONLY(helper_rcrq),
-};
-
/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
int is_right)
else
gen_op_mov_TN_reg(ot, 0, op1);
- tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
- cpu_T[0], cpu_T[0], cpu_T[1]);
+ if (is_right) {
+ switch (ot) {
+ case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+ case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+ case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+#ifdef TARGET_X86_64
+ case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+#endif
+ }
+ } else {
+ switch (ot) {
+ case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+ case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+ case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+#ifdef TARGET_X86_64
+ case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break;
+#endif
+ }
+ }
/* store */
if (op1 == OR_TMP0)
gen_op_st_T0_A0(ot + s->mem_index);
target_ulong mask;
TCGv t0, t1, t2, a0;
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
- t2 = tcg_temp_local_new(TCG_TYPE_TL);
- a0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
+ t2 = tcg_temp_local_new();
+ a0 = tcg_temp_local_new();
if (ot == OT_QUAD)
mask = 0x3f;
if (is_fast_jcc_case(s, b)) {
/* nominal case: we use a jump */
/* XXX: make it faster by adding new instructions in TCG */
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_movi_tl(t0, 0);
l1 = gen_new_label();
gen_jcc1(s, s->cc_op, b ^ 1, l1);
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(cur_eip);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
+ gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32);
/* abort translation because the addseg value may change or
because ss32 may change. For R_SS, translation must always
stop as a special handling must be done to disable hardware
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_svm_check_intercept_param,
- tcg_const_i32(type), tcg_const_i64(param));
+ gen_helper_svm_check_intercept_param(tcg_const_i32(type),
+ tcg_const_i64(param));
}
static inline void
gen_op_st_T0_A0(ot + s->mem_index);
if (level) {
/* XXX: must save state */
- tcg_gen_helper_0_3(helper_enter64_level,
- tcg_const_i32(level),
- tcg_const_i32((ot == OT_QUAD)),
- cpu_T[1]);
+ gen_helper_enter64_level(tcg_const_i32(level),
+ tcg_const_i32((ot == OT_QUAD)),
+ cpu_T[1]);
}
gen_op_mov_reg_T1(ot, R_EBP);
tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
gen_op_st_T0_A0(ot + s->mem_index);
if (level) {
/* XXX: must save state */
- tcg_gen_helper_0_3(helper_enter_level,
- tcg_const_i32(level),
- tcg_const_i32(s->dflag),
- cpu_T[1]);
+ gen_helper_enter_level(tcg_const_i32(level),
+ tcg_const_i32(s->dflag),
+ cpu_T[1]);
}
gen_op_mov_reg_T1(ot, R_EBP);
tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(cur_eip);
- tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
+ gen_helper_raise_exception(tcg_const_i32(trapno));
s->is_jmp = 3;
}
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(cur_eip);
- tcg_gen_helper_0_2(helper_raise_interrupt,
- tcg_const_i32(intno),
- tcg_const_i32(next_eip - cur_eip));
+ gen_helper_raise_interrupt(tcg_const_i32(intno),
+ tcg_const_i32(next_eip - cur_eip));
s->is_jmp = 3;
}
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(cur_eip);
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
s->is_jmp = 3;
}
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
- tcg_gen_helper_0_0(helper_reset_inhibit_irq);
+ gen_helper_reset_inhibit_irq();
}
if (s->singlestep_enabled) {
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
} else if (s->tf) {
- tcg_gen_helper_0_0(helper_single_step);
+ gen_helper_single_step();
} else {
tcg_gen_exit_tb(0);
}
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)
-#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
-#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
- helper_ ## x ## ss, helper_ ## x ## sd, }
+#define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm }
+#define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \
+ gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, }
static void *sse_op_table1[256][4] = {
/* 3DNow! extensions */
[0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
[0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
[0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
- [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
- [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
+ [0x14] = { gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm },
+ [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm },
[0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
[0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
[0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
[0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
[0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
- [0x2e] = { helper_ucomiss, helper_ucomisd },
- [0x2f] = { helper_comiss, helper_comisd },
+ [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd },
+ [0x2f] = { gen_helper_comiss, gen_helper_comisd },
[0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
[0x51] = SSE_FOP(sqrt),
- [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
- [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
- [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
- [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
- [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
- [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
+ [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL },
+ [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL },
+ [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */
+ [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */
+ [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */
+ [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */
[0x58] = SSE_FOP(add),
[0x59] = SSE_FOP(mul),
- [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
- helper_cvtss2sd, helper_cvtsd2ss },
- [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
+ [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps,
+ gen_helper_cvtss2sd, gen_helper_cvtsd2ss },
+ [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq },
[0x5c] = SSE_FOP(sub),
[0x5d] = SSE_FOP(min),
[0x5e] = SSE_FOP(div),
[0x5f] = SSE_FOP(max),
[0xc2] = SSE_FOP(cmpeq),
- [0xc6] = { helper_shufps, helper_shufpd },
+ [0xc6] = { gen_helper_shufps, gen_helper_shufpd },
[0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
[0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
[0x69] = MMX_OP2(punpckhwd),
[0x6a] = MMX_OP2(punpckhdq),
[0x6b] = MMX_OP2(packssdw),
- [0x6c] = { NULL, helper_punpcklqdq_xmm },
- [0x6d] = { NULL, helper_punpckhqdq_xmm },
+ [0x6c] = { NULL, gen_helper_punpcklqdq_xmm },
+ [0x6d] = { NULL, gen_helper_punpckhqdq_xmm },
[0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
[0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
- [0x70] = { helper_pshufw_mmx,
- helper_pshufd_xmm,
- helper_pshufhw_xmm,
- helper_pshuflw_xmm },
+ [0x70] = { gen_helper_pshufw_mmx,
+ gen_helper_pshufd_xmm,
+ gen_helper_pshufhw_xmm,
+ gen_helper_pshuflw_xmm },
[0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
[0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
[0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
[0x75] = MMX_OP2(pcmpeqw),
[0x76] = MMX_OP2(pcmpeql),
[0x77] = { SSE_DUMMY }, /* emms */
- [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
- [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
+ [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps },
+ [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps },
[0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
[0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
[0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
[0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
- [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
+ [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps },
[0xd1] = MMX_OP2(psrlw),
[0xd2] = MMX_OP2(psrld),
[0xd3] = MMX_OP2(psrlq),
[0xe3] = MMX_OP2(pavgw),
[0xe4] = MMX_OP2(pmulhuw),
[0xe5] = MMX_OP2(pmulhw),
- [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
+ [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq },
[0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntq */
[0xe8] = MMX_OP2(psubsb),
[0xe9] = MMX_OP2(psubsw),
[8 + 4] = MMX_OP2(psrad),
[8 + 6] = MMX_OP2(pslld),
[16 + 2] = MMX_OP2(psrlq),
- [16 + 3] = { NULL, helper_psrldq_xmm },
+ [16 + 3] = { NULL, gen_helper_psrldq_xmm },
[16 + 6] = MMX_OP2(psllq),
- [16 + 7] = { NULL, helper_pslldq_xmm },
+ [16 + 7] = { NULL, gen_helper_pslldq_xmm },
};
static void *sse_op_table3[4 * 3] = {
- helper_cvtsi2ss,
- helper_cvtsi2sd,
- X86_64_ONLY(helper_cvtsq2ss),
- X86_64_ONLY(helper_cvtsq2sd),
-
- helper_cvttss2si,
- helper_cvttsd2si,
- X86_64_ONLY(helper_cvttss2sq),
- X86_64_ONLY(helper_cvttsd2sq),
-
- helper_cvtss2si,
- helper_cvtsd2si,
- X86_64_ONLY(helper_cvtss2sq),
- X86_64_ONLY(helper_cvtsd2sq),
+ gen_helper_cvtsi2ss,
+ gen_helper_cvtsi2sd,
+ X86_64_ONLY(gen_helper_cvtsq2ss),
+ X86_64_ONLY(gen_helper_cvtsq2sd),
+
+ gen_helper_cvttss2si,
+ gen_helper_cvttsd2si,
+ X86_64_ONLY(gen_helper_cvttss2sq),
+ X86_64_ONLY(gen_helper_cvttsd2sq),
+
+ gen_helper_cvtss2si,
+ gen_helper_cvtsd2si,
+ X86_64_ONLY(gen_helper_cvtss2sq),
+ X86_64_ONLY(gen_helper_cvtsd2sq),
};
static void *sse_op_table4[8][4] = {
};
static void *sse_op_table5[256] = {
- [0x0c] = helper_pi2fw,
- [0x0d] = helper_pi2fd,
- [0x1c] = helper_pf2iw,
- [0x1d] = helper_pf2id,
- [0x8a] = helper_pfnacc,
- [0x8e] = helper_pfpnacc,
- [0x90] = helper_pfcmpge,
- [0x94] = helper_pfmin,
- [0x96] = helper_pfrcp,
- [0x97] = helper_pfrsqrt,
- [0x9a] = helper_pfsub,
- [0x9e] = helper_pfadd,
- [0xa0] = helper_pfcmpgt,
- [0xa4] = helper_pfmax,
- [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
- [0xa7] = helper_movq, /* pfrsqit1 */
- [0xaa] = helper_pfsubr,
- [0xae] = helper_pfacc,
- [0xb0] = helper_pfcmpeq,
- [0xb4] = helper_pfmul,
- [0xb6] = helper_movq, /* pfrcpit2 */
- [0xb7] = helper_pmulhrw_mmx,
- [0xbb] = helper_pswapd,
- [0xbf] = helper_pavgb_mmx /* pavgusb */
+ [0x0c] = gen_helper_pi2fw,
+ [0x0d] = gen_helper_pi2fd,
+ [0x1c] = gen_helper_pf2iw,
+ [0x1d] = gen_helper_pf2id,
+ [0x8a] = gen_helper_pfnacc,
+ [0x8e] = gen_helper_pfpnacc,
+ [0x90] = gen_helper_pfcmpge,
+ [0x94] = gen_helper_pfmin,
+ [0x96] = gen_helper_pfrcp,
+ [0x97] = gen_helper_pfrsqrt,
+ [0x9a] = gen_helper_pfsub,
+ [0x9e] = gen_helper_pfadd,
+ [0xa0] = gen_helper_pfcmpgt,
+ [0xa4] = gen_helper_pfmax,
+ [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */
+ [0xa7] = gen_helper_movq, /* pfrsqit1 */
+ [0xaa] = gen_helper_pfsubr,
+ [0xae] = gen_helper_pfacc,
+ [0xb0] = gen_helper_pfcmpeq,
+ [0xb4] = gen_helper_pfmul,
+ [0xb6] = gen_helper_movq, /* pfrcpit2 */
+ [0xb7] = gen_helper_pmulhrw_mmx,
+ [0xbb] = gen_helper_pswapd,
+ [0xbf] = gen_helper_pavgb_mmx /* pavgusb */
};
struct sse_op_helper_s {
void *op[2]; uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
-#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
-#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
+#define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
+#define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
static struct sse_op_helper_s sse_op_table6[256] = {
[0x00] = SSSE3_OP(pshufb),
if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
goto illegal_op;
/* femms */
- tcg_gen_helper_0_0(helper_emms);
+ gen_helper_emms();
return;
}
if (b == 0x77) {
/* emms */
- tcg_gen_helper_0_0(helper_emms);
+ gen_helper_emms();
return;
}
/* prepare MMX state (XXX: optimize by storing fptt and fptags in
the static cpu state) */
if (!is_xmm) {
- tcg_gen_helper_0_0(helper_enter_mmx);
+ gen_helper_enter_mmx();
}
modrm = ldub_code(s->pc++);
gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
offsetof(CPUX86State,fpregs[reg].mmx));
- tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
+ tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
+ gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32);
}
break;
case 0x16e: /* movd xmm, ea */
gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
offsetof(CPUX86State,xmm_regs[reg]));
- tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
+ gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]);
} else
#endif
{
tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
offsetof(CPUX86State,xmm_regs[reg]));
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
+ gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32);
}
break;
case 0x6f: /* movq mm, ea */
}
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
+ ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
break;
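Because sse_op_table1 and the other dispatch tables still store the wrappers as plain void *, each call site now casts sse_op2 back to the expected wrapper signature before invoking it, as above. A tidier spelling would use a named function-pointer type; the typedef name below is hypothetical and not used by this patch:

    typedef void (*SSEFunc_0_pp)(TCGv_ptr, TCGv_ptr);
    ((SSEFunc_0_pp)sse_op2)(cpu_ptr0, cpu_ptr1);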
case 0x050: /* movmskps */
rm = (modrm & 7) | REX_B(s);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
offsetof(CPUX86State,xmm_regs[rm]));
- tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
+ gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_mov_reg_T0(OT_LONG, reg);
break;
rm = (modrm & 7) | REX_B(s);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
offsetof(CPUX86State,xmm_regs[rm]));
- tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
+ gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_mov_reg_T0(OT_LONG, reg);
break;
case 0x02a: /* cvtpi2ps */
case 0x12a: /* cvtpi2pd */
- tcg_gen_helper_0_0(helper_enter_mmx);
+ gen_helper_enter_mmx();
if (mod != 3) {
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
op2_offset = offsetof(CPUX86State,mmx_t0);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
switch(b >> 8) {
case 0x0:
- tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
+ gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1);
break;
default:
case 0x1:
- tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
+ gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1);
break;
}
break;
sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
if (ot == OT_LONG) {
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
+ ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32);
} else {
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
+ ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]);
}
break;
case 0x02c: /* cvttps2pi */
case 0x12c: /* cvttpd2pi */
case 0x02d: /* cvtps2pi */
case 0x12d: /* cvtpd2pi */
- tcg_gen_helper_0_0(helper_enter_mmx);
+ gen_helper_enter_mmx();
if (mod != 3) {
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
op2_offset = offsetof(CPUX86State,xmm_t0);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
switch(b) {
case 0x02c:
- tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
+ gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1);
break;
case 0x12c:
- tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
+ gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1);
break;
case 0x02d:
- tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
+ gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1);
break;
case 0x12d:
- tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
+ gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1);
break;
}
break;
(b & 1) * 4];
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
if (ot == OT_LONG) {
- tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
+ ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
} else {
- tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
+ ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0);
}
gen_op_mov_reg_T0(ot, reg);
break;
}
break;
case 0x2d6: /* movq2dq */
- tcg_gen_helper_0_0(helper_enter_mmx);
+ gen_helper_enter_mmx();
rm = (modrm & 7);
gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
offsetof(CPUX86State,fpregs[rm].mmx));
gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
break;
case 0x3d6: /* movdq2q */
- tcg_gen_helper_0_0(helper_enter_mmx);
+ gen_helper_enter_mmx();
rm = (modrm & 7) | REX_B(s);
gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
if (b1) {
rm = (modrm & 7) | REX_B(s);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
- tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
+ gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0);
} else {
rm = (modrm & 7);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
- tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
+ gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0);
}
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
reg = ((modrm >> 3) & 7) | rex_r;
break;
case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
- tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
+ tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
(s->mem_index >> 2) - 1);
+ tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
offsetof(XMMReg, XMM_L(0)));
break;
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
+ ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
if (b == 0x17)
s->cc_op = CC_OP_EFLAGS;
gen_op_mov_TN_reg(OT_LONG, 0, reg);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
- tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
- cpu_T[0], tcg_const_i32(8 << ot));
+ gen_helper_crc32(cpu_T[0], cpu_tmp2_i32,
+ cpu_T[0], tcg_const_i32(8 << ot));
ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
gen_op_mov_reg_T0(ot, reg);
tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
offsetof(CPUX86State,
xmm_regs[reg].XMM_L(val & 3)));
+ tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
if (mod == 3)
- gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
+ gen_op_mov_reg_v(ot, rm, cpu_T[0]);
else
- tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
+ tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
(s->mem_index >> 2) - 1);
} else { /* pextrq */
+#ifdef TARGET_X86_64
tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
offsetof(CPUX86State,
xmm_regs[reg].XMM_Q(val & 1)));
else
tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
+#else
+ goto illegal_op;
+#endif
}
break;
case 0x17: /* extractps */
if (mod == 3)
- gen_op_mov_TN_reg(OT_LONG, 0, rm);
+ gen_op_mov_v_reg(OT_LONG, cpu_tmp0, rm);
else
- tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
+ tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0,
(s->mem_index >> 2) - 1);
- tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
+ tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State,
xmm_regs[reg].XMM_B(val & 15)));
break;
case 0x21: /* insertps */
- if (mod == 3)
+ if (mod == 3) {
tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
offsetof(CPUX86State,xmm_regs[rm]
.XMM_L((val >> 6) & 3)));
- else
- tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
+ } else {
+ tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
(s->mem_index >> 2) - 1);
+ tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
+ }
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
offsetof(CPUX86State,xmm_regs[reg]
.XMM_L((val >> 4) & 3)));
case 0x22:
if (ot == OT_LONG) { /* pinsrd */
if (mod == 3)
- gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
+ gen_op_mov_v_reg(ot, cpu_tmp0, rm);
else
- tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
+ tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
(s->mem_index >> 2) - 1);
+ tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
offsetof(CPUX86State,
xmm_regs[reg].XMM_L(val & 3)));
} else { /* pinsrq */
+#ifdef TARGET_X86_64
if (mod == 3)
gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
else
tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
offsetof(CPUX86State,
xmm_regs[reg].XMM_Q(val & 1)));
+#else
+ goto illegal_op;
+#endif
}
break;
}
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
+ ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
break;
default:
goto illegal_op;
goto illegal_op;
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
+ ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
break;
case 0x70: /* pshufx insn */
case 0xc6: /* pshufx insn */
val = ldub_code(s->pc++);
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
+ ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
break;
case 0xc2:
/* compare insns */
sse_op2 = sse_op_table4[val][b1];
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
+ ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
break;
case 0xf7:
/* maskmov : we must prepare A0 */
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
+ ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_ptr1, cpu_A0);
break;
default:
tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
- tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
+ ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1);
break;
}
if (b == 0x2e || b == 0x2f) {
/* lock generation */
if (prefixes & PREFIX_LOCK)
- tcg_gen_helper_0_0(helper_lock);
+ gen_helper_lock();
/* now check op code */
reswitch:
tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
#else
{
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
tcg_gen_extu_i32_i64(t0, cpu_T[0]);
tcg_gen_extu_i32_i64(t1, cpu_T[1]);
break;
#ifdef TARGET_X86_64
case OT_QUAD:
- tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
+ gen_helper_mulq_EAX_T0(cpu_T[0]);
s->cc_op = CC_OP_MULQ;
break;
#endif
gen_op_mov_reg_T0(OT_LONG, R_EDX);
#else
{
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
tcg_gen_ext_i32_i64(t0, cpu_T[0]);
tcg_gen_ext_i32_i64(t1, cpu_T[1]);
break;
#ifdef TARGET_X86_64
case OT_QUAD:
- tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
+ gen_helper_imulq_EAX_T0(cpu_T[0]);
s->cc_op = CC_OP_MULQ;
break;
#endif
switch(ot) {
case OT_BYTE:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
+ gen_helper_divb_AL(cpu_T[0]);
break;
case OT_WORD:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
+ gen_helper_divw_AX(cpu_T[0]);
break;
default:
case OT_LONG:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
+ gen_helper_divl_EAX(cpu_T[0]);
break;
#ifdef TARGET_X86_64
case OT_QUAD:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
+ gen_helper_divq_EAX(cpu_T[0]);
break;
#endif
}
switch(ot) {
case OT_BYTE:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
+ gen_helper_idivb_AL(cpu_T[0]);
break;
case OT_WORD:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
+ gen_helper_idivw_AX(cpu_T[0]);
break;
default:
case OT_LONG:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
+ gen_helper_idivl_EAX(cpu_T[0]);
break;
#ifdef TARGET_X86_64
case OT_QUAD:
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
+ gen_helper_idivq_EAX(cpu_T[0]);
break;
#endif
}
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_4(helper_lcall_protected,
- cpu_tmp2_i32, cpu_T[1],
- tcg_const_i32(dflag),
- tcg_const_i32(s->pc - pc_start));
+ gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1],
+ tcg_const_i32(dflag),
+ tcg_const_i32(s->pc - pc_start));
} else {
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_4(helper_lcall_real,
- cpu_tmp2_i32, cpu_T[1],
- tcg_const_i32(dflag),
- tcg_const_i32(s->pc - s->cs_base));
+ gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1],
+ tcg_const_i32(dflag),
+ tcg_const_i32(s->pc - s->cs_base));
}
gen_eob(s);
break;
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_3(helper_ljmp_protected,
- cpu_tmp2_i32,
- cpu_T[1],
- tcg_const_i32(s->pc - pc_start));
+ gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1],
+ tcg_const_i32(s->pc - pc_start));
} else {
gen_op_movl_seg_T0_vm(R_CS);
gen_op_movl_T0_T1();
#ifdef TARGET_X86_64
if (ot == OT_QUAD) {
- tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
+ gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]);
} else
#endif
if (ot == OT_LONG) {
tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
#else
{
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
tcg_gen_ext_i32_i64(t0, cpu_T[0]);
tcg_gen_ext_i32_i64(t1, cpu_T[1]);
tcg_gen_mul_i64(t0, t0, t1);
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
- t2 = tcg_temp_local_new(TCG_TYPE_TL);
- a0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
+ t2 = tcg_temp_local_new();
+ a0 = tcg_temp_local_new();
gen_op_mov_v_reg(ot, t1, reg);
if (mod == 3) {
rm = (modrm & 7) | REX_B(s);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
- tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
+ gen_helper_cmpxchg16b(cpu_A0);
} else
#endif
{
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_lea_modrm(s, modrm, ®_addr, &offset_addr);
- tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
+ gen_helper_cmpxchg8b(cpu_A0);
}
s->cc_op = CC_OP_EFLAGS;
break;
/* If several instructions disable interrupts, only the
_first_ does it */
if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
- tcg_gen_helper_0_0(helper_set_inhibit_irq);
+ gen_helper_set_inhibit_irq();
s->tf = 0;
}
if (s->is_jmp) {
/* If several instructions disable interrupts, only the
_first_ does it */
if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
- tcg_gen_helper_0_0(helper_set_inhibit_irq);
+ gen_helper_set_inhibit_irq();
s->tf = 0;
}
if (s->is_jmp) {
gen_op_mov_TN_reg(ot, 0, reg);
/* for xchg, lock is implicit */
if (!(prefixes & PREFIX_LOCK))
- tcg_gen_helper_0_0(helper_lock);
+ gen_helper_lock();
gen_op_ld_T1_A0(ot + s->mem_index);
gen_op_st_T0_A0(ot + s->mem_index);
if (!(prefixes & PREFIX_LOCK))
- tcg_gen_helper_0_0(helper_unlock);
+ gen_helper_unlock();
gen_op_mov_reg_T1(ot, reg);
}
break;
case 0:
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
+ gen_helper_flds_FT0(cpu_tmp2_i32);
break;
case 1:
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
+ gen_helper_fildl_FT0(cpu_tmp2_i32);
break;
case 2:
tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
- tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
+ gen_helper_fldl_FT0(cpu_tmp1_i64);
break;
case 3:
default:
gen_op_lds_T0_A0(OT_WORD + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
+ gen_helper_fildl_FT0(cpu_tmp2_i32);
break;
}
- tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
+ gen_helper_fp_arith_ST0_FT0(op1);
if (op1 == 3) {
/* fcomp needs pop */
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fpop();
}
}
break;
case 0:
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
+ gen_helper_flds_ST0(cpu_tmp2_i32);
break;
case 1:
gen_op_ld_T0_A0(OT_LONG + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
+ gen_helper_fildl_ST0(cpu_tmp2_i32);
break;
case 2:
tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
- tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
+ gen_helper_fldl_ST0(cpu_tmp1_i64);
break;
case 3:
default:
gen_op_lds_T0_A0(OT_WORD + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
+ gen_helper_fildl_ST0(cpu_tmp2_i32);
break;
}
break;
/* XXX: the corresponding CPUID bit must be tested ! */
switch(op >> 4) {
case 1:
- tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
+ gen_helper_fisttl_ST0(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_LONG + s->mem_index);
break;
case 2:
- tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
+ gen_helper_fisttll_ST0(cpu_tmp1_i64);
tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
break;
case 3:
default:
- tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
+ gen_helper_fistt_ST0(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_WORD + s->mem_index);
break;
}
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fpop();
break;
default:
switch(op >> 4) {
case 0:
- tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
+ gen_helper_fsts_ST0(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_LONG + s->mem_index);
break;
case 1:
- tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
+ gen_helper_fistl_ST0(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_LONG + s->mem_index);
break;
case 2:
- tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
+ gen_helper_fstl_ST0(cpu_tmp1_i64);
tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
break;
case 3:
default:
- tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
+ gen_helper_fist_ST0(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_WORD + s->mem_index);
break;
}
if ((op & 7) == 3)
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fpop();
break;
}
break;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_fldenv,
- cpu_A0, tcg_const_i32(s->dflag));
+ gen_helper_fldenv(cpu_A0, tcg_const_i32(s->dflag));
break;
case 0x0d: /* fldcw mem */
gen_op_ld_T0_A0(OT_WORD + s->mem_index);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
+ gen_helper_fldcw(cpu_tmp2_i32);
break;
case 0x0e: /* fnstenv mem */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_fstenv,
- cpu_A0, tcg_const_i32(s->dflag));
+ gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag));
break;
case 0x0f: /* fnstcw mem */
- tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
+ gen_helper_fnstcw(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_WORD + s->mem_index);
break;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
+ gen_helper_fldt_ST0(cpu_A0);
break;
case 0x1f: /* fstpt mem */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fstt_ST0(cpu_A0);
+ gen_helper_fpop();
break;
case 0x2c: /* frstor mem */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_frstor,
- cpu_A0, tcg_const_i32(s->dflag));
+ gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag));
break;
case 0x2e: /* fnsave mem */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_fsave,
- cpu_A0, tcg_const_i32(s->dflag));
+ gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag));
break;
case 0x2f: /* fnstsw mem */
- tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
+ gen_helper_fnstsw(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_st_T0_A0(OT_WORD + s->mem_index);
break;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
+ gen_helper_fbld_ST0(cpu_A0);
break;
case 0x3e: /* fbstp */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fbst_ST0(cpu_A0);
+ gen_helper_fpop();
break;
case 0x3d: /* fildll */
tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
- tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
+ gen_helper_fildll_ST0(cpu_tmp1_i64);
break;
case 0x3f: /* fistpll */
- tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
+ gen_helper_fistll_ST0(cpu_tmp1_i64);
tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
(s->mem_index >> 2) - 1);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fpop();
break;
default:
goto illegal_op;
switch(op) {
case 0x08: /* fld sti */
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
+ gen_helper_fpush();
+ gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7));
break;
case 0x09: /* fxchg sti */
case 0x29: /* fxchg4 sti, undocumented op */
case 0x39: /* fxchg7 sti, undocumented op */
- tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
+ gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg));
break;
case 0x0a: /* grp d9/2 */
switch(rm) {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_0(helper_fwait);
+ gen_helper_fwait();
break;
default:
goto illegal_op;
case 0x0c: /* grp d9/4 */
switch(rm) {
case 0: /* fchs */
- tcg_gen_helper_0_0(helper_fchs_ST0);
+ gen_helper_fchs_ST0();
break;
case 1: /* fabs */
- tcg_gen_helper_0_0(helper_fabs_ST0);
+ gen_helper_fabs_ST0();
break;
case 4: /* ftst */
- tcg_gen_helper_0_0(helper_fldz_FT0);
- tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
+ gen_helper_fldz_FT0();
+ gen_helper_fcom_ST0_FT0();
break;
case 5: /* fxam */
- tcg_gen_helper_0_0(helper_fxam_ST0);
+ gen_helper_fxam_ST0();
break;
default:
goto illegal_op;
{
switch(rm) {
case 0:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fld1_ST0);
+ gen_helper_fpush();
+ gen_helper_fld1_ST0();
break;
case 1:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fldl2t_ST0);
+ gen_helper_fpush();
+ gen_helper_fldl2t_ST0();
break;
case 2:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fldl2e_ST0);
+ gen_helper_fpush();
+ gen_helper_fldl2e_ST0();
break;
case 3:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fldpi_ST0);
+ gen_helper_fpush();
+ gen_helper_fldpi_ST0();
break;
case 4:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fldlg2_ST0);
+ gen_helper_fpush();
+ gen_helper_fldlg2_ST0();
break;
case 5:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fldln2_ST0);
+ gen_helper_fpush();
+ gen_helper_fldln2_ST0();
break;
case 6:
- tcg_gen_helper_0_0(helper_fpush);
- tcg_gen_helper_0_0(helper_fldz_ST0);
+ gen_helper_fpush();
+ gen_helper_fldz_ST0();
break;
default:
goto illegal_op;
case 0x0e: /* grp d9/6 */
switch(rm) {
case 0: /* f2xm1 */
- tcg_gen_helper_0_0(helper_f2xm1);
+ gen_helper_f2xm1();
break;
case 1: /* fyl2x */
- tcg_gen_helper_0_0(helper_fyl2x);
+ gen_helper_fyl2x();
break;
case 2: /* fptan */
- tcg_gen_helper_0_0(helper_fptan);
+ gen_helper_fptan();
break;
case 3: /* fpatan */
- tcg_gen_helper_0_0(helper_fpatan);
+ gen_helper_fpatan();
break;
case 4: /* fxtract */
- tcg_gen_helper_0_0(helper_fxtract);
+ gen_helper_fxtract();
break;
case 5: /* fprem1 */
- tcg_gen_helper_0_0(helper_fprem1);
+ gen_helper_fprem1();
break;
case 6: /* fdecstp */
- tcg_gen_helper_0_0(helper_fdecstp);
+ gen_helper_fdecstp();
break;
default:
case 7: /* fincstp */
- tcg_gen_helper_0_0(helper_fincstp);
+ gen_helper_fincstp();
break;
}
break;
case 0x0f: /* grp d9/7 */
switch(rm) {
case 0: /* fprem */
- tcg_gen_helper_0_0(helper_fprem);
+ gen_helper_fprem();
break;
case 1: /* fyl2xp1 */
- tcg_gen_helper_0_0(helper_fyl2xp1);
+ gen_helper_fyl2xp1();
break;
case 2: /* fsqrt */
- tcg_gen_helper_0_0(helper_fsqrt);
+ gen_helper_fsqrt();
break;
case 3: /* fsincos */
- tcg_gen_helper_0_0(helper_fsincos);
+ gen_helper_fsincos();
break;
case 5: /* fscale */
- tcg_gen_helper_0_0(helper_fscale);
+ gen_helper_fscale();
break;
case 4: /* frndint */
- tcg_gen_helper_0_0(helper_frndint);
+ gen_helper_frndint();
break;
case 6: /* fsin */
- tcg_gen_helper_0_0(helper_fsin);
+ gen_helper_fsin();
break;
default:
case 7: /* fcos */
- tcg_gen_helper_0_0(helper_fcos);
+ gen_helper_fcos();
break;
}
break;
op1 = op & 7;
if (op >= 0x20) {
- tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
+ gen_helper_fp_arith_STN_ST0(op1, opreg);
if (op >= 0x30)
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fpop();
} else {
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fp_arith_ST0_FT0(op1);
}
}
break;
case 0x02: /* fcom */
case 0x22: /* fcom2, undocumented op */
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fcom_ST0_FT0();
break;
case 0x03: /* fcomp */
case 0x23: /* fcomp3, undocumented op */
case 0x32: /* fcomp5, undocumented op */
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fcom_ST0_FT0();
+ gen_helper_fpop();
break;
case 0x15: /* da/5 */
switch(rm) {
case 1: /* fucompp */
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
- tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
- tcg_gen_helper_0_0(helper_fpop);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(1));
+ gen_helper_fucom_ST0_FT0();
+ gen_helper_fpop();
+ gen_helper_fpop();
break;
default:
goto illegal_op;
case 1: /* fdisi (287 only, just do nop here) */
break;
case 2: /* fclex */
- tcg_gen_helper_0_0(helper_fclex);
+ gen_helper_fclex();
break;
case 3: /* fninit */
- tcg_gen_helper_0_0(helper_fninit);
+ gen_helper_fninit();
break;
case 4: /* fsetpm (287 only, just do nop here) */
break;
case 0x1d: /* fucomi */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fucomi_ST0_FT0();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x1e: /* fcomi */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fcomi_ST0_FT0();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x28: /* ffree sti */
- tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
+ gen_helper_ffree_STN(tcg_const_i32(opreg));
break;
case 0x2a: /* fst sti */
- tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
+ gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
break;
case 0x2b: /* fstp sti */
case 0x0b: /* fstp1 sti, undocumented op */
case 0x3a: /* fstp8 sti, undocumented op */
case 0x3b: /* fstp9 sti, undocumented op */
- tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
+ gen_helper_fpop();
break;
case 0x2c: /* fucom st(i) */
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fucom_ST0_FT0();
break;
case 0x2d: /* fucomp st(i) */
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fucom_ST0_FT0();
+ gen_helper_fpop();
break;
case 0x33: /* de/3 */
switch(rm) {
case 1: /* fcompp */
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
- tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
- tcg_gen_helper_0_0(helper_fpop);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(1));
+ gen_helper_fcom_ST0_FT0();
+ gen_helper_fpop();
+ gen_helper_fpop();
break;
default:
goto illegal_op;
}
break;
case 0x38: /* ffreep sti, undocumented op */
- tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_ffree_STN(tcg_const_i32(opreg));
+ gen_helper_fpop();
break;
case 0x3c: /* df/4 */
switch(rm) {
case 0:
- tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
+ gen_helper_fnstsw(cpu_tmp2_i32);
tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
gen_op_mov_reg_T0(OT_WORD, R_EAX);
break;
case 0x3d: /* fucomip */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fucomi_ST0_FT0();
+ gen_helper_fpop();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x3e: /* fcomip */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
- tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
- tcg_gen_helper_0_0(helper_fpop);
+ gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+ gen_helper_fcomi_ST0_FT0();
+ gen_helper_fpop();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x10 ... 0x13: /* fcmovxx */
op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
l1 = gen_new_label();
gen_jcc1(s, s->cc_op, op1, l1);
- tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
+ gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
gen_set_label(l1);
}
break;
if (use_icount)
gen_io_start();
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
+ gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
gen_op_mov_reg_T1(ot, R_EAX);
if (use_icount) {
gen_io_end();
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
- tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
+ gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
if (use_icount) {
gen_io_end();
gen_jmp(s, s->pc - s->cs_base);
if (use_icount)
gen_io_start();
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
+ gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
gen_op_mov_reg_T1(ot, R_EAX);
if (use_icount) {
gen_io_end();
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
- tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
+ gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
if (use_icount) {
gen_io_end();
gen_jmp(s, s->pc - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_lret_protected,
- tcg_const_i32(s->dflag),
- tcg_const_i32(val));
+ gen_helper_lret_protected(tcg_const_i32(s->dflag),
+ tcg_const_i32(val));
} else {
gen_stack_A0(s);
/* pop offset */
gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
if (!s->pe) {
/* real mode */
- tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
+ gen_helper_iret_real(tcg_const_i32(s->dflag));
s->cc_op = CC_OP_EFLAGS;
} else if (s->vm86) {
if (s->iopl != 3) {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
} else {
- tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
+ gen_helper_iret_real(tcg_const_i32(s->dflag));
s->cc_op = CC_OP_EFLAGS;
}
} else {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_iret_protected,
- tcg_const_i32(s->dflag),
- tcg_const_i32(s->pc - s->cs_base));
+ gen_helper_iret_protected(tcg_const_i32(s->dflag),
+ tcg_const_i32(s->pc - s->cs_base));
s->cc_op = CC_OP_EFLAGS;
}
gen_eob(s);
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
mod = (modrm >> 6) & 3;
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
if (mod != 3) {
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
} else {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
+ gen_helper_read_eflags(cpu_T[0]);
gen_push_T0(s);
}
break;
gen_pop_T0(s);
if (s->cpl == 0) {
if (s->dflag) {
- tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+ gen_helper_write_eflags(cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
} else {
- tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+ gen_helper_write_eflags(cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
}
} else {
if (s->cpl <= s->iopl) {
if (s->dflag) {
- tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+ gen_helper_write_eflags(cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
} else {
- tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+ gen_helper_write_eflags(cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
}
} else {
if (s->dflag) {
- tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+ gen_helper_write_eflags(cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
} else {
- tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+ gen_helper_write_eflags(cpu_T[0],
tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
}
}
gen_extu(ot, cpu_T[0]);
label1 = gen_new_label();
tcg_gen_movi_tl(cpu_cc_dst, 0);
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_mov_tl(t0, cpu_T[0]);
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
if (b & 1) {
- tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
+ gen_helper_bsr(cpu_T[0], t0);
} else {
- tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
+ gen_helper_bsf(cpu_T[0], t0);
}
gen_op_mov_reg_T0(ot, reg);
tcg_gen_movi_tl(cpu_cc_dst, 1);
goto illegal_op;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_0(helper_daa);
+ gen_helper_daa();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x2f: /* das */
goto illegal_op;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_0(helper_das);
+ gen_helper_das();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x37: /* aaa */
goto illegal_op;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_0(helper_aaa);
+ gen_helper_aaa();
s->cc_op = CC_OP_EFLAGS;
break;
case 0x3f: /* aas */
goto illegal_op;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
- tcg_gen_helper_0_0(helper_aas);
+ gen_helper_aas();
s->cc_op = CC_OP_EFLAGS;
break;
case 0xd4: /* aam */
if (val == 0) {
gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
} else {
- tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
+ gen_helper_aam(tcg_const_i32(val));
s->cc_op = CC_OP_LOGICB;
}
break;
if (CODE64(s))
goto illegal_op;
val = ldub_code(s->pc++);
- tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
+ gen_helper_aad(tcg_const_i32(val));
s->cc_op = CC_OP_LOGICB;
break;
/************************/
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_0(helper_fwait);
+ gen_helper_fwait();
}
break;
case 0xcc: /* int3 */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
+ gen_helper_into(tcg_const_i32(s->pc - pc_start));
break;
case 0xf1: /* icebp (undocumented, exits to external debugger) */
gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
case 0xfa: /* cli */
if (!s->vm86) {
if (s->cpl <= s->iopl) {
- tcg_gen_helper_0_0(helper_cli);
+ gen_helper_cli();
} else {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
}
} else {
if (s->iopl == 3) {
- tcg_gen_helper_0_0(helper_cli);
+ gen_helper_cli();
} else {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
}
if (!s->vm86) {
if (s->cpl <= s->iopl) {
gen_sti:
- tcg_gen_helper_0_0(helper_sti);
+ gen_helper_sti();
/* interrupts are enabled only for the first insn after sti */
/* If several instructions disable interrupts, only the
_first_ does it */
if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
- tcg_gen_helper_0_0(helper_set_inhibit_irq);
+ gen_helper_set_inhibit_irq();
/* give a chance to handle pending irqs */
gen_jmp_im(s->pc - s->cs_base);
gen_eob(s);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
if (ot == OT_WORD)
- tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
+ gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
else
- tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
+ gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
break;
case 0x1c8 ... 0x1cf: /* bswap reg */
reg = (b & 7) | REX_B(s);
gen_op_mov_reg_T0(OT_QUAD, reg);
} else
{
- TCGv tmp0;
+ TCGv_i32 tmp0;
gen_op_mov_TN_reg(OT_LONG, 0, reg);
- tmp0 = tcg_temp_new(TCG_TYPE_I32);
+ tmp0 = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
tcg_gen_bswap_i32(tmp0, tmp0);
tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
if (b & 2) {
- tcg_gen_helper_0_0(helper_rdmsr);
+ gen_helper_rdmsr();
} else {
- tcg_gen_helper_0_0(helper_wrmsr);
+ gen_helper_wrmsr();
}
}
break;
gen_jmp_im(pc_start - s->cs_base);
if (use_icount)
gen_io_start();
- tcg_gen_helper_0_0(helper_rdtsc);
+ gen_helper_rdtsc();
if (use_icount) {
gen_io_end();
gen_jmp(s, s->pc - s->cs_base);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_0(helper_rdpmc);
+ gen_helper_rdpmc();
break;
case 0x134: /* sysenter */
/* For Intel SYSENTER is valid on 64-bit */
s->cc_op = CC_OP_DYNAMIC;
}
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_0(helper_sysenter);
+ gen_helper_sysenter();
gen_eob(s);
}
break;
s->cc_op = CC_OP_DYNAMIC;
}
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
+ gen_helper_sysexit(tcg_const_i32(dflag));
gen_eob(s);
}
break;
s->cc_op = CC_OP_DYNAMIC;
}
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
+ gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
gen_eob(s);
break;
case 0x107: /* sysret */
s->cc_op = CC_OP_DYNAMIC;
}
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
+ gen_helper_sysret(tcg_const_i32(s->dflag));
/* condition codes are modified only in long mode */
if (s->lma)
s->cc_op = CC_OP_EFLAGS;
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_0(helper_cpuid);
+ gen_helper_cpuid();
break;
case 0xf4: /* hlt */
if (s->cpl != 0) {
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
+ gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
s->is_jmp = 3;
}
break;
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
+ gen_helper_lldt(cpu_tmp2_i32);
}
break;
case 1: /* str */
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
gen_jmp_im(pc_start - s->cs_base);
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
- tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
+ gen_helper_ltr(cpu_tmp2_i32);
}
break;
case 4: /* verr */
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
if (op == 4)
- tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
+ gen_helper_verr(cpu_T[0]);
else
- tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
+ gen_helper_verw(cpu_T[0]);
s->cc_op = CC_OP_EFLAGS;
break;
default:
gen_op_andl_A0_ffff();
}
gen_add_A0_ds_seg(s);
- tcg_gen_helper_0_1(helper_monitor, cpu_A0);
+ gen_helper_monitor(cpu_A0);
break;
case 1: /* mwait */
if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
s->cc_op = CC_OP_DYNAMIC;
}
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
+ gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
gen_eob(s);
break;
default:
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
break;
} else {
- tcg_gen_helper_0_2(helper_vmrun,
- tcg_const_i32(s->aflag),
- tcg_const_i32(s->pc - pc_start));
+ gen_helper_vmrun(tcg_const_i32(s->aflag),
+ tcg_const_i32(s->pc - pc_start));
tcg_gen_exit_tb(0);
s->is_jmp = 3;
}
case 1: /* VMMCALL */
if (!(s->flags & HF_SVME_MASK))
goto illegal_op;
- tcg_gen_helper_0_0(helper_vmmcall);
+ gen_helper_vmmcall();
break;
case 2: /* VMLOAD */
if (!(s->flags & HF_SVME_MASK) || !s->pe)
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
break;
} else {
- tcg_gen_helper_0_1(helper_vmload,
- tcg_const_i32(s->aflag));
+ gen_helper_vmload(tcg_const_i32(s->aflag));
}
break;
case 3: /* VMSAVE */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
break;
} else {
- tcg_gen_helper_0_1(helper_vmsave,
- tcg_const_i32(s->aflag));
+ gen_helper_vmsave(tcg_const_i32(s->aflag));
}
break;
case 4: /* STGI */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
break;
} else {
- tcg_gen_helper_0_0(helper_stgi);
+ gen_helper_stgi();
}
break;
case 5: /* CLGI */
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
break;
} else {
- tcg_gen_helper_0_0(helper_clgi);
+ gen_helper_clgi();
}
break;
case 6: /* SKINIT */
!(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
!s->pe)
goto illegal_op;
- tcg_gen_helper_0_0(helper_skinit);
+ gen_helper_skinit();
break;
case 7: /* INVLPGA */
if (!(s->flags & HF_SVME_MASK) || !s->pe)
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
break;
} else {
- tcg_gen_helper_0_1(helper_invlpga,
- tcg_const_i32(s->aflag));
+ gen_helper_invlpga(tcg_const_i32(s->aflag));
}
break;
default:
} else {
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
- tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
+ gen_helper_lmsw(cpu_T[0]);
gen_jmp_im(s->pc - s->cs_base);
gen_eob(s);
}
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
- tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
+ gen_helper_invlpg(cpu_A0);
gen_jmp_im(s->pc - s->cs_base);
gen_eob(s);
}
if (!s->pe || s->vm86)
goto illegal_op;
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
- t2 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
+ t2 = tcg_temp_local_new();
ot = OT_WORD;
modrm = ldub_code(s->pc++);
reg = (modrm >> 3) & 7;
modrm = ldub_code(s->pc++);
reg = ((modrm >> 3) & 7) | rex_r;
gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
if (b == 0x102)
- tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
+ gen_helper_lar(t0, cpu_T[0]);
else
- tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
+ gen_helper_lsl(t0, cpu_T[0]);
tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
label1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
gen_jmp_im(pc_start - s->cs_base);
if (b & 2) {
gen_op_mov_TN_reg(ot, 0, rm);
- tcg_gen_helper_0_2(helper_write_crN,
- tcg_const_i32(reg), cpu_T[0]);
+ gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
gen_jmp_im(s->pc - s->cs_base);
gen_eob(s);
} else {
- tcg_gen_helper_1_1(helper_read_crN,
- cpu_T[0], tcg_const_i32(reg));
+ gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
gen_op_mov_reg_T0(ot, rm);
}
break;
if (b & 2) {
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
gen_op_mov_TN_reg(ot, 0, rm);
- tcg_gen_helper_0_2(helper_movl_drN_T0,
- tcg_const_i32(reg), cpu_T[0]);
+ gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
gen_jmp_im(s->pc - s->cs_base);
gen_eob(s);
} else {
gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
} else {
gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
- tcg_gen_helper_0_0(helper_clts);
+ gen_helper_clts();
/* abort block because static cpu state changed */
gen_jmp_im(s->pc - s->cs_base);
gen_eob(s);
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_fxsave,
- cpu_A0, tcg_const_i32((s->dflag == 2)));
+ gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
break;
case 1: /* fxrstor */
if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
if (s->cc_op != CC_OP_DYNAMIC)
gen_op_set_cc_op(s->cc_op);
gen_jmp_im(pc_start - s->cs_base);
- tcg_gen_helper_0_2(helper_fxrstor,
- cpu_A0, tcg_const_i32((s->dflag == 2)));
+ gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
break;
case 2: /* ldmxcsr */
case 3: /* stmxcsr */
s->cc_op = CC_OP_DYNAMIC;
}
gen_jmp_im(s->pc - s->cs_base);
- tcg_gen_helper_0_0(helper_rsm);
+ gen_helper_rsm();
gen_eob(s);
break;
case 0x1b8: /* SSE4.2 popcnt */
ot = OT_QUAD;
gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
- tcg_gen_helper_1_2(helper_popcnt,
- cpu_T[0], cpu_T[0], tcg_const_i32(ot));
+ gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
gen_op_mov_reg_T0(ot, reg);
s->cc_op = CC_OP_EFLAGS;
}
/* lock generation */
if (s->prefix & PREFIX_LOCK)
- tcg_gen_helper_0_0(helper_unlock);
+ gen_helper_unlock();
return s->pc;
illegal_op:
if (s->prefix & PREFIX_LOCK)
- tcg_gen_helper_0_0(helper_unlock);
+ gen_helper_unlock();
/* XXX: ensure that no lock was generated */
gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
return s->pc;
#else
assert(sizeof(CCTable) == (1 << 4));
#endif
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
- cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
- TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
- cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
- cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
- cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
+ cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, cc_op), "cc_op");
+ cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
+ "cc_src");
+ cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
+ "cc_dst");
+ cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
+ "cc_tmp");
/* register helpers */
-
-#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+#define GEN_HELPER 2
#include "helper.h"
}
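/* Sketch of the include pattern implied by the hunks above and below (not a
 * literal excerpt from any one file; shown with the m68k spelling helpers.h,
 * the i386 file is helper.h): the per-target helper header is pulled in once
 * for the helper prototypes and gen_helper_* wrappers, and once more with
 * GEN_HELPER set to 2 at translator-init time, replacing the old per-target
 * DEF_HELPER/tcg_register_helper registration macro:
 *
 *     #define GEN_HELPER 1
 *     #include "helpers.h"
 *     ...
 *     #define GEN_HELPER 2
 *     #include "helpers.h"
 */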
printf("ERROR addseg\n");
#endif
- cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
- cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
- cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
- cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
-
- cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
- cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
- cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
- cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
- cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
- cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
- cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
- cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
- cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
+ cpu_T[0] = tcg_temp_new();
+ cpu_T[1] = tcg_temp_new();
+ cpu_A0 = tcg_temp_new();
+ cpu_T3 = tcg_temp_new();
+
+ cpu_tmp0 = tcg_temp_new();
+ cpu_tmp1_i64 = tcg_temp_new_i64();
+ cpu_tmp2_i32 = tcg_temp_new_i32();
+ cpu_tmp3_i32 = tcg_temp_new_i32();
+ cpu_tmp4 = tcg_temp_new();
+ cpu_tmp5 = tcg_temp_new();
+ cpu_tmp6 = tcg_temp_new();
+ cpu_ptr0 = tcg_temp_new_ptr();
+ cpu_ptr1 = tcg_temp_new_ptr();
gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
-#ifndef DEF_HELPER
-#define DEF_HELPER(name, ret, args) ret glue(helper_,name) args;
-#endif
+#include "def-helper.h"
-#ifdef GEN_HELPER
-#define DEF_HELPER_0_0(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(void) \
-{ \
- tcg_gen_helper_0_0(helper_##name); \
-}
-#define DEF_HELPER_0_1(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv arg1) \
-{ \
- tcg_gen_helper_0_1(helper_##name, arg1); \
-}
-#define DEF_HELPER_0_2(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv arg1, TCGv arg2) \
-{ \
- tcg_gen_helper_0_2(helper_##name, arg1, arg2); \
-}
-#define DEF_HELPER_0_3(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name( \
- TCGv arg1, TCGv arg2, TCGv arg3) \
-{ \
- tcg_gen_helper_0_3(helper_##name, arg1, arg2, arg3); \
-}
-#define DEF_HELPER_1_0(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret) \
-{ \
- tcg_gen_helper_1_0(helper_##name, ret); \
-}
-#define DEF_HELPER_1_1(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, TCGv arg1) \
-{ \
- tcg_gen_helper_1_1(helper_##name, ret, arg1); \
-}
-#define DEF_HELPER_1_2(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, TCGv arg1, TCGv arg2) \
-{ \
- tcg_gen_helper_1_2(helper_##name, ret, arg1, arg2); \
-}
-#define DEF_HELPER_1_3(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, \
- TCGv arg1, TCGv arg2, TCGv arg3) \
-{ \
- tcg_gen_helper_1_3(helper_##name, ret, arg1, arg2, arg3); \
-}
-#define DEF_HELPER_1_4(name, ret, args) \
-DEF_HELPER(name, ret, args) \
-static inline void gen_helper_##name(TCGv ret, \
- TCGv arg1, TCGv arg2, TCGv arg3, TCGv arg4) \
-{ \
- tcg_gen_helper_1_4(helper_##name, ret, arg1, arg2, arg3, arg4); \
-}
-#else /* !GEN_HELPER */
-#define DEF_HELPER_0_0 DEF_HELPER
-#define DEF_HELPER_0_1 DEF_HELPER
-#define DEF_HELPER_0_2 DEF_HELPER
-#define DEF_HELPER_0_3 DEF_HELPER
-#define DEF_HELPER_1_0 DEF_HELPER
-#define DEF_HELPER_1_1 DEF_HELPER
-#define DEF_HELPER_1_2 DEF_HELPER
-#define DEF_HELPER_1_3 DEF_HELPER
-#define DEF_HELPER_1_4 DEF_HELPER
-#define HELPER(x) glue(helper_,x)
-#endif
+DEF_HELPER_1(bitrev, i32, i32)
+DEF_HELPER_1(ff1, i32, i32)
+DEF_HELPER_2(sats, i32, i32, i32)
+DEF_HELPER_2(divu, void, env, i32)
+DEF_HELPER_2(divs, void, env, i32)
+DEF_HELPER_3(addx_cc, i32, env, i32, i32)
+DEF_HELPER_3(subx_cc, i32, env, i32, i32)
+DEF_HELPER_3(shl_cc, i32, env, i32, i32)
+DEF_HELPER_3(shr_cc, i32, env, i32, i32)
+DEF_HELPER_3(sar_cc, i32, env, i32, i32)
+DEF_HELPER_2(xflag_lt, i32, i32, i32)
+DEF_HELPER_2(set_sr, void, env, i32)
+DEF_HELPER_3(movec, void, env, i32, i32)
-DEF_HELPER_1_1(bitrev, uint32_t, (uint32_t))
-DEF_HELPER_1_1(ff1, uint32_t, (uint32_t))
-DEF_HELPER_1_2(sats, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_0_2(divu, void, (CPUState *, uint32_t))
-DEF_HELPER_0_2(divs, void, (CPUState *, uint32_t))
-DEF_HELPER_1_3(addx_cc, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(subx_cc, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(shl_cc, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(shr_cc, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(sar_cc, uint32_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_2(xflag_lt, uint32_t, (uint32_t, uint32_t))
-DEF_HELPER_0_2(set_sr, void, (CPUState *, uint32_t))
-DEF_HELPER_0_3(movec, void, (CPUState *, uint32_t, uint32_t))
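/* Illustrative correspondence (a reader's sketch, not generated code; the
 * parameter name below is made up): a declaration such as
 *     DEF_HELPER_2(divu, void, env, i32)
 * keeps the C implementation signature of the old form,
 *     void HELPER(divu)(CPUState *env, uint32_t arg);
 * and, when the header is re-included with GEN_HELPER defined, also yields a
 * typed call generator used from the translator, e.g.
 *     gen_helper_divu(cpu_env, tcg_const_i32(1));
 */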
+DEF_HELPER_2(f64_to_i32, f32, env, f64)
+DEF_HELPER_2(f64_to_f32, f32, env, f64)
+DEF_HELPER_2(i32_to_f64, f64, env, i32)
+DEF_HELPER_2(f32_to_f64, f64, env, f32)
+DEF_HELPER_2(iround_f64, f64, env, f64)
+DEF_HELPER_2(itrunc_f64, f64, env, f64)
+DEF_HELPER_2(sqrt_f64, f64, env, f64)
+DEF_HELPER_1(abs_f64, f64, f64)
+DEF_HELPER_1(chs_f64, f64, f64)
+DEF_HELPER_3(add_f64, f64, env, f64, f64)
+DEF_HELPER_3(sub_f64, f64, env, f64, f64)
+DEF_HELPER_3(mul_f64, f64, env, f64, f64)
+DEF_HELPER_3(div_f64, f64, env, f64, f64)
+DEF_HELPER_3(sub_cmp_f64, f64, env, f64, f64)
+DEF_HELPER_2(compare_f64, i32, env, f64)
-DEF_HELPER_1_2(f64_to_i32, float32, (CPUState *, float64))
-DEF_HELPER_1_2(f64_to_f32, float32, (CPUState *, float64))
-DEF_HELPER_1_2(i32_to_f64, float64, (CPUState *, uint32_t))
-DEF_HELPER_1_2(f32_to_f64, float64, (CPUState *, float32))
-DEF_HELPER_1_2(iround_f64, float64, (CPUState *, float64))
-DEF_HELPER_1_2(itrunc_f64, float64, (CPUState *, float64))
-DEF_HELPER_1_2(sqrt_f64, float64, (CPUState *, float64))
-DEF_HELPER_1_1(abs_f64, float64, (float64))
-DEF_HELPER_1_1(chs_f64, float64, (float64))
-DEF_HELPER_1_3(add_f64, float64, (CPUState *, float64, float64))
-DEF_HELPER_1_3(sub_f64, float64, (CPUState *, float64, float64))
-DEF_HELPER_1_3(mul_f64, float64, (CPUState *, float64, float64))
-DEF_HELPER_1_3(div_f64, float64, (CPUState *, float64, float64))
-DEF_HELPER_1_3(sub_cmp_f64, float64, (CPUState *, float64, float64))
-DEF_HELPER_1_2(compare_f64, uint32_t, (CPUState *, float64))
+DEF_HELPER_3(mac_move, void, env, i32, i32)
+DEF_HELPER_3(macmulf, i64, env, i32, i32)
+DEF_HELPER_3(macmuls, i64, env, i32, i32)
+DEF_HELPER_3(macmulu, i64, env, i32, i32)
+DEF_HELPER_2(macsats, void, env, i32)
+DEF_HELPER_2(macsatu, void, env, i32)
+DEF_HELPER_2(macsatf, void, env, i32)
+DEF_HELPER_2(mac_set_flags, void, env, i32)
+DEF_HELPER_2(set_macsr, void, env, i32)
+DEF_HELPER_2(get_macf, i32, env, i64)
+DEF_HELPER_1(get_macs, i32, i64)
+DEF_HELPER_1(get_macu, i32, i64)
+DEF_HELPER_2(get_mac_extf, i32, env, i32)
+DEF_HELPER_2(get_mac_exti, i32, env, i32)
+DEF_HELPER_3(set_mac_extf, void, env, i32, i32)
+DEF_HELPER_3(set_mac_exts, void, env, i32, i32)
+DEF_HELPER_3(set_mac_extu, void, env, i32, i32)
-DEF_HELPER_0_3(mac_move, void, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(macmulf, uint64_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(macmuls, uint64_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_1_3(macmulu, uint64_t, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_0_2(macsats, void, (CPUState *, uint32_t))
-DEF_HELPER_0_2(macsatu, void, (CPUState *, uint32_t))
-DEF_HELPER_0_2(macsatf, void, (CPUState *, uint32_t))
-DEF_HELPER_0_2(mac_set_flags, void, (CPUState *, uint32_t))
-DEF_HELPER_0_2(set_macsr, void, (CPUState *, uint32_t))
-DEF_HELPER_1_2(get_macf, uint32_t, (CPUState *, uint64_t))
-DEF_HELPER_1_1(get_macs, uint32_t, (uint64_t))
-DEF_HELPER_1_1(get_macu, uint32_t, (uint64_t))
-DEF_HELPER_1_2(get_mac_extf, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_1_2(get_mac_exti, uint32_t, (CPUState *, uint32_t))
-DEF_HELPER_0_3(set_mac_extf, void, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_0_3(set_mac_exts, void, (CPUState *, uint32_t, uint32_t))
-DEF_HELPER_0_3(set_mac_extu, void, (CPUState *, uint32_t, uint32_t))
+DEF_HELPER_2(flush_flags, void, env, i32)
+DEF_HELPER_1(raise_exception, void, i32)
-DEF_HELPER_0_2(flush_flags, void, (CPUState *, uint32_t))
-DEF_HELPER_0_1(raise_exception, void, (uint32_t))
-
-#undef DEF_HELPER
-#undef DEF_HELPER_0_0
-#undef DEF_HELPER_0_1
-#undef DEF_HELPER_0_2
-#undef DEF_HELPER_0_3
-#undef DEF_HELPER_1_0
-#undef DEF_HELPER_1_1
-#undef DEF_HELPER_1_2
-#undef DEF_HELPER_1_3
-#undef DEF_HELPER_1_4
-#undef GEN_HELPER
-#undef DEF_HELPER
+#include "def-helper.h"
#include "tcg-op.h"
#include "qemu-log.h"
+#include "helpers.h"
#define GEN_HELPER 1
#include "helpers.h"
//#define DEBUG_DISPATCH 1
/* Fake floating point. */
-#define TCG_TYPE_F32 TCG_TYPE_I32
-#define TCG_TYPE_F64 TCG_TYPE_I64
#define tcg_gen_mov_f64 tcg_gen_mov_i64
-#define tcg_gen_qemu_ldf32 tcg_gen_qemu_ld32u
#define tcg_gen_qemu_ldf64 tcg_gen_qemu_ld64
-#define tcg_gen_qemu_stf32 tcg_gen_qemu_st32
#define tcg_gen_qemu_stf64 tcg_gen_qemu_st64
-#define gen_helper_pack_32_f32 tcg_gen_mov_i32
-#define gen_helper_pack_f32_32 tcg_gen_mov_i32
#define DEFO32(name, offset) static TCGv QREG_##name;
-#define DEFO64(name, offset) static TCGv QREG_##name;
-#define DEFF64(name, offset) static TCGv QREG_##name;
+#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
+#define DEFF64(name, offset) static TCGv_i64 QREG_##name;
#include "qregs.def"
#undef DEFO32
#undef DEFO64
#undef DEFF64
-static TCGv cpu_env;
+static TCGv_ptr cpu_env;
static char cpu_reg_names[3*8*3 + 5*4];
static TCGv cpu_dregs[8];
static TCGv cpu_aregs[8];
-static TCGv cpu_fregs[8];
-static TCGv cpu_macc[4];
+static TCGv_i64 cpu_fregs[8];
+static TCGv_i64 cpu_macc[4];
#define DREG(insn, pos) cpu_dregs[((insn) >> (pos)) & 7]
#define AREG(insn, pos) cpu_aregs[((insn) >> (pos)) & 7]
#define QREG_SP cpu_aregs[7]
static TCGv NULL_QREG;
-#define IS_NULL_QREG(t) (GET_TCGV(t) == GET_TCGV(NULL_QREG))
+#define IS_NULL_QREG(t) (TCGV_EQUAL(t, NULL_QREG))
/* Used to distinguish stores from bad addressing modes. */
static TCGv store_dummy;
char *p;
int i;
-#define DEFO32(name, offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, offsetof(CPUState, offset), #name);
-#define DEFO64(name, offset) QREG_##name = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0, offsetof(CPUState, offset), #name);
+#define DEFO32(name, offset) QREG_##name = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, offset), #name);
+#define DEFO64(name, offset) QREG_##name = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, offset), #name);
#define DEFF64(name, offset) DEFO64(name, offset)
#include "qregs.def"
#undef DEFO32
#undef DEFO64
#undef DEFF64
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
p = cpu_reg_names;
for (i = 0; i < 8; i++) {
sprintf(p, "D%d", i);
- cpu_dregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
+ cpu_dregs[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUM68KState, dregs[i]), p);
p += 3;
sprintf(p, "A%d", i);
- cpu_aregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
+ cpu_aregs[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUM68KState, aregs[i]), p);
p += 3;
sprintf(p, "F%d", i);
- cpu_fregs[i] = tcg_global_mem_new(TCG_TYPE_F64, TCG_AREG0,
+ cpu_fregs[i] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUM68KState, fregs[i]), p);
p += 3;
}
for (i = 0; i < 4; i++) {
sprintf(p, "ACC%d", i);
- cpu_macc[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_macc[i] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUM68KState, macc[i]), p);
p += 5;
}
- NULL_QREG = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -4, "NULL");
- store_dummy = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0, -8, "NULL");
+ NULL_QREG = tcg_global_mem_new(TCG_AREG0, -4, "NULL");
+ store_dummy = tcg_global_mem_new(TCG_AREG0, -8, "NULL");
-#define DEF_HELPER(name, ret, args) \
- tcg_register_helper(HELPER(name), #name);
+#define GEN_HELPER 2
#include "helpers.h"
}
struct TranslationBlock *tb;
int singlestep_enabled;
int is_mem;
- TCGv mactmp;
+ TCGv_i64 mactmp;
+ int done_mac;
} DisasContext;
#define DISAS_JUMP_NEXT 4
/* FIXME: Remove this. */
#define gen_im32(val) tcg_const_i32(val)
-#define QMODE_I32 TCG_TYPE_I32
-#define QMODE_I64 TCG_TYPE_I64
-#define QMODE_F32 TCG_TYPE_F32
-#define QMODE_F64 TCG_TYPE_F64
-static inline TCGv gen_new_qreg(int mode)
-{
- return tcg_temp_new(mode);
-}
-
/* Generate a load from the specified address. Narrow values are
sign extended to full register width. */
static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
TCGv tmp;
int index = IS_USER(s);
s->is_mem = 1;
+ tmp = tcg_temp_new_i32();
switch(opsize) {
case OS_BYTE:
- tmp = gen_new_qreg(QMODE_I32);
if (sign)
tcg_gen_qemu_ld8s(tmp, addr, index);
else
tcg_gen_qemu_ld8u(tmp, addr, index);
break;
case OS_WORD:
- tmp = gen_new_qreg(QMODE_I32);
if (sign)
tcg_gen_qemu_ld16s(tmp, addr, index);
else
tcg_gen_qemu_ld16u(tmp, addr, index);
break;
case OS_LONG:
- tmp = gen_new_qreg(QMODE_I32);
- tcg_gen_qemu_ld32u(tmp, addr, index);
- break;
case OS_SINGLE:
- tmp = gen_new_qreg(QMODE_F32);
- tcg_gen_qemu_ldf32(tmp, addr, index);
- break;
- case OS_DOUBLE:
- tmp = gen_new_qreg(QMODE_F64);
- tcg_gen_qemu_ldf64(tmp, addr, index);
+ tcg_gen_qemu_ld32u(tmp, addr, index);
break;
default:
qemu_assert(0, "bad load size");
return tmp;
}
+static inline TCGv_i64 gen_load64(DisasContext * s, TCGv addr)
+{
+ TCGv_i64 tmp;
+ int index = IS_USER(s);
+ s->is_mem = 1;
+ tmp = tcg_temp_new_i64();
+ tcg_gen_qemu_ldf64(tmp, addr, index);
+ gen_throws_exception = gen_last_qop;
+ return tmp;
+}
+
/* Generate a store. */
static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
{
tcg_gen_qemu_st16(val, addr, index);
break;
case OS_LONG:
- tcg_gen_qemu_st32(val, addr, index);
- break;
case OS_SINGLE:
- tcg_gen_qemu_stf32(val, addr, index);
- break;
- case OS_DOUBLE:
- tcg_gen_qemu_stf64(val, addr, index);
+ tcg_gen_qemu_st32(val, addr, index);
break;
default:
qemu_assert(0, "bad store size");
gen_throws_exception = gen_last_qop;
}
+static inline void gen_store64(DisasContext *s, TCGv addr, TCGv_i64 val)
+{
+ int index = IS_USER(s);
+ s->is_mem = 1;
+ tcg_gen_qemu_stf64(val, addr, index);
+ gen_throws_exception = gen_last_qop;
+}
+
typedef enum {
EA_STORE,
EA_LOADU,
} else {
bd = 0;
}
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
if ((ext & 0x44) == 0) {
/* pre-index */
add = gen_addr_index(ext, tmp);
}
} else {
/* brief extension word format */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
add = gen_addr_index(ext, tmp);
if (!IS_NULL_QREG(base)) {
tcg_gen_add_i32(tmp, add, base);
switch (opsize) {
case OS_BYTE:
tcg_gen_andi_i32(reg, reg, 0xffffff00);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_ext8u_i32(tmp, val);
tcg_gen_or_i32(reg, reg, tmp);
break;
case OS_WORD:
tcg_gen_andi_i32(reg, reg, 0xffff0000);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_ext16u_i32(tmp, val);
tcg_gen_or_i32(reg, reg, tmp);
break;
case OS_LONG:
- tcg_gen_mov_i32(reg, val);
- break;
case OS_SINGLE:
- gen_helper_pack_32_f32(reg, val);
+ tcg_gen_mov_i32(reg, val);
break;
default:
qemu_assert(0, "Bad operand size");
switch (opsize) {
case OS_BYTE:
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
if (sign)
tcg_gen_ext8s_i32(tmp, val);
else
tcg_gen_ext8u_i32(tmp, val);
break;
case OS_WORD:
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
if (sign)
tcg_gen_ext16s_i32(tmp, val);
else
tcg_gen_ext16u_i32(tmp, val);
break;
case OS_LONG:
- tmp = val;
- break;
case OS_SINGLE:
- tmp = gen_new_qreg(QMODE_F32);
- gen_helper_pack_f32_32(tmp, val);
+ tmp = val;
break;
default:
qemu_assert(0, "Bad operand size");
return AREG(insn, 0);
case 4: /* Indirect predecrement. */
reg = AREG(insn, 0);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
return tmp;
case 5: /* Indirect displacement. */
reg = AREG(insn, 0);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
ext = lduw_code(s->pc);
s->pc += 2;
tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
offset = read_im32(s);
return gen_im32(offset);
case 2: /* pc displacement */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
offset = s->pc;
offset += ldsw_code(s->pc);
s->pc += 2;
case 1: /* F */
break;
case 2: /* HI (!C && !Z) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 3: /* LS (C || Z) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
break;
case 4: /* CC (!C) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 5: /* CS (C) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
break;
case 6: /* NE (!Z) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 7: /* EQ (Z) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
break;
case 8: /* VC (!V) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 9: /* VS (V) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
break;
case 10: /* PL (!N) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 11: /* MI (N) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
break;
case 12: /* GE (!(N ^ V)) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
assert(CCF_V == (CCF_N >> 2));
tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 13: /* LT (N ^ V) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
assert(CCF_V == (CCF_N >> 2));
tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
break;
case 14: /* GT (!(Z || (N ^ V))) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
assert(CCF_V == (CCF_N >> 2));
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
tcg_gen_shri_i32(tmp, tmp, 2);
tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
break;
case 15: /* LE (Z || (N ^ V)) */
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
assert(CCF_V == (CCF_N >> 2));
tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
tcg_gen_shri_i32(tmp, tmp, 2);
sign = (insn & 0x100) != 0;
reg = DREG(insn, 9);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
if (sign)
tcg_gen_ext16s_i32(tmp, reg);
else
gen_helper_divu(cpu_env, tcg_const_i32(1));
}
- tmp = gen_new_qreg(QMODE_I32);
- src = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
+ src = tcg_temp_new();
tcg_gen_ext16u_i32(tmp, QREG_DIV1);
tcg_gen_shli_i32(src, QREG_DIV2, 16);
tcg_gen_or_i32(reg, tmp, src);
add = (insn & 0x4000) != 0;
reg = DREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
if (insn & 0x100) {
SRC_EA(tmp, OS_LONG, 0, &addr);
src = reg;
op = (insn >> 6) & 3;
SRC_EA(src1, opsize, 0, op ? &addr: NULL);
src2 = DREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
gen_flush_flags(s);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
if (opsize == OS_BYTE)
tcg_gen_andi_i32(tmp, src2, 7);
else
tcg_gen_andi_i32(tmp, src2, 31);
src2 = tmp;
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_shr_i32(tmp, src1, src2);
tcg_gen_andi_i32(tmp, tmp, 1);
tcg_gen_shli_i32(tmp, tmp, 2);
{
TCGv tmp;
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_subi_i32(tmp, QREG_SP, 4);
gen_store(s, OS_LONG, tmp, val);
tcg_gen_mov_i32(QREG_SP, tmp);
gen_addr_fault(s);
return;
}
- addr = gen_new_qreg(QMODE_I32);
+ addr = tcg_temp_new();
tcg_gen_mov_i32(addr, tmp);
is_load = ((insn & 0x0400) != 0);
for (i = 0; i < 16; i++, mask >>= 1) {
bitnum &= 31;
mask = 1 << bitnum;
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
assert (CCF_Z == (1 << 2));
if (bitnum > 2)
tcg_gen_shri_i32(tmp, src1, bitnum - 2);
op = (insn >> 9) & 7;
SRC_EA(src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
im = read_im32(s);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
switch (op) {
case 0: /* ori */
tcg_gen_ori_i32(dest, src1, im);
TCGv dest;
gen_flush_flags(s);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
tcg_gen_shli_i32(dest, QREG_CC_X, 4);
tcg_gen_or_i32(dest, dest, QREG_CC_DEST);
return dest;
TCGv src1;
reg = DREG(insn, 0);
- src1 = gen_new_qreg(QMODE_I32);
+ src1 = tcg_temp_new();
tcg_gen_mov_i32(src1, reg);
tcg_gen_neg_i32(reg, src1);
s->cc_op = CC_OP_SUB;
s->cc_op = CC_OP_FLAGS;
if ((insn & 0x38) == 0)
{
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
reg = DREG(insn, 0);
tcg_gen_andi_i32(QREG_CC_DEST, reg, 0xf);
tcg_gen_shri_i32(tmp, reg, 4);
TCGv src2;
TCGv reg;
- src1 = gen_new_qreg(QMODE_I32);
- src2 = gen_new_qreg(QMODE_I32);
+ src1 = tcg_temp_new();
+ src2 = tcg_temp_new();
reg = DREG(insn, 0);
tcg_gen_shli_i32(src1, reg, 16);
tcg_gen_shri_i32(src2, reg, 16);
reg = DREG(insn, 0);
op = (insn >> 6) & 7;
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
if (op == 3)
tcg_gen_ext16s_i32(tmp, reg);
else
TCGv src1;
TCGv addr;
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
SRC_EA(src1, OS_BYTE, 1, &addr);
gen_logic_cc(s, src1);
tcg_gen_ori_i32(dest, src1, 0x80);
}
reg = DREG(ext, 12);
SRC_EA(src1, OS_LONG, 0, NULL);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
tcg_gen_mul_i32(dest, src1, reg);
tcg_gen_mov_i32(reg, dest);
/* Unlike m68k, coldfire always clears the overflow bit. */
offset = ldsw_code(s->pc);
s->pc += 2;
reg = AREG(insn, 0);
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_subi_i32(tmp, QREG_SP, 4);
gen_store(s, OS_LONG, tmp, reg);
if ((insn & 7) != 7)
TCGv reg;
TCGv tmp;
- src = gen_new_qreg(QMODE_I32);
+ src = tcg_temp_new();
reg = AREG(insn, 0);
tcg_gen_mov_i32(src, reg);
tmp = gen_load(s, OS_LONG, src, 0);
val = (insn >> 9) & 7;
if (val == 0)
val = 8;
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
tcg_gen_mov_i32(dest, src1);
if ((insn & 0x38) == 0x08) {
/* Don't update condition codes if the destination is an
TCGv addr;
reg = DREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
if (insn & 0x100) {
SRC_EA(src, OS_LONG, 0, &addr);
tcg_gen_or_i32(dest, src, reg);
}
SRC_EA(src, opsize, 1, NULL);
reg = DREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
tcg_gen_sub_i32(dest, reg, src);
gen_update_cc_add(dest, src);
}
}
SRC_EA(src, opsize, 1, NULL);
reg = AREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
tcg_gen_sub_i32(dest, reg, src);
gen_update_cc_add(dest, src);
s->cc_op = CC_OP_SUB;
SRC_EA(src, OS_LONG, 0, &addr);
reg = DREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
tcg_gen_xor_i32(dest, src, reg);
gen_logic_cc(s, dest);
DEST_EA(insn, OS_LONG, dest, &addr);
TCGv addr;
reg = DREG(insn, 9);
- dest = gen_new_qreg(QMODE_I32);
+ dest = tcg_temp_new();
if (insn & 0x100) {
SRC_EA(src, OS_LONG, 0, &addr);
tcg_gen_and_i32(dest, src, reg);
TCGv sr;
ccr = gen_get_ccr(s);
- sr = gen_new_qreg(QMODE_I32);
+ sr = tcg_temp_new();
tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
tcg_gen_or_i32(sr, sr, ccr);
return sr;
DISAS_INSN(fpu)
{
uint16_t ext;
+ int32_t offset;
int opmode;
- TCGv src;
- TCGv dest;
- TCGv res;
+ TCGv_i64 src;
+ TCGv_i64 dest;
+ TCGv_i64 res;
+ TCGv tmp32;
int round;
+ int set_dest;
int opsize;
ext = lduw_code(s->pc);
goto undef;
case 3: /* fmove out */
src = FREG(ext, 7);
+ tmp32 = tcg_temp_new_i32();
/* fmove */
/* ??? TODO: Proper behavior on overflow. */
switch ((ext >> 10) & 7) {
case 0:
opsize = OS_LONG;
- res = gen_new_qreg(QMODE_I32);
- gen_helper_f64_to_i32(res, cpu_env, src);
+ gen_helper_f64_to_i32(tmp32, cpu_env, src);
break;
case 1:
opsize = OS_SINGLE;
- res = gen_new_qreg(QMODE_F32);
- gen_helper_f64_to_f32(res, cpu_env, src);
+ gen_helper_f64_to_f32(tmp32, cpu_env, src);
break;
case 4:
opsize = OS_WORD;
- res = gen_new_qreg(QMODE_I32);
- gen_helper_f64_to_i32(res, cpu_env, src);
- break;
- case 5:
- opsize = OS_DOUBLE;
- res = src;
+ gen_helper_f64_to_i32(tmp32, cpu_env, src);
break;
+ case 5: /* OS_DOUBLE */
+ tcg_gen_mov_i32(tmp32, AREG(insn, 0));
+ switch (insn >> 3) {
+ case 2:
+ case 3:
+ case 4:
+ tcg_gen_addi_i32(tmp32, tmp32, -8);
+ break;
+ case 5:
+ offset = ldsw_code(s->pc);
+ s->pc += 2;
+ tcg_gen_addi_i32(tmp32, tmp32, offset);
+ break;
+ default:
+ goto undef;
+ }
+ gen_store64(s, tmp32, src);
+ switch (insn >> 3) {
+ case 3:
+ tcg_gen_addi_i32(tmp32, tmp32, 8);
+ tcg_gen_mov_i32(AREG(insn, 0), tmp32);
+ break;
+ case 4:
+ tcg_gen_mov_i32(AREG(insn, 0), tmp32);
+ break;
+ }
+ tcg_temp_free_i32(tmp32);
+ return;
case 6:
opsize = OS_BYTE;
- res = gen_new_qreg(QMODE_I32);
- gen_helper_f64_to_i32(res, cpu_env, src);
+ gen_helper_f64_to_i32(tmp32, cpu_env, src);
break;
default:
goto undef;
}
- DEST_EA(insn, opsize, res, NULL);
+ DEST_EA(insn, opsize, tmp32, NULL);
+ tcg_temp_free_i32(tmp32);
return;
case 4: /* fmove to control register. */
switch ((ext >> 10) & 7) {
switch ((ext >> 10) & 7) {
case 4: /* FPCR */
/* Not implemented. Always return zero. */
- res = gen_im32(0);
+ tmp32 = gen_im32(0);
break;
case 1: /* FPIAR */
case 2: /* FPSR */
(ext >> 10) & 7);
goto undef;
}
- DEST_EA(insn, OS_LONG, res, NULL);
+ DEST_EA(insn, OS_LONG, tmp32, NULL);
break;
case 6: /* fmovem */
case 7:
int i;
if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
goto undef;
- src = gen_lea(s, insn, OS_LONG);
- if (IS_NULL_QREG(src)) {
+ tmp32 = gen_lea(s, insn, OS_LONG);
+ if (IS_NULL_QREG(tmp32)) {
gen_addr_fault(s);
return;
}
- addr = gen_new_qreg(QMODE_I32);
- tcg_gen_mov_i32(addr, src);
+ addr = tcg_temp_new_i32();
+ tcg_gen_mov_i32(addr, tmp32);
mask = 0x80;
for (i = 0; i < 8; i++) {
if (ext & mask) {
}
mask >>= 1;
}
+ tcg_temp_free_i32(tmp32);
}
return;
}
if (ext & (1 << 14)) {
- TCGv tmp;
-
/* Source effective address. */
switch ((ext >> 10) & 7) {
case 0: opsize = OS_LONG; break;
default:
goto undef;
}
- SRC_EA(tmp, opsize, 1, NULL);
if (opsize == OS_DOUBLE) {
- src = tmp;
+ tmp32 = tcg_temp_new_i32();
+ tcg_gen_mov_i32(tmp32, AREG(insn, 0));
+ switch (insn >> 3) {
+ case 2:
+ case 3:
+ case 4:
+ tcg_gen_addi_i32(tmp32, tmp32, -8);
+ break;
+ case 5:
+ offset = ldsw_code(s->pc);
+ s->pc += 2;
+ tcg_gen_addi_i32(tmp32, tmp32, offset);
+ break;
+ case 7:
+ offset = ldsw_code(s->pc);
+ offset += s->pc - 2;
+ s->pc += 2;
+ tcg_gen_addi_i32(tmp32, tmp32, offset);
+ break;
+ default:
+ goto undef;
+ }
+ src = gen_load64(s, tmp32);
+ switch (insn >> 3) {
+ case 3:
+ tcg_gen_addi_i32(tmp32, tmp32, 8);
+ tcg_gen_mov_i32(AREG(insn, 0), tmp32);
+ break;
+ case 4:
+ tcg_gen_mov_i32(AREG(insn, 0), tmp32);
+ break;
+ }
+ tcg_temp_free_i32(tmp32);
} else {
- src = gen_new_qreg(QMODE_F64);
+ SRC_EA(tmp32, opsize, 1, NULL);
+ src = tcg_temp_new_i64();
switch (opsize) {
case OS_LONG:
case OS_WORD:
case OS_BYTE:
- gen_helper_i32_to_f64(src, cpu_env, tmp);
+ gen_helper_i32_to_f64(src, cpu_env, tmp32);
break;
case OS_SINGLE:
- gen_helper_f32_to_f64(src, cpu_env, tmp);
+ gen_helper_f32_to_f64(src, cpu_env, tmp32);
break;
}
}
src = FREG(ext, 10);
}
dest = FREG(ext, 7);
- res = gen_new_qreg(QMODE_F64);
+ res = tcg_temp_new_i64();
if (opmode != 0x3a)
tcg_gen_mov_f64(res, dest);
round = 1;
+ set_dest = 1;
switch (opmode) {
case 0: case 0x40: case 0x44: /* fmove */
tcg_gen_mov_f64(res, src);
break;
case 0x38: /* fcmp */
gen_helper_sub_cmp_f64(res, cpu_env, res, src);
- dest = NULL_QREG;
+ set_dest = 0;
round = 0;
break;
case 0x3a: /* ftst */
tcg_gen_mov_f64(res, src);
- dest = NULL_QREG;
+ set_dest = 0;
round = 0;
break;
default:
goto undef;
}
+ if (ext & (1 << 14)) {
+ tcg_temp_free_i64(src);
+ }
if (round) {
if (opmode & 0x40) {
if ((opmode & 0x4) != 0)
}
}
if (round) {
- TCGv tmp;
-
- tmp = gen_new_qreg(QMODE_F32);
+ TCGv tmp = tcg_temp_new_i32();
gen_helper_f64_to_f32(tmp, cpu_env, res);
gen_helper_f32_to_f64(res, cpu_env, tmp);
+ tcg_temp_free_i32(tmp);
}
tcg_gen_mov_f64(QREG_FP_RESULT, res);
- if (!IS_NULL_QREG(dest)) {
+ if (set_dest) {
tcg_gen_mov_f64(dest, res);
}
+ tcg_temp_free_i64(res);
return;
undef:
+ /* FIXME: Is this right for offset addressing modes? */
s->pc -= 2;
disas_undef_fpu(s, insn);
}
l1 = gen_new_label();
/* TODO: Raise BSUN exception. */
- flag = gen_new_qreg(QMODE_I32);
+ flag = tcg_temp_new();
gen_helper_compare_f64(flag, cpu_env, QREG_FP_RESULT);
/* Jump to l1 if condition is true. */
switch (insn & 0xf) {
static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
{
- TCGv tmp = gen_new_qreg(QMODE_I32);
+ TCGv tmp = tcg_temp_new();
if (s->env->macsr & MACSR_FI) {
if (upper)
tcg_gen_andi_i32(tmp, val, 0xffff0000);
int dual;
TCGv saved_flags;
- if (IS_NULL_QREG(s->mactmp))
- s->mactmp = tcg_temp_new(TCG_TYPE_I64);
+ if (!s->done_mac) {
+ s->mactmp = tcg_temp_new_i64();
+ s->done_mac = 1;
+ }
ext = lduw_code(s->pc);
s->pc += 2;
if (insn & 0x30) {
/* MAC with load. */
tmp = gen_lea(s, insn, OS_LONG);
- addr = gen_new_qreg(QMODE_I32);
+ addr = tcg_temp_new();
tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
/* Load the value now to ensure correct exception behavior.
Perform writeback after reading the MAC inputs. */
if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
/* Skip the multiply if we know we will ignore it. */
l1 = gen_new_label();
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
gen_op_jmp_nz32(tmp, l1);
}
if (dual) {
/* Save the overflow flag from the multiply. */
- saved_flags = gen_new_qreg(QMODE_I32);
+ saved_flags = tcg_temp_new();
tcg_gen_mov_i32(saved_flags, QREG_MACSR);
} else {
saved_flags = NULL_QREG;
if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
/* Skip the accumulate if the value is already saturated. */
l1 = gen_new_label();
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
gen_op_jmp_nz32(tmp, l1);
}
if ((s->env->macsr & MACSR_OMC) != 0) {
/* Skip the accumulate if the value is already saturated. */
l1 = gen_new_label();
- tmp = gen_new_qreg(QMODE_I32);
+ tmp = tcg_temp_new();
gen_op_and32(tmp, QREG_MACSR, gen_im32(MACSR_PAV0 << acc));
gen_op_jmp_nz32(tmp, l1);
}
DISAS_INSN(from_mac)
{
TCGv rx;
- TCGv acc;
+ TCGv_i64 acc;
int accnum;
rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
accnum = (insn >> 9) & 3;
acc = MACREG(accnum);
if (s->env->macsr & MACSR_FI) {
- gen_helper_get_macf(cpu_env, rx, acc);
+ gen_helper_get_macf(rx, cpu_env, acc);
} else if ((s->env->macsr & MACSR_OMC) == 0) {
tcg_gen_trunc_i64_i32(rx, acc);
} else if (s->env->macsr & MACSR_SU) {
DISAS_INSN(to_mac)
{
- TCGv acc;
+ TCGv_i64 acc;
TCGv val;
int accnum;
accnum = (insn >> 9) & 3;
dc->fpcr = env->fpcr;
dc->user = (env->sr & SR_S) == 0;
dc->is_mem = 0;
- dc->mactmp = NULL_QREG;
+ dc->done_mac = 0;
lj = -1;
num_insns = 0;
max_insns = tb->cflags & CF_COUNT_MASK;
-#ifndef DEF_HELPER
-#define DEF_HELPER(ret, name, params) ret name params;
-#endif
+#include "def-helper.h"
+
+/* FIXME: We should rename the helper functions and remove this hack. */
+#undef HELPER
+#define HELPER(name) do_##name
-DEF_HELPER(void, do_raise_exception_err, (int excp, int err))
-DEF_HELPER(void, do_raise_exception, (int excp))
-DEF_HELPER(void, do_interrupt_restart, (void))
+
+DEF_HELPER_2(raise_exception_err, void, i32, int)
+DEF_HELPER_1(raise_exception, void, i32)
+DEF_HELPER_0(interrupt_restart, void)
#ifdef TARGET_MIPS64
-DEF_HELPER(target_ulong, do_ldl, (target_ulong t0, target_ulong t1, int mem_idx))
-DEF_HELPER(target_ulong, do_ldr, (target_ulong t0, target_ulong t1, int mem_idx))
-DEF_HELPER(void, do_sdl, (target_ulong t0, target_ulong t1, int mem_idx))
-DEF_HELPER(void, do_sdr, (target_ulong t0, target_ulong t1, int mem_idx))
+DEF_HELPER_3(ldl, tl, tl, tl, int)
+DEF_HELPER_3(ldr, tl, tl, tl, int)
+DEF_HELPER_3(sdl, void, tl, tl, int)
+DEF_HELPER_3(sdr, void, tl, tl, int)
#endif
-DEF_HELPER(target_ulong, do_lwl, (target_ulong t0, target_ulong t1, int mem_idx))
-DEF_HELPER(target_ulong, do_lwr, (target_ulong t0, target_ulong t1, int mem_idx))
-DEF_HELPER(void, do_swl, (target_ulong t0, target_ulong t1, int mem_idx))
-DEF_HELPER(void, do_swr, (target_ulong t0, target_ulong t1, int mem_idx))
+DEF_HELPER_3(lwl, tl, tl, tl, int)
+DEF_HELPER_3(lwr, tl, tl, tl, int)
+DEF_HELPER_3(swl, void, tl, tl, int)
+DEF_HELPER_3(swr, void, tl, tl, int)
-DEF_HELPER(target_ulong, do_clo, (target_ulong t0))
-DEF_HELPER(target_ulong, do_clz, (target_ulong t0))
+DEF_HELPER_1(clo, tl, tl)
+DEF_HELPER_1(clz, tl, tl)
#ifdef TARGET_MIPS64
-DEF_HELPER(target_ulong, do_dclo, (target_ulong t0))
-DEF_HELPER(target_ulong, do_dclz, (target_ulong t0))
-DEF_HELPER(void, do_dmult, (target_ulong t0, target_ulong t1))
-DEF_HELPER(void, do_dmultu, (target_ulong t0, target_ulong t1))
+DEF_HELPER_1(dclo, tl, tl)
+DEF_HELPER_1(dclz, tl, tl)
+DEF_HELPER_2(dmult, void, tl, tl)
+DEF_HELPER_2(dmultu, void, tl, tl)
#endif
-DEF_HELPER(target_ulong, do_muls, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_mulsu, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_macc, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_maccu, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_msac, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_msacu, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_mulhi, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_mulhiu, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_mulshi, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_mulshiu, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_macchi, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_macchiu, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_msachi, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_msachiu, (target_ulong t0, target_ulong t1))
+DEF_HELPER_2(muls, tl, tl, tl)
+DEF_HELPER_2(mulsu, tl, tl, tl)
+DEF_HELPER_2(macc, tl, tl, tl)
+DEF_HELPER_2(maccu, tl, tl, tl)
+DEF_HELPER_2(msac, tl, tl, tl)
+DEF_HELPER_2(msacu, tl, tl, tl)
+DEF_HELPER_2(mulhi, tl, tl, tl)
+DEF_HELPER_2(mulhiu, tl, tl, tl)
+DEF_HELPER_2(mulshi, tl, tl, tl)
+DEF_HELPER_2(mulshiu, tl, tl, tl)
+DEF_HELPER_2(macchi, tl, tl, tl)
+DEF_HELPER_2(macchiu, tl, tl, tl)
+DEF_HELPER_2(msachi, tl, tl, tl)
+DEF_HELPER_2(msachiu, tl, tl, tl)
#ifndef CONFIG_USER_ONLY
/* CP0 helpers */
-DEF_HELPER(target_ulong, do_mfc0_mvpcontrol, (void))
-DEF_HELPER(target_ulong, do_mfc0_mvpconf0, (void))
-DEF_HELPER(target_ulong, do_mfc0_mvpconf1, (void))
-DEF_HELPER(target_ulong, do_mfc0_random, (void))
-DEF_HELPER(target_ulong, do_mfc0_tcstatus, (void))
-DEF_HELPER(target_ulong, do_mftc0_tcstatus, (void))
-DEF_HELPER(target_ulong, do_mfc0_tcbind, (void))
-DEF_HELPER(target_ulong, do_mftc0_tcbind, (void))
-DEF_HELPER(target_ulong, do_mfc0_tcrestart, (void))
-DEF_HELPER(target_ulong, do_mftc0_tcrestart, (void))
-DEF_HELPER(target_ulong, do_mfc0_tchalt, (void))
-DEF_HELPER(target_ulong, do_mftc0_tchalt, (void))
-DEF_HELPER(target_ulong, do_mfc0_tccontext, (void))
-DEF_HELPER(target_ulong, do_mftc0_tccontext, (void))
-DEF_HELPER(target_ulong, do_mfc0_tcschedule, (void))
-DEF_HELPER(target_ulong, do_mftc0_tcschedule, (void))
-DEF_HELPER(target_ulong, do_mfc0_tcschefback, (void))
-DEF_HELPER(target_ulong, do_mftc0_tcschefback, (void))
-DEF_HELPER(target_ulong, do_mfc0_count, (void))
-DEF_HELPER(target_ulong, do_mftc0_entryhi, (void))
-DEF_HELPER(target_ulong, do_mftc0_status, (void))
-DEF_HELPER(target_ulong, do_mfc0_lladdr, (void))
-DEF_HELPER(target_ulong, do_mfc0_watchlo, (uint32_t sel))
-DEF_HELPER(target_ulong, do_mfc0_watchhi, (uint32_t sel))
-DEF_HELPER(target_ulong, do_mfc0_debug, (void))
-DEF_HELPER(target_ulong, do_mftc0_debug, (void))
+DEF_HELPER_0(mfc0_mvpcontrol, tl)
+DEF_HELPER_0(mfc0_mvpconf0, tl)
+DEF_HELPER_0(mfc0_mvpconf1, tl)
+DEF_HELPER_0(mfc0_random, tl)
+DEF_HELPER_0(mfc0_tcstatus, tl)
+DEF_HELPER_0(mftc0_tcstatus, tl)
+DEF_HELPER_0(mfc0_tcbind, tl)
+DEF_HELPER_0(mftc0_tcbind, tl)
+DEF_HELPER_0(mfc0_tcrestart, tl)
+DEF_HELPER_0(mftc0_tcrestart, tl)
+DEF_HELPER_0(mfc0_tchalt, tl)
+DEF_HELPER_0(mftc0_tchalt, tl)
+DEF_HELPER_0(mfc0_tccontext, tl)
+DEF_HELPER_0(mftc0_tccontext, tl)
+DEF_HELPER_0(mfc0_tcschedule, tl)
+DEF_HELPER_0(mftc0_tcschedule, tl)
+DEF_HELPER_0(mfc0_tcschefback, tl)
+DEF_HELPER_0(mftc0_tcschefback, tl)
+DEF_HELPER_0(mfc0_count, tl)
+DEF_HELPER_0(mftc0_entryhi, tl)
+DEF_HELPER_0(mftc0_status, tl)
+DEF_HELPER_0(mfc0_lladdr, tl)
+DEF_HELPER_1(mfc0_watchlo, tl, i32)
+DEF_HELPER_1(mfc0_watchhi, tl, i32)
+DEF_HELPER_0(mfc0_debug, tl)
+DEF_HELPER_0(mftc0_debug, tl)
#ifdef TARGET_MIPS64
-DEF_HELPER(target_ulong, do_dmfc0_tcrestart, (void))
-DEF_HELPER(target_ulong, do_dmfc0_tchalt, (void))
-DEF_HELPER(target_ulong, do_dmfc0_tccontext, (void))
-DEF_HELPER(target_ulong, do_dmfc0_tcschedule, (void))
-DEF_HELPER(target_ulong, do_dmfc0_tcschefback, (void))
-DEF_HELPER(target_ulong, do_dmfc0_lladdr, (void))
-DEF_HELPER(target_ulong, do_dmfc0_watchlo, (uint32_t sel))
+DEF_HELPER_0(dmfc0_tcrestart, tl)
+DEF_HELPER_0(dmfc0_tchalt, tl)
+DEF_HELPER_0(dmfc0_tccontext, tl)
+DEF_HELPER_0(dmfc0_tcschedule, tl)
+DEF_HELPER_0(dmfc0_tcschefback, tl)
+DEF_HELPER_0(dmfc0_lladdr, tl)
+DEF_HELPER_1(dmfc0_watchlo, tl, i32)
#endif /* TARGET_MIPS64 */
-DEF_HELPER(void, do_mtc0_index, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_mvpcontrol, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_vpecontrol, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_vpeconf0, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_vpeconf1, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_yqmask, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_vpeopt, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_entrylo0, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tcstatus, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tcstatus, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tcbind, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tcbind, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tcrestart, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tcrestart, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tchalt, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tchalt, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tccontext, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tccontext, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tcschedule, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tcschedule, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_tcschefback, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_tcschefback, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_entrylo1, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_context, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_pagemask, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_pagegrain, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_wired, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_srsconf0, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_srsconf1, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_srsconf2, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_srsconf3, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_srsconf4, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_hwrena, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_count, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_entryhi, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_entryhi, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_compare, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_status, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_status, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_intctl, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_srsctl, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_cause, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_ebase, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_config0, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_config2, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_watchlo, (target_ulong t0, uint32_t sel))
-DEF_HELPER(void, do_mtc0_watchhi, (target_ulong t0, uint32_t sel))
-DEF_HELPER(void, do_mtc0_xcontext, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_framemask, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_debug, (target_ulong t0))
-DEF_HELPER(void, do_mttc0_debug, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_performance0, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_taglo, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_datalo, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_taghi, (target_ulong t0))
-DEF_HELPER(void, do_mtc0_datahi, (target_ulong t0))
+DEF_HELPER_1(mtc0_index, void, tl)
+DEF_HELPER_1(mtc0_mvpcontrol, void, tl)
+DEF_HELPER_1(mtc0_vpecontrol, void, tl)
+DEF_HELPER_1(mtc0_vpeconf0, void, tl)
+DEF_HELPER_1(mtc0_vpeconf1, void, tl)
+DEF_HELPER_1(mtc0_yqmask, void, tl)
+DEF_HELPER_1(mtc0_vpeopt, void, tl)
+DEF_HELPER_1(mtc0_entrylo0, void, tl)
+DEF_HELPER_1(mtc0_tcstatus, void, tl)
+DEF_HELPER_1(mttc0_tcstatus, void, tl)
+DEF_HELPER_1(mtc0_tcbind, void, tl)
+DEF_HELPER_1(mttc0_tcbind, void, tl)
+DEF_HELPER_1(mtc0_tcrestart, void, tl)
+DEF_HELPER_1(mttc0_tcrestart, void, tl)
+DEF_HELPER_1(mtc0_tchalt, void, tl)
+DEF_HELPER_1(mttc0_tchalt, void, tl)
+DEF_HELPER_1(mtc0_tccontext, void, tl)
+DEF_HELPER_1(mttc0_tccontext, void, tl)
+DEF_HELPER_1(mtc0_tcschedule, void, tl)
+DEF_HELPER_1(mttc0_tcschedule, void, tl)
+DEF_HELPER_1(mtc0_tcschefback, void, tl)
+DEF_HELPER_1(mttc0_tcschefback, void, tl)
+DEF_HELPER_1(mtc0_entrylo1, void, tl)
+DEF_HELPER_1(mtc0_context, void, tl)
+DEF_HELPER_1(mtc0_pagemask, void, tl)
+DEF_HELPER_1(mtc0_pagegrain, void, tl)
+DEF_HELPER_1(mtc0_wired, void, tl)
+DEF_HELPER_1(mtc0_srsconf0, void, tl)
+DEF_HELPER_1(mtc0_srsconf1, void, tl)
+DEF_HELPER_1(mtc0_srsconf2, void, tl)
+DEF_HELPER_1(mtc0_srsconf3, void, tl)
+DEF_HELPER_1(mtc0_srsconf4, void, tl)
+DEF_HELPER_1(mtc0_hwrena, void, tl)
+DEF_HELPER_1(mtc0_count, void, tl)
+DEF_HELPER_1(mtc0_entryhi, void, tl)
+DEF_HELPER_1(mttc0_entryhi, void, tl)
+DEF_HELPER_1(mtc0_compare, void, tl)
+DEF_HELPER_1(mtc0_status, void, tl)
+DEF_HELPER_1(mttc0_status, void, tl)
+DEF_HELPER_1(mtc0_intctl, void, tl)
+DEF_HELPER_1(mtc0_srsctl, void, tl)
+DEF_HELPER_1(mtc0_cause, void, tl)
+DEF_HELPER_1(mtc0_ebase, void, tl)
+DEF_HELPER_1(mtc0_config0, void, tl)
+DEF_HELPER_1(mtc0_config2, void, tl)
+DEF_HELPER_2(mtc0_watchlo, void, tl, i32)
+DEF_HELPER_2(mtc0_watchhi, void, tl, i32)
+DEF_HELPER_1(mtc0_xcontext, void, tl)
+DEF_HELPER_1(mtc0_framemask, void, tl)
+DEF_HELPER_1(mtc0_debug, void, tl)
+DEF_HELPER_1(mttc0_debug, void, tl)
+DEF_HELPER_1(mtc0_performance0, void, tl)
+DEF_HELPER_1(mtc0_taglo, void, tl)
+DEF_HELPER_1(mtc0_datalo, void, tl)
+DEF_HELPER_1(mtc0_taghi, void, tl)
+DEF_HELPER_1(mtc0_datahi, void, tl)
/* MIPS MT functions */
-DEF_HELPER(target_ulong, do_mftgpr, (uint32_t sel))
-DEF_HELPER(target_ulong, do_mftlo, (uint32_t sel))
-DEF_HELPER(target_ulong, do_mfthi, (uint32_t sel))
-DEF_HELPER(target_ulong, do_mftacx, (uint32_t sel))
-DEF_HELPER(target_ulong, do_mftdsp, (void))
-DEF_HELPER(void, do_mttgpr, (target_ulong t0, uint32_t sel))
-DEF_HELPER(void, do_mttlo, (target_ulong t0, uint32_t sel))
-DEF_HELPER(void, do_mtthi, (target_ulong t0, uint32_t sel))
-DEF_HELPER(void, do_mttacx, (target_ulong t0, uint32_t sel))
-DEF_HELPER(void, do_mttdsp, (target_ulong t0))
-DEF_HELPER(target_ulong, do_dmt, (target_ulong t0))
-DEF_HELPER(target_ulong, do_emt, (target_ulong t0))
-DEF_HELPER(target_ulong, do_dvpe, (target_ulong t0))
-DEF_HELPER(target_ulong, do_evpe, (target_ulong t0))
+DEF_HELPER_1(mftgpr, tl, i32)
+DEF_HELPER_1(mftlo, tl, i32)
+DEF_HELPER_1(mfthi, tl, i32)
+DEF_HELPER_1(mftacx, tl, i32)
+DEF_HELPER_0(mftdsp, tl)
+DEF_HELPER_2(mttgpr, void, tl, i32)
+DEF_HELPER_2(mttlo, void, tl, i32)
+DEF_HELPER_2(mtthi, void, tl, i32)
+DEF_HELPER_2(mttacx, void, tl, i32)
+DEF_HELPER_1(mttdsp, void, tl)
+DEF_HELPER_1(dmt, tl, tl)
+DEF_HELPER_1(emt, tl, tl)
+DEF_HELPER_1(dvpe, tl, tl)
+DEF_HELPER_1(evpe, tl, tl)
#endif /* !CONFIG_USER_ONLY */
-DEF_HELPER(void, do_fork, (target_ulong t0, target_ulong t1))
-DEF_HELPER(target_ulong, do_yield, (target_ulong t0))
+DEF_HELPER_2(fork, void, tl, tl)
+DEF_HELPER_1(yield, tl, tl)
/* CP1 functions */
-DEF_HELPER(target_ulong, do_cfc1, (uint32_t reg))
-DEF_HELPER(void, do_ctc1, (target_ulong t0, uint32_t reg))
+DEF_HELPER_1(cfc1, tl, i32)
+DEF_HELPER_2(ctc1, void, tl, i32)
-DEF_HELPER(uint64_t, do_float_cvtd_s, (uint32_t fst0))
-DEF_HELPER(uint64_t, do_float_cvtd_w, (uint32_t wt0))
-DEF_HELPER(uint64_t, do_float_cvtd_l, (uint64_t dt0))
-DEF_HELPER(uint64_t, do_float_cvtl_d, (uint64_t fd0))
-DEF_HELPER(uint64_t, do_float_cvtl_s, (uint32_t fst0))
-DEF_HELPER(uint64_t, do_float_cvtps_pw, (uint64_t dt0))
-DEF_HELPER(uint64_t, do_float_cvtpw_ps, (uint64_t fdt0))
-DEF_HELPER(uint32_t, do_float_cvts_d, (uint64_t fd0))
-DEF_HELPER(uint32_t, do_float_cvts_w, (uint32_t wt0))
-DEF_HELPER(uint32_t, do_float_cvts_l, (uint64_t dt0))
-DEF_HELPER(uint32_t, do_float_cvts_pl, (uint32_t wt0))
-DEF_HELPER(uint32_t, do_float_cvts_pu, (uint32_t wth0))
-DEF_HELPER(uint32_t, do_float_cvtw_s, (uint32_t fst0))
-DEF_HELPER(uint32_t, do_float_cvtw_d, (uint64_t fd0))
+DEF_HELPER_1(float_cvtd_s, i64, i32)
+DEF_HELPER_1(float_cvtd_w, i64, i32)
+DEF_HELPER_1(float_cvtd_l, i64, i64)
+DEF_HELPER_1(float_cvtl_d, i64, i64)
+DEF_HELPER_1(float_cvtl_s, i64, i32)
+DEF_HELPER_1(float_cvtps_pw, i64, i64)
+DEF_HELPER_1(float_cvtpw_ps, i64, i64)
+DEF_HELPER_1(float_cvts_d, i32, i64)
+DEF_HELPER_1(float_cvts_w, i32, i32)
+DEF_HELPER_1(float_cvts_l, i32, i64)
+DEF_HELPER_1(float_cvts_pl, i32, i32)
+DEF_HELPER_1(float_cvts_pu, i32, i32)
+DEF_HELPER_1(float_cvtw_s, i32, i32)
+DEF_HELPER_1(float_cvtw_d, i32, i64)
-DEF_HELPER(uint64_t, do_float_addr_ps, (uint64_t fdt0, uint64_t fdt1))
-DEF_HELPER(uint64_t, do_float_mulr_ps, (uint64_t fdt0, uint64_t fdt1))
+DEF_HELPER_2(float_addr_ps, i64, i64, i64)
+DEF_HELPER_2(float_mulr_ps, i64, i64, i64)
-#define FOP_PROTO(op) \
-DEF_HELPER(uint64_t, do_float_ ## op ## l_s, (uint32_t fst0)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## l_d, (uint64_t fdt0)) \
-DEF_HELPER(uint32_t, do_float_ ## op ## w_s, (uint32_t fst0)) \
-DEF_HELPER(uint32_t, do_float_ ## op ## w_d, (uint64_t fdt0))
+#define FOP_PROTO(op) \
+DEF_HELPER_1(float_ ## op ## l_s, i64, i32) \
+DEF_HELPER_1(float_ ## op ## l_d, i64, i64) \
+DEF_HELPER_1(float_ ## op ## w_s, i32, i32) \
+DEF_HELPER_1(float_ ## op ## w_d, i32, i64)
FOP_PROTO(round)
FOP_PROTO(trunc)
FOP_PROTO(ceil)
FOP_PROTO(floor)
#undef FOP_PROTO
-#define FOP_PROTO(op) \
-DEF_HELPER(uint32_t, do_float_ ## op ## _s, (uint32_t fst0)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _d, (uint64_t fdt0))
+#define FOP_PROTO(op) \
+DEF_HELPER_1(float_ ## op ## _s, i32, i32) \
+DEF_HELPER_1(float_ ## op ## _d, i64, i64)
FOP_PROTO(sqrt)
FOP_PROTO(rsqrt)
FOP_PROTO(recip)
#undef FOP_PROTO
-#define FOP_PROTO(op) \
-DEF_HELPER(uint32_t, do_float_ ## op ## _s, (uint32_t fst0)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _d, (uint64_t fdt0)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _ps, (uint64_t fdt0))
+#define FOP_PROTO(op) \
+DEF_HELPER_1(float_ ## op ## _s, i32, i32) \
+DEF_HELPER_1(float_ ## op ## _d, i64, i64) \
+DEF_HELPER_1(float_ ## op ## _ps, i64, i64)
FOP_PROTO(abs)
FOP_PROTO(chs)
FOP_PROTO(recip1)
FOP_PROTO(rsqrt1)
#undef FOP_PROTO
-#define FOP_PROTO(op) \
-DEF_HELPER(uint32_t, do_float_ ## op ## _s, (uint32_t fst0, uint32_t fst2)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _d, (uint64_t fdt0, uint64_t fdt2)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _ps, (uint64_t fdt0, uint64_t fdt2))
+#define FOP_PROTO(op) \
+DEF_HELPER_2(float_ ## op ## _s, i32, i32, i32) \
+DEF_HELPER_2(float_ ## op ## _d, i64, i64, i64) \
+DEF_HELPER_2(float_ ## op ## _ps, i64, i64, i64)
FOP_PROTO(add)
FOP_PROTO(sub)
FOP_PROTO(mul)
FOP_PROTO(rsqrt2)
#undef FOP_PROTO
-#define FOP_PROTO(op) \
-DEF_HELPER(uint32_t, do_float_ ## op ## _s, (uint32_t fst0, uint32_t fst1, \
- uint32_t fst2)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _d, (uint64_t fdt0, uint64_t fdt1, \
- uint64_t fdt2)) \
-DEF_HELPER(uint64_t, do_float_ ## op ## _ps, (uint64_t fdt0, uint64_t fdt1, \
- uint64_t fdt2))
+#define FOP_PROTO(op) \
+DEF_HELPER_3(float_ ## op ## _s, i32, i32, i32, i32) \
+DEF_HELPER_3(float_ ## op ## _d, i64, i64, i64, i64) \
+DEF_HELPER_3(float_ ## op ## _ps, i64, i64, i64, i64)
FOP_PROTO(muladd)
FOP_PROTO(mulsub)
FOP_PROTO(nmuladd)
FOP_PROTO(nmulsub)
#undef FOP_PROTO
-#define FOP_PROTO(op) \
-DEF_HELPER(void, do_cmp_d_ ## op, (uint64_t fdt0, uint64_t fdt1, int cc)) \
-DEF_HELPER(void, do_cmpabs_d_ ## op, (uint64_t fdt0, uint64_t fdt1, int cc)) \
-DEF_HELPER(void, do_cmp_s_ ## op, (uint32_t fst0, uint32_t fst1, int cc)) \
-DEF_HELPER(void, do_cmpabs_s_ ## op, (uint32_t fst0, uint32_t fst1, int cc)) \
-DEF_HELPER(void, do_cmp_ps_ ## op, (uint64_t fdt0, uint64_t fdt1, int cc)) \
-DEF_HELPER(void, do_cmpabs_ps_ ## op, (uint64_t fdt0, uint64_t fdt1, int cc))
+#define FOP_PROTO(op) \
+DEF_HELPER_3(cmp_d_ ## op, void, i64, i64, int) \
+DEF_HELPER_3(cmpabs_d_ ## op, void, i64, i64, int) \
+DEF_HELPER_3(cmp_s_ ## op, void, i32, i32, int) \
+DEF_HELPER_3(cmpabs_s_ ## op, void, i32, i32, int) \
+DEF_HELPER_3(cmp_ps_ ## op, void, i64, i64, int) \
+DEF_HELPER_3(cmpabs_ps_ ## op, void, i64, i64, int)
FOP_PROTO(f)
FOP_PROTO(un)
FOP_PROTO(eq)
/* Special functions */
#ifndef CONFIG_USER_ONLY
-DEF_HELPER(target_ulong, do_di, (void))
-DEF_HELPER(target_ulong, do_ei, (void))
-DEF_HELPER(void, do_eret, (void))
-DEF_HELPER(void, do_deret, (void))
+DEF_HELPER_0(tlbwi, void)
+DEF_HELPER_0(tlbwr, void)
+DEF_HELPER_0(tlbp, void)
+DEF_HELPER_0(tlbr, void)
+DEF_HELPER_0(di, tl)
+DEF_HELPER_0(ei, tl)
+DEF_HELPER_0(eret, void)
+DEF_HELPER_0(deret, void)
#endif /* !CONFIG_USER_ONLY */
-DEF_HELPER(target_ulong, do_rdhwr_cpunum, (void))
-DEF_HELPER(target_ulong, do_rdhwr_synci_step, (void))
-DEF_HELPER(target_ulong, do_rdhwr_cc, (void))
-DEF_HELPER(target_ulong, do_rdhwr_ccres, (void))
-DEF_HELPER(void, do_pmon, (int function))
-DEF_HELPER(void, do_wait, (void))
+DEF_HELPER_0(rdhwr_cpunum, tl)
+DEF_HELPER_0(rdhwr_synci_step, tl)
+DEF_HELPER_0(rdhwr_cc, tl)
+DEF_HELPER_0(rdhwr_ccres, tl)
+DEF_HELPER_1(pmon, void, int)
+DEF_HELPER_0(wait, void)
+
+#include "def-helper.h"
#include "host-utils.h"
+#include "helper.h"
/*****************************************************************************/
/* Exceptions processing helpers */
(tlb->C1 << 3) | (tlb->PFN[1] >> 6);
}
+void do_tlbwi(void)
+{
+ env->tlb->do_tlbwi();
+}
+
+void do_tlbwr(void)
+{
+ env->tlb->do_tlbwr();
+}
+
+void do_tlbp(void)
+{
+ env->tlb->do_tlbp();
+}
+
+void do_tlbr(void)
+{
+ env->tlb->do_tlbr();
+}
+
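Editorial note (not part of the patch): the new do_tlb* wrappers above only forward to the per-MMU-model hooks hanging off env->tlb, so the helper machinery can call a fixed symbol while the MMU model stays pluggable. A rough outline of that dispatch table follows; only the four function-pointer names are taken from the calls above, the struct name and remaining fields are assumed.

/* Hypothetical shape of the per-MMU-model TLB hooks used by the wrappers
 * above; only do_tlbwi/do_tlbwr/do_tlbp/do_tlbr come from the patch. */
typedef struct CPUMIPSTLBContext {
    void (*do_tlbwi)(void);   /* TLBWI: write the indexed entry     */
    void (*do_tlbwr)(void);   /* TLBWR: write a random entry        */
    void (*do_tlbp)(void);    /* TLBP:  probe for a matching entry  */
    void (*do_tlbr)(void);    /* TLBR:  read back the indexed entry */
    /* ... MMU-model specific state ... */
} CPUMIPSTLBContext;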
/* Specials */
target_ulong do_di (void)
{
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
-#include "helper.h"
#include "tcg-op.h"
#include "qemu-common.h"
+#include "helper.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
//#define MIPS_DEBUG_DISAS
//#define MIPS_DEBUG_SIGN_EXTENSIONS
//#define MIPS_SINGLE_STEP
};
/* global register indices */
-static TCGv cpu_env, cpu_gpr[32], cpu_PC;
+static TCGv_ptr cpu_env;
+static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
-static TCGv cpu_dspctrl, bcond, btarget;
-static TCGv fpu_fpr32[32], fpu_fpr32h[32], fpu_fpr64[32], fpu_fcr0, fpu_fcr31;
+static TCGv cpu_dspctrl, btarget;
+static TCGv_i32 bcond;
+static TCGv_i32 fpu_fpr32[32], fpu_fpr32h[32];
+static TCGv_i64 fpu_fpr64[32];
+static TCGv_i32 fpu_fcr0, fpu_fcr31;
#include "gen-icount.h"
-static inline void tcg_gen_helper_0_i(void *func, uint32_t arg)
-
-{
- TCGv tmp = tcg_const_i32(arg);
-
- tcg_gen_helper_0_1(func, tmp);
- tcg_temp_free(tmp);
-}
-
-static inline void tcg_gen_helper_0_ii(void *func, uint32_t arg1, uint32_t arg2)
-{
- TCGv tmp1 = tcg_const_i32(arg1);
- TCGv tmp2 = tcg_const_i32(arg2);
-
- tcg_gen_helper_0_2(func, tmp1, tmp2);
- tcg_temp_free(tmp1);
- tcg_temp_free(tmp2);
-}
-
-static inline void tcg_gen_helper_0_1i(void *func, TCGv arg1, uint32_t arg2)
-{
- TCGv tmp = tcg_const_i32(arg2);
-
- tcg_gen_helper_0_2(func, arg1, tmp);
- tcg_temp_free(tmp);
-}
-
-static inline void tcg_gen_helper_0_2i(void *func, TCGv arg1, TCGv arg2, uint32_t arg3)
-{
- TCGv tmp = tcg_const_i32(arg3);
-
- tcg_gen_helper_0_3(func, arg1, arg2, tmp);
- tcg_temp_free(tmp);
-}
-
-static inline void tcg_gen_helper_0_1ii(void *func, TCGv arg1, uint32_t arg2, uint32_t arg3)
-{
- TCGv tmp1 = tcg_const_i32(arg2);
- TCGv tmp2 = tcg_const_i32(arg3);
-
- tcg_gen_helper_0_3(func, arg1, tmp1, tmp2);
- tcg_temp_free(tmp1);
- tcg_temp_free(tmp2);
-}
-
-static inline void tcg_gen_helper_1_i(void *func, TCGv ret, uint32_t arg)
-{
- TCGv tmp = tcg_const_i32(arg);
-
- tcg_gen_helper_1_1(func, ret, tmp);
- tcg_temp_free(tmp);
-}
-
-static inline void tcg_gen_helper_1_1i(void *func, TCGv ret, TCGv arg1, uint32_t arg2)
-{
- TCGv tmp = tcg_const_i32(arg2);
-
- tcg_gen_helper_1_2(func, ret, arg1, tmp);
- tcg_temp_free(tmp);
-}
-
-static inline void tcg_gen_helper_1_1ii(void *func, TCGv ret, TCGv arg1, uint32_t arg2, uint32_t arg3)
-{
- TCGv tmp1 = tcg_const_i32(arg2);
- TCGv tmp2 = tcg_const_i32(arg3);
-
- tcg_gen_helper_1_3(func, ret, arg1, tmp1, tmp2);
- tcg_temp_free(tmp1);
- tcg_temp_free(tmp2);
-}
-
-static inline void tcg_gen_helper_1_2i(void *func, TCGv ret, TCGv arg1, TCGv arg2, uint32_t arg3)
-{
- TCGv tmp = tcg_const_i32(arg3);
+#define gen_helper_0i(name, arg) do { \
+ TCGv_i32 helper_tmp = tcg_const_i32(arg); \
+ gen_helper_##name(helper_tmp); \
+ tcg_temp_free_i32(helper_tmp); \
+ } while(0)
- tcg_gen_helper_1_3(func, ret, arg1, arg2, tmp);
- tcg_temp_free(tmp);
-}
+#define gen_helper_1i(name, arg1, arg2) do { \
+ TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
+ gen_helper_##name(arg1, helper_tmp); \
+ tcg_temp_free_i32(helper_tmp); \
+ } while(0)
-static inline void tcg_gen_helper_1_2ii(void *func, TCGv ret, TCGv arg1, TCGv arg2, uint32_t arg3, uint32_t arg4)
-{
- TCGv tmp1 = tcg_const_i32(arg3);
- TCGv tmp2 = tcg_const_i32(arg4);
+#define gen_helper_2i(name, arg1, arg2, arg3) do { \
+ TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
+ gen_helper_##name(arg1, arg2, helper_tmp); \
+ tcg_temp_free_i32(helper_tmp); \
+ } while(0)
- tcg_gen_helper_1_4(func, ret, arg1, arg2, tmp1, tmp2);
- tcg_temp_free(tmp1);
- tcg_temp_free(tmp2);
-}
+#define gen_helper_3i(name, arg1, arg2, arg3, arg4) do { \
+ TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
+ gen_helper_##name(arg1, arg2, arg3, helper_tmp); \
+ tcg_temp_free_i32(helper_tmp); \
+ } while(0)
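Editorial note (not part of the patch): the gen_helper_Ni wrappers above exist to pass a translation-time constant to a generated helper call, which only accepts TCG values. Hand-expanding one use from later in this file makes the pattern explicit; the expansion below is a sketch, not literal preprocessor output.

/* gen_helper_1i(mfc0_watchlo, t0, sel) expands, modulo brace placement, to: */
do {
    TCGv_i32 helper_tmp = tcg_const_i32(sel);   /* materialize the constant */
    gen_helper_mfc0_watchlo(t0, helper_tmp);    /* ordinary generated call  */
    tcg_temp_free_i32(helper_tmp);              /* release the temporary    */
} while (0);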
typedef struct DisasContext {
struct TranslationBlock *tb;
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
if (from == 0)
tcg_gen_movi_tl(r_tmp1, 0);
else {
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp2 = tcg_temp_new_i32();
+ TCGv_ptr addr = tcg_temp_new_ptr();
tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
- tcg_gen_add_i32(r_tmp2, cpu_env, r_tmp2);
+ tcg_gen_ext_i32_ptr(addr, r_tmp2);
+ tcg_gen_add_ptr(addr, cpu_env, addr);
- tcg_gen_ld_tl(r_tmp1, r_tmp2, sizeof(target_ulong) * from);
- tcg_temp_free(r_tmp2);
+ tcg_gen_ld_tl(r_tmp1, addr, sizeof(target_ulong) * from);
+ tcg_temp_free_ptr(addr);
+ tcg_temp_free_i32(r_tmp2);
}
gen_store_gpr(r_tmp1, to);
tcg_temp_free(r_tmp1);
static inline void gen_store_srsgpr (int from, int to)
{
if (to != 0) {
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv_i32 r_tmp2 = tcg_temp_new_i32();
+ TCGv_ptr addr = tcg_temp_new_ptr();
gen_load_gpr(r_tmp1, from);
tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
- tcg_gen_add_i32(r_tmp2, cpu_env, r_tmp2);
+ tcg_gen_ext_i32_ptr(addr, r_tmp2);
+ tcg_gen_add_ptr(addr, cpu_env, addr);
- tcg_gen_st_tl(r_tmp1, r_tmp2, sizeof(target_ulong) * to);
+ tcg_gen_st_tl(r_tmp1, addr, sizeof(target_ulong) * to);
+ tcg_temp_free_ptr(addr);
+ tcg_temp_free_i32(r_tmp2);
tcg_temp_free(r_tmp1);
- tcg_temp_free(r_tmp2);
}
}
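Editorial note (not part of the patch): the shadow-register accessors above show the general pattern for indexing into CPUState now that cpu_env is a TCGv_ptr rather than a plain TCGv: build the byte offset in an i32 temporary, widen it to pointer width, then add the env base. A minimal standalone sketch of just that address step, with the function name chosen here purely for illustration:

/* Compute a host pointer cpu_env + off32 (off32 is a byte offset into
 * CPUState).  The caller releases the result with tcg_temp_free_ptr(). */
static inline TCGv_ptr gen_env_addr(TCGv_i32 off32)
{
    TCGv_ptr addr = tcg_temp_new_ptr();

    tcg_gen_ext_i32_ptr(addr, off32);      /* widen offset to pointer size */
    tcg_gen_add_ptr(addr, cpu_env, addr);  /* add the CPUState base        */
    return addr;
}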
/* Floating point register moves. */
-static inline void gen_load_fpr32 (TCGv t, int reg)
+static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
tcg_gen_mov_i32(t, fpu_fpr32[reg]);
}
-static inline void gen_store_fpr32 (TCGv t, int reg)
+static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
tcg_gen_mov_i32(fpu_fpr32[reg], t);
}
-static inline void gen_load_fpr64 (DisasContext *ctx, TCGv t, int reg)
+static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
if (ctx->hflags & MIPS_HFLAG_F64)
tcg_gen_mov_i64(t, fpu_fpr64[reg]);
}
}
-static inline void gen_store_fpr64 (DisasContext *ctx, TCGv t, int reg)
+static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
if (ctx->hflags & MIPS_HFLAG_F64)
tcg_gen_mov_i64(fpu_fpr64[reg], t);
}
}
-static inline void gen_load_fpr32h (TCGv t, int reg)
+static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
tcg_gen_mov_i32(t, fpu_fpr32h[reg]);
}
-static inline void gen_store_fpr32h (TCGv t, int reg)
+static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
tcg_gen_mov_i32(fpu_fpr32h[reg], t);
}
-static inline void get_fp_cond (TCGv t)
+static inline void get_fp_cond (TCGv_i32 t)
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
+ TCGv_i32 r_tmp2 = tcg_temp_new_i32();
tcg_gen_shri_i32(r_tmp2, fpu_fcr31, 24);
tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xfe);
tcg_gen_shri_i32(r_tmp1, fpu_fcr31, 23);
tcg_gen_andi_i32(r_tmp1, r_tmp1, 0x1);
tcg_gen_or_i32(t, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp1);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i32(r_tmp1);
+ tcg_temp_free_i32(r_tmp2);
}
-typedef void (fcmp_fun32)(uint32_t, uint32_t, int);
-typedef void (fcmp_fun64)(uint64_t, uint64_t, int);
-
-#define FOP_CONDS(fcmp_fun, type, fmt) \
-static fcmp_fun * fcmp ## type ## _ ## fmt ## _table[16] = { \
- do_cmp ## type ## _ ## fmt ## _f, \
- do_cmp ## type ## _ ## fmt ## _un, \
- do_cmp ## type ## _ ## fmt ## _eq, \
- do_cmp ## type ## _ ## fmt ## _ueq, \
- do_cmp ## type ## _ ## fmt ## _olt, \
- do_cmp ## type ## _ ## fmt ## _ult, \
- do_cmp ## type ## _ ## fmt ## _ole, \
- do_cmp ## type ## _ ## fmt ## _ule, \
- do_cmp ## type ## _ ## fmt ## _sf, \
- do_cmp ## type ## _ ## fmt ## _ngle, \
- do_cmp ## type ## _ ## fmt ## _seq, \
- do_cmp ## type ## _ ## fmt ## _ngl, \
- do_cmp ## type ## _ ## fmt ## _lt, \
- do_cmp ## type ## _ ## fmt ## _nge, \
- do_cmp ## type ## _ ## fmt ## _le, \
- do_cmp ## type ## _ ## fmt ## _ngt, \
-}; \
-static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv a, TCGv b, int cc) \
+#define FOP_CONDS(type, fmt, bits) \
+static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a, \
+ TCGv_i##bits b, int cc) \
{ \
- tcg_gen_helper_0_2i(fcmp ## type ## _ ## fmt ## _table[n], a, b, cc); \
+ switch (n) { \
+ case 0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc); break;\
+ case 1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc); break;\
+ case 2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc); break;\
+ case 3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc); break;\
+ case 4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc); break;\
+ case 5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc); break;\
+ case 6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc); break;\
+ case 7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc); break;\
+ case 8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc); break;\
+ case 9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
+ case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc); break;\
+ case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc); break;\
+ case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc); break;\
+ case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc); break;\
+ case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc); break;\
+ case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc); break;\
+ default: abort(); \
+ } \
}
-FOP_CONDS(fcmp_fun64, , d)
-FOP_CONDS(fcmp_fun64, abs, d)
-FOP_CONDS(fcmp_fun32, , s)
-FOP_CONDS(fcmp_fun32, abs, s)
-FOP_CONDS(fcmp_fun64, , ps)
-FOP_CONDS(fcmp_fun64, abs, ps)
+FOP_CONDS(, d, 64)
+FOP_CONDS(abs, d, 64)
+FOP_CONDS(, s, 32)
+FOP_CONDS(abs, s, 32)
+FOP_CONDS(, ps, 64)
+FOP_CONDS(abs, ps, 64)
#undef FOP_CONDS
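Editorial note (not part of the patch): FOP_CONDS now emits one gen_cmp*_fmt function per format that dispatches on the 4-bit condition field instead of indexing a table of host function pointers. A hedged sketch of how such a generated comparator would be used from a c.cond.d-style decoder; the wrapper name and operand handling here are illustrative only.

/* Illustrative only: compare FPR[fs] with FPR[ft] in double format and
 * set FP condition code 'cc' according to 'cond' (low 4 bits). */
static void example_c_cond_d(DisasContext *ctx, int cond, int cc, int fs, int ft)
{
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_cmp_d(cond & 0xf, fp0, fp1, cc);   /* dispatches to helper_cmp_d_<cond> */
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
}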
/* Tests */
ctx->saved_pc = ctx->pc;
}
if (ctx->hflags != ctx->saved_hflags) {
- TCGv r_tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp = tcg_temp_new_i32();
tcg_gen_movi_i32(r_tmp, ctx->hflags);
tcg_gen_st_i32(r_tmp, cpu_env, offsetof(CPUState, hflags));
- tcg_temp_free(r_tmp);
+ tcg_temp_free_i32(r_tmp);
ctx->saved_hflags = ctx->hflags;
switch (ctx->hflags & MIPS_HFLAG_BMASK) {
case MIPS_HFLAG_BR:
static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
+ TCGv_i32 texcp = tcg_const_i32(excp);
+ TCGv_i32 terr = tcg_const_i32(err);
save_cpu_state(ctx, 1);
- tcg_gen_helper_0_ii(do_raise_exception_err, excp, err);
- tcg_gen_helper_0_0(do_interrupt_restart);
+ gen_helper_raise_exception_err(texcp, terr);
+ tcg_temp_free_i32(terr);
+ tcg_temp_free_i32(texcp);
+ gen_helper_interrupt_restart();
tcg_gen_exit_tb(0);
}
generate_exception (DisasContext *ctx, int excp)
{
save_cpu_state(ctx, 1);
- tcg_gen_helper_0_i(do_raise_exception, excp);
- tcg_gen_helper_0_0(do_interrupt_restart);
+ gen_helper_0i(raise_exception, excp);
+ gen_helper_interrupt_restart();
tcg_gen_exit_tb(0);
}
#define OP_ST_ATOMIC(insn,fname,almask) \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx) \
{ \
- TCGv r_tmp = tcg_temp_local_new(TCG_TYPE_TL); \
+ TCGv r_tmp = tcg_temp_local_new(); \
int l1 = gen_new_label(); \
int l2 = gen_new_label(); \
int l3 = gen_new_label(); \
int base, int16_t offset)
{
const char *opn = "ldst";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
if (base == 0) {
tcg_gen_movi_tl(t0, offset);
case OPC_LDL:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_1_2i(do_ldl, t1, t0, t1, ctx->mem_idx);
+ gen_helper_3i(ldl, t1, t0, t1, ctx->mem_idx);
gen_store_gpr(t1, rt);
opn = "ldl";
break;
case OPC_SDL:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_0_2i(do_sdl, t0, t1, ctx->mem_idx);
+ gen_helper_2i(sdl, t0, t1, ctx->mem_idx);
opn = "sdl";
break;
case OPC_LDR:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_1_2i(do_ldr, t1, t0, t1, ctx->mem_idx);
+ gen_helper_3i(ldr, t1, t0, t1, ctx->mem_idx);
gen_store_gpr(t1, rt);
opn = "ldr";
break;
case OPC_SDR:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_0_2i(do_sdr, t0, t1, ctx->mem_idx);
+ gen_helper_2i(sdr, t0, t1, ctx->mem_idx);
opn = "sdr";
break;
#endif
case OPC_LWL:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_1_2i(do_lwl, t1, t0, t1, ctx->mem_idx);
+ gen_helper_3i(lwl, t1, t0, t1, ctx->mem_idx);
gen_store_gpr(t1, rt);
opn = "lwl";
break;
case OPC_SWL:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_0_2i(do_swl, t0, t1, ctx->mem_idx);
+ gen_helper_2i(swl, t0, t1, ctx->mem_idx);
opn = "swr";
break;
case OPC_LWR:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_1_2i(do_lwr, t1, t0, t1, ctx->mem_idx);
+ gen_helper_3i(lwr, t1, t0, t1, ctx->mem_idx);
gen_store_gpr(t1, rt);
opn = "lwr";
break;
case OPC_SWR:
save_cpu_state(ctx, 1);
gen_load_gpr(t1, rt);
- tcg_gen_helper_0_2i(do_swr, t0, t1, ctx->mem_idx);
+ gen_helper_2i(swr, t0, t1, ctx->mem_idx);
opn = "swr";
break;
case OPC_LL:
int base, int16_t offset)
{
const char *opn = "flt_ldst";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
if (base == 0) {
tcg_gen_movi_tl(t0, offset);
} else if (offset == 0) {
gen_load_gpr(t0, base);
} else {
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t1 = tcg_temp_local_new();
gen_load_gpr(t0, base);
tcg_gen_movi_tl(t1, offset);
switch (opc) {
case OPC_LWC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv t1 = tcg_temp_new();
- tcg_gen_qemu_ld32s(fp0, t0, ctx->mem_idx);
+ tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
+ tcg_gen_trunc_tl_i32(fp0, t1);
gen_store_fpr32(fp0, ft);
- tcg_temp_free(fp0);
+ tcg_temp_free(t1);
+ tcg_temp_free_i32(fp0);
}
opn = "lwc1";
break;
case OPC_SWC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv t1 = tcg_temp_new();
gen_load_fpr32(fp0, ft);
- tcg_gen_qemu_st32(fp0, t0, ctx->mem_idx);
- tcg_temp_free(fp0);
+ tcg_gen_extu_i32_tl(t1, fp0);
+ tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
+ tcg_temp_free(t1);
+ tcg_temp_free_i32(fp0);
}
opn = "swc1";
break;
case OPC_LDC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
gen_store_fpr64(ctx, fp0, ft);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "ldc1";
break;
case OPC_SDC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, ft);
tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "sdc1";
break;
{
target_ulong uimm;
const char *opn = "imm arith";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
/* If no destination, treat it as a NOP.
switch (opc) {
case OPC_ADDI:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv r_tmp2 = tcg_temp_new();
int l1 = gen_new_label();
save_cpu_state(ctx, 1);
#if defined(TARGET_MIPS64)
case OPC_DADDI:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv r_tmp2 = tcg_temp_new();
int l1 = gen_new_label();
save_cpu_state(ctx, 1);
/* rotr is decoded as srl on non-R2 CPUs */
if (env->insn_flags & ISA_MIPS32R2) {
if (uimm != 0) {
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(r_tmp1, t0);
tcg_gen_rotri_i32(r_tmp1, r_tmp1, uimm);
tcg_gen_ext_i32_tl(t0, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i32(r_tmp1);
}
opn = "rotr";
} else {
int rd, int rs, int rt)
{
const char *opn = "arith";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
&& opc != OPC_DADD && opc != OPC_DSUB) {
switch (opc) {
case OPC_ADD:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv r_tmp2 = tcg_temp_new();
int l1 = gen_new_label();
save_cpu_state(ctx, 1);
break;
case OPC_SUB:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv r_tmp2 = tcg_temp_new();
int l1 = gen_new_label();
save_cpu_state(ctx, 1);
#if defined(TARGET_MIPS64)
case OPC_DADD:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv r_tmp2 = tcg_temp_new();
int l1 = gen_new_label();
save_cpu_state(ctx, 1);
break;
case OPC_DSUB:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_TL);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_tmp1 = tcg_temp_new();
+ TCGv r_tmp2 = tcg_temp_new();
int l1 = gen_new_label();
save_cpu_state(ctx, 1);
tcg_gen_andi_tl(t0, t0, 0x1f);
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
+ TCGv_i32 r_tmp2 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(r_tmp1, t0);
tcg_gen_trunc_tl_i32(r_tmp2, t1);
tcg_gen_rotr_i32(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp1);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i32(r_tmp1);
+ tcg_temp_free_i32(r_tmp2);
tcg_gen_br(l2);
}
gen_set_label(l1);
static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
{
const char *opn = "hilo";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
/* Treat as NOP. */
int rs, int rt)
{
const char *opn = "mul/div";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
gen_load_gpr(t0, rs);
gen_load_gpr(t1, rt);
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
{
int l2 = gen_new_label();
- TCGv r_tmp1 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv r_tmp2 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv r_tmp3 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_local_new_i32();
+ TCGv_i32 r_tmp2 = tcg_temp_local_new_i32();
+ TCGv_i32 r_tmp3 = tcg_temp_local_new_i32();
tcg_gen_trunc_tl_i32(r_tmp1, t0);
tcg_gen_trunc_tl_i32(r_tmp2, t1);
tcg_gen_rem_i32(r_tmp2, r_tmp1, r_tmp2);
tcg_gen_ext_i32_tl(cpu_LO[0], r_tmp3);
tcg_gen_ext_i32_tl(cpu_HI[0], r_tmp2);
- tcg_temp_free(r_tmp1);
- tcg_temp_free(r_tmp2);
- tcg_temp_free(r_tmp3);
+ tcg_temp_free_i32(r_tmp1);
+ tcg_temp_free_i32(r_tmp2);
+ tcg_temp_free_i32(r_tmp3);
}
gen_set_label(l1);
}
tcg_gen_ext32s_tl(t1, t1);
tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I32);
- TCGv r_tmp3 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
+ TCGv_i32 r_tmp2 = tcg_temp_new_i32();
+ TCGv_i32 r_tmp3 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(r_tmp1, t0);
tcg_gen_trunc_tl_i32(r_tmp2, t1);
tcg_gen_remu_i32(r_tmp1, r_tmp1, r_tmp2);
tcg_gen_ext_i32_tl(cpu_LO[0], r_tmp3);
tcg_gen_ext_i32_tl(cpu_HI[0], r_tmp1);
- tcg_temp_free(r_tmp1);
- tcg_temp_free(r_tmp2);
- tcg_temp_free(r_tmp3);
+ tcg_temp_free_i32(r_tmp1);
+ tcg_temp_free_i32(r_tmp2);
+ tcg_temp_free_i32(r_tmp3);
}
gen_set_label(l1);
}
break;
case OPC_MULT:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 r_tmp1 = tcg_temp_new_i64();
+ TCGv_i64 r_tmp2 = tcg_temp_new_i64();
tcg_gen_ext_tl_i64(r_tmp1, t0);
tcg_gen_ext_tl_i64(r_tmp2, t1);
tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i64(r_tmp2);
tcg_gen_trunc_i64_tl(t0, r_tmp1);
tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
tcg_gen_trunc_i64_tl(t1, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i64(r_tmp1);
tcg_gen_ext32s_tl(cpu_LO[0], t0);
tcg_gen_ext32s_tl(cpu_HI[0], t1);
}
break;
case OPC_MULTU:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 r_tmp1 = tcg_temp_new_i64();
+ TCGv_i64 r_tmp2 = tcg_temp_new_i64();
tcg_gen_ext32u_tl(t0, t0);
tcg_gen_ext32u_tl(t1, t1);
tcg_gen_extu_tl_i64(r_tmp1, t0);
tcg_gen_extu_tl_i64(r_tmp2, t1);
tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i64(r_tmp2);
tcg_gen_trunc_i64_tl(t0, r_tmp1);
tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
tcg_gen_trunc_i64_tl(t1, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i64(r_tmp1);
tcg_gen_ext32s_tl(cpu_LO[0], t0);
tcg_gen_ext32s_tl(cpu_HI[0], t1);
}
opn = "ddivu";
break;
case OPC_DMULT:
- tcg_gen_helper_0_2(do_dmult, t0, t1);
+ gen_helper_dmult(t0, t1);
opn = "dmult";
break;
case OPC_DMULTU:
- tcg_gen_helper_0_2(do_dmultu, t0, t1);
+ gen_helper_dmultu(t0, t1);
opn = "dmultu";
break;
#endif
case OPC_MADD:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 r_tmp1 = tcg_temp_new_i64();
+ TCGv_i64 r_tmp2 = tcg_temp_new_i64();
tcg_gen_ext_tl_i64(r_tmp1, t0);
tcg_gen_ext_tl_i64(r_tmp2, t1);
tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
tcg_gen_add_i64(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i64(r_tmp2);
tcg_gen_trunc_i64_tl(t0, r_tmp1);
tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
tcg_gen_trunc_i64_tl(t1, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i64(r_tmp1);
tcg_gen_ext32s_tl(cpu_LO[0], t0);
tcg_gen_ext32s_tl(cpu_LO[1], t1);
}
break;
case OPC_MADDU:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 r_tmp1 = tcg_temp_new_i64();
+ TCGv_i64 r_tmp2 = tcg_temp_new_i64();
tcg_gen_ext32u_tl(t0, t0);
tcg_gen_ext32u_tl(t1, t1);
tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
tcg_gen_add_i64(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i64(r_tmp2);
tcg_gen_trunc_i64_tl(t0, r_tmp1);
tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
tcg_gen_trunc_i64_tl(t1, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i64(r_tmp1);
tcg_gen_ext32s_tl(cpu_LO[0], t0);
tcg_gen_ext32s_tl(cpu_HI[0], t1);
}
break;
case OPC_MSUB:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 r_tmp1 = tcg_temp_new_i64();
+ TCGv_i64 r_tmp2 = tcg_temp_new_i64();
tcg_gen_ext_tl_i64(r_tmp1, t0);
tcg_gen_ext_tl_i64(r_tmp2, t1);
tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
tcg_gen_sub_i64(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i64(r_tmp2);
tcg_gen_trunc_i64_tl(t0, r_tmp1);
tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
tcg_gen_trunc_i64_tl(t1, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i64(r_tmp1);
tcg_gen_ext32s_tl(cpu_LO[0], t0);
tcg_gen_ext32s_tl(cpu_HI[0], t1);
}
break;
case OPC_MSUBU:
{
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv r_tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 r_tmp1 = tcg_temp_new_i64();
+ TCGv_i64 r_tmp2 = tcg_temp_new_i64();
tcg_gen_ext32u_tl(t0, t0);
tcg_gen_ext32u_tl(t1, t1);
tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
tcg_gen_sub_i64(r_tmp1, r_tmp1, r_tmp2);
- tcg_temp_free(r_tmp2);
+ tcg_temp_free_i64(r_tmp2);
tcg_gen_trunc_i64_tl(t0, r_tmp1);
tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
tcg_gen_trunc_i64_tl(t1, r_tmp1);
- tcg_temp_free(r_tmp1);
+ tcg_temp_free_i64(r_tmp1);
tcg_gen_ext32s_tl(cpu_LO[0], t0);
tcg_gen_ext32s_tl(cpu_HI[0], t1);
}
int rd, int rs, int rt)
{
const char *opn = "mul vr54xx";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
gen_load_gpr(t0, rs);
gen_load_gpr(t1, rt);
switch (opc) {
case OPC_VR54XX_MULS:
- tcg_gen_helper_1_2(do_muls, t0, t0, t1);
+ gen_helper_muls(t0, t0, t1);
opn = "muls";
break;
case OPC_VR54XX_MULSU:
- tcg_gen_helper_1_2(do_mulsu, t0, t0, t1);
+ gen_helper_mulsu(t0, t0, t1);
opn = "mulsu";
break;
case OPC_VR54XX_MACC:
- tcg_gen_helper_1_2(do_macc, t0, t0, t1);
+ gen_helper_macc(t0, t0, t1);
opn = "macc";
break;
case OPC_VR54XX_MACCU:
- tcg_gen_helper_1_2(do_maccu, t0, t0, t1);
+ gen_helper_maccu(t0, t0, t1);
opn = "maccu";
break;
case OPC_VR54XX_MSAC:
- tcg_gen_helper_1_2(do_msac, t0, t0, t1);
+ gen_helper_msac(t0, t0, t1);
opn = "msac";
break;
case OPC_VR54XX_MSACU:
- tcg_gen_helper_1_2(do_msacu, t0, t0, t1);
+ gen_helper_msacu(t0, t0, t1);
opn = "msacu";
break;
case OPC_VR54XX_MULHI:
- tcg_gen_helper_1_2(do_mulhi, t0, t0, t1);
+ gen_helper_mulhi(t0, t0, t1);
opn = "mulhi";
break;
case OPC_VR54XX_MULHIU:
- tcg_gen_helper_1_2(do_mulhiu, t0, t0, t1);
+ gen_helper_mulhiu(t0, t0, t1);
opn = "mulhiu";
break;
case OPC_VR54XX_MULSHI:
- tcg_gen_helper_1_2(do_mulshi, t0, t0, t1);
+ gen_helper_mulshi(t0, t0, t1);
opn = "mulshi";
break;
case OPC_VR54XX_MULSHIU:
- tcg_gen_helper_1_2(do_mulshiu, t0, t0, t1);
+ gen_helper_mulshiu(t0, t0, t1);
opn = "mulshiu";
break;
case OPC_VR54XX_MACCHI:
- tcg_gen_helper_1_2(do_macchi, t0, t0, t1);
+ gen_helper_macchi(t0, t0, t1);
opn = "macchi";
break;
case OPC_VR54XX_MACCHIU:
- tcg_gen_helper_1_2(do_macchiu, t0, t0, t1);
+ gen_helper_macchiu(t0, t0, t1);
opn = "macchiu";
break;
case OPC_VR54XX_MSACHI:
- tcg_gen_helper_1_2(do_msachi, t0, t0, t1);
+ gen_helper_msachi(t0, t0, t1);
opn = "msachi";
break;
case OPC_VR54XX_MSACHIU:
- tcg_gen_helper_1_2(do_msachiu, t0, t0, t1);
+ gen_helper_msachiu(t0, t0, t1);
opn = "msachiu";
break;
default:
int rd, int rs)
{
const char *opn = "CLx";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
if (rd == 0) {
/* Treat as NOP. */
gen_load_gpr(t0, rs);
switch (opc) {
case OPC_CLO:
- tcg_gen_helper_1_1(do_clo, t0, t0);
+ gen_helper_clo(t0, t0);
opn = "clo";
break;
case OPC_CLZ:
- tcg_gen_helper_1_1(do_clz, t0, t0);
+ gen_helper_clz(t0, t0);
opn = "clz";
break;
#if defined(TARGET_MIPS64)
case OPC_DCLO:
- tcg_gen_helper_1_1(do_dclo, t0, t0);
+ gen_helper_dclo(t0, t0);
opn = "dclo";
break;
case OPC_DCLZ:
- tcg_gen_helper_1_1(do_dclz, t0, t0);
+ gen_helper_dclz(t0, t0);
opn = "dclz";
break;
#endif
int rs, int rt, int16_t imm)
{
int cond;
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
cond = 0;
/* Load needed operands */
int l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
- tcg_gen_helper_0_i(do_raise_exception, EXCP_TRAP);
+ gen_helper_0i(raise_exception, EXCP_TRAP);
gen_set_label(l1);
}
ctx->bstate = BS_STOP;
target_ulong btgt = -1;
int blink = 0;
int bcond_compute = 0;
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
int rs, int lsb, int msb)
{
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
target_ulong mask;
gen_load_gpr(t1, rs);
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
gen_load_gpr(t1, rt);
switch (op2) {
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv t, target_ulong off)
{
- TCGv r_tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp = tcg_temp_new_i32();
tcg_gen_ld_i32(r_tmp, cpu_env, off);
tcg_gen_ext_i32_tl(t, r_tmp);
- tcg_temp_free(r_tmp);
+ tcg_temp_free_i32(r_tmp);
}
static inline void gen_mfc0_load64 (TCGv t, target_ulong off)
static inline void gen_mtc0_store32 (TCGv t, target_ulong off)
{
- TCGv r_tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(r_tmp, t);
tcg_gen_st_i32(r_tmp, cpu_env, off);
- tcg_temp_free(r_tmp);
+ tcg_temp_free_i32(r_tmp);
}
static inline void gen_mtc0_store64 (TCGv t, target_ulong off)
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_mvpcontrol, t0);
+ gen_helper_mfc0_mvpcontrol(t0);
rn = "MVPControl";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_mvpconf0, t0);
+ gen_helper_mfc0_mvpconf0(t0);
rn = "MVPConf0";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_mvpconf1, t0);
+ gen_helper_mfc0_mvpconf1(t0);
rn = "MVPConf1";
break;
default:
case 1:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mfc0_random, t0);
+ gen_helper_mfc0_random(t0);
rn = "Random";
break;
case 1:
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcstatus, t0);
+ gen_helper_mfc0_tcstatus(t0);
rn = "TCStatus";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcbind, t0);
+ gen_helper_mfc0_tcbind(t0);
rn = "TCBind";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcrestart, t0);
+ gen_helper_mfc0_tcrestart(t0);
rn = "TCRestart";
break;
case 4:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tchalt, t0);
+ gen_helper_mfc0_tchalt(t0);
rn = "TCHalt";
break;
case 5:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tccontext, t0);
+ gen_helper_mfc0_tccontext(t0);
rn = "TCContext";
break;
case 6:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcschedule, t0);
+ gen_helper_mfc0_tcschedule(t0);
rn = "TCSchedule";
break;
case 7:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcschefback, t0);
+ gen_helper_mfc0_tcschefback(t0);
rn = "TCScheFBack";
break;
default:
rn = "Context";
break;
case 1:
-// tcg_gen_helper_1_0(do_mfc0_contextconfig, t0); /* SmartMIPS ASE */
+// gen_helper_mfc0_contextconfig(t0); /* SmartMIPS ASE */
rn = "ContextConfig";
// break;
default:
/* Mark as an IO operation because we read the time. */
if (use_icount)
gen_io_start();
- tcg_gen_helper_1_0(do_mfc0_count, t0);
+ gen_helper_mfc0_count(t0);
if (use_icount) {
gen_io_end();
ctx->bstate = BS_STOP;
case 17:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mfc0_lladdr, t0);
+ gen_helper_mfc0_lladdr(t0);
rn = "LLAddr";
break;
default:
case 18:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_1_i(do_mfc0_watchlo, t0, sel);
+ gen_helper_1i(mfc0_watchlo, t0, sel);
rn = "WatchLo";
break;
default:
case 19:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_1_i(do_mfc0_watchhi, t0, sel);
+ gen_helper_1i(mfc0_watchhi, t0, sel);
rn = "WatchHi";
break;
default:
case 23:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mfc0_debug, t0); /* EJTAG support */
+ gen_helper_mfc0_debug(t0); /* EJTAG support */
rn = "Debug";
break;
case 1:
-// tcg_gen_helper_1_0(do_mfc0_tracecontrol, t0); /* PDtrace support */
+// gen_helper_mfc0_tracecontrol(t0); /* PDtrace support */
rn = "TraceControl";
// break;
case 2:
-// tcg_gen_helper_1_0(do_mfc0_tracecontrol2, t0); /* PDtrace support */
+// gen_helper_mfc0_tracecontrol2(t0); /* PDtrace support */
rn = "TraceControl2";
// break;
case 3:
-// tcg_gen_helper_1_0(do_mfc0_usertracedata, t0); /* PDtrace support */
+// gen_helper_mfc0_usertracedata(t0); /* PDtrace support */
rn = "UserTraceData";
// break;
case 4:
-// tcg_gen_helper_1_0(do_mfc0_tracebpc, t0); /* PDtrace support */
+// gen_helper_mfc0_tracebpc(t0); /* PDtrace support */
rn = "TraceBPC";
// break;
default:
rn = "Performance0";
break;
case 1:
-// tcg_gen_helper_1_0(do_mfc0_performance1, t0);
+// gen_helper_mfc0_performance1(t0);
rn = "Performance1";
// break;
case 2:
-// tcg_gen_helper_1_0(do_mfc0_performance2, t0);
+// gen_helper_mfc0_performance2(t0);
rn = "Performance2";
// break;
case 3:
-// tcg_gen_helper_1_0(do_mfc0_performance3, t0);
+// gen_helper_mfc0_performance3(t0);
rn = "Performance3";
// break;
case 4:
-// tcg_gen_helper_1_0(do_mfc0_performance4, t0);
+// gen_helper_mfc0_performance4(t0);
rn = "Performance4";
// break;
case 5:
-// tcg_gen_helper_1_0(do_mfc0_performance5, t0);
+// gen_helper_mfc0_performance5(t0);
rn = "Performance5";
// break;
case 6:
-// tcg_gen_helper_1_0(do_mfc0_performance6, t0);
+// gen_helper_mfc0_performance6(t0);
rn = "Performance6";
// break;
case 7:
-// tcg_gen_helper_1_0(do_mfc0_performance7, t0);
+// gen_helper_mfc0_performance7(t0);
rn = "Performance7";
// break;
default:
case 0:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_index, t0);
+ gen_helper_mtc0_index(t0);
rn = "Index";
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_mvpcontrol, t0);
+ gen_helper_mtc0_mvpcontrol(t0);
rn = "MVPControl";
break;
case 2:
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpecontrol, t0);
+ gen_helper_mtc0_vpecontrol(t0);
rn = "VPEControl";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpeconf0, t0);
+ gen_helper_mtc0_vpeconf0(t0);
rn = "VPEConf0";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpeconf1, t0);
+ gen_helper_mtc0_vpeconf1(t0);
rn = "VPEConf1";
break;
case 4:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_yqmask, t0);
+ gen_helper_mtc0_yqmask(t0);
rn = "YQMask";
break;
case 5:
break;
case 7:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpeopt, t0);
+ gen_helper_mtc0_vpeopt(t0);
rn = "VPEOpt";
break;
default:
case 2:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_entrylo0, t0);
+ gen_helper_mtc0_entrylo0(t0);
rn = "EntryLo0";
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcstatus, t0);
+ gen_helper_mtc0_tcstatus(t0);
rn = "TCStatus";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcbind, t0);
+ gen_helper_mtc0_tcbind(t0);
rn = "TCBind";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcrestart, t0);
+ gen_helper_mtc0_tcrestart(t0);
rn = "TCRestart";
break;
case 4:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tchalt, t0);
+ gen_helper_mtc0_tchalt(t0);
rn = "TCHalt";
break;
case 5:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tccontext, t0);
+ gen_helper_mtc0_tccontext(t0);
rn = "TCContext";
break;
case 6:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcschedule, t0);
+ gen_helper_mtc0_tcschedule(t0);
rn = "TCSchedule";
break;
case 7:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcschefback, t0);
+ gen_helper_mtc0_tcschefback(t0);
rn = "TCScheFBack";
break;
default:
case 3:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_entrylo1, t0);
+ gen_helper_mtc0_entrylo1(t0);
rn = "EntryLo1";
break;
default:
case 4:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_context, t0);
+ gen_helper_mtc0_context(t0);
rn = "Context";
break;
case 1:
-// tcg_gen_helper_0_1(do_mtc0_contextconfig, t0); /* SmartMIPS ASE */
+// gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
rn = "ContextConfig";
// break;
default:
case 5:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_pagemask, t0);
+ gen_helper_mtc0_pagemask(t0);
rn = "PageMask";
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_pagegrain, t0);
+ gen_helper_mtc0_pagegrain(t0);
rn = "PageGrain";
break;
default:
case 6:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_wired, t0);
+ gen_helper_mtc0_wired(t0);
rn = "Wired";
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf0, t0);
+ gen_helper_mtc0_srsconf0(t0);
rn = "SRSConf0";
break;
case 2:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf1, t0);
+ gen_helper_mtc0_srsconf1(t0);
rn = "SRSConf1";
break;
case 3:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf2, t0);
+ gen_helper_mtc0_srsconf2(t0);
rn = "SRSConf2";
break;
case 4:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf3, t0);
+ gen_helper_mtc0_srsconf3(t0);
rn = "SRSConf3";
break;
case 5:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf4, t0);
+ gen_helper_mtc0_srsconf4(t0);
rn = "SRSConf4";
break;
default:
switch (sel) {
case 0:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_hwrena, t0);
+ gen_helper_mtc0_hwrena(t0);
rn = "HWREna";
break;
default:
case 9:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_count, t0);
+ gen_helper_mtc0_count(t0);
rn = "Count";
break;
/* 6,7 are implementation dependent */
case 10:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_entryhi, t0);
+ gen_helper_mtc0_entryhi(t0);
rn = "EntryHi";
break;
default:
case 11:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_compare, t0);
+ gen_helper_mtc0_compare(t0);
rn = "Compare";
break;
/* 6,7 are implementation dependent */
case 12:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_status, t0);
+ gen_helper_mtc0_status(t0);
/* BS_STOP isn't good enough here, hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_intctl, t0);
+ gen_helper_mtc0_intctl(t0);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "IntCtl";
break;
case 2:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsctl, t0);
+ gen_helper_mtc0_srsctl(t0);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "SRSCtl";
case 13:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_cause, t0);
+ gen_helper_mtc0_cause(t0);
rn = "Cause";
break;
default:
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_ebase, t0);
+ gen_helper_mtc0_ebase(t0);
rn = "EBase";
break;
default:
case 16:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_config0, t0);
+ gen_helper_mtc0_config0(t0);
rn = "Config";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "Config1";
break;
case 2:
- tcg_gen_helper_0_1(do_mtc0_config2, t0);
+ gen_helper_mtc0_config2(t0);
rn = "Config2";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
case 18:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_0_1i(do_mtc0_watchlo, t0, sel);
+ gen_helper_1i(mtc0_watchlo, t0, sel);
rn = "WatchLo";
break;
default:
case 19:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_0_1i(do_mtc0_watchhi, t0, sel);
+ gen_helper_1i(mtc0_watchhi, t0, sel);
rn = "WatchHi";
break;
default:
case 0:
#if defined(TARGET_MIPS64)
check_insn(env, ctx, ISA_MIPS3);
- tcg_gen_helper_0_1(do_mtc0_xcontext, t0);
+ gen_helper_mtc0_xcontext(t0);
rn = "XContext";
break;
#endif
/* Officially reserved, but sel 0 is used for R1x000 framemask */
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_framemask, t0);
+ gen_helper_mtc0_framemask(t0);
rn = "Framemask";
break;
default:
case 23:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_debug, t0); /* EJTAG support */
+ gen_helper_mtc0_debug(t0); /* EJTAG support */
/* BS_STOP isn't good enough here, hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
rn = "Debug";
break;
case 1:
-// tcg_gen_helper_0_1(do_mtc0_tracecontrol, t0); /* PDtrace support */
+// gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
rn = "TraceControl";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
// break;
case 2:
-// tcg_gen_helper_0_1(do_mtc0_tracecontrol2, t0); /* PDtrace support */
+// gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
rn = "TraceControl2";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
case 3:
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
-// tcg_gen_helper_0_1(do_mtc0_usertracedata, t0); /* PDtrace support */
+// gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
rn = "UserTraceData";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
// break;
case 4:
-// tcg_gen_helper_0_1(do_mtc0_tracebpc, t0); /* PDtrace support */
+// gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "TraceBPC";
case 25:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_performance0, t0);
+ gen_helper_mtc0_performance0(t0);
rn = "Performance0";
break;
case 1:
-// tcg_gen_helper_0_1(do_mtc0_performance1, t0);
+// gen_helper_mtc0_performance1(t0);
rn = "Performance1";
// break;
case 2:
-// tcg_gen_helper_0_1(do_mtc0_performance2, t0);
+// gen_helper_mtc0_performance2(t0);
rn = "Performance2";
// break;
case 3:
-// tcg_gen_helper_0_1(do_mtc0_performance3, t0);
+// gen_helper_mtc0_performance3(t0);
rn = "Performance3";
// break;
case 4:
-// tcg_gen_helper_0_1(do_mtc0_performance4, t0);
+// gen_helper_mtc0_performance4(t0);
rn = "Performance4";
// break;
case 5:
-// tcg_gen_helper_0_1(do_mtc0_performance5, t0);
+// gen_helper_mtc0_performance5(t0);
rn = "Performance5";
// break;
case 6:
-// tcg_gen_helper_0_1(do_mtc0_performance6, t0);
+// gen_helper_mtc0_performance6(t0);
rn = "Performance6";
// break;
case 7:
-// tcg_gen_helper_0_1(do_mtc0_performance7, t0);
+// gen_helper_mtc0_performance7(t0);
rn = "Performance7";
// break;
default:
case 2:
case 4:
case 6:
- tcg_gen_helper_0_1(do_mtc0_taglo, t0);
+ gen_helper_mtc0_taglo(t0);
rn = "TagLo";
break;
case 1:
case 3:
case 5:
case 7:
- tcg_gen_helper_0_1(do_mtc0_datalo, t0);
+ gen_helper_mtc0_datalo(t0);
rn = "DataLo";
break;
default:
case 2:
case 4:
case 6:
- tcg_gen_helper_0_1(do_mtc0_taghi, t0);
+ gen_helper_mtc0_taghi(t0);
rn = "TagHi";
break;
case 1:
case 3:
case 5:
case 7:
- tcg_gen_helper_0_1(do_mtc0_datahi, t0);
+ gen_helper_mtc0_datahi(t0);
rn = "DataHi";
break;
default:
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_mvpcontrol, t0);
+ gen_helper_mfc0_mvpcontrol(t0);
rn = "MVPControl";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_mvpconf0, t0);
+ gen_helper_mfc0_mvpconf0(t0);
rn = "MVPConf0";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_mvpconf1, t0);
+ gen_helper_mfc0_mvpconf1(t0);
rn = "MVPConf1";
break;
default:
case 1:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mfc0_random, t0);
+ gen_helper_mfc0_random(t0);
rn = "Random";
break;
case 1:
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcstatus, t0);
+ gen_helper_mfc0_tcstatus(t0);
rn = "TCStatus";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_mfc0_tcbind, t0);
+ gen_helper_mfc0_tcbind(t0);
rn = "TCBind";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_dmfc0_tcrestart, t0);
+ gen_helper_dmfc0_tcrestart(t0);
rn = "TCRestart";
break;
case 4:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_dmfc0_tchalt, t0);
+ gen_helper_dmfc0_tchalt(t0);
rn = "TCHalt";
break;
case 5:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_dmfc0_tccontext, t0);
+ gen_helper_dmfc0_tccontext(t0);
rn = "TCContext";
break;
case 6:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_dmfc0_tcschedule, t0);
+ gen_helper_dmfc0_tcschedule(t0);
rn = "TCSchedule";
break;
case 7:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_0(do_dmfc0_tcschefback, t0);
+ gen_helper_dmfc0_tcschefback(t0);
rn = "TCScheFBack";
break;
default:
rn = "Context";
break;
case 1:
-// tcg_gen_helper_1_0(do_dmfc0_contextconfig, t0); /* SmartMIPS ASE */
+// gen_helper_dmfc0_contextconfig(t0); /* SmartMIPS ASE */
rn = "ContextConfig";
// break;
default:
/* Mark as an IO operation because we read the time. */
if (use_icount)
gen_io_start();
- tcg_gen_helper_1_0(do_mfc0_count, t0);
+ gen_helper_mfc0_count(t0);
if (use_icount) {
gen_io_end();
ctx->bstate = BS_STOP;
case 17:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_dmfc0_lladdr, t0);
+ gen_helper_dmfc0_lladdr(t0);
rn = "LLAddr";
break;
default:
case 18:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_1_i(do_dmfc0_watchlo, t0, sel);
+ gen_helper_1i(dmfc0_watchlo, t0, sel);
rn = "WatchLo";
break;
default:
case 19:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_1_i(do_mfc0_watchhi, t0, sel);
+ gen_helper_1i(mfc0_watchhi, t0, sel);
rn = "WatchHi";
break;
default:
case 23:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mfc0_debug, t0); /* EJTAG support */
+ gen_helper_mfc0_debug(t0); /* EJTAG support */
rn = "Debug";
break;
case 1:
-// tcg_gen_helper_1_0(do_dmfc0_tracecontrol, t0); /* PDtrace support */
+// gen_helper_dmfc0_tracecontrol(t0); /* PDtrace support */
rn = "TraceControl";
// break;
case 2:
-// tcg_gen_helper_1_0(do_dmfc0_tracecontrol2, t0); /* PDtrace support */
+// gen_helper_dmfc0_tracecontrol2(t0); /* PDtrace support */
rn = "TraceControl2";
// break;
case 3:
-// tcg_gen_helper_1_0(do_dmfc0_usertracedata, t0); /* PDtrace support */
+// gen_helper_dmfc0_usertracedata(t0); /* PDtrace support */
rn = "UserTraceData";
// break;
case 4:
-// tcg_gen_helper_1_0(do_dmfc0_tracebpc, t0); /* PDtrace support */
+// gen_helper_dmfc0_tracebpc(t0); /* PDtrace support */
rn = "TraceBPC";
// break;
default:
rn = "Performance0";
break;
case 1:
-// tcg_gen_helper_1_0(do_dmfc0_performance1, t0);
+// gen_helper_dmfc0_performance1(t0);
rn = "Performance1";
// break;
case 2:
-// tcg_gen_helper_1_0(do_dmfc0_performance2, t0);
+// gen_helper_dmfc0_performance2(t0);
rn = "Performance2";
// break;
case 3:
-// tcg_gen_helper_1_0(do_dmfc0_performance3, t0);
+// gen_helper_dmfc0_performance3(t0);
rn = "Performance3";
// break;
case 4:
-// tcg_gen_helper_1_0(do_dmfc0_performance4, t0);
+// gen_helper_dmfc0_performance4(t0);
rn = "Performance4";
// break;
case 5:
-// tcg_gen_helper_1_0(do_dmfc0_performance5, t0);
+// gen_helper_dmfc0_performance5(t0);
rn = "Performance5";
// break;
case 6:
-// tcg_gen_helper_1_0(do_dmfc0_performance6, t0);
+// gen_helper_dmfc0_performance6(t0);
rn = "Performance6";
// break;
case 7:
-// tcg_gen_helper_1_0(do_dmfc0_performance7, t0);
+// gen_helper_dmfc0_performance7(t0);
rn = "Performance7";
// break;
default:
case 0:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_index, t0);
+ gen_helper_mtc0_index(t0);
rn = "Index";
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_mvpcontrol, t0);
+ gen_helper_mtc0_mvpcontrol(t0);
rn = "MVPControl";
break;
case 2:
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpecontrol, t0);
+ gen_helper_mtc0_vpecontrol(t0);
rn = "VPEControl";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpeconf0, t0);
+ gen_helper_mtc0_vpeconf0(t0);
rn = "VPEConf0";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpeconf1, t0);
+ gen_helper_mtc0_vpeconf1(t0);
rn = "VPEConf1";
break;
case 4:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_yqmask, t0);
+ gen_helper_mtc0_yqmask(t0);
rn = "YQMask";
break;
case 5:
break;
case 7:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_vpeopt, t0);
+ gen_helper_mtc0_vpeopt(t0);
rn = "VPEOpt";
break;
default:
case 2:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_entrylo0, t0);
+ gen_helper_mtc0_entrylo0(t0);
rn = "EntryLo0";
break;
case 1:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcstatus, t0);
+ gen_helper_mtc0_tcstatus(t0);
rn = "TCStatus";
break;
case 2:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcbind, t0);
+ gen_helper_mtc0_tcbind(t0);
rn = "TCBind";
break;
case 3:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcrestart, t0);
+ gen_helper_mtc0_tcrestart(t0);
rn = "TCRestart";
break;
case 4:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tchalt, t0);
+ gen_helper_mtc0_tchalt(t0);
rn = "TCHalt";
break;
case 5:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tccontext, t0);
+ gen_helper_mtc0_tccontext(t0);
rn = "TCContext";
break;
case 6:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcschedule, t0);
+ gen_helper_mtc0_tcschedule(t0);
rn = "TCSchedule";
break;
case 7:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_0_1(do_mtc0_tcschefback, t0);
+ gen_helper_mtc0_tcschefback(t0);
rn = "TCScheFBack";
break;
default:
case 3:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_entrylo1, t0);
+ gen_helper_mtc0_entrylo1(t0);
rn = "EntryLo1";
break;
default:
case 4:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_context, t0);
+ gen_helper_mtc0_context(t0);
rn = "Context";
break;
case 1:
-// tcg_gen_helper_0_1(do_mtc0_contextconfig, t0); /* SmartMIPS ASE */
+// gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
rn = "ContextConfig";
// break;
default:
case 5:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_pagemask, t0);
+ gen_helper_mtc0_pagemask(t0);
rn = "PageMask";
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_pagegrain, t0);
+ gen_helper_mtc0_pagegrain(t0);
rn = "PageGrain";
break;
default:
case 6:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_wired, t0);
+ gen_helper_mtc0_wired(t0);
rn = "Wired";
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf0, t0);
+ gen_helper_mtc0_srsconf0(t0);
rn = "SRSConf0";
break;
case 2:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf1, t0);
+ gen_helper_mtc0_srsconf1(t0);
rn = "SRSConf1";
break;
case 3:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf2, t0);
+ gen_helper_mtc0_srsconf2(t0);
rn = "SRSConf2";
break;
case 4:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf3, t0);
+ gen_helper_mtc0_srsconf3(t0);
rn = "SRSConf3";
break;
case 5:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsconf4, t0);
+ gen_helper_mtc0_srsconf4(t0);
rn = "SRSConf4";
break;
default:
switch (sel) {
case 0:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_hwrena, t0);
+ gen_helper_mtc0_hwrena(t0);
rn = "HWREna";
break;
default:
case 9:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_count, t0);
+ gen_helper_mtc0_count(t0);
rn = "Count";
break;
/* 6,7 are implementation dependent */
case 10:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_entryhi, t0);
+ gen_helper_mtc0_entryhi(t0);
rn = "EntryHi";
break;
default:
case 11:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_compare, t0);
+ gen_helper_mtc0_compare(t0);
rn = "Compare";
break;
/* 6,7 are implementation dependent */
case 12:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_status, t0);
+ gen_helper_mtc0_status(t0);
/* BS_STOP isn't good enough here; hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_intctl, t0);
+ gen_helper_mtc0_intctl(t0);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "IntCtl";
break;
case 2:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_srsctl, t0);
+ gen_helper_mtc0_srsctl(t0);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "SRSCtl";
case 13:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_cause, t0);
+ gen_helper_mtc0_cause(t0);
rn = "Cause";
break;
default:
break;
case 1:
check_insn(env, ctx, ISA_MIPS32R2);
- tcg_gen_helper_0_1(do_mtc0_ebase, t0);
+ gen_helper_mtc0_ebase(t0);
rn = "EBase";
break;
default:
case 16:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_config0, t0);
+ gen_helper_mtc0_config0(t0);
rn = "Config";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "Config1";
break;
case 2:
- tcg_gen_helper_0_1(do_mtc0_config2, t0);
+ gen_helper_mtc0_config2(t0);
rn = "Config2";
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
case 18:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_0_1i(do_mtc0_watchlo, t0, sel);
+ gen_helper_1i(mtc0_watchlo, t0, sel);
rn = "WatchLo";
break;
default:
case 19:
switch (sel) {
case 0 ... 7:
- tcg_gen_helper_0_1i(do_mtc0_watchhi, t0, sel);
+ gen_helper_1i(mtc0_watchhi, t0, sel);
rn = "WatchHi";
break;
default:
switch (sel) {
case 0:
check_insn(env, ctx, ISA_MIPS3);
- tcg_gen_helper_0_1(do_mtc0_xcontext, t0);
+ gen_helper_mtc0_xcontext(t0);
rn = "XContext";
break;
default:
/* Officially reserved, but sel 0 is used for R1x000 framemask */
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_framemask, t0);
+ gen_helper_mtc0_framemask(t0);
rn = "Framemask";
break;
default:
case 23:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_debug, t0); /* EJTAG support */
+ gen_helper_mtc0_debug(t0); /* EJTAG support */
/* BS_STOP isn't good enough here; hflags may have changed. */
gen_save_pc(ctx->pc + 4);
ctx->bstate = BS_EXCP;
rn = "Debug";
break;
case 1:
-// tcg_gen_helper_0_1(do_mtc0_tracecontrol, t0); /* PDtrace support */
+// gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "TraceControl";
// break;
case 2:
-// tcg_gen_helper_0_1(do_mtc0_tracecontrol2, t0); /* PDtrace support */
+// gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "TraceControl2";
// break;
case 3:
-// tcg_gen_helper_0_1(do_mtc0_usertracedata, t0); /* PDtrace support */
+// gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "UserTraceData";
// break;
case 4:
-// tcg_gen_helper_0_1(do_mtc0_tracebpc, t0); /* PDtrace support */
+// gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
rn = "TraceBPC";
case 25:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mtc0_performance0, t0);
+ gen_helper_mtc0_performance0(t0);
rn = "Performance0";
break;
case 1:
-// tcg_gen_helper_0_1(do_mtc0_performance1, t0);
+// gen_helper_mtc0_performance1(t0);
rn = "Performance1";
// break;
case 2:
-// tcg_gen_helper_0_1(do_mtc0_performance2, t0);
+// gen_helper_mtc0_performance2(t0);
rn = "Performance2";
// break;
case 3:
-// tcg_gen_helper_0_1(do_mtc0_performance3, t0);
+// gen_helper_mtc0_performance3(t0);
rn = "Performance3";
// break;
case 4:
-// tcg_gen_helper_0_1(do_mtc0_performance4, t0);
+// gen_helper_mtc0_performance4(t0);
rn = "Performance4";
// break;
case 5:
-// tcg_gen_helper_0_1(do_mtc0_performance5, t0);
+// gen_helper_mtc0_performance5(t0);
rn = "Performance5";
// break;
case 6:
-// tcg_gen_helper_0_1(do_mtc0_performance6, t0);
+// gen_helper_mtc0_performance6(t0);
rn = "Performance6";
// break;
case 7:
-// tcg_gen_helper_0_1(do_mtc0_performance7, t0);
+// gen_helper_mtc0_performance7(t0);
rn = "Performance7";
// break;
default:
case 2:
case 4:
case 6:
- tcg_gen_helper_0_1(do_mtc0_taglo, t0);
+ gen_helper_mtc0_taglo(t0);
rn = "TagLo";
break;
case 1:
case 3:
case 5:
case 7:
- tcg_gen_helper_0_1(do_mtc0_datalo, t0);
+ gen_helper_mtc0_datalo(t0);
rn = "DataLo";
break;
default:
case 2:
case 4:
case 6:
- tcg_gen_helper_0_1(do_mtc0_taghi, t0);
+ gen_helper_mtc0_taghi(t0);
rn = "TagHi";
break;
case 1:
case 3:
case 5:
case 7:
- tcg_gen_helper_0_1(do_mtc0_datahi, t0);
+ gen_helper_mtc0_datahi(t0);
rn = "DataHi";
break;
default:
int u, int sel, int h)
{
int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
case 2:
switch (sel) {
case 1:
- tcg_gen_helper_1_0(do_mftc0_tcstatus, t0);
+ gen_helper_mftc0_tcstatus(t0);
break;
case 2:
- tcg_gen_helper_1_0(do_mftc0_tcbind, t0);
+ gen_helper_mftc0_tcbind(t0);
break;
case 3:
- tcg_gen_helper_1_0(do_mftc0_tcrestart, t0);
+ gen_helper_mftc0_tcrestart(t0);
break;
case 4:
- tcg_gen_helper_1_0(do_mftc0_tchalt, t0);
+ gen_helper_mftc0_tchalt(t0);
break;
case 5:
- tcg_gen_helper_1_0(do_mftc0_tccontext, t0);
+ gen_helper_mftc0_tccontext(t0);
break;
case 6:
- tcg_gen_helper_1_0(do_mftc0_tcschedule, t0);
+ gen_helper_mftc0_tcschedule(t0);
break;
case 7:
- tcg_gen_helper_1_0(do_mftc0_tcschefback, t0);
+ gen_helper_mftc0_tcschefback(t0);
break;
default:
gen_mfc0(env, ctx, t0, rt, sel);
case 10:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mftc0_entryhi, t0);
+ gen_helper_mftc0_entryhi(t0);
break;
default:
gen_mfc0(env, ctx, t0, rt, sel);
case 12:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mftc0_status, t0);
+ gen_helper_mftc0_status(t0);
break;
default:
gen_mfc0(env, ctx, t0, rt, sel);
case 23:
switch (sel) {
case 0:
- tcg_gen_helper_1_0(do_mftc0_debug, t0);
+ gen_helper_mftc0_debug(t0);
break;
default:
gen_mfc0(env, ctx, t0, rt, sel);
} else switch (sel) {
/* GPR registers. */
case 0:
- tcg_gen_helper_1_i(do_mftgpr, t0, rt);
+ gen_helper_1i(mftgpr, t0, rt);
break;
/* Auxiliary CPU registers */
case 1:
switch (rt) {
case 0:
- tcg_gen_helper_1_i(do_mftlo, t0, 0);
+ gen_helper_1i(mftlo, t0, 0);
break;
case 1:
- tcg_gen_helper_1_i(do_mfthi, t0, 0);
+ gen_helper_1i(mfthi, t0, 0);
break;
case 2:
- tcg_gen_helper_1_i(do_mftacx, t0, 0);
+ gen_helper_1i(mftacx, t0, 0);
break;
case 4:
- tcg_gen_helper_1_i(do_mftlo, t0, 1);
+ gen_helper_1i(mftlo, t0, 1);
break;
case 5:
- tcg_gen_helper_1_i(do_mfthi, t0, 1);
+ gen_helper_1i(mfthi, t0, 1);
break;
case 6:
- tcg_gen_helper_1_i(do_mftacx, t0, 1);
+ gen_helper_1i(mftacx, t0, 1);
break;
case 8:
- tcg_gen_helper_1_i(do_mftlo, t0, 2);
+ gen_helper_1i(mftlo, t0, 2);
break;
case 9:
- tcg_gen_helper_1_i(do_mfthi, t0, 2);
+ gen_helper_1i(mfthi, t0, 2);
break;
case 10:
- tcg_gen_helper_1_i(do_mftacx, t0, 2);
+ gen_helper_1i(mftacx, t0, 2);
break;
case 12:
- tcg_gen_helper_1_i(do_mftlo, t0, 3);
+ gen_helper_1i(mftlo, t0, 3);
break;
case 13:
- tcg_gen_helper_1_i(do_mfthi, t0, 3);
+ gen_helper_1i(mfthi, t0, 3);
break;
case 14:
- tcg_gen_helper_1_i(do_mftacx, t0, 3);
+ gen_helper_1i(mftacx, t0, 3);
break;
case 16:
- tcg_gen_helper_1_0(do_mftdsp, t0);
+ gen_helper_mftdsp(t0);
break;
default:
goto die;
case 2:
/* XXX: For now we support only a single FPU context. */
if (h == 0) {
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, rt);
tcg_gen_ext_i32_tl(t0, fp0);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
} else {
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32h(fp0, rt);
tcg_gen_ext_i32_tl(t0, fp0);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
break;
case 3:
/* XXX: For now we support only a single FPU context. */
- tcg_gen_helper_1_1i(do_cfc1, t0, t0, rt);
+ gen_helper_1i(cfc1, t0, rt);
break;
/* COP2: Not implemented. */
case 4:
int u, int sel, int h)
{
int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
gen_load_gpr(t0, rt);
if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
case 2:
switch (sel) {
case 1:
- tcg_gen_helper_0_1(do_mttc0_tcstatus, t0);
+ gen_helper_mttc0_tcstatus(t0);
break;
case 2:
- tcg_gen_helper_0_1(do_mttc0_tcbind, t0);
+ gen_helper_mttc0_tcbind(t0);
break;
case 3:
- tcg_gen_helper_0_1(do_mttc0_tcrestart, t0);
+ gen_helper_mttc0_tcrestart(t0);
break;
case 4:
- tcg_gen_helper_0_1(do_mttc0_tchalt, t0);
+ gen_helper_mttc0_tchalt(t0);
break;
case 5:
- tcg_gen_helper_0_1(do_mttc0_tccontext, t0);
+ gen_helper_mttc0_tccontext(t0);
break;
case 6:
- tcg_gen_helper_0_1(do_mttc0_tcschedule, t0);
+ gen_helper_mttc0_tcschedule(t0);
break;
case 7:
- tcg_gen_helper_0_1(do_mttc0_tcschefback, t0);
+ gen_helper_mttc0_tcschefback(t0);
break;
default:
gen_mtc0(env, ctx, t0, rd, sel);
case 10:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mttc0_entryhi, t0);
+ gen_helper_mttc0_entryhi(t0);
break;
default:
gen_mtc0(env, ctx, t0, rd, sel);
case 12:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mttc0_status, t0);
+ gen_helper_mttc0_status(t0);
break;
default:
gen_mtc0(env, ctx, t0, rd, sel);
case 23:
switch (sel) {
case 0:
- tcg_gen_helper_0_1(do_mttc0_debug, t0);
+ gen_helper_mttc0_debug(t0);
break;
default:
gen_mtc0(env, ctx, t0, rd, sel);
} else switch (sel) {
/* GPR registers. */
case 0:
- tcg_gen_helper_0_1i(do_mttgpr, t0, rd);
+ gen_helper_1i(mttgpr, t0, rd);
break;
/* Auxiliary CPU registers */
case 1:
switch (rd) {
case 0:
- tcg_gen_helper_0_1i(do_mttlo, t0, 0);
+ gen_helper_1i(mttlo, t0, 0);
break;
case 1:
- tcg_gen_helper_0_1i(do_mtthi, t0, 0);
+ gen_helper_1i(mtthi, t0, 0);
break;
case 2:
- tcg_gen_helper_0_1i(do_mttacx, t0, 0);
+ gen_helper_1i(mttacx, t0, 0);
break;
case 4:
- tcg_gen_helper_0_1i(do_mttlo, t0, 1);
+ gen_helper_1i(mttlo, t0, 1);
break;
case 5:
- tcg_gen_helper_0_1i(do_mtthi, t0, 1);
+ gen_helper_1i(mtthi, t0, 1);
break;
case 6:
- tcg_gen_helper_0_1i(do_mttacx, t0, 1);
+ gen_helper_1i(mttacx, t0, 1);
break;
case 8:
- tcg_gen_helper_0_1i(do_mttlo, t0, 2);
+ gen_helper_1i(mttlo, t0, 2);
break;
case 9:
- tcg_gen_helper_0_1i(do_mtthi, t0, 2);
+ gen_helper_1i(mtthi, t0, 2);
break;
case 10:
- tcg_gen_helper_0_1i(do_mttacx, t0, 2);
+ gen_helper_1i(mttacx, t0, 2);
break;
case 12:
- tcg_gen_helper_0_1i(do_mttlo, t0, 3);
+ gen_helper_1i(mttlo, t0, 3);
break;
case 13:
- tcg_gen_helper_0_1i(do_mtthi, t0, 3);
+ gen_helper_1i(mtthi, t0, 3);
break;
case 14:
- tcg_gen_helper_0_1i(do_mttacx, t0, 3);
+ gen_helper_1i(mttacx, t0, 3);
break;
case 16:
- tcg_gen_helper_0_1(do_mttdsp, t0);
+ gen_helper_mttdsp(t0);
break;
default:
goto die;
case 2:
/* XXX: For now we support only a single FPU context. */
if (h == 0) {
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32(fp0, rd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
} else {
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32h(fp0, rd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
break;
case 3:
/* XXX: For now we support only a single FPU context. */
- tcg_gen_helper_0_1i(do_ctc1, t0, rd);
+ gen_helper_1i(ctc1, t0, rd);
break;
/* COP2: Not implemented. */
case 4:
return;
}
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
gen_mfc0(env, ctx, t0, rd, ctx->opcode & 0x7);
gen_store_gpr(t0, rt);
break;
case OPC_MTC0:
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
gen_load_gpr(t0, rt);
save_cpu_state(ctx, 1);
return;
}
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
gen_dmfc0(env, ctx, t0, rd, ctx->opcode & 0x7);
gen_store_gpr(t0, rt);
case OPC_DMTC0:
check_insn(env, ctx, ISA_MIPS3);
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
gen_load_gpr(t0, rt);
save_cpu_state(ctx, 1);
opn = "tlbwi";
if (!env->tlb->do_tlbwi)
goto die;
- tcg_gen_helper_0_0(env->tlb->do_tlbwi);
+ gen_helper_tlbwi();
break;
case OPC_TLBWR:
opn = "tlbwr";
if (!env->tlb->do_tlbwr)
goto die;
- tcg_gen_helper_0_0(env->tlb->do_tlbwr);
+ gen_helper_tlbwr();
break;
case OPC_TLBP:
opn = "tlbp";
if (!env->tlb->do_tlbp)
goto die;
- tcg_gen_helper_0_0(env->tlb->do_tlbp);
+ gen_helper_tlbp();
break;
case OPC_TLBR:
opn = "tlbr";
if (!env->tlb->do_tlbr)
goto die;
- tcg_gen_helper_0_0(env->tlb->do_tlbr);
+ gen_helper_tlbr();
break;
case OPC_ERET:
opn = "eret";
check_insn(env, ctx, ISA_MIPS2);
save_cpu_state(ctx, 1);
- tcg_gen_helper_0_0(do_eret);
+ gen_helper_eret();
ctx->bstate = BS_EXCP;
break;
case OPC_DERET:
generate_exception(ctx, EXCP_RI);
} else {
save_cpu_state(ctx, 1);
- tcg_gen_helper_0_0(do_deret);
+ gen_helper_deret();
ctx->bstate = BS_EXCP;
}
break;
ctx->pc += 4;
save_cpu_state(ctx, 1);
ctx->pc -= 4;
- tcg_gen_helper_0_0(do_wait);
+ gen_helper_wait();
ctx->bstate = BS_EXCP;
break;
default:
{
target_ulong btarget;
const char *opn = "cp1 cond branch";
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv_i32 t0 = tcg_temp_new_i32();
if (cc != 0)
check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
ctx->btarget = btarget;
out:
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
/* Coprocessor 1 (FPU) */
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
{
const char *opn = "cp1 move";
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
switch (opc) {
case OPC_MFC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
tcg_gen_ext_i32_tl(t0, fp0);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
gen_store_gpr(t0, rt);
opn = "mfc1";
case OPC_MTC1:
gen_load_gpr(t0, rt);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32(fp0, fs);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "mtc1";
break;
case OPC_CFC1:
- tcg_gen_helper_1_i(do_cfc1, t0, fs);
+ gen_helper_1i(cfc1, t0, fs);
gen_store_gpr(t0, rt);
opn = "cfc1";
break;
case OPC_CTC1:
gen_load_gpr(t0, rt);
- tcg_gen_helper_0_1i(do_ctc1, t0, fs);
+ gen_helper_1i(ctc1, t0, fs);
opn = "ctc1";
break;
case OPC_DMFC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_mov_tl(t0, fp0);
- tcg_temp_free(fp0);
+ tcg_gen_trunc_i64_tl(t0, fp0);
+ tcg_temp_free_i64(fp0);
}
gen_store_gpr(t0, rt);
opn = "dmfc1";
case OPC_DMTC1:
gen_load_gpr(t0, rt);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
- tcg_gen_mov_tl(fp0, t0);
+ tcg_gen_extu_tl_i64(fp0, t0);
gen_store_fpr64(ctx, fp0, fs);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "dmtc1";
break;
case OPC_MFHC1:
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32h(fp0, fs);
tcg_gen_ext_i32_tl(t0, fp0);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
gen_store_gpr(t0, rt);
opn = "mfhc1";
case OPC_MTHC1:
gen_load_gpr(t0, rt);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(fp0, t0);
gen_store_fpr32h(fp0, fs);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "mthc1";
break;
int l1 = gen_new_label();
uint32_t ccbit;
TCGCond cond;
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv r_tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv_i32 r_tmp = tcg_temp_new_i32();
if (cc)
ccbit = 1 << (24 + cc);
gen_load_gpr(t0, rd);
tcg_gen_andi_i32(r_tmp, fpu_fcr31, ccbit);
tcg_gen_brcondi_i32(cond, r_tmp, 0, l1);
+ tcg_temp_free_i32(r_tmp);
gen_load_gpr(t0, rs);
gen_set_label(l1);
gen_store_gpr(t0, rd);
{
uint32_t ccbit;
int cond;
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
int l1 = gen_new_label();
if (cc)
gen_load_fpr32(fp0, fd);
tcg_gen_andi_i32(r_tmp1, fpu_fcr31, ccbit);
tcg_gen_brcondi_i32(cond, r_tmp1, 0, l1);
+ tcg_temp_free_i32(r_tmp1);
gen_load_fpr32(fp0, fs);
gen_set_label(l1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
{
uint32_t ccbit;
int cond;
- TCGv r_tmp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I64);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
+ TCGv_i64 fp0 = tcg_temp_local_new_i64();
int l1 = gen_new_label();
if (cc)
gen_load_fpr64(ctx, fp0, fd);
tcg_gen_andi_i32(r_tmp1, fpu_fcr31, ccbit);
tcg_gen_brcondi_i32(cond, r_tmp1, 0, l1);
+ tcg_temp_free_i32(r_tmp1);
gen_load_fpr64(ctx, fp0, fs);
gen_set_label(l1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
{
uint32_t ccbit1, ccbit2;
int cond;
- TCGv r_tmp1 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp1 = tcg_temp_new_i32();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
int l1 = gen_new_label();
int l2 = gen_new_label();
gen_set_label(l2);
gen_store_fpr32h(fp0, fd);
- tcg_temp_free(r_tmp1);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(r_tmp1);
+ tcg_temp_free_i32(fp0);
}
switch (ctx->opcode & FOP(0x3f, 0x1f)) {
case FOP(0, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
- tcg_gen_helper_1_2(do_float_add_s, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_add_s(fp0, fp0, fp1);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "add.s";
optype = BINOP;
break;
case FOP(1, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
- tcg_gen_helper_1_2(do_float_sub_s, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_sub_s(fp0, fp0, fp1);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "sub.s";
optype = BINOP;
break;
case FOP(2, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
- tcg_gen_helper_1_2(do_float_mul_s, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_mul_s(fp0, fp0, fp1);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "mul.s";
optype = BINOP;
break;
case FOP(3, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
- tcg_gen_helper_1_2(do_float_div_s, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_div_s(fp0, fp0, fp1);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "div.s";
optype = BINOP;
break;
case FOP(4, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_sqrt_s, fp0, fp0);
+ gen_helper_float_sqrt_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "sqrt.s";
break;
case FOP(5, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_abs_s, fp0, fp0);
+ gen_helper_float_abs_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "abs.s";
break;
case FOP(6, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "mov.s";
break;
case FOP(7, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_chs_s, fp0, fp0);
+ gen_helper_float_chs_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "neg.s";
break;
case FOP(8, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_roundl_s, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_roundl_s(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "round.l.s";
break;
case FOP(9, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_truncl_s, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_truncl_s(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "trunc.l.s";
break;
case FOP(10, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_ceill_s, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_ceill_s(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "ceil.l.s";
break;
case FOP(11, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_floorl_s, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_floorl_s(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "floor.l.s";
break;
case FOP(12, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_roundw_s, fp0, fp0);
+ gen_helper_float_roundw_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "round.w.s";
break;
case FOP(13, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_truncw_s, fp0, fp0);
+ gen_helper_float_truncw_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "trunc.w.s";
break;
case FOP(14, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_ceilw_s, fp0, fp0);
+ gen_helper_float_ceilw_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "ceil.w.s";
break;
case FOP(15, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_floorw_s, fp0, fp0);
+ gen_helper_float_floorw_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "floor.w.s";
break;
case FOP(18, 16):
{
int l1 = gen_new_label();
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv t0 = tcg_temp_new();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
gen_load_gpr(t0, ft);
tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
gen_load_fpr32(fp0, fs);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
gen_set_label(l1);
tcg_temp_free(t0);
}
case FOP(19, 16):
{
int l1 = gen_new_label();
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv t0 = tcg_temp_new();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
gen_load_gpr(t0, ft);
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
gen_load_fpr32(fp0, fs);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
gen_set_label(l1);
tcg_temp_free(t0);
}
case FOP(21, 16):
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_recip_s, fp0, fp0);
+ gen_helper_float_recip_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "recip.s";
break;
case FOP(22, 16):
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_rsqrt_s, fp0, fp0);
+ gen_helper_float_rsqrt_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "rsqrt.s";
break;
case FOP(28, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, fd);
- tcg_gen_helper_1_2(do_float_recip2_s, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_recip2_s(fp0, fp0, fp1);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "recip2.s";
break;
case FOP(29, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_recip1_s, fp0, fp0);
+ gen_helper_float_recip1_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "recip1.s";
break;
case FOP(30, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_rsqrt1_s, fp0, fp0);
+ gen_helper_float_rsqrt1_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "rsqrt1.s";
break;
case FOP(31, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
- tcg_gen_helper_1_2(do_float_rsqrt2_s, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_rsqrt2_s(fp0, fp0, fp1);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "rsqrt2.s";
break;
case FOP(33, 16):
check_cp1_registers(ctx, fd);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_cvtd_s, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_cvtd_s(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "cvt.d.s";
break;
case FOP(36, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_cvtw_s, fp0, fp0);
+ gen_helper_float_cvtw_s(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "cvt.w.s";
break;
case FOP(37, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_cvtl_s, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_cvtl_s(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "cvt.l.s";
break;
case FOP(38, 16):
check_cp1_64bitmode(ctx);
{
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp32_0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp32_1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i64 fp64 = tcg_temp_new_i64();
+ TCGv_i32 fp32_0 = tcg_temp_new_i32();
+ TCGv_i32 fp32_1 = tcg_temp_new_i32();
gen_load_fpr32(fp32_0, fs);
gen_load_fpr32(fp32_1, ft);
tcg_gen_concat_i32_i64(fp64, fp32_0, fp32_1);
- tcg_temp_free(fp32_1);
- tcg_temp_free(fp32_0);
+ tcg_temp_free_i32(fp32_1);
+ tcg_temp_free_i32(fp32_0);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "cvt.ps.s";
break;
case FOP(62, 16):
case FOP(63, 16):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_cmp_s(func-48, fp0, fp1, cc);
opn = condnames[func-48];
}
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
}
break;
case FOP(0, 17):
check_cp1_registers(ctx, fs | ft | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_add_d, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_add_d(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "add.d";
optype = BINOP;
case FOP(1, 17):
check_cp1_registers(ctx, fs | ft | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_sub_d, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_sub_d(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "sub.d";
optype = BINOP;
case FOP(2, 17):
check_cp1_registers(ctx, fs | ft | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_mul_d, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_mul_d(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "mul.d";
optype = BINOP;
case FOP(3, 17):
check_cp1_registers(ctx, fs | ft | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_div_d, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_div_d(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "div.d";
optype = BINOP;
case FOP(4, 17):
check_cp1_registers(ctx, fs | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_sqrt_d, fp0, fp0);
+ gen_helper_float_sqrt_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "sqrt.d";
break;
case FOP(5, 17):
check_cp1_registers(ctx, fs | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_abs_d, fp0, fp0);
+ gen_helper_float_abs_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "abs.d";
break;
case FOP(6, 17):
check_cp1_registers(ctx, fs | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "mov.d";
break;
case FOP(7, 17):
check_cp1_registers(ctx, fs | fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_chs_d, fp0, fp0);
+ gen_helper_float_chs_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "neg.d";
break;
case FOP(8, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_roundl_d, fp0, fp0);
+ gen_helper_float_roundl_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "round.l.d";
break;
case FOP(9, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_truncl_d, fp0, fp0);
+ gen_helper_float_truncl_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "trunc.l.d";
break;
case FOP(10, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_ceill_d, fp0, fp0);
+ gen_helper_float_ceill_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "ceil.l.d";
break;
case FOP(11, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_floorl_d, fp0, fp0);
+ gen_helper_float_floorl_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "floor.l.d";
break;
case FOP(12, 17):
check_cp1_registers(ctx, fs);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_roundw_d, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_roundw_d(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "round.w.d";
break;
case FOP(13, 17):
check_cp1_registers(ctx, fs);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_truncw_d, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_truncw_d(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "trunc.w.d";
break;
case FOP(14, 17):
check_cp1_registers(ctx, fs);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_ceilw_d, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_ceilw_d(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "ceil.w.d";
break;
case FOP(15, 17):
check_cp1_registers(ctx, fs);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_floorw_d, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_floorw_d(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "floor.w.d";
break;
case FOP(18, 17):
{
int l1 = gen_new_label();
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I64);
+ TCGv t0 = tcg_temp_new();
+ TCGv_i64 fp0 = tcg_temp_local_new_i64();
gen_load_gpr(t0, ft);
tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
gen_set_label(l1);
tcg_temp_free(t0);
}
case FOP(19, 17):
{
int l1 = gen_new_label();
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I64);
+ TCGv t0 = tcg_temp_new();
+ TCGv_i64 fp0 = tcg_temp_local_new_i64();
gen_load_gpr(t0, ft);
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
gen_set_label(l1);
tcg_temp_free(t0);
}
case FOP(21, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_recip_d, fp0, fp0);
+ gen_helper_float_recip_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "recip.d";
break;
case FOP(22, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_rsqrt_d, fp0, fp0);
+ gen_helper_float_rsqrt_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "rsqrt.d";
break;
case FOP(28, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_recip2_d, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_recip2_d(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "recip2.d";
break;
case FOP(29, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_recip1_d, fp0, fp0);
+ gen_helper_float_recip1_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "recip1.d";
break;
case FOP(30, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_rsqrt1_d, fp0, fp0);
+ gen_helper_float_rsqrt1_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "rsqrt1.d";
break;
case FOP(31, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_rsqrt2_d, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "rsqrt2.d";
break;
case FOP(62, 17):
case FOP(63, 17):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_cmp_d(func-48, fp0, fp1, cc);
opn = condnames[func-48];
}
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
}
break;
case FOP(32, 17):
check_cp1_registers(ctx, fs);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_cvts_d, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_cvts_d(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "cvt.s.d";
break;
case FOP(36, 17):
check_cp1_registers(ctx, fs);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_cvtw_d, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_cvtw_d(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "cvt.w.d";
break;
case FOP(37, 17):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_cvtl_d, fp0, fp0);
+ gen_helper_float_cvtl_d(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "cvt.l.d";
break;
case FOP(32, 20):
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_cvts_w, fp0, fp0);
+ gen_helper_float_cvts_w(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "cvt.s.w";
break;
case FOP(33, 20):
check_cp1_registers(ctx, fd);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr32(fp32, fs);
- tcg_gen_helper_1_1(do_float_cvtd_w, fp64, fp32);
- tcg_temp_free(fp32);
+ gen_helper_float_cvtd_w(fp64, fp32);
+ tcg_temp_free_i32(fp32);
gen_store_fpr64(ctx, fp64, fd);
- tcg_temp_free(fp64);
+ tcg_temp_free_i64(fp64);
}
opn = "cvt.d.w";
break;
case FOP(32, 21):
check_cp1_64bitmode(ctx);
{
- TCGv fp32 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp64 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i32 fp32 = tcg_temp_new_i32();
+ TCGv_i64 fp64 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp64, fs);
- tcg_gen_helper_1_1(do_float_cvts_l, fp32, fp64);
- tcg_temp_free(fp64);
+ gen_helper_float_cvts_l(fp32, fp64);
+ tcg_temp_free_i64(fp64);
gen_store_fpr32(fp32, fd);
- tcg_temp_free(fp32);
+ tcg_temp_free_i32(fp32);
}
opn = "cvt.s.l";
break;
case FOP(33, 21):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_cvtd_l, fp0, fp0);
+ gen_helper_float_cvtd_l(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "cvt.d.l";
break;
case FOP(38, 20):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_cvtps_pw, fp0, fp0);
+ gen_helper_float_cvtps_pw(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "cvt.ps.pw";
break;
case FOP(0, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_add_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_add_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "add.ps";
break;
case FOP(1, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_sub_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_sub_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "sub.ps";
break;
case FOP(2, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_mul_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_mul_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "mul.ps";
break;
case FOP(5, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_abs_ps, fp0, fp0);
+ gen_helper_float_abs_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "abs.ps";
break;
case FOP(6, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "mov.ps";
break;
case FOP(7, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_chs_ps, fp0, fp0);
+ gen_helper_float_chs_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "neg.ps";
break;
check_cp1_64bitmode(ctx);
{
int l1 = gen_new_label();
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv fph0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv t0 = tcg_temp_new();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
+ TCGv_i32 fph0 = tcg_temp_local_new_i32();
gen_load_gpr(t0, ft);
tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
gen_load_fpr32h(fph0, fs);
gen_store_fpr32(fp0, fd);
gen_store_fpr32h(fph0, fd);
- tcg_temp_free(fp0);
- tcg_temp_free(fph0);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fph0);
gen_set_label(l1);
tcg_temp_free(t0);
}
check_cp1_64bitmode(ctx);
{
int l1 = gen_new_label();
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv fph0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv t0 = tcg_temp_new();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
+ TCGv_i32 fph0 = tcg_temp_local_new_i32();
gen_load_gpr(t0, ft);
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
gen_load_fpr32h(fph0, fs);
gen_store_fpr32(fp0, fd);
gen_store_fpr32h(fph0, fd);
- tcg_temp_free(fp0);
- tcg_temp_free(fph0);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fph0);
gen_set_label(l1);
tcg_temp_free(t0);
}
case FOP(24, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, ft);
gen_load_fpr64(ctx, fp1, fs);
- tcg_gen_helper_1_2(do_float_addr_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_addr_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "addr.ps";
break;
case FOP(26, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, ft);
gen_load_fpr64(ctx, fp1, fs);
- tcg_gen_helper_1_2(do_float_mulr_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_mulr_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "mulr.ps";
break;
case FOP(28, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, fd);
- tcg_gen_helper_1_2(do_float_recip2_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_recip2_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "recip2.ps";
break;
case FOP(29, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_recip1_ps, fp0, fp0);
+ gen_helper_float_recip1_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "recip1.ps";
break;
case FOP(30, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_rsqrt1_ps, fp0, fp0);
+ gen_helper_float_rsqrt1_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "rsqrt1.ps";
break;
case FOP(31, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
- tcg_gen_helper_1_2(do_float_rsqrt2_ps, fp0, fp0, fp1);
- tcg_temp_free(fp1);
+ gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "rsqrt2.ps";
break;
case FOP(32, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32h(fp0, fs);
- tcg_gen_helper_1_1(do_float_cvts_pu, fp0, fp0);
+ gen_helper_float_cvts_pu(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "cvt.s.pu";
break;
case FOP(36, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
- tcg_gen_helper_1_1(do_float_cvtpw_ps, fp0, fp0);
+ gen_helper_float_cvtpw_ps(fp0, fp0);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "cvt.pw.ps";
break;
case FOP(40, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_helper_1_1(do_float_cvts_pl, fp0, fp0);
+ gen_helper_float_cvts_pl(fp0, fp0);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "cvt.s.pl";
break;
case FOP(44, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_store_fpr32h(fp0, fd);
gen_store_fpr32(fp1, fd);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
}
opn = "pll.ps";
break;
case FOP(45, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32h(fp1, ft);
gen_store_fpr32(fp1, fd);
gen_store_fpr32h(fp0, fd);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
}
opn = "plu.ps";
break;
case FOP(46, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32h(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_store_fpr32(fp1, fd);
gen_store_fpr32h(fp0, fd);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
}
opn = "pul.ps";
break;
case FOP(47, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
gen_load_fpr32h(fp0, fs);
gen_load_fpr32h(fp1, ft);
gen_store_fpr32(fp1, fd);
gen_store_fpr32h(fp0, fd);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
}
opn = "puu.ps";
break;
case FOP(63, 22):
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_cmp_ps(func-48, fp0, fp1, cc);
opn = condnames[func-48];
}
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
}
break;
default:
{
const char *opn = "extended float load/store";
int store = 0;
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
if (base == 0) {
gen_load_gpr(t0, index);
case OPC_LWXC1:
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
- tcg_gen_qemu_ld32s(fp0, t0, ctx->mem_idx);
+ tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
+ tcg_gen_trunc_tl_i32(fp0, t1);
gen_store_fpr32(fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i32(fp0);
}
opn = "lwxc1";
break;
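
The converted indexed FP loads above share a pattern: the qemu_ld/st ops operate on a target_ulong-sized TCGv, and the loaded value is then truncated into an explicitly typed 32-bit temporary before it reaches the FPR. A minimal sketch of that pattern, assuming the MIPS translator's DisasContext and gen_store_fpr32() (illustrative only, not a function added by this patch):

static void gen_example_fp_load(DisasContext *ctx, TCGv addr, int fd)
{
    TCGv t1 = tcg_temp_new();           /* target_ulong-sized load result */
    TCGv_i32 fp0 = tcg_temp_new_i32();  /* explicitly 32-bit FPR value    */

    tcg_gen_qemu_ld32s(t1, addr, ctx->mem_idx);  /* memory op takes a TCGv    */
    tcg_gen_trunc_tl_i32(fp0, t1);               /* narrow to i32 for the FPR */
    gen_store_fpr32(fp0, fd);

    tcg_temp_free_i32(fp0);
    tcg_temp_free(t1);
}
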
check_cop1x(ctx);
check_cp1_registers(ctx, fd);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "ldxc1";
break;
check_cp1_64bitmode(ctx);
tcg_gen_andi_tl(t0, t0, ~0x7);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
gen_store_fpr64(ctx, fp0, fd);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "luxc1";
break;
case OPC_SWXC1:
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
- tcg_gen_qemu_st32(fp0, t0, ctx->mem_idx);
- tcg_temp_free(fp0);
+ tcg_gen_extu_i32_tl(t1, fp0);
+ tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
+ tcg_temp_free_i32(fp0);
}
opn = "swxc1";
store = 1;
check_cop1x(ctx);
check_cp1_registers(ctx, fs);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "sdxc1";
store = 1;
check_cp1_64bitmode(ctx);
tcg_gen_andi_tl(t0, t0, ~0x7);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
- tcg_temp_free(fp0);
+ tcg_temp_free_i64(fp0);
}
opn = "suxc1";
store = 1;
case OPC_ALNV_PS:
check_cp1_64bitmode(ctx);
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv fp0 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv fph0 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv fph1 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv_i32 fp0 = tcg_temp_local_new_i32();
+ TCGv_i32 fph0 = tcg_temp_local_new_i32();
+ TCGv_i32 fp1 = tcg_temp_local_new_i32();
+ TCGv_i32 fph1 = tcg_temp_local_new_i32();
int l1 = gen_new_label();
int l2 = gen_new_label();
gen_store_fpr32h(fp1, fd);
#endif
gen_set_label(l2);
- tcg_temp_free(fp0);
- tcg_temp_free(fph0);
- tcg_temp_free(fp1);
- tcg_temp_free(fph1);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fph0);
+ tcg_temp_free_i32(fp1);
+ tcg_temp_free_i32(fph1);
}
opn = "alnv.ps";
break;
case OPC_MADD_S:
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
+ TCGv_i32 fp2 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_load_fpr32(fp2, fr);
- tcg_gen_helper_1_3(do_float_muladd_s, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i32(fp2);
}
opn = "madd.s";
break;
check_cop1x(ctx);
check_cp1_registers(ctx, fd | fs | ft | fr);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_muladd_d, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "madd.d";
break;
case OPC_MADD_PS:
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_muladd_ps, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "madd.ps";
break;
case OPC_MSUB_S:
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
+ TCGv_i32 fp2 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_load_fpr32(fp2, fr);
- tcg_gen_helper_1_3(do_float_mulsub_s, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i32(fp2);
}
opn = "msub.s";
break;
check_cop1x(ctx);
check_cp1_registers(ctx, fd | fs | ft | fr);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_mulsub_d, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "msub.d";
break;
case OPC_MSUB_PS:
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_mulsub_ps, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "msub.ps";
break;
case OPC_NMADD_S:
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
+ TCGv_i32 fp2 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_load_fpr32(fp2, fr);
- tcg_gen_helper_1_3(do_float_nmuladd_s, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i32(fp2);
}
opn = "nmadd.s";
break;
check_cop1x(ctx);
check_cp1_registers(ctx, fd | fs | ft | fr);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_nmuladd_d, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "nmadd.d";
break;
case OPC_NMADD_PS:
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_nmuladd_ps, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "nmadd.ps";
break;
case OPC_NMSUB_S:
check_cop1x(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp0 = tcg_temp_new_i32();
+ TCGv_i32 fp1 = tcg_temp_new_i32();
+ TCGv_i32 fp2 = tcg_temp_new_i32();
gen_load_fpr32(fp0, fs);
gen_load_fpr32(fp1, ft);
gen_load_fpr32(fp2, fr);
- tcg_gen_helper_1_3(do_float_nmulsub_s, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
gen_store_fpr32(fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i32(fp2);
}
opn = "nmsub.s";
break;
check_cop1x(ctx);
check_cp1_registers(ctx, fd | fs | ft | fr);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_nmulsub_d, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "nmsub.d";
break;
case OPC_NMSUB_PS:
check_cp1_64bitmode(ctx);
{
- TCGv fp0 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv fp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp0 = tcg_temp_new_i64();
+ TCGv_i64 fp1 = tcg_temp_new_i64();
+ TCGv_i64 fp2 = tcg_temp_new_i64();
gen_load_fpr64(ctx, fp0, fs);
gen_load_fpr64(ctx, fp1, ft);
gen_load_fpr64(ctx, fp2, fr);
- tcg_gen_helper_1_3(do_float_nmulsub_ps, fp2, fp0, fp1, fp2);
- tcg_temp_free(fp0);
- tcg_temp_free(fp1);
+ gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
gen_store_fpr64(ctx, fp2, fd);
- tcg_temp_free(fp2);
+ tcg_temp_free_i64(fp2);
}
opn = "nmsub.ps";
break;
MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
tcg_gen_brcondi_i32(TCG_COND_NE, bcond, 0, l1);
{
- TCGv r_tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 r_tmp = tcg_temp_new_i32();
tcg_gen_movi_i32(r_tmp, ctx->hflags & ~MIPS_HFLAG_BMASK);
tcg_gen_st_i32(r_tmp, cpu_env, offsetof(CPUState, hflags));
- tcg_temp_free(r_tmp);
+ tcg_temp_free_i32(r_tmp);
}
gen_goto_tb(ctx, 1, ctx->pc + 4);
gen_set_label(l1);
MIPS_INVAL("PMON / selsl");
generate_exception(ctx, EXCP_RI);
#else
- tcg_gen_helper_0_i(do_pmon, sa);
+ gen_helper_0i(pmon, sa);
#endif
break;
case OPC_SYSCALL:
case OPC_RDHWR:
check_insn(env, ctx, ISA_MIPS32R2);
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
switch (rd) {
case 0:
save_cpu_state(ctx, 1);
- tcg_gen_helper_1_0(do_rdhwr_cpunum, t0);
+ gen_helper_rdhwr_cpunum(t0);
break;
case 1:
save_cpu_state(ctx, 1);
- tcg_gen_helper_1_0(do_rdhwr_synci_step, t0);
+ gen_helper_rdhwr_synci_step(t0);
break;
case 2:
save_cpu_state(ctx, 1);
- tcg_gen_helper_1_0(do_rdhwr_cc, t0);
+ gen_helper_rdhwr_cc(t0);
break;
case 3:
save_cpu_state(ctx, 1);
- tcg_gen_helper_1_0(do_rdhwr_ccres, t0);
+ gen_helper_rdhwr_ccres(t0);
break;
case 29:
if (env->user_mode_only) {
case OPC_FORK:
check_insn(env, ctx, ASE_MT);
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
+ TCGv t1 = tcg_temp_local_new();
gen_load_gpr(t0, rt);
gen_load_gpr(t1, rs);
- tcg_gen_helper_0_2(do_fork, t0, t1);
+ gen_helper_fork(t0, t1);
tcg_temp_free(t0);
tcg_temp_free(t1);
}
case OPC_YIELD:
check_insn(env, ctx, ASE_MT);
{
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
gen_load_gpr(t0, rs);
- tcg_gen_helper_1_1(do_yield, t0, t0);
+ gen_helper_yield(t0, t0);
gen_store_gpr(t0, rd);
tcg_temp_free(t0);
}
case OPC_MFMC0:
#ifndef CONFIG_USER_ONLY
if (!env->user_mode_only) {
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
op2 = MASK_MFMC0(ctx->opcode);
switch (op2) {
case OPC_DMT:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_1(do_dmt, t0, t0);
+ gen_helper_dmt(t0, t0);
break;
case OPC_EMT:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_1(do_emt, t0, t0);
+ gen_helper_emt(t0, t0);
break;
case OPC_DVPE:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_1(do_dvpe, t0, t0);
+ gen_helper_dvpe(t0, t0);
break;
case OPC_EVPE:
check_insn(env, ctx, ASE_MT);
- tcg_gen_helper_1_1(do_evpe, t0, t0);
+ gen_helper_evpe(t0, t0);
break;
case OPC_DI:
check_insn(env, ctx, ISA_MIPS32R2);
save_cpu_state(ctx, 1);
- tcg_gen_helper_1_0(do_di, t0);
+ gen_helper_di(t0);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
break;
case OPC_EI:
check_insn(env, ctx, ISA_MIPS32R2);
save_cpu_state(ctx, 1);
- tcg_gen_helper_1_0(do_ei, t0);
+ gen_helper_ei(t0);
/* Stop translation as we may have switched the execution mode */
ctx->bstate = BS_STOP;
break;
if (env->breakpoints[j] == ctx.pc) {
save_cpu_state(&ctx, 1);
ctx.bstate = BS_BRANCH;
- tcg_gen_helper_0_i(do_raise_exception, EXCP_DEBUG);
+ gen_helper_0i(raise_exception, EXCP_DEBUG);
/* Include the breakpoint location or the tb won't
* be flushed when it must be. */
ctx.pc += 4;
gen_io_end();
if (env->singlestep_enabled) {
save_cpu_state(&ctx, ctx.bstate == BS_NONE);
- tcg_gen_helper_0_i(do_raise_exception, EXCP_DEBUG);
+ gen_helper_0i(raise_exception, EXCP_DEBUG);
} else {
switch (ctx.bstate) {
case BS_STOP:
- tcg_gen_helper_0_0(do_interrupt_restart);
+ gen_helper_interrupt_restart();
gen_goto_tb(&ctx, 0, ctx.pc);
break;
case BS_NONE:
gen_goto_tb(&ctx, 0, ctx.pc);
break;
case BS_EXCP:
- tcg_gen_helper_0_0(do_interrupt_restart);
+ gen_helper_interrupt_restart();
tcg_gen_exit_tb(0);
break;
case BS_BRANCH:
if (inited)
return;
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
for (i = 0; i < 32; i++)
- cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, active_tc.gpr[i]),
regnames[i]);
- cpu_PC = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_PC = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, active_tc.PC), "PC");
for (i = 0; i < MIPS_DSP_ACC; i++) {
- cpu_HI[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, active_tc.HI[i]),
regnames_HI[i]);
- cpu_LO[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, active_tc.LO[i]),
regnames_LO[i]);
- cpu_ACX[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, active_tc.ACX[i]),
regnames_ACX[i]);
}
- cpu_dspctrl = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, active_tc.DSPControl),
"DSPControl");
- bcond = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, bcond), "bcond");
- btarget = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ bcond = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, bcond), "bcond");
+ btarget = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, btarget), "btarget");
for (i = 0; i < 32; i++)
- fpu_fpr32[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, active_fpu.fpr[i].w[FP_ENDIAN_IDX]),
- fregnames[i]);
+ fpu_fpr32[i] = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, active_fpu.fpr[i].w[FP_ENDIAN_IDX]),
+ fregnames[i]);
for (i = 0; i < 32; i++)
- fpu_fpr64[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, active_fpu.fpr[i]),
- fregnames_64[i]);
+ fpu_fpr64[i] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, active_fpu.fpr[i]),
+ fregnames_64[i]);
for (i = 0; i < 32; i++)
- fpu_fpr32h[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, active_fpu.fpr[i].w[!FP_ENDIAN_IDX]),
- fregnames_h[i]);
- fpu_fcr0 = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, active_fpu.fcr0),
- "fcr0");
- fpu_fcr31 = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, active_fpu.fcr31),
- "fcr31");
+ fpu_fpr32h[i] = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, active_fpu.fpr[i].w[!FP_ENDIAN_IDX]),
+ fregnames_h[i]);
+ fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, active_fpu.fcr0),
+ "fcr0");
+ fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, active_fpu.fcr31),
+ "fcr31");
/* register helpers */
-#undef DEF_HELPER
-#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+#define GEN_HELPER 2
#include "helper.h"
inited = 1;
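
For reference, the register-initialisation hunk above now distinguishes three kinds of globals: target_ulong-sized ones created with tcg_global_mem_new(), fixed 32-bit ones with tcg_global_mem_new_i32(), and fixed 64-bit ones with tcg_global_mem_new_i64(). A condensed sketch, reusing CPUState offsets seen above but with illustrative variable names that are not part of this patch:

static TCGv_ptr cpu_env_example;
static TCGv     example_pc;     /* target_ulong-sized */
static TCGv_i32 example_flags;  /* always 32-bit      */
static TCGv_i64 example_fpr0;   /* always 64-bit      */

static void example_tcg_init(void)
{
    cpu_env_example = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    example_pc = tcg_global_mem_new(TCG_AREG0,
                                    offsetof(CPUState, active_tc.PC), "PC");
    example_flags = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, hflags), "hflags");
    example_fpr0 = tcg_global_mem_new_i64(TCG_AREG0,
                                          offsetof(CPUState, active_fpu.fpr[0]),
                                          "f0");
}
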
#include "exec-all.h"
#include "helper_regs.h"
#include "qemu-common.h"
+#include "helper.h"
//#define DEBUG_MMU
//#define DEBUG_BATS
-#ifndef DEF_HELPER
-#define DEF_HELPER(ret, name, params) ret name params;
-#endif
+#include "def-helper.h"
-DEF_HELPER(uint32_t, helper_fcmpo, (void))
-DEF_HELPER(uint32_t, helper_fcmpu, (void))
+DEF_HELPER_0(fcmpo, i32)
+DEF_HELPER_0(fcmpu, i32)
-DEF_HELPER(uint32_t, helper_load_cr, (void))
-DEF_HELPER(void, helper_store_cr, (target_ulong, uint32_t))
+DEF_HELPER_0(load_cr, tl)
+DEF_HELPER_2(store_cr, void, tl, i32)
#if defined(TARGET_PPC64)
-DEF_HELPER(uint64_t, helper_mulhd, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mulhdu, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_mulldo, (uint64_t, uint64_t))
+DEF_HELPER_2(mulhd, i64, i64, i64)
+DEF_HELPER_2(mulhdu, i64, i64, i64)
+DEF_HELPER_2(mulldo, i64, i64, i64)
#endif
-DEF_HELPER(target_ulong, helper_cntlzw, (target_ulong t))
-DEF_HELPER(target_ulong, helper_popcntb, (target_ulong val))
-DEF_HELPER(target_ulong, helper_sraw, (target_ulong, target_ulong))
+DEF_HELPER_1(cntlzw, tl, tl)
+DEF_HELPER_1(popcntb, tl, tl)
+DEF_HELPER_2(sraw, tl, tl, tl)
#if defined(TARGET_PPC64)
-DEF_HELPER(target_ulong, helper_cntlzd, (target_ulong t))
-DEF_HELPER(target_ulong, helper_popcntb_64, (target_ulong val))
-DEF_HELPER(target_ulong, helper_srad, (target_ulong, target_ulong))
+DEF_HELPER_1(cntlzd, tl, tl)
+DEF_HELPER_1(popcntb_64, tl, tl)
+DEF_HELPER_2(srad, tl, tl, tl)
#endif
-DEF_HELPER(uint32_t, helper_cntlsw32, (uint32_t))
-DEF_HELPER(uint32_t, helper_cntlzw32, (uint32_t))
-DEF_HELPER(uint32_t, helper_brinc, (uint32_t, uint32_t))
+DEF_HELPER_1(cntlsw32, i32, i32)
+DEF_HELPER_1(cntlzw32, i32, i32)
+DEF_HELPER_2(brinc, tl, tl, tl)
+
+#include "def-helper.h"
*/
#include "exec.h"
#include "host-utils.h"
+#include "helper.h"
#include "helper_regs.h"
#include "op_helper.h"
/*****************************************************************************/
/* Registers load and stores */
-uint32_t helper_load_cr (void)
+target_ulong helper_load_cr (void)
{
return (env->crf[0] << 28) |
(env->crf[1] << 24) |
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
-#include "helper.h"
#include "tcg-op.h"
#include "qemu-common.h"
+#include "helper.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4
/* Code translation helpers */
/* global register indexes */
-static TCGv cpu_env;
+static TCGv_ptr cpu_env;
static char cpu_reg_names[10*3 + 22*4 /* GPR */
#if !defined(TARGET_PPC64)
+ 10*4 + 22*5 /* SPE GPRh */
#if !defined(TARGET_PPC64)
static TCGv cpu_gprh[32];
#endif
-static TCGv cpu_fpr[32];
-static TCGv cpu_avrh[32], cpu_avrl[32];
-static TCGv cpu_crf[8];
+static TCGv_i64 cpu_fpr[32];
+static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
+static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_ctr;
static TCGv cpu_lr;
static TCGv cpu_xer;
-static TCGv cpu_fpscr;
+static TCGv_i32 cpu_fpscr;
/* dyngen register indexes */
static TCGv cpu_T[3];
#if defined(TARGET_PPC64)
#define cpu_T64 cpu_T
#else
-static TCGv cpu_T64[3];
+static TCGv_i64 cpu_T64[3];
#endif
-static TCGv cpu_FT[3];
-static TCGv cpu_AVRh[3], cpu_AVRl[3];
+static TCGv_i64 cpu_FT[3];
+static TCGv_i64 cpu_AVRh[3], cpu_AVRl[3];
#include "gen-icount.h"
if (done_init)
return;
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
- cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, t0), "T0");
- cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, t1), "T1");
- cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, t2), "T2");
+ cpu_T[0] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t0), "T0");
+ cpu_T[1] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t1), "T1");
+ cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
#else
- cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
- cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
+ cpu_T[0] = tcg_global_reg_new(TCG_AREG1, "T0");
+ cpu_T[1] = tcg_global_reg_new(TCG_AREG2, "T1");
#ifdef HOST_I386
/* XXX: This is a temporary workaround for i386.
* On i386 qemu_st32 runs out of registers.
* The proper fix is to remove cpu_T.
*/
- cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, t2), "T2");
+ cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
#else
- cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
+ cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2");
#endif
#endif
#if !defined(TARGET_PPC64)
- cpu_T64[0] = tcg_global_mem_new(TCG_TYPE_I64,
- TCG_AREG0, offsetof(CPUState, t0_64),
- "T0_64");
- cpu_T64[1] = tcg_global_mem_new(TCG_TYPE_I64,
- TCG_AREG0, offsetof(CPUState, t1_64),
- "T1_64");
- cpu_T64[2] = tcg_global_mem_new(TCG_TYPE_I64,
- TCG_AREG0, offsetof(CPUState, t2_64),
- "T2_64");
-#endif
-
- cpu_FT[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, ft0), "FT0");
- cpu_FT[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, ft1), "FT1");
- cpu_FT[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, ft2), "FT2");
-
- cpu_AVRh[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_T64[0] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t0_64),
+ "T0_64");
+ cpu_T64[1] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t1_64),
+ "T1_64");
+ cpu_T64[2] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t2_64),
+ "T2_64");
+#endif
+
+ cpu_FT[0] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, ft0), "FT0");
+ cpu_FT[1] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, ft1), "FT1");
+ cpu_FT[2] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, ft2), "FT2");
+
+ cpu_AVRh[0] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr0.u64[0]), "AVR0H");
- cpu_AVRl[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_AVRl[0] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr0.u64[1]), "AVR0L");
- cpu_AVRh[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_AVRh[1] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr1.u64[0]), "AVR1H");
- cpu_AVRl[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_AVRl[1] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr1.u64[1]), "AVR1L");
- cpu_AVRh[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_AVRh[2] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr2.u64[0]), "AVR2H");
- cpu_AVRl[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_AVRl[2] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr2.u64[1]), "AVR2L");
p = cpu_reg_names;
for (i = 0; i < 8; i++) {
sprintf(p, "crf%d", i);
- cpu_crf[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, crf[i]), p);
+ cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, crf[i]), p);
p += 5;
}
for (i = 0; i < 32; i++) {
sprintf(p, "r%d", i);
- cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, gpr[i]), p);
p += (i < 10) ? 3 : 4;
#if !defined(TARGET_PPC64)
sprintf(p, "r%dH", i);
- cpu_gprh[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, gprh[i]), p);
+ cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, gprh[i]), p);
p += (i < 10) ? 4 : 5;
#endif
sprintf(p, "fp%d", i);
- cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
- offsetof(CPUState, fpr[i]), p);
+ cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
+ offsetof(CPUState, fpr[i]), p);
p += (i < 10) ? 4 : 5;
sprintf(p, "avr%dH", i);
- cpu_avrh[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr[i].u64[0]), p);
p += (i < 10) ? 6 : 7;
sprintf(p, "avr%dL", i);
- cpu_avrl[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
+ cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
offsetof(CPUState, avr[i].u64[1]), p);
p += (i < 10) ? 6 : 7;
}
- cpu_nip = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_nip = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, nip), "nip");
- cpu_ctr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_ctr = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, ctr), "ctr");
- cpu_lr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_lr = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, lr), "lr");
- cpu_xer = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
+ cpu_xer = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, xer), "xer");
- cpu_fpscr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, fpscr), "fpscr");
+ cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, fpscr), "fpscr");
/* register helpers */
-#undef DEF_HELPER
-#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+#define GEN_HELPER 2
#include "helper.h"
done_init = 1;
*gen_fprf_ptr++ = gen_opc_ptr;
#endif
gen_op_compute_fprf(1);
- if (unlikely(set_rc))
- tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
+ if (unlikely(set_rc)) {
+ tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_T[0]);
+ tcg_gen_andi_i32(cpu_crf[1], cpu_crf[1], 0xf);
+ }
gen_op_float_check_status();
} else if (unlikely(set_rc)) {
/* We always need to compute fpcc */
gen_op_compute_fprf(0);
- tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
+ tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_T[0]);
+ tcg_gen_andi_i32(cpu_crf[1], cpu_crf[1], 0xf);
if (set_fprf)
gen_op_float_check_status();
}
static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
TCGv t0, t1;
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
if (s) {
tcg_gen_ext32s_tl(t0, arg0);
tcg_gen_ext32s_tl(t1, arg1);
int l1, l2;
uint32_t bi = rC(ctx->opcode);
uint32_t mask;
- TCGv t0;
+ TCGv_i32 t0;
l1 = gen_new_label();
l2 = gen_new_label();
mask = 1 << (3 - (bi & 0x03));
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
if (rA(ctx->opcode) == 0)
gen_set_label(l1);
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
gen_set_label(l2);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
/*** Integer arithmetic ***/
l1 = gen_new_label();
/* Start with XER OV disabled, the most likely case */
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_xor_tl(t0, arg0, arg1);
#if defined(TARGET_PPC64)
if (!ctx->sf_mode)
#if defined(TARGET_PPC64)
if (!(ctx->sf_mode)) {
TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_TL);
- t1 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
+ t1 = tcg_temp_new();
tcg_gen_ext32u_tl(t0, arg1);
tcg_gen_ext32u_tl(t1, arg2);
TCGv t0, t1;
if ((!compute_ca && !compute_ov) ||
- (GET_TCGV(ret) != GET_TCGV(arg1) && GET_TCGV(ret) != GET_TCGV(arg2))) {
+ (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
t0 = ret;
} else {
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
}
if (add_ca) {
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ t1 = tcg_temp_local_new();
tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
tcg_gen_shri_tl(t1, t1, XER_CA);
}
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, t0);
- if (GET_TCGV(t0) != GET_TCGV(ret)) {
+ if (!TCGV_EQUAL(t0, ret)) {
tcg_gen_mov_tl(ret, t0);
tcg_temp_free(t0);
}
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
if (likely(simm != 0)) {
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
tcg_gen_addi_tl(t0, arg1, simm);
gen_op_arith_compute_ca(ctx, t0, arg1, 0);
tcg_gen_mov_tl(ret, t0);
{
int l1 = gen_new_label();
int l2 = gen_new_label();
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32);
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv_i32 t0 = tcg_temp_local_new_i32();
+ TCGv_i32 t1 = tcg_temp_local_new_i32();
tcg_gen_trunc_tl_i32(t0, arg1);
tcg_gen_trunc_tl_i32(t1, arg2);
}
gen_set_label(l2);
tcg_gen_extu_i32_tl(ret, t0);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, ret);
}
/* mulhw mulhw. */
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
{
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
tcg_gen_shri_i64(t0, t0, 32);
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* mulhwu mulhwu. */
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
{
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
tcg_gen_shri_i64(t0, t0, 32);
tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
{
int l1;
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
l1 = gen_new_label();
/* Start with XER OV disabled, the most likely case */
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
#endif
tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
gen_set_label(l1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \
{ \
- tcg_gen_helper_1_2(helper_##name, cpu_gpr[rD(ctx->opcode)], \
+ gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
if (unlikely(Rc(ctx->opcode) != 0)) \
gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
{
int l1 = gen_new_label();
int l2 = gen_new_label();
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
#if defined(TARGET_PPC64)
if (ctx->sf_mode) {
tcg_gen_mov_tl(t0, arg1);
TCGv t0, t1;
if ((!compute_ca && !compute_ov) ||
- (GET_TCGV(ret) != GET_TCGV(arg1) && GET_TCGV(ret) != GET_TCGV(arg2))) {
+ (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
t0 = ret;
} else {
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
}
if (add_ca) {
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ t1 = tcg_temp_local_new();
tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
tcg_gen_shri_tl(t1, t1, XER_CA);
}
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, t0);
- if (GET_TCGV(t0) != GET_TCGV(ret)) {
+ if (!TCGV_EQUAL(t0, ret)) {
tcg_gen_mov_tl(ret, t0);
tcg_temp_free(t0);
}
{
/* Start with XER CA and OV disabled, the most likely case */
tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_local_new();
TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
gen_op_arith_compute_ca(ctx, t0, t1, 1);
/* cntlzw */
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
{
- tcg_gen_helper_1_1(helper_cntlzw, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
+ gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
break;
}
if (prio) {
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, spr[SPR_PPR]));
tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
{
#if defined(TARGET_PPC64)
if (ctx->sf_mode)
- tcg_gen_helper_1_1(helper_popcntb_64, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
+ gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
else
#endif
- tcg_gen_helper_1_1(helper_popcntb, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
+ gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#if defined(TARGET_PPC64)
/* cntlzd */
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
{
- tcg_gen_helper_1_1(helper_cntlzd, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
+ gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
if (likely(sh == 0 && mb == 0 && me == 31)) {
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
} else {
- TCGv t0, t1;
target_ulong mask;
-
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t1;
+ TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
- t1 = tcg_temp_new(TCG_TYPE_I32);
- tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
- tcg_gen_rotli_i32(t1, t1, sh);
- tcg_gen_extu_i32_i64(t0, t1);
- tcg_temp_free(t1);
+ TCGv_i32 t2 = tcg_temp_new_i32();
+ tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
+ tcg_gen_rotli_i32(t2, t2, sh);
+ tcg_gen_extu_i32_i64(t0, t2);
+ tcg_temp_free_i32(t2);
#else
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
me += 32;
#endif
mask = MASK(mb, me);
- t1 = tcg_temp_new(TCG_TYPE_TL);
+ t1 = tcg_temp_new();
tcg_gen_andi_tl(t0, t0, mask);
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
if (likely(sh == 0)) {
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
} else {
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
tcg_gen_shli_tl(t0, t0, sh);
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
tcg_temp_free(t0);
}
} else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
tcg_gen_shri_tl(t0, t0, mb);
tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
tcg_temp_free(t0);
} else {
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
- TCGv t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t1 = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
tcg_gen_rotli_i32(t1, t1, sh);
tcg_gen_extu_i32_i64(t0, t1);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t1);
#else
tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
uint32_t mb, me;
TCGv t0;
#if defined(TARGET_PPC64)
- TCGv t1, t2;
+ TCGv_i32 t1, t2;
#endif
mb = MB(ctx->opcode);
me = ME(ctx->opcode);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
#if defined(TARGET_PPC64)
- t1 = tcg_temp_new(TCG_TYPE_I32);
- t2 = tcg_temp_new(TCG_TYPE_I32);
+ t1 = tcg_temp_new_i32();
+ t2 = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
tcg_gen_trunc_i64_i32(t2, t0);
tcg_gen_rotl_i32(t1, t1, t2);
tcg_gen_extu_i32_i64(t0, t1);
- tcg_temp_free(t1);
- tcg_temp_free(t2);
+ tcg_temp_free_i32(t1);
+ tcg_temp_free_i32(t2);
#else
tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
#endif
} else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
} else {
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
if (likely(mb == 0 && me == 63)) {
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
mb = MB(ctx->opcode);
me = ME(ctx->opcode);
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
if (unlikely(mb != 0 || me != 63)) {
TCGv t0, t1;
target_ulong mask;
- t0 = tcg_temp_new(TCG_TYPE_TL);
+ t0 = tcg_temp_new();
tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
- t1 = tcg_temp_new(TCG_TYPE_TL);
+ t1 = tcg_temp_new();
mask = MASK(mb, me);
tcg_gen_andi_tl(t0, t0, mask);
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
/* sraw & sraw. */
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
{
- tcg_gen_helper_1_2(helper_sraw, cpu_gpr[rA(ctx->opcode)],
- cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
+ gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
+ cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
TCGv t0;
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
tcg_gen_br(l2);
gen_set_label(l1);
- t1 = tcg_temp_new(TCG_TYPE_TL);
+ t1 = tcg_temp_new();
tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
tcg_temp_free(t1);
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
/* srad & srad. */
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
{
- tcg_gen_helper_1_2(helper_srad, cpu_gpr[rA(ctx->opcode)],
- cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
+ gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
+ cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
if (unlikely(Rc(ctx->opcode) != 0))
gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
TCGv t0;
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
gen_reset_fpstatus();
- tcg_gen_helper_1_0(helper_fcmpo, cpu_crf[crfD(ctx->opcode)]);
+ gen_helper_fcmpo(cpu_crf[crfD(ctx->opcode)]);
gen_op_float_check_status();
}
tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
gen_reset_fpstatus();
- tcg_gen_helper_1_0(helper_fcmpu, cpu_crf[crfD(ctx->opcode)]);
+ gen_helper_fcmpu(cpu_crf[crfD(ctx->opcode)]);
gen_op_float_check_status();
}
if (likely(flags & 2)) \
tcg_gen_qemu_ld##width(t0, t1, flags >> 2); \
else { \
- TCGv addr = tcg_temp_new(TCG_TYPE_TL); \
+ TCGv addr = tcg_temp_new(); \
tcg_gen_ext32u_tl(addr, t1); \
tcg_gen_qemu_ld##width(t0, addr, flags >> 2); \
tcg_temp_free(addr); \
if (likely(flags & 2)) \
tcg_gen_qemu_st##width(t0, t1, flags >> 2); \
else { \
- TCGv addr = tcg_temp_new(TCG_TYPE_TL); \
+ TCGv addr = tcg_temp_new(); \
tcg_gen_ext32u_tl(addr, t1); \
tcg_gen_qemu_st##width(t0, addr, flags >> 2); \
tcg_temp_free(addr); \
static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0;
+ TCGv_i32 t0;
gen_qemu_ld16u_ppc64(arg0, arg1, flags);
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg0);
tcg_gen_bswap16_i32(t0, t0);
tcg_gen_extu_i32_tl(arg0, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
} else
gen_qemu_ld16u_ppc64(arg0, arg1, flags);
}
static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0;
+ TCGv_i32 t0;
gen_qemu_ld16u_ppc64(arg0, arg1, flags);
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg0);
tcg_gen_bswap16_i32(t0, t0);
tcg_gen_extu_i32_tl(arg0, t0);
tcg_gen_ext16s_tl(arg0, arg0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
} else
gen_qemu_ld16s_ppc64(arg0, arg1, flags);
}
static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0;
+ TCGv_i32 t0;
gen_qemu_ld32u_ppc64(arg0, arg1, flags);
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg0);
tcg_gen_bswap_i32(t0, t0);
tcg_gen_extu_i32_tl(arg0, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
} else
gen_qemu_ld32u_ppc64(arg0, arg1, flags);
}
static always_inline void gen_qemu_ld32s(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0;
+ TCGv_i32 t0;
gen_qemu_ld32u_ppc64(arg0, arg1, flags);
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg0);
tcg_gen_bswap_i32(t0, t0);
tcg_gen_ext_i32_tl(arg0, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
} else
gen_qemu_ld32s_ppc64(arg0, arg1, flags);
}
static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ TCGv_i64 t1;
+ t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg0);
tcg_gen_ext16u_i32(t0, t0);
tcg_gen_bswap16_i32(t0, t0);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t1 = tcg_temp_new_i64();
tcg_gen_extu_i32_tl(t1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
gen_qemu_st16_ppc64(t1, arg1, flags);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t1);
} else
gen_qemu_st16_ppc64(arg0, arg1, flags);
}
static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ TCGv_i64 t1;
+ t0 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t0, arg0);
tcg_gen_bswap_i32(t0, t0);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t1 = tcg_temp_new_i64();
tcg_gen_extu_i32_tl(t1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
gen_qemu_st32_ppc64(t1, arg1, flags);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t1);
} else
gen_qemu_st32_ppc64(arg0, arg1, flags);
}
static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0 = tcg_temp_new_i64();
tcg_gen_bswap_i64(t0, arg0);
gen_qemu_st64_ppc64(t0, arg1, flags);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
} else
gen_qemu_st64_ppc64(arg0, arg1, flags);
}
GEN_QEMU_LD_PPC32(16s)
GEN_QEMU_LD_PPC32(32u)
GEN_QEMU_LD_PPC32(32s)
-GEN_QEMU_LD_PPC32(64)
#define GEN_QEMU_ST_PPC32(width) \
static always_inline void gen_qemu_st##width##_ppc32(TCGv arg0, TCGv arg1, int flags)\
GEN_QEMU_ST_PPC32(8)
GEN_QEMU_ST_PPC32(16)
GEN_QEMU_ST_PPC32(32)
-GEN_QEMU_ST_PPC32(64)
static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
{
tcg_gen_bswap_i32(arg0, arg0);
}
-static always_inline void gen_qemu_ld64(TCGv arg0, TCGv arg1, int flags)
-{
- gen_qemu_ld64_ppc32(arg0, arg1, flags);
- if (unlikely(flags & 1))
- tcg_gen_bswap_i64(arg0, arg0);
-}
-
static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
{
gen_qemu_st8_ppc32(arg0, arg1, flags);
static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 temp = tcg_temp_new_i32();
tcg_gen_ext16u_i32(temp, arg0);
tcg_gen_bswap16_i32(temp, temp);
gen_qemu_st16_ppc32(temp, arg1, flags);
- tcg_temp_free(temp);
+ tcg_temp_free_i32(temp);
} else
gen_qemu_st16_ppc32(arg0, arg1, flags);
}
static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
{
if (unlikely(flags & 1)) {
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 temp = tcg_temp_new_i32();
tcg_gen_bswap_i32(temp, arg0);
gen_qemu_st32_ppc32(temp, arg1, flags);
- tcg_temp_free(temp);
+ tcg_temp_free_i32(temp);
} else
gen_qemu_st32_ppc32(arg0, arg1, flags);
}
-static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
-{
- if (unlikely(flags & 1)) {
- TCGv temp = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_bswap_i64(temp, arg0);
- gen_qemu_st64_ppc32(temp, arg1, flags);
- tcg_temp_free(temp);
- } else
- gen_qemu_st64_ppc32(arg0, arg1, flags);
-}
-
#endif
#define GEN_LD(width, opc, type) \
GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
{ \
- TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
+ TCGv EA = tcg_temp_new(); \
gen_addr_imm_index(EA, ctx, 0); \
gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
tcg_temp_free(EA); \
GEN_EXCP_INVAL(ctx); \
return; \
} \
- EA = tcg_temp_new(TCG_TYPE_TL); \
+ EA = tcg_temp_new(); \
if (type == PPC_64B) \
gen_addr_imm_index(EA, ctx, 0x03); \
else \
GEN_EXCP_INVAL(ctx); \
return; \
} \
- EA = tcg_temp_new(TCG_TYPE_TL); \
+ EA = tcg_temp_new(); \
gen_addr_reg_index(EA, ctx); \
gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
#define GEN_LDX(width, opc2, opc3, type) \
GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
{ \
- TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
+ TCGv EA = tcg_temp_new(); \
gen_addr_reg_index(EA, ctx); \
gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
tcg_temp_free(EA); \
return;
}
}
- EA = tcg_temp_new(TCG_TYPE_TL);
+ EA = tcg_temp_new();
gen_addr_imm_index(EA, ctx, 0x03);
if (ctx->opcode & 0x02) {
/* lwa (lwau is undefined) */
GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
return;
}
- EA = tcg_temp_new(TCG_TYPE_TL);
+ EA = tcg_temp_new();
gen_addr_imm_index(EA, ctx, 0x0F);
gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
tcg_gen_addi_tl(EA, EA, 8);
#define GEN_ST(width, opc, type) \
GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
{ \
- TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
+ TCGv EA = tcg_temp_new(); \
gen_addr_imm_index(EA, ctx, 0); \
gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
tcg_temp_free(EA); \
GEN_EXCP_INVAL(ctx); \
return; \
} \
- EA = tcg_temp_new(TCG_TYPE_TL); \
+ EA = tcg_temp_new(); \
if (type == PPC_64B) \
gen_addr_imm_index(EA, ctx, 0x03); \
else \
GEN_EXCP_INVAL(ctx); \
return; \
} \
- EA = tcg_temp_new(TCG_TYPE_TL); \
+ EA = tcg_temp_new(); \
gen_addr_reg_index(EA, ctx); \
gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
#define GEN_STX(width, opc2, opc3, type) \
GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
{ \
- TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
+ TCGv EA = tcg_temp_new(); \
gen_addr_reg_index(EA, ctx); \
gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
tcg_temp_free(EA); \
GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
return;
}
- EA = tcg_temp_new(TCG_TYPE_TL);
+ EA = tcg_temp_new();
gen_addr_imm_index(EA, ctx, 0x03);
gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
tcg_gen_addi_tl(EA, EA, 8);
return;
}
}
- EA = tcg_temp_new(TCG_TYPE_TL);
+ EA = tcg_temp_new();
gen_addr_imm_index(EA, ctx, 0x03);
gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
if (Rc(ctx->opcode))
/* lhbrx */
void always_inline gen_qemu_ld16ur(TCGv t0, TCGv t1, int flags)
{
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
- gen_qemu_ld16u(temp, t1, flags);
+ TCGv_i32 temp = tcg_temp_new_i32();
+ gen_qemu_ld16u(t0, t1, flags);
+ tcg_gen_trunc_tl_i32(temp, t0);
tcg_gen_bswap16_i32(temp, temp);
tcg_gen_extu_i32_tl(t0, temp);
- tcg_temp_free(temp);
+ tcg_temp_free_i32(temp);
}
GEN_LDX(16ur, 0x16, 0x18, PPC_INTEGER);
/* lwbrx */
void always_inline gen_qemu_ld32ur(TCGv t0, TCGv t1, int flags)
{
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
- gen_qemu_ld32u(temp, t1, flags);
+ TCGv_i32 temp = tcg_temp_new_i32();
+ gen_qemu_ld32u(t0, t1, flags);
+ tcg_gen_trunc_tl_i32(temp, t0);
tcg_gen_bswap_i32(temp, temp);
tcg_gen_extu_i32_tl(t0, temp);
- tcg_temp_free(temp);
+ tcg_temp_free_i32(temp);
}
GEN_LDX(32ur, 0x16, 0x10, PPC_INTEGER);
/* sthbrx */
void always_inline gen_qemu_st16r(TCGv t0, TCGv t1, int flags)
{
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 temp = tcg_temp_new_i32();
+ TCGv t2 = tcg_temp_new();
tcg_gen_trunc_tl_i32(temp, t0);
tcg_gen_ext16u_i32(temp, temp);
tcg_gen_bswap16_i32(temp, temp);
- gen_qemu_st16(temp, t1, flags);
- tcg_temp_free(temp);
+ tcg_gen_extu_i32_tl(t2, temp);
+ tcg_temp_free_i32(temp);
+ gen_qemu_st16(t2, t1, flags);
+ tcg_temp_free(t2);
}
GEN_STX(16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
void always_inline gen_qemu_st32r(TCGv t0, TCGv t1, int flags)
{
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 temp = tcg_temp_new_i32();
+ TCGv t2 = tcg_temp_new();
tcg_gen_trunc_tl_i32(temp, t0);
tcg_gen_bswap_i32(temp, temp);
- gen_qemu_st32(temp, t1, flags);
- tcg_temp_free(temp);
+ tcg_gen_extu_i32_tl(t2, temp);
+ tcg_temp_free_i32(temp);
+ gen_qemu_st32(t2, t1, flags);
+ tcg_temp_free(t2);
}
GEN_STX(32r, 0x16, 0x14, PPC_INTEGER);
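
The byte-reversed stores above are worth spelling out: the value is truncated to 32 bits, byte-swapped in an i32 temporary, widened back to a target_ulong-sized TCGv (since the qemu_st ops take TCGv operands), and finally stored with the width the instruction requires, a full word in the stwbrx case. A minimal sketch of that flow, mirroring the hunk above (illustrative only):

static void gen_example_st32r(TCGv val, TCGv addr, int flags)
{
    TCGv_i32 swapped = tcg_temp_new_i32();
    TCGv widened = tcg_temp_new();

    tcg_gen_trunc_tl_i32(swapped, val);    /* keep the low 32 bits       */
    tcg_gen_bswap_i32(swapped, swapped);   /* reverse the byte order     */
    tcg_gen_extu_i32_tl(widened, swapped); /* qemu_st wants a TCGv       */
    tcg_temp_free_i32(swapped);
    gen_qemu_st32(widened, addr, flags);   /* stwbrx stores a full word  */
    tcg_temp_free(widened);
}
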
ctx->exception = POWERPC_EXCP_BRANCH;
if (type == BCOND_LR || type == BCOND_CTR) {
- target = tcg_temp_local_new(TCG_TYPE_TL);
+ target = tcg_temp_local_new();
if (type == BCOND_CTR)
tcg_gen_mov_tl(target, cpu_ctr);
else
l1 = gen_new_label();
if ((bo & 0x4) == 0) {
/* Decrement and test CTR */
- TCGv temp = tcg_temp_new(TCG_TYPE_TL);
+ TCGv temp = tcg_temp_new();
if (unlikely(type == BCOND_CTR)) {
GEN_EXCP_INVAL(ctx);
return;
} else {
tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
}
+ tcg_temp_free(temp);
}
if ((bo & 0x10) == 0) {
/* Test CR */
uint32_t bi = BI(ctx->opcode);
uint32_t mask = 1 << (3 - (bi & 0x03));
- TCGv temp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 temp = tcg_temp_new_i32();
if (bo & 0x8) {
tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
}
+ tcg_temp_free_i32(temp);
}
if (type == BCOND_IM) {
target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
{ \
uint8_t bitmask; \
int sh; \
- TCGv t0, t1; \
+ TCGv_i32 t0, t1; \
sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
- t0 = tcg_temp_new(TCG_TYPE_I32); \
+ t0 = tcg_temp_new_i32(); \
if (sh > 0) \
tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
else if (sh < 0) \
tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
else \
tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
- t1 = tcg_temp_new(TCG_TYPE_I32); \
+ t1 = tcg_temp_new_i32(); \
sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
if (sh > 0) \
tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
tcg_gen_andi_i32(t0, t0, bitmask); \
tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
- tcg_temp_free(t0); \
- tcg_temp_free(t1); \
+ tcg_temp_free_i32(t0); \
+ tcg_temp_free_i32(t1); \
}
/* crand */
tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
}
} else {
- tcg_gen_helper_1_0(helper_load_cr, cpu_gpr[rD(ctx->opcode)]);
+ gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]);
}
}
crm = CRM(ctx->opcode);
if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
+ TCGv_i32 temp = tcg_temp_new_i32();
crn = ffs(crm);
- tcg_gen_shri_i32(cpu_crf[7 - crn], cpu_gpr[rS(ctx->opcode)], crn * 4);
+ tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
+ tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
+ tcg_temp_free_i32(temp);
} else {
- TCGv t0 = tcg_const_tl(crm);
- tcg_gen_helper_0_2(helper_store_cr, cpu_gpr[rS(ctx->opcode)], t0);
- tcg_temp_free(t0);
+ TCGv_i32 temp = tcg_const_i32(crm);
+ gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp);
+ tcg_temp_free_i32(temp);
}
}
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
{
/* XXX: specification says this is treated as a load by the MMU */
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
gen_addr_reg_index(t0, ctx);
gen_qemu_ld8u(t0, t0, ctx->mem_idx);
tcg_temp_free(t0);
GEN_EXCP_PRIVOPC(ctx);
return;
}
- EA = tcg_temp_new(TCG_TYPE_TL);
+ EA = tcg_temp_new();
gen_addr_reg_index(EA, ctx);
- val = tcg_temp_new(TCG_TYPE_TL);
+ val = tcg_temp_new();
/* XXX: specification says this should be treated as a store by the MMU */
gen_qemu_ld8u(val, EA, ctx->mem_idx);
gen_qemu_st8(val, EA, ctx->mem_idx);
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
{
/* XXX: specification says this is treated as a load by the MMU */
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
gen_addr_reg_index(t0, ctx);
gen_qemu_ld8u(t0, t0, ctx->mem_idx);
tcg_temp_free(t0);
{
TCGv t0, t1;
- t0 = tcg_temp_local_new(TCG_TYPE_TL);
- t1 = tcg_temp_local_new(TCG_TYPE_TL);
+ t0 = tcg_temp_local_new();
+ t1 = tcg_temp_local_new();
switch (opc3 & 0x0D) {
case 0x05:
GEN_EXCP_PRIVOPC(ctx);
return;
}
- EA = tcg_temp_new(TCG_TYPE_TL);
+ EA = tcg_temp_new();
gen_addr_reg_index(EA, ctx);
- val = tcg_temp_new(TCG_TYPE_TL);
+ val = tcg_temp_new();
gen_qemu_ld32u(val, EA, ctx->mem_idx);
tcg_temp_free(val);
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
tcg_gen_or_tl(cpu_xer, cpu_xer, cpu_T[0]);
if (Rc(ctx->opcode)) {
gen_op_440_dlmzb_update_Rc();
- tcg_gen_andi_i32(cpu_crf[0], cpu_T[0], 0xf);
+ tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_T[0]);
+ tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 0xf);
}
}
/*** SPE extension ***/
/* Register moves */
-static always_inline void gen_load_gpr64(TCGv t, int reg) {
+static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
#if defined(TARGET_PPC64)
tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
#endif
}
-static always_inline void gen_store_gpr64(int reg, TCGv t) {
+static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) {
#if defined(TARGET_PPC64)
tcg_gen_mov_i64(cpu_gpr[reg], t);
#else
+ TCGv_i64 tmp = tcg_temp_new_i64();
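+ /* 32-bit target: split the 64-bit value, low word into gpr, high word into gprh. */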
tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
tcg_gen_shri_i64(tmp, t, 32);
tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
- tcg_temp_free(tmp);
+ tcg_temp_free_i64(tmp);
#endif
}
GEN_EXCP_NO_AP(ctx); \
return; \
} \
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t2 = tcg_temp_local_new(TCG_TYPE_I64); \
+ TCGv_i32 t0 = tcg_temp_local_new_i32(); \
+ TCGv_i32 t1 = tcg_temp_local_new_i32(); \
+ TCGv_i64 t2 = tcg_temp_local_new_i64(); \
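+ /* The 64-bit GPR holds two 32-bit SPE elements: operate on each half, then recombine. */ \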
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
tcg_opi(t0, t0, rB(ctx->opcode)); \
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
tcg_gen_trunc_i64_i32(t1, t2); \
- tcg_temp_free(t2); \
+ tcg_temp_free_i64(t2); \
tcg_opi(t1, t1, rB(ctx->opcode)); \
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
- tcg_temp_free(t0); \
- tcg_temp_free(t1); \
+ tcg_temp_free_i32(t0); \
+ tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
GEN_EXCP_NO_AP(ctx); \
return; \
} \
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t2 = tcg_temp_local_new(TCG_TYPE_I64); \
+ TCGv_i32 t0 = tcg_temp_local_new_i32(); \
+ TCGv_i32 t1 = tcg_temp_local_new_i32(); \
+ TCGv_i64 t2 = tcg_temp_local_new_i64(); \
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
tcg_op(t0, t0); \
tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
tcg_gen_trunc_i64_i32(t1, t2); \
- tcg_temp_free(t2); \
+ tcg_temp_free_i64(t2); \
tcg_op(t1, t1); \
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
- tcg_temp_free(t0); \
- tcg_temp_free(t1); \
+ tcg_temp_free_i32(t0); \
+ tcg_temp_free_i32(t1); \
}
#else
-#define GEN_SPEOP_ARITH1(name, tcg_op) \
+#define GEN_SPEOP_ARITH1(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
{ \
if (unlikely(!ctx->spe_enabled)) { \
}
#endif
-static always_inline void gen_op_evabs (TCGv ret, TCGv arg1)
+static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
{
int l1 = gen_new_label();
int l2 = gen_new_label();
tcg_gen_neg_i32(ret, arg1);
tcg_gen_br(l2);
gen_set_label(l1);
- tcg_gen_mov_tl(ret, arg1);
+ tcg_gen_mov_i32(ret, arg1);
gen_set_label(l2);
}
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
-static always_inline void gen_op_evrndw (TCGv ret, TCGv arg1)
+static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
{
tcg_gen_addi_i32(ret, arg1, 0x8000);
tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
-static always_inline void gen_op_cntlsw (TCGv ret, TCGv arg1)
-{
- tcg_gen_helper_1_1(helper_cntlsw32, ret, arg1);
-}
-GEN_SPEOP_ARITH1(evcntlsw, gen_op_cntlsw);
-static always_inline void gen_op_cntlzw (TCGv ret, TCGv arg1)
-{
- tcg_gen_helper_1_1(helper_cntlzw32, ret, arg1);
-}
-GEN_SPEOP_ARITH1(evcntlzw, gen_op_cntlzw);
+GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
+GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op) \
GEN_EXCP_NO_AP(ctx); \
return; \
} \
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t2 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t3 = tcg_temp_local_new(TCG_TYPE_I64); \
+ TCGv_i32 t0 = tcg_temp_local_new_i32(); \
+ TCGv_i32 t1 = tcg_temp_local_new_i32(); \
+ TCGv_i32 t2 = tcg_temp_local_new_i32(); \
+ TCGv_i64 t3 = tcg_temp_local_new_i64(); \
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
tcg_op(t0, t0, t2); \
tcg_gen_trunc_i64_i32(t1, t3); \
tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
tcg_gen_trunc_i64_i32(t2, t3); \
- tcg_temp_free(t3); \
+ tcg_temp_free_i64(t3); \
tcg_op(t1, t1, t2); \
- tcg_temp_free(t2); \
+ tcg_temp_free_i32(t2); \
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
- tcg_temp_free(t0); \
- tcg_temp_free(t1); \
+ tcg_temp_free_i32(t0); \
+ tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op) \
}
#endif
-static always_inline void gen_op_evsrwu (TCGv ret, TCGv arg1, TCGv arg2)
+static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
+ TCGv_i32 t0;
int l1, l2;
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_I32);
+ t0 = tcg_temp_local_new_i32();
/* No error here: 6 bits are used */
tcg_gen_andi_i32(t0, arg2, 0x3F);
tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
gen_set_label(l1);
tcg_gen_movi_i32(ret, 0);
tcg_gen_br(l2);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
-static always_inline void gen_op_evsrws (TCGv ret, TCGv arg1, TCGv arg2)
+static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
+ TCGv_i32 t0;
int l1, l2;
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_I32);
+ t0 = tcg_temp_local_new_i32();
/* No error here: 6 bits are used */
tcg_gen_andi_i32(t0, arg2, 0x3F);
tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
gen_set_label(l1);
tcg_gen_movi_i32(ret, 0);
tcg_gen_br(l2);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
-static always_inline void gen_op_evslw (TCGv ret, TCGv arg1, TCGv arg2)
+static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
+ TCGv_i32 t0;
int l1, l2;
l1 = gen_new_label();
l2 = gen_new_label();
- t0 = tcg_temp_local_new(TCG_TYPE_I32);
+ t0 = tcg_temp_local_new_i32();
/* No error here: 6 bits are used */
tcg_gen_andi_i32(t0, arg2, 0x3F);
tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
gen_set_label(l1);
tcg_gen_movi_i32(ret, 0);
tcg_gen_br(l2);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
-static always_inline void gen_op_evrlw (TCGv ret, TCGv arg1, TCGv arg2)
+static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0 = tcg_temp_new_i32();
tcg_gen_andi_i32(t0, arg2, 0x1F);
tcg_gen_rotl_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
static always_inline void gen_evmergehi (DisasContext *ctx)
return;
}
#if defined(TARGET_PPC64)
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
#endif
}
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
-static always_inline void gen_op_evsubf (TCGv ret, TCGv arg1, TCGv arg2)
+static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
tcg_gen_sub_i32(ret, arg2, arg1);
}
GEN_EXCP_NO_AP(ctx); \
return; \
} \
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t2 = tcg_temp_local_new(TCG_TYPE_I64); \
+ TCGv_i32 t0 = tcg_temp_local_new_i32(); \
+ TCGv_i32 t1 = tcg_temp_local_new_i32(); \
+ TCGv_i64 t2 = tcg_temp_local_new_i64(); \
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
tcg_op(t0, t0, rA(ctx->opcode)); \
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
tcg_gen_trunc_i64_i32(t1, t2); \
- tcg_temp_free(t2); \
+ tcg_temp_free_i64(t2); \
tcg_op(t1, t1, rA(ctx->opcode)); \
tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
- tcg_temp_free(t0); \
- tcg_temp_free(t1); \
+ tcg_temp_free_i32(t0); \
+ tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
int l2 = gen_new_label(); \
int l3 = gen_new_label(); \
int l4 = gen_new_label(); \
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_I32); \
- TCGv t2 = tcg_temp_local_new(TCG_TYPE_I64); \
+ TCGv_i32 t0 = tcg_temp_local_new_i32(); \
+ TCGv_i32 t1 = tcg_temp_local_new_i32(); \
+ TCGv_i64 t2 = tcg_temp_local_new_i64(); \
tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
- tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
+ tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
tcg_gen_br(l2); \
gen_set_label(l1); \
tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
tcg_gen_trunc_i64_i32(t0, t2); \
tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
tcg_gen_trunc_i64_i32(t1, t2); \
- tcg_temp_free(t2); \
+ tcg_temp_free_i64(t2); \
tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
~(CRF_CH | CRF_CH_AND_CL)); \
tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
CRF_CH | CRF_CH_OR_CL); \
gen_set_label(l4); \
- tcg_temp_free(t0); \
- tcg_temp_free(t1); \
+ tcg_temp_free_i32(t0); \
+ tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_COMP(name, tcg_cond) \
static always_inline void gen_brinc (DisasContext *ctx)
{
/* Note: brinc is usable even if SPE is disabled */
- tcg_gen_helper_1_2(helper_brinc, cpu_gpr[rD(ctx->opcode)],
- cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
+ gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
+ cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
static always_inline void gen_evmergelo (DisasContext *ctx)
{
return;
}
#if defined(TARGET_PPC64)
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
return;
}
#if defined(TARGET_PPC64)
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
return;
}
#if defined(TARGET_PPC64)
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
int32_t imm = (int32_t)(rA(ctx->opcode) << 11) >> 27;
#if defined(TARGET_PPC64)
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
tcg_gen_movi_tl(t0, imm);
tcg_gen_shri_tl(t1, t0, 32);
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
uint32_t imm = rA(ctx->opcode) << 11;
#if defined(TARGET_PPC64)
- TCGv t0 = tcg_temp_new(TCG_TYPE_TL);
- TCGv t1 = tcg_temp_new(TCG_TYPE_TL);
+ TCGv t0 = tcg_temp_new();
+ TCGv t1 = tcg_temp_new();
tcg_gen_movi_tl(t0, imm);
tcg_gen_shri_tl(t1, t0, 32);
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
int l2 = gen_new_label();
int l3 = gen_new_label();
int l4 = gen_new_label();
- TCGv t0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
- TCGv t1 = tcg_temp_local_new(TCG_TYPE_TL);
- TCGv t2 = tcg_temp_local_new(TCG_TYPE_TL);
+ TCGv t1 = tcg_temp_local_new();
+ TCGv t2 = tcg_temp_local_new();
#endif
tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
gen_set_label(l4);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
tcg_temp_free(t1);
#define GEN_SPEFPUOP_COMP(name) \
static always_inline void gen_##name (DisasContext *ctx) \
{ \
+ TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)]; \
if (unlikely(!ctx->spe_enabled)) { \
GEN_EXCP_NO_AP(ctx); \
return; \
gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
gen_op_##name(); \
- tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
+ tcg_gen_trunc_tl_i32(crf, cpu_T[0]); \
+ tcg_gen_andi_i32(crf, crf, 0xf); \
}
/* Single precision floating-point vectors operations */
-#ifndef DEF_HELPER
-#define DEF_HELPER(ret, name, params) ret name params;
-#endif
+#include "def-helper.h"
-DEF_HELPER(void, helper_ldtlb, (void))
-DEF_HELPER(void, helper_raise_illegal_instruction, (void))
-DEF_HELPER(void, helper_raise_slot_illegal_instruction, (void))
-DEF_HELPER(void, helper_debug, (void))
-DEF_HELPER(void, helper_sleep, (uint32_t))
-DEF_HELPER(void, helper_trapa, (uint32_t))
+DEF_HELPER_0(ldtlb, void)
+DEF_HELPER_0(raise_illegal_instruction, void)
+DEF_HELPER_0(raise_slot_illegal_instruction, void)
+DEF_HELPER_0(debug, void)
+DEF_HELPER_1(sleep, void, i32)
+DEF_HELPER_1(trapa, void, i32)
-DEF_HELPER(uint32_t, helper_addv, (uint32_t, uint32_t))
-DEF_HELPER(uint32_t, helper_addc, (uint32_t, uint32_t))
-DEF_HELPER(uint32_t, helper_subv, (uint32_t, uint32_t))
-DEF_HELPER(uint32_t, helper_subc, (uint32_t, uint32_t))
-DEF_HELPER(uint32_t, helper_negc, (uint32_t))
-DEF_HELPER(uint32_t, helper_div1, (uint32_t, uint32_t))
-DEF_HELPER(void, helper_macl, (uint32_t, uint32_t))
-DEF_HELPER(void, helper_macw, (uint32_t, uint32_t))
+DEF_HELPER_2(addv, i32, i32, i32)
+DEF_HELPER_2(addc, i32, i32, i32)
+DEF_HELPER_2(subv, i32, i32, i32)
+DEF_HELPER_2(subc, i32, i32, i32)
+DEF_HELPER_1(negc, i32, i32)
+DEF_HELPER_2(div1, i32, i32, i32)
+DEF_HELPER_2(macl, void, i32, i32)
+DEF_HELPER_2(macw, void, i32, i32)
-DEF_HELPER(void, helper_ld_fpscr, (uint32_t))
+DEF_HELPER_1(ld_fpscr, void, i32)
-DEF_HELPER(uint32_t, helper_fabs_FT, (uint32_t))
-DEF_HELPER(uint64_t, helper_fabs_DT, (uint64_t))
-DEF_HELPER(uint32_t, helper_fadd_FT, (uint32_t, uint32_t))
-DEF_HELPER(uint64_t, helper_fadd_DT, (uint64_t, uint64_t))
-DEF_HELPER(uint64_t, helper_fcnvsd_FT_DT, (uint32_t))
-DEF_HELPER(uint32_t, helper_fcnvds_DT_FT, (uint64_t))
+DEF_HELPER_1(fabs_FT, i32, i32)
+DEF_HELPER_1(fabs_DT, i64, i64)
+DEF_HELPER_2(fadd_FT, i32, i32, i32)
+DEF_HELPER_2(fadd_DT, i64, i64, i64)
+DEF_HELPER_1(fcnvsd_FT_DT, i64, i32)
+DEF_HELPER_1(fcnvds_DT_FT, i32, i64)
-DEF_HELPER(void, helper_fcmp_eq_FT, (uint32_t, uint32_t))
-DEF_HELPER(void, helper_fcmp_eq_DT, (uint64_t, uint64_t))
-DEF_HELPER(void, helper_fcmp_gt_FT, (uint32_t, uint32_t))
-DEF_HELPER(void, helper_fcmp_gt_DT, (uint64_t, uint64_t))
-DEF_HELPER(uint32_t, helper_fdiv_FT, (uint32_t, uint32_t))
-DEF_HELPER(uint64_t, helper_fdiv_DT, (uint64_t, uint64_t))
-DEF_HELPER(uint32_t, helper_float_FT, (uint32_t))
-DEF_HELPER(uint64_t, helper_float_DT, (uint32_t))
-DEF_HELPER(uint32_t, helper_fmul_FT, (uint32_t, uint32_t))
-DEF_HELPER(uint64_t, helper_fmul_DT, (uint64_t, uint64_t))
-DEF_HELPER(uint32_t, helper_fneg_T, (uint32_t))
-DEF_HELPER(uint32_t, helper_fsub_FT, (uint32_t, uint32_t))
-DEF_HELPER(uint64_t, helper_fsub_DT, (uint64_t, uint64_t))
-DEF_HELPER(uint32_t, helper_fsqrt_FT, (uint32_t))
-DEF_HELPER(uint64_t, helper_fsqrt_DT, (uint64_t))
-DEF_HELPER(uint32_t, helper_ftrc_FT, (uint32_t))
-DEF_HELPER(uint32_t, helper_ftrc_DT, (uint64_t))
+DEF_HELPER_2(fcmp_eq_FT, void, i32, i32)
+DEF_HELPER_2(fcmp_eq_DT, void, i64, i64)
+DEF_HELPER_2(fcmp_gt_FT, void, i32, i32)
+DEF_HELPER_2(fcmp_gt_DT, void, i64, i64)
+DEF_HELPER_2(fdiv_FT, i32, i32, i32)
+DEF_HELPER_2(fdiv_DT, i64, i64, i64)
+DEF_HELPER_1(float_FT, i32, i32)
+DEF_HELPER_1(float_DT, i64, i32)
+DEF_HELPER_2(fmul_FT, i32, i32, i32)
+DEF_HELPER_2(fmul_DT, i64, i64, i64)
+DEF_HELPER_1(fneg_T, i32, i32)
+DEF_HELPER_2(fsub_FT, i32, i32, i32)
+DEF_HELPER_2(fsub_DT, i64, i64, i64)
+DEF_HELPER_1(fsqrt_FT, i32, i32)
+DEF_HELPER_1(fsqrt_DT, i64, i64)
+DEF_HELPER_1(ftrc_FT, i32, i32)
+DEF_HELPER_1(ftrc_DT, i32, i64)
+
+#include "def-helper.h"
*/
#include <assert.h>
#include "exec.h"
+#include "helper.h"
#ifndef CONFIG_USER_ONLY
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
-#include "helper.h"
#include "tcg-op.h"
#include "qemu-common.h"
+#include "helper.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
typedef struct DisasContext {
struct TranslationBlock *tb;
target_ulong pc;
};
/* global register indexes */
-static TCGv cpu_env;
+static TCGv_ptr cpu_env;
static TCGv cpu_gregs[24];
static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
if (done_init)
return;
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
for (i = 0; i < 24; i++)
- cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
+ cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
offsetof(CPUState, gregs[i]),
gregnames[i]);
- cpu_pc = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, pc), "PC");
- cpu_sr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, sr), "SR");
- cpu_ssr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, ssr), "SSR");
- cpu_spc = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, spc), "SPC");
- cpu_gbr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, gbr), "GBR");
- cpu_vbr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, vbr), "VBR");
- cpu_sgr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, sgr), "SGR");
- cpu_dbr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, dbr), "DBR");
- cpu_mach = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, mach), "MACH");
- cpu_macl = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, macl), "MACL");
- cpu_pr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, pr), "PR");
- cpu_fpscr = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, fpscr), "FPSCR");
- cpu_fpul = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, fpul), "FPUL");
-
- cpu_flags = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, flags), "_flags_");
- cpu_delayed_pc = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, delayed_pc),
- "_delayed_pc_");
+ cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, pc), "PC");
+ cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, sr), "SR");
+ cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, ssr), "SSR");
+ cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, spc), "SPC");
+ cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, gbr), "GBR");
+ cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, vbr), "VBR");
+ cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, sgr), "SGR");
+ cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, dbr), "DBR");
+ cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, mach), "MACH");
+ cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, macl), "MACL");
+ cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, pr), "PR");
+ cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, fpscr), "FPSCR");
+ cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, fpul), "FPUL");
+
+ cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, flags), "_flags_");
+ cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, delayed_pc),
+ "_delayed_pc_");
/* register helpers */
-#undef DEF_HELPER
-#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+#define GEN_HELPER 2
#include "helper.h"
done_init = 1;
} else {
tcg_gen_movi_i32(cpu_pc, dest);
if (ctx->singlestep_enabled)
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
tcg_gen_exit_tb(0);
}
}
delayed jump as immediate jump are conditional jumps */
tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
if (ctx->singlestep_enabled)
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
tcg_gen_exit_tb(0);
} else {
gen_goto_tb(ctx, 0, ctx->delayed_pc);
TCGv sr;
int label = gen_new_label();
tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
- sr = tcg_temp_new(TCG_TYPE_I32);
+ sr = tcg_temp_new();
tcg_gen_andi_i32(sr, cpu_sr, SR_T);
tcg_gen_brcondi_i32(TCG_COND_NE, sr, t ? SR_T : 0, label);
tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
TCGv sr;
l1 = gen_new_label();
- sr = tcg_temp_new(TCG_TYPE_I32);
+ sr = tcg_temp_new();
tcg_gen_andi_i32(sr, cpu_sr, SR_T);
tcg_gen_brcondi_i32(TCG_COND_EQ, sr, SR_T, l1);
gen_goto_tb(ctx, 0, ifnott);
TCGv ds;
l1 = gen_new_label();
- ds = tcg_temp_new(TCG_TYPE_I32);
+ ds = tcg_temp_new();
tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
tcg_gen_brcondi_i32(TCG_COND_EQ, ds, DELAY_SLOT_TRUE, l1);
gen_goto_tb(ctx, 1, ctx->pc + 2);
static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv tmp = tcg_temp_new();
p0 &= 0x1f;
p1 &= 0x1f;
}
-static inline void gen_load_fpr32(TCGv t, int reg)
+static inline void gen_load_fpr32(TCGv_i32 t, int reg)
{
tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, fregs[reg]));
}
-static inline void gen_load_fpr64(TCGv t, int reg)
+static inline void gen_load_fpr64(TCGv_i64 t, int reg)
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 tmp1 = tcg_temp_new_i32();
+ TCGv_i32 tmp2 = tcg_temp_new_i32();
tcg_gen_ld_i32(tmp1, cpu_env, offsetof(CPUState, fregs[reg]));
tcg_gen_ld_i32(tmp2, cpu_env, offsetof(CPUState, fregs[reg + 1]));
tcg_gen_concat_i32_i64(t, tmp2, tmp1);
- tcg_temp_free(tmp1);
- tcg_temp_free(tmp2);
+ tcg_temp_free_i32(tmp1);
+ tcg_temp_free_i32(tmp2);
}
-static inline void gen_store_fpr32(TCGv t, int reg)
+static inline void gen_store_fpr32(TCGv_i32 t, int reg)
{
tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, fregs[reg]));
}
-static inline void gen_store_fpr64 (TCGv t, int reg)
+static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 tmp = tcg_temp_new_i32();
tcg_gen_trunc_i64_i32(tmp, t);
tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, fregs[reg + 1]));
tcg_gen_shri_i64(t, t, 32);
tcg_gen_trunc_i64_i32(tmp, t);
tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, fregs[reg]));
- tcg_temp_free(tmp);
+ tcg_temp_free_i32(tmp);
}
#define B3_0 (ctx->opcode & 0xf)
#define CHECK_NOT_DELAY_SLOT \
if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) \
- {tcg_gen_helper_0_0(helper_raise_slot_illegal_instruction); ctx->bstate = BS_EXCP; \
+ {gen_helper_raise_slot_illegal_instruction(); ctx->bstate = BS_EXCP; \
return;}
#define CHECK_PRIVILEGED \
if (IS_USER(ctx)) { \
- tcg_gen_helper_0_0(helper_raise_illegal_instruction); \
+ gen_helper_raise_illegal_instruction(); \
ctx->bstate = BS_EXCP; \
return; \
}
return;
case 0x0038: /* ldtlb */
CHECK_PRIVILEGED
- tcg_gen_helper_0_0(helper_ldtlb);
+ gen_helper_ldtlb();
return;
case 0x002b: /* rte */
CHECK_PRIVILEGED
return;
case 0x001b: /* sleep */
CHECK_PRIVILEGED
- tcg_gen_helper_0_1(helper_sleep, tcg_const_i32(ctx->pc + 2));
+ gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
return;
}
switch (ctx->opcode & 0xf000) {
case 0x1000: /* mov.l Rm,@(disp,Rn) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x5000: /* mov.l @(disp,Rm),Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x2004: /* mov.b Rm,@-Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 1);
tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx); /* might cause re-execution */
tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); /* modify register status */
return;
case 0x2005: /* mov.w Rm,@-Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 2);
tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 2);
return;
case 0x2006: /* mov.l Rm,@-Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 4);
tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
return;
case 0x0004: /* mov.b Rm,@(R0,Rn) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B11_8), REG(0));
tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x0005: /* mov.w Rm,@(R0,Rn) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B11_8), REG(0));
tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x0006: /* mov.l Rm,@(R0,Rn) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B11_8), REG(0));
tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x000c: /* mov.b @(R0,Rm),Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B7_4), REG(0));
tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x000d: /* mov.w @(R0,Rm),Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B7_4), REG(0));
tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x000e: /* mov.l @(R0,Rm),Rn */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B7_4), REG(0));
tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
tcg_temp_free(addr);
case 0x6008: /* swap.b Rm,Rn */
{
TCGv highw, high, low;
- highw = tcg_temp_new(TCG_TYPE_I32);
+ highw = tcg_temp_new();
tcg_gen_andi_i32(highw, REG(B7_4), 0xffff0000);
- high = tcg_temp_new(TCG_TYPE_I32);
+ high = tcg_temp_new();
tcg_gen_ext8u_i32(high, REG(B7_4));
tcg_gen_shli_i32(high, high, 8);
- low = tcg_temp_new(TCG_TYPE_I32);
+ low = tcg_temp_new();
tcg_gen_shri_i32(low, REG(B7_4), 8);
tcg_gen_ext8u_i32(low, low);
tcg_gen_or_i32(REG(B11_8), high, low);
case 0x6009: /* swap.w Rm,Rn */
{
TCGv high, low;
- high = tcg_temp_new(TCG_TYPE_I32);
+ high = tcg_temp_new();
tcg_gen_ext16u_i32(high, REG(B7_4));
tcg_gen_shli_i32(high, high, 16);
- low = tcg_temp_new(TCG_TYPE_I32);
+ low = tcg_temp_new();
tcg_gen_shri_i32(low, REG(B7_4), 16);
tcg_gen_ext16u_i32(low, low);
tcg_gen_or_i32(REG(B11_8), high, low);
case 0x200d: /* xtrct Rm,Rn */
{
TCGv high, low;
- high = tcg_temp_new(TCG_TYPE_I32);
+ high = tcg_temp_new();
tcg_gen_ext16u_i32(high, REG(B7_4));
tcg_gen_shli_i32(high, high, 16);
- low = tcg_temp_new(TCG_TYPE_I32);
+ low = tcg_temp_new();
tcg_gen_shri_i32(low, REG(B11_8), 16);
tcg_gen_ext16u_i32(low, low);
tcg_gen_or_i32(REG(B11_8), high, low);
tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
return;
case 0x300e: /* addc Rm,Rn */
- tcg_gen_helper_1_2(helper_addc, REG(B11_8), REG(B7_4), REG(B11_8));
+ gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
return;
case 0x300f: /* addv Rm,Rn */
- tcg_gen_helper_1_2(helper_addv, REG(B11_8), REG(B7_4), REG(B11_8));
+ gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
return;
case 0x2009: /* and Rm,Rn */
tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
{
gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31); /* SR_Q */
gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31); /* SR_M */
- TCGv val = tcg_temp_new(TCG_TYPE_I32);
+ TCGv val = tcg_temp_new();
tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
gen_copy_bit_i32(cpu_sr, 0, val, 31); /* SR_T */
tcg_temp_free(val);
}
return;
case 0x3004: /* div1 Rm,Rn */
- tcg_gen_helper_1_2(helper_div1, REG(B11_8), REG(B7_4), REG(B11_8));
+ gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
return;
case 0x300d: /* dmuls.l Rm,Rn */
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp1 = tcg_temp_new_i64();
+ TCGv_i64 tmp2 = tcg_temp_new_i64();
tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
tcg_gen_shri_i64(tmp1, tmp1, 32);
tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
- tcg_temp_free(tmp2);
- tcg_temp_free(tmp1);
+ tcg_temp_free_i64(tmp2);
+ tcg_temp_free_i64(tmp1);
}
return;
case 0x3005: /* dmulu.l Rm,Rn */
{
- TCGv tmp1 = tcg_temp_new(TCG_TYPE_I64);
- TCGv tmp2 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp1 = tcg_temp_new_i64();
+ TCGv_i64 tmp2 = tcg_temp_new_i64();
tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
tcg_gen_shri_i64(tmp1, tmp1, 32);
tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
- tcg_temp_free(tmp2);
- tcg_temp_free(tmp1);
+ tcg_temp_free_i64(tmp2);
+ tcg_temp_free_i64(tmp1);
}
return;
case 0x600e: /* exts.b Rm,Rn */
case 0x000f: /* mac.l @Rm+,@Rn+ */
{
TCGv arg0, arg1;
- arg0 = tcg_temp_new(TCG_TYPE_I32);
+ arg0 = tcg_temp_new();
tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
- arg1 = tcg_temp_new(TCG_TYPE_I32);
+ arg1 = tcg_temp_new();
tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
- tcg_gen_helper_0_2(helper_macl, arg0, arg1);
+ gen_helper_macl(arg0, arg1);
tcg_temp_free(arg1);
tcg_temp_free(arg0);
tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
case 0x400f: /* mac.w @Rm+,@Rn+ */
{
TCGv arg0, arg1;
- arg0 = tcg_temp_new(TCG_TYPE_I32);
+ arg0 = tcg_temp_new();
tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
- arg1 = tcg_temp_new(TCG_TYPE_I32);
+ arg1 = tcg_temp_new();
tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
- tcg_gen_helper_0_2(helper_macw, arg0, arg1);
+ gen_helper_macw(arg0, arg1);
tcg_temp_free(arg1);
tcg_temp_free(arg0);
tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
case 0x200f: /* muls.w Rm,Rn */
{
TCGv arg0, arg1;
- arg0 = tcg_temp_new(TCG_TYPE_I32);
+ arg0 = tcg_temp_new();
tcg_gen_ext16s_i32(arg0, REG(B7_4));
- arg1 = tcg_temp_new(TCG_TYPE_I32);
+ arg1 = tcg_temp_new();
tcg_gen_ext16s_i32(arg1, REG(B11_8));
tcg_gen_mul_i32(cpu_macl, arg0, arg1);
tcg_temp_free(arg1);
case 0x200e: /* mulu.w Rm,Rn */
{
TCGv arg0, arg1;
- arg0 = tcg_temp_new(TCG_TYPE_I32);
+ arg0 = tcg_temp_new();
tcg_gen_ext16u_i32(arg0, REG(B7_4));
- arg1 = tcg_temp_new(TCG_TYPE_I32);
+ arg1 = tcg_temp_new();
tcg_gen_ext16u_i32(arg1, REG(B11_8));
tcg_gen_mul_i32(cpu_macl, arg0, arg1);
tcg_temp_free(arg1);
tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
return;
case 0x600a: /* negc Rm,Rn */
- tcg_gen_helper_1_1(helper_negc, REG(B11_8), REG(B7_4));
+ gen_helper_negc(REG(B11_8), REG(B7_4));
return;
case 0x6007: /* not Rm,Rn */
tcg_gen_not_i32(REG(B11_8), REG(B7_4));
tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
return;
case 0x300a: /* subc Rm,Rn */
- tcg_gen_helper_1_2(helper_subc, REG(B11_8), REG(B7_4), REG(B11_8));
+ gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
return;
case 0x300b: /* subv Rm,Rn */
- tcg_gen_helper_1_2(helper_subv, REG(B11_8), REG(B7_4), REG(B11_8));
+ gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
return;
case 0x2008: /* tst Rm,Rn */
{
- TCGv val = tcg_temp_new(TCG_TYPE_I32);
+ TCGv val = tcg_temp_new();
tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
gen_cmp_imm(TCG_COND_EQ, val, 0);
tcg_temp_free(val);
return;
case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
if (ctx->fpscr & FPSCR_SZ) {
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
gen_load_fpr64(fp, XREG(B7_4));
gen_store_fpr64(fp, XREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B7_4));
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
if (ctx->fpscr & FPSCR_SZ) {
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
gen_load_fpr64(fp, XREG(B7_4));
tcg_gen_qemu_st64(fp, REG(B11_8), ctx->memidx);
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B7_4));
tcg_gen_qemu_st32(fp, REG(B11_8), ctx->memidx);
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
if (ctx->fpscr & FPSCR_SZ) {
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
tcg_gen_qemu_ld64(fp, REG(B7_4), ctx->memidx);
gen_store_fpr64(fp, XREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
tcg_gen_qemu_ld32u(fp, REG(B7_4), ctx->memidx);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
if (ctx->fpscr & FPSCR_SZ) {
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
tcg_gen_qemu_ld64(fp, REG(B7_4), ctx->memidx);
gen_store_fpr64(fp, XREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
tcg_gen_addi_i32(REG(B7_4),REG(B7_4), 8);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
tcg_gen_qemu_ld32u(fp, REG(B7_4), ctx->memidx);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
}
return;
case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
if (ctx->fpscr & FPSCR_SZ) {
- TCGv addr, fp;
- addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr;
+ TCGv_i64 fp;
+ addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 8);
- fp = tcg_temp_new(TCG_TYPE_I64);
+ fp = tcg_temp_new_i64();
gen_load_fpr64(fp, XREG(B7_4));
tcg_gen_qemu_st64(fp, addr, ctx->memidx);
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
tcg_temp_free(addr);
tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 8);
} else {
- TCGv addr, fp;
- addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr;
+ TCGv_i32 fp;
+ addr = tcg_temp_new_i32();
tcg_gen_subi_i32(addr, REG(B11_8), 4);
- fp = tcg_temp_new(TCG_TYPE_I32);
+ fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B7_4));
tcg_gen_qemu_st32(fp, addr, ctx->memidx);
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
tcg_temp_free(addr);
tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
}
return;
case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new_i32();
tcg_gen_add_i32(addr, REG(B7_4), REG(0));
if (ctx->fpscr & FPSCR_SZ) {
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
tcg_gen_qemu_ld64(fp, addr, ctx->memidx);
gen_store_fpr64(fp, XREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
tcg_gen_qemu_ld32u(fp, addr, ctx->memidx);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
tcg_temp_free(addr);
}
return;
case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(B11_8), REG(0));
if (ctx->fpscr & FPSCR_SZ) {
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
gen_load_fpr64(fp, XREG(B7_4));
tcg_gen_qemu_st64(fp, addr, ctx->memidx);
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B7_4));
tcg_gen_qemu_st32(fp, addr, ctx->memidx);
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
tcg_temp_free(addr);
}
case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
{
- TCGv fp0, fp1;
-
if (ctx->fpscr & FPSCR_PR) {
+ TCGv_i64 fp0, fp1;
+
if (ctx->opcode & 0x0110)
break; /* illegal instruction */
- fp0 = tcg_temp_new(TCG_TYPE_I64);
- fp1 = tcg_temp_new(TCG_TYPE_I64);
+ fp0 = tcg_temp_new_i64();
+ fp1 = tcg_temp_new_i64();
gen_load_fpr64(fp0, DREG(B11_8));
gen_load_fpr64(fp1, DREG(B7_4));
- }
- else {
- fp0 = tcg_temp_new(TCG_TYPE_I32);
- fp1 = tcg_temp_new(TCG_TYPE_I32);
+ switch (ctx->opcode & 0xf00f) {
+ case 0xf000: /* fadd Rm,Rn */
+ gen_helper_fadd_DT(fp0, fp0, fp1);
+ break;
+ case 0xf001: /* fsub Rm,Rn */
+ gen_helper_fsub_DT(fp0, fp0, fp1);
+ break;
+ case 0xf002: /* fmul Rm,Rn */
+ gen_helper_fmul_DT(fp0, fp0, fp1);
+ break;
+ case 0xf003: /* fdiv Rm,Rn */
+ gen_helper_fdiv_DT(fp0, fp0, fp1);
+ break;
+ case 0xf004: /* fcmp/eq Rm,Rn */
+ gen_helper_fcmp_eq_DT(fp0, fp1);
+ return;
+ case 0xf005: /* fcmp/gt Rm,Rn */
+ gen_helper_fcmp_gt_DT(fp0, fp1);
+ return;
+ }
+ gen_store_fpr64(fp0, DREG(B11_8));
+ tcg_temp_free_i64(fp0);
+ tcg_temp_free_i64(fp1);
+ } else {
+ TCGv_i32 fp0, fp1;
+
+ fp0 = tcg_temp_new_i32();
+ fp1 = tcg_temp_new_i32();
gen_load_fpr32(fp0, FREG(B11_8));
gen_load_fpr32(fp1, FREG(B7_4));
- }
- switch (ctx->opcode & 0xf00f) {
- case 0xf000: /* fadd Rm,Rn */
- if (ctx->fpscr & FPSCR_PR)
- tcg_gen_helper_1_2(helper_fadd_DT, fp0, fp0, fp1);
- else
- tcg_gen_helper_1_2(helper_fadd_FT, fp0, fp0, fp1);
- break;
- case 0xf001: /* fsub Rm,Rn */
- if (ctx->fpscr & FPSCR_PR)
- tcg_gen_helper_1_2(helper_fsub_DT, fp0, fp0, fp1);
- else
- tcg_gen_helper_1_2(helper_fsub_FT, fp0, fp0, fp1);
- break;
- case 0xf002: /* fmul Rm,Rn */
- if (ctx->fpscr & FPSCR_PR)
- tcg_gen_helper_1_2(helper_fmul_DT, fp0, fp0, fp1);
- else
- tcg_gen_helper_1_2(helper_fmul_FT, fp0, fp0, fp1);
- break;
- case 0xf003: /* fdiv Rm,Rn */
- if (ctx->fpscr & FPSCR_PR)
- tcg_gen_helper_1_2(helper_fdiv_DT, fp0, fp0, fp1);
- else
- tcg_gen_helper_1_2(helper_fdiv_FT, fp0, fp0, fp1);
- break;
- case 0xf004: /* fcmp/eq Rm,Rn */
- if (ctx->fpscr & FPSCR_PR)
- tcg_gen_helper_0_2(helper_fcmp_eq_DT, fp0, fp1);
- else
- tcg_gen_helper_0_2(helper_fcmp_eq_FT, fp0, fp1);
- return;
- case 0xf005: /* fcmp/gt Rm,Rn */
- if (ctx->fpscr & FPSCR_PR)
- tcg_gen_helper_0_2(helper_fcmp_gt_DT, fp0, fp1);
- else
- tcg_gen_helper_0_2(helper_fcmp_gt_FT, fp0, fp1);
- return;
- }
-
- if (ctx->fpscr & FPSCR_PR) {
- gen_store_fpr64(fp0, DREG(B11_8));
- }
- else {
+ switch (ctx->opcode & 0xf00f) {
+ case 0xf000: /* fadd Rm,Rn */
+ gen_helper_fadd_FT(fp0, fp0, fp1);
+ break;
+ case 0xf001: /* fsub Rm,Rn */
+ gen_helper_fsub_FT(fp0, fp0, fp1);
+ break;
+ case 0xf002: /* fmul Rm,Rn */
+ gen_helper_fmul_FT(fp0, fp0, fp1);
+ break;
+ case 0xf003: /* fdiv Rm,Rn */
+ gen_helper_fdiv_FT(fp0, fp0, fp1);
+ break;
+ case 0xf004: /* fcmp/eq Rm,Rn */
+ gen_helper_fcmp_eq_FT(fp0, fp1);
+ return;
+ case 0xf005: /* fcmp/gt Rm,Rn */
+ gen_helper_fcmp_gt_FT(fp0, fp1);
+ return;
+ }
gen_store_fpr32(fp0, FREG(B11_8));
+ tcg_temp_free_i32(fp0);
+ tcg_temp_free_i32(fp1);
}
- tcg_temp_free(fp1);
- tcg_temp_free(fp0);
}
return;
}
case 0xcd00: /* and.b #imm,@(R0,GBR) */
{
TCGv addr, val;
- addr = tcg_temp_new(TCG_TYPE_I32);
+ addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(0), cpu_gbr);
- val = tcg_temp_new(TCG_TYPE_I32);
+ val = tcg_temp_new();
tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
tcg_gen_andi_i32(val, val, B7_0);
tcg_gen_qemu_st8(val, addr, ctx->memidx);
return;
case 0xc400: /* mov.b @(disp,GBR),R0 */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0xc500: /* mov.w @(disp,GBR),R0 */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0xc600: /* mov.l @(disp,GBR),R0 */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0xc000: /* mov.b R0,@(disp,GBR) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0xc100: /* mov.w R0,@(disp,GBR) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0xc200: /* mov.l R0,@(disp,GBR) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x8000: /* mov.b R0,@(disp,Rn) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x8100: /* mov.w R0,@(disp,Rn) */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x8400: /* mov.b @(disp,Rn),R0 */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x8500: /* mov.w @(disp,Rn),R0 */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
tcg_temp_free(addr);
case 0xcf00: /* or.b #imm,@(R0,GBR) */
{
TCGv addr, val;
- addr = tcg_temp_new(TCG_TYPE_I32);
+ addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(0), cpu_gbr);
- val = tcg_temp_new(TCG_TYPE_I32);
+ val = tcg_temp_new();
tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
tcg_gen_ori_i32(val, val, B7_0);
tcg_gen_qemu_st8(val, addr, ctx->memidx);
CHECK_NOT_DELAY_SLOT
tcg_gen_movi_i32(cpu_pc, ctx->pc);
imm = tcg_const_i32(B7_0);
- tcg_gen_helper_0_1(helper_trapa, imm);
+ gen_helper_trapa(imm);
tcg_temp_free(imm);
ctx->bstate = BS_BRANCH;
}
return;
case 0xc800: /* tst #imm,R0 */
{
- TCGv val = tcg_temp_new(TCG_TYPE_I32);
+ TCGv val = tcg_temp_new();
tcg_gen_andi_i32(val, REG(0), B7_0);
gen_cmp_imm(TCG_COND_EQ, val, 0);
tcg_temp_free(val);
return;
case 0xcc00: /* tst.b #imm,@(R0,GBR) */
{
- TCGv val = tcg_temp_new(TCG_TYPE_I32);
+ TCGv val = tcg_temp_new();
tcg_gen_add_i32(val, REG(0), cpu_gbr);
tcg_gen_qemu_ld8u(val, val, ctx->memidx);
tcg_gen_andi_i32(val, val, B7_0);
case 0xce00: /* xor.b #imm,@(R0,GBR) */
{
TCGv addr, val;
- addr = tcg_temp_new(TCG_TYPE_I32);
+ addr = tcg_temp_new();
tcg_gen_add_i32(addr, REG(0), cpu_gbr);
- val = tcg_temp_new(TCG_TYPE_I32);
+ val = tcg_temp_new();
tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
tcg_gen_xori_i32(val, val, B7_0);
tcg_gen_qemu_st8(val, addr, ctx->memidx);
case 0x4083: /* stc.l Rm_BANK,@-Rn */
CHECK_PRIVILEGED
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 4);
tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
tcg_temp_free(addr);
case 0x4007: /* ldc.l @Rm+,SR */
CHECK_PRIVILEGED
{
- TCGv val = tcg_temp_new(TCG_TYPE_I32);
+ TCGv val = tcg_temp_new();
tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
tcg_temp_free(val);
case 0x4003: /* stc SR,@-Rn */
CHECK_PRIVILEGED
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 4);
tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
tcg_temp_free(addr);
case stpnum: \
prechk \
{ \
- TCGv addr = tcg_temp_new(TCG_TYPE_I32); \
+ TCGv addr = tcg_temp_new(); \
tcg_gen_subi_i32(addr, REG(B11_8), 4); \
tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
tcg_temp_free(addr); \
LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {})
LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {})
case 0x406a: /* lds Rm,FPSCR */
- tcg_gen_helper_0_1(helper_ld_fpscr, REG(B11_8));
+ gen_helper_ld_fpscr(REG(B11_8));
ctx->bstate = BS_STOP;
return;
case 0x4066: /* lds.l @Rm+,FPSCR */
{
- TCGv addr = tcg_temp_new(TCG_TYPE_I32);
+ TCGv addr = tcg_temp_new();
tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
- tcg_gen_helper_0_1(helper_ld_fpscr, addr);
+ gen_helper_ld_fpscr(addr);
tcg_temp_free(addr);
ctx->bstate = BS_STOP;
}
case 0x4062: /* sts FPSCR,@-Rn */
{
TCGv addr, val;
- val = tcg_temp_new(TCG_TYPE_I32);
+ val = tcg_temp_new();
tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
- addr = tcg_temp_new(TCG_TYPE_I32);
+ addr = tcg_temp_new();
tcg_gen_subi_i32(addr, REG(B11_8), 4);
tcg_gen_qemu_st32(val, addr, ctx->memidx);
tcg_temp_free(addr);
return;
case 0x0093: /* ocbi @Rn */
{
- TCGv dummy = tcg_temp_new(TCG_TYPE_I32);
+ TCGv dummy = tcg_temp_new();
tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
tcg_temp_free(dummy);
}
return;
case 0x00a3: /* ocbp @Rn */
{
- TCGv dummy = tcg_temp_new(TCG_TYPE_I32);
+ TCGv dummy = tcg_temp_new();
tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
tcg_temp_free(dummy);
}
return;
case 0x00b3: /* ocbwb @Rn */
{
- TCGv dummy = tcg_temp_new(TCG_TYPE_I32);
+ TCGv dummy = tcg_temp_new();
tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
tcg_temp_free(dummy);
}
return;
case 0x4024: /* rotcl Rn */
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv tmp = tcg_temp_new();
tcg_gen_mov_i32(tmp, cpu_sr);
gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
return;
case 0x4025: /* rotcr Rn */
{
- TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv tmp = tcg_temp_new();
tcg_gen_mov_i32(tmp, cpu_sr);
gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
return;
case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
{
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv fp = tcg_temp_new();
tcg_gen_mov_i32(fp, cpu_fpul);
gen_store_fpr32(fp, FREG(B11_8));
tcg_temp_free(fp);
return;
case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
{
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv fp = tcg_temp_new();
gen_load_fpr32(fp, FREG(B11_8));
tcg_gen_mov_i32(cpu_fpul, fp);
tcg_temp_free(fp);
return;
case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
if (ctx->fpscr & FPSCR_PR) {
- TCGv fp;
+ TCGv_i64 fp;
if (ctx->opcode & 0x0100)
break; /* illegal instruction */
- fp = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_helper_1_1(helper_float_DT, fp, cpu_fpul);
+ fp = tcg_temp_new_i64();
+ gen_helper_float_DT(fp, cpu_fpul);
gen_store_fpr64(fp, DREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
}
else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
- tcg_gen_helper_1_1(helper_float_FT, fp, cpu_fpul);
+ TCGv_i32 fp = tcg_temp_new_i32();
+ gen_helper_float_FT(fp, cpu_fpul);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
if (ctx->fpscr & FPSCR_PR) {
- TCGv fp;
+ TCGv_i64 fp;
if (ctx->opcode & 0x0100)
break; /* illegal instruction */
- fp = tcg_temp_new(TCG_TYPE_I64);
+ fp = tcg_temp_new_i64();
gen_load_fpr64(fp, DREG(B11_8));
- tcg_gen_helper_1_1(helper_ftrc_DT, cpu_fpul, fp);
- tcg_temp_free(fp);
+ gen_helper_ftrc_DT(cpu_fpul, fp);
+ tcg_temp_free_i64(fp);
}
else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B11_8));
- tcg_gen_helper_1_1(helper_ftrc_FT, cpu_fpul, fp);
- tcg_temp_free(fp);
+ gen_helper_ftrc_FT(cpu_fpul, fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
{
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B11_8));
- tcg_gen_helper_1_1(helper_fneg_T, fp, fp);
+ gen_helper_fneg_T(fp, fp);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf05d: /* fabs FRn/DRn */
if (ctx->fpscr & FPSCR_PR) {
if (ctx->opcode & 0x0100)
break; /* illegal instruction */
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
gen_load_fpr64(fp, DREG(B11_8));
- tcg_gen_helper_1_1(helper_fabs_DT, fp, fp);
+ gen_helper_fabs_DT(fp, fp);
gen_store_fpr64(fp, DREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B11_8));
- tcg_gen_helper_1_1(helper_fabs_FT, fp, fp);
+ gen_helper_fabs_FT(fp, fp);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf06d: /* fsqrt FRn */
if (ctx->fpscr & FPSCR_PR) {
if (ctx->opcode & 0x0100)
break; /* illegal instruction */
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
gen_load_fpr64(fp, DREG(B11_8));
- tcg_gen_helper_1_1(helper_fsqrt_DT, fp, fp);
+ gen_helper_fsqrt_DT(fp, fp);
gen_store_fpr64(fp, DREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
} else {
- TCGv fp = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 fp = tcg_temp_new_i32();
gen_load_fpr32(fp, FREG(B11_8));
- tcg_gen_helper_1_1(helper_fsqrt_FT, fp, fp);
+ gen_helper_fsqrt_FT(fp, fp);
gen_store_fpr32(fp, FREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i32(fp);
}
return;
case 0xf07d: /* fsrra FRn */
break;
case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
if (!(ctx->fpscr & FPSCR_PR)) {
- TCGv val = tcg_const_i32(0);
+ TCGv_i32 val = tcg_const_i32(0);
gen_store_fpr32(val, FREG(B11_8));
- tcg_temp_free(val);
+ tcg_temp_free_i32(val);
return;
}
break;
case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
if (!(ctx->fpscr & FPSCR_PR)) {
- TCGv val = tcg_const_i32(0x3f800000);
+ TCGv_i32 val = tcg_const_i32(0x3f800000);
gen_store_fpr32(val, FREG(B11_8));
- tcg_temp_free(val);
+ tcg_temp_free_i32(val);
return;
}
break;
case 0xf0ad: /* fcnvsd FPUL,DRn */
{
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
- tcg_gen_helper_1_1(helper_fcnvsd_FT_DT, fp, cpu_fpul);
+ TCGv_i64 fp = tcg_temp_new_i64();
+ gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
gen_store_fpr64(fp, DREG(B11_8));
- tcg_temp_free(fp);
+ tcg_temp_free_i64(fp);
}
return;
case 0xf0bd: /* fcnvds DRn,FPUL */
{
- TCGv fp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 fp = tcg_temp_new_i64();
gen_load_fpr64(fp, DREG(B11_8));
- tcg_gen_helper_1_1(helper_fcnvds_DT_FT, cpu_fpul, fp);
- tcg_temp_free(fp);
+ gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
+ tcg_temp_free_i64(fp);
}
return;
}
fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
ctx->opcode, ctx->pc);
- tcg_gen_helper_0_0(helper_raise_illegal_instruction);
+ gen_helper_raise_illegal_instruction();
ctx->bstate = BS_EXCP;
}
if (ctx.pc == env->breakpoints[i]) {
/* We have hit a breakpoint - make sure PC is up-to-date */
tcg_gen_movi_i32(cpu_pc, ctx.pc);
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
ctx.bstate = BS_EXCP;
break;
}
gen_io_end();
if (env->singlestep_enabled) {
tcg_gen_movi_i32(cpu_pc, ctx.pc);
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
} else {
switch (ctx.bstate) {
case BS_STOP:
-#ifndef DEF_HELPER
-#define DEF_HELPER(ret, name, params) ret name params;
-#endif
+#include "def-helper.h"
#ifndef TARGET_SPARC64
-DEF_HELPER(void, helper_rett, (void))
-DEF_HELPER(void, helper_wrpsr, (target_ulong new_psr))
-DEF_HELPER(target_ulong, helper_rdpsr, (void))
+DEF_HELPER_0(rett, void)
+DEF_HELPER_1(wrpsr, void, tl)
+DEF_HELPER_0(rdpsr, tl)
#else
-DEF_HELPER(void, helper_wrpstate, (target_ulong new_state))
-DEF_HELPER(void, helper_done, (void))
-DEF_HELPER(void, helper_retry, (void))
-DEF_HELPER(void, helper_flushw, (void))
-DEF_HELPER(void, helper_saved, (void))
-DEF_HELPER(void, helper_restored, (void))
-DEF_HELPER(target_ulong, helper_rdccr, (void))
-DEF_HELPER(void, helper_wrccr, (target_ulong new_ccr))
-DEF_HELPER(target_ulong, helper_rdcwp, (void))
-DEF_HELPER(void, helper_wrcwp, (target_ulong new_cwp))
-DEF_HELPER(target_ulong, helper_array8, (target_ulong pixel_addr, \
- target_ulong cubesize))
-DEF_HELPER(target_ulong, helper_alignaddr, (target_ulong addr, \
- target_ulong offset))
-DEF_HELPER(target_ulong, helper_popc, (target_ulong val))
-DEF_HELPER(void, helper_ldda_asi, (target_ulong addr, int asi, int rd))
-DEF_HELPER(void, helper_ldf_asi, (target_ulong addr, int asi, int size, int rd))
-DEF_HELPER(void, helper_stf_asi, (target_ulong addr, int asi, int size, int rd))
-DEF_HELPER(target_ulong, helper_cas_asi, (target_ulong addr, \
- target_ulong val1, \
- target_ulong val2, uint32_t asi))
-DEF_HELPER(target_ulong, helper_casx_asi, (target_ulong addr, \
- target_ulong val1, \
- target_ulong val2, uint32_t asi))
-DEF_HELPER(void, helper_set_softint, (uint64_t value))
-DEF_HELPER(void, helper_clear_softint, (uint64_t value))
-DEF_HELPER(void, helper_write_softint, (uint64_t value))
-DEF_HELPER(void, helper_tick_set_count, (void *opaque, uint64_t count))
-DEF_HELPER(uint64_t, helper_tick_get_count, (void *opaque))
-DEF_HELPER(void, helper_tick_set_limit, (void *opaque, uint64_t limit))
+DEF_HELPER_1(wrpstate, void, tl)
+DEF_HELPER_0(done, void)
+DEF_HELPER_0(retry, void)
+DEF_HELPER_0(flushw, void)
+DEF_HELPER_0(saved, void)
+DEF_HELPER_0(restored, void)
+DEF_HELPER_0(rdccr, tl)
+DEF_HELPER_1(wrccr, void, tl)
+DEF_HELPER_0(rdcwp, tl)
+DEF_HELPER_1(wrcwp, void, tl)
+DEF_HELPER_2(array8, tl, tl, tl)
+DEF_HELPER_2(alignaddr, tl, tl, tl)
+DEF_HELPER_1(popc, tl, tl)
+DEF_HELPER_3(ldda_asi, void, tl, int, int)
+DEF_HELPER_4(ldf_asi, void, tl, int, int, int)
+DEF_HELPER_4(stf_asi, void, tl, int, int, int)
+DEF_HELPER_4(cas_asi, tl, tl, tl, tl, i32)
+DEF_HELPER_4(casx_asi, tl, tl, tl, tl, i32)
+DEF_HELPER_1(set_softint, void, i64)
+DEF_HELPER_1(clear_softint, void, i64)
+DEF_HELPER_1(write_softint, void, i64)
+DEF_HELPER_2(tick_set_count, void, ptr, i64)
+DEF_HELPER_1(tick_get_count, i64, ptr)
+DEF_HELPER_2(tick_set_limit, void, ptr, i64)
#endif
-DEF_HELPER(void, helper_check_align, (target_ulong addr, uint32_t align))
-DEF_HELPER(void, helper_debug, (void))
-DEF_HELPER(void, helper_save, (void))
-DEF_HELPER(void, helper_restore, (void))
-DEF_HELPER(void, helper_flush, (target_ulong addr))
-DEF_HELPER(target_ulong, helper_udiv, (target_ulong a, target_ulong b))
-DEF_HELPER(target_ulong, helper_sdiv, (target_ulong a, target_ulong b))
-DEF_HELPER(void, helper_stdf, (target_ulong addr, int mem_idx))
-DEF_HELPER(void, helper_lddf, (target_ulong addr, int mem_idx))
-DEF_HELPER(void, helper_ldqf, (target_ulong addr, int mem_idx))
-DEF_HELPER(void, helper_stqf, (target_ulong addr, int mem_idx))
+DEF_HELPER_2(check_align, void, tl, i32)
+DEF_HELPER_0(debug, void)
+DEF_HELPER_0(save, void)
+DEF_HELPER_0(restore, void)
+DEF_HELPER_1(flush, void, tl)
+DEF_HELPER_2(udiv, tl, tl, tl)
+DEF_HELPER_2(sdiv, tl, tl, tl)
+DEF_HELPER_2(stdf, void, tl, int)
+DEF_HELPER_2(lddf, void, tl, int)
+DEF_HELPER_2(ldqf, void, tl, int)
+DEF_HELPER_2(stqf, void, tl, int)
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
-DEF_HELPER(uint64_t, helper_ld_asi, (target_ulong addr, int asi, int size, \
- int sign))
-DEF_HELPER(void, helper_st_asi, (target_ulong addr, uint64_t val, int asi, \
- int size))
+DEF_HELPER_4(ld_asi, i64, tl, int, int, int)
+DEF_HELPER_4(st_asi, void, tl, i64, int, int)
#endif
-DEF_HELPER(void, helper_ldfsr, (uint32_t new_fsr))
-DEF_HELPER(void, helper_check_ieee_exceptions, (void))
-DEF_HELPER(void, helper_clear_float_exceptions, (void))
-DEF_HELPER(float32, helper_fabss, (float32 src))
-DEF_HELPER(float32, helper_fsqrts, (float32 src))
-DEF_HELPER(void, helper_fsqrtd, (void))
-DEF_HELPER(void, helper_fcmps, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmpd, (void))
-DEF_HELPER(void, helper_fcmpes, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmped, (void))
-DEF_HELPER(void, helper_fsqrtq, (void))
-DEF_HELPER(void, helper_fcmpq, (void))
-DEF_HELPER(void, helper_fcmpeq, (void))
+DEF_HELPER_1(ldfsr, void, i32)
+DEF_HELPER_0(check_ieee_exceptions, void)
+DEF_HELPER_0(clear_float_exceptions, void)
+DEF_HELPER_1(fabss, f32, f32)
+DEF_HELPER_1(fsqrts, f32, f32)
+DEF_HELPER_0(fsqrtd, void)
+DEF_HELPER_2(fcmps, void, f32, f32)
+DEF_HELPER_0(fcmpd, void)
+DEF_HELPER_2(fcmpes, void, f32, f32)
+DEF_HELPER_0(fcmped, void)
+DEF_HELPER_0(fsqrtq, void)
+DEF_HELPER_0(fcmpq, void)
+DEF_HELPER_0(fcmpeq, void)
#ifdef TARGET_SPARC64
-DEF_HELPER(void, helper_ldxfsr, (uint64_t new_fsr))
-DEF_HELPER(void, helper_fabsd, (void))
-DEF_HELPER(void, helper_fcmps_fcc1, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmps_fcc2, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmps_fcc3, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmpd_fcc1, (void))
-DEF_HELPER(void, helper_fcmpd_fcc2, (void))
-DEF_HELPER(void, helper_fcmpd_fcc3, (void))
-DEF_HELPER(void, helper_fcmpes_fcc1, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmpes_fcc2, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmpes_fcc3, (float32 src1, float32 src2))
-DEF_HELPER(void, helper_fcmped_fcc1, (void))
-DEF_HELPER(void, helper_fcmped_fcc2, (void))
-DEF_HELPER(void, helper_fcmped_fcc3, (void))
-DEF_HELPER(void, helper_fabsq, (void))
-DEF_HELPER(void, helper_fcmpq_fcc1, (void))
-DEF_HELPER(void, helper_fcmpq_fcc2, (void))
-DEF_HELPER(void, helper_fcmpq_fcc3, (void))
-DEF_HELPER(void, helper_fcmpeq_fcc1, (void))
-DEF_HELPER(void, helper_fcmpeq_fcc2, (void))
-DEF_HELPER(void, helper_fcmpeq_fcc3, (void))
+DEF_HELPER_1(ldxfsr, void, i64)
+DEF_HELPER_0(fabsd, void)
+DEF_HELPER_2(fcmps_fcc1, void, f32, f32)
+DEF_HELPER_2(fcmps_fcc2, void, f32, f32)
+DEF_HELPER_2(fcmps_fcc3, void, f32, f32)
+DEF_HELPER_0(fcmpd_fcc1, void)
+DEF_HELPER_0(fcmpd_fcc2, void)
+DEF_HELPER_0(fcmpd_fcc3, void)
+DEF_HELPER_2(fcmpes_fcc1, void, f32, f32)
+DEF_HELPER_2(fcmpes_fcc2, void, f32, f32)
+DEF_HELPER_2(fcmpes_fcc3, void, f32, f32)
+DEF_HELPER_0(fcmped_fcc1, void)
+DEF_HELPER_0(fcmped_fcc2, void)
+DEF_HELPER_0(fcmped_fcc3, void)
+DEF_HELPER_0(fabsq, void)
+DEF_HELPER_0(fcmpq_fcc1, void)
+DEF_HELPER_0(fcmpq_fcc2, void)
+DEF_HELPER_0(fcmpq_fcc3, void)
+DEF_HELPER_0(fcmpeq_fcc1, void)
+DEF_HELPER_0(fcmpeq_fcc2, void)
+DEF_HELPER_0(fcmpeq_fcc3, void)
#endif
-DEF_HELPER(void, raise_exception, (int tt))
-#define F_HELPER_0_0(name) DEF_HELPER(void, helper_f ## name, (void))
+DEF_HELPER_1(raise_exception, void, int)
+#define F_HELPER_0_0(name) DEF_HELPER_0(f ## name, void)
#define F_HELPER_DQ_0_0(name) \
F_HELPER_0_0(name ## d); \
F_HELPER_0_0(name ## q)
F_HELPER_DQ_0_0(mul);
F_HELPER_DQ_0_0(div);
-DEF_HELPER(float32, helper_fadds, (float32 src1, float32 src2))
-DEF_HELPER(float32, helper_fsubs, (float32 src1, float32 src2))
-DEF_HELPER(float32, helper_fmuls, (float32 src1, float32 src2))
-DEF_HELPER(float32, helper_fdivs, (float32 src1, float32 src2))
+DEF_HELPER_2(fadds, f32, f32, f32)
+DEF_HELPER_2(fsubs, f32, f32, f32)
+DEF_HELPER_2(fmuls, f32, f32, f32)
+DEF_HELPER_2(fdivs, f32, f32, f32)
-DEF_HELPER(void, helper_fsmuld, (float32 src1, float32 src2))
+DEF_HELPER_2(fsmuld, void, f32, f32)
F_HELPER_0_0(dmulq);
-DEF_HELPER(float32, helper_fnegs, (float32 src))
-DEF_HELPER(void, helper_fitod, (int32_t src))
-DEF_HELPER(void, helper_fitoq, (int32_t src))
+DEF_HELPER_1(fnegs, f32, f32)
+DEF_HELPER_1(fitod, void, s32)
+DEF_HELPER_1(fitoq, void, s32)
-DEF_HELPER(float32, helper_fitos, (int32_t src))
+DEF_HELPER_1(fitos, f32, s32)
#ifdef TARGET_SPARC64
-DEF_HELPER(void, helper_fnegd, (void))
-DEF_HELPER(void, helper_fnegq, (void))
-DEF_HELPER(uint32_t, helper_fxtos, (void))
+DEF_HELPER_0(fnegd, void)
+DEF_HELPER_0(fnegq, void)
+DEF_HELPER_0(fxtos, i32)
F_HELPER_DQ_0_0(xto);
#endif
-DEF_HELPER(float32, helper_fdtos, (void))
-DEF_HELPER(void, helper_fstod, (float32 src))
-DEF_HELPER(float32, helper_fqtos, (void))
-DEF_HELPER(void, helper_fstoq, (float32 src))
+DEF_HELPER_0(fdtos, f32)
+DEF_HELPER_1(fstod, void, f32)
+DEF_HELPER_0(fqtos, f32)
+DEF_HELPER_1(fstoq, void, f32)
F_HELPER_0_0(qtod);
F_HELPER_0_0(dtoq);
-DEF_HELPER(int32_t, helper_fstoi, (float32 src))
-DEF_HELPER(int32_t, helper_fdtoi, (void))
-DEF_HELPER(int32_t, helper_fqtoi, (void))
+DEF_HELPER_1(fstoi, s32, f32)
+DEF_HELPER_0(fdtoi, s32)
+DEF_HELPER_0(fqtoi, s32)
#ifdef TARGET_SPARC64
-DEF_HELPER(void, helper_fstox, (uint32_t src))
+DEF_HELPER_1(fstox, void, i32)
F_HELPER_0_0(dtox);
F_HELPER_0_0(qtox);
F_HELPER_0_0(aligndata);
F_HELPER_0_0(expand);
#define VIS_HELPER(name) \
F_HELPER_0_0(name##16); \
- DEF_HELPER(uint32_t, helper_f ## name ## 16s, (uint32_t src1, uint32_t src2))\
+ DEF_HELPER_2(f ## name ## 16s, i32, i32, i32) \
F_HELPER_0_0(name##32); \
- DEF_HELPER(uint32_t, helper_f ## name ## 32s, (uint32_t src1, uint32_t src2))
+ DEF_HELPER_2(f ## name ## 32s, i32, i32, i32)
VIS_HELPER(padd);
VIS_HELPER(psub);
#undef F_HELPER_DQ_0_0
#undef VIS_HELPER
#undef VIS_CMPHELPER
+
+#include "def-helper.h"
cpu_loop_exit();
}
+void HELPER(raise_exception)(int tt)
+{
+ raise_exception(tt);
+}
+
static inline void set_cwp(int new_cwp)
{
cpu_set_cwp(env, new_cwp);
#include "helper.h"
#include "tcg-op.h"
+#define GEN_HELPER 1
+#include "helper.h"
+
#define DEBUG_DISAS
#define DYNAMIC_PC 1 /* dynamic pc value */
according to jump_pc[T2] */
/* global register indexes */
-static TCGv cpu_env, cpu_regwptr;
+static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
-static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
+static TCGv_i32 cpu_psr;
+static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
-static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
+static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
+static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
-static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver, cpu_softint;
+static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
+static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
-static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
+static TCGv cpu_tmp0;
+static TCGv_i32 cpu_tmp32;
+static TCGv_i64 cpu_tmp64;
/* Floating point registers */
-static TCGv cpu_fpr[TARGET_FPREGS];
+static TCGv_i32 cpu_fpr[TARGET_FPREGS];
#include "gen-icount.h"
}
// XXX suboptimal
-static inline void gen_mov_reg_N(TCGv reg, TCGv src)
+static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
}
-static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
+static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
}
-static inline void gen_mov_reg_V(TCGv reg, TCGv src)
+static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
tcg_gen_andi_tl(reg, reg, 0x1);
}
-static inline void gen_mov_reg_C(TCGv reg, TCGv src)
+static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
tcg_gen_extu_i32_tl(reg, src);
tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
l1 = gen_new_label();
l2 = gen_new_label();
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
int l1;
l1 = gen_new_label();
- r_temp1 = tcg_temp_new(TCG_TYPE_TL);
- r_temp2 = tcg_temp_new(TCG_TYPE_TL);
+ r_temp1 = tcg_temp_new();
+ r_temp2 = tcg_temp_new();
tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
{
TCGv r_temp;
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_not_tl(r_temp, r_temp);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
{
TCGv r_temp;
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_not_tl(r_temp, r_temp);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
- TCGv r_temp, r_const;
+ TCGv r_temp;
+ TCGv_i32 r_const;
int l1;
l1 = gen_new_label();
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_not_tl(r_temp, r_temp);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
r_const = tcg_const_i32(TT_TOVF);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
gen_set_label(l1);
tcg_temp_free(r_temp);
}
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
int l1;
- TCGv r_const;
+ TCGv_i32 r_const;
l1 = gen_new_label();
tcg_gen_or_tl(cpu_tmp0, src1, src2);
tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
r_const = tcg_const_i32(TT_TOVF);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
gen_set_label(l1);
}
int l1;
l1 = gen_new_label();
- r_temp1 = tcg_temp_new(TCG_TYPE_TL);
- r_temp2 = tcg_temp_new(TCG_TYPE_TL);
+ r_temp1 = tcg_temp_new();
+ r_temp2 = tcg_temp_new();
tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
{
TCGv r_temp;
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
{
TCGv r_temp;
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
- TCGv r_temp, r_const;
+ TCGv r_temp;
+ TCGv_i32 r_const;
int l1;
l1 = gen_new_label();
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
tcg_gen_xor_tl(r_temp, src1, src2);
tcg_gen_xor_tl(cpu_tmp0, src1, dst);
tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
r_const = tcg_const_i32(TT_TOVF);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
gen_set_label(l1);
tcg_temp_free(r_temp);
}
int l1;
l1 = gen_new_label();
- r_temp = tcg_temp_new(TCG_TYPE_TL);
+ r_temp = tcg_temp_new();
/* old op:
if (!(env->y & 1))
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
- TCGv r_temp, r_temp2;
+ TCGv_i64 r_temp, r_temp2;
- r_temp = tcg_temp_new(TCG_TYPE_I64);
- r_temp2 = tcg_temp_new(TCG_TYPE_I64);
+ r_temp = tcg_temp_new_i64();
+ r_temp2 = tcg_temp_new_i64();
tcg_gen_extu_tl_i64(r_temp, src2);
tcg_gen_extu_tl_i64(r_temp2, src1);
tcg_gen_shri_i64(r_temp, r_temp2, 32);
tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
- tcg_temp_free(r_temp);
+ tcg_temp_free_i64(r_temp);
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
tcg_gen_mov_i64(dst, r_temp2);
#else
tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
- tcg_temp_free(r_temp2);
+ tcg_temp_free_i64(r_temp2);
}
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
- TCGv r_temp, r_temp2;
+ TCGv_i64 r_temp, r_temp2;
- r_temp = tcg_temp_new(TCG_TYPE_I64);
- r_temp2 = tcg_temp_new(TCG_TYPE_I64);
+ r_temp = tcg_temp_new_i64();
+ r_temp2 = tcg_temp_new_i64();
tcg_gen_ext_tl_i64(r_temp, src2);
tcg_gen_ext_tl_i64(r_temp2, src1);
tcg_gen_shri_i64(r_temp, r_temp2, 32);
tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
- tcg_temp_free(r_temp);
+ tcg_temp_free_i64(r_temp);
tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
tcg_gen_mov_i64(dst, r_temp2);
#else
tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
- tcg_temp_free(r_temp2);
+ tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
- TCGv r_const;
+ TCGv_i32 r_const;
int l1;
l1 = gen_new_label();
tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
r_const = tcg_const_i32(TT_DIV_ZERO);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
gen_set_label(l1);
}
}
// Z
-static inline void gen_op_eval_be(TCGv dst, TCGv src)
+static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_Z(dst, src);
}
// Z | (N ^ V)
-static inline void gen_op_eval_ble(TCGv dst, TCGv src)
+static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_N(cpu_tmp0, src);
gen_mov_reg_V(dst, src);
}
// N ^ V
-static inline void gen_op_eval_bl(TCGv dst, TCGv src)
+static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_V(cpu_tmp0, src);
gen_mov_reg_N(dst, src);
}
// C | Z
-static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
+static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_Z(cpu_tmp0, src);
gen_mov_reg_C(dst, src);
}
// C
-static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
+static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_C(dst, src);
}
// V
-static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
+static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_V(dst, src);
}
}
// N
-static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
+static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_N(dst, src);
}
// !Z
-static inline void gen_op_eval_bne(TCGv dst, TCGv src)
+static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_Z(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
}
// !(Z | (N ^ V))
-static inline void gen_op_eval_bg(TCGv dst, TCGv src)
+static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_N(cpu_tmp0, src);
gen_mov_reg_V(dst, src);
}
// !(N ^ V)
-static inline void gen_op_eval_bge(TCGv dst, TCGv src)
+static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_V(cpu_tmp0, src);
gen_mov_reg_N(dst, src);
}
// !(C | Z)
-static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
+static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_Z(cpu_tmp0, src);
gen_mov_reg_C(dst, src);
}
// !C
-static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
+static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_C(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
}
// !N
-static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
+static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_N(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
}
// !V
-static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
+static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
gen_mov_reg_V(dst, src);
tcg_gen_xori_tl(dst, dst, 0x1);
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
- TCGv r_src;
+ TCGv_i32 r_src;
#ifdef TARGET_SPARC64
if (cc)
}
}
-static GenOpFunc * const gen_fcmpd[4] = {
- helper_fcmpd,
- helper_fcmpd_fcc1,
- helper_fcmpd_fcc2,
- helper_fcmpd_fcc3,
-};
-
-static GenOpFunc * const gen_fcmpq[4] = {
- helper_fcmpq,
- helper_fcmpq_fcc1,
- helper_fcmpq_fcc2,
- helper_fcmpq_fcc3,
-};
-
-static GenOpFunc * const gen_fcmped[4] = {
- helper_fcmped,
- helper_fcmped_fcc1,
- helper_fcmped_fcc2,
- helper_fcmped_fcc3,
-};
-
-static GenOpFunc * const gen_fcmpeq[4] = {
- helper_fcmpeq,
- helper_fcmpeq_fcc1,
- helper_fcmpeq_fcc2,
- helper_fcmpeq_fcc3,
-};
-
-static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
+static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
switch (fccno) {
case 0:
- tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
+ gen_helper_fcmps(r_rs1, r_rs2);
break;
case 1:
- tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
+ gen_helper_fcmps_fcc1(r_rs1, r_rs2);
break;
case 2:
- tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
+ gen_helper_fcmps_fcc2(r_rs1, r_rs2);
break;
case 3:
- tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
+ gen_helper_fcmps_fcc3(r_rs1, r_rs2);
break;
}
}
static inline void gen_op_fcmpd(int fccno)
{
- tcg_gen_helper_0_0(gen_fcmpd[fccno]);
+ switch (fccno) {
+ case 0:
+ gen_helper_fcmpd();
+ break;
+ case 1:
+ gen_helper_fcmpd_fcc1();
+ break;
+ case 2:
+ gen_helper_fcmpd_fcc2();
+ break;
+ case 3:
+ gen_helper_fcmpd_fcc3();
+ break;
+ }
}
static inline void gen_op_fcmpq(int fccno)
{
- tcg_gen_helper_0_0(gen_fcmpq[fccno]);
+ switch (fccno) {
+ case 0:
+ gen_helper_fcmpq();
+ break;
+ case 1:
+ gen_helper_fcmpq_fcc1();
+ break;
+ case 2:
+ gen_helper_fcmpq_fcc2();
+ break;
+ case 3:
+ gen_helper_fcmpq_fcc3();
+ break;
+ }
}
-static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
+static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
switch (fccno) {
case 0:
- tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
+ gen_helper_fcmpes(r_rs1, r_rs2);
break;
case 1:
- tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
+ gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
break;
case 2:
- tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
+ gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
break;
case 3:
- tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
+ gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
break;
}
}
static inline void gen_op_fcmped(int fccno)
{
- tcg_gen_helper_0_0(gen_fcmped[fccno]);
+ switch (fccno) {
+ case 0:
+ gen_helper_fcmped();
+ break;
+ case 1:
+ gen_helper_fcmped_fcc1();
+ break;
+ case 2:
+ gen_helper_fcmped_fcc2();
+ break;
+ case 3:
+ gen_helper_fcmped_fcc3();
+ break;
+ }
}
static inline void gen_op_fcmpeq(int fccno)
{
- tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
+ switch (fccno) {
+ case 0:
+ gen_helper_fcmpeq();
+ break;
+ case 1:
+ gen_helper_fcmpeq_fcc1();
+ break;
+ case 2:
+ gen_helper_fcmpeq_fcc2();
+ break;
+ case 3:
+ gen_helper_fcmpeq_fcc3();
+ break;
+ }
}
#else
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
- tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
+ gen_helper_fcmps(r_rs1, r_rs2);
}
static inline void gen_op_fcmpd(int fccno)
{
- tcg_gen_helper_0_0(helper_fcmpd);
+ gen_helper_fcmpd();
}
static inline void gen_op_fcmpq(int fccno)
{
- tcg_gen_helper_0_0(helper_fcmpq);
+ gen_helper_fcmpq();
}
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
- tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
+ gen_helper_fcmpes(r_rs1, r_rs2);
}
static inline void gen_op_fcmped(int fccno)
{
- tcg_gen_helper_0_0(helper_fcmped);
+ gen_helper_fcmped();
}
static inline void gen_op_fcmpeq(int fccno)
{
- tcg_gen_helper_0_0(helper_fcmpeq);
+ gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
- TCGv r_const;
+ TCGv_i32 r_const;
tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
r_const = tcg_const_i32(TT_FP_EXCP);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
if (!dc->fpu_enabled) {
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, r_cond);
r_const = tcg_const_i32(TT_NFPU_INSN);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
dc->is_br = 1;
return 1;
}
static inline void gen_clear_float_exceptions(void)
{
- tcg_gen_helper_0_0(helper_clear_float_exceptions);
+ gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
-static inline TCGv gen_get_asi(int insn, TCGv r_addr)
+static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
int asi;
- TCGv r_asi;
+ TCGv_i32 r_asi;
if (IS_IMM) {
- r_asi = tcg_temp_new(TCG_TYPE_I32);
+ r_asi = tcg_temp_new_i32();
tcg_gen_mov_i32(r_asi, cpu_asi);
} else {
asi = GET_FIELD(insn, 19, 26);
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
int sign)
{
- TCGv r_asi, r_size, r_sign;
+ TCGv_i32 r_asi, r_size, r_sign;
r_asi = gen_get_asi(insn, addr);
r_size = tcg_const_i32(size);
r_sign = tcg_const_i32(sign);
- tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
- tcg_temp_free(r_sign);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
+ gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
+ tcg_temp_free_i32(r_sign);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
- TCGv r_asi, r_size;
+ TCGv_i32 r_asi, r_size;
r_asi = gen_get_asi(insn, addr);
r_size = tcg_const_i32(size);
- tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
+ gen_helper_st_asi(addr, src, r_asi, r_size);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
- TCGv r_asi, r_size, r_rd;
+ TCGv_i32 r_asi, r_size, r_rd;
r_asi = gen_get_asi(insn, addr);
r_size = tcg_const_i32(size);
r_rd = tcg_const_i32(rd);
- tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
- tcg_temp_free(r_rd);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
+ gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
+ tcg_temp_free_i32(r_rd);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
}
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
- TCGv r_asi, r_size, r_rd;
+ TCGv_i32 r_asi, r_size, r_rd;
r_asi = gen_get_asi(insn, addr);
r_size = tcg_const_i32(size);
r_rd = tcg_const_i32(rd);
- tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
- tcg_temp_free(r_rd);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
+ gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
+ tcg_temp_free_i32(r_rd);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
- TCGv r_asi, r_size, r_sign;
+ TCGv_i32 r_asi, r_size, r_sign;
r_asi = gen_get_asi(insn, addr);
r_size = tcg_const_i32(4);
r_sign = tcg_const_i32(0);
- tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
- tcg_temp_free(r_sign);
- tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
+ gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
+ tcg_temp_free_i32(r_sign);
+ gen_helper_st_asi(addr, dst, r_asi, r_size);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
- TCGv r_asi, r_rd;
+ TCGv_i32 r_asi, r_rd;
r_asi = gen_get_asi(insn, addr);
r_rd = tcg_const_i32(rd);
- tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
- tcg_temp_free(r_rd);
- tcg_temp_free(r_asi);
+ gen_helper_ldda_asi(addr, r_asi, r_rd);
+ tcg_temp_free_i32(r_rd);
+ tcg_temp_free_i32(r_asi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
- TCGv r_asi, r_size;
+ TCGv_i32 r_asi, r_size;
gen_movl_reg_TN(rd + 1, cpu_tmp0);
tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
r_asi = gen_get_asi(insn, addr);
r_size = tcg_const_i32(8);
- tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
+ gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
int rd)
{
- TCGv r_val1, r_asi;
+ TCGv r_val1;
+ TCGv_i32 r_asi;
- r_val1 = tcg_temp_new(TCG_TYPE_TL);
+ r_val1 = tcg_temp_new();
gen_movl_reg_TN(rd, r_val1);
r_asi = gen_get_asi(insn, addr);
- tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
- tcg_temp_free(r_asi);
+ gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
+ tcg_temp_free_i32(r_asi);
tcg_temp_free(r_val1);
}
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
int rd)
{
- TCGv r_asi;
+ TCGv_i32 r_asi;
gen_movl_reg_TN(rd, cpu_tmp64);
r_asi = gen_get_asi(insn, addr);
- tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
- tcg_temp_free(r_asi);
+ gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
+ tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
int sign)
{
- TCGv r_asi, r_size, r_sign;
+ TCGv_i32 r_asi, r_size, r_sign;
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
r_size = tcg_const_i32(size);
r_sign = tcg_const_i32(sign);
- tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
+ gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
tcg_temp_free(r_sign);
tcg_temp_free(r_size);
tcg_temp_free(r_asi);
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
- TCGv r_asi, r_size;
+ TCGv_i32 r_asi, r_size;
tcg_gen_extu_tl_i64(cpu_tmp64, src);
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
r_size = tcg_const_i32(size);
- tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
+ gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
tcg_temp_free(r_size);
tcg_temp_free(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
- TCGv r_asi, r_size, r_sign;
+ TCGv_i32 r_asi, r_size, r_sign;
+ TCGv_i64 r_val;
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
r_size = tcg_const_i32(4);
r_sign = tcg_const_i32(0);
- tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
+ gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
tcg_temp_free(r_sign);
- tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
+ r_val = tcg_temp_new_i64();
+ tcg_gen_extu_tl_i64(r_val, dst);
+ gen_helper_st_asi(addr, r_val, r_asi, r_size);
+ tcg_temp_free_i64(r_val);
tcg_temp_free(r_size);
tcg_temp_free(r_asi);
tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
- TCGv r_asi, r_size, r_sign;
+ TCGv_i32 r_asi, r_size, r_sign;
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
r_size = tcg_const_i32(8);
r_sign = tcg_const_i32(0);
- tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
+ gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
tcg_temp_free(r_sign);
tcg_temp_free(r_size);
tcg_temp_free(r_asi);
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
- TCGv r_asi, r_size;
+ TCGv_i32 r_asi, r_size;
gen_movl_reg_TN(rd + 1, cpu_tmp0);
tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
r_size = tcg_const_i32(8);
- tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
+ gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
tcg_temp_free(r_size);
tcg_temp_free(r_asi);
}
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
- TCGv r_val, r_asi, r_size;
+ TCGv_i64 r_val;
+ TCGv_i32 r_asi, r_size;
gen_ld_asi(dst, addr, insn, 1, 0);
r_val = tcg_const_i64(0xffULL);
r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
r_size = tcg_const_i32(1);
- tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
- tcg_temp_free(r_size);
- tcg_temp_free(r_asi);
- tcg_temp_free(r_val);
+ gen_helper_st_asi(addr, r_val, r_asi, r_size);
+ tcg_temp_free_i32(r_size);
+ tcg_temp_free_i32(r_asi);
+ tcg_temp_free_i64(r_val);
}
#endif
rd = GET_FIELD(insn, 2, 6);
- cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
- cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
+ cpu_src1 = tcg_temp_new(); // const
+ cpu_src2 = tcg_temp_new(); // const
switch (opc) {
case 0: /* branches/sethi */
else
tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
- tcg_gen_helper_0_1(raise_exception, cpu_dst);
+ tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
+ gen_helper_raise_exception(cpu_tmp32);
} else if (cond != 0) {
- TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
+ TCGv r_cond = tcg_temp_new();
int l1;
#ifdef TARGET_SPARC64
/* V9 icc/xcc */
else
tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
- tcg_gen_helper_0_1(raise_exception, cpu_dst);
+ tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
+ gen_helper_raise_exception(cpu_tmp32);
gen_set_label(l1);
tcg_temp_free(r_cond);
break;
#ifdef TARGET_SPARC64
case 0x2: /* V9 rdccr */
- tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
+ gen_helper_rdccr(cpu_dst);
gen_movl_TN_reg(rd, cpu_dst);
break;
case 0x3: /* V9 rdasi */
break;
case 0x4: /* V9 rdtick */
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, tick));
- tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
- r_tickptr);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_get_count(cpu_dst, r_tickptr);
+ tcg_temp_free_ptr(r_tickptr);
gen_movl_TN_reg(rd, cpu_dst);
}
break;
break;
case 0x18: /* System tick */
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, stick));
- tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
- r_tickptr);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_get_count(cpu_dst, r_tickptr);
+ tcg_temp_free_ptr(r_tickptr);
gen_movl_TN_reg(rd, cpu_dst);
}
break;
#ifndef TARGET_SPARC64
if (!supervisor(dc))
goto priv_insn;
- tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
+ gen_helper_rdpsr(cpu_dst);
#else
CHECK_IU_FEATURE(dc, HYPV);
if (!hypervisor(dc))
switch (rs1) {
case 0: // tpc
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
- tcg_gen_ld_tl(cpu_tmp32, r_tsptr,
+ tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tpc));
- tcg_temp_free(r_tsptr);
- tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 1: // tnpc
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tnpc));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 2: // tstate
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tstate));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 3: // tt
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
- tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
+ tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
offsetof(trap_state, tt));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
+ tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
}
break;
case 4: // tick
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, tick));
- tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
- r_tickptr);
+ gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
gen_movl_TN_reg(rd, cpu_tmp0);
- tcg_temp_free(r_tickptr);
+ tcg_temp_free_ptr(r_tickptr);
}
break;
case 5: // tba
tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
break;
case 9: // cwp
- tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
+ gen_helper_rdcwp(cpu_tmp0);
break;
case 10: // cansave
tcg_gen_ld_i32(cpu_tmp32, cpu_env,
} else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
save_state(dc, cpu_cond);
- tcg_gen_helper_0_0(helper_flushw);
+ gen_helper_flushw();
#else
if (!supervisor(dc))
goto priv_insn;
tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
break;
case 0x5: /* fnegs */
- tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
- cpu_fpr[rs2]);
+ gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
break;
case 0x9: /* fabss */
- tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
- cpu_fpr[rs2]);
+ gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
break;
case 0x29: /* fsqrts */
CHECK_FPU_FEATURE(dc, FSQRT);
gen_clear_float_exceptions();
- tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
- cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0x2a: /* fsqrtd */
CHECK_FPU_FEATURE(dc, FSQRT);
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fsqrtd);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsqrtd();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x2b: /* fsqrtq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fsqrtq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsqrtq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0x41: /* fadds */
gen_clear_float_exceptions();
- tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
- cpu_fpr[rs1], cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fadds(cpu_tmp32,
+ cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0x42:
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_faddd);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_faddd();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x43: /* faddq */
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_faddq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_faddq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0x45: /* fsubs */
gen_clear_float_exceptions();
- tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
- cpu_fpr[rs1], cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsubs(cpu_tmp32,
+ cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0x46:
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fsubd);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsubd();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x47: /* fsubq */
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fsubq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsubq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0x49: /* fmuls */
CHECK_FPU_FEATURE(dc, FMUL);
gen_clear_float_exceptions();
- tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
- cpu_fpr[rs1], cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fmuls(cpu_tmp32,
+ cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0x4a: /* fmuld */
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fmuld);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fmuld();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x4b: /* fmulq */
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fmulq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fmulq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0x4d: /* fdivs */
gen_clear_float_exceptions();
- tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
- cpu_fpr[rs1], cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdivs(cpu_tmp32,
+ cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0x4e:
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fdivd);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdivd();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x4f: /* fdivq */
gen_op_load_fpr_QT0(QFPREG(rs1));
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fdivq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdivq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0x69: /* fsmuld */
CHECK_FPU_FEATURE(dc, FSMULD);
gen_clear_float_exceptions();
- tcg_gen_helper_0_2(helper_fsmuld, cpu_fpr[rs1],
- cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x6e: /* fdmulq */
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fdmulq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdmulq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0xc4: /* fitos */
gen_clear_float_exceptions();
- tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
- cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0xc6: /* fdtos */
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_1_0(helper_fdtos, cpu_tmp32);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdtos(cpu_tmp32);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0xc7: /* fqtos */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fqtos(cpu_tmp32);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0xc8: /* fitod */
- tcg_gen_helper_0_1(helper_fitod, cpu_fpr[rs2]);
+ gen_helper_fitod(cpu_fpr[rs2]);
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0xc9: /* fstod */
- tcg_gen_helper_0_1(helper_fstod, cpu_fpr[rs2]);
+ gen_helper_fstod(cpu_fpr[rs2]);
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0xcb: /* fqtod */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fqtod);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fqtod();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0xcc: /* fitoq */
CHECK_FPU_FEATURE(dc, FLOAT128);
- tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
+ gen_helper_fitoq(cpu_fpr[rs2]);
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0xcd: /* fstoq */
CHECK_FPU_FEATURE(dc, FLOAT128);
- tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
+ gen_helper_fstoq(cpu_fpr[rs2]);
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0xce: /* fdtoq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fdtoq);
+ gen_helper_fdtoq();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0xd1: /* fstoi */
gen_clear_float_exceptions();
- tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
- cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0xd2: /* fdtoi */
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_1_0(helper_fdtoi, cpu_tmp32);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdtoi(cpu_tmp32);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0xd3: /* fqtoi */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fqtoi(cpu_tmp32);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
#ifdef TARGET_SPARC64
break;
case 0x6: /* V9 fnegd */
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fnegd);
+ gen_helper_fnegd();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x7: /* V9 fnegq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
- tcg_gen_helper_0_0(helper_fnegq);
+ gen_helper_fnegq();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0xa: /* V9 fabsd */
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fabsd);
+ gen_helper_fabsd();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0xb: /* V9 fabsq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
- tcg_gen_helper_0_0(helper_fabsq);
+ gen_helper_fabsq();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
case 0x81: /* V9 fstox */
gen_clear_float_exceptions();
- tcg_gen_helper_0_1(helper_fstox, cpu_fpr[rs2]);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fstox(cpu_fpr[rs2]);
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x82: /* V9 fdtox */
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fdtox);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fdtox();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x83: /* V9 fqtox */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT1(QFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fqtox);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fqtox();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x84: /* V9 fxtos */
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_1_0(helper_fxtos, cpu_tmp32);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fxtos(cpu_tmp32);
+ gen_helper_check_ieee_exceptions();
tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
break;
case 0x88: /* V9 fxtod */
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fxtod);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fxtod();
+ gen_helper_check_ieee_exceptions();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x8c: /* V9 fxtoq */
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_DT1(DFPREG(rs2));
gen_clear_float_exceptions();
- tcg_gen_helper_0_0(helper_fxtoq);
- tcg_gen_helper_0_0(helper_check_ieee_exceptions);
+ gen_helper_fxtoq();
+ gen_helper_check_ieee_exceptions();
gen_op_store_QT0_fpr(QFPREG(rd));
break;
#endif
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_fcond(r_cond, fcc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_fcond(r_cond, fcc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_fcond(r_cond, fcc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_cond(r_cond, icc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_cond(r_cond, icc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_cond(r_cond, icc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
int l1; \
\
l1 = gen_new_label(); \
- r_cond = tcg_temp_new(TCG_TYPE_TL); \
+ r_cond = tcg_temp_new(); \
cond = GET_FIELD_SP(insn, 14, 17); \
gen_cond(r_cond, icc, cond); \
tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
#endif
case 0xe:
CHECK_IU_FEATURE(dc, DIV);
- tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
- cpu_src2);
+ gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
if (xop & 0x10)
gen_op_div_cc(cpu_dst);
break;
case 0xf:
CHECK_IU_FEATURE(dc, DIV);
- tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
- cpu_src2);
+ gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
if (xop & 0x10)
gen_op_div_cc(cpu_dst);
break;
#else
case 0x2: /* V9 wrccr */
tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
- tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
+ gen_helper_wrccr(cpu_dst);
break;
case 0x3: /* V9 wrasi */
tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
if (!supervisor(dc))
goto illegal_insn;
tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
- tcg_gen_helper_0_1(helper_set_softint,
- cpu_tmp64);
+ gen_helper_set_softint(cpu_tmp64);
break;
case 0x15: /* Softint clear */
if (!supervisor(dc))
goto illegal_insn;
tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
- tcg_gen_helper_0_1(helper_clear_softint,
- cpu_tmp64);
+ gen_helper_clear_softint(cpu_tmp64);
break;
case 0x16: /* Softint write */
if (!supervisor(dc))
goto illegal_insn;
tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
- tcg_gen_helper_0_1(helper_write_softint,
- cpu_tmp64);
+ gen_helper_write_softint(cpu_tmp64);
break;
case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
goto illegal_insn;
#endif
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
cpu_src2);
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, tick));
- tcg_gen_helper_0_2(helper_tick_set_limit,
- r_tickptr, cpu_tick_cmpr);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_set_limit(r_tickptr,
+ cpu_tick_cmpr);
+ tcg_temp_free_ptr(r_tickptr);
}
break;
case 0x18: /* System tick */
goto illegal_insn;
#endif
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
tcg_gen_xor_tl(cpu_dst, cpu_src1,
cpu_src2);
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, stick));
- tcg_gen_helper_0_2(helper_tick_set_count,
- r_tickptr, cpu_dst);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_set_count(r_tickptr,
+ cpu_dst);
+ tcg_temp_free_ptr(r_tickptr);
}
break;
case 0x19: /* System tick compare */
goto illegal_insn;
#endif
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
cpu_src2);
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, stick));
- tcg_gen_helper_0_2(helper_tick_set_limit,
- r_tickptr, cpu_stick_cmpr);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_set_limit(r_tickptr,
+ cpu_stick_cmpr);
+ tcg_temp_free_ptr(r_tickptr);
}
break;
#ifdef TARGET_SPARC64
switch (rd) {
case 0:
- tcg_gen_helper_0_0(helper_saved);
+ gen_helper_saved();
break;
case 1:
- tcg_gen_helper_0_0(helper_restored);
+ gen_helper_restored();
break;
case 2: /* UA2005 allclean */
case 3: /* UA2005 otherw */
}
#else
tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
- tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
+ gen_helper_wrpsr(cpu_dst);
save_state(dc, cpu_cond);
gen_op_next_insn();
tcg_gen_exit_tb(0);
switch (rd) {
case 0: // tpc
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
tcg_gen_st_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tpc));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 1: // tnpc
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
tcg_gen_st_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state, tnpc));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 2: // tstate
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
tcg_gen_st_tl(cpu_tmp0, r_tsptr,
offsetof(trap_state,
tstate));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 3: // tt
{
- TCGv r_tsptr;
+ TCGv_ptr r_tsptr;
- r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tsptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tsptr, cpu_env,
offsetof(CPUState, tsptr));
tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
tcg_gen_st_i32(cpu_tmp32, r_tsptr,
offsetof(trap_state, tt));
- tcg_temp_free(r_tsptr);
+ tcg_temp_free_ptr(r_tsptr);
}
break;
case 4: // tick
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, tick));
- tcg_gen_helper_0_2(helper_tick_set_count,
- r_tickptr, cpu_tmp0);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_set_count(r_tickptr,
+ cpu_tmp0);
+ tcg_temp_free_ptr(r_tickptr);
}
break;
case 5: // tba
break;
case 6: // pstate
save_state(dc, cpu_cond);
- tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
+ gen_helper_wrpstate(cpu_tmp0);
gen_op_next_insn();
tcg_gen_exit_tb(0);
dc->is_br = 1;
psrpil));
break;
case 9: // cwp
- tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
+ gen_helper_wrcwp(cpu_tmp0);
break;
case 10: // cansave
tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
break;
case 31: // hstick_cmpr
{
- TCGv r_tickptr;
+ TCGv_ptr r_tickptr;
tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
- r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
+ r_tickptr = tcg_temp_new_ptr();
tcg_gen_ld_ptr(r_tickptr, cpu_env,
offsetof(CPUState, hstick));
- tcg_gen_helper_0_2(helper_tick_set_limit,
- r_tickptr, cpu_hstick_cmpr);
- tcg_temp_free(r_tickptr);
+ gen_helper_tick_set_limit(r_tickptr,
+ cpu_hstick_cmpr);
+ tcg_temp_free_ptr(r_tickptr);
}
break;
case 6: // hver readonly
TCGv r_cond;
int l1;
- r_cond = tcg_temp_new(TCG_TYPE_TL);
+ r_cond = tcg_temp_new();
if (insn & (1 << 18)) {
if (cc == 0)
gen_cond(r_cond, 0, cond);
case 0x2e: /* V9 popc */
{
cpu_src2 = get_src2(insn, cpu_src2);
- tcg_gen_helper_1_1(helper_popc, cpu_dst,
- cpu_src2);
+ gen_helper_popc(cpu_dst, cpu_src2);
gen_movl_TN_reg(rd, cpu_dst);
}
case 0x2f: /* V9 movr */
CHECK_FPU_FEATURE(dc, VIS1);
cpu_src1 = get_src1(insn, cpu_src1);
gen_movl_reg_TN(rs2, cpu_src2);
- tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
- cpu_src2);
+ gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
gen_movl_TN_reg(rd, cpu_dst);
break;
case 0x012: /* VIS I array16 */
CHECK_FPU_FEATURE(dc, VIS1);
cpu_src1 = get_src1(insn, cpu_src1);
gen_movl_reg_TN(rs2, cpu_src2);
- tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
- cpu_src2);
+ gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
gen_movl_TN_reg(rd, cpu_dst);
break;
CHECK_FPU_FEATURE(dc, VIS1);
cpu_src1 = get_src1(insn, cpu_src1);
gen_movl_reg_TN(rs2, cpu_src2);
- tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
- cpu_src2);
+ gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
gen_movl_TN_reg(rd, cpu_dst);
break;
CHECK_FPU_FEATURE(dc, VIS1);
cpu_src1 = get_src1(insn, cpu_src1);
gen_movl_reg_TN(rs2, cpu_src2);
- tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
- cpu_src2);
+ gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
gen_movl_TN_reg(rd, cpu_dst);
break;
case 0x019: /* VIS II bmask */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmple16);
+ gen_helper_fcmple16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x022: /* VIS I fcmpne16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmpne16);
+ gen_helper_fcmpne16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x024: /* VIS I fcmple32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmple32);
+ gen_helper_fcmple32();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x026: /* VIS I fcmpne32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmpne32);
+ gen_helper_fcmpne32();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x028: /* VIS I fcmpgt16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmpgt16);
+ gen_helper_fcmpgt16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x02a: /* VIS I fcmpeq16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmpeq16);
+ gen_helper_fcmpeq16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x02c: /* VIS I fcmpgt32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmpgt32);
+ gen_helper_fcmpgt32();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x02e: /* VIS I fcmpeq32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fcmpeq32);
+ gen_helper_fcmpeq32();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x031: /* VIS I fmul8x16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmul8x16);
+ gen_helper_fmul8x16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x033: /* VIS I fmul8x16au */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmul8x16au);
+ gen_helper_fmul8x16au();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x035: /* VIS I fmul8x16al */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmul8x16al);
+ gen_helper_fmul8x16al();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x036: /* VIS I fmul8sux16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmul8sux16);
+ gen_helper_fmul8sux16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x037: /* VIS I fmul8ulx16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmul8ulx16);
+ gen_helper_fmul8ulx16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x038: /* VIS I fmuld8sux16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmuld8sux16);
+ gen_helper_fmuld8sux16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x039: /* VIS I fmuld8ulx16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fmuld8ulx16);
+ gen_helper_fmuld8ulx16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x03a: /* VIS I fpack32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_faligndata);
+ gen_helper_faligndata();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x04b: /* VIS I fpmerge */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fpmerge);
+ gen_helper_fpmerge();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x04c: /* VIS II bshuffle */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fexpand);
+ gen_helper_fexpand();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x050: /* VIS I fpadd16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fpadd16);
+ gen_helper_fpadd16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x051: /* VIS I fpadd16s */
CHECK_FPU_FEATURE(dc, VIS1);
- tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
- cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_fpadd16s(cpu_fpr[rd],
+ cpu_fpr[rs1], cpu_fpr[rs2]);
break;
case 0x052: /* VIS I fpadd32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fpadd32);
+ gen_helper_fpadd32();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x053: /* VIS I fpadd32s */
CHECK_FPU_FEATURE(dc, VIS1);
- tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
- cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_fpadd32s(cpu_fpr[rd],
+ cpu_fpr[rs1], cpu_fpr[rs2]);
break;
case 0x054: /* VIS I fpsub16 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fpsub16);
+ gen_helper_fpsub16();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x055: /* VIS I fpsub16s */
CHECK_FPU_FEATURE(dc, VIS1);
- tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
- cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_fpsub16s(cpu_fpr[rd],
+ cpu_fpr[rs1], cpu_fpr[rs2]);
break;
case 0x056: /* VIS I fpsub32 */
CHECK_FPU_FEATURE(dc, VIS1);
gen_op_load_fpr_DT0(DFPREG(rs1));
gen_op_load_fpr_DT1(DFPREG(rs2));
- tcg_gen_helper_0_0(helper_fpsub32);
+ gen_helper_fpsub32();
gen_op_store_DT0_fpr(DFPREG(rd));
break;
case 0x057: /* VIS I fpsub32s */
CHECK_FPU_FEATURE(dc, VIS1);
- tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
- cpu_fpr[rs1], cpu_fpr[rs2]);
+ gen_helper_fpsub32s(cpu_fpr[rd],
+ cpu_fpr[rs1], cpu_fpr[rs2]);
break;
case 0x060: /* VIS I fzero */
CHECK_FPU_FEATURE(dc, VIS1);
#endif
#ifdef TARGET_SPARC64
} else if (xop == 0x39) { /* V9 return */
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, cpu_cond);
cpu_src1 = get_src1(insn, cpu_src1);
} else
tcg_gen_mov_tl(cpu_dst, cpu_src1);
}
- tcg_gen_helper_0_0(helper_restore);
+ gen_helper_restore();
gen_mov_pc_npc(dc, cpu_cond);
r_const = tcg_const_i32(3);
- tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
- tcg_temp_free(r_const);
+ gen_helper_check_align(cpu_dst, r_const);
+ tcg_temp_free_i32(r_const);
tcg_gen_mov_tl(cpu_npc, cpu_dst);
dc->npc = DYNAMIC_PC;
goto jmp_insn;
switch (xop) {
case 0x38: /* jmpl */
{
- TCGv r_const;
+ TCGv r_pc;
+ TCGv_i32 r_const;
- r_const = tcg_const_tl(dc->pc);
- gen_movl_TN_reg(rd, r_const);
- tcg_temp_free(r_const);
+ r_pc = tcg_const_tl(dc->pc);
+ gen_movl_TN_reg(rd, r_pc);
+ tcg_temp_free(r_pc);
gen_mov_pc_npc(dc, cpu_cond);
r_const = tcg_const_i32(3);
- tcg_gen_helper_0_2(helper_check_align, cpu_dst,
- r_const);
- tcg_temp_free(r_const);
+ gen_helper_check_align(cpu_dst, r_const);
+ tcg_temp_free_i32(r_const);
tcg_gen_mov_tl(cpu_npc, cpu_dst);
dc->npc = DYNAMIC_PC;
}
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
case 0x39: /* rett, V9 return */
{
- TCGv r_const;
+ TCGv_i32 r_const;
if (!supervisor(dc))
goto priv_insn;
gen_mov_pc_npc(dc, cpu_cond);
r_const = tcg_const_i32(3);
- tcg_gen_helper_0_2(helper_check_align, cpu_dst,
- r_const);
- tcg_temp_free(r_const);
+ gen_helper_check_align(cpu_dst, r_const);
+ tcg_temp_free_i32(r_const);
tcg_gen_mov_tl(cpu_npc, cpu_dst);
dc->npc = DYNAMIC_PC;
- tcg_gen_helper_0_0(helper_rett);
+ gen_helper_rett();
}
goto jmp_insn;
#endif
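/* Aside, not part of the patch: the TCGv_i32 constants fed to
   gen_helper_check_align() in the cases above act as alignment masks, so the
   value 3 asks for 4-byte alignment and 7 (used for the doubleword accesses
   further down) for 8-byte alignment.  A sketch of the check such a helper is
   expected to perform; the body and the trap constant are assumptions, not
   taken from this patch: */

void helper_check_align(target_ulong addr, uint32_t align)
{
    if (addr & align)                   /* any masked low bit set => misaligned */
        raise_exception(TT_UNALIGNED);  /* assumed trap-type name */
}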
case 0x3b: /* flush */
if (!((dc)->def->features & CPU_FEATURE_FLUSH))
goto unimp_flush;
- tcg_gen_helper_0_1(helper_flush, cpu_dst);
+ gen_helper_flush(cpu_dst);
break;
case 0x3c: /* save */
save_state(dc, cpu_cond);
- tcg_gen_helper_0_0(helper_save);
+ gen_helper_save();
gen_movl_TN_reg(rd, cpu_dst);
break;
case 0x3d: /* restore */
save_state(dc, cpu_cond);
- tcg_gen_helper_0_0(helper_restore);
+ gen_helper_restore();
gen_movl_TN_reg(rd, cpu_dst);
break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
goto priv_insn;
dc->npc = DYNAMIC_PC;
dc->pc = DYNAMIC_PC;
- tcg_gen_helper_0_0(helper_done);
+ gen_helper_done();
goto jmp_insn;
case 1:
if (!supervisor(dc))
goto priv_insn;
dc->npc = DYNAMIC_PC;
dc->pc = DYNAMIC_PC;
- tcg_gen_helper_0_0(helper_retry);
+ gen_helper_retry();
goto jmp_insn;
default:
goto illegal_insn;
if (rd & 1)
goto illegal_insn;
else {
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, cpu_cond);
r_const = tcg_const_i32(7);
- tcg_gen_helper_0_2(helper_check_align, cpu_addr,
- r_const); // XXX remove
- tcg_temp_free(r_const);
+ gen_helper_check_align(cpu_addr, r_const); // XXX remove
+ tcg_temp_free_i32(r_const);
gen_address_mask(dc, cpu_addr);
tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
gen_address_mask(dc, cpu_addr);
if (rd == 1) {
tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
- tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
+ gen_helper_ldxfsr(cpu_tmp64);
} else
#else
{
tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
- tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
+ gen_helper_ldfsr(cpu_tmp32);
}
#endif
break;
case 0x22: /* load quad fpreg */
{
- TCGv r_const;
+ TCGv_i32 r_const;
CHECK_FPU_FEATURE(dc, FLOAT128);
r_const = tcg_const_i32(dc->mem_idx);
- tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
- tcg_temp_free(r_const);
+ gen_helper_ldqf(cpu_addr, r_const);
+ tcg_temp_free_i32(r_const);
gen_op_store_QT0_fpr(QFPREG(rd));
}
break;
case 0x23: /* load double fpreg */
{
- TCGv r_const;
+ TCGv_i32 r_const;
r_const = tcg_const_i32(dc->mem_idx);
- tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
- tcg_temp_free(r_const);
+ gen_helper_lddf(cpu_addr, r_const);
+ tcg_temp_free_i32(r_const);
gen_op_store_DT0_fpr(DFPREG(rd));
}
break;
if (rd & 1)
goto illegal_insn;
else {
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, cpu_cond);
gen_address_mask(dc, cpu_addr);
r_const = tcg_const_i32(7);
- tcg_gen_helper_0_2(helper_check_align, cpu_addr,
- r_const); // XXX remove
- tcg_temp_free(r_const);
+ gen_helper_check_align(cpu_addr, r_const); // XXX remove
+ tcg_temp_free_i32(r_const);
gen_movl_reg_TN(rd + 1, cpu_tmp0);
tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
#ifdef TARGET_SPARC64
/* V9 stqf, store quad fpreg */
{
- TCGv r_const;
+ TCGv_i32 r_const;
CHECK_FPU_FEATURE(dc, FLOAT128);
gen_op_load_fpr_QT0(QFPREG(rd));
r_const = tcg_const_i32(dc->mem_idx);
- tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
- tcg_temp_free(r_const);
+ gen_helper_stqf(cpu_addr, r_const);
+ tcg_temp_free_i32(r_const);
}
break;
#else /* !TARGET_SPARC64 */
#endif
case 0x27: /* store double fpreg */
{
- TCGv r_const;
+ TCGv_i32 r_const;
gen_op_load_fpr_DT0(DFPREG(rd));
r_const = tcg_const_i32(dc->mem_idx);
- tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
- tcg_temp_free(r_const);
+ gen_helper_stdf(cpu_addr, r_const);
+ tcg_temp_free_i32(r_const);
}
break;
default:
break;
case 0x36: /* V9 stqfa */
{
- TCGv r_const;
+ TCGv_i32 r_const;
CHECK_FPU_FEATURE(dc, FLOAT128);
r_const = tcg_const_i32(7);
- tcg_gen_helper_0_2(helper_check_align, cpu_addr,
- r_const);
- tcg_temp_free(r_const);
+ gen_helper_check_align(cpu_addr, r_const);
+ tcg_temp_free_i32(r_const);
gen_op_load_fpr_QT0(QFPREG(rd));
gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
}
return;
illegal_insn:
{
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_ILL_INSN);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
dc->is_br = 1;
}
return;
unimp_flush:
{
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_UNIMP_FLUSH);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
dc->is_br = 1;
}
return;
#if !defined(CONFIG_USER_ONLY)
priv_insn:
{
- TCGv r_const;
+ TCGv_i32 r_const;
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_PRIV_INSN);
- tcg_gen_helper_0_1(raise_exception, r_const);
- tcg_temp_free(r_const);
+ gen_helper_raise_exception(r_const);
+ tcg_temp_free_i32(r_const);
dc->is_br = 1;
}
return;
save_state(dc, cpu_cond);
r_const = tcg_const_i32(TT_NCP_INSN);
- tcg_gen_helper_0_1(raise_exception, r_const);
+ gen_helper_raise_exception(r_const);
tcg_temp_free(r_const);
dc->is_br = 1;
}
#endif
gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
- cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
- cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
- cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
+ cpu_tmp0 = tcg_temp_new();
+ cpu_tmp32 = tcg_temp_new_i32();
+ cpu_tmp64 = tcg_temp_new_i64();
- cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
+ cpu_dst = tcg_temp_local_new();
// loads and stores
- cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
- cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
+ cpu_val = tcg_temp_local_new();
+ cpu_addr = tcg_temp_local_new();
num_insns = 0;
max_insns = tb->cflags & CF_COUNT_MASK;
if (env->breakpoints[j] == dc->pc) {
if (dc->pc != pc_start)
save_state(dc, cpu_cond);
- tcg_gen_helper_0_0(helper_debug);
+ gen_helper_debug();
tcg_gen_exit_tb(0);
dc->is_br = 1;
goto exit_gen_loop;
tcg_temp_free(cpu_addr);
tcg_temp_free(cpu_val);
tcg_temp_free(cpu_dst);
- tcg_temp_free(cpu_tmp64);
- tcg_temp_free(cpu_tmp32);
+ tcg_temp_free_i64(cpu_tmp64);
+ tcg_temp_free_i32(cpu_tmp32);
tcg_temp_free(cpu_tmp0);
if (tb->cflags & CF_LAST_IO)
gen_io_end();
if (!inited) {
inited = 1;
- cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
- cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
- offsetof(CPUState, regwptr),
- "regwptr");
+ cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
+ cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
+ offsetof(CPUState, regwptr),
+ "regwptr");
#ifdef TARGET_SPARC64
- cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
- TCG_AREG0, offsetof(CPUState, xcc),
- "xcc");
- cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
- TCG_AREG0, offsetof(CPUState, asi),
- "asi");
- cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
- TCG_AREG0, offsetof(CPUState, fprs),
- "fprs");
- cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, gsr),
+ cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
+ "xcc");
+ cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
+ "asi");
+ cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
+ "fprs");
+ cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
"gsr");
- cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0,
+ cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, tick_cmpr),
"tick_cmpr");
- cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0,
+ cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, stick_cmpr),
"stick_cmpr");
- cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0,
+ cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, hstick_cmpr),
"hstick_cmpr");
- cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
- offsetof(CPUState, hintp),
+ cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
"hintp");
- cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
- offsetof(CPUState, htba),
- "htba");
- cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
- offsetof(CPUState, hver),
- "hver");
- cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
+ "htba");
+ cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
+ "hver");
+ cpu_ssr = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, ssr), "ssr");
- cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_ver = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, version), "ver");
- cpu_softint = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, softint),
- "softint");
+ cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, softint),
+ "softint");
#else
- cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
- TCG_AREG0, offsetof(CPUState, wim),
+ cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
"wim");
#endif
- cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, cond),
+ cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
"cond");
- cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, cc_src),
+ cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
"cc_src");
- cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, cc_src2),
"cc_src2");
- cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, cc_dst),
+ cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
"cc_dst");
- cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
- TCG_AREG0, offsetof(CPUState, psr),
- "psr");
- cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, fsr),
+ cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
+ "psr");
+ cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
"fsr");
- cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, pc),
+ cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
"pc");
- cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, npc),
- "npc");
- cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, y), "y");
+ cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
+ "npc");
+ cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
- cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
- TCG_AREG0, offsetof(CPUState, tbr),
+ cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
"tbr");
#endif
for (i = 1; i < 8; i++)
- cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
+ cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
offsetof(CPUState, gregs[i]),
gregnames[i]);
for (i = 0; i < TARGET_FPREGS; i++)
- cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
- offsetof(CPUState, fpr[i]),
- fregnames[i]);
+ cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
+ offsetof(CPUState, fpr[i]),
+ fregnames[i]);
/* register helpers */
-#undef DEF_HELPER
-#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+#define GEN_HELPER 2
#include "helper.h"
}
}
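/* Illustration, not part of the patch: the gen_helper_*() calls used
   throughout the translator above, and the "#define GEN_HELPER 2" include
   just before this point, are both driven by the target's helper.h.  A
   hypothetical sketch of one entry there (the exact spelling is assumed, not
   shown in this patch): */

DEF_HELPER_2(check_align, void, tl, i32)

/* Including helper.h with GEN_HELPER defined as 1 expands such a line into a
   typed inline wrapper, roughly gen_helper_check_align(TCGv addr,
   TCGv_i32 align), which packs its arguments and forwards them to
   tcg_gen_helperN(); including it with GEN_HELPER defined as 2, as done
   above, expands the same line into the runtime registration of
   helper_check_align.  Together this replaces the per-arity
   tcg_gen_helper_N_M() wrappers removed later in this patch. */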
int gen_new_label(void);
-static inline void tcg_gen_op1(int opc, TCGv arg1)
+static inline void tcg_gen_op1_i32(int opc, TCGv_i32 arg1)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+}
+
+static inline void tcg_gen_op1_i64(int opc, TCGv_i64 arg1)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}
static inline void tcg_gen_op1i(int opc, TCGArg arg1)
*gen_opparam_ptr++ = arg1;
}
-static inline void tcg_gen_op2(int opc, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_op2_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+}
+
+static inline void tcg_gen_op2_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+}
+
+static inline void tcg_gen_op2i_i32(int opc, TCGv_i32 arg1, TCGArg arg2)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = arg2;
}
-static inline void tcg_gen_op2i(int opc, TCGv arg1, TCGArg arg2)
+static inline void tcg_gen_op2i_i64(int opc, TCGv_i64 arg1, TCGArg arg2)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
*gen_opparam_ptr++ = arg2;
}
*gen_opparam_ptr++ = arg2;
}
-static inline void tcg_gen_op3(int opc, TCGv arg1, TCGv arg2, TCGv arg3)
+static inline void tcg_gen_op3_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+}
+
+static inline void tcg_gen_op3_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+}
+
+static inline void tcg_gen_op3i_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGArg arg3)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = arg3;
}
-static inline void tcg_gen_op3i(int opc, TCGv arg1, TCGv arg2, TCGArg arg3)
+static inline void tcg_gen_op3i_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGArg arg3)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
*gen_opparam_ptr++ = arg3;
}
-static inline void tcg_gen_op4(int opc, TCGv arg1, TCGv arg2, TCGv arg3,
- TCGv arg4)
+static inline void tcg_gen_ldst_op_i32(int opc, TCGv_i32 val, TCGv_ptr base,
+ TCGArg offset)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(val);
+ *gen_opparam_ptr++ = GET_TCGV_PTR(base);
+ *gen_opparam_ptr++ = offset;
+}
+
+static inline void tcg_gen_ldst_op_i64(int opc, TCGv_i64 val, TCGv_ptr base,
+ TCGArg offset)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(val);
+ *gen_opparam_ptr++ = GET_TCGV_PTR(base);
+ *gen_opparam_ptr++ = offset;
+}
+
+static inline void tcg_gen_qemu_ldst_op_i64_i32(int opc, TCGv_i64 val, TCGv_i32 addr,
+ TCGArg mem_index)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(val);
+ *gen_opparam_ptr++ = GET_TCGV_I32(addr);
+ *gen_opparam_ptr++ = mem_index;
+}
+
+static inline void tcg_gen_qemu_ldst_op_i64_i64(int opc, TCGv_i64 val, TCGv_i64 addr,
+ TCGArg mem_index)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(val);
+ *gen_opparam_ptr++ = GET_TCGV_I64(addr);
+ *gen_opparam_ptr++ = mem_index;
+}
+
+static inline void tcg_gen_op4_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3, TCGv_i32 arg4)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
+}
+
+static inline void tcg_gen_op4_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3, TCGv_i64 arg4)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
+}
+
+static inline void tcg_gen_op4i_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3, TCGArg arg4)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+ *gen_opparam_ptr++ = arg4;
+}
+
+static inline void tcg_gen_op4i_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3, TCGArg arg4)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
- *gen_opparam_ptr++ = GET_TCGV(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+ *gen_opparam_ptr++ = arg4;
}
-static inline void tcg_gen_op4i(int opc, TCGv arg1, TCGv arg2, TCGv arg3,
- TCGArg arg4)
+static inline void tcg_gen_op4ii_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGArg arg3, TCGArg arg4)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = arg3;
*gen_opparam_ptr++ = arg4;
}
-static inline void tcg_gen_op4ii(int opc, TCGv arg1, TCGv arg2, TCGArg arg3,
- TCGArg arg4)
+static inline void tcg_gen_op4ii_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGArg arg3, TCGArg arg4)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
*gen_opparam_ptr++ = arg3;
*gen_opparam_ptr++ = arg4;
}
-static inline void tcg_gen_op5(int opc, TCGv arg1, TCGv arg2,
- TCGv arg3, TCGv arg4,
- TCGv arg5)
+static inline void tcg_gen_op5_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
+}
+
+static inline void tcg_gen_op5_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
+}
+
+static inline void tcg_gen_op5i_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
- *gen_opparam_ptr++ = GET_TCGV(arg4);
- *gen_opparam_ptr++ = GET_TCGV(arg5);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
+ *gen_opparam_ptr++ = arg5;
}
-static inline void tcg_gen_op5i(int opc, TCGv arg1, TCGv arg2,
- TCGv arg3, TCGv arg4,
- TCGArg arg5)
+static inline void tcg_gen_op5i_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
- *gen_opparam_ptr++ = GET_TCGV(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
*gen_opparam_ptr++ = arg5;
}
-static inline void tcg_gen_op6(int opc, TCGv arg1, TCGv arg2,
- TCGv arg3, TCGv arg4,
- TCGv arg5, TCGv arg6)
+static inline void tcg_gen_op6_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
+ TCGv_i32 arg6)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg6);
+}
+
+static inline void tcg_gen_op6_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
+ TCGv_i64 arg6)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
- *gen_opparam_ptr++ = GET_TCGV(arg4);
- *gen_opparam_ptr++ = GET_TCGV(arg5);
- *gen_opparam_ptr++ = GET_TCGV(arg6);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg6);
}
-static inline void tcg_gen_op6ii(int opc, TCGv arg1, TCGv arg2,
- TCGv arg3, TCGv arg4,
- TCGArg arg5, TCGArg arg6)
+static inline void tcg_gen_op6ii_i32(int opc, TCGv_i32 arg1, TCGv_i32 arg2,
+ TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5,
+ TCGArg arg6)
{
*gen_opc_ptr++ = opc;
- *gen_opparam_ptr++ = GET_TCGV(arg1);
- *gen_opparam_ptr++ = GET_TCGV(arg2);
- *gen_opparam_ptr++ = GET_TCGV(arg3);
- *gen_opparam_ptr++ = GET_TCGV(arg4);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
+ *gen_opparam_ptr++ = arg5;
+ *gen_opparam_ptr++ = arg6;
+}
+
+static inline void tcg_gen_op6ii_i64(int opc, TCGv_i64 arg1, TCGv_i64 arg2,
+ TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5,
+ TCGArg arg6)
+{
+ *gen_opc_ptr++ = opc;
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
+ *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
*gen_opparam_ptr++ = arg5;
*gen_opparam_ptr++ = arg6;
}
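/* Illustration, plain C and not from the patch: the payoff of the typed
   emitters above is that a width mismatch now fails to compile instead of
   silently emitting a wrong op (or only being caught at run time, if at all).
   A minimal analogue of the pattern with made-up handle types: */

typedef struct { int index; } handle_i32;   /* stand-in for TCGv_i32 */
typedef struct { int index; } handle_i64;   /* stand-in for TCGv_i64 */

static void emit_add_i32(handle_i32 ret, handle_i32 a, handle_i32 b)
{
    (void)ret; (void)a; (void)b;             /* would append the op here */
}

/* emit_add_i32(some_i64_handle, ...) is rejected by the compiler, because the
   two wrapper structs are distinct types even though they share a layout;
   with a single untyped index type the same call would compile quietly. */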
tcg_gen_op1i(INDEX_op_br, label);
}
-static inline void tcg_gen_mov_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (GET_TCGV(ret) != GET_TCGV(arg))
- tcg_gen_op2(INDEX_op_mov_i32, ret, arg);
+ if (GET_TCGV_I32(ret) != GET_TCGV_I32(arg))
+ tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}
-static inline void tcg_gen_movi_i32(TCGv ret, int32_t arg)
+static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
- tcg_gen_op2i(INDEX_op_movi_i32, ret, arg);
+ tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
}
/* helper calls */
-#define TCG_HELPER_CALL_FLAGS 0
+static inline void tcg_gen_helperN(void *func, int flags, int sizemask,
+ TCGArg ret, int nargs, TCGArg *args)
+{
+ TCGv_ptr fn;
+ fn = tcg_const_ptr((tcg_target_long)func);
+ tcg_gen_callN(&tcg_ctx, fn, flags, sizemask, ret,
+ nargs, args);
+ tcg_temp_free_ptr(fn);
+}
-static inline void tcg_gen_helper_0_0(void *func)
+/* FIXME: Should this be pure? */
+static inline void tcg_gen_helper64(void *func, TCGv_i64 ret,
+ TCGv_i64 a, TCGv_i64 b)
{
- TCGv t0;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 0, NULL, 0, NULL);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_0_1(void *func, TCGv arg)
-{
- TCGv t0;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 0, NULL, 1, &arg);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_0_2(void *func, TCGv arg1, TCGv arg2)
-{
- TCGv args[2];
- TCGv t0;
- args[0] = arg1;
- args[1] = arg2;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 0, NULL, 2, args);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_0_3(void *func,
- TCGv arg1, TCGv arg2, TCGv arg3)
-{
- TCGv args[3];
- TCGv t0;
- args[0] = arg1;
- args[1] = arg2;
- args[2] = arg3;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 0, NULL, 3, args);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_0_4(void *func, TCGv arg1, TCGv arg2,
- TCGv arg3, TCGv arg4)
-{
- TCGv args[4];
- TCGv t0;
- args[0] = arg1;
- args[1] = arg2;
- args[2] = arg3;
- args[3] = arg4;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 0, NULL, 4, args);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_1_0(void *func, TCGv ret)
-{
- TCGv t0;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 1, &ret, 0, NULL);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_1_1(void *func, TCGv ret, TCGv arg1)
-{
- TCGv t0;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 1, &ret, 1, &arg1);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_1_2(void *func, TCGv ret,
- TCGv arg1, TCGv arg2)
-{
- TCGv args[2];
- TCGv t0;
- args[0] = arg1;
- args[1] = arg2;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 1, &ret, 2, args);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_1_3(void *func, TCGv ret,
- TCGv arg1, TCGv arg2, TCGv arg3)
-{
- TCGv args[3];
- TCGv t0;
- args[0] = arg1;
- args[1] = arg2;
- args[2] = arg3;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 1, &ret, 3, args);
- tcg_temp_free(t0);
-}
-
-static inline void tcg_gen_helper_1_4(void *func, TCGv ret,
- TCGv arg1, TCGv arg2, TCGv arg3,
- TCGv arg4)
-{
- TCGv args[4];
- TCGv t0;
- args[0] = arg1;
- args[1] = arg2;
- args[2] = arg3;
- args[3] = arg4;
- t0 = tcg_const_ptr((tcg_target_long)func);
- tcg_gen_call(&tcg_ctx,
- t0, TCG_HELPER_CALL_FLAGS,
- 1, &ret, 4, args);
- tcg_temp_free(t0);
+ TCGv_ptr fn;
+ TCGArg args[2];
+ fn = tcg_const_ptr((tcg_target_long)func);
+ args[0] = GET_TCGV_I64(a);
+ args[1] = GET_TCGV_I64(b);
+ tcg_gen_callN(&tcg_ctx, fn, 0, 7, GET_TCGV_I64(ret), 2, args);
+ tcg_temp_free_ptr(fn);
}
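/* Note on the sizemask (illustrative): tcg_gen_helperN() takes a sizemask
   describing which of the call's values are 64 bits wide, and the hard-coded
   7 above marks all three (the i64 return value and both i64 operands).
   Reading bit 0 as "64-bit return value" and bit n as "64-bit argument n" is
   an assumption of this note, consistent with that value; a hypothetical
   helper returning i64 from one i64 and one i32 argument would then use: */

int sizemask_example = (1 << 0)    /* 64-bit return value   */
                     | (1 << 1)    /* argument 1 is 64 bits */
                     | (0 << 2);   /* argument 2 is 32 bits */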
/* 32 bit ops */
-static inline void tcg_gen_ld8u_i32(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld8u_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}
-static inline void tcg_gen_ld8s_i32(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld8s_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}
-static inline void tcg_gen_ld16u_i32(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld16u_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}
-static inline void tcg_gen_ld16s_i32(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld16s_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}
-static inline void tcg_gen_ld_i32(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}
-static inline void tcg_gen_st8_i32(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st8_i32, arg1, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}
-static inline void tcg_gen_st16_i32(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st16_i32, arg1, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}
-static inline void tcg_gen_st_i32(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st_i32, arg1, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
-static inline void tcg_gen_add_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_add_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_addi_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_add_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_sub_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_sub_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_subfi_i32(TCGv ret, int32_t arg1, TCGv arg2)
+static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
- TCGv t0 = tcg_const_i32(arg1);
+ TCGv_i32 t0 = tcg_const_i32(arg1);
tcg_gen_sub_i32(ret, t0, arg2);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_subi_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_sub_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_and_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_and_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_andi_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
} else if (arg2 == 0xffffffff) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_and_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_or_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_or_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_ori_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0xffffffff) {
} else if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_or_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_xor_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_xor_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_xori_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_xor_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_shl_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_shl_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_shli_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_shl_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_shr_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_shr_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_shri_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_shr_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_sar_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_sar_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_sari_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_sar_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
}
-static inline void tcg_gen_brcond_i32(int cond, TCGv arg1, TCGv arg2,
+static inline void tcg_gen_brcond_i32(int cond, TCGv_i32 arg1, TCGv_i32 arg2,
int label_index)
{
- tcg_gen_op4ii(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
+ tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
}
-static inline void tcg_gen_brcondi_i32(int cond, TCGv arg1, int32_t arg2,
+static inline void tcg_gen_brcondi_i32(int cond, TCGv_i32 arg1, int32_t arg2,
int label_index)
{
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_brcond_i32(cond, arg1, t0, label_index);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_mul_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_mul_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_muli_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
- TCGv t0 = tcg_const_i32(arg2);
+ TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_mul_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
#ifdef TCG_TARGET_HAS_div_i32
-static inline void tcg_gen_div_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_div_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_rem_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_rem_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_divu_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_divu_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
}
-static inline void tcg_gen_remu_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3(INDEX_op_remu_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
}
#else
-static inline void tcg_gen_div_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_sari_i32(t0, arg1, 31);
- tcg_gen_op5(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_rem_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_sari_i32(t0, arg1, 31);
- tcg_gen_op5(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_divu_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_movi_i32(t0, 0);
- tcg_gen_op5(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_remu_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_movi_i32(t0, 0);
- tcg_gen_op5(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
}
#endif
#if TCG_TARGET_REG_BITS == 32
-static inline void tcg_gen_mov_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (GET_TCGV(ret) != GET_TCGV(arg)) {
- tcg_gen_mov_i32(ret, arg);
+ if (GET_TCGV_I64(ret) != GET_TCGV_I64(arg)) {
+ tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
}
-static inline void tcg_gen_movi_i64(TCGv ret, int64_t arg)
+static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
- tcg_gen_movi_i32(ret, arg);
+ tcg_gen_movi_i32(TCGV_LOW(ret), arg);
tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
-static inline void tcg_gen_ld8u_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_ld8u_i32(ret, arg2, offset);
+ tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_ld8s_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_ld8s_i32(ret, arg2, offset);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_ld16u_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_ld16u_i32(ret, arg2, offset);
+ tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_ld16s_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_ld16s_i32(ret, arg2, offset);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_ld32u_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_ld_i32(ret, arg2, offset);
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_ld32s_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_ld_i32(ret, arg2, offset);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_ld_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
+ tcg_target_long offset)
{
/* since arg2 and ret have different types, they cannot be the
same temporary */
#ifdef TCG_TARGET_WORDS_BIGENDIAN
tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
- tcg_gen_ld_i32(ret, arg2, offset + 4);
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
- tcg_gen_ld_i32(ret, arg2, offset);
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
-static inline void tcg_gen_st8_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_st8_i32(arg1, arg2, offset);
+ tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}
-static inline void tcg_gen_st16_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_st16_i32(arg1, arg2, offset);
+ tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}
-static inline void tcg_gen_st32_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
+ tcg_target_long offset)
{
- tcg_gen_st_i32(arg1, arg2, offset);
+ tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}
-static inline void tcg_gen_st_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
+ tcg_target_long offset)
{
#ifdef TCG_TARGET_WORDS_BIGENDIAN
tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
- tcg_gen_st_i32(arg1, arg2, offset + 4);
+ tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
- tcg_gen_st_i32(arg1, arg2, offset);
+ tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
-static inline void tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op6(INDEX_op_add2_i32, ret, TCGV_HIGH(ret),
- arg1, TCGV_HIGH(arg1), arg2, TCGV_HIGH(arg2));
+ tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
+ TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
+ TCGV_HIGH(arg2));
}
-static inline void tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op6(INDEX_op_sub2_i32, ret, TCGV_HIGH(ret),
- arg1, TCGV_HIGH(arg1), arg2, TCGV_HIGH(arg2));
+ tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
+ TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
+ TCGV_HIGH(arg2));
}
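/* On 32-bit hosts a TCGv_i64 is handled as a low/high pair of 32-bit halves
   (TCGV_LOW()/TCGV_HIGH(), as used above), so 64-bit add and sub are emitted
   as single double-word ops whose job is to propagate the carry or borrow
   from the low words into the high words.  A standalone check of that carry
   propagation in plain C (not TCG code): */

#include <stdint.h>
#include <assert.h>

static uint64_t add64_from_halves(uint32_t al, uint32_t ah,
                                  uint32_t bl, uint32_t bh)
{
    uint32_t rl = al + bl;                 /* low words */
    uint32_t carry = rl < al;              /* carry out of the low word */
    uint32_t rh = ah + bh + carry;         /* high words plus carry */
    return ((uint64_t)rh << 32) | rl;
}

int main(void)
{
    assert(add64_from_halves(0xffffffffu, 0, 1, 0) == 0x100000000ull);
    assert(add64_from_halves(0x80000000u, 1, 0x80000000u, 2) ==
           (uint64_t)4 << 32);
    return 0;
}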
-static inline void tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_and_i32(ret, arg1, arg2);
+ tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
-static inline void tcg_gen_andi_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- tcg_gen_andi_i32(ret, arg1, arg2);
+ tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}
-static inline void tcg_gen_or_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_or_i32(ret, arg1, arg2);
+ tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
-static inline void tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- tcg_gen_ori_i32(ret, arg1, arg2);
+ tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}
-static inline void tcg_gen_xor_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_xor_i32(ret, arg1, arg2);
+ tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
-static inline void tcg_gen_xori_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- tcg_gen_xori_i32(ret, arg1, arg2);
+ tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}
/* XXX: use generic code when basic block handling is OK or CPU
specific code (x86) */
-static inline void tcg_gen_shl_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_shl_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_shl_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_shli_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
}
-static inline void tcg_gen_shr_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_shr_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_shr_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_shri_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
}
-static inline void tcg_gen_sar_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_sar_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_sar_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_sari_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
}
-static inline void tcg_gen_brcond_i64(int cond, TCGv arg1, TCGv arg2,
+static inline void tcg_gen_brcond_i64(int cond, TCGv_i64 arg1, TCGv_i64 arg2,
int label_index)
{
- tcg_gen_op6ii(INDEX_op_brcond2_i32,
- arg1, TCGV_HIGH(arg1), arg2, TCGV_HIGH(arg2),
- cond, label_index);
+ tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
+ TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
+ TCGV_HIGH(arg2), cond, label_index);
}
-static inline void tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0, t1;
-
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i64 t0;
+ TCGv_i32 t1;
- tcg_gen_op4(INDEX_op_mulu2_i32, t0, TCGV_HIGH(t0), arg1, arg2);
-
- tcg_gen_mul_i32(t1, arg1, TCGV_HIGH(arg2));
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i32();
+
+ tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
+ TCGV_LOW(arg1), TCGV_LOW(arg2));
+
+ tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
- tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), arg2);
+ tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
-
+
tcg_gen_mov_i64(ret, t0);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i32(t1);
}
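/* The 32-bit-host tcg_gen_mul_i64() above follows the schoolbook
   decomposition: mulu2 produces the full 64-bit product of the two low
   halves, then the two 32x32 cross products are added into the high word
   (the high*high term only affects bits >= 64 and is dropped).  A standalone
   plain-C check of that identity: */

#include <stdint.h>
#include <assert.h>

static uint64_t mul64_from_halves(uint64_t a, uint64_t b)
{
    uint32_t al = (uint32_t)a, ah = (uint32_t)(a >> 32);
    uint32_t bl = (uint32_t)b, bh = (uint32_t)(b >> 32);

    uint64_t lo = (uint64_t)al * bl;             /* like INDEX_op_mulu2_i32 */
    uint32_t hi = (uint32_t)(lo >> 32);
    hi += (uint32_t)((uint64_t)al * bh);         /* cross product, low 32 bits */
    hi += (uint32_t)((uint64_t)ah * bl);         /* cross product, low 32 bits */
    return ((uint64_t)hi << 32) | (uint32_t)lo;
}

int main(void)
{
    assert(mul64_from_halves(0x123456789abcdef0ull, 0x0fedcba987654321ull) ==
           0x123456789abcdef0ull * 0x0fedcba987654321ull);
    assert(mul64_from_halves(0xffffffffffffffffull, 3) == 0xfffffffffffffffdull);
    return 0;
}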
-static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_div_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_div_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_rem_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_rem_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_divu_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_divu_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_helper_1_2(tcg_helper_remu_i64, ret, arg1, arg2);
+ tcg_gen_helper64(tcg_helper_remu_i64, ret, arg1, arg2);
}
#else
-static inline void tcg_gen_mov_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (GET_TCGV(ret) != GET_TCGV(arg))
- tcg_gen_op2(INDEX_op_mov_i64, ret, arg);
+ if (GET_TCGV_I64(ret) != GET_TCGV_I64(arg))
+ tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
}
-static inline void tcg_gen_movi_i64(TCGv ret, int64_t arg)
+static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
- tcg_gen_op2i(INDEX_op_movi_i64, ret, arg);
+ tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
}
-static inline void tcg_gen_ld8u_i64(TCGv ret, TCGv arg2,
+static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld8u_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
}
-static inline void tcg_gen_ld8s_i64(TCGv ret, TCGv arg2,
+static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld8s_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
}
-static inline void tcg_gen_ld16u_i64(TCGv ret, TCGv arg2,
+static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld16u_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
}
-static inline void tcg_gen_ld16s_i64(TCGv ret, TCGv arg2,
+static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld16s_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
}
-static inline void tcg_gen_ld32u_i64(TCGv ret, TCGv arg2,
+static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld32u_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
}
-static inline void tcg_gen_ld32s_i64(TCGv ret, TCGv arg2,
+static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld32s_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
}
-static inline void tcg_gen_ld_i64(TCGv ret, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_i64 arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_ld_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
}
-static inline void tcg_gen_st8_i64(TCGv arg1, TCGv arg2,
+static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st8_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
}
-static inline void tcg_gen_st16_i64(TCGv arg1, TCGv arg2,
+static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st16_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
}
-static inline void tcg_gen_st32_i64(TCGv arg1, TCGv arg2,
+static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_i64 arg2,
tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st32_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
}
-static inline void tcg_gen_st_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)
+static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_i64 arg2, tcg_target_long offset)
{
- tcg_gen_op3i(INDEX_op_st_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
}
-static inline void tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_add_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_sub_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_and_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_andi_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_and_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_or_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_or_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_or_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_xor_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_xor_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_xori_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_xor_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_shl_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_shl_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_shli_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
} else {
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_shl_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
}
-static inline void tcg_gen_shr_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_shr_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_shri_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
} else {
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_shr_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
}
-static inline void tcg_gen_sar_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_sar_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_sari_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
} else {
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_sar_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
}
-static inline void tcg_gen_brcond_i64(int cond, TCGv arg1, TCGv arg2,
+static inline void tcg_gen_brcond_i64(int cond, TCGv_i64 arg1, TCGv_i64 arg2,
int label_index)
{
- tcg_gen_op4ii(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
+ tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
}
-static inline void tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_mul_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
}
#ifdef TCG_TARGET_HAS_div_i64
-static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_div_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_rem_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_divu_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
}
-static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_op3(INDEX_op_remu_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
}
#else
-static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_sari_i64(t0, arg1, 63);
- tcg_gen_op5(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_sari_i64(t0, arg1, 63);
- tcg_gen_op5(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_movi_i64(t0, 0);
- tcg_gen_op5(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_movi_i64(t0, 0);
- tcg_gen_op5(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
- tcg_temp_free(t0);
+ tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
}
#endif
#endif
-static inline void tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
} else {
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_add_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
}
-static inline void tcg_gen_subfi_i64(TCGv ret, int64_t arg1, TCGv arg2)
+static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
- TCGv t0 = tcg_const_i64(arg1);
+ TCGv_i64 t0 = tcg_const_i64(arg1);
tcg_gen_sub_i64(ret, t0, arg2);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
} else {
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_sub_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
}
-static inline void tcg_gen_brcondi_i64(int cond, TCGv arg1, int64_t arg2,
+static inline void tcg_gen_brcondi_i64(int cond, TCGv_i64 arg1, int64_t arg2,
int label_index)
{
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_brcond_i64(cond, arg1, t0, label_index);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
- TCGv t0 = tcg_const_i64(arg2);
+ TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_mul_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
/***************************************/
/* optional operations */
-static inline void tcg_gen_ext8s_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifdef TCG_TARGET_HAS_ext8s_i32
- tcg_gen_op2(INDEX_op_ext8s_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
#else
tcg_gen_shli_i32(ret, arg, 24);
tcg_gen_sari_i32(ret, ret, 24);
#endif
}
-static inline void tcg_gen_ext16s_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifdef TCG_TARGET_HAS_ext16s_i32
- tcg_gen_op2(INDEX_op_ext16s_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
#else
tcg_gen_shli_i32(ret, arg, 16);
tcg_gen_sari_i32(ret, ret, 16);
/* These are currently just for convenience.
We assume a target will recognise these automatically. */
-static inline void tcg_gen_ext8u_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
tcg_gen_andi_i32(ret, arg, 0xffu);
}
-static inline void tcg_gen_ext16u_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
tcg_gen_andi_i32(ret, arg, 0xffffu);
}
/* Note: we assume the two high bytes are set to zero */
-static inline void tcg_gen_bswap16_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifdef TCG_TARGET_HAS_bswap16_i32
- tcg_gen_op2(INDEX_op_bswap16_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
#else
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0, t1;
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
tcg_gen_shri_i32(t0, arg, 8);
tcg_gen_andi_i32(t1, arg, 0x000000ff);
tcg_gen_shli_i32(t1, t1, 8);
tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
#endif
}
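/* A minimal, standalone C sketch (not part of the patch) of the value the
   fallback expansion above computes, assuming, as the note says, that the
   two high bytes of the input are already zero. */
#include <stdint.h>
#include <assert.h>

static uint32_t bswap16_value(uint32_t arg)
{
    uint32_t t0 = arg >> 8;                  /* shri_i32(t0, arg, 8)        */
    uint32_t t1 = (arg & 0x000000ff) << 8;   /* andi + shli on the low byte */
    return t0 | t1;                          /* or_i32(ret, t0, t1)         */
}

int main(void)
{
    assert(bswap16_value(0x00001234u) == 0x00003412u);
    return 0;
}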
-static inline void tcg_gen_bswap_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_bswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifdef TCG_TARGET_HAS_bswap_i32
- tcg_gen_op2(INDEX_op_bswap_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_bswap_i32, ret, arg);
#else
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0, t1;
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
tcg_gen_shli_i32(t0, arg, 24);
tcg_gen_shri_i32(t1, arg, 24);
tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
#endif
}
#if TCG_TARGET_REG_BITS == 32
-static inline void tcg_gen_ext8s_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- tcg_gen_ext8s_i32(ret, arg);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_ext16s_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- tcg_gen_ext16s_i32(ret, arg);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_ext32s_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- tcg_gen_mov_i32(ret, arg);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_ext8u_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- tcg_gen_ext8u_i32(ret, arg);
+ tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_ext16u_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- tcg_gen_ext16u_i32(ret, arg);
+ tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_ext32u_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- tcg_gen_mov_i32(ret, arg);
+ tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_trunc_i64_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
- tcg_gen_mov_i32(ret, arg);
+ tcg_gen_mov_i32(ret, TCGV_LOW(arg));
}
-static inline void tcg_gen_extu_i32_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
- tcg_gen_mov_i32(ret, arg);
+ tcg_gen_mov_i32(TCGV_LOW(ret), arg);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
-static inline void tcg_gen_ext_i32_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
- tcg_gen_mov_i32(ret, arg);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_mov_i32(TCGV_LOW(ret), arg);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
-static inline void tcg_gen_bswap_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0, t1;
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
- tcg_gen_bswap_i32(t0, arg);
+ tcg_gen_bswap_i32(t0, TCGV_LOW(arg));
tcg_gen_bswap_i32(t1, TCGV_HIGH(arg));
- tcg_gen_mov_i32(ret, t1);
+ tcg_gen_mov_i32(TCGV_LOW(ret), t1);
tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
#else
-static inline void tcg_gen_ext8s_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifdef TCG_TARGET_HAS_ext8s_i64
- tcg_gen_op2(INDEX_op_ext8s_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
#else
tcg_gen_shli_i64(ret, arg, 56);
tcg_gen_sari_i64(ret, ret, 56);
#endif
}
-static inline void tcg_gen_ext16s_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifdef TCG_TARGET_HAS_ext16s_i64
- tcg_gen_op2(INDEX_op_ext16s_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
#else
tcg_gen_shli_i64(ret, arg, 48);
tcg_gen_sari_i64(ret, ret, 48);
#endif
}
-static inline void tcg_gen_ext32s_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifdef TCG_TARGET_HAS_ext32s_i64
- tcg_gen_op2(INDEX_op_ext32s_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
#else
tcg_gen_shli_i64(ret, arg, 32);
tcg_gen_sari_i64(ret, ret, 32);
#endif
}
-static inline void tcg_gen_ext8u_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
tcg_gen_andi_i64(ret, arg, 0xffu);
}
-static inline void tcg_gen_ext16u_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
tcg_gen_andi_i64(ret, arg, 0xffffu);
}
-static inline void tcg_gen_ext32u_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
tcg_gen_andi_i64(ret, arg, 0xffffffffu);
}
/* Note: we assume the target supports move between 32 and 64 bit
registers. This will probably break MIPS64 targets. */
-static inline void tcg_gen_trunc_i64_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
- tcg_gen_mov_i32(ret, arg);
+ tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
}
/* Note: we assume the target supports move between 32 and 64 bit
registers */
-static inline void tcg_gen_extu_i32_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
- tcg_gen_andi_i64(ret, arg, 0xffffffffu);
+ tcg_gen_andi_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)), 0xffffffffu);
}
/* Note: we assume the target supports move between 32 and 64 bit
registers */
-static inline void tcg_gen_ext_i32_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
- tcg_gen_ext32s_i64(ret, arg);
+ tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}
-static inline void tcg_gen_bswap_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifdef TCG_TARGET_HAS_bswap_i64
- tcg_gen_op2(INDEX_op_bswap_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_bswap_i64, ret, arg);
#else
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0, t1;
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
tcg_gen_shli_i64(t0, arg, 56);
tcg_gen_shri_i64(t1, arg, 56);
tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
#endif
}
#endif
-static inline void tcg_gen_neg_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifdef TCG_TARGET_HAS_neg_i32
- tcg_gen_op2(INDEX_op_neg_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
#else
- TCGv t0 = tcg_const_i32(0);
+ TCGv_i32 t0 = tcg_const_i32(0);
tcg_gen_sub_i32(ret, t0, arg);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
#endif
}
-static inline void tcg_gen_neg_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifdef TCG_TARGET_HAS_neg_i64
- tcg_gen_op2(INDEX_op_neg_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
#else
- TCGv t0 = tcg_const_i64(0);
+ TCGv_i64 t0 = tcg_const_i64(0);
tcg_gen_sub_i64(ret, t0, arg);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
#endif
}
-static inline void tcg_gen_not_i32(TCGv ret, TCGv arg)
+static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
tcg_gen_xori_i32(ret, arg, -1);
}
-static inline void tcg_gen_not_i64(TCGv ret, TCGv arg)
+static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
tcg_gen_xori_i64(ret, arg, -1);
}
-static inline void tcg_gen_discard_i32(TCGv arg)
+static inline void tcg_gen_discard_i32(TCGv_i32 arg)
{
- tcg_gen_op1(INDEX_op_discard, arg);
+ tcg_gen_op1_i32(INDEX_op_discard, arg);
}
#if TCG_TARGET_REG_BITS == 32
-static inline void tcg_gen_discard_i64(TCGv arg)
+static inline void tcg_gen_discard_i64(TCGv_i64 arg)
{
- tcg_gen_discard_i32(arg);
+ tcg_gen_discard_i32(TCGV_LOW(arg));
tcg_gen_discard_i32(TCGV_HIGH(arg));
}
#else
-static inline void tcg_gen_discard_i64(TCGv arg)
+static inline void tcg_gen_discard_i64(TCGv_i64 arg)
{
- tcg_gen_op1(INDEX_op_discard, arg);
+ tcg_gen_op1_i64(INDEX_op_discard, arg);
}
#endif
-static inline void tcg_gen_concat_i32_i64(TCGv dest, TCGv low, TCGv high)
+static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
#if TCG_TARGET_REG_BITS == 32
- tcg_gen_mov_i32(dest, low);
+ tcg_gen_mov_i32(TCGV_LOW(dest), low);
tcg_gen_mov_i32(TCGV_HIGH(dest), high);
#else
- TCGv tmp = tcg_temp_new (TCG_TYPE_I64);
+ TCGv_i64 tmp = tcg_temp_new_i64();
/* This extension is only needed for type correctness.
We may be able to do better given target specific information. */
tcg_gen_extu_i32_i64(tmp, high);
tcg_gen_shli_i64(tmp, tmp, 32);
tcg_gen_extu_i32_i64(dest, low);
tcg_gen_or_i64(dest, dest, tmp);
- tcg_temp_free(tmp);
+ tcg_temp_free_i64(tmp);
#endif
}
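/* A standalone sketch (not part of the patch) of the value computed by the
   64-bit branch of tcg_gen_concat_i32_i64() above:
   dest = ((uint64_t)high << 32) | zero_extend(low). */
#include <stdint.h>
#include <assert.h>

static uint64_t concat_i32_i64_value(uint32_t low, uint32_t high)
{
    uint64_t tmp = (uint64_t)high;   /* extu_i32_i64(tmp, high)          */
    tmp <<= 32;                      /* shli_i64(tmp, tmp, 32)           */
    return (uint64_t)low | tmp;      /* extu_i32_i64(dest, low); or_i64  */
}

int main(void)
{
    assert(concat_i32_i64_value(0x89abcdefu, 0x01234567u) == 0x0123456789abcdefULL);
    return 0;
}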
-static inline void tcg_gen_concat32_i64(TCGv dest, TCGv low, TCGv high)
+static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low, TCGv_i64 high)
{
#if TCG_TARGET_REG_BITS == 32
- tcg_gen_concat_i32_i64(dest, low, high);
+ tcg_gen_concat_i32_i64(dest, TCGV_LOW(low), TCGV_LOW(high));
#else
- TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 tmp = tcg_temp_new_i64();
tcg_gen_ext32u_i64(dest, low);
tcg_gen_shli_i64(tmp, high, 32);
tcg_gen_or_i64(dest, dest, tmp);
- tcg_temp_free(tmp);
+ tcg_temp_free_i64(tmp);
#endif
}
-static inline void tcg_gen_andc_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_not_i32(t0, arg2);
tcg_gen_and_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_andc_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_not_i64(t0, arg2);
tcg_gen_and_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_eqv_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_xor_i32(t0, arg1, arg2);
tcg_gen_not_i32(ret, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_eqv_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_xor_i64(t0, arg1, arg2);
tcg_gen_not_i64(ret, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_nand_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_and_i32(t0, arg1, arg2);
tcg_gen_not_i32(ret, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_nand_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_and_i64(t0, arg1, arg2);
tcg_gen_not_i64(ret, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_nor_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_or_i32(t0, arg1, arg2);
tcg_gen_not_i32(ret, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_nor_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_or_i64(t0, arg1, arg2);
tcg_gen_not_i64(ret, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_orc_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_not_i32(t0, arg2);
tcg_gen_or_i32(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i32(t0);
}
-static inline void tcg_gen_orc_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_not_i64(t0, arg2);
tcg_gen_or_i64(ret, arg1, t0);
- tcg_temp_free(t0);
+ tcg_temp_free_i64(t0);
}
-static inline void tcg_gen_rotl_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0, t1;
+ TCGv_i32 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
tcg_gen_shl_i32(t0, arg1, arg2);
tcg_gen_subfi_i32(t1, 32, arg2);
tcg_gen_shr_i32(t1, arg1, t1);
tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
-static inline void tcg_gen_rotl_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
tcg_gen_shl_i64(t0, arg1, arg2);
tcg_gen_subfi_i64(t1, 64, arg2);
tcg_gen_shr_i64(t1, arg1, t1);
tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
}
-static inline void tcg_gen_rotli_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
} else {
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0, t1;
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
tcg_gen_shli_i32(t0, arg1, arg2);
tcg_gen_shri_i32(t1, arg1, 32 - arg2);
tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
}
-static inline void tcg_gen_rotli_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
} else {
- TCGv t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
tcg_gen_shli_i64(t0, arg1, arg2);
tcg_gen_shri_i64(t1, arg1, 64 - arg2);
tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
}
}
-static inline void tcg_gen_rotr_i32(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- TCGv t0, t1;
+ TCGv_i32 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
tcg_gen_shr_i32(t0, arg1, arg2);
tcg_gen_subfi_i32(t1, 32, arg2);
tcg_gen_shl_i32(t1, arg1, t1);
tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
-static inline void tcg_gen_rotr_i64(TCGv ret, TCGv arg1, TCGv arg2)
+static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- TCGv t0, t1;
+ TCGv_i64 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I64);
- t1 = tcg_temp_new(TCG_TYPE_I64);
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
tcg_gen_shr_i64(t0, arg1, arg2);
tcg_gen_subfi_i64(t1, 64, arg2);
tcg_gen_shl_i64(t1, arg1, t1);
tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
}
-static inline void tcg_gen_rotri_i32(TCGv ret, TCGv arg1, int32_t arg2)
+static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
}
}
-static inline void tcg_gen_rotri_i64(TCGv ret, TCGv arg1, int64_t arg2)
+static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
/* some cases can be optimized here */
if (arg2 == 0) {
#error must include QEMU headers
#endif
+#if TARGET_LONG_BITS == 32
+#define TCGv TCGv_i32
+#define tcg_temp_new() tcg_temp_new_i32()
+#define tcg_global_reg_new tcg_global_reg_new_i32
+#define tcg_global_mem_new tcg_global_mem_new_i32
+#define tcg_temp_local_new(t) tcg_temp_local_new_i32()
+#define tcg_temp_free tcg_temp_free_i32
+#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
+#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
+#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
+#define TCGV_EQUAL(a, b) (GET_TCGV_I32(a) == GET_TCGV_I32(b))
+#else
+#define TCGv TCGv_i64
+#define tcg_temp_new() tcg_temp_new_i64()
+#define tcg_global_reg_new tcg_global_reg_new_i64
+#define tcg_global_mem_new tcg_global_mem_new_i64
+#define tcg_temp_local_new(t) tcg_temp_local_new_i64()
+#define tcg_temp_free tcg_temp_free_i64
+#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
+#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
+#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
+#define TCGV_EQUAL(a, b) (GET_TCGV_I64(a) == GET_TCGV_I64(b))
+#endif
+
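/* Illustrative use of the compatibility aliases above (hypothetical target
   front-end code, not taken from this patch): the same translator source
   compiles whether the guest is 32-bit or 64-bit, because TCGv and the
   tcg_temp_* names resolve to the _i32 or _i64 flavour selected by
   TARGET_LONG_BITS.

       TCGv addr = tcg_temp_new();              // i32 or i64 temp, per target
       TCGv val  = tcg_temp_new();
       // ... compute the guest address into addr ...
       tcg_gen_qemu_ld32u(val, addr, mem_index);
       tcg_gen_qemu_st32(val, addr, mem_index);
       tcg_temp_free(val);
       tcg_temp_free(addr);

   addr, val and mem_index are placeholders for illustration only. */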
/* debug info: write the PC of the corresponding QEMU CPU instruction */
static inline void tcg_gen_debug_insn_start(uint64_t pc)
{
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_ld8u, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld8u, ret, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_ld8u, ret, addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld8u, TCGV_LOW(ret), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}
static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_ld8s, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld8s, ret, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_ld8s, ret, addr, TCGV_HIGH(addr), mem_index);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld8s, TCGV_LOW(ret), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}
static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_ld16u, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld16u, ret, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_ld16u, ret, addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld16u, TCGV_LOW(ret), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}
static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_ld16s, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld16s, ret, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_ld16s, ret, addr, TCGV_HIGH(addr), mem_index);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld16s, TCGV_LOW(ret), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_ld32u, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld32u, ret, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_ld32u, ret, addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld32u, TCGV_LOW(ret), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_ld32u, ret, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_ld32u, ret, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_ld32u, ret, addr, TCGV_HIGH(addr), mem_index);
- tcg_gen_sari_i32(TCGV_HIGH(ret), ret, 31);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld32u, TCGV_LOW(ret), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}
-static inline void tcg_gen_qemu_ld64(TCGv ret, TCGv addr, int mem_index)
+static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op4i(INDEX_op_qemu_ld64, ret, TCGV_HIGH(ret), addr, mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret), addr, mem_index);
#else
- tcg_gen_op5i(INDEX_op_qemu_ld64, ret, TCGV_HIGH(ret),
- addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op5i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
+ TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
}
static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_st8, arg, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_st8, arg, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_st8, arg, addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_st8, TCGV_LOW(arg), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
#endif
}
static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_st16, arg, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_st16, arg, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_st16, arg, addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_st16, TCGV_LOW(arg), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
#endif
}
static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op3i(INDEX_op_qemu_st32, arg, addr, mem_index);
+ tcg_gen_op3i_i32(INDEX_op_qemu_st32, arg, addr, mem_index);
#else
- tcg_gen_op4i(INDEX_op_qemu_st32, arg, addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_st32, TCGV_LOW(arg), TCGV_LOW(addr),
+ TCGV_HIGH(addr), mem_index);
#endif
}
-static inline void tcg_gen_qemu_st64(TCGv arg, TCGv addr, int mem_index)
+static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
- tcg_gen_op4i(INDEX_op_qemu_st64, arg, TCGV_HIGH(arg), addr, mem_index);
+ tcg_gen_op4i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg), addr,
+ mem_index);
#else
- tcg_gen_op5i(INDEX_op_qemu_st64, arg, TCGV_HIGH(arg),
- addr, TCGV_HIGH(addr), mem_index);
+ tcg_gen_op5i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg),
+ TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
}
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld8u, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u, ret, addr, mem_index);
}
static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld8s, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s, ret, addr, mem_index);
}
static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld16u, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u, ret, addr, mem_index);
}
static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld16s, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s, ret, addr, mem_index);
}
static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld32u, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u, ret, addr, mem_index);
}
static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld32s, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s, ret, addr, mem_index);
}
-static inline void tcg_gen_qemu_ld64(TCGv ret, TCGv addr, int mem_index)
+static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_ld64, ret, addr, mem_index);
+ tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64, ret, addr, mem_index);
}
static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_st8, arg, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8, arg, addr, mem_index);
}
static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_st16, arg, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16, arg, addr, mem_index);
}
static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_st32, arg, addr, mem_index);
+ tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32, arg, addr, mem_index);
}
-static inline void tcg_gen_qemu_st64(TCGv arg, TCGv addr, int mem_index)
+static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
- tcg_gen_op3i(INDEX_op_qemu_st64, arg, addr, mem_index);
+ tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64, arg, addr, mem_index);
}
#define tcg_gen_ld_ptr tcg_gen_ld_i64
tcg_abort();
}
-TCGv tcg_global_reg_new(TCGType type, int reg, const char *name)
+static inline int tcg_global_reg_new_internal(TCGType type, int reg,
+ const char *name)
{
TCGContext *s = &tcg_ctx;
TCGTemp *ts;
ts->name = name;
s->nb_globals++;
tcg_regset_set_reg(s->reserved_regs, reg);
- return MAKE_TCGV(idx);
+ return idx;
+}
+
+TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name)
+{
+ int idx;
+
+ idx = tcg_global_reg_new_internal(TCG_TYPE_I32, reg, name);
+ return MAKE_TCGV_I32(idx);
+}
+
+TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name)
+{
+ int idx;
+
+ idx = tcg_global_reg_new_internal(TCG_TYPE_I64, reg, name);
+ return MAKE_TCGV_I64(idx);
}
#if TCG_TARGET_REG_BITS == 32
/* temporary hack to avoid register shortage for tcg_qemu_st64() */
-TCGv tcg_global_reg2_new_hack(TCGType type, int reg1, int reg2,
- const char *name)
+TCGv_i64 tcg_global_reg2_new_hack(TCGType type, int reg1, int reg2,
+ const char *name)
{
TCGContext *s = &tcg_ctx;
TCGTemp *ts;
ts->name = strdup(buf);
s->nb_globals += 2;
- return MAKE_TCGV(idx);
+ return MAKE_TCGV_I64(idx);
}
#endif
-TCGv tcg_global_mem_new(TCGType type, int reg, tcg_target_long offset,
- const char *name)
+static inline int tcg_global_mem_new_internal(TCGType type, int reg,
+ tcg_target_long offset,
+ const char *name)
{
TCGContext *s = &tcg_ctx;
TCGTemp *ts;
ts->name = name;
s->nb_globals++;
}
- return MAKE_TCGV(idx);
+ return idx;
+}
+
+TCGv_i32 tcg_global_mem_new_i32(int reg, tcg_target_long offset,
+ const char *name)
+{
+ int idx;
+
+ idx = tcg_global_mem_new_internal(TCG_TYPE_I32, reg, offset, name);
+ return MAKE_TCGV_I32(idx);
+}
+
+TCGv_i64 tcg_global_mem_new_i64(int reg, tcg_target_long offset,
+ const char *name)
+{
+ int idx;
+
+ idx = tcg_global_mem_new_internal(TCG_TYPE_I64, reg, offset, name);
+ return MAKE_TCGV_I64(idx);
}
-TCGv tcg_temp_new_internal(TCGType type, int temp_local)
+static inline int tcg_temp_new_internal(TCGType type, int temp_local)
{
TCGContext *s = &tcg_ctx;
TCGTemp *ts;
s->nb_temps++;
}
}
- return MAKE_TCGV(idx);
+ return idx;
}
-void tcg_temp_free(TCGv arg)
+TCGv_i32 tcg_temp_new_internal_i32(int temp_local)
+{
+ int idx;
+
+ idx = tcg_temp_new_internal(TCG_TYPE_I32, temp_local);
+ return MAKE_TCGV_I32(idx);
+}
+
+TCGv_i64 tcg_temp_new_internal_i64(int temp_local)
+{
+ int idx;
+
+ idx = tcg_temp_new_internal(TCG_TYPE_I64, temp_local);
+ return MAKE_TCGV_I64(idx);
+}
+
+static inline void tcg_temp_free_internal(int idx)
{
TCGContext *s = &tcg_ctx;
TCGTemp *ts;
- int idx = GET_TCGV(arg);
int k;
assert(idx >= s->nb_globals && idx < s->nb_temps);
s->first_free_temp[k] = idx;
}
+void tcg_temp_free_i32(TCGv_i32 arg)
+{
+ tcg_temp_free_internal(GET_TCGV_I32(arg));
+}
+
+void tcg_temp_free_i64(TCGv_i64 arg)
+{
+ tcg_temp_free_internal(GET_TCGV_I64(arg));
+}
-TCGv tcg_const_i32(int32_t val)
+TCGv_i32 tcg_const_i32(int32_t val)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_new_i32();
tcg_gen_movi_i32(t0, val);
return t0;
}
-TCGv tcg_const_i64(int64_t val)
+TCGv_i64 tcg_const_i64(int64_t val)
{
- TCGv t0;
- t0 = tcg_temp_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_new_i64();
tcg_gen_movi_i64(t0, val);
return t0;
}
-TCGv tcg_const_local_i32(int32_t val)
+TCGv_i32 tcg_const_local_i32(int32_t val)
{
- TCGv t0;
- t0 = tcg_temp_local_new(TCG_TYPE_I32);
+ TCGv_i32 t0;
+ t0 = tcg_temp_local_new_i32();
tcg_gen_movi_i32(t0, val);
return t0;
}
-TCGv tcg_const_local_i64(int64_t val)
+TCGv_i64 tcg_const_local_i64(int64_t val)
{
- TCGv t0;
- t0 = tcg_temp_local_new(TCG_TYPE_I64);
+ TCGv_i64 t0;
+ t0 = tcg_temp_local_new_i64();
tcg_gen_movi_i64(t0, val);
return t0;
}
s->nb_helpers++;
}
-static inline TCGType tcg_get_base_type(TCGContext *s, TCGv arg)
-{
- return s->temps[GET_TCGV(arg)].base_type;
-}
-
-static void tcg_gen_call_internal(TCGContext *s, TCGv func,
- unsigned int flags,
- unsigned int nb_rets, const TCGv *rets,
- unsigned int nb_params, const TCGv *params)
-{
- int i;
- *gen_opc_ptr++ = INDEX_op_call;
- *gen_opparam_ptr++ = (nb_rets << 16) | (nb_params + 1);
- for(i = 0; i < nb_rets; i++) {
- *gen_opparam_ptr++ = GET_TCGV(rets[i]);
- }
- for(i = 0; i < nb_params; i++) {
- *gen_opparam_ptr++ = GET_TCGV(params[i]);
- }
- *gen_opparam_ptr++ = GET_TCGV(func);
-
- *gen_opparam_ptr++ = flags;
- /* total parameters, needed to go backward in the instruction stream */
- *gen_opparam_ptr++ = 1 + nb_rets + nb_params + 3;
-}
-
-
-#if TCG_TARGET_REG_BITS < 64
/* Note: we convert the 64 bit args to 32 bit and do some alignment
and endian swap. Maybe it would be better to do the alignment
and endian swap in tcg_reg_alloc_call(). */
-void tcg_gen_call(TCGContext *s, TCGv func, unsigned int flags,
- unsigned int nb_rets, const TCGv *rets,
- unsigned int nb_params, const TCGv *args1)
+void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
+ int sizemask, TCGArg ret, int nargs, TCGArg *args)
{
- TCGv ret, *args2, rets_2[2], arg;
- int j, i, call_type;
-
- if (nb_rets == 1) {
- ret = rets[0];
- if (tcg_get_base_type(s, ret) == TCG_TYPE_I64) {
- nb_rets = 2;
+ int call_type;
+ int i;
+ int real_args;
+ int nb_rets;
+ TCGArg *nparam;
+ *gen_opc_ptr++ = INDEX_op_call;
+ nparam = gen_opparam_ptr++;
+ call_type = (flags & TCG_CALL_TYPE_MASK);
+ if (ret != TCG_CALL_DUMMY_ARG) {
+#if TCG_TARGET_REG_BITS < 64
+ if (sizemask & 1) {
#ifdef TCG_TARGET_WORDS_BIGENDIAN
- rets_2[0] = TCGV_HIGH(ret);
- rets_2[1] = ret;
+ *gen_opparam_ptr++ = ret + 1;
+ *gen_opparam_ptr++ = ret;
#else
- rets_2[0] = ret;
- rets_2[1] = TCGV_HIGH(ret);
+ *gen_opparam_ptr++ = ret;
+ *gen_opparam_ptr++ = ret + 1;
#endif
- rets = rets_2;
+ nb_rets = 2;
+ } else
+#endif
+ {
+ *gen_opparam_ptr++ = ret;
+ nb_rets = 1;
}
+ } else {
+ nb_rets = 0;
}
- args2 = alloca((nb_params * 3) * sizeof(TCGv));
- j = 0;
- call_type = (flags & TCG_CALL_TYPE_MASK);
- for(i = 0; i < nb_params; i++) {
- arg = args1[i];
- if (tcg_get_base_type(s, arg) == TCG_TYPE_I64) {
+ real_args = 0;
+ for (i = 0; i < nargs; i++) {
+#if TCG_TARGET_REG_BITS < 64
+ if (sizemask & (2 << i)) {
#ifdef TCG_TARGET_I386
/* REGPARM case: if the third parameter is 64 bit, it is
allocated on the stack */
- if (j == 2 && call_type == TCG_CALL_TYPE_REGPARM) {
+ if (i == 2 && call_type == TCG_CALL_TYPE_REGPARM) {
call_type = TCG_CALL_TYPE_REGPARM_2;
flags = (flags & ~TCG_CALL_TYPE_MASK) | call_type;
}
- args2[j++] = arg;
- args2[j++] = TCGV_HIGH(arg);
-#else
+#endif
#ifdef TCG_TARGET_CALL_ALIGN_ARGS
/* some targets want aligned 64 bit args */
- if (j & 1) {
- args2[j++] = TCG_CALL_DUMMY_ARG;
+ if (real_args & 1) {
+ *gen_opparam_ptr++ = TCG_CALL_DUMMY_ARG;
+ real_args++;
}
#endif
#ifdef TCG_TARGET_WORDS_BIGENDIAN
- args2[j++] = TCGV_HIGH(arg);
- args2[j++] = arg;
+ *gen_opparam_ptr++ = args[i] + 1;
+ *gen_opparam_ptr++ = args[i];
#else
- args2[j++] = arg;
- args2[j++] = TCGV_HIGH(arg);
+ *gen_opparam_ptr++ = args[i];
+ *gen_opparam_ptr++ = args[i] + 1;
#endif
+ real_args += 2;
+ } else
#endif
- } else {
- args2[j++] = arg;
+ {
+ *gen_opparam_ptr++ = args[i];
+ real_args++;
}
}
- tcg_gen_call_internal(s, func, flags,
- nb_rets, rets, j, args2);
-}
-#else
-void tcg_gen_call(TCGContext *s, TCGv func, unsigned int flags,
- unsigned int nb_rets, const TCGv *rets,
- unsigned int nb_params, const TCGv *args1)
-{
- tcg_gen_call_internal(s, func, flags,
- nb_rets, rets, nb_params, args1);
+ *gen_opparam_ptr++ = GET_TCGV_PTR(func);
+
+ *gen_opparam_ptr++ = flags;
+
+ *nparam = (nb_rets << 16) | (real_args + 1);
+
+ /* total parameters, needed to go backward in the instruction stream */
+ *gen_opparam_ptr++ = 1 + nb_rets + real_args + 3;
}
-#endif
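/* A standalone sketch, inferred only from the (sizemask & 1) and
   (sizemask & (2 << i)) tests in tcg_gen_callN() above (not a documented
   API): bit 0 of sizemask marks a 64-bit return value and bit (i + 1)
   marks a 64-bit argument i; the bits are only consulted when
   TCG_TARGET_REG_BITS < 64. */
#include <assert.h>

static int make_sizemask(int ret_is_64, const int *arg_is_64, int nargs)
{
    int sizemask = ret_is_64 ? 1 : 0;
    int i;

    for (i = 0; i < nargs; i++) {
        if (arg_is_64[i]) {
            sizemask |= 2 << i;   /* same bit the argument loop above tests */
        }
    }
    return sizemask;
}

int main(void)
{
    /* 64-bit return with arguments (i32, i64): bits 0 and 2 set. */
    int arg_is_64[2] = { 0, 1 };
    assert(make_sizemask(1, arg_is_64, 2) == 0x5);
    return 0;
}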
#if TCG_TARGET_REG_BITS == 32
-void tcg_gen_shifti_i64(TCGv ret, TCGv arg1,
+void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
int c, int right, int arith)
{
if (c == 0) {
- tcg_gen_mov_i32(ret, arg1);
+ tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
} else if (c >= 32) {
c -= 32;
if (right) {
if (arith) {
- tcg_gen_sari_i32(ret, TCGV_HIGH(arg1), c);
+ tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
} else {
- tcg_gen_shri_i32(ret, TCGV_HIGH(arg1), c);
+ tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
} else {
- tcg_gen_shli_i32(TCGV_HIGH(ret), arg1, c);
- tcg_gen_movi_i32(ret, 0);
+ tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
+ tcg_gen_movi_i32(TCGV_LOW(ret), 0);
}
} else {
- TCGv t0, t1;
+ TCGv_i32 t0, t1;
- t0 = tcg_temp_new(TCG_TYPE_I32);
- t1 = tcg_temp_new(TCG_TYPE_I32);
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
if (right) {
tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
if (arith)
tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
- else
+ else
tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
- tcg_gen_shri_i32(ret, arg1, c);
- tcg_gen_or_i32(ret, ret, t0);
+ tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
+ tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
} else {
- tcg_gen_shri_i32(t0, arg1, 32 - c);
+ tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
/* Note: ret can be the same as arg1, so we use t1 */
- tcg_gen_shli_i32(t1, arg1, c);
+ tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
- tcg_gen_mov_i32(ret, t1);
+ tcg_gen_mov_i32(TCGV_LOW(ret), t1);
}
- tcg_temp_free(t0);
- tcg_temp_free(t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
}
#endif
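/* A standalone sketch (not part of the patch) of the 32-bit decomposition
   tcg_gen_shifti_i64() performs, shown for the logical right shift:
   c == 0 copies both halves, c >= 32 shifts the high half into the low
   half, and 0 < c < 32 combines bits from both halves. */
#include <stdint.h>
#include <assert.h>

static uint64_t shr64_via_halves(uint32_t low, uint32_t high, int c)
{
    uint32_t rl, rh;

    if (c == 0) {
        rl = low;
        rh = high;
    } else if (c >= 32) {
        rl = high >> (c - 32);
        rh = 0;
    } else {
        rl = (low >> c) | (high << (32 - c));
        rh = high >> c;
    }
    return ((uint64_t)rh << 32) | rl;
}

int main(void)
{
    uint64_t x = 0x0123456789abcdefULL;
    int c;

    for (c = 0; c < 64; c++) {
        assert(shr64_via_halves((uint32_t)x, (uint32_t)(x >> 32), c) == x >> c);
    }
    return 0;
}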
return buf;
}
-char *tcg_get_arg_str(TCGContext *s, char *buf, int buf_size, TCGv arg)
+char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg)
+{
+ return tcg_get_arg_str_idx(s, buf, buf_size, GET_TCGV_I32(arg));
+}
+
+char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg)
{
- return tcg_get_arg_str_idx(s, buf, buf_size, GET_TCGV(arg));
+ return tcg_get_arg_str_idx(s, buf, buf_size, GET_TCGV_I64(arg));
}
static int helper_cmp(const void *p1, const void *p2)
especially on targets with braindamaged ABIs (e.g. i386).
We use plain int by default to avoid this runtime overhead.
Users of tcg_gen_* don't need to know about any of this, and should
- treat TCGv as an opaque type. */
+ treat TCGv as an opaque type.
+ In addition we do typechecking for different types of variables. TCGv_i32
+ and TCGv_i64 are 32/64-bit variables respectively. TCGv and TCGv_ptr
+ are aliases for target_ulong and host pointer sized values respectively.
+ */
//#define DEBUG_TCGV 1
typedef struct
{
int n;
-} TCGv;
+} TCGv_i32;
-#define MAKE_TCGV(i) __extension__ \
- ({ TCGv make_tcgv_tmp = {i}; make_tcgv_tmp;})
-#define GET_TCGV(t) ((t).n)
+typedef struct
+{
+ int n;
+} TCGv_i64;
+
+#define MAKE_TCGV_I32(i) __extension__ \
+ ({ TCGv_i32 make_tcgv_tmp = {i}; make_tcgv_tmp;})
+#define MAKE_TCGV_I64(i) __extension__ \
+ ({ TCGv_i64 make_tcgv_tmp = {i}; make_tcgv_tmp;})
+#define GET_TCGV_I32(t) ((t).n)
+#define GET_TCGV_I64(t) ((t).n)
#if TCG_TARGET_REG_BITS == 32
-#define TCGV_HIGH(t) MAKE_TCGV(GET_TCGV(t) + 1)
+#define TCGV_LOW(t) MAKE_TCGV_I32(GET_TCGV_I64(t))
+#define TCGV_HIGH(t) MAKE_TCGV_I32(GET_TCGV_I64(t) + 1)
#endif
#else /* !DEBUG_TCGV */
-typedef int TCGv;
-#define MAKE_TCGV(x) (x)
-#define GET_TCGV(t) (t)
+typedef int TCGv_i32;
+typedef int TCGv_i64;
+#define MAKE_TCGV_I32(x) (x)
+#define MAKE_TCGV_I64(x) (x)
+#define GET_TCGV_I32(t) (t)
+#define GET_TCGV_I64(t) (t)
#if TCG_TARGET_REG_BITS == 32
+#define TCGV_LOW(t) (t)
#define TCGV_HIGH(t) ((t) + 1)
#endif
#endif /* DEBUG_TCGV */
/* Dummy definition to avoid compiler warnings. */
-#define TCGV_UNUSED(x) x = MAKE_TCGV(-1)
+#define TCGV_UNUSED_I32(x) x = MAKE_TCGV_I32(-1)
+#define TCGV_UNUSED_I64(x) x = MAKE_TCGV_I64(-1)
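/* A standalone sketch (not QEMU code) of why the DEBUG_TCGV definitions
   above catch type errors: two distinct single-member struct types are not
   interchangeable, so handing an i64 handle to a function expecting an i32
   handle is a compile-time error, while the plain-int build under the
   #else accepts the mix-up silently. */
typedef struct { int n; } handle_i32;
typedef struct { int n; } handle_i64;

static int use_i32(handle_i32 h)
{
    return h.n;
}

int main(void)
{
    handle_i32 a = { 1 };
    handle_i64 b = { 2 };

    (void)b;
    return use_i32(a) - 1;
    /* use_i32(b); would be rejected by the compiler under this scheme. */
}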
/* call flags */
#define TCG_CALL_TYPE_MASK 0x000f
#define TCG_CALL_PURE 0x0010
/* used to align parameters */
-#define TCG_CALL_DUMMY_TCGV MAKE_TCGV(-1)
+#define TCG_CALL_DUMMY_TCGV MAKE_TCGV_I32(-1)
#define TCG_CALL_DUMMY_ARG ((TCGArg)(-1))
typedef enum {
void tcg_set_frame(TCGContext *s, int reg,
tcg_target_long start, tcg_target_long size);
-TCGv tcg_global_reg_new(TCGType type, int reg, const char *name);
-TCGv tcg_global_reg2_new_hack(TCGType type, int reg1, int reg2,
- const char *name);
-TCGv tcg_global_mem_new(TCGType type, int reg, tcg_target_long offset,
- const char *name);
-TCGv tcg_temp_new_internal(TCGType type, int temp_local);
-static inline TCGv tcg_temp_new(TCGType type)
+TCGv_i64 tcg_global_reg2_new_hack(TCGType type, int reg1, int reg2,
+ const char *name);
+
+TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name);
+TCGv_i32 tcg_global_mem_new_i32(int reg, tcg_target_long offset,
+ const char *name);
+TCGv_i32 tcg_temp_new_internal_i32(int temp_local);
+static inline TCGv_i32 tcg_temp_new_i32(void)
+{
+ return tcg_temp_new_internal_i32(0);
+}
+static inline TCGv_i32 tcg_temp_local_new_i32(void)
+{
+ return tcg_temp_new_internal_i32(1);
+}
+void tcg_temp_free_i32(TCGv_i32 arg);
+char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg);
+
+TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name);
+TCGv_i64 tcg_global_mem_new_i64(int reg, tcg_target_long offset,
+ const char *name);
+TCGv_i64 tcg_temp_new_internal_i64(int temp_local);
+static inline TCGv_i64 tcg_temp_new_i64(void)
{
- return tcg_temp_new_internal(type, 0);
+ return tcg_temp_new_internal_i64(0);
}
-static inline TCGv tcg_temp_local_new(TCGType type)
+static inline TCGv_i64 tcg_temp_local_new_i64(void)
{
- return tcg_temp_new_internal(type, 1);
+ return tcg_temp_new_internal_i64(1);
}
-void tcg_temp_free(TCGv arg);
-char *tcg_get_arg_str(TCGContext *s, char *buf, int buf_size, TCGv arg);
+void tcg_temp_free_i64(TCGv_i64 arg);
+char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg);
+
void tcg_dump_info(FILE *f,
int (*cpu_fprintf)(FILE *f, const char *fmt, ...));
void tcg_add_target_add_op_defs(const TCGTargetOpDef *tdefs);
-void tcg_gen_call(TCGContext *s, TCGv func, unsigned int flags,
- unsigned int nb_rets, const TCGv *rets,
- unsigned int nb_params, const TCGv *args1);
-void tcg_gen_shifti_i64(TCGv ret, TCGv arg1,
- int c, int right, int arith);
-
-/* only used for debugging purposes */
-void tcg_register_helper(void *func, const char *name);
-#define TCG_HELPER(func) tcg_register_helper(func, #func)
-const char *tcg_helper_get_name(TCGContext *s, void *func);
-void tcg_dump_ops(TCGContext *s, FILE *outfile);
-
-void dump_ops(const uint16_t *opc_buf, const TCGArg *opparam_buf);
-TCGv tcg_const_i32(int32_t val);
-TCGv tcg_const_i64(int64_t val);
-TCGv tcg_const_local_i32(int32_t val);
-TCGv tcg_const_local_i64(int64_t val);
-
#if TCG_TARGET_REG_BITS == 32
#define tcg_const_ptr tcg_const_i32
#define tcg_add_ptr tcg_add_i32
#define tcg_sub_ptr tcg_sub_i32
+#define TCGv_ptr TCGv_i32
+#define GET_TCGV_PTR GET_TCGV_I32
+#define tcg_global_reg_new_ptr tcg_global_reg_new_i32
+#define tcg_global_mem_new_ptr tcg_global_mem_new_i32
+#define tcg_temp_new_ptr tcg_temp_new_i32
+#define tcg_temp_free_ptr tcg_temp_free_i32
#else
#define tcg_const_ptr tcg_const_i64
#define tcg_add_ptr tcg_add_i64
#define tcg_sub_ptr tcg_sub_i64
+#define TCGv_ptr TCGv_i64
+#define GET_TCGV_PTR GET_TCGV_I64
+#define tcg_global_reg_new_ptr tcg_global_reg_new_i64
+#define tcg_global_mem_new_ptr tcg_global_mem_new_i64
+#define tcg_temp_new_ptr tcg_temp_new_i64
+#define tcg_temp_free_ptr tcg_temp_free_i64
#endif
+void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
+ int sizemask, TCGArg ret, int nargs, TCGArg *args);
+
+void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
+ int c, int right, int arith);
+
+/* only used for debugging purposes */
+void tcg_register_helper(void *func, const char *name);
+const char *tcg_helper_get_name(TCGContext *s, void *func);
+void tcg_dump_ops(TCGContext *s, FILE *outfile);
+
+void dump_ops(const uint16_t *opc_buf, const TCGArg *opparam_buf);
+TCGv_i32 tcg_const_i32(int32_t val);
+TCGv_i64 tcg_const_i64(int64_t val);
+TCGv_i32 tcg_const_local_i32(int32_t val);
+TCGv_i64 tcg_const_local_i64(int64_t val);
+
void tcg_out_reloc(TCGContext *s, uint8_t *code_ptr, int type,
int label_index, long addend);
const TCGArg *tcg_gen_code_op(TCGContext *s, int opc, const TCGArg *args1,