tcg_temp_free_i32(t0);
}
-#ifdef TCG_TARGET_HAS_div_i32
static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
-}
-#elif defined(TCG_TARGET_HAS_div2_i32)
-static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- TCGv_i32 t0;
- t0 = tcg_temp_new_i32();
- tcg_gen_sari_i32(t0, arg1, 31);
- tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
- tcg_temp_free_i32(t0);
-}
-
-static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- TCGv_i32 t0;
- t0 = tcg_temp_new_i32();
- tcg_gen_sari_i32(t0, arg1, 31);
- tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
- tcg_temp_free_i32(t0);
-}
-
-static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- TCGv_i32 t0;
- t0 = tcg_temp_new_i32();
- tcg_gen_movi_i32(t0, 0);
- tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
- tcg_temp_free_i32(t0);
-}
-
-static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- TCGv_i32 t0;
- t0 = tcg_temp_new_i32();
- tcg_gen_movi_i32(t0, 0);
- tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
- tcg_temp_free_i32(t0);
-}
-#else
-static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
-{
- int sizemask = 0;
- /* Return value and both arguments are 32-bit and signed. */
- sizemask |= tcg_gen_sizemask(0, 0, 1);
- sizemask |= tcg_gen_sizemask(1, 0, 1);
- sizemask |= tcg_gen_sizemask(2, 0, 1);
-
- tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i32) {
+ tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i32) {
+ TCGv_i32 t0 = tcg_temp_new_i32();
+ tcg_gen_sari_i32(t0, arg1, 31);
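+ /* div2 takes the dividend as a (low, high) pair plus the divisor and
+    yields the quotient in its first output and the remainder in its
+    second; t0 supplies the sign-extended high half of the dividend.  */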
+ tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
+ } else {
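+ /* Each tcg_gen_sizemask(arg, is_64bit, is_signed) call records the
+    width and signedness of one helper argument; index 0 is the
+    return value.  */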
+ int sizemask = 0;
+ /* Return value and both arguments are 32-bit and signed. */
+ sizemask |= tcg_gen_sizemask(0, 0, 1);
+ sizemask |= tcg_gen_sizemask(1, 0, 1);
+ sizemask |= tcg_gen_sizemask(2, 0, 1);
+ tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2);
+ }
}
static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 32-bit and signed. */
- sizemask |= tcg_gen_sizemask(0, 0, 1);
- sizemask |= tcg_gen_sizemask(1, 0, 1);
- sizemask |= tcg_gen_sizemask(2, 0, 1);
-
- tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i32) {
+ tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i32) {
+ TCGv_i32 t0 = tcg_temp_new_i32();
+ tcg_gen_sari_i32(t0, arg1, 31);
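+ /* Same expansion as division, but with the outputs swapped: the
+    quotient goes to the scratch temp and the remainder to ret.  */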
+ tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 32-bit and signed. */
+ sizemask |= tcg_gen_sizemask(0, 0, 1);
+ sizemask |= tcg_gen_sizemask(1, 0, 1);
+ sizemask |= tcg_gen_sizemask(2, 0, 1);
+ tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2);
+ }
}
static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 32-bit and unsigned. */
- sizemask |= tcg_gen_sizemask(0, 0, 0);
- sizemask |= tcg_gen_sizemask(1, 0, 0);
- sizemask |= tcg_gen_sizemask(2, 0, 0);
-
- tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i32) {
+ tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i32) {
+ TCGv_i32 t0 = tcg_temp_new_i32();
+ tcg_gen_movi_i32(t0, 0);
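+ /* For unsigned division the high half of the dividend is simply zero.  */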
+ tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 32-bit and unsigned. */
+ sizemask |= tcg_gen_sizemask(0, 0, 0);
+ sizemask |= tcg_gen_sizemask(1, 0, 0);
+ sizemask |= tcg_gen_sizemask(2, 0, 0);
+ tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2);
+ }
}
static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 32-bit and unsigned. */
- sizemask |= tcg_gen_sizemask(0, 0, 0);
- sizemask |= tcg_gen_sizemask(1, 0, 0);
- sizemask |= tcg_gen_sizemask(2, 0, 0);
-
- tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i32) {
+ tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i32) {
+ TCGv_i32 t0 = tcg_temp_new_i32();
+ tcg_gen_movi_i32(t0, 0);
+ tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i32(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 32-bit and unsigned. */
+ sizemask |= tcg_gen_sizemask(0, 0, 0);
+ sizemask |= tcg_gen_sizemask(1, 0, 0);
+ sizemask |= tcg_gen_sizemask(2, 0, 0);
+ tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
+ }
}
-#endif
#if TCG_TARGET_REG_BITS == 32
tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
}
-#ifdef TCG_TARGET_HAS_div_i64
-static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
-}
-#elif defined(TCG_TARGET_HAS_div2_i64)
-static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- TCGv_i64 t0;
- t0 = tcg_temp_new_i64();
- tcg_gen_sari_i64(t0, arg1, 63);
- tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
- tcg_temp_free_i64(t0);
-}
-
-static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- TCGv_i64 t0;
- t0 = tcg_temp_new_i64();
- tcg_gen_sari_i64(t0, arg1, 63);
- tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
- tcg_temp_free_i64(t0);
-}
-
-static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- TCGv_i64 t0;
- t0 = tcg_temp_new_i64();
- tcg_gen_movi_i64(t0, 0);
- tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
- tcg_temp_free_i64(t0);
-}
-
-static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- TCGv_i64 t0;
- t0 = tcg_temp_new_i64();
- tcg_gen_movi_i64(t0, 0);
- tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
- tcg_temp_free_i64(t0);
-}
-#else
static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 64-bit and signed. */
- sizemask |= tcg_gen_sizemask(0, 1, 1);
- sizemask |= tcg_gen_sizemask(1, 1, 1);
- sizemask |= tcg_gen_sizemask(2, 1, 1);
-
- tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i64) {
+ tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i64) {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ tcg_gen_sari_i64(t0, arg1, 63);
+ tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 64-bit and signed. */
+ sizemask |= tcg_gen_sizemask(0, 1, 1);
+ sizemask |= tcg_gen_sizemask(1, 1, 1);
+ sizemask |= tcg_gen_sizemask(2, 1, 1);
+ tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
+ }
}
static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 64-bit and signed. */
- sizemask |= tcg_gen_sizemask(0, 1, 1);
- sizemask |= tcg_gen_sizemask(1, 1, 1);
- sizemask |= tcg_gen_sizemask(2, 1, 1);
-
- tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i64) {
+ tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i64) {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ tcg_gen_sari_i64(t0, arg1, 63);
+ tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 64-bit and signed. */
+ sizemask |= tcg_gen_sizemask(0, 1, 1);
+ sizemask |= tcg_gen_sizemask(1, 1, 1);
+ sizemask |= tcg_gen_sizemask(2, 1, 1);
+ tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
+ }
}
static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 64-bit and unsigned. */
- sizemask |= tcg_gen_sizemask(0, 1, 0);
- sizemask |= tcg_gen_sizemask(1, 1, 0);
- sizemask |= tcg_gen_sizemask(2, 1, 0);
-
- tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i64) {
+ tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i64) {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ tcg_gen_movi_i64(t0, 0);
+ tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 64-bit and unsigned. */
+ sizemask |= tcg_gen_sizemask(0, 1, 0);
+ sizemask |= tcg_gen_sizemask(1, 1, 0);
+ sizemask |= tcg_gen_sizemask(2, 1, 0);
+ tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
+ }
}
static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- int sizemask = 0;
- /* Return value and both arguments are 64-bit and unsigned. */
- sizemask |= tcg_gen_sizemask(0, 1, 0);
- sizemask |= tcg_gen_sizemask(1, 1, 0);
- sizemask |= tcg_gen_sizemask(2, 1, 0);
-
- tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
+ if (TCG_TARGET_HAS_div_i64) {
+ tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
+ } else if (TCG_TARGET_HAS_div2_i64) {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ tcg_gen_movi_i64(t0, 0);
+ tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
+ tcg_temp_free_i64(t0);
+ } else {
+ int sizemask = 0;
+ /* Return value and both arguments are 64-bit and unsigned. */
+ sizemask |= tcg_gen_sizemask(0, 1, 0);
+ sizemask |= tcg_gen_sizemask(1, 1, 0);
+ sizemask |= tcg_gen_sizemask(2, 1, 0);
+ tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
+ }
}
-#endif
-
-#endif
+#endif /* TCG_TARGET_REG_BITS == 32 */
static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_ext8s_i32
- tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
-#else
- tcg_gen_shli_i32(ret, arg, 24);
- tcg_gen_sari_i32(ret, ret, 24);
-#endif
+ if (TCG_TARGET_HAS_ext8s_i32) {
+ tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
+ } else {
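+ /* Fallback: shift the byte to the top and arithmetic-shift it back
+    down to sign-extend from bit 7.  */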
+ tcg_gen_shli_i32(ret, arg, 24);
+ tcg_gen_sari_i32(ret, ret, 24);
+ }
}
static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_ext16s_i32
- tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
-#else
- tcg_gen_shli_i32(ret, arg, 16);
- tcg_gen_sari_i32(ret, ret, 16);
-#endif
+ if (TCG_TARGET_HAS_ext16s_i32) {
+ tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
+ } else {
+ tcg_gen_shli_i32(ret, arg, 16);
+ tcg_gen_sari_i32(ret, ret, 16);
+ }
}
static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_ext8u_i32
- tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
-#else
- tcg_gen_andi_i32(ret, arg, 0xffu);
-#endif
+ if (TCG_TARGET_HAS_ext8u_i32) {
+ tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
+ } else {
+ tcg_gen_andi_i32(ret, arg, 0xffu);
+ }
}
static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_ext16u_i32
- tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
-#else
- tcg_gen_andi_i32(ret, arg, 0xffffu);
-#endif
+ if (TCG_TARGET_HAS_ext16u_i32) {
+ tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
+ } else {
+ tcg_gen_andi_i32(ret, arg, 0xffffu);
+ }
}
/* Note: we assume the two high bytes are set to zero */
static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_bswap16_i32
- tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
-#else
- TCGv_i32 t0 = tcg_temp_new_i32();
+ if (TCG_TARGET_HAS_bswap16_i32) {
+ tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
+ } else {
+ TCGv_i32 t0 = tcg_temp_new_i32();
- tcg_gen_ext8u_i32(t0, arg);
- tcg_gen_shli_i32(t0, t0, 8);
- tcg_gen_shri_i32(ret, arg, 8);
- tcg_gen_or_i32(ret, ret, t0);
- tcg_temp_free_i32(t0);
-#endif
+ tcg_gen_ext8u_i32(t0, arg);
+ tcg_gen_shli_i32(t0, t0, 8);
+ tcg_gen_shri_i32(ret, arg, 8);
+ tcg_gen_or_i32(ret, ret, t0);
+ tcg_temp_free_i32(t0);
+ }
}
static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_bswap32_i32
- tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
-#else
- TCGv_i32 t0, t1;
- t0 = tcg_temp_new_i32();
- t1 = tcg_temp_new_i32();
+ if (TCG_TARGET_HAS_bswap32_i32) {
+ tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
+ } else {
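+ /* Fallback: move each byte into place with shifts and masks and OR
+    the pieces together.  */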
+ TCGv_i32 t0, t1;
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
- tcg_gen_shli_i32(t0, arg, 24);
+ tcg_gen_shli_i32(t0, arg, 24);
- tcg_gen_andi_i32(t1, arg, 0x0000ff00);
- tcg_gen_shli_i32(t1, t1, 8);
- tcg_gen_or_i32(t0, t0, t1);
+ tcg_gen_andi_i32(t1, arg, 0x0000ff00);
+ tcg_gen_shli_i32(t1, t1, 8);
+ tcg_gen_or_i32(t0, t0, t1);
- tcg_gen_shri_i32(t1, arg, 8);
- tcg_gen_andi_i32(t1, t1, 0x0000ff00);
- tcg_gen_or_i32(t0, t0, t1);
+ tcg_gen_shri_i32(t1, arg, 8);
+ tcg_gen_andi_i32(t1, t1, 0x0000ff00);
+ tcg_gen_or_i32(t0, t0, t1);
- tcg_gen_shri_i32(t1, arg, 24);
- tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free_i32(t0);
- tcg_temp_free_i32(t1);
-#endif
+ tcg_gen_shri_i32(t1, arg, 24);
+ tcg_gen_or_i32(ret, t0, t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
+ }
}
#if TCG_TARGET_REG_BITS == 32
static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_ext8s_i64
- tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
-#else
- tcg_gen_shli_i64(ret, arg, 56);
- tcg_gen_sari_i64(ret, ret, 56);
-#endif
+ if (TCG_TARGET_HAS_ext8s_i64) {
+ tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
+ } else {
+ tcg_gen_shli_i64(ret, arg, 56);
+ tcg_gen_sari_i64(ret, ret, 56);
+ }
}
static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_ext16s_i64
- tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
-#else
- tcg_gen_shli_i64(ret, arg, 48);
- tcg_gen_sari_i64(ret, ret, 48);
-#endif
+ if (TCG_TARGET_HAS_ext16s_i64) {
+ tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
+ } else {
+ tcg_gen_shli_i64(ret, arg, 48);
+ tcg_gen_sari_i64(ret, ret, 48);
+ }
}
static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_ext32s_i64
- tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
-#else
- tcg_gen_shli_i64(ret, arg, 32);
- tcg_gen_sari_i64(ret, ret, 32);
-#endif
+ if (TCG_TARGET_HAS_ext32s_i64) {
+ tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
+ } else {
+ tcg_gen_shli_i64(ret, arg, 32);
+ tcg_gen_sari_i64(ret, ret, 32);
+ }
}
static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_ext8u_i64
- tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
-#else
- tcg_gen_andi_i64(ret, arg, 0xffu);
-#endif
+ if (TCG_TARGET_HAS_ext8u_i64) {
+ tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
+ } else {
+ tcg_gen_andi_i64(ret, arg, 0xffu);
+ }
}
static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_ext16u_i64
- tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
-#else
- tcg_gen_andi_i64(ret, arg, 0xffffu);
-#endif
+ if (TCG_TARGET_HAS_ext16u_i64) {
+ tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
+ } else {
+ tcg_gen_andi_i64(ret, arg, 0xffffu);
+ }
}
static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_ext32u_i64
- tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
-#else
- tcg_gen_andi_i64(ret, arg, 0xffffffffu);
-#endif
+ if (TCG_TARGET_HAS_ext32u_i64) {
+ tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
+ } else {
+ tcg_gen_andi_i64(ret, arg, 0xffffffffu);
+ }
}
/* Note: we assume the target supports move between 32 and 64 bit
/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_bswap16_i64
- tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
-#else
- TCGv_i64 t0 = tcg_temp_new_i64();
+ if (TCG_TARGET_HAS_bswap16_i64) {
+ tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
+ } else {
+ TCGv_i64 t0 = tcg_temp_new_i64();
- tcg_gen_ext8u_i64(t0, arg);
- tcg_gen_shli_i64(t0, t0, 8);
- tcg_gen_shri_i64(ret, arg, 8);
- tcg_gen_or_i64(ret, ret, t0);
- tcg_temp_free_i64(t0);
-#endif
+ tcg_gen_ext8u_i64(t0, arg);
+ tcg_gen_shli_i64(t0, t0, 8);
+ tcg_gen_shri_i64(ret, arg, 8);
+ tcg_gen_or_i64(ret, ret, t0);
+ tcg_temp_free_i64(t0);
+ }
}
/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_bswap32_i64
- tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
-#else
- TCGv_i64 t0, t1;
- t0 = tcg_temp_new_i64();
- t1 = tcg_temp_new_i64();
+ if (TCG_TARGET_HAS_bswap32_i64) {
+ tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
+ } else {
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
- tcg_gen_shli_i64(t0, arg, 24);
- tcg_gen_ext32u_i64(t0, t0);
+ tcg_gen_shli_i64(t0, arg, 24);
+ tcg_gen_ext32u_i64(t0, t0);
- tcg_gen_andi_i64(t1, arg, 0x0000ff00);
- tcg_gen_shli_i64(t1, t1, 8);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_andi_i64(t1, arg, 0x0000ff00);
+ tcg_gen_shli_i64(t1, t1, 8);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_shri_i64(t1, arg, 8);
- tcg_gen_andi_i64(t1, t1, 0x0000ff00);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_shri_i64(t1, arg, 8);
+ tcg_gen_andi_i64(t1, t1, 0x0000ff00);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_shri_i64(t1, arg, 24);
- tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
-#endif
+ tcg_gen_shri_i64(t1, arg, 24);
+ tcg_gen_or_i64(ret, t0, t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
+ }
}
static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_bswap64_i64
- tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
-#else
- TCGv_i64 t0 = tcg_temp_new_i64();
- TCGv_i64 t1 = tcg_temp_new_i64();
+ if (TCG_TARGET_HAS_bswap64_i64) {
+ tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
+ } else {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ TCGv_i64 t1 = tcg_temp_new_i64();
- tcg_gen_shli_i64(t0, arg, 56);
+ tcg_gen_shli_i64(t0, arg, 56);
- tcg_gen_andi_i64(t1, arg, 0x0000ff00);
- tcg_gen_shli_i64(t1, t1, 40);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_andi_i64(t1, arg, 0x0000ff00);
+ tcg_gen_shli_i64(t1, t1, 40);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_andi_i64(t1, arg, 0x00ff0000);
- tcg_gen_shli_i64(t1, t1, 24);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_andi_i64(t1, arg, 0x00ff0000);
+ tcg_gen_shli_i64(t1, t1, 24);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_andi_i64(t1, arg, 0xff000000);
- tcg_gen_shli_i64(t1, t1, 8);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_andi_i64(t1, arg, 0xff000000);
+ tcg_gen_shli_i64(t1, t1, 8);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_shri_i64(t1, arg, 8);
- tcg_gen_andi_i64(t1, t1, 0xff000000);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_shri_i64(t1, arg, 8);
+ tcg_gen_andi_i64(t1, t1, 0xff000000);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_shri_i64(t1, arg, 24);
- tcg_gen_andi_i64(t1, t1, 0x00ff0000);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_shri_i64(t1, arg, 24);
+ tcg_gen_andi_i64(t1, t1, 0x00ff0000);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_shri_i64(t1, arg, 40);
- tcg_gen_andi_i64(t1, t1, 0x0000ff00);
- tcg_gen_or_i64(t0, t0, t1);
+ tcg_gen_shri_i64(t1, arg, 40);
+ tcg_gen_andi_i64(t1, t1, 0x0000ff00);
+ tcg_gen_or_i64(t0, t0, t1);
- tcg_gen_shri_i64(t1, arg, 56);
- tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
-#endif
+ tcg_gen_shri_i64(t1, arg, 56);
+ tcg_gen_or_i64(ret, t0, t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
+ }
}
#endif
static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_neg_i32
- tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
-#else
- TCGv_i32 t0 = tcg_const_i32(0);
- tcg_gen_sub_i32(ret, t0, arg);
- tcg_temp_free_i32(t0);
-#endif
+ if (TCG_TARGET_HAS_neg_i32) {
+ tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
+ } else {
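+ /* Fallback: negate by subtracting from a zero constant.  */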
+ TCGv_i32 t0 = tcg_const_i32(0);
+ tcg_gen_sub_i32(ret, t0, arg);
+ tcg_temp_free_i32(t0);
+ }
}
static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_neg_i64
- tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
-#else
- TCGv_i64 t0 = tcg_const_i64(0);
- tcg_gen_sub_i64(ret, t0, arg);
- tcg_temp_free_i64(t0);
-#endif
+ if (TCG_TARGET_HAS_neg_i64) {
+ tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
+ } else {
+ TCGv_i64 t0 = tcg_const_i64(0);
+ tcg_gen_sub_i64(ret, t0, arg);
+ tcg_temp_free_i64(t0);
+ }
}
static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
-#ifdef TCG_TARGET_HAS_not_i32
- tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
-#else
- tcg_gen_xori_i32(ret, arg, -1);
-#endif
+ if (TCG_TARGET_HAS_not_i32) {
+ tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
+ } else {
+ tcg_gen_xori_i32(ret, arg, -1);
+ }
}
static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
-#ifdef TCG_TARGET_HAS_not_i64
- tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
-#elif defined(TCG_TARGET_HAS_not_i32) && TCG_TARGET_REG_BITS == 32
+#if TCG_TARGET_REG_BITS == 64
+ if (TCG_TARGET_HAS_not_i64) {
+ tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
+ } else {
+ tcg_gen_xori_i64(ret, arg, -1);
+ }
+#else
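+ /* On 32-bit hosts, apply the 32-bit op to the low and high halves.  */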
tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
-#else
- tcg_gen_xori_i64(ret, arg, -1);
#endif
}
tcg_gen_op1_i32(INDEX_op_discard, arg);
}
-#if TCG_TARGET_REG_BITS == 32
static inline void tcg_gen_discard_i64(TCGv_i64 arg)
{
+#if TCG_TARGET_REG_BITS == 32
tcg_gen_discard_i32(TCGV_LOW(arg));
tcg_gen_discard_i32(TCGV_HIGH(arg));
-}
#else
-static inline void tcg_gen_discard_i64(TCGv_i64 arg)
-{
tcg_gen_op1_i64(INDEX_op_discard, arg);
-}
#endif
+}
static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_andc_i32
- tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
-#else
- TCGv_i32 t0;
- t0 = tcg_temp_new_i32();
- tcg_gen_not_i32(t0, arg2);
- tcg_gen_and_i32(ret, arg1, t0);
- tcg_temp_free_i32(t0);
-#endif
+ if (TCG_TARGET_HAS_andc_i32) {
+ tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
+ } else {
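+ /* Fallback: invert arg2 into a temp, then AND with arg1.  */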
+ TCGv_i32 t0 = tcg_temp_new_i32();
+ tcg_gen_not_i32(t0, arg2);
+ tcg_gen_and_i32(ret, arg1, t0);
+ tcg_temp_free_i32(t0);
+ }
}
static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_andc_i64
- tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
-#elif defined(TCG_TARGET_HAS_andc_i32) && TCG_TARGET_REG_BITS == 32
+#if TCG_TARGET_REG_BITS == 64
+ if (TCG_TARGET_HAS_andc_i64) {
+ tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
+ } else {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ tcg_gen_not_i64(t0, arg2);
+ tcg_gen_and_i64(ret, arg1, t0);
+ tcg_temp_free_i64(t0);
+ }
+#else
tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
-#else
- TCGv_i64 t0;
- t0 = tcg_temp_new_i64();
- tcg_gen_not_i64(t0, arg2);
- tcg_gen_and_i64(ret, arg1, t0);
- tcg_temp_free_i64(t0);
#endif
}
static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_eqv_i32
- tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
-#else
- tcg_gen_xor_i32(ret, arg1, arg2);
- tcg_gen_not_i32(ret, ret);
-#endif
+ if (TCG_TARGET_HAS_eqv_i32) {
+ tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
+ } else {
+ tcg_gen_xor_i32(ret, arg1, arg2);
+ tcg_gen_not_i32(ret, ret);
+ }
}
static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_eqv_i64
- tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
-#elif defined(TCG_TARGET_HAS_eqv_i32) && TCG_TARGET_REG_BITS == 32
+#if TCG_TARGET_REG_BITS == 64
+ if (TCG_TARGET_HAS_eqv_i64) {
+ tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_xor_i64(ret, arg1, arg2);
+ tcg_gen_not_i64(ret, ret);
+ }
+#else
tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
-#else
- tcg_gen_xor_i64(ret, arg1, arg2);
- tcg_gen_not_i64(ret, ret);
#endif
}
static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_nand_i32
- tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
-#else
- tcg_gen_and_i32(ret, arg1, arg2);
- tcg_gen_not_i32(ret, ret);
-#endif
+ if (TCG_TARGET_HAS_nand_i32) {
+ tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
+ } else {
+ tcg_gen_and_i32(ret, arg1, arg2);
+ tcg_gen_not_i32(ret, ret);
+ }
}
static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_nand_i64
- tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
-#elif defined(TCG_TARGET_HAS_nand_i32) && TCG_TARGET_REG_BITS == 32
+#if TCG_TARGET_REG_BITS == 64
+ if (TCG_TARGET_HAS_nand_i64) {
+ tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_and_i64(ret, arg1, arg2);
+ tcg_gen_not_i64(ret, ret);
+ }
+#else
tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
-#else
- tcg_gen_and_i64(ret, arg1, arg2);
- tcg_gen_not_i64(ret, ret);
#endif
}
static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_nor_i32
- tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
-#else
- tcg_gen_or_i32(ret, arg1, arg2);
- tcg_gen_not_i32(ret, ret);
-#endif
+ if (TCG_TARGET_HAS_nor_i32) {
+ tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
+ } else {
+ tcg_gen_or_i32(ret, arg1, arg2);
+ tcg_gen_not_i32(ret, ret);
+ }
}
static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_nor_i64
- tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
-#elif defined(TCG_TARGET_HAS_nor_i32) && TCG_TARGET_REG_BITS == 32
+#if TCG_TARGET_REG_BITS == 64
+ if (TCG_TARGET_HAS_nor_i64) {
+ tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_or_i64(ret, arg1, arg2);
+ tcg_gen_not_i64(ret, ret);
+ }
+#else
tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
-#else
- tcg_gen_or_i64(ret, arg1, arg2);
- tcg_gen_not_i64(ret, ret);
#endif
}
static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_orc_i32
- tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
-#else
- TCGv_i32 t0;
- t0 = tcg_temp_new_i32();
- tcg_gen_not_i32(t0, arg2);
- tcg_gen_or_i32(ret, arg1, t0);
- tcg_temp_free_i32(t0);
-#endif
+ if (TCG_TARGET_HAS_orc_i32) {
+ tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
+ } else {
+ TCGv_i32 t0 = tcg_temp_new_i32();
+ tcg_gen_not_i32(t0, arg2);
+ tcg_gen_or_i32(ret, arg1, t0);
+ tcg_temp_free_i32(t0);
+ }
}
static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_orc_i64
- tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
-#elif defined(TCG_TARGET_HAS_orc_i32) && TCG_TARGET_REG_BITS == 32
+#if TCG_TARGET_REG_BITS == 64
+ if (TCG_TARGET_HAS_orc_i64) {
+ tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
+ } else {
+ TCGv_i64 t0 = tcg_temp_new_i64();
+ tcg_gen_not_i64(t0, arg2);
+ tcg_gen_or_i64(ret, arg1, t0);
+ tcg_temp_free_i64(t0);
+ }
+#else
tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
-#else
- TCGv_i64 t0;
- t0 = tcg_temp_new_i64();
- tcg_gen_not_i64(t0, arg2);
- tcg_gen_or_i64(ret, arg1, t0);
- tcg_temp_free_i64(t0);
#endif
}
static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_rot_i32
- tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
-#else
- TCGv_i32 t0, t1;
+ if (TCG_TARGET_HAS_rot_i32) {
+ tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
+ } else {
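+ /* Fallback: combine a left shift by arg2 with a right shift by
+    32 - arg2.  */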
+ TCGv_i32 t0, t1;
- t0 = tcg_temp_new_i32();
- t1 = tcg_temp_new_i32();
- tcg_gen_shl_i32(t0, arg1, arg2);
- tcg_gen_subfi_i32(t1, 32, arg2);
- tcg_gen_shr_i32(t1, arg1, t1);
- tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free_i32(t0);
- tcg_temp_free_i32(t1);
-#endif
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
+ tcg_gen_shl_i32(t0, arg1, arg2);
+ tcg_gen_subfi_i32(t1, 32, arg2);
+ tcg_gen_shr_i32(t1, arg1, t1);
+ tcg_gen_or_i32(ret, t0, t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
+ }
}
static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_rot_i64
- tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
-#else
- TCGv_i64 t0, t1;
-
- t0 = tcg_temp_new_i64();
- t1 = tcg_temp_new_i64();
- tcg_gen_shl_i64(t0, arg1, arg2);
- tcg_gen_subfi_i64(t1, 64, arg2);
- tcg_gen_shr_i64(t1, arg1, t1);
- tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
-#endif
+ if (TCG_TARGET_HAS_rot_i64) {
+ tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
+ } else {
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
+ tcg_gen_shl_i64(t0, arg1, arg2);
+ tcg_gen_subfi_i64(t1, 64, arg2);
+ tcg_gen_shr_i64(t1, arg1, t1);
+ tcg_gen_or_i64(ret, t0, t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
+ }
}
static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
- } else {
-#ifdef TCG_TARGET_HAS_rot_i32
+ } else if (TCG_TARGET_HAS_rot_i32) {
TCGv_i32 t0 = tcg_const_i32(arg2);
tcg_gen_rotl_i32(ret, arg1, t0);
tcg_temp_free_i32(t0);
-#else
+ } else {
TCGv_i32 t0, t1;
t0 = tcg_temp_new_i32();
t1 = tcg_temp_new_i32();
tcg_gen_or_i32(ret, t0, t1);
tcg_temp_free_i32(t0);
tcg_temp_free_i32(t1);
-#endif
}
}
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
- } else {
-#ifdef TCG_TARGET_HAS_rot_i64
+ } else if (TCG_TARGET_HAS_rot_i64) {
TCGv_i64 t0 = tcg_const_i64(arg2);
tcg_gen_rotl_i64(ret, arg1, t0);
tcg_temp_free_i64(t0);
-#else
+ } else {
TCGv_i64 t0, t1;
t0 = tcg_temp_new_i64();
t1 = tcg_temp_new_i64();
tcg_gen_or_i64(ret, t0, t1);
tcg_temp_free_i64(t0);
tcg_temp_free_i64(t1);
-#endif
}
}
static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
-#ifdef TCG_TARGET_HAS_rot_i32
- tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
-#else
- TCGv_i32 t0, t1;
+ if (TCG_TARGET_HAS_rot_i32) {
+ tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
+ } else {
+ TCGv_i32 t0, t1;
- t0 = tcg_temp_new_i32();
- t1 = tcg_temp_new_i32();
- tcg_gen_shr_i32(t0, arg1, arg2);
- tcg_gen_subfi_i32(t1, 32, arg2);
- tcg_gen_shl_i32(t1, arg1, t1);
- tcg_gen_or_i32(ret, t0, t1);
- tcg_temp_free_i32(t0);
- tcg_temp_free_i32(t1);
-#endif
+ t0 = tcg_temp_new_i32();
+ t1 = tcg_temp_new_i32();
+ tcg_gen_shr_i32(t0, arg1, arg2);
+ tcg_gen_subfi_i32(t1, 32, arg2);
+ tcg_gen_shl_i32(t1, arg1, t1);
+ tcg_gen_or_i32(ret, t0, t1);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
+ }
}
static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
-#ifdef TCG_TARGET_HAS_rot_i64
- tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
-#else
- TCGv_i64 t0, t1;
-
- t0 = tcg_temp_new_i64();
- t1 = tcg_temp_new_i64();
- tcg_gen_shr_i64(t0, arg1, arg2);
- tcg_gen_subfi_i64(t1, 64, arg2);
- tcg_gen_shl_i64(t1, arg1, t1);
- tcg_gen_or_i64(ret, t0, t1);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
-#endif
+ if (TCG_TARGET_HAS_rot_i64) {
+ tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
+ } else {
+ TCGv_i64 t0, t1;
+ t0 = tcg_temp_new_i64();
+ t1 = tcg_temp_new_i64();
+ tcg_gen_shr_i64(t0, arg1, arg2);
+ tcg_gen_subfi_i64(t1, 64, arg2);
+ tcg_gen_shl_i64(t1, arg1, t1);
+ tcg_gen_or_i64(ret, t0, t1);
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
+ }
}
static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
TCGv_i32 arg2, unsigned int ofs,
unsigned int len)
{
-#ifdef TCG_TARGET_HAS_deposit_i32
- tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
-#else
- uint32_t mask = (1u << len) - 1;
- TCGv_i32 t1 = tcg_temp_new_i32 ();
+ if (TCG_TARGET_HAS_deposit_i32) {
+ tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
+ } else {
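+ /* Fallback: mask the low len bits of arg2, shift them to ofs, and
+    merge them into a field cleared in arg1.  */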
+ uint32_t mask = (1u << len) - 1;
+ TCGv_i32 t1 = tcg_temp_new_i32();
- tcg_gen_andi_i32(t1, arg2, mask);
- tcg_gen_shli_i32(t1, t1, ofs);
- tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
- tcg_gen_or_i32(ret, ret, t1);
+ tcg_gen_andi_i32(t1, arg2, mask);
+ tcg_gen_shli_i32(t1, t1, ofs);
+ tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
+ tcg_gen_or_i32(ret, ret, t1);
- tcg_temp_free_i32(t1);
-#endif
+ tcg_temp_free_i32(t1);
+ }
}
static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
TCGv_i64 arg2, unsigned int ofs,
unsigned int len)
{
-#ifdef TCG_TARGET_HAS_deposit_i64
- tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
-#else
- uint64_t mask = (1ull << len) - 1;
- TCGv_i64 t1 = tcg_temp_new_i64 ();
+ if (TCG_TARGET_HAS_deposit_i64) {
+ tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
+ } else {
+ uint64_t mask = (1ull << len) - 1;
+ TCGv_i64 t1 = tcg_temp_new_i64();
- tcg_gen_andi_i64(t1, arg2, mask);
- tcg_gen_shli_i64(t1, t1, ofs);
- tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
- tcg_gen_or_i64(ret, ret, t1);
+ tcg_gen_andi_i64(t1, arg2, mask);
+ tcg_gen_shli_i64(t1, t1, ofs);
+ tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
+ tcg_gen_or_i64(ret, ret, t1);
- tcg_temp_free_i64(t1);
-#endif
+ tcg_temp_free_i64(t1);
+ }
}
/***************************************/
DEF(jmp, 0, 1, 0, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS)
DEF(br, 0, 0, 1, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS)
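+/* Opcodes a backend does not implement keep their slot in the table but
+   are flagged TCG_OPF_NOT_PRESENT, so the opcode numbering no longer
+   depends on the configuration; on 32-bit hosts every 64-bit opcode is
+   marked not present as well.  */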
+#define IMPL(X) (X ? 0 : TCG_OPF_NOT_PRESENT)
+#if TCG_TARGET_REG_BITS == 32
+# define IMPL64 TCG_OPF_64BIT | TCG_OPF_NOT_PRESENT
+#else
+# define IMPL64 TCG_OPF_64BIT
+#endif
+
DEF(mov_i32, 1, 1, 0, 0)
DEF(movi_i32, 1, 0, 1, 0)
DEF(setcond_i32, 1, 2, 1, 0)
DEF(add_i32, 1, 2, 0, 0)
DEF(sub_i32, 1, 2, 0, 0)
DEF(mul_i32, 1, 2, 0, 0)
-#ifdef TCG_TARGET_HAS_div_i32
-DEF(div_i32, 1, 2, 0, 0)
-DEF(divu_i32, 1, 2, 0, 0)
-DEF(rem_i32, 1, 2, 0, 0)
-DEF(remu_i32, 1, 2, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_div2_i32
-DEF(div2_i32, 2, 3, 0, 0)
-DEF(divu2_i32, 2, 3, 0, 0)
-#endif
+DEF(div_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_div_i32))
+DEF(divu_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_div_i32))
+DEF(rem_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_div_i32))
+DEF(remu_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_div_i32))
+DEF(div2_i32, 2, 3, 0, IMPL(TCG_TARGET_HAS_div2_i32))
+DEF(divu2_i32, 2, 3, 0, IMPL(TCG_TARGET_HAS_div2_i32))
DEF(and_i32, 1, 2, 0, 0)
DEF(or_i32, 1, 2, 0, 0)
DEF(xor_i32, 1, 2, 0, 0)
DEF(shl_i32, 1, 2, 0, 0)
DEF(shr_i32, 1, 2, 0, 0)
DEF(sar_i32, 1, 2, 0, 0)
-#ifdef TCG_TARGET_HAS_rot_i32
-DEF(rotl_i32, 1, 2, 0, 0)
-DEF(rotr_i32, 1, 2, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_deposit_i32
-DEF(deposit_i32, 1, 2, 2, 0)
-#endif
+DEF(rotl_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_rot_i32))
+DEF(rotr_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_rot_i32))
+DEF(deposit_i32, 1, 2, 2, IMPL(TCG_TARGET_HAS_deposit_i32))
DEF(brcond_i32, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS)
-#if TCG_TARGET_REG_BITS == 32
-DEF(add2_i32, 2, 4, 0, 0)
-DEF(sub2_i32, 2, 4, 0, 0)
-DEF(brcond2_i32, 0, 4, 2, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS)
-DEF(mulu2_i32, 2, 2, 0, 0)
-DEF(setcond2_i32, 1, 4, 1, 0)
-#endif
-#ifdef TCG_TARGET_HAS_ext8s_i32
-DEF(ext8s_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i32
-DEF(ext16s_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i32
-DEF(ext8u_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i32
-DEF(ext16u_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_bswap16_i32
-DEF(bswap16_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_bswap32_i32
-DEF(bswap32_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_not_i32
-DEF(not_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_neg_i32
-DEF(neg_i32, 1, 1, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_andc_i32
-DEF(andc_i32, 1, 2, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_orc_i32
-DEF(orc_i32, 1, 2, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_eqv_i32
-DEF(eqv_i32, 1, 2, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_nand_i32
-DEF(nand_i32, 1, 2, 0, 0)
-#endif
-#ifdef TCG_TARGET_HAS_nor_i32
-DEF(nor_i32, 1, 2, 0, 0)
-#endif
-#if TCG_TARGET_REG_BITS == 64
-DEF(mov_i64, 1, 1, 0, TCG_OPF_64BIT)
-DEF(movi_i64, 1, 0, 1, TCG_OPF_64BIT)
-DEF(setcond_i64, 1, 2, 1, TCG_OPF_64BIT)
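+/* The double-word ops below are only needed when host registers are
+   32 bits wide; on 64-bit hosts they are marked not present.  */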
+DEF(add2_i32, 2, 4, 0, IMPL(TCG_TARGET_REG_BITS == 32))
+DEF(sub2_i32, 2, 4, 0, IMPL(TCG_TARGET_REG_BITS == 32))
+DEF(brcond2_i32, 0, 4, 2,
+ TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS | IMPL(TCG_TARGET_REG_BITS == 32))
+DEF(mulu2_i32, 2, 2, 0, IMPL(TCG_TARGET_REG_BITS == 32))
+DEF(setcond2_i32, 1, 4, 1, IMPL(TCG_TARGET_REG_BITS == 32))
+
+DEF(ext8s_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext8s_i32))
+DEF(ext16s_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext16s_i32))
+DEF(ext8u_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext8u_i32))
+DEF(ext16u_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext16u_i32))
+DEF(bswap16_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_bswap16_i32))
+DEF(bswap32_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_bswap32_i32))
+DEF(not_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_not_i32))
+DEF(neg_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_neg_i32))
+DEF(andc_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_andc_i32))
+DEF(orc_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_orc_i32))
+DEF(eqv_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_eqv_i32))
+DEF(nand_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_nand_i32))
+DEF(nor_i32, 1, 2, 0, IMPL(TCG_TARGET_HAS_nor_i32))
+
+DEF(mov_i64, 1, 1, 0, IMPL64)
+DEF(movi_i64, 1, 0, 1, IMPL64)
+DEF(setcond_i64, 1, 2, 1, IMPL64)
/* load/store */
-DEF(ld8u_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(ld8s_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(ld16u_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(ld16s_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(ld32u_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(ld32s_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(ld_i64, 1, 1, 1, TCG_OPF_64BIT)
-DEF(st8_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
-DEF(st16_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
-DEF(st32_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
-DEF(st_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
+DEF(ld8u_i64, 1, 1, 1, IMPL64)
+DEF(ld8s_i64, 1, 1, 1, IMPL64)
+DEF(ld16u_i64, 1, 1, 1, IMPL64)
+DEF(ld16s_i64, 1, 1, 1, IMPL64)
+DEF(ld32u_i64, 1, 1, 1, IMPL64)
+DEF(ld32s_i64, 1, 1, 1, IMPL64)
+DEF(ld_i64, 1, 1, 1, IMPL64)
+DEF(st8_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | IMPL64)
+DEF(st16_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | IMPL64)
+DEF(st32_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | IMPL64)
+DEF(st_i64, 0, 2, 1, TCG_OPF_SIDE_EFFECTS | IMPL64)
/* arith */
-DEF(add_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(sub_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(mul_i64, 1, 2, 0, TCG_OPF_64BIT)
-#ifdef TCG_TARGET_HAS_div_i64
-DEF(div_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(divu_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(rem_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(remu_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_div2_i64
-DEF(div2_i64, 2, 3, 0, TCG_OPF_64BIT)
-DEF(divu2_i64, 2, 3, 0, TCG_OPF_64BIT)
-#endif
-DEF(and_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(or_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(xor_i64, 1, 2, 0, TCG_OPF_64BIT)
+DEF(add_i64, 1, 2, 0, IMPL64)
+DEF(sub_i64, 1, 2, 0, IMPL64)
+DEF(mul_i64, 1, 2, 0, IMPL64)
+DEF(div_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_div_i64))
+DEF(divu_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_div_i64))
+DEF(rem_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_div_i64))
+DEF(remu_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_div_i64))
+DEF(div2_i64, 2, 3, 0, IMPL64 | IMPL(TCG_TARGET_HAS_div2_i64))
+DEF(divu2_i64, 2, 3, 0, IMPL64 | IMPL(TCG_TARGET_HAS_div2_i64))
+DEF(and_i64, 1, 2, 0, IMPL64)
+DEF(or_i64, 1, 2, 0, IMPL64)
+DEF(xor_i64, 1, 2, 0, IMPL64)
/* shifts/rotates */
-DEF(shl_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(shr_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(sar_i64, 1, 2, 0, TCG_OPF_64BIT)
-#ifdef TCG_TARGET_HAS_rot_i64
-DEF(rotl_i64, 1, 2, 0, TCG_OPF_64BIT)
-DEF(rotr_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_deposit_i64
-DEF(deposit_i64, 1, 2, 2, TCG_OPF_64BIT)
-#endif
+DEF(shl_i64, 1, 2, 0, IMPL64)
+DEF(shr_i64, 1, 2, 0, IMPL64)
+DEF(sar_i64, 1, 2, 0, IMPL64)
+DEF(rotl_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_rot_i64))
+DEF(rotr_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_rot_i64))
+DEF(deposit_i64, 1, 2, 2, IMPL64 | IMPL(TCG_TARGET_HAS_deposit_i64))
-DEF(brcond_i64, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS | TCG_OPF_64BIT)
-#ifdef TCG_TARGET_HAS_ext8s_i64
-DEF(ext8s_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_ext16s_i64
-DEF(ext16s_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_ext32s_i64
-DEF(ext32s_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_ext8u_i64
-DEF(ext8u_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_ext16u_i64
-DEF(ext16u_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_ext32u_i64
-DEF(ext32u_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_bswap16_i64
-DEF(bswap16_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_bswap32_i64
-DEF(bswap32_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_bswap64_i64
-DEF(bswap64_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_not_i64
-DEF(not_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_neg_i64
-DEF(neg_i64, 1, 1, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_andc_i64
-DEF(andc_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_orc_i64
-DEF(orc_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_eqv_i64
-DEF(eqv_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_nand_i64
-DEF(nand_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#ifdef TCG_TARGET_HAS_nor_i64
-DEF(nor_i64, 1, 2, 0, TCG_OPF_64BIT)
-#endif
-#endif
+DEF(brcond_i64, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_SIDE_EFFECTS | IMPL64)
+DEF(ext8s_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext8s_i64))
+DEF(ext16s_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext16s_i64))
+DEF(ext32s_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext32s_i64))
+DEF(ext8u_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext8u_i64))
+DEF(ext16u_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext16u_i64))
+DEF(ext32u_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext32u_i64))
+DEF(bswap16_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_bswap16_i64))
+DEF(bswap32_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_bswap32_i64))
+DEF(bswap64_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_bswap64_i64))
+DEF(not_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_not_i64))
+DEF(neg_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_neg_i64))
+DEF(andc_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_andc_i64))
+DEF(orc_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_orc_i64))
+DEF(eqv_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_eqv_i64))
+DEF(nand_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_nand_i64))
+DEF(nor_i64, 1, 2, 0, IMPL64 | IMPL(TCG_TARGET_HAS_nor_i64))
/* QEMU specific */
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
#endif /* TCG_TARGET_REG_BITS != 32 */
+#undef IMPL
+#undef IMPL64
#undef DEF