#define OPC_ADD_GvEv (OPC_ARITH_GvEv | (ARITH_ADD << 3))
#define OPC_BSWAP (0xc8 | P_EXT)
#define OPC_CALL_Jz (0xe8)
+#define OPC_CMOVCC (0x40 | P_EXT) /* ... plus condition code */
#define OPC_CMP_GvEv (OPC_ARITH_GvEv | (ARITH_CMP << 3))
#define OPC_DEC_r32 (0x48)
#define OPC_IMUL_GvEv (0xaf | P_EXT)
}
#endif
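+/* Emit a conditional move: compare c1 against c2 (an immediate when
+   const_c2 is non-zero), then CMOVcc v1 into dest if cond holds.  */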
+static void tcg_out_movcond32(TCGContext *s, TCGCond cond, TCGArg dest,
+                              TCGArg c1, TCGArg c2, int const_c2,
+                              TCGArg v1)
+{
+    tcg_out_cmp(s, c1, c2, const_c2, 0);
+    tcg_out_modrm(s, OPC_CMOVCC | tcg_cond_to_jcc[cond], dest, v1);
+}
+
+#if TCG_TARGET_REG_BITS == 64
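+/* As tcg_out_movcond32, but with REX.W so the compare and the CMOVcc
+   use 64-bit operands.  */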
+static void tcg_out_movcond64(TCGContext *s, TCGCond cond, TCGArg dest,
+                              TCGArg c1, TCGArg c2, int const_c2,
+                              TCGArg v1)
+{
+    tcg_out_cmp(s, c1, c2, const_c2, P_REXW);
+    tcg_out_modrm(s, OPC_CMOVCC | tcg_cond_to_jcc[cond] | P_REXW, dest, v1);
+}
+#endif
+
static void tcg_out_branch(TCGContext *s, int call, tcg_target_long dest)
{
    tcg_target_long disp = dest - (tcg_target_long)s->code_ptr - 5;
        tcg_out_setcond32(s, args[3], args[0], args[1],
                          args[2], const_args[2]);
        break;
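+    /* movcond operands: args[0]=dest, args[1]=c1, args[2]=c2,
+       args[3]=v1 (value if true), args[4]=v2 (value if false),
+       args[5]=cond.  */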
+    case INDEX_op_movcond_i32:
+        tcg_out_movcond32(s, args[5], args[0], args[1],
+                          args[2], const_args[2], args[3]);
+        break;

    OP_32_64(bswap16):
        tcg_out_rolw_8(s, args[0]);
        tcg_out_setcond64(s, args[3], args[0], args[1],
                          args[2], const_args[2]);
        break;
+    case INDEX_op_movcond_i64:
+        tcg_out_movcond64(s, args[5], args[0], args[1],
+                          args[2], const_args[2], args[3]);
+        break;

    case INDEX_op_bswap64_i64:
        tcg_out_bswap64(s, args[0]);
    { INDEX_op_setcond_i32, { "q", "r", "ri" } },
    { INDEX_op_deposit_i32, { "Q", "0", "Q" } },
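+    /* v2 is constrained to the output register ("0"): CMOV only moves v1
+       into dest when the condition holds, so dest must already hold v2.  */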
+    { INDEX_op_movcond_i32, { "r", "r", "ri", "r", "0" } },
#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_mulu2_i32, { "a", "d", "a", "r" } },
    { INDEX_op_ext32u_i64, { "r", "r" } },
    { INDEX_op_deposit_i64, { "Q", "0", "Q" } },
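+    /* "e" additionally allows c2 to be a sign-extended 32-bit immediate.  */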
+    { INDEX_op_movcond_i64, { "r", "r", "re", "r", "0" } },
#endif
#if TCG_TARGET_REG_BITS == 64
#define TCG_TARGET_HAS_nand_i32 0
#define TCG_TARGET_HAS_nor_i32 0
#define TCG_TARGET_HAS_deposit_i32 1
+#if defined(__x86_64__) || defined(__i686__)
+/* Use cmov only if the compiler is already doing so. */
+#define TCG_TARGET_HAS_movcond_i32 1
+#else
#define TCG_TARGET_HAS_movcond_i32 0
+#endif
#if TCG_TARGET_REG_BITS == 64
#define TCG_TARGET_HAS_div2_i64 1
#define TCG_TARGET_HAS_nand_i64 0
#define TCG_TARGET_HAS_nor_i64 0
#define TCG_TARGET_HAS_deposit_i64 1
-#define TCG_TARGET_HAS_movcond_i64 0
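+/* CMOV has always been present on x86-64, so no compiler check is needed.  */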
+#define TCG_TARGET_HAS_movcond_i64 1
#endif
#define TCG_TARGET_deposit_i32_valid(ofs, len) \