#define Ed { OP_E, d_mode }
#define Edq { OP_E, dq_mode }
#define Edqw { OP_E, dqw_mode }
+#define EdqwS { OP_E, dqw_swap_mode }
#define Edqb { OP_E, dqb_mode }
+#define Edb { OP_E, db_mode }
+#define Edw { OP_E, dw_mode }
#define Edqd { OP_E, dqd_mode }
#define Eq { OP_E, q_mode }
#define indirEv { OP_indirE, stack_v_mode }
#define XMask { OP_Mask, mask_mode }
#define MaskG { OP_G, mask_mode }
#define MaskE { OP_E, mask_mode }
+#define MaskBDE { OP_E, mask_bd_mode }
#define MaskR { OP_R, mask_mode }
#define MaskVex { OP_VEX, mask_mode }
dq_mode,
/* registers like dq_mode, memory like w_mode. */
 dqw_mode,
+ /* Like dqw_mode, but the source/destination operands are swapped
+ when suffix_always is in effect. */
+ dqw_swap_mode,
bnd_mode,
/* 4- or 6-byte pointer operand */
f_mode,
o_mode,
/* registers like dq_mode, memory like b_mode. */
dqb_mode,
+ /* registers like d_mode, memory like b_mode. */
+ db_mode,
+ /* registers like d_mode, memory like w_mode. */
+ dw_mode,
/* registers like dq_mode, memory like d_mode. */
dqd_mode,
/* normal vex mode */
/* Mask register operand. */
mask_mode,
+ /* Mask register operand; the memory form is byte- or dword-sized
+ depending on EVEX.W. */
+ mask_bd_mode,
es_reg,
cs_reg,
REG_XOP_TBM_01,
REG_XOP_TBM_02,
+ REG_EVEX_0F71,
REG_EVEX_0F72,
REG_EVEX_0F73,
REG_EVEX_0F38C6,
MOD_0F1A_PREFIX_0,
MOD_0F1B_PREFIX_0,
MOD_0F1B_PREFIX_1,
- MOD_0F20,
- MOD_0F21,
- MOD_0F22,
- MOD_0F23,
MOD_0F24,
MOD_0F26,
MOD_0F2B_PREFIX_0,
PREFIX_0FAE_REG_1,
PREFIX_0FAE_REG_2,
PREFIX_0FAE_REG_3,
+ PREFIX_0FAE_REG_6,
PREFIX_0FAE_REG_7,
+ PREFIX_RM_0_0FAE_REG_7,
PREFIX_0FB8,
PREFIX_0FBC,
PREFIX_0FBD,
PREFIX_VEX_0F45,
PREFIX_VEX_0F46,
PREFIX_VEX_0F47,
+ PREFIX_VEX_0F4A,
PREFIX_VEX_0F4B,
PREFIX_VEX_0F51,
PREFIX_VEX_0F52,
PREFIX_VEX_0F92,
PREFIX_VEX_0F93,
PREFIX_VEX_0F98,
+ PREFIX_VEX_0F99,
PREFIX_VEX_0FC2,
PREFIX_VEX_0FC4,
PREFIX_VEX_0FC5,
PREFIX_VEX_0F3A21,
PREFIX_VEX_0F3A22,
PREFIX_VEX_0F3A30,
+ PREFIX_VEX_0F3A31,
PREFIX_VEX_0F3A32,
+ PREFIX_VEX_0F3A33,
PREFIX_VEX_0F3A38,
PREFIX_VEX_0F3A39,
PREFIX_VEX_0F3A40,
PREFIX_EVEX_0F2E,
PREFIX_EVEX_0F2F,
PREFIX_EVEX_0F51,
+ PREFIX_EVEX_0F54,
+ PREFIX_EVEX_0F55,
+ PREFIX_EVEX_0F56,
+ PREFIX_EVEX_0F57,
PREFIX_EVEX_0F58,
PREFIX_EVEX_0F59,
PREFIX_EVEX_0F5A,
PREFIX_EVEX_0F5D,
PREFIX_EVEX_0F5E,
PREFIX_EVEX_0F5F,
+ PREFIX_EVEX_0F60,
+ PREFIX_EVEX_0F61,
PREFIX_EVEX_0F62,
+ PREFIX_EVEX_0F63,
+ PREFIX_EVEX_0F64,
+ PREFIX_EVEX_0F65,
PREFIX_EVEX_0F66,
+ PREFIX_EVEX_0F67,
+ PREFIX_EVEX_0F68,
+ PREFIX_EVEX_0F69,
PREFIX_EVEX_0F6A,
+ PREFIX_EVEX_0F6B,
PREFIX_EVEX_0F6C,
PREFIX_EVEX_0F6D,
PREFIX_EVEX_0F6E,
PREFIX_EVEX_0F6F,
PREFIX_EVEX_0F70,
+ PREFIX_EVEX_0F71_REG_2,
+ PREFIX_EVEX_0F71_REG_4,
+ PREFIX_EVEX_0F71_REG_6,
PREFIX_EVEX_0F72_REG_0,
PREFIX_EVEX_0F72_REG_1,
PREFIX_EVEX_0F72_REG_2,
PREFIX_EVEX_0F72_REG_4,
PREFIX_EVEX_0F72_REG_6,
PREFIX_EVEX_0F73_REG_2,
+ PREFIX_EVEX_0F73_REG_3,
PREFIX_EVEX_0F73_REG_6,
+ PREFIX_EVEX_0F73_REG_7,
+ PREFIX_EVEX_0F74,
+ PREFIX_EVEX_0F75,
PREFIX_EVEX_0F76,
PREFIX_EVEX_0F78,
PREFIX_EVEX_0F79,
PREFIX_EVEX_0F7E,
PREFIX_EVEX_0F7F,
PREFIX_EVEX_0FC2,
+ PREFIX_EVEX_0FC4,
+ PREFIX_EVEX_0FC5,
PREFIX_EVEX_0FC6,
+ PREFIX_EVEX_0FD1,
PREFIX_EVEX_0FD2,
PREFIX_EVEX_0FD3,
PREFIX_EVEX_0FD4,
+ PREFIX_EVEX_0FD5,
PREFIX_EVEX_0FD6,
+ PREFIX_EVEX_0FD8,
+ PREFIX_EVEX_0FD9,
+ PREFIX_EVEX_0FDA,
PREFIX_EVEX_0FDB,
+ PREFIX_EVEX_0FDC,
+ PREFIX_EVEX_0FDD,
+ PREFIX_EVEX_0FDE,
PREFIX_EVEX_0FDF,
+ PREFIX_EVEX_0FE0,
+ PREFIX_EVEX_0FE1,
PREFIX_EVEX_0FE2,
+ PREFIX_EVEX_0FE3,
+ PREFIX_EVEX_0FE4,
+ PREFIX_EVEX_0FE5,
PREFIX_EVEX_0FE6,
PREFIX_EVEX_0FE7,
+ PREFIX_EVEX_0FE8,
+ PREFIX_EVEX_0FE9,
+ PREFIX_EVEX_0FEA,
PREFIX_EVEX_0FEB,
+ PREFIX_EVEX_0FEC,
+ PREFIX_EVEX_0FED,
+ PREFIX_EVEX_0FEE,
PREFIX_EVEX_0FEF,
+ PREFIX_EVEX_0FF1,
PREFIX_EVEX_0FF2,
PREFIX_EVEX_0FF3,
PREFIX_EVEX_0FF4,
+ PREFIX_EVEX_0FF5,
+ PREFIX_EVEX_0FF6,
+ PREFIX_EVEX_0FF8,
+ PREFIX_EVEX_0FF9,
PREFIX_EVEX_0FFA,
PREFIX_EVEX_0FFB,
+ PREFIX_EVEX_0FFC,
+ PREFIX_EVEX_0FFD,
PREFIX_EVEX_0FFE,
+ PREFIX_EVEX_0F3800,
+ PREFIX_EVEX_0F3804,
+ PREFIX_EVEX_0F380B,
PREFIX_EVEX_0F380C,
PREFIX_EVEX_0F380D,
+ PREFIX_EVEX_0F3810,
PREFIX_EVEX_0F3811,
PREFIX_EVEX_0F3812,
PREFIX_EVEX_0F3813,
PREFIX_EVEX_0F3819,
PREFIX_EVEX_0F381A,
PREFIX_EVEX_0F381B,
+ PREFIX_EVEX_0F381C,
+ PREFIX_EVEX_0F381D,
PREFIX_EVEX_0F381E,
PREFIX_EVEX_0F381F,
+ PREFIX_EVEX_0F3820,
PREFIX_EVEX_0F3821,
PREFIX_EVEX_0F3822,
PREFIX_EVEX_0F3823,
PREFIX_EVEX_0F3824,
PREFIX_EVEX_0F3825,
+ PREFIX_EVEX_0F3826,
PREFIX_EVEX_0F3827,
PREFIX_EVEX_0F3828,
PREFIX_EVEX_0F3829,
PREFIX_EVEX_0F382A,
+ PREFIX_EVEX_0F382B,
PREFIX_EVEX_0F382C,
PREFIX_EVEX_0F382D,
+ PREFIX_EVEX_0F3830,
PREFIX_EVEX_0F3831,
PREFIX_EVEX_0F3832,
PREFIX_EVEX_0F3833,
PREFIX_EVEX_0F3835,
PREFIX_EVEX_0F3836,
PREFIX_EVEX_0F3837,
+ PREFIX_EVEX_0F3838,
PREFIX_EVEX_0F3839,
PREFIX_EVEX_0F383A,
PREFIX_EVEX_0F383B,
+ PREFIX_EVEX_0F383C,
PREFIX_EVEX_0F383D,
+ PREFIX_EVEX_0F383E,
PREFIX_EVEX_0F383F,
PREFIX_EVEX_0F3840,
PREFIX_EVEX_0F3842,
PREFIX_EVEX_0F385B,
PREFIX_EVEX_0F3864,
PREFIX_EVEX_0F3865,
+ PREFIX_EVEX_0F3866,
+ PREFIX_EVEX_0F3875,
PREFIX_EVEX_0F3876,
PREFIX_EVEX_0F3877,
+ PREFIX_EVEX_0F3878,
+ PREFIX_EVEX_0F3879,
+ PREFIX_EVEX_0F387A,
+ PREFIX_EVEX_0F387B,
PREFIX_EVEX_0F387C,
+ PREFIX_EVEX_0F387D,
PREFIX_EVEX_0F387E,
PREFIX_EVEX_0F387F,
+ PREFIX_EVEX_0F3883,
PREFIX_EVEX_0F3888,
PREFIX_EVEX_0F3889,
PREFIX_EVEX_0F388A,
PREFIX_EVEX_0F388B,
+ PREFIX_EVEX_0F388D,
PREFIX_EVEX_0F3890,
PREFIX_EVEX_0F3891,
PREFIX_EVEX_0F3892,
PREFIX_EVEX_0F38AD,
PREFIX_EVEX_0F38AE,
PREFIX_EVEX_0F38AF,
+ PREFIX_EVEX_0F38B4,
+ PREFIX_EVEX_0F38B5,
PREFIX_EVEX_0F38B6,
PREFIX_EVEX_0F38B7,
PREFIX_EVEX_0F38B8,
PREFIX_EVEX_0F3A09,
PREFIX_EVEX_0F3A0A,
PREFIX_EVEX_0F3A0B,
+ PREFIX_EVEX_0F3A0F,
+ PREFIX_EVEX_0F3A14,
+ PREFIX_EVEX_0F3A15,
+ PREFIX_EVEX_0F3A16,
PREFIX_EVEX_0F3A17,
PREFIX_EVEX_0F3A18,
PREFIX_EVEX_0F3A19,
PREFIX_EVEX_0F3A1D,
PREFIX_EVEX_0F3A1E,
PREFIX_EVEX_0F3A1F,
+ PREFIX_EVEX_0F3A20,
PREFIX_EVEX_0F3A21,
+ PREFIX_EVEX_0F3A22,
PREFIX_EVEX_0F3A23,
PREFIX_EVEX_0F3A25,
PREFIX_EVEX_0F3A26,
PREFIX_EVEX_0F3A39,
PREFIX_EVEX_0F3A3A,
PREFIX_EVEX_0F3A3B,
+ PREFIX_EVEX_0F3A3E,
+ PREFIX_EVEX_0F3A3F,
+ PREFIX_EVEX_0F3A42,
PREFIX_EVEX_0F3A43,
+ PREFIX_EVEX_0F3A50,
+ PREFIX_EVEX_0F3A51,
PREFIX_EVEX_0F3A54,
PREFIX_EVEX_0F3A55,
+ PREFIX_EVEX_0F3A56,
+ PREFIX_EVEX_0F3A57,
+ PREFIX_EVEX_0F3A66,
+ PREFIX_EVEX_0F3A67
};
enum
VEX_LEN_0F2F_P_0,
VEX_LEN_0F2F_P_2,
VEX_LEN_0F41_P_0,
+ VEX_LEN_0F41_P_2,
VEX_LEN_0F42_P_0,
+ VEX_LEN_0F42_P_2,
VEX_LEN_0F44_P_0,
+ VEX_LEN_0F44_P_2,
VEX_LEN_0F45_P_0,
+ VEX_LEN_0F45_P_2,
VEX_LEN_0F46_P_0,
+ VEX_LEN_0F46_P_2,
VEX_LEN_0F47_P_0,
+ VEX_LEN_0F47_P_2,
+ VEX_LEN_0F4A_P_0,
+ VEX_LEN_0F4A_P_2,
+ VEX_LEN_0F4B_P_0,
VEX_LEN_0F4B_P_2,
VEX_LEN_0F51_P_1,
VEX_LEN_0F51_P_3,
VEX_LEN_0F7E_P_1,
VEX_LEN_0F7E_P_2,
VEX_LEN_0F90_P_0,
+ VEX_LEN_0F90_P_2,
VEX_LEN_0F91_P_0,
+ VEX_LEN_0F91_P_2,
VEX_LEN_0F92_P_0,
+ VEX_LEN_0F92_P_2,
+ VEX_LEN_0F92_P_3,
VEX_LEN_0F93_P_0,
+ VEX_LEN_0F93_P_2,
+ VEX_LEN_0F93_P_3,
VEX_LEN_0F98_P_0,
+ VEX_LEN_0F98_P_2,
+ VEX_LEN_0F99_P_0,
+ VEX_LEN_0F99_P_2,
VEX_LEN_0FAE_R_2_M_0,
VEX_LEN_0FAE_R_3_M_0,
VEX_LEN_0FC2_P_1,
VEX_LEN_0F3A21_P_2,
VEX_LEN_0F3A22_P_2,
VEX_LEN_0F3A30_P_2,
+ VEX_LEN_0F3A31_P_2,
VEX_LEN_0F3A32_P_2,
+ VEX_LEN_0F3A33_P_2,
VEX_LEN_0F3A38_P_2,
VEX_LEN_0F3A39_P_2,
VEX_LEN_0F3A41_P_2,
VEX_W_0F2F_P_0,
VEX_W_0F2F_P_2,
VEX_W_0F41_P_0_LEN_1,
+ VEX_W_0F41_P_2_LEN_1,
VEX_W_0F42_P_0_LEN_1,
+ VEX_W_0F42_P_2_LEN_1,
VEX_W_0F44_P_0_LEN_0,
+ VEX_W_0F44_P_2_LEN_0,
VEX_W_0F45_P_0_LEN_1,
+ VEX_W_0F45_P_2_LEN_1,
VEX_W_0F46_P_0_LEN_1,
+ VEX_W_0F46_P_2_LEN_1,
VEX_W_0F47_P_0_LEN_1,
+ VEX_W_0F47_P_2_LEN_1,
+ VEX_W_0F4A_P_0_LEN_1,
+ VEX_W_0F4A_P_2_LEN_1,
+ VEX_W_0F4B_P_0_LEN_1,
VEX_W_0F4B_P_2_LEN_1,
VEX_W_0F50_M_0,
VEX_W_0F51_P_0,
VEX_W_0F7F_P_1,
VEX_W_0F7F_P_2,
VEX_W_0F90_P_0_LEN_0,
+ VEX_W_0F90_P_2_LEN_0,
VEX_W_0F91_P_0_LEN_0,
+ VEX_W_0F91_P_2_LEN_0,
VEX_W_0F92_P_0_LEN_0,
+ VEX_W_0F92_P_2_LEN_0,
+ VEX_W_0F92_P_3_LEN_0,
VEX_W_0F93_P_0_LEN_0,
+ VEX_W_0F93_P_2_LEN_0,
+ VEX_W_0F93_P_3_LEN_0,
VEX_W_0F98_P_0_LEN_0,
+ VEX_W_0F98_P_2_LEN_0,
+ VEX_W_0F99_P_0_LEN_0,
+ VEX_W_0F99_P_2_LEN_0,
VEX_W_0FAE_R_2_M_0,
VEX_W_0FAE_R_3_M_0,
VEX_W_0FC2_P_0,
VEX_W_0F3A20_P_2,
VEX_W_0F3A21_P_2,
VEX_W_0F3A30_P_2_LEN_0,
+ VEX_W_0F3A31_P_2_LEN_0,
VEX_W_0F3A32_P_2_LEN_0,
+ VEX_W_0F3A33_P_2_LEN_0,
VEX_W_0F3A38_P_2,
VEX_W_0F3A39_P_2,
VEX_W_0F3A40_P_2,
EVEX_W_0F51_P_1,
EVEX_W_0F51_P_2,
EVEX_W_0F51_P_3,
+ EVEX_W_0F54_P_0,
+ EVEX_W_0F54_P_2,
+ EVEX_W_0F55_P_0,
+ EVEX_W_0F55_P_2,
+ EVEX_W_0F56_P_0,
+ EVEX_W_0F56_P_2,
+ EVEX_W_0F57_P_0,
+ EVEX_W_0F57_P_2,
EVEX_W_0F58_P_0,
EVEX_W_0F58_P_1,
EVEX_W_0F58_P_2,
EVEX_W_0F62_P_2,
EVEX_W_0F66_P_2,
EVEX_W_0F6A_P_2,
+ EVEX_W_0F6B_P_2,
EVEX_W_0F6C_P_2,
EVEX_W_0F6D_P_2,
EVEX_W_0F6E_P_2,
EVEX_W_0F6F_P_1,
EVEX_W_0F6F_P_2,
+ EVEX_W_0F6F_P_3,
EVEX_W_0F70_P_2,
EVEX_W_0F72_R_2_P_2,
EVEX_W_0F72_R_6_P_2,
EVEX_W_0F73_R_6_P_2,
EVEX_W_0F76_P_2,
EVEX_W_0F78_P_0,
+ EVEX_W_0F78_P_2,
EVEX_W_0F79_P_0,
+ EVEX_W_0F79_P_2,
EVEX_W_0F7A_P_1,
+ EVEX_W_0F7A_P_2,
EVEX_W_0F7A_P_3,
EVEX_W_0F7B_P_1,
+ EVEX_W_0F7B_P_2,
EVEX_W_0F7B_P_3,
EVEX_W_0F7E_P_1,
EVEX_W_0F7E_P_2,
EVEX_W_0F7F_P_1,
EVEX_W_0F7F_P_2,
+ EVEX_W_0F7F_P_3,
EVEX_W_0FC2_P_0,
EVEX_W_0FC2_P_1,
EVEX_W_0FC2_P_2,
EVEX_W_0FFE_P_2,
EVEX_W_0F380C_P_2,
EVEX_W_0F380D_P_2,
+ EVEX_W_0F3810_P_1,
+ EVEX_W_0F3810_P_2,
EVEX_W_0F3811_P_1,
+ EVEX_W_0F3811_P_2,
EVEX_W_0F3812_P_1,
+ EVEX_W_0F3812_P_2,
EVEX_W_0F3813_P_1,
EVEX_W_0F3813_P_2,
EVEX_W_0F3814_P_1,
EVEX_W_0F381B_P_2,
EVEX_W_0F381E_P_2,
EVEX_W_0F381F_P_2,
+ EVEX_W_0F3820_P_1,
EVEX_W_0F3821_P_1,
EVEX_W_0F3822_P_1,
EVEX_W_0F3823_P_1,
EVEX_W_0F3824_P_1,
EVEX_W_0F3825_P_1,
EVEX_W_0F3825_P_2,
+ EVEX_W_0F3826_P_1,
+ EVEX_W_0F3826_P_2,
+ EVEX_W_0F3828_P_1,
EVEX_W_0F3828_P_2,
+ EVEX_W_0F3829_P_1,
EVEX_W_0F3829_P_2,
EVEX_W_0F382A_P_1,
EVEX_W_0F382A_P_2,
+ EVEX_W_0F382B_P_2,
+ EVEX_W_0F3830_P_1,
EVEX_W_0F3831_P_1,
EVEX_W_0F3832_P_1,
EVEX_W_0F3833_P_1,
EVEX_W_0F3835_P_1,
EVEX_W_0F3835_P_2,
EVEX_W_0F3837_P_2,
+ EVEX_W_0F3838_P_1,
+ EVEX_W_0F3839_P_1,
EVEX_W_0F383A_P_1,
EVEX_W_0F3840_P_2,
EVEX_W_0F3858_P_2,
EVEX_W_0F3859_P_2,
EVEX_W_0F385A_P_2,
EVEX_W_0F385B_P_2,
+ EVEX_W_0F3866_P_2,
+ EVEX_W_0F3875_P_2,
+ EVEX_W_0F3878_P_2,
+ EVEX_W_0F3879_P_2,
+ EVEX_W_0F387A_P_2,
+ EVEX_W_0F387B_P_2,
+ EVEX_W_0F387D_P_2,
+ EVEX_W_0F3883_P_2,
+ EVEX_W_0F388D_P_2,
EVEX_W_0F3891_P_2,
EVEX_W_0F3893_P_2,
EVEX_W_0F38A1_P_2,
EVEX_W_0F3A09_P_2,
EVEX_W_0F3A0A_P_2,
EVEX_W_0F3A0B_P_2,
+ EVEX_W_0F3A16_P_2,
EVEX_W_0F3A18_P_2,
EVEX_W_0F3A19_P_2,
EVEX_W_0F3A1A_P_2,
EVEX_W_0F3A1B_P_2,
EVEX_W_0F3A1D_P_2,
EVEX_W_0F3A21_P_2,
+ EVEX_W_0F3A22_P_2,
EVEX_W_0F3A23_P_2,
EVEX_W_0F3A38_P_2,
EVEX_W_0F3A39_P_2,
EVEX_W_0F3A3A_P_2,
EVEX_W_0F3A3B_P_2,
+ EVEX_W_0F3A3E_P_2,
+ EVEX_W_0F3A3F_P_2,
+ EVEX_W_0F3A42_P_2,
EVEX_W_0F3A43_P_2,
+ EVEX_W_0F3A50_P_2,
+ EVEX_W_0F3A51_P_2,
+ EVEX_W_0F3A56_P_2,
+ EVEX_W_0F3A57_P_2,
+ EVEX_W_0F3A66_P_2,
+ EVEX_W_0F3A67_P_2
};
typedef void (*op_rtn) (int bytemode, int sizeflag);
"LS" => print "abs" in 64bit mode and behave as 'S' otherwise
"LV" => print "abs" for 64bit operand and behave as 'S' otherwise
"LW" => print 'd', 'q' depending on the VEX.W bit
+ "LP" => print 'w' or 'l' ('d' in Intel mode) if instruction has
+ an operand size prefix, or suffix_always is true. print
+ 'q' if rex prefix is present.
Many of the above letters print nothing in Intel mode. See "putop"
for the details.
{ "int3", { XX } },
{ "int", { Ib } },
{ X86_64_TABLE (X86_64_CE) },
- { "iretP", { XX } },
+ { "iret%LP", { XX } },
/* d0 */
{ REG_TABLE (REG_D0) },
{ REG_TABLE (REG_D1) },
{ Bad_Opcode },
{ "syscall", { XX } },
{ "clts", { XX } },
- { "sysretP", { XX } },
+ { "sysret%LP", { XX } },
/* 08 */
{ "invd", { XX } },
{ "wbinvd", { XX } },
{ "nopQ", { Ev } },
{ "nopQ", { Ev } },
/* 20 */
- { MOD_TABLE (MOD_0F20) },
- { MOD_TABLE (MOD_0F21) },
- { MOD_TABLE (MOD_0F22) },
- { MOD_TABLE (MOD_0F23) },
+ { "movZ", { Rm, Cm } },
+ { "movZ", { Rm, Dm } },
+ { "movZ", { Cm, Rm } },
+ { "movZ", { Dm, Rm } },
{ MOD_TABLE (MOD_0F24) },
{ Bad_Opcode },
{ MOD_TABLE (MOD_0F26) },
static int last_addr_prefix;
static int last_rex_prefix;
static int last_seg_prefix;
+static int fwait_prefix;
/* The PREFIX_REPZ/PREFIX_REPNZ/PREFIX_DATA prefix is mandatory. */
static int mandatory_prefix;
/* The active segment register prefix. */
{ "wrgsbase", { Ev } },
},
+ /* PREFIX_0FAE_REG_6 */
+ {
+ { "xsaveopt", { FXSAVE } },
+ { Bad_Opcode },
+ { "clwb", { Mb } },
+ },
+
/* PREFIX_0FAE_REG_7 */
{
{ "clflush", { Mb } },
{ "clflushopt", { Mb } },
},
+ /* PREFIX_RM_0_0FAE_REG_7 */
+ {
+ { "sfence", { Skip_MODRM } },
+ { Bad_Opcode },
+ { "pcommit", { Skip_MODRM } },
+ },
+
/* PREFIX_0FB8 */
{
{ Bad_Opcode },
/* PREFIX_VEX_0F41 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F41_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F41_P_2) },
},
/* PREFIX_VEX_0F42 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F42_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F42_P_2) },
},
/* PREFIX_VEX_0F44 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F44_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F44_P_2) },
},
/* PREFIX_VEX_0F45 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F45_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F45_P_2) },
},
/* PREFIX_VEX_0F46 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F46_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F46_P_2) },
},
/* PREFIX_VEX_0F47 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F47_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F47_P_2) },
},
- /* PREFIX_VEX_0F4B */
+ /* PREFIX_VEX_0F4A */
{
+ { VEX_LEN_TABLE (VEX_LEN_0F4A_P_0) },
{ Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F4A_P_2) },
+ },
+
+ /* PREFIX_VEX_0F4B */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F4B_P_0) },
{ Bad_Opcode },
{ VEX_LEN_TABLE (VEX_LEN_0F4B_P_2) },
},
/* PREFIX_VEX_0F90 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F90_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F90_P_2) },
},
/* PREFIX_VEX_0F91 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F91_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F91_P_2) },
},
/* PREFIX_VEX_0F92 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F92_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F92_P_2) },
+ { VEX_LEN_TABLE (VEX_LEN_0F92_P_3) },
},
/* PREFIX_VEX_0F93 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F93_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F93_P_2) },
+ { VEX_LEN_TABLE (VEX_LEN_0F93_P_3) },
},
/* PREFIX_VEX_0F98 */
{
{ VEX_LEN_TABLE (VEX_LEN_0F98_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F98_P_2) },
+ },
+
+ /* PREFIX_VEX_0F99 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F99_P_0) },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F99_P_2) },
},
/* PREFIX_VEX_0FC2 */
{ VEX_LEN_TABLE (VEX_LEN_0F3A30_P_2) },
},
+ /* PREFIX_VEX_0F3A31 */
+ {
+ { Bad_Opcode },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F3A31_P_2) },
+ },
+
/* PREFIX_VEX_0F3A32 */
{
{ Bad_Opcode },
{ VEX_LEN_TABLE (VEX_LEN_0F3A32_P_2) },
},
+ /* PREFIX_VEX_0F3A33 */
+ {
+ { Bad_Opcode },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F3A33_P_2) },
+ },
+
/* PREFIX_VEX_0F3A38 */
{
{ Bad_Opcode },
/* 48 */
{ Bad_Opcode },
{ Bad_Opcode },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F4A) },
{ PREFIX_TABLE (PREFIX_VEX_0F4B) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
/* 98 */
{ PREFIX_TABLE (PREFIX_VEX_0F98) },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F99) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
/* 30 */
{ PREFIX_TABLE (PREFIX_VEX_0F3A30) },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F3A31) },
{ PREFIX_TABLE (PREFIX_VEX_0F3A32) },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F3A33) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
{ VEX_W_TABLE (VEX_W_0F41_P_0_LEN_1) },
},
+ /* VEX_LEN_0F41_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F41_P_2_LEN_1) },
+ },
/* VEX_LEN_0F42_P_0 */
{
{ Bad_Opcode },
{ VEX_W_TABLE (VEX_W_0F42_P_0_LEN_1) },
},
+ /* VEX_LEN_0F42_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F42_P_2_LEN_1) },
+ },
/* VEX_LEN_0F44_P_0 */
{
{ VEX_W_TABLE (VEX_W_0F44_P_0_LEN_0) },
},
+ /* VEX_LEN_0F44_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F44_P_2_LEN_0) },
+ },
/* VEX_LEN_0F45_P_0 */
{
{ Bad_Opcode },
{ VEX_W_TABLE (VEX_W_0F45_P_0_LEN_1) },
},
+ /* VEX_LEN_0F45_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F45_P_2_LEN_1) },
+ },
/* VEX_LEN_0F46_P_0 */
{
{ Bad_Opcode },
{ VEX_W_TABLE (VEX_W_0F46_P_0_LEN_1) },
},
+ /* VEX_LEN_0F46_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F46_P_2_LEN_1) },
+ },
/* VEX_LEN_0F47_P_0 */
{
{ Bad_Opcode },
{ VEX_W_TABLE (VEX_W_0F47_P_0_LEN_1) },
},
+ /* VEX_LEN_0F47_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F47_P_2_LEN_1) },
+ },
+ /* VEX_LEN_0F4A_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F4A_P_0_LEN_1) },
+ },
+ /* VEX_LEN_0F4A_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F4A_P_2_LEN_1) },
+ },
+ /* VEX_LEN_0F4B_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F4B_P_0_LEN_1) },
+ },
/* VEX_LEN_0F4B_P_2 */
{
{ Bad_Opcode },
{ VEX_W_TABLE (VEX_W_0F90_P_0_LEN_0) },
},
+ /* VEX_LEN_0F90_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F90_P_2_LEN_0) },
+ },
+
/* VEX_LEN_0F91_P_0 */
{
{ VEX_W_TABLE (VEX_W_0F91_P_0_LEN_0) },
},
+ /* VEX_LEN_0F91_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F91_P_2_LEN_0) },
+ },
+
/* VEX_LEN_0F92_P_0 */
{
{ VEX_W_TABLE (VEX_W_0F92_P_0_LEN_0) },
},
+ /* VEX_LEN_0F92_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F92_P_2_LEN_0) },
+ },
+
+ /* VEX_LEN_0F92_P_3 */
+ {
+ { VEX_W_TABLE (VEX_W_0F92_P_3_LEN_0) },
+ },
+
/* VEX_LEN_0F93_P_0 */
{
{ VEX_W_TABLE (VEX_W_0F93_P_0_LEN_0) },
},
+ /* VEX_LEN_0F93_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F93_P_2_LEN_0) },
+ },
+
+ /* VEX_LEN_0F93_P_3 */
+ {
+ { VEX_W_TABLE (VEX_W_0F93_P_3_LEN_0) },
+ },
+
/* VEX_LEN_0F98_P_0 */
{
{ VEX_W_TABLE (VEX_W_0F98_P_0_LEN_0) },
},
+ /* VEX_LEN_0F98_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F98_P_2_LEN_0) },
+ },
+
+ /* VEX_LEN_0F99_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F99_P_0_LEN_0) },
+ },
+
+ /* VEX_LEN_0F99_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F99_P_2_LEN_0) },
+ },
+
/* VEX_LEN_0FAE_R_2_M_0 */
{
{ VEX_W_TABLE (VEX_W_0FAE_R_2_M_0) },
{ VEX_W_TABLE (VEX_W_0F3A30_P_2_LEN_0) },
},
+ /* VEX_LEN_0F3A31_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F3A31_P_2_LEN_0) },
+ },
+
/* VEX_LEN_0F3A32_P_2 */
{
{ VEX_W_TABLE (VEX_W_0F3A32_P_2_LEN_0) },
},
+ /* VEX_LEN_0F3A33_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F3A33_P_2_LEN_0) },
+ },
+
/* VEX_LEN_0F3A38_P_2 */
{
{ Bad_Opcode },
{
/* VEX_W_0F41_P_0_LEN_1 */
{ "kandw", { MaskG, MaskVex, MaskR } },
+ { "kandq", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F41_P_2_LEN_1 */
+ { "kandb", { MaskG, MaskVex, MaskR } },
+ { "kandd", { MaskG, MaskVex, MaskR } },
},
{
/* VEX_W_0F42_P_0_LEN_1 */
{ "kandnw", { MaskG, MaskVex, MaskR } },
+ { "kandnq", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F42_P_2_LEN_1 */
+ { "kandnb", { MaskG, MaskVex, MaskR } },
+ { "kandnd", { MaskG, MaskVex, MaskR } },
},
{
/* VEX_W_0F44_P_0_LEN_0 */
{ "knotw", { MaskG, MaskR } },
+ { "knotq", { MaskG, MaskR } },
+ },
+ {
+ /* VEX_W_0F44_P_2_LEN_0 */
+ { "knotb", { MaskG, MaskR } },
+ { "knotd", { MaskG, MaskR } },
},
{
/* VEX_W_0F45_P_0_LEN_1 */
{ "korw", { MaskG, MaskVex, MaskR } },
+ { "korq", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F45_P_2_LEN_1 */
+ { "korb", { MaskG, MaskVex, MaskR } },
+ { "kord", { MaskG, MaskVex, MaskR } },
},
{
/* VEX_W_0F46_P_0_LEN_1 */
{ "kxnorw", { MaskG, MaskVex, MaskR } },
+ { "kxnorq", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F46_P_2_LEN_1 */
+ { "kxnorb", { MaskG, MaskVex, MaskR } },
+ { "kxnord", { MaskG, MaskVex, MaskR } },
},
{
/* VEX_W_0F47_P_0_LEN_1 */
{ "kxorw", { MaskG, MaskVex, MaskR } },
+ { "kxorq", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F47_P_2_LEN_1 */
+ { "kxorb", { MaskG, MaskVex, MaskR } },
+ { "kxord", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F4A_P_0_LEN_1 */
+ { "kaddw", { MaskG, MaskVex, MaskR } },
+ { "kaddq", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F4A_P_2_LEN_1 */
+ { "kaddb", { MaskG, MaskVex, MaskR } },
+ { "kaddd", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F4B_P_0_LEN_1 */
+ { "kunpckwd", { MaskG, MaskVex, MaskR } },
+ { "kunpckdq", { MaskG, MaskVex, MaskR } },
},
{
/* VEX_W_0F4B_P_2_LEN_1 */
{
/* VEX_W_0F90_P_0_LEN_0 */
{ "kmovw", { MaskG, MaskE } },
+ { "kmovq", { MaskG, MaskE } },
+ },
+ {
+ /* VEX_W_0F90_P_2_LEN_0 */
+ { "kmovb", { MaskG, MaskBDE } },
+ { "kmovd", { MaskG, MaskBDE } },
},
{
/* VEX_W_0F91_P_0_LEN_0 */
{ "kmovw", { Ew, MaskG } },
+ { "kmovq", { Eq, MaskG } },
+ },
+ {
+ /* VEX_W_0F91_P_2_LEN_0 */
+ { "kmovb", { Eb, MaskG } },
+ { "kmovd", { Ed, MaskG } },
},
{
/* VEX_W_0F92_P_0_LEN_0 */
{ "kmovw", { MaskG, Rdq } },
},
{
+ /* VEX_W_0F92_P_2_LEN_0 */
+ { "kmovb", { MaskG, Rdq } },
+ },
+ {
+ /* VEX_W_0F92_P_3_LEN_0 */
+ { "kmovd", { MaskG, Rdq } },
+ { "kmovq", { MaskG, Rdq } },
+ },
+ {
/* VEX_W_0F93_P_0_LEN_0 */
{ "kmovw", { Gdq, MaskR } },
},
{
+ /* VEX_W_0F93_P_2_LEN_0 */
+ { "kmovb", { Gdq, MaskR } },
+ },
+ {
+ /* VEX_W_0F93_P_3_LEN_0 */
+ { "kmovd", { Gdq, MaskR } },
+ { "kmovq", { Gdq, MaskR } },
+ },
+ {
/* VEX_W_0F98_P_0_LEN_0 */
{ "kortestw", { MaskG, MaskR } },
+ { "kortestq", { MaskG, MaskR } },
+ },
+ {
+ /* VEX_W_0F98_P_2_LEN_0 */
+ { "kortestb", { MaskG, MaskR } },
+ { "kortestd", { MaskG, MaskR } },
+ },
+ {
+ /* VEX_W_0F99_P_0_LEN_0 */
+ { "ktestw", { MaskG, MaskR } },
+ { "ktestq", { MaskG, MaskR } },
+ },
+ {
+ /* VEX_W_0F99_P_2_LEN_0 */
+ { "ktestb", { MaskG, MaskR } },
+ { "ktestd", { MaskG, MaskR } },
},
{
/* VEX_W_0FAE_R_2_M_0 */
{ "vinsertps", { XM, Vex128, EXd, Ib } },
},
{
- /* VEX_W_0F3A30_P_2 */
- { Bad_Opcode },
+ /* VEX_W_0F3A30_P_2_LEN_0 */
+ { "kshiftrb", { MaskG, MaskR, Ib } },
{ "kshiftrw", { MaskG, MaskR, Ib } },
},
{
- /* VEX_W_0F3A32_P_2 */
- { Bad_Opcode },
+ /* VEX_W_0F3A31_P_2_LEN_0 */
+ { "kshiftrd", { MaskG, MaskR, Ib } },
+ { "kshiftrq", { MaskG, MaskR, Ib } },
+ },
+ {
+ /* VEX_W_0F3A32_P_2_LEN_0 */
+ { "kshiftlb", { MaskG, MaskR, Ib } },
{ "kshiftlw", { MaskG, MaskR, Ib } },
},
{
+ /* VEX_W_0F3A33_P_2_LEN_0 */
+ { "kshiftld", { MaskG, MaskR, Ib } },
+ { "kshiftlq", { MaskG, MaskR, Ib } },
+ },
+ {
/* VEX_W_0F3A38_P_2 */
{ "vinserti128", { XM, Vex256, EXxmm, Ib } },
},
{ "nopQ", { Ev } },
},
{
- /* MOD_0F20 */
- { Bad_Opcode },
- { "movZ", { Rm, Cm } },
- },
- {
- /* MOD_0F21 */
- { Bad_Opcode },
- { "movZ", { Rm, Dm } },
- },
- {
- /* MOD_0F22 */
- { Bad_Opcode },
- { "movZ", { Cm, Rm } },
- },
- {
- /* MOD_0F23 */
- { Bad_Opcode },
- { "movZ", { Dm, Rm } },
- },
- {
/* MOD_0F24 */
{ Bad_Opcode },
{ "movL", { Rd, Td } },
},
{
/* MOD_0FAE_REG_6 */
- { "xsaveopt", { FXSAVE } },
+ { PREFIX_TABLE (PREFIX_0FAE_REG_6) },
{ RM_TABLE (RM_0FAE_REG_6) },
},
{
},
{
/* RM_0FAE_REG_7 */
- { "sfence", { Skip_MODRM } },
+ { PREFIX_TABLE (PREFIX_RM_0_0FAE_REG_7) },
},
};
last_addr_prefix = -1;
last_rex_prefix = -1;
last_seg_prefix = -1;
+ fwait_prefix = -1;
active_seg_prefix = 0;
for (i = 0; i < (int) ARRAY_SIZE (all_prefixes); i++)
all_prefixes[i] = 0;
/* fwait is really an instruction. If there are prefixes
before the fwait, they belong to the fwait, *not* to the
following instruction. */
+ fwait_prefix = i;
if (prefixes || rex)
{
prefixes |= PREFIX_FWAIT;
&& ((*codep < 0xd8) || (*codep > 0xdf))))
{
/* Handle prefixes before fwait. */
- for (i = 0;
- i < (int) ARRAY_SIZE (all_prefixes) && all_prefixes[i];
+ for (i = 0; i < fwait_prefix && all_prefixes[i];
i++)
(*info->fprintf_func) (info->stream, "%s ",
prefix_name (all_prefixes[i], sizeflag));
break;
}
/* Fall through. */
+ goto case_P;
case 'P':
- if (intel_syntax)
+ if (l == 0 && len == 1)
{
- if ((rex & REX_W) == 0
- && (prefixes & PREFIX_DATA))
+case_P:
+ if (intel_syntax)
{
- if ((sizeflag & DFLAG) == 0)
- *obufp++ = 'w';
- used_prefixes |= (prefixes & PREFIX_DATA);
+ if ((rex & REX_W) == 0
+ && (prefixes & PREFIX_DATA))
+ {
+ if ((sizeflag & DFLAG) == 0)
+ *obufp++ = 'w';
+ used_prefixes |= (prefixes & PREFIX_DATA);
+ }
+ break;
+ }
+ if ((prefixes & PREFIX_DATA)
+ || (rex & REX_W)
+ || (sizeflag & SUFFIX_ALWAYS))
+ {
+ USED_REX (REX_W);
+ if (rex & REX_W)
+ *obufp++ = 'q';
+ else
+ {
+ if (sizeflag & DFLAG)
+ *obufp++ = 'l';
+ else
+ *obufp++ = 'w';
+ used_prefixes |= (prefixes & PREFIX_DATA);
+ }
}
- break;
}
- if ((prefixes & PREFIX_DATA)
- || (rex & REX_W)
- || (sizeflag & SUFFIX_ALWAYS))
+ else
{
- USED_REX (REX_W);
- if (rex & REX_W)
- *obufp++ = 'q';
- else
+ if (l != 1 || len != 2 || last[0] != 'L')
{
- if (sizeflag & DFLAG)
- *obufp++ = 'l';
- else
- *obufp++ = 'w';
- used_prefixes |= (prefixes & PREFIX_DATA);
+ SAVE_LAST (*p);
+ break;
+ }
+
+ if ((prefixes & PREFIX_DATA)
+ || (rex & REX_W)
+ || (sizeflag & SUFFIX_ALWAYS))
+ {
+ USED_REX (REX_W);
+ if (rex & REX_W)
+ *obufp++ = 'q';
+ else
+ {
+ if (sizeflag & DFLAG)
+ *obufp++ = intel_syntax ? 'd' : 'l';
+ else
+ *obufp++ = 'w';
+ used_prefixes |= (prefixes & PREFIX_DATA);
+ }
}
}
break;
case b_mode:
case b_swap_mode:
case dqb_mode:
+ case db_mode:
oappend ("BYTE PTR ");
break;
case w_mode:
+ case dw_mode:
case dqw_mode:
+ case dqw_swap_mode:
oappend ("WORD PTR ");
break;
case stack_v_mode:
}
else
{
- if (vex.length != 512)
- abort ();
- oappend ("ZMMWORD PTR ");
+ switch (vex.length)
+ {
+ case 128:
+ oappend ("XMMWORD PTR ");
+ break;
+ case 256:
+ oappend ("YMMWORD PTR ");
+ break;
+ case 512:
+ oappend ("ZMMWORD PTR ");
+ break;
+ default:
+ abort ();
+ }
}
break;
case vex_vsib_q_w_d_mode:
case vex_vsib_d_w_d_mode:
- if (!need_vex || !vex.evex || vex.length != 512)
+ if (!need_vex || !vex.evex)
abort ();
- oappend ("YMMWORD PTR ");
+ switch (vex.length)
+ {
+ case 128:
+ oappend ("QWORD PTR ");
+ break;
+ case 256:
+ oappend ("XMMWORD PTR ");
+ break;
+ case 512:
+ oappend ("YMMWORD PTR ");
+ break;
+ default:
+ abort ();
+ }
break;
+ case mask_bd_mode:
+ if (!need_vex || vex.length != 128)
+ abort ();
+ if (vex.w)
+ oappend ("DWORD PTR ");
+ else
+ oappend ("BYTE PTR ");
+ break;
case mask_mode:
if (!need_vex)
abort ();
- /* Currently the only instructions, which allows either mask or
- memory operand, are AVX512's KMOVW instructions. They need
- Word-sized operand. */
- if (vex.w || vex.length != 128)
- abort ();
- oappend ("WORD PTR ");
+ if (vex.w)
+ oappend ("QWORD PTR ");
+ else
+ oappend ("WORD PTR ");
break;
case v_bnd_mode:
default:
reg += 8;
if ((sizeflag & SUFFIX_ALWAYS)
- && (bytemode == b_swap_mode || bytemode == v_swap_mode))
+ && (bytemode == b_swap_mode
+ || bytemode == v_swap_mode
+ || bytemode == dqw_swap_mode))
swap_operand ();
switch (bytemode)
names = names16;
break;
case d_mode:
+ case dw_mode:
+ case db_mode:
names = names32;
break;
case q_mode:
case dqb_mode:
case dqd_mode:
case dqw_mode:
+ case dqw_swap_mode:
USED_REX (REX_W);
if (rex & REX_W)
names = names64;
used_prefixes |= (prefixes & PREFIX_DATA);
}
break;
+ case mask_bd_mode:
case mask_mode:
names = names_mask;
break;
/* In EVEX, if operand doesn't allow broadcast, vex.b should be 0. */
if (vex.b
&& bytemode != x_mode
+ && bytemode != xmmq_mode
&& bytemode != evex_half_bcst_xmmq_mode)
{
BadOp ();
}
switch (bytemode)
{
+ case dqw_mode:
+ case dw_mode:
+ case dqw_swap_mode:
+ shift = 1;
+ break;
+ case dqb_mode:
+ case db_mode:
+ shift = 0;
+ break;
case vex_vsib_d_w_dq_mode:
case vex_vsib_d_w_d_mode:
case vex_vsib_q_w_dq_mode:
break;
case x_mode:
case evex_half_bcst_xmmq_mode:
+ case xmmq_mode:
if (vex.b)
{
shift = vex.w ? 3 : 2;
/* Fall through if vex.b == 0. */
case xmmqd_mode:
case xmmdw_mode:
- case xmmq_mode:
case ymmq_mode:
case evex_x_nobcst_mode:
case x_swap_mode:
operand). We might want to make it 3, 4 or 5 (e.g. for
xmmq_mode). In case of broadcast enabled the corrections
aren't needed, as element size is always 32 or 64 bits. */
- if (bytemode == xmmq_mode
- || (bytemode == evex_half_bcst_xmmq_mode
- && !vex.b))
+ if (!vex.b
+ && (bytemode == xmmq_mode
+ || bytemode == evex_half_bcst_xmmq_mode))
shift -= 1;
else if (bytemode == xmmqd_mode)
shift -= 2;
else if (bytemode == xmmdw_mode)
shift -= 3;
+ else if (bytemode == ymmq_mode && vex.length == 128)
+ shift -= 1;
}
else
shift = 0;
}
if (vex.evex && vex.b
&& (bytemode == x_mode
+ || bytemode == xmmq_mode
|| bytemode == evex_half_bcst_xmmq_mode))
{
- if (vex.w || bytemode == evex_half_bcst_xmmq_mode)
- oappend ("{1to8}");
+ if (vex.w
+ || bytemode == xmmq_mode
+ || bytemode == evex_half_bcst_xmmq_mode)
+ {
+ switch (vex.length)
+ {
+ case 128:
+ oappend ("{1to2}");
+ break;
+ case 256:
+ oappend ("{1to4}");
+ break;
+ case 512:
+ oappend ("{1to8}");
+ break;
+ default:
+ abort ();
+ }
+ }
else
- oappend ("{1to16}");
+ {
+ switch (vex.length)
+ {
+ case 128:
+ oappend ("{1to4}");
+ break;
+ case 256:
+ oappend ("{1to8}");
+ break;
+ case 512:
+ oappend ("{1to16}");
+ break;
+ default:
+ abort ();
+ }
+ }
}
}
oappend (names16[modrm.reg + add]);
break;
case d_mode:
+ case db_mode:
+ case dw_mode:
oappend (names32[modrm.reg + add]);
break;
case q_mode:
case dqb_mode:
case dqd_mode:
case dqw_mode:
+ case dqw_swap_mode:
USED_REX (REX_W);
if (rex & REX_W)
oappend (names64[modrm.reg + add]);
else
oappend (names32[modrm.reg + add]);
break;
+ case mask_bd_mode:
case mask_mode:
oappend (names_mask[modrm.reg + add]);
break;
static void
OP_R (int bytemode, int sizeflag)
{
- if (modrm.mod == 3)
- OP_E (bytemode, sizeflag);
- else
- BadOp ();
+ /* Skip mod/rm byte. */
+ MODRM_CHECK;
+ codep++;
+ OP_E_register (bytemode, sizeflag);
}
static void
if ((sizeflag & SUFFIX_ALWAYS)
&& (bytemode == x_swap_mode
|| bytemode == d_swap_mode
+ || bytemode == dqw_swap_mode
|| bytemode == d_scalar_swap_mode
|| bytemode == q_swap_mode
|| bytemode == q_scalar_swap_mode))
else
names = names32;
break;
+ case mask_bd_mode:
case mask_mode:
names = names_mask;
break;
case vex_vsib_q_w_d_mode:
names = vex.w ? names_ymm : names_xmm;
break;
+ case mask_bd_mode:
case mask_mode:
names = names_mask;
break;
OP_Mask (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
{
if (!vex.evex
- || bytemode != mask_mode)
+ || (bytemode != mask_mode && bytemode != mask_bd_mode))
abort ();
USED_REX (REX_R);