Diffstat (limited to 'opcodes/i386-dis.c')
-rw-r--r--  opcodes/i386-dis.c | 1337
1 file changed, 1306 insertions(+), 31 deletions(-)
diff --git a/opcodes/i386-dis.c b/opcodes/i386-dis.c
index 2c47d744bdf..7d8a18c34cc 100644
--- a/opcodes/i386-dis.c
+++ b/opcodes/i386-dis.c
@@ -94,11 +94,13 @@ static void OP_EX_VexW (int, int);
static void OP_EX_VexImmW (int, int);
static void OP_XMM_Vex (int, int);
static void OP_XMM_VexW (int, int);
+static void OP_Rounding (int, int);
static void OP_REG_VexI4 (int, int);
static void PCLMUL_Fixup (int, int);
static void VEXI4_Fixup (int, int);
static void VZERO_Fixup (int, int);
static void VCMP_Fixup (int, int);
+static void VPCMP_Fixup (int, int);
static void OP_0f07 (int, int);
static void OP_Monitor (int, int);
static void OP_Mwait (int, int);
@@ -123,6 +125,8 @@ static void OP_Vex_2src_2 (int, int);
static void MOVBE_Fixup (int, int);
+static void OP_Mask (int, int);
+
struct dis_private {
/* Points to first byte not fetched. */
bfd_byte *max_fetched;
@@ -256,6 +260,7 @@ fetch_data (struct disassemble_info *info, bfd_byte *addr)
#define Gm { OP_G, m_mode }
#define Gw { OP_G, w_mode }
#define Rd { OP_R, d_mode }
+#define Rdq { OP_R, dq_mode }
#define Rm { OP_R, m_mode }
#define Ib { OP_I, b_mode }
#define sIb { OP_sI, b_mode } /* sign extened byte */
@@ -345,6 +350,7 @@ fetch_data (struct disassemble_info *info, bfd_byte *addr)
#define XMScalar { OP_XMM, scalar_mode }
#define XMGatherQ { OP_XMM, vex_vsib_q_w_dq_mode }
#define XMM { OP_XMM, xmm_mode }
+#define XMxmmq { OP_XMM, xmmq_mode }
#define EM { OP_EM, v_mode }
#define EMS { OP_EM, v_swap_mode }
#define EMd { OP_EM, d_mode }
@@ -353,6 +359,7 @@ fetch_data (struct disassemble_info *info, bfd_byte *addr)
#define EXd { OP_EX, d_mode }
#define EXdScalar { OP_EX, d_scalar_mode }
#define EXdS { OP_EX, d_swap_mode }
+#define EXdScalarS { OP_EX, d_scalar_swap_mode }
#define EXq { OP_EX, q_mode }
#define EXqScalar { OP_EX, q_scalar_mode }
#define EXqScalarS { OP_EX, q_scalar_swap_mode }
@@ -360,16 +367,21 @@ fetch_data (struct disassemble_info *info, bfd_byte *addr)
#define EXx { OP_EX, x_mode }
#define EXxS { OP_EX, x_swap_mode }
#define EXxmm { OP_EX, xmm_mode }
+#define EXymm { OP_EX, ymm_mode }
#define EXxmmq { OP_EX, xmmq_mode }
+#define EXEvexHalfBcstXmmq { OP_EX, evex_half_bcst_xmmq_mode }
#define EXxmm_mb { OP_EX, xmm_mb_mode }
#define EXxmm_mw { OP_EX, xmm_mw_mode }
#define EXxmm_md { OP_EX, xmm_md_mode }
#define EXxmm_mq { OP_EX, xmm_mq_mode }
+#define EXxmm_mdq { OP_EX, xmm_mdq_mode }
#define EXxmmdw { OP_EX, xmmdw_mode }
#define EXxmmqd { OP_EX, xmmqd_mode }
#define EXymmq { OP_EX, ymmq_mode }
#define EXVexWdq { OP_EX, vex_w_dq_mode }
#define EXVexWdqScalar { OP_EX, vex_scalar_w_dq_mode }
+#define EXEvexXGscat { OP_EX, evex_x_gscat_mode }
+#define EXEvexXNoBcst { OP_EX, evex_x_nobcst_mode }
#define MS { OP_MS, v_mode }
#define XS { OP_XS, v_mode }
#define EMCq { OP_EMC, q_mode }
@@ -405,6 +417,16 @@ fetch_data (struct disassemble_info *info, bfd_byte *addr)
#define PCLMUL { PCLMUL_Fixup, 0 }
#define VZERO { VZERO_Fixup, 0 }
#define VCMP { VCMP_Fixup, 0 }
+#define VPCMP { VPCMP_Fixup, 0 }
+
+#define EXxEVexR { OP_Rounding, evex_rounding_mode }
+#define EXxEVexS { OP_Rounding, evex_sae_mode }
+
+#define XMask { OP_Mask, mask_mode }
+#define MaskG { OP_G, mask_mode }
+#define MaskE { OP_E, mask_mode }
+#define MaskR { OP_R, mask_mode }
+#define MaskVex { OP_VEX, mask_mode }
#define MVexVSIBDWpX { OP_M, vex_vsib_d_w_dq_mode }
#define MVexVSIBQWpX { OP_M, vex_vsib_q_w_dq_mode }
@@ -461,14 +483,24 @@ enum
q_swap_mode,
/* ten-byte operand */
t_mode,
- /* 16-byte XMM or 32-byte YMM operand */
+ /* 16-byte XMM, 32-byte YMM or 64-byte ZMM operand.  In EVEX,
+ broadcast can be enabled for this operand.  */
x_mode,
- /* 16-byte XMM or 32-byte YMM operand with operand swapped */
+ /* Similar to x_mode, but with different EVEX mem shifts. */
+ evex_x_gscat_mode,
+ /* Similar to x_mode, but with disabled broadcast. */
+ evex_x_nobcst_mode,
+ /* Similar to x_mode, but with operands swapped and disabled broadcast
+ in EVEX. */
x_swap_mode,
/* 16-byte XMM operand */
xmm_mode,
- /* 16-byte XMM or quad word operand */
+ /* XMM (for 128-/256-bit vector length) or YMM (for 512-bit) register
+ operand, or quad word, xmmword or ymmword memory operand (depending
+ on vector length).  Broadcast isn't allowed.  */
xmmq_mode,
+ /* Same as xmmq_mode, but broadcast is allowed. */
+ evex_half_bcst_xmmq_mode,
/* XMM register or byte memory operand */
xmm_mb_mode,
/* XMM register or word memory operand */
@@ -477,11 +509,16 @@ enum
xmm_md_mode,
/* XMM register or quad word memory operand */
xmm_mq_mode,
- /* 16-byte XMM, word or double word operand */
+ /* XMM register or double/quad word memory operand, depending on
+ VEX.W. */
+ xmm_mdq_mode,
+ /* 16-byte XMM, word, double word or quad word operand. */
xmmdw_mode,
- /* 16-byte XMM, double word or quad word operand */
+ /* 16-byte XMM, double word, quad word or xmmword operand.  */
xmmqd_mode,
- /* 32-byte YMM or quad word operand */
+ /* 32-byte YMM operand */
+ ymm_mode,
+ /* quad word, ymmword or zmmword memory operand. */
ymmq_mode,
/* 32-byte YMM or 16-byte word operand */
ymmxmm_mode,
@@ -539,6 +576,14 @@ enum
/* like vex_w_dq_mode, ignore vector length. */
vex_scalar_w_dq_mode,
+ /* Static rounding. */
+ evex_rounding_mode,
+ /* Suppress all exceptions.  */
+ evex_sae_mode,
+
+ /* Mask register operand. */
+ mask_mode,
+
es_reg,
cs_reg,
ss_reg,
@@ -599,7 +644,8 @@ enum
USE_VEX_C4_TABLE,
USE_VEX_C5_TABLE,
USE_VEX_LEN_TABLE,
- USE_VEX_W_TABLE
+ USE_VEX_W_TABLE,
+ USE_EVEX_TABLE
};
#define FLOAT NULL, { { NULL, FLOATCODE } }
@@ -616,6 +662,7 @@ enum
#define VEX_C5_TABLE(I) DIS386 (USE_VEX_C5_TABLE, (I))
#define VEX_LEN_TABLE(I) DIS386 (USE_VEX_LEN_TABLE, (I))
#define VEX_W_TABLE(I) DIS386 (USE_VEX_W_TABLE, (I))
+#define EVEX_TABLE(I) DIS386 (USE_EVEX_TABLE, (I))
enum
{
@@ -655,7 +702,12 @@ enum
REG_XOP_LWPCB,
REG_XOP_LWP,
REG_XOP_TBM_01,
- REG_XOP_TBM_02
+ REG_XOP_TBM_02,
+
+ REG_EVEX_0F72,
+ REG_EVEX_0F73,
+ REG_EVEX_0F38C6,
+ REG_EVEX_0F38C7
};
enum
@@ -754,6 +806,21 @@ enum
MOD_VEX_0F385A_PREFIX_2,
MOD_VEX_0F388C_PREFIX_2,
MOD_VEX_0F388E_PREFIX_2,
+
+ MOD_EVEX_0F10_PREFIX_1,
+ MOD_EVEX_0F10_PREFIX_3,
+ MOD_EVEX_0F11_PREFIX_1,
+ MOD_EVEX_0F11_PREFIX_3,
+ MOD_EVEX_0F12_PREFIX_0,
+ MOD_EVEX_0F16_PREFIX_0,
+ MOD_EVEX_0F38C6_REG_1,
+ MOD_EVEX_0F38C6_REG_2,
+ MOD_EVEX_0F38C6_REG_5,
+ MOD_EVEX_0F38C6_REG_6,
+ MOD_EVEX_0F38C7_REG_1,
+ MOD_EVEX_0F38C7_REG_2,
+ MOD_EVEX_0F38C7_REG_5,
+ MOD_EVEX_0F38C7_REG_6
};
enum
@@ -908,6 +975,13 @@ enum
PREFIX_VEX_0F2D,
PREFIX_VEX_0F2E,
PREFIX_VEX_0F2F,
+ PREFIX_VEX_0F41,
+ PREFIX_VEX_0F42,
+ PREFIX_VEX_0F44,
+ PREFIX_VEX_0F45,
+ PREFIX_VEX_0F46,
+ PREFIX_VEX_0F47,
+ PREFIX_VEX_0F4B,
PREFIX_VEX_0F51,
PREFIX_VEX_0F52,
PREFIX_VEX_0F53,
@@ -954,6 +1028,11 @@ enum
PREFIX_VEX_0F7D,
PREFIX_VEX_0F7E,
PREFIX_VEX_0F7F,
+ PREFIX_VEX_0F90,
+ PREFIX_VEX_0F91,
+ PREFIX_VEX_0F92,
+ PREFIX_VEX_0F93,
+ PREFIX_VEX_0F98,
PREFIX_VEX_0FC2,
PREFIX_VEX_0FC4,
PREFIX_VEX_0FC5,
@@ -1141,6 +1220,8 @@ enum
PREFIX_VEX_0F3A20,
PREFIX_VEX_0F3A21,
PREFIX_VEX_0F3A22,
+ PREFIX_VEX_0F3A30,
+ PREFIX_VEX_0F3A32,
PREFIX_VEX_0F3A38,
PREFIX_VEX_0F3A39,
PREFIX_VEX_0F3A40,
@@ -1178,7 +1259,221 @@ enum
PREFIX_VEX_0F3A7E,
PREFIX_VEX_0F3A7F,
PREFIX_VEX_0F3ADF,
- PREFIX_VEX_0F3AF0
+ PREFIX_VEX_0F3AF0,
+
+ PREFIX_EVEX_0F10,
+ PREFIX_EVEX_0F11,
+ PREFIX_EVEX_0F12,
+ PREFIX_EVEX_0F13,
+ PREFIX_EVEX_0F14,
+ PREFIX_EVEX_0F15,
+ PREFIX_EVEX_0F16,
+ PREFIX_EVEX_0F17,
+ PREFIX_EVEX_0F28,
+ PREFIX_EVEX_0F29,
+ PREFIX_EVEX_0F2A,
+ PREFIX_EVEX_0F2B,
+ PREFIX_EVEX_0F2C,
+ PREFIX_EVEX_0F2D,
+ PREFIX_EVEX_0F2E,
+ PREFIX_EVEX_0F2F,
+ PREFIX_EVEX_0F51,
+ PREFIX_EVEX_0F58,
+ PREFIX_EVEX_0F59,
+ PREFIX_EVEX_0F5A,
+ PREFIX_EVEX_0F5B,
+ PREFIX_EVEX_0F5C,
+ PREFIX_EVEX_0F5D,
+ PREFIX_EVEX_0F5E,
+ PREFIX_EVEX_0F5F,
+ PREFIX_EVEX_0F62,
+ PREFIX_EVEX_0F66,
+ PREFIX_EVEX_0F6A,
+ PREFIX_EVEX_0F6C,
+ PREFIX_EVEX_0F6D,
+ PREFIX_EVEX_0F6E,
+ PREFIX_EVEX_0F6F,
+ PREFIX_EVEX_0F70,
+ PREFIX_EVEX_0F72_REG_0,
+ PREFIX_EVEX_0F72_REG_1,
+ PREFIX_EVEX_0F72_REG_2,
+ PREFIX_EVEX_0F72_REG_4,
+ PREFIX_EVEX_0F72_REG_6,
+ PREFIX_EVEX_0F73_REG_2,
+ PREFIX_EVEX_0F73_REG_6,
+ PREFIX_EVEX_0F76,
+ PREFIX_EVEX_0F78,
+ PREFIX_EVEX_0F79,
+ PREFIX_EVEX_0F7A,
+ PREFIX_EVEX_0F7B,
+ PREFIX_EVEX_0F7E,
+ PREFIX_EVEX_0F7F,
+ PREFIX_EVEX_0FC2,
+ PREFIX_EVEX_0FC6,
+ PREFIX_EVEX_0FD2,
+ PREFIX_EVEX_0FD3,
+ PREFIX_EVEX_0FD4,
+ PREFIX_EVEX_0FD6,
+ PREFIX_EVEX_0FDB,
+ PREFIX_EVEX_0FDF,
+ PREFIX_EVEX_0FE2,
+ PREFIX_EVEX_0FE6,
+ PREFIX_EVEX_0FE7,
+ PREFIX_EVEX_0FEB,
+ PREFIX_EVEX_0FEF,
+ PREFIX_EVEX_0FF2,
+ PREFIX_EVEX_0FF3,
+ PREFIX_EVEX_0FF4,
+ PREFIX_EVEX_0FFA,
+ PREFIX_EVEX_0FFB,
+ PREFIX_EVEX_0FFE,
+ PREFIX_EVEX_0F380C,
+ PREFIX_EVEX_0F380D,
+ PREFIX_EVEX_0F3811,
+ PREFIX_EVEX_0F3812,
+ PREFIX_EVEX_0F3813,
+ PREFIX_EVEX_0F3814,
+ PREFIX_EVEX_0F3815,
+ PREFIX_EVEX_0F3816,
+ PREFIX_EVEX_0F3818,
+ PREFIX_EVEX_0F3819,
+ PREFIX_EVEX_0F381A,
+ PREFIX_EVEX_0F381B,
+ PREFIX_EVEX_0F381E,
+ PREFIX_EVEX_0F381F,
+ PREFIX_EVEX_0F3821,
+ PREFIX_EVEX_0F3822,
+ PREFIX_EVEX_0F3823,
+ PREFIX_EVEX_0F3824,
+ PREFIX_EVEX_0F3825,
+ PREFIX_EVEX_0F3827,
+ PREFIX_EVEX_0F3828,
+ PREFIX_EVEX_0F3829,
+ PREFIX_EVEX_0F382A,
+ PREFIX_EVEX_0F382C,
+ PREFIX_EVEX_0F382D,
+ PREFIX_EVEX_0F3831,
+ PREFIX_EVEX_0F3832,
+ PREFIX_EVEX_0F3833,
+ PREFIX_EVEX_0F3834,
+ PREFIX_EVEX_0F3835,
+ PREFIX_EVEX_0F3836,
+ PREFIX_EVEX_0F3837,
+ PREFIX_EVEX_0F3839,
+ PREFIX_EVEX_0F383A,
+ PREFIX_EVEX_0F383B,
+ PREFIX_EVEX_0F383D,
+ PREFIX_EVEX_0F383F,
+ PREFIX_EVEX_0F3840,
+ PREFIX_EVEX_0F3842,
+ PREFIX_EVEX_0F3843,
+ PREFIX_EVEX_0F3844,
+ PREFIX_EVEX_0F3845,
+ PREFIX_EVEX_0F3846,
+ PREFIX_EVEX_0F3847,
+ PREFIX_EVEX_0F384C,
+ PREFIX_EVEX_0F384D,
+ PREFIX_EVEX_0F384E,
+ PREFIX_EVEX_0F384F,
+ PREFIX_EVEX_0F3858,
+ PREFIX_EVEX_0F3859,
+ PREFIX_EVEX_0F385A,
+ PREFIX_EVEX_0F385B,
+ PREFIX_EVEX_0F3864,
+ PREFIX_EVEX_0F3865,
+ PREFIX_EVEX_0F3876,
+ PREFIX_EVEX_0F3877,
+ PREFIX_EVEX_0F387C,
+ PREFIX_EVEX_0F387E,
+ PREFIX_EVEX_0F387F,
+ PREFIX_EVEX_0F3888,
+ PREFIX_EVEX_0F3889,
+ PREFIX_EVEX_0F388A,
+ PREFIX_EVEX_0F388B,
+ PREFIX_EVEX_0F3890,
+ PREFIX_EVEX_0F3891,
+ PREFIX_EVEX_0F3892,
+ PREFIX_EVEX_0F3893,
+ PREFIX_EVEX_0F3896,
+ PREFIX_EVEX_0F3897,
+ PREFIX_EVEX_0F3898,
+ PREFIX_EVEX_0F3899,
+ PREFIX_EVEX_0F389A,
+ PREFIX_EVEX_0F389B,
+ PREFIX_EVEX_0F389C,
+ PREFIX_EVEX_0F389D,
+ PREFIX_EVEX_0F389E,
+ PREFIX_EVEX_0F389F,
+ PREFIX_EVEX_0F38A0,
+ PREFIX_EVEX_0F38A1,
+ PREFIX_EVEX_0F38A2,
+ PREFIX_EVEX_0F38A3,
+ PREFIX_EVEX_0F38A6,
+ PREFIX_EVEX_0F38A7,
+ PREFIX_EVEX_0F38A8,
+ PREFIX_EVEX_0F38A9,
+ PREFIX_EVEX_0F38AA,
+ PREFIX_EVEX_0F38AB,
+ PREFIX_EVEX_0F38AC,
+ PREFIX_EVEX_0F38AD,
+ PREFIX_EVEX_0F38AE,
+ PREFIX_EVEX_0F38AF,
+ PREFIX_EVEX_0F38B6,
+ PREFIX_EVEX_0F38B7,
+ PREFIX_EVEX_0F38B8,
+ PREFIX_EVEX_0F38B9,
+ PREFIX_EVEX_0F38BA,
+ PREFIX_EVEX_0F38BB,
+ PREFIX_EVEX_0F38BC,
+ PREFIX_EVEX_0F38BD,
+ PREFIX_EVEX_0F38BE,
+ PREFIX_EVEX_0F38BF,
+ PREFIX_EVEX_0F38C4,
+ PREFIX_EVEX_0F38C6_REG_1,
+ PREFIX_EVEX_0F38C6_REG_2,
+ PREFIX_EVEX_0F38C6_REG_5,
+ PREFIX_EVEX_0F38C6_REG_6,
+ PREFIX_EVEX_0F38C7_REG_1,
+ PREFIX_EVEX_0F38C7_REG_2,
+ PREFIX_EVEX_0F38C7_REG_5,
+ PREFIX_EVEX_0F38C7_REG_6,
+ PREFIX_EVEX_0F38C8,
+ PREFIX_EVEX_0F38CA,
+ PREFIX_EVEX_0F38CB,
+ PREFIX_EVEX_0F38CC,
+ PREFIX_EVEX_0F38CD,
+
+ PREFIX_EVEX_0F3A00,
+ PREFIX_EVEX_0F3A01,
+ PREFIX_EVEX_0F3A03,
+ PREFIX_EVEX_0F3A04,
+ PREFIX_EVEX_0F3A05,
+ PREFIX_EVEX_0F3A08,
+ PREFIX_EVEX_0F3A09,
+ PREFIX_EVEX_0F3A0A,
+ PREFIX_EVEX_0F3A0B,
+ PREFIX_EVEX_0F3A17,
+ PREFIX_EVEX_0F3A18,
+ PREFIX_EVEX_0F3A19,
+ PREFIX_EVEX_0F3A1A,
+ PREFIX_EVEX_0F3A1B,
+ PREFIX_EVEX_0F3A1D,
+ PREFIX_EVEX_0F3A1E,
+ PREFIX_EVEX_0F3A1F,
+ PREFIX_EVEX_0F3A21,
+ PREFIX_EVEX_0F3A23,
+ PREFIX_EVEX_0F3A25,
+ PREFIX_EVEX_0F3A26,
+ PREFIX_EVEX_0F3A27,
+ PREFIX_EVEX_0F3A38,
+ PREFIX_EVEX_0F3A39,
+ PREFIX_EVEX_0F3A3A,
+ PREFIX_EVEX_0F3A3B,
+ PREFIX_EVEX_0F3A3E,
+ PREFIX_EVEX_0F3A3F,
+ PREFIX_EVEX_0F3A43,
+ PREFIX_EVEX_0F3A54,
+ PREFIX_EVEX_0F3A55,
};
enum
@@ -1236,6 +1531,13 @@ enum
enum
{
+ EVEX_0F = 0,
+ EVEX_0F38,
+ EVEX_0F3A
+};
+
+enum
+{
VEX_LEN_0F10_P_1 = 0,
VEX_LEN_0F10_P_3,
VEX_LEN_0F11_P_1,
@@ -1258,6 +1560,13 @@ enum
VEX_LEN_0F2E_P_2,
VEX_LEN_0F2F_P_0,
VEX_LEN_0F2F_P_2,
+ VEX_LEN_0F41_P_0,
+ VEX_LEN_0F42_P_0,
+ VEX_LEN_0F44_P_0,
+ VEX_LEN_0F45_P_0,
+ VEX_LEN_0F46_P_0,
+ VEX_LEN_0F47_P_0,
+ VEX_LEN_0F4B_P_2,
VEX_LEN_0F51_P_1,
VEX_LEN_0F51_P_3,
VEX_LEN_0F52_P_1,
@@ -1279,6 +1588,11 @@ enum
VEX_LEN_0F6E_P_2,
VEX_LEN_0F7E_P_1,
VEX_LEN_0F7E_P_2,
+ VEX_LEN_0F90_P_0,
+ VEX_LEN_0F91_P_0,
+ VEX_LEN_0F92_P_0,
+ VEX_LEN_0F93_P_0,
+ VEX_LEN_0F98_P_0,
VEX_LEN_0FAE_R_2_M_0,
VEX_LEN_0FAE_R_3_M_0,
VEX_LEN_0FC2_P_1,
@@ -1324,6 +1638,8 @@ enum
VEX_LEN_0F3A20_P_2,
VEX_LEN_0F3A21_P_2,
VEX_LEN_0F3A22_P_2,
+ VEX_LEN_0F3A30_P_2,
+ VEX_LEN_0F3A32_P_2,
VEX_LEN_0F3A38_P_2,
VEX_LEN_0F3A39_P_2,
VEX_LEN_0F3A41_P_2,
@@ -1385,6 +1701,13 @@ enum
VEX_W_0F2E_P_2,
VEX_W_0F2F_P_0,
VEX_W_0F2F_P_2,
+ VEX_W_0F41_P_0_LEN_1,
+ VEX_W_0F42_P_0_LEN_1,
+ VEX_W_0F44_P_0_LEN_0,
+ VEX_W_0F45_P_0_LEN_1,
+ VEX_W_0F46_P_0_LEN_1,
+ VEX_W_0F47_P_0_LEN_1,
+ VEX_W_0F4B_P_2_LEN_1,
VEX_W_0F50_M_0,
VEX_W_0F51_P_0,
VEX_W_0F51_P_1,
@@ -1464,6 +1787,11 @@ enum
VEX_W_0F7E_P_1,
VEX_W_0F7F_P_1,
VEX_W_0F7F_P_2,
+ VEX_W_0F90_P_0_LEN_0,
+ VEX_W_0F91_P_0_LEN_0,
+ VEX_W_0F92_P_0_LEN_0,
+ VEX_W_0F93_P_0_LEN_0,
+ VEX_W_0F98_P_0_LEN_0,
VEX_W_0FAE_R_2_M_0,
VEX_W_0FAE_R_3_M_0,
VEX_W_0FC2_P_0,
@@ -1609,6 +1937,8 @@ enum
VEX_W_0F3A19_P_2,
VEX_W_0F3A20_P_2,
VEX_W_0F3A21_P_2,
+ VEX_W_0F3A30_P_2_LEN_0,
+ VEX_W_0F3A32_P_2_LEN_0,
VEX_W_0F3A38_P_2,
VEX_W_0F3A39_P_2,
VEX_W_0F3A40_P_2,
@@ -1625,7 +1955,194 @@ enum
VEX_W_0F3A61_P_2,
VEX_W_0F3A62_P_2,
VEX_W_0F3A63_P_2,
- VEX_W_0F3ADF_P_2
+ VEX_W_0F3ADF_P_2,
+
+ EVEX_W_0F10_P_0,
+ EVEX_W_0F10_P_1_M_0,
+ EVEX_W_0F10_P_1_M_1,
+ EVEX_W_0F10_P_2,
+ EVEX_W_0F10_P_3_M_0,
+ EVEX_W_0F10_P_3_M_1,
+ EVEX_W_0F11_P_0,
+ EVEX_W_0F11_P_1_M_0,
+ EVEX_W_0F11_P_1_M_1,
+ EVEX_W_0F11_P_2,
+ EVEX_W_0F11_P_3_M_0,
+ EVEX_W_0F11_P_3_M_1,
+ EVEX_W_0F12_P_0_M_0,
+ EVEX_W_0F12_P_0_M_1,
+ EVEX_W_0F12_P_1,
+ EVEX_W_0F12_P_2,
+ EVEX_W_0F12_P_3,
+ EVEX_W_0F13_P_0,
+ EVEX_W_0F13_P_2,
+ EVEX_W_0F14_P_0,
+ EVEX_W_0F14_P_2,
+ EVEX_W_0F15_P_0,
+ EVEX_W_0F15_P_2,
+ EVEX_W_0F16_P_0_M_0,
+ EVEX_W_0F16_P_0_M_1,
+ EVEX_W_0F16_P_1,
+ EVEX_W_0F16_P_2,
+ EVEX_W_0F17_P_0,
+ EVEX_W_0F17_P_2,
+ EVEX_W_0F28_P_0,
+ EVEX_W_0F28_P_2,
+ EVEX_W_0F29_P_0,
+ EVEX_W_0F29_P_2,
+ EVEX_W_0F2A_P_1,
+ EVEX_W_0F2A_P_3,
+ EVEX_W_0F2B_P_0,
+ EVEX_W_0F2B_P_2,
+ EVEX_W_0F2E_P_0,
+ EVEX_W_0F2E_P_2,
+ EVEX_W_0F2F_P_0,
+ EVEX_W_0F2F_P_2,
+ EVEX_W_0F51_P_0,
+ EVEX_W_0F51_P_1,
+ EVEX_W_0F51_P_2,
+ EVEX_W_0F51_P_3,
+ EVEX_W_0F58_P_0,
+ EVEX_W_0F58_P_1,
+ EVEX_W_0F58_P_2,
+ EVEX_W_0F58_P_3,
+ EVEX_W_0F59_P_0,
+ EVEX_W_0F59_P_1,
+ EVEX_W_0F59_P_2,
+ EVEX_W_0F59_P_3,
+ EVEX_W_0F5A_P_0,
+ EVEX_W_0F5A_P_1,
+ EVEX_W_0F5A_P_2,
+ EVEX_W_0F5A_P_3,
+ EVEX_W_0F5B_P_0,
+ EVEX_W_0F5B_P_1,
+ EVEX_W_0F5B_P_2,
+ EVEX_W_0F5C_P_0,
+ EVEX_W_0F5C_P_1,
+ EVEX_W_0F5C_P_2,
+ EVEX_W_0F5C_P_3,
+ EVEX_W_0F5D_P_0,
+ EVEX_W_0F5D_P_1,
+ EVEX_W_0F5D_P_2,
+ EVEX_W_0F5D_P_3,
+ EVEX_W_0F5E_P_0,
+ EVEX_W_0F5E_P_1,
+ EVEX_W_0F5E_P_2,
+ EVEX_W_0F5E_P_3,
+ EVEX_W_0F5F_P_0,
+ EVEX_W_0F5F_P_1,
+ EVEX_W_0F5F_P_2,
+ EVEX_W_0F5F_P_3,
+ EVEX_W_0F62_P_2,
+ EVEX_W_0F66_P_2,
+ EVEX_W_0F6A_P_2,
+ EVEX_W_0F6C_P_2,
+ EVEX_W_0F6D_P_2,
+ EVEX_W_0F6E_P_2,
+ EVEX_W_0F6F_P_1,
+ EVEX_W_0F6F_P_2,
+ EVEX_W_0F70_P_2,
+ EVEX_W_0F72_R_2_P_2,
+ EVEX_W_0F72_R_6_P_2,
+ EVEX_W_0F73_R_2_P_2,
+ EVEX_W_0F73_R_6_P_2,
+ EVEX_W_0F76_P_2,
+ EVEX_W_0F78_P_0,
+ EVEX_W_0F79_P_0,
+ EVEX_W_0F7A_P_1,
+ EVEX_W_0F7A_P_3,
+ EVEX_W_0F7B_P_1,
+ EVEX_W_0F7B_P_3,
+ EVEX_W_0F7E_P_1,
+ EVEX_W_0F7E_P_2,
+ EVEX_W_0F7F_P_1,
+ EVEX_W_0F7F_P_2,
+ EVEX_W_0FC2_P_0,
+ EVEX_W_0FC2_P_1,
+ EVEX_W_0FC2_P_2,
+ EVEX_W_0FC2_P_3,
+ EVEX_W_0FC6_P_0,
+ EVEX_W_0FC6_P_2,
+ EVEX_W_0FD2_P_2,
+ EVEX_W_0FD3_P_2,
+ EVEX_W_0FD4_P_2,
+ EVEX_W_0FD6_P_2,
+ EVEX_W_0FE6_P_1,
+ EVEX_W_0FE6_P_2,
+ EVEX_W_0FE6_P_3,
+ EVEX_W_0FE7_P_2,
+ EVEX_W_0FF2_P_2,
+ EVEX_W_0FF3_P_2,
+ EVEX_W_0FF4_P_2,
+ EVEX_W_0FFA_P_2,
+ EVEX_W_0FFB_P_2,
+ EVEX_W_0FFE_P_2,
+ EVEX_W_0F380C_P_2,
+ EVEX_W_0F380D_P_2,
+ EVEX_W_0F3811_P_1,
+ EVEX_W_0F3812_P_1,
+ EVEX_W_0F3813_P_1,
+ EVEX_W_0F3813_P_2,
+ EVEX_W_0F3814_P_1,
+ EVEX_W_0F3815_P_1,
+ EVEX_W_0F3818_P_2,
+ EVEX_W_0F3819_P_2,
+ EVEX_W_0F381A_P_2,
+ EVEX_W_0F381B_P_2,
+ EVEX_W_0F381E_P_2,
+ EVEX_W_0F381F_P_2,
+ EVEX_W_0F3821_P_1,
+ EVEX_W_0F3822_P_1,
+ EVEX_W_0F3823_P_1,
+ EVEX_W_0F3824_P_1,
+ EVEX_W_0F3825_P_1,
+ EVEX_W_0F3825_P_2,
+ EVEX_W_0F3828_P_2,
+ EVEX_W_0F3829_P_2,
+ EVEX_W_0F382A_P_1,
+ EVEX_W_0F382A_P_2,
+ EVEX_W_0F3831_P_1,
+ EVEX_W_0F3832_P_1,
+ EVEX_W_0F3833_P_1,
+ EVEX_W_0F3834_P_1,
+ EVEX_W_0F3835_P_1,
+ EVEX_W_0F3835_P_2,
+ EVEX_W_0F3837_P_2,
+ EVEX_W_0F383A_P_1,
+ EVEX_W_0F3840_P_2,
+ EVEX_W_0F3858_P_2,
+ EVEX_W_0F3859_P_2,
+ EVEX_W_0F385A_P_2,
+ EVEX_W_0F385B_P_2,
+ EVEX_W_0F3891_P_2,
+ EVEX_W_0F3893_P_2,
+ EVEX_W_0F38A1_P_2,
+ EVEX_W_0F38A3_P_2,
+ EVEX_W_0F38C7_R_1_P_2,
+ EVEX_W_0F38C7_R_2_P_2,
+ EVEX_W_0F38C7_R_5_P_2,
+ EVEX_W_0F38C7_R_6_P_2,
+
+ EVEX_W_0F3A00_P_2,
+ EVEX_W_0F3A01_P_2,
+ EVEX_W_0F3A04_P_2,
+ EVEX_W_0F3A05_P_2,
+ EVEX_W_0F3A08_P_2,
+ EVEX_W_0F3A09_P_2,
+ EVEX_W_0F3A0A_P_2,
+ EVEX_W_0F3A0B_P_2,
+ EVEX_W_0F3A18_P_2,
+ EVEX_W_0F3A19_P_2,
+ EVEX_W_0F3A1A_P_2,
+ EVEX_W_0F3A1B_P_2,
+ EVEX_W_0F3A1D_P_2,
+ EVEX_W_0F3A21_P_2,
+ EVEX_W_0F3A23_P_2,
+ EVEX_W_0F3A38_P_2,
+ EVEX_W_0F3A39_P_2,
+ EVEX_W_0F3A3A_P_2,
+ EVEX_W_0F3A3B_P_2,
+ EVEX_W_0F3A43_P_2,
};
typedef void (*op_rtn) (int bytemode, int sizeflag);
@@ -2360,6 +2877,13 @@ static struct
int length;
int prefix;
int w;
+ int evex;
+ int r;
+ int v;
+ int mask_register_specifier;
+ int zeroing;
+ int ll;
+ int b;
}
vex;
static unsigned char need_vex;
@@ -2467,13 +2991,21 @@ static const char *intel_names_xmm[] = {
"xmm0", "xmm1", "xmm2", "xmm3",
"xmm4", "xmm5", "xmm6", "xmm7",
"xmm8", "xmm9", "xmm10", "xmm11",
- "xmm12", "xmm13", "xmm14", "xmm15"
+ "xmm12", "xmm13", "xmm14", "xmm15",
+ "xmm16", "xmm17", "xmm18", "xmm19",
+ "xmm20", "xmm21", "xmm22", "xmm23",
+ "xmm24", "xmm25", "xmm26", "xmm27",
+ "xmm28", "xmm29", "xmm30", "xmm31"
};
static const char *att_names_xmm[] = {
"%xmm0", "%xmm1", "%xmm2", "%xmm3",
"%xmm4", "%xmm5", "%xmm6", "%xmm7",
"%xmm8", "%xmm9", "%xmm10", "%xmm11",
- "%xmm12", "%xmm13", "%xmm14", "%xmm15"
+ "%xmm12", "%xmm13", "%xmm14", "%xmm15",
+ "%xmm16", "%xmm17", "%xmm18", "%xmm19",
+ "%xmm20", "%xmm21", "%xmm22", "%xmm23",
+ "%xmm24", "%xmm25", "%xmm26", "%xmm27",
+ "%xmm28", "%xmm29", "%xmm30", "%xmm31"
};
static const char **names_ymm;
@@ -2481,13 +3013,59 @@ static const char *intel_names_ymm[] = {
"ymm0", "ymm1", "ymm2", "ymm3",
"ymm4", "ymm5", "ymm6", "ymm7",
"ymm8", "ymm9", "ymm10", "ymm11",
- "ymm12", "ymm13", "ymm14", "ymm15"
+ "ymm12", "ymm13", "ymm14", "ymm15",
+ "ymm16", "ymm17", "ymm18", "ymm19",
+ "ymm20", "ymm21", "ymm22", "ymm23",
+ "ymm24", "ymm25", "ymm26", "ymm27",
+ "ymm28", "ymm29", "ymm30", "ymm31"
};
static const char *att_names_ymm[] = {
"%ymm0", "%ymm1", "%ymm2", "%ymm3",
"%ymm4", "%ymm5", "%ymm6", "%ymm7",
"%ymm8", "%ymm9", "%ymm10", "%ymm11",
- "%ymm12", "%ymm13", "%ymm14", "%ymm15"
+ "%ymm12", "%ymm13", "%ymm14", "%ymm15",
+ "%ymm16", "%ymm17", "%ymm18", "%ymm19",
+ "%ymm20", "%ymm21", "%ymm22", "%ymm23",
+ "%ymm24", "%ymm25", "%ymm26", "%ymm27",
+ "%ymm28", "%ymm29", "%ymm30", "%ymm31"
+};
+
+static const char **names_zmm;
+static const char *intel_names_zmm[] = {
+ "zmm0", "zmm1", "zmm2", "zmm3",
+ "zmm4", "zmm5", "zmm6", "zmm7",
+ "zmm8", "zmm9", "zmm10", "zmm11",
+ "zmm12", "zmm13", "zmm14", "zmm15",
+ "zmm16", "zmm17", "zmm18", "zmm19",
+ "zmm20", "zmm21", "zmm22", "zmm23",
+ "zmm24", "zmm25", "zmm26", "zmm27",
+ "zmm28", "zmm29", "zmm30", "zmm31"
+};
+static const char *att_names_zmm[] = {
+ "%zmm0", "%zmm1", "%zmm2", "%zmm3",
+ "%zmm4", "%zmm5", "%zmm6", "%zmm7",
+ "%zmm8", "%zmm9", "%zmm10", "%zmm11",
+ "%zmm12", "%zmm13", "%zmm14", "%zmm15",
+ "%zmm16", "%zmm17", "%zmm18", "%zmm19",
+ "%zmm20", "%zmm21", "%zmm22", "%zmm23",
+ "%zmm24", "%zmm25", "%zmm26", "%zmm27",
+ "%zmm28", "%zmm29", "%zmm30", "%zmm31"
+};
+
+static const char **names_mask;
+static const char *intel_names_mask[] = {
+ "k0", "k1", "k2", "k3", "k4", "k5", "k6", "k7"
+};
+static const char *att_names_mask[] = {
+ "%k0", "%k1", "%k2", "%k3", "%k4", "%k5", "%k6", "%k7"
+};
+
+static const char *names_rounding[] =
+{
+ "{rn-sae}",
+ "{rd-sae}",
+ "{ru-sae}",
+ "{rz-sae}"
};
static const struct dis386 reg_table[][8] = {
@@ -2685,7 +3263,7 @@ static const struct dis386 reg_table[][8] = {
{
{ "prefetch", { Mb } },
{ "prefetchw", { Mb } },
- { "prefetch", { Mb } },
+ { "prefetchwt1", { Mb } },
{ "prefetch", { Mb } },
{ "prefetch", { Mb } },
{ "prefetch", { Mb } },
@@ -2858,6 +3436,9 @@ static const struct dis386 reg_table[][8] = {
{ Bad_Opcode },
{ "blci", { { OP_LWP_E, 0 }, Ev } },
},
+#define NEED_REG_TABLE
+#include "i386-dis-evex.h"
+#undef NEED_REG_TABLE
};
static const struct dis386 prefix_table[][4] = {
@@ -3826,6 +4407,43 @@ static const struct dis386 prefix_table[][4] = {
{ VEX_LEN_TABLE (VEX_LEN_0F2F_P_2) },
},
+ /* PREFIX_VEX_0F41 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F41_P_0) },
+ },
+
+ /* PREFIX_VEX_0F42 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F42_P_0) },
+ },
+
+ /* PREFIX_VEX_0F44 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F44_P_0) },
+ },
+
+ /* PREFIX_VEX_0F45 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F45_P_0) },
+ },
+
+ /* PREFIX_VEX_0F46 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F46_P_0) },
+ },
+
+ /* PREFIX_VEX_0F47 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F47_P_0) },
+ },
+
+ /* PREFIX_VEX_0F4B */
+ {
+ { Bad_Opcode },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F4B_P_2) },
+ },
+
/* PREFIX_VEX_0F51 */
{
{ VEX_W_TABLE (VEX_W_0F51_P_0) },
@@ -4155,6 +4773,31 @@ static const struct dis386 prefix_table[][4] = {
{ VEX_W_TABLE (VEX_W_0F7F_P_2) },
},
+ /* PREFIX_VEX_0F90 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F90_P_0) },
+ },
+
+ /* PREFIX_VEX_0F91 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F91_P_0) },
+ },
+
+ /* PREFIX_VEX_0F92 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F92_P_0) },
+ },
+
+ /* PREFIX_VEX_0F93 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F93_P_0) },
+ },
+
+ /* PREFIX_VEX_0F98 */
+ {
+ { VEX_LEN_TABLE (VEX_LEN_0F98_P_0) },
+ },
+
/* PREFIX_VEX_0FC2 */
{
{ VEX_W_TABLE (VEX_W_0FC2_P_0) },
@@ -5464,6 +6107,20 @@ static const struct dis386 prefix_table[][4] = {
{ VEX_LEN_TABLE (VEX_LEN_0F3A22_P_2) },
},
+ /* PREFIX_VEX_0F3A30 */
+ {
+ { Bad_Opcode },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F3A30_P_2) },
+ },
+
+ /* PREFIX_VEX_0F3A32 */
+ {
+ { Bad_Opcode },
+ { Bad_Opcode },
+ { VEX_LEN_TABLE (VEX_LEN_0F3A32_P_2) },
+ },
+
/* PREFIX_VEX_0F3A38 */
{
{ Bad_Opcode },
@@ -5732,6 +6389,10 @@ static const struct dis386 prefix_table[][4] = {
{ Bad_Opcode },
{ VEX_LEN_TABLE (VEX_LEN_0F3AF0_P_3) },
},
+
+#define NEED_PREFIX_TABLE
+#include "i386-dis-evex.h"
+#undef NEED_PREFIX_TABLE
};
static const struct dis386 x86_64_table[][2] = {
@@ -5803,6 +6464,7 @@ static const struct dis386 x86_64_table[][2] = {
/* X86_64_62 */
{
{ MOD_TABLE (MOD_62_32BIT) },
+ { EVEX_TABLE (EVEX_0F) },
},
/* X86_64_63 */
@@ -7716,18 +8378,18 @@ static const struct dis386 vex_table[][256] = {
{ Bad_Opcode },
/* 40 */
{ Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F41) },
+ { PREFIX_TABLE (PREFIX_VEX_0F42) },
{ Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F44) },
+ { PREFIX_TABLE (PREFIX_VEX_0F45) },
+ { PREFIX_TABLE (PREFIX_VEX_0F46) },
+ { PREFIX_TABLE (PREFIX_VEX_0F47) },
/* 48 */
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F4B) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
@@ -7805,16 +8467,16 @@ static const struct dis386 vex_table[][256] = {
{ Bad_Opcode },
{ Bad_Opcode },
/* 90 */
- { Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F90) },
+ { PREFIX_TABLE (PREFIX_VEX_0F91) },
+ { PREFIX_TABLE (PREFIX_VEX_0F92) },
+ { PREFIX_TABLE (PREFIX_VEX_0F93) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
/* 98 */
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F98) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
@@ -8279,9 +8941,9 @@ static const struct dis386 vex_table[][256] = {
{ Bad_Opcode },
{ Bad_Opcode },
/* 30 */
+ { PREFIX_TABLE (PREFIX_VEX_0F3A30) },
{ Bad_Opcode },
- { Bad_Opcode },
- { Bad_Opcode },
+ { PREFIX_TABLE (PREFIX_VEX_0F3A32) },
{ Bad_Opcode },
{ Bad_Opcode },
{ Bad_Opcode },
@@ -8515,6 +9177,9 @@ static const struct dis386 vex_table[][256] = {
},
};
+#define NEED_OPCODE_TABLE
+#include "i386-dis-evex.h"
+#undef NEED_OPCODE_TABLE
static const struct dis386 vex_len_table[][2] = {
/* VEX_LEN_0F10_P_1 */
{
@@ -8640,6 +9305,41 @@ static const struct dis386 vex_len_table[][2] = {
{ VEX_W_TABLE (VEX_W_0F2F_P_2) },
},
+ /* VEX_LEN_0F41_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F41_P_0_LEN_1) },
+ },
+ /* VEX_LEN_0F42_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F42_P_0_LEN_1) },
+ },
+ /* VEX_LEN_0F44_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F44_P_0_LEN_0) },
+ },
+ /* VEX_LEN_0F45_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F45_P_0_LEN_1) },
+ },
+ /* VEX_LEN_0F46_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F46_P_0_LEN_1) },
+ },
+ /* VEX_LEN_0F47_P_0 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F47_P_0_LEN_1) },
+ },
+ /* VEX_LEN_0F4B_P_2 */
+ {
+ { Bad_Opcode },
+ { VEX_W_TABLE (VEX_W_0F4B_P_2_LEN_1) },
+ },
+
/* VEX_LEN_0F51_P_1 */
{
{ VEX_W_TABLE (VEX_W_0F51_P_1) },
@@ -8766,6 +9466,31 @@ static const struct dis386 vex_len_table[][2] = {
{ "vmovK", { Edq, XMScalar } },
},
+ /* VEX_LEN_0F90_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F90_P_0_LEN_0) },
+ },
+
+ /* VEX_LEN_0F91_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F91_P_0_LEN_0) },
+ },
+
+ /* VEX_LEN_0F92_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F92_P_0_LEN_0) },
+ },
+
+ /* VEX_LEN_0F93_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F93_P_0_LEN_0) },
+ },
+
+ /* VEX_LEN_0F98_P_0 */
+ {
+ { VEX_W_TABLE (VEX_W_0F98_P_0_LEN_0) },
+ },
+
/* VEX_LEN_0FAE_R_2_M_0 */
{
{ VEX_W_TABLE (VEX_W_0FAE_R_2_M_0) },
@@ -9006,6 +9731,16 @@ static const struct dis386 vex_len_table[][2] = {
{ "vpinsrK", { XM, Vex128, Edq, Ib } },
},
+ /* VEX_LEN_0F3A30_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F3A30_P_2_LEN_0) },
+ },
+
+ /* VEX_LEN_0F3A32_P_2 */
+ {
+ { VEX_W_TABLE (VEX_W_0F3A32_P_2_LEN_0) },
+ },
+
/* VEX_LEN_0F3A38_P_2 */
{
{ Bad_Opcode },
@@ -9271,6 +10006,34 @@ static const struct dis386 vex_w_table[][2] = {
{ "vcomisd", { XMScalar, EXqScalar } },
},
{
+ /* VEX_W_0F41_P_0_LEN_1 */
+ { "kandw", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F42_P_0_LEN_1 */
+ { "kandnw", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F44_P_0_LEN_0 */
+ { "knotw", { MaskG, MaskR } },
+ },
+ {
+ /* VEX_W_0F45_P_0_LEN_1 */
+ { "korw", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F46_P_0_LEN_1 */
+ { "kxnorw", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F47_P_0_LEN_1 */
+ { "kxorw", { MaskG, MaskVex, MaskR } },
+ },
+ {
+ /* VEX_W_0F4B_P_2_LEN_1 */
+ { "kunpckbw", { MaskG, MaskVex, MaskR } },
+ },
+ {
/* VEX_W_0F50_M_0 */
{ "vmovmskpX", { Gdq, XS } },
},
@@ -9587,6 +10350,26 @@ static const struct dis386 vex_w_table[][2] = {
{ "vmovdqa", { EXxS, XM } },
},
{
+ /* VEX_W_0F90_P_0_LEN_0 */
+ { "kmovw", { MaskG, MaskE } },
+ },
+ {
+ /* VEX_W_0F91_P_0_LEN_0 */
+ { "kmovw", { Ew, MaskG } },
+ },
+ {
+ /* VEX_W_0F92_P_0_LEN_0 */
+ { "kmovw", { MaskG, Rdq } },
+ },
+ {
+ /* VEX_W_0F93_P_0_LEN_0 */
+ { "kmovw", { Gdq, MaskR } },
+ },
+ {
+ /* VEX_W_0F98_P_0_LEN_0 */
+ { "kortestw", { MaskG, MaskR } },
+ },
+ {
/* VEX_W_0FAE_R_2_M_0 */
{ "vldmxcsr", { Md } },
},
@@ -10169,6 +10952,16 @@ static const struct dis386 vex_w_table[][2] = {
{ "vinsertps", { XM, Vex128, EXd, Ib } },
},
{
+ /* VEX_W_0F3A30_P_2 */
+ { Bad_Opcode },
+ { "kshiftrw", { MaskG, MaskR, Ib } },
+ },
+ {
+ /* VEX_W_0F3A32_P_2 */
+ { Bad_Opcode },
+ { "kshiftlw", { MaskG, MaskR, Ib } },
+ },
+ {
/* VEX_W_0F3A38_P_2 */
{ "vinserti128", { XM, Vex256, EXxmm, Ib } },
},
@@ -10238,6 +11031,9 @@ static const struct dis386 vex_w_table[][2] = {
/* VEX_W_0F3ADF_P_2 */
{ "vaeskeygenassist", { XM, EXx, Ib } },
},
+#define NEED_VEX_W_TABLE
+#include "i386-dis-evex.h"
+#undef NEED_VEX_W_TABLE
};
static const struct dis386 mod_table[][2] = {
@@ -10527,6 +11323,7 @@ static const struct dis386 mod_table[][2] = {
{
/* MOD_62_32BIT */
{ "bound{S|}", { Gv, Ma } },
+ { EVEX_TABLE (EVEX_0F) },
},
{
/* MOD_C4_32BIT */
@@ -10672,6 +11469,9 @@ static const struct dis386 mod_table[][2] = {
/* MOD_VEX_0F388E_PREFIX_2 */
{ "vpmaskmov%LW", { Mx, Vex, XM } },
},
+#define NEED_MOD_TABLE
+#include "i386-dis-evex.h"
+#undef NEED_MOD_TABLE
};
static const struct dis386 rm_table[][8] = {
@@ -11251,6 +12051,7 @@ get_valid_dis386 (const struct dis386 *dp, disassemble_info *info)
break;
case USE_VEX_C4_TABLE:
+ /* VEX prefix. */
FETCH_DATA (info, codep + 3);
/* All bits in the REX prefix are ignored. */
rex_ignored = rex;
@@ -11315,6 +12116,7 @@ get_valid_dis386 (const struct dis386 *dp, disassemble_info *info)
break;
case USE_VEX_C5_TABLE:
+ /* VEX prefix. */
FETCH_DATA (info, codep + 2);
/* All bits in the REX prefix are ignored. */
rex_ignored = rex;
@@ -11368,6 +12170,110 @@ get_valid_dis386 (const struct dis386 *dp, disassemble_info *info)
dp = &vex_w_table[dp->op[1].bytemode][vex.w ? 1 : 0];
break;
+ case USE_EVEX_TABLE:
+ two_source_ops = 0;
+ /* EVEX prefix. */
+ vex.evex = 1;
+ FETCH_DATA (info, codep + 4);
+ /* All bits in the REX prefix are ignored. */
+ rex_ignored = rex;
+ /* The first byte after 0x62. */
+ rex = ~(*codep >> 5) & 0x7;
+ vex.r = *codep & 0x10;
+ switch ((*codep & 0xf))
+ {
+ default:
+ return &bad_opcode;
+ case 0x1:
+ vex_table_index = EVEX_0F;
+ break;
+ case 0x2:
+ vex_table_index = EVEX_0F38;
+ break;
+ case 0x3:
+ vex_table_index = EVEX_0F3A;
+ break;
+ }
+
+ /* The second byte after 0x62. */
+ codep++;
+ vex.w = *codep & 0x80;
+ if (vex.w && address_mode == mode_64bit)
+ rex |= REX_W;
+
+ vex.register_specifier = (~(*codep >> 3)) & 0xf;
+ if (address_mode != mode_64bit)
+ {
+ /* In 16/32-bit mode, silently ignore the following bits.  */
+ rex &= ~REX_B;
+ vex.r = 1;
+ vex.v = 1;
+ vex.register_specifier &= 0x7;
+ }
+
+ /* The U bit. */
+ if (!(*codep & 0x4))
+ return &bad_opcode;
+
+ switch ((*codep & 0x3))
+ {
+ case 0:
+ vex.prefix = 0;
+ break;
+ case 1:
+ vex.prefix = DATA_PREFIX_OPCODE;
+ break;
+ case 2:
+ vex.prefix = REPE_PREFIX_OPCODE;
+ break;
+ case 3:
+ vex.prefix = REPNE_PREFIX_OPCODE;
+ break;
+ }
+
+ /* The third byte after 0x62. */
+ codep++;
+
+ /* Remember the static rounding bits. */
+ vex.ll = (*codep >> 5) & 3;
+ vex.b = (*codep & 0x10) != 0;
+
+ vex.v = *codep & 0x8;
+ vex.mask_register_specifier = *codep & 0x7;
+ vex.zeroing = *codep & 0x80;
+
+ need_vex = 1;
+ need_vex_reg = 1;
+ codep++;
+ vindex = *codep++;
+ dp = &evex_table[vex_table_index][vindex];
+ FETCH_DATA (info, codep + 1);
+ modrm.mod = (*codep >> 6) & 3;
+ modrm.reg = (*codep >> 3) & 7;
+ modrm.rm = *codep & 7;
+
+ /* Set vector length. */
+ if (modrm.mod == 3 && vex.b)
+ vex.length = 512;
+ else
+ {
+ switch (vex.ll)
+ {
+ case 0x0:
+ vex.length = 128;
+ break;
+ case 0x1:
+ vex.length = 256;
+ break;
+ case 0x2:
+ vex.length = 512;
+ break;
+ default:
+ return &bad_opcode;
+ }
+ }
+ break;
+
case 0:
dp = &bad_opcode;
break;
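
The EVEX decoding above extracts its fields from the three payload bytes that follow the 0x62 escape. As a quick reference, the sketch below repeats the same bit extraction as stand-alone C; the struct and field names are illustrative only (the real code folds the results into the global vex state and the rex variable instead).

#include <stdio.h>

/* Hypothetical view of the EVEX payload bytes P0, P1 and P2.  */
struct evex_fields
{
  int rxb;      /* REX.R/X/B extension bits, un-inverted here (P0 bits 7:5).  */
  int r_prime;  /* Raw R' bit; 0 selects ModRM.reg registers 16-31 (P0 bit 4).  */
  int mm;       /* Opcode map: 1 = 0F, 2 = 0F38, 3 = 0F3A (P0 bits 1:0).  */
  int w;        /* EVEX.W, operand/element size select (P1 bit 7).  */
  int vvvv;     /* Non-destructive source register, un-inverted (P1 bits 6:3).  */
  int pp;       /* Embedded prefix: 0 = none, 1 = 66, 2 = F3, 3 = F2 (P1 bits 1:0).  */
  int z;        /* Zeroing-masking instead of merging (P2 bit 7).  */
  int ll;       /* Vector length, or rounding control when b is set (P2 bits 6:5).  */
  int b;        /* Broadcast / static rounding / SAE bit (P2 bit 4).  */
  int v_prime;  /* Raw V' bit; 0 extends vvvv to registers 16-31 (P2 bit 3).  */
  int aaa;      /* Opmask register k0-k7 (P2 bits 2:0).  */
};

static struct evex_fields
decode_evex (const unsigned char p[3])
{
  struct evex_fields f;

  f.rxb     = ~(p[0] >> 5) & 0x7;
  f.r_prime = p[0] & 0x10;
  f.mm      = p[0] & 0xf;      /* Only values 1, 2 and 3 are valid.  */

  f.w       = p[1] & 0x80;
  f.vvvv    = ~(p[1] >> 3) & 0xf;
  f.pp      = p[1] & 0x3;      /* Bit 2 of P1 must be set.  */

  f.z       = p[2] & 0x80;
  f.ll      = (p[2] >> 5) & 0x3;
  f.b       = p[2] & 0x10;
  f.v_prime = p[2] & 0x8;
  f.aaa     = p[2] & 0x7;
  return f;
}

int
main (void)
{
  /* Payload bytes of "62 f1 7c 48 58 c1", i.e. vaddps zmm0, zmm0, zmm1.  */
  const unsigned char payload[3] = { 0xf1, 0x7c, 0x48 };
  struct evex_fields f = decode_evex (payload);

  printf ("map=%d pp=%d w=%d ll=%d aaa=%d\n", f.mm, f.pp, !!f.w, f.ll, f.aaa);
  return 0;
}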
@@ -11498,8 +12404,10 @@ print_insn (bfd_vma pc, disassemble_info *info)
names_bnd = intel_names_bnd;
names_xmm = intel_names_xmm;
names_ymm = intel_names_ymm;
+ names_zmm = intel_names_zmm;
index64 = intel_index64;
index32 = intel_index32;
+ names_mask = intel_names_mask;
index16 = intel_index16;
open_char = '[';
close_char = ']';
@@ -11518,8 +12426,10 @@ print_insn (bfd_vma pc, disassemble_info *info)
names_bnd = att_names_bnd;
names_xmm = att_names_xmm;
names_ymm = att_names_ymm;
+ names_zmm = att_names_zmm;
index64 = att_index64;
index32 = att_index32;
+ names_mask = att_names_mask;
index16 = att_index16;
open_char = '(';
close_char = ')';
@@ -11671,6 +12581,7 @@ print_insn (bfd_vma pc, disassemble_info *info)
need_vex = 0;
need_vex_reg = 0;
vex_w_done = 0;
+ vex.evex = 0;
if (dp->name == NULL && dp->op[0].bytemode == FLOATCODE)
{
@@ -11689,6 +12600,20 @@ print_insn (bfd_vma pc, disassemble_info *info)
op_ad = MAX_OPERANDS - 1 - i;
if (dp->op[i].rtn)
(*dp->op[i].rtn) (dp->op[i].bytemode, sizeflag);
+ /* For EVEX instructions, masking should be printed after
+ the last operand.  */
+ if (i == 0 && vex.evex)
+ {
+ /* Don't print {%k0}. */
+ if (vex.mask_register_specifier)
+ {
+ oappend ("{");
+ oappend (names_mask[vex.mask_register_specifier]);
+ oappend ("}");
+ }
+ if (vex.zeroing)
+ oappend ("{z}");
+ }
}
}
}
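
With this in place, an EVEX instruction that uses opmask register k1 with zeroing-masking is rendered in AT&T syntax roughly as vaddps %zmm2,%zmm1,%zmm0{%k1}{z}; when the mask field is 0 (i.e. k0, no masking) nothing extra is appended.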
@@ -12847,6 +13772,17 @@ print_displacement (char *buf, bfd_vma disp)
static void
intel_operand_size (int bytemode, int sizeflag)
{
+ if (vex.evex
+ && vex.b
+ && (bytemode == x_mode
+ || bytemode == evex_half_bcst_xmmq_mode))
+ {
+ if (vex.w)
+ oappend ("QWORD PTR ");
+ else
+ oappend ("DWORD PTR ");
+ return;
+ }
switch (bytemode)
{
case b_mode:
@@ -12926,6 +13862,8 @@ intel_operand_size (int bytemode, int sizeflag)
break;
case x_mode:
case x_swap_mode:
+ case evex_x_gscat_mode:
+ case evex_x_nobcst_mode:
if (need_vex)
{
switch (vex.length)
@@ -12936,6 +13874,9 @@ intel_operand_size (int bytemode, int sizeflag)
case 256:
oappend ("YMMWORD PTR ");
break;
+ case 512:
+ oappend ("ZMMWORD PTR ");
+ break;
default:
abort ();
}
@@ -12946,7 +13887,11 @@ intel_operand_size (int bytemode, int sizeflag)
case xmm_mode:
oappend ("XMMWORD PTR ");
break;
+ case ymm_mode:
+ oappend ("YMMWORD PTR ");
+ break;
case xmmq_mode:
+ case evex_half_bcst_xmmq_mode:
if (!need_vex)
abort ();
@@ -12958,6 +13903,9 @@ intel_operand_size (int bytemode, int sizeflag)
case 256:
oappend ("XMMWORD PTR ");
break;
+ case 512:
+ oappend ("YMMWORD PTR ");
+ break;
default:
abort ();
}
@@ -12970,6 +13918,7 @@ intel_operand_size (int bytemode, int sizeflag)
{
case 128:
case 256:
+ case 512:
oappend ("BYTE PTR ");
break;
default:
@@ -12984,6 +13933,7 @@ intel_operand_size (int bytemode, int sizeflag)
{
case 128:
case 256:
+ case 512:
oappend ("WORD PTR ");
break;
default:
@@ -12998,6 +13948,7 @@ intel_operand_size (int bytemode, int sizeflag)
{
case 128:
case 256:
+ case 512:
oappend ("DWORD PTR ");
break;
default:
@@ -13012,6 +13963,7 @@ intel_operand_size (int bytemode, int sizeflag)
{
case 128:
case 256:
+ case 512:
oappend ("QWORD PTR ");
break;
default:
@@ -13030,6 +13982,9 @@ intel_operand_size (int bytemode, int sizeflag)
case 256:
oappend ("DWORD PTR ");
break;
+ case 512:
+ oappend ("QWORD PTR ");
+ break;
default:
abort ();
}
@@ -13046,6 +14001,9 @@ intel_operand_size (int bytemode, int sizeflag)
case 256:
oappend ("QWORD PTR ");
break;
+ case 512:
+ oappend ("XMMWORD PTR ");
+ break;
default:
abort ();
}
@@ -13062,6 +14020,9 @@ intel_operand_size (int bytemode, int sizeflag)
case 256:
oappend ("YMMWORD PTR ");
break;
+ case 512:
+ oappend ("ZMMWORD PTR ");
+ break;
default:
abort ();
}
@@ -13083,10 +14044,9 @@ intel_operand_size (int bytemode, int sizeflag)
case o_mode:
oappend ("OWORD PTR ");
break;
+ case xmm_mdq_mode:
case vex_w_dq_mode:
case vex_scalar_w_dq_mode:
- case vex_vsib_d_w_dq_mode:
- case vex_vsib_q_w_dq_mode:
if (!need_vex)
abort ();
@@ -13095,6 +14055,35 @@ intel_operand_size (int bytemode, int sizeflag)
else
oappend ("DWORD PTR ");
break;
+ case vex_vsib_d_w_dq_mode:
+ case vex_vsib_q_w_dq_mode:
+ if (!need_vex)
+ abort ();
+
+ if (!vex.evex)
+ {
+ if (vex.w)
+ oappend ("QWORD PTR ");
+ else
+ oappend ("DWORD PTR ");
+ }
+ else
+ {
+ if (vex.length != 512)
+ abort ();
+ oappend ("ZMMWORD PTR ");
+ }
+ break;
+ case mask_mode:
+ if (!need_vex)
+ abort ();
+ /* Currently the only instructions which allow either a mask or a
+ memory operand are AVX512's KMOVW instructions.  They need a
+ word-sized operand.  */
+ if (vex.w || vex.length != 128)
+ abort ();
+ oappend ("WORD PTR ");
+ break;
default:
break;
}
@@ -13168,6 +14157,9 @@ OP_E_register (int bytemode, int sizeflag)
used_prefixes |= (prefixes & PREFIX_DATA);
}
break;
+ case mask_mode:
+ names = names_mask;
+ break;
case 0:
return;
default:
@@ -13183,6 +14175,104 @@ OP_E_memory (int bytemode, int sizeflag)
bfd_vma disp = 0;
int add = (rex & REX_B) ? 8 : 0;
int riprel = 0;
+ int shift;
+
+ if (vex.evex)
+ {
+ /* In EVEX, if the operand doesn't allow broadcast, vex.b should be 0.  */
+ if (vex.b
+ && bytemode != x_mode
+ && bytemode != evex_half_bcst_xmmq_mode)
+ {
+ BadOp ();
+ return;
+ }
+ switch (bytemode)
+ {
+ case vex_vsib_d_w_dq_mode:
+ case evex_x_gscat_mode:
+ case xmm_mdq_mode:
+ shift = vex.w ? 3 : 2;
+ break;
+ case vex_vsib_q_w_dq_mode:
+ shift = 3;
+ break;
+ case x_mode:
+ case evex_half_bcst_xmmq_mode:
+ if (vex.b)
+ {
+ shift = vex.w ? 3 : 2;
+ break;
+ }
+ /* Fall through if vex.b == 0. */
+ case xmmqd_mode:
+ case xmmdw_mode:
+ case xmmq_mode:
+ case ymmq_mode:
+ case evex_x_nobcst_mode:
+ case x_swap_mode:
+ switch (vex.length)
+ {
+ case 128:
+ shift = 4;
+ break;
+ case 256:
+ shift = 5;
+ break;
+ case 512:
+ shift = 6;
+ break;
+ default:
+ abort ();
+ }
+ break;
+ case ymm_mode:
+ shift = 5;
+ break;
+ case xmm_mode:
+ shift = 4;
+ break;
+ case xmm_mq_mode:
+ case q_mode:
+ case q_scalar_mode:
+ case q_swap_mode:
+ case q_scalar_swap_mode:
+ shift = 3;
+ break;
+ case dqd_mode:
+ case xmm_md_mode:
+ case d_mode:
+ case d_scalar_mode:
+ case d_swap_mode:
+ case d_scalar_swap_mode:
+ shift = 2;
+ break;
+ case xmm_mw_mode:
+ shift = 1;
+ break;
+ case xmm_mb_mode:
+ shift = 0;
+ break;
+ default:
+ abort ();
+ }
+ /* Make the necessary corrections to shift for modes that need them.
+ For these modes shift is currently 4, 5 or 6 depending on
+ vex.length (corresponding to an xmmword, ymmword or zmmword
+ operand), but it may need to be 3, 4 or 5 (e.g. for
+ xmmq_mode).  When broadcast is enabled no correction is
+ needed, as the element size is always 32 or 64 bits.  */
+ if (bytemode == xmmq_mode
+ || (bytemode == evex_half_bcst_xmmq_mode
+ && !vex.b))
+ shift -= 1;
+ else if (bytemode == xmmqd_mode)
+ shift -= 2;
+ else if (bytemode == xmmdw_mode)
+ shift -= 3;
+ }
+ else
+ shift = 0;
USED_REX (REX_B);
if (intel_syntax)
@@ -13224,6 +14314,11 @@ OP_E_memory (int bytemode, int sizeflag)
case vex_vsib_q_w_dq_mode:
if (!need_vex)
abort ();
+ if (vex.evex)
+ {
+ if (!vex.v)
+ vindex += 16;
+ }
haveindex = 1;
switch (vex.length)
@@ -13237,6 +14332,12 @@ OP_E_memory (int bytemode, int sizeflag)
else
indexes64 = indexes32 = names_xmm;
break;
+ case 512:
+ if (!vex.w || bytemode == vex_vsib_q_w_dq_mode)
+ indexes64 = indexes32 = names_zmm;
+ else
+ indexes64 = indexes32 = names_ymm;
+ break;
default:
abort ();
}
@@ -13267,6 +14368,8 @@ OP_E_memory (int bytemode, int sizeflag)
disp = *codep++;
if ((disp & 0x80) != 0)
disp -= 0x100;
+ if (vex.evex && shift > 0)
+ disp <<= shift;
break;
case 2:
disp = get32s ();
@@ -13459,6 +14562,15 @@ OP_E_memory (int bytemode, int sizeflag)
oappend (scratchbuf);
}
}
+ if (vex.evex && vex.b
+ && (bytemode == x_mode
+ || bytemode == evex_half_bcst_xmmq_mode))
+ {
+ if (vex.w || bytemode == evex_half_bcst_xmmq_mode)
+ oappend ("{1to8}");
+ else
+ oappend ("{1to16}");
+ }
}
static void
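
The disp8 scaling above implements EVEX's compressed displacement (disp8*N): a one-byte displacement is sign-extended and then multiplied by the memory operand's element or vector size, which the code expresses as a left shift. A minimal sketch of that arithmetic (the helper name is made up for illustration):

/* Sign-extend a one-byte displacement and apply the EVEX disp8*N
   scaling, where N = 1 << shift.  */
static long
scale_evex_disp8 (unsigned char disp8, int shift)
{
  long disp = disp8;

  if (disp & 0x80)
    disp -= 0x100;
  return disp << shift;
}

For a full 512-bit memory operand shift is 6, so scale_evex_disp8 (0x01, 6) yields 0x40 and a raw displacement byte of 1 is printed as a displacement of 0x40. With broadcast enabled the shift is just the element size (2 or 3), which matches the {1to16} and {1to8} decorations emitted above.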
@@ -13525,6 +14637,9 @@ OP_G (int bytemode, int sizeflag)
else
oappend (names32[modrm.reg + add]);
break;
+ case mask_mode:
+ oappend (names_mask[modrm.reg + add]);
+ break;
default:
oappend (INTERNAL_DISASSEMBLER_ERROR);
break;
@@ -14197,8 +15312,17 @@ OP_XMM (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
USED_REX (REX_R);
if (rex & REX_R)
reg += 8;
+ if (vex.evex)
+ {
+ if (!vex.r)
+ reg += 16;
+ }
+
if (need_vex
&& bytemode != xmm_mode
+ && bytemode != xmmq_mode
+ && bytemode != evex_half_bcst_xmmq_mode
+ && bytemode != ymm_mode
&& bytemode != scalar_mode)
{
switch (vex.length)
@@ -14212,10 +15336,31 @@ OP_XMM (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
else
names = names_xmm;
break;
+ case 512:
+ names = names_zmm;
+ break;
default:
abort ();
}
}
+ else if (bytemode == xmmq_mode
+ || bytemode == evex_half_bcst_xmmq_mode)
+ {
+ switch (vex.length)
+ {
+ case 128:
+ case 256:
+ names = names_xmm;
+ break;
+ case 512:
+ names = names_ymm;
+ break;
+ default:
+ abort ();
+ }
+ }
+ else if (bytemode == ymm_mode)
+ names = names_ymm;
else
names = names_xmm;
oappend (names[reg]);
@@ -14312,6 +15457,12 @@ OP_EX (int bytemode, int sizeflag)
USED_REX (REX_B);
if (rex & REX_B)
reg += 8;
+ if (vex.evex)
+ {
+ USED_REX (REX_X);
+ if ((rex & REX_X))
+ reg += 16;
+ }
if ((sizeflag & SUFFIX_ALWAYS)
&& (bytemode == x_swap_mode
@@ -14329,7 +15480,10 @@ OP_EX (int bytemode, int sizeflag)
&& bytemode != xmm_mw_mode
&& bytemode != xmm_md_mode
&& bytemode != xmm_mq_mode
+ && bytemode != xmm_mdq_mode
&& bytemode != xmmq_mode
+ && bytemode != evex_half_bcst_xmmq_mode
+ && bytemode != ymm_mode
&& bytemode != d_scalar_mode
&& bytemode != d_scalar_swap_mode
&& bytemode != q_scalar_mode
@@ -14344,10 +15498,31 @@ OP_EX (int bytemode, int sizeflag)
case 256:
names = names_ymm;
break;
+ case 512:
+ names = names_zmm;
+ break;
default:
abort ();
}
}
+ else if (bytemode == xmmq_mode
+ || bytemode == evex_half_bcst_xmmq_mode)
+ {
+ switch (vex.length)
+ {
+ case 128:
+ case 256:
+ names = names_xmm;
+ break;
+ case 512:
+ names = names_ymm;
+ break;
+ default:
+ abort ();
+ }
+ }
+ else if (bytemode == ymm_mode)
+ names = names_ymm;
else
names = names_xmm;
oappend (names[reg]);
@@ -14842,6 +16017,12 @@ OP_VEX (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
return;
reg = vex.register_specifier;
+ if (vex.evex)
+ {
+ if (!vex.v)
+ reg += 16;
+ }
+
if (bytemode == vex_scalar_mode)
{
oappend (names_xmm[reg]);
@@ -14864,6 +16045,9 @@ OP_VEX (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
else
names = names32;
break;
+ case mask_mode:
+ names = names_mask;
+ break;
default:
abort ();
return;
@@ -14879,11 +16063,17 @@ OP_VEX (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
case vex_vsib_q_w_dq_mode:
names = vex.w ? names_ymm : names_xmm;
break;
+ case mask_mode:
+ names = names_mask;
+ break;
default:
abort ();
return;
}
break;
+ case 512:
+ names = names_zmm;
+ break;
default:
abort ();
break;
@@ -15286,6 +16476,53 @@ VCMP_Fixup (int bytemode ATTRIBUTE_UNUSED, int sizeflag ATTRIBUTE_UNUSED)
}
}
+static void
+VPCMP_Fixup (int bytemode ATTRIBUTE_UNUSED,
+ int sizeflag ATTRIBUTE_UNUSED)
+{
+ unsigned int cmp_type;
+
+ if (!vex.evex)
+ abort ();
+
+ FETCH_DATA (the_info, codep + 1);
+ cmp_type = *codep++ & 0xff;
+ /* There are aliases for immediates 0, 1, 2, 4, 5 and 6.  If the
+ immediate is one of them, print the suffix; otherwise print the
+ immediate.  */
+ if (cmp_type < ARRAY_SIZE (simd_cmp_op)
+ && cmp_type != 3
+ && cmp_type != 7)
+ {
+ char suffix [3];
+ char *p = mnemonicendp - 2;
+
+ /* vpcmp* mnemonics can have either a one- or a two-letter suffix.  */
+ if (p[0] == 'p')
+ {
+ p++;
+ suffix[0] = p[0];
+ suffix[1] = '\0';
+ }
+ else
+ {
+ suffix[0] = p[0];
+ suffix[1] = p[1];
+ suffix[2] = '\0';
+ }
+
+ sprintf (p, "%s%s", simd_cmp_op[cmp_type].name, suffix);
+ mnemonicendp += simd_cmp_op[cmp_type].len;
+ }
+ else
+ {
+ /* We have a reserved extension byte. Output it directly. */
+ scratchbuf[0] = '$';
+ print_operand_value (scratchbuf + 1, 1, cmp_type);
+ oappend (scratchbuf + intel_syntax);
+ scratchbuf[0] = '\0';
+ }
+}
+
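
In effect, the recognized comparison immediates are folded into the mnemonic: for instance, vpcmpd with an immediate of 0 is printed as vpcmpeqd and vpcmpud with 1 as vpcmpltud, while the reserved values 3 and 7 keep an explicit immediate operand.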
static const struct op pclmul_op[] =
{
{ STRING_COMMA_LEN ("lql") },
@@ -15407,3 +16644,41 @@ OP_LWP_E (int bytemode ATTRIBUTE_UNUSED, int sizeflag ATTRIBUTE_UNUSED)
oappend (names[vex.register_specifier]);
}
+
+static void
+OP_Mask (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
+{
+ if (!vex.evex
+ || bytemode != mask_mode)
+ abort ();
+
+ USED_REX (REX_R);
+ if ((rex & REX_R) != 0 || !vex.r)
+ {
+ BadOp ();
+ return;
+ }
+
+ oappend (names_mask [modrm.reg]);
+}
+
+static void
+OP_Rounding (int bytemode, int sizeflag ATTRIBUTE_UNUSED)
+{
+ if (!vex.evex
+ || (bytemode != evex_rounding_mode
+ && bytemode != evex_sae_mode))
+ abort ();
+ if (modrm.mod == 3 && vex.b)
+ switch (bytemode)
+ {
+ case evex_rounding_mode:
+ oappend (names_rounding[vex.ll]);
+ break;
+ case evex_sae_mode:
+ oappend ("{sae}");
+ break;
+ default:
+ break;
+ }
+}
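
Note that OP_Rounding only emits a decoration for the register form (modrm.mod == 3) with EVEX.b set: the two vex.ll bits, which normally encode the vector length, are then reused as the rounding mode ({rn-sae}, {rd-sae}, {ru-sae}, {rz-sae}), which is also why the vector-length code earlier forces the length to 512 in that case.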