/* Operand-printing handlers; each takes (bytemode, sizeflag).  */
static void OP_XS (int, int);
static void OP_M (int, int);
static void OP_VEX (int, int);
static void OP_VexR (int, int);
static void OP_VexW (int, int);
static void OP_Rounding (int, int);
static void OP_REG_VexI4 (int, int);
static void OP_VexI4 (int, int);
#define Vex { OP_VEX, vex_mode }
#define VexW { OP_VexW, vex_mode }
#define VexScalar { OP_VEX, vex_scalar_mode }
/* Like VexScalar, but via OP_VexR the VEX register is printed only
   for register-form (ModRM.mod == 3) operands.  */
#define VexScalarR { OP_VexR, vex_scalar_mode }
#define VexGatherQ { OP_VEX, vex_vsib_q_w_dq_mode }
#define VexGdq { OP_VEX, dq_mode }
#define VexTmm { OP_VEX, tmm_mode }
#define XMVexI4 { OP_REG_VexI4, x_mode }
#define XMVexScalarI4 { OP_REG_VexI4, scalar_mode }
#define VexI4 { OP_VexI4, 0 }
dqd_mode,
/* normal vex mode */
vex_mode,
- /* 128bit vex mode */
- vex128_mode,
- /* 256bit vex mode */
- vex256_mode,
/* Operand size depends on the VEX.W bit, with VSIB dword indices. */
vex_vsib_d_w_dq_mode,
/* scalar, ignore vector length. */
scalar_mode,
- /* like d_swap_mode, ignore vector length. */
- d_scalar_swap_mode,
- /* like q_swap_mode, ignore vector length. */
- q_scalar_swap_mode,
/* like vex_mode, ignore vector length. */
vex_scalar_mode,
/* Operand size depends on the VEX.W bit, ignore vector length. */
VEX_W_0F3816_P_2,
VEX_W_0F3818_P_2,
VEX_W_0F3819_P_2,
- VEX_W_0F381A_P_2_M_0,
+ VEX_W_0F381A_P_2_M_0_L_0,
VEX_W_0F382C_P_2_M_0,
VEX_W_0F382D_P_2_M_0,
VEX_W_0F382E_P_2_M_0,
VEX_W_0F384B_X86_64_P_3,
VEX_W_0F3858_P_2,
VEX_W_0F3859_P_2,
- VEX_W_0F385A_P_2_M_0,
+ VEX_W_0F385A_P_2_M_0_L_0,
VEX_W_0F385C_X86_64_P_1,
VEX_W_0F385E_X86_64_P_0,
VEX_W_0F385E_X86_64_P_1,
VEX_W_0F3A02_P_2,
VEX_W_0F3A04_P_2,
VEX_W_0F3A05_P_2,
- VEX_W_0F3A06_P_2,
- VEX_W_0F3A18_P_2,
- VEX_W_0F3A19_P_2,
+ VEX_W_0F3A06_P_2_L_0,
+ VEX_W_0F3A18_P_2_L_0,
+ VEX_W_0F3A19_P_2_L_0,
VEX_W_0F3A1D_P_2,
VEX_W_0F3A30_P_2_LEN_0,
VEX_W_0F3A31_P_2_LEN_0,
VEX_W_0F3A32_P_2_LEN_0,
VEX_W_0F3A33_P_2_LEN_0,
- VEX_W_0F3A38_P_2,
- VEX_W_0F3A39_P_2,
- VEX_W_0F3A46_P_2,
+ VEX_W_0F3A38_P_2_L_0,
+ VEX_W_0F3A39_P_2_L_0,
+ VEX_W_0F3A46_P_2_L_0,
VEX_W_0F3A4A_P_2,
VEX_W_0F3A4B_P_2,
VEX_W_0F3A4C_P_2,
"LB" => print "abs" in 64bit mode and behave as 'B' otherwise
"LS" => print "abs" in 64bit mode and behave as 'S' otherwise
"LV" => print "abs" for 64bit operand and behave as 'S' otherwise
- "LW" => print 'd', 'q' depending on the VEX.W bit
+ "DQ" => print 'd' or 'q' depending on the VEX.W bit
"BW" => print 'b' or 'w' depending on the EVEX.W bit
"LP" => print 'w' or 'l' ('d' in Intel mode) if instruction has
an operand size prefix, or suffix_always is true. print
}
vex;
/* Nonzero when the instruction being decoded carries a VEX/EVEX prefix.  */
static unsigned char need_vex;
struct op
{
/* PREFIX_VEX_0F10 */
{
{ "vmovups", { XM, EXx }, 0 },
{ "vmovss", { XMScalar, VexScalarR, EXxmm_md }, 0 },
{ "vmovupd", { XM, EXx }, 0 },
{ "vmovsd", { XMScalar, VexScalarR, EXxmm_mq }, 0 },
},
/* PREFIX_VEX_0F11 */
{
{ "vmovups", { EXxS, XM }, 0 },
{ "vmovss", { EXdS, VexScalarR, XMScalar }, 0 },
{ "vmovupd", { EXxS, XM }, 0 },
{ "vmovsd", { EXqS, VexScalarR, XMScalar }, 0 },
},
/* PREFIX_VEX_0F12 */
{
{ Bad_Opcode },
{ Bad_Opcode },
{ "vpsrlv%DQ", { XM, Vex, EXx }, 0 },
},
/* PREFIX_VEX_0F3846 */
{
{ Bad_Opcode },
{ Bad_Opcode },
{ "vpsllv%DQ", { XM, Vex, EXx }, 0 },
},
/* PREFIX_VEX_0F3849_X86_64 */
{
{ Bad_Opcode },
{ Bad_Opcode },
{ "vpgatherd%DQ", { XM, MVexVSIBDWpX, Vex }, 0 },
},
/* PREFIX_VEX_0F3891 */
{
{ Bad_Opcode },
{ Bad_Opcode },
{ "vpgatherq%DQ", { XMGatherQ, MVexVSIBQWpX, VexGatherQ }, 0 },
},
/* PREFIX_VEX_0F3892 */
static const struct dis386 vex_len_table[][2] = {
/* VEX_LEN_0F12_P_0_M_0 / VEX_LEN_0F12_P_2_M_0 */
{
- { "vmovlpX", { XM, Vex128, EXq }, 0 },
+ { "vmovlpX", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F12_P_0_M_1 */
{
- { "vmovhlps", { XM, Vex128, EXq }, 0 },
+ { "vmovhlps", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F13_M_0 */
/* VEX_LEN_0F16_P_0_M_0 / VEX_LEN_0F16_P_2_M_0 */
{
- { "vmovhpX", { XM, Vex128, EXq }, 0 },
+ { "vmovhpX", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F16_P_0_M_1 */
{
- { "vmovlhps", { XM, Vex128, EXq }, 0 },
+ { "vmovlhps", { XM, Vex, EXq }, 0 },
},
/* VEX_LEN_0F17_M_0 */
/* VEX_LEN_0FC4_P_2 */
{
- { "vpinsrw", { XM, Vex128, Edqw, Ib }, 0 },
+ { "vpinsrw", { XM, Vex, Edqw, Ib }, 0 },
},
/* VEX_LEN_0FC5_P_2 */
/* VEX_LEN_0FD6_P_2 */
{
- { "vmovq", { EXqVexScalarS, XMScalar }, 0 },
+ { "vmovq", { EXqS, XMScalar }, 0 },
},
/* VEX_LEN_0FF7_P_2 */
/* VEX_LEN_0F381A_P_2_M_0 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F381A_P_2_M_0) },
+ { VEX_W_TABLE (VEX_W_0F381A_P_2_M_0_L_0) },
},
/* VEX_LEN_0F3836_P_2 */
/* VEX_LEN_0F385A_P_2_M_0 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F385A_P_2_M_0) },
+ { VEX_W_TABLE (VEX_W_0F385A_P_2_M_0_L_0) },
},
/* VEX_LEN_0F385C_X86_64_P_1_W_0_M_0 */
/* VEX_LEN_0F3A06_P_2 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F3A06_P_2) },
+ { VEX_W_TABLE (VEX_W_0F3A06_P_2_L_0) },
},
/* VEX_LEN_0F3A14_P_2 */
/* VEX_LEN_0F3A18_P_2 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F3A18_P_2) },
+ { VEX_W_TABLE (VEX_W_0F3A18_P_2_L_0) },
},
/* VEX_LEN_0F3A19_P_2 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F3A19_P_2) },
+ { VEX_W_TABLE (VEX_W_0F3A19_P_2_L_0) },
},
/* VEX_LEN_0F3A20_P_2 */
{
- { "vpinsrb", { XM, Vex128, Edqb, Ib }, 0 },
+ { "vpinsrb", { XM, Vex, Edqb, Ib }, 0 },
},
/* VEX_LEN_0F3A21_P_2 */
{
- { "vinsertps", { XM, Vex128, EXd, Ib }, 0 },
+ { "vinsertps", { XM, Vex, EXd, Ib }, 0 },
},
/* VEX_LEN_0F3A22_P_2 */
{
- { "vpinsrK", { XM, Vex128, Edq, Ib }, 0 },
+ { "vpinsrK", { XM, Vex, Edq, Ib }, 0 },
},
/* VEX_LEN_0F3A30_P_2 */
/* VEX_LEN_0F3A38_P_2 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F3A38_P_2) },
+ { VEX_W_TABLE (VEX_W_0F3A38_P_2_L_0) },
},
/* VEX_LEN_0F3A39_P_2 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F3A39_P_2) },
+ { VEX_W_TABLE (VEX_W_0F3A39_P_2_L_0) },
},
/* VEX_LEN_0F3A41_P_2 */
{
- { "vdppd", { XM, Vex128, EXx, Ib }, 0 },
+ { "vdppd", { XM, Vex, EXx, Ib }, 0 },
},
/* VEX_LEN_0F3A46_P_2 */
{
{ Bad_Opcode },
- { VEX_W_TABLE (VEX_W_0F3A46_P_2) },
+ { VEX_W_TABLE (VEX_W_0F3A46_P_2_L_0) },
},
/* VEX_LEN_0F3A60_P_2 */
{ "vbroadcastsd", { XM, EXxmm_mq }, 0 },
},
{
- /* VEX_W_0F381A_P_2_M_0 */
+ /* VEX_W_0F381A_P_2_M_0_L_0 */
{ "vbroadcastf128", { XM, Mxmm }, 0 },
},
{
{ "vpbroadcastq", { XM, EXxmm_mq }, 0 },
},
{
- /* VEX_W_0F385A_P_2_M_0 */
+ /* VEX_W_0F385A_P_2_M_0_L_0 */
{ "vbroadcasti128", { XM, Mxmm }, 0 },
},
{
{ "vpermilpd", { XM, EXx, Ib }, 0 },
},
{
- /* VEX_W_0F3A06_P_2 */
- { "vperm2f128", { XM, Vex256, EXx, Ib }, 0 },
+ /* VEX_W_0F3A06_P_2_L_0 */
+ { "vperm2f128", { XM, Vex, EXx, Ib }, 0 },
},
{
- /* VEX_W_0F3A18_P_2 */
- { "vinsertf128", { XM, Vex256, EXxmm, Ib }, 0 },
+ /* VEX_W_0F3A18_P_2_L_0 */
+ { "vinsertf128", { XM, Vex, EXxmm, Ib }, 0 },
},
{
- /* VEX_W_0F3A19_P_2 */
+ /* VEX_W_0F3A19_P_2_L_0 */
{ "vextractf128", { EXxmm, XM, Ib }, 0 },
},
{
{ MOD_TABLE (MOD_VEX_W_1_0F3A33_P_2_LEN_0) },
},
{
- /* VEX_W_0F3A38_P_2 */
- { "vinserti128", { XM, Vex256, EXxmm, Ib }, 0 },
+ /* VEX_W_0F3A38_P_2_L_0 */
+ { "vinserti128", { XM, Vex, EXxmm, Ib }, 0 },
},
{
- /* VEX_W_0F3A39_P_2 */
+ /* VEX_W_0F3A39_P_2_L_0 */
{ "vextracti128", { EXxmm, XM, Ib }, 0 },
},
{
- /* VEX_W_0F3A46_P_2 */
- { "vperm2i128", { XM, Vex256, EXx, Ib }, 0 },
+ /* VEX_W_0F3A46_P_2_L_0 */
+ { "vperm2i128", { XM, Vex, EXx, Ib }, 0 },
},
{
/* VEX_W_0F3A4A_P_2 */
},
/* VEX_W_0FXOP_08_CC_L_0 */
{
- { "vpcomb", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomb", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_CD_L_0 */
{
- { "vpcomw", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomw", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_CE_L_0 */
{
- { "vpcomd", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomd", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_CF_L_0 */
{
- { "vpcomq", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomq", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_EC_L_0 */
{
- { "vpcomub", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomub", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_ED_L_0 */
{
- { "vpcomuw", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomuw", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_EE_L_0 */
{
- { "vpcomud", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomud", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_08_EF_L_0 */
{
- { "vpcomuq", { XM, Vex128, EXx, VPCOM }, 0 },
+ { "vpcomuq", { XM, Vex, EXx, VPCOM }, 0 },
},
/* VEX_W_0FXOP_09_80 */
{
},
{
/* MOD_VEX_0F388C_PREFIX_2 */
- { "vpmaskmov%LW", { XM, Vex, Mx }, 0 },
+ { "vpmaskmov%DQ", { XM, Vex, Mx }, 0 },
},
{
/* MOD_VEX_0F388E_PREFIX_2 */
- { "vpmaskmov%LW", { Mx, Vex, XM }, 0 },
+ { "vpmaskmov%DQ", { Mx, Vex, XM }, 0 },
},
{
/* MOD_VEX_W_0_0F3A30_P_2_LEN_0 */
break;
}
need_vex = 1;
codep++;
vindex = *codep++;
dp = &xop_table[vex_table_index][vindex];
break;
}
need_vex = 1;
codep++;
vindex = *codep++;
dp = &vex_table[vex_table_index][vindex];
break;
}
need_vex = 1;
codep++;
vindex = *codep++;
dp = &vex_table[dp->op[1].bytemode][vindex];
}
need_vex = 1;
codep++;
vindex = *codep++;
dp = &evex_table[vex_table_index][vindex];
}
need_vex = 0;
memset (&vex, 0, sizeof (vex));
if (dp->name == NULL && dp->op[0].bytemode == FLOATCODE)
/* "DQ" suffix: 'q' when VEX.W is set, else 'd'.  */
else if (l == 1 && last[0] == 'D')
*obufp++ = vex.w ? 'q' : 'd';
else if (l == 1 && last[0] == 'L')
{
if (cond ? modrm.mod == 3 && !(sizeflag & SUFFIX_ALWAYS)
abort ();
if (last[0] == 'X')
*obufp++ = vex.w ? 'd': 's';
else if (last[0] == 'B')
*obufp++ = vex.w ? 'w': 'b';
else
used_prefixes |= (prefixes & PREFIX_DATA);
break;
case d_mode:
case d_swap_mode:
case dqd_mode:
oappend ("DWORD PTR ");
break;
case q_mode:
case q_swap_mode:
oappend ("QWORD PTR ");
break;
case xmm_md_mode:
case d_mode:
case d_swap_mode:
shift = 2;
break;
}
case xmm_mq_mode:
case q_mode:
case q_swap_mode:
shift = 3;
break;
case bw_unit_mode:
if ((sizeflag & SUFFIX_ALWAYS)
&& (bytemode == x_swap_mode
|| bytemode == d_swap_mode
|| bytemode == q_swap_mode))
swap_operand ();
if (need_vex
&& bytemode != evex_half_bcst_xmmq_mode
&& bytemode != ymm_mode
&& bytemode != tmm_mode
&& bytemode != vex_scalar_w_dq_mode)
{
switch (vex.length)
if (!need_vex)
abort ();
- if (!need_vex_reg)
- return;
-
reg = vex.register_specifier;
vex.register_specifier = 0;
if (address_mode != mode_64bit)
switch (bytemode)
{
case vex_mode:
- case vex128_mode:
case vex_vsib_q_w_dq_mode:
case vex_vsib_q_w_d_mode:
names = names_xmm;
switch (bytemode)
{
case vex_mode:
- case vex256_mode:
names = names_ymm;
break;
case vex_vsib_q_w_dq_mode:
oappend (names[reg]);
}
+static void
+OP_VexR (int bytemode, int sizeflag)
+{
+ if (modrm.mod == 3)
+ OP_VEX (bytemode, sizeflag);
+}
+
static void
OP_VexW (int bytemode, int sizeflag)
{
oappend_maybe_intel (scratchbuf);
}
-static void
-OP_EX_Vex (int bytemode, int sizeflag)
-{
- if (modrm.mod != 3)
- need_vex_reg = 0;
- OP_EX (bytemode, sizeflag);
-}
-
-static void
-OP_XMM_Vex (int bytemode, int sizeflag)
-{
- if (modrm.mod != 3)
- need_vex_reg = 0;
- OP_XMM (bytemode, sizeflag);
-}
-
static void
VPCMP_Fixup (int bytemode ATTRIBUTE_UNUSED,
int sizeflag ATTRIBUTE_UNUSED)