2018-11-06 Jan Beulich <jbeulich@suse.com>
+ * config/tc-i386.c (build_vex_prefix, build_evex_prefix):
+ Consider execution mode when .vexw is not set.
+ * testsuite/gas/i386/avx-wig.s,
+ testsuite/gas/i386/x86-64-avx-wig.s: Add BMI, BMI2, TBM, LWP,
+ vcvtsi2s*, vcvt*2si, vmovd, vpcmpestr*, vpextrd, and vpinsrd
+ cases.
+ * testsuite/gas/i386/evex-wig1.s: Add vcvt*si2s*, vcvt*2*si,
+ vextractps, vpextrb, vpextrw, vpinsrb, and vpinsrw cases.
+ * testsuite/gas/i386/x86-64-evex-wig1.s: Add vpextrb, vpextrw,
+ vpinsrb, and vpinsrw cases.
+ * testsuite/gas/i386/avx-wig.d, testsuite/gas/i386/evex-wig1.d,
+ testsuite/gas/i386/evex-wig1-intel.d,
+ testsuite/gas/i386/x86-64-avx-wig.d,
+ testsuite/gas/i386/x86-64-evex-wig1.d,
+ testsuite/gas/i386/x86-64-evex-wig1-intel.d: Adjust expectations.
+ * testsuite/gas/i386/evex-wig2.d,
+ testsuite/gas/i386/evex-wig2.s: Delete.
+ * testsuite/gas/i386/i386.exp: Drop deleted test.
+
+2018-11-06 Jan Beulich <jbeulich@suse.com>
+
* testsuite/gas/i386/evex-lig-2.s,
testsuite/gas/i386/x86-64-evex-lig-2.s: Add extract and insert
cases.
else if (i.tm.opcode_modifier.vexw)
w = i.tm.opcode_modifier.vexw == VEXW1 ? 1 : 0;
else
- w = (i.rex & REX_W) ? 1 : 0;
+ w = (flag_code == CODE_64BIT ? i.rex & REX_W : vexwig == vexw1) ? 1 : 0;
/* Use 2-byte VEX prefix if possible. */
if (w == 0
else if (i.tm.opcode_modifier.vexw)
w = i.tm.opcode_modifier.vexw == VEXW1 ? 1 : 0;
else
- w = (i.rex & REX_W) ? 1 : 0;
+ w = (flag_code == CODE_64BIT ? i.rex & REX_W : evexwig == evexw1) ? 1 : 0;
/* Encode the U bit. */
implied_prefix |= 0x4;
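For illustration, a minimal sketch of the effect of the tc-i386.c change (example only, not part of the patch; it assumes the wig tests are driven with -mvexwig=1 / -mevexwig=1, which the uniformly W=1 expectations below suggest):

# Hypothetical wig.s, assembled for 32-bit mode, e.g.:
#   as --32 -mvexwig=1 -mevexwig=1 -o wig.o wig.s && objdump -dw wig.o
	.text
	.code32
	# A VEX-encoded insn whose template leaves .vexw unset: outside
	# 64-bit mode W now follows -mvexwig=, so this is expected to encode
	# as c4 e2 f8 f2 00 (VEX.W = 1), matching the avx-wig expectation below.
	andn	(%eax), %eax, %eax
	# An EVEX template that just dropped VexW=1: W now follows -mevexwig=,
	# giving 62 f1 fe 08 2a c0 (EVEX.W = 1) per the evex-wig1 expectation.
	{evex} vcvtsi2ss %eax, %xmm0, %xmm0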
Disassembly of section .text:
0+ <_start>:
+ +[a-f0-9]+: c4 e2 f8 f2 00 andn \(%eax\),%eax,%eax
+ +[a-f0-9]+: c4 e2 f8 f7 00 bextr %eax,\(%eax\),%eax
+ +[a-f0-9]+: 8f ea f8 10 00 00 00 00 00 bextr \$0x0,\(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 01 08 blcfill \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 02 30 blci \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 01 28 blcic \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 02 08 blcmsk \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 01 18 blcs \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 01 10 blsfill \(%eax\),%eax
+ +[a-f0-9]+: c4 e2 f8 f3 18 blsi \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 01 30 blsic \(%eax\),%eax
+ +[a-f0-9]+: c4 e2 f8 f3 10 blsmsk \(%eax\),%eax
+ +[a-f0-9]+: c4 e2 f8 f3 08 blsr \(%eax\),%eax
+ +[a-f0-9]+: c4 e2 f8 f5 00 bzhi %eax,\(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 12 c0 llwpcb %eax
+ +[a-f0-9]+: 8f ea f8 12 00 00 00 00 00 lwpins \$0x0,\(%eax\),%eax
+ +[a-f0-9]+: 8f ea f8 12 08 00 00 00 00 lwpval \$0x0,\(%eax\),%eax
+ +[a-f0-9]+: c4 e2 fb f6 00 mulx \(%eax\),%eax,%eax
+ +[a-f0-9]+: c4 e2 fb f5 00 pdep \(%eax\),%eax,%eax
+ +[a-f0-9]+: c4 e2 fa f5 00 pext \(%eax\),%eax,%eax
+ +[a-f0-9]+: c4 e3 fb f0 00 00 rorx \$0x0,\(%eax\),%eax
+ +[a-f0-9]+: c4 e2 fa f7 00 sarx %eax,\(%eax\),%eax
+ +[a-f0-9]+: c4 e2 f9 f7 00 shlx %eax,\(%eax\),%eax
+ +[a-f0-9]+: c4 e2 fb f7 00 shrx %eax,\(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 12 c8 slwpcb %eax
+ +[a-f0-9]+: 8f e9 f8 01 38 t1mskc \(%eax\),%eax
+ +[a-f0-9]+: 8f e9 f8 01 20 tzmsk \(%eax\),%eax
+[a-f0-9]+: c4 e1 cd 58 d4 vaddpd %ymm4,%ymm6,%ymm2
+[a-f0-9]+: c4 e1 cc 58 d4 vaddps %ymm4,%ymm6,%ymm2
+[a-f0-9]+: c4 e1 cb 58 d4 vaddsd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e1 fd 5b f4 vcvtps2dq %ymm4,%ymm6
+[a-f0-9]+: c4 e1 fc 5a e4 vcvtps2pd %xmm4,%ymm4
+[a-f0-9]+: c4 e1 cb 5a d4 vcvtsd2ss %xmm4,%xmm6,%xmm2
+ +[a-f0-9]+: c4 e1 fa 2a c0 vcvtsi2ss %eax,%xmm0,%xmm0
+ +[a-f0-9]+: c4 e1 fa 2a 00 vcvtsi2ssl? \(%eax\),%xmm0,%xmm0
+ +[a-f0-9]+: c4 e1 fb 2a c0 vcvtsi2sd %eax,%xmm0,%xmm0
+ +[a-f0-9]+: c4 e1 fb 2a 00 vcvtsi2sdl? \(%eax\),%xmm0,%xmm0
+[a-f0-9]+: c4 e1 ca 5a d4 vcvtss2sd %xmm4,%xmm6,%xmm2
+ +[a-f0-9]+: c4 e1 fa 2d c0 vcvtss2si %xmm0,%eax
+ +[a-f0-9]+: c4 e1 fb 2d c0 vcvtsd2si %xmm0,%eax
+[a-f0-9]+: c4 e1 fd e6 e4 vcvttpd2dq %ymm4,%xmm4
+[a-f0-9]+: c4 e1 f9 e6 f4 vcvttpd2dq %xmm4,%xmm6
+[a-f0-9]+: c4 e1 fd e6 e4 vcvttpd2dq %ymm4,%xmm4
+[a-f0-9]+: c4 e1 fe 5b f4 vcvttps2dq %ymm4,%ymm6
+ +[a-f0-9]+: c4 e1 fa 2c c0 vcvttss2si %xmm0,%eax
+ +[a-f0-9]+: c4 e1 fb 2c c0 vcvttsd2si %xmm0,%eax
+[a-f0-9]+: c4 e1 cd 5e d4 vdivpd %ymm4,%ymm6,%ymm2
+[a-f0-9]+: c4 e1 cc 5e d4 vdivps %ymm4,%ymm6,%ymm2
+[a-f0-9]+: c4 e1 cb 5e d4 vdivsd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e1 fc 28 f4 vmovaps %ymm4,%ymm6
+[a-f0-9]+: c4 e1 fd 29 e6 vmovapd %ymm4,%ymm6
+[a-f0-9]+: c4 e1 fc 29 e6 vmovaps %ymm4,%ymm6
+ +[a-f0-9]+: c4 e1 f9 6e c0 vmovd %eax,%xmm0
+ +[a-f0-9]+: c4 e1 f9 6e 00 vmovd \(%eax\),%xmm0
+ +[a-f0-9]+: c4 e1 f9 7e c0 vmovd %xmm0,%eax
+ +[a-f0-9]+: c4 e1 f9 7e 00 vmovd %xmm0,\(%eax\)
+[a-f0-9]+: c4 e1 ff 12 f4 vmovddup %ymm4,%ymm6
+[a-f0-9]+: c4 e1 fd 6f f4 vmovdqa %ymm4,%ymm6
+[a-f0-9]+: c4 e1 fe 6f f4 vmovdqu %ymm4,%ymm6
+[a-f0-9]+: c4 e1 c9 76 d4 vpcmpeqd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 29 d4 vpcmpeqq %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e1 c9 75 d4 vpcmpeqw %xmm4,%xmm6,%xmm2
+ +[a-f0-9]+: c4 e3 f9 61 c0 00 vpcmpestri \$0x0,%xmm0,%xmm0
+ +[a-f0-9]+: c4 e3 f9 60 c0 00 vpcmpestrm \$0x0,%xmm0,%xmm0
+[a-f0-9]+: c4 e1 c9 64 d4 vpcmpgtb %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e1 c9 66 d4 vpcmpgtd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 37 d4 vpcmpgtq %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e1 c9 65 d4 vpcmpgtw %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e3 f9 63 f4 07 vpcmpistri \$0x7,%xmm4,%xmm6
+[a-f0-9]+: c4 e3 f9 62 f4 07 vpcmpistrm \$0x7,%xmm4,%xmm6
+ +[a-f0-9]+: c4 e3 f9 16 c0 00 vpextrd \$0x0,%xmm0,%eax
+ +[a-f0-9]+: c4 e3 f9 16 00 00 vpextrd \$0x0,%xmm0,\(%eax\)
+[a-f0-9]+: c4 e2 c9 02 d4 vphaddd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 03 d4 vphaddsw %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 01 d4 vphaddw %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 06 d4 vphsubd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 07 d4 vphsubsw %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 05 d4 vphsubw %xmm4,%xmm6,%xmm2
+ +[a-f0-9]+: c4 e3 f9 22 c0 00 vpinsrd \$0x0,%eax,%xmm0,%xmm0
+ +[a-f0-9]+: c4 e3 f9 22 00 00 vpinsrd \$0x0,\(%eax\),%xmm0,%xmm0
+[a-f0-9]+: c4 e2 c9 04 d4 vpmaddubsw %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e1 c9 f5 d4 vpmaddwd %xmm4,%xmm6,%xmm2
+[a-f0-9]+: c4 e2 c9 3c d4 vpmaxsb %xmm4,%xmm6,%xmm2
.allow_index_reg
.text
_start:
+ andn (%eax), %eax, %eax
+ bextr %eax, (%eax), %eax
+ bextr $0, (%eax), %eax
+ blcfill (%eax), %eax
+ blci (%eax), %eax
+ blcic (%eax), %eax
+ blcmsk (%eax), %eax
+ blcs (%eax), %eax
+ blsfill (%eax), %eax
+ blsi (%eax), %eax
+ blsic (%eax), %eax
+ blsmsk (%eax), %eax
+ blsr (%eax), %eax
+ bzhi %eax, (%eax), %eax
+ llwpcb %eax
+ lwpins $0, (%eax), %eax
+ lwpval $0, (%eax), %eax
+ mulx (%eax), %eax, %eax
+ pdep (%eax), %eax, %eax
+ pext (%eax), %eax, %eax
+ rorx $0, (%eax), %eax
+ sarx %eax, (%eax), %eax
+ shlx %eax, (%eax), %eax
+ shrx %eax, (%eax), %eax
+ slwpcb %eax
+ t1mskc (%eax), %eax
+ tzmsk (%eax), %eax
vaddpd %ymm4,%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddsd %xmm4,%xmm6,%xmm2
vcvtps2dq %ymm4,%ymm6
vcvtps2pd %xmm4,%ymm4
vcvtsd2ss %xmm4,%xmm6,%xmm2
+ vcvtsi2ss %eax, %xmm0, %xmm0
+ vcvtsi2ss (%eax), %xmm0, %xmm0
+ vcvtsi2sd %eax, %xmm0, %xmm0
+ vcvtsi2sd (%eax), %xmm0, %xmm0
vcvtss2sd %xmm4,%xmm6,%xmm2
+ vcvtss2si %xmm0, %eax
+ vcvtsd2si %xmm0, %eax
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqy %ymm4,%xmm4
vcvttps2dq %ymm4,%ymm6
+ vcvttss2si %xmm0, %eax
+ vcvttsd2si %xmm0, %eax
vdivpd %ymm4,%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivsd %xmm4,%xmm6,%xmm2
vmovaps %ymm4,%ymm6
{store} vmovapd %ymm4,%ymm6
{store} vmovaps %ymm4,%ymm6
+ vmovd %eax, %xmm0
+ vmovd (%eax), %xmm0
+ vmovd %xmm0, %eax
+ vmovd %xmm0, (%eax)
vmovddup %ymm4,%ymm6
vmovdqa %ymm4,%ymm6
vmovdqu %ymm4,%ymm6
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqw %xmm4,%xmm6,%xmm2
+ vpcmpestri $0, %xmm0, %xmm0
+ vpcmpestrm $0, %xmm0, %xmm0
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpistri $7,%xmm4,%xmm6
vpcmpistrm $7,%xmm4,%xmm6
+ vpextrd $0, %xmm0, %eax
+ vpextrd $0, %xmm0, (%eax)
vphaddd %xmm4,%xmm6,%xmm2
vphaddsw %xmm4,%xmm6,%xmm2
vphaddw %xmm4,%xmm6,%xmm2
vphsubd %xmm4,%xmm6,%xmm2
vphsubsw %xmm4,%xmm6,%xmm2
vphsubw %xmm4,%xmm6,%xmm2
+ vpinsrd $0, %eax, %xmm0, %xmm0
+ vpinsrd $0, (%eax), %xmm0, %xmm0
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaxsb %xmm4,%xmm6,%xmm2
.text
_start:
+ {evex} vcvtsi2ss %eax, %xmm0, %xmm0
+ {evex} vcvtsi2ss 4(%eax), %xmm0, %xmm0
+
+ {evex} vcvtsi2sd %eax, %xmm0, %xmm0
+ {evex} vcvtsi2sd 4(%eax), %xmm0, %xmm0
+
+ {evex} vcvtss2si %xmm0, %eax
+
+ {evex} vcvtsd2si %xmm0, %eax
+
+ {evex} vcvttss2si %xmm0, %eax
+
+ {evex} vcvttsd2si %xmm0, %eax
+
+ vcvtusi2ss %eax, %xmm0, %xmm0
+ vcvtusi2ss 4(%eax), %xmm0, %xmm0
+
+ vcvtusi2sd %eax, %xmm0, %xmm0
+ vcvtusi2sd 4(%eax), %xmm0, %xmm0
+
+ vcvtss2usi %xmm0, %eax
+
+ vcvtsd2usi %xmm0, %eax
+
+ vcvttss2usi %xmm0, %eax
+
+ vcvttsd2usi %xmm0, %eax
+
+ {evex} vextractps $0, %xmm0, %eax
+ {evex} vextractps $0, %xmm0, 4(%eax)
+
+ {evex} vpextrb $0, %xmm0, %eax
+ {evex} vpextrb $0, %xmm0, 1(%eax)
+
+ {evex} vpextrw $0, %xmm0, %eax
+ {evex} {store} vpextrw $0, %xmm0, %eax
+ {evex} vpextrw $0, %xmm0, 2(%eax)
+
+ {evex} vpinsrb $0, %eax, %xmm0, %xmm0
+ {evex} vpinsrb $0, 1(%eax), %xmm0, %xmm0
+
+ {evex} vpinsrw $0, %eax, %xmm0, %xmm0
+ {evex} vpinsrw $0, 2(%eax), %xmm0, %xmm0
+
vpmovsxbd %xmm5, %zmm6{%k7} # AVX512
vpmovsxbd %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxbd (%ecx), %zmm6{%k7} # AVX512
Disassembly of section .text:
0+ <_start>:
+[ ]*[a-f0-9]+: 62 f1 fe 08 2a c0 vcvtsi2ss xmm0,xmm0,eax
+[ ]*[a-f0-9]+: 62 f1 fe 08 2a 40 01 vcvtsi2ss xmm0,xmm0,DWORD PTR \[eax\+0x4\]
+[ ]*[a-f0-9]+: 62 f1 ff 08 2a c0 vcvtsi2sd xmm0,xmm0,eax
+[ ]*[a-f0-9]+: 62 f1 ff 08 2a 40 01 vcvtsi2sd xmm0,xmm0,DWORD PTR \[eax\+0x4\]
+[ ]*[a-f0-9]+: 62 f1 fe 08 2d c0 vcvtss2si eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 2d c0 vcvtsd2si eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 2c c0 vcvttss2si eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 2c c0 vcvttsd2si eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 7b c0 vcvtusi2ss xmm0,xmm0,eax
+[ ]*[a-f0-9]+: 62 f1 fe 08 7b 40 01 vcvtusi2ss xmm0,xmm0,DWORD PTR \[eax\+0x4\]
+[ ]*[a-f0-9]+: 62 f1 ff 08 7b c0 vcvtusi2sd xmm0,xmm0,eax
+[ ]*[a-f0-9]+: 62 f1 ff 08 7b 40 01 vcvtusi2sd xmm0,xmm0,DWORD PTR \[eax\+0x4\]
+[ ]*[a-f0-9]+: 62 f1 fe 08 79 c0 vcvtss2usi eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 79 c0 vcvtsd2usi eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 78 c0 vcvttss2usi eax,xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 78 c0 vcvttsd2usi eax,xmm0
+[ ]*[a-f0-9]+: 62 f3 fd 08 17 c0 00 vextractps eax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 17 40 01 00 vextractps DWORD PTR \[eax\+0x4\],xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 c0 00 vpextrb eax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 40 01 00 vpextrb BYTE PTR \[eax\+0x1\],xmm0,0x0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c5 c0 00 vpextrw eax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 c0 00 vpextrw eax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 40 01 00 vpextrw WORD PTR \[eax\+0x2\],xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 c0 00 vpinsrb xmm0,xmm0,eax,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 40 01 00 vpinsrb xmm0,xmm0,BYTE PTR \[eax\+0x1\],0x0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 c0 00 vpinsrw xmm0,xmm0,eax,0x0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 40 01 00 vpinsrw xmm0,xmm0,WORD PTR \[eax\+0x2\],0x0
[ ]*[a-f0-9]+: 62 f2 fd 4f 21 f5 vpmovsxbd zmm6\{k7\},xmm5
[ ]*[a-f0-9]+: 62 f2 fd cf 21 f5 vpmovsxbd zmm6\{k7\}\{z\},xmm5
[ ]*[a-f0-9]+: 62 f2 fd 4f 21 31 vpmovsxbd zmm6\{k7\},XMMWORD PTR \[ecx\]
Disassembly of section .text:
0+ <_start>:
+[ ]*[a-f0-9]+: 62 f1 fe 08 2a c0 vcvtsi2ss %eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 2a 40 01 vcvtsi2ssl 0x4\(%eax\),%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 2a c0 vcvtsi2sd %eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 2a 40 01 vcvtsi2sdl 0x4\(%eax\),%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 2d c0 vcvtss2si %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 ff 08 2d c0 vcvtsd2si %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 fe 08 2c c0 vcvttss2si %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 ff 08 2c c0 vcvttsd2si %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 fe 08 7b c0 vcvtusi2ss %eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 7b 40 01 vcvtusi2ssl 0x4\(%eax\),%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 7b c0 vcvtusi2sd %eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 ff 08 7b 40 01 vcvtusi2sdl 0x4\(%eax\),%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fe 08 79 c0 vcvtss2usi %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 ff 08 79 c0 vcvtsd2usi %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 fe 08 78 c0 vcvttss2usi %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f1 ff 08 78 c0 vcvttsd2usi %xmm0,%eax
+[ ]*[a-f0-9]+: 62 f3 fd 08 17 c0 00 vextractps \$0x0,%xmm0,%eax
+[ ]*[a-f0-9]+: 62 f3 fd 08 17 40 01 00 vextractps \$0x0,%xmm0,0x4\(%eax\)
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 c0 00 vpextrb \$0x0,%xmm0,%eax
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 40 01 00 vpextrb \$0x0,%xmm0,0x1\(%eax\)
+[ ]*[a-f0-9]+: 62 f1 fd 08 c5 c0 00 vpextrw \$0x0,%xmm0,%eax
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 c0 00 vpextrw \$0x0,%xmm0,%eax
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 40 01 00 vpextrw \$0x0,%xmm0,0x2\(%eax\)
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 c0 00 vpinsrb \$0x0,%eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 40 01 00 vpinsrb \$0x0,0x1\(%eax\),%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 c0 00 vpinsrw \$0x0,%eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 40 01 00 vpinsrw \$0x0,0x2\(%eax\),%xmm0,%xmm0
[ ]*[a-f0-9]+: 62 f2 fd 4f 21 f5 vpmovsxbd %xmm5,%zmm6\{%k7\}
[ ]*[a-f0-9]+: 62 f2 fd cf 21 f5 vpmovsxbd %xmm5,%zmm6\{%k7\}\{z\}
[ ]*[a-f0-9]+: 62 f2 fd 4f 21 31 vpmovsxbd \(%ecx\),%zmm6\{%k7\}
+++ /dev/null
-#as: -mevexwig=1
-#objdump: -dw
-#name: i386 non-WIG EVEX insns with -mevexwig=1
-
-.*: +file format .*
-
-
-Disassembly of section .text:
-
-0+ <_start>:
- +[a-f0-9]+: 62 f1 56 38 2a f0 vcvtsi2ss %eax,\{rd-sae\},%xmm5,%xmm6
- +[a-f0-9]+: 62 f1 56 08 2a f0 vcvtsi2ss %eax,%xmm5,%xmm6
- +[a-f0-9]+: 62 f1 57 08 2a f0 vcvtsi2sd %eax,%xmm5,%xmm6
- +[a-f0-9]+: 62 f1 56 38 7b f0 vcvtusi2ss %eax,\{rd-sae\},%xmm5,%xmm6
- +[a-f0-9]+: 62 f1 56 08 7b f0 vcvtusi2ss %eax,%xmm5,%xmm6
- +[a-f0-9]+: 62 f1 57 08 7b f0 vcvtusi2sd %eax,%xmm5,%xmm6
-#pass
+++ /dev/null
-# Check non-WIG EVEX instructions with -mevexwig=1
-
- .allow_index_reg
- .text
-_start:
- vcvtsi2ss %eax, {rd-sae}, %xmm5, %xmm6
- {evex} vcvtsi2ss %eax, %xmm5, %xmm6
- {evex} vcvtsi2sd %eax, %xmm5, %xmm6
- vcvtusi2ss %eax, {rd-sae}, %xmm5, %xmm6
- {evex} vcvtusi2ss %eax, %xmm5, %xmm6
- {evex} vcvtusi2sd %eax, %xmm5, %xmm6
run_dump_test "evex-lig-2"
run_dump_test "evex-wig1"
run_dump_test "evex-wig1-intel"
- run_dump_test "evex-wig2"
run_dump_test "sse2avx"
run_list_test "inval-avx" "-al"
run_list_test "inval-avx512f" "-al"
vextractps $123, %xmm29, -512(%rdx) # AVX512 Disp8
vextractps $123, %xmm29, -516(%rdx) # AVX512
+ {evex} vpextrb $0, %xmm0, %eax
+ {evex} vpextrb $0, %xmm0, (%rax)
+
+ {evex} vpextrw $0, %xmm0, %eax
+ {evex} {store} vpextrw $0, %xmm0, %eax
+ {evex} vpextrw $0, %xmm0, (%rax)
+
+ {evex} vpinsrb $0, %eax, %xmm0, %xmm0
+ {evex} vpinsrb $0, (%rax), %xmm0, %xmm0
+
+ {evex} vpinsrw $0, %eax, %xmm0, %xmm0
+ {evex} vpinsrw $0, (%rax), %xmm0, %xmm0
+
vpmovsxbd %xmm29, %zmm30{%k7} # AVX512
vpmovsxbd %xmm29, %zmm30{%k7}{z} # AVX512
vpmovsxbd (%rcx), %zmm30{%k7} # AVX512
[ ]*[a-f0-9]+: 62 63 fd 08 17 aa 00 02 00 00 7b vextractps DWORD PTR \[rdx\+0x200\],xmm29,0x7b
[ ]*[a-f0-9]+: 62 63 fd 08 17 6a 80 7b vextractps DWORD PTR \[rdx-0x200\],xmm29,0x7b
[ ]*[a-f0-9]+: 62 63 fd 08 17 aa fc fd ff ff 7b vextractps DWORD PTR \[rdx-0x204\],xmm29,0x7b
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 c0 00 vpextrb rax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 00 00 vpextrb BYTE PTR \[rax\],xmm0,0x0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c5 c0 00 vpextrw rax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 c0 00 vpextrw rax,xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 00 00 vpextrw WORD PTR \[rax\],xmm0,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 c0 00 vpinsrb xmm0,xmm0,eax,0x0
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 00 00 vpinsrb xmm0,xmm0,BYTE PTR \[rax\],0x0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 c0 00 vpinsrw xmm0,xmm0,eax,0x0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 00 00 vpinsrw xmm0,xmm0,WORD PTR \[rax\],0x0
[ ]*[a-f0-9]+: 62 02 fd 4f 21 f5 vpmovsxbd zmm30\{k7\},xmm29
[ ]*[a-f0-9]+: 62 02 fd cf 21 f5 vpmovsxbd zmm30\{k7\}\{z\},xmm29
[ ]*[a-f0-9]+: 62 62 fd 4f 21 31 vpmovsxbd zmm30\{k7\},XMMWORD PTR \[rcx\]
[ ]*[a-f0-9]+: 62 63 fd 08 17 aa 00 02 00 00 7b vextractps \$0x7b,%xmm29,0x200\(%rdx\)
[ ]*[a-f0-9]+: 62 63 fd 08 17 6a 80 7b vextractps \$0x7b,%xmm29,-0x200\(%rdx\)
[ ]*[a-f0-9]+: 62 63 fd 08 17 aa fc fd ff ff 7b vextractps \$0x7b,%xmm29,-0x204\(%rdx\)
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 c0 00 vpextrb \$0x0,%xmm0,%rax
+[ ]*[a-f0-9]+: 62 f3 fd 08 14 00 00 vpextrb \$0x0,%xmm0,\(%rax\)
+[ ]*[a-f0-9]+: 62 f1 fd 08 c5 c0 00 vpextrw \$0x0,%xmm0,%rax
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 c0 00 vpextrw \$0x0,%xmm0,%rax
+[ ]*[a-f0-9]+: 62 f3 fd 08 15 00 00 vpextrw \$0x0,%xmm0,\(%rax\)
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 c0 00 vpinsrb \$0x0,%eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f3 fd 08 20 00 00 vpinsrb \$0x0,\(%rax\),%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 c0 00 vpinsrw \$0x0,%eax,%xmm0,%xmm0
+[ ]*[a-f0-9]+: 62 f1 fd 08 c4 00 00 vpinsrw \$0x0,\(%rax\),%xmm0,%xmm0
[ ]*[a-f0-9]+: 62 02 fd 4f 21 f5 vpmovsxbd %xmm29,%zmm30\{%k7\}
[ ]*[a-f0-9]+: 62 02 fd cf 21 f5 vpmovsxbd %xmm29,%zmm30\{%k7\}\{z\}
[ ]*[a-f0-9]+: 62 62 fd 4f 21 31 vpmovsxbd \(%rcx\),%zmm30\{%k7\}
2018-11-06 Jan Beulich <jbeulich@suse.com>
+ * i386-opc.tbl (vcvtsi2sd, vcvtsi2ss, vmovd, vpcmpestri,
+ vpcmpestrm, vpextrd, vpinsrd, vpbroadcastd, vcvtusi2sd,
+ vcvtusi2ss, kmovd): Drop VexW=1.
+ * i386-tbl.h: Re-generate.
+
+2018-11-06 Jan Beulich <jbeulich@suse.com>
+
* i386-opc.tbl (Vex128, Vex256, VexLIG, EVex128, EVex256,
EVex512, EVexLIG, EVexDYN): New.
(ldmxcsr, stmxcsr, vldmxcsr, vstmxcsr, all BMI, BMI2, and TBM
vcvtps2pd, 2, 0x5a, None, 1, CpuAVX, Modrm|Vex=2|VexOpcode=0|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Xmmword|Unspecified|BaseIndex, RegYMM }
vcvtsd2si, 2, 0xf22d, None, 1, CpuAVX, Modrm|Vex=3|VexOpcode=0|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|ToDword, { Qword|Unspecified|BaseIndex|RegXMM, Reg32|Reg64 }
vcvtsd2ss, 3, 0xf25a, None, 1, CpuAVX, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|VexWIG|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Qword|Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
-vcvtsi2sd, 3, 0xf22a, None, 1, CpuAVX|CpuNo64, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Dword|Unspecified|BaseIndex, RegXMM, RegXMM }
+vcvtsi2sd, 3, 0xf22a, None, 1, CpuAVX|CpuNo64, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsi2sd, 3, 0xf22a, None, 1, CpuAVX|Cpu64, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Reg32|Reg64|Dword|Qword|Unspecified|BaseIndex, RegXMM, RegXMM }
-vcvtsi2ss, 3, 0xf32a, None, 1, CpuAVX|CpuNo64, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Dword|Unspecified|BaseIndex, RegXMM, RegXMM }
+vcvtsi2ss, 3, 0xf32a, None, 1, CpuAVX|CpuNo64, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsi2ss, 3, 0xf32a, None, 1, CpuAVX|Cpu64, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Reg32|Reg64|Dword|Qword|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtss2sd, 3, 0xf35a, None, 1, CpuAVX, Modrm|Vex=3|VexOpcode=0|VexVVVV=1|VexWIG|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Dword|Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
vcvtss2si, 2, 0xf32d, None, 1, CpuAVX, Modrm|Vex=3|VexOpcode=0|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|ToQword, { Dword|Unspecified|BaseIndex|RegXMM, Reg32|Reg64 }
// by Intel AVX spec). To avoid extra template in gcc x86 backend and
// support assembler for AMD64, we accept 64bit operand on vmovd so
// that we can use one template for both SSE and AVX instructions.
-vmovd, 2, 0x666e, None, 1, CpuAVX, D|Modrm|Vex=1|VexOpcode=0|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM }
+vmovd, 2, 0x666e, None, 1, CpuAVX, D|Modrm|Vex=1|VexOpcode=0|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM }
vmovd, 2, 0x666e, None, 1, CpuAVX|Cpu64, D|Modrm|Vex=1|VexOpcode=0|VexW=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|Size64, { Reg64|RegMem, RegXMM }
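The 64-bit-operand acceptance the comment above describes, shown as a brief example (not from the testsuite):

	.text
	.code64
	vmovd	%eax, %xmm0	# Reg32 template above (66 0F 6E, VEX.W0)
	vmovd	%rax, %xmm0	# Reg64 template above (VEX.W1 form, Size64);
				# objdump typically names this vmovq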
vmovddup, 2, 0xf212, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexWIG|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Qword|Unspecified|BaseIndex|RegXMM, RegXMM }
vmovddup, 2, 0xf212, None, 1, CpuAVX, Modrm|Vex=2|VexOpcode=0|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegYMM, RegYMM }
vpcmpeqd, 3, 0x6676, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexVVVV=1|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
vpcmpeqq, 3, 0x6629, None, 1, CpuAVX, Modrm|Vex|VexOpcode=1|VexVVVV=1|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
vpcmpeqw, 3, 0x6675, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexVVVV=1|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
-vpcmpestri, 3, 0x6661, None, 1, CpuAVX|CpuNo64, Modrm|Vex|VexOpcode=2|VexW=1|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Unspecified|BaseIndex|RegXMM, RegXMM }
+vpcmpestri, 3, 0x6661, None, 1, CpuAVX|CpuNo64, Modrm|Vex|VexOpcode=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Unspecified|BaseIndex|RegXMM, RegXMM }
vpcmpestri, 3, 0x6661, None, 1, CpuAVX|Cpu64, Modrm|Vex|VexOpcode=2|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Imm8, Xmmword|Unspecified|BaseIndex|RegXMM, RegXMM }
-vpcmpestrm, 3, 0x6660, None, 1, CpuAVX|CpuNo64, Modrm|Vex|VexOpcode=2|VexW=1|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Unspecified|BaseIndex|RegXMM, RegXMM }
+vpcmpestrm, 3, 0x6660, None, 1, CpuAVX|CpuNo64, Modrm|Vex|VexOpcode=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Unspecified|BaseIndex|RegXMM, RegXMM }
vpcmpestrm, 3, 0x6660, None, 1, CpuAVX|Cpu64, Modrm|Vex|VexOpcode=2|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Imm8, Xmmword|Unspecified|BaseIndex|RegXMM, RegXMM }
vpcmpgtb, 3, 0x6664, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexVVVV=1|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
vpcmpgtd, 3, 0x6666, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexVVVV=1|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
vpermilps, 3, 0x6604, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexW=1|CheckRegSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Unspecified|BaseIndex|RegXMM|RegYMM, RegXMM|RegYMM }
vpextrb, 3, 0x6614, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|NoRex64, { Imm8, RegXMM, Reg32|Reg64|RegMem }
vpextrb, 3, 0x6614, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Byte|Unspecified|BaseIndex }
-vpextrd, 3, 0x6616, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Reg32|Dword|Unspecified|BaseIndex }
+vpextrd, 3, 0x6616, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Reg32|Dword|Unspecified|BaseIndex }
vpextrq, 3, 0x6616, None, 1, CpuAVX|Cpu64, Modrm|Vex|VexOpcode=2|VexW=2|Size64|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Reg64|Qword|Unspecified|BaseIndex }
vpextrw, 3, 0x66c5, None, 1, CpuAVX, Load|Modrm|Vex|VexOpcode=0|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|NoRex64, { Imm8, RegXMM, Reg32|Reg64 }
vpextrw, 3, 0x6615, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|NoRex64, { Imm8, RegXMM, Reg32|Reg64|RegMem }
vphsubw, 3, 0x6605, None, 1, CpuAVX, Modrm|Vex|VexOpcode=1|VexVVVV=1|VexWIG|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Unspecified|BaseIndex|RegXMM, RegXMM, RegXMM }
vpinsrb, 4, 0x6620, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|NoRex64, { Imm8, Reg32|Reg64, RegXMM, RegXMM }
vpinsrb, 4, 0x6620, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Byte|Unspecified|BaseIndex, RegXMM, RegXMM }
-vpinsrd, 4, 0x6622, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Reg32|Dword|Unspecified|BaseIndex, RegXMM, RegXMM }
+vpinsrd, 4, 0x6622, None, 1, CpuAVX, Modrm|Vex|VexOpcode=2|VexVVVV=1|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Reg32|Dword|Unspecified|BaseIndex, RegXMM, RegXMM }
vpinsrq, 4, 0x6622, None, 1, CpuAVX|Cpu64, Modrm|Vex|VexOpcode=2|Size64|VexVVVV=1|VexW=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Reg64|Qword|Unspecified|BaseIndex, RegXMM, RegXMM }
vpinsrw, 4, 0x66c4, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|NoRex64, { Imm8, Reg32|Reg64, RegXMM, RegXMM }
vpinsrw, 4, 0x66c4, None, 1, CpuAVX, Modrm|Vex|VexOpcode=0|VexVVVV=1|VexW=1|IgnoreSize|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Imm8, Word|Unspecified|BaseIndex, RegXMM, RegXMM }
vbroadcastsd, 2, 0x6619, None, 1, CpuAVX512F, Modrm|Masking=3|VexOpcode=1|VexW=2|Disp8MemShift=3|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegXMM|Qword|Unspecified|BaseIndex, RegYMM|RegZMM }
vpbroadcastd, 2, 0x6658, None, 1, CpuAVX512F, Modrm|Masking=3|VexOpcode=1|VexW=1|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegXMM|Dword|Unspecified|BaseIndex, RegXMM|RegYMM|RegZMM }
-vpbroadcastd, 2, 0x667C, None, 1, CpuAVX512F, Modrm|Masking=3|VexOpcode=1|VexW=1|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32, RegXMM|RegYMM|RegZMM }
+vpbroadcastd, 2, 0x667C, None, 1, CpuAVX512F, Modrm|Masking=3|VexOpcode=1|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32, RegXMM|RegYMM|RegZMM }
vcmppd, 4, 0x66C2, None, 1, CpuAVX512F, Modrm|Masking=2|VexOpcode=0|VexVVVV=1|VexW=2|Broadcast|Disp8ShiftVL|CheckRegSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM|RegYMM|RegZMM|Qword|Unspecified|BaseIndex, RegXMM|RegYMM|RegZMM, RegMask }
vcmppd, 5, 0x66C2, None, 1, CpuAVX512F, Modrm|EVex=1|Masking=2|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|SAE, { Imm8, Imm8, RegZMM, RegZMM, RegMask }
vcvtsd2ss, 3, 0xF25A, None, 1, CpuAVX512F, Modrm|EVex=4|Masking=3|VexOpcode=0|VexVVVV=1|VexW=2|Disp8MemShift=3|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegXMM|Qword|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsd2ss, 4, 0xF25A, None, 1, CpuAVX512F, Modrm|EVex=4|Masking=3|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|StaticRounding|SAE, { Imm8, RegXMM, RegXMM, RegXMM }
-vcvtsi2sd, 3, 0xF22A, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
+vcvtsi2sd, 3, 0xF22A, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsi2sd, 3, 0xF22A, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8ShiftVL|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Reg32|Reg64|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsi2sd, 4, 0xF22A, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_ldSuf|StaticRounding|SAE, { Reg64, Imm8, RegXMM, RegXMM }
vcvtsi2sd, 4, 0xF22A, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_ldSuf|StaticRounding|SAE|IntelSyntax, { Imm8, Reg64, RegXMM, RegXMM }
-vcvtusi2sd, 3, 0xF27B, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
+vcvtusi2sd, 3, 0xF27B, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtusi2sd, 3, 0xF27B, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8ShiftVL|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Reg32|Reg64|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtusi2sd, 4, 0xF27B, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_ldSuf|StaticRounding|SAE, { Reg64, Imm8, RegXMM, RegXMM }
vcvtusi2sd, 4, 0xF27B, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_ldSuf|StaticRounding|SAE|IntelSyntax, { Imm8, Reg64, RegXMM, RegXMM }
-vcvtsi2ss, 3, 0xF32A, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
+vcvtsi2ss, 3, 0xF32A, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsi2ss, 3, 0xF32A, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8ShiftVL|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Reg32|Reg64|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtsi2ss, 4, 0xF32A, None, 1, CpuAVX512F, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|StaticRounding|SAE, { Reg32|Reg64, Imm8, RegXMM, RegXMM }
vcvtsi2ss, 4, 0xF32A, None, 1, CpuAVX512F, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|StaticRounding|SAE|IntelSyntax, { Imm8, Reg32|Reg64, RegXMM, RegXMM }
-vcvtusi2ss, 3, 0xF37B, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|VexW=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
+vcvtusi2ss, 3, 0xF37B, None, 1, CpuAVX512F|CpuNo64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8MemShift=2|No_bSuf|No_wSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtusi2ss, 3, 0xF37B, None, 1, CpuAVX512F|Cpu64, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|Disp8ShiftVL|No_bSuf|No_wSuf|No_sSuf|No_ldSuf, { Reg32|Reg64|Unspecified|BaseIndex, RegXMM, RegXMM }
vcvtusi2ss, 4, 0xF37B, None, 1, CpuAVX512F, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|StaticRounding|SAE, { Reg32|Reg64, Imm8, RegXMM, RegXMM }
vcvtusi2ss, 4, 0xF37B, None, 1, CpuAVX512F, Modrm|EVex=4|VexOpcode=0|VexVVVV=1|No_bSuf|No_wSuf|No_sSuf|No_ldSuf|StaticRounding|SAE|IntelSyntax, { Imm8, Reg32|Reg64, RegXMM, RegXMM }
vmovntps, 2, 0x2B, None, 1, CpuAVX512F, Modrm|VexOpcode=0|VexW=1|Disp8ShiftVL|CheckRegSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegXMM|RegYMM|RegZMM, XMMword|YMMword|ZMMword|Unspecified|BaseIndex }
vmovups, 2, 0x10, None, 1, CpuAVX512F, D|Modrm|MaskingMorZ|VexOpcode=0|VexW=1|Disp8ShiftVL|CheckRegSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegXMM|RegYMM|RegZMM|Unspecified|BaseIndex, RegXMM|RegYMM|RegZMM }
-vmovd, 2, 0x666E, None, 1, CpuAVX512F, D|Modrm|EVex=2|VexOpcode=0|VexW=1|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM }
+vmovd, 2, 0x666E, None, 1, CpuAVX512F, D|Modrm|EVex=2|VexOpcode=0|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32|Unspecified|BaseIndex, RegXMM }
vmovddup, 2, 0xF212, None, 1, CpuAVX512F, Modrm|Masking=3|VexOpcode=0|VexW=2|Disp8ShiftVL|CheckRegSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegYMM|RegZMM|Unspecified|BaseIndex, RegYMM|RegZMM }
kandnd, 3, 0x6642, None, 1, CpuAVX512BW, Modrm|Vex=2|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf|Optimize, { RegMask, RegMask, RegMask }
kmovd, 2, 0x6690, None, 1, CpuAVX512BW, Modrm|Vex=1|VexOpcode=0|VexW=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegMask|Dword|Unspecified|BaseIndex, RegMask }
kmovd, 2, 0x6691, None, 1, CpuAVX512BW, Modrm|Vex=1|VexOpcode=0|VexW=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegMask, Dword|Unspecified|BaseIndex }
-kmovd, 2, 0xF292, None, 1, CpuAVX512BW, D|Modrm|Vex=1|VexOpcode=0|VexW=1|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32, RegMask }
+kmovd, 2, 0xF292, None, 1, CpuAVX512BW, D|Modrm|Vex=1|VexOpcode=0|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Reg32, RegMask }
knotd, 2, 0x6644, None, 1, CpuAVX512BW, Modrm|Vex=1|VexOpcode=0|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegMask, RegMask }
kord, 3, 0x6645, None, 1, CpuAVX512BW, Modrm|Vex=2|VexOpcode=0|VexVVVV=1|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegMask, RegMask, RegMask }
kortestd, 2, 0x6698, None, 1, CpuAVX512BW, Modrm|Vex=1|VexOpcode=0|VexW=2|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { RegMask, RegMask }
vinserti32x8, 4, 0x663A, None, 1, CpuAVX512DQ, Modrm|EVex=1|Masking=3|VexOpcode=2|VexVVVV=1|VexW=1|Disp8MemShift=5|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegYMM|Unspecified|BaseIndex, RegZMM, RegZMM }
vfpclassss, 3, 0x6667, None, 1, CpuAVX512DQ, Modrm|EVex=4|Masking=2|VexOpcode=2|VexW=1|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM|Dword|Unspecified|BaseIndex, RegMask }
-vpextrd, 3, 0x6616, None, 1, CpuAVX512DQ, Modrm|EVex128|VexOpcode=2|VexW=1|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Reg32|Dword|Unspecified|BaseIndex }
-vpinsrd, 4, 0x6622, None, 1, CpuAVX512DQ, Modrm|EVex128|VexOpcode=2|VexVVVV=1|VexW=1|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Reg32|Dword|Unspecified|BaseIndex, RegXMM, RegXMM }
+vpextrd, 3, 0x6616, None, 1, CpuAVX512DQ, Modrm|EVex128|VexOpcode=2|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Reg32|Dword|Unspecified|BaseIndex }
+vpinsrd, 4, 0x6622, None, 1, CpuAVX512DQ, Modrm|EVex128|VexOpcode=2|VexVVVV=1|Disp8MemShift=2|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, Reg32|Dword|Unspecified|BaseIndex, RegXMM, RegXMM }
vfpclasssd, 3, 0x6667, None, 1, CpuAVX512DQ, Modrm|EVex=4|Masking=2|VexOpcode=2|VexW=2|Disp8MemShift=3|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM|Qword|Unspecified|BaseIndex, RegMask }
vpextrq, 3, 0x6616, None, 1, CpuAVX512DQ, Modrm|EVex128|VexOpcode=2|VexW=2|Disp8MemShift=3|IgnoreSize|No_bSuf|No_wSuf|No_lSuf|No_sSuf|No_qSuf|No_ldSuf, { Imm8, RegXMM, Reg64|Qword|Unspecified|BaseIndex }
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 1,
- 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
- 1, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 1,
- 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
- 1, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
- 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
- 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
- 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
- 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
- 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
- 1, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 1, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 1, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
- 1, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 1, 0 } },
{ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
- 1, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 0 } },
0, 0, 0, 0, 0, 0 } },
{ 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
- 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
{ { { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0 } },