2018-04-26  Jan Beulich  <jbeulich@suse.com>
+ * config/tc-i386.c (parse_real_register): Check .cpuvrex before
+ recording EVEX encoding. Don't check previously specified
+ encoding.
+ * testsuite/gas/i386/xmmhi32.s: Add {x,y}mm{16,24} cases.
+ * testsuite/gas/i386/xmmhi32.d: Adjust expectations.
+ * testsuite/gas/i386/xmmhi64.s, testsuite/gas/i386/xmmhi64.d:
+ New.
+ * testsuite/gas/i386/i386.exp: Run new test.
+
+2018-04-26  Jan Beulich  <jbeulich@suse.com>
+
* testsuite/gas/i386/xsave.s: Add AVX, LWP, MPX, and PKU
dependency tests.
* testsuite/gas/i386/xsave.d,
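
The config/tc-i386.c (parse_real_register) hunk:
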
&& (r->reg_num == RegEiz || r->reg_num == RegRiz))
return (const reg_entry *) NULL;
- /* Upper 16 vector register is only available with VREX in 64bit
- mode. */
- if ((r->reg_flags & RegVRex))
+ /* Upper 16 vector registers are only available with VREX in 64bit
+ mode, and require EVEX encoding. */
+ if (r->reg_flags & RegVRex)
{
- if (i.vec_encoding == vex_encoding_default)
- i.vec_encoding = vex_encoding_evex;
-
if (!cpu_arch_flags.bitfield.cpuvrex
- || i.vec_encoding != vex_encoding_evex
|| flag_code != CODE_64BIT)
return (const reg_entry *) NULL;
+
+ i.vec_encoding = vex_encoding_evex;
}
if (((r->reg_flags & (RegRex64 | RegRex))
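
The net effect of the reordering: a name like xmm16 or zmm24 is accepted as a register only when both AVX512F (.cpuvrex) and 64-bit mode are in effect, and only then is EVEX encoding recorded. Previously the EVEX choice was recorded, and an earlier explicitly requested encoding could veto the register, even on paths that go on to reject it, leaving stale encoding state behind once the name falls back to being an ordinary symbol. A minimal sketch of the two cases as gas input (illustration only, not part of the patch; assumes a default x86-64 configuration; the new xmmhi64 test covers the first case, xmmhi32 the second):

	.intel_syntax noprefix
	.code64
	vaddps	zmm0, zmm1, zmm16	# valid register: 64-bit mode with AVX512F, EVEX-encoded
	.code32
	vaddps	xmm0, xmm1, xmm16	# no xmm16 in 32-bit mode: the name is parsed as a symbol

In the adjusted testsuite/gas/i386/xmmhi32.d expectations below, that fallback is exactly what shows up: the upper-16 names surface as R_386_32 relocations against plain symbols, and the instructions are VEX-encoded as if a memory operand had been written.
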
0+ <xmm>:
[ ]*[a-f0-9]+: c5 f0 58 05 00 00 00 00 vaddps 0x0,%xmm1,%xmm0 [a-f0-9]+: R_386_32 xmm8
+[ ]*[a-f0-9]+: c5 f0 58 05 00 00 00 00 vaddps 0x0,%xmm1,%xmm0 [a-f0-9]+: R_386_32 xmm16
+[ ]*[a-f0-9]+: c5 f0 58 05 00 00 00 00 vaddps 0x0,%xmm1,%xmm0 [a-f0-9]+: R_386_32 xmm24
[ ]*[a-f0-9]+: c5 f4 58 05 00 00 00 00 vaddps 0x0,%ymm1,%ymm0 [a-f0-9]+: R_386_32 ymm8
+[ ]*[a-f0-9]+: c5 f4 58 05 00 00 00 00 vaddps 0x0,%ymm1,%ymm0 [a-f0-9]+: R_386_32 ymm16
+[ ]*[a-f0-9]+: c5 f4 58 05 00 00 00 00 vaddps 0x0,%ymm1,%ymm0 [a-f0-9]+: R_386_32 ymm24
[ ]*[a-f0-9]+: 62 f1 74 48 58 05 00 00 00 00 vaddps 0x0,%zmm1,%zmm0 [a-f0-9]+: R_386_32 zmm8
[ ]*[a-f0-9]+: 62 f1 74 48 58 05 00 00 00 00 vaddps 0x0,%zmm1,%zmm0 [a-f0-9]+: R_386_32 zmm16
[ ]*[a-f0-9]+: 62 f1 74 48 58 05 00 00 00 00 vaddps 0x0,%zmm1,%zmm0 [a-f0-9]+: R_386_32 zmm24
[ ]*[a-f0-9]+: c5 f9 6f 05 00 00 00 00 vmovdqa 0x0,%xmm0 [a-f0-9]+: R_386_32 xmm8
+[ ]*[a-f0-9]+: c5 f9 6f 05 00 00 00 00 vmovdqa 0x0,%xmm0 [a-f0-9]+: R_386_32 xmm16
+[ ]*[a-f0-9]+: c5 f9 6f 05 00 00 00 00 vmovdqa 0x0,%xmm0 [a-f0-9]+: R_386_32 xmm24
[ ]*[a-f0-9]+: c5 fd 6f 05 00 00 00 00 vmovdqa 0x0,%ymm0 [a-f0-9]+: R_386_32 ymm8
+[ ]*[a-f0-9]+: c5 fd 6f 05 00 00 00 00 vmovdqa 0x0,%ymm0 [a-f0-9]+: R_386_32 ymm16
+[ ]*[a-f0-9]+: c5 fd 6f 05 00 00 00 00 vmovdqa 0x0,%ymm0 [a-f0-9]+: R_386_32 ymm24
[ ]*[a-f0-9]+: c5 f9 7f 05 00 00 00 00 vmovdqa %xmm0,0x0 [a-f0-9]+: R_386_32 xmm8
+[ ]*[a-f0-9]+: c5 f9 7f 05 00 00 00 00 vmovdqa %xmm0,0x0 [a-f0-9]+: R_386_32 xmm16
+[ ]*[a-f0-9]+: c5 f9 7f 05 00 00 00 00 vmovdqa %xmm0,0x0 [a-f0-9]+: R_386_32 xmm24
[ ]*[a-f0-9]+: c5 fd 7f 05 00 00 00 00 vmovdqa %ymm0,0x0 [a-f0-9]+: R_386_32 ymm8
+[ ]*[a-f0-9]+: c5 fd 7f 05 00 00 00 00 vmovdqa %ymm0,0x0 [a-f0-9]+: R_386_32 ymm16
+[ ]*[a-f0-9]+: c5 fd 7f 05 00 00 00 00 vmovdqa %ymm0,0x0 [a-f0-9]+: R_386_32 ymm24
[ ]*[a-f0-9]+: c5 f0 58 05 00 00 00 00 vaddps 0x0,%xmm1,%xmm0 [a-f0-9]+: R_386_32 xmm8
+[ ]*[a-f0-9]+: c5 f0 58 05 00 00 00 00 vaddps 0x0,%xmm1,%xmm0 [a-f0-9]+: R_386_32 xmm16
+[ ]*[a-f0-9]+: c5 f0 58 05 00 00 00 00 vaddps 0x0,%xmm1,%xmm0 [a-f0-9]+: R_386_32 xmm24
[ ]*[a-f0-9]+: c5 f4 58 05 00 00 00 00 vaddps 0x0,%ymm1,%ymm0 [a-f0-9]+: R_386_32 ymm8
+[ ]*[a-f0-9]+: c5 f4 58 05 00 00 00 00 vaddps 0x0,%ymm1,%ymm0 [a-f0-9]+: R_386_32 ymm16
+[ ]*[a-f0-9]+: c5 f4 58 05 00 00 00 00 vaddps 0x0,%ymm1,%ymm0 [a-f0-9]+: R_386_32 ymm24
[ ]*[a-f0-9]+: c5 f9 6f 05 00 00 00 00 vmovdqa 0x0,%xmm0 [a-f0-9]+: R_386_32 zmm0
[ ]*[a-f0-9]+: c5 f9 6f 05 00 00 00 00 vmovdqa 0x0,%xmm0 [a-f0-9]+: R_386_32 k0
[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 xmm8
+[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 xmm16
+[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 xmm24
[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 ymm0
[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 ymm8
+[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 ymm16
+[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 ymm24
[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 zmm0
[ ]*[a-f0-9]+: 0f 58 05 00 00 00 00 addps 0x0,%xmm0 [a-f0-9]+: R_386_32 k0
[ ]*[a-f0-9]+: a1 00 00 00 00 mov 0x0,%eax [a-f0-9]+: R_386_32 xmm0
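
The matching testsuite/gas/i386/xmmhi32.s additions follow; the zmm16/zmm24 lines were already present (and hence appear as unmarked context), which is why the ChangeLog only claims new {x,y}mm{16,24} cases:
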
.code32
xmm:
vaddps xmm0, xmm1, xmm8
+ vaddps xmm0, xmm1, xmm16
+ vaddps xmm0, xmm1, xmm24
vaddps ymm0, ymm1, ymm8
+ vaddps ymm0, ymm1, ymm16
+ vaddps ymm0, ymm1, ymm24
vaddps zmm0, zmm1, zmm8
vaddps zmm0, zmm1, zmm16
vaddps zmm0, zmm1, zmm24
vmovdqa xmm0, xmm8
+ vmovdqa xmm0, xmm16
+ vmovdqa xmm0, xmm24
vmovdqa ymm0, ymm8
+ vmovdqa ymm0, ymm16
+ vmovdqa ymm0, ymm24
vmovdqa xmm8, xmm0
+ vmovdqa xmm16, xmm0
+ vmovdqa xmm24, xmm0
vmovdqa ymm8, ymm0
+ vmovdqa ymm16, ymm0
+ vmovdqa ymm24, ymm0
.arch .noavx512f
vaddps xmm0, xmm1, xmm8
+ vaddps xmm0, xmm1, xmm16
+ vaddps xmm0, xmm1, xmm24
vaddps ymm0, ymm1, ymm8
+ vaddps ymm0, ymm1, ymm16
+ vaddps ymm0, ymm1, ymm24
vmovdqa xmm0, zmm0
vmovdqa xmm0, k0
.arch .noavx
addps xmm0, xmm8
+ addps xmm0, xmm16
+ addps xmm0, xmm24
addps xmm0, ymm0
addps xmm0, ymm8
+ addps xmm0, ymm16
+ addps xmm0, ymm24
addps xmm0, zmm0
addps xmm0, k0
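
With the new xmmhi64 test hooked into i386.exp, the affected tests can be exercised from a configured build tree with the usual testsuite invocation, e.g. make check-gas RUNTESTFLAGS=i386.exp from the top level; results land in gas/testsuite/gas.sum and gas.log for comparison against the .d expectations.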