From a35a59036ebae3efcdf5e8167610e0656fca9770 Mon Sep 17 00:00:00 2001
From: "H.J. Lu"
Date: Thu, 11 Jun 2020 12:41:18 -0700
Subject: [PATCH] x86_64: Use %xmmN with vpxor to clear a vector register

Since "vpxor %xmmN, %xmmN, %xmmN" clears the whole vector register, use
%xmmN, instead of %ymmN, with vpxor to clear a vector register.
---
 sysdeps/x86_64/multiarch/strcmp-avx2.S  | 4 ++--
 sysdeps/x86_64/multiarch/strrchr-avx2.S | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/sysdeps/x86_64/multiarch/strcmp-avx2.S b/sysdeps/x86_64/multiarch/strcmp-avx2.S
index 48d03a9..5f88a68 100644
--- a/sysdeps/x86_64/multiarch/strcmp-avx2.S
+++ b/sysdeps/x86_64/multiarch/strcmp-avx2.S
@@ -91,8 +91,8 @@ ENTRY (STRCMP)
 # endif
 	movl	%edi, %eax
 	xorl	%edx, %edx
-	/* Make %ymm7 all zeros in this function.  */
-	vpxor	%ymm7, %ymm7, %ymm7
+	/* Make %xmm7 (%ymm7) all zeros in this function.  */
+	vpxor	%xmm7, %xmm7, %xmm7
 	orl	%esi, %eax
 	andl	$(PAGE_SIZE - 1), %eax
 	cmpl	$(PAGE_SIZE - (VEC_SIZE * 4)), %eax
diff --git a/sysdeps/x86_64/multiarch/strrchr-avx2.S b/sysdeps/x86_64/multiarch/strrchr-avx2.S
index 23077b4..146bdd5 100644
--- a/sysdeps/x86_64/multiarch/strrchr-avx2.S
+++ b/sysdeps/x86_64/multiarch/strrchr-avx2.S
@@ -44,7 +44,7 @@ ENTRY (STRRCHR)
 	movl	%edi, %ecx
 	/* Broadcast CHAR to YMM4.  */
 	VPBROADCAST %xmm4, %ymm4
-	vpxor	%ymm0, %ymm0, %ymm0
+	vpxor	%xmm0, %xmm0, %xmm0
 
 	/* Check if we may cross page boundary with one vector load.  */
 	andl	$(2 * VEC_SIZE - 1), %ecx
--
2.7.4
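
For reference, the behavior the change relies on is the VEX zero-extension
rule: a VEX-encoded instruction that writes an %xmmN destination also clears
bits 255:128 of the corresponding %ymmN, so the 128-bit vpxor already zeroes
the full vector register.  A minimal standalone sketch (not part of the
patch; the file name below is made up for illustration) that checks this on
an AVX2-capable x86-64 machine:

	.text
	.globl	main
main:
	subq	$40, %rsp
	/* Dirty all 256 bits of %ymm0.  */
	vpcmpeqd %ymm0, %ymm0, %ymm0
	/* 128-bit clear; the VEX encoding also zeroes bits 255:128.  */
	vpxor	%xmm0, %xmm0, %xmm0
	/* Store the full %ymm0 and OR its four quadwords together; the
	   result is 0 only if the whole register is clear.  */
	vmovdqu	%ymm0, (%rsp)
	movq	(%rsp), %rax
	orq	8(%rsp), %rax
	orq	16(%rsp), %rax
	orq	24(%rsp), %rax
	vzeroupper
	addq	$40, %rsp
	ret

Built and run as, say, "gcc vpxor-check.S -o vpxor-check && ./vpxor-check;
echo $?" (vpxor-check.S being a hypothetical name), it should exit with
status 0, i.e. every bit of %ymm0 is clear after the %xmm0 form of vpxor.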