commit 3ec5d83d2a237d39e7fd6ef7a0bc8ac4c171a4a5
Author: H.J. Lu <hjl.tools@gmail.com>
Date: Sat Jan 25 14:19:40 2020 -0800
x86-64: Avoid rep movsb with short distance [BZ #27130]
introduced some regressions on Intel processors without Fast Short REP
MOV (FSRM).  Add Avoid_Short_Distance_REP_MOVSB to avoid rep movsb with
short distance only on Intel processors with FSRM.  bench-memmove-large
on a Skylake server shows that the cycle counts of
__memmove_evex_unaligned_erms improve for the following data sizes:
                                   before    after    Improvement
length=4127, align1=3, align2=0:   479.38   349.25       27%
length=4223, align1=9, align2=5:   405.62   333.25       18%
length=8223, align1=3, align2=0:   786.12   496.38       37%
length=8319, align1=9, align2=5:   727.50   501.38       31%
length=16415, align1=3, align2=0:  1436.88  840.00       41%
length=16511, align1=9, align2=5:  1375.50  836.38       39%
length=32799, align1=3, align2=0:  2890.00  1860.12      36%
length=32895, align1=9, align2=5:  2891.38  1931.88      33%
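The pattern this change targets is a memmove whose source and destination
are less than 64 bytes apart, so the regions overlap and "rep movsb" runs
with a short distance.  A minimal sketch of such a workload follows; the
15-byte distance, the 4127-byte length and the iteration count are
illustrative only and are not taken from bench-memmove-large.

    #include <stdlib.h>
    #include <string.h>

    int
    main (void)
    {
      size_t len = 4127;
      char *buf = calloc (len + 64, 1);
      if (buf == NULL)
        return 1;
      /* Destination is only 15 bytes past the source, so the copy
         overlaps and "rep movsb" would run with a short distance.  */
      for (int i = 0; i < 100000; i++)
        memmove (buf + 15, buf, len);
      free (buf);
      return 0;
    }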
/* Threshold to stop using Enhanced REP MOVSB. */
long int __x86_rep_movsb_stop_threshold attribute_hidden;
+/* A bit-wise OR of string/memory requirements for optimal performance
+ e.g. X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB. These bits
+ are used at runtime to tune implementation behavior. */
+int __x86_string_control attribute_hidden;
+
static void
init_cacheinfo (void)
{
__x86_rep_movsb_threshold = cpu_features->rep_movsb_threshold;
__x86_rep_stosb_threshold = cpu_features->rep_stosb_threshold;
__x86_rep_movsb_stop_threshold = cpu_features->rep_movsb_stop_threshold;
+
+ if (CPU_FEATURES_ARCH_P (cpu_features, Avoid_Short_Distance_REP_MOVSB))
+ __x86_string_control
+ |= X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB;
}
#endif
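At run time the string routines only need to test the bit that
init_cacheinfo stored in __x86_string_control.  A hedged C rendering of
that check is below; avoid_short_distance_rep_movsb is a hypothetical
helper, it assumes glibc's internal attribute_hidden macro and the
constant defined further down, and the real test is done in assembly in
memmove (shown at the end of this section).

    /* Sketch only: the actual check is performed in assembly.  */
    extern int __x86_string_control attribute_hidden;

    static inline _Bool
    avoid_short_distance_rep_movsb (void)
    {
      return (__x86_string_control
              & X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB) != 0;
    }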
cpu_features->preferred[index_arch_Prefer_AVX2_STRCMP]
|= bit_arch_Prefer_AVX2_STRCMP;
}
+
+ /* Avoid short distance REP MOVSB on processors with FSRM. */
+ if (CPU_FEATURES_CPU_P (cpu_features, FSRM))
+ cpu_features->preferred[index_arch_Avoid_Short_Distance_REP_MOVSB]
+ |= bit_arch_Avoid_Short_Distance_REP_MOVSB;
}
/* This spells out "AuthenticAMD" or "HygonGenuine". */
else if ((ebx == 0x68747541 && ecx == 0x444d4163 && edx == 0x69746e65)
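CPU_FEATURES_CPU_P (cpu_features, FSRM) reads the FSRM bit that glibc
caches from CPUID.  Outside of glibc the same feature can be probed
directly; the sketch below uses GCC's <cpuid.h> and assumes FSRM is
reported in CPUID.(EAX=7,ECX=0):EDX bit 4.

    #include <cpuid.h>
    #include <stdio.h>

    int
    main (void)
    {
      unsigned int eax, ebx, ecx, edx;
      /* Leaf 7, subleaf 0: EDX bit 4 is FSRM (Fast Short REP MOV).  */
      if (__get_cpuid_count (7, 0, &eax, &ebx, &ecx, &edx))
        printf ("FSRM: %s\n", (edx & (1u << 4)) ? "yes" : "no");
      return 0;
    }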
BIT (MathVec_Prefer_No_AVX512)
BIT (Prefer_FSRM)
BIT (Prefer_AVX2_STRCMP)
+BIT (Avoid_Short_Distance_REP_MOVSB)
#define STATE_SAVE_MASK \
((1 << 1) | (1 << 2) | (1 << 3) | (1 << 5) | (1 << 6) | (1 << 7))
+/* Constants for bits in __x86_string_control: */
+
+/* Avoid short distance REP MOVSB. */
+#define X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB (1 << 0)
+
#ifdef __ASSEMBLER__
/* Syntactic details of assembler. */
/* Avoid slow backward REP MOVSB. */
jb L(more_8x_vec_backward)
# if AVOID_SHORT_DISTANCE_REP_MOVSB
+ testl $X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
+ jz 3f
movq %rdi, %rcx
subq %rsi, %rcx
jmp 2f
# endif
1:
# if AVOID_SHORT_DISTANCE_REP_MOVSB
+ testl $X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB, __x86_string_control(%rip)
+ jz 3f
movq %rsi, %rcx
subq %rdi, %rcx
2:
/* Avoid "rep movsb" if RCX, the distance between source and
   destination, is N*4GB + [1..63] with N >= 0. */
cmpl $63, %ecx
jbe L(more_2x_vec) /* Avoid "rep movsb" if ECX <= 63. */
+3:
# endif
mov %RDX_LP, %RCX_LP
rep movsb
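Both paths above load the distance between source and destination into
RCX and fall through to label 2, where only the low 32 bits (ECX) are
compared against 63.  A hedged C rendering of that condition follows;
short_distance is a hypothetical helper and the real test lives in the
assembly above.

    #include <stdbool.h>
    #include <stdint.h>

    /* True when the low 32 bits of the distance are <= 63, matching the
       "cmpl $63, %ecx; jbe" test above.  */
    static bool
    short_distance (const void *dst, const void *src)
    {
      uintptr_t d = (uintptr_t) dst;
      uintptr_t s = (uintptr_t) src;
      uint64_t distance = d > s ? d - s : s - d;
      return (uint32_t) distance <= 63;
    }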