2012-05-15  H.J. Lu  <hongjiu.lu@intel.com>

+	* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Load cache
+	sizes into R*_LP.
+
+2012-05-15  H.J. Lu  <hongjiu.lu@intel.com>
+
	* sysdeps/x86_64/strcmp.S: Load pointers into R*_LP.

2012-05-15  H.J. Lu  <hongjiu.lu@intel.com>
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3-back.S
 and $0xf, %r9
jz L(shl_0)
#ifdef DATA_CACHE_SIZE
- mov $DATA_CACHE_SIZE, %rcx
+ mov $DATA_CACHE_SIZE, %RCX_LP
#else
- mov __x86_64_data_cache_size(%rip), %rcx
+ mov __x86_64_data_cache_size(%rip), %RCX_LP
#endif
cmp %rcx, %rdx
jae L(gobble_mem_fwd)
ALIGN (4)
L(copy_backward):
#ifdef DATA_CACHE_SIZE
- mov $DATA_CACHE_SIZE, %rcx
+ mov $DATA_CACHE_SIZE, %RCX_LP
#else
- mov __x86_64_data_cache_size(%rip), %rcx
+ mov __x86_64_data_cache_size(%rip), %RCX_LP
#endif
shl $1, %rcx
cmp %rcx, %rdx
shr $8, %r9
add %rdx, %r9
#ifdef DATA_CACHE_SIZE
- cmp $DATA_CACHE_SIZE_HALF, %r9
+ cmp $DATA_CACHE_SIZE_HALF, %R9_LP
#else
- cmp __x86_64_data_cache_size_half(%rip), %r9
+ cmp __x86_64_data_cache_size_half(%rip), %R9_LP
#endif
jae L(gobble_mem_fwd)
sub $0x80, %rdx
add $16, %rdi
#ifdef SHARED_CACHE_SIZE_HALF
- mov $SHARED_CACHE_SIZE_HALF, %rcx
+ mov $SHARED_CACHE_SIZE_HALF, %RCX_LP
#else
- mov __x86_64_shared_cache_size_half(%rip), %rcx
+ mov __x86_64_shared_cache_size_half(%rip), %RCX_LP
#endif
#ifdef USE_AS_MEMMOVE
mov %rsi, %r9
#ifdef SHARED_CACHE_SIZE_HALF
- mov $SHARED_CACHE_SIZE_HALF, %rcx
+ mov $SHARED_CACHE_SIZE_HALF, %RCX_LP
#else
- mov __x86_64_shared_cache_size_half(%rip), %rcx
+ mov __x86_64_shared_cache_size_half(%rip), %RCX_LP
#endif
#ifdef USE_AS_MEMMOVE
mov %rdi, %r9
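
Note on the R*_LP macros used in the hunks above: on x86_64 (LP64) they simply name the 64-bit registers, while on x32, where long and pointers are 4 bytes, they name the 32-bit register aliases, so the same instruction gets the operand size that matches variables such as __x86_64_data_cache_size (a long int).  The following is a minimal sketch of how such macros can be defined; the exact spelling and location of the real definitions in glibc's x86_64 sysdep headers may differ.

/* Sketch only: map "long/pointer" register names to the right width.  */
#ifdef __ILP32__		/* x32: long and pointers are 4 bytes.  */
# define RCX_LP	ecx
# define RDX_LP	edx
# define R9_LP	r9d
#else				/* LP64: full 64-bit registers.  */
# define RCX_LP	rcx
# define RDX_LP	rdx
# define R9_LP	r9
#endif

With definitions along these lines, "mov __x86_64_data_cache_size(%rip), %RCX_LP" assembles to a 4-byte load into %ecx on x32 (which zero-extends into %rcx), and to the original 8-byte load into %rcx on x86_64.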