Handle AVX saving on x86-64 in interrupted symbol lookups.
author     Ulrich Drepper <drepper@redhat.com>
Tue, 25 Aug 2009 17:42:30 +0000 (10:42 -0700)
committer  Ulrich Drepper <drepper@redhat.com>
Tue, 25 Aug 2009 17:42:30 +0000 (10:42 -0700)
If a signal arrived during a symbol lookup and the signal handler also
required a symbol lookup, the end of the lookup in the signal handler
reset the flag indicating whether the AVX/SSE registers need to be
restored.  As a result the tail part of the outer lookup code would
still try to restore the registers, which can fail miserably.  We now
restore the flag to its previous value, which makes nested lookups
possible.
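
The fix can be pictured with a small stand-alone C model (an
illustrative sketch only, not glibc code; the thread-local flag and the
lookup() function below are hypothetical names).  Each bracketed region
remembers the previous value of the must-save flag and writes it back on
exit instead of clearing the flag unconditionally, so an inner lookup
triggered from a signal handler no longer destroys the state the outer
lookup's tail code depends on:

    #include <stdio.h>

    /* Hypothetical stand-in for header.rtld_must_xmm_save in the TCB.  */
    static __thread int must_xmm_save;

    /* Sketch of a lookup bracketed like RTLD_ENABLE_FOREIGN_CALL /
       RTLD_FINALIZE_FOREIGN_CALL: save the old flag value, set the
       flag, and restore the old value on the way out.  The recursive
       call models a nested lookup from a signal handler.  */
    static void
    lookup (int depth)
    {
      int old_must_xmm_save = must_xmm_save;   /* save previous value */
      must_xmm_save = 1;

      if (depth > 0)
        lookup (depth - 1);          /* nested "signal handler" lookup */

      /* Restoring the saved value (instead of unconditionally writing
         0, as the old code did) leaves the outer invocation's state
         intact.  */
      must_xmm_save = old_must_xmm_save;
    }

    int
    main (void)
    {
      lookup (2);
      printf ("flag after nested lookups: %d\n", must_xmm_save);
      return 0;
    }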

ChangeLog
nptl/ChangeLog
nptl/sysdeps/x86_64/tls.h
sysdeps/x86_64/dl-trampoline.S

index b2d98e4..560f5db 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,8 @@
+2009-08-25  Ulrich Drepper  <drepper@redhat.com>
+
+       * sysdeps/x86_64/dl-trampoline.S (_dl_x86_64_save_sse): Remove
+       leftover YMM_SIZE definition.
+
 2009-08-24  Ulrich Drepper  <drepper@redhat.com>
 
        * math/math_private.h (ieee_double_shape_type): Add uint64_t word to
index 3887969..a9a0168 100644
--- a/nptl/ChangeLog
+++ b/nptl/ChangeLog
@@ -1,3 +1,10 @@
+2009-08-25  Ulrich Drepper  <drepper@redhat.com>
+
+       * sysdeps/x86_64/tls.h (RTLD_ENABLE_FOREIGN_CALL): Store old value
+       of the field in local variables.
+       (RTLD_FINALIZE_FOREIGN_CALL): Restore rtld_must_xmm_save from local
+       variable and don't unconditionally clear it.
+
 2009-08-24  Ulrich Drepper  <drepper@redhat.com>
 
        * pthread_create.c (start_thread): Hint to the kernel that memory for
index 4212038..e39eb5f 100644
--- a/nptl/sysdeps/x86_64/tls.h
+++ b/nptl/sysdeps/x86_64/tls.h
@@ -188,7 +188,7 @@ typedef struct
 
    The contained asm must *not* be marked volatile since otherwise
    assignments like
-        pthread_descr self = thread_self();
+       pthread_descr self = thread_self();
    do not get optimized away.  */
 # define THREAD_SELF \
   ({ struct pthread *__self;                                                 \
@@ -404,7 +404,12 @@ extern void _dl_x86_64_restore_sse (void);
 # define RTLD_CHECK_FOREIGN_CALL \
   (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) != 0)
 
+/* NB: Don't use the xchg operation because that would imply a lock
+   prefix which is expensive and unnecessary.  The cache line is also
+   not contested at all.  */
 #  define RTLD_ENABLE_FOREIGN_CALL \
+  int old_rtld_must_xmm_save = THREAD_GETMEM (THREAD_SELF,                   \
+                                             header.rtld_must_xmm_save);     \
   THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 1)
 
 #  define RTLD_PREPARE_FOREIGN_CALL \
@@ -419,7 +424,8 @@ extern void _dl_x86_64_restore_sse (void);
   do {                                                                       \
     if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) == 0)         \
       _dl_x86_64_restore_sse ();                                             \
-    THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 0);               \
+    THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save,                   \
+                  old_rtld_must_xmm_save);                                   \
   } while (0)
 # endif
 
index f9c60ad..5564a11 100644
--- a/sysdeps/x86_64/dl-trampoline.S
+++ b/sysdeps/x86_64/dl-trampoline.S
@@ -197,7 +197,6 @@ _dl_x86_64_save_sse:
        ret
 L(no_avx5):
 # endif
-# define YMM_SIZE 16
        movdqa  %xmm0, %fs:RTLD_SAVESPACE_SSE+0*XMM_SIZE
        movdqa  %xmm1, %fs:RTLD_SAVESPACE_SSE+1*XMM_SIZE
        movdqa  %xmm2, %fs:RTLD_SAVESPACE_SSE+2*XMM_SIZE
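
For orientation only: the movdqa sequence above stores each XMM register
into its own 16-byte-aligned slot at offset N*XMM_SIZE within the
per-thread save area.  The following self-contained C sketch mirrors
that layout with SSE2 intrinsics; the buffer, its eight-slot size, and
the helper name are illustrative assumptions, not glibc definitions.

    #include <emmintrin.h>   /* SSE2 intrinsics: __m128i, _mm_store_si128 */
    #include <stdint.h>

    #define XMM_SIZE 16      /* one %xmm register is 16 bytes */

    /* Hypothetical stand-in for the RTLD_SAVESPACE_SSE area; the real
       one lives in the TCB and is addressed through %fs in the
       assembly.  */
    static _Alignas (16) uint8_t savespace_sse[8][XMM_SIZE];

    /* Store one 128-bit value into slot n, mirroring
       "movdqa %xmmN, %fs:RTLD_SAVESPACE_SSE+N*XMM_SIZE".  */
    static void
    save_xmm_slot (int n, __m128i value)
    {
      _mm_store_si128 ((__m128i *) savespace_sse[n], value);
    }

    int
    main (void)
    {
      save_xmm_slot (3, _mm_set1_epi8 (0x7f));   /* fill slot 3 for the demo */
      return savespace_sse[3][0] == 0x7f ? 0 : 1;
    }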