From: Adhemerval Zanella
Date: Tue, 25 Nov 2014 19:32:54 +0000 (-0500)
Subject: powerpc: Fix missing barriers in atomic_exchange_and_add_{acq,rel}
X-Git-Tag: glibc-2.21~326
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=704f794714704ba430d84d10d6809acaf7ca59bf;p=platform%2Fupstream%2Fglibc.git

powerpc: Fix missing barriers in atomic_exchange_and_add_{acq,rel}

On powerpc, atomic_exchange_and_add is implemented without any barriers.
This patch adds the missing instruction and memory barrier for acquire and
release semantics.
---
diff --git a/ChangeLog b/ChangeLog
index 50be79d..efd18ee 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,19 @@
+2014-11-26  Adhemerval Zanella
+
+	* csu/tst-atomic.c (do_test): Add atomic_exchange_and_add_{acq,rel}
+	tests.
+	* sysdeps/powerpc/bits/atomic.h
+	(__arch_atomic_exchange_and_add_32_acq): Add definition.
+	(__arch_atomic_exchange_and_add_32_rel): Likewise.
+	(atomic_exchange_and_add_acq): Likewise.
+	(atomic_exchange_and_add_rel): Likewise.
+	* sysdeps/powerpc/powerpc32/bits/atomic.h
+	(__arch_atomic_exchange_and_add_64_acq): Add definition.
+	(__arch_atomic_exchange_and_add_64_rel): Likewise.
+	* sysdeps/powerpc/powerpc64/bits/atomic.h
+	(__arch_atomic_exchange_and_add_64_acq): Add definition.
+	(__arch_atomic_exchange_and_add_64_rel): Likewise.
+
 2014-11-26  Torvald Riegel
 
 	* nptl/tpp.c (__init_sched_fifo_prio, __pthread_tpp_change_priority):
diff --git a/csu/tst-atomic.c b/csu/tst-atomic.c
index c6e786d..5ab651e 100644
--- a/csu/tst-atomic.c
+++ b/csu/tst-atomic.c
@@ -113,6 +113,22 @@ do_test (void)
       ret = 1;
     }
 
+  mem = 2;
+  if (atomic_exchange_and_add_acq (&mem, 11) != 2
+      || mem != 13)
+    {
+      puts ("atomic_exchange_and_add_acq test failed");
+      ret = 1;
+    }
+
+  mem = 2;
+  if (atomic_exchange_and_add_rel (&mem, 11) != 2
+      || mem != 13)
+    {
+      puts ("atomic_exchange_and_add_rel test failed");
+      ret = 1;
+    }
+
   mem = -21;
   atomic_add (&mem, 22);
   if (mem != 1)
diff --git a/sysdeps/powerpc/bits/atomic.h b/sysdeps/powerpc/bits/atomic.h
index f312676..b05b0f7 100644
--- a/sysdeps/powerpc/bits/atomic.h
+++ b/sysdeps/powerpc/bits/atomic.h
@@ -152,6 +152,34 @@ typedef uintmax_t uatomic_max_t;
     __val; \
   })
 
+#define __arch_atomic_exchange_and_add_32_acq(mem, value) \
+  ({ \
+    __typeof (*mem) __val, __tmp; \
+    __asm __volatile ("1:   lwarx   %0,0,%3" MUTEX_HINT_ACQ "\n" \
+                      "     add     %1,%0,%4\n" \
+                      "     stwcx.  %1,0,%3\n" \
+                      "     bne-    1b\n" \
+                      __ARCH_ACQ_INSTR \
+                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+                      : "b" (mem), "r" (value), "m" (*mem) \
+                      : "cr0", "memory"); \
+    __val; \
+  })
+
+#define __arch_atomic_exchange_and_add_32_rel(mem, value) \
+  ({ \
+    __typeof (*mem) __val, __tmp; \
+    __asm __volatile (__ARCH_REL_INSTR "\n" \
+                      "1:   lwarx   %0,0,%3" MUTEX_HINT_REL "\n" \
+                      "     add     %1,%0,%4\n" \
+                      "     stwcx.  %1,0,%3\n" \
+                      "     bne-    1b" \
+                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+                      : "b" (mem), "r" (value), "m" (*mem) \
+                      : "cr0", "memory"); \
+    __val; \
+  })
+
 #define __arch_atomic_increment_val_32(mem) \
   ({ \
     __typeof (*(mem)) __val; \
@@ -252,6 +280,28 @@
       abort (); \
     __result; \
   })
+#define atomic_exchange_and_add_acq(mem, value) \
+  ({ \
+    __typeof (*(mem)) __result; \
+    if (sizeof (*mem) == 4) \
+      __result = __arch_atomic_exchange_and_add_32_acq (mem, value); \
+    else if (sizeof (*mem) == 8) \
+      __result = __arch_atomic_exchange_and_add_64_acq (mem, value); \
+    else \
+      abort (); \
+    __result; \
+  })
+#define atomic_exchange_and_add_rel(mem, value) \
+  ({ \
+    __typeof (*(mem)) __result; \
+    if (sizeof (*mem) == 4) \
+      __result = __arch_atomic_exchange_and_add_32_rel (mem, value); \
+    else if (sizeof (*mem) == 8) \
+      __result = __arch_atomic_exchange_and_add_64_rel (mem, value); \
+    else \
+      abort (); \
+    __result; \
+  })
 
 #define atomic_increment_val(mem) \
   ({ \
diff --git a/sysdeps/powerpc/powerpc32/bits/atomic.h b/sysdeps/powerpc/powerpc32/bits/atomic.h
index 117b5a0..e2a1bf4 100644
--- a/sysdeps/powerpc/powerpc32/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc32/bits/atomic.h
@@ -98,6 +98,12 @@
 #define __arch_atomic_exchange_and_add_64(mem, value) \
   ({ abort (); (*mem) = (value); })
 
+#define __arch_atomic_exchange_and_add_64_acq(mem, value) \
+  ({ abort (); (*mem) = (value); })
+
+#define __arch_atomic_exchange_and_add_64_rel(mem, value) \
+  ({ abort (); (*mem) = (value); })
+
 #define __arch_atomic_increment_val_64(mem) \
   ({ abort (); (*mem)++; })
 
diff --git a/sysdeps/powerpc/powerpc64/bits/atomic.h b/sysdeps/powerpc/powerpc64/bits/atomic.h
index 83b5dfe..46117b0 100644
--- a/sysdeps/powerpc/powerpc64/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc64/bits/atomic.h
@@ -186,6 +186,34 @@
     __val; \
   })
 
+#define __arch_atomic_exchange_and_add_64_acq(mem, value) \
+  ({ \
+    __typeof (*mem) __val, __tmp; \
+    __asm __volatile ("1:   ldarx   %0,0,%3" MUTEX_HINT_ACQ "\n" \
+                      "     add     %1,%0,%4\n" \
+                      "     stdcx.  %1,0,%3\n" \
+                      "     bne-    1b\n" \
+                      __ARCH_ACQ_INSTR \
+                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+                      : "b" (mem), "r" (value), "m" (*mem) \
+                      : "cr0", "memory"); \
+    __val; \
+  })
+
+#define __arch_atomic_exchange_and_add_64_rel(mem, value) \
+  ({ \
+    __typeof (*mem) __val, __tmp; \
+    __asm __volatile (__ARCH_REL_INSTR "\n" \
+                      "1:   ldarx   %0,0,%3" MUTEX_HINT_REL "\n" \
+                      "     add     %1,%0,%4\n" \
+                      "     stdcx.  %1,0,%3\n" \
+                      "     bne-    1b" \
+                      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
+                      : "b" (mem), "r" (value), "m" (*mem) \
+                      : "cr0", "memory"); \
+    __val; \
+  })
+
 #define __arch_atomic_increment_val_64(mem) \
   ({ \
     __typeof (*(mem)) __val; \