From: Vladimir Sadov
Date: Fri, 25 Oct 2019 17:14:45 +0000 (-0700)
Subject: Use half-fences for volatile loads/stores on Windows ARM64 (dotnet/coreclr#27384)
X-Git-Tag: submit/tizen/20210909.063632~11030^2~242
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=c322db18a12a947e5be6f9276ffc52b6ae3aae47;p=platform%2Fupstream%2Fdotnet%2Fruntime.git

Use half-fences for volatile loads/stores on Windows ARM64 (dotnet/coreclr#27384)

* Use half-fences for volatile loads/stores on Windows ARM64

* Updated Volatile.h in gc/env as well; unified type names and warning suppression.

Commit migrated from https://github.com/dotnet/coreclr/commit/c128dbacaac207364b01c2d16890031e5aab47a4
---

diff --git a/src/coreclr/src/gc/env/volatile.h b/src/coreclr/src/gc/env/volatile.h
index 311e126..de613ad 100644
--- a/src/coreclr/src/gc/env/volatile.h
+++ b/src/coreclr/src/gc/env/volatile.h
@@ -127,6 +127,9 @@ struct RemoveVolatile
 // Starting at version 3.8, clang errors out on initializing of type int * to volatile int *. To fix this, we add two templates to cast away volatility
 // Helper structures for casting away volatileness
+#if defined(_ARM64_) && defined(_MSC_VER)
+#include <arm64intr.h>
+#endif

 template<typename T>
 inline
@@ -145,6 +148,32 @@ T VolatileLoad(T const * pt)
         val = *(T volatile const *)pt;
         asm volatile ("dmb ishld" : : : "memory");
     }
+#elif defined(_ARM64_) && defined(_MSC_VER)
+// silence warnings on casts in branches that are not taken.
+#pragma warning(push)
+#pragma warning(disable : 4302)
+#pragma warning(disable : 4311)
+#pragma warning(disable : 4312)
+    T val;
+    switch (sizeof(T))
+    {
+    case 1:
+        val = (typename RemoveVolatile<T>::type)__ldar8 ((unsigned __int8 volatile*)pt);
+        break;
+    case 2:
+        val = (typename RemoveVolatile<T>::type)__ldar16((unsigned __int16 volatile*)pt);
+        break;
+    case 4:
+        val = (typename RemoveVolatile<T>::type)__ldar32((unsigned __int32 volatile*)pt);
+        break;
+    case 8:
+        val = (typename RemoveVolatile<T>::type)__ldar64((unsigned __int64 volatile*)pt);
+        break;
+    default:
+        val = *(T volatile const*)pt;
+        __dmb(_ARM64_BARRIER_ISHLD);
+    }
+#pragma warning(pop)
 #else
     T val = *(T volatile const *)pt;
     VOLATILE_MEMORY_BARRIER();
@@ -199,6 +228,31 @@ void VolatileStore(T* pt, T val)
         VOLATILE_MEMORY_BARRIER();
         *(T volatile *)pt = val;
     }
+#elif defined(_ARM64_) && defined(_MSC_VER)
+// silence warnings on casts in branches that are not taken.
+#pragma warning(push)
+#pragma warning(disable : 4302)
+#pragma warning(disable : 4311)
+#pragma warning(disable : 4312)
+    switch (sizeof(T))
+    {
+    case 1:
+        __stlr8 ((unsigned __int8 volatile*)pt, (unsigned __int8) val);
+        break;
+    case 2:
+        __stlr16((unsigned __int16 volatile*)pt, (unsigned __int16)val);
+        break;
+    case 4:
+        __stlr32((unsigned __int32 volatile*)pt, (unsigned __int32)val);
+        break;
+    case 8:
+        __stlr64((unsigned __int64 volatile*)pt, (unsigned __int64)val);
+        break;
+    default:
+        __dmb(_ARM64_BARRIER_ISH);
+        *(T volatile *)pt = val;
+    }
+#pragma warning(pop)
 #else
     VOLATILE_MEMORY_BARRIER();
     *(T volatile *)pt = val;
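
The hunks above replace the "full fence plus plain volatile access" pattern with ARM64 load-acquire (LDAR) and store-release (STLR) half-fences when building with MSVC, keeping an explicit __dmb fallback for sizes the intrinsics do not cover. As a point of reference only (not part of the patch), the standalone sketch below expresses the same acquire/release ordering with std::atomic; the names g_data and g_flag are made up for this illustration.

    // Illustration of acquire/release ("half-fence") ordering using std::atomic.
    // A release store typically compiles to STLR and an acquire load to LDAR on ARM64.
    #include <atomic>

    int              g_data = 0;
    std::atomic<int> g_flag{0};

    void Writer()
    {
        g_data = 42;                                 // plain store
        g_flag.store(1, std::memory_order_release);  // release half-fence (STLR)
    }

    int Reader()
    {
        if (g_flag.load(std::memory_order_acquire))  // acquire half-fence (LDAR)
            return g_data;                           // observing the flag guarantees g_data == 42
        return 0;
    }

Compared with a full dmb ish around every access, the half-fence forms only order earlier stores before the release store and later loads after the acquire load, which is exactly the acquire/release contract VolatileLoad and VolatileStore promise.
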
diff --git a/src/coreclr/src/inc/volatile.h b/src/coreclr/src/inc/volatile.h
index fa756ef..c6355fc 100644
--- a/src/coreclr/src/inc/volatile.h
+++ b/src/coreclr/src/inc/volatile.h
@@ -129,6 +129,9 @@ struct RemoveVolatile
 // Starting at version 3.8, clang errors out on initializing of type int * to volatile int *. To fix this, we add two templates to cast away volatility
 // Helper structures for casting away volatileness
+#if defined(_ARM64_) && defined(_MSC_VER)
+#include <arm64intr.h>
+#endif

 template<typename T>
 inline
@@ -149,6 +152,32 @@ T VolatileLoad(T const * pt)
         val = *(T volatile const *)pt;
         asm volatile ("dmb ishld" : : : "memory");
     }
+#elif defined(_ARM64_) && defined(_MSC_VER)
+// silence warnings on casts in branches that are not taken.
+#pragma warning(push)
+#pragma warning(disable : 4302)
+#pragma warning(disable : 4311)
+#pragma warning(disable : 4312)
+    T val;
+    switch (sizeof(T))
+    {
+    case 1:
+        val = (typename RemoveVolatile<T>::type)__ldar8 ((unsigned __int8 volatile*)pt);
+        break;
+    case 2:
+        val = (typename RemoveVolatile<T>::type)__ldar16((unsigned __int16 volatile*)pt);
+        break;
+    case 4:
+        val = (typename RemoveVolatile<T>::type)__ldar32((unsigned __int32 volatile*)pt);
+        break;
+    case 8:
+        val = (typename RemoveVolatile<T>::type)__ldar64((unsigned __int64 volatile*)pt);
+        break;
+    default:
+        val = *(T volatile const*)pt;
+        __dmb(_ARM64_BARRIER_ISHLD);
+    }
+#pragma warning(pop)
 #else
     T val = *(T volatile const *)pt;
     VOLATILE_MEMORY_BARRIER();
@@ -208,6 +237,31 @@ void VolatileStore(T* pt, T val)
         VOLATILE_MEMORY_BARRIER();
         *(T volatile *)pt = val;
     }
+#elif defined(_ARM64_) && defined(_MSC_VER)
+// silence warnings on casts in branches that are not taken.
+#pragma warning(push)
+#pragma warning(disable : 4302)
+#pragma warning(disable : 4311)
+#pragma warning(disable : 4312)
+    switch (sizeof(T))
+    {
+    case 1:
+        __stlr8 ((unsigned __int8 volatile*)pt, (unsigned __int8) val);
+        break;
+    case 2:
+        __stlr16((unsigned __int16 volatile*)pt, (unsigned __int16)val);
+        break;
+    case 4:
+        __stlr32((unsigned __int32 volatile*)pt, (unsigned __int32)val);
+        break;
+    case 8:
+        __stlr64((unsigned __int64 volatile*)pt, (unsigned __int64)val);
+        break;
+    default:
+        __dmb(_ARM64_BARRIER_ISH);
+        *(T volatile *)pt = val;
+    }
+#pragma warning(pop)
 #else
     VOLATILE_MEMORY_BARRIER();
     *(T volatile *)pt = val;
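
For context, a minimal usage sketch of the two functions this commit changes, assuming src/coreclr/src/inc/volatile.h is on the include path; the Publish/Consume functions and the s_payload/s_ready variables are hypothetical names used only to show why the store side wants release semantics and the load side wants acquire semantics.

    // Hypothetical publish/consume pair built on VolatileStore/VolatileLoad.
    // On Windows ARM64 the bool store below lowers to __stlr8 and the load to __ldar8.
    #include "volatile.h"   // assumed include path for src/coreclr/src/inc/volatile.h

    static int  s_payload = 0;
    static bool s_ready   = false;

    void Publish()
    {
        s_payload = 42;                 // ordinary store
        VolatileStore(&s_ready, true);  // release: s_payload becomes visible no later than s_ready
    }

    int Consume()
    {
        if (VolatileLoad(&s_ready))     // acquire: seeing s_ready == true implies s_payload == 42
            return s_payload;
        return 0;
    }

Before this change, the MSVC ARM64 build fell into the #else path and paid for a full VOLATILE_MEMORY_BARRIER around each access; with the patch, the 1/2/4/8-byte cases use LDAR/STLR and only the default case still issues an explicit __dmb.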