[NativeAOT] Address some COOP native helpers that could cause suspension pauses....
author: Vladimir Sadov <vsadov@microsoft.com>
Wed, 16 Nov 2022 18:48:20 +0000 (10:48 -0800)
committer: GitHub <noreply@github.com>
Wed, 16 Nov 2022 18:48:20 +0000 (10:48 -0800)
* Memmove<T> to shared source.

* Delete RhpCopyObjectContents

* RhCompareObjectContentsAndPadding is ok, but added an assert.

* remove unused BulkMoveWithWriteBarrier in Augments

12 files changed:
src/coreclr/System.Private.CoreLib/src/System/Buffer.CoreCLR.cs
src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/InternalCalls.cs
src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/RuntimeExports.cs
src/coreclr/nativeaot/Runtime.Base/src/System/Runtime/RuntimeImports.cs
src/coreclr/nativeaot/Runtime/gcrhenv.cpp
src/coreclr/nativeaot/System.Private.CoreLib/src/Internal/Runtime/Augments/RuntimeAugments.cs
src/coreclr/nativeaot/System.Private.CoreLib/src/System/Buffer.NativeAot.cs
src/coreclr/nativeaot/System.Private.CoreLib/src/System/Object.NativeAot.cs
src/coreclr/nativeaot/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.NativeAot.cs
src/coreclr/nativeaot/System.Private.CoreLib/src/System/Runtime/RuntimeImports.cs
src/libraries/System.Private.CoreLib/src/System/Buffer.cs
src/mono/System.Private.CoreLib/src/System/Buffer.Mono.cs

index 68e4d77..2a375e7 100644 (file)
@@ -9,78 +9,15 @@ namespace System
 {
     public partial class Buffer
     {
-        // Non-inlinable wrapper around the QCall that avoids polluting the fast path
-        // with P/Invoke prolog/epilog.
-        [MethodImpl(MethodImplOptions.NoInlining)]
-        internal static unsafe void _ZeroMemory(ref byte b, nuint byteLength)
-        {
-            fixed (byte* bytePointer = &b)
-            {
-                __ZeroMemory(bytePointer, byteLength);
-            }
-        }
-
         [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Buffer_Clear")]
         private static unsafe partial void __ZeroMemory(void* b, nuint byteLength);
 
-        // The maximum block size to for __BulkMoveWithWriteBarrier FCall. This is required to avoid GC starvation.
-#if DEBUG // Stress the mechanism in debug builds
-        private const uint BulkMoveWithWriteBarrierChunk = 0x400;
-#else
-        private const uint BulkMoveWithWriteBarrierChunk = 0x4000;
-#endif
-
-        internal static void BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount)
-        {
-            if (byteCount <= BulkMoveWithWriteBarrierChunk)
-                __BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
-            else
-                _BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
-        }
-
-#pragma warning disable IDE0060 // https://github.com/dotnet/roslyn-analyzers/issues/6228
-        // Non-inlinable wrapper around the loop for copying large blocks in chunks
-        [MethodImpl(MethodImplOptions.NoInlining)]
-        private static void _BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount)
-        {
-            Debug.Assert(byteCount > BulkMoveWithWriteBarrierChunk);
-
-            if (Unsafe.AreSame(ref source, ref destination))
-                return;
-
-            // This is equivalent to: (destination - source) >= byteCount || (destination - source) < 0
-            if ((nuint)(nint)Unsafe.ByteOffset(ref source, ref destination) >= byteCount)
-            {
-                // Copy forwards
-                do
-                {
-                    byteCount -= BulkMoveWithWriteBarrierChunk;
-                    __BulkMoveWithWriteBarrier(ref destination, ref source, BulkMoveWithWriteBarrierChunk);
-                    destination = ref Unsafe.AddByteOffset(ref destination, BulkMoveWithWriteBarrierChunk);
-                    source = ref Unsafe.AddByteOffset(ref source, BulkMoveWithWriteBarrierChunk);
-                }
-                while (byteCount > BulkMoveWithWriteBarrierChunk);
-            }
-            else
-            {
-                // Copy backwards
-                do
-                {
-                    byteCount -= BulkMoveWithWriteBarrierChunk;
-                    __BulkMoveWithWriteBarrier(ref Unsafe.AddByteOffset(ref destination, byteCount), ref Unsafe.AddByteOffset(ref source, byteCount), BulkMoveWithWriteBarrierChunk);
-                }
-                while (byteCount > BulkMoveWithWriteBarrierChunk);
-            }
-            __BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
-        }
-#pragma warning restore IDE0060 // https://github.com/dotnet/roslyn-analyzers/issues/6228
+        [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Buffer_MemMove")]
+        private static unsafe partial void __Memmove(byte* dest, byte* src, nuint len);
 
         [MethodImpl(MethodImplOptions.InternalCall)]
         private static extern void __BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount);
 
-        [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Buffer_MemMove")]
-        private static unsafe partial void __Memmove(byte* dest, byte* src, nuint len);
-
         // Used by ilmarshalers.cpp
         internal static unsafe void Memcpy(byte* dest, byte* src, int len)
         {
@@ -96,26 +33,5 @@ namespace System
 
             Memmove(ref *(pDest + (uint)destIndex), ref Unsafe.Add(ref MemoryMarshal.GetArrayDataReference(src), (nint)(uint)srcIndex /* force zero-extension */), (uint)len);
         }
-
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        internal static void Memmove<T>(ref T destination, ref T source, nuint elementCount)
-        {
-            if (!RuntimeHelpers.IsReferenceOrContainsReferences<T>())
-            {
-                // Blittable memmove
-                Memmove(
-                    ref Unsafe.As<T, byte>(ref destination),
-                    ref Unsafe.As<T, byte>(ref source),
-                    elementCount * (nuint)Unsafe.SizeOf<T>());
-            }
-            else
-            {
-                // Non-blittable memmove
-                BulkMoveWithWriteBarrier(
-                    ref Unsafe.As<T, byte>(ref destination),
-                    ref Unsafe.As<T, byte>(ref source),
-                    elementCount * (nuint)Unsafe.SizeOf<T>());
-            }
-        }
     }
 }
index 9cfde63..6cb63b9 100644 (file)
@@ -144,10 +144,6 @@ namespace System.Runtime
         internal static extern unsafe object RhpNewFastMisalign(MethodTable * pEEType);
 #endif // FEATURE_64BIT_ALIGNMENT
 
-        [RuntimeImport(Redhawk.BaseName, "RhpCopyObjectContents")]
-        [MethodImpl(MethodImplOptions.InternalCall)]
-        internal static extern unsafe void RhpCopyObjectContents(object objDest, object objSrc);
-
         [RuntimeImport(Redhawk.BaseName, "RhpAssignRef")]
         [MethodImpl(MethodImplOptions.InternalCall)]
         internal static extern unsafe void RhpAssignRef(ref object address, object obj);
index 4a4ac45..c6ab819 100644 (file)
@@ -277,21 +277,6 @@ namespace System.Runtime
             }
         }
 
-        [RuntimeExport("RhMemberwiseClone")]
-        public static unsafe object RhMemberwiseClone(object src)
-        {
-            object objClone;
-
-            if (src.GetMethodTable()->IsArray)
-                objClone = RhNewArray(src.GetMethodTable(), Unsafe.As<Array>(src).Length);
-            else
-                objClone = RhNewObject(src.GetMethodTable());
-
-            InternalCalls.RhpCopyObjectContents(objClone, src);
-
-            return objClone;
-        }
-
         [RuntimeExport("RhGetCurrentThreadStackTrace")]
         [MethodImpl(MethodImplOptions.NoInlining)] // Ensures that the RhGetCurrentThreadStackTrace frame is always present
         public static unsafe int RhGetCurrentThreadStackTrace(IntPtr[] outputBuffer)
index 2d7aac8..76835dd 100644 (file)
@@ -43,11 +43,15 @@ namespace System.Runtime
         [RuntimeImport(RuntimeLibrary, "RhNewObject")]
         internal static extern object RhNewObject(EETypePtr pEEType);
 
+        // Move memory which may be on the heap which may have object references in it.
+        // In general, a memcpy on the heap is unsafe, but this is able to perform the
+        // correct write barrier such that the GC is not incorrectly impacted.
+        // NOTE: it is only ok to use this directly when copying small chunks of memory (like boxing a struct)
+        //       otherwise use helpers from System.Buffer to avoid running uninterruptible code for too long
         [MethodImplAttribute(MethodImplOptions.InternalCall)]
         [RuntimeImport(RuntimeLibrary, "RhBulkMoveWithWriteBarrier")]
         internal static extern unsafe void RhBulkMoveWithWriteBarrier(ref byte dmem, ref byte smem, uint size);
 
-
         // Allocate handle.
         [MethodImpl(MethodImplOptions.InternalCall)]
         [RuntimeImport(RuntimeLibrary, "RhpHandleAlloc")]
index 6d8b819..5686438 100644 (file)
@@ -575,34 +575,18 @@ uint32_t RedhawkGCInterface::GetGCDescSize(void * pType)
     return (uint32_t)CGCDesc::GetCGCDescFromMT(pMT)->GetSize();
 }
 
-COOP_PINVOKE_HELPER(void, RhpCopyObjectContents, (Object* pobjDest, Object* pobjSrc))
-{
-    size_t cbDest = pobjDest->GetSize() - sizeof(ObjHeader);
-    size_t cbSrc = pobjSrc->GetSize() - sizeof(ObjHeader);
-    if (cbSrc != cbDest)
-        return;
-
-    ASSERT(pobjDest->get_EEType()->HasReferenceFields() == pobjSrc->get_EEType()->HasReferenceFields());
-
-    if (pobjDest->get_EEType()->HasReferenceFields())
-    {
-        GCSafeCopyMemoryWithWriteBarrier(pobjDest, pobjSrc, cbDest);
-    }
-    else
-    {
-        memcpy(pobjDest, pobjSrc, cbDest);
-    }
-}
-
 COOP_PINVOKE_HELPER(FC_BOOL_RET, RhCompareObjectContentsAndPadding, (Object* pObj1, Object* pObj2))
 {
     ASSERT(pObj1->get_EEType()->IsEquivalentTo(pObj2->get_EEType()));
+    ASSERT(pObj1->get_EEType()->IsValueType());
+
     MethodTable * pEEType = pObj1->get_EEType();
     size_t cbFields = pEEType->get_BaseSize() - (sizeof(ObjHeader) + sizeof(MethodTable*));
 
     uint8_t * pbFields1 = (uint8_t*)pObj1 + sizeof(MethodTable*);
     uint8_t * pbFields2 = (uint8_t*)pObj2 + sizeof(MethodTable*);
 
+    // memcmp is ok in a COOP method as we are comparing structs which are typically small.
     FC_RETURN_BOOL(memcmp(pbFields1, pbFields2, cbFields) == 0);
 }
 
index 970a2b1..8ee2c47 100644 (file)
@@ -846,14 +846,6 @@ namespace Internal.Runtime.Augments
             return new RuntimeTypeHandle(obj.GetEETypePtr());
         }
 
-        // Move memory which may be on the heap which may have object references in it.
-        // In general, a memcpy on the heap is unsafe, but this is able to perform the
-        // correct write barrier such that the GC is not incorrectly impacted.
-        public static unsafe void BulkMoveWithWriteBarrier(IntPtr dmem, IntPtr smem, int size)
-        {
-            RuntimeImports.RhBulkMoveWithWriteBarrier(ref *(byte*)dmem.ToPointer(), ref *(byte*)smem.ToPointer(), (uint)size);
-        }
-
         public static IntPtr GetUniversalTransitionThunk()
         {
             return RuntimeImports.RhGetUniversalTransitionThunk();
index 1cd5d7c..4dd685c 100644 (file)
@@ -10,45 +10,16 @@ namespace System
 {
     public static partial class Buffer
     {
-        // Non-inlinable wrapper around the QCall that avoids polluting the fast path
-        // with P/Invoke prolog/epilog.
-        [MethodImpl(MethodImplOptions.NoInlining)]
-        internal static unsafe void _ZeroMemory(ref byte b, nuint byteLength)
-        {
-            fixed (byte* bytePointer = &b)
-            {
-                RuntimeImports.memset(bytePointer, 0, byteLength);
-            }
-        }
-
-        internal static void BulkMoveWithWriteBarrier(ref byte dmem, ref byte smem, nuint size)
-            => RuntimeImports.RhBulkMoveWithWriteBarrier(ref dmem, ref smem, size);
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        private static unsafe void __ZeroMemory(void* b, nuint byteLength) =>
+            RuntimeImports.memset((byte*)b, 0, byteLength);
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static unsafe void __Memmove(byte* dest, byte* src, nuint len) =>
             RuntimeImports.memmove(dest, src, len);
 
-        // This method has different signature for x64 and other platforms and is done for performance reasons.
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        internal static unsafe void Memmove<T>(ref T destination, ref T source, nuint elementCount)
-        {
-            if (!RuntimeHelpers.IsReferenceOrContainsReferences<T>())
-            {
-                // Blittable memmove
-
-                Memmove(
-                    ref Unsafe.As<T, byte>(ref destination),
-                    ref Unsafe.As<T, byte>(ref source),
-                    elementCount * (nuint)Unsafe.SizeOf<T>());
-            }
-            else
-            {
-                // Non-blittable memmove
-                RuntimeImports.RhBulkMoveWithWriteBarrier(
-                    ref Unsafe.As<T, byte>(ref destination),
-                    ref Unsafe.As<T, byte>(ref source),
-                    elementCount * (nuint)Unsafe.SizeOf<T>());
-            }
-        }
+        private static void __BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount) =>
+            RuntimeImports.RhBulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
     }
 }
index d803542..87ff2a7 100644 (file)
@@ -26,9 +26,24 @@ namespace System
         }
 
         [Intrinsic]
-        protected object MemberwiseClone()
+        protected internal unsafe object MemberwiseClone()
         {
-            return RuntimeImports.RhMemberwiseClone(this);
+            object clone = this.GetMethodTable()->IsArray ?
+                RuntimeExports.RhNewArray(this.GetMethodTable(), Unsafe.As<Array>(this).Length) :
+                RuntimeExports.RhNewObject(this.GetMethodTable());
+
+            // copy contents of "this" to the clone
+
+            nuint byteCount = RuntimeHelpers.GetRawObjectDataSize(this);
+            ref byte src = ref this.GetRawData();
+            ref byte dst = ref clone.GetRawData();
+
+            if (this.GetMethodTable()->HasGCPointers)
+                Buffer.BulkMoveWithWriteBarrier(ref dst, ref src, byteCount);
+            else
+                Buffer.Memmove(ref dst, ref src, byteCount);
+
+            return clone;
         }
     }
 }
index 86a87d6..5c6bf96 100644 (file)
@@ -72,7 +72,7 @@ namespace System.Runtime.CompilerServices
             if ((!eeType.IsValueType) || eeType.IsPrimitive)
                 return obj;
 
-            return RuntimeImports.RhMemberwiseClone(obj);
+            return obj.MemberwiseClone();
         }
 
         public static new bool Equals(object? o1, object? o2)
@@ -208,8 +208,16 @@ namespace System.Runtime.CompilerServices
 
         internal static unsafe nuint GetRawObjectDataSize(this object obj)
         {
-            Debug.Assert(obj.GetEETypePtr().ComponentSize == 0);
-            return obj.GetEETypePtr().BaseSize - (uint)sizeof(ObjHeader) - (uint)sizeof(MethodTable*);
+            MethodTable* pMT = GetMethodTable(obj);
+
+            // See comment on RawArrayData for details
+            nuint rawSize = pMT->BaseSize - (nuint)(2 * sizeof(IntPtr));
+            if (pMT->HasComponentSize)
+                rawSize += (uint)Unsafe.As<RawArrayData>(obj).Length * (nuint)pMT->ComponentSize;
+
+            GC.KeepAlive(obj); // Keep MethodTable alive
+
+            return rawSize;
         }
 
         internal static unsafe ushort GetElementSize(this Array array)
index 48b799e..b5f6a17 100644 (file)
@@ -382,10 +382,6 @@ namespace System.Runtime
         internal static unsafe void RhUnbox(object? obj, ref byte data, EETypePtr pUnboxToEEType)
             => RhUnbox(obj, ref data, pUnboxToEEType.ToPointer());
 
-        [MethodImpl(MethodImplOptions.InternalCall)]
-        [RuntimeImport(RuntimeLibrary, "RhMemberwiseClone")]
-        internal static extern object RhMemberwiseClone(object obj);
-
         // Busy spin for the given number of iterations.
         [LibraryImport(RuntimeLibrary, EntryPoint = "RhSpinWait")]
         [SuppressGCTransition]
index c14bd68..7ea4502 100644 (file)
@@ -333,5 +333,93 @@ namespace System
         [StructLayout(LayoutKind.Sequential, Size = 64)]
         private struct Block64 { }
 #endif // HAS_CUSTOM_BLOCKS
+
+        // Non-inlinable wrapper around the QCall that avoids polluting the fast path
+        // with P/Invoke prolog/epilog.
+        [MethodImpl(MethodImplOptions.NoInlining)]
+        internal static unsafe void _ZeroMemory(ref byte b, nuint byteLength)
+        {
+            fixed (byte* bytePointer = &b)
+            {
+                __ZeroMemory(bytePointer, byteLength);
+            }
+        }
+
+#if !MONO // Mono BulkMoveWithWriteBarrier is in terms of elements (not bytes) and takes a type handle.
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        internal static void Memmove<T>(ref T destination, ref T source, nuint elementCount)
+        {
+            if (!RuntimeHelpers.IsReferenceOrContainsReferences<T>())
+            {
+                // Blittable memmove
+                Memmove(
+                    ref Unsafe.As<T, byte>(ref destination),
+                    ref Unsafe.As<T, byte>(ref source),
+                    elementCount * (nuint)Unsafe.SizeOf<T>());
+            }
+            else
+            {
+                // Non-blittable memmove
+                BulkMoveWithWriteBarrier(
+                    ref Unsafe.As<T, byte>(ref destination),
+                    ref Unsafe.As<T, byte>(ref source),
+                    elementCount * (nuint)Unsafe.SizeOf<T>());
+            }
+        }
+
+        // The maximum block size to for __BulkMoveWithWriteBarrier FCall. This is required to avoid GC starvation.
+#if DEBUG // Stress the mechanism in debug builds
+        private const uint BulkMoveWithWriteBarrierChunk = 0x400;
+#else
+        private const uint BulkMoveWithWriteBarrierChunk = 0x4000;
+#endif
+
+        internal static void BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount)
+        {
+            if (byteCount <= BulkMoveWithWriteBarrierChunk)
+                __BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
+            else
+                _BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
+        }
+
+#pragma warning disable IDE0060 // https://github.com/dotnet/roslyn-analyzers/issues/6228
+        // Non-inlinable wrapper around the loop for copying large blocks in chunks
+        [MethodImpl(MethodImplOptions.NoInlining)]
+        private static void _BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount)
+        {
+            Debug.Assert(byteCount > BulkMoveWithWriteBarrierChunk);
+
+            if (Unsafe.AreSame(ref source, ref destination))
+                return;
+
+            // This is equivalent to: (destination - source) >= byteCount || (destination - source) < 0
+            if ((nuint)(nint)Unsafe.ByteOffset(ref source, ref destination) >= byteCount)
+            {
+                // Copy forwards
+                do
+                {
+                    byteCount -= BulkMoveWithWriteBarrierChunk;
+                    __BulkMoveWithWriteBarrier(ref destination, ref source, BulkMoveWithWriteBarrierChunk);
+                    destination = ref Unsafe.AddByteOffset(ref destination, BulkMoveWithWriteBarrierChunk);
+                    source = ref Unsafe.AddByteOffset(ref source, BulkMoveWithWriteBarrierChunk);
+                }
+                while (byteCount > BulkMoveWithWriteBarrierChunk);
+            }
+            else
+            {
+                // Copy backwards
+                do
+                {
+                    byteCount -= BulkMoveWithWriteBarrierChunk;
+                    __BulkMoveWithWriteBarrier(ref Unsafe.AddByteOffset(ref destination, byteCount), ref Unsafe.AddByteOffset(ref source, byteCount), BulkMoveWithWriteBarrierChunk);
+                }
+                while (byteCount > BulkMoveWithWriteBarrierChunk);
+            }
+            __BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
+        }
+#pragma warning restore IDE0060 // https://github.com/dotnet/roslyn-analyzers/issues/6228
+
+#endif // !MONO
     }
 }
index 8ab237f..ffd9f7d 100644 (file)
@@ -8,22 +8,13 @@ namespace System
     public partial class Buffer
     {
         [MethodImpl(MethodImplOptions.InternalCall)]
-        private static extern unsafe void __Memmove(byte* dest, byte* src, nuint len);
+        private static extern unsafe void __ZeroMemory(void* b, nuint byteLength);
 
         [MethodImpl(MethodImplOptions.InternalCall)]
-        private static extern void BulkMoveWithWriteBarrier(ref byte dmem, ref byte smem, nuint len, IntPtr type_handle);
-
-        [MethodImpl(MethodImplOptions.NoInlining)]
-        internal static unsafe void _ZeroMemory(ref byte b, nuint byteLength)
-        {
-            fixed (byte* bytePointer = &b)
-            {
-                __ZeroMemory(bytePointer, byteLength);
-            }
-        }
+        private static extern unsafe void __Memmove(byte* dest, byte* src, nuint len);
 
         [MethodImpl(MethodImplOptions.InternalCall)]
-        private static extern unsafe void __ZeroMemory(void* p, nuint byteLength);
+        private static extern void BulkMoveWithWriteBarrier(ref byte dmem, ref byte smem, nuint len, IntPtr type_handle);
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         internal static void Memmove<T>(ref T destination, ref T source, nuint elementCount)