Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
88 changes: 2 additions & 86 deletions src/coreclr/System.Private.CoreLib/src/System/Buffer.CoreCLR.cs
Original file line number Diff line number Diff line change
Expand Up @@ -9,78 +9,15 @@ namespace System
{
public partial class Buffer
{
// Out-of-line wrapper for the Buffer_Clear QCall so callers' fast paths are
// not burdened with the P/Invoke prolog/epilog.
[MethodImpl(MethodImplOptions.NoInlining)]
internal static unsafe void _ZeroMemory(ref byte b, nuint byteLength)
{
    // Pin the managed reference so the native side sees a stable address.
    fixed (byte* pBuffer = &b)
    {
        __ZeroMemory(pBuffer, byteLength);
    }
}

// QCall implemented by the runtime (Buffer_Clear): zeroes byteLength bytes starting at b.
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Buffer_Clear")]
private static unsafe partial void __ZeroMemory(void* b, nuint byteLength);

// The maximum block size to for __BulkMoveWithWriteBarrier FCall. This is required to avoid GC starvation.
#if DEBUG // Stress the mechanism in debug builds
private const uint BulkMoveWithWriteBarrierChunk = 0x400;
#else
private const uint BulkMoveWithWriteBarrierChunk = 0x4000;
#endif

// Copies byteCount bytes from source to destination with GC write barriers.
// Copies no larger than BulkMoveWithWriteBarrierChunk go straight to the FCall;
// larger ones take the chunked out-of-line path to avoid GC starvation.
internal static void BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount)
{
    if (byteCount > BulkMoveWithWriteBarrierChunk)
    {
        // Rare/large path: the chunking loop lives in a non-inlined helper.
        _BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
    }
    else
    {
        __BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
    }
}

#pragma warning disable IDE0060 // https://github.com/dotnet/roslyn-analyzers/issues/6228
// Non-inlinable wrapper around the loop for copying large blocks in chunks
// Copies byteCount (> BulkMoveWithWriteBarrierChunk) bytes in chunk-sized FCall
// invocations so the GC is never blocked for too long by a single bulk move.
[MethodImpl(MethodImplOptions.NoInlining)]
private static void _BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount)
{
Debug.Assert(byteCount > BulkMoveWithWriteBarrierChunk);

// Identical source and destination: nothing to move.
if (Unsafe.AreSame(ref source, ref destination))
return;

// This is equivalent to: (destination - source) >= byteCount || (destination - source) < 0
// i.e. the ranges do not overlap in a way that forward copying would clobber.
if ((nuint)(nint)Unsafe.ByteOffset(ref source, ref destination) >= byteCount)
{
// Copy forwards
do
{
byteCount -= BulkMoveWithWriteBarrierChunk;
__BulkMoveWithWriteBarrier(ref destination, ref source, BulkMoveWithWriteBarrierChunk);
// Advance both refs past the chunk just copied.
destination = ref Unsafe.AddByteOffset(ref destination, BulkMoveWithWriteBarrierChunk);
source = ref Unsafe.AddByteOffset(ref source, BulkMoveWithWriteBarrierChunk);
}
while (byteCount > BulkMoveWithWriteBarrierChunk);
}
else
{
// Copy backwards
do
{
byteCount -= BulkMoveWithWriteBarrierChunk;
// Copy the chunk at the current end of the remaining range; refs stay fixed.
__BulkMoveWithWriteBarrier(ref Unsafe.AddByteOffset(ref destination, byteCount), ref Unsafe.AddByteOffset(ref source, byteCount), BulkMoveWithWriteBarrierChunk);
}
while (byteCount > BulkMoveWithWriteBarrierChunk);
}
// Copy the remaining tail (forward case) or head (backward case);
// byteCount is now <= BulkMoveWithWriteBarrierChunk.
__BulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
}
#pragma warning restore IDE0060 // https://github.com/dotnet/roslyn-analyzers/issues/6228
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Buffer_MemMove")]
private static unsafe partial void __Memmove(byte* dest, byte* src, nuint len);

// FCall that copies byteCount bytes with GC write barriers. Individual calls
// must stay small (see BulkMoveWithWriteBarrierChunk) to avoid GC starvation.
[MethodImpl(MethodImplOptions.InternalCall)]
private static extern void __BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount);

[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Buffer_MemMove")]
private static unsafe partial void __Memmove(byte* dest, byte* src, nuint len);

// Used by ilmarshalers.cpp
internal static unsafe void Memcpy(byte* dest, byte* src, int len)
{
Expand All @@ -96,26 +33,5 @@ internal static unsafe void Memcpy(byte* pDest, int destIndex, byte[] src, int s

Memmove(ref *(pDest + (uint)destIndex), ref Unsafe.Add(ref MemoryMarshal.GetArrayDataReference(src), (nint)(uint)srcIndex /* force zero-extension */), (uint)len);
}

// Copies elementCount elements of type T, choosing the primitive based on
// whether T contains GC references (write barriers required) or is blittable.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void Memmove<T>(ref T destination, ref T source, nuint elementCount)
{
    nuint byteCount = elementCount * (nuint)Unsafe.SizeOf<T>();

    if (RuntimeHelpers.IsReferenceOrContainsReferences<T>())
    {
        // T carries object references: the GC must observe every pointer write.
        BulkMoveWithWriteBarrier(
            ref Unsafe.As<T, byte>(ref destination),
            ref Unsafe.As<T, byte>(ref source),
            byteCount);
    }
    else
    {
        // Blittable payload: a raw byte memmove is sufficient.
        Memmove(
            ref Unsafe.As<T, byte>(ref destination),
            ref Unsafe.As<T, byte>(ref source),
            byteCount);
    }
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -144,10 +144,6 @@ internal static int RhEndNoGCRegion()
internal static extern unsafe object RhpNewFastMisalign(MethodTable * pEEType);
#endif // FEATURE_64BIT_ALIGNMENT

// Copies the instance data of objSrc into objDest (shallow copy); the runtime
// implementation uses GC-safe write barriers when the type has reference fields.
[RuntimeImport(Redhawk.BaseName, "RhpCopyObjectContents")]
[MethodImpl(MethodImplOptions.InternalCall)]
internal static extern unsafe void RhpCopyObjectContents(object objDest, object objSrc);

[RuntimeImport(Redhawk.BaseName, "RhpAssignRef")]
[MethodImpl(MethodImplOptions.InternalCall)]
internal static extern unsafe void RhpAssignRef(ref object address, object obj);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -277,21 +277,6 @@ public static unsafe void RhUnbox(object? obj, ref byte data, MethodTable* pUnbo
}
}

// Shallow-clones src: allocates a fresh object (or array of matching length)
// of the same type, then copies the instance data over.
[RuntimeExport("RhMemberwiseClone")]
public static unsafe object RhMemberwiseClone(object src)
{
    object objClone = src.GetMethodTable()->IsArray
        ? RhNewArray(src.GetMethodTable(), Unsafe.As<Array>(src).Length)
        : RhNewObject(src.GetMethodTable());

    // Runtime helper performs the field copy (with write barriers when needed).
    InternalCalls.RhpCopyObjectContents(objClone, src);

    return objClone;
}

[RuntimeExport("RhGetCurrentThreadStackTrace")]
[MethodImpl(MethodImplOptions.NoInlining)] // Ensures that the RhGetCurrentThreadStackTrace frame is always present
public static unsafe int RhGetCurrentThreadStackTrace(IntPtr[] outputBuffer)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,15 @@ internal static IntPtr RhGetModuleSection(TypeManagerHandle module, ReadyToRunSe
[RuntimeImport(RuntimeLibrary, "RhNewObject")]
internal static extern object RhNewObject(EETypePtr pEEType);

// Move memory which may be on the heap which may have object references in it.
// In general, a memcpy on the heap is unsafe, but this is able to perform the
// correct write barrier such that the GC is not incorrectly impacted.
// NOTE: it is only ok to use this directly when copying small chunks of memory (like boxing a struct)
// otherwise use helpers from System.Buffer to avoid running uninterruptible code for too long
[MethodImplAttribute(MethodImplOptions.InternalCall)]
[RuntimeImport(RuntimeLibrary, "RhBulkMoveWithWriteBarrier")]
internal static extern unsafe void RhBulkMoveWithWriteBarrier(ref byte dmem, ref byte smem, uint size);


// Allocate handle.
[MethodImpl(MethodImplOptions.InternalCall)]
[RuntimeImport(RuntimeLibrary, "RhpHandleAlloc")]
Expand Down
22 changes: 3 additions & 19 deletions src/coreclr/nativeaot/Runtime/gcrhenv.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -575,34 +575,18 @@ uint32_t RedhawkGCInterface::GetGCDescSize(void * pType)
return (uint32_t)CGCDesc::GetCGCDescFromMT(pMT)->GetSize();
}

// Shallow-copies the instance data of pobjSrc into pobjDest.
// Silently returns when the two data sizes differ (defensive guard; the managed
// caller is expected to pass two objects of the same shape).
COOP_PINVOKE_HELPER(void, RhpCopyObjectContents, (Object* pobjDest, Object* pobjSrc))
{
// Data size excludes the object header that precedes the object pointer.
size_t cbDest = pobjDest->GetSize() - sizeof(ObjHeader);
size_t cbSrc = pobjSrc->GetSize() - sizeof(ObjHeader);
if (cbSrc != cbDest)
return;

ASSERT(pobjDest->get_EEType()->HasReferenceFields() == pobjSrc->get_EEType()->HasReferenceFields());

if (pobjDest->get_EEType()->HasReferenceFields())
{
// Contains GC references: copy with write barriers so the GC sees every
// pointer update.
GCSafeCopyMemoryWithWriteBarrier(pobjDest, pobjSrc, cbDest);
}
else
{
// No reference fields: a plain memcpy is safe.
memcpy(pobjDest, pobjSrc, cbDest);
}
}

// Returns true when two boxed value types have bitwise-identical field data,
// including padding bytes. Both objects must have equivalent value-type
// MethodTables (asserted below).
COOP_PINVOKE_HELPER(FC_BOOL_RET, RhCompareObjectContentsAndPadding, (Object* pObj1, Object* pObj2))
{
ASSERT(pObj1->get_EEType()->IsEquivalentTo(pObj2->get_EEType()));
ASSERT(pObj1->get_EEType()->IsValueType());

MethodTable * pEEType = pObj1->get_EEType();
// Field payload size: BaseSize minus the ObjHeader and MethodTable* overhead.
size_t cbFields = pEEType->get_BaseSize() - (sizeof(ObjHeader) + sizeof(MethodTable*));

// Fields start immediately after the MethodTable pointer.
uint8_t * pbFields1 = (uint8_t*)pObj1 + sizeof(MethodTable*);
uint8_t * pbFields2 = (uint8_t*)pObj2 + sizeof(MethodTable*);

// memcmp is ok in a COOP method as we are comparing structs which are typically small.
FC_RETURN_BOOL(memcmp(pbFields1, pbFields2, cbFields) == 0);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -846,14 +846,6 @@ public static unsafe RuntimeTypeHandle GetRuntimeTypeHandleFromObjectReference(o
return new RuntimeTypeHandle(obj.GetEETypePtr());
}

// Moves size bytes between locations that may live on the GC heap and may
// contain object references. A raw memcpy on the heap is unsafe in general;
// this routes through the runtime helper that performs the correct write
// barriers so the GC is not incorrectly impacted.
public static unsafe void BulkMoveWithWriteBarrier(IntPtr dmem, IntPtr smem, int size) =>
    RuntimeImports.RhBulkMoveWithWriteBarrier(
        ref *(byte*)dmem.ToPointer(),
        ref *(byte*)smem.ToPointer(),
        (uint)size);

public static IntPtr GetUniversalTransitionThunk()
{
return RuntimeImports.RhGetUniversalTransitionThunk();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,45 +10,16 @@ namespace System
{
public static partial class Buffer
{
// Out-of-line helper so the callers' fast paths are not polluted with the
// pinning and native-call overhead of the memset below.
[MethodImpl(MethodImplOptions.NoInlining)]
internal static unsafe void _ZeroMemory(ref byte b, nuint byteLength)
{
    // Pin the reference; memset needs a stable native pointer.
    fixed (byte* pStart = &b)
    {
        RuntimeImports.memset(pStart, 0, byteLength);
    }
}

internal static void BulkMoveWithWriteBarrier(ref byte dmem, ref byte smem, nuint size)
=> RuntimeImports.RhBulkMoveWithWriteBarrier(ref dmem, ref smem, size);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe void __ZeroMemory(void* b, nuint byteLength) =>
RuntimeImports.memset((byte*)b, 0, byteLength);

// Forwards to the runtime's native memmove (memmove semantics permit
// overlapping source/destination ranges).
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static unsafe void __Memmove(byte* dest, byte* src, nuint len) =>
RuntimeImports.memmove(dest, src, len);

// This method has different signature for x64 and other platforms and is done for performance reasons.
// Copies elementCount elements of type T, picking the primitive based on
// whether T contains GC references (write barriers required) or is blittable.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static unsafe void Memmove<T>(ref T destination, ref T source, nuint elementCount)
{
    nuint byteCount = elementCount * (nuint)Unsafe.SizeOf<T>();

    if (RuntimeHelpers.IsReferenceOrContainsReferences<T>())
    {
        // T carries object references: go through the write-barrier-aware move.
        RuntimeImports.RhBulkMoveWithWriteBarrier(
            ref Unsafe.As<T, byte>(ref destination),
            ref Unsafe.As<T, byte>(ref source),
            byteCount);
    }
    else
    {
        // Blittable payload: a raw byte memmove is sufficient.
        Memmove(
            ref Unsafe.As<T, byte>(ref destination),
            ref Unsafe.As<T, byte>(ref source),
            byteCount);
    }
}
// Thin shim over the runtime's bulk-move helper; gives shared Buffer code the
// same __BulkMoveWithWriteBarrier name used by the CoreCLR flavor.
private static void __BulkMoveWithWriteBarrier(ref byte destination, ref byte source, nuint byteCount) =>
RuntimeImports.RhBulkMoveWithWriteBarrier(ref destination, ref source, byteCount);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,24 @@ public Type GetType()
}

[Intrinsic]
protected object MemberwiseClone()
protected internal unsafe object MemberwiseClone()
{
return RuntimeImports.RhMemberwiseClone(this);
object clone = this.GetMethodTable()->IsArray ?
RuntimeExports.RhNewArray(this.GetMethodTable(), Unsafe.As<Array>(this).Length) :
RuntimeExports.RhNewObject(this.GetMethodTable());

// copy contents of "this" to the clone

nuint byteCount = RuntimeHelpers.GetRawObjectDataSize(this);
ref byte src = ref this.GetRawData();
ref byte dst = ref clone.GetRawData();

if (this.GetMethodTable()->HasGCPointers)
Buffer.BulkMoveWithWriteBarrier(ref dst, ref src, byteCount);
else
Buffer.Memmove(ref dst, ref src, byteCount);

return clone;
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ public static object GetObjectValue(object? obj)
if ((!eeType.IsValueType) || eeType.IsPrimitive)
return obj;

return RuntimeImports.RhMemberwiseClone(obj);
return obj.MemberwiseClone();
}

public static new bool Equals(object? o1, object? o2)
Expand Down Expand Up @@ -208,8 +208,16 @@ internal static ref byte GetRawData(this object obj) =>

internal static unsafe nuint GetRawObjectDataSize(this object obj)
{
Debug.Assert(obj.GetEETypePtr().ComponentSize == 0);
return obj.GetEETypePtr().BaseSize - (uint)sizeof(ObjHeader) - (uint)sizeof(MethodTable*);
MethodTable* pMT = GetMethodTable(obj);

// See comment on RawArrayData for details
nuint rawSize = pMT->BaseSize - (nuint)(2 * sizeof(IntPtr));
if (pMT->HasComponentSize)
rawSize += (uint)Unsafe.As<RawArrayData>(obj).Length * (nuint)pMT->ComponentSize;

GC.KeepAlive(obj); // Keep MethodTable alive

return rawSize;
}

internal static unsafe ushort GetElementSize(this Array array)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -382,10 +382,6 @@ internal static unsafe object RhBox(EETypePtr pEEType, ref byte data)
internal static unsafe void RhUnbox(object? obj, ref byte data, EETypePtr pUnboxToEEType)
=> RhUnbox(obj, ref data, pUnboxToEEType.ToPointer());

// Runtime-implemented shallow clone: allocates an object of the same type and
// copies obj's instance data into it.
[MethodImpl(MethodImplOptions.InternalCall)]
[RuntimeImport(RuntimeLibrary, "RhMemberwiseClone")]
internal static extern object RhMemberwiseClone(object obj);

// Busy spin for the given number of iterations.
[LibraryImport(RuntimeLibrary, EntryPoint = "RhSpinWait")]
[SuppressGCTransition]
Expand Down
Loading