Mirror of https://github.com/VSadov/Satori.git (synced 2025-06-08 03:27:04 +09:00)

Commit 5a8c497d68 (parent af826162ba): fixes after rebasing

64 changed files with 2508 additions and 4699 deletions
@@ -19,11 +19,21 @@ namespace standalone
     ::GCToEEInterface::RestartEE(bFinishedGC);
 }
 
+void GcScanCurrentStackRoots(promote_func* fn, ScanContext* sc)
+{
+    ::GCToEEInterface::GcScanCurrentStackRoots(fn, sc);
+}
+
 void GcScanRoots(promote_func* fn, int condemned, int max_gen, ScanContext* sc)
 {
     ::GCToEEInterface::GcScanRoots(fn, condemned, max_gen, sc);
 }
 
+void GcPoll()
+{
+    ::GCToEEInterface::GcPoll();
+}
+
 void GcStartWork(int condemned, int max_gen)
 {
     ::GCToEEInterface::GcStartWork(condemned, max_gen);
@@ -338,7 +338,7 @@ inline bool IsServerHeap()
 {
 #ifdef FEATURE_SVR_GC
     assert(g_gc_heap_type != GC_HEAP_INVALID);
-    return g_gc_heap_type == GC_HEAP_SVR;
+    return g_gc_heap_type >= GC_HEAP_SVR;
 #else // FEATURE_SVR_GC
     return false;
 #endif // FEATURE_SVR_GC
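Note on the "==" to ">=" change above: it only reads correctly if the Satori heap type is enumerated after GC_HEAP_SVR. A minimal sketch of that assumption (the member list below is illustrative, not copied from the tree; the commit does set g_heap_type = GC_HEAP_SATORI elsewhere in this diff):

// Assumed ordering: anything at or past GC_HEAP_SVR is treated as server-like.
enum GCHeapType
{
    GC_HEAP_INVALID = 0,
    GC_HEAP_WKS,
    GC_HEAP_SVR,
    GC_HEAP_SATORI   // assumption: placed after GC_HEAP_SVR, so g_gc_heap_type >= GC_HEAP_SVR matches it
};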
@@ -73,7 +73,11 @@ HHANDLETABLE GCHandleStore::GetTable()
 
 OBJECTHANDLE GCHandleStore::CreateHandleOfType(Object* object, HandleType type)
 {
+#ifdef FEATURE_SATORI_GC
+    HHANDLETABLE handletable = _underlyingBucket.pTable[GetCurrentThreadHomeHeapNumber()];
+#else
     HHANDLETABLE handletable = GetTable();
+#endif
     return ::HndCreateHandle(handletable, type, ObjectToOBJECTREF(object));
 }
 
@@ -787,6 +787,10 @@ void BlockResetAgeMapForBlocksWorker(uint32_t *pdwGen, uint32_t dwClumpMask, Sca
     STATIC_CONTRACT_GC_NOTRIGGER;
     STATIC_CONTRACT_MODE_COOPERATIVE;
 
+#if FEATURE_SATORI_GC
+    __UNREACHABLE();
+#endif
+
     // fetch the table segment we are working in
     TableSegment *pSegment = pInfo->pCurrentSegment;
 
@@ -1326,11 +1326,13 @@ bool Ref_ScanDependentHandlesForPromotion(DhContext *pDhContext)
         if (walk->pBuckets[i] != NULL)
         {
             int uCPUindex = getSlotNumber(pDhContext->m_pScanContext);
-            // int uCPUlimit = getNumberOfSlots();
-            // assert(uCPUlimit > 0);
-            int uCPUstep = getThreadCount(pDhContext->m_pScanContext);
             HHANDLETABLE* pTable = walk->pBuckets[i]->pTable;
-            // for ( ; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+#if !defined(FEATURE_SATORI_GC)
+            int uCPUlimit = getNumberOfSlots();
+            assert(uCPUlimit > 0);
+            int uCPUstep = getThreadCount(pDhContext->m_pScanContext);
+            for (; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+#endif
             {
                 HHANDLETABLE hTable = pTable[uCPUindex];
                 if (hTable)
@@ -1411,11 +1413,13 @@ void Ref_ScanWeakInteriorPointersForRelocation(uint32_t condemned, uint32_t maxg
         if (walk->pBuckets[i] != NULL)
         {
             int uCPUindex = getSlotNumber(sc);
+            HHANDLETABLE* pTable = walk->pBuckets[i]->pTable;
+#if !defined(FEATURE_SATORI_GC)
             int uCPUlimit = getNumberOfSlots();
             assert(uCPUlimit > 0);
             int uCPUstep = getThreadCount(sc);
-            HHANDLETABLE* pTable = walk->pBuckets[i]->pTable;
-            for ( ; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+            for (; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+#endif
             {
                 HHANDLETABLE hTable = pTable[uCPUindex];
                 if (hTable)
@@ -125,7 +125,7 @@ private:
 #else
         BitScanReverse(&highestBit, value);
 #endif
-        return min(highestBit - Satori::REGION_BITS, Satori::ALLOCATOR_BUCKET_COUNT - 1);
+        return min((int)highestBit - Satori::REGION_BITS, Satori::ALLOCATOR_BUCKET_COUNT - 1);
     }
 };
 
@@ -891,3 +891,12 @@ uint64_t SatoriGC::GetGenerationBudget(int generation)
     // avoid IDE0060: Remove unused parameter 'generation'
     return -1 + 0 * generation;
 }
+
+size_t SatoriGC::GetLOHThreshold()
+{
+    return Satori::LARGE_OBJECT_THRESHOLD;
+}
+
+void SatoriGC::DiagWalkHeapWithACHandling(walk_fn fn, void *context, int gen_number, bool walk_large_object_heap_p)
+{
+}
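A hedged usage sketch for the new GetLOHThreshold override (the caller below is hypothetical and not part of this commit; the returned value is the Satori::LARGE_OBJECT_THRESHOLD shown above):

// Route an allocation decision on the heap's reported large-object threshold.
bool IsLargeAllocation(IGCHeapInternal* heap, size_t size)
{
    return size >= heap->GetLOHThreshold();  // for Satori this is Satori::LARGE_OBJECT_THRESHOLD
}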
@@ -165,6 +165,10 @@ public:
 
     // Inherited via IGCHeapInternal
     uint64_t GetGenerationBudget(int generation) override;
+
+    // Inherited via IGCHeapInternal
+    size_t GetLOHThreshold() override;
+    void DiagWalkHeapWithACHandling(walk_fn fn, void *context, int gen_number, bool walk_large_object_heap_p) override;
 };
 
 #endif
@@ -91,7 +91,7 @@ void SatoriObject::DirtyCardsForContent()
 {
     _ASSERTE(IsMarked());
     MethodTable* mt = RawGetMethodTable();
-    if (mt->ContainsPointersOrCollectible())
+    if (mt->ContainsGCPointersOrCollectible())
     {
         SatoriPage* page = ContainingRegion()->m_containingPage;
         // if dealing with a collectible type, include MT in the dirty range
@@ -282,7 +282,7 @@ inline void SatoriObject::ForEachObjectRef(F lambda, bool includeCollectibleAllo
         lambda((SatoriObject**)&loaderAllocator);
     }
 
-    if (!mt->ContainsPointers())
+    if (!mt->ContainsGCPointers())
     {
         return;
     }
@@ -354,7 +354,7 @@ inline void SatoriObject::ForEachObjectRef(F lambda, size_t size, bool includeCo
         lambda((SatoriObject**)&loaderAllocator);
     }
 
-    if (!mt->ContainsPointers())
+    if (!mt->ContainsGCPointers())
     {
         return;
     }
@@ -421,7 +421,7 @@ inline void SatoriObject::ForEachObjectRef(F lambda, size_t start, size_t end)
         lambda((SatoriObject**)&loaderAllocator);
     }
 
-    if (!mt->ContainsPointers())
+    if (!mt->ContainsGCPointers())
    {
         return;
     }
@@ -1073,10 +1073,10 @@ void SatoriRecycler::AdjustHeuristics()
 
     // we trigger GC when ephemeral size grows to SatoriUtil::Gen1Target(),
     // the budget is the diff to reach that
-    size_t newGen1Budget = max(MIN_GEN1_BUDGET, ephemeralOccupancy * (SatoriUtil::Gen1Target() - 100) / 100);
+    size_t newGen1Budget = max((size_t)MIN_GEN1_BUDGET, ephemeralOccupancy * (SatoriUtil::Gen1Target() - 100) / 100);
 
     // alternatively we allow gen1 allocs up to 1/8 of total limit.
-    size_t altNewGen1Budget = max(MIN_GEN1_BUDGET, m_totalLimit / 8);
+    size_t altNewGen1Budget = max((size_t)MIN_GEN1_BUDGET, m_totalLimit / 8);
 
     // take max of both budgets
     newGen1Budget = max(newGen1Budget, altNewGen1Budget);
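Why the (size_t) casts are needed: the template form of max deduces a single type from both arguments, so mixing an integral constant with a size_t expression stops compiling after the rebase. A minimal sketch of that failure mode (the value of MIN_GEN1_BUDGET here is made up for illustration):

template <typename T> T max_(T a, T b) { return a < b ? b : a; }

constexpr int MIN_GEN1_BUDGET = 2 * 1024 * 1024;  // hypothetical value

size_t NewGen1Budget(size_t ephemeralOccupancy)
{
    // max_(MIN_GEN1_BUDGET, ephemeralOccupancy);             // would not compile: T deduced as both int and size_t
    return max_((size_t)MIN_GEN1_BUDGET, ephemeralOccupancy); // the cast unifies the argument types
}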
@@ -2114,7 +2114,7 @@ bool SatoriRecycler::DrainMarkQueuesConcurrent(SatoriWorkChunk* srcChunk, int64_
 
 void SatoriRecycler::ScheduleMarkAsChildRanges(SatoriObject* o)
 {
-    if (o->RawGetMethodTable()->ContainsPointersOrCollectible())
+    if (o->RawGetMethodTable()->ContainsGCPointersOrCollectible())
     {
         size_t start = o->Start();
         size_t remains = o->Size();
@@ -2146,7 +2146,7 @@ void SatoriRecycler::ScheduleMarkAsChildRanges(SatoriObject* o)
 
 bool SatoriRecycler::ScheduleUpdateAsChildRanges(SatoriObject* o)
 {
-    if (o->RawGetMethodTable()->ContainsPointers())
+    if (o->RawGetMethodTable()->ContainsGCPointers())
     {
         size_t start = o->Start() + sizeof(size_t);
         size_t remains = o->Size() - sizeof(size_t);
@@ -912,7 +912,7 @@ inline void SatoriRegion::PushToMarkStackIfHasPointers(SatoriObject* obj)
     _ASSERTE(obj->SameRegion(this));
     _ASSERTE(!obj->GetNextInLocalMarkStack());
 
-    if (obj->RawGetMethodTable()->ContainsPointersOrCollectible())
+    if (obj->RawGetMethodTable()->ContainsGCPointersOrCollectible())
     {
         obj->SetNextInLocalMarkStack(m_markStack);
         _ASSERTE(m_markStack == obj->GetNextInLocalMarkStack());
@@ -308,7 +308,7 @@ bool GCEvent::CreateOSManualEventNoThrow(bool initialState)
 }
 
 #define _INC_PTHREADS
-#include "..\satori\SatoriGate.h"
+#include "../satori/SatoriGate.h"
 
 #if defined(TARGET_LINUX)
 
@@ -204,11 +204,12 @@
 // Registers no longer containing GC pointers after CORINFO_HELP_ASSIGN_REF and CORINFO_HELP_CHECKED_ASSIGN_REF.
 #define RBM_CALLEE_GCTRASH_WRITEBARRIER RBM_CALLEE_TRASH_NOGC
 
-// Registers no longer containing GC pointers after CORINFO_HELP_ASSIGN_BYREF.
-#define RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF (RBM_RAX | RBM_RCX)
+// TODO: Satori make more precise?
 
 // Registers killed by CORINFO_HELP_ASSIGN_BYREF.
-#define RBM_CALLEE_TRASH_WRITEBARRIER_BYREF (RBM_RSI | RBM_RDI | RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF)
+#define RBM_CALLEE_TRASH_WRITEBARRIER_BYREF (RBM_RSI | RBM_RDI | RBM_CALLEE_TRASH_NOGC)
+
+// Registers no longer containing GC pointers after CORINFO_HELP_ASSIGN_BYREF.
+#define RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF (RBM_CALLEE_TRASH_NOGC & ~(RBM_RDI | RBM_RSI))
 
 // We have two register classifications
 // * callee trash: aka volatile or caller saved
@@ -245,7 +245,6 @@ The .NET Foundation licenses this file to you under the MIT license.
     <LinkerArg Include="-L/usr/local/lib -linotify" Condition="'$(_targetOS)' == 'freebsd'" />
     <LinkerArg Include="@(ExtraLinkerArg->'-Wl,%(Identity)')" />
     <LinkerArg Include="@(NativeFramework->'-framework %(Identity)')" Condition="'$(_IsApplePlatform)' == 'true'" />
-    <LinkerArg Include="-ld_classic" Condition="'$(_IsApplePlatform)' == 'true'" />
     <LinkerArg Include="-Wl,--eh-frame-hdr" Condition="'$(_IsApplePlatform)' != 'true'" />
 
     <!-- Google requires all the native libraries to be aligned to 16 bytes (for 16k memory page size)
@@ -270,7 +270,7 @@ namespace System.Runtime
         internal static extern void RhpSignalFinalizationComplete(uint fCount, int observedFullGcCount);
 
         [DllImport(Redhawk.BaseName)]
-        internal static extern object RhpGetNextFinalizableObject();
+        internal static extern unsafe void RhpGetNextFinalizableObject(void* pResult);
 
         [DllImport(Redhawk.BaseName)]
         internal static extern ulong RhpGetTickCount64();
@@ -276,6 +276,14 @@ EXTERN_C int32_t __stdcall RhpPInvokeExceptionGuard(PEXCEPTION_RECORD pExc
 FCDECL2(void, RhpThrowHwEx, int exceptionCode, TADDR faultingIP);
 
 EXTERN_C CODE_LOCATION RhpAssignRefAVLocation;
+EXTERN_C CODE_LOCATION RhpAssignRefAVLocationNotHeap;
+EXTERN_C CODE_LOCATION RhpCheckedAssignRefAVLocation;
+EXTERN_C CODE_LOCATION RhpByRefAssignRefAVLocation1;
+
+#if !defined(HOST_ARM64)
+EXTERN_C CODE_LOCATION RhpByRefAssignRefAVLocation2;
+#endif
+
 #if defined(HOST_X86)
 EXTERN_C CODE_LOCATION RhpAssignRefEAXAVLocation;
 EXTERN_C CODE_LOCATION RhpAssignRefECXAVLocation;
@@ -299,17 +307,18 @@ EXTERN_C CODE_LOCATION RhpByRefAssignRefAVLocation1;
 EXTERN_C CODE_LOCATION RhpByRefAssignRefAVLocation2;
 #endif
 
-#if defined(HOST_ARM64) && !defined(LSE_INSTRUCTIONS_ENABLED_BY_DEFAULT)
-EXTERN_C CODE_LOCATION RhpCheckedLockCmpXchgAVLocation2;
-EXTERN_C CODE_LOCATION RhpCheckedXchgAVLocation2;
-#endif
-
 static bool InWriteBarrierHelper(uintptr_t faultingIP)
 {
 #ifndef USE_PORTABLE_HELPERS
     static uintptr_t writeBarrierAVLocations[] =
     {
         (uintptr_t)&RhpAssignRefAVLocation,
+        (uintptr_t)&RhpAssignRefAVLocationNotHeap,
+        (uintptr_t)&RhpCheckedAssignRefAVLocation,
+        (uintptr_t)&RhpByRefAssignRefAVLocation1,
+#if !defined(HOST_ARM64)
+        (uintptr_t)&RhpByRefAssignRefAVLocation2,
+#endif
 #if defined(HOST_X86)
         (uintptr_t)&RhpAssignRefEAXAVLocation,
         (uintptr_t)&RhpAssignRefECXAVLocation,
@@ -217,17 +217,16 @@ EXTERN_C UInt32_BOOL QCALLTYPE RhpWaitForFinalizerRequest()
 //
 
 // Fetch next object which needs finalization or return null if we've reached the end of the list.
-FCIMPL0(OBJECTREF, RhpGetNextFinalizableObject)
+EXTERN_C void QCALLTYPE RhpGetNextFinalizableObject(Object** pResult)
 {
     Thread* pThread = ThreadStore::GetCurrentThread();
     pThread->DeferTransitionFrame();
     pThread->DisablePreemptiveMode();
 
-    OBJECTREF refNext = NULL;
     while (true)
     {
         // Get the next finalizable object. If we get back NULL we've reached the end of the list.
-        refNext = GCHeapUtilities::GetGCHeap()->GetNextFinalizable();
+        OBJECTREF refNext = GCHeapUtilities::GetGCHeap()->GetNextFinalizable();
         if (refNext != NULL)
         {
             // The queue may contain objects which have been marked as finalized already (via GC.SuppressFinalize()
@@ -240,10 +239,9 @@ FCIMPL0(OBJECTREF, RhpGetNextFinalizableObject)
             }
         }
 
+        *pResult = refNext;
         break;
     }
 
     pThread->EnablePreemptiveMode();
-    return refNext;
 }
-FCIMPLEND
@@ -52,6 +52,9 @@ bool InitializeGC()
     g_heap_type = GC_HEAP_WKS;
 #endif
 
+    //TODO: Satori
+    g_heap_type = GC_HEAP_SATORI;
+
     if (g_pRhConfig->GetgcConservative())
     {
         GetRuntimeInstance()->EnableConservativeStackReporting();
@@ -73,3 +73,4 @@ FCIMPL3(void, RhBulkMoveWithWriteBarrier, uint8_t* pDest, uint8_t* pSrc, size_t
     }
 #endif //FEATURE_SATORI_GC
 }
+FCIMPLEND
@@ -54,7 +54,7 @@ public:
 #endif
 #else
     // Satori does not mess up MT pointers.
-    { return get_EEType(); }
+    { return GetMethodTable(); }
 #endif
 
     ObjHeader * GetHeader() { return dac_cast<DPTR(ObjHeader)>(dac_cast<TADDR>(this) - SYNC_BLOCK_SKEW); }
@ -607,12 +607,10 @@ LEAF_ENTRY RhpCheckedLockCmpXchg, _TEXT
|
||||||
jb RecordEscape_CmpXchg // target is exposed. record an escape.
|
jb RecordEscape_CmpXchg // target is exposed. record an escape.
|
||||||
|
|
||||||
JustAssign_CmpXchg:
|
JustAssign_CmpXchg:
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocationNotHeap
|
|
||||||
lock cmpxchg [rdi], rsi // no card marking, src is not a heap object
|
lock cmpxchg [rdi], rsi // no card marking, src is not a heap object
|
||||||
ret
|
ret
|
||||||
|
|
||||||
AssignAndMarkCards_CmpXchg:
|
AssignAndMarkCards_CmpXchg:
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation
|
|
||||||
lock cmpxchg [rdi], rsi
|
lock cmpxchg [rdi], rsi
|
||||||
jne Exit_CmpXchg
|
jne Exit_CmpXchg
|
||||||
|
|
||||||
|
@ -777,12 +775,10 @@ LEAF_ENTRY RhpCheckedXchg, _TEXT
|
||||||
jb RecordEscape_Xchg // target is exposed. record an escape.
|
jb RecordEscape_Xchg // target is exposed. record an escape.
|
||||||
|
|
||||||
JustAssign_Xchg:
|
JustAssign_Xchg:
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocationNotHeap
|
|
||||||
xchg [rdi], rax // no card marking, src is not a heap object
|
xchg [rdi], rax // no card marking, src is not a heap object
|
||||||
ret
|
ret
|
||||||
|
|
||||||
AssignAndMarkCards_Xchg:
|
AssignAndMarkCards_Xchg:
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation
|
|
||||||
xchg [rdi], rax
|
xchg [rdi], rax
|
||||||
|
|
||||||
// TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
// TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
||||||
|
|
|
@ -603,12 +603,10 @@ endif
|
||||||
jb RecordEscape ; target is exposed. record an escape.
|
jb RecordEscape ; target is exposed. record an escape.
|
||||||
|
|
||||||
JustAssign:
|
JustAssign:
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocationNotHeap
|
|
||||||
lock cmpxchg [rcx], rdx ; no card marking, src is not a heap object
|
lock cmpxchg [rcx], rdx ; no card marking, src is not a heap object
|
||||||
ret
|
ret
|
||||||
|
|
||||||
AssignAndMarkCards:
|
AssignAndMarkCards:
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation
|
|
||||||
lock cmpxchg [rcx], rdx
|
lock cmpxchg [rcx], rdx
|
||||||
jne Exit
|
jne Exit
|
||||||
|
|
||||||
|
@ -766,12 +764,10 @@ endif
|
||||||
jb RecordEscape ; target is exposed. record an escape.
|
jb RecordEscape ; target is exposed. record an escape.
|
||||||
|
|
||||||
JustAssign:
|
JustAssign:
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocationNotHeap
|
|
||||||
xchg [rcx], rax ; no card marking, src is not a heap object
|
xchg [rcx], rax ; no card marking, src is not a heap object
|
||||||
ret
|
ret
|
||||||
|
|
||||||
AssignAndMarkCards:
|
AssignAndMarkCards:
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation
|
|
||||||
xchg [rcx], rax
|
xchg [rcx], rax
|
||||||
|
|
||||||
; TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
; TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
||||||
|
|
|
@ -710,7 +710,6 @@ LEAF_ENTRY RhpCheckedLockCmpXchg
|
||||||
|
|
||||||
LOCAL_LABEL(JustAssign_Cmp_Xchg):
|
LOCAL_LABEL(JustAssign_Cmp_Xchg):
|
||||||
// skip setting cards
|
// skip setting cards
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocationNotHeap
|
|
||||||
mov x10, #0
|
mov x10, #0
|
||||||
|
|
||||||
LOCAL_LABEL(AssignAndMarkCards_Cmp_Xchg):
|
LOCAL_LABEL(AssignAndMarkCards_Cmp_Xchg):
|
||||||
|
@ -723,7 +722,6 @@ ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocationNotHeap
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
mov x17, x2
|
mov x17, x2
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation
|
|
||||||
casal x2, x1, [x0] // exchange
|
casal x2, x1, [x0] // exchange
|
||||||
mov x0, x2 // x0 = result
|
mov x0, x2 // x0 = result
|
||||||
cmp x2, x17
|
cmp x2, x17
|
||||||
|
@ -736,7 +734,6 @@ LOCAL_LABEL(NoUpdate_Cmp_Xchg):
|
||||||
ret lr
|
ret lr
|
||||||
|
|
||||||
LOCAL_LABEL(TryAgain1_Cmp_Xchg):
|
LOCAL_LABEL(TryAgain1_Cmp_Xchg):
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation2
|
|
||||||
ldaxr x0, [x14]
|
ldaxr x0, [x14]
|
||||||
cmp x0, x2
|
cmp x0, x2
|
||||||
bne LOCAL_LABEL(NoUpdate_Cmp_Xchg)
|
bne LOCAL_LABEL(NoUpdate_Cmp_Xchg)
|
||||||
|
@ -927,11 +924,9 @@ LEAF_ENTRY RhpCheckedXchg, _TEXT
|
||||||
LOCAL_LABEL(JustAssign_Xchg):
|
LOCAL_LABEL(JustAssign_Xchg):
|
||||||
// TODO: VS use LSE_INSTRUCTIONS_ENABLED_BY_DEFAULT instead
|
// TODO: VS use LSE_INSTRUCTIONS_ENABLED_BY_DEFAULT instead
|
||||||
#ifdef TARGET_OSX
|
#ifdef TARGET_OSX
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocationNotHeap
|
|
||||||
swpal x1, x0, [x0] // exchange
|
swpal x1, x0, [x0] // exchange
|
||||||
#else
|
#else
|
||||||
LOCAL_LABEL(TryAgain_Xchg):
|
LOCAL_LABEL(TryAgain_Xchg):
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocationNotHeap
|
|
||||||
ldaxr x17, [x0]
|
ldaxr x17, [x0]
|
||||||
stlxr w12, x1, [x0]
|
stlxr w12, x1, [x0]
|
||||||
cbnz w12, LOCAL_LABEL(TryAgain_Xchg)
|
cbnz w12, LOCAL_LABEL(TryAgain_Xchg)
|
||||||
|
@ -943,12 +938,9 @@ ALTERNATE_ENTRY RhpCheckedXchgAVLocationNotHeap
|
||||||
LOCAL_LABEL(AssignAndMarkCards_Xchg):
|
LOCAL_LABEL(AssignAndMarkCards_Xchg):
|
||||||
mov x14, x0 // x14 = dst
|
mov x14, x0 // x14 = dst
|
||||||
#ifdef TARGET_OSX
|
#ifdef TARGET_OSX
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation
|
|
||||||
swpal x1, x0, [x0] // exchange
|
swpal x1, x0, [x0] // exchange
|
||||||
#else
|
#else
|
||||||
LOCAL_LABEL(TryAgain1_Xchg):
|
LOCAL_LABEL(TryAgain1_Xchg):
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation
|
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation2
|
|
||||||
ldaxr x17, [x0]
|
ldaxr x17, [x0]
|
||||||
stlxr w12, x1, [x0]
|
stlxr w12, x1, [x0]
|
||||||
cbnz w12, LOCAL_LABEL(TryAgain1_Xchg)
|
cbnz w12, LOCAL_LABEL(TryAgain1_Xchg)
|
||||||
|
|
|
@ -691,7 +691,6 @@ RecordEscape
|
||||||
|
|
||||||
JustAssign_Cmp_Xchg
|
JustAssign_Cmp_Xchg
|
||||||
;; skip setting cards
|
;; skip setting cards
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocationNotHeap
|
|
||||||
mov x10, #0
|
mov x10, #0
|
||||||
|
|
||||||
AssignAndMarkCards_Cmp_Xchg
|
AssignAndMarkCards_Cmp_Xchg
|
||||||
|
@ -704,7 +703,6 @@ AssignAndMarkCards_Cmp_Xchg
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
mov x17, x2
|
mov x17, x2
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation
|
|
||||||
casal x2, x1, [x0] ;; exchange
|
casal x2, x1, [x0] ;; exchange
|
||||||
mov x0, x2 ;; x0 = result
|
mov x0, x2 ;; x0 = result
|
||||||
cmp x2, x17
|
cmp x2, x17
|
||||||
|
@ -717,7 +715,6 @@ NoUpdate_Cmp_Xchg
|
||||||
ret lr
|
ret lr
|
||||||
|
|
||||||
TryAgain1_Cmp_Xchg
|
TryAgain1_Cmp_Xchg
|
||||||
ALTERNATE_ENTRY RhpCheckedLockCmpXchgAVLocation2
|
|
||||||
ldaxr x0, [x14]
|
ldaxr x0, [x14]
|
||||||
cmp x0, x2
|
cmp x0, x2
|
||||||
bne NoUpdate_Cmp_Xchg
|
bne NoUpdate_Cmp_Xchg
|
||||||
|
@ -901,7 +898,6 @@ RecordEscape_Cmp_Xchg
|
||||||
|
|
||||||
JustAssign_Xchg
|
JustAssign_Xchg
|
||||||
TryAgain_Xchg
|
TryAgain_Xchg
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocationNotHeap
|
|
||||||
;; TODO: VS use LSE_INSTRUCTIONS_ENABLED_BY_DEFAULT instead
|
;; TODO: VS use LSE_INSTRUCTIONS_ENABLED_BY_DEFAULT instead
|
||||||
ldaxr x17, [x0]
|
ldaxr x17, [x0]
|
||||||
stlxr w12, x1, [x0]
|
stlxr w12, x1, [x0]
|
||||||
|
@ -913,8 +909,6 @@ TryAgain_Xchg
|
||||||
AssignAndMarkCards_Xchg
|
AssignAndMarkCards_Xchg
|
||||||
mov x14, x0 ;; x14 = dst
|
mov x14, x0 ;; x14 = dst
|
||||||
TryAgain1_Xchg
|
TryAgain1_Xchg
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation
|
|
||||||
ALTERNATE_ENTRY RhpCheckedXchgAVLocation2
|
|
||||||
ldaxr x17, [x0]
|
ldaxr x17, [x0]
|
||||||
stlxr w12, x1, [x0]
|
stlxr w12, x1, [x0]
|
||||||
cbnz w12, TryAgain1_Xchg
|
cbnz w12, TryAgain1_Xchg
|
||||||
|
|
|
@@ -91,6 +91,47 @@ void GCToEEInterface::BeforeGcScanRoots(int condemned, bool is_bgc, bool is_conc
 #endif
 }
 
+/*
+ * Scan current stack
+ */
+
+void GCToEEInterface::GcScanCurrentStackRoots(ScanFunc* fn, ScanContext* sc)
+{
+    Thread* pThread = ThreadStore::GetCurrentThread();
+    if (pThread->IsGCSpecial())
+        return;
+
+    InlinedThreadStaticRoot* pRoot = pThread->GetInlinedThreadStaticList();
+    while (pRoot != NULL)
+    {
+        STRESS_LOG2(LF_GC | LF_GCROOTS, LL_INFO100, "{ Scanning Thread's %p inline thread statics root %p. \n", pThread, pRoot);
+        EnumGcRef(&pRoot->m_threadStaticsBase, GCRK_Object, fn, sc);
+        pRoot = pRoot->m_next;
+    }
+
+    STRESS_LOG1(LF_GC | LF_GCROOTS, LL_INFO100, "{ Scanning Thread's %p thread statics root. \n", pThread);
+    EnumGcRef(pThread->GetThreadStaticStorage(), GCRK_Object, fn, sc);
+
+    STRESS_LOG1(LF_GC | LF_GCROOTS, LL_INFO100, "{ Starting scan of Thread %p\n", pThread);
+    sc->thread_under_crawl = pThread;
+#if defined(FEATURE_EVENT_TRACE) && !defined(DACCESS_COMPILE)
+    sc->dwEtwRootKind = kEtwGCRootKindStack;
+#endif
+
+    pThread->GcScanRoots(fn, sc);
+
+#if defined(FEATURE_EVENT_TRACE) && !defined(DACCESS_COMPILE)
+    sc->dwEtwRootKind = kEtwGCRootKindOther;
+#endif
+    STRESS_LOG1(LF_GC | LF_GCROOTS, LL_INFO100, "Ending scan of Thread %p }\n", pThread);
+
+    sc->thread_under_crawl = NULL;
+}
+
+/*
+ * Scan all stack roots
+ */
+
 void GCToEEInterface::GcScanRoots(ScanFunc* fn, int condemned, int max_gen, ScanContext* sc)
 {
     // STRESS_LOG1(LF_GCROOTS, LL_INFO10, "GCScan: Phase = %s\n", sc->promotion ? "promote" : "relocate");
@@ -154,6 +195,20 @@ void GCToEEInterface::AfterGcScanRoots(int condemned, int /*max_gen*/, ScanConte
 #endif
 }
 
+void GCToEEInterface::GcPoll()
+{
+    if (ThreadStore::IsTrapThreadsRequested())
+    {
+        Thread* pThread = ThreadStore::GetCurrentThread();
+        assert(!pThread->IsGCSpecial());
+        assert(pThread->IsCurrentThreadInCooperativeMode());
+        assert(pThread != ThreadStore::GetSuspendingThread());
+
+        pThread->EnablePreemptiveMode();
+        pThread->DisablePreemptiveMode();
+    }
+}
+
 void GCToEEInterface::GcDone(int condemned)
 {
     // Invoke any registered callouts for the end of the collection.
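The new GcPoll above gives long-running native code a cooperative safe point: when a suspension is pending, briefly flipping to preemptive mode and back lets the GC proceed. A hypothetical caller (not from this commit) might use it like this:

// Periodically poll inside a long loop so a pending GC does not have to wait for the whole pass.
void TouchManyCards(uint8_t* cards, size_t count)
{
    for (size_t i = 0; i < count; i++)
    {
        cards[i] = 1;
        if ((i & 0xFFFF) == 0)
            GCToEEInterface::GcPoll();  // no-op unless ThreadStore::IsTrapThreadsRequested()
    }
}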
@@ -559,35 +614,6 @@ struct ThreadStubArguments
     CLREventStatic m_ThreadStartedEvent;
 };
 
-static bool CreateUnsuspendableThread(void (*threadStart)(void*), void* arg, const char* name)
-{
-    UNREFERENCED_PARAMETER(name);
-
-    ThreadStubArguments* threadStubArgs = new (nothrow) ThreadStubArguments();
-    if (!threadStubArgs)
-        return false;
-
-    threadStubArgs->m_pRealStartRoutine = threadStart;
-    threadStubArgs->m_pRealContext = arg;
-
-    // Helper used to wrap the start routine of background GC threads so we can do things like initialize the
-    // Redhawk thread state which requires running in the new thread's context.
-    auto threadStub = [](void* argument) -> DWORD
-    {
-        ThreadStore::RawGetCurrentThread()->SetGCSpecial();
-
-        ThreadStubArguments* pStartContext = (ThreadStubArguments*)argument;
-        auto realStartRoutine = pStartContext->m_pRealStartRoutine;
-        void* realContext = pStartContext->m_pRealContext;
-        delete pStartContext;
-
-        STRESS_LOG_RESERVE_MEM(GC_STRESSLOG_MULTIPLY);
-
-        realStartRoutine(realContext);
-
-        return 0;
-    };
-
 static bool CreateNonSuspendableThread(void (*threadStart)(void*), void* arg, const char* name)
 {
     UNREFERENCED_PARAMETER(name);
@@ -32,6 +32,8 @@ namespace Internal.Runtime
 
         private object? TryAllocateObject(MethodTable* type, nuint objectSize)
         {
+            // TODO: Satori can allocate immortal objects naturally. Use that instead. (see: AllocateImmortalObject)
+
             HalfBakedObject* obj = null;
 
             using (m_Crst.EnterScope())
@ -57,14 +57,9 @@ ifndef FEATURE_SATORI_GC
|
||||||
; RDI - address of ref-field (assigned to)
|
; RDI - address of ref-field (assigned to)
|
||||||
; RSI - address of the data (source)
|
; RSI - address of the data (source)
|
||||||
; RCX is trashed
|
; RCX is trashed
|
||||||
; RAX is trashed
|
; RAX is trashed when FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP is defined
|
||||||
;
|
|
||||||
; NOTE: Keep in sync with RBM_CALLEE_TRASH_WRITEBARRIER_BYREF and RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF
|
|
||||||
; if you add more trashed registers.
|
|
||||||
;
|
|
||||||
; Exit:
|
; Exit:
|
||||||
; RDI, RSI are incremented by SIZEOF(LPVOID)
|
; RDI, RSI are incremented by SIZEOF(LPVOID)
|
||||||
;
|
|
||||||
LEAF_ENTRY JIT_ByRefWriteBarrier, _TEXT
|
LEAF_ENTRY JIT_ByRefWriteBarrier, _TEXT
|
||||||
mov rcx, [rsi]
|
mov rcx, [rsi]
|
||||||
|
|
||||||
|
@ -157,6 +152,8 @@ endif
|
||||||
cmp rcx, [g_ephemeral_high]
|
cmp rcx, [g_ephemeral_high]
|
||||||
jnb Exit
|
jnb Exit
|
||||||
|
|
||||||
|
; do the following checks only if we are allowed to trash rax
|
||||||
|
; otherwise we don't have enough registers
|
||||||
ifdef FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
|
ifdef FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
|
||||||
mov rax, rcx
|
mov rax, rcx
|
||||||
|
|
||||||
|
@ -317,6 +314,8 @@ Section segment para 'DATA'
|
||||||
JIT_WriteBarrier_Loc:
|
JIT_WriteBarrier_Loc:
|
||||||
dq 0
|
dq 0
|
||||||
|
|
||||||
|
extern JIT_WriteBarrier:proc
|
||||||
|
|
||||||
LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
|
LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
|
||||||
; JIT_WriteBarrier(Object** dst, Object* src)
|
; JIT_WriteBarrier(Object** dst, Object* src)
|
||||||
|
|
||||||
|
@ -327,224 +326,6 @@ LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
|
||||||
jmp JIT_WriteBarrier
|
jmp JIT_WriteBarrier
|
||||||
LEAF_END JIT_WriteBarrier_Callable, _TEXT
|
LEAF_END JIT_WriteBarrier_Callable, _TEXT
|
||||||
|
|
||||||
; Mark start of the code region that we patch at runtime
|
|
||||||
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
|
||||||
ret
|
|
||||||
LEAF_END JIT_PatchedCodeStart, _TEXT
|
|
||||||
|
|
||||||
; void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
|
||||||
LEAF_ENTRY JIT_CheckedWriteBarrier, _TEXT
|
|
||||||
; See if dst is in GCHeap
|
|
||||||
mov rax, [g_card_bundle_table] ; fetch the page byte map
|
|
||||||
mov r8, rcx
|
|
||||||
shr r8, 30 ; dst page index
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
jne CheckedEntry
|
|
||||||
|
|
||||||
NotInHeap:
|
|
||||||
; See comment above about possible AV
|
|
||||||
mov [rcx], rdx
|
|
||||||
ret
|
|
||||||
LEAF_END_MARKED JIT_CheckedWriteBarrier, _TEXT
|
|
||||||
|
|
||||||
ALTERNATE_ENTRY macro Name
|
|
||||||
|
|
||||||
Name label proc
|
|
||||||
PUBLIC Name
|
|
||||||
endm
|
|
||||||
|
|
||||||
;
|
|
||||||
; rcx - dest address
|
|
||||||
; rdx - object
|
|
||||||
;
|
|
||||||
LEAF_ENTRY JIT_WriteBarrier, _TEXT
|
|
||||||
|
|
||||||
ifdef FEATURE_SATORI_EXTERNAL_OBJECTS
|
|
||||||
; check if src is in heap
|
|
||||||
mov rax, [g_card_bundle_table] ; fetch the page byte map
|
|
||||||
ALTERNATE_ENTRY CheckedEntry
|
|
||||||
mov r8, rdx
|
|
||||||
shr r8, 30 ; src page index
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
je JustAssign ; src not in heap
|
|
||||||
else
|
|
||||||
ALTERNATE_ENTRY CheckedEntry
|
|
||||||
endif
|
|
||||||
|
|
||||||
; check for escaping assignment
|
|
||||||
; 1) check if we own the source region
|
|
||||||
mov r8, rdx
|
|
||||||
and r8, 0FFFFFFFFFFE00000h ; source region
|
|
||||||
|
|
||||||
ifndef FEATURE_SATORI_EXTERNAL_OBJECTS
|
|
||||||
jz JustAssign ; assigning null
|
|
||||||
endif
|
|
||||||
|
|
||||||
mov rax, gs:[30h] ; thread tag, TEB on NT
|
|
||||||
cmp qword ptr [r8], rax
|
|
||||||
jne AssignAndMarkCards ; not local to this thread
|
|
||||||
|
|
||||||
; 2) check if the src and dst are from the same region
|
|
||||||
mov rax, rcx
|
|
||||||
and rax, 0FFFFFFFFFFE00000h ; target aligned to region
|
|
||||||
cmp rax, r8
|
|
||||||
jne RecordEscape ; cross region assignment. definitely escaping
|
|
||||||
|
|
||||||
; 3) check if the target is exposed
|
|
||||||
mov rax, rcx
|
|
||||||
and rax, 01FFFFFh
|
|
||||||
shr rax, 3
|
|
||||||
bt qword ptr [r8], rax
|
|
||||||
jb RecordEscape ; target is exposed. record an escape.
|
|
||||||
|
|
||||||
JustAssign:
|
|
||||||
mov [rcx], rdx ; no card marking, src is not a heap object
|
|
||||||
ret
|
|
||||||
|
|
||||||
AssignAndMarkCards:
|
|
||||||
mov [rcx], rdx
|
|
||||||
|
|
||||||
; TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
|
||||||
; needs to suspend EE, not sure if skipping mode check would worth that much.
|
|
||||||
mov r11, qword ptr [g_sw_ww_table]
|
|
||||||
|
|
||||||
; check the barrier state. this must be done after the assignment (in program order)
|
|
||||||
; if state == 2 we do not set or dirty cards.
|
|
||||||
cmp r11, 2h
|
|
||||||
jne DoCards
|
|
||||||
Exit:
|
|
||||||
ret
|
|
||||||
|
|
||||||
DoCards:
|
|
||||||
; if same region, just check if barrier is not concurrent
|
|
||||||
xor rdx, rcx
|
|
||||||
shr rdx, 21
|
|
||||||
jz CheckConcurrent
|
|
||||||
|
|
||||||
; if src is in gen2/3 and the barrier is not concurrent we do not need to mark cards
|
|
||||||
cmp dword ptr [r8 + 16], 2
|
|
||||||
jl MarkCards
|
|
||||||
|
|
||||||
CheckConcurrent:
|
|
||||||
cmp r11, 0h
|
|
||||||
je Exit
|
|
||||||
|
|
||||||
MarkCards:
|
|
||||||
; fetch card location for rcx
|
|
||||||
mov r9 , [g_card_table] ; fetch the page map
|
|
||||||
mov r8, rcx
|
|
||||||
shr rcx, 30
|
|
||||||
mov rax, qword ptr [r9 + rcx * 8] ; page
|
|
||||||
sub r8, rax ; offset in page
|
|
||||||
mov rdx,r8
|
|
||||||
shr r8, 9 ; card offset
|
|
||||||
shr rdx, 20 ; group index
|
|
||||||
lea rdx, [rax + rdx * 2 + 80h] ; group offset
|
|
||||||
|
|
||||||
; check if concurrent marking is in progress
|
|
||||||
cmp r11, 0h
|
|
||||||
jne DirtyCard
|
|
||||||
|
|
||||||
; SETTING CARD FOR RCX
|
|
||||||
SetCard:
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
jne Exit
|
|
||||||
mov byte ptr [rax + r8], 1
|
|
||||||
SetGroup:
|
|
||||||
cmp byte ptr [rdx], 0
|
|
||||||
jne CardSet
|
|
||||||
mov byte ptr [rdx], 1
|
|
||||||
SetPage:
|
|
||||||
cmp byte ptr [rax], 0
|
|
||||||
jne CardSet
|
|
||||||
mov byte ptr [rax], 1
|
|
||||||
|
|
||||||
CardSet:
|
|
||||||
; check if concurrent marking is still not in progress
|
|
||||||
cmp qword ptr [g_sw_ww_table], 0h
|
|
||||||
jne DirtyCard
|
|
||||||
ret
|
|
||||||
|
|
||||||
; DIRTYING CARD FOR RCX
|
|
||||||
DirtyCard:
|
|
||||||
mov byte ptr [rax + r8], 4
|
|
||||||
DirtyGroup:
|
|
||||||
cmp byte ptr [rdx], 4
|
|
||||||
je Exit
|
|
||||||
mov byte ptr [rdx], 4
|
|
||||||
DirtyPage:
|
|
||||||
cmp byte ptr [rax], 4
|
|
||||||
je Exit
|
|
||||||
mov byte ptr [rax], 4
|
|
||||||
ret
|
|
||||||
|
|
||||||
; this is expected to be rare.
|
|
||||||
RecordEscape:
|
|
||||||
|
|
||||||
; 4) check if the source is escaped
|
|
||||||
mov rax, rdx
|
|
||||||
add rax, 8 ; escape bit is MT + 1
|
|
||||||
and rax, 01FFFFFh
|
|
||||||
shr rax, 3
|
|
||||||
bt qword ptr [r8], rax
|
|
||||||
jb AssignAndMarkCards ; source is already escaped.
|
|
||||||
|
|
||||||
; Align rsp
|
|
||||||
mov r9, rsp
|
|
||||||
and rsp, -16
|
|
||||||
|
|
||||||
; save rsp, rcx, rdx, r8 and have enough stack for the callee
|
|
||||||
push r9
|
|
||||||
push rcx
|
|
||||||
push rdx
|
|
||||||
push r8
|
|
||||||
sub rsp, 20h
|
|
||||||
|
|
||||||
; void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region)
|
|
||||||
call qword ptr [r8 + 8]
|
|
||||||
|
|
||||||
add rsp, 20h
|
|
||||||
pop r8
|
|
||||||
pop rdx
|
|
||||||
pop rcx
|
|
||||||
pop rsp
|
|
||||||
jmp AssignAndMarkCards
|
|
||||||
LEAF_END_MARKED JIT_WriteBarrier, _TEXT
|
|
||||||
|
|
||||||
; JIT_ByRefWriteBarrier has weird symantics, see usage in StubLinkerX86.cpp
|
|
||||||
;
|
|
||||||
; Entry:
|
|
||||||
; RDI - address of ref-field (assigned to)
|
|
||||||
; RSI - address of the data (source)
|
|
||||||
; Note: RyuJIT assumes that all volatile registers can be trashed by
|
|
||||||
; the CORINFO_HELP_ASSIGN_BYREF helper (i.e. JIT_ByRefWriteBarrier)
|
|
||||||
; except RDI and RSI. This helper uses and defines RDI and RSI, so
|
|
||||||
; they remain as live GC refs or byrefs, and are not killed.
|
|
||||||
; Exit:
|
|
||||||
; RDI, RSI are incremented by SIZEOF(LPVOID)
|
|
||||||
LEAF_ENTRY JIT_ByRefWriteBarrier, _TEXT
|
|
||||||
mov rcx, rdi
|
|
||||||
mov rdx, [rsi]
|
|
||||||
add rdi, 8h
|
|
||||||
add rsi, 8h
|
|
||||||
|
|
||||||
; See if dst is in GCHeap
|
|
||||||
mov rax, [g_card_bundle_table] ; fetch the page byte map
|
|
||||||
mov r8, rcx
|
|
||||||
shr r8, 30 ; dst page index
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
jne CheckedEntry
|
|
||||||
|
|
||||||
NotInHeap:
|
|
||||||
mov [rcx], rdx
|
|
||||||
ret
|
|
||||||
LEAF_END_MARKED JIT_ByRefWriteBarrier, _TEXT
|
|
||||||
|
|
||||||
; Mark start of the code region that we patch at runtime
|
|
||||||
LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
|
||||||
ret
|
|
||||||
LEAF_END JIT_PatchedCodeLast, _TEXT
|
|
||||||
|
|
||||||
endif ; FEATURE_SATORI_GC
|
endif ; FEATURE_SATORI_GC
|
||||||
|
|
||||||
; The following helper will access ("probe") a word on each page of the stack
|
; The following helper will access ("probe") a word on each page of the stack
|
||||||
|
|
|
@ -5,6 +5,8 @@
|
||||||
#include "unixasmmacros.inc"
|
#include "unixasmmacros.inc"
|
||||||
#include "asmconstants.h"
|
#include "asmconstants.h"
|
||||||
|
|
||||||
|
#ifndef FEATURE_SATORI_GC
|
||||||
|
|
||||||
// JIT_ByRefWriteBarrier has weird semantics, see usage in StubLinkerX86.cpp
|
// JIT_ByRefWriteBarrier has weird semantics, see usage in StubLinkerX86.cpp
|
||||||
//
|
//
|
||||||
// Entry:
|
// Entry:
|
||||||
|
@ -215,248 +217,7 @@ LEAF_END_MARKED JIT_ByRefWriteBarrier, _TEXT
|
||||||
|
|
||||||
#else //FEATURE_SATORI_GC ##############################################################################
|
#else //FEATURE_SATORI_GC ##############################################################################
|
||||||
|
|
||||||
.macro ALTERNATE_ENTRY Name
|
// look in patchedcode.S
|
||||||
.global C_FUNC(\Name)
|
|
||||||
C_FUNC(\Name):
|
|
||||||
.endm
|
|
||||||
|
|
||||||
// Mark start of the code region that we patch at runtime
|
|
||||||
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
|
||||||
ret
|
|
||||||
LEAF_END JIT_PatchedCodeStart, _TEXT
|
|
||||||
|
|
||||||
|
|
||||||
// void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
|
||||||
LEAF_ENTRY JIT_CheckedWriteBarrier, _TEXT
|
|
||||||
|
|
||||||
// See if dst is in GCHeap
|
|
||||||
PREPARE_EXTERNAL_VAR g_card_bundle_table, rax // fetch the page byte map
|
|
||||||
mov rax, [rax]
|
|
||||||
mov r8, rdi
|
|
||||||
shr r8, 30 // dst page index
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
jne C_FUNC(CheckedEntry)
|
|
||||||
|
|
||||||
NotInHeap:
|
|
||||||
// See comment above about possible AV
|
|
||||||
mov [rdi], rsi
|
|
||||||
ret
|
|
||||||
LEAF_END_MARKED JIT_CheckedWriteBarrier, _TEXT
|
|
||||||
|
|
||||||
|
|
||||||
//
|
|
||||||
// rdi - dest address
|
|
||||||
// rsi - object
|
|
||||||
//
|
|
||||||
.balign 16
|
|
||||||
LEAF_ENTRY JIT_WriteBarrier, _TEXT
|
|
||||||
#ifdef FEATURE_SATORI_EXTERNAL_OBJECTS
|
|
||||||
// check if src is in heap
|
|
||||||
PREPARE_EXTERNAL_VAR g_card_bundle_table, rax // fetch the page byte map
|
|
||||||
mov rax, [rax]
|
|
||||||
ALTERNATE_ENTRY CheckedEntry
|
|
||||||
mov r8, rsi
|
|
||||||
shr r8, 30 // src page index
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
je JustAssign // src not in heap
|
|
||||||
#else
|
|
||||||
ALTERNATE_ENTRY CheckedEntry
|
|
||||||
#endif
|
|
||||||
|
|
||||||
// check for escaping assignment
|
|
||||||
// 1) check if we own the source region
|
|
||||||
mov rdx, rsi
|
|
||||||
and rdx, 0xFFFFFFFFFFE00000 // source region
|
|
||||||
#ifndef FEATURE_SATORI_EXTERNAL_OBJECTS
|
|
||||||
jz JustAssign // assigning null
|
|
||||||
#endif
|
|
||||||
#ifdef TARGET_OSX
|
|
||||||
mov rax, gs:[0] // thread tag
|
|
||||||
#else
|
|
||||||
mov rax, fs:[0] // thread tag
|
|
||||||
#endif
|
|
||||||
cmp qword ptr [rdx], rax
|
|
||||||
jne AssignAndMarkCards // not local to this thread
|
|
||||||
|
|
||||||
// 2) check if the src and dst are from the same region
|
|
||||||
mov rax, rdi
|
|
||||||
and rax, 0xFFFFFFFFFFE00000 // target aligned to region
|
|
||||||
cmp rax, rdx
|
|
||||||
jnz RecordEscape // cross region assignment. definitely escaping
|
|
||||||
|
|
||||||
// 3) check if the target is exposed
|
|
||||||
mov rax, rdi
|
|
||||||
and rax, 0x1FFFFF
|
|
||||||
shr rax, 3
|
|
||||||
bt qword ptr [rdx], rax
|
|
||||||
jb RecordEscape // target is exposed. record an escape.
|
|
||||||
|
|
||||||
JustAssign:
|
|
||||||
mov [rdi], rsi // no card marking, src is not a heap object
|
|
||||||
|
|
||||||
// set rdi, rsi per contract with JIT_ByRefWriteBarrier
|
|
||||||
add rdi, 8
|
|
||||||
mov rsi, r10
|
|
||||||
ret
|
|
||||||
|
|
||||||
AssignAndMarkCards:
|
|
||||||
mov [rdi], rsi
|
|
||||||
|
|
||||||
// TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
|
||||||
// needs to suspend EE, not sure if skipping mode check would worth that much.
|
|
||||||
PREPARE_EXTERNAL_VAR g_sw_ww_table, rcx
|
|
||||||
mov r11, [rcx]
|
|
||||||
|
|
||||||
// set rdi per contract with JIT_ByRefWriteBarrier
|
|
||||||
mov rax, rdi
|
|
||||||
add rdi, 8
|
|
||||||
|
|
||||||
// check the barrier state. this must be done after the assignment (in program order)
|
|
||||||
// if state == 2 we do not set or dirty cards.
|
|
||||||
cmp r11, 2
|
|
||||||
jne DoCards
|
|
||||||
// set rsi per contract with JIT_ByRefWriteBarrier
|
|
||||||
mov rsi, r10
|
|
||||||
Exit:
|
|
||||||
ret
|
|
||||||
|
|
||||||
DoCards:
|
|
||||||
// if same region, just check if barrier is not concurrent
|
|
||||||
xor rsi, rax
|
|
||||||
shr rsi, 21
|
|
||||||
// set rsi per contract with JIT_ByRefWriteBarrier
|
|
||||||
mov rsi, r10
|
|
||||||
jz CheckConcurrent // same region, just check if barrier is not concurrent
|
|
||||||
|
|
||||||
// if src is in gen2/3 and the barrier is not concurrent we do not need to mark cards
|
|
||||||
cmp dword ptr [rdx + 16], 2
|
|
||||||
jl MarkCards
|
|
||||||
|
|
||||||
CheckConcurrent:
|
|
||||||
// if concurrent, load card location
|
|
||||||
cmp r11, 0
|
|
||||||
je Exit
|
|
||||||
|
|
||||||
MarkCards:
|
|
||||||
// fetch card location for rax (saved rdi)
|
|
||||||
PREPARE_EXTERNAL_VAR g_card_table, r9
|
|
||||||
mov r9, [r9] // fetch the page map
|
|
||||||
mov rdx, rax
|
|
||||||
shr rax, 30
|
|
||||||
mov rax, qword ptr [r9 + rax * 8] // page
|
|
||||||
sub rdx, rax // offset in page
|
|
||||||
mov r8, rdx
|
|
||||||
shr rdx, 9 // card offset
|
|
||||||
shr r8, 20 // group index
|
|
||||||
lea r8, [rax + r8 * 2 + 0x80] // group offset
|
|
||||||
|
|
||||||
// check if concurrent marking is in progress
|
|
||||||
cmp r11, 0
|
|
||||||
jne DirtyCard
|
|
||||||
|
|
||||||
// SETTING CARD
|
|
||||||
SetCard:
|
|
||||||
cmp byte ptr [rax + rdx], 0
|
|
||||||
jne Exit
|
|
||||||
mov byte ptr [rax + rdx], 1
|
|
||||||
SetGroup:
|
|
||||||
cmp byte ptr [r8], 0
|
|
||||||
jne CardSet
|
|
||||||
mov byte ptr [r8], 1
|
|
||||||
SetPage:
|
|
||||||
cmp byte ptr [rax], 0
|
|
||||||
jne CardSet
|
|
||||||
mov byte ptr [rax], 1
|
|
||||||
|
|
||||||
CardSet:
|
|
||||||
// check if concurrent marking is still not in progress
|
|
||||||
cmp qword ptr [rcx], 0
|
|
||||||
jne DirtyCard
|
|
||||||
ret
|
|
||||||
|
|
||||||
// DIRTYING CARD
|
|
||||||
DirtyCard:
|
|
||||||
mov byte ptr [rax + rdx], 4
|
|
||||||
DirtyGroup:
|
|
||||||
cmp byte ptr [r8], 4
|
|
||||||
je Exit
|
|
||||||
mov byte ptr [r8], 4
|
|
||||||
DirtyPage:
|
|
||||||
cmp byte ptr [rax], 4
|
|
||||||
je Exit
|
|
||||||
mov byte ptr [rax], 4
|
|
||||||
ret
|
|
||||||
|
|
||||||
// this is expected to be rare.
|
|
||||||
RecordEscape:
|
|
||||||
|
|
||||||
// 4) check if the source is escaped
|
|
||||||
mov rax, rsi
|
|
||||||
add rax, 8 // escape bit is MT + 1
|
|
||||||
and rax, 0x1FFFFF
|
|
||||||
shr rax, 3
|
|
||||||
bt qword ptr [rdx], rax
|
|
||||||
jb AssignAndMarkCards // source is already escaped.
|
|
||||||
|
|
||||||
// Align rsp
|
|
||||||
mov r9, rsp
|
|
||||||
and rsp, -16
|
|
||||||
sub rsp, 8
|
|
||||||
|
|
||||||
// save rsp, rdi, rsi, rdx and r10 (possibly preadjusted rsi)
|
|
||||||
push r9
|
|
||||||
push rdi
|
|
||||||
push rsi
|
|
||||||
push rdx
|
|
||||||
push r10
|
|
||||||
|
|
||||||
// void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region)
|
|
||||||
call qword ptr [rdx + 8]
|
|
||||||
|
|
||||||
pop r10
|
|
||||||
pop rdx
|
|
||||||
pop rsi
|
|
||||||
pop rdi
|
|
||||||
pop rsp
|
|
||||||
jmp AssignAndMarkCards
|
|
||||||
LEAF_END_MARKED JIT_WriteBarrier, _TEXT
|
|
||||||
|
|
||||||
|
|
||||||
// JIT_ByRefWriteBarrier has weird symantics, see usage in StubLinkerX86.cpp
|
|
||||||
//
|
|
||||||
// Entry:
|
|
||||||
// RDI - address of ref-field (assigned to)
|
|
||||||
// RSI - address of the data (source)
|
|
||||||
// Note: RyuJIT assumes that all volatile registers can be trashed by
|
|
||||||
// the CORINFO_HELP_ASSIGN_BYREF helper (i.e. JIT_ByRefWriteBarrier)
|
|
||||||
// except RDI and RSI. This helper uses and defines RDI and RSI, so
|
|
||||||
// they remain as live GC refs or byrefs, and are not killed.
|
|
||||||
// Exit:
|
|
||||||
// RDI, RSI are incremented by SIZEOF(LPVOID)
|
|
||||||
LEAF_ENTRY JIT_ByRefWriteBarrier, _TEXT
|
|
||||||
lea r10, [rsi + 8]
|
|
||||||
mov rsi, [rsi]
|
|
||||||
|
|
||||||
// See if dst is in GCHeap
|
|
||||||
PREPARE_EXTERNAL_VAR g_card_bundle_table, rax // fetch the page byte map
|
|
||||||
mov rax, [rax]
|
|
||||||
|
|
||||||
mov r8, rdi
|
|
||||||
shr r8, 30 // dst page index
|
|
||||||
cmp byte ptr [rax + r8], 0
|
|
||||||
jne C_FUNC(CheckedEntry)
|
|
||||||
|
|
||||||
NotInHeap_ByRefWriteBarrier:
|
|
||||||
mov [rdi], rsi
|
|
||||||
add rdi, 8
|
|
||||||
mov rsi, r10
|
|
||||||
ret
|
|
||||||
LEAF_END_MARKED JIT_ByRefWriteBarrier, _TEXT
|
|
||||||
|
|
||||||
// Mark start of the code region that we patch at runtime
|
|
||||||
LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
|
||||||
ret
|
|
||||||
LEAF_END JIT_PatchedCodeLast, _TEXT
|
|
||||||
|
|
||||||
#endif // FEATURE_SATORI_GC
|
#endif // FEATURE_SATORI_GC
|
||||||
|
|
||||||
|
|
|
@ -5,6 +5,8 @@
|
||||||
#include "unixasmmacros.inc"
|
#include "unixasmmacros.inc"
|
||||||
#include "asmconstants.h"
|
#include "asmconstants.h"
|
||||||
|
|
||||||
|
#ifndef FEATURE_SATORI_GC
|
||||||
|
|
||||||
// Mark start of the code region that we patch at runtime
|
// Mark start of the code region that we patch at runtime
|
||||||
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
||||||
ret
|
ret
|
||||||
|
@ -243,3 +245,250 @@ LEAF_END_MARKED JIT_WriteBarrier, _TEXT
|
||||||
LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
||||||
ret
|
ret
|
||||||
LEAF_END JIT_PatchedCodeLast, _TEXT
|
LEAF_END JIT_PatchedCodeLast, _TEXT
|
||||||
|
|
||||||
|
#else //FEATURE_SATORI_GC ##############################################################################
|
||||||
|
|
||||||
|
.macro ALTERNATE_ENTRY Name
|
||||||
|
.global C_FUNC(\Name)
|
||||||
|
C_FUNC(\Name):
|
||||||
|
.endm
|
||||||
|
|
||||||
|
// Mark start of the code region that we patch at runtime
|
||||||
|
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
||||||
|
ret
|
||||||
|
LEAF_END JIT_PatchedCodeStart, _TEXT
|
||||||
|
|
||||||
|
|
||||||
|
// void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
||||||
|
LEAF_ENTRY JIT_CheckedWriteBarrier, _TEXT
|
||||||
|
|
||||||
|
// See if dst is in GCHeap
|
||||||
|
PREPARE_EXTERNAL_VAR g_card_bundle_table, rax // fetch the page byte map
|
||||||
|
mov rax, [rax]
|
||||||
|
mov r8, rdi
|
||||||
|
shr r8, 30 // dst page index
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
jne C_FUNC(CheckedEntry)
|
||||||
|
|
||||||
|
NotInHeap:
|
||||||
|
// See comment above about possible AV
|
||||||
|
mov [rdi], rsi
|
||||||
|
ret
|
||||||
|
LEAF_END_MARKED JIT_CheckedWriteBarrier, _TEXT
|
||||||
|
|
||||||
|
|
||||||
|
//
|
||||||
|
// rdi - dest address
|
||||||
|
// rsi - object
|
||||||
|
//
|
||||||
|
.balign 16
|
||||||
|
LEAF_ENTRY JIT_WriteBarrier, _TEXT
|
||||||
|
#ifdef FEATURE_SATORI_EXTERNAL_OBJECTS
|
||||||
|
// check if src is in heap
|
||||||
|
PREPARE_EXTERNAL_VAR g_card_bundle_table, rax // fetch the page byte map
|
||||||
|
mov rax, [rax]
|
||||||
|
ALTERNATE_ENTRY CheckedEntry
|
||||||
|
mov r8, rsi
|
||||||
|
shr r8, 30 // src page index
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
je JustAssign // src not in heap
|
||||||
|
#else
|
||||||
|
ALTERNATE_ENTRY CheckedEntry
|
||||||
|
#endif
|
||||||
|
|
||||||
|
// check for escaping assignment
|
||||||
|
// 1) check if we own the source region
|
||||||
|
mov rdx, rsi
|
||||||
|
and rdx, 0xFFFFFFFFFFE00000 // source region
|
||||||
|
#ifndef FEATURE_SATORI_EXTERNAL_OBJECTS
|
||||||
|
jz JustAssign // assigning null
|
||||||
|
#endif
|
||||||
|
#ifdef TARGET_OSX
|
||||||
|
mov rax, gs:[0] // thread tag
|
||||||
|
#else
|
||||||
|
mov rax, fs:[0] // thread tag
|
||||||
|
#endif
|
||||||
|
cmp qword ptr [rdx], rax
|
||||||
|
jne AssignAndMarkCards // not local to this thread
|
||||||
|
|
||||||
|
// 2) check if the src and dst are from the same region
|
||||||
|
mov rax, rdi
|
||||||
|
and rax, 0xFFFFFFFFFFE00000 // target aligned to region
|
||||||
|
cmp rax, rdx
|
||||||
|
jnz RecordEscape // cross region assignment. definitely escaping
|
||||||
|
|
||||||
|
// 3) check if the target is exposed
|
||||||
|
mov rax, rdi
|
||||||
|
and rax, 0x1FFFFF
|
||||||
|
shr rax, 3
|
||||||
|
bt qword ptr [rdx], rax
|
||||||
|
jb RecordEscape // target is exposed. record an escape.
|
||||||
|
|
||||||
|
JustAssign:
|
||||||
|
mov [rdi], rsi // no card marking, src is not a heap object
|
||||||
|
|
||||||
|
// set rdi, rsi per contract with JIT_ByRefWriteBarrier
|
||||||
|
add rdi, 8
|
||||||
|
mov rsi, r10
|
||||||
|
ret
|
||||||
|
|
||||||
|
AssignAndMarkCards:
|
||||||
|
mov [rdi], rsi
|
||||||
|
|
||||||
|
// TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
||||||
|
// needs to suspend EE, not sure if skipping mode check would worth that much.
|
||||||
|
PREPARE_EXTERNAL_VAR g_sw_ww_table, rcx
|
||||||
|
mov r11, [rcx]
|
||||||
|
|
||||||
|
// set rdi per contract with JIT_ByRefWriteBarrier
|
||||||
|
mov rax, rdi
|
||||||
|
add rdi, 8
|
||||||
|
|
||||||
|
// check the barrier state. this must be done after the assignment (in program order)
|
||||||
|
// if state == 2 we do not set or dirty cards.
|
||||||
|
cmp r11, 2
|
||||||
|
jne DoCards
|
||||||
|
// set rsi per contract with JIT_ByRefWriteBarrier
|
||||||
|
mov rsi, r10
|
||||||
|
Exit:
|
||||||
|
ret
|
||||||
|
|
||||||
|
DoCards:
|
||||||
|
// if same region, just check if barrier is not concurrent
|
||||||
|
xor rsi, rax
|
||||||
|
shr rsi, 21
|
||||||
|
// set rsi per contract with JIT_ByRefWriteBarrier
|
||||||
|
mov rsi, r10
|
||||||
|
jz CheckConcurrent // same region, just check if barrier is not concurrent
|
||||||
|
|
||||||
|
// if src is in gen2/3 and the barrier is not concurrent we do not need to mark cards
|
||||||
|
cmp dword ptr [rdx + 16], 2
|
||||||
|
jl MarkCards
|
||||||
|
|
||||||
|
CheckConcurrent:
|
||||||
|
// if concurrent, load card location
|
||||||
|
cmp r11, 0
|
||||||
|
je Exit
|
||||||
|
|
||||||
|
MarkCards:
|
||||||
|
// fetch card location for rax (saved rdi)
|
||||||
|
PREPARE_EXTERNAL_VAR g_card_table, r9
|
||||||
|
mov r9, [r9] // fetch the page map
|
||||||
|
mov rdx, rax
|
||||||
|
shr rax, 30
|
||||||
|
mov rax, qword ptr [r9 + rax * 8] // page
|
||||||
|
sub rdx, rax // offset in page
|
||||||
|
mov r8, rdx
|
||||||
|
shr rdx, 9 // card offset
|
||||||
|
shr r8, 20 // group index
|
||||||
|
lea r8, [rax + r8 * 2 + 0x80] // group offset
|
||||||
|
|
||||||
|
// check if concurrent marking is in progress
|
||||||
|
cmp r11, 0
|
||||||
|
jne DirtyCard
|
||||||
|
|
||||||
|
// SETTING CARD
|
||||||
|
SetCard:
|
||||||
|
cmp byte ptr [rax + rdx], 0
|
||||||
|
jne Exit
|
||||||
|
mov byte ptr [rax + rdx], 1
|
||||||
|
SetGroup:
|
||||||
|
cmp byte ptr [r8], 0
|
||||||
|
jne CardSet
|
||||||
|
mov byte ptr [r8], 1
|
||||||
|
SetPage:
|
||||||
|
cmp byte ptr [rax], 0
|
||||||
|
jne CardSet
|
||||||
|
mov byte ptr [rax], 1
|
||||||
|
|
||||||
|
CardSet:
|
||||||
|
// check if concurrent marking is still not in progress
|
||||||
|
cmp qword ptr [rcx], 0
|
||||||
|
jne DirtyCard
|
||||||
|
ret
|
||||||
|
|
||||||
|
// DIRTYING CARD
|
||||||
|
DirtyCard:
|
||||||
|
mov byte ptr [rax + rdx], 4
|
||||||
|
DirtyGroup:
|
||||||
|
cmp byte ptr [r8], 4
|
||||||
|
je Exit
|
||||||
|
mov byte ptr [r8], 4
|
||||||
|
DirtyPage:
|
||||||
|
cmp byte ptr [rax], 4
|
||||||
|
je Exit
|
||||||
|
mov byte ptr [rax], 4
|
||||||
|
ret
|
||||||
|
|
||||||
|
// this is expected to be rare.
|
||||||
|
RecordEscape:
|
||||||
|
|
||||||
|
// 4) check if the source is escaped
|
||||||
|
mov rax, rsi
|
||||||
|
add rax, 8 // escape bit is MT + 1
|
||||||
|
and rax, 0x1FFFFF
|
||||||
|
shr rax, 3
|
||||||
|
bt qword ptr [rdx], rax
|
||||||
|
jb AssignAndMarkCards // source is already escaped.
|
||||||
|
|
||||||
|
// Align rsp
|
||||||
|
mov r9, rsp
|
||||||
|
and rsp, -16
|
||||||
|
sub rsp, 8
|
||||||
|
|
||||||
|
// save rsp, rdi, rsi, rdx and r10 (possibly preadjusted rsi)
|
||||||
|
push r9
|
||||||
|
push rdi
|
||||||
|
push rsi
|
||||||
|
push rdx
|
||||||
|
push r10
|
||||||
|
|
||||||
|
// void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region)
|
||||||
|
call qword ptr [rdx + 8]
|
||||||
|
|
||||||
|
pop r10
|
||||||
|
pop rdx
|
||||||
|
pop rsi
|
||||||
|
pop rdi
|
||||||
|
pop rsp
|
||||||
|
jmp AssignAndMarkCards
|
||||||
|
LEAF_END_MARKED JIT_WriteBarrier, _TEXT
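For readers who are not fluent in the barrier assembly, here is a rough C++ sketch of the decision tree JIT_WriteBarrier above encodes. It is only an illustration: Region, RegionOf and IsExposed are stand-ins inferred from the code, the field layout (owner tag at +0, escape callback at +8) and the 2MB region mask mirror the constants in the assembly, and card marking is elided.

#include <cstdint>

constexpr uintptr_t kRegionMask = 0xFFFFFFFFFFE00000; // 2MB-aligned Satori region

struct Region
{
    uintptr_t ownerTag;                                      // +0, thread tag when thread-local
    void (*escapeFn)(void** dst, void* src, Region* region); // +8, SatoriRegion::EscapeFn
    uint32_t  generation;                                    // +16
};

static Region* RegionOf(void* p) { return (Region*)((uintptr_t)p & kRegionMask); }

// The escape/mark bit string is addressed from the region base, one bit per
// 8-byte granule, like the `bt qword ptr [rdx], rax` test above.
static bool IsExposed(Region* region, void* addr)
{
    uintptr_t bit = ((uintptr_t)addr & ~kRegionMask) >> 3;
    return (((const uint64_t*)region)[bit >> 6] >> (bit & 63)) & 1;
}

// dst is assumed to be a heap location (CheckedEntry already filtered the rest).
void WriteBarrierSketch(void** dst, void* src, uintptr_t currentThreadTag)
{
    Region* srcRegion = RegionOf(src);
    if (src == nullptr)
    {
        *dst = src;                                   // JustAssign: nothing to track
        return;
    }

    if (srcRegion->ownerTag == currentThreadTag)      // 1) we own the source region
    {
        if (RegionOf(dst) == srcRegion &&             // 2) same region
            !IsExposed(srcRegion, dst))               // 3) target not exposed
        {
            *dst = src;                               // JustAssign: stays thread-local
            return;
        }

        if (!IsExposed(srcRegion, (uint8_t*)src + 8)) // 4) source not escaped yet
        {
            srcRegion->escapeFn(dst, src, srcRegion); // RecordEscape
        }
    }

    *dst = src;   // AssignAndMarkCards: card/group/page marking elided in this sketch
}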
|
||||||
|
|
||||||
|
|
||||||
|
// JIT_ByRefWriteBarrier has weird semantics, see usage in StubLinkerX86.cpp
|
||||||
|
//
|
||||||
|
// Entry:
|
||||||
|
// RDI - address of ref-field (assigned to)
|
||||||
|
// RSI - address of the data (source)
|
||||||
|
// Note: RyuJIT assumes that all volatile registers can be trashed by
|
||||||
|
// the CORINFO_HELP_ASSIGN_BYREF helper (i.e. JIT_ByRefWriteBarrier)
|
||||||
|
// except RDI and RSI. This helper uses and defines RDI and RSI, so
|
||||||
|
// they remain as live GC refs or byrefs, and are not killed.
|
||||||
|
// Exit:
|
||||||
|
// RDI, RSI are incremented by SIZEOF(LPVOID)
|
||||||
|
LEAF_ENTRY JIT_ByRefWriteBarrier, _TEXT
|
||||||
|
lea r10, [rsi + 8]
|
||||||
|
mov rsi, [rsi]
|
||||||
|
|
||||||
|
// See if dst is in GCHeap
|
||||||
|
PREPARE_EXTERNAL_VAR g_card_bundle_table, rax // fetch the page byte map
|
||||||
|
mov rax, [rax]
|
||||||
|
|
||||||
|
mov r8, rdi
|
||||||
|
shr r8, 30 // dst page index
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
jne C_FUNC(CheckedEntry)
|
||||||
|
|
||||||
|
NotInHeap_ByRefWriteBarrier:
|
||||||
|
mov [rdi], rsi
|
||||||
|
add rdi, 8
|
||||||
|
mov rsi, r10
|
||||||
|
ret
|
||||||
|
LEAF_END_MARKED JIT_ByRefWriteBarrier, _TEXT
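As a sketch of the contract spelled out above: the by-ref helper copies one object reference from *RSI to *RDI and leaves both registers advanced by a pointer size, so the JIT can emit a straight-line sequence of calls when copying structs that contain GC references. AssignWithBarrier below is only a placeholder for the fall-through into CheckedEntry; this is not the runtime's code.

#include <cstdint>

// Placeholder for falling through into the checked/write barrier above.
static void AssignWithBarrier(void** dst, void* src) { *dst = src; }

// dstCursor/srcCursor model RDI/RSI: both advance by one pointer-sized slot.
void ByRefWriteBarrierSketch(void*** dstCursor, void*** srcCursor)
{
    void* value = **srcCursor;            // mov rsi, [rsi] in the helper
    AssignWithBarrier(*dstCursor, value);
    *dstCursor += 1;                      // RDI += 8
    *srcCursor += 1;                      // RSI += 8
}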
|
||||||
|
|
||||||
|
// Mark start of the code region that we patch at runtime
|
||||||
|
LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
||||||
|
ret
|
||||||
|
LEAF_END JIT_PatchedCodeLast, _TEXT
|
||||||
|
|
||||||
|
#endif // FEATURE_SATORI_GC
|
||||||
|
|
|
@ -16,6 +16,7 @@ ifdef _DEBUG
|
||||||
extern JIT_WriteBarrier_Debug:proc
|
extern JIT_WriteBarrier_Debug:proc
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
ifndef FEATURE_SATORI_GC
|
||||||
|
|
||||||
; Mark start of the code region that we patch at runtime
|
; Mark start of the code region that we patch at runtime
|
||||||
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
||||||
|
@ -199,4 +200,234 @@ LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
||||||
ret
|
ret
|
||||||
LEAF_END JIT_PatchedCodeLast, _TEXT
|
LEAF_END JIT_PatchedCodeLast, _TEXT
|
||||||
|
|
||||||
|
else ;FEATURE_SATORI_GC ##########################################################################
|
||||||
|
|
||||||
|
EXTERN g_card_table:QWORD
|
||||||
|
EXTERN g_card_bundle_table:QWORD
|
||||||
|
EXTERN g_sw_ww_table:QWORD
|
||||||
|
|
||||||
|
ifdef FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
|
||||||
|
EXTERN g_sw_ww_enabled_for_gc_heap:BYTE
|
||||||
|
endif
|
||||||
|
|
||||||
|
; Mark start of the code region that we patch at runtime
|
||||||
|
LEAF_ENTRY JIT_PatchedCodeStart, _TEXT
|
||||||
|
ret
|
||||||
|
LEAF_END JIT_PatchedCodeStart, _TEXT
|
||||||
|
|
||||||
|
; void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
||||||
|
LEAF_ENTRY JIT_CheckedWriteBarrier, _TEXT
|
||||||
|
; See if dst is in GCHeap
|
||||||
|
mov rax, [g_card_bundle_table] ; fetch the page byte map
|
||||||
|
mov r8, rcx
|
||||||
|
shr r8, 30 ; dst page index
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
jne CheckedEntry
|
||||||
|
|
||||||
|
NotInHeap:
|
||||||
|
; See comment above about possible AV
|
||||||
|
mov [rcx], rdx
|
||||||
|
ret
|
||||||
|
LEAF_END_MARKED JIT_CheckedWriteBarrier, _TEXT
|
||||||
|
|
||||||
|
ALTERNATE_ENTRY macro Name
|
||||||
|
|
||||||
|
Name label proc
|
||||||
|
PUBLIC Name
|
||||||
|
endm
|
||||||
|
|
||||||
|
;
|
||||||
|
; rcx - dest address
|
||||||
|
; rdx - object
|
||||||
|
;
|
||||||
|
LEAF_ENTRY JIT_WriteBarrier, _TEXT
|
||||||
|
|
||||||
|
ifdef FEATURE_SATORI_EXTERNAL_OBJECTS
|
||||||
|
; check if src is in heap
|
||||||
|
mov rax, [g_card_bundle_table] ; fetch the page byte map
|
||||||
|
ALTERNATE_ENTRY CheckedEntry
|
||||||
|
mov r8, rdx
|
||||||
|
shr r8, 30 ; src page index
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
je JustAssign ; src not in heap
|
||||||
|
else
|
||||||
|
ALTERNATE_ENTRY CheckedEntry
|
||||||
|
endif
|
||||||
|
|
||||||
|
; check for escaping assignment
|
||||||
|
; 1) check if we own the source region
|
||||||
|
mov r8, rdx
|
||||||
|
and r8, 0FFFFFFFFFFE00000h ; source region
|
||||||
|
|
||||||
|
ifndef FEATURE_SATORI_EXTERNAL_OBJECTS
|
||||||
|
jz JustAssign ; assigning null
|
||||||
|
endif
|
||||||
|
|
||||||
|
mov rax, gs:[30h] ; thread tag, TEB on NT
|
||||||
|
cmp qword ptr [r8], rax
|
||||||
|
jne AssignAndMarkCards ; not local to this thread
|
||||||
|
|
||||||
|
; 2) check if the src and dst are from the same region
|
||||||
|
mov rax, rcx
|
||||||
|
and rax, 0FFFFFFFFFFE00000h ; target aligned to region
|
||||||
|
cmp rax, r8
|
||||||
|
jne RecordEscape ; cross region assignment. definitely escaping
|
||||||
|
|
||||||
|
; 3) check if the target is exposed
|
||||||
|
mov rax, rcx
|
||||||
|
and rax, 01FFFFFh
|
||||||
|
shr rax, 3
|
||||||
|
bt qword ptr [r8], rax
|
||||||
|
jb RecordEscape ; target is exposed. record an escape.
|
||||||
|
|
||||||
|
JustAssign:
|
||||||
|
mov [rcx], rdx ; no card marking, src is not a heap object
|
||||||
|
ret
|
||||||
|
|
||||||
|
AssignAndMarkCards:
|
||||||
|
mov [rcx], rdx
|
||||||
|
|
||||||
|
; TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
||||||
|
; needs to suspend EE, not sure if skipping the mode check would be worth that much.
|
||||||
|
mov r11, qword ptr [g_sw_ww_table]
|
||||||
|
|
||||||
|
; check the barrier state. this must be done after the assignment (in program order)
|
||||||
|
; if state == 2 we do not set or dirty cards.
|
||||||
|
cmp r11, 2h
|
||||||
|
jne DoCards
|
||||||
|
Exit:
|
||||||
|
ret
|
||||||
|
|
||||||
|
DoCards:
|
||||||
|
; if same region, just check if barrier is not concurrent
|
||||||
|
xor rdx, rcx
|
||||||
|
shr rdx, 21
|
||||||
|
jz CheckConcurrent
|
||||||
|
|
||||||
|
; if src is in gen2/3 and the barrier is not concurrent we do not need to mark cards
|
||||||
|
cmp dword ptr [r8 + 16], 2
|
||||||
|
jl MarkCards
|
||||||
|
|
||||||
|
CheckConcurrent:
|
||||||
|
cmp r11, 0h
|
||||||
|
je Exit
|
||||||
|
|
||||||
|
MarkCards:
|
||||||
|
; fetch card location for rcx
|
||||||
|
mov r9 , [g_card_table] ; fetch the page map
|
||||||
|
mov r8, rcx
|
||||||
|
shr rcx, 30
|
||||||
|
mov rax, qword ptr [r9 + rcx * 8] ; page
|
||||||
|
sub r8, rax ; offset in page
|
||||||
|
mov rdx,r8
|
||||||
|
shr r8, 9 ; card offset
|
||||||
|
shr rdx, 20 ; group index
|
||||||
|
lea rdx, [rax + rdx * 2 + 80h] ; group offset
|
||||||
|
|
||||||
|
; check if concurrent marking is in progress
|
||||||
|
cmp r11, 0h
|
||||||
|
jne DirtyCard
|
||||||
|
|
||||||
|
; SETTING CARD FOR RCX
|
||||||
|
SetCard:
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
jne Exit
|
||||||
|
mov byte ptr [rax + r8], 1
|
||||||
|
SetGroup:
|
||||||
|
cmp byte ptr [rdx], 0
|
||||||
|
jne CardSet
|
||||||
|
mov byte ptr [rdx], 1
|
||||||
|
SetPage:
|
||||||
|
cmp byte ptr [rax], 0
|
||||||
|
jne CardSet
|
||||||
|
mov byte ptr [rax], 1
|
||||||
|
|
||||||
|
CardSet:
|
||||||
|
; check if concurrent marking is still not in progress
|
||||||
|
cmp qword ptr [g_sw_ww_table], 0h
|
||||||
|
jne DirtyCard
|
||||||
|
ret
|
||||||
|
|
||||||
|
; DIRTYING CARD FOR RCX
|
||||||
|
DirtyCard:
|
||||||
|
mov byte ptr [rax + r8], 4
|
||||||
|
DirtyGroup:
|
||||||
|
cmp byte ptr [rdx], 4
|
||||||
|
je Exit
|
||||||
|
mov byte ptr [rdx], 4
|
||||||
|
DirtyPage:
|
||||||
|
cmp byte ptr [rax], 4
|
||||||
|
je Exit
|
||||||
|
mov byte ptr [rax], 4
|
||||||
|
ret
|
||||||
|
|
||||||
|
; this is expected to be rare.
|
||||||
|
RecordEscape:
|
||||||
|
|
||||||
|
; 4) check if the source is escaped
|
||||||
|
mov rax, rdx
|
||||||
|
add rax, 8 ; escape bit is MT + 1
|
||||||
|
and rax, 01FFFFFh
|
||||||
|
shr rax, 3
|
||||||
|
bt qword ptr [r8], rax
|
||||||
|
jb AssignAndMarkCards ; source is already escaped.
|
||||||
|
|
||||||
|
; Align rsp
|
||||||
|
mov r9, rsp
|
||||||
|
and rsp, -16
|
||||||
|
|
||||||
|
; save rsp, rcx, rdx, r8 and have enough stack for the callee
|
||||||
|
push r9
|
||||||
|
push rcx
|
||||||
|
push rdx
|
||||||
|
push r8
|
||||||
|
sub rsp, 20h
|
||||||
|
|
||||||
|
; void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region)
|
||||||
|
call qword ptr [r8 + 8]
|
||||||
|
|
||||||
|
add rsp, 20h
|
||||||
|
pop r8
|
||||||
|
pop rdx
|
||||||
|
pop rcx
|
||||||
|
pop rsp
|
||||||
|
jmp AssignAndMarkCards
|
||||||
|
LEAF_END_MARKED JIT_WriteBarrier, _TEXT
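The MarkCards block above packs the card bookkeeping into a few shifts; the C++ sketch below spells out the same arithmetic under the constants visible in the assembly: the page map is indexed by address >> 30 (1GB pages), each card byte covers 512 bytes, each group entry covers 1MB and the group bytes start at offset 0x80 of the page, and the page byte itself sits at offset 0. The names and the bool parameter are illustrative, and the re-check of g_sw_ww_table after setting cards is omitted.

#include <cstdint>

// pageMap stands in for g_card_table: page index -> page base address.
static void MarkCardsSketch(uint8_t** pageMap, void* dst, bool concurrentMarking)
{
    uintptr_t addr   = (uintptr_t)dst;
    uint8_t*  page   = pageMap[addr >> 30];               // 1GB page
    uintptr_t offset = addr - (uintptr_t)page;             // offset in page
    uint8_t*  card   = page + (offset >> 9);               // one card byte per 512 bytes
    uint8_t*  group  = page + 0x80 + (offset >> 20) * 2;   // one group entry per 1MB

    if (!concurrentMarking)
    {
        // SETTING: stop as soon as a level is already marked.
        if (*card) return;
        *card = 1;
        if (*group == 0)
        {
            *group = 1;
            if (*page == 0) *page = 1;
        }
        return;
    }

    // DIRTYING: concurrent marking needs the stronger "dirty" (4) state.
    *card = 4;
    if (*group != 4)
    {
        *group = 4;
        if (*page != 4) *page = 4;
    }
}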
|
||||||
|
|
||||||
|
; JIT_ByRefWriteBarrier has weird semantics, see usage in StubLinkerX86.cpp
|
||||||
|
;
|
||||||
|
; Entry:
|
||||||
|
; RDI - address of ref-field (assigned to)
|
||||||
|
; RSI - address of the data (source)
|
||||||
|
; Note: RyuJIT assumes that all volatile registers can be trashed by
|
||||||
|
; the CORINFO_HELP_ASSIGN_BYREF helper (i.e. JIT_ByRefWriteBarrier)
|
||||||
|
; except RDI and RSI. This helper uses and defines RDI and RSI, so
|
||||||
|
; they remain as live GC refs or byrefs, and are not killed.
|
||||||
|
; Exit:
|
||||||
|
; RDI, RSI are incremented by SIZEOF(LPVOID)
|
||||||
|
LEAF_ENTRY JIT_ByRefWriteBarrier, _TEXT
|
||||||
|
mov rcx, rdi
|
||||||
|
mov rdx, [rsi]
|
||||||
|
add rdi, 8h
|
||||||
|
add rsi, 8h
|
||||||
|
|
||||||
|
; See if dst is in GCHeap
|
||||||
|
mov rax, [g_card_bundle_table] ; fetch the page byte map
|
||||||
|
mov r8, rcx
|
||||||
|
shr r8, 30 ; dst page index
|
||||||
|
cmp byte ptr [rax + r8], 0
|
||||||
|
jne CheckedEntry
|
||||||
|
|
||||||
|
NotInHeap:
|
||||||
|
mov [rcx], rdx
|
||||||
|
ret
|
||||||
|
LEAF_END_MARKED JIT_ByRefWriteBarrier, _TEXT
|
||||||
|
|
||||||
|
; Mark start of the code region that we patch at runtime
|
||||||
|
LEAF_ENTRY JIT_PatchedCodeLast, _TEXT
|
||||||
|
ret
|
||||||
|
LEAF_END JIT_PatchedCodeLast, _TEXT
|
||||||
|
|
||||||
|
endif ; FEATURE_SATORI_GC
|
||||||
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -52,6 +52,8 @@ WRITE_BARRIER_ENTRY JIT_ByRefWriteBarrier
|
||||||
|
|
||||||
WRITE_BARRIER_END JIT_ByRefWriteBarrier
|
WRITE_BARRIER_END JIT_ByRefWriteBarrier
|
||||||
|
|
||||||
|
#ifndef FEATURE_SATORI_GC
|
||||||
|
|
||||||
//-----------------------------------------------------------------------------
|
//-----------------------------------------------------------------------------
|
||||||
// Simple WriteBarriers
|
// Simple WriteBarriers
|
||||||
// void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
// void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
||||||
|
@ -201,6 +203,222 @@ LOCAL_LABEL(Exit):
|
||||||
ret lr
|
ret lr
|
||||||
WRITE_BARRIER_END JIT_WriteBarrier
|
WRITE_BARRIER_END JIT_WriteBarrier
|
||||||
|
|
||||||
|
#else // FEATURE_SATORI_GC
|
||||||
|
|
||||||
|
//-----------------------------------------------------------------------------
|
||||||
|
// Simple WriteBarriers
|
||||||
|
// void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
||||||
|
// On entry:
|
||||||
|
// x14 : the destination address (LHS of the assignment)
|
||||||
|
// x15 : the object reference (RHS of the assignment)
|
||||||
|
//
|
||||||
|
// On exit:
|
||||||
|
// x12 : trashed
|
||||||
|
// x14 : trashed (incremented by 8 to implement JIT_ByRefWriteBarrier contract)
|
||||||
|
// x15 : trashed
|
||||||
|
// x16 : trashed (ip0)
|
||||||
|
// x17 : trashed (ip1)
|
||||||
|
//
|
||||||
|
WRITE_BARRIER_ENTRY JIT_CheckedWriteBarrier
|
||||||
|
// See if dst is in GCHeap
|
||||||
|
ldr x16, LOCAL_LABEL(wbs_card_bundle_table)
|
||||||
|
lsr x17, x14, #30 // dst page index
|
||||||
|
ldrb w12, [x16, x17]
|
||||||
|
cbz x12 , LOCAL_LABEL(NotInHeap)
|
||||||
|
b C_FUNC(CheckedEntry)
|
||||||
|
|
||||||
|
LOCAL_LABEL(NotInHeap):
|
||||||
|
str x15, [x14], #8
|
||||||
|
ret lr
|
||||||
|
WRITE_BARRIER_END JIT_CheckedWriteBarrier
|
||||||
|
|
||||||
|
// void JIT_WriteBarrier(Object** dst, Object* src)
|
||||||
|
// On entry:
|
||||||
|
// x14 : the destination address (LHS of the assignment)
|
||||||
|
// x15 : the object reference (RHS of the assignment)
|
||||||
|
//
|
||||||
|
// On exit:
|
||||||
|
// x12 : trashed
|
||||||
|
// x14 : trashed (incremented by 8 to implement JIT_ByRefWriteBarrier contract)
|
||||||
|
// x15 : trashed
|
||||||
|
// x16 : trashed (ip0)
|
||||||
|
// x17 : trashed (ip1)
|
||||||
|
//
|
||||||
|
WRITE_BARRIER_ENTRY JIT_WriteBarrier
|
||||||
|
// check for escaping assignment
|
||||||
|
// 1) check if we own the source region
|
||||||
|
#ifdef FEATURE_SATORI_EXTERNAL_OBJECTS
|
||||||
|
ldr x16, LOCAL_LABEL(wbs_card_bundle_table)
|
||||||
|
C_FUNC(CheckedEntry):
|
||||||
|
lsr x17, x15, #30 // source page index
|
||||||
|
ldrb w12, [x16, x17]
|
||||||
|
cbz x12, LOCAL_LABEL(JustAssign) // null or external (immutable) object
|
||||||
|
#else
|
||||||
|
C_FUNC(CheckedEntry):
|
||||||
|
cbz x15, LOCAL_LABEL(JustAssign) // assigning null
|
||||||
|
#endif
|
||||||
|
and x16, x15, #0xFFFFFFFFFFE00000 // source region
|
||||||
|
ldr x12, [x16] // region tag
|
||||||
|
#ifdef TARGET_OSX
|
||||||
|
mrs x17, TPIDRRO_EL0
|
||||||
|
and x17, x17, #-8 // thread tag on OSX
|
||||||
|
#else
|
||||||
|
mrs x17, TPIDR_EL0 // thread tag on other Unix
|
||||||
|
#endif
|
||||||
|
cmp x12, x17
|
||||||
|
bne LOCAL_LABEL(AssignAndMarkCards) // not local to this thread
|
||||||
|
|
||||||
|
// 2) check if the src and dst are from the same region
|
||||||
|
and x12, x14, #0xFFFFFFFFFFE00000 // target aligned to region
|
||||||
|
cmp x12, x16
|
||||||
|
bne LOCAL_LABEL(RecordEscape) // cross region assignment. definitely escaping
|
||||||
|
|
||||||
|
// 3) check if the target is exposed
|
||||||
|
ubfx x17, x14,#9,#12 // word index = (dst >> 9) & 0x1FFFFF
|
||||||
|
ldr x17, [x16, x17, lsl #3] // mark word = [region + index * 8]
|
||||||
|
lsr x12, x14, #3 // bit = (dst >> 3) [& 63]
|
||||||
|
lsr x17, x17, x12
|
||||||
|
tbnz x17, #0, LOCAL_LABEL(RecordEscape) // target is exposed. record an escape.
|
||||||
|
|
||||||
|
// UNORDERED! assignment of unescaped, null or external (immutable) object
|
||||||
|
LOCAL_LABEL(JustAssign):
|
||||||
|
str x15, [x14], #8
|
||||||
|
ret lr
|
||||||
|
|
||||||
|
LOCAL_LABEL(AssignAndMarkCards):
|
||||||
|
stlr x15, [x14]
|
||||||
|
|
||||||
|
// TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
||||||
|
// needs to suspend EE, not sure if skipping the mode check would be worth that much.
|
||||||
|
ldr x17, LOCAL_LABEL(wbs_sw_ww_table)
|
||||||
|
// check the barrier state. this must be done after the assignment (in program order)
|
||||||
|
// if state == 2 we do not set or dirty cards.
|
||||||
|
tbz x17, #1, LOCAL_LABEL(DoCards)
|
||||||
|
|
||||||
|
LOCAL_LABEL(ExitNoCards):
|
||||||
|
add x14, x14, 8
|
||||||
|
ret lr
|
||||||
|
|
||||||
|
LOCAL_LABEL(DoCards):
|
||||||
|
// if same region, just check if barrier is not concurrent
|
||||||
|
and x12, x14, #0xFFFFFFFFFFE00000 // target aligned to region
|
||||||
|
cmp x12, x16
|
||||||
|
beq LOCAL_LABEL(CheckConcurrent) // same region, just check if barrier is not concurrent
|
||||||
|
|
||||||
|
// if src is in gen2/3 and the barrier is not concurrent we do not need to mark cards
|
||||||
|
ldr w12, [x16, 16] // source region + 16 -> generation
|
||||||
|
tbz x12, #1, LOCAL_LABEL(MarkCards)
|
||||||
|
|
||||||
|
LOCAL_LABEL(CheckConcurrent):
|
||||||
|
// if not concurrent, exit
|
||||||
|
cbz x17, LOCAL_LABEL(ExitNoCards)
|
||||||
|
|
||||||
|
LOCAL_LABEL(MarkCards):
|
||||||
|
// need a couple of temps. Save them before using.
|
||||||
|
stp x2, x3, [sp, -16]!
|
||||||
|
|
||||||
|
// fetch card location for x14
|
||||||
|
ldr x12, LOCAL_LABEL(wbs_card_table) // fetch the page map
|
||||||
|
lsr x16, x14, #30
|
||||||
|
ldr x16, [x12, x16, lsl #3] // page
|
||||||
|
sub x2, x14, x16 // offset in page
|
||||||
|
lsr x15, x2, #20 // group index
|
||||||
|
lsr x2, x2, #9 // card offset
|
||||||
|
lsl x15, x15, #1 // group offset (index * 2)
|
||||||
|
|
||||||
|
// check if concurrent marking is in progress
|
||||||
|
cbnz x17, LOCAL_LABEL(DirtyCard)
|
||||||
|
|
||||||
|
// SETTING CARD FOR X14
|
||||||
|
LOCAL_LABEL(SetCard):
|
||||||
|
ldrb w3, [x16, x2]
|
||||||
|
cbnz w3, LOCAL_LABEL(Exit)
|
||||||
|
mov w17, #1
|
||||||
|
strb w17, [x16, x2]
|
||||||
|
LOCAL_LABEL(SetGroup):
|
||||||
|
add x12, x16, #0x80
|
||||||
|
ldrb w3, [x12, x15]
|
||||||
|
cbnz w3, LOCAL_LABEL(CardSet)
|
||||||
|
strb w17, [x12, x15]
|
||||||
|
LOCAL_LABEL(SetPage):
|
||||||
|
ldrb w3, [x16]
|
||||||
|
cbnz w3, LOCAL_LABEL(CardSet)
|
||||||
|
strb w17, [x16]
|
||||||
|
|
||||||
|
LOCAL_LABEL(CardSet):
|
||||||
|
// check if concurrent marking is still not in progress
|
||||||
|
ldr x12, LOCAL_LABEL(wbs_sw_ww_table) // !wbs_sw_ww_table -> !concurrent
|
||||||
|
cbnz x12, LOCAL_LABEL(DirtyCard)
|
||||||
|
|
||||||
|
LOCAL_LABEL(Exit):
|
||||||
|
ldp x2, x3, [sp], 16
|
||||||
|
add x14, x14, 8
|
||||||
|
ret lr
|
||||||
|
|
||||||
|
// DIRTYING CARD FOR X14
|
||||||
|
LOCAL_LABEL(DirtyCard):
|
||||||
|
mov w17, #4
|
||||||
|
add x2, x2, x16
|
||||||
|
// must be after the field write to allow concurrent clean
|
||||||
|
stlrb w17, [x2]
|
||||||
|
LOCAL_LABEL(DirtyGroup):
|
||||||
|
add x12, x16, #0x80
|
||||||
|
ldrb w3, [x12, x15]
|
||||||
|
tbnz w3, #2, LOCAL_LABEL(Exit)
|
||||||
|
strb w17, [x12, x15]
|
||||||
|
LOCAL_LABEL(DirtyPage):
|
||||||
|
ldrb w3, [x16]
|
||||||
|
tbnz w3, #2, LOCAL_LABEL(Exit)
|
||||||
|
strb w17, [x16]
|
||||||
|
b LOCAL_LABEL(Exit)
|
||||||
|
|
||||||
|
// this is expected to be rare.
|
||||||
|
LOCAL_LABEL(RecordEscape):
|
||||||
|
|
||||||
|
// 4) check if the source is escaped (x16 has source region)
|
||||||
|
add x12, x15, #8 // escape bit is MT + 1
|
||||||
|
ubfx x17, x12, #9,#12 // word index = (dst >> 9) & 0x1FFFFF
|
||||||
|
ldr x17, [x16, x17, lsl #3] // mark word = [region + index * 8]
|
||||||
|
lsr x12, x12, #3 // bit = (dst >> 3) [& 63]
|
||||||
|
lsr x17, x17, x12
|
||||||
|
tbnz x17, #0, LOCAL_LABEL(AssignAndMarkCards) // source is already escaped.
|
||||||
|
|
||||||
|
// because of the barrier call convention
|
||||||
|
// we need to preserve caller-saved x0 through x15 and x29/x30
|
||||||
|
|
||||||
|
stp x29,x30, [sp, -16 * 9]!
|
||||||
|
stp x0, x1, [sp, 16 * 1]
|
||||||
|
stp x2, x3, [sp, 16 * 2]
|
||||||
|
stp x4, x5, [sp, 16 * 3]
|
||||||
|
stp x6, x7, [sp, 16 * 4]
|
||||||
|
stp x8, x9, [sp, 16 * 5]
|
||||||
|
stp x10,x11, [sp, 16 * 6]
|
||||||
|
stp x12,x13, [sp, 16 * 7]
|
||||||
|
stp x14,x15, [sp, 16 * 8]
|
||||||
|
|
||||||
|
// void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region)
|
||||||
|
// mov x0, x14 EscapeFn does not use dst, it is just to avoid arg shuffle on x64
|
||||||
|
mov x1, x15
|
||||||
|
mov x2, x16 // source region
|
||||||
|
ldr x12, [x16, #8] // EscapeFn address
|
||||||
|
blr x12
|
||||||
|
|
||||||
|
ldp x0, x1, [sp, 16 * 1]
|
||||||
|
ldp x2, x3, [sp, 16 * 2]
|
||||||
|
ldp x4, x5, [sp, 16 * 3]
|
||||||
|
ldp x6, x7, [sp, 16 * 4]
|
||||||
|
ldp x8, x9, [sp, 16 * 5]
|
||||||
|
ldp x10,x11, [sp, 16 * 6]
|
||||||
|
ldp x12,x13, [sp, 16 * 7]
|
||||||
|
ldp x14,x15, [sp, 16 * 8]
|
||||||
|
ldp x29,x30, [sp], 16 * 9
|
||||||
|
|
||||||
|
and x16, x15, #0xFFFFFFFFFFE00000 // source region
|
||||||
|
b LOCAL_LABEL(AssignAndMarkCards)
|
||||||
|
WRITE_BARRIER_END JIT_WriteBarrier
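On arm64 the "is the target exposed" test reads a whole 64-bit mark word rather than using a bit-string instruction; the sketch below restates the ubfx/ldr/lsr/tbnz sequence in C++. Constants mirror the assembly (2MB regions, one bit per 8-byte granule, mark words addressed from the region base); the function name is made up for the illustration.

#include <cstdint>

static bool IsExposedArm64Style(const uint64_t* regionBase, uintptr_t dst)
{
    uintptr_t wordIndex = (dst >> 9) & 0xFFF;    // ubfx x17, x14, #9, #12
    uint64_t  markWord  = regionBase[wordIndex]; // ldr x17, [x16, x17, lsl #3]
    uintptr_t bitInWord = (dst >> 3) & 63;       // lsr x12, x14, #3 (shift is mod 64)
    return (markWord >> bitInWord) & 1;          // tbnz x17, #0, RecordEscape
}

The RecordEscape block applies the same computation to src + 8, since the escape bit sits one granule past the method table pointer.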
|
||||||
|
|
||||||
|
#endif // FEATURE_SATORI_GC
|
||||||
|
|
||||||
// Begin patchable literal pool
|
// Begin patchable literal pool
|
||||||
.balign 64 // Align to power of two at least as big as patchable literal pool so that it fits optimally in cache line
|
.balign 64 // Align to power of two at least as big as patchable literal pool so that it fits optimally in cache line
|
||||||
WRITE_BARRIER_ENTRY JIT_WriteBarrier_Table
|
WRITE_BARRIER_ENTRY JIT_WriteBarrier_Table
|
||||||
|
|
|
@ -86,6 +86,8 @@ wbs_GCShadowEnd
|
||||||
|
|
||||||
WRITE_BARRIER_END JIT_ByRefWriteBarrier
|
WRITE_BARRIER_END JIT_ByRefWriteBarrier
|
||||||
|
|
||||||
|
#ifndef FEATURE_SATORI_GC
|
||||||
|
|
||||||
;-----------------------------------------------------------------------------
|
;-----------------------------------------------------------------------------
|
||||||
; Simple WriteBarriers
|
; Simple WriteBarriers
|
||||||
; void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
; void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
||||||
|
@ -232,6 +234,215 @@ Exit
|
||||||
ret lr
|
ret lr
|
||||||
WRITE_BARRIER_END JIT_WriteBarrier
|
WRITE_BARRIER_END JIT_WriteBarrier
|
||||||
|
|
||||||
|
#else // FEATURE_SATORI_GC
|
||||||
|
|
||||||
|
;-----------------------------------------------------------------------------
|
||||||
|
; Simple WriteBarriers
|
||||||
|
; void JIT_CheckedWriteBarrier(Object** dst, Object* src)
|
||||||
|
; On entry:
|
||||||
|
; x14 : the destination address (LHS of the assignment)
|
||||||
|
; x15 : the object reference (RHS of the assignment)
|
||||||
|
;
|
||||||
|
; On exit:
|
||||||
|
; x12 : trashed
|
||||||
|
; x14 : trashed (incremented by 8 to implement JIT_ByRefWriteBarrier contract)
|
||||||
|
; x15 : trashed
|
||||||
|
; x17 : trashed (ip1) if FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
|
||||||
|
;
|
||||||
|
WRITE_BARRIER_ENTRY JIT_CheckedWriteBarrier
|
||||||
|
; See if dst is in GCHeap
|
||||||
|
ldr x16, wbs_card_bundle_table
|
||||||
|
lsr x17, x14, #30 ; dst page index
|
||||||
|
ldrb w12, [x16, x17]
|
||||||
|
cbnz x12, CheckedEntry
|
||||||
|
|
||||||
|
NotInHeap
|
||||||
|
str x15, [x14], #8
|
||||||
|
ret lr
|
||||||
|
WRITE_BARRIER_END JIT_CheckedWriteBarrier
|
||||||
|
|
||||||
|
; void JIT_WriteBarrier(Object** dst, Object* src)
|
||||||
|
; On entry:
|
||||||
|
; x14 : the destination address (LHS of the assignment)
|
||||||
|
; x15 : the object reference (RHS of the assignment)
|
||||||
|
;
|
||||||
|
; On exit:
|
||||||
|
; x12 : trashed
|
||||||
|
; x14 : trashed (incremented by 8 to implement JIT_ByRefWriteBarrier contract)
|
||||||
|
; x15 : trashed
|
||||||
|
; x16 : trashed (ip0)
|
||||||
|
; x17 : trashed (ip1)
|
||||||
|
;
|
||||||
|
WRITE_BARRIER_ENTRY JIT_WriteBarrier
|
||||||
|
; check for escaping assignment
|
||||||
|
; 1) check if we own the source region
|
||||||
|
#ifdef FEATURE_SATORI_EXTERNAL_OBJECTS
|
||||||
|
ldr x16, wbs_card_bundle_table
|
||||||
|
CheckedEntry
|
||||||
|
lsr x17, x15, #30 ; source page index
|
||||||
|
ldrb w12, [x16, x17]
|
||||||
|
cbz x12, JustAssign ; null or external (immutable) object
|
||||||
|
#else
|
||||||
|
CheckedEntry
|
||||||
|
cbz x15, JustAssign ; assigning null
|
||||||
|
#endif
|
||||||
|
and x16, x15, #0xFFFFFFFFFFE00000 ; source region
|
||||||
|
ldr x12, [x16] ; region tag
|
||||||
|
cmp x12, x18 ; x18 - TEB
|
||||||
|
bne AssignAndMarkCards ; not local to this thread
|
||||||
|
|
||||||
|
; 2) check if the src and dst are from the same region
|
||||||
|
and x12, x14, #0xFFFFFFFFFFE00000 ; target aligned to region
|
||||||
|
cmp x12, x16
|
||||||
|
bne RecordEscape ; cross region assignment. definitely escaping
|
||||||
|
|
||||||
|
; 3) check if the target is exposed
|
||||||
|
ubfx x17, x14,#9,#12 ; word index = (dst >> 9) & 0x1FFFFF
|
||||||
|
ldr x17, [x16, x17, lsl #3] ; mark word = [region + index * 8]
|
||||||
|
lsr x12, x14, #3 ; bit = (dst >> 3) [& 63]
|
||||||
|
lsr x17, x17, x12
|
||||||
|
tbnz x17, #0, RecordEscape ; target is exposed. record an escape.
|
||||||
|
|
||||||
|
; UNORDERED! assignment of unescaped, null or external (immutable) object
|
||||||
|
JustAssign
|
||||||
|
str x15, [x14], #8
|
||||||
|
ret lr
|
||||||
|
|
||||||
|
AssignAndMarkCards
|
||||||
|
stlr x15, [x14]
|
||||||
|
|
||||||
|
; TUNING: barriers in different modes could be separate pieces of code, but barrier switch
|
||||||
|
; needs to suspend EE, not sure if skipping the mode check would be worth that much.
|
||||||
|
ldr x17, wbs_sw_ww_table
|
||||||
|
; check the barrier state. this must be done after the assignment (in program order)
|
||||||
|
; if state == 2 we do not set or dirty cards.
|
||||||
|
tbz x17, #1, DoCards
|
||||||
|
|
||||||
|
ExitNoCards
|
||||||
|
add x14, x14, 8
|
||||||
|
ret lr
|
||||||
|
|
||||||
|
DoCards
|
||||||
|
; if same region, just check if barrier is not concurrent
|
||||||
|
and x12, x14, #0xFFFFFFFFFFE00000 ; target aligned to region
|
||||||
|
cmp x12, x16
|
||||||
|
beq CheckConcurrent ; same region, just check if barrier is not concurrent
|
||||||
|
|
||||||
|
; if src is in gen2/3 and the barrier is not concurrent we do not need to mark cards
|
||||||
|
ldr w12, [x16, 16] ; source region + 16 -> generation
|
||||||
|
tbz x12, #1, MarkCards
|
||||||
|
|
||||||
|
CheckConcurrent
|
||||||
|
; if not concurrent, exit
|
||||||
|
cbz x17, ExitNoCards
|
||||||
|
|
||||||
|
MarkCards
|
||||||
|
; need a couple of temps. Save them before using.
|
||||||
|
stp x2, x3, [sp, -16]!
|
||||||
|
|
||||||
|
; fetch card location for x14
|
||||||
|
ldr x12, wbs_card_table ; fetch the page map
|
||||||
|
lsr x16, x14, #30
|
||||||
|
ldr x16, [x12, x16, lsl #3] ; page
|
||||||
|
sub x2, x14, x16 ; offset in page
|
||||||
|
lsr x15, x2, #20 ; group index
|
||||||
|
lsr x2, x2, #9 ; card offset
|
||||||
|
lsl x15, x15, #1 ; group offset (index * 2)
|
||||||
|
|
||||||
|
; check if concurrent marking is in progress
|
||||||
|
cbnz x17, DirtyCard
|
||||||
|
|
||||||
|
; SETTING CARD FOR X14
|
||||||
|
SetCard
|
||||||
|
ldrb w3, [x16, x2]
|
||||||
|
cbnz w3, Exit
|
||||||
|
mov w17, #1
|
||||||
|
strb w17, [x16, x2]
|
||||||
|
SetGroup
|
||||||
|
add x12, x16, #0x80
|
||||||
|
ldrb w3, [x12, x15]
|
||||||
|
cbnz w3, CardSet
|
||||||
|
strb w17, [x12, x15]
|
||||||
|
SetPage
|
||||||
|
ldrb w3, [x16]
|
||||||
|
cbnz w3, CardSet
|
||||||
|
strb w17, [x16]
|
||||||
|
|
||||||
|
CardSet
|
||||||
|
; check if concurrent marking is still not in progress
|
||||||
|
ldr x12, wbs_sw_ww_table ; !wbs_sw_ww_table -> !concurrent
|
||||||
|
cbnz x12, DirtyCard
|
||||||
|
|
||||||
|
Exit
|
||||||
|
ldp x2, x3, [sp], 16
|
||||||
|
add x14, x14, 8
|
||||||
|
ret lr
|
||||||
|
|
||||||
|
; DIRTYING CARD FOR X14
|
||||||
|
DirtyCard
|
||||||
|
mov w17, #4
|
||||||
|
add x2, x2, x16
|
||||||
|
; must be after the field write to allow concurrent clean
|
||||||
|
stlrb w17, [x2]
|
||||||
|
DirtyGroup
|
||||||
|
add x12, x16, #0x80
|
||||||
|
ldrb w3, [x12, x15]
|
||||||
|
tbnz w3, #2, Exit
|
||||||
|
strb w17, [x12, x15]
|
||||||
|
DirtyPage
|
||||||
|
ldrb w3, [x16]
|
||||||
|
tbnz w3, #2, Exit
|
||||||
|
strb w17, [x16]
|
||||||
|
b Exit
|
||||||
|
|
||||||
|
; this is expected to be rare.
|
||||||
|
RecordEscape
|
||||||
|
|
||||||
|
; 4) check if the source is escaped (x16 has source region)
|
||||||
|
add x12, x15, #8 ; escape bit is MT + 1
|
||||||
|
ubfx x17, x12, #9,#12 ; word index = (dst >> 9) & 0x1FFFFF
|
||||||
|
ldr x17, [x16, x17, lsl #3] ; mark word = [region + index * 8]
|
||||||
|
lsr x12, x12, #3 ; bit = (dst >> 3) [& 63]
|
||||||
|
lsr x17, x17, x12
|
||||||
|
tbnz x17, #0, AssignAndMarkCards ; source is already escaped.
|
||||||
|
|
||||||
|
; because of the barrier call convention
|
||||||
|
; we need to preserve caller-saved x0 through x15 and x29/x30
|
||||||
|
|
||||||
|
stp x29,x30, [sp, -16 * 9]!
|
||||||
|
stp x0, x1, [sp, 16 * 1]
|
||||||
|
stp x2, x3, [sp, 16 * 2]
|
||||||
|
stp x4, x5, [sp, 16 * 3]
|
||||||
|
stp x6, x7, [sp, 16 * 4]
|
||||||
|
stp x8, x9, [sp, 16 * 5]
|
||||||
|
stp x10,x11, [sp, 16 * 6]
|
||||||
|
stp x12,x13, [sp, 16 * 7]
|
||||||
|
stp x14,x15, [sp, 16 * 8]
|
||||||
|
|
||||||
|
; void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region)
|
||||||
|
; mov x0, x14 EscapeFn does not use dst, it is just to avoid arg shuffle on x64
|
||||||
|
mov x1, x15
|
||||||
|
mov x2, x16 ; source region
|
||||||
|
ldr x12, [x16, #8] ; EscapeFn address
|
||||||
|
blr x12
|
||||||
|
|
||||||
|
ldp x0, x1, [sp, 16 * 1]
|
||||||
|
ldp x2, x3, [sp, 16 * 2]
|
||||||
|
ldp x4, x5, [sp, 16 * 3]
|
||||||
|
ldp x6, x7, [sp, 16 * 4]
|
||||||
|
ldp x8, x9, [sp, 16 * 5]
|
||||||
|
ldp x10,x11, [sp, 16 * 6]
|
||||||
|
ldp x12,x13, [sp, 16 * 7]
|
||||||
|
ldp x14,x15, [sp, 16 * 8]
|
||||||
|
ldp x29,x30, [sp], 16 * 9
|
||||||
|
|
||||||
|
and x16, x15, #0xFFFFFFFFFFE00000 ; source region
|
||||||
|
b AssignAndMarkCards
|
||||||
|
WRITE_BARRIER_END JIT_WriteBarrier
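Both arm64 variants reach the escape path through a function pointer stored at offset 8 of the source region, with the signature quoted in the comments: void SatoriRegion::EscapeFn(SatoriObject** dst, SatoriObject* src, SatoriRegion* region). The sketch below shows that dispatch in C++; the stub types and field offsets are assumptions read off the assembly, and the stp/ldp bracketing in the real code exists because the barrier runs under a convention where x0-x15 must appear untouched to the JIT-compiled caller.

#include <cstdint>

struct SatoriObjectStub;

struct SatoriRegionStub
{
    uintptr_t ownerTag;                                   // +0
    void (*escapeFn)(SatoriObjectStub** dst,              // +8, SatoriRegion::EscapeFn
                     SatoriObjectStub*  src,
                     SatoriRegionStub*  region);
    uint32_t  generation;                                 // +16
};

static void RecordEscapeSketch(SatoriObjectStub** dst, SatoriObjectStub* src)
{
    auto* region = (SatoriRegionStub*)((uintptr_t)src & 0xFFFFFFFFFFE00000);
    // In the assembly this call is bracketed by saving/restoring x0-x15 and
    // x29/x30 (or the rsp/rcx/rdx/r8 pushes on x64) so callers see all their
    // registers preserved; afterwards the barrier re-derives the source
    // region and resumes at AssignAndMarkCards.
    region->escapeFn(dst, src, region);
}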
|
||||||
|
|
||||||
|
#endif // FEATURE_SATORI_GC
|
||||||
|
|
||||||
|
|
||||||
; ------------------------------------------------------------------
|
; ------------------------------------------------------------------
|
||||||
; End of the writeable code region
|
; End of the writeable code region
|
||||||
LEAF_ENTRY JIT_PatchedCodeLast
|
LEAF_ENTRY JIT_PatchedCodeLast
|
||||||
|
|
|
@ -1734,8 +1734,7 @@ static void RuntimeThreadShutdown(void* thread)
|
||||||
// For case where thread calls ExitThread directly, we need to reset the
|
// For case where thread calls ExitThread directly, we need to reset the
|
||||||
// frame pointer. Otherwise stackwalk would AV. We need to do it in cooperative mode.
|
// frame pointer. Otherwise stackwalk would AV. We need to do it in cooperative mode.
|
||||||
// We need to set m_GCOnTransitionsOK so this thread won't trigger GC when toggle GC mode
|
// We need to set m_GCOnTransitionsOK so this thread won't trigger GC when toggle GC mode
|
||||||
// TODO: Satori: Is the check for g_fForbidEnterEE needed?
|
if (pThread->m_pFrame != FRAME_TOP)
|
||||||
if (thread->m_pFrame != FRAME_TOP && !g_fForbidEnterEE)
|
|
||||||
{
|
{
|
||||||
#ifdef _DEBUG
|
#ifdef _DEBUG
|
||||||
pThread->m_GCOnTransitionsOK = FALSE;
|
pThread->m_GCOnTransitionsOK = FALSE;
|
||||||
|
|
|
@ -959,6 +959,10 @@ FCIMPL0(INT64, GCInterface::GetTotalAllocatedBytesApproximate)
|
||||||
{
|
{
|
||||||
FCALL_CONTRACT;
|
FCALL_CONTRACT;
|
||||||
|
|
||||||
|
#if FEATURE_SATORI_GC
|
||||||
|
return GCHeapUtilities::GetGCHeap()->GetTotalAllocatedBytes();
|
||||||
|
#else
|
||||||
|
|
||||||
#ifdef TARGET_64BIT
|
#ifdef TARGET_64BIT
|
||||||
uint64_t unused_bytes = Thread::dead_threads_non_alloc_bytes;
|
uint64_t unused_bytes = Thread::dead_threads_non_alloc_bytes;
|
||||||
#else
|
#else
|
||||||
|
@ -983,6 +987,7 @@ FCIMPL0(INT64, GCInterface::GetTotalAllocatedBytesApproximate)
|
||||||
}
|
}
|
||||||
|
|
||||||
return current_high;
|
return current_high;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
FCIMPLEND;
|
FCIMPLEND;
|
||||||
|
|
||||||
|
@ -994,6 +999,12 @@ extern "C" INT64 QCALLTYPE GCInterface_GetTotalAllocatedBytesPrecise()
|
||||||
|
|
||||||
GCX_COOP();
|
GCX_COOP();
|
||||||
|
|
||||||
|
#if FEATURE_SATORI_GC
|
||||||
|
|
||||||
|
GCHeapUtilities::GetGCHeap()->GarbageCollect(1);
|
||||||
|
|
||||||
|
allocated = GCHeapUtilities::GetGCHeap()->GetTotalAllocatedBytes();
|
||||||
|
#else
|
||||||
// We need to suspend/restart the EE to get each thread's
|
// We need to suspend/restart the EE to get each thread's
|
||||||
// non-allocated memory from their allocation contexts
|
// non-allocated memory from their allocation contexts
|
||||||
|
|
||||||
|
@ -1011,11 +1022,10 @@ extern "C" INT64 QCALLTYPE GCInterface_GetTotalAllocatedBytesPrecise()
|
||||||
}
|
}
|
||||||
|
|
||||||
ThreadSuspend::RestartEE(FALSE, TRUE);
|
ThreadSuspend::RestartEE(FALSE, TRUE);
|
||||||
|
#endif
|
||||||
END_QCALL;
|
END_QCALL;
|
||||||
|
|
||||||
return allocated;
|
return allocated;
|
||||||
#endif
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#ifdef FEATURE_BASICFREEZE
|
#ifdef FEATURE_BASICFREEZE
|
||||||
|
|
|
@ -38,6 +38,7 @@ Object* FrozenObjectHeapManager::TryAllocateObject(PTR_MethodTable type, size_t
|
||||||
Object* obj = nullptr;
|
Object* obj = nullptr;
|
||||||
|
|
||||||
#if FEATURE_SATORI_GC
|
#if FEATURE_SATORI_GC
|
||||||
|
// TODO: Satori does not have any size limitations here.
|
||||||
if (objectSize > FOH_COMMIT_SIZE)
|
if (objectSize > FOH_COMMIT_SIZE)
|
||||||
{
|
{
|
||||||
// The current design doesn't allow objects larger than FOH_COMMIT_SIZE and
|
// The current design doesn't allow objects larger than FOH_COMMIT_SIZE and
|
||||||
|
@ -144,35 +145,6 @@ Object* FrozenObjectHeapManager::TryAllocateObject(PTR_MethodTable type, size_t
|
||||||
#endif // !FEATURE_BASICFREEZE
|
#endif // !FEATURE_BASICFREEZE
|
||||||
}
|
}
|
||||||
|
|
||||||
static void* ReserveMemory(size_t size)
|
|
||||||
{
|
|
||||||
#if defined(TARGET_X86) || defined(TARGET_AMD64)
|
|
||||||
// We have plenty of space in-range on X86/AMD64 so we can afford keeping
|
|
||||||
// FOH segments there so e.g. JIT can use relocs for frozen objects.
|
|
||||||
return ExecutableAllocator::Instance()->Reserve(size);
|
|
||||||
#else
|
|
||||||
return ClrVirtualAlloc(nullptr, size, MEM_RESERVE, PAGE_READWRITE);
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
static void* CommitMemory(void* ptr, size_t size)
|
|
||||||
{
|
|
||||||
#if defined(TARGET_X86) || defined(TARGET_AMD64)
|
|
||||||
return ExecutableAllocator::Instance()->Commit(ptr, size, /*isExecutable*/ false);
|
|
||||||
#else
|
|
||||||
return ClrVirtualAlloc(ptr, size, MEM_COMMIT, PAGE_READWRITE);
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
static void ReleaseMemory(void* ptr)
|
|
||||||
{
|
|
||||||
#if defined(TARGET_X86) || defined(TARGET_AMD64)
|
|
||||||
ExecutableAllocator::Instance()->Release(ptr);
|
|
||||||
#else
|
|
||||||
ClrVirtualFree(ptr, 0, MEM_RELEASE);
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reserve sizeHint bytes of memory for the given frozen segment.
|
// Reserve sizeHint bytes of memory for the given frozen segment.
|
||||||
// The requested size can be ignored in case of memory pressure and FOH_SEGMENT_DEFAULT_SIZE is used instead.
|
// The requested size can be ignored in case of memory pressure and FOH_SEGMENT_DEFAULT_SIZE is used instead.
|
||||||
FrozenObjectSegment::FrozenObjectSegment(size_t sizeHint) :
|
FrozenObjectSegment::FrozenObjectSegment(size_t sizeHint) :
|
||||||
|
@ -186,7 +158,7 @@ FrozenObjectSegment::FrozenObjectSegment(size_t sizeHint) :
|
||||||
_ASSERT(m_Size > FOH_COMMIT_SIZE);
|
_ASSERT(m_Size > FOH_COMMIT_SIZE);
|
||||||
_ASSERT(m_Size % FOH_COMMIT_SIZE == 0);
|
_ASSERT(m_Size % FOH_COMMIT_SIZE == 0);
|
||||||
|
|
||||||
void* alloc = ReserveMemory(m_Size);
|
void* alloc = ClrVirtualAlloc(nullptr, m_Size, MEM_RESERVE, PAGE_READWRITE);
|
||||||
if (alloc == nullptr)
|
if (alloc == nullptr)
|
||||||
{
|
{
|
||||||
// Try again with the default FOH size
|
// Try again with the default FOH size
|
||||||
|
@ -195,7 +167,7 @@ FrozenObjectSegment::FrozenObjectSegment(size_t sizeHint) :
|
||||||
m_Size = FOH_SEGMENT_DEFAULT_SIZE;
|
m_Size = FOH_SEGMENT_DEFAULT_SIZE;
|
||||||
_ASSERT(m_Size > FOH_COMMIT_SIZE);
|
_ASSERT(m_Size > FOH_COMMIT_SIZE);
|
||||||
_ASSERT(m_Size % FOH_COMMIT_SIZE == 0);
|
_ASSERT(m_Size % FOH_COMMIT_SIZE == 0);
|
||||||
alloc = ReserveMemory(m_Size);
|
alloc = ClrVirtualAlloc(nullptr, m_Size, MEM_RESERVE, PAGE_READWRITE);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (alloc == nullptr)
|
if (alloc == nullptr)
|
||||||
|
@ -205,10 +177,10 @@ FrozenObjectSegment::FrozenObjectSegment(size_t sizeHint) :
|
||||||
}
|
}
|
||||||
|
|
||||||
// Commit a chunk in advance
|
// Commit a chunk in advance
|
||||||
void* committedAlloc = CommitMemory(alloc, FOH_COMMIT_SIZE);
|
void* committedAlloc = ClrVirtualAlloc(alloc, FOH_COMMIT_SIZE, MEM_COMMIT, PAGE_READWRITE);
|
||||||
if (committedAlloc == nullptr)
|
if (committedAlloc == nullptr)
|
||||||
{
|
{
|
||||||
ReleaseMemory(alloc);
|
ClrVirtualFree(alloc, 0, MEM_RELEASE);
|
||||||
ThrowOutOfMemory();
|
ThrowOutOfMemory();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -290,7 +262,7 @@ Object* FrozenObjectSegment::TryAllocateObject(PTR_MethodTable type, size_t obje
|
||||||
// Make sure we don't go out of bounds during this commit
|
// Make sure we don't go out of bounds during this commit
|
||||||
_ASSERT(m_SizeCommitted + FOH_COMMIT_SIZE <= m_Size);
|
_ASSERT(m_SizeCommitted + FOH_COMMIT_SIZE <= m_Size);
|
||||||
|
|
||||||
if (CommitMemory(m_pStart + m_SizeCommitted, FOH_COMMIT_SIZE) == nullptr)
|
if (ClrVirtualAlloc(m_pStart + m_SizeCommitted, FOH_COMMIT_SIZE, MEM_COMMIT, PAGE_READWRITE) == nullptr)
|
||||||
{
|
{
|
||||||
ThrowOutOfMemory();
|
ThrowOutOfMemory();
|
||||||
}
|
}
|
||||||
|
|
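The frozen-object-heap change above drops the ReserveMemory/CommitMemory/ReleaseMemory wrappers and calls ClrVirtualAlloc/ClrVirtualFree directly, but the shape of the allocator is unchanged: reserve the whole segment once, then commit it in FOH_COMMIT_SIZE chunks as the bump pointer advances. The sketch below models that pattern with plain counters; CommitChunk stands in for the MEM_COMMIT call and the sizes are illustrative, not the runtime's values.

#include <cstddef>
#include <cstdint>

constexpr size_t kCommitSize = 64 * 1024;  // stand-in for FOH_COMMIT_SIZE

struct SegmentSketch
{
    uint8_t* start;      // reserved base
    size_t   reserved;   // total reservation (m_Size)
    size_t   committed;  // committed watermark (m_SizeCommitted)
    size_t   used;       // bump pointer
};

// Placeholder for ClrVirtualAlloc(start + committed, kCommitSize, MEM_COMMIT, ...).
static bool CommitChunk(SegmentSketch& s)
{
    if (s.committed + kCommitSize > s.reserved)
        return false;                       // would run past the reservation
    s.committed += kCommitSize;
    return true;
}

static void* TryAllocateSketch(SegmentSketch& s, size_t objectSize)
{
    while (s.used + objectSize > s.committed)
    {
        if (!CommitChunk(s))
            return nullptr;                 // caller would start a new segment
    }
    void* result = s.start + s.used;
    s.used += objectSize;
    return result;
}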
|
@ -301,9 +301,11 @@ void GCToEEInterface::GcScanCurrentStackRoots(promote_func* fn, ScanContext* sc)
|
||||||
#endif // FEATURE_EVENT_TRACE
|
#endif // FEATURE_EVENT_TRACE
|
||||||
ScanStackRoots(pThread, fn, sc);
|
ScanStackRoots(pThread, fn, sc);
|
||||||
ScanTailCallArgBufferRoots(pThread, fn, sc);
|
ScanTailCallArgBufferRoots(pThread, fn, sc);
|
||||||
|
ScanThreadStaticRoots(pThread, fn, sc);
|
||||||
#ifdef FEATURE_EVENT_TRACE
|
#ifdef FEATURE_EVENT_TRACE
|
||||||
sc->dwEtwRootKind = kEtwGCRootKindOther;
|
sc->dwEtwRootKind = kEtwGCRootKindOther;
|
||||||
#endif // FEATURE_EVENT_TRACE
|
#endif // FEATURE_EVENT_TRACE
|
||||||
|
|
||||||
STRESS_LOG2(LF_GC | LF_GCROOTS, LL_INFO100, "Ending scan of Thread %p ID = 0x%x }\n", pThread, pThread->GetThreadId());
|
STRESS_LOG2(LF_GC | LF_GCROOTS, LL_INFO100, "Ending scan of Thread %p ID = 0x%x }\n", pThread, pThread->GetThreadId());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -552,7 +554,7 @@ void GCToEEInterface::GcPoll()
|
||||||
}
|
}
|
||||||
CONTRACTL_END;
|
CONTRACTL_END;
|
||||||
|
|
||||||
if (g_TrapReturningThreads.LoadWithoutBarrier())
|
if (g_TrapReturningThreads)
|
||||||
{
|
{
|
||||||
Thread* pThread = ::GetThread();
|
Thread* pThread = ::GetThread();
|
||||||
_ASSERTE(!ThreadStore::HoldingThreadStore(pThread));
|
_ASSERTE(!ThreadStore::HoldingThreadStore(pThread));
|
||||||
|
|
|
@ -127,7 +127,11 @@ public:
|
||||||
|
|
||||||
static bool UseThreadAllocationContexts()
|
static bool UseThreadAllocationContexts()
|
||||||
{
|
{
|
||||||
|
#ifdef FEATURE_SATORI_GC
|
||||||
|
return true;
|
||||||
|
#else
|
||||||
return s_useThreadAllocationContexts;
|
return s_useThreadAllocationContexts;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
#ifdef FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
|
#ifdef FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
|
||||||
|
|
|
@ -1120,7 +1120,7 @@ Object* AllocateImmortalObject(MethodTable* pMT, size_t objectSize)
|
||||||
SetTypeHandleOnThreadForAlloc(TypeHandle(pMT));
|
SetTypeHandleOnThreadForAlloc(TypeHandle(pMT));
|
||||||
|
|
||||||
GC_ALLOC_FLAGS flags = GC_ALLOC_IMMORTAL;
|
GC_ALLOC_FLAGS flags = GC_ALLOC_IMMORTAL;
|
||||||
if (pMT->ContainsPointers())
|
if (pMT->ContainsGCPointers())
|
||||||
flags |= GC_ALLOC_CONTAINS_REF;
|
flags |= GC_ALLOC_CONTAINS_REF;
|
||||||
|
|
||||||
#ifdef FEATURE_64BIT_ALIGNMENT
|
#ifdef FEATURE_64BIT_ALIGNMENT
|
||||||
|
@ -1438,7 +1438,7 @@ bool IsInHeapSatori(void* ptr)
|
||||||
void CheckEscapeSatori(Object** dst, Object* ref)
|
void CheckEscapeSatori(Object** dst, Object* ref)
|
||||||
{
|
{
|
||||||
SatoriObject* obj = (SatoriObject*)ref;
|
SatoriObject* obj = (SatoriObject*)ref;
|
||||||
// TODO: no nullcheck when external
|
// TODO: Satori: no null check needed when external? (null is a subset of external)
|
||||||
if (!obj || obj->IsExternal())
|
if (!obj || obj->IsExternal())
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
@ -1536,14 +1536,13 @@ void ErectWriteBarrierForMT(MethodTable **dst, MethodTable *ref)
|
||||||
STATIC_CONTRACT_NOTHROW;
|
STATIC_CONTRACT_NOTHROW;
|
||||||
STATIC_CONTRACT_GC_NOTRIGGER;
|
STATIC_CONTRACT_GC_NOTRIGGER;
|
||||||
|
|
||||||
|
#if FEATURE_SATORI_GC
|
||||||
|
// this whole thing is unnecessary in Satori
|
||||||
|
__UNREACHABLE();
|
||||||
|
#else
|
||||||
|
|
||||||
*dst = ref;
|
*dst = ref;
|
||||||
|
|
||||||
#if FEATURE_SATORI_GC
|
|
||||||
|
|
||||||
// Satori large objects are allocated in either gen1 or gen2.
|
|
||||||
// PublishObject will sort this out and mark cards as needed.
|
|
||||||
|
|
||||||
#else
|
|
||||||
#ifdef WRITE_BARRIER_CHECK
|
#ifdef WRITE_BARRIER_CHECK
|
||||||
updateGCShadow((Object **)dst, (Object *)ref); // support debugging write barrier, updateGCShadow only cares that these are pointers
|
updateGCShadow((Object **)dst, (Object *)ref); // support debugging write barrier, updateGCShadow only cares that these are pointers
|
||||||
#endif
|
#endif
|
||||||
|
|
|
@ -1384,9 +1384,12 @@ void* __cdecl GCSafeMemCpy(void * dest, const void * src, size_t len)
|
||||||
STATIC_CONTRACT_GC_NOTRIGGER;
|
STATIC_CONTRACT_GC_NOTRIGGER;
|
||||||
STATIC_CONTRACT_FORBID_FAULT;
|
STATIC_CONTRACT_FORBID_FAULT;
|
||||||
|
|
||||||
|
#ifdef FEATURE_SATORI_GC
|
||||||
if (IsInHeapSatori((Object**)dest))
|
if (IsInHeapSatori((Object**)dest))
|
||||||
//if (!(((*(BYTE**)&dest) < g_lowest_address ) ||
|
#else
|
||||||
// ((*(BYTE**)&dest) >= g_highest_address)))
|
if (!(((*(BYTE**)&dest) < g_lowest_address ) ||
|
||||||
|
((*(BYTE**)&dest) >= g_highest_address)))
|
||||||
|
#endif
|
||||||
{
|
{
|
||||||
Thread* pThread = GetThreadNULLOk();
|
Thread* pThread = GetThreadNULLOk();
|
||||||
|
|
||||||
|
|
|
@ -157,8 +157,13 @@ class Object
|
||||||
VOID SetMethodTableForUOHObject(MethodTable *pMT)
|
VOID SetMethodTableForUOHObject(MethodTable *pMT)
|
||||||
{
|
{
|
||||||
WRAPPER_NO_CONTRACT;
|
WRAPPER_NO_CONTRACT;
|
||||||
|
#if FEATURE_SATORI_GC
|
||||||
|
// nothing extra needs to happen in Satori.
|
||||||
|
m_pMethTab = pMT;
|
||||||
|
#else
|
||||||
// This function must be used if the allocation occurs on a UOH heap, and the method table might be a collectible type
|
// This function must be used if the allocation occurs on a UOH heap, and the method table might be a collectible type
|
||||||
ErectWriteBarrierForMT(&m_pMethTab, pMT);
|
ErectWriteBarrierForMT(&m_pMethTab, pMT);
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
#endif //!DACCESS_COMPILE
|
#endif //!DACCESS_COMPILE
|
||||||
|
|
||||||
|
|
|
@ -3252,40 +3252,6 @@ COR_PRF_SUSPEND_REASON GCSuspendReasonToProfSuspendReason(ThreadSuspend::SUSPEND
|
||||||
}
|
}
|
||||||
#endif // PROFILING_SUPPORTED
|
#endif // PROFILING_SUPPORTED
|
||||||
|
|
||||||
// exponential spinwait with an approximate time limit for waiting in microsecond range.
|
|
||||||
void SpinWait(int usecLimit)
|
|
||||||
{
|
|
||||||
LARGE_INTEGER li;
|
|
||||||
QueryPerformanceCounter(&li);
|
|
||||||
int64_t startTicks = li.QuadPart;
|
|
||||||
|
|
||||||
QueryPerformanceFrequency(&li);
|
|
||||||
int64_t ticksPerSecond = li.QuadPart;
|
|
||||||
int64_t endTicks = startTicks + (usecLimit * ticksPerSecond) / 1000000;
|
|
||||||
|
|
||||||
#ifdef TARGET_UNIX
|
|
||||||
if (usecLimit > 10)
|
|
||||||
{
|
|
||||||
PAL_nanosleep(usecLimit * 1000);
|
|
||||||
}
|
|
||||||
#endif // TARGET_UNIX
|
|
||||||
|
|
||||||
for (int i = 0; i < 30; i++)
|
|
||||||
{
|
|
||||||
QueryPerformanceCounter(&li);
|
|
||||||
int64_t currentTicks = li.QuadPart;
|
|
||||||
if (currentTicks > endTicks)
|
|
||||||
{
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (int j = 0; j < (1 << i); j++)
|
|
||||||
{
|
|
||||||
System_YieldProcessor();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//************************************************************************************
|
//************************************************************************************
|
||||||
//
|
//
|
||||||
// SuspendRuntime is responsible for ensuring that all managed threads reach a
|
// SuspendRuntime is responsible for ensuring that all managed threads reach a
|
||||||
|
|
|
@ -13,24 +13,6 @@
|
||||||
<Compile Include="System\Collections\Concurrent\CDSCollectionETWBCLProvider.cs" />
|
<Compile Include="System\Collections\Concurrent\CDSCollectionETWBCLProvider.cs" />
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentBag.cs" />
|
<Compile Include="System\Collections\Concurrent\ConcurrentBag.cs" />
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary.cs" />
|
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary.cs" />
|
||||||
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImpl.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImpl.SnapshotImpl.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImpl`2.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImpl`3.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplBoxed.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplInt.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplLong.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplNint.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplUint.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplUlong.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplNuint.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\DictionaryImplRef.cs" />
|
|
||||||
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\Counter\CounterBase.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\Counter\Counter32.cs" />
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentDictionary\Counter\Counter64.cs" />
|
|
||||||
|
|
||||||
<Compile Include="System\Collections\Concurrent\ConcurrentStack.cs" />
|
<Compile Include="System\Collections\Concurrent\ConcurrentStack.cs" />
|
||||||
<Compile Include="System\Collections\Concurrent\OrderablePartitioner.cs" />
|
<Compile Include="System\Collections\Concurrent\OrderablePartitioner.cs" />
|
||||||
<Compile Include="System\Collections\Concurrent\Partitioner.cs" />
|
<Compile Include="System\Collections\Concurrent\Partitioner.cs" />
|
||||||
|
|
File diff suppressed because it is too large
|
@ -1,236 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// Counter32.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
#nullable disable
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
/// <summary>
|
|
||||||
/// Scalable 32bit counter that can be used from multiple threads.
|
|
||||||
/// </summary>
|
|
||||||
internal sealed class Counter32: CounterBase
|
|
||||||
{
|
|
||||||
private sealed class Cell
|
|
||||||
{
|
|
||||||
[StructLayout(LayoutKind.Explicit, Size = CACHE_LINE * 2 - OBJ_HEADER_SIZE)]
|
|
||||||
public struct SpacedCounter
|
|
||||||
{
|
|
||||||
[FieldOffset(CACHE_LINE - OBJ_HEADER_SIZE)]
|
|
||||||
public int count;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SpacedCounter counter;
|
|
||||||
}
|
|
||||||
|
|
||||||
// spaced out counters
|
|
||||||
private Cell[] cells;
|
|
||||||
|
|
||||||
// default counter
|
|
||||||
private int count;
|
|
||||||
|
|
||||||
// delayed estimated count
|
|
||||||
private int lastCount;
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Initializes a new instance of the <see
|
|
||||||
/// cref="Counter32"/>
|
|
||||||
/// </summary>
|
|
||||||
public Counter32()
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Returns the value of the counter at the time of the call.
|
|
||||||
/// </summary>
|
|
||||||
/// <remarks>
|
|
||||||
/// The value may miss in-progress updates if the counter is being concurrently modified.
|
|
||||||
/// </remarks>
|
|
||||||
public int Value
|
|
||||||
{
|
|
||||||
get
|
|
||||||
{
|
|
||||||
var count = this.count;
|
|
||||||
var cells = this.cells;
|
|
||||||
|
|
||||||
if (cells != null)
|
|
||||||
{
|
|
||||||
for (int i = 0; i < cells.Length; i++)
|
|
||||||
{
|
|
||||||
var cell = cells[i];
|
|
||||||
if (cell != null)
|
|
||||||
{
|
|
||||||
count += cell.counter.count;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return count;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Returns the approximate value of the counter at the time of the call.
|
|
||||||
/// </summary>
|
|
||||||
/// <remarks>
|
|
||||||
/// EstimatedValue could be significantly cheaper to obtain, but may be slightly delayed.
|
|
||||||
/// </remarks>
|
|
||||||
public int EstimatedValue
|
|
||||||
{
|
|
||||||
get
|
|
||||||
{
|
|
||||||
if (this.cells == null)
|
|
||||||
{
|
|
||||||
return this.count;
|
|
||||||
}
|
|
||||||
|
|
||||||
var curTicks = (uint)Environment.TickCount;
|
|
||||||
// more than a millisecond passed?
|
|
||||||
if (curTicks != lastCountTicks)
|
|
||||||
{
|
|
||||||
lastCountTicks = curTicks;
|
|
||||||
lastCount = Value;
|
|
||||||
}
|
|
||||||
|
|
||||||
return lastCount;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Increments the counter by 1.
|
|
||||||
/// </summary>
|
|
||||||
public void Increment()
|
|
||||||
{
|
|
||||||
int curCellCount = this.cellCount;
|
|
||||||
var drift = increment(ref GetCountRef(curCellCount));
|
|
||||||
|
|
||||||
if (drift != 0)
|
|
||||||
{
|
|
||||||
TryAddCell(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Decrements the counter by 1.
|
|
||||||
/// </summary>
|
|
||||||
public void Decrement()
|
|
||||||
{
|
|
||||||
int curCellCount = this.cellCount;
|
|
||||||
var drift = decrement(ref GetCountRef(curCellCount));
|
|
||||||
|
|
||||||
if (drift != 0)
|
|
||||||
{
|
|
||||||
TryAddCell(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Increments the counter by 'value'.
|
|
||||||
/// </summary>
|
|
||||||
public void Add(int value)
|
|
||||||
{
|
|
||||||
int curCellCount = this.cellCount;
|
|
||||||
var drift = add(ref GetCountRef(curCellCount), value);
|
|
||||||
|
|
||||||
if (drift != 0)
|
|
||||||
{
|
|
||||||
TryAddCell(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
private ref int GetCountRef(int curCellCount)
|
|
||||||
{
|
|
||||||
ref var countRef = ref count;
|
|
||||||
|
|
||||||
Cell[] cells;
|
|
||||||
if ((cells = this.cells) != null && curCellCount > 1)
|
|
||||||
{
|
|
||||||
var cell = cells[GetIndex((uint)curCellCount)];
|
|
||||||
if (cell != null)
|
|
||||||
{
|
|
||||||
countRef = ref cell.counter.count;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ref countRef;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int increment(ref int val)
|
|
||||||
{
|
|
||||||
return -val - 1 + Interlocked.Increment(ref val);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int add(ref int val, int inc)
|
|
||||||
{
|
|
||||||
return -val - inc + Interlocked.Add(ref val, inc);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int decrement(ref int val)
|
|
||||||
{
|
|
||||||
return val - 1 - Interlocked.Decrement(ref val);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void TryAddCell(int curCellCount)
|
|
||||||
{
|
|
||||||
if (curCellCount < s_MaxCellCount)
|
|
||||||
{
|
|
||||||
TryAddCellCore(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.NoInlining)]
|
|
||||||
private void TryAddCellCore(int curCellCount)
|
|
||||||
{
|
|
||||||
var cells = this.cells;
|
|
||||||
if (cells == null)
|
|
||||||
{
|
|
||||||
var newCells = new Cell[s_MaxCellCount];
|
|
||||||
cells = Interlocked.CompareExchange(ref this.cells, newCells, null) ?? newCells;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (cells[curCellCount] == null)
|
|
||||||
{
|
|
||||||
Interlocked.CompareExchange(ref cells[curCellCount], new Cell(), null);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.cellCount == curCellCount)
|
|
||||||
{
|
|
||||||
Interlocked.CompareExchange(ref this.cellCount, curCellCount + 1, curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,235 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// Counter64.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
#nullable disable
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
/// <summary>
|
|
||||||
/// Scalable 64bit counter that can be used from multiple threads.
|
|
||||||
/// </summary>
|
|
||||||
internal sealed class Counter64 : CounterBase
|
|
||||||
{
|
|
||||||
private sealed class Cell
|
|
||||||
{
|
|
||||||
[StructLayout(LayoutKind.Explicit, Size = CACHE_LINE * 2 - OBJ_HEADER_SIZE)]
|
|
||||||
public struct SpacedCounter
|
|
||||||
{
|
|
||||||
[FieldOffset(CACHE_LINE - OBJ_HEADER_SIZE)]
|
|
||||||
public long count;
|
|
||||||
}
|
|
||||||
|
|
||||||
public SpacedCounter counter;
|
|
||||||
}
|
|
||||||
|
|
||||||
// spaced out counters
|
|
||||||
private Cell[] cells;
|
|
||||||
|
|
||||||
// default counter
|
|
||||||
private long count;
|
|
||||||
|
|
||||||
// delayed count
|
|
||||||
private long lastCount;
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Initializes a new instance of the <see
|
|
||||||
/// cref="Counter32"/>
|
|
||||||
/// </summary>
|
|
||||||
public Counter64()
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Returns the value of the counter at the time of the call.
|
|
||||||
/// </summary>
|
|
||||||
/// <remarks>
|
|
||||||
/// The value may miss in-progress updates if the counter is being concurrently modified.
|
|
||||||
/// </remarks>
|
|
||||||
public long Value
|
|
||||||
{
|
|
||||||
get
|
|
||||||
{
|
|
||||||
var count = this.count;
|
|
||||||
var cells = this.cells;
|
|
||||||
|
|
||||||
if (cells != null)
|
|
||||||
{
|
|
||||||
for (int i = 0; i < cells.Length; i++)
|
|
||||||
{
|
|
||||||
var cell = cells[i];
|
|
||||||
if (cell != null)
|
|
||||||
{
|
|
||||||
count += cell.counter.count;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return count;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Returns the approximate value of the counter at the time of the call.
|
|
||||||
/// </summary>
|
|
||||||
/// <remarks>
|
|
||||||
/// EstimatedValue could be significantly cheaper to obtain, but may be slightly delayed.
|
|
||||||
/// </remarks>
|
|
||||||
public long EstimatedValue
|
|
||||||
{
|
|
||||||
get
|
|
||||||
{
|
|
||||||
if (this.cellCount == 0)
|
|
||||||
{
|
|
||||||
return Value;
|
|
||||||
}
|
|
||||||
|
|
||||||
var curTicks = (uint)Environment.TickCount;
|
|
||||||
// more than a millisecond passed?
|
|
||||||
if (curTicks != lastCountTicks)
|
|
||||||
{
|
|
||||||
lastCountTicks = curTicks;
|
|
||||||
lastCount = Value;
|
|
||||||
}
|
|
||||||
|
|
||||||
return lastCount;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Increments the counter by 1.
|
|
||||||
/// </summary>
|
|
||||||
public void Increment()
|
|
||||||
{
|
|
||||||
int curCellCount = this.cellCount;
|
|
||||||
var drift = increment(ref GetCountRef(curCellCount));
|
|
||||||
|
|
||||||
if (drift != 0)
|
|
||||||
{
|
|
||||||
TryAddCell(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Decrements the counter by 1.
|
|
||||||
/// </summary>
|
|
||||||
public void Decrement()
|
|
||||||
{
|
|
||||||
int curCellCount = this.cellCount;
|
|
||||||
var drift = decrement(ref GetCountRef(curCellCount));
|
|
||||||
|
|
||||||
if (drift != 0)
|
|
||||||
{
|
|
||||||
TryAddCell(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// <summary>
|
|
||||||
/// Increments the counter by 'value'.
|
|
||||||
/// </summary>
|
|
||||||
public void Add(int value)
|
|
||||||
{
|
|
||||||
int curCellCount = this.cellCount;
|
|
||||||
var drift = add(ref GetCountRef(curCellCount), value);
|
|
||||||
|
|
||||||
if (drift != 0)
|
|
||||||
{
|
|
||||||
TryAddCell(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
private ref long GetCountRef(int curCellCount)
|
|
||||||
{
|
|
||||||
ref var countRef = ref count;
|
|
||||||
|
|
||||||
Cell[] cells;
|
|
||||||
if ((cells = this.cells) != null && curCellCount > 1)
|
|
||||||
{
|
|
||||||
var cell = cells[GetIndex((uint)curCellCount)];
|
|
||||||
if (cell != null)
|
|
||||||
{
|
|
||||||
countRef = ref cell.counter.count;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ref countRef;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static long increment(ref long val)
|
|
||||||
{
|
|
||||||
return -val - 1 + Interlocked.Increment(ref val);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static long add(ref long val, int inc)
|
|
||||||
{
|
|
||||||
return -val - inc + Interlocked.Add(ref val, inc);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static long decrement(ref long val)
|
|
||||||
{
|
|
||||||
return val - 1 - Interlocked.Decrement(ref val);
|
|
||||||
}
|
|
||||||
|
|
||||||
private void TryAddCell(int curCellCount)
|
|
||||||
{
|
|
||||||
if (curCellCount < s_MaxCellCount)
|
|
||||||
{
|
|
||||||
TryAddCellCore(curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void TryAddCellCore(int curCellCount)
|
|
||||||
{
|
|
||||||
var cells = this.cells;
|
|
||||||
if (cells == null)
|
|
||||||
{
|
|
||||||
var newCells = new Cell[s_MaxCellCount];
|
|
||||||
cells = Interlocked.CompareExchange(ref this.cells, newCells, null) ?? newCells;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (cells[curCellCount] == null)
|
|
||||||
{
|
|
||||||
Interlocked.CompareExchange(ref cells[curCellCount], new Cell(), null);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.cellCount == curCellCount)
|
|
||||||
{
|
|
||||||
Interlocked.CompareExchange(ref this.cellCount, curCellCount + 1, curCellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,62 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// CounterBase.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
/// <summary>
|
|
||||||
/// Scalable counter base.
|
|
||||||
/// </summary>
|
|
||||||
internal class CounterBase
|
|
||||||
{
|
|
||||||
private protected const int CACHE_LINE = 64;
|
|
||||||
private protected const int OBJ_HEADER_SIZE = 8;
|
|
||||||
|
|
||||||
private protected static readonly int s_MaxCellCount = HashHelpers.AlignToPowerOfTwo(Environment.ProcessorCount) + 1;
|
|
||||||
|
|
||||||
// how many cells we have
|
|
||||||
private protected int cellCount;
|
|
||||||
|
|
||||||
// delayed count time
|
|
||||||
private protected uint lastCountTicks;
|
|
||||||
|
|
||||||
private protected CounterBase()
|
|
||||||
{
|
|
||||||
// touch a static
|
|
||||||
_ = s_MaxCellCount;
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
private protected static unsafe int GetIndex(uint cellCount)
|
|
||||||
{
|
|
||||||
nuint addr = (nuint)(&cellCount);
|
|
||||||
return (int)(addr % cellCount);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,112 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImpl.SnapshotImpl.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
#nullable disable
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Collections.ObjectModel;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal abstract partial class DictionaryImpl<TKey, TKeyStore, TValue>
|
|
||||||
: DictionaryImpl<TKey, TValue>
|
|
||||||
{
|
|
||||||
internal override Snapshot GetSnapshot()
|
|
||||||
{
|
|
||||||
return new SnapshotImpl(this);
|
|
||||||
}
|
|
||||||
|
|
||||||
private sealed class SnapshotImpl : Snapshot
|
|
||||||
{
|
|
||||||
private readonly DictionaryImpl<TKey, TKeyStore, TValue> _table;
|
|
||||||
|
|
||||||
public SnapshotImpl(DictionaryImpl<TKey, TKeyStore, TValue> dict)
|
|
||||||
{
|
|
||||||
this._table = dict;
|
|
||||||
|
|
||||||
// linearization point.
|
|
||||||
// if table is quiescent and has no copy in progress,
|
|
||||||
// we can simply iterate over its table.
|
|
||||||
while (true)
|
|
||||||
{
|
|
||||||
if (_table._newTable == null)
|
|
||||||
{
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// there is a copy in progress, finish it and try again
|
|
||||||
_table.HelpCopy(copy_all: true);
|
|
||||||
this._table = (DictionaryImpl<TKey, TKeyStore, TValue>)(this._table._topDict._table);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public override int Count => _table.Count;
|
|
||||||
|
|
||||||
public override bool MoveNext()
|
|
||||||
{
|
|
||||||
var entries = this._table._entries;
|
|
||||||
while (_idx < entries.Length)
|
|
||||||
{
|
|
||||||
var nextEntry = entries[_idx++];
|
|
||||||
|
|
||||||
if (nextEntry.value != null)
|
|
||||||
{
|
|
||||||
var nextKstore = nextEntry.key;
|
|
||||||
if (nextKstore == null)
|
|
||||||
{
|
|
||||||
// slot was deleted.
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
_curKey = _table.keyFromEntry(nextKstore);
|
|
||||||
object nextV = _table.TryGetValue(_curKey);
|
|
||||||
if (nextV != null)
|
|
||||||
{
|
|
||||||
_curValue = _table.FromObjectValue(nextV);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
_curKey = default;
|
|
||||||
_curValue = default;
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
public override void Reset()
|
|
||||||
{
|
|
||||||
_idx = 0;
|
|
||||||
_curKey = default;
|
|
||||||
_curValue = default;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,119 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImpl.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
#nullable disable
|
|
||||||
|
|
||||||
using System.Collections.Concurrent;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal abstract class DictionaryImpl
|
|
||||||
{
|
|
||||||
internal DictionaryImpl() { }
|
|
||||||
|
|
||||||
internal enum ValueMatch
|
|
||||||
{
|
|
||||||
Any, // sets new value unconditionally, used by index set and TryRemove(key)
|
|
||||||
NullOrDead, // set value if original value is null or dead, used by Add/TryAdd
|
|
||||||
NotNullOrDead, // set value if original value is alive, used by Remove
|
|
||||||
OldValue, // sets new value if old value matches
|
|
||||||
}
|
|
||||||
|
|
||||||
internal sealed class Prime
|
|
||||||
{
|
|
||||||
internal object originalValue;
|
|
||||||
|
|
||||||
public Prime(object originalValue)
|
|
||||||
{
|
|
||||||
this.originalValue = originalValue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
internal static readonly object TOMBSTONE = new object();
|
|
||||||
internal static readonly Prime TOMBPRIME = new Prime(TOMBSTONE);
|
|
||||||
internal static readonly object NULLVALUE = new object();
|
|
||||||
|
|
||||||
// represents a trivially copied empty entry
|
|
||||||
// we insert it in the old table during rehashing
|
|
||||||
// to reduce chances that more entries are added
|
|
||||||
protected const int TOMBPRIMEHASH = 1 << 31;
|
|
||||||
|
|
||||||
// we cannot distigush zero keys from uninitialized state
|
|
||||||
// so we force them to have this special hash instead
|
|
||||||
protected const int ZEROHASH = 1 << 30;
|
|
||||||
|
|
||||||
// all regular hashes have both these bits set
|
|
||||||
// to be different from either 0, TOMBPRIMEHASH or ZEROHASH
|
|
||||||
// having only these bits set in a case of Ref key means that the slot is permanently deleted.
|
|
||||||
protected const int SPECIAL_HASH_BITS = TOMBPRIMEHASH | ZEROHASH;
|
|
||||||
|
|
||||||
// Heuristic to decide if we have reprobed toooo many times. Running over
|
|
||||||
// the reprobe limit on a 'get' call acts as a 'miss'; on a 'put' call it
|
|
||||||
// can trigger a table resize. Several places must have exact agreement on
|
|
||||||
// what the reprobe_limit is, so we share it here.
|
|
||||||
protected const int REPROBE_LIMIT = 4;
|
|
||||||
protected const int REPROBE_LIMIT_SHIFT = 8;
|
|
||||||
|
|
||||||
protected static int ReprobeLimit(int lenMask)
|
|
||||||
{
|
|
||||||
// 1/2 of table with some extra
|
|
||||||
return REPROBE_LIMIT + (lenMask >> REPROBE_LIMIT_SHIFT);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected static bool EntryValueNullOrDead(object entryValue)
|
|
||||||
{
|
|
||||||
return entryValue == null || entryValue == TOMBSTONE;
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
protected static int ReduceHashToIndex(int fullHash, int lenMask)
|
|
||||||
{
|
|
||||||
var h = (uint)fullHash;
|
|
||||||
|
|
||||||
// xor-shift some upper bits down, in case if variations are mostly in high bits
|
|
||||||
// and scatter the bits a little to break up clusters if hashes are periodic (like 42, 43, 44, ...)
|
|
||||||
// long clusters can cause long reprobes. small clusters are ok though.
|
|
||||||
h ^= h >> 15;
|
|
||||||
h ^= h >> 8;
|
|
||||||
h += (h >> 3) * 2654435769u;
|
|
||||||
|
|
||||||
return (int)h & lenMask;
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
internal static object ToObjectValue<TValue>(TValue value)
|
|
||||||
{
|
|
||||||
if (default(TValue) != null)
|
|
||||||
{
|
|
||||||
return new Boxed<TValue>(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
return (object)value ?? NULLVALUE;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,165 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImplBoxed.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
#nullable disable
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal sealed class DictionaryImplBoxed<TKey, TValue>
|
|
||||||
: DictionaryImpl<TKey, Boxed<TKey>, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplBoxed(int capacity, ConcurrentDictionary<TKey, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplBoxed(int capacity, DictionaryImplBoxed<TKey, TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref Boxed<TKey> entryKey, TKey key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
if (entryKeyValue == null)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, new Boxed<TKey>(key), null);
|
|
||||||
if (entryKeyValue == null)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return _keyComparer.Equals(key, entryKey.Value);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref Boxed<TKey> entryKey, Boxed<TKey> key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
if (entryKeyValue == null)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, null);
|
|
||||||
if (entryKeyValue == null)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return _keyComparer.Equals(key.Value, entryKey.Value);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(TKey key, Boxed<TKey> entryKey)
|
|
||||||
{
|
|
||||||
//NOTE: slots are claimed in two stages - claim a hash, then set a key
|
|
||||||
// it is possible to observe a slot with a null key, but with hash already set
|
|
||||||
// that is not a match since the key is not yet in the table
|
|
||||||
if (entryKey == null)
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return _keyComparer.Equals(key, entryKey.Value);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<TKey, Boxed<TKey>, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplBoxed<TKey, TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override TKey keyFromEntry(Boxed<TKey> entryKey)
|
|
||||||
{
|
|
||||||
return entryKey.Value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#pragma warning disable CS0659 // Type overrides Object.Equals(object o) but does not override Object.GetHashCode()
|
|
||||||
internal sealed class Boxed<T>
|
|
||||||
{
|
|
||||||
// 0 - allow writes, 1 - someone is writing, 2 frozen.
|
|
||||||
public int writeStatus;
|
|
||||||
public T Value;
|
|
||||||
|
|
||||||
public Boxed(T key)
|
|
||||||
{
|
|
||||||
this.Value = key;
|
|
||||||
}
|
|
||||||
|
|
||||||
public override bool Equals(object obj)
|
|
||||||
{
|
|
||||||
return EqualityComparer<T>.Default.Equals(this.Value, Unsafe.As<Boxed<T>>(obj).Value);
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
public bool TryVolatileWrite(T value)
|
|
||||||
{
|
|
||||||
if (Interlocked.CompareExchange(ref writeStatus, 1, 0) == 0)
|
|
||||||
{
|
|
||||||
Value = value;
|
|
||||||
Volatile.Write(ref writeStatus, 0);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
public bool TryCompareExchange(T oldValue, T newValue, out bool changed)
|
|
||||||
{
|
|
||||||
changed = false;
|
|
||||||
if (Interlocked.CompareExchange(ref writeStatus, 1, 0) != 0)
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (EqualityComparer<T>.Default.Equals(Value, oldValue))
|
|
||||||
{
|
|
||||||
Value = newValue;
|
|
||||||
changed = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
Volatile.Write(ref writeStatus, 0);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
[MethodImpl(MethodImplOptions.AggressiveInlining)]
|
|
||||||
internal void Freeze()
|
|
||||||
{
|
|
||||||
// Wait for writers (1) to leave. Already 2 is ok, or set 0 -> 2.
|
|
||||||
while (Interlocked.CompareExchange(ref writeStatus, 2, 0) == 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#pragma warning restore CS0659 // Type overrides Object.Equals(object o) but does not override Object.GetHashCode()
|
|
||||||
}
|
|
|
@ -1,171 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImplInt.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal sealed class DictionaryImplInt<TValue>
|
|
||||||
: DictionaryImpl<int, int, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplInt(int capacity, ConcurrentDictionary<int, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplInt(int capacity, DictionaryImplInt<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref int entryKey, int key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref int entryKey, int key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref int entryKey, int key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(int key)
|
|
||||||
{
|
|
||||||
if (key == 0)
|
|
||||||
{
|
|
||||||
return ZEROHASH;
|
|
||||||
}
|
|
||||||
|
|
||||||
return base.hash(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(int key, int entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<int, int, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplInt<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int keyFromEntry(int entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
internal sealed class DictionaryImplIntNoComparer<TValue>
|
|
||||||
: DictionaryImpl<int, int, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplIntNoComparer(int capacity, ConcurrentDictionary<int, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplIntNoComparer(int capacity, DictionaryImplIntNoComparer<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref int entryKey, int key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref int entryKey, int key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref int entryKey, int key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// inline the base implementation to devirtualize calls to hash and keyEqual
|
|
||||||
internal override object TryGetValue(int key)
|
|
||||||
{
|
|
||||||
return base.TryGetValue(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(int key)
|
|
||||||
{
|
|
||||||
return (key == 0) ?
|
|
||||||
ZEROHASH :
|
|
||||||
key | SPECIAL_HASH_BITS;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(int key, int entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<int, int, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplIntNoComparer<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int keyFromEntry(int entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,171 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImplLong.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal sealed class DictionaryImplLong<TValue>
|
|
||||||
: DictionaryImpl<long, long, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplLong(int capacity, ConcurrentDictionary<long, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplLong(int capacity, DictionaryImplLong<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref long entryKey, long key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref long entryKey, long key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref long entryKey, long key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(long key)
|
|
||||||
{
|
|
||||||
if (key == 0)
|
|
||||||
{
|
|
||||||
return ZEROHASH;
|
|
||||||
}
|
|
||||||
|
|
||||||
return base.hash(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(long key, long entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<long, long, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplLong<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override long keyFromEntry(long entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
internal sealed class DictionaryImplLongNoComparer<TValue>
|
|
||||||
: DictionaryImpl<long, long, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplLongNoComparer(int capacity, ConcurrentDictionary<long, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplLongNoComparer(int capacity, DictionaryImplLongNoComparer<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref long entryKey, long key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref long entryKey, long key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref long entryKey, long key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// inline the base implementation to devirtualize calls to hash and keyEqual
|
|
||||||
internal override object TryGetValue(long key)
|
|
||||||
{
|
|
||||||
return base.TryGetValue(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(long key)
|
|
||||||
{
|
|
||||||
return (key == 0) ?
|
|
||||||
ZEROHASH :
|
|
||||||
key.GetHashCode() | SPECIAL_HASH_BITS;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(long key, long entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<long, long, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplLongNoComparer<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override long keyFromEntry(long entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,171 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImplNint.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal sealed class DictionaryImplNint<TValue>
|
|
||||||
: DictionaryImpl<nint, nint, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplNint(int capacity, ConcurrentDictionary<nint, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplNint(int capacity, DictionaryImplNint<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref nint entryKey, nint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref nint entryKey, nint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref nint entryKey, nint key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, (nint)0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(nint key)
|
|
||||||
{
|
|
||||||
if (key == 0)
|
|
||||||
{
|
|
||||||
return ZEROHASH;
|
|
||||||
}
|
|
||||||
|
|
||||||
return base.hash(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(nint key, nint entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<nint, nint, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplNint<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override nint keyFromEntry(nint entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
internal sealed class DictionaryImplNintNoComparer<TValue>
|
|
||||||
: DictionaryImpl<nint, nint, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplNintNoComparer(int capacity, ConcurrentDictionary<nint, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplNintNoComparer(int capacity, DictionaryImplNintNoComparer<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref nint entryKey, nint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref nint entryKey, nint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref nint entryKey, nint key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, (nint)0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// inline the base implementation to devirtualize calls to hash and keyEqual
|
|
||||||
internal override object TryGetValue(nint key)
|
|
||||||
{
|
|
||||||
return base.TryGetValue(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(nint key)
|
|
||||||
{
|
|
||||||
return (key == 0) ?
|
|
||||||
ZEROHASH :
|
|
||||||
key.GetHashCode() | SPECIAL_HASH_BITS;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(nint key, nint entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<nint, nint, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplNintNoComparer<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override nint keyFromEntry(nint entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,171 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImplNuint.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal sealed class DictionaryImplNuint<TValue>
|
|
||||||
: DictionaryImpl<nuint, nuint, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplNuint(int capacity, ConcurrentDictionary<nuint, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplNuint(int capacity, DictionaryImplNuint<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref nuint entryKey, nuint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref nuint entryKey, nuint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref nuint entryKey, nuint key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, (nuint)0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(nuint key)
|
|
||||||
{
|
|
||||||
if (key == 0)
|
|
||||||
{
|
|
||||||
return ZEROHASH;
|
|
||||||
}
|
|
||||||
|
|
||||||
return base.hash(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(nuint key, nuint entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey || _keyComparer.Equals(key, entryKey);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<nuint, nuint, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplNuint<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override nuint keyFromEntry(nuint entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
internal sealed class DictionaryImplNuintNoComparer<TValue>
|
|
||||||
: DictionaryImpl<nuint, nuint, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplNuintNoComparer(int capacity, ConcurrentDictionary<nuint, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplNuintNoComparer(int capacity, DictionaryImplNuintNoComparer<TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref nuint entryKey, nuint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref nuint entryKey, nuint key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref nuint entryKey, nuint key)
|
|
||||||
{
|
|
||||||
var entryKeyValue = entryKey;
|
|
||||||
//zero keys are claimed via hash
|
|
||||||
if (entryKeyValue == 0 & key != 0)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, (nuint)0);
|
|
||||||
if (entryKeyValue == 0)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return key == entryKeyValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// inline the base implementation to devirtualize calls to hash and keyEqual
|
|
||||||
internal override object TryGetValue(nuint key)
|
|
||||||
{
|
|
||||||
return base.TryGetValue(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(nuint key)
|
|
||||||
{
|
|
||||||
return (key == 0) ?
|
|
||||||
ZEROHASH :
|
|
||||||
key.GetHashCode() | SPECIAL_HASH_BITS;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(nuint key, nuint entryKey)
|
|
||||||
{
|
|
||||||
return key == entryKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<nuint, nuint, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplNuintNoComparer<TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override nuint keyFromEntry(nuint entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,121 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
|
|
||||||
//
|
|
||||||
// Permission is hereby granted, free of charge, to any person
|
|
||||||
// obtaining a copy of this software and associated documentation
|
|
||||||
// files (the "Software"), to deal in the Software without
|
|
||||||
// restriction, including without limitation the rights to use,
|
|
||||||
// copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
// copies of the Software, and to permit persons to whom the
|
|
||||||
// Software is furnished to do so, subject to the following
|
|
||||||
// conditions:
|
|
||||||
//
|
|
||||||
// The above copyright notice and this permission notice shall be
|
|
||||||
// included in all copies or substantial portions of the Software.
|
|
||||||
//
|
|
||||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
||||||
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
||||||
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
|
||||||
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
|
||||||
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
||||||
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
||||||
// OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
//
|
|
||||||
// DictionaryImplRef.cs
|
|
||||||
//
|
|
||||||
|
|
||||||
#nullable disable
|
|
||||||
|
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
|
||||||
using System.Diagnostics;
|
|
||||||
using System.Runtime.CompilerServices;
|
|
||||||
using System.Runtime.InteropServices;
|
|
||||||
using System.Threading;
|
|
||||||
|
|
||||||
namespace System.Collections.Concurrent
|
|
||||||
{
|
|
||||||
internal sealed class DictionaryImplRef<TKey, TKeyStore, TValue>
|
|
||||||
: DictionaryImpl<TKey, TKey, TValue>
|
|
||||||
{
|
|
||||||
internal DictionaryImplRef(int capacity, ConcurrentDictionary<TKey, TValue> topDict)
|
|
||||||
: base(capacity, topDict)
|
|
||||||
{
|
|
||||||
Debug.Assert(!typeof(TKey).IsValueType);
|
|
||||||
}
|
|
||||||
|
|
||||||
internal DictionaryImplRef(int capacity, DictionaryImplRef<TKey, TKeyStore, TValue> other)
|
|
||||||
: base(capacity, other)
|
|
||||||
{
|
|
||||||
Debug.Assert(!typeof(TKey).IsValueType);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForPut(ref TKey entryKey, TKey key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool TryClaimSlotForCopy(ref TKey entryKey, TKey key)
|
|
||||||
{
|
|
||||||
return TryClaimSlot(ref entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
private bool TryClaimSlot(ref TKey entryKey, TKey key)
|
|
||||||
{
|
|
||||||
ref object keyLocation = ref Unsafe.As<TKey, object>(ref entryKey);
|
|
||||||
object entryKeyValue = keyLocation;
|
|
||||||
if (entryKeyValue == null)
|
|
||||||
{
|
|
||||||
entryKeyValue = Interlocked.CompareExchange(ref keyLocation, key, null);
|
|
||||||
if (entryKeyValue == null)
|
|
||||||
{
|
|
||||||
// claimed a new slot
|
|
||||||
this.allocatedSlotCount.Increment();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (object)key == entryKeyValue ||
|
|
||||||
_keyComparer.Equals(key, Unsafe.As<object, TKey>(ref entryKeyValue));
|
|
||||||
}
|
|
||||||
|
|
||||||
// inline the base implementation to devirtualize calls to hash and keyEqual
|
|
||||||
internal override object TryGetValue(TKey key)
|
|
||||||
{
|
|
||||||
return base.TryGetValue(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override int hash(TKey key)
|
|
||||||
{
|
|
||||||
return base.hash(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override bool keyEqual(TKey key, TKey entryKey)
|
|
||||||
{
|
|
||||||
if ((object)key == (object)entryKey)
|
|
||||||
{
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
//NOTE: slots are claimed in two stages - claim a hash, then set a key
|
|
||||||
// it is possible to observe a slot with a null key, but with hash already set
|
|
||||||
// that is not a match since the key is not yet in the table
|
|
||||||
if (entryKey == null)
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return _keyComparer.Equals(entryKey, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override DictionaryImpl<TKey, TKey, TValue> CreateNew(int capacity)
|
|
||||||
{
|
|
||||||
return new DictionaryImplRef<TKey, TKeyStore, TValue>(capacity, this);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected override TKey keyFromEntry(TKey entryKey)
|
|
||||||
{
|
|
||||||
return entryKey;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,171 +0,0 @@
|
||||||
// Copyright (c) 2022 Vladimir Sadov
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
// DictionaryImplUint.cs
//

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;

namespace System.Collections.Concurrent
{
    internal sealed class DictionaryImplUint<TValue>
        : DictionaryImpl<uint, uint, TValue>
    {
        internal DictionaryImplUint(int capacity, ConcurrentDictionary<uint, TValue> topDict)
            : base(capacity, topDict)
        {
        }

        internal DictionaryImplUint(int capacity, DictionaryImplUint<TValue> other)
            : base(capacity, other)
        {
        }

        protected override bool TryClaimSlotForPut(ref uint entryKey, uint key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        protected override bool TryClaimSlotForCopy(ref uint entryKey, uint key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        private bool TryClaimSlot(ref uint entryKey, uint key)
        {
            var entryKeyValue = entryKey;
            //zero keys are claimed via hash
            if (entryKeyValue == 0 & key != 0)
            {
                entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
                if (entryKeyValue == 0)
                {
                    // claimed a new slot
                    this.allocatedSlotCount.Increment();
                    return true;
                }
            }

            return key == entryKeyValue || _keyComparer.Equals(key, entryKey);
        }

        protected override int hash(uint key)
        {
            if (key == 0)
            {
                return ZEROHASH;
            }

            return base.hash(key);
        }

        protected override bool keyEqual(uint key, uint entryKey)
        {
            return key == entryKey || _keyComparer.Equals(key, entryKey);
        }

        protected override DictionaryImpl<uint, uint, TValue> CreateNew(int capacity)
        {
            return new DictionaryImplUint<TValue>(capacity, this);
        }

        protected override uint keyFromEntry(uint entryKey)
        {
            return entryKey;
        }
    }

    internal sealed class DictionaryImplUintNoComparer<TValue>
        : DictionaryImpl<uint, uint, TValue>
    {
        internal DictionaryImplUintNoComparer(int capacity, ConcurrentDictionary<uint, TValue> topDict)
            : base(capacity, topDict)
        {
        }

        internal DictionaryImplUintNoComparer(int capacity, DictionaryImplUintNoComparer<TValue> other)
            : base(capacity, other)
        {
        }

        protected override bool TryClaimSlotForPut(ref uint entryKey, uint key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        protected override bool TryClaimSlotForCopy(ref uint entryKey, uint key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        private bool TryClaimSlot(ref uint entryKey, uint key)
        {
            var entryKeyValue = entryKey;
            //zero keys are claimed via hash
            if (entryKeyValue == 0 & key != 0)
            {
                entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
                if (entryKeyValue == 0)
                {
                    // claimed a new slot
                    this.allocatedSlotCount.Increment();
                    return true;
                }
            }

            return key == entryKeyValue;
        }

        // inline the base implementation to devirtualize calls to hash and keyEqual
        internal override object TryGetValue(uint key)
        {
            return base.TryGetValue(key);
        }

        protected override int hash(uint key)
        {
            return (key == 0) ?
                ZEROHASH :
                (int)key | SPECIAL_HASH_BITS;
        }

        protected override bool keyEqual(uint key, uint entryKey)
        {
            return key == entryKey;
        }

        protected override DictionaryImpl<uint, uint, TValue> CreateNew(int capacity)
        {
            return new DictionaryImplUintNoComparer<TValue>(capacity, this);
        }

        protected override uint keyFromEntry(uint entryKey)
        {
            return entryKey;
        }
    }
}
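Aside (not part of the commit): the TryClaimSlot methods above claim an empty (zero-key) slot with a single interlocked compare-exchange and only count the slot as allocated when the exchange wins. The following standalone sketch illustrates that claiming pattern with hypothetical names; it assumes the unsigned Interlocked.CompareExchange overload available in the runtime this repo targets and is not the dictionary's actual type.

using System;
using System.Threading;

// Minimal illustration of CAS-based slot claiming where a zero key means "empty".
// All names here are illustrative; they are not the DictionaryImpl types above.
internal static class SlotClaimDemo
{
    // Returns true if this caller claimed the slot or it already holds the same key.
    internal static bool TryClaimSlot(ref uint entryKey, uint key)
    {
        uint observed = entryKey;
        if (observed == 0 && key != 0)
        {
            // Publish the key only if the slot is still empty.
            observed = Interlocked.CompareExchange(ref entryKey, key, 0);
            if (observed == 0)
            {
                return true; // claimed a fresh slot
            }
        }

        // Another thread may have claimed it first, possibly with the same key.
        return observed == key;
    }

    internal static void Main()
    {
        uint slot = 0;
        Console.WriteLine(TryClaimSlot(ref slot, 42)); // True: claimed
        Console.WriteLine(TryClaimSlot(ref slot, 42)); // True: same key already present
        Console.WriteLine(TryClaimSlot(ref slot, 7));  // False: slot taken by a different key
    }
}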
@ -1,171 +0,0 @@
// Copyright (c) 2022 Vladimir Sadov
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
// DictionaryImplUlong.cs
//

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;

namespace System.Collections.Concurrent
{
    internal sealed class DictionaryImplUlong<TValue>
        : DictionaryImpl<ulong, ulong, TValue>
    {
        internal DictionaryImplUlong(int capacity, ConcurrentDictionary<ulong, TValue> topDict)
            : base(capacity, topDict)
        {
        }

        internal DictionaryImplUlong(int capacity, DictionaryImplUlong<TValue> other)
            : base(capacity, other)
        {
        }

        protected override bool TryClaimSlotForPut(ref ulong entryKey, ulong key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        protected override bool TryClaimSlotForCopy(ref ulong entryKey, ulong key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        private bool TryClaimSlot(ref ulong entryKey, ulong key)
        {
            var entryKeyValue = entryKey;
            //zero keys are claimed via hash
            if (entryKeyValue == 0 & key != 0)
            {
                entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
                if (entryKeyValue == 0)
                {
                    // claimed a new slot
                    this.allocatedSlotCount.Increment();
                    return true;
                }
            }

            return key == entryKeyValue || _keyComparer.Equals(key, entryKey);
        }

        protected override int hash(ulong key)
        {
            if (key == 0)
            {
                return ZEROHASH;
            }

            return base.hash(key);
        }

        protected override bool keyEqual(ulong key, ulong entryKey)
        {
            return key == entryKey || _keyComparer.Equals(key, entryKey);
        }

        protected override DictionaryImpl<ulong, ulong, TValue> CreateNew(int capacity)
        {
            return new DictionaryImplUlong<TValue>(capacity, this);
        }

        protected override ulong keyFromEntry(ulong entryKey)
        {
            return entryKey;
        }
    }

    internal sealed class DictionaryImplUlongNoComparer<TValue>
        : DictionaryImpl<ulong, ulong, TValue>
    {
        internal DictionaryImplUlongNoComparer(int capacity, ConcurrentDictionary<ulong, TValue> topDict)
            : base(capacity, topDict)
        {
        }

        internal DictionaryImplUlongNoComparer(int capacity, DictionaryImplUlongNoComparer<TValue> other)
            : base(capacity, other)
        {
        }

        protected override bool TryClaimSlotForPut(ref ulong entryKey, ulong key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        protected override bool TryClaimSlotForCopy(ref ulong entryKey, ulong key)
        {
            return TryClaimSlot(ref entryKey, key);
        }

        private bool TryClaimSlot(ref ulong entryKey, ulong key)
        {
            var entryKeyValue = entryKey;
            //zero keys are claimed via hash
            if (entryKeyValue == 0 & key != 0)
            {
                entryKeyValue = Interlocked.CompareExchange(ref entryKey, key, 0);
                if (entryKeyValue == 0)
                {
                    // claimed a new slot
                    this.allocatedSlotCount.Increment();
                    return true;
                }
            }

            return key == entryKeyValue;
        }

        // inline the base implementation to devirtualize calls to hash and keyEqual
        internal override object TryGetValue(ulong key)
        {
            return base.TryGetValue(key);
        }

        protected override int hash(ulong key)
        {
            return (key == 0) ?
                ZEROHASH :
                key.GetHashCode() | SPECIAL_HASH_BITS;
        }

        protected override bool keyEqual(ulong key, ulong entryKey)
        {
            return key == entryKey;
        }

        protected override DictionaryImpl<ulong, ulong, TValue> CreateNew(int capacity)
        {
            return new DictionaryImplUlongNoComparer<TValue>(capacity, this);
        }

        protected override ulong keyFromEntry(ulong entryKey)
        {
            return entryKey;
        }
    }
}
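Aside (not part of the commit): unlike the uint variant, the ulong no-comparer hash above goes through key.GetHashCode() rather than a plain cast, so both 32-bit halves of the key influence the hash before the marker bits are OR-ed in. A hedged sketch of that folding follows; the helper name is illustrative and the equality with GetHashCode is only what current CoreLib happens to do.

using System;

internal static class UlongHashDemo
{
    // Roughly what ulong.GetHashCode does on current CoreLib: XOR the two halves,
    // so keys that differ only in the upper 32 bits still hash differently.
    internal static int FoldHalves(ulong key)
    {
        return (int)key ^ (int)(key >> 32);
    }

    internal static void Main()
    {
        ulong a = 1;
        ulong b = 1UL | (1UL << 40); // same low half, different high half
        Console.WriteLine(FoldHalves(a) == FoldHalves(b));   // False
        Console.WriteLine(a.GetHashCode() == FoldHalves(a)); // typically True on CoreCLR
    }
}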
@ -1,107 +0,0 @@
// Copyright (c) 2022 Vladimir Sadov
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
//
// DictionaryImpl`2.cs
//

#nullable disable

using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.CompilerServices;

namespace System.Collections.Concurrent
{
    internal abstract class DictionaryImpl<TKey, TValue>
        : DictionaryImpl
    {
        internal readonly bool valueIsValueType = typeof(TValue).IsValueType;
        internal IEqualityComparer<TKey> _keyComparer;

        internal DictionaryImpl() { }

        internal abstract void Clear();
        internal abstract int Count { get; }

        internal abstract object TryGetValue(TKey key);
        internal abstract bool PutIfMatch(TKey key, TValue newVal, ref TValue oldValue, ValueMatch match);
        internal abstract bool RemoveIfMatch(TKey key, ref TValue oldValue, ValueMatch match);
        internal abstract TValue GetOrAdd(TKey key, Func<TKey, TValue> valueFactory);

        internal abstract Snapshot GetSnapshot();

        internal abstract class Snapshot
        {
            protected int _idx;
            protected TKey _curKey;
            protected TValue _curValue;

            public abstract int Count { get; }
            public abstract bool MoveNext();
            public abstract void Reset();

            internal DictionaryEntry Entry
            {
                get
                {
                    return new DictionaryEntry(_curKey, _curValue);
                }
            }

            internal KeyValuePair<TKey, TValue> Current
            {
                get
                {
                    return new KeyValuePair<TKey, TValue>(this._curKey, _curValue);
                }
            }
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        protected TValue FromObjectValue(object obj)
        {
            // regular value type
            if (default(TValue) != null)
            {
                return Unsafe.As<Boxed<TValue>>(obj).Value;
            }

            // null
            if (obj == NULLVALUE)
            {
                return default(TValue);
            }

            // ref type
            if (!valueIsValueType)
            {
                return Unsafe.As<object, TValue>(ref obj);
            }

            // nullable
            return (TValue)obj;
        }
    }
}
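Aside (not part of the commit): FromObjectValue above decides, per TValue, whether the stored object is a boxed wrapper, a null sentinel, or the reference itself. The sketch below shows the same idea with ordinary casts instead of Unsafe.As; BoxedDemo and NullSentinel are illustrative stand-ins for the Boxed<TValue> and NULLVALUE members defined elsewhere in the dictionary implementation.

using System;

// Illustrative stand-in for the wrapper used to store non-nullable value types.
internal sealed class BoxedDemo<T>
{
    public T Value;
    public BoxedDemo(T value) { Value = value; }
}

internal static class FromObjectValueDemo
{
    // Assumed sentinel object standing in for the dictionary's NULLVALUE.
    internal static readonly object NullSentinel = new object();

    internal static TValue FromObjectValue<TValue>(object obj)
    {
        // non-nullable value type: values are stored inside a wrapper box
        if (default(TValue) != null)
        {
            return ((BoxedDemo<TValue>)obj).Value;
        }

        // a stored null is represented by a sentinel, so null can mean "empty slot"
        if (ReferenceEquals(obj, NullSentinel))
        {
            return default(TValue);
        }

        // reference types and Nullable<T>: the object is (or unboxes to) the value
        return (TValue)obj;
    }

    internal static void Main()
    {
        object boxed = new BoxedDemo<int>(5);
        Console.WriteLine(FromObjectValue<int>(boxed));                   // 5
        Console.WriteLine(FromObjectValue<string>("hello"));              // hello
        Console.WriteLine(FromObjectValue<string>(NullSentinel) == null); // True
    }
}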
File diff suppressed because it is too large
@ -95,6 +95,7 @@ public class DblArray1
    [SkipOnCoreClr("This test is not compatible with GCStress.", RuntimeTestModes.AnyGCStress)]
    [OuterLoop]
    [ActiveIssue("https://github.com/dotnet/runtime/issues/101284", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsNativeAot))]
    [ActiveIssue("Satori: tests implementation details")]
    public static int TestEntryPoint()
    {
        if (RuntimeInformation.ProcessArchitecture == Architecture.X86)
@ -284,6 +284,7 @@ public class DblArray
    [SkipOnCoreClr("This test is not compatible with GCStress.", RuntimeTestModes.AnyGCStress)]
    [SkipOnMono("Needs triage")]
    [ActiveIssue("https://github.com/dotnet/runtime/issues/101284", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsNativeAot))]
    [ActiveIssue("Satori: tests implementation details")]
    public static int TestEntryPoint()
    {
        if (RuntimeInformation.ProcessArchitecture == Architecture.X86)
@ -145,6 +145,7 @@ public class DblArray3
    [SkipOnMono("Needs triage")]
    [OuterLoop]
    [ActiveIssue("https://github.com/dotnet/runtime/issues/101284", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsNativeAot))]
    [ActiveIssue("Satori: tests implementation details")]
    public static int TestEntryPoint()
    {
        Console.WriteLine(RuntimeInformation.ProcessArchitecture);
@ -23,6 +23,7 @@ public class DblArray4
    [SkipOnCoreClr("This test is not compatible with GCStress.", RuntimeTestModes.AnyGCStress)]
    [OuterLoop]
    [ActiveIssue("https://github.com/dotnet/runtime/issues/101284", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsNativeAot))]
    [ActiveIssue("Satori: tests implementation details")]
    public static int TestEntryPoint()
    {
        if (RuntimeInformation.ProcessArchitecture == Architecture.X86)
@ -100,7 +100,16 @@
    <ExcludeList Include="$(XunitTestBinBase)/GC/API/NoGCRegion/**">
      <Issue>Satori GC apis</Issue>
    </ExcludeList>
    <ExcludeList Include="$(XunitTestBinBase)/baseservices/RuntimeConfiguration/TestConfigTester/*">
      <Issue>Satori GC apis</Issue>
    </ExcludeList>
    <ExcludeList Include="$(XunitTestBinBase)/GC/API/Refresh/**">
      <Issue>Satori GC apis</Issue>
    </ExcludeList>
    <ExcludeList Include="$(XunitTestBinBase)/GC/API/GetGeneration/*">
      <Issue>Satori GC apis</Issue>
    </ExcludeList>

    <ExcludeList Include="$(XunitTestBinBase)/JIT/Methodical/doublearray/**">
      <Issue>Satori GC apis, LOH expectations</Issue>
    </ExcludeList>
@ -130,10 +139,15 @@
    <ExcludeList Include="$(XunitTestBinBase)/profiler/gc/gcallocate/*">
      <Issue>Satori profiler tracing</Issue>
    </ExcludeList>
    <ExcludeList Include="$(XunitTestBinBase)/profiler/gcheapenumeration/**">
      <Issue>Satori profiler tracing</Issue>
    </ExcludeList>
    <ExcludeList Include="$(XunitTestBinBase)/profiler/handles/handles/*">
      <Issue>Satori profiler tracing</Issue>
    </ExcludeList>
    <ExcludeList Include="$(XunitTestBinBase)/profiler/unittest/enumthreads/*">
      <Issue>Satori profiler tracing</Issue>
    </ExcludeList>

    <ExcludeList Include="$(XunitTestBinBase)/readytorun/coreroot_determinism/coreroot_determinism/*">
      <Issue>Satori Unknown</Issue>