diff --git a/src/coreclr/gc/env/gctoeeinterface.standalone.inl b/src/coreclr/gc/env/gctoeeinterface.standalone.inl
index 45786b01d3e..e8dc20e2fd4 100644
--- a/src/coreclr/gc/env/gctoeeinterface.standalone.inl
+++ b/src/coreclr/gc/env/gctoeeinterface.standalone.inl
@@ -19,11 +19,21 @@ namespace standalone
::GCToEEInterface::RestartEE(bFinishedGC);
}
+ void GcScanCurrentStackRoots(promote_func* fn, ScanContext* sc)
+ {
+ ::GCToEEInterface::GcScanCurrentStackRoots(fn, sc);
+ }
+
void GcScanRoots(promote_func* fn, int condemned, int max_gen, ScanContext* sc)
{
::GCToEEInterface::GcScanRoots(fn, condemned, max_gen, sc);
}
+ void GcPoll()
+ {
+ ::GCToEEInterface::GcPoll();
+ }
+
void GcStartWork(int condemned, int max_gen)
{
::GCToEEInterface::GcStartWork(condemned, max_gen);
diff --git a/src/coreclr/gc/gc.h b/src/coreclr/gc/gc.h
index 4087d1fac6b..aa7004ab88e 100644
--- a/src/coreclr/gc/gc.h
+++ b/src/coreclr/gc/gc.h
@@ -338,7 +338,7 @@ inline bool IsServerHeap()
{
#ifdef FEATURE_SVR_GC
assert(g_gc_heap_type != GC_HEAP_INVALID);
- return g_gc_heap_type == GC_HEAP_SVR;
+ return g_gc_heap_type >= GC_HEAP_SVR;
#else // FEATURE_SVR_GC
return false;
#endif // FEATURE_SVR_GC
diff --git a/src/coreclr/gc/gchandletable.cpp b/src/coreclr/gc/gchandletable.cpp
index ef0bbf8c93a..5a2bec144d1 100644
--- a/src/coreclr/gc/gchandletable.cpp
+++ b/src/coreclr/gc/gchandletable.cpp
@@ -73,7 +73,11 @@ HHANDLETABLE GCHandleStore::GetTable()
OBJECTHANDLE GCHandleStore::CreateHandleOfType(Object* object, HandleType type)
{
+#ifdef FEATURE_SATORI_GC
+ HHANDLETABLE handletable = _underlyingBucket.pTable[GetCurrentThreadHomeHeapNumber()];
+#else
HHANDLETABLE handletable = GetTable();
+#endif
return ::HndCreateHandle(handletable, type, ObjectToOBJECTREF(object));
}
diff --git a/src/coreclr/gc/handletablescan.cpp b/src/coreclr/gc/handletablescan.cpp
index e5b27da8e62..34200e794db 100644
--- a/src/coreclr/gc/handletablescan.cpp
+++ b/src/coreclr/gc/handletablescan.cpp
@@ -787,6 +787,10 @@ void BlockResetAgeMapForBlocksWorker(uint32_t *pdwGen, uint32_t dwClumpMask, Sca
STATIC_CONTRACT_GC_NOTRIGGER;
STATIC_CONTRACT_MODE_COOPERATIVE;
+#ifdef FEATURE_SATORI_GC
+ __UNREACHABLE();
+#endif
+
// fetch the table segment we are working in
TableSegment *pSegment = pInfo->pCurrentSegment;
diff --git a/src/coreclr/gc/objecthandle.cpp b/src/coreclr/gc/objecthandle.cpp
index 1b6df1b5f29..a8885b101b1 100644
--- a/src/coreclr/gc/objecthandle.cpp
+++ b/src/coreclr/gc/objecthandle.cpp
@@ -1326,11 +1326,13 @@ bool Ref_ScanDependentHandlesForPromotion(DhContext *pDhContext)
if (walk->pBuckets[i] != NULL)
{
int uCPUindex = getSlotNumber(pDhContext->m_pScanContext);
- // int uCPUlimit = getNumberOfSlots();
- // assert(uCPUlimit > 0);
- int uCPUstep = getThreadCount(pDhContext->m_pScanContext);
HHANDLETABLE* pTable = walk->pBuckets[i]->pTable;
- // for ( ; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+#if !defined(FEATURE_SATORI_GC)
+ int uCPUlimit = getNumberOfSlots();
+ assert(uCPUlimit > 0);
+ int uCPUstep = getThreadCount(pDhContext->m_pScanContext);
+ for (; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+#endif
{
HHANDLETABLE hTable = pTable[uCPUindex];
if (hTable)
@@ -1411,11 +1413,13 @@ void Ref_ScanWeakInteriorPointersForRelocation(uint32_t condemned, uint32_t maxg
if (walk->pBuckets[i] != NULL)
{
int uCPUindex = getSlotNumber(sc);
+ HHANDLETABLE* pTable = walk->pBuckets[i]->pTable;
+#if !defined(FEATURE_SATORI_GC)
int uCPUlimit = getNumberOfSlots();
assert(uCPUlimit > 0);
int uCPUstep = getThreadCount(sc);
- HHANDLETABLE* pTable = walk->pBuckets[i]->pTable;
- for ( ; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+ for (; uCPUindex < uCPUlimit; uCPUindex += uCPUstep)
+#endif
{
HHANDLETABLE hTable = pTable[uCPUindex];
if (hTable)
diff --git a/src/coreclr/gc/satori/SatoriAllocator.h b/src/coreclr/gc/satori/SatoriAllocator.h
index f8ccb7a5363..24c1ef71161 100644
--- a/src/coreclr/gc/satori/SatoriAllocator.h
+++ b/src/coreclr/gc/satori/SatoriAllocator.h
@@ -125,7 +125,7 @@ private:
#else
BitScanReverse(&highestBit, value);
#endif
- return min(highestBit - Satori::REGION_BITS, Satori::ALLOCATOR_BUCKET_COUNT - 1);
+ return min((int)highestBit - Satori::REGION_BITS, Satori::ALLOCATOR_BUCKET_COUNT - 1);
}
};
diff --git a/src/coreclr/gc/satori/SatoriGC.cpp b/src/coreclr/gc/satori/SatoriGC.cpp
index e2e40de7241..2888fd82470 100644
--- a/src/coreclr/gc/satori/SatoriGC.cpp
+++ b/src/coreclr/gc/satori/SatoriGC.cpp
@@ -891,3 +891,12 @@ uint64_t SatoriGC::GetGenerationBudget(int generation)
// avoid IDE0060: Remove unused parameter 'generation'
return -1 + 0 * generation;
}
+
+size_t SatoriGC::GetLOHThreshold()
+{
+ return Satori::LARGE_OBJECT_THRESHOLD;
+}
+
+void SatoriGC::DiagWalkHeapWithACHandling(walk_fn fn, void *context, int gen_number, bool walk_large_object_heap_p)
+{
+}
diff --git a/src/coreclr/gc/satori/SatoriGC.h b/src/coreclr/gc/satori/SatoriGC.h
index a6b23eb0668..b1813b25bf9 100644
--- a/src/coreclr/gc/satori/SatoriGC.h
+++ b/src/coreclr/gc/satori/SatoriGC.h
@@ -165,6 +165,10 @@ public:
// Inherited via IGCHeapInternal
uint64_t GetGenerationBudget(int generation) override;
+
+ // Inherited via IGCHeapInternal
+ size_t GetLOHThreshold() override;
+ void DiagWalkHeapWithACHandling(walk_fn fn, void *context, int gen_number, bool walk_large_object_heap_p) override;
};
#endif
diff --git a/src/coreclr/gc/satori/SatoriObject.cpp b/src/coreclr/gc/satori/SatoriObject.cpp
index 9905607961e..f0431b60415 100644
--- a/src/coreclr/gc/satori/SatoriObject.cpp
+++ b/src/coreclr/gc/satori/SatoriObject.cpp
@@ -91,7 +91,7 @@ void SatoriObject::DirtyCardsForContent()
{
_ASSERTE(IsMarked());
MethodTable* mt = RawGetMethodTable();
- if (mt->ContainsPointersOrCollectible())
+ if (mt->ContainsGCPointersOrCollectible())
{
SatoriPage* page = ContainingRegion()->m_containingPage;
// if dealing with a collectible type, include MT in the dirty range
diff --git a/src/coreclr/gc/satori/SatoriObject.inl b/src/coreclr/gc/satori/SatoriObject.inl
index 49d9cc9be3e..35a6135f69d 100644
--- a/src/coreclr/gc/satori/SatoriObject.inl
+++ b/src/coreclr/gc/satori/SatoriObject.inl
@@ -282,7 +282,7 @@ inline void SatoriObject::ForEachObjectRef(F lambda, bool includeCollectibleAllo
lambda((SatoriObject**)&loaderAllocator);
}
- if (!mt->ContainsPointers())
+ if (!mt->ContainsGCPointers())
{
return;
}
@@ -354,7 +354,7 @@ inline void SatoriObject::ForEachObjectRef(F lambda, size_t size, bool includeCo
lambda((SatoriObject**)&loaderAllocator);
}
- if (!mt->ContainsPointers())
+ if (!mt->ContainsGCPointers())
{
return;
}
@@ -421,7 +421,7 @@ inline void SatoriObject::ForEachObjectRef(F lambda, size_t start, size_t end)
lambda((SatoriObject**)&loaderAllocator);
}
- if (!mt->ContainsPointers())
+ if (!mt->ContainsGCPointers())
{
return;
}
diff --git a/src/coreclr/gc/satori/SatoriRecycler.cpp b/src/coreclr/gc/satori/SatoriRecycler.cpp
index 57959b885b0..e71e2f2b48b 100644
--- a/src/coreclr/gc/satori/SatoriRecycler.cpp
+++ b/src/coreclr/gc/satori/SatoriRecycler.cpp
@@ -1073,10 +1073,10 @@ void SatoriRecycler::AdjustHeuristics()
// we trigger GC when ephemeral size grows to SatoriUtil::Gen1Target(),
// the budget is the diff to reach that
- size_t newGen1Budget = max(MIN_GEN1_BUDGET, ephemeralOccupancy * (SatoriUtil::Gen1Target() - 100) / 100);
+ size_t newGen1Budget = max((size_t)MIN_GEN1_BUDGET, ephemeralOccupancy * (SatoriUtil::Gen1Target() - 100) / 100);
// alternatively we allow gen1 allocs up to 1/8 of total limit.
- size_t altNewGen1Budget = max(MIN_GEN1_BUDGET, m_totalLimit / 8);
+ size_t altNewGen1Budget = max((size_t)MIN_GEN1_BUDGET, m_totalLimit / 8);
// take max of both budgets
newGen1Budget = max(newGen1Budget, altNewGen1Budget);
@@ -2114,7 +2114,7 @@ bool SatoriRecycler::DrainMarkQueuesConcurrent(SatoriWorkChunk* srcChunk, int64_
void SatoriRecycler::ScheduleMarkAsChildRanges(SatoriObject* o)
{
- if (o->RawGetMethodTable()->ContainsPointersOrCollectible())
+ if (o->RawGetMethodTable()->ContainsGCPointersOrCollectible())
{
size_t start = o->Start();
size_t remains = o->Size();
@@ -2146,7 +2146,7 @@ void SatoriRecycler::ScheduleMarkAsChildRanges(SatoriObject* o)
bool SatoriRecycler::ScheduleUpdateAsChildRanges(SatoriObject* o)
{
- if (o->RawGetMethodTable()->ContainsPointers())
+ if (o->RawGetMethodTable()->ContainsGCPointers())
{
size_t start = o->Start() + sizeof(size_t);
size_t remains = o->Size() - sizeof(size_t);
diff --git a/src/coreclr/gc/satori/SatoriRegion.cpp b/src/coreclr/gc/satori/SatoriRegion.cpp
index 59b92b52197..38799d9e041 100644
--- a/src/coreclr/gc/satori/SatoriRegion.cpp
+++ b/src/coreclr/gc/satori/SatoriRegion.cpp
@@ -912,7 +912,7 @@ inline void SatoriRegion::PushToMarkStackIfHasPointers(SatoriObject* obj)
_ASSERTE(obj->SameRegion(this));
_ASSERTE(!obj->GetNextInLocalMarkStack());
- if (obj->RawGetMethodTable()->ContainsPointersOrCollectible())
+ if (obj->RawGetMethodTable()->ContainsGCPointersOrCollectible())
{
obj->SetNextInLocalMarkStack(m_markStack);
_ASSERTE(m_markStack == obj->GetNextInLocalMarkStack());
diff --git a/src/coreclr/gc/unix/events.cpp b/src/coreclr/gc/unix/events.cpp
index f384d15fbcd..3232bbbced5 100644
--- a/src/coreclr/gc/unix/events.cpp
+++ b/src/coreclr/gc/unix/events.cpp
@@ -308,7 +308,7 @@ bool GCEvent::CreateOSManualEventNoThrow(bool initialState)
}
#define _INC_PTHREADS
-#include "..\satori\SatoriGate.h"
+#include "../satori/SatoriGate.h"
#if defined(TARGET_LINUX)
diff --git a/src/coreclr/jit/targetamd64.h b/src/coreclr/jit/targetamd64.h
index 7fc3cc544cb..7277d1af376 100644
--- a/src/coreclr/jit/targetamd64.h
+++ b/src/coreclr/jit/targetamd64.h
@@ -204,11 +204,12 @@
// Registers no longer containing GC pointers after CORINFO_HELP_ASSIGN_REF and CORINFO_HELP_CHECKED_ASSIGN_REF.
#define RBM_CALLEE_GCTRASH_WRITEBARRIER RBM_CALLEE_TRASH_NOGC
- // Registers no longer containing GC pointers after CORINFO_HELP_ASSIGN_BYREF.
- #define RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF (RBM_RAX | RBM_RCX)
-
+ // TODO(Satori): consider tightening this kill set to the precise registers.
// Registers killed by CORINFO_HELP_ASSIGN_BYREF.
- #define RBM_CALLEE_TRASH_WRITEBARRIER_BYREF (RBM_RSI | RBM_RDI | RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF)
+ #define RBM_CALLEE_TRASH_WRITEBARRIER_BYREF (RBM_RSI | RBM_RDI | RBM_CALLEE_TRASH_NOGC)
+
+ // Registers no longer containing GC pointers after CORINFO_HELP_ASSIGN_BYREF.
+ #define RBM_CALLEE_GCTRASH_WRITEBARRIER_BYREF (RBM_CALLEE_TRASH_NOGC & ~(RBM_RDI | RBM_RSI))
// We have two register classifications
// * callee trash: aka volatile or caller saved
diff --git a/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets b/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
index 356c402446c..efd3cfddaf2 100644
--- a/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
+++ b/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
@@ -245,7 +245,6 @@ The .NET Foundation licenses this file to you under the MIT license.
-