
JIT: produce consistent flow graph when producing or consuming profile data (#85860)

Always try to merge "branch to next" blocks when building the initial flow graph
if BBINSTR or BBOPT is set.

Fixes #85856.
Andy Ayers 2023-05-06 07:53:10 -07:00 committed by GitHub
parent 798f9ef5eb
commit ae1be12478
Signed by: github
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 16 additions and 10 deletions
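The substance of the change: a branch whose target is the very next instruction (jmpDist == 0) is a no-op, and the importer used to fold it away only while importing an inlinee. As a result, the instrumented (BBINSTR) compilation of a method and the optimized (BBOPT) compilation that later consumes its profile could build differently shaped flow graphs for the same IL, and the profile data would not line up (#85856). The fix keys the fold on whether the JIT is producing or consuming profile data instead. A minimal sketch of the before/after predicate; the Opts struct and function names below are illustrative stand-ins, not the JIT's real code:

// Sketch only: "Opts" and these free functions are illustrative stand-ins for
// Compiler::opts and the JIT_FLAG_BBINSTR / JIT_FLAG_BBOPT jit flags.
struct Opts
{
    bool bbInstr; // producing profile data (instrumentation)
    bool bbOpt;   // optimizing, possibly consuming profile data
};

// Old behavior: fold "branch to next" only while importing an inlinee.
static bool FoldBranchToNext_Old(bool isInlinee, int jmpDist)
{
    return isInlinee && (jmpDist == 0);
}

// New behavior: fold whenever profile data is being produced or consumed, so
// every compilation of the method agrees on the resulting block structure.
static bool FoldBranchToNext_New(const Opts& opts, int jmpDist)
{
    return (jmpDist == 0) && (opts.bbInstr || opts.bbOpt);
}

In the actual change the check is the new Compiler::Options::IsInstrumentedOrOptimized() accessor added in compiler.h below.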

@@ -9471,9 +9471,14 @@ public:
         return jitFlags->IsSet(JitFlags::JIT_FLAG_BBINSTR);
     }
 
-    bool IsInstrumentedOptimized() const
+    bool IsInstrumentedAndOptimized() const
     {
-        return IsInstrumented() && jitFlags->IsSet(JitFlags::JIT_FLAG_TIER1);
+        return IsInstrumented() && jitFlags->IsSet(JitFlags::JIT_FLAG_BBOPT);
+    }
+
+    bool IsInstrumentedOrOptimized() const
+    {
+        return IsInstrumented() || jitFlags->IsSet(JitFlags::JIT_FLAG_BBOPT);
     }
 
     // true if we should use the PINVOKE_{BEGIN,END} helpers instead of generating
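Two things happen in this hunk: IsInstrumentedOptimized is renamed to IsInstrumentedAndOptimized and now tests JIT_FLAG_BBOPT rather than JIT_FLAG_TIER1, and a new IsInstrumentedOrOptimized accessor is added that is true whenever either flag is set. A self-contained sketch of how the predicates relate; the Flags struct and main() are illustrative only (the real accessors read jitFlags):

#include <cstdio>

// Illustrative stand-ins for JIT_FLAG_BBINSTR and JIT_FLAG_BBOPT.
struct Flags
{
    bool bbInstr; // instrumenting: producing profile data
    bool bbOpt;   // optimizing: possibly consuming profile data
};

static bool IsInstrumented(const Flags& f)             { return f.bbInstr; }
static bool IsInstrumentedAndOptimized(const Flags& f) { return f.bbInstr && f.bbOpt; }
static bool IsInstrumentedOrOptimized(const Flags& f)  { return f.bbInstr || f.bbOpt; }

int main()
{
    const Flags instrumentedOnly{true, false}; // e.g. an instrumented Tier0 body
    const Flags optimizedOnly{false, true};    // e.g. an optimized body consuming a profile

    std::printf("instrumented only: inst=%d and=%d or=%d\n", IsInstrumented(instrumentedOnly),
                IsInstrumentedAndOptimized(instrumentedOnly), IsInstrumentedOrOptimized(instrumentedOnly));
    std::printf("optimized only:    inst=%d and=%d or=%d\n", IsInstrumented(optimizedOnly),
                IsInstrumentedAndOptimized(optimizedOnly), IsInstrumentedOrOptimized(optimizedOnly));
    return 0;
}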

@@ -1824,8 +1824,9 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
                 // Compute jump target address
                 signed jmpDist = (sz == 1) ? getI1LittleEndian(codeAddr) : getI4LittleEndian(codeAddr);
 
-                if (compIsForInlining() && jmpDist == 0 &&
-                    (opcode == CEE_LEAVE || opcode == CEE_LEAVE_S || opcode == CEE_BR || opcode == CEE_BR_S))
+                if ((jmpDist == 0) &&
+                    (opcode == CEE_LEAVE || opcode == CEE_LEAVE_S || opcode == CEE_BR || opcode == CEE_BR_S) &&
+                    opts.IsInstrumentedOrOptimized())
                 {
                     break; /* NOP */
                 }
@@ -2974,7 +2975,7 @@ unsigned Compiler::fgMakeBasicBlocks(const BYTE* codeAddr, IL_OFFSET codeSize, F
                 jmpDist = (sz == 1) ? getI1LittleEndian(codeAddr) : getI4LittleEndian(codeAddr);
 
-                if (compIsForInlining() && jmpDist == 0 && (opcode == CEE_BR || opcode == CEE_BR_S))
+                if ((jmpDist == 0) && (opcode == CEE_BR || opcode == CEE_BR_S) && opts.IsInstrumentedOrOptimized())
                 {
                     continue; /* NOP */
                 }
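These two sites matter because fgFindJumpTargets marks which IL offsets must start a basic block and fgMakeBasicBlocks then materializes the blocks; if one compilation treats a branch-to-next as a NOP and another keeps it, the two flow graphs expose different sets of edges and an edge-based profile cannot be transferred between them. A toy, non-JIT illustration of that mismatch (block boundaries are just IL offsets here):

#include <cstdio>
#include <set>
#include <utility>

// Toy model: a "flow graph" is just a set of (fromOffset, toOffset) edges.
// Three IL offsets 0 -> 4 -> 8, where offset 4 is a branch whose target is
// the next instruction (a no-op branch).
using Edges = std::set<std::pair<int, int>>;

static Edges BuildEdges(bool foldBranchToNext)
{
    Edges e;
    if (foldBranchToNext)
    {
        // Branch folded: offsets 0..8 stay in one block; a single edge out.
        e.insert({0, 8});
    }
    else
    {
        // Branch kept: the block splits at the branch, adding an extra edge.
        e.insert({0, 4});
        e.insert({4, 8});
    }
    return e;
}

int main()
{
    // If the instrumented and the optimized compilation disagree on folding,
    // the edge sets (and therefore the profile schema) do not match.
    const bool same = (BuildEdges(true) == BuildEdges(false));
    std::printf("edge sets match: %s\n", same ? "yes" : "no");
    return 0;
}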

@@ -436,7 +436,7 @@ void BlockCountInstrumentor::RelocateProbes()
 {
     // We only see such blocks when optimizing. They are flagged by the importer.
     //
-    if (!m_comp->opts.IsInstrumentedOptimized() || ((m_comp->optMethodFlags & OMF_HAS_TAILCALL_SUCCESSOR) == 0))
+    if (!m_comp->opts.IsInstrumentedAndOptimized() || ((m_comp->optMethodFlags & OMF_HAS_TAILCALL_SUCCESSOR) == 0))
     {
         // No problematic blocks to worry about.
         //
@@ -1616,7 +1616,7 @@ void EfficientEdgeCountInstrumentor::RelocateProbes()
 {
     // We only see such blocks when optimizing. They are flagged by the importer.
     //
-    if (!m_comp->opts.IsInstrumentedOptimized() || ((m_comp->optMethodFlags & OMF_HAS_TAILCALL_SUCCESSOR) == 0))
+    if (!m_comp->opts.IsInstrumentedAndOptimized() || ((m_comp->optMethodFlags & OMF_HAS_TAILCALL_SUCCESSOR) == 0))
     {
         // No problematic blocks to worry about.
         //

@@ -7476,7 +7476,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
             case CEE_BR_S:
                 jmpDist = (sz == 1) ? getI1LittleEndian(codeAddr) : getI4LittleEndian(codeAddr);
 
-                if (compIsForInlining() && jmpDist == 0)
+                if ((jmpDist == 0) && opts.IsInstrumentedOrOptimized())
                 {
                     break; /* NOP */
                 }

@@ -1282,7 +1282,7 @@ DONE:
     //   have to check for anything that might introduce a recursive tail call.
     // * We only instrument root method blocks in OSR methods,
     //
-    if ((opts.IsInstrumentedOptimized() || opts.IsOSR()) && !compIsForInlining())
+    if ((opts.IsInstrumentedAndOptimized() || opts.IsOSR()) && !compIsForInlining())
     {
         // If a root method tail call candidate block is not a BBJ_RETURN, it should have a unique
         // BBJ_RETURN successor. Mark that successor so we can handle it specially during profile
@@ -7201,7 +7201,7 @@ bool Compiler::impConsiderCallProbe(GenTreeCall* call, IL_OFFSET ilOffset)
         return false;
     }
 
-    assert(opts.OptimizationDisabled() || opts.IsInstrumentedOptimized());
+    assert(opts.OptimizationDisabled() || opts.IsInstrumentedAndOptimized());
     assert(!compIsForInlining());
 
     // During importation, optionally flag this block as one that