From aecae2c3853ea793ede98906320312ca6c199ec1 Mon Sep 17 00:00:00 2001
From: Aman Khalid
Date: Tue, 7 Jan 2025 17:00:14 +0000
Subject: [PATCH] JIT: Enable profile consistency checking up to morph
 (#111047)

Part of #107749.
---
 src/coreclr/jit/compiler.cpp  | 10 +++---
 src/coreclr/jit/fgehopt.cpp   | 26 ++++++++++++++-
 src/coreclr/jit/fgopt.cpp     | 60 ++++++++++++++++-------------
 src/coreclr/jit/fgprofile.cpp |  3 +-
 src/coreclr/jit/morph.cpp     | 58 ++++++---------------------------
 5 files changed, 68 insertions(+), 89 deletions(-)

diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp
index d3c9f59811b18..52975e90bf885 100644
--- a/src/coreclr/jit/compiler.cpp
+++ b/src/coreclr/jit/compiler.cpp
@@ -4711,11 +4711,6 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
     //
     DoPhase(this, PHASE_CLONE_FINALLY, &Compiler::fgCloneFinally);
 
-    // Drop back to just checking profile likelihoods.
-    //
-    activePhaseChecks &= ~PhaseChecks::CHECK_PROFILE;
-    activePhaseChecks |= PhaseChecks::CHECK_LIKELIHOODS;
-
     // Do some flow-related optimizations
     //
     if (opts.OptimizationEnabled())
@@ -4784,6 +4779,11 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
     //
     DoPhase(this, PHASE_MORPH_IMPBYREF, &Compiler::fgRetypeImplicitByRefArgs);
 
+    // Drop back to just checking profile likelihoods.
+    //
+    activePhaseChecks &= ~PhaseChecks::CHECK_PROFILE;
+    activePhaseChecks |= PhaseChecks::CHECK_LIKELIHOODS;
+
 #ifdef DEBUG
     // Now that locals have address-taken and implicit byref marked, we can safely apply stress.
     lvaStressLclFld();
diff --git a/src/coreclr/jit/fgehopt.cpp b/src/coreclr/jit/fgehopt.cpp
index 12d91d7e12749..0d620fd1821a9 100644
--- a/src/coreclr/jit/fgehopt.cpp
+++ b/src/coreclr/jit/fgehopt.cpp
@@ -2410,6 +2410,7 @@ PhaseStatus Compiler::fgTailMergeThrows()
     }
 
     JITDUMP("\n*** found %d merge candidates, rewriting flow\n\n", numCandidates);
+    bool modifiedProfile = false;
 
     // Second pass.
     //
@@ -2418,14 +2419,37 @@ PhaseStatus Compiler::fgTailMergeThrows()
     {
         BasicBlock* const nonCanonicalBlock = iter->GetKey();
         BasicBlock* const canonicalBlock    = iter->GetValue();
+        weight_t          removedWeight     = BB_ZERO_WEIGHT;
 
         // Walk pred list of the non canonical block, updating flow to target
         // the canonical block instead.
-        for (BasicBlock* const predBlock : nonCanonicalBlock->PredBlocksEditing())
+        for (FlowEdge* const predEdge : nonCanonicalBlock->PredEdgesEditing())
         {
+            removedWeight += predEdge->getLikelyWeight();
+            BasicBlock* const predBlock = predEdge->getSourceBlock();
             JITDUMP("*** " FMT_BB " now branching to " FMT_BB "\n", predBlock->bbNum, canonicalBlock->bbNum);
             fgReplaceJumpTarget(predBlock, nonCanonicalBlock, canonicalBlock);
         }
+
+        if (canonicalBlock->hasProfileWeight())
+        {
+            canonicalBlock->setBBProfileWeight(canonicalBlock->bbWeight + removedWeight);
+            modifiedProfile = true;
+
+            // Don't bother updating flow into nonCanonicalBlock, since it is now unreachable
+        }
+    }
+
+    // In practice, when we have true profile data, we can repair it locally above, since the no-return
+    // calls mean that there is no contribution from the throw blocks to any of their successors.
+    // However, these blocks won't be morphed into BBJ_THROW blocks until later,
+    // so mark profile data as inconsistent for now.
+    if (modifiedProfile)
+    {
+        JITDUMP(
+            "fgTailMergeThrows: Modified flow into no-return blocks that still have successors. Data %s inconsistent.\n",
+            fgPgoConsistent ? "is now" : "was already");
+        fgPgoConsistent = false;
+    }
 
     // Update the count of noreturn call sites
diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp
index deb6c0f48d39d..10d381c0caeaa 100644
--- a/src/coreclr/jit/fgopt.cpp
+++ b/src/coreclr/jit/fgopt.cpp
@@ -1317,30 +1317,7 @@ bool Compiler::fgOptimizeBranchToEmptyUnconditional(BasicBlock* block, BasicBloc
     }
 #endif // DEBUG
 
-    //
-    // When we optimize a branch to branch we need to update the profile weight
-    // of bDest by subtracting out the weight of the path that is being optimized.
-    //
-    if (bDest->hasProfileWeight())
-    {
-        FlowEdge* const edge = fgGetPredForBlock(bDest, block);
-        noway_assert(edge != nullptr);
-
-        const weight_t edgeWeight = edge->getLikelyWeight();
-
-        //
-        // Update the bDest->bbWeight
-        //
-        if (bDest->bbWeight > edgeWeight)
-        {
-            bDest->bbWeight -= edgeWeight;
-        }
-        else
-        {
-            bDest->bbWeight = BB_ZERO_WEIGHT;
-            bDest->SetFlags(BBF_RUN_RARELY); // Set the RarelyRun flag
-        }
-    }
+    weight_t removedWeight;
 
     // Optimize the JUMP to empty unconditional JUMP to go to the new target
     switch (block->GetKind())
@@ -1348,6 +1325,7 @@ bool Compiler::fgOptimizeBranchToEmptyUnconditional(BasicBlock* block, BasicBloc
         case BBJ_ALWAYS:
         case BBJ_CALLFINALLYRET:
         {
+            removedWeight = block->bbWeight;
            fgRedirectTargetEdge(block, bDest->GetTarget());
            break;
         }

         case BBJ_COND:
            if (block->TrueTargetIs(bDest))
            {
                assert(!block->FalseTargetIs(bDest));
+                removedWeight = block->GetTrueEdge()->getLikelyWeight();
                fgRedirectTrueEdge(block, bDest->GetTarget());
            }
            else
            {
                assert(block->FalseTargetIs(bDest));
+                removedWeight = block->GetFalseEdge()->getLikelyWeight();
                fgRedirectFalseEdge(block, bDest->GetTarget());
            }
            break;

         default:
            unreached();
     }

+    //
+    // When we optimize a branch to branch we need to update the profile weight
+    // of bDest by subtracting out the weight of the path that is being optimized.
+    //
+    if (bDest->hasProfileWeight())
+    {
+        bDest->setBBProfileWeight(max(0.0, bDest->bbWeight - removedWeight));
+    }
+
     return true;
    }
    return false;
@@ -1628,17 +1617,10 @@ bool Compiler::fgOptimizeSwitchBranches(BasicBlock* block)
             //
             FlowEdge* const oldEdge = *jmpTab;
 
-            if (fgIsUsingProfileWeights() && bDest->hasProfileWeight())
+            if (bDest->hasProfileWeight())
             {
                 weight_t const branchThroughWeight = oldEdge->getLikelyWeight();
-                if (bDest->bbWeight > branchThroughWeight)
-                {
-                    bDest->bbWeight -= branchThroughWeight;
-                }
-                else
-                {
-                    bDest->bbSetRunRarely();
-                }
+                bDest->setBBProfileWeight(max(0.0, bDest->bbWeight - branchThroughWeight));
             }
 
             // Update the switch jump table
@@ -1676,6 +1658,11 @@ bool Compiler::fgOptimizeSwitchBranches(BasicBlock* block)
     {
         // Invalidate the set of unique targets for block, since we modified the targets
         fgInvalidateSwitchDescMapEntry(block);
+
+        JITDUMP(
+            "fgOptimizeSwitchBranches: Optimized switch flow. Profile needs to be re-propagated. Data %s inconsistent.\n",
+            fgPgoConsistent ? "is now" : "was already");
+        fgPgoConsistent = false;
     }
 
     Statement* switchStmt = nullptr;
@@ -6632,6 +6619,13 @@ PhaseStatus Compiler::fgHeadTailMerge(bool early)
             FlowEdge* const newEdge = fgAddRefPred(crossJumpTarget, predBlock);
             predBlock->SetKindAndTargetEdge(BBJ_ALWAYS, newEdge);
         }
+
+        // For tail merge we have a common successor of predBlock and
+        // crossJumpTarget, so the profile update can be done locally.
+        if (crossJumpTarget->hasProfileWeight())
+        {
+            crossJumpTarget->setBBProfileWeight(crossJumpTarget->bbWeight + predBlock->bbWeight);
+        }
     }
 
     // We changed things
diff --git a/src/coreclr/jit/fgprofile.cpp b/src/coreclr/jit/fgprofile.cpp
index 3349f2ff1a320..830c8fe0ce777 100644
--- a/src/coreclr/jit/fgprofile.cpp
+++ b/src/coreclr/jit/fgprofile.cpp
@@ -5119,7 +5119,8 @@ void Compiler::fgRepairProfileCondToUncond(BasicBlock* block,
 
     // If profile weights are consistent, expect at worst a slight underflow.
     //
-    if (fgPgoConsistent && (alternateNewWeight < 0.0))
+    const bool checkProfileConsistency = hasFlag(activePhaseChecks, PhaseChecks::CHECK_PROFILE);
+    if (checkProfileConsistency && fgPgoConsistent && (alternateNewWeight < 0.0))
     {
         assert(fgProfileWeightsEqual(alternateNewWeight, 0.0));
     }
diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index 7f6cd2cb3af97..8f42dd8743ce4 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -12658,63 +12658,23 @@ Compiler::FoldResult Compiler::fgFoldConditional(BasicBlock* block)
             fgRemoveStmt(block, lastStmt);
             result = FoldResult::FOLD_REMOVED_LAST_STMT;
         }
-        // block is a BBJ_COND that we are folding the conditional for.
-        // bTaken is the path that will always be taken from block.
-        // bNotTaken is the path that will never be taken from block.
-        //
-        BasicBlock* bTaken;
-        BasicBlock* bNotTaken;
-        FlowEdge*   edgeTaken;
+
+        FlowEdge *retainedEdge, *removedEdge;
 
         if (cond->AsIntCon()->gtIconVal != 0)
         {
-            // JTRUE 1 - transform the basic block into a BBJ_ALWAYS
-            bTaken    = block->GetTrueTarget();
-            bNotTaken = block->GetFalseTarget();
-
-            // Remove 'block' from the predecessor list of 'bNotTaken' */
-            fgRemoveRefPred(block->GetFalseEdge());
-
-            edgeTaken = block->GetTrueEdge();
-            block->SetKindAndTargetEdge(BBJ_ALWAYS, edgeTaken);
+            retainedEdge = block->GetTrueEdge();
+            removedEdge  = block->GetFalseEdge();
         }
         else
         {
-            // JTRUE 0 - transform the basic block into a BBJ_ALWAYS
-            bTaken    = block->GetFalseTarget();
-            bNotTaken = block->GetTrueTarget();
-
-            // Remove 'block' from the predecessor list of 'bNotTaken' */
-            fgRemoveRefPred(block->GetTrueEdge());
-
-            edgeTaken = block->GetFalseEdge();
-            block->SetKindAndTargetEdge(BBJ_ALWAYS, block->GetFalseEdge());
+            retainedEdge = block->GetFalseEdge();
+            removedEdge  = block->GetTrueEdge();
         }
 
-        // We examine the taken edge (block -> bTaken)
-        // if block has valid profile weight and bTaken does not we try to adjust bTaken's weight
-        // else if bTaken has valid profile weight and block does not we try to adjust block's weight
-        // We can only adjust the block weights when (the edge block -> bTaken) is the only edge into bTaken
-        //
-        if (block->hasProfileWeight())
-        {
-            if (!bTaken->hasProfileWeight())
-            {
-                if ((bTaken->countOfInEdges() == 1) || (bTaken->bbWeight < block->bbWeight))
-                {
-                    // Update the weight of bTaken
-                    bTaken->inheritWeight(block);
-                }
-            }
-        }
-        else if (bTaken->hasProfileWeight())
-        {
-            if (bTaken->countOfInEdges() == 1)
-            {
-                // Update the weight of block
-                block->inheritWeight(bTaken);
-            }
-        }
+        fgRemoveRefPred(removedEdge);
+        block->SetKindAndTargetEdge(BBJ_ALWAYS, retainedEdge);
+        fgRepairProfileCondToUncond(block, retainedEdge, removedEdge);
 
 #ifdef DEBUG
         if (verbose)
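
Note on the recurring repair in these hunks: whenever an edge into bDest is redirected away, the patch subtracts that edge's likely weight from bDest's weight and clamps at zero via setBBProfileWeight(max(0.0, ...)), replacing the old hand-rolled underflow checks that set BBF_RUN_RARELY at each call site. The standalone C++ sketch below illustrates only that clamped subtraction; Block, setProfileWeight, and the sample weights are hypothetical stand-ins for this note, not the real RyuJIT types.

    // weight_repair_sketch.cpp: clamped profile-weight subtraction, in the
    // style of fgOptimizeBranchToEmptyUnconditional/fgOptimizeSwitchBranches.
    #include <algorithm>
    #include <cstdio>

    using weight_t = double;

    struct Block
    {
        weight_t weight     = 0.0;
        bool     hasProfile = false;

        // Rough analogue of setBBProfileWeight: record the new weight and
        // remember that this block carries profile data.
        void setProfileWeight(weight_t w)
        {
            weight     = w;
            hasProfile = true;
        }
    };

    int main()
    {
        Block bDest;
        bDest.setProfileWeight(100.0);

        // Likely weight of the redirected path; deliberately larger than
        // bDest's weight to show why the clamp matters when profile data
        // is slightly inconsistent.
        const weight_t removedWeight = 120.0;

        if (bDest.hasProfile)
        {
            // Clamp at zero instead of underflowing, mirroring
            // max(0.0, bDest->bbWeight - removedWeight) in the patch.
            bDest.setProfileWeight(std::max(0.0, bDest.weight - removedWeight));
        }

        std::printf("repaired weight: %.1f\n", bDest.weight); // prints 0.0
        return 0;
    }

A side benefit of routing every update through setBBProfileWeight is that the zero-weight bookkeeping (what the deleted BBF_RUN_RARELY/bbSetRunRarely code did by hand) presumably lives in one place rather than at each optimization site.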