JIT: Remove BBJ_NONE #94239

Merged · 20 commits · Nov 28, 2023
Changes from all commits
49 changes: 22 additions & 27 deletions src/coreclr/jit/block.cpp
@@ -285,6 +285,25 @@ bool BasicBlock::IsFirstColdBlock(Compiler* compiler) const
return (this == compiler->fgFirstColdBlock);
}

//------------------------------------------------------------------------
// CanRemoveJumpToNext: determine if the jump to the next block can be omitted
//
// Arguments:
// compiler - current compiler instance
//
// Returns:
// true if the peephole optimization is enabled,
// and this block is a BBJ_ALWAYS to the next block that we can fall through into
//
bool BasicBlock::CanRemoveJumpToNext(Compiler* compiler)
{
assert(KindIs(BBJ_ALWAYS));
const bool tryJumpOpt = compiler->opts.OptimizationEnabled() || ((bbFlags & BBF_NONE_QUIRK) != 0);
const bool skipJump = tryJumpOpt && JumpsToNext() && !hasAlign() && ((bbFlags & BBF_KEEP_BBJ_ALWAYS) == 0) &&
!compiler->fgInDifferentRegions(this, bbJumpDest);
return skipJump;
}

//------------------------------------------------------------------------
// checkPredListOrder: see if pred list is properly ordered
//
@@ -719,10 +738,6 @@ void BasicBlock::dspJumpKind()
printf(" (return)");
break;

case BBJ_NONE:
// For fall-through blocks, print nothing.
break;

case BBJ_ALWAYS:
if (bbFlags & BBF_KEEP_BBJ_ALWAYS)
{
@@ -981,7 +996,7 @@ BasicBlock* BasicBlock::GetUniquePred(Compiler* compiler) const

//------------------------------------------------------------------------
// GetUniqueSucc: Returns the unique successor of a block, if one exists.
// Only considers BBJ_ALWAYS and BBJ_NONE block types.
// Only considers BBJ_ALWAYS block types.
//
// Arguments:
// None.
@@ -991,18 +1006,7 @@ BasicBlock* BasicBlock::GetUniquePred(Compiler* compiler) const
//
BasicBlock* BasicBlock::GetUniqueSucc() const
{
if (bbJumpKind == BBJ_ALWAYS)
{
return bbJumpDest;
}
else if (bbJumpKind == BBJ_NONE)
{
return bbNext;
}
else
{
return nullptr;
}
return KindIs(BBJ_ALWAYS) ? bbJumpDest : nullptr;
}

// Static vars.
@@ -1120,7 +1124,6 @@ bool BasicBlock::bbFallsThrough() const
case BBJ_SWITCH:
return false;

case BBJ_NONE:
case BBJ_COND:
return true;

@@ -1156,7 +1159,6 @@ unsigned BasicBlock::NumSucc() const
case BBJ_EHCATCHRET:
case BBJ_EHFILTERRET:
case BBJ_LEAVE:
case BBJ_NONE:
return 1;

case BBJ_COND:
@@ -1215,9 +1217,6 @@ BasicBlock* BasicBlock::GetSucc(unsigned i) const
case BBJ_LEAVE:
return bbJumpDest;

case BBJ_NONE:
return bbNext;

case BBJ_COND:
if (i == 0)
{
@@ -1283,7 +1282,6 @@ unsigned BasicBlock::NumSucc(Compiler* comp)
case BBJ_EHCATCHRET:
case BBJ_EHFILTERRET:
case BBJ_LEAVE:
case BBJ_NONE:
return 1;

case BBJ_COND:
@@ -1340,9 +1338,6 @@ BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
case BBJ_LEAVE:
return bbJumpDest;

case BBJ_NONE:
return bbNext;

case BBJ_COND:
if (i == 0)
{
@@ -1619,7 +1614,7 @@ BasicBlock* BasicBlock::New(Compiler* compiler, BBjumpKinds jumpKind, BasicBlock
block->bbJumpKind = jumpKind;
block->bbJumpDest = jumpDest;

if (jumpKind == BBJ_THROW)
if (block->KindIs(BBJ_THROW))
{
block->bbSetRunRarely();
}
18 changes: 8 additions & 10 deletions src/coreclr/jit/block.h
@@ -65,7 +65,6 @@ enum BBjumpKinds : BYTE
BBJ_EHCATCHRET, // block ends with a leave out of a catch (only #if defined(FEATURE_EH_FUNCLETS))
BBJ_THROW, // block ends with 'throw'
BBJ_RETURN, // block ends with 'ret'
BBJ_NONE, // block flows into the next one (no jump)
BBJ_ALWAYS, // block always jumps to the target
BBJ_LEAVE, // block always jumps to the target, maybe out of guarded region. Only used until importing.
BBJ_CALLFINALLY, // block always calls the target finally
@@ -83,7 +82,6 @@ const char* const BBjumpKindNames[] = {
"BBJ_EHCATCHRET",
"BBJ_THROW",
"BBJ_RETURN",
"BBJ_NONE",
"BBJ_ALWAYS",
"BBJ_LEAVE",
"BBJ_CALLFINALLY",
@@ -430,6 +428,10 @@ enum BasicBlockFlags : unsigned __int64
BBF_RECURSIVE_TAILCALL = MAKE_BBFLAG(40), // Block has recursive tailcall that may turn into a loop
BBF_NO_CSE_IN = MAKE_BBFLAG(41), // Block should kill off any incoming CSE
BBF_CAN_ADD_PRED = MAKE_BBFLAG(42), // Ok to add pred edge to this block, even when "safe" edge creation disabled
BBF_NONE_QUIRK = MAKE_BBFLAG(43), // Block was created as a BBJ_ALWAYS to the next block,
// and should be treated as if it falls through.
// This is just to reduce diffs from removing BBJ_NONE.
// (TODO: Remove this quirk after refactoring Compiler::fgFindInsertPoint)

// The following are sets of flags.

@@ -459,7 +461,7 @@ enum BasicBlockFlags : unsigned __int64
// TODO: Should BBF_RUN_RARELY be added to BBF_SPLIT_GAINED ?

BBF_SPLIT_GAINED = BBF_DONT_REMOVE | BBF_HAS_JMP | BBF_BACKWARD_JUMP | BBF_HAS_IDX_LEN | BBF_HAS_MD_IDX_LEN | BBF_PROF_WEIGHT | \
BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_NULLCHECK | BBF_HAS_HISTOGRAM_PROFILE | BBF_HAS_MDARRAYREF | BBF_NEEDS_GCPOLL,
BBF_HAS_NEWOBJ | BBF_KEEP_BBJ_ALWAYS | BBF_CLONED_FINALLY_END | BBF_HAS_NULLCHECK | BBF_HAS_HISTOGRAM_PROFILE | BBF_HAS_MDARRAYREF | BBF_NEEDS_GCPOLL | BBF_NONE_QUIRK,

// Flags that must be propagated to a new block if code is copied from a block to a new block. These are flags that
// limit processing of a block if the code in question doesn't exist. This is conservative; we might not
@@ -599,6 +601,8 @@ struct BasicBlock : private LIR::Range

bool IsFirstColdBlock(Compiler* compiler) const;

bool CanRemoveJumpToNext(Compiler* compiler);

unsigned GetJumpOffs() const
{
return bbJumpOffs;
@@ -729,7 +733,7 @@ struct BasicBlock : private LIR::Range
unsigned dspPreds(); // Print the predecessors (bbPreds)
void dspSuccs(Compiler* compiler); // Print the successors. The 'compiler' argument determines whether EH
// regions are printed: see NumSucc() for details.
void dspJumpKind(); // Print the block jump kind (e.g., BBJ_NONE, BBJ_COND, etc.).
void dspJumpKind(); // Print the block jump kind (e.g., BBJ_ALWAYS, BBJ_COND, etc.).

// Print a simple basic block header for various output, including a list of predecessors and successors.
void dspBlockHeader(Compiler* compiler, bool showKind = true, bool showFlags = false, bool showPreds = true);
@@ -1771,12 +1775,6 @@ inline BasicBlock::BBSuccList::BBSuccList(const BasicBlock* block)
m_end = &m_succs[1];
break;

case BBJ_NONE:
m_succs[0] = block->bbNext;
m_begin = &m_succs[0];
m_end = &m_succs[1];
break;

case BBJ_COND:
m_succs[0] = block->bbNext;
m_begin = &m_succs[0];
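As an editorial illustration of the BBF_NONE_QUIRK flag described above — a minimal, self-contained sketch using simplified stand-in types and a hypothetical helper name (makeFallThroughQuirk), not the PR's actual code: where a fall-through BBJ_NONE block would previously have been created, the block is instead made a BBJ_ALWAYS targeting the next block and tagged with the quirk flag so later phases keep treating it as fall-through.

```cpp
#include <cstdint>

// Simplified stand-ins for the real definitions in block.h; names mirror the PR for readability.
constexpr uint64_t BBF_NONE_QUIRK = uint64_t(1) << 43;

struct BasicBlock
{
    uint64_t    bbFlags    = 0;
    BasicBlock* bbNext     = nullptr;
    BasicBlock* bbJumpDest = nullptr;
};

// Hypothetical helper: turn what used to be a fall-through (BBJ_NONE) block into a
// BBJ_ALWAYS that targets the next block, and mark it so it is still treated as fall-through.
void makeFallThroughQuirk(BasicBlock* block)
{
    block->bbJumpDest = block->bbNext;   // the "jump" simply targets the next block in layout order
    block->bbFlags   |= BBF_NONE_QUIRK;  // downstream code (e.g. CanRemoveJumpToNext) honors this
}
```

With the flag set, CanRemoveJumpToNext (shown earlier in block.cpp) allows the redundant jump to be elided even when optimizations are disabled.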
10 changes: 9 additions & 1 deletion src/coreclr/jit/codegencommon.cpp
@@ -379,6 +379,15 @@ void CodeGen::genMarkLabelsForCodegen()
switch (block->GetJumpKind())
{
case BBJ_ALWAYS: // This will also handle the BBJ_ALWAYS of a BBJ_CALLFINALLY/BBJ_ALWAYS pair.
{
// If we can skip this jump, don't create a label for the target
if (block->CanRemoveJumpToNext(compiler))
{
break;
}

FALLTHROUGH;
}
case BBJ_COND:
case BBJ_EHCATCHRET:
JITDUMP(" " FMT_BB " : branch target\n", block->GetJumpDest()->bbNum);
@@ -422,7 +431,6 @@ void CodeGen::genMarkLabelsForCodegen()
case BBJ_EHFILTERRET:
case BBJ_RETURN:
case BBJ_THROW:
case BBJ_NONE:
break;

default:
39 changes: 22 additions & 17 deletions src/coreclr/jit/codegenlinear.cpp
@@ -599,6 +599,7 @@ void CodeGen::genCodeForBBlist()
noway_assert(genStackLevel == 0);

#ifdef TARGET_AMD64
bool emitNop = false;
// On AMD64, we need to generate a NOP after a call that is the last instruction of the block, in several
// situations, to support proper exception handling semantics. This is mostly to ensure that when the stack
// walker computes an instruction pointer for a frame, that instruction pointer is in the correct EH region.
@@ -621,6 +622,11 @@
switch (block->GetJumpKind())
{
case BBJ_ALWAYS:
// We might skip generating the jump via a peephole optimization.
// If that happens, make sure a NOP is emitted as the last instruction in the block.
emitNop = true;
break;

Member:
Did we ever hit the case in the old code where we had BBJ_NONE on the last block?

Member (Author):
I added an assert(false) to that case in the old code to see if I could get it to hit during a SuperPMI replay, and it never hit across all collections. Also in the new code, I added an assert that BBJ_ALWAYS has a jump before trying to emit the jump, so that we never have a BBJ_ALWAYS that "falls into" nothing at the end of the block list -- that also never hit.
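To illustrate the check described in the reply above — a rough, self-contained sketch with simplified stand-in types and a hypothetical function name (emitBlockJump), not the PR's actual code: before emitting the unconditional jump for a BBJ_ALWAYS block, assert that it has a real target, so that with BBJ_NONE gone a BBJ_ALWAYS can never "fall into" nothing at the end of the block list.

```cpp
#include <cassert>

// Simplified stand-ins for the real JIT types.
enum BBjumpKinds { BBJ_ALWAYS, BBJ_RETURN };

struct BasicBlock
{
    BBjumpKinds bbJumpKind;
    BasicBlock* bbJumpDest;

    bool KindIs(BBjumpKinds kind) const { return bbJumpKind == kind; }
};

// Hypothetical emission step: a BBJ_ALWAYS must always have a jump target.
void emitBlockJump(BasicBlock* block)
{
    if (block->KindIs(BBJ_ALWAYS))
    {
        assert(block->bbJumpDest != nullptr); // never "fall into" nothing at the end of the list
        // ... emit the jump here, unless the CanRemoveJumpToNext peephole elides it ...
    }
}
```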

case BBJ_THROW:
case BBJ_CALLFINALLY:
case BBJ_EHCATCHRET:
@@ -631,20 +637,6 @@
case BBJ_EHFAULTRET:
case BBJ_EHFILTERRET:
// These are the "epilog follows" case, handled in the emitter.

break;

case BBJ_NONE:
if (block->IsLast())
{
// Call immediately before the end of the code; we should never get here .
instGen(INS_BREAKPOINT); // This should never get executed
}
else
{
// We need the NOP
instGen(INS_nop);
}
break;

case BBJ_COND:
@@ -734,13 +726,26 @@

#endif // !FEATURE_EH_FUNCLETS

case BBJ_NONE:
case BBJ_SWITCH:
break;

case BBJ_ALWAYS:
#ifdef TARGET_XARCH
{
// Peephole optimization: If this block jumps to the next one, skip emitting the jump
// (unless we are jumping between hot/cold sections, or if we need the jump for EH reasons)
// (Skip this if optimizations are disabled, unless the block shouldn't have a jump in the first place)
if (block->CanRemoveJumpToNext(compiler))
{
#ifdef TARGET_AMD64
if (emitNop)
{
instGen(INS_nop);
}
#endif // TARGET_AMD64

break;
}
#ifdef TARGET_XARCH
// If a block was selected to place an alignment instruction because it ended
// with a jump, do not remove jumps from such blocks.
// Do not remove a jump between hot and cold regions.
@@ -755,10 +760,10 @@
#endif // TARGET_AMD64

inst_JMP(EJ_jmp, block->GetJumpDest(), isRemovableJmpCandidate);
}
#else
inst_JMP(EJ_jmp, block->GetJumpDest());
#endif // TARGET_XARCH
}

FALLTHROUGH;

5 changes: 3 additions & 2 deletions src/coreclr/jit/compiler.cpp
@@ -5279,8 +5279,9 @@ PhaseStatus Compiler::placeLoopAlignInstructions()
}
}

// If there is an unconditional jump (which is not part of callf/always pair)
if (opts.compJitHideAlignBehindJmp && block->KindIs(BBJ_ALWAYS) && !block->isBBCallAlwaysPairTail())
// If there is an unconditional jump (which is not part of callf/always pair, and isn't to the next block)
if (opts.compJitHideAlignBehindJmp && block->KindIs(BBJ_ALWAYS) && !block->isBBCallAlwaysPairTail() &&
((block->bbFlags & BBF_NONE_QUIRK) == 0))
{
// Track the lower weight blocks
if (block->bbWeight < minBlockSoFar)
5 changes: 4 additions & 1 deletion src/coreclr/jit/compiler.h
@@ -5903,7 +5903,10 @@ class Compiler

bool fgIsThrow(GenTree* tree);

public:
bool fgInDifferentRegions(BasicBlock* blk1, BasicBlock* blk2);

private:
bool fgIsBlockCold(BasicBlock* block);

GenTree* fgMorphCastIntoHelper(GenTree* tree, int helper, GenTree* oper);
@@ -6483,7 +6486,7 @@ class Compiler
{
if (lpHead->NextIs(lpEntry))
{
assert(lpHead->bbFallsThrough());
assert(lpHead->bbFallsThrough() || lpHead->JumpsToNext());
assert(lpTop == lpEntry);
return true;
Member:
Unrelated to your PR, but it seems odd to me that we couldn't have a bottom-entered loop where lpHead immediately precedes the top block.

}
7 changes: 0 additions & 7 deletions src/coreclr/jit/compiler.hpp
@@ -622,10 +622,6 @@ BasicBlockVisit BasicBlock::VisitAllSuccs(Compiler* comp, TFunc func)

return BasicBlockVisit::Continue;

case BBJ_NONE:
RETURN_ON_ABORT(func(bbNext));
return VisitEHSuccs(comp, func);

case BBJ_COND:
RETURN_ON_ABORT(func(bbNext));

@@ -687,9 +683,6 @@ BasicBlockVisit BasicBlock::VisitRegularSuccs(Compiler* comp, TFunc func)
case BBJ_ALWAYS:
return func(bbJumpDest);

case BBJ_NONE:
return func(bbNext);

case BBJ_COND:
RETURN_ON_ABORT(func(bbNext));
