Consolidate importer spilling code (#72291)
* Add tests

* Fix losing GLOB_REF on the LHS

The comment states we don't need it, which is incorrect.

The diffs are improvements because we now block forward substitution of
calls into "ASG(BLK(ADDR(LCL_VAR<field>, ...)))", which allows
morph to leave the "can be replaced with its field" local alone.

* Prospective fix

Spill "glob refs" on stores to "aliased" locals.

* Delete now-unnecessary code

* Fix up asserts

* Clean out '(unsigned)CHECK_SPILL_ALL/NONE' casts

* Don't manually spill for 'st[s]fld'

* Revert 'Clean out '(unsigned)CHECK_SPILL_ALL/NONE' casts'
SingleAccretion committed Jul 23, 2022
1 parent 2bcf0f7 commit c624626
Showing 4 changed files with 378 additions and 165 deletions.
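To make concrete the rule that this change consolidates — before a store to a local is appended, any pending evaluation-stack entry that reads that local must first be spilled to a temp so it observes the old value — here is a minimal, self-contained C++ sketch. The ToyImporter type, its members, and the main driver are hypothetical and exist only for illustration; only the spill-before-store ordering mirrors what impSpillLclRefs/impAppendStmt do in the diff below.

#include <cassert>
#include <string>
#include <vector>

// A toy "tree": printable text plus the set of locals it reads.
struct Tree
{
    std::string           text;
    std::vector<unsigned> localRefs;
};

struct ToyImporter
{
    std::vector<Tree>        stack;          // pending evaluation-stack entries
    std::vector<std::string> stmts;          // appended (ordered) statements
    unsigned                 nextTemp = 100; // number for the next spill temp

    // Counterpart of impSpillLclRefs: evaluate into a temp every stack entry
    // that reads 'lclNum', and make the stack entry refer to that temp instead.
    void spillLclRefs(unsigned lclNum)
    {
        for (Tree& entry : stack)
        {
            bool readsLocal = false;
            for (unsigned ref : entry.localRefs)
            {
                readsLocal |= (ref == lclNum);
            }

            if (readsLocal)
            {
                unsigned temp = nextTemp++;
                stmts.push_back("T" + std::to_string(temp) + " = " + entry.text);
                entry.text = "T" + std::to_string(temp);
                entry.localRefs.clear();
            }
        }
    }

    // Counterpart of the consolidated impAppendStmt path: spill interfering
    // stack entries first, then append the store.
    void appendStoreToLocal(unsigned lclNum, const std::string& value)
    {
        spillLclRefs(lclNum);
        stmts.push_back("V" + std::to_string(lclNum) + " = " + value);
    }
};

int main()
{
    ToyImporter imp;
    imp.stack.push_back(Tree{"V05 + 1", {5}}); // a pending load of local #5 is still on the stack
    imp.appendStoreToLocal(5, "42");           // the store must not overtake that pending load

    assert(imp.stmts.size() == 2);
    assert(imp.stmts[0] == "T100 = V05 + 1"); // the old value is captured in a temp first
    assert(imp.stmts[1] == "V05 = 42");       // then the store is appended
    assert(imp.stack[0].text == "T100");      // the stack entry now refers to the temp
    return 0;
}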
9 changes: 3 additions & 6 deletions src/coreclr/jit/compiler.h
@@ -3797,11 +3797,8 @@ class Compiler
Statement* impLastStmt; // The last statement for the current BB.

public:
enum
{
CHECK_SPILL_ALL = -1,
CHECK_SPILL_NONE = -2
};
static const unsigned CHECK_SPILL_ALL = static_cast<unsigned>(-1);
static const unsigned CHECK_SPILL_NONE = static_cast<unsigned>(-2);

void impBeginTreeList();
void impEndTreeList(BasicBlock* block, Statement* firstStmt, Statement* lastStmt);
@@ -4001,7 +3998,7 @@ class Compiler
void impSpillSpecialSideEff();
void impSpillSideEffect(bool spillGlobEffects, unsigned chkLevel DEBUGARG(const char* reason));
void impSpillSideEffects(bool spillGlobEffects, unsigned chkLevel DEBUGARG(const char* reason));
void impSpillLclRefs(unsigned lclNum);
void impSpillLclRefs(unsigned lclNum, unsigned chkLevel);

BasicBlock* impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_HANDLE clsHnd, bool isSingleBlockFilter);

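The header change above replaces the anonymous enum, whose negative members required "(unsigned)" casts whenever a sentinel was passed as an unsigned spill level, with typed unsigned constants. As a standalone illustration of how such sentinels are typically resolved against the stack depth — the helper below is hypothetical, not coreclr code — consider:

#include <cassert>

static const unsigned CHECK_SPILL_ALL  = static_cast<unsigned>(-1); // check/spill the whole stack
static const unsigned CHECK_SPILL_NONE = static_cast<unsigned>(-2); // check nothing

// Hypothetical helper: turn a sentinel into a concrete [0..level) range, in the same
// way the importer normalizes CHECK_SPILL_ALL to verCurrentState.esStackDepth.
static unsigned normalizeSpillLevel(unsigned chkLevel, unsigned stackDepth)
{
    if (chkLevel == CHECK_SPILL_NONE)
    {
        return 0;
    }
    if (chkLevel == CHECK_SPILL_ALL)
    {
        return stackDepth;
    }
    assert(chkLevel <= stackDepth);
    return chkLevel;
}

int main()
{
    assert(normalizeSpillLevel(CHECK_SPILL_ALL, 3) == 3);
    assert(normalizeSpillLevel(CHECK_SPILL_NONE, 3) == 0);
    assert(normalizeSpillLevel(2, 3) == 2);
    return 0;
}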
223 changes: 64 additions & 159 deletions src/coreclr/jit/importer.cpp
@@ -453,25 +453,23 @@ inline void Compiler::impAppendStmtCheck(Statement* stmt, unsigned chkLevel)
}
}

if (tree->gtOper == GT_ASG)
if (tree->OperIs(GT_ASG))
{
// For an assignment to a local variable, all references of that
// variable have to be spilled. If it is aliased, all calls and
// indirect accesses have to be spilled

if (tree->AsOp()->gtOp1->gtOper == GT_LCL_VAR)
if (tree->AsOp()->gtOp1->OperIsLocal())
{
unsigned lclNum = tree->AsOp()->gtOp1->AsLclVarCommon()->GetLclNum();
for (unsigned level = 0; level < chkLevel; level++)
{
assert(!gtHasRef(verCurrentState.esStack[level].val, lclNum));
assert(!lvaTable[lclNum].IsAddressExposed() ||
(verCurrentState.esStack[level].val->gtFlags & GTF_SIDE_EFFECT) == 0);
GenTree* stkTree = verCurrentState.esStack[level].val;
assert(!gtHasRef(stkTree, lclNum) || impIsInvariant(stkTree));
assert(!lvaTable[lclNum].IsAddressExposed() || ((stkTree->gtFlags & GTF_SIDE_EFFECT) == 0));
}
}

// If the access may be to global memory, all side effects have to be spilled.

else if (tree->AsOp()->gtOp1->gtFlags & GTF_GLOB_REF)
{
for (unsigned level = 0; level < chkLevel; level++)
@@ -490,7 +488,7 @@ inline void Compiler::impAppendStmtCheck(Statement* stmt, unsigned chkLevel)
// Arguments:
// stmt - The statement to add.
// chkLevel - [0..chkLevel) is the portion of the stack which we will check
// for interference with stmt and spill if needed.
// for interference with stmt and spilled if needed.
// checkConsumedDebugInfo - Whether to check for consumption of impCurStmtDI. impCurStmtDI
// marks the debug info of the current boundary and is set when we
// start importing IL at that boundary. If this parameter is true,
Expand All @@ -509,61 +507,43 @@ void Compiler::impAppendStmt(Statement* stmt, unsigned chkLevel, bool checkConsu
{
assert(chkLevel <= verCurrentState.esStackDepth);

/* If the statement being appended has any side-effects, check the stack
to see if anything needs to be spilled to preserve correct ordering. */
// If the statement being appended has any side-effects, check the stack to see if anything
// needs to be spilled to preserve correct ordering.

GenTree* expr = stmt->GetRootNode();
GenTreeFlags flags = expr->gtFlags & GTF_GLOB_EFFECT;

// Assignment to (unaliased) locals don't count as a side-effect as
// we handle them specially using impSpillLclRefs(). Temp locals should
// be fine too.

if ((expr->gtOper == GT_ASG) && (expr->AsOp()->gtOp1->gtOper == GT_LCL_VAR) &&
((expr->AsOp()->gtOp1->gtFlags & GTF_GLOB_REF) == 0) && !gtHasLocalsWithAddrOp(expr->AsOp()->gtOp2))
{
GenTreeFlags op2Flags = expr->AsOp()->gtOp2->gtFlags & GTF_GLOB_EFFECT;
assert(flags == (op2Flags | GTF_ASG));
flags = op2Flags;
}

if (flags != 0)
// If the only side effect of this tree is an assignment to an unaliased local, we can avoid
// spilling all pending loads from the stack. Instead we only spill trees with LCL_[VAR|FLD]
// nodes that refer to the local.
//
if (expr->OperIs(GT_ASG) && expr->AsOp()->gtOp1->OperIsLocal())
{
bool spillGlobEffects = false;
LclVarDsc* dstVarDsc = lvaGetDesc(expr->AsOp()->gtOp1->AsLclVarCommon());

if ((flags & GTF_CALL) != 0)
{
// If there is a call, we have to spill global refs
spillGlobEffects = true;
}
else if (!expr->OperIs(GT_ASG))
{
if ((flags & GTF_ASG) != 0)
{
// The expression is not an assignment node but it has an assignment side effect, it
// must be an atomic op, HW intrinsic or some other kind of node that stores to memory.
// Since we don't know what it assigns to, we need to spill global refs.
spillGlobEffects = true;
}
}
else
// We make two assumptions here:
//
// 1. All locals which can be modified indirectly are marked as address-exposed or with
// "lvHasLdAddrOp" -- we will rely on "impSpillSideEffects(spillGlobEffects: true)"
// below to spill them.
// 2. Trees that assign to unaliased locals are always top-level (this avoids having to
// walk down the tree here).
//
// If any of the above are violated (say for some temps), the relevant code must spill
// any possible pending references manually.
//
if (!dstVarDsc->IsAddressExposed() && !dstVarDsc->lvHasLdAddrOp)
{
GenTree* lhs = expr->gtGetOp1();
GenTree* rhs = expr->gtGetOp2();
impSpillLclRefs(lvaGetLclNum(dstVarDsc), chkLevel);

if (((rhs->gtFlags | lhs->gtFlags) & GTF_ASG) != 0)
{
// Either side of the assignment node has an assignment side effect.
// Since we don't know what it assigns to, we need to spill global refs.
spillGlobEffects = true;
}
else if ((lhs->gtFlags & GTF_GLOB_REF) != 0)
{
spillGlobEffects = true;
}
// We still need to spill things that the RHS could modify/interfere with.
flags = expr->AsOp()->gtOp2->gtFlags & GTF_GLOB_EFFECT;
}
}

impSpillSideEffects(spillGlobEffects, chkLevel DEBUGARG("impAppendStmt"));
if (flags != 0)
{
impSpillSideEffects((flags & (GTF_ASG | GTF_CALL)) != 0, chkLevel DEBUGARG("impAppendStmt"));
}
else
{
@@ -1500,11 +1480,6 @@ GenTree* Compiler::impAssignStructPtr(GenTree* destAddr,
{
dest = gtNewObjNode(structHnd, destAddr);
gtSetObjGcInfo(dest->AsObj());
// Although an obj as a call argument was always assumed to be a globRef
// (which is itself overly conservative), that is not true of the operands
// of a block assignment.
dest->gtFlags &= ~GTF_GLOB_REF;
dest->gtFlags |= (destAddr->gtFlags & GTF_GLOB_REF);
}
else
{
@@ -2357,14 +2332,6 @@ GenTree* Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken
return gtNewLclvNode(tmp, TYP_I_IMPL);
}

/******************************************************************************
* Spills the stack at verCurrentState.esStack[level] and replaces it with a temp.
* If tnum!=BAD_VAR_NUM, the temp var used to replace the tree is tnum,
* else, grab a new temp.
* For structs (which can be pushed on the stack using obj, etc),
* special handling is needed
*/

struct RecursiveGuard
{
public:
@@ -2583,21 +2550,27 @@ inline void Compiler::impSpillSpecialSideEff()
}
}

/*****************************************************************************
*
* If the stack contains any trees with references to local #lclNum, assign
* those trees to temps and replace their place on the stack with refs to
* their temps.
*/

void Compiler::impSpillLclRefs(unsigned lclNum)
//------------------------------------------------------------------------
// impSpillLclRefs: Spill all trees referencing the given local.
//
// Arguments:
// lclNum - The local's number
// chkLevel - Height (exclusive) of the portion of the stack to check
//
void Compiler::impSpillLclRefs(unsigned lclNum, unsigned chkLevel)
{
/* Before we make any appends to the tree list we must spill the
* "special" side effects (GTF_ORDER_SIDEEFF) - GT_CATCH_ARG */

// Before we make any appends to the tree list we must spill the
// "special" side effects (GTF_ORDER_SIDEEFF) - GT_CATCH_ARG.
impSpillSpecialSideEff();

for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
if (chkLevel == CHECK_SPILL_ALL)
{
chkLevel = verCurrentState.esStackDepth;
}

assert(chkLevel <= verCurrentState.esStackDepth);

for (unsigned level = 0; level < chkLevel; level++)
{
GenTree* tree = verCurrentState.esStack[level].val;

@@ -13181,56 +13154,14 @@ void Compiler::impImportBlockCode(BasicBlock* block)
goto DECODE_OPCODE;

SPILL_APPEND:

// We need to call impSpillLclRefs() for a struct type lclVar.
// This is because there may be loads of that lclVar on the evaluation stack, and
// we need to ensure that those loads are completed before we modify it.
if ((op1->OperGet() == GT_ASG) && varTypeIsStruct(op1->gtGetOp1()))
{
GenTree* lhs = op1->gtGetOp1();
GenTreeLclVarCommon* lclVar = nullptr;
if (lhs->gtOper == GT_LCL_VAR)
{
lclVar = lhs->AsLclVarCommon();
}
else if (lhs->OperIsBlk())
{
// Check if LHS address is within some struct local, to catch
// cases where we're updating the struct by something other than a stfld
GenTree* addr = lhs->AsBlk()->Addr();

// Catches ADDR(LCL_VAR), or ADD(ADDR(LCL_VAR),CNS_INT))
lclVar = addr->IsLocalAddrExpr();

// Catches ADDR(FIELD(... ADDR(LCL_VAR)))
if (lclVar == nullptr)
{
GenTree* lclTree = nullptr;
if (impIsAddressInLocal(addr, &lclTree))
{
lclVar = lclTree->AsLclVarCommon();
}
}
}
if (lclVar != nullptr)
{
impSpillLclRefs(lclVar->GetLclNum());
}
}

/* Append 'op1' to the list of statements */
impAppendTree(op1, (unsigned)CHECK_SPILL_ALL, impCurStmtDI);
goto DONE_APPEND;

APPEND:

/* Append 'op1' to the list of statements */

impAppendTree(op1, (unsigned)CHECK_SPILL_NONE, impCurStmtDI);
goto DONE_APPEND;

DONE_APPEND:

#ifdef DEBUG
// Remember at which BC offset the tree was finished
impNoteLastILoffs();
@@ -13508,25 +13439,16 @@ void Compiler::impImportBlockCode(BasicBlock* block)
}
}

/* Create the assignment node */

op2 = gtNewLclvNode(lclNum, lclTyp DEBUGARG(opcodeOffs + sz + 1));

/* If the local is aliased or pinned, we need to spill calls and
indirections from the stack. */

if ((lvaTable[lclNum].IsAddressExposed() || lvaTable[lclNum].lvHasLdAddrOp ||
lvaTable[lclNum].lvPinned) &&
(verCurrentState.esStackDepth > 0))
// Stores to pinned locals can have the implicit side effect of "unpinning", so we must spill
// things that could depend on the pin. TODO-Bug: which can actually be anything, including
// unpinned unaliased locals, not just side-effecting trees.
if (lvaTable[lclNum].lvPinned)
{
impSpillSideEffects(false,
(unsigned)CHECK_SPILL_ALL DEBUGARG("Local could be aliased or is pinned"));
impSpillSideEffects(false, CHECK_SPILL_ALL DEBUGARG("Spill before store to pinned local"));
}

/* Spill any refs to the local from the stack */

impSpillLclRefs(lclNum);

// We can generate an assignment to a TYP_FLOAT from a TYP_DOUBLE
// We insert a cast to the dest 'op2' type
//
@@ -13538,13 +13460,12 @@ void Compiler::impImportBlockCode(BasicBlock* block)

if (varTypeIsStruct(lclTyp))
{
op1 = impAssignStruct(op2, op1, clsHnd, (unsigned)CHECK_SPILL_ALL);
op1 = impAssignStruct(op2, op1, clsHnd, CHECK_SPILL_ALL);
}
else
{
op1 = gtNewAssignNode(op2, op1);
}

goto SPILL_APPEND;

case CEE_LDLOCA:
@@ -15158,6 +15079,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
#endif

op1 = gtNewOperNode(GT_IND, lclTyp, op1);
op1->gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;

if (prefixFlags & PREFIX_VOLATILE)
{
@@ -15173,15 +15095,7 @@
}

op1 = gtNewAssignNode(op1, op2);
op1->gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;

// Spill side-effects AND global-data-accesses
if (verCurrentState.esStackDepth > 0)
{
impSpillSideEffects(true, (unsigned)CHECK_SPILL_ALL DEBUGARG("spill side effects before STIND"));
}

goto APPEND;
goto SPILL_APPEND;

case CEE_LDIND_I1:
lclTyp = TYP_BYTE;
@@ -16338,11 +16252,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
assert(!"Unexpected fieldAccessor");
}

// "impAssignStruct" will back-substitute the field address tree into calls that return things via
// return buffers, so we have to delay calling it until after we have spilled everything needed.
bool deferStructAssign = (lclTyp == TYP_STRUCT);

if (!deferStructAssign)
if (lclTyp != TYP_STRUCT)
{
assert(op1->OperIs(GT_FIELD, GT_IND));

@@ -16431,8 +16341,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
op1 = gtNewAssignNode(op1, op2);
}

/* Check if the class needs explicit initialization */

// Check if the class needs explicit initialization.
if (fieldInfo.fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
{
GenTree* helperNode = impInitClass(&resolvedToken);
@@ -16446,16 +16355,12 @@ void Compiler::impImportBlockCode(BasicBlock* block)
}
}

// An indirect store such as "st[s]fld" interferes with indirect accesses, so we must spill
// global refs and potentially aliased locals.
impSpillSideEffects(true, (unsigned)CHECK_SPILL_ALL DEBUGARG("spill side effects before STFLD"));

if (deferStructAssign)
if (lclTyp == TYP_STRUCT)
{
op1 = impAssignStruct(op1, op2, clsHnd, (unsigned)CHECK_SPILL_ALL);
op1 = impAssignStruct(op1, op2, clsHnd, CHECK_SPILL_ALL);
}
goto SPILL_APPEND;
}
goto APPEND;

case CEE_NEWARR:
{
(Diffs for the remaining changed files are not shown.)
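The manual spills deleted above for STIND and ST[S]FLD relied on the same rule that impAppendStmt now applies centrally: an indirect store may alias any pending stack entry that reads global memory, so such entries must be evaluated into temps before the store is appended. Below is a minimal toy model of that rule; the Entry type and spillBeforeIndirectStore are hypothetical illustrations, not coreclr code.

#include <cassert>
#include <vector>

struct Entry
{
    bool readsGlobalMemory; // stands in for the GTF_GLOB_REF flag
    bool spilled;           // set once the entry has been forced to evaluate
};

// Before appending a statement that stores through a pointer or field, force every
// pending entry that could read the same memory to be evaluated first.
static void spillBeforeIndirectStore(std::vector<Entry>& stack)
{
    for (Entry& e : stack)
    {
        if (e.readsGlobalMemory)
        {
            e.spilled = true; // in the real JIT: assign the tree to a temp and put the temp on the stack
        }
    }
}

int main()
{
    std::vector<Entry> stack = {{true, false}, {false, false}};
    spillBeforeIndirectStore(stack);
    assert(stack[0].spilled);  // the potentially aliasing load is ordered before the store
    assert(!stack[1].spilled); // a non-aliasing entry can stay on the stack
    return 0;
}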
