Commit

[SCEV] Split isSCEVExprNeverPoison reasoning explicitly into scope and mustexecute parts [NFC]

Inspired by the needs of D111001 and D109845. The separation of concerns also makes it easier to reason about correctness and completeness.
preames committed Oct 2, 2021
1 parent c1e32b3 commit 26223af
Showing 2 changed files with 37 additions and 4 deletions.
10 changes: 10 additions & 0 deletions llvm/include/llvm/Analysis/ScalarEvolution.h
@@ -1920,6 +1920,16 @@ class ScalarEvolution {
/// would trigger undefined behavior on overflow.
SCEV::NoWrapFlags getNoWrapFlagsFromUB(const Value *V);

/// If S trivially defines a scope (without needing to recurse through
/// operands), return the first instruction in it. Else, return nullptr.
/// (See scope definition rules associated with flag discussion above)
const Instruction *getDefinedScopeRoot(const SCEV *S);

/// Given two instructions in the same function, return true if we can
/// prove B must execute given A executes.
bool isGuaranteedToTransferExecutionTo(const Instruction *A,
const Instruction *B);

/// Return true if the SCEV corresponding to \p I is never poison. Proving
/// this is more complex than proving that just \p I is never poison, since
/// SCEV commons expressions across control flow, and you can have cases
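Read together, the two new declarations are meant to be composed: getDefinedScopeRoot identifies where an expression's value starts being defined, and isGuaranteedToTransferExecutionTo asks whether a second instruction must execute once that point is reached. The fragment below is only an illustrative sketch of that composition (OpS and I are placeholder names, and the code assumes it runs inside ScalarEvolution); the actual call site is in the ScalarEvolution.cpp change that follows.

// Illustrative sketch, not part of the commit: OpS is some operand's SCEV and
// I is the instruction whose flags are being justified.
if (const Instruction *DefI = getDefinedScopeRoot(OpS))
  if (isGuaranteedToTransferExecutionTo(DefI, I))
    ; // I is guaranteed to execute whenever OpS's defining scope is entered.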
31 changes: 27 additions & 4 deletions llvm/lib/Analysis/ScalarEvolution.cpp
@@ -6569,6 +6569,31 @@ SCEV::NoWrapFlags ScalarEvolution::getNoWrapFlagsFromUB(const Value *V) {
return isSCEVExprNeverPoison(BinOp) ? Flags : SCEV::FlagAnyWrap;
}

const Instruction *ScalarEvolution::getDefinedScopeRoot(const SCEV *S) {
if (auto *AddRec = dyn_cast<SCEVAddRecExpr>(S))
return &*AddRec->getLoop()->getHeader()->begin();
// TODO: add SCEVConstant and SCEVUnknown cases here
return nullptr;
}
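The TODO above could plausibly be addressed as sketched below. This is not part of the commit and assumes ScalarEvolution's F member (the function being analyzed): a constant is defined everywhere, so the function entry serves as a trivial scope root, and a SCEVUnknown wrapping an instruction is defined at that instruction.

// Hypothetical extension covering the TODO cases; illustrative only.
const Instruction *ScalarEvolution::getDefinedScopeRoot(const SCEV *S) {
  if (isa<SCEVConstant>(S))
    // A constant is defined on entry to the function.
    return &*F.getEntryBlock().begin();
  if (auto *U = dyn_cast<SCEVUnknown>(S))
    if (auto *I = dyn_cast<Instruction>(U->getValue()))
      // An unknown wrapping an instruction is defined at that instruction.
      return I;
  if (auto *AddRec = dyn_cast<SCEVAddRecExpr>(S))
    return &*AddRec->getLoop()->getHeader()->begin();
  return nullptr;
}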

static bool
isGuaranteedToTransferExecutionToSuccessor(BasicBlock::const_iterator Begin,
BasicBlock::const_iterator End) {
return llvm::all_of(make_range(Begin, End), [](const Instruction &I) {
return isGuaranteedToTransferExecutionToSuccessor(&I);
});
}

bool ScalarEvolution::isGuaranteedToTransferExecutionTo(const Instruction *A,
const Instruction *B) {
if (A->getParent() == B->getParent() &&
::isGuaranteedToTransferExecutionToSuccessor(A->getIterator(),
B->getIterator()))
return true;
return false;
}
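As written, the helper only proves the intra-block case. One possible next step, sketched below and not part of this commit, would be inserted before the final return false and follows a unique successor edge when the rest of A's block is guaranteed to transfer execution:

  // Hypothetical extension, illustrative only: A's block branches to a single
  // successor that contains B, and every instruction from A through that
  // branch, and from the start of the successor up to B, transfers execution.
  const BasicBlock *BBA = A->getParent();
  if (const BasicBlock *Succ = BBA->getSingleSuccessor())
    if (Succ == B->getParent() &&
        ::isGuaranteedToTransferExecutionToSuccessor(A->getIterator(),
                                                     BBA->end()) &&
        ::isGuaranteedToTransferExecutionToSuccessor(Succ->begin(),
                                                     B->getIterator()))
      return true;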


bool ScalarEvolution::isSCEVExprNeverPoison(const Instruction *I) {
// Here we check that I is in the header of the innermost loop containing I,
// since we only deal with instructions in the loop header. The actual loop we
@@ -6600,11 +6625,9 @@ bool ScalarEvolution::isSCEVExprNeverPoison(const Instruction *I) {
// TODO: We can do better here in some cases.
if (!isSCEVable(Op->getType()))
return false;
const SCEV *OpS = getSCEV(Op);
if (auto *AddRecS = dyn_cast<SCEVAddRecExpr>(OpS)) {
if (isGuaranteedToExecuteForEveryIteration(I, AddRecS->getLoop()))
if (auto *DefI = getDefinedScopeRoot(getSCEV(Op)))
if (isGuaranteedToTransferExecutionTo(DefI, I))
return true;
}
}
return false;
}
