[VectorCombine] improve readability; NFC
If we are going to allow adjusting the pointer for GEPs,
rearranging the code a bit will make it easier to follow.
rotateright committed Dec 10, 2020
1 parent ed4783f commit 12b684a
Showing 1 changed file with 8 additions and 9 deletions.

llvm/lib/Transforms/Vectorize/VectorCombine.cpp (+8, -9)
@@ -116,15 +116,16 @@ bool VectorCombine::vectorizeLoadInsert(Instruction &I) {
     return false;
 
   // TODO: Extend this to match GEP with constant offsets.
-  Value *PtrOp = Load->getPointerOperand()->stripPointerCasts();
-  assert(isa<PointerType>(PtrOp->getType()) && "Expected a pointer type");
-  unsigned AS = Load->getPointerAddressSpace();
+  const DataLayout &DL = I.getModule()->getDataLayout();
+  Value *SrcPtr = Load->getPointerOperand()->stripPointerCasts();
+  assert(isa<PointerType>(SrcPtr->getType()) && "Expected a pointer type");
 
   // If original AS != Load's AS, we can't bitcast the original pointer and have
   // to use Load's operand instead. Ideally we would want to strip pointer casts
   // without changing AS, but there's no API to do that ATM.
-  if (AS != PtrOp->getType()->getPointerAddressSpace())
-    PtrOp = Load->getPointerOperand();
+  unsigned AS = Load->getPointerAddressSpace();
+  if (AS != SrcPtr->getType()->getPointerAddressSpace())
+    SrcPtr = Load->getPointerOperand();
 
   Type *ScalarTy = Scalar->getType();
   uint64_t ScalarSize = ScalarTy->getPrimitiveSizeInBits();
@@ -136,11 +137,9 @@ bool VectorCombine::vectorizeLoadInsert(Instruction &I) {
   unsigned MinVecNumElts = MinVectorSize / ScalarSize;
   auto *MinVecTy = VectorType::get(ScalarTy, MinVecNumElts, false);
   Align Alignment = Load->getAlign();
-  const DataLayout &DL = I.getModule()->getDataLayout();
-  if (!isSafeToLoadUnconditionally(PtrOp, MinVecTy, Alignment, DL, Load, &DT))
+  if (!isSafeToLoadUnconditionally(SrcPtr, MinVecTy, Alignment, DL, Load, &DT))
     return false;
 
-
   // Original pattern: insertelt undef, load [free casts of] PtrOp, 0
   Type *LoadTy = Load->getType();
   int OldCost = TTI.getMemoryOpCost(Instruction::Load, LoadTy, Alignment, AS);
@@ -159,7 +158,7 @@ bool VectorCombine::vectorizeLoadInsert(Instruction &I) {
   // It is safe and potentially profitable to load a vector directly:
   // inselt undef, load Scalar, 0 --> load VecPtr
   IRBuilder<> Builder(Load);
-  Value *CastedPtr = Builder.CreateBitCast(PtrOp, MinVecTy->getPointerTo(AS));
+  Value *CastedPtr = Builder.CreateBitCast(SrcPtr, MinVecTy->getPointerTo(AS));
   Value *VecLd = Builder.CreateAlignedLoad(MinVecTy, CastedPtr, Alignment);
 
   // If the insert type does not match the target's minimum vector type,
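
For reference, a minimal sketch of the IR pattern this function rewrites, following the "inselt undef, load Scalar, 0 --> load VecPtr" comments in the hunks above; the float element type, the value names, and the 128-bit minimum vector width are illustrative assumptions, not taken from the commit:

  ; before: a scalar load feeding an insertelement into an undef vector
  %s = load float, float* %p, align 16
  %v = insertelement <4 x float> undef, float %s, i32 0

  ; after (roughly): bitcast the source pointer and load the minimum-width vector
  %vecptr = bitcast float* %p to <4 x float>*
  %v = load <4 x float>, <4 x float>* %vecptr, align 16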