diff --git a/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp b/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp index 5c747bbaa53af..ef6fb1084ada5 100644 --- a/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp +++ b/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp @@ -698,8 +698,7 @@ static bool isSafeAndProfitableToSinkLoad(LoadInst *L) { Instruction *InstCombinerImpl::foldPHIArgLoadIntoPHI(PHINode &PN) { LoadInst *FirstLI = cast<LoadInst>(PN.getIncomingValue(0)); - // Can't forward swifterror through a phi. - if (FirstLI->getOperand(0)->isSwiftError()) + if (!canReplaceOperandWithVariable(FirstLI, 0)) return nullptr; // FIXME: This is overconservative; this transform is allowed in some cases @@ -738,8 +737,7 @@ Instruction *InstCombinerImpl::foldPHIArgLoadIntoPHI(PHINode &PN) { LI->getPointerAddressSpace() != LoadAddrSpace) return nullptr; - // Can't forward swifterror through a phi. - if (LI->getOperand(0)->isSwiftError()) + if (!canReplaceOperandWithVariable(LI, 0)) return nullptr; // We can't sink the load if the loaded value could be modified between diff --git a/llvm/lib/Transforms/Utils/Local.cpp b/llvm/lib/Transforms/Utils/Local.cpp index b6ca52e2e5682..f1a7087e4e5a2 100644 --- a/llvm/lib/Transforms/Utils/Local.cpp +++ b/llvm/lib/Transforms/Utils/Local.cpp @@ -3864,6 +3864,12 @@ bool llvm::canReplaceOperandWithVariable(const Instruction *I, unsigned OpIdx) { if (Op->isSwiftError()) return false; + // Protected pointer field loads/stores should be paired with the intrinsic + // to avoid unnecessary address escapes. + if (auto *II = dyn_cast<IntrinsicInst>(Op)) + if (II->getIntrinsicID() == Intrinsic::protected_field_ptr) + return false; + // Cannot replace alloca argument with phi/select. 
if (I->isLifetimeStartOrEnd()) return false; diff --git a/llvm/test/Transforms/PhaseOrdering/phi-protected-field-ptr.ll b/llvm/test/Transforms/PhaseOrdering/phi-protected-field-ptr.ll new file mode 100644 index 0000000000000..bf60de446ea91 --- /dev/null +++ b/llvm/test/Transforms/PhaseOrdering/phi-protected-field-ptr.ll @@ -0,0 +1,38 @@ +; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5 +; RUN: opt -O2 -S < %s | FileCheck %s + +; Test that no optimization run at -O2 moves the loads into the exit block, +; as this causes unnecessary address escapes with pointer field protection. + +define ptr @phi_prot_ptr(i1 %sel, ptr %p1, ptr %p2) { +; CHECK-LABEL: define ptr @phi_prot_ptr( +; CHECK-SAME: i1 [[SEL:%.*]], ptr readonly [[P1:%.*]], ptr readonly [[P2:%.*]]) local_unnamed_addr #[[ATTR0:[0-9]+]] { +; CHECK-NEXT: br i1 [[SEL]], label %[[T:.*]], label %[[F:.*]] +; CHECK: [[T]]: +; CHECK-NEXT: [[PROTP1:%.*]] = tail call ptr @llvm.protected.field.ptr.p0(ptr [[P1]], i64 1, i1 true) +; CHECK-NEXT: [[LOAD1:%.*]] = load ptr, ptr [[PROTP1]], align 8 +; CHECK-NEXT: br label %[[EXIT:.*]] +; CHECK: [[F]]: +; CHECK-NEXT: [[PROTP2:%.*]] = tail call ptr @llvm.protected.field.ptr.p0(ptr [[P2]], i64 2, i1 true) +; CHECK-NEXT: [[LOAD2:%.*]] = load ptr, ptr [[PROTP2]], align 8 +; CHECK-NEXT: br label %[[EXIT]] +; CHECK: [[EXIT]]: +; CHECK-NEXT: [[RETVAL:%.*]] = phi ptr [ [[LOAD1]], %[[T]] ], [ [[LOAD2]], %[[F]] ] +; CHECK-NEXT: ret ptr [[RETVAL]] +; + br i1 %sel, label %t, label %f + +t: + %protp1 = call ptr @llvm.protected.field.ptr.p0(ptr %p1, i64 1, i1 true) + %load1 = load ptr, ptr %protp1 + br label %exit + +f: + %protp2 = call ptr @llvm.protected.field.ptr.p0(ptr %p2, i64 2, i1 true) + %load2 = load ptr, ptr %protp2 + br label %exit + +exit: + %retval = phi ptr [ %load1, %t ], [ %load2, %f ] + ret ptr %retval +}