From 4cb03c27841af2871b9aa8c30d5507824a5db328 Mon Sep 17 00:00:00 2001
From: Michael Gottesman
Date: Sun, 7 Jul 2019 05:21:36 -0700
Subject: [PATCH] [diagnose-unreachable] Constant fold simple switch_enum_addr
 to eliminate more unreachable code.

This patch comes out of reading some generic code that uses .none in
transparent functions to conditionally compile out code at -Onone. Sadly,
before this change the dead code in question would not be compiled out unless
the protocol was constrained to be a class protocol. I added a test that
validates that this conditional-compilation property can be relied on in
-Onone code in both the generic (address-based) and class-constrained cases.
---
 include/swift/SIL/Projection.h                |  21 +-
 .../Mandatory/DiagnoseUnreachable.cpp         | 366 +++++++++++++-----
 test/SILOptimizer/diagnose_unreachable.sil    | 294 ++++++++++++--
 ...ditional_compile_out_using_optionals.swift | 134 +++++++
 4 files changed, 679 insertions(+), 136 deletions(-)
 create mode 100644 test/SILOptimizer/mandatory_conditional_compile_out_using_optionals.swift

diff --git a/include/swift/SIL/Projection.h b/include/swift/SIL/Projection.h
index fe7534b1042f6..0f90bfc6f12f3 100644
--- a/include/swift/SIL/Projection.h
+++ b/include/swift/SIL/Projection.h
@@ -377,15 +377,15 @@ class Projection {

   /// Returns true if this instruction projects from an address type to an
   /// address subtype.
-  static SingleValueInstruction *isAddressProjection(SILValue V) {
-    switch (V->getKind()) {
+  static SingleValueInstruction *isAddressProjection(SILValue v) {
+    switch (v->getKind()) {
     default:
       return nullptr;
     case ValueKind::IndexAddrInst: {
-      auto I = cast<IndexAddrInst>(V);
-      unsigned Scalar;
-      if (getIntegerIndex(I->getIndex(), Scalar))
-        return I;
+      auto *i = cast<IndexAddrInst>(v);
+      unsigned scalar;
+      if (getIntegerIndex(i->getIndex(), scalar))
+        return i;
       return nullptr;
     }
     case ValueKind::StructElementAddrInst:
@@ -394,10 +394,17 @@ class Projection {
     case ValueKind::ProjectBoxInst:
     case ValueKind::TupleElementAddrInst:
     case ValueKind::UncheckedTakeEnumDataAddrInst:
-      return cast<SingleValueInstruction>(V);
+      return cast<SingleValueInstruction>(v);
     }
   }

+  static SingleValueInstruction *isAddressProjection(SILInstruction *i) {
+    auto *svi = dyn_cast<SingleValueInstruction>(i);
+    if (!svi)
+      return nullptr;
+    return isAddressProjection(SILValue(svi));
+  }
+
   /// Returns true if this instruction projects from an object type to an object
   /// subtype.
   static SingleValueInstruction *isObjectProjection(SILValue V) {
diff --git a/lib/SILOptimizer/Mandatory/DiagnoseUnreachable.cpp b/lib/SILOptimizer/Mandatory/DiagnoseUnreachable.cpp
index c92d6410642b2..9c88244249445 100644
--- a/lib/SILOptimizer/Mandatory/DiagnoseUnreachable.cpp
+++ b/lib/SILOptimizer/Mandatory/DiagnoseUnreachable.cpp
@@ -10,18 +10,20 @@
 //
 //===----------------------------------------------------------------------===//

-#define DEBUG_TYPE "diagnose-unreachable"
-#include "swift/SILOptimizer/PassManager/Passes.h"
+#define DEBUG_TYPE "sil-diagnose-unreachable"
 #include "swift/AST/DiagnosticsSIL.h"
 #include "swift/AST/Expr.h"
 #include "swift/AST/Pattern.h"
 #include "swift/AST/Stmt.h"
+#include "swift/SIL/MemAccessUtils.h"
+#include "swift/SIL/Projection.h"
 #include "swift/SIL/SILArgument.h"
 #include "swift/SIL/SILBuilder.h"
 #include "swift/SIL/SILUndef.h"
-#include "swift/SILOptimizer/Utils/Local.h"
-#include "swift/SILOptimizer/Utils/CFG.h"
+#include "swift/SILOptimizer/PassManager/Passes.h"
 #include "swift/SILOptimizer/PassManager/Transforms.h"
+#include "swift/SILOptimizer/Utils/CFG.h"
+#include "swift/SILOptimizer/Utils/Local.h"
 #include "llvm/ADT/STLExtras.h"
 #include "llvm/ADT/Statistic.h"
 #include "llvm/Support/Debug.h"
@@ -33,8 +35,6 @@ STATISTIC(NumTerminatorsFolded, "Number of terminators folded");
 STATISTIC(NumBasicBlockArgsPropagated,
           "Number of basic block arguments propagated");

-typedef llvm::SmallPtrSet<SILBasicBlock *, 16> SILBasicBlockSet;
-
 template <typename... T, typename... U>
 static void diagnose(ASTContext &Context, SourceLoc loc, Diag<T...> diag,
                      U &&...args) {
@@ -205,6 +205,242 @@ static void propagateBasicBlockArgs(SILBasicBlock &BB) {
   NumInstructionsRemoved += ToBeDeleted.size();
 }

+static bool constantFoldEnumTerminator(SILBasicBlock &BB,
+                                       UnreachableUserCodeReportingState *State,
+                                       SwitchEnumInstBase *SUI,
+                                       const EnumElementDecl *TheEnumElem,
+                                       SILValue value = SILValue(),
+                                       SILValue defaultValue = SILValue()) {
+  SILBasicBlock *TheSuccessorBlock = nullptr;
+  int ReachableBlockIdx = -1;
+  for (unsigned Idx = 0; Idx < SUI->getNumCases(); ++Idx) {
+    const EnumElementDecl *EI;
+    SILBasicBlock *BI;
+    std::tie(EI, BI) = SUI->getCase(Idx);
+    if (EI == TheEnumElem) {
+      TheSuccessorBlock = BI;
+      ReachableBlockIdx = Idx;
+      break;
+    }
+  }
+
+  SILBasicBlock *DB = nullptr;
+  if (!TheSuccessorBlock) {
+    if (SUI->hasDefault()) {
+      DB = SUI->getDefaultBB();
+      if (!isa<UnreachableInst>(DB->getTerminator())) {
+        TheSuccessorBlock = DB;
+        ReachableBlockIdx = SUI->getNumCases();
+      }
+    }
+  }
+
+  // Not fully covered switches will be diagnosed later. SILGen represents
+  // them with a Default basic block with an unreachable instruction.
+  // We are going to produce an error on all unreachable instructions not
+  // eliminated by DCE.
+  if (!TheSuccessorBlock)
+    return false;
+
+  // Replace the switch with a branch to TheSuccessorBlock.
+  SILBuilderWithScope B(&BB, SUI);
+  SILLocation Loc = SUI->getLoc();
+  if (!TheSuccessorBlock->args_empty()) {
+    // If the successor block that we are looking at is the default block,
+    // we create an argument not for the enum case, but for the original
+    // value.
+    SILValue branchOperand;
+    if (TheSuccessorBlock != DB) {
+      assert(value);
+      branchOperand = value;
+    } else {
+      assert(defaultValue);
+      branchOperand = defaultValue;
+    }
+    B.createBranch(Loc, TheSuccessorBlock, branchOperand);
+  } else
+    B.createBranch(Loc, TheSuccessorBlock);
+
+  // Produce diagnostic info if we are not within an inlined function or
+  // template instantiation.
+  // FIXME: Do not report if we are within a template instantiation.
+  assert(ReachableBlockIdx >= 0);
+  if (Loc.is<RegularLocation>() && State) {
+    // Find the first unreachable block in the switch so that we could use
+    // it for better diagnostics.
+    SILBasicBlock *UnreachableBlock = nullptr;
+    if (SUI->getNumCases() > 1) {
+      // More than one case.
+      UnreachableBlock = (ReachableBlockIdx == 0) ? SUI->getCase(1).second
+                                                  : SUI->getCase(0).second;
+    } else {
+      if (SUI->getNumCases() == 1 && SUI->hasDefault()) {
+        // One case and a default.
+        UnreachableBlock = (ReachableBlockIdx == 0) ? SUI->getDefaultBB()
+                                                    : SUI->getCase(0).second;
+      }
+    }
+
+    // Generate diagnostic info.
+    if (UnreachableBlock &&
+        !State->PossiblyUnreachableBlocks.count(UnreachableBlock)) {
+      State->PossiblyUnreachableBlocks.insert(UnreachableBlock);
+      State->MetaMap.insert(std::pair<const SILBasicBlock *, UnreachableInfo>(
+          UnreachableBlock,
+          UnreachableInfo{UnreachableKind::FoldedSwitchEnum, Loc, true}));
+    }
+  }
+
+  LLVM_DEBUG(llvm::dbgs() << "Folding terminator: " << *SUI);
+  recursivelyDeleteTriviallyDeadInstructions(SUI, true);
+  NumTerminatorsFolded++;
+  return true;
+}
+
+static InjectEnumAddrInst *
+getAllocStackSingleInitializingInjectEnumAddr(SwitchEnumAddrInst *SEAI) {
+  auto *stackSlot = dyn_cast<AllocStackInst>(SEAI->getOperand());
+  if (!stackSlot)
+    return nullptr;
+
+  LLVM_DEBUG(llvm::dbgs() << "Visiting Stack: " << *stackSlot);
+
+  InjectEnumAddrInst *singleInitializer = nullptr;
+  InitEnumDataAddrInst *singleInitializerAddr = nullptr;
+  SmallVector<Operand *, 64> worklist(stackSlot->use_begin(),
+                                      stackSlot->use_end());
+  LLVM_DEBUG(SWIFT_DEFER { llvm::dbgs() << "Exiting!\n"; });
+  while (worklist.size()) {
+    auto *op = worklist.pop_back_val();
+
+    LLVM_DEBUG(llvm::dbgs() << "Visiting: " << *op->getUser());
+    if (auto *svi = Projection::isAddressProjection(op->getUser())) {
+      LLVM_DEBUG(llvm::dbgs() << "Address projection. Continuing\n");
+      copy(svi->getUses(), std::back_inserter(worklist));
+      continue;
+    }
+
+    auto *user = op->getUser();
+
+    // Skip the switch_enum_addr itself.
+    if (user == SEAI) {
+      LLVM_DEBUG(llvm::dbgs() << "Skipping SEAI.\n");
+      continue;
+    }
+
+    if (isa<LoadInst>(user) || isa<LoadBorrowInst>(user) ||
+        isa<DestroyAddrInst>(user) || isa<DeallocStackInst>(user)) {
+      LLVM_DEBUG(llvm::dbgs() << "Skipping loads/lifetime ends\n");
+      continue;
+    }
+
+    // A copy_addr that only reads from the memory (without taking it) is ok.
+    if (auto *cai = dyn_cast<CopyAddrInst>(user)) {
+      if (cai->getDest() == op->get() || cai->isTakeOfSrc() == IsTake) {
+        LLVM_DEBUG(llvm::dbgs() << "Found cai taking from src. Bailing!\n");
+        return nullptr;
+      }
+      LLVM_DEBUG(llvm::dbgs() << "Skipping!\n");
+      continue;
+    }
+
+    // Stash the initializer addr. We want to make sure it doesn't
+    // escape after we process.
+    if (auto *iedai = dyn_cast<InitEnumDataAddrInst>(user)) {
+      if (singleInitializerAddr) {
+        LLVM_DEBUG(llvm::dbgs() << "Multiple InitEnumDataAddrInst?!\n");
+        return nullptr;
+      }
+      singleInitializerAddr = iedai;
+      LLVM_DEBUG(llvm::dbgs() << "Continuing\n");
+      continue;
+    }
+
+    if (auto *ieai = dyn_cast<InjectEnumAddrInst>(user)) {
+      // If the single initializer is already set, the enum is injected into
+      // more than once, so bail.
+      if (singleInitializer) {
+        LLVM_DEBUG(llvm::dbgs() << "Multiple InjectEnumAddrInst?!\n");
+        return nullptr;
+      }
+      singleInitializer = ieai;
+      LLVM_DEBUG(llvm::dbgs() << "Continuing\n");
+      continue;
+    }
+
+    LLVM_DEBUG(llvm::dbgs() << "Bailing at end of loop!\n");
+    return nullptr;
+  }
+
+  LLVM_DEBUG(llvm::dbgs() << "After Loop\n");
+
+  // If we didn't find any initializer, bail: this is not the
+  // single-initialization SILGen temporary pattern we are looking for.
+  if (!singleInitializer) {
+    LLVM_DEBUG(llvm::dbgs() << "Did not find single initializer! Bailing!\n");
+    return nullptr;
+  }
+
+  // If we didn't have an addr, then it means we had a case without a
+  // payload.
+  if (!singleInitializerAddr) {
+    assert(!singleInitializer->getElement()->hasAssociatedValues());
+    LLVM_DEBUG(llvm::dbgs()
+               << "Did not find single initializer addr! Bailing!\n");
+    return singleInitializer;
+  }
+
+  // Otherwise, make sure the payload address is initialized only once and
+  // never escapes.
+  copy(singleInitializerAddr->getUses(), std::back_inserter(worklist));
+  bool foundInitializer = false;
+  while (worklist.size()) {
+    auto *op = worklist.pop_back_val();
+    LLVM_DEBUG(llvm::dbgs() << "Read only check for: " << *op->getUser());
+
+    // Look through projections.
+    if (auto *svi = Projection::isAddressProjection(op->getUser())) {
+      copy(svi->getUses(), std::back_inserter(worklist));
+      continue;
+    }
+
+    // Skip memory initializing operands. We should only ever see one
+    // since SILGen always initializes temporary allocations (our
+    // target) that way.
+    if (isa<StoreInst>(op->getUser())) {
+      if (foundInitializer) {
+        LLVM_DEBUG(llvm::dbgs() << "Found multiple initializers! Bailing!\n");
+        return nullptr;
+      }
+      foundInitializer = true;
+      continue;
+    }
+
+    if (auto *cai = dyn_cast<CopyAddrInst>(op->getUser())) {
+      if (cai->getDest() != op->get() ||
+          cai->isInitializationOfDest() != IsInitialization) {
+        return nullptr;
+      }
+      if (foundInitializer) {
+        LLVM_DEBUG(llvm::dbgs() << "Found multiple initializers! Bailing!\n");
+        return nullptr;
+      }
+      foundInitializer = true;
+      continue;
+    }
+
+    // Consider anything else unacceptable.
+    LLVM_DEBUG(llvm::dbgs() << "Found unknown addr initializer\n");
+    return nullptr;
+  }
+
+  // If we did not find a single address initializer, bail.
+  if (!foundInitializer)
+    return nullptr;
+
+  return singleInitializer;
+}
+
 static bool constantFoldTerminator(SILBasicBlock &BB,
                                    UnreachableUserCodeReportingState *State) {
   TermInst *TI = BB.getTerminator();
@@ -268,94 +504,23 @@ static bool constantFoldTerminator(SILBasicBlock &BB,
   //   case #Bool.false!unionelt: bb2
   // =>
   //   br bb2
-  if (auto *SUI = dyn_cast<SwitchEnumInst>(TI)) {
-    if (auto *TheEnum = dyn_cast<EnumInst>(SUI->getOperand())) {
-      const EnumElementDecl *TheEnumElem = TheEnum->getElement();
-      SILBasicBlock *TheSuccessorBlock = nullptr;
-      int ReachableBlockIdx = -1;
-      for (unsigned Idx = 0; Idx < SUI->getNumCases(); ++Idx) {
-        const EnumElementDecl *EI;
-        SILBasicBlock *BI;
-        std::tie(EI, BI) = SUI->getCase(Idx);
-        if (EI == TheEnumElem) {
-          TheSuccessorBlock = BI;
-          ReachableBlockIdx = Idx;
-          break;
-        }
-      }
-
-      SILBasicBlock *DB = nullptr;
-      if (!TheSuccessorBlock) {
-        if (SUI->hasDefault()) {
-          DB = SUI->getDefaultBB();
-          if (!isa<UnreachableInst>(DB->getTerminator())) {
-            TheSuccessorBlock = DB;
-            ReachableBlockIdx = SUI->getNumCases();
-          }
-        }
-      }
-
-      // Not fully covered switches will be diagnosed later. SILGen represents
-      // them with a Default basic block with an unreachable instruction.
-      // We are going to produce an error on all unreachable instructions not
-      // eliminated by DCE.
-      if (!TheSuccessorBlock)
-        return false;
-
-      // Replace the switch with a branch to the TheSuccessorBlock.
-      SILBuilderWithScope B(&BB, TI);
-      SILLocation Loc = TI->getLoc();
-      if (!TheSuccessorBlock->args_empty()) {
-        // If the successor block that we are looking at is the default block,
-        // we create an argument not for the enum case, but for the original
-        // value.
-        SILValue branchOperand;
-        if (TheSuccessorBlock != DB) {
-          assert(TheEnum->hasOperand());
-          branchOperand = TheEnum->getOperand();
-        } else {
-          branchOperand = TheEnum;
-        }
-        B.createBranch(Loc, TheSuccessorBlock, branchOperand);
-      } else
-        B.createBranch(Loc, TheSuccessorBlock);
-
-      // Produce diagnostic info if we are not within an inlined function or
-      // template instantiation.
-      // FIXME: Do not report if we are within a template instantiation.
-      assert(ReachableBlockIdx >= 0);
-      if (Loc.is<RegularLocation>() && State) {
-        // Find the first unreachable block in the switch so that we could use
-        // it for better diagnostics.
-        SILBasicBlock *UnreachableBlock = nullptr;
-        if (SUI->getNumCases() > 1) {
-          // More than one case.
-          UnreachableBlock =
-            (ReachableBlockIdx == 0) ? SUI->getCase(1).second:
-                                       SUI->getCase(0).second;
-        } else {
-          if (SUI->getNumCases() == 1 && SUI->hasDefault()) {
-            // One case and a default.
-            UnreachableBlock =
-              (ReachableBlockIdx == 0) ? SUI->getDefaultBB():
-                                         SUI->getCase(0).second;
-          }
-        }
-
-        // Generate diagnostic info.
-        if (UnreachableBlock &&
-            !State->PossiblyUnreachableBlocks.count(UnreachableBlock)) {
-          State->PossiblyUnreachableBlocks.insert(UnreachableBlock);
-          State->MetaMap.insert(
-            std::pair<const SILBasicBlock *, UnreachableInfo>(
-              UnreachableBlock,
-              UnreachableInfo{UnreachableKind::FoldedSwitchEnum, Loc, true}));
-        }
-      }
-
-      recursivelyDeleteTriviallyDeadInstructions(TI, true);
-      NumTerminatorsFolded++;
-      return true;
+  if (auto *SEI = dyn_cast<SwitchEnumInst>(TI)) {
+    if (auto *TheEnum = dyn_cast<EnumInst>(SEI->getOperand())) {
+      SILValue operand =
+          TheEnum->hasOperand() ? TheEnum->getOperand() : SILValue();
+      return constantFoldEnumTerminator(BB, State, SEI, TheEnum->getElement(),
+                                        operand /*case*/, TheEnum /*default*/);
+    }
+  }
+  if (auto *SEAI = dyn_cast<SwitchEnumAddrInst>(TI)) {
+    // We look for an alloc_stack that never escapes and that is initialized
+    // only once. This ensures we only need to find one initialization. This is
+    // a common pattern when unwrapping optional values in transparent
+    // functions.
+    //
+    // TODO: This needs a better name.
+    if (auto *IEAI = getAllocStackSingleInitializingInjectEnumAddr(SEAI)) {
+      return constantFoldEnumTerminator(BB, State, SEAI, IEAI->getElement());
     }
   }

@@ -613,12 +778,11 @@ static bool simplifyBlocksWithCallsToNoReturn(SILBasicBlock &BB,
 ///
 /// Note, we rely on SILLocation information to determine if SILInstructions
 /// correspond to user code.
-static bool diagnoseUnreachableBlock(const SILBasicBlock &B,
-                                     SILModule &M,
-                                     const SILBasicBlockSet &Reachable,
-                                     UnreachableUserCodeReportingState *State,
-                                     const SILBasicBlock *TopLevelB,
-                                     llvm::SmallPtrSetImpl<const SILBasicBlock *> &Visited){
+static bool diagnoseUnreachableBlock(
+    const SILBasicBlock &B, SILModule &M,
+    const SmallPtrSetImpl<SILBasicBlock *> &Reachable,
+    UnreachableUserCodeReportingState *State, const SILBasicBlock *TopLevelB,
+    llvm::SmallPtrSetImpl<const SILBasicBlock *> &Visited) {
   if (Visited.count(&B))
     return false;
   Visited.insert(&B);
@@ -714,7 +878,7 @@ static bool removeUnreachableBlocks(SILFunction &F, SILModule &M,
   if (F.empty())
     return false;

-  SILBasicBlockSet Reachable;
+  SmallPtrSet<SILBasicBlock *, 16> Reachable;
   SmallVector<SILBasicBlock *, 16> Worklist;
   Worklist.push_back(&F.front());
   Reachable.insert(&F.front());
diff --git a/test/SILOptimizer/diagnose_unreachable.sil b/test/SILOptimizer/diagnose_unreachable.sil
index 86a7be4a4e8d6..391578752fcb4 100644
--- a/test/SILOptimizer/diagnose_unreachable.sil
+++ b/test/SILOptimizer/diagnose_unreachable.sil
@@ -5,6 +5,11 @@ import Swift

 sil @guaranteed_nativeobject_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()

+enum FakeOptional<T> {
+case none
+case some(T)
+}
+
 sil private @test1 : $() -> () {
 bb0:
   %5 = integer_literal $Builtin.Int1, 1
@@ -244,6 +249,11 @@ bb2:                                              // Preds: bb1 bb0
 // CHECK-NEXT:   {{ unreachable}}
 // CHECK: }

+// CHECK-LABEL: sil @dead_use_of_alloc_stack :
+// CHECK: bb
+// CHECK: alloc_stack
+// CHECK: dealloc_stack
+// CHECK: } // end sil function 'dead_use_of_alloc_stack'
 sil @dead_use_of_alloc_stack : $@convention(thin) () -> () {
 bb0:
   %1 = alloc_stack $((), (), ())
@@ -252,14 +262,16 @@ bb0:
   %3 = tuple ()
   return %3 : $()
 }
-// CHECK-LABEL: sil @dead_use_of_alloc_stack
-// CHECK: bb
-// CHECK: alloc_stack
-// CHECK: dealloc_stack
-// CHECK: }

 enum BoolLike { case true_, false_ }

+sil @boollike_escape : $@convention(thin) (@inout BoolLike) -> ()
+
+// CHECK-LABEL: sil @constant_fold_switch_enum :
+// CHECK: bb0(%0 : $Int):
+// CHECK-NEXT: br bb1
+// CHECK-NOT: switch_enum
+// CHECK: } // end sil function 'constant_fold_switch_enum'
 sil @constant_fold_switch_enum : $@convention(thin) (Int) -> Int {
 bb0(%0 : $Int):
   %6 = enum $BoolLike, #BoolLike.false_!enumelt  // user: %9
@@ -273,38 +285,35 @@ bb2:

 bb3:
   return %0 : $Int
-// CHECK-LABEL: sil @constant_fold_switch_enum
-// CHECK: bb0(%0 : $Int):
-// CHECK-NEXT: br bb1
-// CHECK-NOT: switch_enum
-// CHECK: }
 }

 enum Singleton {
-  case x(Int, UnicodeScalar)
-};
-
-sil @constant_fold_switch_enum_with_payload : $@convention(thin) (Int, UnicodeScalar) -> (Int, UnicodeScalar) {
-bb0(%6 : $Int, %10 : $UnicodeScalar):
-  %11 = tuple (%6 : $Int, %10 : $UnicodeScalar)
-  %12 = enum $Singleton, #Singleton.x!enumelt.1, %11 : $(Int, UnicodeScalar)
-  switch_enum %12 : $Singleton, case #Singleton.x!enumelt.1: bb1
-
-bb1(%15 : $(Int, UnicodeScalar)):
-  br bb2(%15 : $(Int, UnicodeScalar))
-
-bb2(%16 : $(Int, UnicodeScalar)):
-  return %16 : $(Int, UnicodeScalar)
+  case x(Int, Unicode.Scalar)
+}

-b3:
-  %111 = tuple (%6 : $Int, %10 : $UnicodeScalar)
-  br bb2(%111 : $(Int, UnicodeScalar))
+sil @singleton_inout_user : $@convention(thin) (@inout (Int, Unicode.Scalar)) -> ()

-// CHECK-LABEL: sil @constant_fold_switch_enum_with_payload
+// CHECK-LABEL: sil @constant_fold_switch_enum_with_payload :
 // CHECK: bb0(%{{[0-9]+}} : $Int, %{{[0-9]+}} : $Unicode.Scalar):
 // CHECK: br bb1
 // CHECK: bb1:
 // CHECK: return
+// CHECK: } // end sil function 'constant_fold_switch_enum_with_payload'
+sil @constant_fold_switch_enum_with_payload : $@convention(thin) (Int, Unicode.Scalar) -> (Int, Unicode.Scalar) {
+bb0(%6 : $Int, %10 : $Unicode.Scalar):
+  %11 = tuple (%6 : $Int, %10 : $Unicode.Scalar)
+  %12 = enum $Singleton, #Singleton.x!enumelt.1, %11 : $(Int, Unicode.Scalar)
+  switch_enum %12 : $Singleton, case #Singleton.x!enumelt.1: bb1
+
+bb1(%15 : $(Int, Unicode.Scalar)):
+  br bb2(%15 : $(Int, Unicode.Scalar))
+
+bb2(%16 : $(Int, Unicode.Scalar)):
+  return %16 : $(Int, Unicode.Scalar)
+
+b3:
+  %111 = tuple (%6 : $Int, %10 : $Unicode.Scalar)
+  br bb2(%111 : $(Int, Unicode.Scalar))
 }

 sil @constant_fold_switch_value : $@convention(thin) (Int) -> Int {
@@ -522,3 +531,232 @@ bb1(%1 : @guaranteed $Builtin.NativeObject):
   %9999 = tuple()
   return %9999 : $()
 }
+
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr : $@convention(thin) (Int) -> Int {
+// CHECK: bb0(%0 : $Int):
+// CHECK-NEXT: alloc_stack $BoolLike
+// CHECK-NEXT: inject_enum_addr
+// CHECK-NEXT: br bb1
+// CHECK-NOT: switch_enum_addr
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr'
+sil @constant_fold_switch_enum_addr : $@convention(thin) (Int) -> Int {
+bb0(%0 : $Int):
+  %1 = alloc_stack $BoolLike
+  inject_enum_addr %1 : $*BoolLike, #BoolLike.false_!enumelt
+  switch_enum_addr %1 : $*BoolLike, case #BoolLike.true_!enumelt: bb1, case #BoolLike.false_!enumelt: bb2
+
+bb1:
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  dealloc_stack %1 : $*BoolLike
+  return %0 : $Int
+}
+
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_with_payload : $@convention(thin) (Int, Unicode.Scalar) -> () {
+// CHECK: bb0(
+// CHECK-NEXT: alloc_stack
+// CHECK-NEXT: tuple
+// CHECK-NEXT: init_enum_data_addr
+// CHECK-NEXT: store
+// CHECK-NEXT: inject_enum_addr
+// CHECK-NEXT: br bb1
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_with_payload'
+sil @constant_fold_switch_enum_addr_with_payload : $@convention(thin) (Int, Unicode.Scalar) -> () {
+bb0(%6 : $Int, %10 : $Unicode.Scalar):
+  %1 = alloc_stack $Singleton
+  %11 = tuple (%6 : $Int, %10 : $Unicode.Scalar)
+  %2 = init_enum_data_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  store %11 to %2 : $*(Int, Unicode.Scalar)
+  inject_enum_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  switch_enum_addr %1 : $*Singleton, case #Singleton.x!enumelt.1: bb1
+
+bb1:
+  br bb2
+
+bb2:
+  dealloc_stack %1 : $*Singleton
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// Make sure we can handle various sorts of non trivial payload destruction scenarios.
+//
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_with_nontrivial_payload : $@convention(thin) (@owned Builtin.NativeObject) -> () {
+// CHECK: bb0(
+// CHECK-NEXT: alloc_stack
+// CHECK-NEXT: init_enum_data_addr
+// CHECK-NEXT: store
+// CHECK-NEXT: inject_enum_addr
+// CHECK-NEXT: br bb1
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_with_nontrivial_payload'
+sil @constant_fold_switch_enum_addr_with_nontrivial_payload : $@convention(thin) (@owned Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.NativeObject):
+  %1 = alloc_stack $FakeOptional<Builtin.NativeObject>
+  %2 = init_enum_data_addr %1 : $*FakeOptional<Builtin.NativeObject>, #FakeOptional.some!enumelt.1
+  store %0 to %2 : $*Builtin.NativeObject
+  inject_enum_addr %1 : $*FakeOptional<Builtin.NativeObject>, #FakeOptional.some!enumelt.1
+  switch_enum_addr %1 : $*FakeOptional<Builtin.NativeObject>, case #FakeOptional.some!enumelt.1: bb1, case #FakeOptional.none!enumelt: bb2
+
+bb1:
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  destroy_addr %1 : $*FakeOptional<Builtin.NativeObject>
+  dealloc_stack %1 : $*FakeOptional<Builtin.NativeObject>
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// Make sure we can handle various sorts of non trivial payload destruction scenarios.
+//
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_with_nontrivial_payload_2 : $@convention(thin) (@owned Builtin.NativeObject) -> () {
+// CHECK: bb0(
+// CHECK-NEXT: alloc_stack
+// CHECK-NEXT: init_enum_data_addr
+// CHECK-NEXT: store
+// CHECK-NEXT: inject_enum_addr
+// CHECK-NEXT: br bb1
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_with_nontrivial_payload_2'
+sil @constant_fold_switch_enum_addr_with_nontrivial_payload_2 : $@convention(thin) (@owned Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.NativeObject):
+  %1 = alloc_stack $FakeOptional<Builtin.NativeObject>
+  %2 = init_enum_data_addr %1 : $*FakeOptional<Builtin.NativeObject>, #FakeOptional.some!enumelt.1
+  store %0 to %2 : $*Builtin.NativeObject
+  inject_enum_addr %1 : $*FakeOptional<Builtin.NativeObject>, #FakeOptional.some!enumelt.1
+  switch_enum_addr %1 : $*FakeOptional<Builtin.NativeObject>, case #FakeOptional.some!enumelt.1: bb1, case #FakeOptional.none!enumelt: bb2
+
+bb1:
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  %3 = load %1 : $*FakeOptional<Builtin.NativeObject>
+  release_value %3 : $FakeOptional<Builtin.NativeObject>
+  dealloc_stack %1 : $*FakeOptional<Builtin.NativeObject>
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// Tests that make sure that we only support simple cases of switch_enum_addr
+// propagation (i.e. the alloc_stack is initialized by exactly one
+// inject_enum_addr and init_enum_data_addr).
+
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_multiple_injects : $@convention(thin) (Int) -> Int {
+// CHECK: bb0(%0 : $Int):
+// CHECK: switch_enum_addr
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_multiple_injects'
+sil @constant_fold_switch_enum_addr_multiple_injects : $@convention(thin) (Int) -> Int {
+bb0(%0 : $Int):
+  %1 = alloc_stack $BoolLike
+  cond_br undef, bb0a, bb0b
+
+bb0a:
+  inject_enum_addr %1 : $*BoolLike, #BoolLike.false_!enumelt
+  br bb0c
+
+bb0b:
+  inject_enum_addr %1 : $*BoolLike, #BoolLike.true_!enumelt
+  br bb0c
+
+bb0c:
+  switch_enum_addr %1 : $*BoolLike, case #BoolLike.true_!enumelt: bb1, case #BoolLike.false_!enumelt: bb2
+
+bb1:
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  dealloc_stack %1 : $*BoolLike
+  return %0 : $Int
+}
+
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_escape : $@convention(thin) (Int) -> Int {
+// CHECK: bb0(%0 : $Int):
+// CHECK: switch_enum_addr
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_escape'
+sil @constant_fold_switch_enum_addr_escape : $@convention(thin) (Int) -> Int {
+bb0(%0 : $Int):
+  %1 = alloc_stack $BoolLike
+  inject_enum_addr %1 : $*BoolLike, #BoolLike.false_!enumelt
+  %2 = function_ref @boollike_escape : $@convention(thin) (@inout BoolLike) -> ()
+  apply %2(%1) : $@convention(thin) (@inout BoolLike) -> ()
+  switch_enum_addr %1 : $*BoolLike, case #BoolLike.true_!enumelt: bb1, case #BoolLike.false_!enumelt: bb2
+
+bb1:
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  dealloc_stack %1 : $*BoolLike
+  return %0 : $Int
+}
+
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_with_payload_multiple_init_enum_data_addr : $@convention(thin) (Int, Unicode.Scalar) -> () {
+// CHECK: bb0(
+// CHECK: switch_enum_addr
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_with_payload_multiple_init_enum_data_addr'
+sil @constant_fold_switch_enum_addr_with_payload_multiple_init_enum_data_addr : $@convention(thin) (Int, Unicode.Scalar) -> () {
+bb0(%6 : $Int, %10 : $Unicode.Scalar):
+  %1 = alloc_stack $Singleton
+  %11 = tuple (%6 : $Int, %10 : $Unicode.Scalar)
+  cond_br undef, bb0a, bb0b
+
+bb0a:
+  %2a = init_enum_data_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  store %11 to %2a : $*(Int, Unicode.Scalar)
+  br bb0c
+
+bb0b:
+  %2b = init_enum_data_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  store undef to %2b : $*(Int, Unicode.Scalar)
+  br bb0c
+
+bb0c:
+  inject_enum_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  switch_enum_addr %1 : $*Singleton, case #Singleton.x!enumelt.1: bb1
+
+bb1:
+  br bb2
+
+bb2:
+  dealloc_stack %1 : $*Singleton
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil @constant_fold_switch_enum_addr_with_payload_escaping_init_enum_data_addr : $@convention(thin) (Int, Unicode.Scalar) -> () {
+// CHECK: bb0(
+// CHECK: switch_enum_addr
+// CHECK: } // end sil function 'constant_fold_switch_enum_addr_with_payload_escaping_init_enum_data_addr'
+sil @constant_fold_switch_enum_addr_with_payload_escaping_init_enum_data_addr : $@convention(thin) (Int, Unicode.Scalar) -> () {
+bb0(%6 : $Int, %10 : $Unicode.Scalar):
+  %1 = alloc_stack $Singleton
+  %11 = tuple (%6 : $Int, %10 : $Unicode.Scalar)
+  %2a = init_enum_data_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  store %11 to %2a : $*(Int, Unicode.Scalar)
+  %func = function_ref @singleton_inout_user : $@convention(thin) (@inout (Int, Unicode.Scalar)) -> ()
+  apply %func(%2a) : $@convention(thin) (@inout (Int, Unicode.Scalar)) -> ()
+  inject_enum_addr %1 : $*Singleton, #Singleton.x!enumelt.1
+  switch_enum_addr %1 : $*Singleton, case #Singleton.x!enumelt.1: bb1
+
+bb1:
+  br bb2
+
+bb2:
+  dealloc_stack %1 : $*Singleton
+  %9999 = tuple()
+  return %9999 : $()
+}
diff --git a/test/SILOptimizer/mandatory_conditional_compile_out_using_optionals.swift b/test/SILOptimizer/mandatory_conditional_compile_out_using_optionals.swift
new file mode 100644
index 0000000000000..74a7942f637a0
--- /dev/null
+++ b/test/SILOptimizer/mandatory_conditional_compile_out_using_optionals.swift
@@ -0,0 +1,134 @@
+// RUN: %target-swift-frontend -emit-sil -Onone %s | %FileCheck %s
+
+// This file contains test cases that show that we can properly conditionally
+// compile out code in -Onone contexts using @_transparent. It is important to
+// note that all test cases here should have _BOTH_ generic and concrete
+// implementations. Users should be able to depend on this in simple
+// transparent cases.
+//
+// The first check makes sure our SILGen codegen is as we expect it. The second
+// makes sure we optimize it as expected.
+
+enum MyEnum {
+case first
+case second
+}
+
+@_cdecl("cFuncOriginal")
+@inline(never)
+func cFuncOriginal() -> () {}
+
+@_cdecl("cFuncRefinement")
+@inline(never)
+func cFuncRefinement() -> () {}
+
+class Klass {
+  final var value: MyEnum = .first
+}
+
+protocol OriginalProtocol {
+  var value: Optional<Klass> { get }
+}
+
+extension OriginalProtocol {
+  @_transparent
+  var value: Optional<Klass> {
+    cFuncOriginal()
+    return nil
+  }
+}
+
+protocol RefinementProtocol : OriginalProtocol {
+  var klass: Klass { get }
+  var value: Optional<Klass> { get }
+}
+
+extension RefinementProtocol {
+  @_transparent
+  var value: Optional<Klass> {
+    cFuncRefinement()
+    return klass
+  }
+}
+
+struct OriginalProtocolImpl {}
+extension OriginalProtocolImpl : OriginalProtocol {}
+
+struct RefinementProtocolImpl {
+  private var _klass: Klass = Klass()
+  @_transparent var klass: Klass { return _klass }
+}
+extension RefinementProtocolImpl : RefinementProtocol {}
+
+@_transparent
+func transparentAddressCallee<T : OriginalProtocol>(_ t: T) -> MyEnum {
+  if let x = t.value {
+    return x.value
+  }
+  return .second
+}
+
+// CHECK-LABEL: sil hidden @$s49mandatory_conditional_compile_out_using_optionals24testOriginalProtocolImplAA6MyEnumOyF : $@convention(thin) () -> MyEnum {
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals15cFuncRefinementyyF :
+// CHECK: [[FUNCTION_REF:%.*]] = function_ref @$s49mandatory_conditional_compile_out_using_optionals13cFuncOriginalyyF :
+// CHECK-NEXT: apply [[FUNCTION_REF]]()
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals15cFuncRefinementyyF :
+// CHECK: } // end sil function '$s49mandatory_conditional_compile_out_using_optionals24testOriginalProtocolImplAA6MyEnumOyF'
+func testOriginalProtocolImpl() -> MyEnum {
+  let x = OriginalProtocolImpl()
+  return transparentAddressCallee(x)
+}
+
+// CHECK-LABEL: sil hidden @$s49mandatory_conditional_compile_out_using_optionals26testRefinementProtocolImplAA6MyEnumOyF : $@convention(thin) () -> MyEnum {
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals13cFuncOriginalyyF :
+// CHECK: [[FUNCTION_REF:%.*]] = function_ref @$s49mandatory_conditional_compile_out_using_optionals15cFuncRefinementyyF :
+// CHECK-NEXT: apply [[FUNCTION_REF]]()
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals13cFuncOriginalyyF :
+// CHECK: } // end sil function '$s49mandatory_conditional_compile_out_using_optionals26testRefinementProtocolImplAA6MyEnumOyF'
+func testRefinementProtocolImpl() -> MyEnum {
+  let x = RefinementProtocolImpl()
+  return transparentAddressCallee(x)
+}
+
+@_transparent
+func transparentObjectCallee<T : OriginalProtocol>(_ t: T) -> MyEnum where T : AnyObject {
+  if let x = t.value {
+    return x.value
+  }
+  return .second
+}
+
+class OriginalProtocolImplKlass {
+}
+extension OriginalProtocolImplKlass : OriginalProtocol {
+}
+
+class RefinementProtocolImplKlass {
+}
+extension RefinementProtocolImplKlass : RefinementProtocol {
+  var klass: Klass {
+    return Klass()
+  }
+}
+
+// CHECK-LABEL: sil hidden @$s49mandatory_conditional_compile_out_using_optionals29testOriginalProtocolImplKlassAA6MyEnumOyF : $@convention(thin) () -> MyEnum {
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals15cFuncRefinementyyF :
+// CHECK: [[FUNCTION_REF:%.*]] = function_ref @$s49mandatory_conditional_compile_out_using_optionals13cFuncOriginalyyF :
+// CHECK-NEXT: apply [[FUNCTION_REF]]()
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals15cFuncRefinementyyF :
+// CHECK: } // end sil function '$s49mandatory_conditional_compile_out_using_optionals29testOriginalProtocolImplKlassAA6MyEnumOyF'
+func testOriginalProtocolImplKlass() -> MyEnum {
+  let x = OriginalProtocolImplKlass()
+  return transparentObjectCallee(x)
+}
+
+// CHECK-LABEL: sil hidden @$s49mandatory_conditional_compile_out_using_optionals31testRefinementProtocolImplKlassAA6MyEnumOyF : $@convention(thin) () -> MyEnum {
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals13cFuncOriginalyyF :
+// CHECK: [[FUNCTION_REF:%.*]] = function_ref @$s49mandatory_conditional_compile_out_using_optionals15cFuncRefinementyyF :
+// CHECK-NEXT: apply [[FUNCTION_REF]]()
+// CHECK-NOT: function_ref @$s49mandatory_conditional_compile_out_using_optionals13cFuncOriginalyyF :
+// CHECK: } // end sil function '$s49mandatory_conditional_compile_out_using_optionals31testRefinementProtocolImplKlassAA6MyEnumOyF'
+func testRefinementProtocolImplKlass() -> MyEnum {
+  let x = RefinementProtocolImplKlass()
+  return transparentObjectCallee(x)
+}
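
For readers skimming the patch, the following is a minimal, self-contained sketch of the source-level idiom this folding serves. It is not part of the patch and the names (LogLevel, Configurable, effectiveLogLevel) are illustrative only: a @_transparent accessor that returns nil lets the matching if-let branch be deleted even at -Onone, because after mandatory inlining the optional lives in a stack temporary that is initialized by a single inject_enum_addr, which diagnose-unreachable can now constant fold.

enum LogLevel {
  case verbose
  case quiet
}

protocol Configurable {
  var logLevel: Optional<LogLevel> { get }
}

extension Configurable {
  // Default: no log level configured. Because the getter is @_transparent,
  // callers see the literal `nil` after mandatory inlining.
  @_transparent
  var logLevel: Optional<LogLevel> { return nil }
}

struct ReleaseConfig : Configurable {}

@_transparent
func effectiveLogLevel<T : Configurable>(_ config: T) -> LogLevel {
  // For an address-only Optional, SILGen typically lowers this `if let` to a
  // switch_enum_addr over a stack temporary holding `config.logLevel`. With
  // this patch, the temporary's single inject_enum_addr (.none) is enough to
  // fold the switch and drop the `.some` branch, even at -Onone.
  if let level = config.logLevel {
    return level
  }
  return .quiet
}

// At -Onone, the SIL for `effectiveLogLevel(ReleaseConfig())` should contain
// no trace of the `.some` branch after the mandatory passes run.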