diff --git a/include/swift/AST/KnownDecls.def b/include/swift/AST/KnownDecls.def index 32ce519b12c57..15a5081285d1d 100644 --- a/include/swift/AST/KnownDecls.def +++ b/include/swift/AST/KnownDecls.def @@ -44,6 +44,8 @@ FUNC_DECL(AllocateUninitializedArray, "_allocateUninitializedArray") FUNC_DECL(DeallocateUninitializedArray, "_deallocateUninitializedArray") +FUNC_DECL(FinalizeUninitializedArray, + "_finalizeUninitializedArray") FUNC_DECL(ForceBridgeFromObjectiveC, "_forceBridgeFromObjectiveC") diff --git a/include/swift/AST/SemanticAttrs.def b/include/swift/AST/SemanticAttrs.def index 944d56b9348d6..a77a6b295ad24 100644 --- a/include/swift/AST/SemanticAttrs.def +++ b/include/swift/AST/SemanticAttrs.def @@ -60,6 +60,7 @@ SEMANTICS_ATTR(ARRAY_WITH_UNSAFE_MUTABLE_BUFFER_POINTER, "array.withUnsafeMutabl SEMANTICS_ATTR(ARRAY_COUNT, "array.count") SEMANTICS_ATTR(ARRAY_DEALLOC_UNINITIALIZED, "array.dealloc_uninitialized") SEMANTICS_ATTR(ARRAY_UNINITIALIZED_INTRINSIC, "array.uninitialized_intrinsic") +SEMANTICS_ATTR(ARRAY_FINALIZE_INTRINSIC, "array.finalize_intrinsic") SEMANTICS_ATTR(SEQUENCE_FOR_EACH, "sequence.forEach") diff --git a/include/swift/AST/SemanticAttrs.h b/include/swift/AST/SemanticAttrs.h index b6b9926ca8165..7fe7c38644dee 100644 --- a/include/swift/AST/SemanticAttrs.h +++ b/include/swift/AST/SemanticAttrs.h @@ -19,6 +19,7 @@ #ifndef SWIFT_SEMANTICS_H #define SWIFT_SEMANTICS_H +#include "swift/Basic/LLVM.h" #include "llvm/ADT/StringRef.h" namespace swift { diff --git a/include/swift/SIL/SILNodes.def b/include/swift/SIL/SILNodes.def index 9bd877c60c166..44adf50b7aea8 100644 --- a/include/swift/SIL/SILNodes.def +++ b/include/swift/SIL/SILNodes.def @@ -880,8 +880,13 @@ NODE_RANGE(NonValueInstruction, UnreachableInst, CondFailInst) ABSTRACT_INST(MultipleValueInstruction, SILInstruction) FULLAPPLYSITE_MULTIPLE_VALUE_INST(BeginApplyInst, begin_apply, MultipleValueInstruction, MayHaveSideEffects, MayRelease) + +// begin_cow_mutation is defined to have side effects, because it has +// dependencies with instructions which retain the buffer operand. This prevents +// optimizations from moving begin_cow_mutation instructions across such retain +// instructions. 
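For context, begin_cow_mutation/end_cow_mutation make the classic copy-on-write uniqueness check explicit in SIL. A minimal source-level sketch of the pattern they model, using the public isKnownUniquelyReferenced (the stdlib itself uses the Builtin.beginCOWMutation/endCOWMutation entry points added later in this patch); the types here are illustrative, not stdlib code:

    final class Storage {
      var elements: [Int]
      init(_ elements: [Int]) { self.elements = elements }
    }

    struct COWVector {
      private var storage = Storage([])

      mutating func append(_ x: Int) {
        // This uniqueness check is what begin_cow_mutation represents in SIL.
        if !isKnownUniquelyReferenced(&storage) {
          storage = Storage(storage.elements)  // shared: copy before mutating
        }
        storage.elements.append(x)
        // The mutation is done; end_cow_mutation marks the buffer immutable again.
      }
    }

Moving a retain of storage across the uniqueness check would change its result, which is exactly why the instruction is modeled with side effects below.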
MULTIPLE_VALUE_INST(BeginCOWMutationInst, begin_cow_mutation, - MultipleValueInstruction, None, DoesNotRelease) + MultipleValueInstruction, MayHaveSideEffects, DoesNotRelease) MULTIPLE_VALUE_INST(DestructureStructInst, destructure_struct, MultipleValueInstruction, None, DoesNotRelease) MULTIPLE_VALUE_INST(DestructureTupleInst, destructure_tuple, diff --git a/include/swift/SILOptimizer/Differentiation/Common.h b/include/swift/SILOptimizer/Differentiation/Common.h index 7dff0973e46a8..649be7b871618 100644 --- a/include/swift/SILOptimizer/Differentiation/Common.h +++ b/include/swift/SILOptimizer/Differentiation/Common.h @@ -17,10 +17,12 @@ #ifndef SWIFT_SILOPTIMIZER_UTILS_DIFFERENTIATION_COMMON_H #define SWIFT_SILOPTIMIZER_UTILS_DIFFERENTIATION_COMMON_H +#include "swift/AST/SemanticAttrs.h" #include "swift/SIL/SILDifferentiabilityWitness.h" #include "swift/SIL/SILFunction.h" #include "swift/SIL/SILModule.h" #include "swift/SIL/TypeSubstCloner.h" +#include "swift/SILOptimizer/Analysis/ArraySemantic.h" #include "swift/SILOptimizer/Analysis/DifferentiableActivityAnalysis.h" namespace swift { diff --git a/lib/SIL/IR/SILModule.cpp b/lib/SIL/IR/SILModule.cpp index 50aadb293da66..a25be897da990 100644 --- a/lib/SIL/IR/SILModule.cpp +++ b/lib/SIL/IR/SILModule.cpp @@ -355,6 +355,7 @@ bool SILModule::linkFunction(SILFunction *F, SILModule::LinkingMode Mode) { SILFunction *SILModule::findFunction(StringRef Name, SILLinkage Linkage) { assert((Linkage == SILLinkage::Public || + Linkage == SILLinkage::SharedExternal || Linkage == SILLinkage::PublicExternal) && "Only a lookup of public functions is supported currently"); @@ -405,6 +406,9 @@ SILFunction *SILModule::findFunction(StringRef Name, SILLinkage Linkage) { // compilation, simply convert it into an external declaration, // so that a compiled version from the shared library is used. if (F->isDefinition() && + // Don't eliminate bodies of _alwaysEmitIntoClient functions + // (PublicNonABI linkage is de-serialized as SharedExternal) + F->getLinkage() != SILLinkage::SharedExternal && !F->getModule().getOptions().shouldOptimize()) { F->convertToDeclaration(); } diff --git a/lib/SILGen/SILGenApply.cpp b/lib/SILGen/SILGenApply.cpp index e80d4f8da4d5e..1b18f834f8fcd 100644 --- a/lib/SILGen/SILGenApply.cpp +++ b/lib/SILGen/SILGenApply.cpp @@ -4962,6 +4962,22 @@ void SILGenFunction::emitUninitializedArrayDeallocation(SILLocation loc, SGFContext()); } +ManagedValue SILGenFunction::emitUninitializedArrayFinalization(SILLocation loc, + SILValue array) { + auto &Ctx = getASTContext(); + auto finalize = Ctx.getFinalizeUninitializedArray(); + + CanType arrayTy = array->getType().getASTType(); + + // Invoke the intrinsic. + auto subMap = arrayTy->getContextSubstitutionMap(SGM.M.getSwiftModule(), + Ctx.getArrayDecl()); + RValue result = emitApplyOfLibraryIntrinsic(loc, finalize, subMap, + ManagedValue::forUnmanaged(array), + SGFContext()); + return std::move(result).getScalarValue(); +} + namespace { /// A cleanup that deallocates an uninitialized array. class DeallocateUninitializedArray: public Cleanup { diff --git a/lib/SILGen/SILGenExpr.cpp b/lib/SILGen/SILGenExpr.cpp index 24ee39a453cdb..315fd254557b7 100644 --- a/lib/SILGen/SILGenExpr.cpp +++ b/lib/SILGen/SILGenExpr.cpp @@ -2113,10 +2113,11 @@ ManagedValue Lowering::emitEndVarargs(SILGenFunction &SGF, SILLocation loc, SGF.Cleanups.setCleanupState(varargs.getAbortCleanup(), CleanupState::Dead); // Reactivate the result cleanup. 
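With the new emitUninitializedArrayFinalization entry point (and the varargs change that follows), every array built up from uninitialized storage is funneled through _finalizeUninitializedArray before it escapes. The public Array(unsafeUninitializedCapacity:initializingWith:) initializer, which this patch also updates in Array.swift, exposes the same allocate-initialize-finalize shape; a runnable sketch:

    // Public counterpart of the allocate/initialize/finalize dance that
    // SILGen emits for array literals and varargs.
    let pair = Array<Int>(unsafeUninitializedCapacity: 2) { buffer, initializedCount in
      buffer.baseAddress!.initialize(to: 10)
      (buffer.baseAddress! + 1).initialize(to: 20)
      initializedCount = 2  // the count setter path ends the COW mutation after this patch
    }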
- auto result = varargs.getArray(); - if (result.hasCleanup()) - SGF.Cleanups.setCleanupState(result.getCleanup(), CleanupState::Active); - return result; + auto array = varargs.getArray(); + if (array.hasCleanup()) + SGF.Cleanups.setCleanupState(array.getCleanup(), CleanupState::Active); + + return SGF.emitUninitializedArrayFinalization(loc, array.forward(SGF)); } RValue RValueEmitter::visitTupleExpr(TupleExpr *E, SGFContext C) { diff --git a/lib/SILGen/SILGenFunction.h b/lib/SILGen/SILGenFunction.h index ca922d32de6bf..4ab8b8cbbad40 100644 --- a/lib/SILGen/SILGenFunction.h +++ b/lib/SILGen/SILGenFunction.h @@ -1187,6 +1187,7 @@ class LLVM_LIBRARY_VISIBILITY SILGenFunction CleanupHandle enterDeallocateUninitializedArrayCleanup(SILValue array); void emitUninitializedArrayDeallocation(SILLocation loc, SILValue array); + ManagedValue emitUninitializedArrayFinalization(SILLocation loc, SILValue array); /// Emit a cleanup for an owned value that should be written back at end of /// scope if the value is not forwarded. diff --git a/lib/SILOptimizer/Analysis/MemoryBehavior.cpp b/lib/SILOptimizer/Analysis/MemoryBehavior.cpp index e3e2e5fd1d002..5eed0b391ea95 100644 --- a/lib/SILOptimizer/Analysis/MemoryBehavior.cpp +++ b/lib/SILOptimizer/Analysis/MemoryBehavior.cpp @@ -180,6 +180,7 @@ class MemoryBehaviorVisitor MemBehavior visitStrongReleaseInst(StrongReleaseInst *BI); MemBehavior visitReleaseValueInst(ReleaseValueInst *BI); MemBehavior visitSetDeallocatingInst(SetDeallocatingInst *BI); + MemBehavior visitBeginCOWMutationInst(BeginCOWMutationInst *BCMI); #define ALWAYS_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \ MemBehavior visit##Name##ReleaseInst(Name##ReleaseInst *BI); #include "swift/AST/ReferenceStorage.def" @@ -395,6 +396,14 @@ MemBehavior MemoryBehaviorVisitor::visitSetDeallocatingInst(SetDeallocatingInst return MemBehavior::None; } +MemBehavior MemoryBehaviorVisitor:: +visitBeginCOWMutationInst(BeginCOWMutationInst *BCMI) { + // begin_cow_mutation is defined to have side effects, because it has + // dependencies with instructions which retain the buffer operand. + // But it never interferes with any memory address. + return MemBehavior::None; +} + //===----------------------------------------------------------------------===// // Top Level Entrypoint //===----------------------------------------------------------------------===// diff --git a/lib/SILOptimizer/Differentiation/LinearMapInfo.cpp b/lib/SILOptimizer/Differentiation/LinearMapInfo.cpp index 0ad634cae3b28..9fc1a232f8ebf 100644 --- a/lib/SILOptimizer/Differentiation/LinearMapInfo.cpp +++ b/lib/SILOptimizer/Differentiation/LinearMapInfo.cpp @@ -414,6 +414,8 @@ void LinearMapInfo::generateDifferentiationDataStructures( // initialization is linear and handled separately. if (!shouldDifferentiateApplySite(ai) || isArrayLiteralIntrinsic(ai)) continue; + if (ArraySemanticsCall(ai, semantics::ARRAY_FINALIZE_INTRINSIC)) + continue; LLVM_DEBUG(getADDebugStream() << "Adding linear map struct field for " << *ai); addLinearMapToStruct(context, ai); diff --git a/lib/SILOptimizer/Differentiation/PullbackEmitter.cpp b/lib/SILOptimizer/Differentiation/PullbackEmitter.cpp index 5c9d3f8913000..0746e93d24075 100644 --- a/lib/SILOptimizer/Differentiation/PullbackEmitter.cpp +++ b/lib/SILOptimizer/Differentiation/PullbackEmitter.cpp @@ -1424,6 +1424,19 @@ void PullbackEmitter::visitApplyInst(ApplyInst *ai) { // special `store` and `copy_addr` support. 
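The pullback handling added in the next hunk relies on array.finalize_intrinsic being a semantic identity, and the pullback of an identity simply forwards the incoming cotangent. Stated as a self-contained sketch (on a toolchain that ships the _Differentiation module):

    import _Differentiation

    // VJP of the identity function: the pullback returns its argument
    // unchanged, so the adjoint of the argument accumulates the adjoint of
    // the result as-is.
    func identityVJP<T: Differentiable>(
      _ x: T
    ) -> (value: T, pullback: (T.TangentVector) -> T.TangentVector) {
      return (value: x, pullback: { v in v })
    }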
if (isArrayLiteralIntrinsic(ai)) return; + auto loc = ai->getLoc(); + auto *bb = ai->getParent(); + // Handle `array.finalize_intrinsic` applications. `array.finalize_intrinsic` + // semantically behaves like an identity function. + if (ArraySemanticsCall(ai, semantics::ARRAY_FINALIZE_INTRINSIC)) { + assert(ai->getNumArguments() == 1 && + "Expected intrinsic to have one operand"); + // Accumulate result's adjoint into argument's adjoint. + auto adjResult = getAdjointValue(bb, ai); + auto origArg = ai->getArgumentsWithoutIndirectResults().front(); + addAdjointValue(bb, origArg, adjResult, loc); + return; + } // Replace a call to a function with a call to its pullback. auto &nestedApplyInfo = getContext().getNestedApplyInfo(); auto applyInfoLookup = nestedApplyInfo.find(ai); @@ -1439,7 +1452,6 @@ void PullbackEmitter::visitApplyInst(ApplyInst *ai) { // Get the pullback. auto *field = getPullbackInfo().lookUpLinearMapDecl(ai); assert(field); - auto loc = ai->getLoc(); auto pullback = getPullbackStructElement(ai->getParent(), field); // Get the original result of the `apply` instruction. @@ -1478,7 +1490,6 @@ void PullbackEmitter::visitApplyInst(ApplyInst *ai) { } // Get formal callee pullback arguments. - auto *bb = ai->getParent(); assert(applyInfo.indices.results->getNumIndices() == 1); for (auto resultIndex : applyInfo.indices.results->getIndices()) { assert(resultIndex < origAllResults.size()); diff --git a/lib/SILOptimizer/Differentiation/VJPEmitter.cpp b/lib/SILOptimizer/Differentiation/VJPEmitter.cpp index e37aa1b1696e6..094690dcbbf1e 100644 --- a/lib/SILOptimizer/Differentiation/VJPEmitter.cpp +++ b/lib/SILOptimizer/Differentiation/VJPEmitter.cpp @@ -541,6 +541,15 @@ void VJPEmitter::visitApplyInst(ApplyInst *ai) { TypeSubstCloner::visitApplyInst(ai); return; } + // If callee is `array.finalize_intrinsic`, do standard cloning. + // `array.finalize_intrinsic` has special-case pullback generation. + if (ArraySemanticsCall(ai, semantics::ARRAY_FINALIZE_INTRINSIC)) { + LLVM_DEBUG(getADDebugStream() + << "Cloning `array.finalize_intrinsic` `apply`:\n" + << *ai << '\n'); + TypeSubstCloner::visitApplyInst(ai); + return; + } // If the original function is a semantic member accessor, do standard // cloning. Semantic member accessors have special pullback generation logic, // so all `apply` instructions can be directly cloned to the VJP. diff --git a/lib/SILOptimizer/LoopTransforms/ArrayOpt.h b/lib/SILOptimizer/LoopTransforms/ArrayOpt.h index 68644a33ee9bb..00bf1e0d9b492 100644 --- a/lib/SILOptimizer/LoopTransforms/ArrayOpt.h +++ b/lib/SILOptimizer/LoopTransforms/ArrayOpt.h @@ -118,15 +118,17 @@ class StructUseCollector { } } - /// Returns true if there is a single address user of the value. - bool hasSingleAddressUse(SILInstruction *SingleAddressUser) { + /// Returns true if there are only address users of the value. 
+ bool hasOnlyAddressUses(ApplyInst *use1, ApplyInst *use2) { if (!AggregateAddressUsers.empty()) return false; if (!ElementAddressUsers.empty()) return false; - if (StructAddressUsers.size() != 1) - return false; - return StructAddressUsers[0] == SingleAddressUser; + for (SILInstruction *user : StructAddressUsers) { + if (user != use1 && user != use2) + return false; + } + return true; } protected: diff --git a/lib/SILOptimizer/LoopTransforms/COWArrayOpt.cpp b/lib/SILOptimizer/LoopTransforms/COWArrayOpt.cpp index fc4ca58f5659c..1141d8dd7ea6d 100644 --- a/lib/SILOptimizer/LoopTransforms/COWArrayOpt.cpp +++ b/lib/SILOptimizer/LoopTransforms/COWArrayOpt.cpp @@ -436,6 +436,18 @@ bool COWArrayOpt::checkSafeArrayAddressUses(UserList &AddressUsers) { return true; } +template <typename UserRange> +ArraySemanticsCall getEndMutationCall(const UserRange &AddressUsers) { + for (auto *UseInst : AddressUsers) { + if (auto *AI = dyn_cast<ApplyInst>(UseInst)) { + ArraySemanticsCall ASC(AI); + if (ASC.getKind() == ArrayCallKind::kEndMutation) + return ASC; + } + } + return ArraySemanticsCall(); +} + /// Returns true if this instruction is a safe array use if all of its users are /// also safe array users. static SILValue isTransitiveSafeUser(SILInstruction *I) { @@ -642,7 +654,7 @@ bool COWArrayOpt::hasLoopOnlyDestructorSafeArrayOperations() { // Semantic calls are safe. ArraySemanticsCall Sem(Inst); - if (Sem) { + if (Sem && Sem.hasSelf()) { auto Kind = Sem.getKind(); // Safe because they create new arrays. if (Kind == ArrayCallKind::kArrayInit || @@ -811,8 +823,14 @@ void COWArrayOpt::hoistAddressProjections(Operand &ArrayOp) { } } -/// Check if this call to "make_mutable" is hoistable, and move it, or delete it -/// if it's already hoisted. +/// Check if this call to "make_mutable" is hoistable, and copy it, along with +/// the corresponding end_mutation call, to the loop pre-header. +/// +/// The original make_mutable/end_mutation calls remain in the loop, because +/// removing them would violate the COW representation rules. +/// Having those calls in the pre-header will then enable COWOpts (after +/// inlining) to constant fold the uniqueness check of the begin_cow_mutation +/// in the loop. bool COWArrayOpt::hoistMakeMutable(ArraySemanticsCall MakeMutable, bool dominatesExits) { LLVM_DEBUG(llvm::dbgs() << " Checking mutable array: " << CurrentArrayAddr); + auto ArrayUsers = llvm::map_range(MakeMutable.getSelf()->getUses(), + ValueBase::UseToUser()); + + // There should be a call to end_mutation. Find it so that we can copy it to + // the pre-header. + ArraySemanticsCall EndMutation = getEndMutationCall(ArrayUsers); + if (!EndMutation) { + EndMutation = getEndMutationCall(StructUses.StructAddressUsers); + if (!EndMutation) + return false; + } + // Hoist the make_mutable. LLVM_DEBUG(llvm::dbgs() << " Hoisting make_mutable: " << *MakeMutable); @@ -880,12 +910,18 @@ bool COWArrayOpt::hoistMakeMutable(ArraySemanticsCall MakeMutable, assert(MakeMutable.canHoist(Preheader->getTerminator(), DomTree) && "Should be able to hoist make_mutable"); - MakeMutable.hoist(Preheader->getTerminator(), DomTree); + // Copy the make_mutable and end_mutation calls to the pre-header. + TermInst *insertionPoint = Preheader->getTerminator(); + ApplyInst *hoistedMM = MakeMutable.copyTo(insertionPoint, DomTree); + ApplyInst *EMInst = EndMutation; + ApplyInst *hoistedEM = cast<ApplyInst>(EMInst->clone(insertionPoint)); + hoistedEM->setArgument(0, hoistedMM->getArgument(0)); + placeFuncRef(hoistedEM, DomTree); // Register array loads. This is needed for hoisting make_mutable calls of // inner arrays in the two-dimensional case.
if (arrayContainerIsUnique && - StructUses.hasSingleAddressUse((ApplyInst *)MakeMutable)) { + StructUses.hasOnlyAddressUses((ApplyInst *)MakeMutable, EMInst)) { for (auto use : MakeMutable.getSelf()->getUses()) { if (auto *LI = dyn_cast<LoadInst>(use->getUser())) HoistableLoads.insert(LI); @@ -917,39 +953,33 @@ bool COWArrayOpt::run() { // is only mapped to a call once the analysis has determined that no // make_mutable calls are required within the loop body for that array. llvm::SmallDenseMap<SILValue, ApplyInst *> ArrayMakeMutableMap; - + + llvm::SmallVector<ArraySemanticsCall, 8> makeMutableCalls; + for (auto *BB : Loop->getBlocks()) { if (ColdBlocks.isCold(BB)) continue; - bool dominatesExits = dominatesExitingBlocks(BB); - for (auto II = BB->begin(), IE = BB->end(); II != IE;) { - // Inst may be moved by hoistMakeMutable. - SILInstruction *Inst = &*II; - ++II; - ArraySemanticsCall MakeMutableCall(Inst, "array.make_mutable"); - if (!MakeMutableCall) - continue; + + // Instructions are getting moved around. To avoid problems with iterator + // invalidation, first collect all calls, and then do the transformation. + for (SILInstruction &I : *BB) { + ArraySemanticsCall MakeMutableCall(&I, "array.make_mutable"); + if (MakeMutableCall) + makeMutableCalls.push_back(MakeMutableCall); + } + bool dominatesExits = dominatesExitingBlocks(BB); + for (ArraySemanticsCall MakeMutableCall : makeMutableCalls) { CurrentArrayAddr = MakeMutableCall.getSelf(); auto HoistedCallEntry = ArrayMakeMutableMap.find(CurrentArrayAddr); if (HoistedCallEntry == ArrayMakeMutableMap.end()) { - if (!hoistMakeMutable(MakeMutableCall, dominatesExits)) { + if (hoistMakeMutable(MakeMutableCall, dominatesExits)) { + ArrayMakeMutableMap[CurrentArrayAddr] = MakeMutableCall; + HasChanged = true; + } else { ArrayMakeMutableMap[CurrentArrayAddr] = nullptr; - continue; } - - ArrayMakeMutableMap[CurrentArrayAddr] = MakeMutableCall; - HasChanged = true; - continue; } - - if (!HoistedCallEntry->second) - continue; - - LLVM_DEBUG(llvm::dbgs() << " Removing make_mutable call: " - << *MakeMutableCall); - MakeMutableCall.removeCall(); - HasChanged = true; } } return HasChanged; diff --git a/lib/SILOptimizer/LoopTransforms/ForEachLoopUnroll.cpp b/lib/SILOptimizer/LoopTransforms/ForEachLoopUnroll.cpp index a1da1e09b6e9f..0aec5461189b8 100644 --- a/lib/SILOptimizer/LoopTransforms/ForEachLoopUnroll.cpp +++ b/lib/SILOptimizer/LoopTransforms/ForEachLoopUnroll.cpp @@ -323,8 +323,15 @@ void ArrayInfo::classifyUsesOfArray(SILValue arrayValue) { // as the array itself is not modified (which is possible with reference // types).
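Stepping back to the COWArrayOpt change above: the reason to copy the make_mutable/end_mutation pair into the pre-header, rather than move it, is loops like the following sketch, where each iteration would otherwise run its own uniqueness check:

    func incrementAll(_ a: inout [Int]) {
      for i in 0..<a.count {
        // Each subscript store is bracketed by make_mutable/end_mutation.
        // With the pair also present in the loop pre-header, inlining plus
        // the COW optimization can fold the in-loop uniqueness checks to
        // "already unique".
        a[i] += 1
      }
    }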
ArraySemanticsCall arrayOp(user); - if (!arrayOp.doesNotChangeArray()) - mayBeWritten = true; + if (arrayOp.doesNotChangeArray()) + continue; + + if (arrayOp.getKind() == swift::ArrayCallKind::kArrayFinalizeIntrinsic) { + classifyUsesOfArray((ApplyInst *)arrayOp); + continue; + } + + mayBeWritten = true; } } diff --git a/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp b/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp index c543b72f8f79a..19cb23f37a377 100644 --- a/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp +++ b/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp @@ -539,6 +539,15 @@ static SILValue emitCodeForConstantArray(ArrayRef<SILValue> elements, module.findFunction(allocatorMangledName, SILLinkage::PublicExternal); assert(arrayAllocateFun); + FuncDecl *arrayFinalizeDecl = astContext.getFinalizeUninitializedArray(); + assert(arrayFinalizeDecl); + std::string finalizeMangledName = + SILDeclRef(arrayFinalizeDecl, SILDeclRef::Kind::Func).mangle(); + SILFunction *arrayFinalizeFun = + module.findFunction(finalizeMangledName, SILLinkage::SharedExternal); + assert(arrayFinalizeFun); + module.linkFunction(arrayFinalizeFun); + // Call the _allocateUninitializedArray function with numElementsSIL. The // call returns a two-element tuple, where the first element is the newly // created array and the second element is a pointer to the internal storage @@ -596,7 +605,12 @@ static SILValue emitCodeForConstantArray(ArrayRef<SILValue> elements, StoreOwnershipQualifier::Init); ++elementIndex; } - return arraySIL; + FunctionRefInst *arrayFinalizeRef = + builder.createFunctionRef(loc, arrayFinalizeFun); + ApplyInst *finalizedArray = builder.createApply( + loc, arrayFinalizeRef, subMap, ArrayRef<SILValue>(arraySIL)); + + return finalizedArray; } /// Given a SILValue \p value, return the instruction immediately following the diff --git a/lib/SILOptimizer/Transforms/ObjectOutliner.cpp b/lib/SILOptimizer/Transforms/ObjectOutliner.cpp index 2da566a30041f..7f7289e195acd 100644 --- a/lib/SILOptimizer/Transforms/ObjectOutliner.cpp +++ b/lib/SILOptimizer/Transforms/ObjectOutliner.cpp @@ -35,17 +35,18 @@ class ObjectOutliner { return type.getNominalOrBoundGenericNominal() == ArrayDecl; } - bool isValidUseOfObject(SILInstruction *Val, - bool isCOWObject, - ApplyInst **FindStringCall = nullptr); + bool isValidUseOfObject(SILInstruction *Val, EndCOWMutationInst *toIgnore); + + ApplyInst *findFindStringCall(SILValue V); bool getObjectInitVals(SILValue Val, llvm::DenseMap<VarDecl *, StoreInst *> &MemberStores, llvm::SmallVectorImpl<StoreInst *> &TailStores, unsigned NumTailTupleElements, - ApplyInst **FindStringCall); + EndCOWMutationInst *toIgnore); bool handleTailAddr(int TailIdx, SILInstruction *I, unsigned NumTailTupleElements, - llvm::SmallVectorImpl<StoreInst *> &TailStores); + llvm::SmallVectorImpl<StoreInst *> &TailStores, + EndCOWMutationInst *toIgnore); bool optimizeObjectAllocation(AllocRefInst *ARI); void replaceFindStringCall(ApplyInst *FindStringCall); @@ -116,13 +117,11 @@ static bool isValidInitVal(SILValue V) { } /// Check if a use of an object may prevent outlining the object. -/// -/// If \p isCOWObject is true, then the object reference is wrapped into a -/// COW container. Currently this is just Array. -/// If a use is a call to the findStringSwitchCase semantic call, the apply -/// is returned in \p FindStringCall.
-bool ObjectOutliner::isValidUseOfObject(SILInstruction *I, bool isCOWObject, - ApplyInst **FindStringCall) { +bool ObjectOutliner::isValidUseOfObject(SILInstruction *I, + EndCOWMutationInst *toIgnore) { + if (I == toIgnore) + return true; + switch (I->getKind()) { case SILInstructionKind::DebugValueAddrInst: case SILInstructionKind::DebugValueInst: @@ -134,49 +133,22 @@ bool ObjectOutliner::isValidUseOfObject(SILInstruction *I, bool isCOWObject, case SILInstructionKind::SetDeallocatingInst: return true; - case SILInstructionKind::ReturnInst: - case SILInstructionKind::TryApplyInst: - case SILInstructionKind::PartialApplyInst: - case SILInstructionKind::StoreInst: - /// We don't have a representation for COW objects in SIL, so we do some - /// ad-hoc testing: We can ignore uses of a COW object if any use after - /// this will do a uniqueness checking before the object is modified. - return isCOWObject; - - case SILInstructionKind::ApplyInst: - if (!isCOWObject) - return false; - // There should only be a single call to findStringSwitchCase. But even - // if there are multiple calls, it's not a problem - we'll just optimize the - // last one we find. - if (cast<ApplyInst>(I)->hasSemantics(semantics::FIND_STRING_SWITCH_CASE)) - *FindStringCall = cast<ApplyInst>(I); - return true; - - case SILInstructionKind::StructInst: - if (isCOWType(cast<StructInst>(I)->getType())) { - // The object is wrapped into a COW container. - isCOWObject = true; - } - break; - - case SILInstructionKind::UncheckedRefCastInst: case SILInstructionKind::StructElementAddrInst: case SILInstructionKind::AddressToPointerInst: - assert(!isCOWObject && "instruction cannot have a COW object as operand"); - break; - + case SILInstructionKind::StructInst: case SILInstructionKind::TupleInst: case SILInstructionKind::TupleExtractInst: case SILInstructionKind::EnumInst: - break; - case SILInstructionKind::StructExtractInst: - // To be on the safe side we don't consider the object as COW if it is - // extracted again from the COW container: the uniqueness check may be - // optimized away in this case. - isCOWObject = false; - break; + case SILInstructionKind::UncheckedRefCastInst: + case SILInstructionKind::UpcastInst: { + auto SVI = cast<SingleValueInstruction>(I); + for (Operand *Use : getNonDebugUses(SVI)) { + if (!isValidUseOfObject(Use->getUser(), toIgnore)) + return false; + } + return true; + } case SILInstructionKind::BuiltinInst: { // Handle the case for comparing addresses. This occurs when the Array @@ -198,26 +170,51 @@ bool ObjectOutliner::isValidUseOfObject(SILInstruction *I, bool isCOWObject, default: return false; } +} - auto SVI = cast<SingleValueInstruction>(I); - for (Operand *Use : getNonDebugUses(SVI)) { - if (!isValidUseOfObject(Use->getUser(), isCOWObject, FindStringCall)) - return false +/// Finds a call to findStringSwitchCase in the uses of \p V. +ApplyInst *ObjectOutliner::findFindStringCall(SILValue V) { + for (Operand *use : V->getUses()) { + SILInstruction *user = use->getUser(); + switch (user->getKind()) { + case SILInstructionKind::ApplyInst: + // There should only be a single call to findStringSwitchCase. But even + // if there are multiple calls, it's not a problem - we'll just optimize the + // last one we find.
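findStringSwitchCase is the semantics call the optimizer emits for string switches backed by a static array of case strings; the helper continued below only needs to locate it through value-forwarding instructions. A sketch of source code that can produce such a call (assuming the optimizer recognizes the switch):

    func colorIndex(_ s: String) -> Int {
      // May be lowered to a findStringSwitchCase call over an outlined
      // constant array of the case strings.
      switch s {
      case "red": return 0
      case "green": return 1
      case "blue": return 2
      default: return -1
      }
    }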
+ if (cast<ApplyInst>(user)->hasSemantics(semantics::FIND_STRING_SWITCH_CASE)) + return cast<ApplyInst>(user); + break; + + case SILInstructionKind::StructInst: + case SILInstructionKind::TupleInst: + case SILInstructionKind::UncheckedRefCastInst: + case SILInstructionKind::UpcastInst: { + if (ApplyInst *foundCall = + findFindStringCall(cast<SingleValueInstruction>(user))) { + return foundCall; + } + break; + } + + default: + break; + } } - return true; + return nullptr; } /// Handle the address of a tail element. bool ObjectOutliner::handleTailAddr(int TailIdx, SILInstruction *TailAddr, unsigned NumTailTupleElements, - llvm::SmallVectorImpl<StoreInst *> &TailStores) { + llvm::SmallVectorImpl<StoreInst *> &TailStores, + EndCOWMutationInst *toIgnore) { if (NumTailTupleElements > 0) { if (auto *TEA = dyn_cast<TupleElementAddrInst>(TailAddr)) { unsigned TupleIdx = TEA->getFieldNo(); assert(TupleIdx < NumTailTupleElements); for (Operand *Use : TEA->getUses()) { if (!handleTailAddr(TailIdx * NumTailTupleElements + TupleIdx, Use->getUser(), 0, - TailStores)) + TailStores, toIgnore)) return false; } return true; @@ -232,7 +229,7 @@ bool ObjectOutliner::handleTailAddr(int TailIdx, SILInstruction *TailAddr, } } } - return isValidUseOfObject(TailAddr, /*isCOWObject*/false); + return isValidUseOfObject(TailAddr, toIgnore); } /// Get the init values for an object's stored properties and its tail elements. @@ -240,12 +237,13 @@ bool ObjectOutliner::getObjectInitVals(SILValue Val, llvm::DenseMap<VarDecl *, StoreInst *> &MemberStores, llvm::SmallVectorImpl<StoreInst *> &TailStores, unsigned NumTailTupleElements, - ApplyInst **FindStringCall) { + EndCOWMutationInst *toIgnore) { for (Operand *Use : Val->getUses()) { SILInstruction *User = Use->getUser(); if (auto *UC = dyn_cast<UpcastInst>(User)) { // Upcast is transparent. - if (!getObjectInitVals(UC, MemberStores, TailStores, NumTailTupleElements, FindStringCall)) + if (!getObjectInitVals(UC, MemberStores, TailStores, NumTailTupleElements, + toIgnore)) return false; } else if (auto *REA = dyn_cast<RefElementAddrInst>(User)) { // The address of a stored property. @@ -255,7 +253,7 @@ bool ObjectOutliner::getObjectInitVals(SILValue Val, if (!isValidInitVal(SI->getSrc()) || MemberStores[REA->getField()]) return false; MemberStores[REA->getField()] = SI; - } else if (!isValidUseOfObject(ElemAddrUser, /*isCOWObject*/false)) { + } else if (!isValidUseOfObject(ElemAddrUser, toIgnore)) { return false; } } @@ -272,15 +270,17 @@ bool ObjectOutliner::getObjectInitVals(SILValue Val, TailIdx = Index->getValue().getZExtValue(); for (Operand *IAUse : IA->getUses()) { - if (!handleTailAddr(TailIdx, IAUse->getUser(), NumTailTupleElements, TailStores)) + if (!handleTailAddr(TailIdx, IAUse->getUser(), NumTailTupleElements, + TailStores, toIgnore)) return false; } // Without an index_addr it's the first tail element. - } else if (!handleTailAddr(/*TailIdx*/0, TailUser, NumTailTupleElements, TailStores)) { + } else if (!handleTailAddr(/*TailIdx*/0, TailUser, NumTailTupleElements, + TailStores, toIgnore)) { return false; } } - } else if (!isValidUseOfObject(User, /*isCOWObject*/false, FindStringCall)) { + } else if (!isValidUseOfObject(User, toIgnore)) { return false; } } @@ -302,10 +302,25 @@ class GlobalVariableMangler : public Mangle::ASTMangler { } }; +static EndCOWMutationInst *getEndCOWMutation(SILValue object) { + for (Operand *use : object->getUses()) { + SILInstruction *user = use->getUser(); + if (auto *upCast = dyn_cast<UpcastInst>(user)) { + // Look through upcast instructions.
+ if (EndCOWMutationInst *ecm = getEndCOWMutation(upCast)) + return ecm; + } else if (auto *ecm = dyn_cast<EndCOWMutationInst>(use->getUser())) { + return ecm; + } + } + return nullptr; +} + /// Try to convert an object allocation into a statically initialized object. /// /// In general this works for any class, but in practice it will only kick in -/// for array buffer objects. The use cases are array literals in a function. +/// for copy-on-write buffers, like array buffer objects. +/// The use cases are array literals in a function. /// For example: /// func getarray() -> [Int] { /// return [1, 2, 3] @@ -314,6 +329,12 @@ bool ObjectOutliner::optimizeObjectAllocation(AllocRefInst *ARI) { if (ARI->isObjC()) return false; + // Find the end_cow_mutation. We only do the transformation for such COW + // buffer objects. + EndCOWMutationInst *endCOW = getEndCOWMutation(ARI); + if (!endCOW || endCOW->doKeepUnique()) + return false; + // Check how many tail allocated elements are on the object. ArrayRef<Operand> TailCounts = ARI->getTailAllocatedCounts(); SILType TailType; @@ -363,12 +384,13 @@ bool ObjectOutliner::optimizeObjectAllocation(AllocRefInst *ARI) { } TailStores.resize(NumStores); - ApplyInst *FindStringCall = nullptr; // Get the initialization stores of the object's properties and tail // allocated elements. Also check if there are any "bad" uses of the object. - if (!getObjectInitVals(ARI, MemberStores, TailStores, NumTailTupleElems, &FindStringCall)) + if (!getObjectInitVals(ARI, MemberStores, TailStores, NumTailTupleElems, + endCOW)) { return false; + } // Is there a store for all the class properties? if (MemberStores.size() != Fields.size()) @@ -452,6 +474,11 @@ bool ObjectOutliner::optimizeObjectAllocation(AllocRefInst *ARI) { SILBuilder B(ARI); GlobalValueInst *GVI = B.createGlobalValue(ARI->getLoc(), Glob); B.createStrongRetain(ARI->getLoc(), GVI, B.getDefaultAtomicity()); + + ApplyInst *FindStringCall = findFindStringCall(endCOW); + endCOW->replaceAllUsesWith(endCOW->getOperand()); + ToRemove.push_back(endCOW); + llvm::SmallVector<Operand *, 8> Worklist(ARI->use_begin(), ARI->use_end()); while (!Worklist.empty()) { auto *Use = Worklist.pop_back_val(); diff --git a/lib/SILOptimizer/Utils/ConstExpr.cpp b/lib/SILOptimizer/Utils/ConstExpr.cpp index 60db9517dd647..6829541fe2ff6 100644 --- a/lib/SILOptimizer/Utils/ConstExpr.cpp +++ b/lib/SILOptimizer/Utils/ConstExpr.cpp @@ -47,6 +47,8 @@ enum class WellKnownFunction { AllocateUninitializedArray, // Array._endMutation EndArrayMutation, + // _finalizeUninitializedArray + FinalizeUninitializedArray, // Array.append(_:) ArrayAppendElement, // String.init() @@ -75,6 +77,8 @@ static llvm::Optional<WellKnownFunction> classifyFunction(SILFunction *fn) { return WellKnownFunction::AllocateUninitializedArray; if (fn->hasSemanticsAttr(semantics::ARRAY_END_MUTATION)) return WellKnownFunction::EndArrayMutation; + if (fn->hasSemanticsAttr(semantics::ARRAY_FINALIZE_INTRINSIC)) + return WellKnownFunction::FinalizeUninitializedArray; if (fn->hasSemanticsAttr(semantics::ARRAY_APPEND_ELEMENT)) return WellKnownFunction::ArrayAppendElement; if (fn->hasSemanticsAttr(semantics::STRING_INIT_EMPTY)) @@ -961,6 +965,21 @@ ConstExprFunctionState::computeWellKnownCallResult(ApplyInst *apply, // _endMutation is a no-op.
return None; } + case WellKnownFunction::FinalizeUninitializedArray: { + // This function has the following signature in SIL: + // (Array<Element>) -> Array<Element> + assert(conventions.getNumParameters() == 1 && + conventions.getNumDirectSILResults() == 1 && + conventions.getNumIndirectSILResults() == 0 && + "unexpected _finalizeUninitializedArray() signature"); + + auto result = getConstantValue(apply->getOperand(1)); + if (!result.isConstant()) + return result; + // Semantically, it's an identity function. + setValue(apply, result); + return None; + } case WellKnownFunction::ArrayAppendElement: { // This function has the following signature in SIL: // (@in Element, @inout Array) -> () diff --git a/stdlib/public/SwiftShims/RefCount.h b/stdlib/public/SwiftShims/RefCount.h index 3b396cff9edc8..50bdb99695f25 100644 --- a/stdlib/public/SwiftShims/RefCount.h +++ b/stdlib/public/SwiftShims/RefCount.h @@ -1271,6 +1271,11 @@ class RefCounts { // Note that this is not equal to the number of outstanding weak pointers. uint32_t getWeakCount() const; +#ifndef NDEBUG + bool isImmutableCOWBuffer(); + bool setIsImmutableCOWBuffer(bool immutable); +#endif + // DO NOT TOUCH. // This exists for the benefits of the Refcounting.cpp tests. Do not use it // elsewhere. @@ -1301,6 +1306,11 @@ class HeapObjectSideTableEntry { std::atomic<HeapObject *> object; SideTableRefCounts refCounts; +#ifndef NDEBUG + // Used for runtime consistency checking of COW buffers. + bool immutableCOWBuffer = false; +#endif + public: HeapObjectSideTableEntry(HeapObject *newObject) : object(newObject), refCounts() @@ -1455,6 +1465,16 @@ class HeapObjectSideTableEntry { void *getSideTable() { return refCounts.getSideTable(); } + +#ifndef NDEBUG + bool isImmutableCOWBuffer() const { + return immutableCOWBuffer; + } + + void setIsImmutableCOWBuffer(bool immutable) { + immutableCOWBuffer = immutable; + } +#endif }; diff --git a/stdlib/public/core/Array.swift b/stdlib/public/core/Array.swift index bb7c72cd474a8..fd72700ca95b8 100644 --- a/stdlib/public/core/Array.swift +++ b/stdlib/public/core/Array.swift @@ -333,25 +333,38 @@ extension Array { @inlinable @_semantics("array.get_count") internal func _getCount() -> Int { - return _buffer.count + return _buffer.immutableCount } @inlinable @_semantics("array.get_capacity") internal func _getCapacity() -> Int { - return _buffer.capacity + return _buffer.immutableCapacity } @inlinable @_semantics("array.make_mutable") internal mutating func _makeMutableAndUnique() { - if _slowPath(!_buffer.isMutableAndUniquelyReferenced()) { + if _slowPath(!_buffer.beginCOWMutation()) { _buffer = _buffer._consumeAndCreateNew() } } + /// Marks the end of an Array mutation. + /// + /// After a call to `_endMutation` the buffer must not be mutated until a call + /// to `_makeMutableAndUnique`. + @_alwaysEmitIntoClient + @_semantics("array.end_mutation") + internal mutating func _endMutation() { + _buffer.endCOWMutation() + } + /// Check that the given `index` is valid for subscripting, i.e. /// `0 ≤ index < count`. + /// + /// This function is not used anymore, but must stay in the library for ABI + /// compatibility. @inlinable @inline(__always) internal func _checkSubscript_native(_ index: Int) { @@ -375,6 +388,16 @@ extension Array { return _DependenceToken() } + /// Check that the given `index` is valid for subscripting, i.e. + /// `0 ≤ index < count`. + /// + /// - Precondition: The buffer must be uniquely referenced and native.
@_alwaysEmitIntoClient + @_semantics("array.check_subscript") + internal func _checkSubscript_mutating(_ index: Int) { + _buffer._checkValidSubscriptMutating(index) + } + /// Check that the specified `index` is valid, i.e. `0 ≤ index ≤ count`. @inlinable @_semantics("array.check_index") @@ -402,7 +425,7 @@ extension Array { @inlinable @_semantics("array.get_element_address") internal func _getElementAddress(_ index: Int) -> UnsafeMutablePointer<Element> { - return _buffer.subscriptBaseAddress + index + return _buffer.firstElementAddress + index } } @@ -711,9 +734,10 @@ extension Array: RandomAccessCollection, MutableCollection { } _modify { _makeMutableAndUnique() // makes the array native, too - _checkSubscript_native(index) - let address = _buffer.subscriptBaseAddress + index + _checkSubscript_mutating(index) + let address = _buffer.mutableFirstElementAddress + index yield &address.pointee + _endMutation() } } @@ -872,6 +896,7 @@ extension Array: RangeReplaceableCollection { p.initialize(to: repeatedValue) p += 1 } + _endMutation() } @inline(never) @@ -896,7 +921,7 @@ extension Array: RangeReplaceableCollection { // unnecessary uniqueness check. We disable inlining here to curb code // growth. _buffer = Array._allocateBufferUninitialized(minimumCapacity: count) - _buffer.count = count + _buffer.mutableCount = count } // Can't store count here because the buffer might be pointing to the // shared empty array. @@ -941,7 +966,7 @@ extension Array: RangeReplaceableCollection { internal mutating func _deallocateUninitialized() { // Set the count to zero and just release as normal. // Somewhat of a hack. - _buffer.count = 0 + _buffer.mutableCount = 0 } //===--- basic mutations ------------------------------------------------===// @@ -1019,6 +1044,7 @@ extension Array: RangeReplaceableCollection { public mutating func reserveCapacity(_ minimumCapacity: Int) { _reserveCapacityImpl(minimumCapacity: minimumCapacity, growForAppend: false) + _endMutation() } /// Reserves enough space to store `minimumCapacity` elements. @@ -1029,14 +1055,15 @@ extension Array: RangeReplaceableCollection { internal mutating func _reserveCapacityImpl( minimumCapacity: Int, growForAppend: Bool ) { - let isUnique = _buffer.isUniquelyReferenced() - if _slowPath(!isUnique || _getCapacity() < minimumCapacity) { + let isUnique = _buffer.beginCOWMutation() + if _slowPath(!isUnique || _buffer.mutableCapacity < minimumCapacity) { _createNewBuffer(bufferIsUnique: isUnique, - minimumCapacity: Swift.max(minimumCapacity, count), + minimumCapacity: Swift.max(minimumCapacity, _buffer.count), growForAppend: growForAppend) } - _internalInvariant(capacity >= minimumCapacity) - _internalInvariant(capacity == 0 || _buffer.isUniquelyReferenced()) + _internalInvariant(_buffer.mutableCapacity >= minimumCapacity) + _internalInvariant(_buffer.mutableCapacity == 0 || + _buffer.isUniquelyReferenced()) } /// Creates a new buffer, replacing the current buffer.
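The subscript's _modify accessor above shows the new discipline: _makeMutableAndUnique() puts the buffer into the mutable state, the element is yielded, and _endMutation() flips it back to immutable. The same bracket, sketched on a user-defined type with a plain Bool standing in for the runtime's debug-only flag (using the unofficial _modify accessor the stdlib itself relies on):

    struct CheckedBox<Element> {
      private var storage: [Element]
      private(set) var inMutation = false  // stand-in for the side-table flag

      init(_ storage: [Element]) { self.storage = storage }

      subscript(i: Int) -> Element {
        get { storage[i] }
        _modify {
          inMutation = true   // begin the mutation bracket
          yield &storage[i]
          inMutation = false  // end it, as _endMutation() does above
        }
      }
    }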
@@ -1072,7 +1099,7 @@ extension Array: RangeReplaceableCollection { @inlinable @_semantics("array.make_mutable") internal mutating func _makeUniqueAndReserveCapacityIfNotUnique() { - if _slowPath(!_buffer.isMutableAndUniquelyReferenced()) { + if _slowPath(!_buffer.beginCOWMutation()) { _createNewBuffer(bufferIsUnique: false, minimumCapacity: count + 1, growForAppend: true) @@ -1082,15 +1109,6 @@ extension Array: RangeReplaceableCollection { @inlinable @_semantics("array.mutate_unknown") internal mutating func _reserveCapacityAssumingUniqueBuffer(oldCount: Int) { - // This is a performance optimization. This code used to be in an || - // statement in the _internalInvariant below. - // - // _internalInvariant(_buffer.capacity == 0 || - // _buffer.isMutableAndUniquelyReferenced()) - // - // SR-6437 - let capacity = _buffer.capacity == 0 - // Due to make_mutable hoisting the situation can arise where we hoist // _makeMutableAndUnique out of the loop and use it to replace // _makeUniqueAndReserveCapacityIfNotUnique that precedes this call. If the @@ -1100,11 +1118,11 @@ extension Array: RangeReplaceableCollection { // This specific case is okay because we will make the buffer unique in this // function because we request a capacity > 0 and therefore _copyToNewBuffer // will be called creating a new buffer. - _internalInvariant(capacity || - _buffer.isMutableAndUniquelyReferenced()) + let capacity = _buffer.mutableCapacity + _internalInvariant(capacity == 0 || _buffer.isMutableAndUniquelyReferenced()) - if _slowPath(oldCount + 1 > _buffer.capacity) { - _createNewBuffer(bufferIsUnique: true, + if _slowPath(oldCount + 1 > capacity) { + _createNewBuffer(bufferIsUnique: capacity > 0, minimumCapacity: oldCount + 1, growForAppend: true) } @@ -1117,10 +1135,10 @@ extension Array: RangeReplaceableCollection { newElement: __owned Element ) { _internalInvariant(_buffer.isMutableAndUniquelyReferenced()) - _internalInvariant(_buffer.capacity >= _buffer.count + 1) + _internalInvariant(_buffer.mutableCapacity >= _buffer.mutableCount + 1) - _buffer.count = oldCount + 1 - (_buffer.firstElementAddress + oldCount).initialize(to: newElement) + _buffer.mutableCount = oldCount + 1 + (_buffer.mutableFirstElementAddress + oldCount).initialize(to: newElement) } /// Adds a new element at the end of the array. @@ -1150,9 +1168,10 @@ extension Array: RangeReplaceableCollection { // Separating uniqueness check and capacity check allows hoisting the // uniqueness check out of a loop. _makeUniqueAndReserveCapacityIfNotUnique() - let oldCount = _getCount() + let oldCount = _buffer.mutableCount _reserveCapacityAssumingUniqueBuffer(oldCount: oldCount) _appendElementAssumeUniqueAndCapacity(oldCount, newElement: newElement) + _endMutation() } /// Adds the elements of a sequence to the end of the array.
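The append path above is deliberately split into three semantics calls (make_mutable, the capacity reservation, the element store) so each piece can be hoisted or folded independently, with _endMutation() closing the bracket. The growth policy the reservation relies on, sketched standalone (assuming the stdlib's usual capacity-doubling strategy):

    // Amortized O(1) appends: grow to at least one more element,
    // doubling the old capacity when a reallocation is needed.
    func grownCapacity(oldCount: Int, oldCapacity: Int) -> Int {
      return Swift.max(oldCount + 1, 2 * oldCapacity)
    }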
@@ -1176,14 +1195,19 @@ public mutating func append<S: Sequence>(contentsOf newElements: __owned S) where S.Element == Element { + defer { + _endMutation() + } + let newElementsCount = newElements.underestimatedCount - reserveCapacityForAppend(newElementsCount: newElementsCount) + _reserveCapacityImpl(minimumCapacity: self.count + newElementsCount, + growForAppend: true) - let oldCount = self.count - let startNewElements = _buffer.firstElementAddress + oldCount + let oldCount = _buffer.mutableCount + let startNewElements = _buffer.mutableFirstElementAddress + oldCount let buf = UnsafeMutableBufferPointer( start: startNewElements, - count: self.capacity - oldCount) + count: _buffer.mutableCapacity - oldCount) var (remainder,writtenUpTo) = buf.initialize(from: newElements) @@ -1195,7 +1219,7 @@ // This check prevents a data race writing to _swiftEmptyArrayStorage if writtenCount > 0 { - _buffer.count += writtenCount + _buffer.mutableCount = _buffer.mutableCount + writtenCount } if _slowPath(writtenUpTo == buf.endIndex) { @@ -1212,13 +1236,13 @@ // there may be elements that didn't fit in the existing buffer, // append them in slow sequence-only mode - var newCount = _getCount() + var newCount = _buffer.mutableCount var nextItem = remainder.next() while nextItem != nil { - reserveCapacityForAppend(newElementsCount: 1) + _reserveCapacityAssumingUniqueBuffer(oldCount: newCount) - let currentCapacity = _getCapacity() - let base = _buffer.firstElementAddress + let currentCapacity = _buffer.mutableCapacity + let base = _buffer.mutableFirstElementAddress // fill while there is another item and spare capacity while let next = nextItem, newCount < currentCapacity { @@ -1226,7 +1250,7 @@ newCount += 1 nextItem = remainder.next() } - _buffer.count = newCount + _buffer.mutableCount = newCount } } } @@ -1238,17 +1262,19 @@ // for consistency, we need unique self even if newElements is empty. _reserveCapacityImpl(minimumCapacity: self.count + newElementsCount, growForAppend: true) + _endMutation() } @inlinable @_semantics("array.mutate_unknown") public mutating func _customRemoveLast() -> Element?
{ _makeMutableAndUnique() - let newCount = _getCount() - 1 + let newCount = _buffer.mutableCount - 1 _precondition(newCount >= 0, "Can't removeLast from an empty Array") - let pointer = (_buffer.firstElementAddress + newCount) + let pointer = (_buffer.mutableFirstElementAddress + newCount) let element = pointer.move() - _buffer.count = newCount + _buffer.mutableCount = newCount + _endMutation() return element } @@ -1272,14 +1298,15 @@ extension Array: RangeReplaceableCollection { @_semantics("array.mutate_unknown") public mutating func remove(at index: Int) -> Element { _makeMutableAndUnique() - let currentCount = _getCount() + let currentCount = _buffer.mutableCount _precondition(index < currentCount, "Index out of range") _precondition(index >= 0, "Index out of range") let newCount = currentCount - 1 - let pointer = (_buffer.firstElementAddress + index) + let pointer = (_buffer.mutableFirstElementAddress + index) let result = pointer.move() pointer.moveInitialize(from: pointer + 1, count: newCount - index) - _buffer.count = newCount + _buffer.mutableCount = newCount + _endMutation() return result } @@ -1449,7 +1476,8 @@ extension Array { buffer.baseAddress == firstElementAddress, "Can't reassign buffer in Array(unsafeUninitializedCapacity:initializingWith:)" ) - self._buffer.count = initializedCount + self._buffer.mutableCount = initializedCount + _endMutation() } try initializer(&buffer, &initializedCount) } @@ -1573,7 +1601,7 @@ extension Array { _ body: (inout UnsafeMutableBufferPointer<Element>) throws -> R ) rethrows -> R { _makeMutableAndUnique() - let count = self.count + let count = _buffer.mutableCount // Ensure that body can't invalidate the storage or its bounds by // moving self into a temporary working array. @@ -1588,7 +1616,7 @@ extension Array { (work, self) = (self, work) // Create an UnsafeBufferPointer over work that we can pass to body - let pointer = work._buffer.firstElementAddress + let pointer = work._buffer.mutableFirstElementAddress var inoutBufferPointer = UnsafeMutableBufferPointer( start: pointer, count: count) @@ -1600,6 +1628,7 @@ extension Array { "Array withUnsafeMutableBufferPointer: replacing the buffer is not allowed") (work, self) = (self, work) + _endMutation() } // Invoke the body. @@ -1689,8 +1718,10 @@ extension Array { let insertCount = newElements.count let growth = insertCount - eraseCount - reserveCapacityForAppend(newElementsCount: growth) + _reserveCapacityImpl(minimumCapacity: self.count + growth, + growForAppend: true) _buffer.replaceSubrange(subrange, with: insertCount, elementsOf: newElements) + _endMutation() } } diff --git a/stdlib/public/core/ArrayBuffer.swift b/stdlib/public/core/ArrayBuffer.swift index 9e959a9406e1f..29fd636ebc1d7 100644 --- a/stdlib/public/core/ArrayBuffer.swift +++ b/stdlib/public/core/ArrayBuffer.swift @@ -100,21 +100,55 @@ extension _ArrayBuffer { } /// Returns `true` iff this buffer's storage is uniquely-referenced. + /// + /// This function should only be used for internal sanity checks. + /// To guard a buffer mutation, use `beginCOWMutation`. @inlinable internal mutating func isUniquelyReferenced() -> Bool { if !_isClassOrObjCExistential(Element.self) { return _storage.isUniquelyReferencedUnflaggedNative() } - - // This is a performance optimization. This code used to be: - // - // return _storage.isUniquelyReferencedNative() && _isNative.
- // - // SR-6437 - if !_storage.isUniquelyReferencedNative() { + return _storage.isUniquelyReferencedNative() && _isNative + } + + /// Returns `true` and puts the buffer in a mutable state iff the buffer's + /// storage is uniquely-referenced. + /// + /// - Precondition: The buffer must be immutable. + /// + /// - Warning: It's a requirement to call `beginCOWMutation` before the buffer + /// is mutated. + @_alwaysEmitIntoClient + internal mutating func beginCOWMutation() -> Bool { + let isUnique: Bool + if !_isClassOrObjCExistential(Element.self) { + isUnique = _storage.beginCOWMutationUnflaggedNative() + } else if !_storage.beginCOWMutationNative() { return false + } else { + isUnique = _isNative + } +#if INTERNAL_CHECKS_ENABLED + if isUnique { + _native.isImmutable = false } - return _isNative +#endif + return isUnique + } + + /// Puts the buffer in an immutable state. + /// + /// - Precondition: The buffer must be mutable. + /// + /// - Warning: After a call to `endCOWMutation` the buffer must not be mutated + /// until the next call of `beginCOWMutation`. + @_alwaysEmitIntoClient + @inline(__always) + internal mutating func endCOWMutation() { +#if INTERNAL_CHECKS_ENABLED + _native.isImmutable = true +#endif + _storage.endCOWMutation() } /// Convert to an NSArray. @@ -168,13 +202,13 @@ extension _ArrayBuffer { // As an optimization, if the original buffer is unique, we can just move // the elements instead of copying. let dest = newBuffer.firstElementAddress - dest.moveInitialize(from: firstElementAddress, + dest.moveInitialize(from: mutableFirstElementAddress, count: c) - _native.count = 0 + _native.mutableCount = 0 } else { _copyContents( subRange: 0..<c, initializing: dest) } } internal mutating func requestUniqueMutableBackingBuffer( minimumCapacity: Int ) -> NativeBuffer? { if _fastPath(isUniquelyReferenced()) { let b = _native - if _fastPath(b.capacity >= minimumCapacity) { + if _fastPath(b.mutableCapacity >= minimumCapacity) { return b } } @@ -310,12 +344,25 @@ extension _ArrayBuffer { return _native.firstElementAddress } + /// A mutable pointer to the first element. + /// + /// - Precondition: the buffer must be mutable. + @_alwaysEmitIntoClient + internal var mutableFirstElementAddress: UnsafeMutablePointer<Element> { + _internalInvariant(_isNative, "must be a native buffer") + return _native.mutableFirstElementAddress + } + @inlinable internal var firstElementAddressIfContiguous: UnsafeMutablePointer<Element>? { return _fastPath(_isNative) ? firstElementAddress : nil } /// The number of elements the buffer stores. + /// + /// This property is obsolete. It's only used for the ArrayBufferProtocol and + /// to keep backward compatibility. + /// Use `immutableCount` or `mutableCount` instead. @inlinable internal var count: Int { @inline(__always) get { return _fastPath(_isNative) ? _native.count : _nonNative.endIndex } set { _internalInvariant(_isNative, "attempting to update count of Cocoa array") _native.count = newValue } } + + /// The number of elements of the buffer. + /// + /// - Precondition: The buffer must be immutable. + @_alwaysEmitIntoClient + internal var immutableCount: Int { + return _fastPath(_isNative) ? _native.immutableCount : _nonNative.endIndex + } + + /// The number of elements of the buffer. + /// + /// - Precondition: The buffer must be mutable.
+ @_alwaysEmitIntoClient + internal var mutableCount: Int { + @inline(__always) + get { + _internalInvariant( + _isNative, + "attempting to get mutating-count of non-native buffer") + return _native.mutableCount + } + @inline(__always) + set { + _internalInvariant(_isNative, "attempting to update count of Cocoa array") + _native.mutableCount = newValue + } + } /// Traps if an inout violation is detected or if the buffer is /// native and the subscript is out of range. @@ -345,8 +419,6 @@ extension _ArrayBuffer { } } - // TODO: gyb this - /// Traps if an inout violation is detected or if the buffer is /// native and typechecked and the subscript is out of range. /// @@ -366,12 +438,42 @@ extension _ArrayBuffer { } } + /// Traps unless the given `index` is valid for subscripting, i.e. + /// `0 ≤ index < count`. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + internal func _checkValidSubscriptMutating(_ index: Int) { + _native._checkValidSubscriptMutating(index) + } + /// The number of elements the buffer can store without reallocation. + /// + /// This property is obsolete. It's only used for the ArrayBufferProtocol and + /// to keep backward compatibility. + /// Use `immutableCapacity` or `mutableCapacity` instead. @inlinable internal var capacity: Int { return _fastPath(_isNative) ? _native.capacity : _nonNative.endIndex } + /// The number of elements the buffer can store without reallocation. + /// + /// - Precondition: The buffer must be immutable. + @_alwaysEmitIntoClient + internal var immutableCapacity: Int { + return _fastPath(_isNative) ? _native.immutableCapacity : _nonNative.count + } + + /// The number of elements the buffer can store without reallocation. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + internal var mutableCapacity: Int { + _internalInvariant(_isNative, "attempting to get mutating-capacity of non-native buffer") + return _native.mutableCapacity + } + @inlinable @inline(__always) internal func getElement(_ i: Int, wasNativeTypeChecked: Bool) -> Element { diff --git a/stdlib/public/core/ArrayShared.swift b/stdlib/public/core/ArrayShared.swift index 3e7d5939bac03..28c1d25696c55 100644 --- a/stdlib/public/core/ArrayShared.swift +++ b/stdlib/public/core/ArrayShared.swift @@ -64,6 +64,17 @@ func _deallocateUninitializedArray<Element>( array._deallocateUninitialized() } +@_alwaysEmitIntoClient +@_semantics("array.finalize_intrinsic") +@_effects(readnone) +public // COMPILER_INTRINSIC +func _finalizeUninitializedArray<Element>( + _ array: __owned Array<Element> +) -> Array<Element> { + var mutableArray = array + mutableArray._endMutation() + return mutableArray +} extension Collection { // Utility method for collections that wish to implement diff --git a/stdlib/public/core/ArraySlice.swift b/stdlib/public/core/ArraySlice.swift index b0da6898cd5e0..8ce49b2f84dce 100644 --- a/stdlib/public/core/ArraySlice.swift +++ b/stdlib/public/core/ArraySlice.swift @@ -164,10 +164,20 @@ extension ArraySlice { @inlinable @_semantics("array.make_mutable") internal mutating func _makeMutableAndUnique() { - if _slowPath(!_buffer.isMutableAndUniquelyReferenced()) { + if _slowPath(!_buffer.beginCOWMutation()) { _buffer = _Buffer(copying: _buffer) } } + + /// Marks the end of a mutation. + /// + /// After a call to `_endMutation` the buffer must not be mutated until a call + /// to `_makeMutableAndUnique`.
@_alwaysEmitIntoClient + @_semantics("array.end_mutation") + internal mutating func _endMutation() { + _buffer.endCOWMutation() + } /// Check that the given `index` is valid for subscripting, i.e. /// `0 ≤ index < count`. @@ -537,6 +547,7 @@ extension ArraySlice: RandomAccessCollection, MutableCollection { _checkSubscript_native(index) let address = _buffer.subscriptBaseAddress + index yield &address.pointee + _endMutation() } } @@ -688,12 +699,19 @@ extension ArraySlice: RangeReplaceableCollection { @inlinable @_semantics("array.init") public init(repeating repeatedValue: Element, count: Int) { - var p: UnsafeMutablePointer<Element> - (self, p) = ArraySlice._allocateUninitialized(count) - for _ in 0..<count { - p.initialize(to: repeatedValue) - p += 1 + _precondition(count >= 0, "Can't construct ArraySlice with count < 0") + if count > 0 { + _buffer = ArraySlice._allocateBufferUninitialized(minimumCapacity: count) + _buffer.count = count + var p = _buffer.firstElementAddress + for _ in 0..<count { + p.initialize(to: repeatedValue) + p += 1 + } + } else { + _buffer = _Buffer() + } + _endMutation() + } let newBuffer = _ContiguousArrayBuffer<Element>( _uninitializedCount: count, minimumCapacity: minimumCapacity) @@ -820,6 +837,7 @@ extension ArraySlice: RangeReplaceableCollection { _buffer: newBuffer, shiftedToStartIndex: _buffer.startIndex) } _internalInvariant(capacity >= minimumCapacity) + _endMutation() } /// Copy the contents of the current buffer to a new unique mutable buffer. @@ -838,7 +856,7 @@ extension ArraySlice: RangeReplaceableCollection { @inlinable @_semantics("array.make_mutable") internal mutating func _makeUniqueAndReserveCapacityIfNotUnique() { - if _slowPath(!_buffer.isMutableAndUniquelyReferenced()) { + if _slowPath(!_buffer.beginCOWMutation()) { _copyToNewBuffer(oldCount: _buffer.count) } } @@ -846,15 +864,6 @@ extension ArraySlice: RangeReplaceableCollection { @inlinable @_semantics("array.mutate_unknown") internal mutating func _reserveCapacityAssumingUniqueBuffer(oldCount: Int) { - // This is a performance optimization. This code used to be in an || - // statement in the _internalInvariant below. - // - // _internalInvariant(_buffer.capacity == 0 || - // _buffer.isMutableAndUniquelyReferenced()) - // - // SR-6437 - let capacity = _buffer.capacity == 0 - // Due to make_mutable hoisting the situation can arise where we hoist // _makeMutableAndUnique out of the loop and use it to replace // _makeUniqueAndReserveCapacityIfNotUnique that precedes this call. If the @@ -864,10 +873,10 @@ extension ArraySlice: RangeReplaceableCollection { // This specific case is okay because we will make the buffer unique in this // function because we request a capacity > 0 and therefore _copyToNewBuffer // will be called creating a new buffer. - _internalInvariant(capacity || - _buffer.isMutableAndUniquelyReferenced()) + let capacity = _buffer.capacity + _internalInvariant(capacity == 0 || _buffer.isMutableAndUniquelyReferenced()) - if _slowPath(oldCount + 1 > _buffer.capacity) { + if _slowPath(oldCount + 1 > capacity) { _copyToNewBuffer(oldCount: oldCount) } } @@ -913,6 +922,7 @@ extension ArraySlice: RangeReplaceableCollection { let oldCount = _getCount() _reserveCapacityAssumingUniqueBuffer(oldCount: oldCount) _appendElementAssumeUniqueAndCapacity(oldCount, newElement: newElement) + _endMutation() } /// Adds the elements of a sequence to the end of the array.
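The append(contentsOf:) hunk below leans on Sequence.underestimatedCount: capacity is reserved for that many elements up front, leftover elements go through the slow path, and an overestimate traps. A conforming sequence must therefore report a lower bound, e.g.:

    struct Evens: Sequence, IteratorProtocol {
      private var current = 0
      let limit: Int
      init(limit: Int) { self.limit = limit }

      // Must never overestimate: append(contentsOf:) preconditions on it.
      var underestimatedCount: Int { Swift.max(0, (limit - current + 1) / 2) }

      mutating func next() -> Int? {
        guard current < limit else { return nil }
        defer { current += 2 }
        return current
      }
    }

    var values: [Int] = []
    values.append(contentsOf: Evens(limit: 10))  // [0, 2, 4, 6, 8]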
@@ -938,6 +948,7 @@ extension ArraySlice: RangeReplaceableCollection { let newElementsCount = newElements.underestimatedCount reserveCapacityForAppend(newElementsCount: newElementsCount) + _ = _buffer.beginCOWMutation() let oldCount = self.count let startNewElements = _buffer.firstElementAddress + oldCount @@ -949,7 +960,7 @@ extension ArraySlice: RangeReplaceableCollection { // trap on underflow from the sequence's underestimate: let writtenCount = buf.distance(from: buf.startIndex, to: writtenUpTo) - _precondition(newElementsCount <= writtenCount, + _precondition(newElementsCount <= writtenCount, "newElements.underestimatedCount was an overestimate") // can't check for overflow as sequences can underestimate @@ -963,6 +974,7 @@ extension ArraySlice: RangeReplaceableCollection { // append them in slow sequence-only mode _buffer._arrayAppendSequence(IteratorSequence(remainder)) } + _endMutation() } @inlinable @@ -1223,7 +1235,7 @@ extension ArraySlice { ) rethrows -> R { let count = self.count // Ensure unique storage - _buffer._outlinedMakeUniqueBuffer(bufferCount: count) + _makeMutableAndUnique() // Ensure that body can't invalidate the storage or its bounds by // moving self into a temporary working array. @@ -1250,6 +1262,7 @@ extension ArraySlice { "ArraySlice withUnsafeMutableBufferPointer: replacing the buffer is not allowed") (work, self) = (self, work) + _endMutation() } // Invoke the body. @@ -1340,14 +1353,13 @@ extension ArraySlice { let insertCount = newElements.count let growth = insertCount - eraseCount - if _buffer.requestUniqueMutableBackingBuffer( - minimumCapacity: oldCount + growth) != nil { - + if _buffer.beginCOWMutation() && _buffer.capacity >= oldCount + growth { _buffer.replaceSubrange( subrange, with: insertCount, elementsOf: newElements) } else { _buffer._arrayOutOfPlaceReplace(subrange, with: newElements, count: insertCount) } + _endMutation() } } diff --git a/stdlib/public/core/BridgeStorage.swift b/stdlib/public/core/BridgeStorage.swift index fcc971f8b6f76..33a7e33fe1c41 100644 --- a/stdlib/public/core/BridgeStorage.swift +++ b/stdlib/public/core/BridgeStorage.swift @@ -75,6 +75,12 @@ internal struct _BridgeStorage { return _isUnique(&rawValue) } + @_alwaysEmitIntoClient + @inline(__always) + internal mutating func beginCOWMutationNative() -> Bool { + return Bool(Builtin.beginCOWMutation(&rawValue)) + } + @inlinable internal var isNative: Bool { @inline(__always) get { @@ -131,6 +137,20 @@ internal struct _BridgeStorage { return _isUnique_native(&rawValue) } + @_alwaysEmitIntoClient + @inline(__always) + internal mutating func beginCOWMutationUnflaggedNative() -> Bool { + _internalInvariant(isNative) + return Bool(Builtin.beginCOWMutation_native(&rawValue)) + } + + @_alwaysEmitIntoClient + @inline(__always) + internal mutating func endCOWMutation() { + _internalInvariant(isNative) + Builtin.endCOWMutation(&rawValue) + } + @inlinable internal var objCInstance: ObjC { @inline(__always) get { diff --git a/stdlib/public/core/Builtin.swift b/stdlib/public/core/Builtin.swift index 8aceb9014df08..e3e472cd299ff 100644 --- a/stdlib/public/core/Builtin.swift +++ b/stdlib/public/core/Builtin.swift @@ -345,6 +345,16 @@ internal func _class_getInstancePositiveExtentSize(_ theClass: AnyClass) -> Int #endif } +#if INTERNAL_CHECKS_ENABLED +@usableFromInline +@_silgen_name("_swift_isImmutableCOWBuffer") +internal func _swift_isImmutableCOWBuffer(_ object: AnyObject) -> Bool + +@usableFromInline +@_silgen_name("_swift_setImmutableCOWBuffer") +internal func 
_swift_setImmutableCOWBuffer(_ object: AnyObject, _ immutable: Bool) -> Bool
+#endif
+
 @inlinable
 internal func _isValidAddress(_ address: UInt) -> Bool {
   // TODO: define (and use) ABI max valid pointer value
@@ -683,6 +693,13 @@ func _isUnique_native<T>(_ object: inout T) -> Bool {
   return Bool(Builtin.isUnique_native(&object))
 }
 
+@_alwaysEmitIntoClient
+@_transparent
+public // @testable
+func _COWBufferForReading<T: AnyObject>(_ object: T) -> T {
+  return Builtin.COWBufferForReading(object)
+}
+
 /// Returns `true` if type is a POD type. A POD type is a type that does not
 /// require any special handling on copying or destruction.
 @_transparent
diff --git a/stdlib/public/core/ContiguousArray.swift b/stdlib/public/core/ContiguousArray.swift
index def207a400974..b854c18942745 100644
--- a/stdlib/public/core/ContiguousArray.swift
+++ b/stdlib/public/core/ContiguousArray.swift
@@ -54,23 +54,33 @@ extension ContiguousArray {
   @inlinable
   @_semantics("array.get_count")
   internal func _getCount() -> Int {
-    return _buffer.count
+    return _buffer.immutableCount
   }
 
   @inlinable
   @_semantics("array.get_capacity")
   internal func _getCapacity() -> Int {
-    return _buffer.capacity
+    return _buffer.immutableCapacity
   }
 
   @inlinable
   @_semantics("array.make_mutable")
   internal mutating func _makeMutableAndUnique() {
-    if _slowPath(!_buffer.isMutableAndUniquelyReferenced()) {
+    if _slowPath(!_buffer.beginCOWMutation()) {
       _buffer = _buffer._consumeAndCreateNew()
     }
   }
 
+  /// Marks the end of an Array mutation.
+  ///
+  /// After a call to `_endMutation` the buffer must not be mutated until a call
+  /// to `_makeMutableAndUnique`.
+  @_alwaysEmitIntoClient
+  @_semantics("array.end_mutation")
+  internal mutating func _endMutation() {
+    _buffer.endCOWMutation()
+  }
+
   /// Check that the given `index` is valid for subscripting, i.e.
   /// `0 ≤ index < count`.
   @inlinable
@@ -79,6 +89,16 @@ extension ContiguousArray {
     _buffer._checkValidSubscript(index)
   }
 
+  /// Check that the given `index` is valid for subscripting, i.e.
+  /// `0 ≤ index < count`.
+  ///
+  /// - Precondition: The buffer must be uniquely referenced and native.
+  @_alwaysEmitIntoClient
+  @_semantics("array.check_subscript")
+  internal func _checkSubscript_mutating(_ index: Int) {
+    _buffer._checkValidSubscriptMutating(index)
+  }
+
   /// Check that the specified `index` is valid, i.e. `0 ≤ index ≤ count`.
   @inlinable
   @_semantics("array.check_index")
@@ -90,7 +110,7 @@ extension ContiguousArray {
   @inlinable
   @_semantics("array.get_element_address")
   internal func _getElementAddress(_ index: Int) -> UnsafeMutablePointer<Element> {
-    return _buffer.subscriptBaseAddress + index
+    return _buffer.firstElementAddress + index
   }
 }
@@ -387,9 +407,10 @@ extension ContiguousArray: RandomAccessCollection, MutableCollection {
     }
     _modify {
       _makeMutableAndUnique()
-      _checkSubscript_native(index)
-      let address = _buffer.subscriptBaseAddress + index
+      _checkSubscript_mutating(index)
+      let address = _buffer.mutableFirstElementAddress + index
       yield &address.pointee
+      _endMutation()
     }
   }
@@ -546,6 +567,7 @@ extension ContiguousArray: RangeReplaceableCollection {
       p.initialize(to: repeatedValue)
       p += 1
     }
+    _endMutation()
   }
 
   @inline(never)
@@ -570,7 +592,7 @@ extension ContiguousArray: RangeReplaceableCollection {
     // unnecessary uniqueness check. We disable inlining here to curb code
     // growth.
     _buffer = ContiguousArray._allocateBufferUninitialized(minimumCapacity: count)
-    _buffer.count = count
+    _buffer.mutableCount = count
   }
   // Can't store count here because the buffer might be pointing to the
   // shared empty array.
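The `_modify` accessor above gives the clearest picture of the mutation window: the buffer is made unique and writable before the `yield`, and sealed again immediately after it. A hedged sketch of the same shape on a toy type (`TinyVector` is illustrative, not stdlib API; `_modify` is an underscored but usable Swift feature):

struct TinyVector {
  private var elements: [Int]
  init(_ elements: [Int]) { self.elements = elements }

  subscript(index: Int) -> Int {
    get { elements[index] }
    _modify {
      // Corresponds to _makeMutableAndUnique(): the storage must be
      // unique and writable before control is handed to the caller.
      precondition(index >= 0 && index < elements.count, "Index out of range")
      yield &elements[index]
      // Corresponds to _endMutation(): the write window closes here and
      // the buffer is treated as immutable again.
    }
  }
}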
@@ -657,6 +679,7 @@ extension ContiguousArray: RangeReplaceableCollection { public mutating func reserveCapacity(_ minimumCapacity: Int) { _reserveCapacityImpl(minimumCapacity: minimumCapacity, growForAppend: false) + _endMutation() } /// Reserves enough space to store `minimumCapacity` elements. @@ -666,14 +689,14 @@ extension ContiguousArray: RangeReplaceableCollection { internal mutating func _reserveCapacityImpl( minimumCapacity: Int, growForAppend: Bool ) { - let isUnique = _buffer.isUniquelyReferenced() - if _slowPath(!isUnique || _getCapacity() < minimumCapacity) { + let isUnique = _buffer.beginCOWMutation() + if _slowPath(!isUnique || _buffer.mutableCapacity < minimumCapacity) { _createNewBuffer(bufferIsUnique: isUnique, - minimumCapacity: Swift.max(minimumCapacity, count), + minimumCapacity: Swift.max(minimumCapacity, _buffer.count), growForAppend: growForAppend) } - _internalInvariant(capacity >= minimumCapacity) - _internalInvariant(capacity == 0 || _buffer.isUniquelyReferenced()) + _internalInvariant(_buffer.mutableCapacity >= minimumCapacity) + _internalInvariant(_buffer.mutableCapacity == 0 || _buffer.isUniquelyReferenced()) } /// Creates a new buffer, replacing the current buffer. @@ -711,7 +734,7 @@ extension ContiguousArray: RangeReplaceableCollection { @inlinable @_semantics("array.make_mutable") internal mutating func _makeUniqueAndReserveCapacityIfNotUnique() { - if _slowPath(!_buffer.isMutableAndUniquelyReferenced()) { + if _slowPath(!_buffer.beginCOWMutation()) { _createNewBuffer(bufferIsUnique: false, minimumCapacity: count + 1, growForAppend: true) @@ -721,15 +744,6 @@ extension ContiguousArray: RangeReplaceableCollection { @inlinable @_semantics("array.mutate_unknown") internal mutating func _reserveCapacityAssumingUniqueBuffer(oldCount: Int) { - // This is a performance optimization. This code used to be in an || - // statement in the _internalInvariant below. - // - // _internalInvariant(_buffer.capacity == 0 || - // _buffer.isMutableAndUniquelyReferenced()) - // - // SR-6437 - let capacity = _buffer.capacity == 0 - // Due to make_mutable hoisting the situation can arise where we hoist // _makeMutableAndUnique out of loop and use it to replace // _makeUniqueAndReserveCapacityIfNotUnique that preceeds this call. If the @@ -739,11 +753,11 @@ extension ContiguousArray: RangeReplaceableCollection { // This specific case is okay because we will make the buffer unique in this // function because we request a capacity > 0 and therefore _copyToNewBuffer // will be called creating a new buffer. 
-    _internalInvariant(capacity ||
-                       _buffer.isMutableAndUniquelyReferenced())
+    let capacity = _buffer.mutableCapacity
+    _internalInvariant(capacity == 0 || _buffer.isMutableAndUniquelyReferenced())
 
-    if _slowPath(oldCount + 1 > _buffer.capacity) {
-      _createNewBuffer(bufferIsUnique: true,
+    if _slowPath(oldCount + 1 > capacity) {
+      _createNewBuffer(bufferIsUnique: capacity > 0,
                        minimumCapacity: oldCount + 1,
                        growForAppend: true)
     }
@@ -756,10 +770,10 @@ extension ContiguousArray: RangeReplaceableCollection {
     newElement: __owned Element
   ) {
     _internalInvariant(_buffer.isMutableAndUniquelyReferenced())
-    _internalInvariant(_buffer.capacity >= _buffer.count + 1)
+    _internalInvariant(_buffer.mutableCapacity >= _buffer.mutableCount + 1)
 
-    _buffer.count = oldCount + 1
-    (_buffer.firstElementAddress + oldCount).initialize(to: newElement)
+    _buffer.mutableCount = oldCount + 1
+    (_buffer.mutableFirstElementAddress + oldCount).initialize(to: newElement)
   }
 
   /// Adds a new element at the end of the array.
@@ -789,9 +803,10 @@ extension ContiguousArray: RangeReplaceableCollection {
     // Separating uniqueness check and capacity check allows hoisting the
     // uniqueness check out of a loop.
     _makeUniqueAndReserveCapacityIfNotUnique()
-    let oldCount = _getCount()
+    let oldCount = _buffer.mutableCount
     _reserveCapacityAssumingUniqueBuffer(oldCount: oldCount)
     _appendElementAssumeUniqueAndCapacity(oldCount, newElement: newElement)
+    _endMutation()
   }
 
   /// Adds the elements of a sequence to the end of the array.
@@ -815,14 +830,19 @@ extension ContiguousArray: RangeReplaceableCollection {
   public mutating func append<S: Sequence>(contentsOf newElements: __owned S)
     where S.Element == Element {
 
+    defer {
+      _endMutation()
+    }
+
     let newElementsCount = newElements.underestimatedCount
-    reserveCapacityForAppend(newElementsCount: newElementsCount)
+    _reserveCapacityImpl(minimumCapacity: self.count + newElementsCount,
+                         growForAppend: true)
 
-    let oldCount = self.count
-    let startNewElements = _buffer.firstElementAddress + oldCount
+    let oldCount = _buffer.mutableCount
+    let startNewElements = _buffer.mutableFirstElementAddress + oldCount
     let buf = UnsafeMutableBufferPointer(
       start: startNewElements,
-      count: self.capacity - oldCount)
+      count: _buffer.mutableCapacity - oldCount)
 
     var (remainder,writtenUpTo) = buf.initialize(from: newElements)
@@ -834,19 +854,19 @@ extension ContiguousArray: RangeReplaceableCollection {
     // This check prevents a data race writing to _swiftEmptyArrayStorage
     if writtenCount > 0 {
-      _buffer.count += writtenCount
+      _buffer.mutableCount = _buffer.mutableCount + writtenCount
     }
 
     if writtenUpTo == buf.endIndex {
       // there may be elements that didn't fit in the existing buffer,
       // append them in slow sequence-only mode
-      var newCount = _getCount()
+      var newCount = _buffer.mutableCount
       var nextItem = remainder.next()
       while nextItem != nil {
-        reserveCapacityForAppend(newElementsCount: 1)
+        _reserveCapacityAssumingUniqueBuffer(oldCount: newCount)
 
-        let currentCapacity = _getCapacity()
-        let base = _buffer.firstElementAddress
+        let currentCapacity = _buffer.mutableCapacity
+        let base = _buffer.mutableFirstElementAddress
 
         // fill while there is another item and spare capacity
         while let next = nextItem, newCount < currentCapacity {
@@ -854,7 +874,7 @@ extension ContiguousArray: RangeReplaceableCollection {
           newCount += 1
           nextItem = remainder.next()
         }
-        _buffer.count = newCount
+        _buffer.mutableCount = newCount
       }
     }
   }
@@ -866,17 +886,19 @@ extension ContiguousArray: RangeReplaceableCollection {
     // for consistency, we need unique 
self even if newElements is empty. _reserveCapacityImpl(minimumCapacity: self.count + newElementsCount, growForAppend: true) + _endMutation() } @inlinable @_semantics("array.mutate_unknown") public mutating func _customRemoveLast() -> Element? { _makeMutableAndUnique() - let newCount = _getCount() - 1 + let newCount = _buffer.mutableCount - 1 _precondition(newCount >= 0, "Can't removeLast from an empty ContiguousArray") - let pointer = (_buffer.firstElementAddress + newCount) + let pointer = (_buffer.mutableFirstElementAddress + newCount) let element = pointer.move() - _buffer.count = newCount + _buffer.mutableCount = newCount + _endMutation() return element } @@ -900,14 +922,15 @@ extension ContiguousArray: RangeReplaceableCollection { @_semantics("array.mutate_unknown") public mutating func remove(at index: Int) -> Element { _makeMutableAndUnique() - let currentCount = _getCount() + let currentCount = _buffer.mutableCount _precondition(index < currentCount, "Index out of range") _precondition(index >= 0, "Index out of range") - let newCount = _getCount() - 1 - let pointer = (_buffer.firstElementAddress + index) + let newCount = currentCount - 1 + let pointer = (_buffer.mutableFirstElementAddress + index) let result = pointer.move() pointer.moveInitialize(from: pointer + 1, count: newCount - index) - _buffer.count = newCount + _buffer.mutableCount = newCount + _endMutation() return result } @@ -1150,7 +1173,7 @@ extension ContiguousArray { _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R { _makeMutableAndUnique() - let count = self.count + let count = _buffer.mutableCount // Ensure that body can't invalidate the storage or its bounds by // moving self into a temporary working array. @@ -1165,7 +1188,7 @@ extension ContiguousArray { (work, self) = (self, work) // Create an UnsafeBufferPointer over work that we can pass to body - let pointer = work._buffer.firstElementAddress + let pointer = work._buffer.mutableFirstElementAddress var inoutBufferPointer = UnsafeMutableBufferPointer( start: pointer, count: count) @@ -1177,6 +1200,7 @@ extension ContiguousArray { "ContiguousArray withUnsafeMutableBufferPointer: replacing the buffer is not allowed") (work, self) = (self, work) + _endMutation() } // Invoke the body. @@ -1267,8 +1291,10 @@ extension ContiguousArray { let insertCount = newElements.count let growth = insertCount - eraseCount - reserveCapacityForAppend(newElementsCount: growth) + _reserveCapacityImpl(minimumCapacity: self.count + growth, + growForAppend: true) _buffer.replaceSubrange(subrange, with: insertCount, elementsOf: newElements) + _endMutation() } } diff --git a/stdlib/public/core/ContiguousArrayBuffer.swift b/stdlib/public/core/ContiguousArrayBuffer.swift index db093fc29f09f..0519e44821b95 100644 --- a/stdlib/public/core/ContiguousArrayBuffer.swift +++ b/stdlib/public/core/ContiguousArrayBuffer.swift @@ -332,6 +332,15 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { Element.self)) } + /// A mutable pointer to the first element. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + internal var mutableFirstElementAddress: UnsafeMutablePointer { + return UnsafeMutablePointer(Builtin.projectTailElems(mutableOrEmptyStorage, + Element.self)) + } + @inlinable internal var firstElementAddressIfContiguous: UnsafeMutablePointer? 
{ return firstElementAddress @@ -399,9 +408,94 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { @inline(__always) internal func getElement(_ i: Int) -> Element { _internalInvariant(i >= 0 && i < count, "Array index out of range") - return firstElementAddress[i] + let addr = UnsafePointer( + Builtin.projectTailElems(immutableStorage, Element.self)) + return addr[i] + } + + /// The storage of an immutable buffer. + /// + /// - Precondition: The buffer must be immutable. + @_alwaysEmitIntoClient + @inline(__always) + internal var immutableStorage : __ContiguousArrayStorageBase { +#if INTERNAL_CHECKS_ENABLED + _internalInvariant(isImmutable, "Array storage is not immutable") +#endif + return Builtin.COWBufferForReading(_storage) } + /// The storage of a mutable buffer. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + @inline(__always) + internal var mutableStorage : __ContiguousArrayStorageBase { +#if INTERNAL_CHECKS_ENABLED + _internalInvariant(isMutable, "Array storage is immutable") +#endif + return _storage + } + + /// The storage of a mutable or empty buffer. + /// + /// - Precondition: The buffer must be mutable or the empty array singleton. + @_alwaysEmitIntoClient + @inline(__always) + internal var mutableOrEmptyStorage : __ContiguousArrayStorageBase { +#if INTERNAL_CHECKS_ENABLED + _internalInvariant(isMutable || _storage.countAndCapacity.capacity == 0, + "Array storage is immutable and not empty") +#endif + return _storage + } + +#if INTERNAL_CHECKS_ENABLED + @_alwaysEmitIntoClient + internal var isImmutable: Bool { + get { +// TODO: Enable COW runtime checks by default (when INTERNAL_CHECKS_ENABLED +// is set). Currently there is a problem with remote AST which needs to be +// fixed. +#if ENABLE_COW_RUNTIME_CHECKS + if #available(macOS 9999, iOS 9999, watchOS 9999, tvOS 9999, *) { + return capacity == 0 || _swift_isImmutableCOWBuffer(_storage) + } +#endif + return true + } + nonmutating set { +#if ENABLE_COW_RUNTIME_CHECKS + if #available(macOS 9999, iOS 9999, watchOS 9999, tvOS 9999, *) { + if newValue { + if capacity > 0 { + let wasImmutable = _swift_setImmutableCOWBuffer(_storage, true) + _internalInvariant(!wasImmutable, + "re-setting immutable array buffer to immutable") + } + } else { + _internalInvariant(capacity > 0, + "setting empty array buffer to mutable") + let wasImmutable = _swift_setImmutableCOWBuffer(_storage, false) + _internalInvariant(wasImmutable, + "re-setting mutable array buffer to mutable") + } + } +#endif + } + } + + @_alwaysEmitIntoClient + internal var isMutable: Bool { +#if ENABLE_COW_RUNTIME_CHECKS + if #available(macOS 9999, iOS 9999, watchOS 9999, tvOS 9999, *) { + return !_swift_isImmutableCOWBuffer(_storage) + } +#endif + return true + } +#endif + /// Get or set the value of the ith element. @inlinable internal subscript(i: Int) -> Element { @@ -424,6 +518,10 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { } /// The number of elements the buffer stores. + /// + /// This property is obsolete. It's only used for the ArrayBufferProtocol and + /// to keep backward compatibility. + /// Use `immutableCount` or `mutableCount` instead. 
@inlinable internal var count: Int { get { @@ -433,30 +531,97 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { _internalInvariant(newValue >= 0) _internalInvariant( - newValue <= capacity, + newValue <= mutableCapacity, + "Can't grow an array buffer past its capacity") + + mutableStorage.countAndCapacity.count = newValue + } + } + + /// The number of elements of the buffer. + /// + /// - Precondition: The buffer must be immutable. + @_alwaysEmitIntoClient + @inline(__always) + internal var immutableCount: Int { + return immutableStorage.countAndCapacity.count + } + + /// The number of elements of the buffer. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + internal var mutableCount: Int { + @inline(__always) + get { + return mutableOrEmptyStorage.countAndCapacity.count + } + @inline(__always) + nonmutating set { + _internalInvariant(newValue >= 0) + + _internalInvariant( + newValue <= mutableCapacity, "Can't grow an array buffer past its capacity") - _storage.countAndCapacity.count = newValue + mutableStorage.countAndCapacity.count = newValue } } /// Traps unless the given `index` is valid for subscripting, i.e. /// `0 ≤ index < count`. + /// + /// - Precondition: The buffer must be immutable. @inlinable @inline(__always) internal func _checkValidSubscript(_ index: Int) { _precondition( - (index >= 0) && (index < count), + (index >= 0) && (index < immutableCount), + "Index out of range" + ) + } + + /// Traps unless the given `index` is valid for subscripting, i.e. + /// `0 ≤ index < count`. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + @inline(__always) + internal func _checkValidSubscriptMutating(_ index: Int) { + _precondition( + (index >= 0) && (index < mutableCount), "Index out of range" ) } /// The number of elements the buffer can store without reallocation. + /// + /// This property is obsolete. It's only used for the ArrayBufferProtocol and + /// to keep backward compatibility. + /// Use `immutableCapacity` or `mutableCapacity` instead. @inlinable internal var capacity: Int { return _storage.countAndCapacity.capacity } + /// The number of elements the buffer can store without reallocation. + /// + /// - Precondition: The buffer must be immutable. + @_alwaysEmitIntoClient + @inline(__always) + internal var immutableCapacity: Int { + return immutableStorage.countAndCapacity.capacity + } + + /// The number of elements the buffer can store without reallocation. + /// + /// - Precondition: The buffer must be mutable. + @_alwaysEmitIntoClient + @inline(__always) + internal var mutableCapacity: Int { + return mutableOrEmptyStorage.countAndCapacity.capacity + } + /// Copy the elements in `bounds` from this buffer into uninitialized /// memory starting at `target`. Return a pointer "past the end" of the /// just-initialized memory. @@ -492,7 +657,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { get { return _SliceBuffer( owner: _storage, - subscriptBaseAddress: subscriptBaseAddress, + subscriptBaseAddress: firstElementAddress, indices: bounds, hasNativeBuffer: true) } @@ -503,14 +668,46 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { /// Returns `true` iff this buffer's storage is uniquely-referenced. /// - /// - Note: This does not mean the buffer is mutable. Other factors - /// may need to be considered, such as whether the buffer could be - /// some immutable Cocoa container. + /// This function should only be used for internal sanity checks. 
+  /// To guard a buffer mutation, use `beginCOWMutation`.
   @inlinable
   internal mutating func isUniquelyReferenced() -> Bool {
     return _isUnique(&_storage)
   }
 
+  /// Returns `true` and puts the buffer in a mutable state iff the buffer's
+  /// storage is uniquely-referenced.
+  ///
+  /// - Precondition: The buffer must be immutable.
+  ///
+  /// - Warning: It's a requirement to call `beginCOWMutation` before the buffer
+  ///   is mutated.
+  @_alwaysEmitIntoClient
+  internal mutating func beginCOWMutation() -> Bool {
+    if Bool(Builtin.beginCOWMutation(&_storage)) {
+#if INTERNAL_CHECKS_ENABLED
+      isImmutable = false
+#endif
+      return true
+    }
+    return false
+  }
+
+  /// Puts the buffer in an immutable state.
+  ///
+  /// - Precondition: The buffer must be mutable.
+  ///
+  /// - Warning: After a call to `endCOWMutation` the buffer must not be mutated
+  ///   until the next call of `beginCOWMutation`.
+  @_alwaysEmitIntoClient
+  @inline(__always)
+  internal mutating func endCOWMutation() {
+#if INTERNAL_CHECKS_ENABLED
+    isImmutable = true
+#endif
+    Builtin.endCOWMutation(&_storage)
+  }
+
   /// Creates and returns a new uniquely referenced buffer which is a copy of
   /// this buffer.
   ///
@@ -553,14 +750,14 @@ internal struct _ContiguousArrayBuffer<Element>: _ArrayBufferProtocol {
     if bufferIsUnique {
       // As an optimization, if the original buffer is unique, we can just move
       // the elements instead of copying.
-      let dest = newBuffer.firstElementAddress
+      let dest = newBuffer.mutableFirstElementAddress
       dest.moveInitialize(from: firstElementAddress, count: c)
-      count = 0
+      mutableCount = 0
     } else {
       _copyContents(
         subRange: 0..<count,
-        initializing: newBuffer.firstElementAddress)
+        initializing: newBuffer.mutableFirstElementAddress)
     }
@@ -668,7 +865,7 @@ internal func += <Element, C: Collection>(
       buf = UnsafeMutableBufferPointer(
         start: lhs.firstElementAddress + oldCount,
         count: rhs.count)
-      lhs.count = newCount
+      lhs.mutableCount = newCount
     } else {
       var newLHS = _ContiguousArrayBuffer<Element>(
         _uninitializedCount: newCount,
         minimumCapacity: _growArrayCapacity(oldCount))
@@ -678,7 +875,7 @@ internal func += <Element, C: Collection>(
       newLHS.firstElementAddress.moveInitialize(
         from: lhs.firstElementAddress, count: oldCount)
-      lhs.count = 0
+      lhs.mutableCount = 0
       (lhs, newLHS) = (newLHS, lhs)
       buf = UnsafeMutableBufferPointer(
         start: lhs.firstElementAddress + oldCount,
@@ -779,7 +976,7 @@ internal func _copyCollectionToContiguousArray<
     return ContiguousArray()
   }
 
-  let result = _ContiguousArrayBuffer(
+  var result = _ContiguousArrayBuffer(
     _uninitializedCount: count,
     minimumCapacity: 0)
@@ -796,6 +993,7 @@ internal func _copyCollectionToContiguousArray<
   _precondition(end == p.endIndex,
     "invalid Collection: less than 'count' elements in collection")
 
+  result.endCOWMutation()
   return ContiguousArray(_buffer: result)
 }
 
@@ -847,7 +1045,7 @@ internal struct _UnsafePartiallyInitializedContiguousArrayBuffer<Element> {
         // Since count is always 0 there, this code does nothing anyway
         newResult.firstElementAddress.moveInitialize(
           from: result.firstElementAddress, count: result.capacity)
-        result.count = 0
+        result.mutableCount = 0
       }
       (result, newResult) = (newResult, result)
     }
@@ -875,7 +1073,12 @@ internal struct _UnsafePartiallyInitializedContiguousArrayBuffer<Element> {
   @inline(__always) // For performance reasons.
   internal mutating func finish() -> ContiguousArray<Element> {
     // Adjust the initialized count of the buffer.
-    result.count = result.capacity - remainingCapacity
+    if result.capacity != 0 {
+      result.mutableCount = result.capacity - remainingCapacity
+    } else {
+      _internalInvariant(remainingCapacity == 0)
+      _internalInvariant(result.count == 0)
+    }
     return finishWithOriginalCount()
   }
@@ -894,6 +1097,7 @@ internal struct _UnsafePartiallyInitializedContiguousArrayBuffer<Element> {
     var finalResult = _ContiguousArrayBuffer<Element>()
     (finalResult, result) = (result, finalResult)
     remainingCapacity = 0
+    finalResult.endCOWMutation()
     return ContiguousArray(_buffer: finalResult)
   }
 }
diff --git a/stdlib/public/core/SliceBuffer.swift b/stdlib/public/core/SliceBuffer.swift
index d936250d0d359..fd19d31646218 100644
--- a/stdlib/public/core/SliceBuffer.swift
+++ b/stdlib/public/core/SliceBuffer.swift
@@ -176,17 +176,9 @@ internal struct _SliceBuffer<Element>
     minimumCapacity: Int
   ) -> NativeBuffer? {
     _invariantCheck()
-    // This is a performance optimization that was put in to ensure that at
-    // -Onone, copy of self we make to call _hasNativeBuffer is destroyed before
-    // we call isUniquelyReferenced. Otherwise, isUniquelyReferenced will always
-    // fail causing us to always copy.
-    //
-    // if _fastPath(_hasNativeBuffer && isUniquelyReferenced) {
-    //
-    // SR-6437
-    let native = _hasNativeBuffer
-    let unique = isUniquelyReferenced()
-    if _fastPath(native && unique) {
+    // Note: with COW support it's already guaranteed to have a uniquely
+    // referenced buffer. This check is only needed for backward compatibility.
+    if _fastPath(isUniquelyReferenced()) {
       if capacity >= minimumCapacity {
         // Since we have the last reference, drop any inaccessible
         // trailing elements in the underlying storage. That will
@@ -275,7 +267,7 @@ internal struct _SliceBuffer<Element>
     set {
       let growth = newValue - count
       if growth != 0 {
-        nativeBuffer.count += growth
+        nativeBuffer.mutableCount += growth
         self.endIndex += growth
       }
       _invariantCheck()
@@ -304,11 +296,52 @@ internal struct _SliceBuffer<Element>
     return count
   }
 
+  /// Returns `true` iff this buffer's storage is uniquely-referenced.
+  ///
+  /// This function should only be used for internal sanity checks and for
+  /// backward compatibility.
+  /// To guard a buffer mutation, use `beginCOWMutation`.
   @inlinable
   internal mutating func isUniquelyReferenced() -> Bool {
     return isKnownUniquelyReferenced(&owner)
   }
 
+  /// Returns `true` and puts the buffer in a mutable state iff the buffer's
+  /// storage is uniquely-referenced.
+  ///
+  /// - Precondition: The buffer must be immutable.
+  ///
+  /// - Warning: It's a requirement to call `beginCOWMutation` before the buffer
+  ///   is mutated.
+  @_alwaysEmitIntoClient
+  internal mutating func beginCOWMutation() -> Bool {
+    if !_hasNativeBuffer {
+      return false
+    }
+    if Bool(Builtin.beginCOWMutation(&owner)) {
+#if INTERNAL_CHECKS_ENABLED
+      nativeBuffer.isImmutable = false
+#endif
+      return true
+    }
+    return false
+  }
+
+  /// Puts the buffer in an immutable state.
+  ///
+  /// - Precondition: The buffer must be mutable.
+  ///
+  /// - Warning: After a call to `endCOWMutation` the buffer must not be mutated
+  ///   until the next call of `beginCOWMutation`.
+ @_alwaysEmitIntoClient + @inline(__always) + internal mutating func endCOWMutation() { +#if INTERNAL_CHECKS_ENABLED + nativeBuffer.isImmutable = true +#endif + Builtin.endCOWMutation(&owner) + } + @inlinable internal func getElement(_ i: Int) -> Element { _internalInvariant(i >= startIndex, "slice index is out of range (before startIndex)") diff --git a/stdlib/public/runtime/HeapObject.cpp b/stdlib/public/runtime/HeapObject.cpp index 9c38b5ed1d5e5..c409c23731e16 100644 --- a/stdlib/public/runtime/HeapObject.cpp +++ b/stdlib/public/runtime/HeapObject.cpp @@ -887,6 +887,23 @@ WeakReference *swift::swift_weakTakeAssign(WeakReference *dest, #ifndef NDEBUG +/// Returns true if the "immutable" flag is set on \p object. +/// +/// Used for runtime consistency checking of COW buffers. +SWIFT_RUNTIME_EXPORT +bool _swift_isImmutableCOWBuffer(HeapObject *object) { + return object->refCounts.isImmutableCOWBuffer(); +} + +/// Sets the "immutable" flag on \p object to \p immutable and returns the old +/// value of the flag. +/// +/// Used for runtime consistency checking of COW buffers. +SWIFT_RUNTIME_EXPORT +bool _swift_setImmutableCOWBuffer(HeapObject *object, bool immutable) { + return object->refCounts.setIsImmutableCOWBuffer(immutable); +} + void HeapObject::dump() const { auto *Self = const_cast(this); printf("HeapObject: %p\n", Self); diff --git a/stdlib/public/runtime/RefCount.cpp b/stdlib/public/runtime/RefCount.cpp index 4dc7394f92540..b38f334fb0e21 100644 --- a/stdlib/public/runtime/RefCount.cpp +++ b/stdlib/public/runtime/RefCount.cpp @@ -156,6 +156,28 @@ void _swift_stdlib_immortalize(void *obj) { heapObj->refCounts.setIsImmortal(true); } +#ifndef NDEBUG +// SideTableRefCountBits specialization intentionally does not exist. +template <> +bool RefCounts::isImmutableCOWBuffer() { + if (!hasSideTable()) + return false; + HeapObjectSideTableEntry *sideTable = allocateSideTable(false); + assert(sideTable); + return sideTable->isImmutableCOWBuffer(); +} + +template <> +bool RefCounts::setIsImmutableCOWBuffer(bool immutable) { + HeapObjectSideTableEntry *sideTable = allocateSideTable(false); + assert(sideTable); + bool oldValue = sideTable->isImmutableCOWBuffer(); + sideTable->setIsImmutableCOWBuffer(immutable); + return oldValue; +} + +#endif + // namespace swift } // namespace swift diff --git a/test/AutoDiff/SILOptimizer/activity_analysis.swift b/test/AutoDiff/SILOptimizer/activity_analysis.swift index 45420276a7374..8cd0341b6cea8 100644 --- a/test/AutoDiff/SILOptimizer/activity_analysis.swift +++ b/test/AutoDiff/SILOptimizer/activity_analysis.swift @@ -254,6 +254,8 @@ func testArrayUninitializedIntrinsic(_ x: Float, _ y: Float) -> [Float] { // CHECK: [ACTIVE] %9 = pointer_to_address %8 : $Builtin.RawPointer to [strict] $*Float // CHECK: [VARIED] %11 = integer_literal $Builtin.Word, 1 // CHECK: [ACTIVE] %12 = index_addr %9 : $*Float, %11 : $Builtin.Word +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [ACTIVE] %15 = apply %14(%7) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> @differentiable(where T: Differentiable) func testArrayUninitializedIntrinsicGeneric(_ x: T, _ y: T) -> [T] { @@ -271,6 +273,8 @@ func testArrayUninitializedIntrinsicGeneric(_ x: T, _ y: T) -> [T] { // CHECK: [ACTIVE] %9 = pointer_to_address %8 : $Builtin.RawPointer to [strict] $*T // CHECK: [VARIED] %11 = integer_literal $Builtin.Word, 1 // CHECK: [ACTIVE] %12 = index_addr %9 : $*T, %11 : $Builtin.Word +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) 
+// CHECK: [ACTIVE] %15 = apply %14(%7) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> // TF-952: Test array literal initialized from an address (e.g. `var`). @differentiable @@ -298,6 +302,8 @@ func testArrayUninitializedIntrinsicAddress(_ x: Float, _ y: Float) -> [Float] { // CHECK: [VARIED] %24 = integer_literal $Builtin.Word, 1 // CHECK: [ACTIVE] %25 = index_addr %20 : $*Float, %24 : $Builtin.Word // CHECK: [ACTIVE] %26 = begin_access [read] [static] %4 : $*Float +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [ACTIVE] %30 = apply %29(%18) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> // TF-952: Test array literal initialized with `apply` direct results. @differentiable @@ -320,6 +326,8 @@ func testArrayUninitializedIntrinsicFunctionResult(_ x: Float, _ y: Float) -> [F // CHECK: [USEFUL] %16 = metatype $@thin Float.Type // CHECK: [NONE] // function_ref static Float.* infix(_:_:) // CHECK: [ACTIVE] %18 = apply %17(%0, %1, %16) : $@convention(method) (Float, Float, @thin Float.Type) -> Float +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [ACTIVE] %21 = apply %20(%7) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> // TF-975: Test nested array literals. @differentiable @@ -338,28 +346,32 @@ func testArrayUninitializedIntrinsicNested(_ x: Float, _ y: Float) -> [Float] { // CHECK: [ACTIVE] %9 = pointer_to_address %8 : $Builtin.RawPointer to [strict] $*Float // CHECK: [VARIED] %11 = integer_literal $Builtin.Word, 1 // CHECK: [ACTIVE] %12 = index_addr %9 : $*Float, %11 : $Builtin.Word -// CHECK: [USEFUL] %15 = integer_literal $Builtin.Word, 2 +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [ACTIVE] %15 = apply %14(%7) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> +// CHECK: [USEFUL] %17 = integer_literal $Builtin.Word, 2 // CHECK: [NONE] // function_ref _allocateUninitializedArray(_:) -// CHECK: [ACTIVE] %17 = apply %16(%15) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer) -// CHECK: [ACTIVE] (**%18**, %19) = destructure_tuple %17 : $(Array, Builtin.RawPointer) -// CHECK: [VARIED] (%18, **%19**) = destructure_tuple %17 : $(Array, Builtin.RawPointer) -// CHECK: [ACTIVE] %20 = pointer_to_address %19 : $Builtin.RawPointer to [strict] $*Float -// CHECK: [ACTIVE] %21 = begin_borrow %7 : $Array -// CHECK: [USEFUL] %22 = integer_literal $Builtin.IntLiteral, 0 -// CHECK: [USEFUL] %23 = metatype $@thin Int.Type +// CHECK: [ACTIVE] %19 = apply %18(%17) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer) +// CHECK: [ACTIVE] (**%20**, %21) = destructure_tuple %19 : $(Array, Builtin.RawPointer) +// CHECK: [VARIED] (%20, **%21**) = destructure_tuple %19 : $(Array, Builtin.RawPointer) +// CHECK: [ACTIVE] %22 = pointer_to_address %21 : $Builtin.RawPointer to [strict] $*Float +// CHECK: [ACTIVE] %23 = begin_borrow %15 : $Array +// CHECK: [USEFUL] %24 = integer_literal $Builtin.IntLiteral, 0 +// CHECK: [USEFUL] %25 = metatype $@thin Int.Type // CHECK: [NONE] // function_ref Int.init(_builtinIntegerLiteral:) -// CHECK: [USEFUL] %25 = apply %24(%22, %23) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int +// CHECK: [USEFUL] %27 = apply %26(%24, %25) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int // CHECK: [NONE] // function_ref Array.subscript.getter -// CHECK: [NONE] %27 = apply %26(%20, %25, %21) : 
$@convention(method) <τ_0_0> (Int, @guaranteed Array<τ_0_0>) -> @out τ_0_0 -// CHECK: [VARIED] %28 = integer_literal $Builtin.Word, 1 -// CHECK: [ACTIVE] %29 = index_addr %20 : $*Float, %28 : $Builtin.Word -// CHECK: [ACTIVE] %30 = begin_borrow %7 : $Array -// CHECK: [USEFUL] %31 = integer_literal $Builtin.IntLiteral, 1 -// CHECK: [USEFUL] %32 = metatype $@thin Int.Type +// CHECK: [NONE] %29 = apply %28(%22, %27, %23) : $@convention(method) <τ_0_0> (Int, @guaranteed Array<τ_0_0>) -> @out τ_0_0 +// CHECK: [VARIED] %30 = integer_literal $Builtin.Word, 1 +// CHECK: [ACTIVE] %31 = index_addr %22 : $*Float, %30 : $Builtin.Word +// CHECK: [ACTIVE] %32 = begin_borrow %15 : $Array +// CHECK: [USEFUL] %33 = integer_literal $Builtin.IntLiteral, 1 +// CHECK: [USEFUL] %34 = metatype $@thin Int.Type // CHECK: [NONE] // function_ref Int.init(_builtinIntegerLiteral:) -// CHECK: [USEFUL] %34 = apply %33(%31, %32) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int +// CHECK: [USEFUL] %36 = apply %35(%33, %34) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int // CHECK: [NONE] // function_ref Array.subscript.getter -// CHECK: [NONE] %36 = apply %35(%29, %34, %30) : $@convention(method) <τ_0_0> (Int, @guaranteed Array<τ_0_0>) -> @out τ_0_0 +// CHECK: [NONE] %38 = apply %37(%31, %36, %32) : $@convention(method) <τ_0_0> (Int, @guaranteed Array<τ_0_0>) -> @out τ_0_0 +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [ACTIVE] %40 = apply %39(%20) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> // TF-978: Test array literal initialized with `apply` indirect results. struct Wrapper: Differentiable { @@ -388,6 +400,8 @@ func testArrayUninitializedIntrinsicApplyIndirectResult(_ x: T, _ y: T) -> [W // CHECK: [ACTIVE] %19 = alloc_stack $T // CHECK: [NONE] // function_ref Wrapper.init(value:) // CHECK: [NONE] %22 = apply %21(%17, %19, %18) : $@convention(method) <τ_0_0 where τ_0_0 : Differentiable> (@in τ_0_0, @thin Wrapper<τ_0_0>.Type) -> @out Wrapper<τ_0_0> +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [ACTIVE] %25 = apply %24>(%7) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> //===----------------------------------------------------------------------===// // `inout` argument differentiation @@ -647,24 +661,26 @@ func testBeginApplyActiveButInitiallyNonactiveInoutArgument(x: Float) -> Float { // CHECK: [USEFUL] %10 = metatype $@thin Float.Type // CHECK: [NONE] // function_ref Float.init(_builtinIntegerLiteral:) // CHECK: [USEFUL] %12 = apply %11(%9, %10) : $@convention(method) (Builtin.IntLiteral, @thin Float.Type) -> Float -// CHECK: [USEFUL] %15 = integer_literal $Builtin.IntLiteral, 0 -// CHECK: [USEFUL] %16 = metatype $@thin Int.Type +// CHECK: [NONE] // function_ref _finalizeUninitializedArray(_:) +// CHECK: [USEFUL] %15 = apply %14(%6) : $@convention(thin) <τ_0_0> (@owned Array<τ_0_0>) -> @owned Array<τ_0_0> +// CHECK: [USEFUL] %17 = integer_literal $Builtin.IntLiteral, 0 +// CHECK: [USEFUL] %18 = metatype $@thin Int.Type // CHECK: [NONE] // function_ref Int.init(_builtinIntegerLiteral:) -// CHECK: [USEFUL] %18 = apply %17(%15, %16) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int -// CHECK: [ACTIVE] %19 = begin_access [modify] [static] %2 : $*Array +// CHECK: [USEFUL] %20 = apply %19(%17, %18) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int +// CHECK: [ACTIVE] %21 = begin_access [modify] [static] %2 : $*Array // CHECK: [NONE] // 
function_ref Array.subscript.modify -// CHECK: [ACTIVE] (**%21**, %22) = begin_apply %20(%18, %19) : $@yield_once @convention(method) <τ_0_0> (Int, @inout Array<τ_0_0>) -> @yields @inout τ_0_0 -// CHECK: [VARIED] (%21, **%22**) = begin_apply %20(%18, %19) : $@yield_once @convention(method) <τ_0_0> (Int, @inout Array<τ_0_0>) -> @yields @inout τ_0_0 -// CHECK: [USEFUL] %26 = integer_literal $Builtin.IntLiteral, 0 -// CHECK: [USEFUL] %27 = metatype $@thin Int.Type +// CHECK: [ACTIVE] (**%23**, %24) = begin_apply %22(%20, %21) : $@yield_once @convention(method) <τ_0_0> (Int, @inout Array<τ_0_0>) -> @yields @inout τ_0_0 +// CHECK: [VARIED] (%23, **%24**) = begin_apply %22(%20, %21) : $@yield_once @convention(method) <τ_0_0> (Int, @inout Array<τ_0_0>) -> @yields @inout τ_0_0 +// CHECK: [USEFUL] %28 = integer_literal $Builtin.IntLiteral, 0 +// CHECK: [USEFUL] %29 = metatype $@thin Int.Type // CHECK: [NONE] // function_ref Int.init(_builtinIntegerLiteral:) -// CHECK: [USEFUL] %29 = apply %28(%26, %27) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int -// CHECK: [ACTIVE] %30 = begin_access [read] [static] %2 : $*Array -// CHECK: [ACTIVE] %31 = load_borrow %30 : $*Array -// CHECK: [ACTIVE] %32 = alloc_stack $Float +// CHECK: [USEFUL] %31 = apply %30(%28, %29) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int +// CHECK: [ACTIVE] %32 = begin_access [read] [static] %2 : $*Array +// CHECK: [ACTIVE] %33 = load_borrow %32 : $*Array +// CHECK: [ACTIVE] %34 = alloc_stack $Float // CHECK: [NONE] // function_ref Array.subscript.getter -// CHECK: [NONE] %34 = apply %33(%32, %29, %31) : $@convention(method) <τ_0_0> (Int, @guaranteed Array<τ_0_0>) -> @out τ_0_0 -// CHECK: [ACTIVE] %35 = load [trivial] %32 : $*Float +// CHECK: [NONE] %36 = apply %35(%34, %31, %33) : $@convention(method) <τ_0_0> (Int, @guaranteed Array<τ_0_0>) -> @out τ_0_0 +// CHECK: [ACTIVE] %37 = load [trivial] %34 : $*Float //===----------------------------------------------------------------------===// // Class differentiation diff --git a/test/IRGen/big_types_corner_cases.swift b/test/IRGen/big_types_corner_cases.swift index 543f0a3f51290..f956b23f7effe 100644 --- a/test/IRGen/big_types_corner_cases.swift +++ b/test/IRGen/big_types_corner_cases.swift @@ -136,11 +136,9 @@ public func enumCallee(_ x: LargeEnum) { case .Empty2: break } } -// CHECK-LABEL-64: define{{( dllexport)?}}{{( protected)?}} swiftcc void @"$s22big_types_corner_cases10enumCalleeyAA9LargeEnumOF"(%T22big_types_corner_cases9LargeEnumO* noalias nocapture dereferenceable({{.*}}) %0) #0 { +// CHECK-64-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @"$s22big_types_corner_cases10enumCalleeyyAA9LargeEnumOF"(%T22big_types_corner_cases9LargeEnumO* noalias nocapture dereferenceable({{.*}}) %0) #0 { // CHECK-64: alloca %T22big_types_corner_cases9LargeEnumO05InnerF0O // CHECK-64: alloca %T22big_types_corner_cases9LargeEnumO -// CHECK-64: call void @llvm.memcpy.p0i8.p0i8.i64 -// CHECK-64: call void @llvm.memcpy.p0i8.p0i8.i64 // CHECK-64: $ss5print_9separator10terminatoryypd_S2StF // CHECK-64: ret void diff --git a/test/IRGen/newtype.swift b/test/IRGen/newtype.swift index 93848b30ba68c..c913c9593e26c 100644 --- a/test/IRGen/newtype.swift +++ b/test/IRGen/newtype.swift @@ -1,6 +1,8 @@ // RUN: %empty-directory(%t) // RUN: %build-irgen-test-overlays -// RUN: %target-swift-frontend(mock-sdk: -sdk %S/Inputs -I %t -I %S/../IDE/Inputs/custom-modules) %s -emit-ir | %FileCheck %s -DINT=i%target-ptrsize +// RUN: %target-swift-frontend(mock-sdk: -sdk 
%S/Inputs -I %t -I %S/../IDE/Inputs/custom-modules) %s -emit-ir > %t/out.ll +// RUN: %FileCheck %s -DINT=i%target-ptrsize < %t/out.ll +// RUN: %FileCheck %s -check-prefix=CHECK-CC -DINT=i%target-ptrsize < %t/out.ll // RUN: %target-swift-frontend(mock-sdk: -sdk %S/Inputs -I %t -I %S/../IDE/Inputs/custom-modules) %s -emit-ir -O | %FileCheck %s -check-prefix=OPT -DINT=i%target-ptrsize import CoreFoundation import Foundation @@ -84,22 +86,6 @@ public func compareABIs() { takeMyABINewTypeNonNullNS(newNS!) takeMyABIOldTypeNonNullNS(oldNS!) - // Make sure that the calling conventions align correctly, that is we don't - // have double-indirection or anything else like that - // CHECK: declare %struct.__CFString* @getMyABINewType() - // CHECK: declare %struct.__CFString* @getMyABIOldType() - // - // CHECK: declare void @takeMyABINewType(%struct.__CFString*) - // CHECK: declare void @takeMyABIOldType(%struct.__CFString*) - // - // CHECK: declare void @takeMyABINewTypeNonNull(%struct.__CFString*) - // CHECK: declare void @takeMyABIOldTypeNonNull(%struct.__CFString*) - // - // CHECK: declare %0* @getMyABINewTypeNS() - // CHECK: declare %0* @getMyABIOldTypeNS() - // - // CHECK: declare void @takeMyABINewTypeNonNullNS(%0*) - // CHECK: declare void @takeMyABIOldTypeNonNullNS(%0*) } // OPT-LABEL: define swiftcc i1 @"$s7newtype12compareInitsSbyF" @@ -233,4 +219,20 @@ public func mutateRef() { // OPT: ret void } +// Make sure that the calling conventions align correctly, that is we don't +// have double-indirection or anything else like that +// CHECK-CC: declare %struct.__CFString* @getMyABINewType() +// CHECK-CC: declare %struct.__CFString* @getMyABIOldType() +// +// CHECK-CC: declare void @takeMyABINewType(%struct.__CFString*) +// CHECK-CC: declare void @takeMyABIOldType(%struct.__CFString*) +// +// CHECK-CC: declare void @takeMyABINewTypeNonNull(%struct.__CFString*) +// CHECK-CC: declare void @takeMyABIOldTypeNonNull(%struct.__CFString*) +// +// CHECK-CC: declare %0* @getMyABINewTypeNS() +// CHECK-CC: declare %0* @getMyABIOldTypeNS() +// +// CHECK-CC: declare void @takeMyABINewTypeNonNullNS(%0*) +// CHECK-CC: declare void @takeMyABIOldTypeNonNullNS(%0*) diff --git a/test/IRGen/unmanaged_objc_throw_func.swift b/test/IRGen/unmanaged_objc_throw_func.swift index 128b41b3999f7..43dd9c2bce925 100644 --- a/test/IRGen/unmanaged_objc_throw_func.swift +++ b/test/IRGen/unmanaged_objc_throw_func.swift @@ -15,11 +15,11 @@ import Foundation // CHECK-NEXT: %[[T2:.+]] = extractvalue { %swift.bridge*, i8* } %[[T0]], 1 // CHECK-NEXT: %[[T3:.+]] = bitcast i8* %[[T2]] to %TSi* // CHECK-NEXT: %._value = getelementptr inbounds %TSi, %TSi* %[[T3]], i32 0, i32 0 - // CHECK-NEXT: store i{{32|64}} 1, i{{32|64}}* %._value, align {{[0-9]+}} - // CHECK-NEXT: %[[T4:.+]] = call swiftcc %TSo7NSArrayC* @"$sSa10FoundationE19_bridgeToObjectiveCSo7NSArrayCyF"(%swift.bridge* %[[T1]], %swift.type* @"$sSiN") + // CHECK: %[[T7:.+]] = call swiftcc %swift.bridge* @"$ss27_finalizeUninitializedArrayySayxGABnlF"(%swift.bridge* %[[T1]], %swift.type* @"$sSiN") + // CHECK: %[[T4:.+]] = call swiftcc %TSo7NSArrayC* @"$sSa10FoundationE19_bridgeToObjectiveCSo7NSArrayCyF"(%swift.bridge* %[[T7]], %swift.type* @"$sSiN") // CHECK-NEXT: %[[T5:.+]] = bitcast %TSo7NSArrayC* %[[T4]] to %TSo10CFArrayRefa* // CHECK-NEXT: store %TSo10CFArrayRefa* %[[T5]] - // CHECK-NEXT: call void @swift_bridgeObjectRelease(%swift.bridge* %[[T1]]) #{{[0-9]+}} + // CHECK-NEXT: call void @swift_bridgeObjectRelease(%swift.bridge* %{{[0-9]+}}) #{{[0-9]+}} // CHECK-NEXT: %[[T6:.+]] = 
bitcast %TSo10CFArrayRefa* %[[T5]] to i8* // CHECK-NEXT: call void @llvm.objc.release(i8* %[[T6]]) // CHECK-NEXT: ret %TSo10CFArrayRefa* %[[T5]] diff --git a/test/SILGen/arguments.swift b/test/SILGen/arguments.swift index 30a8fc708164d..fdd26f1594c9a 100644 --- a/test/SILGen/arguments.swift +++ b/test/SILGen/arguments.swift @@ -14,6 +14,10 @@ func _allocateUninitializedArray(_: Builtin.Word) Builtin.int_trap() } +func _finalizeUninitializedArray(_ a: Array) -> Array { + return a +} + func _deallocateUninitializedArray(_: Array) {} var i:Int, f:Float, c:UnicodeScalar diff --git a/test/SILGen/errors.swift b/test/SILGen/errors.swift index 8919cb24009b5..deb8bb9595970 100644 --- a/test/SILGen/errors.swift +++ b/test/SILGen/errors.swift @@ -639,10 +639,12 @@ func test_variadic(_ cat: Cat) throws { // CHECK: [[NORM_3]]([[CAT3:%.*]] : @owned $Cat): // CHECK-NEXT: store [[CAT3]] to [init] [[ELT3]] // Complete the call and return. +// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF +// CHECK: [[FIN_ARRAY:%.*]] = apply [[FIN_FN]]([[ARRAY]]) // CHECK: [[TAKE_FN:%.*]] = function_ref @$s6errors14take_many_catsyyAA3CatCd_tKF : $@convention(thin) (@guaranteed Array) -> @error Error -// CHECK-NEXT: try_apply [[TAKE_FN]]([[ARRAY]]) : $@convention(thin) (@guaranteed Array) -> @error Error, normal [[NORM_CALL:bb[0-9]+]], error [[ERR_CALL:bb[0-9]+]] +// CHECK-NEXT: try_apply [[TAKE_FN]]([[FIN_ARRAY]]) : $@convention(thin) (@guaranteed Array) -> @error Error, normal [[NORM_CALL:bb[0-9]+]], error [[ERR_CALL:bb[0-9]+]] // CHECK: [[NORM_CALL]]([[T0:%.*]] : $()): -// CHECK-NEXT: destroy_value [[ARRAY]] +// CHECK-NEXT: destroy_value [[FIN_ARRAY]] // CHECK-NEXT: [[T0:%.*]] = tuple () // CHECK-NEXT: return // Failure from element 0. @@ -671,7 +673,7 @@ func test_variadic(_ cat: Cat) throws { // CHECK-NEXT: br [[RETHROW]]([[ERROR]] : $Error) // Failure from call. // CHECK: [[ERR_CALL]]([[ERROR:%.*]] : @owned $Error): -// CHECK-NEXT: destroy_value [[ARRAY]] +// CHECK-NEXT: destroy_value [[FIN_ARRAY]] // CHECK-NEXT: br [[RETHROW]]([[ERROR]] : $Error) // Rethrow. 
// CHECK: [[RETHROW]]([[ERROR:%.*]] : @owned $Error): diff --git a/test/SILGen/keypaths.swift b/test/SILGen/keypaths.swift index dfa89dbd806b6..7c1e88cff14a4 100644 --- a/test/SILGen/keypaths.swift +++ b/test/SILGen/keypaths.swift @@ -470,19 +470,25 @@ func test_variadics() { // CHECK: [[FN_REF:%[0-9]+]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF // CHECK: [[MAKE_ARR:%[0-9]+]] = apply [[FN_REF]]([[ARR_COUNT]]) // CHECK: ([[ARR:%[0-9]+]], %{{[0-9]+}}) = destructure_tuple [[MAKE_ARR]] : $(Array, Builtin.RawPointer) - // CHECK: keypath $KeyPath, (root $SubscriptVariadic1; gettable_property $Int, id @$s8keypaths18SubscriptVariadic1VyS2id_tcig : $@convention(method) (@guaranteed Array, SubscriptVariadic1) -> Int, getter @$s8keypaths18SubscriptVariadic1VyS2id_tcipACTK : $@convention(thin) (@in_guaranteed SubscriptVariadic1, UnsafeRawPointer) -> @out Int, indices [%$0 : $Array : $Array], indices_equals @$sSaySiGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySiGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[ARR]]) + // CHECK: [[FIN_REF:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF + // CHECK: [[FIN_ARR:%[0-9]+]] = apply [[FIN_REF]]([[ARR]]) + // CHECK: keypath $KeyPath, (root $SubscriptVariadic1; gettable_property $Int, id @$s8keypaths18SubscriptVariadic1VyS2id_tcig : $@convention(method) (@guaranteed Array, SubscriptVariadic1) -> Int, getter @$s8keypaths18SubscriptVariadic1VyS2id_tcipACTK : $@convention(thin) (@in_guaranteed SubscriptVariadic1, UnsafeRawPointer) -> @out Int, indices [%$0 : $Array : $Array], indices_equals @$sSaySiGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySiGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[FIN_ARR]]) _ = \SubscriptVariadic1.[1, 2, 3] // CHECK: [[ARR_COUNT:%[0-9]+]] = integer_literal $Builtin.Word, 1 // CHECK: [[FN_REF:%[0-9]+]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF // CHECK: [[MAKE_ARR:%[0-9]+]] = apply [[FN_REF]]([[ARR_COUNT]]) // CHECK: ([[ARR:%[0-9]+]], %{{[0-9]+}}) = destructure_tuple [[MAKE_ARR]] : $(Array, Builtin.RawPointer) - // CHECK: keypath $KeyPath, (root $SubscriptVariadic1; gettable_property $Int, id @$s8keypaths18SubscriptVariadic1VyS2id_tcig : $@convention(method) (@guaranteed Array, SubscriptVariadic1) -> Int, getter @$s8keypaths18SubscriptVariadic1VyS2id_tcipACTK : $@convention(thin) (@in_guaranteed SubscriptVariadic1, UnsafeRawPointer) -> @out Int, indices [%$0 : $Array : $Array], indices_equals @$sSaySiGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySiGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[ARR]]) + // CHECK: [[FIN_REF:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF + // CHECK: [[FIN_ARR:%[0-9]+]] = apply [[FIN_REF]]([[ARR]]) + // CHECK: keypath $KeyPath, (root $SubscriptVariadic1; gettable_property $Int, id @$s8keypaths18SubscriptVariadic1VyS2id_tcig : $@convention(method) (@guaranteed Array, SubscriptVariadic1) -> Int, getter @$s8keypaths18SubscriptVariadic1VyS2id_tcipACTK : $@convention(thin) (@in_guaranteed SubscriptVariadic1, UnsafeRawPointer) -> @out Int, indices [%$0 : $Array : $Array], indices_equals @$sSaySiGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySiGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[FIN_ARR]]) _ = \SubscriptVariadic1.[1] // CHECK: [[ARR_COUNT:%[0-9]+]] = integer_literal $Builtin.Word, 0 // CHECK: [[FN_REF:%[0-9]+]] = 
function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF // CHECK: [[MAKE_ARR:%[0-9]+]] = apply [[FN_REF]]([[ARR_COUNT]]) // CHECK: ([[ARR:%[0-9]+]], %{{[0-9]+}}) = destructure_tuple [[MAKE_ARR]] : $(Array, Builtin.RawPointer) - // CHECK: keypath $KeyPath, (root $SubscriptVariadic1; gettable_property $Int, id @$s8keypaths18SubscriptVariadic1VyS2id_tcig : $@convention(method) (@guaranteed Array, SubscriptVariadic1) -> Int, getter @$s8keypaths18SubscriptVariadic1VyS2id_tcipACTK : $@convention(thin) (@in_guaranteed SubscriptVariadic1, UnsafeRawPointer) -> @out Int, indices [%$0 : $Array : $Array], indices_equals @$sSaySiGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySiGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[ARR]]) + // CHECK: [[FIN_REF:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF + // CHECK: [[FIN_ARR:%[0-9]+]] = apply [[FIN_REF]]([[ARR]]) + // CHECK: keypath $KeyPath, (root $SubscriptVariadic1; gettable_property $Int, id @$s8keypaths18SubscriptVariadic1VyS2id_tcig : $@convention(method) (@guaranteed Array, SubscriptVariadic1) -> Int, getter @$s8keypaths18SubscriptVariadic1VyS2id_tcipACTK : $@convention(thin) (@in_guaranteed SubscriptVariadic1, UnsafeRawPointer) -> @out Int, indices [%$0 : $Array : $Array], indices_equals @$sSaySiGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySiGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[FIN_ARR]]) _ = \SubscriptVariadic1.[] _ = \SubscriptVariadic2.["", "1"] @@ -491,7 +497,9 @@ func test_variadics() { // CHECK: [[FN_REF:%[0-9]+]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF // CHECK: [[MAKE_ARR:%[0-9]+]] = apply [[FN_REF]]([[ARR_COUNT]]) // CHECK: ([[ARR:%[0-9]+]], %{{[0-9]+}}) = destructure_tuple [[MAKE_ARR]] : $(Array, Builtin.RawPointer) - // CHECK: keypath $KeyPath, (root $SubscriptVariadic2; gettable_property $String, id @$s8keypaths18SubscriptVariadic2Vyxxd_tcs26ExpressibleByStringLiteralRzluig : $@convention(method) <τ_0_0 where τ_0_0 : ExpressibleByStringLiteral> (@guaranteed Array<τ_0_0>, SubscriptVariadic2) -> @out τ_0_0, getter @$s8keypaths18SubscriptVariadic2Vyxxd_tcs26ExpressibleByStringLiteralRzluipACSSTK : $@convention(thin) (@in_guaranteed SubscriptVariadic2, UnsafeRawPointer) -> @out String, indices [%$0 : $Array : $Array], indices_equals @$sSaySSGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySSGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[ARR]]) + // CHECK: [[FIN_REF:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF + // CHECK: [[FIN_ARR:%[0-9]+]] = apply [[FIN_REF]]([[ARR]]) + // CHECK: keypath $KeyPath, (root $SubscriptVariadic2; gettable_property $String, id @$s8keypaths18SubscriptVariadic2Vyxxd_tcs26ExpressibleByStringLiteralRzluig : $@convention(method) <τ_0_0 where τ_0_0 : ExpressibleByStringLiteral> (@guaranteed Array<τ_0_0>, SubscriptVariadic2) -> @out τ_0_0, getter @$s8keypaths18SubscriptVariadic2Vyxxd_tcs26ExpressibleByStringLiteralRzluipACSSTK : $@convention(thin) (@in_guaranteed SubscriptVariadic2, UnsafeRawPointer) -> @out String, indices [%$0 : $Array : $Array], indices_equals @$sSaySSGTH : $@convention(thin) (UnsafeRawPointer, UnsafeRawPointer) -> Bool, indices_hash @$sSaySSGTh : $@convention(thin) (UnsafeRawPointer) -> Int) ([[FIN_ARR]]) _ = \SubscriptVariadic2.["", #function] _ = \SubscriptVariadic3.[""] diff --git a/test/SILGen/literals.swift b/test/SILGen/literals.swift index 
e173d94f02ce1..0b8f2bc833410 100644
--- a/test/SILGen/literals.swift
+++ b/test/SILGen/literals.swift
@@ -57,9 +57,11 @@ class TakesArrayLiteral<Element> : ExpressibleByArrayLiteral {
// CHECK: [[IDX1:%.*]] = integer_literal $Builtin.Word, 1
// CHECK: [[POINTER1:%.*]] = index_addr [[POINTER]] : $*Int, [[IDX1]] : $Builtin.Word
// CHECK: store [[TMP:%.*]] to [trivial] [[POINTER1]]
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Int>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<Int>.Type
-// CHECK: [[CTOR:%.*]] = class_method %15 : $@thick TakesArrayLiteral<Int>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Int>([[ARR]], [[METATYPE]])
+// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<Int>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Int>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
func returnsCustomArray() -> TakesArrayLiteral<Int> {
// Use temporary to simplify generated_sil
@@ -79,9 +81,11 @@ class Klass {}
// CHECK: [[CTOR:%.*]] = function_ref @$s8literals5KlassCACycfC : $@convention(method) (@thick Klass.Type) -> @owned Klass
// CHECK: [[TMP:%.*]] = apply [[CTOR]]([[KLASS_METATYPE]]) : $@convention(method) (@thick Klass.Type) -> @owned Klass
// CHECK: store [[TMP]] to [init] [[POINTER]]
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Klass>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<Klass>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<Klass>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Klass>([[ARR]], [[METATYPE]])
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Klass>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
func returnsClassElementArray() -> TakesArrayLiteral<Klass> {
return [Klass()]
}
@@ -98,9 +102,11 @@ struct Foo<T> {
// CHECK: ([[ARR:%.*]], [[ADDRESS:%.*]]) = destructure_tuple [[ARR_TMP]]
// CHECK: [[POINTER:%.*]] = pointer_to_address [[ADDRESS]]
// CHECK: copy_addr %0 to [initialization] [[POINTER]] : $*Foo<T>
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Foo<T>>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<Foo<T>>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<Foo<T>>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo<T>>([[ARR]], [[METATYPE]])
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo<T>>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
func returnsAddressOnlyElementArray<T>(t: Foo<T>) -> TakesArrayLiteral<Foo<T>> {
return [t]
}
@@ -113,9 +119,11 @@ func returnsAddressOnlyElementArray<T>(t: Foo<T>) -> TakesArrayLiteral<Foo<T>> {
// CHECK: ([[ARR:%.*]], [[ADDRESS:%.*]]) = destructure_tuple [[ARR_TMP]]
// CHECK: [[POINTER:%.*]] = pointer_to_address [[ADDRESS]]
// CHECK: copy_addr %0 to [initialization] [[POINTER]] : $*Foo<T>
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Foo<T>>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<Foo<T>>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<Foo<T>>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo<T>>([[ARR]], [[METATYPE]])
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo<T>>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
extension Foo {
func returnsArrayFromSelf() -> TakesArrayLiteral<Foo<T>> {
@@ -132,9 +140,11 @@ extension Foo {
// CHECK: [[ACCESS:%.*]] = begin_access [read] [unknown] %0 : $*Foo<T>
// CHECK: copy_addr [[ACCESS]] to [initialization] [[POINTER]] : $*Foo<T>
// CHECK: end_access [[ACCESS]] : $*Foo<T>
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Foo<T>>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<Foo<T>>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<Foo<T>>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo<T>>([[ARR]], [[METATYPE]])
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo<T>>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
extension Foo {
mutating func returnsArrayFromMutatingSelf() -> TakesArrayLiteral<Foo<T>> {
@@ -159,9 +169,11 @@ struct Foo2 {
// CHECK: [[CTOR:%.*]] = function_ref @$s8literals4Foo2V1kAcA5KlassC_tcfC : $@convention(method) (@owned Klass, @thin Foo2.Type) -> @owned Foo2
// CHECK: [[TMP:%.*]] = apply [[CTOR]]([[K]], [[METATYPE_FOO2]]) : $@convention(method) (@owned Klass, @thin Foo2.Type) -> @owned Foo2
// store [[TMP]] to [init] [[POINTER]] : $*Foo2
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Foo2>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<Foo2>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<Foo2>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo2>([[ARR]], [[METATYPE]])
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Foo2>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
func returnsNonTrivialStruct() -> TakesArrayLiteral<Foo2> {
return [Foo2(k: Klass())]
}
@@ -180,10 +192,12 @@ func returnsNonTrivialStruct() -> TakesArrayLiteral<Foo2> {
// CHECK: [[TMP:%.*]] = apply [[OTHER_FN]]([[ACCESS]]) : $@convention(method) (@inout NestedLValuePath) -> @owned NestedLValuePath
// CHECK: end_access [[ACCESS]] : $*NestedLValuePath
// CHECK: store [[TMP]] to [init] [[POINTER]] : $*NestedLValuePath
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<NestedLValuePath>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<NestedLValuePath>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<NestedLValuePath>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[ARR_RESULT:%.*]] = apply [[CTOR]]<NestedLValuePath>([[ARR]], [[METATYPE]])
-// CHECK: [[CTOR:%.*]] = function_ref @$s8literals16NestedLValuePathV3arrAcA17TakesArrayLiteralCyACG_tcfC : $@convention(method) (@owned TakesArrayLiteral<NestedLValuePath>, @thin NestedLValuePath.Type) -> @owned NestedLValuePath // user: %18
+// CHECK: [[ARR_RESULT:%.*]] = apply [[CTOR]]<NestedLValuePath>([[FIN_ARR]], [[METATYPE]])
+// CHECK: [[CTOR:%.*]] = function_ref @$s8literals16NestedLValuePathV3arrAcA17TakesArrayLiteralCyACG_tcfC : $@convention(method) (@owned TakesArrayLiteral<NestedLValuePath>, @thin NestedLValuePath.Type) -> @owned NestedLValuePath
// CHECK: [[RESULT:%.*]] = apply [[CTOR]]([[ARR_RESULT]], [[METATYPE_NESTED]]) : $@convention(method) (@owned TakesArrayLiteral<NestedLValuePath>, @thin NestedLValuePath.Type) -> @owned NestedLValuePath
// CHECK: [[ACCESS:%.*]] = begin_access [modify] [unknown] %0 : $*NestedLValuePath
// CHECK: assign [[RESULT]] to [[ACCESS]] : $*NestedLValuePath
@@ -214,9 +228,11 @@ protocol WrapsSelfInArray {}
// CHECK: [[EXISTENTIAL:%.*]] = init_existential_addr [[POINTER]] : $*WrapsSelfInArray, $Self
// CHECK: copy_addr [[ACCESS]] to [initialization] [[EXISTENTIAL]] : $*Self
// CHECK: end_access [[ACCESS]] : $*Self
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<WrapsSelfInArray>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesArrayLiteral<WrapsSelfInArray>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesArrayLiteral<WrapsSelfInArray>.Type, #TakesArrayLiteral.init!allocator : <Element> (TakesArrayLiteral<Element>.Type) -> (Element...) -> TakesArrayLiteral<Element>, $@convention(method)
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<WrapsSelfInArray>([[ARR]], [[METATYPE]])
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<WrapsSelfInArray>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
extension WrapsSelfInArray {
mutating func wrapInArray() -> TakesArrayLiteral<WrapsSelfInArray> {
@@ -245,7 +261,9 @@ func makeBasic<T>() -> T { return T() }
// CHECK: try_apply [[FN]]([[POINTER1]]) : {{.*}} normal bb1, error bb2
// CHECK: bb1([[TMP:%.*]] : $()):
-// CHECK: return [[ARR]]
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<T>([[ARR]])
+// CHECK: return [[FIN_ARR]]
// CHECK: bb2([[ERR:%.*]] : @owned $Error):
// CHECK: destroy_addr [[POINTER]] : $*T
@@ -278,9 +296,11 @@ class TakesDictionaryLiteral<Key, Value> : ExpressibleByDictionaryLiteral {
// CHECK: [[VALUE_ADDR:%.*]] = tuple_element_addr [[TUPLE_ADDR1]] : $*(Int, Int), 1
// CHECK: store [[TMP]] to [trivial] [[KEY_ADDR]] : $*Int
// CHECK: store [[TMP]] to [trivial] [[VALUE_ADDR]] : $*Int
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<(Int, Int)>([[ARR]])
// CHECK: [[METATYPE:%.*]] = metatype $@thick TakesDictionaryLiteral<Int, Int>.Type
// CHECK: [[CTOR:%.*]] = class_method [[METATYPE]] : $@thick TakesDictionaryLiteral<Int, Int>.Type, #TakesDictionaryLiteral.init!allocator : <Key, Value> (TakesDictionaryLiteral<Key, Value>.Type) -> ((Key, Value)...) -> TakesDictionaryLiteral<Key, Value>, $@convention(method) <τ_0_0, τ_0_1> (@owned Array<(τ_0_0, τ_0_1)>, @thick TakesDictionaryLiteral<τ_0_0, τ_0_1>.Type) -> @owned TakesDictionaryLiteral<τ_0_0, τ_0_1>
-// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Int, Int>(%8, %21)
+// CHECK: [[RESULT:%.*]] = apply [[CTOR]]<Int, Int>([[FIN_ARR]], [[METATYPE]])
// CHECK: return [[RESULT]]
func returnsCustomDictionary() -> TakesDictionaryLiteral<Int, Int> {
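All of the literals.swift checks change in the same way: after the element stores into the uninitialized buffer, the literal is funneled through the new `_finalizeUninitializedArray` intrinsic, and the consumer receives the finalized array instead of the raw allocation. Judging from the mangled name `$ss27_finalizeUninitializedArrayySayxGABnlF` and the `array.finalize_intrinsic` semantics attribute, the stdlib entry point has roughly this shape (a sketch for orientation, not the verbatim stdlib source; the `_endMutation` extension below is a stand-in for the real end-of-mutation hook):

```swift
extension Array {
  // Stand-in for the stdlib's "array.end_mutation" entry point.
  mutating func _endMutation() {}
}

@_semantics("array.finalize_intrinsic")
func _finalizeUninitializedArray<Element>(
  _ array: __owned [Element]
) -> [Element] {
  var result = array
  result._endMutation()  // expected to lower to end_cow_mutation
  return result
}
```

Keeping this a separate call with its own semantics attribute, rather than emitting `end_cow_mutation` directly in SILGen, lets the array optimizations recognize the finalization point and remove or fold it later.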
diff --git a/test/SILGen/objc_bridging_array.swift b/test/SILGen/objc_bridging_array.swift
index 33c7d136797c8..77d58276290f3 100644
--- a/test/SILGen/objc_bridging_array.swift
+++ b/test/SILGen/objc_bridging_array.swift
@@ -25,11 +25,13 @@ func setChildren(p: Parent, c: Child) {
// CHECK: [[BUFFER:%.*]] = pointer_to_address [[BUFFER_PTR]] : $Builtin.RawPointer to [strict] $*Child
// CHECK: [[CHILD:%.*]] = copy_value %1 : $Child
// CHECK: store [[CHILD]] to [init] [[BUFFER]] : $*Child
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Child>([[ARRAY]])
// CHECK: [[FN:%.*]] = function_ref @$sSa10FoundationE19_bridgeToObjectiveCSo7NSArrayCyF : $@convention(method) <τ_0_0> (@guaranteed Array<τ_0_0>) -> @owned NSArray
-// CHECK: [[BORROW_ARRAY:%.*]] = begin_borrow [[ARRAY]] : $Array<Child>
+// CHECK: [[BORROW_ARRAY:%.*]] = begin_borrow [[FIN_ARR]] : $Array<Child>
// CHECK: [[BRIDGED_ARRAY:%.*]] = apply [[FN]]([[BORROW_ARRAY]]) : $@convention(method) <τ_0_0> (@guaranteed Array<τ_0_0>) -> @owned NSArray
// CHECK: end_borrow [[BORROW_ARRAY]] : $Array<Child>
-// CHECK: destroy_value [[ARRAY]] : $Array<Child>
+// CHECK: destroy_value [[FIN_ARR]] : $Array<Child>
// CHECK: [[FN:%.*]] = objc_method [[COPIED]] : $[[OPENED_TYPE]], #Parent.children!setter.foreign : <Self where Self : Parent> (Self) -> ([Child]) -> (), $@convention(objc_method) <τ_0_0 where τ_0_0 : Parent> (NSArray, τ_0_0) -> ()
// CHECK: apply [[FN]]<[[OPENED_TYPE]]>([[BRIDGED_ARRAY]], [[COPIED]]) : $@convention(objc_method) <τ_0_0 where τ_0_0 : Parent> (NSArray, τ_0_0) -> ()
// CHECK: destroy_value [[BRIDGED_ARRAY]] : $NSArray
diff --git a/test/SILGen/scalar_to_tuple_args.swift b/test/SILGen/scalar_to_tuple_args.swift
index a549998b8f8da..67888cc061196 100644
--- a/test/SILGen/scalar_to_tuple_args.swift
+++ b/test/SILGen/scalar_to_tuple_args.swift
@@ -58,14 +58,18 @@ tupleWithDefaults(x: (x,x))
// CHECK: [[ADDR:%.*]] = pointer_to_address [[MEMORY]]
// CHECK: [[READ:%.*]] = begin_access [read] [dynamic] [[X_ADDR]] : $*Int
// CHECK: copy_addr [[READ]] to [initialization] [[ADDR]]
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Int>([[ARRAY]])
// CHECK: [[VARIADIC_FIRST:%.*]] = function_ref @$s20scalar_to_tuple_args13variadicFirstyySid_tF
-// CHECK: apply [[VARIADIC_FIRST]]([[ARRAY]])
+// CHECK: apply [[VARIADIC_FIRST]]([[FIN_ARR]])
variadicFirst(x)
// CHECK: [[READ:%.*]] = begin_access [read] [dynamic] [[X_ADDR]] : $*Int
// CHECK: [[X:%.*]] = load [trivial] [[READ]]
// CHECK: [[ALLOC_ARRAY:%.*]] = apply {{.*}} -> (@owned Array<τ_0_0>, Builtin.RawPointer)
// CHECK: ([[ARRAY:%.*]], [[MEMORY:%.*]]) = destructure_tuple [[ALLOC_ARRAY]]
+// CHECK: [[FIN_FN:%.*]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+// CHECK: [[FIN_ARR:%.*]] = apply [[FIN_FN]]<Int>([[ARRAY]])
// CHECK: [[VARIADIC_SECOND:%.*]] = function_ref @$s20scalar_to_tuple_args14variadicSecondyySi_SidtF
-// CHECK: apply [[VARIADIC_SECOND]]([[X]], [[ARRAY]])
+// CHECK: apply [[VARIADIC_SECOND]]([[X]], [[FIN_ARR]])
variadicSecond(x)
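The scalar_to_tuple_args checks track the same change for implicit variadic arrays. The source being compiled is presumably along these lines (hypothetical declarations reconstructed from the mangled names in the checks):

```swift
func variadicFirst(_ x: Int...) {}
func variadicSecond(_ x: Int, _ y: Int...) {}

// Each call site allocates the implicit [Int], stores the arguments,
// and now passes the result of _finalizeUninitializedArray on to the
// callee instead of the raw allocation.
variadicFirst(1)
variadicSecond(1, 2)
```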
diff --git a/test/SILOptimizer/OSLogMandatoryOptTest.sil b/test/SILOptimizer/OSLogMandatoryOptTest.sil
index 470958372f323..3fefcf0d78cf7 100644
--- a/test/SILOptimizer/OSLogMandatoryOptTest.sil
+++ b/test/SILOptimizer/OSLogMandatoryOptTest.sil
@@ -414,11 +414,13 @@ bb0:
// CHECK: [[INDEX2:%[0-9]+]] = integer_literal $Builtin.Word, 2
// CHECK: [[INDEXADDR2:%[0-9]+]] = index_addr [[STORAGEADDR]] : $*Int64, [[INDEX2]] : $Builtin.Word
// CHECK: store [[ELEM3INT]] to [trivial] [[INDEXADDR2]] : $*Int64
- // CHECK: [[BORROW:%[0-9]+]] = begin_borrow [[ARRAY]]
+ // CHECK: [[FINALIZEREF:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK: [[FINALIZED:%[0-9]+]] = apply [[FINALIZEREF]]<Int64>([[ARRAY]])
+ // CHECK: [[BORROW:%[0-9]+]] = begin_borrow [[FINALIZED]]
// CHECK: [[USEREF:%[0-9]+]] = function_ref @useArray
// CHECK: apply [[USEREF]]([[BORROW]])
// CHECK: end_borrow [[BORROW]]
- // CHECK: destroy_value [[ARRAY]] : $Array<Int64>
+ // CHECK: destroy_value [[FINALIZED]] : $Array<Int64>
}
/// A stub for OSLogMessage.init. The optimization is driven by this function.
@@ -538,11 +540,13 @@ bb0:
// CHECK: ([[ARRAY:%[0-9]+]], [[STORAGEPTR:%[0-9]+]]) = destructure_tuple [[TUPLE]]
// CHECK: [[STORAGEADDR:%[0-9]+]] = pointer_to_address [[STORAGEPTR]] : $Builtin.RawPointer to [strict] $*String
// CHECK: store [[STRINGCONST]] to [init] [[STORAGEADDR]] : $*String
- // CHECK: [[BORROW:%[0-9]+]] = begin_borrow [[ARRAY]]
+ // CHECK: [[FINALIZEREF:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK: [[FINALIZED:%[0-9]+]] = apply [[FINALIZEREF]]<String>([[ARRAY]])
+ // CHECK: [[BORROW:%[0-9]+]] = begin_borrow [[FINALIZED]]
// CHECK: [[USEREF:%[0-9]+]] = function_ref @useArrayString
// CHECK: apply [[USEREF]]([[BORROW]])
// CHECK: end_borrow [[BORROW]]
- // CHECK: destroy_value [[ARRAY]] : $Array<String>
+ // CHECK: destroy_value [[FINALIZED]] : $Array<String>
}
sil [ossa] [Onone] [_semantics "constant_evaluable"] [_semantics "oslog.message.init_stub"] @oslogMessageArrayInterpolationInit : $@convention(thin) (@owned OSLogInterpolationArrayStub)
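Both OSLogMandatoryOptTest.sil hunks pin down the same invariant: even after the mandatory OSLog optimization constant-folds the interpolation into plain stores, the freshly built argument array must still be finalized before its first borrow. The source-level pattern the SIL encodes is roughly this (hypothetical `useArray` stub mirroring the one named in the checks):

```swift
@_silgen_name("useArray")
func useArray(_ args: [Int64])

func buildsConstantArray() {
  // Lowers to _allocateUninitializedArray, three element stores,
  // _finalizeUninitializedArray, then a borrow for the call.
  useArray([1, 2, 3])
}
```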
diff --git a/test/SILOptimizer/OSLogMandatoryOptTest.swift b/test/SILOptimizer/OSLogMandatoryOptTest.swift
index 3ea45c0a316a7..a41a1ad7ba34f 100644
--- a/test/SILOptimizer/OSLogMandatoryOptTest.swift
+++ b/test/SILOptimizer/OSLogMandatoryOptTest.swift
@@ -56,7 +56,9 @@ func testSimpleInterpolation() {
// We need to wade through some borrows and copy values here.
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -105,7 +107,9 @@ func testInterpolationWithFormatOptions() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -156,7 +160,9 @@ func testInterpolationWithFormatOptionsAndPrivacy() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -213,7 +219,9 @@ func testInterpolationWithMultipleArguments() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -359,7 +367,9 @@ func testMessageWithTooManyArguments() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -452,7 +462,9 @@ func testDynamicStringArguments() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -508,7 +520,9 @@ func testNSObjectInterpolation() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply {{.*}}<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
@@ -559,7 +573,9 @@ func testDoubleInterpolation() {
// CHECK-DAG: store_borrow [[ARGSARRAY2:%[0-9]+]] to [[ARGSARRAYADDR]]
// CHECK-DAG: [[ARGSARRAY2]] = begin_borrow [[ARGSARRAY3:%[0-9]+]]
// CHECK-DAG: [[ARGSARRAY3]] = copy_value [[ARGSARRAY4:%[0-9]+]]
- // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[ARGSARRAY:%[0-9]+]]
+ // CHECK-DAG: [[ARGSARRAY4]] = begin_borrow [[FINARR:%[0-9]+]]
+ // CHECK-DAG: [[FINARRFUNC:%[0-9]+]] = function_ref @$ss27_finalizeUninitializedArrayySayxGABnlF
+ // CHECK-DAG: [[FINARR]] = apply [[FINARRFUNC]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARGSARRAY:%[0-9]+]])
// CHECK-DAG: ([[ARGSARRAY]], {{%.*}}) = destructure_tuple [[ARRAYINITRES:%[0-9]+]]
// CHECK-DAG: [[ARRAYINITRES]] = apply [[ARRAYINIT:%[0-9]+]]<(inout UnsafeMutablePointer, inout Array) -> ()>([[ARRAYSIZE:%[0-9]+]])
// CHECK-DAG: [[ARRAYINIT]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
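The next file covers the COW array optimization, which now hoists matched `array.make_mutable` / `array.end_mutation` pairs out of loops. As a mental model, every mutation of a copy-on-write value is bracketed like this (a simplified Swift analogue, not the real Array internals):

```swift
final class Storage {          // stand-in for the array buffer
  var elements = [0, 0, 0, 0]
}

struct COWValue {
  private var storage = Storage()

  // Analogue of the "array.make_mutable" semantics function.
  private mutating func makeMutable() {
    if !isKnownUniquelyReferenced(&storage) {
      let copy = Storage()
      copy.elements = storage.elements  // copy before writing
      storage = copy
    }
  }

  mutating func set(_ i: Int, _ x: Int) {
    makeMutable()              // start of the mutation bracket
    storage.elements[i] = x
    // "array.end_mutation" closes the bracket here; the optimizer can
    // hoist both ends out of a loop when uniqueness is loop-invariant.
  }
}
```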
diff --git a/test/SILOptimizer/cowarray_opt.sil b/test/SILOptimizer/cowarray_opt.sil
index 10ad28c6e0bf2..77873a8536090 100644
--- a/test/SILOptimizer/cowarray_opt.sil
+++ b/test/SILOptimizer/cowarray_opt.sil
@@ -46,6 +46,7 @@ class MyArrayStorage {
}
sil [_semantics "array.make_mutable"] @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
+sil [_semantics "array.end_mutation"] @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
sil [_semantics "array.get_count"] @guaranteed_array_get_count : $@convention(method) (@guaranteed MyArray) -> Int
sil [_semantics "array.get_capacity"] @guaranteed_array_get_capacity : $@convention(method) (@guaranteed MyArray) -> Int
sil [_semantics "array.mutate_unknown"] @array_unknown_mutate : $@convention(method) (@inout MyArray) -> ()
@@ -59,11 +60,14 @@ sil @unknown : $@convention(thin) () -> ()
// CHECK-LABEL: sil @simple_hoist
// CHECK: bb0([[ARRAY:%[0-9]+]]
-// CHECK: [[FUN:%[0-9]+]] = function_ref @array_make_mutable
-// CHECK: apply [[FUN]]([[ARRAY]]
-// CHECK: bb1
-// CHECK-NOT: array_make_mutable
-// CHECK-NOT: apply [[FUN]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: bb1:
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'simple_hoist'
sil @simple_hoist : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
debug_value_addr %0 : $*MyArray
@@ -73,24 +77,29 @@ bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
bb1:
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @hoist_ignoring_paired_retain_release_and_hoist
-// CHECK: bb0(
-// CHECK-NOT: br bb
-// CHECK: [[MM:%.*]] = function_ref @array_make_mutable
-// CHECK-NOT: br bb
-// CHECK: apply [[MM]]
-// CHECK: br bb1
+// CHECK: bb0([[ARRAY:%[0-9]+]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: apply
+// CHECK: retain
+// CHECK: release
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: cond_br {{.*}}, bb1
+// CHECK: } // end sil function 'hoist_ignoring_paired_retain_release_and_hoist'
sil @hoist_ignoring_paired_retain_release_and_hoist : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
%2 = load %0 : $*MyArray
@@ -103,11 +112,13 @@ bb1:
release_value %2 : $MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @hoist_blocked_by_unpaired_retain_release_1
@@ -129,11 +140,13 @@ bb1:
%4 = load %0 : $*MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @hoist_blocked_by_unpaired_retain_release_2
@@ -151,32 +164,37 @@ bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
bb1:
%10 = load %0 : $*MyArray
%11 = load %0 : $*MyArray
- %8 = struct_extract %10 : $MyArray, #MyArray.buffer
- %9 = struct_extract %11 : $MyArray, #MyArray.buffer
- retain_value %8 : $ArrayIntBuffer
- retain_value %9 : $ArrayIntBuffer
- release_value %9 : $ArrayIntBuffer
+ %12 = struct_extract %10 : $MyArray, #MyArray.buffer
+ %13 = struct_extract %11 : $MyArray, #MyArray.buffer
+ retain_value %12 : $ArrayIntBuffer
+ retain_value %13 : $ArrayIntBuffer
+ release_value %13 : $ArrayIntBuffer
%3 = load %1 : $*Builtin.Int1
%4 = load %0 : $*MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
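The tests above differ only in how the loop touches the buffer's reference count: a retain that is paired with a release inside the iteration leaves uniqueness intact, so the bracket can still be hoisted, while an unpaired retain may publish the buffer and blocks hoisting. A Swift-level intuition (illustrative only; assumes `a` is non-empty):

```swift
func mutatePerIteration(_ a: inout [Int], _ n: Int) {
  var kept: [[Int]] = []
  for i in 0..<n {
    let tmp = a        // retain of the buffer...
    _ = tmp            // ...released in the same iteration: hoistable
    // kept.append(a)  // unpaired retain: buffer no longer unique,
    //                 // would block hoisting the bracket
    a[0] = i           // make_mutable / end_mutation bracket
  }
  _ = kept
}
```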
// CHECK-LABEL: sil @hoist_not_blocked_by_unpaired_release
-// CHECK: bb0(
-// CHECK-NOT: br bb
-// CHECK: [[MM:%.*]] = function_ref @array_make_mutable
-// CHECK-NOT: br bb
-// CHECK: apply [[MM]]
-// CHECK: br bb1
+// CHECK: bb0([[ARRAY:%[0-9]+]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: apply
-// CHECK: cond_br {{.*}}, bb1
+// CHECK: load
+// CHECK: load
+// CHECK: release
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'hoist_not_blocked_by_unpaired_release'
sil @hoist_not_blocked_by_unpaired_release : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
%2 = load %0 : $*MyArray
@@ -188,11 +206,13 @@ bb1:
release_value %2 : $MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @dont_hoist_if_executed_conditionally
@@ -215,8 +235,10 @@ bb2:
// If this block is never taken, then hoisting to bb0 would change the value of %p3.
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
- %7 = load %0 : $*MyArray
- br bb4(%7 : $MyArray)
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
+ %9 = load %0 : $*MyArray
+ br bb4(%9 : $MyArray)
bb3:
br bb4(%p1 : $MyArray)
@@ -229,15 +251,20 @@ bb5(%p3 : $MyArray):
}
// CHECK-LABEL: sil @cow_should_ignore_mark_dependence_addrproj_use : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
-// CHECK: bb0(
-// CHECK-NOT: br bb
-// CHECK: [[MM:%.*]] = function_ref @array_make_mutable
-// CHECK-NOT: br bb
-// CHECK: apply [[MM]]
-// CHECK: br bb1
+// CHECK: bb0([[ARRAY:%[0-9]+]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: apply
-// CHECK: cond_br {{.*}}, bb1
+// CHECK: retain
+// CHECK: load
+// CHECK: load
+// CHECK: release
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: mark_dependence
+// CHECK: } // end sil function 'cow_should_ignore_mark_dependence_addrproj_use'
sil @cow_should_ignore_mark_dependence_addrproj_use : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
%999 = struct_element_addr %0 : $*MyArray, #MyArray.buffer
@@ -253,24 +280,31 @@ bb1:
release_value %2 : $MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
mark_dependence %1 : $*Builtin.Int1 on %99999 : $Builtin.NativeObject
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @cow_should_ignore_mark_dependence_value : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
-// CHECK: bb0(
-// CHECK-NOT: br bb
-// CHECK: [[MM:%.*]] = function_ref @array_make_mutable
-// CHECK-NOT: br bb
-// CHECK: apply [[MM]]
-// CHECK: br bb1
+// CHECK: bb0([[ARRAY:%[0-9]+]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: apply
-// CHECK: cond_br {{.*}}, bb1
+// CHECK: retain
+// CHECK: load
+// CHECK: load
+// CHECK: release
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: mark_dependence
+// CHECK: } // end sil function 'cow_should_ignore_mark_dependence_value'
sil @cow_should_ignore_mark_dependence_value : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
%2 = load %0 : $*MyArray
@@ -283,24 +317,32 @@ bb1:
release_value %2 : $MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
mark_dependence %1 : $*Builtin.Int1 on %2 : $MyArray
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @cow_should_ignore_enum : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
-// CHECK: bb0(
-// CHECK-NOT: br bb
-// CHECK: [[MM:%.*]] = function_ref @array_make_mutable
-// CHECK-NOT: br bb
-// CHECK: apply [[MM]]
-// CHECK: br bb1
+// CHECK: bb0([[ARRAY:%[0-9]+]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: apply
-// CHECK: cond_br {{.*}}, bb1
+// CHECK: retain
+// CHECK: load
+// CHECK: load
+// CHECK: release
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: enum
+// CHECK: mark_dependence
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'cow_should_ignore_enum'
sil @cow_should_ignore_enum : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
%2 = load %0 : $*MyArray
@@ -313,32 +355,25 @@ bb1:
release_value %2 : $MyArray
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%0) : $@convention(method) (@inout MyArray) -> ()
- %8 = enum $Optional<MyArray>, #Optional.some!enumelt, %2 : $MyArray
- mark_dependence %1 : $*Builtin.Int1 on %8 : $Optional<MyArray>
+ %e = enum $Optional<MyArray>, #Optional.some!enumelt, %2 : $MyArray
+ mark_dependence %1 : $*Builtin.Int1 on %e : $Optional<MyArray>
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br %3, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
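The next test feeds the pass a long sequence of guaranteed semantic calls (`array.get_count`, `array.get_capacity`) between mutations of an array stored in a class. In source terms it models something like this (hypothetical container, for intuition only):

```swift
final class ArrayContainer {
  var array: [Int] = []
}

func touch(_ c: ArrayContainer, _ n: Int) {
  for i in 0..<n {
    // Reads through "guaranteed" semantic calls do not invalidate
    // the uniqueness established by make_mutable...
    if c.array.count < c.array.capacity {
      c.array.append(i)  // ...so the mutation bracket can be hoisted.
    }
  }
}
```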
// CHECK-LABEL: sil @cow_should_ignore_guaranteed_semantic_call_sequence : $@convention(thin) (@guaranteed MyArrayContainer, Builtin.NativeObject) -> () {
// CHECK: bb0
-// CHECK: [[F:%.*]] = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
-// CHECK: apply [[F]](
+// CHECK-DAG: [[MM:%[0-9]+]] = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
+// CHECK-DAG: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[MM]](
+// CHECK: apply [[EM]](
// CHECK: bb1:
-// CHECK: bb2:
-// CHECK-NOT: apply [[F]](
-// CHECK: bb3:
-// CHECK: bb4:
-// CHECK-NOT: apply [[F]](
-// CHECK: bb5:
-// CHECK: bb6:
-// CHECK-NOT: apply [[F]](
-// CHECK: bb7:
-// CHECK: bb8:
-// CHECK-NOT: apply [[F]](
-// CHECK: bb9:
+// CHECK: } // end sil function 'cow_should_ignore_guaranteed_semantic_call_sequence'
sil @cow_should_ignore_guaranteed_semantic_call_sequence : $@convention(thin) (@guaranteed MyArrayContainer, Builtin.NativeObject) -> () {
bb0(%0 : $MyArrayContainer, %00 : $Builtin.NativeObject):
%1 = ref_element_addr %0 : $MyArrayContainer, #MyArrayContainer.array
@@ -347,6 +382,7 @@ bb0(%0 : $MyArrayContainer, %00 : $Builtin.NativeObject):
%4 = function_ref @guaranteed_array_get_capacity : $@convention(method) (@guaranteed MyArray) -> Int
%5 = function_ref @unknown : $@convention(thin) () -> ()
%6 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
br bb1
bb1:
@@ -356,6 +392,7 @@ bb1:
apply %4(%2) : $@convention(method) (@guaranteed MyArray) -> Int
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %7(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
@@ -369,6 +406,7 @@ bb3:
fix_lifetime %0 : $MyArrayContainer
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %7(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb4, bb3
bb4:
@@ -382,6 +420,7 @@ bb5:
apply %4(%2) : $@convention(method) (@guaranteed MyArray) -> Int
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %7(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb5, bb6
bb6:
@@ -395,6 +434,7 @@ bb7:
apply %4(%2) : $@convention(method) (@guaranteed MyArray) -> Int
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %7(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb7, bb8
bb8:
@@ -409,21 +449,30 @@ bb9:
release_value %00 : $Builtin.NativeObject
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %7(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb9, bb10
bb10:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK: sil @cow_handle_array_address_load
-// CHECK: bb0({{.*}}):
-// CHECK: apply
-// CHECK: br bb1
+// CHECK: bb0([[ARRAY:%[0-9]+]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: apply
-// CHECK: bb2
-
+// CHECK: load
+// CHECK: retain
+// CHECK: load
+// CHECK: release
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: enum
+// CHECK: mark_dependence
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'cow_handle_array_address_load'
sil @cow_handle_array_address_load : $@convention(thin) (@inout MyArray, @inout Builtin.Int1) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
%2 = load %0 : $*MyArray
@@ -434,13 +483,15 @@ bb0(%0 : $*MyArray, %1 : $*Builtin.Int1):
bb1:
%6 = load %4 : $*Builtin.NativeObject
strong_retain %6 : $Builtin.NativeObject
- %8 = load %1 : $*Builtin.Int1
+ %l = load %1 : $*Builtin.Int1
strong_release %6 : $Builtin.NativeObject
%10 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%11 = apply %10(%0) : $@convention(method) (@inout MyArray) -> ()
%12 = enum $Optional<MyArray>, #Optional.some!enumelt, %2 : $MyArray
%13 = mark_dependence %1 : $*Builtin.Int1 on %12 : $Optional<MyArray>
- cond_br %8, bb1, bb2
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%0) : $@convention(method) (@inout MyArray) -> ()
+ cond_br %l, bb1, bb2
bb2:
%15 = tuple ()
@@ -468,6 +519,7 @@ bb0(%0 : $MyArrayContainer, %00 : $Builtin.NativeObject):
%5 = function_ref @unknown : $@convention(thin) () -> ()
%6 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%7 = function_ref @array_unknown_mutate : $@convention(method) (@inout MyArray) -> ()
+ %9 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
br bb1
bb1:
@@ -478,6 +530,7 @@ bb1:
apply %4(%2) : $@convention(method) (@guaranteed MyArray) -> Int
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %9(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
@@ -490,6 +543,7 @@ bb3:
apply %4(%2) : $@convention(method) (@guaranteed MyArray) -> Int
release_value %2 : $MyArray
apply %6(%1) : $@convention(method) (@inout MyArray) -> ()
+ apply %9(%1) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb3, bb4
bb4:
@@ -516,14 +570,18 @@ struct MyInt {
}
// CHECK-LABEL: sil @hoist_projections
-// CHECK: bb0([[CONTAINER:%[0-9]+]]
-// CHECK: [[CONTAINER2:%.*]] = struct_element_addr [[CONTAINER]] : $*ContainerContainer
-// CHECK: [[ARRAY:%.*]] = struct_element_addr [[CONTAINER2]] : $*Container,
-// CHECK: [[FUN:%[0-9]+]] = function_ref @array_make_mutable
-// CHECK: apply [[FUN]]([[ARRAY]]
-// CHECK: bb1
-// CHECK-NOT: array_make_mutable
-// CHECK-NOT: apply [[FUN]]
+// CHECK: bb0
+// CHECK: [[SE:%[0-9]+]] = struct_element_addr %0
+// CHECK: [[ARRAY:%[0-9]+]] = struct_element_addr [[SE]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: bb1:
+// CHECK: bb2({{.*}}):
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'hoist_projections'
sil @hoist_projections : $@convention(thin) (@inout ContainerContainer, @inout Builtin.Int1) -> () {
bb0(%0 : $*ContainerContainer, %1 : $*Builtin.Int1):
br bb1
@@ -536,19 +594,29 @@ bb3(%3: $*Container):
%4 = struct_element_addr %3 : $*Container, #Container.array
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%4) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%4) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @hoist_non_unary_projections
-// CHECK: index_addr
-// CHECK: struct_element_addr
+// CHECK: bb0
+// CHECK: [[SE:%[0-9]+]] = struct_element_addr %0
+// CHECK: [[IA:%[0-9]+]] = index_addr [[SE]]
+// CHECK: [[ARRAY:%[0-9]+]] = struct_element_addr [[IA]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
// CHECK: bb1:
-// CHECK-NOT: index_addr
-// CHECK-NOT: struct_element_addr
+// CHECK: bb2({{.*}}):
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'hoist_non_unary_projections'
sil @hoist_non_unary_projections : $@convention(thin) (@inout ContainerContainer, @inout Builtin.Int1) -> () {
bb0(%0 : $*ContainerContainer, %1 : $*Builtin.Int1):
%i = integer_literal $Builtin.Int32, 0
@@ -563,22 +631,28 @@ bb2(%3: $*Container):
%4 = struct_element_addr %3i : $*Container, #Container.array
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%4) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%4) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb3
bb3:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @hoist_projections2
-// CHECK: bb0([[CONTAINER:%[0-9]+]]
-// CHECK: [[CONTAINER2:%.*]] = struct_element_addr [[CONTAINER]] : $*ContainerContainer
-// CHECK: [[ARRAY:%.*]] = struct_element_addr [[CONTAINER2]] : $*Container,
-// CHECK: [[FUN:%[0-9]+]] = function_ref @array_make_mutable
-// CHECK: apply [[FUN]]([[ARRAY]]
-// CHECK: bb1
-// CHECK-NOT: array_make_mutable
-// CHECK-NOT: apply [[FUN]]
+// CHECK: bb0
+// CHECK: [[SE:%[0-9]+]] = struct_element_addr %0
+// CHECK: [[ARRAY:%[0-9]+]] = struct_element_addr [[SE]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: bb1:
+// CHECK: bb2({{.*}}):
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]
+// CHECK: } // end sil function 'hoist_projections2'
sil @hoist_projections2 : $@convention(thin) (@inout ContainerContainer, @inout Builtin.Int1) -> () {
bb0(%0 : $*ContainerContainer, %1 : $*Builtin.Int1):
br bb1
@@ -591,22 +665,27 @@ bb1:
bb3(%4 : $*MyArray):
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%4) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%4) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
// CHECK-LABEL: sil @hoist_projections3
-// CHECK: bb0([[CONTAINER:%[0-9]+]]
-// CHECK: [[CONTAINER2:%.*]] = struct_element_addr [[CONTAINER]] : $*ContainerContainer
-// CHECK: [[ARRAY:%.*]] = struct_element_addr [[CONTAINER2]] : $*Container,
-// CHECK: [[FUN:%[0-9]+]] = function_ref @array_make_mutable
-// CHECK: apply [[FUN]]([[ARRAY]]
-// CHECK: bb1
-// CHECK-NOT: array_make_mutable
-// CHECK-NOT: apply [[FUN]]
+// CHECK: bb0
+// CHECK: [[SE:%[0-9]+]] = struct_element_addr %0
+// CHECK: [[ARRAY:%[0-9]+]] = struct_element_addr [[SE]]
+// CHECK: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: bb1:
+// CHECK: apply [[MM]]([[ARRAY]]
+// CHECK: apply [[EM]]([[ARRAY]]
+// CHECK: } // end sil function 'hoist_projections3'
sil @hoist_projections3 : $@convention(thin) (@inout ContainerContainer, @inout Builtin.Int1) -> () {
bb0(%0 : $*ContainerContainer, %1 : $*Builtin.Int1):
br bb1
@@ -616,26 +695,35 @@ bb1:
%3 = struct_element_addr %2 : $*Container, #Container.array
%5 = function_ref @array_make_mutable : $@convention(method) (@inout MyArray) -> ()
%6 = apply %5(%3) : $@convention(method) (@inout MyArray) -> ()
+ %7 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %8 = apply %7(%3) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
- %7 = tuple()
- return %7 : $()
+ %r = tuple()
+ return %r : $()
}
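`hoist_array2d` and the two `dont_hoist_inner_*` tests below cover nested arrays: mutating an element of an inner array first uniques the outer buffer, then the inner buffer whose address lives inside it, and the two brackets must stay properly nested. The Swift shape of the situation:

```swift
func bumpColumn(_ matrix: inout [[Int]], _ j: Int) {
  for i in 0..<matrix.count {
    // Outer make_mutable, then an inner make_mutable/end_mutation on
    // the element buffer, then outer end_mutation. The inner bracket
    // is only hoistable when the outer one is, and only when the
    // element address (the index) is loop-invariant.
    matrix[i][j] += 1
  }
}
```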
// CHECK-LABEL: sil @hoist_array2d
-// CHECK: bb0({{.*}}):
-// CHECK: apply
-// CHECK-NEXT: load
-// CHECK-NEXT: struct_extract
-// CHECK-NEXT: struct_extract
-// CHECK-NEXT: unchecked_ref_cast
-// CHECK-NEXT: ref_tail_addr
-// CHECK-NEXT: index_addr
-// CHECK-NEXT: apply
-// CHECK-NEXT: br bb1
-// CHECK: bb1:
-// CHECK-NOT: apply
+// CHECK: bb0
+// CHECK-DAG: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK-DAG: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[MM]](%0)
+// CHECK: apply [[EM]](%0)
+// CHECK: [[L:%[0-9]+]] = load %0
+// CHECK: [[SE1:%[0-9]+]] = struct_extract [[L]]
+// CHECK: [[SE2:%[0-9]+]] = struct_extract [[SE1]]
+// CHECK: [[CAST:%[0-9]+]] = unchecked_ref_cast [[SE2]]
+// CHECK: [[TA:%[0-9]+]] = ref_tail_addr [[CAST]]
+// CHECK: [[ARR2:%[0-9]+]] = index_addr [[TA]]
+// CHECK: apply [[MM]]([[ARR2]]
+// CHECK: apply [[EM]]([[ARR2]]
+// CHECK: bb1:
+// CHECK: apply [[MM]](%0)
+// CHECK: apply [[MM]]([[ARR2]]
+// CHECK: apply [[EM]]([[ARR2]]
+// CHECK: apply [[EM]](%0)
+// CHECK: } // end sil function 'hoist_array2d'
sil @hoist_array2d : $@convention(thin) (@inout MyArray) -> () {
bb0(%0 : $*MyArray):
%2 = load %0 : $*MyArray
@@ -652,6 +740,9 @@ bb1:
%11 = ref_tail_addr %10 : $MyArrayStorage, $MyArray
%12 = index_addr %11 : $*MyArray, %3 : $Builtin.Word
%13 = apply %5(%12) : $@convention(method) (@inout MyArray) -> ()
+ %14 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %15 = apply %14(%12) : $@convention(method) (@inout MyArray) -> ()
+ %16 = apply %14(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
@@ -660,14 +751,23 @@ bb2:
}
// CHECK-LABEL: sil @dont_hoist_inner_mutating_outer
-// CHECK: bb0({{.*}}):
-// CHECK: apply
-// CHECK-NOT: apply
-// CHECK: bb1:
-// CHECK: apply
-// CHECK: apply
-// CHECK-NOT: apply
-// CHECK: return
+// CHECK: bb0
+// CHECK-DAG: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]](%0)
+// CHECK-DAG: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]](%0)
+// CHECK: bb1:
+// CHECK: apply [[MM]](%0)
+// CHECK: [[L:%[0-9]+]] = load %0
+// CHECK: [[SE1:%[0-9]+]] = struct_extract [[L]]
+// CHECK: [[SE2:%[0-9]+]] = struct_extract [[SE1]]
+// CHECK: [[CAST:%[0-9]+]] = unchecked_ref_cast [[SE2]]
+// CHECK: [[TA:%[0-9]+]] = ref_tail_addr [[CAST]]
+// CHECK: [[ARR2:%[0-9]+]] = index_addr [[TA]]
+// CHECK: apply [[MM]]([[ARR2]]
+// CHECK: apply [[EM]]([[ARR2]]
+// CHECK: apply [[EM]](%0)
+// CHECK: } // end sil function 'dont_hoist_inner_mutating_outer'
sil @dont_hoist_inner_mutating_outer : $@convention(thin) (@inout MyArray) -> () {
bb0(%0 : $*MyArray):
%2 = load %0 : $*MyArray
@@ -685,7 +785,10 @@ bb1:
%11 = ref_tail_addr %10 : $MyArrayStorage, $MyArray
%12 = index_addr %11 : $*MyArray, %3 : $Builtin.Word
%13 = apply %5(%12) : $@convention(method) (@inout MyArray) -> ()
- apply %4(%0) : $@convention(method) (@inout MyArray) -> ()
+ %14 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %15 = apply %14(%12) : $@convention(method) (@inout MyArray) -> ()
+ %16 = apply %4(%0) : $@convention(method) (@inout MyArray) -> ()
+ %17 = apply %14(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
@@ -694,13 +797,23 @@ bb2:
}
// CHECK-LABEL: sil @dont_hoist_inner_variant_index
-// CHECK: bb0({{.*}}):
-// CHECK: apply
-// CHECK-NOT: apply
-// CHECK: bb1:
-// CHECK: apply
-// CHECK-NOT: apply
-// CHECK: return
+// CHECK: bb0
+// CHECK-DAG: [[MM:%[0-9]+]] = function_ref @array_make_mutable
+// CHECK: apply [[MM]](%0)
+// CHECK-DAG: [[EM:%[0-9]+]] = function_ref @array_end_mutation
+// CHECK: apply [[EM]](%0)
+// CHECK: bb1:
+// CHECK: apply [[MM]](%0)
+// CHECK: [[L:%[0-9]+]] = load %0
+// CHECK: [[SE2:%[0-9]+]] = struct_extract [[SE1]]
+// CHECK: [[CAST:%[0-9]+]] = unchecked_ref_cast [[SE2]]
+// CHECK: [[TA:%[0-9]+]] = ref_tail_addr [[CAST]]
+// CHECK: [[ARR2:%[0-9]+]] = index_addr [[TA]]
+// CHECK: apply [[MM]]([[ARR2]]
+// CHECK: apply [[EM]]([[ARR2]]
+// CHECK: apply [[EM]](%0)
+// CHECK: } // end sil function 'dont_hoist_inner_variant_index'
sil @dont_hoist_inner_variant_index : $@convention(thin) (@inout MyArray, @inout Builtin.Word) -> () {
bb0(%0 : $*MyArray, %1 : $*Builtin.Word):
%2 = load %0 : $*MyArray
@@ -717,6 +830,9 @@ bb1:
%11 = ref_tail_addr %10 : $MyArrayStorage, $MyArray
%12 = index_addr %11 : $*MyArray, %4 : $Builtin.Word
%13 = apply %5(%12) : $@convention(method) (@inout MyArray) -> ()
+ %14 = function_ref @array_end_mutation : $@convention(method) (@inout MyArray) -> ()
+ %15 = apply %14(%12) : $@convention(method) (@inout MyArray) -> ()
+ %17 = apply %14(%0) : $@convention(method) (@inout MyArray) -> ()
cond_br undef, bb1, bb2
bb2:
diff --git a/test/SILOptimizer/globalopt-iter.sil b/test/SILOptimizer/globalopt-iter.sil
index 7e35d907d34c6..f1392b9b72ecd 100644
--- a/test/SILOptimizer/globalopt-iter.sil
+++ b/test/SILOptimizer/globalopt-iter.sil
@@ -19,8 +19,9 @@ class E : B { }
sil @patatino : $@convention(thin) () -> () {
%0 = integer_literal $Builtin.Word, 0
%1 = alloc_ref [tail_elems $Int64 * %0 : $Builtin.Word] $B
- set_deallocating %1 : $B
- dealloc_ref %1 : $B
+ %2 = end_cow_mutation %1 : $B
+ set_deallocating %2 : $B
+ dealloc_ref %2 : $B
%45 = tuple ()
return %45 : $()
}
diff --git a/test/SILOptimizer/licm.sil b/test/SILOptimizer/licm.sil
index 153834ffc0f62..6823dda54a085 100644
--- a/test/SILOptimizer/licm.sil
+++ b/test/SILOptimizer/licm.sil
@@ -355,6 +355,28 @@ bb4:
return %10 : $()
}
+sil @potential_escape : $@convention(thin) (@guaranteed RefElemClass) -> ()
+
+// CHECK-LABEL: sil @dont_hoist_begin_cow_mutation
+// CHECK: bb1:
+// CHECK-NEXT: begin_cow_mutation
+// CHECK-NEXT: end_cow_mutation
+// CHECK-NEXT: apply
+sil @dont_hoist_begin_cow_mutation : $@convention(thin) (@owned RefElemClass) -> @owned RefElemClass {
+bb0(%0 : $RefElemClass):
+  br bb1
+
+bb1:
+  (%u, %m) = begin_cow_mutation %0 : $RefElemClass
+  %b = end_cow_mutation %m : $RefElemClass
+  %f = function_ref @potential_escape : $@convention(thin) (@guaranteed RefElemClass) -> ()
+  %a = apply %f(%b) : $@convention(thin) (@guaranteed RefElemClass) -> ()
+  cond_br undef, bb1, bb2
+
+bb2:
+  return %b : $RefElemClass
+}
+
// CHECK-LABEL: sil @hoist_load_and_store
// CHECK: [[V1:%[0-9]+]] = load %0
// CHECK: br bb1([[V1]] : $Int32)
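The licm.sil addition pins down the side-effect modeling from SILNodes.def: `begin_cow_mutation` produces a uniqueness bit plus the (possibly copied) buffer, and LICM must not hoist it past a call that may retain that buffer. A Swift analogue of `dont_hoist_begin_cow_mutation` (a sketch; `potentialEscape` stands for any opaque callee):

```swift
final class Buffer { var x = 0 }

var escaped: Buffer?
func potentialEscape(_ b: Buffer) { escaped = b }  // may retain b

func loop(_ b: inout Buffer, _ n: Int) {
  for _ in 0..<n {
    // The uniqueness check must stay in the loop: the callee below
    // may have retained the buffer in a previous iteration.
    if !isKnownUniquelyReferenced(&b) { b = Buffer() }
    b.x += 1
    potentialEscape(b)
  }
}
```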
diff --git a/test/SILOptimizer/objectoutliner.sil b/test/SILOptimizer/objectoutliner.sil
index 5849310d06afe..fe9f45372645b 100644
--- a/test/SILOptimizer/objectoutliner.sil
+++ b/test/SILOptimizer/objectoutliner.sil
@@ -6,9 +6,10 @@ sil_stage canonical
import Builtin
import Swift
-class Obj {
+class Base {
+}
+
+class Obj : Base {
@_hasStorage var value: Int64
-  init()
}
// CHECK-LABEL: sil_global private @outline_global_simpleTv_ : $Obj = {
@@ -41,7 +42,30 @@ bb0:
%7 = alloc_ref [tail_elems $Int64 * %0 : $Builtin.Word] $Obj
%9 = ref_element_addr %7 : $Obj, #Obj.value
store %4 to %9 : $*Int64
- strong_release %7 : $Obj
+ %10 = end_cow_mutation %7 : $Obj
+ strong_release %10 : $Obj
%r = tuple ()
return %r : $()
}
+
+// CHECK-LABEL: sil @outline_global_with_upcast
+// CHECK: [[G:%[0-9]+]] = global_value @outline_global_with_upcastTv_ : $Obj
+// CHECK: strong_retain [[G]] : $Obj
+// CHECK: [[C:%[0-9]+]] = upcast [[G]] : $Obj to $Base
+// CHECK-NOT: store
+// CHECK: strong_release [[C]] : $Base
+// CHECK: return
+sil @outline_global_with_upcast : $@convention(thin) () -> () {
+bb0:
+  %0 = integer_literal $Builtin.Word, 0
+  %1 = integer_literal $Builtin.Int64, 1
+  %4 = struct $Int64 (%1 : $Builtin.Int64)
+  %7 = alloc_ref [tail_elems $Int64 * %0 : $Builtin.Word] $Obj
+  %8 = upcast %7 : $Obj to $Base
+  %9 = ref_element_addr %7 : $Obj, #Obj.value
+  store %4 to %9 : $*Int64
+  %10 = end_cow_mutation %8 : $Base
+  strong_release %10 : $Base
+  %r = tuple ()
+  return %r : $()
+}
@@ -70,7 +94,8 @@ bb0:
%19 = integer_literal $Builtin.Word, 1
%20 = index_addr %15 : $*Int64, %19 : $Builtin.Word
store %6 to %20 : $*Int64
- strong_release %7 : $Obj
+ %21 = end_cow_mutation %7 : $Obj
+ strong_release %21 : $Obj
%r = tuple ()
return %r : $()
}
@@ -90,8 +115,9 @@ bb0:
%5 = alloc_ref [tail_elems $Int64 * %0 : $Builtin.Word] $Obj
%6 = ref_element_addr %5 : $Obj, #Obj.value
store %4 to %6 : $*Int64
- set_deallocating %5 : $Obj
- dealloc_ref %5 : $Obj
+ %10 = end_cow_mutation %5 : $Obj
+ set_deallocating %10 : $Obj
+ dealloc_ref %10 : $Obj
%r = tuple ()
return %r : $()
}
@@ -107,7 +133,8 @@ bb0:
%7 = alloc_ref $Obj
%9 = ref_element_addr %7 : $Obj, #Obj.value
store %4 to %9 : $*Int64
- strong_release %7 : $Obj
+ %10 = end_cow_mutation %7 : $Obj
+ strong_release %10 : $Obj
%r = tuple ()
return %r : $()
}
@@ -125,7 +152,8 @@ bb0:
%9 = ref_element_addr %7 : $Obj, #Obj.value
store %4 to %9 : $*Int64
store %4 to %9 : $*Int64
- strong_release %7 : $Obj
+ %10 = end_cow_mutation %7 : $Obj
+ strong_release %10 : $Obj
%r = tuple ()
return %r : $()
}
@@ -140,7 +168,8 @@ bb0:
%4 = struct $Int64 (%1 : $Builtin.Int64)
%7 = alloc_ref [tail_elems $Int64 * %0 : $Builtin.Word] $Obj
%9 = ref_element_addr %7 : $Obj, #Obj.value
- strong_release %7 : $Obj
+ %10 = end_cow_mutation %7 : $Obj
+ strong_release %10 : $Obj
%r = tuple ()
return %r : $()
}
@@ -156,7 +185,8 @@ bb0:
%7 = alloc_ref [objc] $Obj
%9 = ref_element_addr %7 : $Obj, #Obj.value
store %4 to %9 : $*Int64
- strong_release %7 : $Obj
+ %10 = end_cow_mutation %7 : $Obj
+ strong_release %10 : $Obj
%r = tuple ()
return %r : $()
}
@@ -177,7 +207,8 @@ bb0:
%10 = address_to_pointer %9 : $*Int64 to $Builtin.RawPointer
%f = function_ref @take_pointer : $@convention(thin) (Builtin.RawPointer) -> ()
%a = apply %f(%10) : $@convention(thin) (Builtin.RawPointer) -> ()
- strong_release %7 : $Obj
+ %12 = end_cow_mutation %7 : $Obj
+ strong_release %12 : $Obj
%r = tuple ()
return %r : $()
}
@@ -215,7 +246,8 @@ bb0:
store %4 to %9 : $*Int64
%15 = ref_tail_addr %7 : $Obj, $Int64
store %5 to %15 : $*Int64
- strong_release %7 : $Obj
+ %16 = end_cow_mutation %7 : $Obj
+ strong_release %16 : $Obj
%r = tuple ()
return %r : $()
}
@@ -241,7 +273,8 @@ bb0:
%19 = integer_literal $Builtin.Word, 1
%20 = index_addr %15 : $*Int64, %19 : $Builtin.Word
store %6 to %20 : $*Int64
- strong_release %7 : $Obj
+ %21 = end_cow_mutation %7 : $Obj
+ strong_release %21 : $Obj
%r = tuple ()
return %r : $()
}
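With `end_cow_mutation` marking the end of an object's initialization, the outliner can turn such constant objects into statically initialized globals even through an upcast, and pointer_conversion.swift below now expects an array literal passed as a pointer to come from a `global_value` rather than a fresh allocation. The test's own source shape:

```swift
@_silgen_name("takesConstRawPointer")
func takesConstRawPointer(_ x: UnsafeRawPointer)

public func arrayLiteralPromotion() {
  // Under -O with an optimized, assert-free stdlib the literal's
  // buffer is outlined into a global; only ref_tail_addr and
  // address_to_pointer remain at the call site.
  takesConstRawPointer([-41, -42, -43, -44])
}
```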
@_silgen_name("takesConstRawPointer") @@ -86,23 +86,14 @@ public func testMutableArrayToOptional() { public func arrayLiteralPromotion() { takesConstRawPointer([-41,-42,-43,-44]) - // Stack allocate the array. - // TODO: When stdlib checks are enabled, this becomes heap allocated... :-( - // CHECK: alloc_ref {{.*}}[tail_elems $Int * {{.*}} : $Builtin.Word] $_ContiguousArrayStorage - - // Store the elements. - // CHECK: [[ELT:%.+]] = integer_literal $Builtin.Int{{.*}}, -41 - // CHECK: [[ELT:%.+]] = integer_literal $Builtin.Int{{.*}}, -42 - // CHECK: [[ELT:%.+]] = integer_literal $Builtin.Int{{.*}}, -43 - // CHECK: [[ELT:%.+]] = integer_literal $Builtin.Int{{.*}}, -44 - - // Call the function. - // CHECK: [[PTR:%.+]] = mark_dependence - + // Outline the array literal. + // CHECK: [[ARR:%.+]] = global_value + // CHECK: [[CAST:%.+]] = upcast [[ARR]] + // CHECK: [[TADDR:%.+]] = ref_tail_addr [[CAST]] + // CHECK: [[RAWPTR:%.+]] = address_to_pointer [[TADDR]] + // CHECK: [[UNSAFEPTR:%.+]] = struct $UnsafeRawPointer ([[RAWPTR]] + // CHECK: [[PTR:%.+]] = mark_dependence [[UNSAFEPTR]] // CHECK: [[FN:%.+]] = function_ref @takesConstRawPointer // CHECK: apply [[FN]]([[PTR]]) - - // Release the heap value. - // CHECK: strong_release } diff --git a/test/SILOptimizer/stack_promotion_escaping.swift b/test/SILOptimizer/stack_promotion_escaping.swift index 781ad54a27b93..af28c91453882 100644 --- a/test/SILOptimizer/stack_promotion_escaping.swift +++ b/test/SILOptimizer/stack_promotion_escaping.swift @@ -1,4 +1,5 @@ // RUN: %target-swift-frontend -parse-as-library -O -module-name=test %s -emit-sil | %FileCheck %s +// REQUIRES: optimized_stdlib,swift_stdlib_no_asserts final class Item {}