diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/LifetimeDependenceScopeFixup.swift b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/LifetimeDependenceScopeFixup.swift index da57fb2ed947f..94dfc78e820ba 100644 --- a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/LifetimeDependenceScopeFixup.swift +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/LifetimeDependenceScopeFixup.swift @@ -744,10 +744,9 @@ extension ScopeExtension { // Append each scope that needs extension to scopesToExtend from the inner to the outer scope. for extScope in scopes.reversed() { - // An outer scope might not originally cover one of its inner scopes. Therefore, extend 'extendedUseRange' to to - // cover this scope's end instructions. The extended scope must at least cover the original scopes because the - // original scopes may protect other operations. var mustExtend = false + // Iterating over scopeEndInst ignores unreachable paths which may not include the dealloc_stack. This is fine + // because the stack allocation effectively covers the entire unreachable path. for scopeEndInst in extScope.endInstructions { switch extendedUseRange.overlaps(pathBegin: extScope.firstInstruction, pathEnd: scopeEndInst, context) { case .containsPath, .containsEnd, .disjoint: @@ -757,6 +756,10 @@ extension ScopeExtension { break case .containsBegin, .overlappedByPath: // containsBegin can occur when the extendable scope has the same begin as the use range. + // + // An outer scope might not originally cover one of its inner scopes. Therefore, extend 'extendedUseRange' to + // cover this scope's end instructions. The extended scope must at least cover the original scopes because + // the original scopes may protect other operations. 
extendedUseRange.insert(scopeEndInst) break } diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/LifetimeDependenceUtils.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/LifetimeDependenceUtils.swift index 1074d34a5d481..21de12feaf86d 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/LifetimeDependenceUtils.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/LifetimeDependenceUtils.swift @@ -428,6 +428,10 @@ extension LifetimeDependence.Scope { /// /// Returns nil if the dependence scope covers the entire function. Returns an empty range for an unknown scope. /// + /// When this Scope is live on unreachable paths, the returned range may include blocks that are not dominated by the + /// scope introducer. Even though 'range.isValid == false' for such a range, it is still valid for checking that + /// dependencies are in scope since we already know that the Scope introducer dominates all dependent uses. + /// /// Ignore the lifetime of temporary trivial values (with .initialized and .unknown scopes). Temporaries have an /// unknown Scope, which means that LifetimeDependence.Scope did not recognize a VariableScopeInstruction. This is /// important to promote mark_dependence instructions emitted by SILGen to [nonescaping] (e.g. unsafeAddressor). It @@ -558,10 +562,32 @@ extension LifetimeDependence.Scope { } } guard isAddressable, !deallocInsts.isEmpty else { + // Valid on all paths to function exit. return nil } var range = InstructionRange(begin: initializingStore, context) range.insert(contentsOf: deallocInsts) + + // Insert unreachable paths with no dealloc_stack. + var forwardUnreachableWalk = BasicBlockWorklist(context) + defer { forwardUnreachableWalk.deinitialize() } + + // TODO: ensure complete dealloc_stack on all paths in SIL verification, then assert exitBlock.isEmpty. 
+ for exitBlock in range.exitBlocks { + forwardUnreachableWalk.pushIfNotVisited(exitBlock) + } + while let b = forwardUnreachableWalk.pop() { + if let unreachableInst = b.terminator as? UnreachableInst { + // Note: 'unreachableInst' is not necessarily dominated by 'initializingStore'. This marks the range invalid, + // but leaves it in a usable state that includes all blocks covered by the temporary allocation. The extra + // blocks (backward up to the function entry) are irrelevant because we already know that 'initializingStore' + // dominates dependent uses. + range.insert(unreachableInst) + } + for succBlock in b.successors { + forwardUnreachableWalk.pushIfNotVisited(succBlock) + } + } return range } } diff --git a/test/SILOptimizer/lifetime_dependence/verify_diagnostics.sil b/test/SILOptimizer/lifetime_dependence/verify_diagnostics.sil index 300d142c38bbe..eeb8e0317f482 100644 --- a/test/SILOptimizer/lifetime_dependence/verify_diagnostics.sil +++ b/test/SILOptimizer/lifetime_dependence/verify_diagnostics.sil @@ -70,6 +70,9 @@ sil @addressInt : $@convention(thin) (@in_guaranteed InlineInt) -> @lifetime(bor sil @addressOfInt : $@convention(thin) (@in_guaranteed Int) -> @lifetime(borrow address 0) @owned Span sil @noAddressInt : $@convention(thin) (@in_guaranteed Int) -> @lifetime(borrow 0) @owned Span sil @useSpan : $@convention(thin) (@guaranteed Span) -> () +sil @useRawSpan : $@convention(thin) (@guaranteed RawSpan) -> @error any Error + +sil @getInlineSpan : $@convention(thin) (@in_guaranteed InlineInt) -> @lifetime(borrow address 0) @owned RawSpan // Test returning a owned dependence on a trivial value sil [ossa] @return_trivial_dependence : $@convention(thin) (@guaranteed C) -> @lifetime(borrow 0) @owned NE { @@ -352,3 +355,68 @@ bb0(%0: $Int): %18 = tuple () return %18 } + +// Test dependence on the temporary stack address of a trivial value. computeAddressableRange must extend the lifetime +// of %tempAddr into the unreachable. 
+sil hidden [ossa] @testTempAddressUnreachable : $@convention(thin) (@in_guaranteed InlineInt) -> () { +bb0(%0 : $*InlineInt): + %loadArg = load [trivial] %0 + %tempAddr = alloc_stack $InlineInt + store %loadArg to [trivial] %tempAddr + + %f1 = function_ref @getInlineSpan : $@convention(thin) (@in_guaranteed InlineInt) -> @lifetime(borrow address 0) @owned RawSpan + %call = apply %f1(%tempAddr) : $@convention(thin) (@in_guaranteed InlineInt) -> @lifetime(borrow address 0) @owned RawSpan + %md = mark_dependence [unresolved] %call on %tempAddr + + %f2 = function_ref @useRawSpan : $@convention(thin) (@guaranteed RawSpan) -> @error any Error + try_apply %f2(%md) : $@convention(thin) (@guaranteed RawSpan) -> @error any Error, normal bb1, error bb2 + +bb1(%void : $()): + destroy_value %md + dealloc_stack %tempAddr + %99 = tuple () + return %99 + +bb2(%error : @owned $any Error): + destroy_value [dead_end] %md + unreachable +} + +// Test dependence on the temporary stack address of a trivial value. computeAddressableRange must extend the lifetime +// of %tempAddr into the unreachable. +// +// Note that the computed instruction range is marked Invalid because it does not have a single dominating +// block. Nonetheless, the range still includes all blocks in which the stack allocation is live, which is all we care +// about. 
+sil hidden [ossa] @testTempAddressNondominatedUnreachable : $@convention(thin) (@in_guaranteed InlineInt) -> () { +bb0(%0 : $*InlineInt): + cond_br undef, bb1, bb2 + +bb1: + br bb5 + +bb2: + %loadArg = load [trivial] %0 + %tempAddr = alloc_stack $InlineInt + store %loadArg to [trivial] %tempAddr + + %f1 = function_ref @getInlineSpan : $@convention(thin) (@in_guaranteed InlineInt) -> @lifetime(borrow address 0) @owned RawSpan + %call = apply %f1(%tempAddr) : $@convention(thin) (@in_guaranteed InlineInt) -> @lifetime(borrow address 0) @owned RawSpan + %md = mark_dependence [unresolved] %call on %tempAddr + + %f2 = function_ref @useRawSpan : $@convention(thin) (@guaranteed RawSpan) -> @error any Error + try_apply %f2(%md) : $@convention(thin) (@guaranteed RawSpan) -> @error any Error, normal bb3, error bb4 + +bb3(%void : $()): + destroy_value %md + dealloc_stack %tempAddr + %99 = tuple () + return %99 + +bb4(%error : @owned $any Error): + destroy_value [dead_end] %md + br bb5 + +bb5: + unreachable +} diff --git a/test/SILOptimizer/lifetime_dependence/verify_diagnostics.swift b/test/SILOptimizer/lifetime_dependence/verify_diagnostics.swift index fbe3ef1707605..2177a91345ec7 100644 --- a/test/SILOptimizer/lifetime_dependence/verify_diagnostics.swift +++ b/test/SILOptimizer/lifetime_dependence/verify_diagnostics.swift @@ -208,6 +208,19 @@ func testClosureCapture1(_ a: HasMethods) { */ } +public struct InlineInt: BitwiseCopyable { + var val: UInt64 + + var span: RawSpan { + @_addressableSelf + @_lifetime(borrow self) borrowing get { + let buf = UnsafeRawBufferPointer(start: UnsafeRawPointer(Builtin.addressOfBorrow(val)), count: 1) + let span = RawSpan(_unsafeBytes: buf) + return unsafe _overrideLifetime(span, borrowing: val) + } + } +} + // ============================================================================= // Indirect ~Escapable results // ============================================================================= @@ -382,3 +395,17 @@ func returnTempBorrow() 
-> Borrow { // expected-note@-1{{it depends on the lifetime of this parent value}} return span // expected-note{{this use causes the lifetime-dependent value to escape}} } + +// Test dependence on the temporary stack address of a trivial value. computeAddressableRange must extend the lifetime +// of 'inline' into the unreachable. +// +// This test requires InlineInt to be a trivial value defined in this module so that inline.span generates: +// +// %temp = alloc_stack +// store %arg to [trivial] temp +// apply %get_span(%temp) +// +// If we use InlineArray instead, we get a store_borrow, which is a completely different situation. +func test(inline: InlineInt) { + inline.span.withUnsafeBytes { _ = $0 } +}