diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt index 7cb1b54354ba2..db6c24668ece8 100644 --- a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/CMakeLists.txt @@ -14,6 +14,7 @@ swift_compiler_sources(Optimizer InitializeStaticGlobals.swift ObjCBridgingOptimization.swift MergeCondFails.swift + NamedReturnValueOptimization.swift ReleaseDevirtualizer.swift SimplificationPasses.swift StackPromotion.swift diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift index 5a0bbeee4700c..8c90bf9590d8f 100644 --- a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift @@ -48,7 +48,7 @@ let initializeStaticGlobalsPass = FunctionPass(name: "initialize-static-globals" return } - guard let (allocInst, storeToGlobal) = function.getGlobalInitialization() else { + guard let (allocInst, storeToGlobal) = getGlobalInitialization(of: function) else { return } @@ -62,69 +62,69 @@ let initializeStaticGlobalsPass = FunctionPass(name: "initialize-static-globals" context.erase(instruction: storeToGlobal) } -private extension Function { - /// Analyses the global initializer function and returns the `alloc_global` and `store` - /// instructions which initialize the global. - /// - /// The function's single basic block must contain following code pattern: - /// ``` - /// alloc_global @the_global - /// %a = global_addr @the_global - /// %i = some_const_initializer_insts - /// store %i to %a - /// ``` - func getGlobalInitialization() -> (allocInst: AllocGlobalInst, storeToGlobal: StoreInst)? { +/// Analyses the global initializer function and returns the `alloc_global` and `store` +/// instructions which initialize the global. +/// +/// The function's single basic block must contain following code pattern: +/// ``` +/// alloc_global @the_global +/// %a = global_addr @the_global +/// %i = some_const_initializer_insts +/// store %i to %a +/// ``` +private func getGlobalInitialization(of function: Function) -> (allocInst: AllocGlobalInst, storeToGlobal: StoreInst)? { - guard let block = singleBlock else { - return nil - } + guard let block = function.singleBlock else { + return nil + } - var allocInst: AllocGlobalInst? = nil - var globalAddr: GlobalAddrInst? = nil - var store: StoreInst? = nil + var allocInst: AllocGlobalInst? = nil + var globalAddr: GlobalAddrInst? = nil + var store: StoreInst? 
= nil - for inst in block.instructions { - switch inst { - case is ReturnInst, - is DebugValueInst, - is DebugStepInst: - break - case let agi as AllocGlobalInst: - if allocInst != nil { - return nil - } - allocInst = agi - case let ga as GlobalAddrInst: - if globalAddr != nil { - return nil - } - guard let agi = allocInst, agi.global == ga.global else { - return nil - } - globalAddr = ga - case let si as StoreInst: - if store != nil { - return nil - } - guard let ga = globalAddr else { - return nil - } - if si.destination != ga { - return nil - } - store = si - default: - if !inst.isValidInStaticInitializerOfGlobal { - return nil - } + for inst in block.instructions { + switch inst { + case is ReturnInst, + is DebugValueInst, + is DebugStepInst: + break + case let agi as AllocGlobalInst: + if allocInst != nil { + return nil + } + allocInst = agi + case let ga as GlobalAddrInst: + if globalAddr != nil { + return nil + } + guard let agi = allocInst, agi.global == ga.global else { + return nil + } + globalAddr = ga + case let si as StoreInst: + if store != nil { + return nil + } + guard let ga = globalAddr else { + return nil + } + if si.destination != ga { + return nil + } + store = si + default: + if !inst.isValidInStaticInitializerOfGlobal { + return nil } } - if let store = store { - return (allocInst: allocInst!, storeToGlobal: store) - } - return nil } + if let store = store { + return (allocInst: allocInst!, storeToGlobal: store) + } + return nil +} +private extension Function { var singleBlock: BasicBlock? { let block = entryBlock if block.next != nil { diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/NamedReturnValueOptimization.swift b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/NamedReturnValueOptimization.swift new file mode 100644 index 0000000000000..951b37c025372 --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/NamedReturnValueOptimization.swift @@ -0,0 +1,131 @@ +//===--- NamedReturnValueOptimization.swift --------------------------------==// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +import SIL + +/// Removes a `copy_addr` to an indirect out argument by replacing the source of the copy +/// (which must be an `alloc_stack`) with the out argument itself. +/// +/// The following SIL pattern will be optimized: +/// +/// sil @foo : $@convention(thin) () -> @out T { +/// bb0(%0 : $*T): +/// %2 = alloc_stack $T +/// ... +/// copy_addr %some_value to [init] %2 // or any other writes to %2 +/// ... +/// bbN: +/// copy_addr [take] %2 to [init] %0 : $*T // the only use of %0 +/// ... // no writes +/// return +/// +/// to: +/// +/// sil @foo : $@convention(thin) (@out T) -> () { +/// bb0(%0 : $*T): +/// %2 = alloc_stack $T // is dead now +/// ... +/// copy_addr %some_value to [init] %0 +/// ... +/// bbN: +/// ... +/// return +/// +/// This optimization can be done because we know that: +/// * The out argument dominates all uses of the copy_addr's source (because it's a function argument). +/// * It's not aliased (by definition). We can't allow aliases to be accessed between the initialization and the return. 
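For intuition, here is a source-level sketch of code whose -Onone SIL typically contains exactly this alloc_stack plus final copy_addr shape; the `Pair` type and the function are invented for illustration and are not part of this change.

```swift
// Illustration only: a generic (hence address-only) return value built up in a
// named local. At -Onone the local becomes an alloc_stack and the `return`
// becomes a final `copy_addr [take]` into the @out argument, which is the copy
// this pass removes.
struct Pair<T> {
  var first: T
  var second: T
}

func swapped<T>(_ p: Pair<T>) -> Pair<T> {
  var result = p            // the "named return value" (alloc_stack in SIL)
  result.first = p.second   // writes go directly into the local
  result.second = p.first
  return result             // copy_addr [take] %result to [init] %out
}
```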
+///
+/// This pass shouldn't run before serialization. It might prevent predictable memory optimizations
+/// in a caller after inlining, because the memory location (the out argument = an alloc_stack in the caller)
+/// might be written multiple times after this optimization.
+///
+let namedReturnValueOptimization = FunctionPass(name: "named-return-value-optimization") {
+  (function: Function, context: FunctionPassContext) in
+
+  for outArg in function.arguments[0..<function.numIndirectResultArguments] {
+    if let copyToArg = findCopyForNRVO(for: outArg) {
+      performNRVO(with: copyToArg, context)
+    }
+  }
+}
+
+private func findCopyForNRVO(for outArg: FunctionArgument) -> CopyAddrInst? {
+  guard let singleArgUse = outArg.uses.singleNonDebugUse,
+        let copyToArg = singleArgUse.instruction as? CopyAddrInst else {
+    return nil
+  }
+
+  assert(singleArgUse == copyToArg.destinationOperand,
+         "single use of out-argument cannot be the source of a copy")
+
+  // Don't perform NRVO unless the copy is a [take]. This is the easiest way
+  // to determine that the local variable has ownership of its value and ensures
+  // that removing a copy is a reference count neutral operation. For example,
+  // this copy can't be trivially eliminated without adding a retain.
+  //   sil @f : $@convention(thin) (@guaranteed T) -> @out T
+  //   bb0(%in : $T, %out : $*T):
+  //     %local = alloc_stack $T
+  //     store %in to %local : $*T
+  //     copy_addr %local to [init] %out : $*T
+  if !copyToArg.isTakeOfSrc {
+    return nil
+  }
+
+  guard let sourceStackAlloc = copyToArg.source as? AllocStackInst else {
+    return nil
+  }
+
+  // NRVO for alloc_stack [dynamic_lifetime] will invalidate OSSA invariants.
+  if sourceStackAlloc.hasDynamicLifetime && copyToArg.parentFunction.hasOwnership {
+    return nil
+  }
+
+  if !(copyToArg.parentBlock.terminator is ReturnInst) {
+    return nil
+  }
+
+  // This check is overly conservative, because we only need to check if the source
+  // of the copy is not written to. But the copy to the out argument is usually the last
+  // instruction of the function, so it doesn't matter.
+ if isAnyInstructionWritingToMemory(after: copyToArg) { + return nil + } + + return copyToArg +} + +private func performNRVO(with copy: CopyAddrInst, _ context: FunctionPassContext) { + copy.source.uses.replaceAllExceptDealloc(with: copy.destination, context) + assert(copy.source == copy.destination) + context.erase(instruction: copy) +} + +private func isAnyInstructionWritingToMemory(after: Instruction) -> Bool { + var followingInst = after.next + while let fi = followingInst { + if fi.mayWriteToMemory && !(fi is DeallocStackInst) { + return true + } + followingInst = fi.next + } + return false +} + +private extension UseList { + func replaceAllExceptDealloc(with replacement: Value, _ context: some MutatingContext) { + for use in self where !(use.instruction is Deallocation) { + use.set(to: replacement, context) + } + } +} diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/SimplificationPasses.swift b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/SimplificationPasses.swift index 0afcffa020d67..0cf0a664452be 100644 --- a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/SimplificationPasses.swift +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/SimplificationPasses.swift @@ -71,9 +71,9 @@ let lateOnoneSimplificationPass = FunctionPass(name: "late-onone-simplification" //===--------------------------------------------------------------------===// -private func runSimplification(on function: Function, _ context: FunctionPassContext, - preserveDebugInfo: Bool, - _ simplify: (Instruction, SimplifyContext) -> ()) { +func runSimplification(on function: Function, _ context: FunctionPassContext, + preserveDebugInfo: Bool, + _ simplify: (Instruction, SimplifyContext) -> ()) { var worklist = InstructionWorklist(context) defer { worklist.deinitialize() } diff --git a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt index 5a0a5a6e94f40..9ce3a8865ec23 100644 --- a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt +++ b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt @@ -17,6 +17,7 @@ swift_compiler_sources(Optimizer SimplifyDestructure.swift SimplifyGlobalValue.swift SimplifyLoad.swift + SimplifyPartialApply.swift SimplifyStrongRetainRelease.swift SimplifyStructExtract.swift SimplifyTupleExtract.swift diff --git a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyApply.swift b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyApply.swift index 06e5838318a4f..e00549bc48ad3 100644 --- a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyApply.swift +++ b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyApply.swift @@ -14,48 +14,18 @@ import SIL extension ApplyInst : OnoneSimplifyable { func simplify(_ context: SimplifyContext) { - tryReplaceTrivialApplyOfPartialApply(context) + _ = context.tryDevirtualize(apply: self, isMandatory: false) } } -private extension ApplyInst { - func tryReplaceTrivialApplyOfPartialApply(_ context: SimplifyContext) { - guard let pa = callee as? PartialApplyInst else { - return - } - - if pa.referencedFunction == nil { - return - } - - // Currently we don't handle generic closures. For Onone this is good enough. - // TODO: handle it once we replace the SILCombine simplification with this. 
- if !allArgumentsAreTrivial(arguments) { - return - } - - if !allArgumentsAreTrivial(pa.arguments) { - return - } - - if !substitutionMap.isEmpty { - return - } - - let allArgs = Array(arguments) + Array(pa.arguments) - let builder = Builder(before: self, context) - let newApply = builder.createApply(function: pa.callee, pa.substitutionMap, arguments: allArgs, - isNonThrowing: isNonThrowing, isNonAsync: isNonAsync, - specializationInfo: specializationInfo) - uses.replaceAll(with: newApply, context) - context.erase(instruction: self) - - if context.tryDeleteDeadClosure(closure: pa) { - context.notifyInvalidatedStackNesting() - } +extension TryApplyInst : OnoneSimplifyable { + func simplify(_ context: SimplifyContext) { + _ = context.tryDevirtualize(apply: self, isMandatory: false) } } -private func allArgumentsAreTrivial(_ args: LazyMapSequence) -> Bool { - return !args.contains { !$0.hasTrivialType } +extension BeginApplyInst : OnoneSimplifyable { + func simplify(_ context: SimplifyContext) { + _ = context.tryDevirtualize(apply: self, isMandatory: false) + } } diff --git a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyBuiltin.swift b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyBuiltin.swift index 17faaf0d6144c..62b6cebcd6bed 100644 --- a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyBuiltin.swift +++ b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyBuiltin.swift @@ -24,6 +24,10 @@ extension BuiltinInst : OnoneSimplifyable { optimizeIsSameMetatype(context) case .Once: optimizeBuiltinOnce(context) + case .CanBeObjCClass: + optimizeCanBeClass(context) + case .AssertConf: + optimizeAssertConfig(context) default: if let literal = constantFold(context) { uses.replaceAll(with: literal, context) @@ -90,6 +94,43 @@ private extension BuiltinInst { } return nil } + + func optimizeCanBeClass(_ context: SimplifyContext) { + guard let ty = substitutionMap.replacementTypes[0] else { + return + } + let literal: IntegerLiteralInst + switch ty.canBeClass { + case .IsNot: + let builder = Builder(before: self, context) + literal = builder.createIntegerLiteral(0, type: type) + case .Is: + let builder = Builder(before: self, context) + literal = builder.createIntegerLiteral(1, type: type) + case .CanBe: + return + default: + fatalError() + } + uses.replaceAll(with: literal, context) + context.erase(instruction: self) + } + + func optimizeAssertConfig(_ context: SimplifyContext) { + let literal: IntegerLiteralInst + switch context.options.assertConfiguration { + case .enabled: + let builder = Builder(before: self, context) + literal = builder.createIntegerLiteral(1, type: type) + case .disabled: + let builder = Builder(before: self, context) + literal = builder.createIntegerLiteral(0, type: type) + default: + return + } + uses.replaceAll(with: literal, context) + context.erase(instruction: self) + } } private func typesOfValuesAreEqual(_ lhs: Value, _ rhs: Value, in function: Function) -> Bool? 
{ diff --git a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyPartialApply.swift b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyPartialApply.swift new file mode 100644 index 0000000000000..dd52932ef1dd9 --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyPartialApply.swift @@ -0,0 +1,34 @@ +//===--- SimplifyPartialApply.swift ---------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +import SIL + +extension PartialApplyInst : OnoneSimplifyable { + func simplify(_ context: SimplifyContext) { + let optimizedApplyOfPartialApply = context.tryOptimizeApplyOfPartialApply(closure: self) + if optimizedApplyOfPartialApply { + context.notifyInvalidatedStackNesting() + } + + if context.preserveDebugInfo && uses.contains(where: { $0.instruction is DebugValueInst }) { + return + } + + // Try to delete the partial_apply. + // In case it became dead because of tryOptimizeApplyOfPartialApply, we don't + // need to copy all arguments again (to extend their lifetimes), because it + // was already done in tryOptimizeApplyOfPartialApply. + if context.tryDeleteDeadClosure(closure: self, needKeepArgsAlive: !optimizedApplyOfPartialApply) { + context.notifyInvalidatedStackNesting() + } + } +} diff --git a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt index 19dc9b7ab91c9..6fc9bdf39c5de 100644 --- a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt +++ b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt @@ -7,6 +7,7 @@ # See http://swift.org/CONTRIBUTORS.txt for Swift project authors swift_compiler_sources(Optimizer + MandatoryPerformanceOptimizations.swift ReadOnlyGlobalVariables.swift StackProtection.swift ) diff --git a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/MandatoryPerformanceOptimizations.swift b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/MandatoryPerformanceOptimizations.swift new file mode 100644 index 0000000000000..6cd09ef54cdb7 --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/MandatoryPerformanceOptimizations.swift @@ -0,0 +1,170 @@ +//===--- MandatoryPerformanceOptimizations.swift --------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +import SIL + +/// Performs mandatory optimizations for performance-annotated functions. 
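At the source level, the functions that seed this pass carry a performance annotation such as `@_noAllocation` or `@_noLocks`. A minimal sketch, with invented function bodies that are only valid because they neither allocate nor lock:

```swift
// Illustration only: functions like these seed the pass's worklist; their
// callees are then loaded and optimized transitively.
@_noAllocation
func addChecked(_ a: Int, _ b: Int) -> Int? {
  let (sum, overflow) = a.addingReportingOverflow(b)
  return overflow ? nil : sum
}

@_noLocks
func squaredDistance(_ x: Double, _ y: Double) -> Double {
  x * x + y * y
}
```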
+/// +/// Optimizations include: +/// * de-virtualization +/// * mandatory inlining +/// * generic specialization +/// * mandatory memory optimizations +/// * dead alloc elimination +/// * instruction simplification +/// +/// The pass starts with performance-annotated functions and transitively handles +/// called functions. +/// +let mandatoryPerformanceOptimizations = ModulePass(name: "mandatory-performance-optimizations") { + (moduleContext: ModulePassContext) in + + var worklist = FunctionWorklist() + worklist.addAllPerformanceAnnotatedFunctions(of: moduleContext) + + optimizeFunctionsTopDown(using: &worklist, moduleContext) +} + +private func optimizeFunctionsTopDown(using worklist: inout FunctionWorklist, + _ moduleContext: ModulePassContext) { + while let f = worklist.pop() { + moduleContext.transform(function: f) { context in + if !context.loadFunction(function: f, loadCalleesRecursively: true) { + return + } + optimize(function: f, context) + worklist.add(calleesOf: f) + } + } +} + +private func optimize(function: Function, _ context: FunctionPassContext) { + runSimplification(on: function, context, preserveDebugInfo: true) { instruction, simplifyCtxt in + if let i = instruction as? OnoneSimplifyable { + i.simplify(simplifyCtxt) + if instruction.isDeleted { + return + } + } + switch instruction { + case let apply as FullApplySite: + inlineAndDevirtualize(apply: apply, context, simplifyCtxt) + case let mt as MetatypeInst: + if mt.isTriviallyDeadIgnoringDebugUses { + simplifyCtxt.erase(instructionIncludingDebugUses: mt) + } + default: + break + } + } + + _ = context.specializeApplies(in: function, isMandatory: true) + + // If this is a just specialized function, try to optimize copy_addr, etc. + if context.optimizeMemoryAccesses(in: function) { + _ = context.eliminateDeadAllocations(in: function) + } +} + +private func inlineAndDevirtualize(apply: FullApplySite, _ context: FunctionPassContext, _ simplifyCtxt: SimplifyContext) { + + if simplifyCtxt.tryDevirtualize(apply: apply, isMandatory: true) != nil { + return + } + + guard let callee = apply.referencedFunction else { + return + } + + if !context.loadFunction(function: callee, loadCalleesRecursively: true) { + // We don't have the funcion body of the callee. + return + } + + if shouldInline(apply: apply, callee: callee) { + simplifyCtxt.inlineFunction(apply: apply, mandatoryInline: true) + + // In OSSA `partial_apply [on_stack]`s are represented as owned values rather than stack locations. + // It is possible for their destroys to violate stack discipline. + // When inlining into non-OSSA, those destroys are lowered to dealloc_stacks. + // This can result in invalid stack nesting. + if callee.hasOwnership && !apply.parentFunction.hasOwnership { + simplifyCtxt.notifyInvalidatedStackNesting() + } + } +} + +private func shouldInline(apply: FullApplySite, callee: Function) -> Bool { + if callee.isTransparent { + return true + } + if apply is BeginApplyInst { + // Avoid co-routines because they might allocate (their context). + return true + } + if apply.parentFunction.isGlobalInitOnceFunction && callee.inlineStrategy == .always { + // Some arithmetic operations, like integer conversions, are not transparent but `inline(__always)`. + // Force inlining them in global initializers so that it's possible to statically initialize the global. 
+    return true
+  }
+  return false
+}
+
+fileprivate struct FunctionWorklist {
+  private(set) var functions = Array<Function>()
+  private var pushedFunctions = Set<Function>()
+  private var currentIndex = 0
+
+  mutating func pop() -> Function? {
+    if currentIndex < functions.count {
+      let f = functions[currentIndex]
+      currentIndex += 1
+      return f
+    }
+    return nil
+  }
+
+  mutating func addAllPerformanceAnnotatedFunctions(of moduleContext: ModulePassContext) {
+    for f in moduleContext.functions where f.performanceConstraints != .none {
+      pushIfNotVisited(f)
+    }
+  }
+
+  mutating func add(calleesOf function: Function) {
+    for inst in function.instructions {
+      switch inst {
+      case let apply as ApplySite:
+        if let callee = apply.referencedFunction {
+          pushIfNotVisited(callee)
+        }
+      case let bi as BuiltinInst:
+        switch bi.id {
+        case .Once, .OnceWithContext:
+          if let fri = bi.operands[1].value as? FunctionRefInst {
+            pushIfNotVisited(fri.referencedFunction)
+          }
+          break;
+        default:
+          break
+        }
+      default:
+        break
+      }
+    }
+  }
+
+  mutating func pushIfNotVisited(_ element: Function) {
+    if pushedFunctions.insert(element).inserted {
+      functions.append(element)
+    }
+  }
+}
diff --git a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/ReadOnlyGlobalVariables.swift b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/ReadOnlyGlobalVariables.swift
index 5d625543557fb..bf2ae44b77406 100644
--- a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/ReadOnlyGlobalVariables.swift
+++ b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/ReadOnlyGlobalVariables.swift
@@ -27,7 +27,7 @@ let readOnlyGlobalVariablesPass = ModulePass(name: "read-only-global-variables")
   for f in moduleContext.functions {
     for inst in f.instructions {
       if let gAddr = inst as? GlobalAddrInst {
-        if gAddr.addressHasWrites {
+        if findWrites(toAddress: gAddr) {
           writtenGlobals.insert(gAddr.global)
         }
       }
@@ -43,11 +43,9 @@ let readOnlyGlobalVariablesPass = ModulePass(name: "read-only-global-variables")
     }
   }
 
-private extension Value {
-  var addressHasWrites: Bool {
-    var walker = FindWrites()
-    return walker.walkDownUses(ofAddress: self, path: UnusedWalkingPath()) == .abortWalk
-  }
+private func findWrites(toAddress: Value) -> Bool {
+  var walker = FindWrites()
+  return walker.walkDownUses(ofAddress: toAddress, path: UnusedWalkingPath()) == .abortWalk
 }
 
 private struct FindWrites : AddressDefUseWalker {
diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/Context.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/Context.swift
index 91b29e6b754bf..8052be38914bf 100644
--- a/SwiftCompilerSources/Sources/Optimizer/PassManager/Context.swift
+++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/Context.swift
@@ -64,13 +64,69 @@ extension MutatingContext {
   func erase(instructionIncludingDebugUses inst: Instruction) {
     for result in inst.results {
       for use in result.uses {
-        assert(use.instruction is DebugValueInst)
+        assert(use.instruction is DebugValueInst, "instruction to delete may only have debug_value uses")
         erase(instruction: use.instruction)
       }
     }
     erase(instruction: inst)
   }
 
+  func tryOptimizeApplyOfPartialApply(closure: PartialApplyInst) -> Bool {
+    if _bridged.tryOptimizeApplyOfPartialApply(closure.bridged) {
+      notifyInstructionsChanged()
+      notifyCallsChanged()
+
+      for use in closure.callee.uses {
+        if use.instruction is FullApplySite {
+          notifyInstructionChanged(use.instruction)
+        }
+      }
+      return true
+    }
+    return false
+  }
+
+  func tryDeleteDeadClosure(closure: SingleValueInstruction, needKeepArgsAlive: Bool = true) -> Bool {
+    if
_bridged.tryDeleteDeadClosure(closure.bridged, needKeepArgsAlive) { + notifyInstructionsChanged() + return true + } + return false + } + + func tryDevirtualize(apply: FullApplySite, isMandatory: Bool) -> ApplySite? { + let result = _bridged.tryDevirtualizeApply(apply.bridged, isMandatory) + if let newApply = result.newApply.instruction { + erase(instruction: apply) + notifyInstructionsChanged() + notifyCallsChanged() + if result.cfgChanged { + notifyBranchesChanged() + } + notifyInstructionChanged(newApply) + return newApply as! FullApplySite + } + return nil + } + + func inlineFunction(apply: FullApplySite, mandatoryInline: Bool) { + let instAfterInling: Instruction? + switch apply { + case is ApplyInst, is BeginApplyInst: + instAfterInling = apply.next + case is TryApplyInst: + instAfterInling = apply.parentBlock.next?.instructions.first + default: + instAfterInling = nil + } + + _bridged.inlineFunction(apply.bridged, mandatoryInline) + + if let instAfterInling = instAfterInling { + notifyNewInstructions(from: apply, to: instAfterInling) + } + } + /// Copies all instructions of a static init value of a global to the insertion point of `builder`. func copyStaticInitializer(fromInitValue: Value, to builder: Builder) -> Value? { let range = _bridged.copyStaticInitializer(fromInitValue.bridged, builder.bridged) @@ -98,10 +154,6 @@ extension MutatingContext { } } - func tryDeleteDeadClosure(closure: SingleValueInstruction) -> Bool { - _bridged.tryDeleteDeadClosure(closure.bridged) - } - func getContextSubstitutionMap(for type: Type) -> SubstitutionMap { SubstitutionMap(_bridged.getContextSubstitutionMap(type.bridged)) } @@ -157,10 +209,19 @@ struct FunctionPassContext : MutatingContext { return PostDominatorTree(bridged: bridgedPDT) } - func loadFunction(name: StaticString) -> Function? { + func loadFunction(name: StaticString, loadCalleesRecursively: Bool) -> Function? { return name.withUTF8Buffer { (nameBuffer: UnsafeBufferPointer) in - _bridged.loadFunction(llvm.StringRef(nameBuffer.baseAddress, nameBuffer.count)).function + let nameStr = llvm.StringRef(nameBuffer.baseAddress, nameBuffer.count) + return _bridged.loadFunction(nameStr, loadCalleesRecursively).function + } + } + + func loadFunction(function: Function, loadCalleesRecursively: Bool) -> Bool { + if function.isDefinition { + return true } + _bridged.loadFunction(function.bridged, loadCalleesRecursively) + return function.isDefinition } func erase(block: BasicBlock) { @@ -176,6 +237,31 @@ struct FunctionPassContext : MutatingContext { _bridged.asNotificationHandler().notifyChanges(.effectsChanged) } + func optimizeMemoryAccesses(in function: Function) -> Bool { + if swift.optimizeMemoryAccesses(function.bridged.getFunction()) { + notifyInstructionsChanged() + return true + } + return false + } + + func eliminateDeadAllocations(in function: Function) -> Bool { + if swift.eliminateDeadAllocations(function.bridged.getFunction()) { + notifyInstructionsChanged() + return true + } + return false + } + + func specializeApplies(in function: Function, isMandatory: Bool) -> Bool { + if _bridged.specializeAppliesInFunction(function.bridged, isMandatory) { + notifyInstructionsChanged() + notifyCallsChanged() + return true + } + return false + } + /// Copies `initValue` (including all operand instructions, transitively) to the /// static init value of `global`. 
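The `FunctionWorklist` driving the mandatory-performance pass above is an instance of a common visit-once worklist idiom. A standalone restatement of just that idiom, generic over the element type and exercised with `String` purely for illustration:

```swift
// A restatement of the visit-once worklist idiom used by FunctionWorklist:
// pop() walks the array by index so that elements added while processing are
// still visited, and the Set guarantees each element is pushed at most once.
struct VisitOnceWorklist<Element: Hashable> {
  private(set) var elements: [Element] = []
  private var pushed: Set<Element> = []
  private var currentIndex = 0

  mutating func pop() -> Element? {
    guard currentIndex < elements.count else { return nil }
    defer { currentIndex += 1 }
    return elements[currentIndex]
  }

  mutating func pushIfNotVisited(_ element: Element) {
    if pushed.insert(element).inserted {
      elements.append(element)
    }
  }
}

// Usage sketch:
var worklist = VisitOnceWorklist<String>()
worklist.pushIfNotVisited("root")
while let name = worklist.pop() {
  // process `name`, possibly pushing callees discovered along the way
  _ = name
}
```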
func createStaticInitializer(for global: GlobalVariable, initValue: SingleValueInstruction) { @@ -300,11 +386,17 @@ extension AllocRefInstBase { extension UseList { func replaceAll(with replacement: Value, _ context: some MutatingContext) { for use in self { - use.instruction.setOperand(at: use.index, to: replacement, context) + use.set(to: replacement, context) } } } +extension Operand { + func set(to value: Value, _ context: some MutatingContext) { + instruction.setOperand(at: index, to: value, context) + } +} + extension Instruction { func setOperand(at index : Int, to value: Value, _ context: some MutatingContext) { if self is FullApplySite && index == ApplyOperands.calleeOperandIndex { diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift index c221984d45e9d..57ac9585ba774 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift @@ -27,4 +27,18 @@ struct Options { func enableSimplification(for inst: Instruction) -> Bool { _bridged.enableSimplificationFor(inst.bridged) } + + enum AssertConfiguration { + case enabled + case disabled + case unknown + } + + var assertConfiguration: AssertConfiguration { + switch _bridged.getAssertConfiguration() { + case .Debug: return .enabled + case .Release, .Unchecked: return .disabled + default: return .unknown + } + } } diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift index 27ab9ad301f89..7164f5b680cb0 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift @@ -61,6 +61,7 @@ private func registerForSILCombine( private func registerSwiftPasses() { // Module passes + registerPass(mandatoryPerformanceOptimizations, { mandatoryPerformanceOptimizations.run($0) }) registerPass(readOnlyGlobalVariablesPass, { readOnlyGlobalVariablesPass.run($0) }) registerPass(stackProtection, { stackProtection.run($0) }) @@ -78,6 +79,7 @@ private func registerSwiftPasses() { registerPass(ononeSimplificationPass, { ononeSimplificationPass.run($0) }) registerPass(lateOnoneSimplificationPass, { lateOnoneSimplificationPass.run($0) }) registerPass(cleanupDebugStepsPass, { cleanupDebugStepsPass.run($0) }) + registerPass(namedReturnValueOptimization, { namedReturnValueOptimization.run($0) }) // Instruction passes registerForSILCombine(BeginCOWMutationInst.self, { run(BeginCOWMutationInst.self, $0) }) diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift index a6e30f0170e8b..b05b4a6175a02 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift @@ -78,7 +78,7 @@ extension Value { } } -private extension Instruction { +extension Instruction { var isTriviallyDead: Bool { if results.contains(where: { !$0.uses.isEmpty }) { return false diff --git a/SwiftCompilerSources/Sources/SIL/Argument.swift b/SwiftCompilerSources/Sources/SIL/Argument.swift index 25003c2c84e56..6047be903da83 100644 --- a/SwiftCompilerSources/Sources/SIL/Argument.swift +++ b/SwiftCompilerSources/Sources/SIL/Argument.swift @@ -42,6 +42,10 @@ final public class FunctionArgument : Argument { public var convention: ArgumentConvention { 
bridged.getConvention().convention } + + public var isIndirectResult: Bool { + return index < parentFunction.numIndirectResultArguments + } } final public class BlockArgument : Argument { diff --git a/SwiftCompilerSources/Sources/SIL/Function.swift b/SwiftCompilerSources/Sources/SIL/Function.swift index b5e76f7b0985f..e258404a142ca 100644 --- a/SwiftCompilerSources/Sources/SIL/Function.swift +++ b/SwiftCompilerSources/Sources/SIL/Function.swift @@ -78,6 +78,8 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash assert(selfIdx >= 0) return selfIdx } + + public var selfArgument: FunctionArgument { arguments[selfArgumentIndex] } public var argumentTypes: ArgumentTypeArray { ArgumentTypeArray(function: self) } public var resultType: Type { bridged.getSILResultType().type } @@ -122,6 +124,8 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash /// This means that the function terminates the program. public var isProgramTerminationPoint: Bool { hasSemanticsAttribute("programtermination_point") } + public var isTransparent: Bool { bridged.isTransparent() } + /// True if this is a `[global_init]` function. /// /// Such a function is typically a global addressor which calls the global's @@ -187,6 +191,37 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash } } + public enum PerformanceConstraints { + case none + case noAllocations + case noLocks + } + + public var performanceConstraints: PerformanceConstraints { + switch bridged.getPerformanceConstraints() { + case .None: return .none + case .NoAllocation: return .noAllocations + case .NoLocks: return .noLocks + default: fatalError("unknown performance constraint") + } + } + + public enum InlineStrategy { + case automatic + case never + case always + } + + public var inlineStrategy: InlineStrategy { + switch bridged.getInlineStrategy() { + case .InlineDefault: return .automatic + case .NoInline: return .never + case .AlwaysInline: return .always + default: + fatalError() + } + } + /// True, if the function runs with a swift 5.1 runtime. /// Note that this is function specific, because inlinable functions are de-serialized /// in a client module, which might be compiled with a different deployment target. diff --git a/SwiftCompilerSources/Sources/SIL/Instruction.swift b/SwiftCompilerSources/Sources/SIL/Instruction.swift index adecf096d2434..e26f04578eb15 100644 --- a/SwiftCompilerSources/Sources/SIL/Instruction.swift +++ b/SwiftCompilerSources/Sources/SIL/Instruction.swift @@ -236,10 +236,10 @@ final public class CopyAddrInst : Instruction { public var destination: Value { return destinationOperand.value } public var isTakeOfSrc: Bool { - bridged.CopyAddrInst_isTakeOfSrc() != 0 + bridged.CopyAddrInst_isTakeOfSrc() } public var isInitializationOfDest: Bool { - bridged.CopyAddrInst_isInitializationOfDest() != 0 + bridged.CopyAddrInst_isInitializationOfDest() } } @@ -251,16 +251,6 @@ final public class EndAccessInst : Instruction, UnaryInstruction { final public class EndBorrowInst : Instruction, UnaryInstruction {} -final public class DeallocStackInst : Instruction, UnaryInstruction { - public var allocstack: AllocStackInst { - return operand.value as! AllocStackInst - } -} - -final public class DeallocStackRefInst : Instruction, UnaryInstruction { - public var allocRef: AllocRefInstBase { operand.value as! 
AllocRefInstBase } -} - final public class MarkUninitializedInst : SingleValueInstruction, UnaryInstruction { } @@ -286,8 +276,6 @@ final public class AbortApplyInst : Instruction, UnaryInstruction {} final public class SetDeallocatingInst : Instruction, UnaryInstruction {} -final public class DeallocRefInst : Instruction, UnaryInstruction {} - public class RefCountingInst : Instruction, UnaryInstruction { public var isAtomic: Bool { bridged.RefCountingInst_getIsAtomic() } } @@ -320,6 +308,32 @@ final public class InjectEnumAddrInst : Instruction, UnaryInstruction, EnumInstr final public class UnimplementedRefCountingInst : RefCountingInst {} +//===----------------------------------------------------------------------===// +// no-value deallocation instructions +//===----------------------------------------------------------------------===// + +public protocol Deallocation : Instruction { } + +final public class DeallocStackInst : Instruction, UnaryInstruction, Deallocation { + public var allocstack: AllocStackInst { + return operand.value as! AllocStackInst + } +} + +final public class DeallocPackInst : Instruction, UnaryInstruction, Deallocation {} + +final public class DeallocStackRefInst : Instruction, UnaryInstruction, Deallocation { + public var allocRef: AllocRefInstBase { operand.value as! AllocRefInstBase } +} + +final public class DeallocRefInst : Instruction, UnaryInstruction, Deallocation {} + +final public class DeallocPartialRefInst : Instruction, Deallocation {} + +final public class DeallocBoxInst : Instruction, UnaryInstruction, Deallocation {} + +final public class DeallocExistentialBoxInst : Instruction, UnaryInstruction, Deallocation {} + //===----------------------------------------------------------------------===// // single-value instructions //===----------------------------------------------------------------------===// @@ -351,6 +365,10 @@ final public class BuiltinInst : SingleValueInstruction { public var id: ID { return bridged.BuiltinInst_getID() } + + public var substitutionMap: SubstitutionMap { + SubstitutionMap(bridged.BuiltinInst_getSubstitutionMap()) + } } final public class UpcastInst : SingleValueInstruction, UnaryInstruction { @@ -534,7 +552,7 @@ final public class RefElementAddrInst : SingleValueInstruction, UnaryInstruction public var instance: Value { operand.value } public var fieldIndex: Int { bridged.RefElementAddrInst_fieldIndex() } - public var fieldIsLet: Bool { bridged.RefElementAddrInst_fieldIsLet() != 0 } + public var fieldIsLet: Bool { bridged.RefElementAddrInst_fieldIsLet() } } final public class RefTailAddrInst : SingleValueInstruction, UnaryInstruction { @@ -664,7 +682,7 @@ class ClassifyBridgeObjectInst : SingleValueInstruction, UnaryInstruction {} final public class PartialApplyInst : SingleValueInstruction, ApplySite { public var numArguments: Int { bridged.PartialApplyInst_numArguments() } - public var isOnStack: Bool { bridged.PartialApplyInst_isOnStack() != 0 } + public var isOnStack: Bool { bridged.PartialApplyInst_isOnStack() } public func calleeArgIndex(callerArgIndex: Int) -> Int { bridged.PartialApply_getCalleeArgIndexOfFirstAppliedArg() + callerArgIndex @@ -719,13 +737,14 @@ class MarkMustCheckInst : SingleValueInstruction, UnaryInstruction {} public protocol Allocation : SingleValueInstruction { } final public class AllocStackInst : SingleValueInstruction, Allocation { + public var hasDynamicLifetime: Bool { bridged.AllocStackInst_hasDynamicLifetime() } } public class AllocRefInstBase : SingleValueInstruction, Allocation 
{ - final public var isObjC: Bool { bridged.AllocRefInstBase_isObjc() != 0 } + final public var isObjC: Bool { bridged.AllocRefInstBase_isObjc() } final public var canAllocOnStack: Bool { - bridged.AllocRefInstBase_canAllocOnStack() != 0 + bridged.AllocRefInstBase_canAllocOnStack() } } diff --git a/SwiftCompilerSources/Sources/SIL/Registration.swift b/SwiftCompilerSources/Sources/SIL/Registration.swift index d9f17d08c8708..e9a4013896cd1 100644 --- a/SwiftCompilerSources/Sources/SIL/Registration.swift +++ b/SwiftCompilerSources/Sources/SIL/Registration.swift @@ -44,8 +44,6 @@ public func registerSILClasses() { register(CopyAddrInst.self) register(EndAccessInst.self) register(EndBorrowInst.self) - register(DeallocStackInst.self) - register(DeallocStackRefInst.self) register(CondFailInst.self) register(MarkUninitializedInst.self) register(FixLifetimeInst.self) @@ -55,7 +53,6 @@ public func registerSILClasses() { register(SetDeallocatingInst.self) register(EndApplyInst.self) register(AbortApplyInst.self) - register(DeallocRefInst.self) register(StrongRetainInst.self) register(RetainValueInst.self) register(StrongReleaseInst.self) @@ -65,6 +62,13 @@ public func registerSILClasses() { register(StrongCopyUnownedValueInst.self) register(StrongCopyUnmanagedValueInst.self) register(InjectEnumAddrInst.self) + register(DeallocStackInst.self) + register(DeallocPackInst.self) + register(DeallocStackRefInst.self) + register(DeallocRefInst.self) + register(DeallocPartialRefInst.self) + register(DeallocBoxInst.self) + register(DeallocExistentialBoxInst.self) register(LoadInst.self) register(LoadWeakInst.self) register(LoadUnownedInst.self) diff --git a/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift b/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift index d6295bf530d65..f618f31b8ffaf 100644 --- a/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift +++ b/SwiftCompilerSources/Sources/SIL/SubstitutionMap.swift @@ -24,4 +24,9 @@ public struct SubstitutionMap { } public var isEmpty: Bool { bridged.empty() } + + public var replacementTypes: OptionalTypeArray { + let types = BridgedTypeArray.fromReplacementTypes(bridged) + return OptionalTypeArray(bridged: types) + } } diff --git a/SwiftCompilerSources/Sources/SIL/Type.swift b/SwiftCompilerSources/Sources/SIL/Type.swift index 22646bd68b628..ca8b4ef3d0d37 100644 --- a/SwiftCompilerSources/Sources/SIL/Type.swift +++ b/SwiftCompilerSources/Sources/SIL/Type.swift @@ -43,6 +43,8 @@ public struct Type : CustomStringConvertible, NoReflectionChildren { public var isMetatype: Bool { bridged.isMetatype() } public var isNoEscapeFunction: Bool { bridged.isNoEscapeFunction() } + public var canBeClass: swift.TypeTraitResult { bridged.canBeClass() } + /// Can only be used if the type is in fact a nominal type (`isNominal` is true). public var nominal: NominalTypeDecl { NominalTypeDecl(bridged: BridgedNominalTypeDecl(decl: bridged.getNominalOrBoundGenericNominal())) @@ -82,6 +84,26 @@ extension Type: Equatable { } } +public struct OptionalTypeArray : RandomAccessCollection, CustomReflectable { + private let bridged: BridgedTypeArray + + public var startIndex: Int { return 0 } + public var endIndex: Int { return bridged.getCount() } + + public init(bridged: BridgedTypeArray) { + self.bridged = bridged + } + + public subscript(_ index: Int) -> Type? { + bridged.getAt(index).typeOrNil + } + + public var customMirror: Mirror { + let c: [Mirror.Child] = map { (label: nil, value: $0 ?? 
"") } + return Mirror(self, children: c) + } +} + public struct NominalFieldsArray : RandomAccessCollection, FormattedLikeArray { fileprivate let type: Type fileprivate let function: Function @@ -118,6 +140,7 @@ public struct TupleElementArray : RandomAccessCollection, FormattedLikeArray { extension swift.SILType { var type: Type { Type(bridged: self) } + var typeOrNil: Type? { isNull() ? nil : type } } // TODO: use an AST type for this once we have it diff --git a/include/swift/AST/DiagnosticsSIL.def b/include/swift/AST/DiagnosticsSIL.def index 5ac7f16b11e62..4bd2a7d849782 100644 --- a/include/swift/AST/DiagnosticsSIL.def +++ b/include/swift/AST/DiagnosticsSIL.def @@ -310,8 +310,6 @@ WARNING(warn_dead_weak_store,none, "deallocated here", ()) // performance diagnostics -ERROR(performance_annotations_not_enabled,none, - "use -experimental-performance-annotations to enable performance annotations", ()) ERROR(performance_dynamic_casting,none, "dynamic casting can lock or allocate", ()) ERROR(performance_metadata,none, diff --git a/include/swift/AST/DiagnosticsSema.def b/include/swift/AST/DiagnosticsSema.def index 246506057e62c..6ba9bf8e564c9 100644 --- a/include/swift/AST/DiagnosticsSema.def +++ b/include/swift/AST/DiagnosticsSema.def @@ -7039,8 +7039,9 @@ ERROR(macro_undefined,PointsToFirstBadToken, "no macro named %0", (Identifier)) ERROR(external_macro_not_found,none, "external macro implementation type '%0.%1' could not be found for " - "macro %2; the type must be public and provided via " - "'-load-plugin-library'", (StringRef, StringRef, DeclName)) + "macro %2; the type must be public and provided by a macro target in a " + "Swift package, or via '-plugin-path' or '-load-plugin-library'", + (StringRef, StringRef, DeclName)) ERROR(macro_must_be_defined,none, "macro %0 requires a definition", (DeclName)) ERROR(external_macro_outside_macro_definition,none, diff --git a/include/swift/AST/SILOptions.h b/include/swift/AST/SILOptions.h index 525e24980de74..6d42ba5611dd1 100644 --- a/include/swift/AST/SILOptions.h +++ b/include/swift/AST/SILOptions.h @@ -125,9 +125,6 @@ class SILOptions { /// Controls whether cross module optimization is enabled. CrossModuleOptimizationMode CMOMode = CrossModuleOptimizationMode::Off; - /// Enables experimental performance annotations. - bool EnablePerformanceAnnotations = false; - /// Enables the emission of stack protectors in functions. 
bool EnableStackProtection = true; diff --git a/include/swift/Basic/Features.def b/include/swift/Basic/Features.def index d5a4ea4445de9..500b7648c6e53 100644 --- a/include/swift/Basic/Features.def +++ b/include/swift/Basic/Features.def @@ -101,6 +101,7 @@ LANGUAGE_FEATURE( LANGUAGE_FEATURE(AttachedMacros, 389, "Attached macros", hasSwiftSwiftParser) LANGUAGE_FEATURE(MoveOnly, 390, "noncopyable types", true) LANGUAGE_FEATURE(ParameterPacks, 393, "Value and type parameter packs", true) +SUPPRESSIBLE_LANGUAGE_FEATURE(LexicalLifetimes, 0, "@_eagerMove/@_noEagerMove/@_lexicalLifetimes annotations", true) UPCOMING_FEATURE(ConciseMagicFile, 274, 6) UPCOMING_FEATURE(ForwardTrailingClosures, 286, 6) diff --git a/include/swift/Option/Options.td b/include/swift/Option/Options.td index 0850bc0d2ea01..7742e73b74cac 100644 --- a/include/swift/Option/Options.td +++ b/include/swift/Option/Options.td @@ -935,7 +935,7 @@ def disableCrossModuleOptimization : Flag<["-"], "disable-cmo">, def ExperimentalPerformanceAnnotations : Flag<["-"], "experimental-performance-annotations">, Flags<[HelpHidden, FrontendOption]>, - HelpText<"Enable experimental performance annotations">; + HelpText<"Deprecated, has no effect">; def RemoveRuntimeAsserts : Flag<["-"], "remove-runtime-asserts">, Flags<[FrontendOption]>, diff --git a/include/swift/SIL/SILBridging.h b/include/swift/SIL/SILBridging.h index a87c20da04709..aa61e251effd6 100644 --- a/include/swift/SIL/SILBridging.h +++ b/include/swift/SIL/SILBridging.h @@ -237,6 +237,10 @@ struct BridgedFunction { return getFunction()->isAvailableExternally(); } + bool isTransparent() const { + return getFunction()->isTransparent() == swift::IsTransparent; + } + bool isGlobalInitFunction() const { return getFunction()->isGlobalInit(); } @@ -246,13 +250,27 @@ struct BridgedFunction { } bool hasSemanticsAttr(llvm::StringRef attrName) const { - return getFunction()->hasSemanticsAttr(attrName) ? 
1 : 0; + return getFunction()->hasSemanticsAttr(attrName); } swift::EffectsKind getEffectAttribute() const { return getFunction()->getEffectsKind(); } + swift::PerformanceConstraints getPerformanceConstraints() const { + return getFunction()->getPerfConstraints(); + } + + enum class InlineStrategy { + InlineDefault = swift::InlineDefault, + NoInline = swift::NoInline, + AlwaysInline = swift::AlwaysInline + }; + + InlineStrategy getInlineStrategy() const { + return (InlineStrategy)getFunction()->getInlineStrategy(); + } + bool needsStackProtection() const { return getFunction()->needsStackProtection(); } @@ -364,6 +382,25 @@ struct OptionalBridgedInstruction { } }; +struct BridgedTypeArray { + llvm::ArrayRef typeArray; + + SWIFT_IMPORT_UNSAFE + static BridgedTypeArray fromReplacementTypes(swift::SubstitutionMap substMap) { + return {substMap.getReplacementTypes()}; + } + + SwiftInt getCount() const { return SwiftInt(typeArray.size()); } + + SWIFT_IMPORT_UNSAFE + swift::SILType getAt(SwiftInt index) const { + auto ty = swift::CanType(typeArray[index]); + if (ty->isLegalSILType()) + return swift::SILType::getPrimitiveObjectType(ty); + return swift::SILType(); + } +}; + struct BridgedInstruction { SwiftObject obj; @@ -457,6 +494,12 @@ struct BridgedInstruction { return getAs()->getBuiltinInfo().ID; } + SWIFT_IMPORT_UNSAFE + swift::SubstitutionMap BuiltinInst_getSubstitutionMap() const { + return getAs()->getSubstitutions(); + } + + bool AddressToPointerInst_needsStackProtection() const { return getAs()->needsStackProtection(); } @@ -538,7 +581,7 @@ struct BridgedInstruction { return getAs()->getFieldIndex(); } - SwiftInt RefElementAddrInst_fieldIsLet() const { + bool RefElementAddrInst_fieldIsLet() const { return getAs()->getField()->isLet(); } @@ -569,15 +612,19 @@ struct BridgedInstruction { return swift::ApplySite(getInst()).getCalleeArgIndexOfFirstAppliedArg(); } - SwiftInt PartialApplyInst_isOnStack() const { - return getAs()->isOnStack() ? 1 : 0; + bool PartialApplyInst_isOnStack() const { + return getAs()->isOnStack(); + } + + bool AllocStackInst_hasDynamicLifetime() const { + return getAs()->hasDynamicLifetime(); } - SwiftInt AllocRefInstBase_isObjc() const { + bool AllocRefInstBase_isObjc() const { return getAs()->isObjC(); } - SwiftInt AllocRefInstBase_canAllocOnStack() const { + bool AllocRefInstBase_canAllocOnStack() const { return getAs()->canAllocOnStack(); } @@ -613,12 +660,12 @@ struct BridgedInstruction { return getAs()->getEnforcement() == swift::SILAccessEnforcement::Static; } - SwiftInt CopyAddrInst_isTakeOfSrc() const { - return getAs()->isTakeOfSrc() ? 1 : 0; + bool CopyAddrInst_isTakeOfSrc() const { + return getAs()->isTakeOfSrc(); } - SwiftInt CopyAddrInst_isInitializationOfDest() const { - return getAs()->isInitializationOfDest() ? 1 : 0; + bool CopyAddrInst_isInitializationOfDest() const { + return getAs()->isInitializationOfDest(); } void RefCountingInst_setIsAtomic(bool isAtomic) const { diff --git a/include/swift/SIL/SILType.h b/include/swift/SIL/SILType.h index b6400c55b1c96..7ebd47b007f93 100644 --- a/include/swift/SIL/SILType.h +++ b/include/swift/SIL/SILType.h @@ -414,6 +414,10 @@ class SILType { return getASTType()->hasOpenedExistential(); } + TypeTraitResult canBeClass() const { + return getASTType()->canBeClass(); + } + /// Returns true if the referenced type is expressed in terms of one /// or more local archetypes. 
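As a usage sketch for the newly bridged function properties (transparency, performance constraints, inline strategy), a hypothetical module pass could query their Swift-side counterparts as below. The pass name and body are invented, `Function.name` is assumed to be printable, and registration would go through `registerPass` as shown earlier; this is a sketch, not part of the change.

```swift
import SIL

// Hypothetical diagnostic pass (not part of this change): lists functions that
// carry performance-relevant attributes, using the properties added to
// `Function` in this patch (performanceConstraints, inlineStrategy, isTransparent).
let dumpPerformanceInfo = ModulePass(name: "dump-performance-info") {
  (moduleContext: ModulePassContext) in

  for f in moduleContext.functions where f.isDefinition {
    if f.performanceConstraints != .none || f.isTransparent || f.inlineStrategy != .automatic {
      print("\(f.name): perf=\(f.performanceConstraints), " +
            "inline=\(f.inlineStrategy), transparent=\(f.isTransparent)")
    }
  }
}
```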
bool hasLocalArchetype() const { diff --git a/include/swift/SILOptimizer/OptimizerBridging.h b/include/swift/SILOptimizer/OptimizerBridging.h index 04494fc773d6e..c0b60c8ff6431 100644 --- a/include/swift/SILOptimizer/OptimizerBridging.h +++ b/include/swift/SILOptimizer/OptimizerBridging.h @@ -19,6 +19,7 @@ #include "swift/SILOptimizer/Analysis/BasicCalleeAnalysis.h" #include "swift/SILOptimizer/Analysis/DeadEndBlocksAnalysis.h" #include "swift/SILOptimizer/Analysis/DominanceAnalysis.h" +#include "swift/SILOptimizer/Utils/InstOptUtils.h" SWIFT_BEGIN_NULLABILITY_ANNOTATIONS @@ -202,11 +203,23 @@ struct BridgedPassContext { block.getBlock()->eraseFromParent(); } - bool tryDeleteDeadClosure(BridgedInstruction closure) const; + bool tryOptimizeApplyOfPartialApply(BridgedInstruction closure) const; + + bool tryDeleteDeadClosure(BridgedInstruction closure, bool needKeepArgsAlive) const; + + struct DevirtResult { + OptionalBridgedInstruction newApply; + bool cfgChanged; + }; + + SWIFT_IMPORT_UNSAFE + DevirtResult tryDevirtualizeApply(BridgedInstruction apply, bool isMandatory) const; SWIFT_IMPORT_UNSAFE OptionalBridgedValue constantFoldBuiltin(BridgedInstruction builtin) const; + bool specializeAppliesInFunction(BridgedFunction function, bool isMandatory) const; + void createStaticInitializer(BridgedGlobalVar global, BridgedInstruction initValue) const; struct StaticInitCloneResult { @@ -217,6 +230,8 @@ struct BridgedPassContext { SWIFT_IMPORT_UNSAFE StaticInitCloneResult copyStaticInitializer(BridgedValue initValue, BridgedBuilder b) const; + void inlineFunction(BridgedInstruction apply, bool mandatoryInline) const; + SWIFT_IMPORT_UNSAFE BridgedValue getSILUndef(swift::SILType type) const { return {swift::SILUndef::get(type, *invocation->getFunction())}; @@ -380,9 +395,18 @@ struct BridgedPassContext { } SWIFT_IMPORT_UNSAFE - OptionalBridgedFunction loadFunction(llvm::StringRef name) const { + OptionalBridgedFunction loadFunction(llvm::StringRef name, bool loadCalleesRecursively) const { + swift::SILModule *mod = invocation->getPassManager()->getModule(); + return {mod->loadFunction(name, loadCalleesRecursively ? swift::SILModule::LinkingMode::LinkAll + : swift::SILModule::LinkingMode::LinkNormal)}; + } + + SWIFT_IMPORT_UNSAFE + void loadFunction(BridgedFunction function, bool loadCalleesRecursively) const { swift::SILModule *mod = invocation->getPassManager()->getModule(); - return {mod->loadFunction(name, swift::SILModule::LinkingMode::LinkNormal)}; + mod->loadFunction(function.getFunction(), + loadCalleesRecursively ? 
swift::SILModule::LinkingMode::LinkAll + : swift::SILModule::LinkingMode::LinkNormal); } SWIFT_IMPORT_UNSAFE @@ -421,6 +445,17 @@ struct BridgedPassContext { return mod->getOptions().EnableMoveInoutStackProtection; } + enum class AssertConfiguration { + Debug = swift::SILOptions::Debug, + Release = swift::SILOptions::Release, + Unchecked = swift::SILOptions::Unchecked + }; + + AssertConfiguration getAssertConfiguration() const { + swift::SILModule *mod = invocation->getPassManager()->getModule(); + return (AssertConfiguration)mod->getOptions().AssertConfig; + } + bool enableSimplificationFor(BridgedInstruction inst) const; }; diff --git a/include/swift/SILOptimizer/PassManager/Passes.def b/include/swift/SILOptimizer/PassManager/Passes.def index 1ea86e7e20f82..7e69934cbb723 100644 --- a/include/swift/SILOptimizer/PassManager/Passes.def +++ b/include/swift/SILOptimizer/PassManager/Passes.def @@ -248,8 +248,6 @@ PASS(RedundantLoadElimination, "redundant-load-elim", "Redundant Load Elimination") PASS(DeadStoreElimination, "dead-store-elim", "Dead Store Elimination") -PASS(MandatoryGenericSpecializer, "mandatory-generic-specializer", - "Mandatory Generic Function Specialization on Static Types") PASS(GenericSpecializer, "generic-specializer", "Generic Function Specialization on Static Types") PASS(ExistentialSpecializer, "existential-specializer", @@ -382,6 +380,8 @@ SWIFT_FUNCTION_PASS(SILPrinter, "sil-printer", "Test pass which prints the SIL of a function") SWIFT_MODULE_PASS(FunctionUsesDumper, "dump-function-uses", "Dump the results of FunctionUses") +SWIFT_MODULE_PASS(MandatoryPerformanceOptimizations, "mandatory-performance-optimizations", + "Performs optimizations for performance-annotated functions") SWIFT_MODULE_PASS(ReadOnlyGlobalVariablesPass, "read-only-global-variables", "Converts read-only var-globals to let-globals") SWIFT_MODULE_PASS(StackProtection, "stack-protection", @@ -400,6 +400,8 @@ SWIFT_FUNCTION_PASS(LateOnoneSimplification, "late-onone-simplification", "Peephole simplifications which can only run late in the -Onone pipeline") SWIFT_FUNCTION_PASS(CleanupDebugSteps, "cleanup-debug-steps", "Cleanup debug_step instructions for Onone") +SWIFT_FUNCTION_PASS(NamedReturnValueOptimization, "named-return-value-optimization", + "Optimize copies to an indirect return value") PASS(SimplifyBBArgs, "simplify-bb-args", "SIL Block Argument Simplification") PASS(SimplifyCFG, "simplify-cfg", diff --git a/include/swift/SILOptimizer/Utils/CanonicalizeBorrowScope.h b/include/swift/SILOptimizer/Utils/CanonicalizeBorrowScope.h index f4fe4e72d3a83..92c5438e71dfd 100644 --- a/include/swift/SILOptimizer/Utils/CanonicalizeBorrowScope.h +++ b/include/swift/SILOptimizer/Utils/CanonicalizeBorrowScope.h @@ -142,7 +142,12 @@ class CanonicalizeBorrowScope { assert(borrow && persistentCopies.empty() && (!liveness || liveness->empty())); + borrowedValue = BorrowedValue(); + defUseWorklist.clear(); + blockWorklist.clear(); + persistentCopies.clear(); updatedCopies.clear(); + borrowedValue = borrow; if (liveness) liveness->initializeDef(borrowedValue.value); diff --git a/include/swift/SILOptimizer/Utils/Devirtualize.h b/include/swift/SILOptimizer/Utils/Devirtualize.h index 1090b490a0e59..05c681632ff0d 100644 --- a/include/swift/SILOptimizer/Utils/Devirtualize.h +++ b/include/swift/SILOptimizer/Utils/Devirtualize.h @@ -69,7 +69,8 @@ SubstitutionMap getWitnessMethodSubstitutions(SILModule &Module, ApplySite AI, /// Return the new apply and true if the CFG was also modified. 
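For reference, a self-contained source-level example of the kind of call that `tryDevirtualizeApply` resolves; the class names are invented for illustration:

```swift
// Illustration only: because the dynamic type of `c` is statically known to be
// Circle, the class_method call to `area()` can be devirtualized into a direct
// call (and then becomes a candidate for inlining).
class Shape {
  func area() -> Double { 0 }
}

final class Circle: Shape {
  let radius: Double
  init(radius: Double) { self.radius = radius }
  override func area() -> Double { Double.pi * radius * radius }
}

func circleArea(_ radius: Double) -> Double {
  let c: Shape = Circle(radius: radius)
  return c.area()
}
```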
std::pair tryDevirtualizeApply(ApplySite AI, ClassHierarchyAnalysis *CHA, - OptRemark::Emitter *ORE = nullptr); + OptRemark::Emitter *ORE = nullptr, + bool isMandatory = false); bool canDevirtualizeApply(FullApplySite AI, ClassHierarchyAnalysis *CHA); bool canDevirtualizeClassMethod(FullApplySite AI, ClassDecl *CD, OptRemark::Emitter *ORE = nullptr, @@ -108,7 +109,9 @@ tryDevirtualizeClassMethod(FullApplySite AI, SILValue ClassInstance, /// the original apply site. /// /// Return the new apply and true if the CFG was also modified. -std::pair tryDevirtualizeWitnessMethod(ApplySite AI, OptRemark::Emitter *ORE); +std::pair tryDevirtualizeWitnessMethod(ApplySite AI, + OptRemark::Emitter *ORE, + bool isMandatory); /// Delete a successfully-devirtualized apply site. This must always be /// called after devirtualizing an apply; not only is it not semantically diff --git a/include/swift/SILOptimizer/Utils/InstOptUtils.h b/include/swift/SILOptimizer/Utils/InstOptUtils.h index 650c37a237a51..12c5ec847607c 100644 --- a/include/swift/SILOptimizer/Utils/InstOptUtils.h +++ b/include/swift/SILOptimizer/Utils/InstOptUtils.h @@ -569,13 +569,16 @@ IntegerLiteralInst *optimizeBuiltinCanBeObjCClass(BuiltinInst *bi, /// Performs "predictable" memory access optimizations. /// /// See the PredictableMemoryAccessOptimizations pass. -bool optimizeMemoryAccesses(SILFunction &fn); +bool optimizeMemoryAccesses(SILFunction *fn); /// Performs "predictable" dead allocation optimizations. /// /// See the PredictableDeadAllocationElimination pass. -bool eliminateDeadAllocations(SILFunction &fn); +bool eliminateDeadAllocations(SILFunction *fn); +bool specializeAppliesInFunction(SILFunction &F, + SILTransform *transform, + bool isMandatory); } // end namespace swift #endif // SWIFT_SILOPTIMIZER_UTILS_INSTOPTUTILS_H diff --git a/include/swift/SILOptimizer/Utils/InstructionDeleter.h b/include/swift/SILOptimizer/Utils/InstructionDeleter.h index 0282c0bd6d823..9cda730d23d4d 100644 --- a/include/swift/SILOptimizer/Utils/InstructionDeleter.h +++ b/include/swift/SILOptimizer/Utils/InstructionDeleter.h @@ -159,6 +159,7 @@ class InstructionDeleter { /// /// Calls callbacks.notifyWillBeDeleted(). bool deleteIfDead(SILInstruction *inst); + bool deleteIfDead(SILInstruction *inst, bool fixLifetime); /// Delete the instruction \p inst, ignoring its side effects. If any operand /// definitions will become dead after deleting this instruction, track them diff --git a/include/swift/SILOptimizer/Utils/StackNesting.h b/include/swift/SILOptimizer/Utils/StackNesting.h index e7f23e9073b93..3424d73f53b2e 100644 --- a/include/swift/SILOptimizer/Utils/StackNesting.h +++ b/include/swift/SILOptimizer/Utils/StackNesting.h @@ -77,6 +77,11 @@ class StackNesting { /// Used in the setup function to walk over the CFG. bool visited = false; + + /// True for dead-end blocks, i.e. blocks from which there is no path to + /// a function exit, e.g. blocks which end with `unreachable` or an + /// infinite loop. + bool isDeadEnd = false; }; /// Data stored for each stack location (= allocation). diff --git a/include/swift/Sema/ConstraintSystem.h b/include/swift/Sema/ConstraintSystem.h index 736e0502cb9d4..fe165d13f8ba2 100644 --- a/include/swift/Sema/ConstraintSystem.h +++ b/include/swift/Sema/ConstraintSystem.h @@ -2925,7 +2925,7 @@ class ConstraintSystem { /// Whether the argument \p Arg occurs after the code completion token and /// thus should be ignored and not generate any fixes. 
bool isArgumentIgnoredForCodeCompletion(Expr *Arg) const { - return IgnoredArguments.count(Arg) > 0; + return IgnoredArguments.count(Arg) > 0 && isForCodeCompletion(); } /// Whether the constraint system has ignored any arguments for code diff --git a/lib/AST/ASTPrinter.cpp b/lib/AST/ASTPrinter.cpp index 1f21752637cf0..91d4d9ddebc7a 100644 --- a/lib/AST/ASTPrinter.cpp +++ b/lib/AST/ASTPrinter.cpp @@ -3340,6 +3340,23 @@ static bool usesFeatureFreestandingExpressionMacros(Decl *decl) { return macro->getMacroRoles().contains(MacroRole::Expression); } +static bool usesFeatureLexicalLifetimes(Decl *decl) { + return decl->getAttrs().hasAttribute() + || decl->getAttrs().hasAttribute() + || decl->getAttrs().hasAttribute(); +} + +static void +suppressingFeatureLexicalLifetimes(PrintOptions &options, + llvm::function_ref action) { + unsigned originalExcludeAttrCount = options.ExcludeAttrList.size(); + options.ExcludeAttrList.push_back(DAK_EagerMove); + options.ExcludeAttrList.push_back(DAK_NoEagerMove); + options.ExcludeAttrList.push_back(DAK_LexicalLifetimes); + action(); + options.ExcludeAttrList.resize(originalExcludeAttrCount); +} + static void suppressingFeatureNoAsyncAvailability(PrintOptions &options, llvm::function_ref action) { diff --git a/lib/DriverTool/sil_opt_main.cpp b/lib/DriverTool/sil_opt_main.cpp index 0602dcbe5a9d2..6dd232562ad41 100644 --- a/lib/DriverTool/sil_opt_main.cpp +++ b/lib/DriverTool/sil_opt_main.cpp @@ -638,7 +638,6 @@ int sil_opt_main(ArrayRef argv, void *MainAddr) { SILOpts.OptRecordFile = options.RemarksFilename; SILOpts.OptRecordPasses = options.RemarksPasses; SILOpts.checkSILModuleLeaks = true; - SILOpts.EnablePerformanceAnnotations = true; SILOpts.EnableStackProtection = true; SILOpts.EnableMoveInoutStackProtection = options.EnableMoveInoutStackProtection; diff --git a/lib/Frontend/CompilerInvocation.cpp b/lib/Frontend/CompilerInvocation.cpp index b96bd1ed45a5e..74630bbafff90 100644 --- a/lib/Frontend/CompilerInvocation.cpp +++ b/lib/Frontend/CompilerInvocation.cpp @@ -2058,8 +2058,6 @@ static bool ParseSILArgs(SILOptions &Opts, ArgList &Args, } else if (Args.hasArg(OPT_EnbaleDefaultCMO)) { Opts.CMOMode = CrossModuleOptimizationMode::Default; } - Opts.EnablePerformanceAnnotations |= - Args.hasArg(OPT_ExperimentalPerformanceAnnotations); Opts.EnableStackProtection = Args.hasFlag(OPT_enable_stack_protector, OPT_disable_stack_protector, Opts.EnableStackProtection); diff --git a/lib/IDE/ArgumentCompletion.cpp b/lib/IDE/ArgumentCompletion.cpp index abd9abdcdf9f3..3e6bd0ed650fc 100644 --- a/lib/IDE/ArgumentCompletion.cpp +++ b/lib/IDE/ArgumentCompletion.cpp @@ -53,7 +53,10 @@ bool ArgumentTypeCheckCompletionCallback::addPossibleParams( // Since not all function types are backed by declarations (e.g. closure // paramters), `DeclParam` might be `nullptr`. const AnyFunctionType::Param *TypeParam = &ParamsToPass[Idx]; - const ParamDecl *DeclParam = getParameterAt(Res.FuncDeclRef, Idx); + const ParamDecl *DeclParam = nullptr; + if (Res.FuncDeclRef) { + DeclParam = getParameterAt(Res.FuncDeclRef, Idx); + } bool Required = true; if (DeclParam && DeclParam->isDefaultArgument()) { diff --git a/lib/IRGen/GenCall.cpp b/lib/IRGen/GenCall.cpp index 3855c97b8ed65..5e5ec7f830774 100644 --- a/lib/IRGen/GenCall.cpp +++ b/lib/IRGen/GenCall.cpp @@ -5145,7 +5145,8 @@ Callee irgen::getSwiftFunctionPointerCallee( PointerAuthInfo::forFunctionPointer(IGF.IGM, calleeInfo.OrigFnType); auto fn = isClosure ? 
FunctionPointer::createSignedClosure(calleeInfo.OrigFnType, fnPtr, authInfo, sig) : - FunctionPointer::createSigned(calleeInfo.OrigFnType, fnPtr, authInfo, sig); + FunctionPointer::createSigned(calleeInfo.OrigFnType, fnPtr, authInfo, sig, + true); if (castOpaqueToRefcountedContext) { assert(dataPtr && dataPtr->getType() == IGF.IGM.OpaquePtrTy && "Expecting trivial closure context"); diff --git a/lib/SILGen/SILGenApply.cpp b/lib/SILGen/SILGenApply.cpp index 43a568f0fa680..d53c93365f6ea 100644 --- a/lib/SILGen/SILGenApply.cpp +++ b/lib/SILGen/SILGenApply.cpp @@ -4261,7 +4261,19 @@ static void emitBorrowedLValueRecursive(SILGenFunction &SGF, } } + // TODO: This does not take into account resilience, we should probably use + // getArgumentType()... but we do not have the SILFunctionType here... assert(param.getInterfaceType() == value.getType().getASTType()); + + // If we have an indirect_guaranteed argument, move this using store_borrow + // into an alloc_stack. + if (SGF.silConv.useLoweredAddresses() && + param.isIndirectInGuaranteed() && value.getType().isObject()) { + SILValue alloca = SGF.emitTemporaryAllocation(loc, value.getType()); + value = SGF.emitFormalEvaluationManagedStoreBorrow(loc, value.getValue(), + alloca); + } + args[argIndex++] = value; } diff --git a/lib/SILGen/SILGenBuilder.cpp b/lib/SILGen/SILGenBuilder.cpp index 143a28ceacc23..84e6304780aa5 100644 --- a/lib/SILGen/SILGenBuilder.cpp +++ b/lib/SILGen/SILGenBuilder.cpp @@ -778,9 +778,19 @@ ManagedValue SILGenBuilder::createStoreBorrow(SILLocation loc, SILValue address) { assert(value.getOwnershipKind() == OwnershipKind::Guaranteed); auto *sbi = createStoreBorrow(loc, value.getValue(), address); + SGF.Cleanups.pushCleanup(sbi); return ManagedValue(sbi, CleanupHandle::invalid()); } +ManagedValue SILGenBuilder::createFormalAccessStoreBorrow(SILLocation loc, + ManagedValue value, + SILValue address) { + assert(value.getOwnershipKind() == OwnershipKind::Guaranteed); + auto *sbi = createStoreBorrow(loc, value.getValue(), address); + return SGF.emitFormalEvaluationManagedBorrowedRValueWithCleanup( + loc, value.getValue(), sbi); +} + ManagedValue SILGenBuilder::createStoreBorrowOrTrivial(SILLocation loc, ManagedValue value, SILValue address) { diff --git a/lib/SILGen/SILGenBuilder.h b/lib/SILGen/SILGenBuilder.h index 97b92d01547e5..61a2d21a983bf 100644 --- a/lib/SILGen/SILGenBuilder.h +++ b/lib/SILGen/SILGenBuilder.h @@ -202,6 +202,8 @@ class SILGenBuilder : public SILBuilder { using SILBuilder::createStoreBorrow; ManagedValue createStoreBorrow(SILLocation loc, ManagedValue value, SILValue address); + ManagedValue createFormalAccessStoreBorrow(SILLocation loc, ManagedValue value, + SILValue address); /// Create a store_borrow if we have a non-trivial value and a store [trivial] /// otherwise. diff --git a/lib/SILGen/SILGenFunction.cpp b/lib/SILGen/SILGenFunction.cpp index eff93dbc86f95..45f5340b86dab 100644 --- a/lib/SILGen/SILGenFunction.cpp +++ b/lib/SILGen/SILGenFunction.cpp @@ -32,6 +32,7 @@ #include "swift/AST/PropertyWrappers.h" #include "swift/AST/SourceFile.h" #include "swift/AST/Types.h" +#include "swift/Basic/Defer.h" #include "swift/SIL/SILArgument.h" #include "swift/SIL/SILProfiler.h" #include "swift/SIL/SILUndef.h" @@ -201,7 +202,17 @@ const SILDebugScope *SILGenFunction::getScopeOrNull(SILLocation Loc, SourceLoc SLoc = Loc.getSourceLoc(); if (!SF || LastSourceLoc == SLoc) return nullptr; - return getOrCreateScope(SLoc); + // Prime VarDeclScopeMap. 
+ auto Scope = getOrCreateScope(SLoc); + if (ForMetaInstruction) + if (ValueDecl *ValDecl = Loc.getAsASTNode()) { + // The source location of a VarDecl isn't necessarily in the same scope + // that the variable resides in for name lookup purposes. + auto ValueScope = VarDeclScopeMap.find(ValDecl); + if (ValueScope != VarDeclScopeMap.end()) + return getOrCreateScope(ValueScope->second, F.getDebugScope()); + } + return Scope; } const SILDebugScope *SILGenFunction::getOrCreateScope(SourceLoc SLoc) { @@ -378,9 +389,14 @@ SILGenFunction::getOrCreateScope(const ast_scope::ASTScopeImpl *ASTScope, if (It != ScopeMap.end()) return It->second; - LLVM_DEBUG( ASTScope->print(llvm::errs(), 0, false, false) ); + LLVM_DEBUG(ASTScope->print(llvm::errs(), 0, false, false)); - SILDebugScope *SILScope = nullptr; + auto cache = [&](const SILDebugScope *SILScope) { + ScopeMap.insert({{ASTScope, InlinedAt}, SILScope}); + assert(SILScope->getParentFunction() == &F && + "inlinedAt points to other function"); + return SILScope; + }; // Decide whether to pick a parent scope instead. if (ASTScope->ignoreInDebugInfo()) { @@ -390,11 +406,37 @@ SILGenFunction::getOrCreateScope(const ast_scope::ASTScopeImpl *ASTScope, return ParentScope->InlinedCallSite != InlinedAt ? FnScope : ParentScope; } + // Collect all variable declarations in this scope. + struct Consumer : public namelookup::AbstractASTScopeDeclConsumer { + const ast_scope::ASTScopeImpl *ASTScope; + VarDeclScopeMapTy &VarDeclScopeMap; + Consumer(const ast_scope::ASTScopeImpl *ASTScope, + VarDeclScopeMapTy &VarDeclScopeMap) + : ASTScope(ASTScope), VarDeclScopeMap(VarDeclScopeMap) {} + + bool consume(ArrayRef values, + NullablePtr baseDC) override { + for (auto &value : values) { + assert(VarDeclScopeMap.count(value) == 0 && "VarDecl appears twice"); + VarDeclScopeMap.insert({value, ASTScope}); + } + return false; + } + bool lookInMembers(const DeclContext *) const override { return false; } +#ifndef NDEBUG + void startingNextLookupStep() override {} + void finishingLookup(std::string) const override {} + bool isTargetLookup() const override { return false; } +#endif + }; + Consumer consumer(ASTScope, VarDeclScopeMap); + ASTScope->lookupLocalsOrMembers(consumer); + // Collapse BraceStmtScopes whose parent is a .*BodyScope. if (auto Parent = ASTScope->getParent().getPtrOrNull()) if (Parent->getSourceRangeOfThisASTNode() == ASTScope->getSourceRangeOfThisASTNode()) - return getOrCreateScope(Parent, FnScope, InlinedAt); + return cache(getOrCreateScope(Parent, FnScope, InlinedAt)); // The calls to defer closures have cleanup source locations pointing to the // defer. Reparent them into the current debug scope. @@ -402,32 +444,30 @@ SILGenFunction::getOrCreateScope(const ast_scope::ASTScopeImpl *ASTScope, while (AncestorScope && AncestorScope != FnASTScope && !ScopeMap.count({AncestorScope, InlinedAt})) { if (auto *FD = dyn_cast_or_null( - AncestorScope->getDeclIfAny().getPtrOrNull())) { + AncestorScope->getDeclIfAny().getPtrOrNull())) { if (cast(FD) != FunctionDC) - return B.getCurrentDebugScope(); + return cache(B.getCurrentDebugScope()); // This is this function's own scope. // If this is the outermost BraceStmt scope, ignore it. if (AncestorScope == ASTScope->getParent().getPtrOrNull()) - return FnScope; + return cache(FnScope); break; } AncestorScope = AncestorScope->getParent().getPtrOrNull(); }; + // Create the scope and recursively its parents. getLookupParent implements a + // special case for GuardBlockStmt, which is nested incorrectly. 
+ auto *ParentScope = ASTScope->getLookupParent().getPtrOrNull(); const SILDebugScope *Parent = - getOrCreateScope(ASTScope->getParent().getPtrOrNull(), FnScope, InlinedAt); + getOrCreateScope(ParentScope, FnScope, InlinedAt); SourceLoc SLoc = ASTScope->getSourceRangeOfThisASTNode().Start; RegularLocation Loc(SLoc); - SILScope = new (SGM.M) + auto *SILScope = new (SGM.M) SILDebugScope(Loc, FnScope->getParentFunction(), Parent, InlinedAt); - ScopeMap.insert({{ASTScope, InlinedAt}, SILScope}); - - assert(SILScope->getParentFunction() == &F && - "inlinedAt points to other function"); - - return SILScope; + return cache(SILScope); } void SILGenFunction::enterDebugScope(SILLocation Loc, bool isBindingScope) { diff --git a/lib/SILGen/SILGenFunction.h b/lib/SILGen/SILGenFunction.h index e152792c81616..c0bb68c0b490a 100644 --- a/lib/SILGen/SILGenFunction.h +++ b/lib/SILGen/SILGenFunction.h @@ -366,6 +366,10 @@ class LLVM_LIBRARY_VISIBILITY SILGenFunction SourceLoc LastSourceLoc; using ASTScopeTy = ast_scope::ASTScopeImpl; const ASTScopeTy *FnASTScope = nullptr; + using VarDeclScopeMapTy = + llvm::SmallDenseMap; + /// The ASTScope each variable declaration belongs to. + VarDeclScopeMapTy VarDeclScopeMap; /// Caches one SILDebugScope for each ASTScope. llvm::SmallDenseMap, const SILDebugScope *, 16> diff --git a/lib/SILGen/SILGenProlog.cpp b/lib/SILGen/SILGenProlog.cpp index a4f81a417c18f..ac71fe161e80f 100644 --- a/lib/SILGen/SILGenProlog.cpp +++ b/lib/SILGen/SILGenProlog.cpp @@ -39,8 +39,12 @@ static void diagnose(ASTContext &Context, SourceLoc loc, Diag diag, } SILValue SILGenFunction::emitSelfDeclForDestructor(VarDecl *selfDecl) { + SILFunctionConventions conventions = F.getConventionsInContext(); + // Emit the implicit 'self' argument. - SILType selfType = getLoweredType(selfDecl->getType()); + SILType selfType = conventions.getSILArgumentType( + conventions.getNumSILArguments() - 1, F.getTypeExpansionContext()); + selfType = F.mapTypeIntoContext(selfType); SILValue selfValue = F.begin()->createFunctionArgument(selfType, selfDecl); // If we have a move only type, then mark it with mark_must_check so we can't diff --git a/lib/SILOptimizer/LoopTransforms/ArrayBoundsCheckOpts.cpp b/lib/SILOptimizer/LoopTransforms/ArrayBoundsCheckOpts.cpp index 29b272abb1471..9a7c867865f02 100644 --- a/lib/SILOptimizer/LoopTransforms/ArrayBoundsCheckOpts.cpp +++ b/lib/SILOptimizer/LoopTransforms/ArrayBoundsCheckOpts.cpp @@ -530,6 +530,18 @@ static SILValue getSub(SILLocation Loc, SILValue Val, unsigned SubVal, return B.createTupleExtract(Loc, AI, 0); } +static SILValue getAdd(SILLocation Loc, SILValue Val, unsigned AddVal, + SILBuilder &B) { + SmallVector Args(1, Val); + Args.push_back(B.createIntegerLiteral(Loc, Val->getType(), AddVal)); + Args.push_back(B.createIntegerLiteral( + Loc, SILType::getBuiltinIntegerType(1, B.getASTContext()), -1)); + + auto *AI = B.createBuiltinBinaryFunctionWithOverflow( + Loc, "sadd_with_overflow", Args); + return B.createTupleExtract(Loc, AI, 0); +} + /// A canonical induction variable incremented by one from Start to End-1. struct InductionInfo { SILArgument *HeaderVal; @@ -552,12 +564,12 @@ struct InductionInfo { SILInstruction *getInstruction() { return Inc; } - SILValue getFirstValue() { - return Start; + SILValue getFirstValue(SILLocation &Loc, SILBuilder &B, unsigned AddVal) { + return AddVal != 0 ? 
getAdd(Loc, Start, AddVal, B) : Start; } - SILValue getLastValue(SILLocation &Loc, SILBuilder &B) { - return getSub(Loc, End, 1, B); + SILValue getLastValue(SILLocation &Loc, SILBuilder &B, unsigned SubVal) { + return SubVal != 0 ? getSub(Loc, End, SubVal, B) : End; } /// If necessary insert an overflow for this induction variable. @@ -718,8 +730,11 @@ static bool isGuaranteedToBeExecuted(DominanceInfo *DT, SILBasicBlock *Block, /// induction variable. class AccessFunction { InductionInfo *Ind; + bool preIncrement; + + AccessFunction(InductionInfo *I, bool isPreIncrement = false) + : Ind(I), preIncrement(isPreIncrement) {} - AccessFunction(InductionInfo *I) { Ind = I; } public: operator bool() { return Ind != nullptr; } @@ -727,19 +742,50 @@ class AccessFunction { static AccessFunction getLinearFunction(SILValue Idx, InductionAnalysis &IndVars) { // Match the actual induction variable buried in the integer struct. - // %2 = struct $Int(%1 : $Builtin.Word) - // = apply %check_bounds(%array, %2) : $@convention(thin) (Int, ArrayInt) -> () + // bb(%ivar) + // %2 = struct $Int(%ivar : $Builtin.Word) + // = apply %check_bounds(%array, %2) : + // or + // bb(%ivar1) + // %ivar2 = builtin "sadd_with_overflow_Int64"(%ivar1,...) + // %t = tuple_extract %ivar2 + // %s = struct $Int(%t : $Builtin.Word) + // = apply %check_bounds(%array, %s) : + + bool preIncrement = false; + auto ArrayIndexStruct = dyn_cast(Idx); if (!ArrayIndexStruct) return nullptr; auto AsArg = dyn_cast(ArrayIndexStruct->getElements()[0]); - if (!AsArg) - return nullptr; + + if (!AsArg) { + auto *TupleExtract = + dyn_cast(ArrayIndexStruct->getElements()[0]); + + if (!TupleExtract) { + return nullptr; + } + + auto *Builtin = dyn_cast(TupleExtract->getOperand()); + if (!Builtin || Builtin->getBuiltinKind() != BuiltinValueKind::SAddOver) { + return nullptr; + } + + // We don't check if the second argument to the builtin is loop invariant + // here, because only induction variables with a +1 increment are + // considered for bounds check optimization. + AsArg = dyn_cast(Builtin->getArguments()[0]); + if (!AsArg) { + return nullptr; + } + preIncrement = true; + } if (auto *Ind = IndVars[AsArg]) - return AccessFunction(Ind); + return AccessFunction(Ind, preIncrement); return nullptr; } @@ -759,7 +805,7 @@ class AccessFunction { SILBuilderWithScope Builder(Preheader->getTerminator(), AI); // Get the first induction value. - auto FirstVal = Ind->getFirstValue(); + auto FirstVal = Ind->getFirstValue(Loc, Builder, preIncrement ? 1 : 0); // Clone the struct for the start index. auto Start = cast(CheckToHoist.getIndex()) ->clone(Preheader->getTerminator()); @@ -771,7 +817,7 @@ class AccessFunction { NewCheck->setOperand(1, Start); // Get the last induction value. - auto LastVal = Ind->getLastValue(Loc, Builder); + auto LastVal = Ind->getLastValue(Loc, Builder, preIncrement ? 0 : 1); // Clone the struct for the end index.
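(Illustrative sketch, not part of the patch: a Swift loop of the shape the extended `getLinearFunction` matching is meant to cover. The function name and body below are assumptions for illustration only; the relevant point is that the array index is the pre-incremented induction variable, so with `preIncrement` set the hoisted checks are built from `Start + 1` and `End` via the new `getFirstValue`/`getLastValue` parameters, instead of `Start` and `End - 1`.)

```swift
// Hypothetical example only: the bounds check is performed on the
// pre-incremented induction variable. At SIL level the index is built from
// the result of `sadd_with_overflow` applied to the loop's block argument,
// which is the pattern the new tuple_extract/builtin matching recognizes.
func sumSkippingFirst(_ a: [Int]) -> Int {
  var sum = 0
  var i = 0
  while i < a.count - 1 {
    i += 1            // the access below uses the incremented value
    sum += a[i]
  }
  return sum
}
```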
auto End = cast(CheckToHoist.getIndex()) ->clone(Preheader->getTerminator()); diff --git a/lib/SILOptimizer/Mandatory/DiagnoseInvalidEscapingCaptures.cpp b/lib/SILOptimizer/Mandatory/DiagnoseInvalidEscapingCaptures.cpp index 7f3b56612c70b..e5e5fb1c4135a 100644 --- a/lib/SILOptimizer/Mandatory/DiagnoseInvalidEscapingCaptures.cpp +++ b/lib/SILOptimizer/Mandatory/DiagnoseInvalidEscapingCaptures.cpp @@ -15,6 +15,8 @@ // //===----------------------------------------------------------------------===// +#define DEBUG_TYPE "sil-diagnose-invalid-escaping-captures" + #include "swift/AST/ASTContext.h" #include "swift/AST/DiagnosticsSIL.h" #include "swift/AST/Expr.h" @@ -199,6 +201,8 @@ bool isUseOfSelfInInitializer(Operand *oper) { } static bool checkForEscapingPartialApplyUses(PartialApplyInst *PAI) { + LLVM_DEBUG(llvm::dbgs() << "Checking for escaping partial apply uses.\n"); + // Avoid exponential path exploration. SmallVector uses; llvm::SmallDenseSet visited; @@ -215,10 +219,16 @@ static bool checkForEscapingPartialApplyUses(PartialApplyInst *PAI) { bool foundEscapingUse = false; while (!uses.empty()) { Operand *oper = uses.pop_back_val(); - foundEscapingUse |= checkNoEscapePartialApplyUse(oper, [&](SILValue V) { + LLVM_DEBUG(llvm::dbgs() << "Visiting user: " << *oper->getUser()); + bool localFoundEscapingUse = checkNoEscapePartialApplyUse(oper, [&](SILValue V) { for (Operand *use : V->getUses()) uselistInsert(use); }); + LLVM_DEBUG( + if (localFoundEscapingUse) + llvm::dbgs() << " Escapes!\n"; + ); + foundEscapingUse |= localFoundEscapingUse; } // If there aren't any, we're fine. @@ -350,6 +360,8 @@ static void checkPartialApply(ASTContext &Context, DeclContext *DC, if (isPartialApplyOfReabstractionThunk(PAI)) return; + LLVM_DEBUG(llvm::dbgs() << "Checking Partial Apply: " << *PAI); + ApplySite apply(PAI); // Collect any non-escaping captures. @@ -584,6 +596,8 @@ class DiagnoseInvalidEscapingCaptures : public SILFunctionTransform { if (F->wasDeserializedCanonical()) return; + LLVM_DEBUG(llvm::dbgs() << "*** Diagnosing escaping captures in function: " + << F->getName() << '\n'); checkEscapingCaptures(F); } }; diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp index 4db973d245908..dad4e5f435f8f 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyAddressCheckerUtils.cpp @@ -433,7 +433,7 @@ static bool memInstMustConsume(Operand *memOper) { return false; ApplySite applySite(pai); auto convention = applySite.getArgumentConvention(*memOper); - return convention.isInoutConvention(); + return !convention.isInoutConvention(); } case SILInstructionKind::DestroyAddrInst: return true; @@ -1802,8 +1802,9 @@ bool GatherUsesVisitor::visitUse(Operand *op) { } if (auto *pas = dyn_cast(user)) { - if (pas->isOnStack()) { - LLVM_DEBUG(llvm::dbgs() << "Found on stack partial apply!\n"); + if (pas->isOnStack() || + ApplySite(pas).getArgumentConvention(*op).isInoutConvention()) { + LLVM_DEBUG(llvm::dbgs() << "Found on stack partial apply or inout usage!\n"); // On-stack partial applications and their final consumes are always a // liveness use of their captures. 
auto leafRange = TypeTreeLeafTypeRange::get(op->get(), getRootAddress()); diff --git a/lib/SILOptimizer/Mandatory/MoveOnlyObjectCheckerUtils.cpp b/lib/SILOptimizer/Mandatory/MoveOnlyObjectCheckerUtils.cpp index a87aa2f740944..9cc139af3d2ae 100644 --- a/lib/SILOptimizer/Mandatory/MoveOnlyObjectCheckerUtils.cpp +++ b/lib/SILOptimizer/Mandatory/MoveOnlyObjectCheckerUtils.cpp @@ -110,6 +110,21 @@ bool swift::siloptimizer::searchForCandidateObjectMarkMustChecks( } } + // In the case we have a resilient argument, we may have the following pattern: + // + // bb0(%0 : $*Type): // in_guaranteed + // %1 = load_borrow %0 + // %2 = copy_value + // %3 = mark_must_check [no_copy_or_assign] + if (auto *lbi = dyn_cast(cvi->getOperand())) { + if (auto *arg = dyn_cast(lbi->getOperand())) { + if (arg->getKnownParameterInfo().isIndirectInGuaranteed()) { + moveIntroducersToProcess.insert(mmci); + continue; + } + } + } + if (auto *bbi = dyn_cast(cvi->getOperand())) { if (bbi->isLexical()) { moveIntroducersToProcess.insert(mmci); @@ -656,6 +671,11 @@ void MoveOnlyObjectCheckerPImpl::check(DominanceInfo *domTree, i = copyToMoveOnly; } + // Handle: + // + // bb0(%0 : @guaranteed $Type): + // %1 = copy_value %0 + // %2 = mark_must_check [no_consume_or_assign] %1 if (auto *arg = dyn_cast(i->getOperand(0))) { if (arg->getOwnershipKind() == OwnershipKind::Guaranteed) { for (auto *use : markedInst->getConsumingUses()) { @@ -669,6 +689,28 @@ void MoveOnlyObjectCheckerPImpl::check(DominanceInfo *domTree, continue; } } + + // Handle: + // + // bb0(%0 : $*Type): // in_guaranteed + // %1 = load_borrow %0 + // %2 = copy_value %1 + // %3 = mark_must_check [no_consume_or_assign] %2 + if (auto *lbi = dyn_cast(i->getOperand(0))) { + if (auto *arg = dyn_cast(lbi->getOperand())) { + if (arg->getKnownParameterInfo().isIndirectInGuaranteed()) { + for (auto *use : markedInst->getConsumingUses()) { + destroys.push_back(cast(use->getUser())); + } + while (!destroys.empty()) + destroys.pop_back_val()->eraseFromParent(); + markedInst->replaceAllUsesWith(lbi); + markedInst->eraseFromParent(); + cvi->eraseFromParent(); + continue; + } + } + } } } } diff --git a/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp b/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp index 513ee74bab3cd..b0b06d412619f 100644 --- a/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp +++ b/lib/SILOptimizer/Mandatory/PerformanceDiagnostics.cpp @@ -335,7 +335,7 @@ bool PerformanceDiagnostics::visitInst(SILInstruction *inst, } case SILInstructionKind::MetatypeInst: if (metatypeUsesAreNotRelevant(cast(inst))) - break; + return false; LLVM_FALLTHROUGH; default: // We didn't recognize the instruction, so try to give an error message @@ -486,13 +486,6 @@ class PerformanceDiagnosticsPass : public SILModuleTransform { if (function.wasDeserializedCanonical()) continue; - if (!module->getOptions().EnablePerformanceAnnotations) { - module->getASTContext().Diags.diagnose( - function.getLocation().getSourceLoc(), - diag::performance_annotations_not_enabled); - return; - } - diagnoser.visitFunction(&function, function.getPerfConstraints()); } } diff --git a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp index f812959bc3962..43b532953fc3a 100644 --- a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp +++ b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp @@ -2853,12 +2853,12 @@ static AllocationInst *getOptimizableAllocation(SILInstruction *i) { return alloc; } -bool swift::optimizeMemoryAccesses(SILFunction &fn) { 
+bool swift::optimizeMemoryAccesses(SILFunction *fn) { bool changed = false; - DeadEndBlocks deadEndBlocks(&fn); + DeadEndBlocks deadEndBlocks(fn); InstructionDeleter deleter; - for (auto &bb : fn) { + for (auto &bb : *fn) { for (SILInstruction &inst : bb.deletableInstructions()) { // First see if i is an allocation that we can optimize. If not, skip it. AllocationInst *alloc = getOptimizableAllocation(&inst); @@ -2894,14 +2894,14 @@ bool swift::optimizeMemoryAccesses(SILFunction &fn) { return changed; } -bool swift::eliminateDeadAllocations(SILFunction &fn) { - if (!fn.hasOwnership()) +bool swift::eliminateDeadAllocations(SILFunction *fn) { + if (!fn->hasOwnership()) return false; bool changed = false; - DeadEndBlocks deadEndBlocks(&fn); + DeadEndBlocks deadEndBlocks(fn); - for (auto &bb : fn) { + for (auto &bb : *fn) { InstructionDeleter deleter; for (SILInstruction &inst : bb.deletableInstructions()) { // First see if i is an allocation that we can optimize. If not, skip it. @@ -2949,7 +2949,7 @@ class PredictableMemoryAccessOptimizations : public SILFunctionTransform { /// or has a pass order dependency on other early passes. void run() override { // TODO: Can we invalidate here just instructions? - if (optimizeMemoryAccesses(*getFunction())) + if (optimizeMemoryAccesses(getFunction())) invalidateAnalysis(SILAnalysis::InvalidationKind::FunctionBody); } }; @@ -2960,7 +2960,7 @@ class PredictableDeadAllocationElimination : public SILFunctionTransform { if (getFunction()->wasDeserializedCanonical() || !getFunction()->hasOwnership()) return; - if (eliminateDeadAllocations(*getFunction())) + if (eliminateDeadAllocations(getFunction())) invalidateAnalysis(SILAnalysis::InvalidationKind::FunctionBody); } }; diff --git a/lib/SILOptimizer/PassManager/PassManager.cpp b/lib/SILOptimizer/PassManager/PassManager.cpp index 17c63b7d06b57..892bd6b4d0151 100644 --- a/lib/SILOptimizer/PassManager/PassManager.cpp +++ b/lib/SILOptimizer/PassManager/PassManager.cpp @@ -30,7 +30,10 @@ #include "swift/SILOptimizer/Utils/BasicBlockOptUtils.h" #include "swift/SILOptimizer/Utils/ConstantFolding.h" #include "swift/SILOptimizer/Utils/CFGOptUtils.h" +#include "swift/SILOptimizer/Utils/Devirtualize.h" #include "swift/SILOptimizer/Utils/OptimizerStatsUtils.h" +#include "swift/SILOptimizer/Utils/SILInliner.h" +#include "swift/SILOptimizer/Utils/SILOptFunctionBuilder.h" #include "swift/SILOptimizer/Utils/StackNesting.h" #include "swift/SILOptimizer/Utils/InstOptUtils.h" #include "llvm/ADT/DenseMap.h" @@ -1427,8 +1430,25 @@ std::string BridgedPassContext::getModuleDescription() const { return str; } -bool BridgedPassContext::tryDeleteDeadClosure(BridgedInstruction closure) const { - return ::tryDeleteDeadClosure(closure.getAs(), InstModCallbacks()); +bool BridgedPassContext::tryOptimizeApplyOfPartialApply(BridgedInstruction closure) const { + auto *pa = closure.getAs(); + SILBuilder builder(pa); + return ::tryOptimizeApplyOfPartialApply(pa, builder.getBuilderContext(), InstModCallbacks()); +} + +bool BridgedPassContext::tryDeleteDeadClosure(BridgedInstruction closure, bool needKeepArgsAlive) const { + return ::tryDeleteDeadClosure(closure.getAs(), InstModCallbacks(), needKeepArgsAlive); +} + +BridgedPassContext::DevirtResult BridgedPassContext::tryDevirtualizeApply(BridgedInstruction apply, + bool isMandatory) const { + auto cha = invocation->getPassManager()->getAnalysis(); + auto result = ::tryDevirtualizeApply(ApplySite(apply.getInst()), cha, nullptr, isMandatory); + if (result.first) { + OptionalBridgedInstruction 
newApply(result.first.getInstruction()->asSILNode()); + return {newApply, result.second}; + } + return {{nullptr}, false}; } OptionalBridgedValue BridgedPassContext::constantFoldBuiltin(BridgedInstruction builtin) const { @@ -1437,6 +1457,20 @@ OptionalBridgedValue BridgedPassContext::constantFoldBuiltin(BridgedInstruction return {::constantFoldBuiltin(bi, resultsInError)}; } +void BridgedPassContext::inlineFunction(BridgedInstruction apply, bool mandatoryInline) const { + SILOptFunctionBuilder funcBuilder(*invocation->getTransform()); + InstructionDeleter deleter; + SILInliner::inlineFullApply(FullApplySite(apply.getInst()), + mandatoryInline ? SILInliner::InlineKind::MandatoryInline + : SILInliner::InlineKind::PerformanceInline, + funcBuilder, + deleter); +} + +bool BridgedPassContext::specializeAppliesInFunction(BridgedFunction function, bool isMandatory) const { + return ::specializeAppliesInFunction(*function.getFunction(), invocation->getTransform(), isMandatory); +} + void BridgedPassContext::createStaticInitializer(BridgedGlobalVar global, BridgedInstruction initValue) const { StaticInitCloner::appendToInitializer(global.getGlobal(), initValue.getAs()); } diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index 064dd8f9c8d8d..7eb8a72f8c100 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -218,8 +218,9 @@ static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) { } P.addOptimizeHopToExecutor(); - P.addMandatoryGenericSpecializer(); + // These diagnostic passes must run before OnoneSimplification because + // they rely on completely unoptimized SIL. P.addDiagnoseUnreachable(); P.addDiagnoseInfiniteRecursion(); P.addYieldOnceCheck(); @@ -231,11 +232,6 @@ static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) { P.addDiagnoseLifetimeIssues(); } - P.addOnoneSimplification(); - P.addInitializeStaticGlobals(); - - P.addPerformanceDiagnostics(); - // Canonical swift requires all non cond_br critical edges to be split. P.addSplitNonCondBrCriticalEdges(); @@ -243,6 +239,11 @@ static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) { // until we can audit the later part of the pipeline. Eventually, this should // occur before IRGen. P.addMoveOnlyTypeEliminator(); + + P.addMandatoryPerformanceOptimizations(); + P.addOnoneSimplification(); + P.addInitializeStaticGlobals(); + P.addPerformanceDiagnostics(); } SILPassPipelinePlan @@ -758,6 +759,10 @@ static void addLowLevelPassPipeline(SILPassPipelinePlan &P) { addFunctionPasses(P, OptimizationLevelKind::LowLevel); + // The NamedReturnValueOptimization shouldn't be done before serialization. + // For details see the comment for `namedReturnValueOptimization`. 
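(Illustrative sketch, not part of the patch: the kind of function the new "Optimize copies to an indirect return value" pass is aimed at. The name and body below are assumptions; what matters is that the generic result is returned indirectly, so the local variable is materialized in an `alloc_stack` and copied into the `@out` argument at the return, which is the copy the pass can forward away.)

```swift
// Hypothetical example only: `T` is returned via an @out argument, so
// `result` lives in a local alloc_stack until the final copy into the out
// parameter at the return.
func makeCollection<T: RangeReplaceableCollection>(
  repeating element: T.Element, count: Int
) -> T {
  var result = T()
  for _ in 0..<count {
    result.append(element)
  }
  return result
}
```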
+ P.addNamedReturnValueOptimization(); + P.addDeadObjectElimination(); P.addObjectOutliner(); P.addDeadStoreElimination(); diff --git a/lib/SILOptimizer/Transforms/CopyForwarding.cpp b/lib/SILOptimizer/Transforms/CopyForwarding.cpp index 58f1dfb589729..e8c173de4a73d 100644 --- a/lib/SILOptimizer/Transforms/CopyForwarding.cpp +++ b/lib/SILOptimizer/Transforms/CopyForwarding.cpp @@ -75,7 +75,6 @@ #include "llvm/Support/CommandLine.h" #include "llvm/Support/Debug.h" -STATISTIC(NumCopyNRVO, "Number of copies removed via named return value opt."); STATISTIC(NumCopyForward, "Number of copies removed via forward propagation"); STATISTIC(NumCopyBackward, "Number of copies removed via backward propagation"); @@ -1213,114 +1212,6 @@ void CopyForwarding::forwardCopiesOf(SILValue Def, SILFunction *F) { } } -//===----------------------------------------------------------------------===// -// Named Return Value Optimization -//===----------------------------------------------------------------------===// - -/// Return true if this copy can be eliminated through Named Return Value -/// Optimization (NRVO). -/// -/// Simple NRVO cases are handled naturally via backwardPropagateCopy. However, -/// general NRVO is not handled via local propagation without global data -/// flow. Nonetheless, NRVO is a simple pattern that can be detected using a -/// different technique from propagation. -/// -/// Example: -/// func nrvo(z : Bool) -> T { -/// var rvo : T -/// if (z) { -/// rvo = T(10) -/// } -/// else { -/// rvo = T(1) -/// } -/// return rvo -/// } -/// -/// Because of the control flow, backward propagation with a block will fail to -/// find the initializer for the copy at "return rvo". Instead, we directly -/// check for an NRVO pattern by observing a copy in a return block that is the -/// only use of the copy's dest, which must be an @out arg. If there are no -/// instructions between the copy and the return that may write to the copy's -/// source, we simply replace the source's local stack address with the @out -/// address. -/// -/// The following SIL pattern will be detected: -/// -/// sil @foo : $@convention(thin) (@out T) -> () { -/// bb0(%0 : $*T): -/// %2 = alloc_stack $T -/// ... // arbitrary control flow, but no other uses of %0 -/// bbN: -/// copy_addr [take] %2 to [init] %0 : $*T -/// ... // no writes -/// return -static bool canNRVO(CopyAddrInst *CopyInst) { - // Don't perform NRVO unless the copy is a [take]. This is the easiest way - // to determine that the local variable has ownership of its value and ensures - // that removing a copy is a reference count neutral operation. For example, - // this copy can't be trivially eliminated without adding a retain. - // sil @f : $@convention(thin) (@guaranteed T) -> @out T - // bb0(%in : $*T, %out : $T): - // %local = alloc_stack $T - // store %in to %local : $*T - // copy_addr %local to [init] %out : $*T - if (!CopyInst->isTakeOfSrc()) - return false; - - auto *asi = dyn_cast(CopyInst->getSrc()); - if (!asi || asi->hasDynamicLifetime()) - return false; - - // The copy's dest must be an indirect SIL argument. Otherwise, it may not - // dominate all uses of the source. Worse, it may be aliased. This - // optimization will early-initialize the copy dest, so we can't allow aliases - // to be accessed between the initialization and the return. 
- auto OutArg = dyn_cast(CopyInst->getDest()); - if (!OutArg) - return false; - - if (!OutArg->isIndirectResult()) - return false; - - SILBasicBlock *BB = CopyInst->getParent(); - if (!isa(BB->getTerminator())) - return false; - - SILValue CopyDest = CopyInst->getDest(); - if (!hasOneNonDebugUse(CopyDest)) - return false; - - auto SI = CopyInst->getIterator(), SE = BB->end(); - for (++SI; SI != SE; ++SI) { - if (SI->mayWriteToMemory() && !isa(SI)) - return false; - } - return true; -} - -/// Replace all uses of \p ASI by \p RHS, except the dealloc_stack. -static void replaceAllUsesExceptDealloc(AllocStackInst *ASI, ValueBase *RHS) { - llvm::SmallVector Uses; - for (Operand *Use : ASI->getUses()) { - if (!isa(Use->getUser())) - Uses.push_back(Use); - } - for (Operand *Use : Uses) { - Use->set(RHS); - } -} - -/// Remove a copy for which canNRVO returned true. -static void performNRVO(CopyAddrInst *CopyInst) { - LLVM_DEBUG(llvm::dbgs() << "NRVO eliminates copy" << *CopyInst); - ++NumCopyNRVO; - replaceAllUsesExceptDealloc(cast(CopyInst->getSrc()), - CopyInst->getDest()); - assert(CopyInst->getSrc() == CopyInst->getDest() && "bad NRVO"); - CopyInst->eraseFromParent(); -} - //===----------------------------------------------------------------------===// // CopyForwardingPass //===----------------------------------------------------------------------===// @@ -1361,16 +1252,10 @@ class CopyForwardingPass : public SILFunctionTransform // Collect a set of identified objects (@in arg or alloc_stack) that are // copied in this function. - // Collect a separate set of copies that can be removed via NRVO. llvm::SmallSetVector CopiedDefs; - llvm::SmallVector NRVOCopies; for (auto &BB : *getFunction()) for (auto II = BB.begin(), IE = BB.end(); II != IE; ++II) { if (auto *CopyInst = dyn_cast(&*II)) { - if (canNRVO(CopyInst)) { - NRVOCopies.push_back(CopyInst); - continue; - } SILValue Def = CopyInst->getSrc(); if (isIdentifiedSourceValue(Def)) CopiedDefs.insert(Def); @@ -1381,12 +1266,6 @@ class CopyForwardingPass : public SILFunctionTransform } } - // Perform NRVO - for (auto Copy : NRVOCopies) { - performNRVO(Copy); - invalidateAnalysis(SILAnalysis::InvalidationKind::CallsAndInstructions); - } - // Perform Copy Forwarding. if (CopiedDefs.empty()) return; diff --git a/lib/SILOptimizer/Transforms/GenericSpecializer.cpp b/lib/SILOptimizer/Transforms/GenericSpecializer.cpp index 18925d8a2fb66..11a4c16206851 100644 --- a/lib/SILOptimizer/Transforms/GenericSpecializer.cpp +++ b/lib/SILOptimizer/Transforms/GenericSpecializer.cpp @@ -74,8 +74,9 @@ static void transferSpecializeAttributeTargets(SILModule &M, } } } +} // end anonymous namespace -static bool specializeAppliesInFunction(SILFunction &F, +bool swift::specializeAppliesInFunction(SILFunction &F, SILTransform *transform, bool isMandatory) { SILOptFunctionBuilder FunctionBuilder(*transform); @@ -172,6 +173,8 @@ static bool specializeAppliesInFunction(SILFunction &F, return Changed; } +namespace { + /// The generic specializer, used in the optimization pipeline. class GenericSpecializer : public SILFunctionTransform { @@ -188,210 +191,8 @@ class GenericSpecializer : public SILFunctionTransform { } }; -/// The mandatory specializer, which runs in the mandatory pipeline. -/// -/// It specializes functions, called from performance-annotated functions -/// (@_noLocks, @_noAllocation). 
-class MandatoryGenericSpecializer : public SILModuleTransform { - - void run() override; - - bool optimize(SILFunction *func, ClassHierarchyAnalysis *cha, - bool &invalidatedStackNesting); - - bool optimizeInst(SILInstruction *inst, SILOptFunctionBuilder &funcBuilder, - InstructionDeleter &deleter, ClassHierarchyAnalysis *cha, - bool &invalidatedStackNesting); -}; - - -void MandatoryGenericSpecializer::run() { - SILModule *module = getModule(); - - if (!module->getOptions().EnablePerformanceAnnotations) - return; - - ClassHierarchyAnalysis *cha = getAnalysis(); - - llvm::SmallVector workList; - llvm::SmallPtrSet visited; - - // Look for performance-annotated functions. - for (SILFunction &function : *module) { - if (function.getPerfConstraints() != PerformanceConstraints::None) { - workList.push_back(&function); - visited.insert(&function); - } - } - - while (!workList.empty()) { - SILFunction *func = workList.pop_back_val(); - module->linkFunction(func, SILModule::LinkingMode::LinkAll); - if (!func->isDefinition()) - continue; - - // Perform generic specialization and other related optimization. - - bool invalidatedStackNesting = false; - - // To avoid phase ordering problems of the involved optimizations, iterate - // until we reach a fixed point. - // This should always happen, but to be on the safe side, limit the number - // of iterations to 10 (which is more than enough - usually the loop runs - // 1 to 3 times). - for (int i = 0; i < 10; i++) { - bool changed = optimize(func, cha, invalidatedStackNesting); - if (changed) { - invalidateAnalysis(func, SILAnalysis::InvalidationKind::FunctionBody); - } else { - break; - } - } - - if (invalidatedStackNesting) { - StackNesting::fixNesting(func); - } - - // Continue specializing called functions. - for (SILBasicBlock &block : *func) { - for (SILInstruction &inst : block) { - if (auto as = ApplySite::isa(&inst)) { - if (SILFunction *callee = as.getReferencedFunctionOrNull()) { - if (visited.insert(callee).second) - workList.push_back(callee); - } - } - } - } - } -} - -/// Specialize generic calls in \p func and do some other related optimizations: -/// devirtualization and constant-folding of the Builtin.canBeClass. -bool MandatoryGenericSpecializer::optimize(SILFunction *func, - ClassHierarchyAnalysis *cha, - bool &invalidatedStackNesting) { - bool changed = false; - SILOptFunctionBuilder funcBuilder(*this); - InstructionDeleter deleter; - ReachingReturnBlocks rrBlocks(func); - NonErrorHandlingBlocks neBlocks(func); - - // If this is a just specialized function, try to optimize copy_addr, etc. - // instructions. - if (optimizeMemoryAccesses(*func)) { - eliminateDeadAllocations(*func); - changed = true; - } - - // Visiting blocks in reverse order avoids revisiting instructions after block - // splitting, which would be quadratic. - for (SILBasicBlock &block : llvm::reverse(*func)) { - // Only consider blocks which are not on a "throw" path. 
- if (!rrBlocks.reachesReturn(&block) || !neBlocks.isNonErrorHandling(&block)) - continue; - - for (SILInstruction &inst : block.reverseDeletableInstructions()) { - changed |= optimizeInst(&inst, funcBuilder, deleter, cha, invalidatedStackNesting); - } - } - deleter.cleanupDeadInstructions(); - - if (specializeAppliesInFunction(*func, this, /*isMandatory*/ true)) - changed = true; - - return changed; -} - -bool MandatoryGenericSpecializer:: -optimizeInst(SILInstruction *inst, SILOptFunctionBuilder &funcBuilder, - InstructionDeleter &deleter, ClassHierarchyAnalysis *cha, - bool &invalidatedStackNesting) { - if (auto as = ApplySite::isa(inst)) { - - bool changed = false; - - // Specialization opens opportunities to devirtualize method calls. - if (ApplySite newAS = tryDevirtualizeApply(as, cha).first) { - deleter.forceDelete(as.getInstruction()); - changed = true; - as = newAS; - } - - if (auto *pai = dyn_cast(as)) { - SILBuilderContext builderCtxt(funcBuilder.getModule()); - if (tryOptimizeApplyOfPartialApply(pai, builderCtxt, deleter.getCallbacks())) { - // Try to delete the partial_apply. - // We don't need to copy all arguments again (to extend their lifetimes), - // because it was already done in tryOptimizeApplyOfPartialApply. - tryDeleteDeadClosure(pai, deleter.getCallbacks(), /*needKeepArgsAlive=*/ false); - invalidatedStackNesting = true; - return true; - } - return changed; - } - - auto fas = FullApplySite::isa(as.getInstruction()); - assert(fas); - - SILFunction *callee = fas.getReferencedFunctionOrNull(); - if (!callee) - return changed; - - if (callee->isTransparent() == IsNotTransparent && - // Force inlining of co-routines, because co-routines may allocate - // memory. - !isa(fas.getInstruction())) - return changed; - - if (callee->isExternalDeclaration()) - getModule()->loadFunction(callee, SILModule::LinkingMode::LinkAll); - - if (callee->isExternalDeclaration()) - return changed; - - // If the de-virtualized callee is a transparent function, inline it. - SILInliner::inlineFullApply(fas, SILInliner::InlineKind::MandatoryInline, - funcBuilder, deleter); - if (callee->hasOwnership() && !inst->getFunction()->hasOwnership()) - invalidatedStackNesting = true; - return true; - } - if (auto *bi = dyn_cast(inst)) { - // Constant-fold the Builtin.canBeClass. This is essential for Array code. - if (bi->getBuiltinInfo().ID != BuiltinValueKind::CanBeObjCClass) - return false; - - SILBuilderWithScope builder(bi); - IntegerLiteralInst *lit = optimizeBuiltinCanBeObjCClass(bi, builder); - if (!lit) - return false; - - bi->replaceAllUsesWith(lit); - ConstantFolder constFolder(funcBuilder, getOptions().AssertConfig, - /*EnableDiagnostics*/ false); - constFolder.addToWorklist(lit); - constFolder.processWorkList(); - deleter.forceDelete(bi); - return true; - } - if (auto *mti = dyn_cast(inst)) { - // Remove dead `metatype` instructions which only have `debug_value` uses. - // We lose debug info for such type variables, but this is a compromise we - // need to accept to get allocation/lock free code. 
- if (onlyHaveDebugUses(mti)) { - deleter.forceDeleteWithUsers(mti); - } - } - return false; -} - } // end anonymous namespace SILTransform *swift::createGenericSpecializer() { return new GenericSpecializer(); } - -SILTransform *swift::createMandatoryGenericSpecializer() { - return new MandatoryGenericSpecializer(); -} diff --git a/lib/SILOptimizer/Utils/CanonicalizeBorrowScope.cpp b/lib/SILOptimizer/Utils/CanonicalizeBorrowScope.cpp index fa1b13a5eb308..59251e49fd968 100644 --- a/lib/SILOptimizer/Utils/CanonicalizeBorrowScope.cpp +++ b/lib/SILOptimizer/Utils/CanonicalizeBorrowScope.cpp @@ -601,7 +601,7 @@ class RewriteOuterBorrowUses { } // If it's not already dead, update this operand bypassing any copies. SILValue innerValue = use->get(); - if (scope.getDeleter().deleteIfDead(user)) { + if (scope.getDeleter().deleteIfDead(user, /*fixLifetime=*/false)) { LLVM_DEBUG(llvm::dbgs() << " Deleted " << *user); } else { use->set(scope.findDefInBorrowScope(use->get())); @@ -692,7 +692,7 @@ SILValue RewriteOuterBorrowUses::createOuterValues(SILValue innerValue) { auto incomingOuterVal = createOuterValues(incomingInnerVal); - auto *insertPt = incomingOuterVal->getNextInstruction(); + auto *insertPt = innerValue->getDefiningInsertionPoint(); auto *clone = innerInst->clone(insertPt); scope.getCallbacks().createdNewInst(clone); Operand *use = &clone->getOperandRef(0); diff --git a/lib/SILOptimizer/Utils/Devirtualize.cpp b/lib/SILOptimizer/Utils/Devirtualize.cpp index a4d70169ce1d8..5b461a8bc2be8 100644 --- a/lib/SILOptimizer/Utils/Devirtualize.cpp +++ b/lib/SILOptimizer/Utils/Devirtualize.cpp @@ -1140,7 +1140,7 @@ static bool isNonGenericThunkOfGenericExternalFunction(SILFunction *thunk) { return false; } -static bool canDevirtualizeWitnessMethod(ApplySite applySite) { +static bool canDevirtualizeWitnessMethod(ApplySite applySite, bool isMandatory) { SILFunction *f; SILWitnessTable *wt; @@ -1183,7 +1183,7 @@ static bool canDevirtualizeWitnessMethod(ApplySite applySite) { // ``` // In the defining module, the generic conformance can be specialized (which is not // possible in the client module, because it's not inlinable). - if (isNonGenericThunkOfGenericExternalFunction(f)) { + if (!isMandatory && isNonGenericThunkOfGenericExternalFunction(f)) { return false; } @@ -1237,8 +1237,9 @@ static bool canDevirtualizeWitnessMethod(ApplySite applySite) { /// of a function_ref, returning the new apply. std::pair swift::tryDevirtualizeWitnessMethod(ApplySite applySite, - OptRemark::Emitter *ore) { - if (!canDevirtualizeWitnessMethod(applySite)) + OptRemark::Emitter *ore, + bool isMandatory) { + if (!canDevirtualizeWitnessMethod(applySite, isMandatory)) return {ApplySite(), false}; SILFunction *f; @@ -1262,7 +1263,7 @@ swift::tryDevirtualizeWitnessMethod(ApplySite applySite, /// Return the new apply and true if the CFG was also modified. std::pair swift::tryDevirtualizeApply(ApplySite applySite, ClassHierarchyAnalysis *cha, - OptRemark::Emitter *ore) { + OptRemark::Emitter *ore, bool isMandatory) { LLVM_DEBUG(llvm::dbgs() << " Trying to devirtualize: " << *applySite.getInstruction()); @@ -1272,7 +1273,7 @@ swift::tryDevirtualizeApply(ApplySite applySite, ClassHierarchyAnalysis *cha, // %9 = apply %8(%6#1) : ... // if (isa(applySite.getCallee())) - return tryDevirtualizeWitnessMethod(applySite, ore); + return tryDevirtualizeWitnessMethod(applySite, ore, isMandatory); // TODO: check if we can also de-virtualize partial applies of class methods. 
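(Illustrative sketch, not part of the patch: the kind of witness-method call that `tryDevirtualizeWitnessMethod` resolves to a direct `function_ref`. The protocol and types below are assumptions; the relevant point for this change is only that with `isMandatory` set the non-generic-thunk bailout above is skipped, so such calls are also devirtualized from the mandatory pipeline.)

```swift
// Hypothetical example only: the call to `area()` goes through the Shape
// witness table at SIL level and can be devirtualized to Circle.area once
// the concrete conformance is known.
protocol Shape {
  func area() -> Double
}

struct Circle: Shape {
  var radius: Double
  func area() -> Double { radius * radius * .pi }
}

func circleArea(_ radius: Double) -> Double {
  let shape: any Shape = Circle(radius: radius)
  return shape.area()
}
```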
FullApplySite fas = FullApplySite::isa(applySite.getInstruction()); @@ -1344,7 +1345,7 @@ bool swift::canDevirtualizeApply(FullApplySite applySite, // %9 = apply %8(%6#1) : ... // if (isa(applySite.getCallee())) - return canDevirtualizeWitnessMethod(applySite); + return canDevirtualizeWitnessMethod(applySite, /*isMandatory*/ false); /// Optimize a class_method and alloc_ref pair into a direct function /// reference: diff --git a/lib/SILOptimizer/Utils/Generics.cpp b/lib/SILOptimizer/Utils/Generics.cpp index 256a33f57d11f..ae5d6d62af8ac 100644 --- a/lib/SILOptimizer/Utils/Generics.cpp +++ b/lib/SILOptimizer/Utils/Generics.cpp @@ -1025,12 +1025,16 @@ createSpecializedType(CanSILFunctionType SubstFTy, SILModule &M) const { SpecializedResults.push_back(RI); } unsigned idx = 0; + bool removedSelfParam = false; for (SILParameterInfo PI : SubstFTy->getParameters()) { unsigned paramIdx = idx++; PI = PI.getUnsubstituted(M, SubstFTy, context); - if (isDroppedMetatypeArg(param2ArgIndex(paramIdx))) + if (isDroppedMetatypeArg(param2ArgIndex(paramIdx))) { + if (SubstFTy->hasSelfParam() && paramIdx == SubstFTy->getParameters().size() - 1) + removedSelfParam = true; continue; + } bool isTrivial = TrivialArgs.test(param2ArgIndex(paramIdx)); if (!isParamConverted(paramIdx)) { @@ -1061,8 +1065,15 @@ createSpecializedType(CanSILFunctionType SubstFTy, SILModule &M) const { auto Signature = SubstFTy->isPolymorphic() ? SubstFTy->getInvocationGenericSignature() : CanGenericSignature(); + + SILFunctionType::ExtInfo extInfo = SubstFTy->getExtInfo(); + if (extInfo.hasSelfParam() && removedSelfParam) { + extInfo = extInfo.withRepresentation(SILFunctionTypeRepresentation::Thin); + assert(!extInfo.hasSelfParam()); + } + return SILFunctionType::get( - Signature, SubstFTy->getExtInfo(), + Signature, extInfo, SubstFTy->getCoroutineKind(), SubstFTy->getCalleeConvention(), SpecializedParams, SpecializedYields, SpecializedResults, SubstFTy->getOptionalErrorResult(), SubstitutionMap(), SubstitutionMap(), diff --git a/lib/SILOptimizer/Utils/InstOptUtils.cpp b/lib/SILOptimizer/Utils/InstOptUtils.cpp index 56ef8152593cb..70ae4880accf6 100644 --- a/lib/SILOptimizer/Utils/InstOptUtils.cpp +++ b/lib/SILOptimizer/Utils/InstOptUtils.cpp @@ -990,6 +990,14 @@ static bool keepArgsOfPartialApplyAlive(PartialApplyInst *pai, return false; } + // We must not introduce copies for move only types. + // TODO: in OSSA, instead of bailing, it's possible to destroy the arguments + // without the need of copies. 
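(Illustrative sketch, not part of the patch: why the bailout for move-only types above is needed. The type below is an assumption written with the noncopyable `~Copyable` syntax, which may differ from the spelling in use when this patch was written; the constraint is the same either way: a value of such a type has no copy operation the utilities could insert to keep an argument alive.)

```swift
// Hypothetical example only: values of a noncopyable type cannot be copied,
// so lifetime extension by inserting copy_value / copy_addr of a captured
// argument is not an option for them.
struct Handle: ~Copyable {
  let fd: Int32
}

func use(_ handle: borrowing Handle) {
  print("using fd", handle.fd)
}
```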
+ for (Operand *argOp : argsToHandle) { + if (argOp->get()->getType().isMoveOnly()) + return false; + } + for (Operand *argOp : argsToHandle) { SILValue arg = argOp->get(); @@ -1024,11 +1032,14 @@ bool swift::tryDeleteDeadClosure(SingleValueInstruction *closure, if (pa && pa->isOnStack()) { SmallVector deleteInsts; for (auto *use : pa->getUses()) { - if (isa(use->getUser()) - || isa(use->getUser())) - deleteInsts.push_back(use->getUser()); - else if (!deadMarkDependenceUser(use->getUser(), deleteInsts)) + SILInstruction *user = use->getUser(); + if (isa(user) + || isa(user) + || isa(user)) { + deleteInsts.push_back(user); + } else if (!deadMarkDependenceUser(user, deleteInsts)) { return false; + } } for (auto *inst : reverse(deleteInsts)) callbacks.deleteInst(inst); diff --git a/lib/SILOptimizer/Utils/InstructionDeleter.cpp b/lib/SILOptimizer/Utils/InstructionDeleter.cpp index 6a3049facde85..643b0f9ea161f 100644 --- a/lib/SILOptimizer/Utils/InstructionDeleter.cpp +++ b/lib/SILOptimizer/Utils/InstructionDeleter.cpp @@ -276,6 +276,10 @@ void InstructionDeleter::cleanupDeadInstructions() { bool InstructionDeleter::deleteIfDead(SILInstruction *inst) { bool fixLifetime = inst->getFunction()->hasOwnership(); + return deleteIfDead(inst, fixLifetime); +} + +bool InstructionDeleter::deleteIfDead(SILInstruction *inst, bool fixLifetime) { if (isInstructionTriviallyDead(inst) || isScopeAffectingInstructionDead(inst, fixLifetime)) { getCallbacks().notifyWillBeDeleted(inst); diff --git a/lib/SILOptimizer/Utils/PartialApplyCombiner.cpp b/lib/SILOptimizer/Utils/PartialApplyCombiner.cpp index 6ff33d0e3c8e1..c46baf1519dba 100644 --- a/lib/SILOptimizer/Utils/PartialApplyCombiner.cpp +++ b/lib/SILOptimizer/Utils/PartialApplyCombiner.cpp @@ -108,6 +108,14 @@ bool PartialApplyCombiner::copyArgsToTemporaries( return false; } + // We must not introduce copies for move only types. + // TODO: in OSSA, instead of bailing, it's possible to keep the arguments + // alive without the need of copies. + for (Operand *argOp : argsToHandle) { + if (argOp->get()->getType().isMoveOnly()) + return false; + } + for (Operand *argOp : argsToHandle) { SILValue arg = argOp->get(); SILValue tmp = arg; diff --git a/lib/SILOptimizer/Utils/StackNesting.cpp b/lib/SILOptimizer/Utils/StackNesting.cpp index a2cacd8725e34..8ae0feeb634bc 100644 --- a/lib/SILOptimizer/Utils/StackNesting.cpp +++ b/lib/SILOptimizer/Utils/StackNesting.cpp @@ -82,20 +82,38 @@ bool StackNesting::solve() { bool isNested = false; BitVector Bits(StackLocs.size()); + StackList deadEndWorklist(BlockInfos.getFunction()); + // Initialize all bit fields to 1s, expect 0s for the entry block. bool initVal = false; for (auto bd : BlockInfos) { bd.data.AliveStackLocsAtEntry.resize(StackLocs.size(), initVal); initVal = true; + + bd.data.isDeadEnd = !bd.block.getTerminator()->isFunctionExiting(); + if (!bd.data.isDeadEnd) + deadEndWorklist.push_back(&bd.block); + } + + // Calculate the isDeadEnd block flags. + while (!deadEndWorklist.empty()) { + SILBasicBlock *b = deadEndWorklist.pop_back_val(); + for (SILBasicBlock *pred : b->getPredecessorBlocks()) { + BlockInfo &bi = BlockInfos[pred]; + if (bi.isDeadEnd) { + bi.isDeadEnd = false; + deadEndWorklist.push_back(pred); + } + } } // First step: do a forward dataflow analysis to get the live stack locations // at the block exits. - // This is necessary to get the live locations at blocks which end in - // unreachable instructions (otherwise the backward data flow would be - // sufficient). 
The special thing about unreachable-blocks is that it's - // okay to have alive locations at that point, i.e. locations which are never - // dealloced. We cannot get such locations with a purly backward dataflow. + // This is necessary to get the live locations at dead-end blocks (otherwise + // the backward data flow would be sufficient). + // The special thing about dead-end blocks is that it's okay to have alive + // locations at that point (e.g. at an `unreachable`), i.e. locations which are + // never dealloced. We cannot get such locations with a purely backward dataflow. do { changed = false; @@ -124,7 +142,7 @@ bool StackNesting::solve() { do { changed = false; - for (auto bd : llvm::reverse(BlockInfos)) { + for (auto bd : llvm::reverse(BlockInfos)) { // Collect the alive-bits (at the block exit) from the successor blocks. for (SILBasicBlock *SuccBB : bd.block.getSuccessorBlocks()) { bd.data.AliveStackLocsAtExit |= BlockInfos[SuccBB].AliveStackLocsAtEntry; @@ -134,14 +152,18 @@ bool StackNesting::solve() { && Bits.any()) && "stack location is missing dealloc"); - if (isa(bd.block.getTerminator())) { - // We treat unreachable as an implicit deallocation for all locations - // which are still alive at this point. + if (bd.data.isDeadEnd) { + // We treat `unreachable` as an implicit deallocation for all locations + // which are still alive at this point. The same is true for dead-end + // CFG regions due to an infinite loop. for (int BitNr = Bits.find_first(); BitNr >= 0; BitNr = Bits.find_next(BitNr)) { // For each alive location extend the lifetime of all locations which // are alive at the allocation point. This is the same as we do for // a "real" deallocation instruction (see below). + // In dead-end CFG regions we have to do that for all blocks (because + // of potential infinite loops), whereas in "normal" CFG regions it's + // sufficient to do it at deallocation instructions.
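(Illustrative sketch, not part of the patch: a dead-end region that is not a single `unreachable` block. The function below is an assumption for illustration; once control enters the infinite loop there is no path to a function exit, so stack locations that are still alive there can never reach a deallocation, which is what the new `isDeadEnd` flag models beyond `unreachable` terminators.)

```swift
// Hypothetical example only: the `while true` body forms a dead-end CFG
// region. Temporaries that are still alive when the loop is entered never
// reach a dealloc_stack, so StackNesting treats the whole region like an
// implicit deallocation point.
func reportForever(_ makeMessage: () -> String) -> Never {
  let banner = makeMessage()
  while true {
    print(banner)
  }
}
```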
Bits |= StackLocs[BitNr].AliveLocs; } bd.data.AliveStackLocsAtExit = Bits; @@ -336,6 +358,8 @@ StackNesting::Changes StackNesting::fixNesting(SILFunction *F) { void StackNesting::dump() const { for (auto bd : BlockInfos) { llvm::dbgs() << "Block " << bd.block.getDebugID(); + if (bd.data.isDeadEnd) + llvm::dbgs() << "(deadend)"; llvm::dbgs() << ": entry-bits="; dumpBits(bd.data.AliveStackLocsAtEntry); llvm::dbgs() << ": exit-bits="; diff --git a/lib/Serialization/SerializedSILLoader.cpp b/lib/Serialization/SerializedSILLoader.cpp index 5cbb4ea3eeb0c..0b555eb5dc5a4 100644 --- a/lib/Serialization/SerializedSILLoader.cpp +++ b/lib/Serialization/SerializedSILLoader.cpp @@ -34,9 +34,6 @@ SerializedSILLoader::SerializedSILLoader( for (auto File : Entry.second->getFiles()) { if (auto LoadedAST = dyn_cast(File)) { auto Des = new SILDeserializer(&LoadedAST->File, *SILMod, callbacks); -#ifndef NDEBUG - SILMod->verify(); -#endif LoadedSILSections.emplace_back(Des); } } diff --git a/test/AutoDiff/SILOptimizer/differentiation_subset_parameters_thunk.swift b/test/AutoDiff/SILOptimizer/differentiation_subset_parameters_thunk.swift index 17324a37f3dd2..2ed9a34cf7cae 100644 --- a/test/AutoDiff/SILOptimizer/differentiation_subset_parameters_thunk.swift +++ b/test/AutoDiff/SILOptimizer/differentiation_subset_parameters_thunk.swift @@ -1,4 +1,4 @@ -// RUN: %target-swift-frontend -emit-sil %s | %FileCheck %s +// RUN: %target-swift-frontend -emit-sil -Xllvm -sil-disable-pass=OnoneSimplification %s | %FileCheck %s import _Differentiation diff --git a/test/ClangImporter/serialization-sil.swift b/test/ClangImporter/serialization-sil.swift index fadb8fe401526..df425bd85ba6f 100644 --- a/test/ClangImporter/serialization-sil.swift +++ b/test/ClangImporter/serialization-sil.swift @@ -1,5 +1,5 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -enable-copy-propagation=requested-passes-only -enable-lexical-lifetimes=false -emit-module-path %t/Test.swiftmodule -emit-sil -o /dev/null -module-name Test %s -sdk "" -import-objc-header %S/Inputs/serialization-sil.h +// RUN: %target-swift-frontend -enable-copy-propagation=requested-passes-only -enable-lexical-lifetimes=false -Xllvm -sil-disable-pass=Simplification -emit-module-path %t/Test.swiftmodule -emit-sil -o /dev/null -module-name Test %s -sdk "" -import-objc-header %S/Inputs/serialization-sil.h // RUN: %target-sil-func-extractor %t/Test.swiftmodule -sil-print-debuginfo -func='$s4Test16testPartialApplyyySoAA_pF' -o - | %FileCheck %s // REQUIRES: objc_interop diff --git a/test/DebugInfo/for-scope.swift b/test/DebugInfo/for-scope.swift index 44694aa65e6c0..3b7c3997636f6 100644 --- a/test/DebugInfo/for-scope.swift +++ b/test/DebugInfo/for-scope.swift @@ -8,14 +8,15 @@ public func f(_ xs: [String?]) { sink(x) } } + // CHECK: sil_scope [[F:[0-9]+]] { loc "{{.*}}":5:13 parent @$s1a1fyySaySSSgGF -// CHECK: sil_scope [[S0:[0-9]+]] { loc "{{.*}}":6:3 parent [[F]] } -// CHECK: sil_scope [[S1:[0-9]+]] { loc "{{.*}}":6:15 parent [[S0]] } -// CHECK: sil_scope [[S3:[0-9]+]] { loc "{{.*}}":7:9 parent [[S1]] } -// CHECK: sil_scope [[S4:[0-9]+]] { loc "{{.*}}":7:13 parent [[S3]] } +// CHECK: sil_scope [[S3:[0-9]+]] { loc "{{.*}}":6:3 parent [[F]] } +// CHECK: sil_scope [[S4:[0-9]+]] { loc "{{.*}}":6:15 parent [[S3]] } +// CHECK: sil_scope [[S5:[0-9]+]] { loc "{{.*}}":7:13 parent [[S4]] } +// CHECK: sil_scope [[S6:[0-9]+]] { loc "{{.*}}":7:9 parent [[S4]] } -// CHECK: debug_value %[[X:.*]] : $Optional, let, name "x", {{.*}}, scope [[S0]] -// CHECK: retain_value %[[X]] : 
$Optional, {{.*}}, scope [[S4]] -// CHECK: debug_value %[[X1:[0-9]+]] : $String, let, name "x", {{.*}}, scope [[S3]] -// CHECK: release_value %[[X1]] : $String, {{.*}}, scope [[S3]] -// CHECK: release_value %[[X]] : $Optional, {{.*}}, scope [[S3]] +// CHECK: debug_value %[[X:.*]] : $Optional, let, name "x", {{.*}}, scope [[S3]] +// CHECK: retain_value %[[X]] : $Optional, {{.*}}, scope [[S5]] +// CHECK: debug_value %[[X1:[0-9]+]] : $String, let, name "x", {{.*}}, scope [[S6]] +// CHECK: release_value %[[X1]] : $String, {{.*}}, scope [[S6]] +// CHECK: release_value %[[X]] : $Optional, {{.*}}, scope [[S6]] diff --git a/test/DebugInfo/guard-let-scope.swift b/test/DebugInfo/guard-let-scope.swift index e2336e296749b..797f08f0b97f6 100644 --- a/test/DebugInfo/guard-let-scope.swift +++ b/test/DebugInfo/guard-let-scope.swift @@ -7,13 +7,13 @@ func f(c: AnyObject??) { guard let x = x, let x = x else { // CHECK: sil_scope [[S3:[0-9]+]] { {{.*}} parent @{{.*}}1f // CHECK: sil_scope [[S4:[0-9]+]] { {{.*}} parent [[S3]] } - // CHECK: sil_scope [[S5:[0-9]+]] { {{.*}} parent [[S4]] } - // CHECK: sil_scope [[S6:[0-9]+]] { loc "{{.*}}":7:3 parent [[S4]] } + // CHECK: sil_scope [[S5:[0-9]+]] { {{.*}} parent [[S3]] } + // CHECK: sil_scope [[S6:[0-9]+]] { loc "{{.*}}":7:3 parent [[S5]] } // CHECK: sil_scope [[S7:[0-9]+]] { loc "{{.*}}":7:17 parent [[S6]] } // CHECK: sil_scope [[S8:[0-9]+]] { loc "{{.*}}":7:28 parent [[S7]] } - // CHECK: debug_value %{{.*}} : $Optional>, let, name "x"{{.*}} scope [[S4]] - // CHECK: debug_value %{{.*}} : $Optional, let, name "x", {{.*}} scope [[S6]] - // CHECK: debug_value %{{.*}} : $AnyObject, let, name "x", {{.*}} scope [[S7]] + // CHECK: debug_value %{{.*}} : $Optional>, let, name "x"{{.*}} scope [[S5]] + // CHECK: debug_value %{{.*}} : $Optional, let, name "x", {{.*}} scope [[S7]] + // CHECK: debug_value %{{.*}} : $AnyObject, let, name "x", {{.*}} scope [[S8]] fatalError() } // CHECK: function_ref {{.*3use.*}} scope [[S8]] diff --git a/test/DebugInfo/guard-let-scope2.swift b/test/DebugInfo/guard-let-scope2.swift index 30211572a8a67..ab85c650cf5c7 100644 --- a/test/DebugInfo/guard-let-scope2.swift +++ b/test/DebugInfo/guard-let-scope2.swift @@ -18,7 +18,7 @@ public func f(x: String?) 
throws { } // CHECK: sil_scope [[S1:[0-9]+]] { {{.*}} parent @{{.*}}1f // CHECK: sil_scope [[S2:[0-9]+]] { {{.*}} parent [[S1]] } - // CHECK: sil_scope [[S3:[0-9]+]] { {{.*}} parent [[S2]] } + // CHECK: sil_scope [[S3:[0-9]+]] { {{.*}} parent [[S1]] } // CHECK: sil_scope [[S4:[0-9]+]] { {{.*}} parent [[S2]] } // CHECK: alloc_stack {{.*}} $SomeObject, let, name "s", {{.*}} scope [[S4]] guard let s = s else { diff --git a/test/DebugInfo/guard-let-scope3.swift b/test/DebugInfo/guard-let-scope3.swift index d35cdf4dde0ff..f023a3901c069 100644 --- a/test/DebugInfo/guard-let-scope3.swift +++ b/test/DebugInfo/guard-let-scope3.swift @@ -7,12 +7,15 @@ public class S { private var c = [Int : C?]() public func f(_ i: Int) throws -> C { guard let x = c[i], let x else { - // CHECK: sil_scope [[X1:[0-9]+]] { loc "{{.*}}":[[@LINE-1]]:5 - // CHECK: sil_scope [[X2:[0-9]+]] { loc "{{.*}}":[[@LINE-2]]:29 - // CHECK: debug_value {{.*}} : $Optional, let, name "x", {{.*}}, scope [[X1]] - // CHECK: debug_value {{.*}} : $C, let, name "x", {{.*}}, scope [[X2]] - // CHECK-NEXT: scope [[X2]] + // CHECK: sil_scope [[P:[0-9]+]] { loc "{{.*}}":[[@LINE-1]]:5 + // CHECK: sil_scope [[X1:[0-9]+]] { loc "{{.*}}":[[@LINE-2]]:19 parent [[P]] + // CHECK: sil_scope [[X2:[0-9]+]] { loc "{{.*}}":[[@LINE-3]]:29 parent [[X1]] + // CHECK: sil_scope [[GUARD:[0-9]+]] { loc "{{.*}}":[[@LINE-4]]:36 parent [[P]] + // CHECK: debug_value {{.*}} : $Optional, let, name "x", {{.*}}, scope [[X1]] + // CHECK: debug_value {{.*}} : $C, let, name "x", {{.*}}, scope [[X2]] + // CHECK-NEXT: scope [[X2]] throw MyError() + // CHECK: function_ref {{.*}}MyError{{.*}}:[[@LINE-1]]:13, scope [[GUARD]] } return x } diff --git a/test/DebugInfo/if-let-scope.swift b/test/DebugInfo/if-let-scope.swift new file mode 100644 index 0000000000000..452464b1a9f18 --- /dev/null +++ b/test/DebugInfo/if-let-scope.swift @@ -0,0 +1,13 @@ +// RUN: %target-swift-frontend -g -emit-sil %s -parse-as-library -module-name a | %FileCheck %s +func use(_ t: T) {} +public func f(value: String?) { + // CHECK: sil_scope [[S0:[0-9]+]] { loc "{{.*}}":[[@LINE-1]]:13 + if let value, let value = Int(value) { + // CHECK: sil_scope [[S1:[0-9]+]] { loc "{{.*}}":[[@LINE-1]]:10 + // CHECK: sil_scope [[S2:[0-9]+]] { loc "{{.*}}":[[@LINE-2]]:29 parent [[S1]] } + // CHECK: debug_value {{.*}} : $Optional, let, name "value", {{.*}}, scope [[S0]] + // CHECK: debug_value {{.*}} : $String, let, name "value", {{.*}}, scope [[S1]] + // CHECK: debug_value {{.*}} : $Int, let, name "value", {{.*}}, scope [[S2]] + use((value)) + } +} diff --git a/test/DebugInfo/inlinescopes.swift b/test/DebugInfo/inlinescopes.swift index 2e6db0b91e137..d7401355c63d9 100644 --- a/test/DebugInfo/inlinescopes.swift +++ b/test/DebugInfo/inlinescopes.swift @@ -27,10 +27,9 @@ func inlined(_ x: Int64) -> Int64 { let result = transparent(x) // CHECK-DAG: ![[CALL]] = !DILocation(line: [[@LINE-1]], column: {{.*}}, scope: ![[INLINED1:.*]], inlinedAt: ![[INLINEDAT:.*]]) // CHECK-DAG: ![[INLINEDAT]] = !DILocation({{.*}}scope: ![[INLINEDAT1:[0-9]+]] -// CHECK-DAG: ![[INLINED1]] = distinct !DILexicalBlock(scope: ![[INLINED2:[0-9]+]] -// CHECK-DAG: ![[INLINED2]] = distinct !DILexicalBlock(scope: ![[INLINED3:[0-9]+]] +// CHECK-DAG: ![[INLINED1]] = distinct !DILexicalBlock(scope: ![[INLINED:[0-9]+]] // Check if the inlined and removed function still has the correct linkage name. 
-// CHECK-DAG: ![[INLINED3]] = distinct !DISubprogram(name: "inlined", linkageName: "$s4main7inlinedys5Int64VADF" +// CHECK-DAG: ![[INLINED]] = distinct !DISubprogram(name: "inlined", linkageName: "$s4main7inlinedys5Int64VADF" // TRANSPARENT-CHECK-NOT: !DISubprogram(name: "transparent" return result } diff --git a/test/DebugInfo/let-scope.swift b/test/DebugInfo/let-scope.swift new file mode 100644 index 0000000000000..a7a8cf98499e7 --- /dev/null +++ b/test/DebugInfo/let-scope.swift @@ -0,0 +1,8 @@ +// RUN: %target-swift-frontend -g -emit-sil %s -parse-as-library -module-name a | %FileCheck %s +func use(_ t: T) {} +public func f(value: (Int, Int)) { + let (x, y) = value + // CHECK: debug_value {{.*}}let, name "x", {{.*}}, scope [[LET:[0-9]+]] + // CHECK: debug_value {{.*}}let, name "y", {{.*}}, scope [[LET]] + use((x,y)) +} diff --git a/test/DebugInfo/scopes.swift b/test/DebugInfo/scopes.swift deleted file mode 100644 index 440af0f5b2292..0000000000000 --- a/test/DebugInfo/scopes.swift +++ /dev/null @@ -1,32 +0,0 @@ -// RUN: %target-swift-frontend -g -emit-ir %s | %FileCheck %s - -class UIViewController { -} - -class UISplitViewController : UIViewController { - var delegate : UIViewController? -} - -class UIWindow { - var rootViewController: UIViewController? -} - -class AppDelegate { - var window: UIWindow? - - func application() -> Bool { - // CHECK-DAG: !DILexicalBlock({{.*}}line: [[@LINE+1]], column: 13 - if true { - // Verify that all debug line table entries for the expression - // below are in the same scope. - // - // CHECK-DAG: !DILocalVariable(name: "splitViewController", scope: ![[S1:[0-9]+]] - // CHECK-DAG: ![[S2:[0-9]+]] = distinct !DILexicalBlock(scope: ![[S1]] - // CHECK-DAG: !DILocation(line: [[@LINE+3]], column: 11, scope: ![[S1]]) - // CHECK-DAG: !DILocation(line: [[@LINE+2]], column: 44, scope: ![[S2]]) - // CHECK-DAG: !DILocation(line: [[@LINE+1]], column: 65, scope: ![[S2]]) - let splitViewController = self.window!.rootViewController as! 
UISplitViewController - } - return true - } -} diff --git a/test/DebugInfo/shadowed-arg.swift b/test/DebugInfo/shadowed-arg.swift index bba3768d67579..68ce0b831151d 100644 --- a/test/DebugInfo/shadowed-arg.swift +++ b/test/DebugInfo/shadowed-arg.swift @@ -17,5 +17,6 @@ public func f(i: Int) { // CHECK: ![[S3]] = distinct !DILexicalBlock(scope: ![[S1]], // SIL: sil_scope [[S1:[0-9]+]] { {{.*}} parent @$s4main1f1iySi_tF // SIL: sil_scope [[S2:[0-9]+]] { {{.*}} parent [[S1]] } +// SIL: sil_scope [[S3:[0-9]+]] { {{.*}} parent [[S1]] } // SIL: debug_value %0 : $Int, let, name "i", argno 1,{{.*}}, scope [[S1]] -// SIL: debug_value {{.*}} : $Array, let, name "i", {{.*}}, scope [[S2]] +// SIL: debug_value {{.*}} : $Array, let, name "i", {{.*}}, scope [[S3]] diff --git a/test/IDE/complete_call_pattern_heuristics.swift b/test/IDE/complete_call_pattern_heuristics.swift index c2385e43f322e..d1beda0bbdd8f 100644 --- a/test/IDE/complete_call_pattern_heuristics.swift +++ b/test/IDE/complete_call_pattern_heuristics.swift @@ -34,3 +34,7 @@ func testArg2Name3() { // LABELED_FIRSTARG-DAG: Pattern/Local/Flair[ArgLabels]: {#arg1: Int#}[#Int#]; // LABELED_FIRSTARG-NOT: ['(']{#arg1: Int#}, {#arg2: Int#}[')'][#Void#]; +func subscriptAccess(info: [String: Int]) { + info[#^SUBSCRIPT_ACCESS^#] +// SUBSCRIPT_ACCESS: Pattern/Local/Flair[ArgLabels]: {#keyPath: KeyPath<[String : Int], Value>#}[#KeyPath<[String : Int], Value>#]; name=keyPath: +} diff --git a/test/IRGen/abitypes.swift b/test/IRGen/abitypes.swift index 87c7796145724..d09c7887ec305 100644 --- a/test/IRGen/abitypes.swift +++ b/test/IRGen/abitypes.swift @@ -18,6 +18,7 @@ import Foundation // arm64-tvos: [[ARM64_MYRECT:%.*]] = type { float, float, float, float } // armv7k-watchos: [[ARMV7K_MYRECT:%.*]] = type { float, float, float, float } // arm64_32-watchos: [[ARM64_MYRECT:%.*]] = type { float, float, float, float } +// arm64-watchos: [[ARM64_MYRECT:%.*]] = type { float, float, float, float } // arm64-macosx: [[ARM64_MYRECT:%.*]] = type { float, float, float, float } class Foo { @@ -43,6 +44,8 @@ class Foo { // armv7k-watchos: define internal [[ARMV7K_MYRECT]] @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}FTo"(i8* %0, i8* %1) {{[#0-9]*}} { // armv64_32-watchos: define hidden swiftcc { float, float, float, float } @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}F"(%T8abitypes3FooC* swiftself %0) {{.*}} { // armv64_32-watchos: define internal [[ARMV7K_MYRECT]] @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}FTo"(i8* %0, i8* %1) {{[#0-9]*}} { + // arm64-watchos: define hidden swiftcc { float, float, float, float } @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}F"(%T8abitypes3FooC* swiftself %0) {{.*}} { + // arm64-watchos: define internal [[ARM64_MYRECT]] @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}FTo"(i8* %0, i8* %1) {{[#0-9]*}} { // x86_64-watchos: define hidden swiftcc { float, float, float, float } @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}F"(%T8abitypes3FooC* swiftself %0) {{.*}} { // x86_64-watchos: define internal { <2 x float>, <2 x float> } @"$s8abitypes3FooC3bar{{[_0-9a-zA-Z]*}}FTo"(i8* %0, i8* %1) {{[#0-9]*}} { @objc dynamic func bar() -> MyRect { @@ -323,6 +326,11 @@ class Foo { // i386-watchos: [[R2:%[0-9]+]] = call swiftcc i1 @"$s8abitypes3FooC6negate{{[_0-9a-zA-Z]*}}F"(i1 [[R1]] // i386-watchos: [[R3:%[0-9]+]] = call swiftcc i1 @"$s10ObjectiveC22_convertBoolToObjCBoolyAA0eF0VSbF"(i1 [[R2]]) // i386-watchos: ret i1 [[R3]] + // + // arm64-watchos-fixme: define hidden i1 @"$s8abitypes3FooC6negate{{[_0-9a-zA-Z]*}}F"(i1, %T8abitypes3FooC*) {{.*}} { + // arm64-watchos-fixme: define internal 
zeroext i1 @"$s8abitypes3FooC6negate{{[_0-9a-zA-Z]*}}FTo" + // arm64-watchos-fixme: [[R2:%[0-9]+]] = call i1 @"$s8abitypes3FooC6negate{{[_0-9a-zA-Z]*}}F" + // arm64-watchos-fixme: ret i1 [[R2]] @objc dynamic func negate(_ b: Bool) -> Bool { return !b @@ -445,7 +453,16 @@ class Foo { // armv7k-watchos: [[TOOBJCBOOL:%[0-9]+]] = call swiftcc i1 @"$s10ObjectiveC22_convertBoolToObjCBool{{[_0-9a-zA-Z]*}}F"(i1 [[NEG]]) // armv7k-watchos: ret i1 [[TOOBJCBOOL]] // - + // arm64-watchos: define hidden swiftcc i1 @"$s8abitypes3FooC7negate2{{[_0-9a-zA-Z]*}}F"(i1 %0, %T8abitypes3FooC* swiftself %1) {{.*}} { + // arm64-watchos: [[SEL:%[0-9]+]] = load i8*, i8** @"\01L_selector(negate:)", align 8 + // arm64-watchos: [[NEG:%[0-9]+]] = call zeroext i1 bitcast (void ()* @objc_msgSend to i1 ([[RECEIVER:.*]]*, i8*, i1)*)([[RECEIVER]]* {{%[0-9]+}}, i8* [[SEL]], i1 zeroext %0) + // arm64-watchos: ret i1 [[NEG]] + // + // arm64-watchos: define internal zeroext i1 @"$s8abitypes3FooC7negate2{{[_0-9a-zA-Z]*}}FTo"(i8* %0, i8* %1, i1 zeroext %2) + // arm64-watchos: [[NEG:%[0-9]+]] = call swiftcc i1 @"$s8abitypes3FooC7negate2{{[_0-9a-zA-Z]*}}F"(i1 + // arm64-watchos: [[TOOBJCBOOL:%[0-9]+]] = call swiftcc i1 @"$s10ObjectiveC22_convertBoolToObjCBool{{[_0-9a-zA-Z]*}}F"(i1 [[NEG]]) + // arm64-watchos: ret i1 [[TOOBJCBOOL]] + // // arm64-macosx: define internal zeroext i1 @"$s8abitypes3FooC7negate2{{[_0-9a-zA-Z]*}}FTo"(i8* %0, i8* %1, i1 zeroext %2) // arm64-macosx: [[NEG:%[0-9]+]] = call swiftcc i1 @"$s8abitypes3FooC7negate2{{[_0-9a-zA-Z]*}}F"(i1 // arm64-macosx: [[TOOBJCBOOL:%[0-9]+]] = call swiftcc i1 @"$s10ObjectiveC22_convertBoolToObjCBool{{[_0-9a-zA-Z]*}}F"(i1 [[NEG]]) @@ -556,6 +573,9 @@ class Foo { // arm64-tvos: define internal void @"$s8abitypes3FooC14callJustReturn{{[_0-9a-zA-Z]*}}FTo"(%TSo9BigStructV* noalias nocapture sret({{.*}}) %0, i8* %1, i8* %2, [[OPAQUE:.*]]* %3, %TSo9BigStructV* %4) {{[#0-9]*}} { // arm64-macosx: define hidden swiftcc { i64, i64, i64, i64 } @"$s8abitypes3FooC14callJustReturn{{[_0-9a-zA-Z]*}}F"(%TSo13StructReturnsC* %0, i64 %1, i64 %2, i64 %3, i64 %4, %T8abitypes3FooC* swiftself %5) {{.*}} { // arm64-macosx: define internal void @"$s8abitypes3FooC14callJustReturn{{[_0-9a-zA-Z]*}}FTo"(%TSo9BigStructV* noalias nocapture sret({{.*}}) %0, i8* %1, i8* %2, [[OPAQUE:.*]]* %3, %TSo9BigStructV* %4) {{.*}} { + // + // arm64-watchos: define hidden swiftcc { i64, i64, i64, i64 } @"$s8abitypes3FooC14callJustReturn{{[_0-9a-zA-Z]*}}F"(%TSo13StructReturnsC* %0, i64 %1, i64 %2, i64 %3, i64 %4, %T8abitypes3FooC* swiftself %5) {{.*}} { + // arm64-watchos: define internal void @"$s8abitypes3FooC14callJustReturn{{[_0-9a-zA-Z]*}}FTo"(%TSo9BigStructV* noalias nocapture sret({{.*}}) %0, i8* %1, i8* %2, [[OPAQUE:.*]]* %3, %TSo9BigStructV* %4) {{[#0-9]*}} { @objc dynamic func callJustReturn(_ r: StructReturns, with v: BigStruct) -> BigStruct { return r.justReturn(v) } @@ -621,6 +641,17 @@ public func testInlineAgg(_ rect: MyRect) -> Float { // arm64-macosx: store i8 0, i8* [[BYTE_ADDR]], align 8 // arm64-macosx: [[ARG:%.*]] = load i64, i64* [[COERCED]] // arm64-macosx: call void bitcast (void ()* @objc_msgSend to void (i8*, i8*, i64)*)(i8* {{.*}}, i8* {{.*}}, i64 [[ARG]]) +// +// arm64-watchos: define swiftcc void @"$s8abitypes14testBOOLStructyyF"() +// arm64-watchos: [[COERCED:%.*]] = alloca i64 +// arm64-watchos: [[STRUCTPTR:%.*]] = bitcast i64* [[COERCED]] to %TSo14FiveByteStructV +// arm64-watchos: [[PTR0:%.*]] = getelementptr inbounds %TSo14FiveByteStructV, %TSo14FiveByteStructV* [[STRUCTPTR]], {{i.*}} 0, 
{{i.*}} 0 +// arm64-watchos: [[PTR1:%.*]] = getelementptr inbounds %T10ObjectiveC8ObjCBoolV, %T10ObjectiveC8ObjCBoolV* [[PTR0]], {{i.*}} 0, {{i.*}} 0 +// arm64-watchos: [[PTR2:%.*]] = getelementptr inbounds %TSb, %TSb* [[PTR1]], {{i.*}} 0, {{i.*}} 0 +// arm64-watchos: [[BYTE_ADDR:%.*]] = bitcast i1* [[PTR2]] to i8* +// arm64-watchos: store i8 0, i8* [[BYTE_ADDR]], align 8 +// arm64-watchos: [[ARG:%.*]] = load i64, i64* [[COERCED]] +// arm64-watchos: call void bitcast (void ()* @objc_msgSend to void (i8*, i8*, i64)*)(i8* {{.*}}, i8* {{.*}}, i64 [[ARG]]) public func testBOOLStruct() { let s = FiveByteStruct() MyClass.mymethod(s) diff --git a/test/IRGen/async/partial_apply.sil b/test/IRGen/async/partial_apply.sil index 06597380952fc..2d625f5129515 100644 --- a/test/IRGen/async/partial_apply.sil +++ b/test/IRGen/async/partial_apply.sil @@ -531,7 +531,7 @@ entry(%i : $*ResilientInt, %c : $SwiftClass): // Make sure that we use the heap header size (16) for the initial offset. // CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swift{{(tail)?}}cc void @test_initial_offset( -sil @test_initial_offset : $@async @convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> () { +sil @test_initial_offset : $@async @convention(thin) (@in ResilientInt, @guaranteed SwiftClass) -> () { bb0(%x : $*ResilientInt, %y : $SwiftClass): %f = function_ref @closure : $@async @convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> () %p = partial_apply [callee_guaranteed] %f(%x, %y) : $@async @convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> () diff --git a/test/IRGen/error_self_conformance.sil b/test/IRGen/error_self_conformance.sil index 0ed107914fa6e..f98082de507ec 100644 --- a/test/IRGen/error_self_conformance.sil +++ b/test/IRGen/error_self_conformance.sil @@ -21,6 +21,7 @@ sil @partial_apply_test : $@convention(thin) (@in Error) -> () { entry(%0 : $*Error): %take = function_ref @take_any_error : $@convention(thin) (@in T) -> () %fn = partial_apply %take(%0) : $@convention(thin) (@in T) -> () + release_value %fn : $@callee_owned () ->() %ret = tuple () return %ret : $() } diff --git a/test/IRGen/partial_apply.sil b/test/IRGen/partial_apply.sil index e9248aa533939..af740da64603c 100644 --- a/test/IRGen/partial_apply.sil +++ b/test/IRGen/partial_apply.sil @@ -1,8 +1,8 @@ // RUN: %empty-directory(%t) // RUN: %target-swift-frontend -emit-module -enable-library-evolution -emit-module-path=%t/resilient_struct.swiftmodule -module-name=resilient_struct %S/../Inputs/resilient_struct.swift -// RUN: %target-swift-frontend -I %t -emit-ir %s | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-ptrsize +// RUN: %target-swift-frontend -Xllvm -sil-disable-pass=OnoneSimplification -I %t -emit-ir %s | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-ptrsize -// REQUIRES: CPU=x86_64 +// REQUIRES: PTRSIZE=64 import Builtin import Swift @@ -743,7 +743,7 @@ sil public_external @closure : $@convention(thin) (@in_guaranteed ResilientInt, // CHECK: = xor i64 [[ALIGNMASK]], -1 // CHECK: = add i64 16, [[ALIGNMASK]] -sil @test_initial_offset : $@convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> () { +sil @test_initial_offset : $@convention(thin) (@in ResilientInt, @guaranteed SwiftClass) -> () { bb0(%x : $*ResilientInt, %y : $SwiftClass): %f = function_ref @closure : $@convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> () %p = partial_apply [callee_guaranteed] %f(%x, %y) : $@convention(thin) 
(@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> () diff --git a/test/IRGen/partial_apply_run_generic_method1.sil b/test/IRGen/partial_apply_run_generic_method1.sil index 944ac5a22bfe5..cc2b6935025ab 100644 --- a/test/IRGen/partial_apply_run_generic_method1.sil +++ b/test/IRGen/partial_apply_run_generic_method1.sil @@ -1,8 +1,8 @@ // RUN: %empty-directory(%t) // RUN: %target-build-swift-dylib(%t/%target-library-name(PrintShims)) %S/../Inputs/print-shims.swift -module-name PrintShims -emit-module -emit-module-path %t/PrintShims.swiftmodule // RUN: %target-codesign %t/%target-library-name(PrintShims) -// RUN: %target-build-swift -g -parse-sil %s -emit-ir -I %t -L %t -lPrintShim | %FileCheck %s --check-prefix=CHECK-LL -// RUN: %target-build-swift -g -parse-sil %s -module-name main -o %t/main -I %t -L %t -lPrintShims %target-rpath(%t) +// RUN: %target-build-swift -g -parse-sil %s -Xllvm -sil-disable-pass=Simplification -emit-ir -I %t -L %t -lPrintShim | %FileCheck %s --check-prefix=CHECK-LL +// RUN: %target-build-swift -g -parse-sil %s -Xllvm -sil-disable-pass=Simplification -module-name main -o %t/main -I %t -L %t -lPrintShims %target-rpath(%t) // RUN: %target-codesign %t/main // RUN: %target-run %t/main %t/%target-library-name(PrintShims) | %FileCheck %s diff --git a/test/IRGen/protocol_resilience.sil b/test/IRGen/protocol_resilience.sil index 5f89d1389bc7e..65d5699c0af87 100644 --- a/test/IRGen/protocol_resilience.sil +++ b/test/IRGen/protocol_resilience.sil @@ -1,7 +1,7 @@ // RUN: %empty-directory(%t) // RUN: %target-swift-frontend -emit-module -enable-library-evolution -emit-module-path=%t/resilient_protocol.swiftmodule -module-name=resilient_protocol %S/../Inputs/resilient_protocol.swift -// RUN: %target-swift-frontend -I %t -emit-ir -enable-library-evolution %s | %FileCheck %s -DINT=i%target-ptrsize -// RUN: %target-swift-frontend -I %t -emit-ir -enable-library-evolution -O %s +// RUN: %target-swift-frontend -I %t -emit-ir -Xllvm -sil-disable-pass=Simplification -enable-library-evolution %s | %FileCheck %s -DINT=i%target-ptrsize +// RUN: %target-swift-frontend -I %t -emit-ir -Xllvm -sil-disable-pass=Simplification -enable-library-evolution -O %s sil_stage canonical diff --git a/test/IRGen/simple_partial_apply_or_not.swift b/test/IRGen/simple_partial_apply_or_not.swift index e888439739694..c2241c3cd630e 100644 --- a/test/IRGen/simple_partial_apply_or_not.swift +++ b/test/IRGen/simple_partial_apply_or_not.swift @@ -1,5 +1,5 @@ -// RUN: %target-swift-emit-ir -module-name test %s | %FileCheck %s -// RUN: %target-run-simple-swift %s | %FileCheck %s --check-prefix=CHECK-EXEC +// RUN: %target-swift-emit-ir -Xllvm -sil-disable-pass=Simplification -module-name test %s | %FileCheck %s +// RUN: %target-run-simple-swift -Xllvm -sil-disable-pass=Simplification %s | %FileCheck %s --check-prefix=CHECK-EXEC // REQUIRES: executable_test diff --git a/test/Macros/attached_macros_diags.swift b/test/Macros/attached_macros_diags.swift index 58e0cf79febd7..bc592ed27bd86 100644 --- a/test/Macros/attached_macros_diags.swift +++ b/test/Macros/attached_macros_diags.swift @@ -38,7 +38,7 @@ struct SkipNestedType { // We select the macro, not the property wrapper. 
@m1 var x: Int = 0 - // expected-error@-1{{external macro implementation type 'MyMacros.Macro1' could not be found for macro 'm1()'; the type must be public and provided via '-load-plugin-library'}} + // expected-error@-1{{external macro implementation type 'MyMacros.Macro1' could not be found for macro 'm1()'}} } struct TestMacroArgs { diff --git a/test/Macros/external-macro-without-decl.swift b/test/Macros/external-macro-without-decl.swift index 80abec6e6cec3..532cf21ab07d0 100644 --- a/test/Macros/external-macro-without-decl.swift +++ b/test/Macros/external-macro-without-decl.swift @@ -3,7 +3,7 @@ // RUN: %target-typecheck-verify-swift -swift-version 5 -module-name Swift -parse-stdlib // expected-warning@+2{{@expression has been removed in favor of @freestanding(expression)}} -// expected-warning@+1{{external macro implementation type 'A.B' could not be found for macro 'myMacro()'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@+1{{external macro implementation type 'A.B' could not be found for macro 'myMacro()'}} @expression macro myMacro() = #externalMacro(module: "A", type: "B") // Protocols needed for string literals to work diff --git a/test/Macros/macro_expand_closure.swift b/test/Macros/macro_expand_closure.swift index 356ae29c62936..64a6aa5a48e7d 100644 --- a/test/Macros/macro_expand_closure.swift +++ b/test/Macros/macro_expand_closure.swift @@ -20,8 +20,7 @@ func multiStatementInference() -> Int { // The closure intruduced by the macro expansion should not contain any inline // locations, but instead point directly into the macro buffer. // CHECK-SIL: sil_scope [[S0:[0-9]+]] { loc "@__swiftmacro_9MacroUser23multiStatementInferenceSiyF0cD0fMf_.swift":1:1 parent @$s9MacroUser23multiStatementInferenceSiyFSiyXEfU_ -// CHECK-SIL: sil_scope [[S1:[0-9]+]] { loc "@__swiftmacro_9MacroUser23multiStatementInferenceSiyF0cD0fMf_.swift":2:7 parent [[S0]] } -// CHECK-SIL: sil_scope [[S2:[0-9]+]] { loc "@__swiftmacro_9MacroUser23multiStatementInferenceSiyF0cD0fMf_.swift":2:14 parent [[S1]] } +// CHECK-SIL: sil_scope [[S2:[0-9]+]] { loc "@__swiftmacro_9MacroUser23multiStatementInferenceSiyF0cD0fMf_.swift":2:14 parent [[S0]] } // CHECK-SIL: sil {{.*}} @$s9MacroUser23multiStatementInferenceSiyFSiyXEfU_ // CHECK-SIL-NOT: return diff --git a/test/Macros/macros_diagnostics.swift b/test/Macros/macros_diagnostics.swift index cbae960c02a8e..38ecae461f874 100644 --- a/test/Macros/macros_diagnostics.swift +++ b/test/Macros/macros_diagnostics.swift @@ -137,7 +137,7 @@ func shadow(a: Int, b: Int, stringify: Int) { } func testMissing() { - #missingMacro1("hello") // expected-error{{external macro implementation type 'MissingModule.MissingType' could not be found for macro 'missingMacro1'; the type must be public and provided via '-load-plugin-library'}} + #missingMacro1("hello") // expected-error{{external macro implementation type 'MissingModule.MissingType' could not be found for macro 'missingMacro1'; the type must be public and provided by a macro target in a Swift package, or via '-plugin-path' or '-load-plugin-library'}} } @freestanding(expression) macro undefined() // expected-error{{macro 'undefined()' requires a definition}} diff --git a/test/Macros/parsing.swift b/test/Macros/parsing.swift index 624d8f7b166f4..e98f6f1f3a7df 100644 --- a/test/Macros/parsing.swift +++ b/test/Macros/parsing.swift @@ -6,16 +6,16 @@ protocol P { } protocol Q { associatedtype Assoc } @freestanding(expression) macro m1() -> Int = #externalMacro(module: "A", type: "M1") -// 
expected-warning@-1{{external macro implementation type 'A.M1' could not be found for macro 'm1()'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M1' could not be found for macro 'm1()'}} // expected-note@-2{{'m1()' declared here}} @freestanding(expression) macro m2(_: Int) = #externalMacro(module: "A", type: "M2") -// expected-warning@-1{{external macro implementation type 'A.M2' could not be found for macro 'm2'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M2' could not be found for macro 'm2'}} @freestanding(expression) macro m3(a b: Int) -> Int = #externalMacro(module: "A", type: "M3") -// expected-warning@-1{{external macro implementation type 'A.M3' could not be found for macro 'm3(a:)'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M3' could not be found for macro 'm3(a:)'}} @freestanding(expression) macro m4() -> T = #externalMacro(module: "A", type: "M4") where T.Assoc: P -// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm4()'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm4()'}} @freestanding(expression) macro m5(_: T) = #externalMacro(module: "A", type: "M4") -// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm5'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm5'}} @freestanding(expression) macro m6 = A // expected-error{{expected '(' for macro parameters or ':' for a value-like macro}} // expected-error@-1{{by a macro expansion}} @@ -23,18 +23,18 @@ protocol Q { associatedtype Assoc } // expected-error @+2 {{expected '('}} // expected-error @+1 {{macro 'm7' must declare its applicable roles}} @freestanding macro m7(_: String) = #externalMacro(module: "A", type: "M4") -// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm7'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm7'}} // expected-error @+2 {{expected a freestanding macro role such as 'expression'}} // expected-error @+1 {{macro 'm8' must declare its applicable roles}} @freestanding(abc) macro m8(_: String) = #externalMacro(module: "A", type: "M4") -// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm8'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm8'}} @freestanding(declaration, names: arbitrary) macro m9(_: String) = #externalMacro(module: "A", type: "M4") -// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm9'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm9'}} @freestanding(expression) @freestanding(declaration, names: named(Foo)) @attached(accessor) macro m10(_: String) = #externalMacro(module: "A", type: "M4") -// expected-warning@-1{{external macro implementation type 
'A.M4' could not be found for macro 'm10'; the type must be public and provided via '-load-plugin-library'}} +// expected-warning@-1{{external macro implementation type 'A.M4' could not be found for macro 'm10'}} @attached( accessor, @@ -55,4 +55,4 @@ macro am2() -> Void #m1 + 1 // expected-warning @-1 {{result of operator '+' is unused}} -// expected-error @-2 {{external macro implementation type 'A.M1' could not be found for macro 'm1()'; the type must be public and provided via '-load-plugin-library'}} +// expected-error @-2 {{external macro implementation type 'A.M1' could not be found for macro 'm1()'}} diff --git a/test/ModuleInterface/feature-LexicalLifetimes.swift b/test/ModuleInterface/feature-LexicalLifetimes.swift new file mode 100644 index 0000000000000..261815a9388f6 --- /dev/null +++ b/test/ModuleInterface/feature-LexicalLifetimes.swift @@ -0,0 +1,35 @@ +// RUN: %empty-directory(%t) + +// RUN: %target-swift-emit-module-interface(%t/FeatureTest.swiftinterface) %s -module-name FeatureTest -disable-availability-checking +// RUN: %target-swift-typecheck-module-from-interface(%t/FeatureTest.swiftinterface) -module-name FeatureTest -disable-availability-checking +// RUN: %FileCheck %s < %t/FeatureTest.swiftinterface + +// CHECK: #if compiler(>=5.3) && $LexicalLifetimes +// CHECK-NEXT: @_noEagerMove public struct Permanent { +// CHECK-NEXT: } +// CHECK-NEXT: #else +// CHECK-NEXT: public struct Permanent { +// CHECK-NEXT: } +// CHECK-NEXT: #endif +@_noEagerMove +public struct Permanent {} + +// CHECK: #if compiler(>=5.3) && $LexicalLifetimes +// CHECK-NEXT: @_hasMissingDesignatedInitializers @_eagerMove public class Transient { +// CHECK-NEXT: deinit +// CHECK-NEXT: } +// CHECK-NEXT: #else +// CHECK-NEXT: @_hasMissingDesignatedInitializers public class Transient { +// CHECK-NEXT: deinit +// CHECK-NEXT: } +// CHECK-NEXT: #endif +@_eagerMove +public class Transient {} + +// CHECK: #if compiler(>=5.3) && $LexicalLifetimes +// CHECK-NEXT: @_lexicalLifetimes public func lexicalInAModuleWithoutLexicalLifetimes(_ t: FeatureTest.Transient) +// CHECK-NEXT: #else +// CHECK-NEXT: public func lexicalInAModuleWithoutLexicalLifetimes(_ t: FeatureTest.Transient) +// CHECK-NEXT: #endif +@_lexicalLifetimes +public func lexicalInAModuleWithoutLexicalLifetimes(_ t: Transient) {} diff --git a/test/Parse/without_copyable.swift b/test/Parse/without_copyable.swift index 82791ed30b2c7..9afc658266bb4 100644 --- a/test/Parse/without_copyable.swift +++ b/test/Parse/without_copyable.swift @@ -41,7 +41,19 @@ func takeNoncopyableGeneric(_ t: T) {} // expected-error {{expecte ~Copyable // expected-note {{attribute already specified here}} {} -// basic test to ensure it's viewed as a noncopyable struct: -struct HasADeinit: ~Copyable { - deinit {} +// basic tests to ensure it's viewed as a noncopyable struct, by using +// capabilities only available to them +struct HasADeinit: ~Copyable { deinit {} } + +public struct MoveOnlyS1 : ~Copyable { deinit {} } +public struct MoveOnlyS2 : ~Copyable { deinit {} } + +public enum MoveOnlyE1 : ~Copyable { + case holding(s: MoveOnlyS1) + consuming func x() {} +} + +public enum MoveOnlyE2 : ~Copyable { + case holding(s: MoveOnlyS1) + consuming func x() {} } diff --git a/test/SILGen/moveonly_escaping_closure.swift b/test/SILGen/moveonly_escaping_closure.swift index 8c1877b646f02..ceae22785ad71 100644 --- a/test/SILGen/moveonly_escaping_closure.swift +++ b/test/SILGen/moveonly_escaping_closure.swift @@ -607,10 +607,8 @@ func testConsumingEscapeClosureCaptureLet(_ f: consuming 
@escaping () -> ()) { // CHECK: } // end sil function '$s16moveonly_closure29testGlobalClosureCaptureInOutyyAA9SingleEltVzFyycfU_' var globalClosureCaptureInOut: () -> () = {} func testGlobalClosureCaptureInOut(_ x: inout SingleElt) { - // expected-error @-1 {{'x' consumed but not reinitialized before end of function}} - // expected-note @-2 {{'x' is declared 'inout'}} + // expected-note @-1 {{'x' is declared 'inout'}} globalClosureCaptureInOut = { // expected-error {{escaping closure captures 'inout' parameter 'x'}} - // expected-note @-1 {{consuming use here}} borrowVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} @@ -652,9 +650,7 @@ func testGlobalClosureCaptureInOut(_ x: inout SingleElt) { // CHECK: } // end sil function '$s16moveonly_closure31testLocalLetClosureCaptureInOutyyAA9SingleEltVzFyycfU_' func testLocalLetClosureCaptureInOut(_ x: inout SingleElt) { // expected-note @-1 {{'x' is declared 'inout'}} - // expected-error @-2 {{'x' consumed but not reinitialized before end of function}} let f = { // expected-error {{escaping closure captures 'inout' parameter 'x'}} - // expected-note @-1 {{consuming use here}} borrowVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} @@ -700,10 +696,8 @@ func testLocalLetClosureCaptureInOut(_ x: inout SingleElt) { // CHECK: apply {{%.*}}([[LOADED_READ]], [[LOADED_TAKE]]) // CHECK: } // end sil function '$s16moveonly_closure31testLocalVarClosureCaptureInOutyyAA9SingleEltVzFyycfU_' func testLocalVarClosureCaptureInOut(_ x: inout SingleElt) { - // expected-error @-1 {{'x' consumed but not reinitialized before end of function}} - // expected-note @-2 {{'x' is declared 'inout'}} - var f = { // expected-note {{consuming use here}} - // expected-error @-1 {{escaping closure captures 'inout' parameter 'x'}} + // expected-note @-1 {{'x' is declared 'inout'}} + var f = { // expected-error {{escaping closure captures 'inout' parameter 'x'}} borrowVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} @@ -750,10 +744,8 @@ func testLocalVarClosureCaptureInOut(_ x: inout SingleElt) { // CHECK: apply {{%.*}}([[LOADED_READ]], [[LOADED_TAKE]]) // CHECK: } // end sil function '$s16moveonly_closure026testInOutVarClosureCapturedE0yyyycz_AA9SingleEltVztFyycfU_' func testInOutVarClosureCaptureInOut(_ f: inout () -> (), _ x: inout SingleElt) { - // expected-error @-1 {{'x' consumed but not reinitialized before end of function}} - // expected-note @-2 {{'x' is declared 'inout'}} + // expected-note @-1 {{'x' is declared 'inout'}} f = { // expected-error {{escaping closure captures 'inout' parameter 'x'}} - // expected-note @-1 {{consuming use here}} borrowVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} @@ -805,9 +797,7 @@ func testInOutVarClosureCaptureInOut(_ f: inout () -> (), _ x: inout SingleElt) // CHECK: } // end sil function '$s16moveonly_closure38testConsumingEscapeClosureCaptureInOutyyyycn_AA9SingleEltVztFyycfU_' func testConsumingEscapeClosureCaptureInOut(_ f: consuming @escaping () -> (), _ x: inout SingleElt) { // expected-note @-1 {{'x' is declared 'inout'}} - // expected-error @-2 {{'x' consumed but not reinitialized before end of function}} f = { // expected-error {{escaping closure captures 'inout' parameter 
'x'}} - // expected-note @-1 {{consuming use here}} borrowVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} consumeVal(x) // expected-note {{captured here}} diff --git a/test/SILGen/moveonly_library_evolution.swift b/test/SILGen/moveonly_library_evolution.swift new file mode 100644 index 0000000000000..ddce1f9073fb5 --- /dev/null +++ b/test/SILGen/moveonly_library_evolution.swift @@ -0,0 +1,51 @@ +// RUN: %target-swift-emit-silgen -enable-experimental-feature NoImplicitCopy -enable-library-evolution %s | %FileCheck %s + +//////////////////////// +// MARK: Declarations // +//////////////////////// + +public struct EmptyStruct : ~Copyable {} +public struct NonEmptyStruct : ~Copyable { + var e = EmptyStruct() +} +public class CopyableKlass { + var s = NonEmptyStruct() + + let letStruct = NonEmptyStruct() +} + +public func borrowVal(_ x: borrowing EmptyStruct) {} + +////////////////////// +// MARK: DeinitTest // +////////////////////// + +// CHECK-LABEL: sil [ossa] @$s26moveonly_library_evolution10DeinitTestVfD : $@convention(method) (@in DeinitTest) -> () { +// CHECK: bb0([[ARG:%.*]] : $*DeinitTest): +// CHECK: drop_deinit [[ARG]] +// CHECK: } // end sil function '$s26moveonly_library_evolution10DeinitTestVfD' +public struct DeinitTest : ~Copyable { + deinit { + } +} + +////////////////////////////////////////// +// MARK: Caller Argument Spilling Tests // +////////////////////////////////////////// + +// CHECK-LABEL: sil [ossa] @$s26moveonly_library_evolution29callerArgumentSpillingTestArgyyAA13CopyableKlassCF : $@convention(thin) (@guaranteed CopyableKlass) -> () { +// CHECK: bb0([[ARG:%.*]] : @guaranteed $CopyableKlass): +// CHECK: [[ADDR:%.*]] = ref_element_addr [[ARG]] +// CHECK: [[MARKED_ADDR:%.*]] = mark_must_check [no_consume_or_assign] [[ADDR]] +// CHECK: [[LOADED_VALUE:%.*]] = load [copy] [[MARKED_ADDR]] +// CHECK: [[BORROWED_LOADED_VALUE:%.*]] = begin_borrow [[LOADED_VALUE]] +// CHECK: [[EXT:%.*]] = struct_extract [[BORROWED_LOADED_VALUE]] +// CHECK: [[SPILL:%.*]] = alloc_stack $EmptyStruct +// CHECK: [[STORE_BORROW:%.*]] = store_borrow [[EXT]] to [[SPILL]] +// CHECK: apply {{%.*}}([[STORE_BORROW]]) : $@convention(thin) (@in_guaranteed EmptyStruct) -> () +// CHECK: end_borrow [[STORE_BORROW]] +// CHECK: end_borrow [[BORROWED_LOADED_VALUE]] +// CHECK: } // end sil function '$s26moveonly_library_evolution29callerArgumentSpillingTestArgyyAA13CopyableKlassCF' +public func callerArgumentSpillingTestArg(_ x: CopyableKlass) { + borrowVal(x.letStruct.e) +} diff --git a/test/SILOptimizer/abcopts.sil b/test/SILOptimizer/abcopts.sil index 35d4da216266f..a5f7fc8f6804e 100644 --- a/test/SILOptimizer/abcopts.sil +++ b/test/SILOptimizer/abcopts.sil @@ -1096,6 +1096,192 @@ bb4(%31 : $Builtin.Int32): return %32 : $Int32 } +// SIL pattern for : +// func hoist_new_ind_pattern1(_ a : [Int]) { +// for i in 0...4 { +// ...a[i]... 
+// } +// } +// Bounds check for the array should be hoisted out of the loop +// +// RANGECHECK-LABEL: sil @hoist_new_ind_pattern1 : +// RANGECHECK: [[CB:%.*]] = function_ref @checkbounds : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken +// RANGECHECK: [[MINUS1:%.*]] = integer_literal $Builtin.Int1, -1 +// RANGECHECK: [[TRUE:%.*]] = struct $Bool ([[MINUS1]] : $Builtin.Int1) +// RANGECHECK: [[ZERO:%.*]] = integer_literal $Builtin.Int32, 0 +// RANGECHECK: [[INTZERO:%.*]] = struct $Int32 ([[ZERO]] : $Builtin.Int32) +// RANGECHECK: [[FOUR:%.*]] = integer_literal $Builtin.Int32, 4 +// RANGECHECK: [[ONE:%.*]] = integer_literal $Builtin.Int32, 1 +// RANGECHECK: [[ANOTHERTRUE:%.*]] = integer_literal $Builtin.Int1, -1 +// RANGECHECK: [[INTONEADD:%.*]] = builtin "sadd_with_overflow_Int32"([[ZERO]] : $Builtin.Int32, [[ONE]] : $Builtin.Int32, [[ANOTHERTRUE]] : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) +// RANGECHECK: [[INTONEEX:%.*]] = tuple_extract [[INTONEADD]] : $(Builtin.Int32, Builtin.Int1), 0 +// RANGECHECK: [[INTONE:%.*]] = struct $Int32 ([[INTONEEX]] : $Builtin.Int32) +// RANGECHECK: [[A1:%.*]] = apply [[CB]]([[INTONE]], [[TRUE]], %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken +// RANGECHECK: [[INTFOUR:%.*]] = struct $Int32 ([[FOUR]] : $Builtin.Int32) +// RANGECHECK: [[A2:%.*]] = apply [[CB]]([[INTFOUR]], [[TRUE]], %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken +// RANGECHECK: bb1 +// RANGECHECK-NOT: apply [[CB]] +// RANGECHECK-LABEL: } // end sil function 'hoist_new_ind_pattern1' +sil @hoist_new_ind_pattern1 : $@convention(thin) (@owned ArrayInt) -> () { +bb0(%0 : $ArrayInt): + %minus1 = integer_literal $Builtin.Int1, -1 + %true = struct $Bool(%minus1 : $Builtin.Int1) + %zero = integer_literal $Builtin.Int32, 0 + %int0 = struct $Int32 (%zero : $Builtin.Int32) + %one = integer_literal $Builtin.Int32, 1 + %four = integer_literal $Builtin.Int32, 4 + %intfour = struct $Int32 (%four : $Builtin.Int32) + %cb = function_ref @checkbounds : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + apply %cb(%int0, %true, %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + br bb1(%zero : $Builtin.Int32) + +bb1(%4 : $Builtin.Int32): + %5 = builtin "sadd_with_overflow_Int32"(%4 : $Builtin.Int32, %one : $Builtin.Int32, %minus1 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) + %6 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 0 + %7 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 1 + %8 = struct $Int32 (%6 : $Builtin.Int32) + %9 = builtin "cmp_eq_Int32"(%6 : $Builtin.Int32, %four : $Builtin.Int32) : $Builtin.Int1 + apply %cb(%8, %true, %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + cond_br %9, bb3, bb2 + +bb2: + br bb1(%6 : $Builtin.Int32) + +bb3: + %t = tuple () + return %t : $() +} + +// Currently this is not optimized because the induction var increment is 2 +// Support for this can be added by updating induction variable analysis +// RANGECHECK-LABEL: sil @hoist_new_ind_pattern2 : +// RANGECHECK: [[CB:%.*]] = function_ref @checkbounds : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken +// RANGECHECK: bb1 +// RANGECHECK: apply [[CB]] +// RANGECHECK-LABEL: } // end sil function 'hoist_new_ind_pattern2' +sil @hoist_new_ind_pattern2 : $@convention(thin) (@owned ArrayInt) -> () { +bb0(%0 : $ArrayInt): + %minus1 = integer_literal $Builtin.Int1, -1 + %true = struct $Bool(%minus1 : $Builtin.Int1) + %zero = 
integer_literal $Builtin.Int32, 0 + %int0 = struct $Int32 (%zero : $Builtin.Int32) + %two = integer_literal $Builtin.Int32, 2 + %four = integer_literal $Builtin.Int32, 4 + %intfour = struct $Int32 (%four : $Builtin.Int32) + %cb = function_ref @checkbounds : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + apply %cb(%int0, %true, %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + br bb1(%zero : $Builtin.Int32) + +bb1(%4 : $Builtin.Int32): + %5 = builtin "sadd_with_overflow_Int32"(%4 : $Builtin.Int32, %two : $Builtin.Int32, %minus1 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) + %6 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 0 + %7 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 1 + cond_fail %7 : $Builtin.Int1 + %8 = struct $Int32 (%6 : $Builtin.Int32) + %9 = builtin "cmp_eq_Int32"(%6 : $Builtin.Int32, %four : $Builtin.Int32) : $Builtin.Int1 + apply %cb(%8, %true, %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + cond_br %9, bb3, bb2 + +bb2: + br bb1(%6 : $Builtin.Int32) + +bb3: + %t = tuple () + return %t : $() +} + +// This is currently not optimized because access function is not recognized +// SIL pattern for : +// for var index in 0...24 +// { +// ...a[index + index]... +// } +// +// RANGECHECK-LABEL: sil @hoist_new_ind_pattern3 : +// RANGECHECK: [[CB:%.*]] = function_ref @checkbounds : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken +// RANGECHECK: bb1 +// RANGECHECK: apply [[CB]] +// RANGECHECK-LABEL: } // end sil function 'hoist_new_ind_pattern3' +sil @hoist_new_ind_pattern3 : $@convention(thin) (@owned ArrayInt) -> () { +bb0(%0 : $ArrayInt): + %minus1 = integer_literal $Builtin.Int1, -1 + %true = struct $Bool(%minus1 : $Builtin.Int1) + %zero = integer_literal $Builtin.Int32, 0 + %int0 = struct $Int32 (%zero : $Builtin.Int32) + %one = integer_literal $Builtin.Int32, 1 + %four = integer_literal $Builtin.Int32, 4 + %intfour = struct $Int32 (%four : $Builtin.Int32) + %cb = function_ref @checkbounds : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + apply %cb(%int0, %true, %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + br bb1(%zero : $Builtin.Int32) + +bb1(%4 : $Builtin.Int32): + %5 = builtin "sadd_with_overflow_Int32"(%4 : $Builtin.Int32, %one : $Builtin.Int32, %minus1 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) + %6 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 0 + %7 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 1 + %8 = struct $Int32 (%6 : $Builtin.Int32) + %9 = builtin "cmp_eq_Int32"(%6 : $Builtin.Int32, %four : $Builtin.Int32) : $Builtin.Int1 + %10 = builtin "sadd_with_overflow_Int32"(%6 : $Builtin.Int32, %6 : $Builtin.Int32, %minus1 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) + %11 = tuple_extract %10 : $(Builtin.Int32, Builtin.Int1), 0 + %12 = tuple_extract %10 : $(Builtin.Int32, Builtin.Int1), 1 + %13 = struct $Int32 (%11 : $Builtin.Int32) + apply %cb(%13, %true, %0) : $@convention(method) (Int32, Bool, @owned ArrayInt) -> _DependenceToken + cond_br %9, bb3, bb2 + +bb2: + br bb1(%6 : $Builtin.Int32) + +bb3: + %t = tuple () + return %t : $() +} + + +// RANGECHECK-LABEL: sil @hoist_new_ind_pattern4 : +// RANGECHECK: [[MINUS1:%.*]] = integer_literal $Builtin.Int1, -1 +// RANGECHECK: [[TRUE:%.*]] = struct $Bool ([[MINUS1]] : $Builtin.Int1) +// RANGECHECK: [[ZERO:%.*]] = integer_literal $Builtin.Int32, 0 +// RANGECHECK: [[INTZERO:%.*]] = struct $Int32 ([[ZERO]] : 
$Builtin.Int32) +// RANGECHECK: [[CB:%.*]] = function_ref @checkbounds2 : $@convention(method) (Int32, Bool, @owned Array) -> _DependenceToken +// RANGECHECK: apply [[CB]]([[INTZERO]], [[TRUE]], %0) : $@convention(method) (Int32, Bool, @owned Array) -> _DependenceToken +// RANGECHECK-NOT: apply [[CB]] +// RANGECHECK-LABEL: } // end sil function 'hoist_new_ind_pattern4' +// for i in 0..) -> () { +bb0(%0 : $Array): + %minus1 = integer_literal $Builtin.Int1, -1 + %true = struct $Bool(%minus1 : $Builtin.Int1) + %zero = integer_literal $Builtin.Int32, 0 + %int0 = struct $Int32 (%zero : $Builtin.Int32) + %one = integer_literal $Builtin.Int32, 1 + %f1 = function_ref @getCount2 : $@convention(method) (@owned Array) -> Int32 + %t1 = apply %f1(%0) : $@convention(method) (@owned Array) -> Int32 + %count = struct_extract %t1 : $Int32, #Int32._value + %cb = function_ref @checkbounds2 : $@convention(method) (Int32, Bool, @owned Array) -> _DependenceToken + apply %cb(%int0, %true, %0) : $@convention(method) (Int32, Bool, @owned Array) -> _DependenceToken + br bb1(%zero : $Builtin.Int32) + +bb1(%4 : $Builtin.Int32): + %5 = builtin "sadd_with_overflow_Int32"(%4 : $Builtin.Int32, %one : $Builtin.Int32, %minus1 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) + %6 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 0 + %7 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 1 + cond_fail %7 : $Builtin.Int1 + %8 = struct $Int32 (%6 : $Builtin.Int32) + apply %cb(%8, %true, %0) : $@convention(method) (Int32, Bool, @owned Array) -> _DependenceToken + %9 = builtin "cmp_eq_Int32"(%6 : $Builtin.Int32, %count : $Builtin.Int32) : $Builtin.Int1 + cond_br %9, bb3, bb2 + +bb2: + br bb1(%6 : $Builtin.Int32) + +bb3: + %t = tuple () + return %t : $() +} + sil public_external [_semantics "array.check_subscript"] @checkbounds_no_meth : $@convention(thin) (Int32, Bool, @owned ArrayInt) -> _DependenceToken { bb0(%0: $Int32, %1: $Bool, %2: $ArrayInt): unreachable diff --git a/test/SILOptimizer/canonicalize_borrow_scope_unit.sil b/test/SILOptimizer/canonicalize_borrow_scope_unit.sil index 49401830eaa46..9c7fcb29e623c 100644 --- a/test/SILOptimizer/canonicalize_borrow_scope_unit.sil +++ b/test/SILOptimizer/canonicalize_borrow_scope_unit.sil @@ -6,9 +6,14 @@ typealias AnyObject = Builtin.AnyObject class C {} class D : C {} +struct S { + @_hasStorage var guts: SGuts +} +class SGuts {} sil @getD : $() -> (@owned D) sil @takeC : $(@owned C) -> () +sil [ossa] @sink : $@convention(thin) <τ_0_0> (@owned τ_0_0) -> () struct Unmanaged where Instance : AnyObject { unowned(unsafe) var _value: @sil_unmanaged Instance @@ -70,10 +75,8 @@ bb0(%instance : @guaranteed $Instance): // CHECK: [[TAKE_C:%[^,]+]] = function_ref @takeC // CHECK: [[C:%[^,]+]] = apply [[GET_C]]() // CHECK: [[OUTER_COPY:%[^,]+]] = copy_value [[C]] -// CHECK: [[OUTER_UPCAST:%[^,]+]] = upcast [[OUTER_COPY]] // CHECK: [[B:%[^,]+]] = begin_borrow [[C]] -// CHECK: [[DEAD_INNER_COPY:%[^,]+]] = copy_value [[B]] -// CHECK: destroy_value [[DEAD_INNER_COPY]] +// CHECK: [[OUTER_UPCAST:%[^,]+]] = upcast [[OUTER_COPY]] // CHECK: [[U:%[^,]+]] = upcast [[B]] // CHECK: [[C1:%[^,]+]] = copy_value [[U]] // CHECK: apply [[TAKE_C]]([[C1]]) @@ -97,3 +100,52 @@ sil [ossa] @dont_rewrite_inner_forwarding_user : $@convention(thin) () -> (@owne destroy_value %d : $D return %u2 : $C } + +// CHECK-LABEL: begin running test {{.*}} on dont_hoist_inner_destructure: canonicalize-borrow-scope +// CHECK-LABEL: sil [ossa] @dont_hoist_inner_destructure : {{.*}} { +// CHECK: {{bb[0-9]+}}([[S:%[^,]+]] : +// 
CHECK: [[OUTER_COPY:%[^,]+]] = copy_value [[S]] +// CHECK: [[S_BORROW:%[^,]+]] = begin_borrow [[S]] +// CHECK: cond_br undef, [[LEFT:bb[0-9]+]], [[RIGHT:bb[0-9]+]] +// CHECK: [[LEFT]]: +// CHECK: [[INNARDS:%[^,]+]] = destructure_struct [[OUTER_COPY]] +// CHECK: end_borrow [[S_BORROW]] +// CHECK: destroy_value [[INNARDS]] +// CHECK: destroy_value [[S]] +// CHECK: br [[EXIT:bb[0-9]+]] +// CHECK: [[RIGHT]]: +// CHECK: destroy_value [[OUTER_COPY]] +// CHECK: [[SINK:%[^,]+]] = function_ref @sink +// CHECK: [[INNER_COPY:%[^,]+]] = copy_value [[S_BORROW]] +// CHECK: apply [[SINK]]([[INNER_COPY]]) +// CHECK: end_borrow [[S_BORROW]] +// CHECK: destroy_value [[S]] +// CHECK: br [[EXIT]] +// CHECK: [[EXIT]]: +// CHECK-LABEL: } // end sil function 'dont_hoist_inner_destructure' +// CHECK-LABEL: end running test {{.*}} on dont_hoist_inner_destructure: canonicalize-borrow-scope +sil [ossa] @dont_hoist_inner_destructure : $@convention(thin) (@owned S) -> () { +entry(%s : @owned $S): + test_specification "canonicalize-borrow-scope @instruction" + %s_borrow = begin_borrow %s : $S + %s_copy = copy_value %s_borrow : $S + cond_br undef, left, right + +left: + %innards = destructure_struct %s_copy : $S + end_borrow %s_borrow : $S + destroy_value %innards : $SGuts + destroy_value %s : $S + br exit + +right: + %sink = function_ref @sink : $@convention(thin) <τ_0_0> (@owned τ_0_0) -> () + apply %sink(%s_copy) : $@convention(thin) <τ_0_0> (@owned τ_0_0) -> () + end_borrow %s_borrow : $S + destroy_value %s : $S + br exit + +exit: + %retval = tuple () + return %retval : $() +} diff --git a/test/SILOptimizer/capturepromotion-wrong-lexicalscope.swift b/test/SILOptimizer/capturepromotion-wrong-lexicalscope.swift index bf05ad6ed48f3..c76e695fa5c47 100644 --- a/test/SILOptimizer/capturepromotion-wrong-lexicalscope.swift +++ b/test/SILOptimizer/capturepromotion-wrong-lexicalscope.swift @@ -19,12 +19,12 @@ // CHECK: destroy_value [[BOX_COPY]] : ${ var Int }, loc {{.*}}:33:11, scope 4 // CHECK: [[CLOSURE:%[^,]+]] = partial_apply [callee_guaranteed] [[SPECIALIZED_F]]([[REGISTER_11]]) : $@convention(thin) (Int) -> Int, loc {{.*}}:33:11, scope 4 // CHECK: [[BORROW:%.*]] = begin_borrow [lexical] [[CLOSURE]] -// CHECK: debug_value [[BORROW]] : $@callee_guaranteed () -> Int, let, name "f", loc {{.*}}:33:7, scope 5 -// CHECK: [[CLOSURE_COPY:%[^,]+]] = copy_value [[BORROW]] : $@callee_guaranteed () -> Int, loc {{.*}}:34:10, scope 5 +// CHECK: debug_value [[BORROW]] : $@callee_guaranteed () -> Int, let, name "f", loc {{.*}}:33:7, scope 6 +// CHECK: [[CLOSURE_COPY:%[^,]+]] = copy_value [[BORROW]] : $@callee_guaranteed () -> Int, loc {{.*}}:34:10, scope 6 // There used to be an end_borrow here. We leave an emptyline here to preserve line numbers. 
-// CHECK: destroy_value [[CLOSURE]] : $@callee_guaranteed () -> Int, loc {{.*}}:35:1, scope 5 -// CHECK: destroy_value [[BOX]] : ${ var Int }, loc {{.*}}:35:1, scope 5 -// CHECK: return [[CLOSURE_COPY]] : $@callee_guaranteed () -> Int, loc {{.*}}:34:3, scope 5 +// CHECK: destroy_value [[CLOSURE]] : $@callee_guaranteed () -> Int, loc {{.*}}:35:1, scope 6 +// CHECK: destroy_value [[BOX]] : ${ var Int }, loc {{.*}}:35:1, scope 6 +// CHECK: return [[CLOSURE_COPY]] : $@callee_guaranteed () -> Int, loc {{.*}}:34:3, scope 6 // CHECK: } diff --git a/test/SILOptimizer/cast_folding.swift b/test/SILOptimizer/cast_folding.swift index 01d7d3d3122f8..4dbb298da5178 100644 --- a/test/SILOptimizer/cast_folding.swift +++ b/test/SILOptimizer/cast_folding.swift @@ -527,10 +527,10 @@ func test18_2() -> Bool { // CHECK-LABEL: sil hidden [noinline] @$s12cast_folding6test19SbyF : $@convention(thin) () -> Bool -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, -1 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK: [[I1:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK: [[B:%.*]] = struct $Bool ([[I1]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test19SbyF' @inline(never) func test19() -> Bool { let t: Any.Type = type(of: 1 as Any) @@ -595,10 +595,10 @@ func test22_2() -> Bool { } // CHECK-LABEL: sil hidden [noinline] @$s12cast_folding6test23SbyF : $@convention(thin) () -> Bool -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, 0 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK: [[I0:%.*]] = integer_literal $Builtin.Int1, 0 +// CHECK: [[B:%.*]] = struct $Bool ([[I0]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test23SbyF' @inline(never) func test23() -> Bool { return cast23(P.self) @@ -626,10 +626,10 @@ func test24_2() -> Bool { // CHECK-LABEL: sil hidden [noinline] @$s12cast_folding6test25SbyF : $@convention(thin) () -> Bool -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, 0 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK: [[I0:%.*]] = integer_literal $Builtin.Int1, 0 +// CHECK: [[B:%.*]] = struct $Bool ([[I0]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test25SbyF' @inline(never) func test25() -> Bool { return cast25(P.self) @@ -647,10 +647,10 @@ func test26() -> Bool { // CHECK-LABEL: sil hidden [noinline] @$s12cast_folding6test27SbyF -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, 0 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK: [[I0:%.*]] = integer_literal $Builtin.Int1, 0 +// CHECK: [[B:%.*]] = struct $Bool ([[I0]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test27SbyF' @inline(never) func test27() -> Bool { return cast27(D.self) @@ -955,61 +955,61 @@ public func test42(_ p: P) -> Bool { return cast42(p) } -// CHECK-LABEL: sil [noinline] @{{.*}}test43{{.*}} -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, -1 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK-LABEL: sil [noinline] @$s12cast_folding6test43SbyF +// CHECK: [[I1:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK: [[B:%.*]] = struct $Bool ([[I1]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test43SbyF' @inline(never) public func test43() -> Bool { return P.self is Any.Type } -// CHECK-LABEL: sil [noinline] @{{.*}}test44{{.*}} -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, -1 -// 
CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK-LABEL: sil [noinline] @$s12cast_folding6test44SbyF +// CHECK: [[I1:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK: [[B:%.*]] = struct $Bool ([[I1]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test44SbyF' @inline(never) public func test44() -> Bool { return Any.self is Any.Type } -// CHECK-LABEL: sil [noinline] @{{.*}}test45{{.*}} -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, -1 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK-LABEL: sil [noinline] @$s12cast_folding6test45SbyF +// CHECK: [[I1:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK: [[B:%.*]] = struct $Bool ([[I1]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test45SbyF' @inline(never) public func test45() -> Bool { return (P & R).self is Any.Type } -// CHECK-LABEL: sil [noinline] @{{.*}}test46{{.*}} -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, -1 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK-LABEL: sil [noinline] @$s12cast_folding6test46SbyF +// CHECK: [[I1:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK: [[B:%.*]] = struct $Bool ([[I1]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test46SbyF' @inline(never) public func test46() -> Bool { return AnyObject.self is Any.Type } -// CHECK-LABEL: sil [noinline] @{{.*}}test47{{.*}} -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, -1 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK-LABEL: sil [noinline] @$s12cast_folding6test47SbyF +// CHECK: [[I1:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK: [[B:%.*]] = struct $Bool ([[I1]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test47SbyF' @inline(never) public func test47() -> Bool { return Any.Type.self is Any.Type } -// CHECK-LABEL: sil [noinline] @{{.*}}test48{{.*}} -// CHECK: bb0 -// CHECK-NEXT: %0 = integer_literal $Builtin.Int1, 0 -// CHECK-NEXT: %1 = struct $Bool -// CHECK-NEXT: return %1 +// CHECK-LABEL: sil [noinline] @$s12cast_folding6test48SbyF +// CHECK: [[I0:%.*]] = integer_literal $Builtin.Int1, 0 +// CHECK: [[B:%.*]] = struct $Bool ([[I0]] +// CHECK: return [[B]] +// CHECK: } // end sil function '$s12cast_folding6test48SbyF' @inline(never) public func test48() -> Bool { return Any.Type.self is Any.Type.Type @@ -1027,10 +1027,10 @@ public func testCastAnyObjectProtocolToAnyObjectType() -> AnyObject.Type? { return cast(AnyObject.self) } -// CHECK-LABEL: // testCastProtocolTypeProtocolToProtocolTypeType -// CHECK: sil [noinline] @{{.*}}testCastProtocol{{.*}}$@convention(thin) () -> Optional<@thick any P.Type.Type> -// CHECK: %0 = enum $Optional{{.*}}, #Optional.none!enumelt -// CHECK-NEXT: return %0 +// CHECK-LABEL: sil [noinline] @$s12cast_folding020testCastProtocolTypee2ToefF0AA1P_pXpXpSgyF : +// CHECK: [[E:%.*]] = enum $Optional{{.*}}, #Optional.none!enumelt +// CHECK: return [[E]] +// CHECK: } // end sil function '$s12cast_folding020testCastProtocolTypee2ToefF0AA1P_pXpXpSgyF' @inline(never) public func testCastProtocolTypeProtocolToProtocolTypeType() -> P.Type.Type? { return P.Type.self as? 
P.Type.Type diff --git a/test/SILOptimizer/copyforward_ossa.sil b/test/SILOptimizer/copyforward_ossa.sil index 9380e2f42da31..23f986308089b 100644 --- a/test/SILOptimizer/copyforward_ossa.sil +++ b/test/SILOptimizer/copyforward_ossa.sil @@ -20,44 +20,6 @@ sil @f_in_guaranteed : $@convention(thin) (@in_guaranteed T) -> () sil @f_out : $@convention(thin) () -> @out T sil @f_owned : $@convention(thin) (@owned T) -> () -protocol P { - init(_ i : Int32) - mutating func poke() -}; - -// CHECK-LABEL: sil hidden [ossa] @nrvo : -// CHECK-NOT: copy_addr -// CHECK-LABEL: } // end sil function 'nrvo' -sil hidden [ossa] @nrvo : $@convention(thin) (Bool) -> @out T { -bb0(%0 : $*T, %1 : $Bool): - %2 = alloc_stack $T, var, name "ro" // users: %9, %15, %17, %19 - %3 = struct_extract %1 : $Bool, #Bool._value // user: %4 - cond_br %3, bb1, bb2 // id: %4 - -bb1: // Preds: bb0 - %5 = metatype $@thick T.Type // user: %9 - %6 = witness_method $T, #P.init!allocator : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 // user: %9 - %7 = integer_literal $Builtin.Int32, 10 // user: %8 - %8 = struct $Int32 (%7 : $Builtin.Int32) // user: %9 - %9 = apply %6(%2, %8, %5) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 - br bb3 // id: %10 - -bb2: // Preds: bb0 - %11 = metatype $@thick T.Type // user: %15 - %12 = witness_method $T, #P.init!allocator : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 // user: %15 - %13 = integer_literal $Builtin.Int32, 1 // user: %14 - %14 = struct $Int32 (%13 : $Builtin.Int32) // user: %15 - %15 = apply %12(%2, %14, %11) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 - br bb3 // id: %16 - -bb3: // Preds: bb1 bb2 - copy_addr [take] %2 to [init] %0 : $*T // id: %17 - %18 = tuple () // user: %20 - debug_value %0 : $*T, expr op_deref - dealloc_stack %2 : $*T // id: %19 - return %18 : $() // id: %20 -} - // CHECK-LABEL: sil hidden [ossa] @forward_takeinit : // CHECK-NOT: copy_addr // CHECK-NOT: destroy_addr @@ -312,24 +274,6 @@ bb3: return %13 : $() } -// CHECK-LABEL: sil hidden [ossa] @forward_unchecked_ref_cast_addr : -// CHECK: unchecked_ref_cast_addr -// CHECK-NOT: copy_addr -// CHECK-LABEL: } // end sil function 'forward_unchecked_ref_cast_addr' -sil hidden [ossa] @forward_unchecked_ref_cast_addr : $@convention(thin) (@in AnyObject) -> @out AClass { -bb0(%0 : $*AClass, %1 : $*AnyObject): - %3 = alloc_stack $AnyObject // user: %10 - %4 = alloc_stack $AnyObject // user: %9 - %5 = alloc_stack $AClass // users: %6, %7, %8 - unchecked_ref_cast_addr AnyObject in %1 : $*AnyObject to AClass in %5 : $*AClass // id: %6 - copy_addr [take] %5 to [init] %0 : $*AClass // id: %7 - dealloc_stack %5 : $*AClass // id: %8 - dealloc_stack %4 : $*AnyObject // id: %9 - dealloc_stack %3 : $*AnyObject // id: %10 - %11 = tuple () // user: %12 - return %11 : $() // id: %12 -} - public struct S { @_hasStorage var f: T { get set } @_hasStorage var g: T { get set } diff --git a/test/SILOptimizer/globalopt_global_propagation.swift b/test/SILOptimizer/globalopt_global_propagation.swift index 64f3e63b128ea..83f456b579706 100644 --- a/test/SILOptimizer/globalopt_global_propagation.swift +++ b/test/SILOptimizer/globalopt_global_propagation.swift @@ -1,5 +1,4 @@ // RUN: %target-swift-frontend -parse-as-library -O -emit-sil %s | %FileCheck %s -// RUN: %target-swift-frontend -parse-as-library -O -wmo -emit-sil %s | %FileCheck 
-check-prefix=CHECK-WMO %s // REQUIRES: swift_in_compiler diff --git a/test/SILOptimizer/mandatory-specializer.sil b/test/SILOptimizer/mandatory-specializer.sil deleted file mode 100644 index a947e5bb80c1f..0000000000000 --- a/test/SILOptimizer/mandatory-specializer.sil +++ /dev/null @@ -1,45 +0,0 @@ -// RUN: %target-sil-opt -mandatory-generic-specializer %s | %FileCheck %s - -sil_stage canonical - -import Builtin -import Swift -import SwiftShims - -// CHECK-LABEL: sil [no_allocation] @deserialize_and_inline_after_devirtualize -// CHECK-NOT: apply -// CHECK: } // end sil function 'deserialize_and_inline_after_devirtualize' -sil [no_allocation] @deserialize_and_inline_after_devirtualize : $@convention(thin) (@in Int) -> () { -bb0(%0 : $*Int): - %1 = metatype $@thick Int.Type - %2 = witness_method $Int, #Comparable."<" : (Self.Type) -> (Self, Self) -> Bool : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Bool - %3 = apply %2(%0, %0, %1) : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Bool - %4 = tuple() - return %4 : $() -} - -// CHECK-LABEL: sil [no_allocation] @dont_do_dead_alloc_elimination_on_non_ossa -// CHECK: alloc_stack -// CHECK-NOT: load -// CHECK: } // end sil function 'dont_do_dead_alloc_elimination_on_non_ossa' -sil [no_allocation] @dont_do_dead_alloc_elimination_on_non_ossa : $@convention(thin) (Builtin.Int32) -> Builtin.Int32 { -bb0(%0 : $Builtin.Int32): - %1 = alloc_stack $Builtin.Int32 - store %0 to %1 : $*Builtin.Int32 - %2 = load %1 : $*Builtin.Int32 - dealloc_stack %1 : $*Builtin.Int32 - return %2 : $Builtin.Int32 -} - -sil shared [transparent] [serialized] [thunk] [canonical] @$sSiSLsSL1loiySbx_xtFZTW : $@convention(witness_method: Comparable) (@in_guaranteed Int, @in_guaranteed Int, @thick Int.Type) -> Bool { -bb0(%0 : $*Int, %1 : $*Int, %2 : $@thick Int.Type): - %3 = integer_literal $Builtin.Int1, 0 - %4 = struct $Bool (%3 : $Builtin.Int1) - return %4 : $Bool -} - -sil_witness_table public_external [serialized] Int: Comparable module Swift { - base_protocol Equatable: Int: Equatable module Swift - method #Comparable."<": (Self.Type) -> (Self, Self) -> Bool : @$sSiSLsSL1loiySbx_xtFZTW -} - diff --git a/test/SILOptimizer/mandatory_combiner.sil b/test/SILOptimizer/mandatory_combiner.sil index f80cd37d10e9e..8117dfbb85636 100644 --- a/test/SILOptimizer/mandatory_combiner.sil +++ b/test/SILOptimizer/mandatory_combiner.sil @@ -269,9 +269,9 @@ bb0: // All generic arguments. Partial apply arguments. Apply arguments. // CHECK-LABEL: sil hidden @generic_capture_args : $@convention(thin) () -> @out A { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @first_of_three_addables -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]] +// CHECK: = partial_apply [callee_guaranteed] [[FUNCTION_REF]] // Use %0 explicitly because that is the out parameter. 
-// CHECK: apply [[PARTIAL_APPLY_RESULT]](%0, {{%.*}}) +// CHECK: apply [[FUNCTION_REF]](%0, {{%.*}}) // CHECK: [[RESULT:%.*]] = tuple () // CHECK: return [[RESULT]] : $() // CHECK: } // end sil function 'generic_capture_args' @@ -302,6 +302,7 @@ bb0(%0 : $*A): %23 = apply %16(%0, %20) : $@callee_guaranteed (@in_guaranteed A) -> @out A destroy_addr %20 : $*A dealloc_stack %20 : $*A + debug_value %16 : $@callee_guaranteed (@in_guaranteed A) -> @out A, let, name "x" strong_release %16 : $@callee_guaranteed (@in_guaranteed A) -> @out A strong_release %16 : $@callee_guaranteed (@in_guaranteed A) -> @out A destroy_addr %5 : $*A @@ -320,8 +321,8 @@ bb0(%0 : $*A): // All class arguments. Partial apply arguments. Apply arguments. // CHECK-LABEL: sil hidden @class_capture_args : $@convention(thin) () -> @owned Klass { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @first_of_three_klasses -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}, {{%.*}}) : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]({{%.*}}) +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{%.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'class_capture_args' sil hidden @class_capture_args : $@convention(thin) () -> @owned Klass { @@ -352,9 +353,9 @@ bb0: // All existential arguments. Partial apply arguments. Apply arguments. // CHECK-LABEL: sil hidden @existential_capture_args : $@convention(thin) () -> @out any Proto { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @first_of_three_protos -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}, {{%.*}}) : +// CHECK-NOT: partial_apply // Use %0 explicitly because that is the out parameter. -// CHECK: apply [[PARTIAL_APPLY_RESULT]](%0, {{%.*}}) +// CHECK: apply [[FUNCTION_REF]](%0, {{%.*}}) // CHECK: [[RESULT:%.*]] = tuple () // CHECK: return [[RESULT]] : $() // CHECK: } // end sil function 'existential_capture_args' @@ -396,9 +397,9 @@ bb0(%0 : $*Proto): // Mixed arguments. Trivial partial apply argument. Existential argument. // CHECK-LABEL: sil hidden @mixed_trivialcapture_existentialarg : $@convention(thin) () -> @out any Proto { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @proto_from_proto_and_myint -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}) : +// CHECK-NOT: partial_apply // Use %0 explicitly because that is the out parameter. -// CHECK: apply [[PARTIAL_APPLY_RESULT]](%0, {{%.*}}) +// CHECK: apply [[FUNCTION_REF]](%0, {{%.*}}) // CHECK: [[RESULT:%.*]] = tuple () // CHECK: return [[RESULT]] : $() // CHECK: } // end sil function 'mixed_trivialcapture_existentialarg' @@ -424,8 +425,8 @@ bb0(%0 : $*Proto): // Mixed arguments. Existential partial apply argument. Trivial argument. // CHECK-LABEL: sil hidden @mixed_existentialcapture_trivialarg : $@convention(thin) () -> MyInt { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @myint_from_myint_and_proto -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}) : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]({{%.*}}) +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{%.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'mixed_existentialcapture_trivialarg' sil hidden @mixed_existentialcapture_trivialarg : $@convention(thin) () -> MyInt { @@ -452,8 +453,8 @@ bb0: // Mixed arguments. Mixed partial apply arguments. No arguments. 
// CHECK-LABEL: sil hidden @mixed_mixedcapture_noargs : $@convention(thin) () -> MyInt { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @myint_from_proto_and_myint -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}, {{%.*}}) : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]() +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'mixed_mixedcapture_noargs' sil hidden @mixed_mixedcapture_noargs : $@convention(thin) () -> MyInt { @@ -480,8 +481,8 @@ bb0: // Mixed arguments. No partial apply arguments. Mixed arguments. // CHECK-LABEL: sil hidden @mixed_nocapture_mixedargs : $@convention(thin) () -> MyInt { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @myint_from_proto_and_myint -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]() : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]({{%.*}}, {{%.*}}) +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{%.*}}, {{%.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'mixed_nocapture_mixedargs' sil hidden @mixed_nocapture_mixedargs : $@convention(thin) () -> MyInt { @@ -674,9 +675,9 @@ bb0: // All generic arguments. Partial apply arguments. Apply arguments. OSSA. // CHECK-LABEL: sil [ossa] @generic_capture_args_ossa : $@convention(thin) () -> @out A { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @first_of_three_addables -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]] +// CHECK-NOT: partial_apply // Use %0 explicitly because that is the out parameter. -// CHECK: apply [[PARTIAL_APPLY_RESULT]](%0, {{%.*}}) +// CHECK: apply [[FUNCTION_REF]](%0, {{%.*}}) // CHECK: [[RESULT:%.*]] = tuple () // CHECK: return [[RESULT]] : $() // CHECK: } // end sil function 'generic_capture_args_ossa' @@ -722,8 +723,8 @@ bb0(%0 : $*A): // All class arguments. Partial apply arguments. Apply arguments. OSSA. // CHECK-LABEL: sil [ossa] @class_capture_args_ossa : $@convention(thin) () -> @owned Klass { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @first_of_three_klasses -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}, {{%.*}}) : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]({{%.*}}) +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{%.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'class_capture_args_ossa' sil [ossa] @class_capture_args_ossa : $@convention(thin) () -> @owned Klass { @@ -752,9 +753,9 @@ bb0: // All existential arguments. Partial apply arguments. Apply arguments. OSSA. // CHECK-LABEL: sil [ossa] @existential_capture_args_ossa : $@convention(thin) () -> @out any Proto { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @first_of_three_protos -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}, {{%.*}}) : +// CHECK-NOT: partial_apply // Use %0 explicitly because that is the out parameter. -// CHECK: apply [[PARTIAL_APPLY_RESULT]](%0, {{%.*}}) +// CHECK: apply [[FUNCTION_REF]](%0, {{%.*}}) // CHECK: [[RESULT:%.*]] = tuple () // CHECK: return [[RESULT]] : $() // CHECK: } // end sil function 'existential_capture_args_ossa' @@ -794,9 +795,9 @@ bb0(%0 : $*Proto): // Mixed arguments. Trivial partial apply argument. Existential argument. OSSA. 
// CHECK-LABEL: sil [ossa] @mixed_trivialcapture_existentialarg_ossa : $@convention(thin) () -> @out any Proto { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @proto_from_proto_and_myint -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}) : +// CHECK-NOT: partial_apply // Use %0 explicitly because that is the out parameter. -// CHECK: apply [[PARTIAL_APPLY_RESULT]](%0, {{%.*}}) +// CHECK: apply [[FUNCTION_REF]](%0, {{%.*}}) // CHECK: [[RESULT:%.*]] = tuple () // CHECK: return [[RESULT]] : $() // CHECK: } // end sil function 'mixed_trivialcapture_existentialarg_ossa' @@ -820,8 +821,8 @@ bb0(%0 : $*Proto): // Mixed arguments. Existential partial apply argument. Trivial argument. OSSA. // CHECK-LABEL: sil [ossa] @mixed_existentialcapture_trivialarg_ossa : $@convention(thin) () -> MyInt { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @myint_from_myint_and_proto -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}) : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]({{%.*}}) +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{%.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'mixed_existentialcapture_trivialarg_ossa' sil [ossa] @mixed_existentialcapture_trivialarg_ossa : $@convention(thin) () -> MyInt { @@ -846,8 +847,8 @@ bb0: // Mixed arguments. Mixed partial apply arguments. No arguments. OSSA. // CHECK-LABEL: sil [ossa] @mixed_mixedcapture_noargs_ossa : $@convention(thin) () -> MyInt { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @myint_from_proto_and_myint -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]({{%.*}}, {{%.*}}) : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]() +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'mixed_mixedcapture_noargs_ossa' sil [ossa] @mixed_mixedcapture_noargs_ossa : $@convention(thin) () -> MyInt { @@ -872,8 +873,8 @@ bb0: // Mixed arguments. No partial apply arguments. Mixed arguments. OSSA. 
// CHECK-LABEL: sil [ossa] @mixed_nocapture_mixedargs_ossa : $@convention(thin) () -> MyInt { // CHECK: [[FUNCTION_REF:%.*]] = function_ref @myint_from_proto_and_myint -// CHECK: [[PARTIAL_APPLY_RESULT:%.*]] = partial_apply [callee_guaranteed] [[FUNCTION_REF]]() : -// CHECK: [[RESULT:%.*]] = apply [[PARTIAL_APPLY_RESULT]]({{%.*}}, {{%.*}}) +// CHECK-NOT: partial_apply +// CHECK: [[RESULT:%.*]] = apply [[FUNCTION_REF]]({{%.*}}, {{%.*}}) // CHECK: return [[RESULT]] // CHECK: } // end sil function 'mixed_nocapture_mixedargs_ossa' sil [ossa] @mixed_nocapture_mixedargs_ossa : $@convention(thin) () -> MyInt { diff --git a/test/SILOptimizer/mandatory_generic_specialization.sil b/test/SILOptimizer/mandatory_generic_specialization.sil deleted file mode 100644 index 19d8537a393f6..0000000000000 --- a/test/SILOptimizer/mandatory_generic_specialization.sil +++ /dev/null @@ -1,57 +0,0 @@ -// RUN: %target-sil-opt -enable-sil-verify-all %s -mandatory-generic-specializer | %FileCheck %s - -import Builtin - -sil @paable : $@convention(thin) (Builtin.Int64) -> () -sil @moved_pai_callee : $@convention(thin) (@inout_aliasable Builtin.Int64) -> () - -sil [ossa] [transparent] @partial_apply_on_stack_nesting_violator : $@convention(thin) () -> () { - %paable = function_ref @paable : $@convention(thin) (Builtin.Int64) -> () - %one = integer_literal $Builtin.Int64, 1 - %first = partial_apply [callee_guaranteed] [on_stack] %paable(%one) : $@convention(thin) (Builtin.Int64) -> () - %two = integer_literal $Builtin.Int64, 2 - %second = partial_apply [callee_guaranteed] [on_stack] %paable(%two) : $@convention(thin) (Builtin.Int64) -> () - // Note that the destroy_values do not occur in an order which coincides - // with stack disciplined dealloc_stacks. - destroy_value %first : $@noescape @callee_guaranteed () -> () - destroy_value %second : $@noescape @callee_guaranteed () -> () - %retval = tuple () - return %retval : $() -} - -// Verify that when inlining partial_apply_on_stack_nesting_violator, the stack -// nesting of the on_stack closures is fixed. 
-// CHECK-LABEL: sil [no_locks] @test_inline_stack_violating_ossa_func : {{.*}} { -// CHECK: [[PAABLE:%[^,]+]] = function_ref @paable -// CHECK: [[FIRST:%[^,]+]] = partial_apply [callee_guaranteed] [on_stack] [[PAABLE]] -// CHECK: [[SECOND:%[^,]+]] = partial_apply [callee_guaranteed] [on_stack] [[PAABLE]] -// CHECK: dealloc_stack [[SECOND]] -// CHECK: dealloc_stack [[FIRST]] -// CHECK-LABEL: } // end sil function 'test_inline_stack_violating_ossa_func' -sil [no_locks] @test_inline_stack_violating_ossa_func : $@convention(thin) () -> () { - %callee = function_ref @partial_apply_on_stack_nesting_violator : $@convention(thin) () -> () - apply %callee() : $@convention(thin) () -> () - %retval = tuple () - return %retval : $() -} - -// CHECK-LABEL: sil hidden [no_allocation] [ossa] @moved_pai : {{.*}} { -// CHECK-NOT: partial_apply -// CHECK-LABEL: } // end sil function 'moved_pai' -sil hidden [no_allocation] [ossa] @moved_pai : $@convention(thin) () -> Builtin.Int64 { -bb0: - %addr = alloc_stack $Builtin.Int64 - %42 = integer_literal $Builtin.Int64, 42 - store %42 to [trivial] %addr : $*Builtin.Int64 - %callee = function_ref @moved_pai_callee : $@convention(thin) (@inout_aliasable Builtin.Int64) -> () - %closure = partial_apply [callee_guaranteed] %callee(%addr) : $@convention(thin) (@inout_aliasable Builtin.Int64) -> () - %closure_lifetime = move_value [lexical] %closure : $@callee_guaranteed () -> () - debug_value %closure_lifetime : $@callee_guaranteed () -> () - %copy = copy_value %closure_lifetime : $@callee_guaranteed () -> () - apply %copy() : $@callee_guaranteed () -> () - destroy_value %copy : $@callee_guaranteed () -> () - %retval = load [trivial] %addr : $*Builtin.Int64 - destroy_value %closure_lifetime : $@callee_guaranteed () -> () - dealloc_stack %addr : $*Builtin.Int64 - return %retval : $Builtin.Int64 -} diff --git a/test/SILOptimizer/mandatory_performance_optimizations.sil b/test/SILOptimizer/mandatory_performance_optimizations.sil new file mode 100644 index 0000000000000..4cbbb185ae490 --- /dev/null +++ b/test/SILOptimizer/mandatory_performance_optimizations.sil @@ -0,0 +1,132 @@ +// RUN: %target-sil-opt -enable-sil-verify-all %s -mandatory-performance-optimizations | %FileCheck %s + +// REQUIRES: swift_in_compiler + +sil_stage canonical + +import Builtin +import Swift +import SwiftShims + + +sil @paable : $@convention(thin) (Builtin.Int64) -> () +sil @moved_pai_callee : $@convention(thin) (@inout_aliasable Builtin.Int64) -> () +sil @use_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + +sil [ossa] [transparent] @partial_apply_on_stack_nesting_violator : $@convention(thin) () -> () { + %paable = function_ref @paable : $@convention(thin) (Builtin.Int64) -> () + %one = integer_literal $Builtin.Int64, 1 + %first = partial_apply [callee_guaranteed] [on_stack] %paable(%one) : $@convention(thin) (Builtin.Int64) -> () + %two = integer_literal $Builtin.Int64, 2 + %second = partial_apply [callee_guaranteed] [on_stack] %paable(%two) : $@convention(thin) (Builtin.Int64) -> () + %f = function_ref @use_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + apply %f(%first) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + apply %f(%second) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + // Note that the destroy_values do not occur in an order which coincides + // with stack disciplined dealloc_stacks. 
+ destroy_value %first : $@noescape @callee_guaranteed () -> () + destroy_value %second : $@noescape @callee_guaranteed () -> () + %retval = tuple () + return %retval : $() +} + +// Verify that when inlining partial_apply_on_stack_nesting_violator, the stack +// nesting of the on_stack closures is fixed. +// CHECK-LABEL: sil [no_locks] @test_inline_stack_violating_ossa_func : {{.*}} { +// CHECK: [[PAABLE:%[^,]+]] = function_ref @paable +// CHECK: [[FIRST:%[^,]+]] = partial_apply [callee_guaranteed] [on_stack] [[PAABLE]] +// CHECK: [[SECOND:%[^,]+]] = partial_apply [callee_guaranteed] [on_stack] [[PAABLE]] +// CHECK: dealloc_stack [[SECOND]] +// CHECK: dealloc_stack [[FIRST]] +// CHECK-LABEL: } // end sil function 'test_inline_stack_violating_ossa_func' +sil [no_locks] @test_inline_stack_violating_ossa_func : $@convention(thin) () -> () { + %callee = function_ref @partial_apply_on_stack_nesting_violator : $@convention(thin) () -> () + apply %callee() : $@convention(thin) () -> () + %retval = tuple () + return %retval : $() +} + +// CHECK-LABEL: sil hidden [no_allocation] [ossa] @moved_pai : {{.*}} { +// CHECK-NOT: partial_apply +// CHECK-LABEL: } // end sil function 'moved_pai' +sil hidden [no_allocation] [ossa] @moved_pai : $@convention(thin) () -> Builtin.Int64 { +bb0: + %addr = alloc_stack $Builtin.Int64 + %42 = integer_literal $Builtin.Int64, 42 + store %42 to [trivial] %addr : $*Builtin.Int64 + %callee = function_ref @moved_pai_callee : $@convention(thin) (@inout_aliasable Builtin.Int64) -> () + %closure = partial_apply [callee_guaranteed] %callee(%addr) : $@convention(thin) (@inout_aliasable Builtin.Int64) -> () + %closure_lifetime = move_value [lexical] %closure : $@callee_guaranteed () -> () + debug_value %closure_lifetime : $@callee_guaranteed () -> () + %copy = copy_value %closure_lifetime : $@callee_guaranteed () -> () + apply %copy() : $@callee_guaranteed () -> () + destroy_value %copy : $@callee_guaranteed () -> () + %retval = load [trivial] %addr : $*Builtin.Int64 + destroy_value %closure_lifetime : $@callee_guaranteed () -> () + dealloc_stack %addr : $*Builtin.Int64 + return %retval : $Builtin.Int64 +} + +// CHECK-LABEL: sil [no_allocation] @deserialize_and_inline_after_devirtualize +// CHECK-NOT: apply +// CHECK: } // end sil function 'deserialize_and_inline_after_devirtualize' +sil [no_allocation] @deserialize_and_inline_after_devirtualize : $@convention(thin) (@in Int) -> () { +bb0(%0 : $*Int): + %1 = metatype $@thick Int.Type + %2 = witness_method $Int, #Comparable."<" : (Self.Type) -> (Self, Self) -> Bool : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Bool + %3 = apply %2(%0, %0, %1) : $@convention(witness_method: Comparable) <τ_0_0 where τ_0_0 : Comparable> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0, @thick τ_0_0.Type) -> Bool + %4 = tuple() + return %4 : $() +} + +// CHECK-LABEL: sil [no_allocation] [ossa] @memopt_and_dead_alloc +// CHECK-NOT: alloc_stack +// CHECK-NOT: load +// CHECK: return %0 +// CHECK: } // end sil function 'memopt_and_dead_alloc' +sil [no_allocation] [ossa] @memopt_and_dead_alloc : $@convention(thin) (Builtin.Int32) -> Builtin.Int32 { +bb0(%0 : $Builtin.Int32): + %1 = alloc_stack $Builtin.Int32 + store %0 to [trivial] %1 : $*Builtin.Int32 + %2 = load [trivial] %1 : $*Builtin.Int32 + dealloc_stack %1 : $*Builtin.Int32 + return %2 : $Builtin.Int32 +} + +// CHECK-LABEL: sil [no_allocation] @dont_do_dead_alloc_elimination_on_non_ossa +// CHECK: alloc_stack 
+// CHECK-NOT: load +// CHECK: return %0 +// CHECK: } // end sil function 'dont_do_dead_alloc_elimination_on_non_ossa' +sil [no_allocation] @dont_do_dead_alloc_elimination_on_non_ossa : $@convention(thin) (Builtin.Int32) -> Builtin.Int32 { +bb0(%0 : $Builtin.Int32): + %1 = alloc_stack $Builtin.Int32 + store %0 to %1 : $*Builtin.Int32 + %2 = load %1 : $*Builtin.Int32 + dealloc_stack %1 : $*Builtin.Int32 + return %2 : $Builtin.Int32 +} + +// CHECK-LABEL: sil [no_allocation] @dead_metatype : +// CHECK-NOT: metatype +// CHECK-NOT: debug_value +// CHECK: } // end sil function 'dead_metatype' +sil [no_allocation] @dead_metatype : $@convention(thin) () -> () { +bb0: + %0 = metatype $@thick Int.Type + debug_value %0 : $@thick Int.Type + %2 = tuple () + return %2 : $() +} +sil shared [transparent] [serialized] [thunk] [canonical] @$sSiSLsSL1loiySbx_xtFZTW : $@convention(witness_method: Comparable) (@in_guaranteed Int, @in_guaranteed Int, @thick Int.Type) -> Bool { +bb0(%0 : $*Int, %1 : $*Int, %2 : $@thick Int.Type): + %3 = integer_literal $Builtin.Int1, 0 + %4 = struct $Bool (%3 : $Builtin.Int1) + return %4 : $Bool +} + +sil_witness_table public_external [serialized] Int: Comparable module Swift { + base_protocol Equatable: Int: Equatable module Swift + method #Comparable."<": (Self.Type) -> (Self, Self) -> Bool : @$sSiSLsSL1loiySbx_xtFZTW +} + diff --git a/test/SILOptimizer/moveonly_addresschecker_diagnostics.swift b/test/SILOptimizer/moveonly_addresschecker_diagnostics.swift index 208fd18e7598f..8d04e9c598d60 100644 --- a/test/SILOptimizer/moveonly_addresschecker_diagnostics.swift +++ b/test/SILOptimizer/moveonly_addresschecker_diagnostics.swift @@ -3116,9 +3116,8 @@ public func closureCaptureClassUseAfterConsumeError() { } public func closureCaptureClassArgUseAfterConsume(_ x2: inout Klass) { - // expected-error @-1 {{'x2' consumed but not reinitialized before end of function}} - // expected-note @-2 {{'x2' is declared 'inout'}} - let f = { // expected-note {{consuming use here}} + // expected-note @-1 {{'x2' is declared 'inout'}} + let f = { // expected-error @-1 {{escaping closure captures 'inout' parameter 'x2'}} borrowVal(x2) // expected-note {{captured here}} consumeVal(x2) // expected-note {{captured here}} @@ -3230,12 +3229,10 @@ public func closureAndDeferCaptureClassUseAfterConsume3() { } public func closureAndDeferCaptureClassArgUseAfterConsume(_ x2: inout Klass) { - // expected-error @-1 {{'x2' consumed but not reinitialized before end of function}} - // expected-error @-2 {{'x2' consumed in closure but not reinitialized before end of closure}} - // expected-error @-3 {{'x2' consumed more than once}} - // expected-note @-4 {{'x2' is declared 'inout'}} + // expected-error @-1 {{'x2' consumed in closure but not reinitialized before end of closure}} + // expected-error @-2 {{'x2' consumed more than once}} + // expected-note @-3 {{'x2' is declared 'inout'}} let f = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} - // expected-note @-1 {{consuming use here}} defer { // expected-note {{captured indirectly by this call}} borrowVal(x2) // expected-note {{captured here}} consumeVal(x2) // expected-note {{captured here}} @@ -3280,11 +3277,9 @@ public func closureAndClosureCaptureClassUseAfterConsume2() { public func closureAndClosureCaptureClassArgUseAfterConsume(_ x2: inout Klass) { - // expected-error @-1 {{'x2' consumed but not reinitialized before end of function}} + // expected-note @-1 {{'x2' is declared 'inout'}} // expected-note @-2 {{'x2' is declared 'inout'}} 
- // expected-note @-3 {{'x2' is declared 'inout'}} let f = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} - // expected-note @-1 {{consuming use here}} let g = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} // expected-note @-1 {{captured indirectly by this call}} borrowVal(x2) diff --git a/test/SILOptimizer/moveonly_addresschecker_diagnostics_library_evolution.swift b/test/SILOptimizer/moveonly_addresschecker_diagnostics_library_evolution.swift new file mode 100644 index 0000000000000..18c2518087f52 --- /dev/null +++ b/test/SILOptimizer/moveonly_addresschecker_diagnostics_library_evolution.swift @@ -0,0 +1,172 @@ +// RUN: %target-swift-emit-sil -enable-experimental-feature NoImplicitCopy -sil-verify-all -verify -enable-library-evolution %s + +// This test is used to validate that we properly handle library evolution code +// until we can get all of the normal moveonly_addresschecker_diagnostics test +// case to pass. + +//////////////////////// +// MARK: Declarations // +//////////////////////// + +@_moveOnly public struct EmptyStruct {} +@_moveOnly public struct NonEmptyStruct { + var e = EmptyStruct() +} +public class CopyableKlass { + var varS = NonEmptyStruct() + var letS = NonEmptyStruct() +} + +public func borrowVal(_ x: borrowing NonEmptyStruct) {} +public func borrowVal(_ x: borrowing EmptyStruct) {} +public func consumeVal(_ x: consuming NonEmptyStruct) {} +public func consumeVal(_ x: consuming EmptyStruct) {} + +let copyableKlassLetGlobal = CopyableKlass() +var copyableKlassVarGlobal = CopyableKlass() + +///////////////// +// MARK: Tests // +///////////////// + +public struct DeinitTest : ~Copyable { + deinit {} +} + +public protocol P {} + +public struct GenericDeinitTest : ~Copyable { + deinit {} +} + +////////////////////////////////////////// +// MARK: Caller Argument Let Spill Test // +////////////////////////////////////////// + +public func callerBorrowClassLetFieldForArgumentSpillingTestLet() { + let x = CopyableKlass() + borrowVal(x.letS.e) +} + +public func callerBorrowClassLetFieldForArgumentSpillingTestVar() { + var x = CopyableKlass() + x = CopyableKlass() + borrowVal(x.letS.e) +} + +public func callerBorrowClassLetFieldForArgumentSpillingTestArg(_ x: CopyableKlass) { + borrowVal(x.letS.e) +} + +public func callerBorrowClassLetFieldForArgumentSpillingTestInOutArg(_ x: inout CopyableKlass) { + borrowVal(x.letS.e) +} + +public func callerBorrowClassLetFieldForArgumentSpillingTestConsumingArg(_ x: consuming CopyableKlass) { + borrowVal(x.letS.e) +} + +public func callerBorrowClassLetFieldForArgumentSpillingTestLetGlobal() { + borrowVal(copyableKlassLetGlobal.letS.e) +} + +public func callerBorrowClassLetFieldForArgumentSpillingTestVarGlobal() { + borrowVal(copyableKlassVarGlobal.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestLet() { + let x = CopyableKlass() + consumeVal(x.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestVar() { + var x = CopyableKlass() + x = CopyableKlass() + consumeVal(x.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestArg(_ x: CopyableKlass) { + consumeVal(x.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestInOutArg(_ x: inout CopyableKlass) { + consumeVal(x.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestConsumingArg(_ x: consuming CopyableKlass) { + consumeVal(x.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestLetGlobal() { + 
consumeVal(copyableKlassLetGlobal.letS.e) +} + +public func callerConsumeClassLetFieldForArgumentSpillingTestVarGlobal() { + consumeVal(copyableKlassVarGlobal.letS.e) +} + +//////////////////// +// MARK: Var Test // +//////////////////// + +public func callerBorrowClassVarFieldForArgumentSpillingTestLet() { + let x = CopyableKlass() + borrowVal(x.varS.e) +} + +public func callerBorrowClassVarFieldForArgumentSpillingTestVar() { + var x = CopyableKlass() + x = CopyableKlass() + borrowVal(x.varS.e) +} + +public func callerBorrowClassVarFieldForArgumentSpillingTestArg(_ x: CopyableKlass) { + borrowVal(x.varS.e) +} + +public func callerBorrowClassVarFieldForArgumentSpillingTestInOutArg(_ x: inout CopyableKlass) { + borrowVal(x.varS.e) +} + +public func callerBorrowClassVarFieldForArgumentSpillingTestConsumingArg(_ x: consuming CopyableKlass) { + borrowVal(x.varS.e) +} + +public func callerBorrowClassVarFieldForArgumentSpillingTestLetGlobal() { + borrowVal(copyableKlassLetGlobal.varS.e) +} + +public func callerBorrowClassVarFieldForArgumentSpillingTestVarGlobal() { + borrowVal(copyableKlassVarGlobal.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestLet() { + let x = CopyableKlass() + consumeVal(x.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestVar() { + var x = CopyableKlass() + x = CopyableKlass() + consumeVal(x.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestArg(_ x: CopyableKlass) { + consumeVal(x.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestInOutArg(_ x: inout CopyableKlass) { + consumeVal(x.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestConsumingArg(_ x: consuming CopyableKlass) { + consumeVal(x.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestLetGlobal() { + consumeVal(copyableKlassLetGlobal.varS.e) +} + +public func callerConsumeClassVarFieldForArgumentSpillingTestVarGlobal() { + consumeVal(copyableKlassVarGlobal.varS.e) +} diff --git a/test/SILOptimizer/moveonly_objectchecker_diagnostics.swift b/test/SILOptimizer/moveonly_objectchecker_diagnostics.swift index e4243e68b4f8f..63a7005817bed 100644 --- a/test/SILOptimizer/moveonly_objectchecker_diagnostics.swift +++ b/test/SILOptimizer/moveonly_objectchecker_diagnostics.swift @@ -2787,9 +2787,7 @@ public func closureCaptureClassUseAfterConsume1(_ x: borrowing Klass) { // expec public func closureCaptureClassUseAfterConsume2(_ x2: inout Klass) { // expected-note @-1 {{'x2' is declared 'inout'}} - // expected-error @-2 {{'x2' consumed but not reinitialized before end of function}} let f = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} - // expected-note @-1 {{consuming use here}} borrowVal(x2) // expected-note {{captured here}} consumeVal(x2) // expected-note {{captured here}} consumeVal(x2) // expected-note {{captured here}} diff --git a/test/SILOptimizer/moveonly_trivial_addresschecker_diagnostics.swift b/test/SILOptimizer/moveonly_trivial_addresschecker_diagnostics.swift index 8ff992cca1e63..4624ea3a6d401 100644 --- a/test/SILOptimizer/moveonly_trivial_addresschecker_diagnostics.swift +++ b/test/SILOptimizer/moveonly_trivial_addresschecker_diagnostics.swift @@ -1227,9 +1227,8 @@ public func closureCaptureClassUseAfterConsumeError() { } public func closureCaptureClassArgUseAfterConsume(_ x2: inout NonTrivialStruct) { - // expected-error @-1 {{'x2' consumed but not reinitialized before end of function}} - // expected-note @-2 {{'x2' is declared 'inout'}} - 
let f = { // expected-note {{consuming use here}} + // expected-note @-1 {{'x2' is declared 'inout'}} + let f = { // expected-error @-1 {{escaping closure captures 'inout' parameter 'x2'}} borrowVal(x2) // expected-note {{captured here}} consumeVal(x2) // expected-note {{captured here}} @@ -1341,12 +1340,10 @@ public func closureAndDeferCaptureClassUseAfterConsume3() { } public func closureAndDeferCaptureClassArgUseAfterConsume(_ x2: inout NonTrivialStruct) { - // expected-error @-1 {{'x2' consumed but not reinitialized before end of function}} - // expected-error @-2 {{'x2' consumed in closure but not reinitialized before end of closure}} - // expected-error @-3 {{'x2' consumed more than once}} - // expected-note @-4 {{'x2' is declared 'inout'}} + // expected-error @-1 {{'x2' consumed in closure but not reinitialized before end of closure}} + // expected-error @-2 {{'x2' consumed more than once}} + // expected-note @-3 {{'x2' is declared 'inout'}} let f = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} - // expected-note @-1 {{consuming use here}} defer { // expected-note {{captured indirectly by this call}} borrowVal(x2) // expected-note {{captured here}} consumeVal(x2) // expected-note {{captured here}} @@ -1391,11 +1388,9 @@ public func closureAndClosureCaptureClassUseAfterConsume2() { public func closureAndClosureCaptureClassArgUseAfterConsume(_ x2: inout NonTrivialStruct) { - // expected-error @-1 {{'x2' consumed but not reinitialized before end of function}} + // expected-note @-1 {{'x2' is declared 'inout'}} // expected-note @-2 {{'x2' is declared 'inout'}} - // expected-note @-3 {{'x2' is declared 'inout'}} let f = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} - // expected-note @-1 {{consuming use here}} let g = { // expected-error {{escaping closure captures 'inout' parameter 'x2'}} // expected-note @-1 {{captured indirectly by this call}} borrowVal(x2) diff --git a/test/SILOptimizer/named_return_value_opt.sil b/test/SILOptimizer/named_return_value_opt.sil new file mode 100644 index 0000000000000..3ba0209d7c396 --- /dev/null +++ b/test/SILOptimizer/named_return_value_opt.sil @@ -0,0 +1,146 @@ +// RUN: %target-sil-opt -enable-sil-verify-all %s -named-return-value-optimization | %FileCheck %s + +// REQUIRES: swift_in_compiler + +sil_stage canonical + +import Builtin +import Swift + +class AClass {} + +protocol P { + init() +}; + +// CHECK-LABEL: sil [ossa] @nrvo_simple : +// CHECK: copy_addr %1 to [init] %0 +// CHECK-NOT: copy_addr +// CHECK-LABEL: } // end sil function 'nrvo_simple' +sil [ossa] @nrvo_simple : $@convention(thin) (@in_guaranteed T) -> @out T { +bb0(%0 : $*T, %1 : $*T): + %2 = alloc_stack $T + copy_addr %1 to [init] %2 : $*T + copy_addr [take] %2 to [init] %0 : $*T + dealloc_stack %2 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil [ossa] @nrvo_multi_block : +// CHECK: apply {{%[0-9]+}}(%0 +// CHECK: apply {{%[0-9]+}}(%0 +// CHECK-NOT: copy_addr +// CHECK-LABEL: } // end sil function 'nrvo_multi_block' +sil [ossa] @nrvo_multi_block : $@convention(thin) () -> @out T { +bb0(%0 : $*T): + %2 = alloc_stack $T, var, name "ro" + %3 = metatype $@thick T.Type + %4 = witness_method $T, #P.init!allocator : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@thick τ_0_0.Type) -> @out τ_0_0 + cond_br undef, bb1, bb2 + +bb1: + %6 = apply %4(%2, %3) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@thick τ_0_0.Type) -> @out τ_0_0 + br bb3 + +bb2: + %7 = apply %4(%2, %3) : 
$@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@thick τ_0_0.Type) -> @out τ_0_0 + br bb3 + +bb3: + copy_addr [take] %2 to [init] %0 : $*T + dealloc_stack %2 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil @no_nrvo_no_take : +// CHECK: [[S:%[0-9]+]] = alloc_stack $T +// CHECK: copy_addr [[S]] to [init] %0 +// CHECK-LABEL: } // end sil function 'no_nrvo_no_take' +sil @no_nrvo_no_take : $@convention(thin) (@in_guaranteed T) -> @out T { +bb0(%0 : $*T, %1 : $*T): + %2 = alloc_stack $T + copy_addr %1 to [init] %2 : $*T + copy_addr %2 to [init] %0 : $*T + dealloc_stack %2 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil [ossa] @no_nrvo_write_after_copy : +// CHECK: [[S:%[0-9]+]] = alloc_stack $T +// CHECK: copy_addr [take] [[S]] to [init] %0 +// CHECK-LABEL: } // end sil function 'no_nrvo_write_after_copy' +sil [ossa] @no_nrvo_write_after_copy : $@convention(thin) (@in_guaranteed T, @in_guaranteed T) -> @out T { +bb0(%0 : $*T, %1 : $*T, %2 : $*T): + %3 = alloc_stack $T + copy_addr %1 to [init] %3 : $*T + copy_addr [take] %3 to [init] %0 : $*T + copy_addr %2 to [init] %3 : $*T + destroy_addr %3 : $*T + dealloc_stack %3 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil [ossa] @no_nrvo_copy_not_in_return_block : +// CHECK: [[S:%[0-9]+]] = alloc_stack $T +// CHECK: copy_addr [take] [[S]] to [init] %0 +// CHECK-LABEL: } // end sil function 'no_nrvo_copy_not_in_return_block' +sil [ossa] @no_nrvo_copy_not_in_return_block : $@convention(thin) (@in_guaranteed T, @in_guaranteed T) -> @out T { +bb0(%0 : $*T, %1 : $*T, %2 : $*T): + %3 = alloc_stack $T + copy_addr %1 to [init] %3 : $*T + copy_addr [take] %3 to [init] %0 : $*T + br bb1 +bb1: + copy_addr %2 to [init] %3 : $*T + destroy_addr %3 : $*T + dealloc_stack %3 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil [ossa] @no_nrvo_dynamic_lifetime : +// CHECK: [[S:%[0-9]+]] = alloc_stack [dynamic_lifetime] $T +// CHECK: copy_addr [take] [[S]] to [init] %0 +// CHECK-LABEL: } // end sil function 'no_nrvo_dynamic_lifetime' +sil [ossa] @no_nrvo_dynamic_lifetime : $@convention(thin) (@in_guaranteed T) -> @out T { +bb0(%0 : $*T, %1 : $*T): + %2 = alloc_stack [dynamic_lifetime] $T + copy_addr %1 to [init] %2 : $*T + copy_addr [take] %2 to [init] %0 : $*T + dealloc_stack %2 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil [ossa] @no_nrvo_no_alloc_stack : +// CHECK: copy_addr [take] %1 to [init] %0 +// CHECK-LABEL: } // end sil function 'no_nrvo_no_alloc_stack' +sil [ossa] @no_nrvo_no_alloc_stack : $@convention(thin) (@in T) -> @out T { +bb0(%0 : $*T, %1 : $*T): + copy_addr [take] %1 to [init] %0 : $*T + %18 = tuple () + return %18 : $() +} + +// CHECK-LABEL: sil hidden [ossa] @forward_unchecked_ref_cast_addr : +// CHECK: unchecked_ref_cast_addr +// CHECK-NOT: copy_addr +// CHECK-LABEL: } // end sil function 'forward_unchecked_ref_cast_addr' +sil hidden [ossa] @forward_unchecked_ref_cast_addr : $@convention(thin) (@in AnyObject) -> @out AClass { +bb0(%0 : $*AClass, %1 : $*AnyObject): + %3 = alloc_stack $AnyObject // user: %10 + %4 = alloc_stack $AnyObject // user: %9 + %5 = alloc_stack $AClass // users: %6, %7, %8 + unchecked_ref_cast_addr AnyObject in %1 : $*AnyObject to AClass in %5 : $*AClass // id: %6 + copy_addr [take] %5 to [init] %0 : $*AClass // id: %7 + dealloc_stack %5 : $*AClass // id: %8 + dealloc_stack %4 : $*AnyObject // id: %9 + dealloc_stack %3 : $*AnyObject // id: %10 + %11 = tuple () // user: %12 + return %11 : $() // id: %12 +} + diff --git 
a/test/SILOptimizer/performance-annotations.swift b/test/SILOptimizer/performance-annotations.swift index 44caf37e93121..6954d4a09ca79 100644 --- a/test/SILOptimizer/performance-annotations.swift +++ b/test/SILOptimizer/performance-annotations.swift @@ -239,3 +239,36 @@ struct Buffer { } } +@_noLocks +func testBitShift(_ x: Int) -> Int { + return x << 1 +} + +@_noLocks +func testUintIntConversion() -> Int { + let u: UInt32 = 5 + return Int(u) +} + +struct OptSet: OptionSet { + let rawValue: Int + + public static var a: OptSet { return OptSet(rawValue: 1) } + public static var b: OptSet { return OptSet(rawValue: 2) } + public static var c: OptSet { return OptSet(rawValue: 4) } + public static var d: OptSet { return OptSet(rawValue: 8) } +} + +@_noLocks +func testOptionSet(_ options: OptSet) -> Bool { + return options.contains(.b) +} + +let globalA = 0xff +let globalB = UInt32(globalA) + +@_noLocks +func testGlobalsWithConversion() -> UInt32 { + return globalB +} + diff --git a/test/SILOptimizer/performance-annotations2.swift b/test/SILOptimizer/performance-annotations2.swift new file mode 100644 index 0000000000000..81c4918221064 --- /dev/null +++ b/test/SILOptimizer/performance-annotations2.swift @@ -0,0 +1,20 @@ +// RUN: %target-swift-frontend -experimental-performance-annotations %s -sil-verify-all -module-name=test -emit-sil | %FileCheck %s + +// REQUIRES: swift_in_compiler,swift_stdlib_no_asserts,optimized_stdlib +// UNSUPPORTED: swift_test_mode_optimize + +public struct Stack { + var size = 42 +} + +// CHECK-LABEL: sil [no_allocation] @$s4test11createStackyyF : +// CHECK: [[F:%[0-9]+]] = function_ref @$s4test5StackVACyxGycfCSi_Tgm5 +// CHECK: [[S:%[0-9]+]] = apply [[F]]() +// CHECK: debug_value [[S]] +// CHECK: } // end sil function '$s4test11createStackyyF' +@_noAllocation +public func createStack() { + let s = Stack() + _ = s.size +} + diff --git a/test/SILOptimizer/simplify_apply.sil b/test/SILOptimizer/simplify_apply.sil index f991bb57a9b73..847ae9f7477ea 100644 --- a/test/SILOptimizer/simplify_apply.sil +++ b/test/SILOptimizer/simplify_apply.sil @@ -2,77 +2,33 @@ // REQUIRES: swift_in_compiler -import Swift -import Builtin - -sil @closure_with_args : $@convention(thin) (Int, Bool) -> () -sil @closure2_with_args : $@convention(thin) (Int, String) -> () -sil @closure3_with_args : $@convention(thin) (String, Bool) -> () -sil @generic_callee_inguaranteed : $@convention(thin) (@in_guaranteed T, @in_guaranteed U) -> () - -// CHECK-LABEL: sil @test_apply_of_partial_apply -// CHECK: [[F:%.*]] = function_ref @closure_with_args -// CHECK-NOT: partial_apply -// CHECK: apply [[F]](%0, %1) -// CHECK: } // end sil function 'test_apply_of_partial_apply' -sil @test_apply_of_partial_apply : $@convention(thin) (Int, Bool) -> () { -bb0(%0 : $Int, %1 : $Bool): - %2 = function_ref @closure_with_args : $@convention(thin) (Int, Bool) -> () - %3 = partial_apply %2(%1) : $@convention(thin) (Int, Bool) -> () - apply %3(%0) : $@callee_owned (Int) -> () - %r = tuple() - return %r : $() -} +sil_stage canonical -// Currently this is not optimized by the simplification passes. 
-// TODO: change the check lines if we can handle generic closures +import Builtin +import Swift +import SwiftShims -// CHECK-LABEL: sil @test_generic_partial_apply_apply_inguaranteed -// CHECK: [[F:%.*]] = function_ref @generic_callee_inguaranteed -// CHECK: [[PA:%.*]] = partial_apply [[F]] -// CHECK: apply [[PA]] -// CHECK: } // end sil function 'test_generic_partial_apply_apply_inguaranteed' -sil @test_generic_partial_apply_apply_inguaranteed : $@convention(thin) (@in T, @in T) -> () { -bb0(%0 : $*T, %1 : $*T): - %f1 = function_ref @generic_callee_inguaranteed : $@convention(thin) (@in_guaranteed T, @in_guaranteed U) -> () - %pa = partial_apply %f1(%1) : $@convention(thin) (@in_guaranteed T, @in_guaranteed U) -> () - %a1 = apply %pa(%0) : $@callee_owned (@in_guaranteed T) -> () - destroy_addr %0 : $*T - %r = tuple () - return %r : $() +class Bar { + init() + func foo() -> Int } -// Currently this is not optimized by the simplification passes. -// TODO: change the check lines if we can handle non-trivial arguments - -// CHECK-LABEL: sil @dont_handle_non_trivial_pa_args -// CHECK: [[F:%.*]] = function_ref @closure2_with_args -// CHECK: [[PA:%.*]] = partial_apply [[F]] -// CHECK: apply [[PA]] -// CHECK: } // end sil function 'dont_handle_non_trivial_pa_args' -sil @dont_handle_non_trivial_pa_args : $@convention(thin) (Int, String) -> () { -bb0(%0 : $Int, %1 : $String): - %2 = function_ref @closure2_with_args : $@convention(thin) (Int, String) -> () - %3 = partial_apply %2(%1) : $@convention(thin) (Int, String) -> () - apply %3(%0) : $@callee_owned (Int) -> () - %r = tuple() - return %r : $() +// CHECK-LABEL: sil @devirt_class_method : +// CHECK: [[F:%.*]] = function_ref @bar_foo +// CHECK: apply [[F]] +// CHECK: } // end sil function 'devirt_class_method' +sil @devirt_class_method : $@convention(thin) () -> Int { +bb0: + %0 = alloc_ref $Bar + %1 = class_method %0 : $Bar, #Bar.foo : (Bar) -> () -> Int, $@convention(method) (@guaranteed Bar) -> Int + %2 = apply %1(%0) : $@convention(method) (@guaranteed Bar) -> Int + strong_release %0 : $Bar + return %2 : $Int } -// Currently this is not optimized by the simplification passes. 
-// TODO: change the check lines if we can handle non-trivial arguments +sil @bar_foo : $@convention(method) (@guaranteed Bar) -> Int -// CHECK-LABEL: sil @dont_handle_non_trivial_apply_args -// CHECK: [[F:%.*]] = function_ref @closure3_with_args -// CHECK: [[PA:%.*]] = partial_apply [[F]] -// CHECK: apply [[PA]] -// CHECK: } // end sil function 'dont_handle_non_trivial_apply_args' -sil @dont_handle_non_trivial_apply_args : $@convention(thin) (String, Bool) -> () { -bb0(%0 : $String, %1 : $Bool): - %2 = function_ref @closure3_with_args : $@convention(thin) (String, Bool) -> () - %3 = partial_apply %2(%1) : $@convention(thin) (String, Bool) -> () - apply %3(%0) : $@callee_owned (String) -> () - %r = tuple() - return %r : $() +sil_vtable Bar { + #Bar.foo: @bar_foo } diff --git a/test/SILOptimizer/simplify_begin_apply.sil b/test/SILOptimizer/simplify_begin_apply.sil new file mode 100644 index 0000000000000..b3308b3648b8d --- /dev/null +++ b/test/SILOptimizer/simplify_begin_apply.sil @@ -0,0 +1,36 @@ +// RUN: %target-sil-opt -enable-sil-verify-all %s -onone-simplification -simplify-instruction=begin_apply | %FileCheck %s + +// REQUIRES: swift_in_compiler + +sil_stage canonical + +import Builtin +import Swift +import SwiftShims + +class Bar { + init() + @_borrowed @_hasStorage var field: Int { get set } +} + +// CHECK-LABEL: sil @devirt_class_method : +// CHECK: [[F:%.*]] = function_ref @bar_field_read +// CHECK: begin_apply [[F]] +// CHECK: } // end sil function 'devirt_class_method' +sil @devirt_class_method : $@convention(thin) () -> (Int, @error any Error) { +bb0: + %0 = alloc_ref $Bar + %1 = class_method %0 : $Bar, #Bar.field!read : (Bar) -> () -> (), $@yield_once @convention(method) (@guaranteed Bar) -> @yields Int + (%2, %3) = begin_apply %1(%0) : $@yield_once @convention(method) (@guaranteed Bar) -> @yields Int + end_apply %3 + return %2 : $Int +} + +sil @bar_field_read : $@yield_once @convention(method) (@guaranteed Bar) -> @yields Int + +sil_vtable Bar { + #Bar.field!read: @bar_field_read +} + + + diff --git a/test/SILOptimizer/simplify_builtin.sil b/test/SILOptimizer/simplify_builtin.sil index 4d7dbf01037c9..5ff897ec6f904 100644 --- a/test/SILOptimizer/simplify_builtin.sil +++ b/test/SILOptimizer/simplify_builtin.sil @@ -1,5 +1,7 @@ -// RUN: %target-sil-opt -enable-sil-verify-all %s -onone-simplification -simplify-instruction=builtin | %FileCheck %s --check-prefix=CHECK --check-prefix=EARLY -// RUN: %target-sil-opt -enable-sil-verify-all %s -late-onone-simplification -simplify-instruction=builtin | %FileCheck %s --check-prefix=CHECK --check-prefix=LATE +// RUN: %target-sil-opt -enable-sil-verify-all %s -onone-simplification -simplify-instruction=builtin | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-EARLY +// RUN: %target-sil-opt -enable-sil-verify-all %s -late-onone-simplification -simplify-instruction=builtin | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-LATE +// RUN: %target-sil-opt -enable-sil-verify-all %s -assert-conf-id=1 -onone-simplification -simplify-instruction=builtin | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-NOASSERTS +// RUN: %target-sil-opt -enable-sil-verify-all %s -assert-conf-id=2 -onone-simplification -simplify-instruction=builtin | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-NOASSERTS // REQUIRES: swift_in_compiler @@ -55,7 +57,7 @@ bb0(%0 : $@thin Int.Type): // CHECK-LABEL: sil @isConcrete_false // CHECK: bb0(%0 : $@thin T.Type): // CHECK-EARLY: [[R:%.*]] = builtin "isConcrete"(%0 : $@thin T.Type) : $Builtin.Int1 
-// CHECK-LATE: [[R:%.*]] = integer_literal $Builtin.Int1, -1 +// CHECK-LATE: [[R:%.*]] = integer_literal $Builtin.Int1, 0 // CHECK: return [[R]] // CHECK: } // end sil function 'isConcrete_false' sil @isConcrete_false : $@convention(thin) (@thin T.Type) -> Builtin.Int1 { @@ -322,3 +324,60 @@ bb0(%0 : $Builtin.RawPointer): return %3 : $() } +// CHECK-LABEL: sil @generic_canBeClass +// CHECK: [[TYPE:%.*]] = metatype $@thick T.Type +// CHECK: [[B:%.*]] = builtin "canBeClass"([[TYPE]] : $@thick T.Type) +// CHECK: [[R:%.*]] = struct $Int8 ([[B]] : $Builtin.Int8) +// CHECK: return [[R]] +// CHECK: } // end sil function 'generic_canBeClass' +sil @generic_canBeClass : $@convention(thin) (@in T) -> Int8 { +bb0(%0 : $*T): + %1 = metatype $@thick T.Type + %3 = builtin "canBeClass"(%1 : $@thick T.Type) : $Builtin.Int8 + %4 = struct $Int8 (%3 : $Builtin.Int8) + destroy_addr %0 : $*T + return %4 : $Int8 +} + +// CHECK-LABEL: sil @int_canBeClass +// CHECK-NOT: builtin "canBeClass" +// CHECK: [[L:%.*]] = integer_literal $Builtin.Int8, 0 +// CHECK: [[R:%.*]] = struct $Int8 ([[L]] : $Builtin.Int8) +// CHECK: return [[R]] +// CHECK: } // end sil function 'int_canBeClass' +sil @int_canBeClass : $@convention(thin) () -> Int8 { +bb0: + %1 = metatype $@thick Int.Type + %3 = builtin "canBeClass"(%1 : $@thick Int.Type) : $Builtin.Int8 + %4 = struct $Int8 (%3 : $Builtin.Int8) + return %4 : $Int8 +} + +// CHECK-LABEL: sil @class_canBeClass +// CHECK-NOT: builtin "canBeClass" +// CHECK: [[L:%.*]] = integer_literal $Builtin.Int8, 1 +// CHECK: [[R:%.*]] = struct $Int8 ([[L]] : $Builtin.Int8) +// CHECK: return [[R]] +// CHECK: } // end sil function 'class_canBeClass' +sil @class_canBeClass : $@convention(thin) () -> Int8 { +bb0: + %1 = metatype $@thick C1.Type + %3 = builtin "canBeClass">(%1 : $@thick C1.Type) : $Builtin.Int8 + %4 = struct $Int8 (%3 : $Builtin.Int8) + return %4 : $Int8 +} + +// CHECK-LABEL: sil @remove_assert_configuration +// CHECK-NOT: builtin "assert_configuration" +// CHECK-EARLY: [[L:%.*]] = integer_literal $Builtin.Int8, 1 +// CHECK-NOASSERTS: [[L:%.*]] = integer_literal $Builtin.Int8, 0 +// CHECK: [[R:%.*]] = struct $Int8 ([[L]] : $Builtin.Int8) +// CHECK: return [[R]] +// CHECK: } // end sil function 'remove_assert_configuration' +sil @remove_assert_configuration : $@convention(thin) () -> Int8 { +bb0: + %3 = builtin "assert_configuration"() : $Builtin.Int8 + %4 = struct $Int8 (%3 : $Builtin.Int8) + return %4 : $Int8 +} + diff --git a/test/SILOptimizer/simplify_partial_apply.sil b/test/SILOptimizer/simplify_partial_apply.sil new file mode 100644 index 0000000000000..489234e83614f --- /dev/null +++ b/test/SILOptimizer/simplify_partial_apply.sil @@ -0,0 +1,103 @@ +// RUN: %target-sil-opt -enable-sil-verify-all %s -onone-simplification -simplify-instruction=partial_apply | %FileCheck %s + +// REQUIRES: swift_in_compiler + +import Swift +import Builtin + +sil @closure_with_args : $@convention(thin) (Int, Bool) -> () +sil @closure2_with_args : $@convention(thin) (Int, String) -> () +sil @closure3_with_args : $@convention(thin) (String, Bool) -> () +sil @generic_callee_inguaranteed : $@convention(thin) (@in_guaranteed T, @in_guaranteed U) -> () + +// CHECK-LABEL: sil @test_apply_of_partial_apply +// CHECK: [[F:%.*]] = function_ref @closure_with_args +// CHECK-NOT: partial_apply +// CHECK: apply [[F]](%0, %1) +// CHECK: } // end sil function 'test_apply_of_partial_apply' +sil @test_apply_of_partial_apply : $@convention(thin) (Int, Bool) -> () { +bb0(%0 : $Int, %1 : $Bool): + %2 = function_ref 
@closure_with_args : $@convention(thin) (Int, Bool) -> () + %3 = partial_apply %2(%1) : $@convention(thin) (Int, Bool) -> () + apply %3(%0) : $@callee_owned (Int) -> () + %r = tuple() + return %r : $() +} + +// CHECK-LABEL: sil @test_generic_partial_apply_apply_inguaranteed +// CHECK: [[F:%.*]] = function_ref @generic_callee_inguaranteed +// CHECK: [[S:%.*]] = alloc_stack $T +// CHECK: copy_addr [take] %1 to [init] [[S]] +// CHECK-NOT: partial_apply +// CHECK: apply [[F]](%0, [[S]]) +// CHECK: destroy_addr [[S]] +// CHECK: destroy_addr %0 +// CHECK: } // end sil function 'test_generic_partial_apply_apply_inguaranteed' +sil @test_generic_partial_apply_apply_inguaranteed : $@convention(thin) (@in T, @in T) -> () { +bb0(%0 : $*T, %1 : $*T): + %f1 = function_ref @generic_callee_inguaranteed : $@convention(thin) (@in_guaranteed T, @in_guaranteed U) -> () + %pa = partial_apply %f1(%1) : $@convention(thin) (@in_guaranteed T, @in_guaranteed U) -> () + %a1 = apply %pa(%0) : $@callee_owned (@in_guaranteed T) -> () + destroy_addr %0 : $*T + %r = tuple () + return %r : $() +} + +// CHECK-LABEL: sil @test_non_trivial_pa_args +// CHECK: [[F:%.*]] = function_ref @closure2_with_args +// CHECK-NOT: partial_apply +// CHECK: apply [[F]](%0, %1) +// CHECK: release_value %1 +// CHECK: } // end sil function 'test_non_trivial_pa_args' +sil @test_non_trivial_pa_args : $@convention(thin) (Int, String) -> () { +bb0(%0 : $Int, %1 : $String): + %2 = function_ref @closure2_with_args : $@convention(thin) (Int, String) -> () + %3 = partial_apply %2(%1) : $@convention(thin) (Int, String) -> () + apply %3(%0) : $@callee_owned (Int) -> () + %r = tuple() + return %r : $() +} + +// CHECK-LABEL: sil @test_non_trivial_apply_args +// CHECK: [[F:%.*]] = function_ref @closure3_with_args +// CHECK-NOT: partial_apply +// CHECK: apply [[F]](%0, %1) +// CHECK: } // end sil function 'test_non_trivial_apply_args' +sil @test_non_trivial_apply_args : $@convention(thin) (String, Bool) -> () { +bb0(%0 : $String, %1 : $Bool): + %2 = function_ref @closure3_with_args : $@convention(thin) (String, Bool) -> () + %3 = partial_apply %2(%1) : $@convention(thin) (String, Bool) -> () + apply %3(%0) : $@callee_owned (String) -> () + %r = tuple() + return %r : $() +} + +// CHECK-LABEL: sil @test_delete_dead_closure +// CHECK-NOT: function_ref +// CHECK-NOT: partial_apply +// CHECK: release_value %0 : $String +// CHECK: } // end sil function 'test_delete_dead_closure' +sil @test_delete_dead_closure : $@convention(thin) (@owned String, Bool) -> () { +bb0(%0 : $String, %1 : $Bool): + %2 = function_ref @closure3_with_args : $@convention(thin) (String, Bool) -> () + %3 = partial_apply %2(%0, %1) : $@convention(thin) (String, Bool) -> () + release_value %3 : $@callee_owned () -> () + %r = tuple() + return %r : $() +} + +// CHECK-LABEL: sil @test_dont_delete_dead_closure_with_debug_use +// CHECK: [[F:%.*]] = function_ref @closure3_with_args +// CHECK: [[C:%.*]] = partial_apply [[F]] +// CHECK: release_value [[C]] +// CHECK: } // end sil function 'test_dont_delete_dead_closure_with_debug_use' +sil @test_dont_delete_dead_closure_with_debug_use : $@convention(thin) (@owned String, Bool) -> () { +bb0(%0 : $String, %1 : $Bool): + %2 = function_ref @closure3_with_args : $@convention(thin) (String, Bool) -> () + %3 = partial_apply %2(%0, %1) : $@convention(thin) (String, Bool) -> () + debug_value %3 : $@callee_owned () -> (), let, name "x" + release_value %3 : $@callee_owned () -> () + %r = tuple() + return %r : $() +} + diff --git 
new file mode 100644
index 0000000000000..6de9efc0a904d
--- /dev/null
+++ b/test/SILOptimizer/simplify_try_apply.sil
@@ -0,0 +1,41 @@
+// RUN: %target-sil-opt -enable-sil-verify-all %s -onone-simplification -simplify-instruction=try_apply | %FileCheck %s
+
+// REQUIRES: swift_in_compiler
+
+sil_stage canonical
+
+import Builtin
+import Swift
+import SwiftShims
+
+class Bar {
+  init()
+  func foo() throws -> Int
+}
+
+// CHECK-LABEL: sil @devirt_class_method :
+// CHECK: [[F:%.*]] = function_ref @bar_foo
+// CHECK: try_apply [[F]]
+// CHECK: } // end sil function 'devirt_class_method'
+sil @devirt_class_method : $@convention(thin) () -> (Int, @error any Error) {
+bb0:
+  %0 = alloc_ref $Bar
+  %1 = class_method %0 : $Bar, #Bar.foo : (Bar) -> () throws -> Int, $@convention(method) (@guaranteed Bar) -> (Int, @error any Error)
+  try_apply %1(%0) : $@convention(method) (@guaranteed Bar) -> (Int, @error any Error), normal bb1, error bb2
+
+bb1(%3 : $Int):
+  strong_release %0 : $Bar
+  return %3 : $Int
+
+bb2(%6 : $Error):
+  strong_release %0 : $Bar
+  throw %6 : $Error
+}
+
+sil @bar_foo : $@convention(method) (@guaranteed Bar) -> (Int, @error any Error)
+
+sil_vtable Bar {
+  #Bar.foo: @bar_foo
+}
+
+
diff --git a/test/SILOptimizer/simplify_unchecked_enum_data.sil b/test/SILOptimizer/simplify_unchecked_enum_data.sil
index 911ff52438461..3106a8cf435aa 100644
--- a/test/SILOptimizer/simplify_unchecked_enum_data.sil
+++ b/test/SILOptimizer/simplify_unchecked_enum_data.sil
@@ -57,9 +57,9 @@ bb0(%0 : @owned $String):
 }
 
 // CHECK-LABEL: sil [ossa] @dont_forward_owned_enum_data_with_uses :
-// CHECK %1 = enum
-// CHECK %4 = unchecked_enum_data %1
-// CHECK return %4
+// CHECK: %1 = enum
+// CHECK: %4 = unchecked_enum_data %1
+// CHECK: return %4
 // CHECK: } // end sil function 'dont_forward_owned_enum_data_with_uses'
 sil [ossa] @dont_forward_owned_enum_data_with_uses : $@convention(thin) (@owned String) -> @owned String {
 bb0(%0 : @owned $String):
diff --git a/test/SILOptimizer/stack_promotion.sil b/test/SILOptimizer/stack_promotion.sil
index 58a8faa4756b1..b47f914908bde 100644
--- a/test/SILOptimizer/stack_promotion.sil
+++ b/test/SILOptimizer/stack_promotion.sil
@@ -1156,3 +1156,24 @@ bb0(%0 : $UnownedLink):
   %r = tuple ()
   return %r : $()
 }
+
+// CHECK-LABEL: sil @dont_crash_with_wrong_stacknesting_with_infinite_loop
+// CHECK: alloc_ref [stack] $XX
+// CHECK-LABEL: } // end sil function 'dont_crash_with_wrong_stacknesting_with_infinite_loop'
+sil @dont_crash_with_wrong_stacknesting_with_infinite_loop : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_stack $Int
+  %1 = alloc_ref $XX
+  dealloc_stack %0 : $*Int
+  cond_br undef, bb1, bb2
+bb1:
+  strong_release %1 : $XX
+  %4 = tuple ()
+  return %4 : $()
+
+bb2:
+  br bb3
+bb3:
+  br bb3
+}
+
diff --git a/validation-test/IDE/crashers_2_fixed/0038-setTypeForArgumentIgnoredForCompletion.swift b/validation-test/IDE/crashers_2_fixed/0038-setTypeForArgumentIgnoredForCompletion.swift
new file mode 100644
index 0000000000000..81c6b5a973eec
--- /dev/null
+++ b/validation-test/IDE/crashers_2_fixed/0038-setTypeForArgumentIgnoredForCompletion.swift
@@ -0,0 +1,24 @@
+// RUN: %empty-directory(%t)
+// RUN: %target-swift-ide-test -batch-code-completion -source-filename %s -filecheck %raw-FileCheck -completion-output-dir %t
+
+func overloaded(content: () -> Int) {}
+func overloaded(@MyResultBuilder stuff: () -> Int) {}
+
+@resultBuilder struct MyResultBuilder {
+  static func buildExpression(_ content: Int) -> Int { content }
+  static func buildBlock() -> Int { 4 }
+}
+
+struct HStack {
+  init(spacing: Double, @MyResultBuilder content: () -> Int) {}
+  func qadding(_ length: Double) { }
+}
+
+func test() {
+  overloaded {
+    HStack(spacing: #^COMPLETE^#) {}
+      .qadding(32)
+  }
+}
+
+// COMPLETE: Literal[Integer]/None/TypeRelation[Convertible]: 0[#Double#]; name=0