From b9f727d3df188de4cd2b2cef9247fbf3745d2691 Mon Sep 17 00:00:00 2001 From: Konrad `ktoso` Malawski Date: Tue, 9 Jun 2020 23:37:36 +0900 Subject: [PATCH 1/2] =logging prepare for LoggerWithSource from swift-log =log readjust where LoggerWithSource makes sense - not everywhere --- .swiftformat | 3 +- .../main.swift | 2 +- .../main.swift | 2 +- .../it_ProcessIsolated_noLeaking/main.swift | 2 +- .../main.swift | 2 +- .../main.swift | 2 +- Package.swift | 3 +- Sources/DistributedActors/ActorLogging.swift | 193 +++++++++--------- Sources/DistributedActors/ActorSystem.swift | 34 ++- .../ActorSystemSettings.swift | 21 +- .../Cluster/ClusterShell.swift | 20 -- .../Cluster/ClusterShellState.swift | 6 +- .../Cluster/SWIM/SWIMInstance+Logging.swift | 2 +- .../Transport/TransportPipelines.swift | 32 +-- Sources/DistributedActors/DeadLetters.swift | 15 +- .../Gossip/Gossip+Shell.swift | 4 +- .../Pattern/ConvergentGossip.swift | 6 +- .../Protobuf/ActorAddress.pb.swift | 2 +- Sources/DistributedActors/Refs.swift | 4 +- .../Serialization/Serialization+Context.swift | 4 +- .../Serialization/Serialization.swift | 12 +- .../ActorPingPongBenchmarks.swift | 2 +- .../ActorRemotePingPongBenchmarks.swift | 4 +- .../SerializationCodableBenchmarks.swift | 2 +- .../SerializationProtobufBenchmarks.swift | 2 +- .../Cluster/ClusteredNodesTestBase.swift | 2 +- .../DistributedActorsTestKit/LogCapture.swift | 55 +++-- .../ActorCell/XPCServiceCellDelegate.swift | 5 +- .../ProcessIsolatedDocExamples.swift | 2 +- .../ActorRefAdapterTests.swift | 6 +- .../BehaviorTests.swift | 2 +- ...LogClusterReceptionistClusteredTests.swift | 4 + .../DeadLetterTests.swift | 2 +- 33 files changed, 223 insertions(+), 236 deletions(-) diff --git a/.swiftformat b/.swiftformat index 8d89883f7..03dba801a 100644 --- a/.swiftformat +++ b/.swiftformat @@ -1,11 +1,12 @@ # file options ---swiftversion 5.1 +--swiftversion 5.2 --exclude .build --exclude "**/*.pb.swift" --exclude "**/*+GenActor.swift" --exclude 
"**/*+GenCodable.swift" --exclude "**/*+XPCProtocolStub.swift" +--disable redundantParens # https://github.com/nicklockwood/SwiftFormat/issues/638 # format options diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift index fd4f6c694..41f73354b 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift @@ -21,7 +21,7 @@ import Glibc import DistributedActors let isolated = ProcessIsolated { boot in - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info boot.runOn(role: .servant) { boot.settings.failure.onGuardianFailure = .systemExit(-1) } diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift index 70802948c..2e1a19c49 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift @@ -21,7 +21,7 @@ import Glibc import DistributedActors let isolated = ProcessIsolated { boot in - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info boot.runOn(role: .servant) { boot.settings.failure.onGuardianFailure = .systemExit(-1) } diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift index c52e46c17..61c2de3ec 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift @@ -22,7 +22,7 @@ import DistributedActors let isolated = 
ProcessIsolated { boot in // create actor system (for each process this will run a new since this is the beginning of the program) - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info return ActorSystem(settings: boot.settings) } diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift index f65b06382..9ff80af19 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift @@ -22,7 +22,7 @@ import DistributedActors let isolated = ProcessIsolated { boot in // create a new actor system (for each process this will run a new since this is the beginning of the program) - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info return ActorSystem(settings: boot.settings) } diff --git a/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift b/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift index 22ebfea98..b158d6538 100644 --- a/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift +++ b/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift @@ -26,7 +26,7 @@ guard args.count >= 1 else { } let system = ActorSystem("System") { settings in - settings.logging.defaultLevel = .info + settings.logging.logLevel = .info settings.cluster.enabled = true settings.cluster.bindPort = Int(args[0])! 
diff --git a/Package.swift b/Package.swift index 2957a053e..926254870 100644 --- a/Package.swift +++ b/Package.swift @@ -258,7 +258,8 @@ var dependencies: [Package.Dependency] = [ // ~~~ SSWG APIs ~~~ - .package(url: "https://github.com/apple/swift-log.git", from: "1.0.0"), + // .package(url: "https://github.com/apple/swift-log.git", from: "1.0.0"), + .package(url: "https://github.com/weissi/swift-log.git", .branch("jw-source")), // swift-metrics 1.x and 2.x are almost API compatible, so most clients should use .package(url: "https://github.com/apple/swift-metrics.git", "1.0.0" ..< "3.0.0"), diff --git a/Sources/DistributedActors/ActorLogging.swift b/Sources/DistributedActors/ActorLogging.swift index 9670dcee2..5931203e6 100644 --- a/Sources/DistributedActors/ActorLogging.swift +++ b/Sources/DistributedActors/ActorLogging.swift @@ -16,14 +16,18 @@ import DistributedActorsConcurrencyHelpers import Foundation import Logging +/// :nodoc: /// - Warning: NOT thread safe! Only use from Actors, properly synchronize access, or create multiple instances for each execution context. -public class LoggingContext { +// TODO: deprecate, we should not need this explicit type +internal final class LoggingContext { let identifier: String // TODO: want to eventually not have this; also move to more structured logging perhaps... /// If `true` the built-in "pretty" formatter should be used, rather than passing verbatim to underlying `LogHandler` let useBuiltInFormatter: Bool + let logger: Logger + @usableFromInline internal var _storage: Logger.Metadata = [:] @@ -36,7 +40,8 @@ public class LoggingContext { } } - public init(identifier: String, useBuiltInFormatter: Bool, dispatcher: (() -> String)?) { + public init(logger: Logger, identifier: String, useBuiltInFormatter: Bool, dispatcher: (() -> String)?) 
{ + self.logger = logger self.identifier = identifier self.useBuiltInFormatter = useBuiltInFormatter if let makeDispatcherName = dispatcher { @@ -69,40 +74,8 @@ public class LoggingContext { /// The preferred way of obtaining a logger for an actor or system is `context.log` or `system.log`, rather than creating new ones. public struct ActorLogger { public static func make(context: ActorContext) -> Logger { - if let overriddenLoggerFactory = context.system.settings.logging.overrideLoggerFactory { return overriddenLoggerFactory("\(context.path)") - } - - var proxyHandler = ActorOriginLogHandler(context) - proxyHandler.metadata["actorPath"] = "\(context.path)" - if context.system.settings.cluster.enabled { - proxyHandler.metadata["node"] = "\(context.system.settings.cluster.node)" - } else { - proxyHandler.metadata["nodeName"] = "\(context.system.name)" - } - - var log = Logger(label: "\(context.path)", factory: { _ in proxyHandler }) - log.logLevel = context.system.settings.logging.defaultLevel - return log - } - - public static func make(system: ActorSystem, identifier: String? = nil) -> Logger { - if let overriddenLoggerFactory = system.settings.logging.overrideLoggerFactory { - return overriddenLoggerFactory(identifier ?? system.name) - } - - // we need to add our own storage, and can't do so to Logger since it is a struct... - // so we need to make such "proxy log handler", that does out actor specific things. - var proxyHandler = ActorOriginLogHandler(system) - if system.settings.cluster.enabled { - proxyHandler.metadata["node"] = .lazyStringConvertible { () in - system.settings.cluster.node - } - } else { - proxyHandler.metadata["nodeName"] = .string(system.name) - } - - var log = Logger(label: identifier ?? 
system.name, factory: { _ in proxyHandler }) - log.logLevel = system.settings.logging.defaultLevel + var log = context.system.log.logger + log[metadataKey: "actor/path"] = Logger.MetadataValue.stringConvertible(context.path) return log } } @@ -119,19 +92,20 @@ public struct ActorOriginLogHandler: LogHandler { private let context: LoggingContext - private var loggingSystemSelectedLogger: Logger + private var targetLogger: Logger - public init(_ context: LoggingContext) { + internal init(_ context: LoggingContext) { self.context = context - self.loggingSystemSelectedLogger = Logger(label: context.identifier) - self.loggingSystemSelectedLogger.logLevel = self.logLevel + self.targetLogger = context.logger + self.targetLogger.logLevel = self.logLevel } - public init(_ context: ActorContext) { + internal init(_ context: ActorContext) { let dispatcherName = context.props.dispatcher.name self.init( LoggingContext( + logger: context.log, identifier: context.path.description, useBuiltInFormatter: context.system.settings.logging.useBuiltInFormatter, dispatcher: { () in dispatcherName } // beware of closing over the context here (!) @@ -142,6 +116,7 @@ public struct ActorOriginLogHandler: LogHandler { public init(_ system: ActorSystem, identifier: String? = nil) { self.init( LoggingContext( + logger: system.log.logger, identifier: identifier ?? system.name, useBuiltInFormatter: system.settings.logging.useBuiltInFormatter, dispatcher: { () in _hackyPThreadThreadId() } @@ -169,70 +144,74 @@ public struct ActorOriginLogHandler: LogHandler { internal func invokeConfiguredLoggingInfra(_ logMessage: LogMessage) { // TODO: here we can either log... or dispatch to actor... or invoke Logging. 
etc - if self.context.useBuiltInFormatter { - var l = logMessage + guard self.context.useBuiltInFormatter else { + self.targetLogger.log(level: logMessage.level, logMessage.message, metadata: self.metadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + return + } - let dispatcherPart: String - if let d = l.effectiveMetadata?.removeValue(forKey: "dispatcher") { - dispatcherPart = "[\(d)]" - } else { - dispatcherPart = "" - } - let actorPathPart: String - if let d = l.effectiveMetadata?.removeValue(forKey: "actorPath") { - actorPathPart = "[\(d)]" - } else { - actorPathPart = "" - } + var l = logMessage - let actorSystemIdentity: String - if let d = l.effectiveMetadata?.removeValue(forKey: "node") { - actorSystemIdentity = "[\(d)]" + let dispatcherPart: String + if let d = l.effectiveMetadata?.removeValue(forKey: "dispatcher") { + dispatcherPart = "[\(d)]" + } else { + dispatcherPart = "" + } + + let actorPathPart: String + if let d = l.effectiveMetadata?.removeValue(forKey: "actor/path") { + actorPathPart = "[\(d)]" + } else { + actorPathPart = "" + } + + let actorSystemIdentity: String + if let d = l.effectiveMetadata?.removeValue(forKey: "actor/node") { + actorSystemIdentity = "[\(d)]" + } else { + if let name = l.effectiveMetadata?.removeValue(forKey: "actor/nodeName") { + actorSystemIdentity = "[\(name)]" } else { - if let name = l.effectiveMetadata?.removeValue(forKey: "nodeName") { - actorSystemIdentity = "[\(name)]" - } else { - actorSystemIdentity = "" - } + actorSystemIdentity = "" } + } + + var msg = "" + msg += "\(actorSystemIdentity)" + msg += "[\(l.file.description.split(separator: "/").last ?? 
""):\(l.line)]" // we only print "file" rather than full path + msg += "\(dispatcherPart)" + msg += "\(actorPathPart)" + msg += " \(l.message)" + + guard ProcessInfo.processInfo.environment["SACT_PRETTY_LOG"] != nil else { + // no "pretty" logging + self.targetLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: l.effectiveMetadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + return + } - var msg = "" - msg += "\(actorSystemIdentity)" - msg += "[\(l.file.description.split(separator: "/").last ?? ""):\(l.line)]" - msg += "\(dispatcherPart)" - msg += "\(actorPathPart)" - msg += " \(l.message)" - - if ProcessInfo.processInfo.environment["SACT_PRETTY_LOG"] != nil { - if let metadata = l.effectiveMetadata, !metadata.isEmpty { - var metadataString = "\n// metadata:\n" - for key in metadata.keys.sorted() where key != "label" { - var allString = "\n// \"\(key)\": \(metadata[key]!)" - if allString.contains("\n") { - allString = String( - allString.split(separator: "\n").map { valueLine in - if valueLine.starts(with: "// ") { - return "\(valueLine)\n" - } else { - return "// \(valueLine)\n" - } - }.joined(separator: "") - ) - } - metadataString.append(allString) - } - metadataString = String(metadataString.dropLast(1)) - - msg += metadataString + // "pretty" logging + if let metadata = l.effectiveMetadata, !metadata.isEmpty { + var metadataString = "\n// metadata:\n" + for key in metadata.keys.sorted() where key != "label" { + var allString = "\n// \"\(key)\": \(metadata[key]!)" + if allString.contains("\n") { + allString = String( + allString.split(separator: "\n").map { valueLine in + if valueLine.starts(with: "// ") { + return "\(valueLine)\n" + } else { + return "// \(valueLine)\n" + } + }.joined(separator: "") + ) } - self.loggingSystemSelectedLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: [:], file: logMessage.file, function: logMessage.function, line: logMessage.line) - 
} else { - self.loggingSystemSelectedLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: l.effectiveMetadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + metadataString.append(allString) } + metadataString = String(metadataString.dropLast(1)) - } else { - self.loggingSystemSelectedLogger.log(level: logMessage.level, logMessage.message, metadata: self.metadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + msg += metadataString } + self.targetLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: [:], file: logMessage.file, function: logMessage.function, line: logMessage.line) } // TODO: hope to remove this one @@ -245,15 +224,12 @@ public struct ActorOriginLogHandler: LogHandler { } } - private var _logLevel: Logger.Level = .info - public var logLevel: Logger.Level { get { - self._logLevel + self.targetLogger.logLevel } set { - self._logLevel = newValue - self.loggingSystemSelectedLogger.logLevel = newValue + self.targetLogger.logLevel = newValue } } @@ -300,8 +276,21 @@ public struct LogMessage { // MARK: Extend logging metadata storage capabilities +extension Logger.Metadata { + public static func pretty(_ value: T) -> Logger.Metadata.Value where T: CustomPrettyStringConvertible { + .string(value.prettyDescription) + } + + public static func pretty(_ value: T) -> Logger.Metadata.Value { + if let pretty = value as? 
CustomPrettyStringConvertible { + return .string(pretty.prettyDescription) + } else { + return .string("\(value)") + } + } +} + extension Optional where Wrapped == Logger.MetadataValue { - /// Delays rendering of value by boxing it in a `LazyMetadataBox` public static func lazyStringConvertible(_ makeValue: @escaping () -> CustomStringConvertible) -> Logger.Metadata.Value { .stringConvertible(LazyMetadataBox { makeValue() }) } diff --git a/Sources/DistributedActors/ActorSystem.swift b/Sources/DistributedActors/ActorSystem.swift index 1ba6642c1..56bef7022 100644 --- a/Sources/DistributedActors/ActorSystem.swift +++ b/Sources/DistributedActors/ActorSystem.swift @@ -108,11 +108,8 @@ public final class ActorSystem { // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Logging - public var log: Logger { - var l = ActorLogger.make(system: self) - l.logLevel = self.settings.logging.defaultLevel - return l - } + /// Root logger of this actor system, as configured in `LoggingSettings`. 
+ public let log: LoggerWithSource // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Shutdown @@ -188,29 +185,22 @@ public final class ActorSystem { let initializationLock = ReadWriteLock() self.lazyInitializationLock = initializationLock + var rootLogger = settings.logging.logger + if settings.cluster.enabled { + rootLogger[metadataKey: "actor/node"] = "\(settings.cluster.uniqueBindNode)" + } + rootLogger[metadataKey: "actor/nodeName"] = "\(self.name)" + self.log = rootLogger.withSource("ActorSystem(\(self.name))") + + // vvv~~~~~~~~~~~~~~~~~~~ all properties initialized, self can be shared ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~vvv // + // serialization initializationLock.withWriterLockVoid { self._serialization = Serialization(settings: settings, system: self) } - // vvv~~~~~~~~~~~~~~~~~~~ all properties initialized, self can be shared ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~vvv - // dead letters init - let overrideLogger: Logger? = settings.logging.overrideLoggerFactory.map { f in f("\(ActorPath._deadLetters)") } - var deadLogger = overrideLogger ?? 
Logger( - label: "\(ActorPath._deadLetters)", - factory: { - let context = LoggingContext(identifier: $0, useBuiltInFormatter: settings.logging.useBuiltInFormatter, dispatcher: nil) - if settings.cluster.enabled { - context[metadataKey: "node"] = .stringConvertible(settings.cluster.uniqueBindNode) - } - context[metadataKey: "nodeName"] = .stringConvertible(name) - return ActorOriginLogHandler(context) - } - ) - deadLogger.logLevel = settings.logging.defaultLevel - - self._deadLetters = ActorRef(.deadLetters(.init(deadLogger, address: ActorAddress._deadLetters, system: self))) + self._deadLetters = ActorRef(.deadLetters(.init(rootLogger, address: ActorAddress._deadLetters, system: self))) // actor providers let localUserProvider = LocalActorRefProvider(root: Guardian(parent: theOne, name: "user", system: self)) diff --git a/Sources/DistributedActors/ActorSystemSettings.swift b/Sources/DistributedActors/ActorSystemSettings.swift index 142418c77..cd20e6fdf 100644 --- a/Sources/DistributedActors/ActorSystemSettings.swift +++ b/Sources/DistributedActors/ActorSystemSettings.swift @@ -65,11 +65,24 @@ public struct LoggingSettings { public static let `default` = LoggingSettings() /// At what level should the library actually log and print logs // TODO: We'd want to replace this by proper log handlers which allow config by labels - public var defaultLevel: Logger.Level = .info // TODO: maybe remove this? should be up to logging library to configure for us as well + public var logLevel: Logger.Level { // TODO: deprecate + get { + self.logger.logLevel + } + set { + self.logger.logLevel = newValue + } + } + + public var logger: Logger = LoggingSettings.makeDefaultLogger() + + static func makeDefaultLogger() -> Logger { + Logger(label: "<>") + } - /// Optionally override Logger that shall be offered to actors and the system. - /// This is used instead of globally configured `Logging.Logger()` factories by the actor system. 
- public var overrideLoggerFactory: ((String) -> Logger)? +// /// Optionally override Logger that shall be offered to actors and the system. +// /// This is used instead of globally configured `Logging.Logger()` factories by the actor system. +// public var overrideLoggerFactory: ((String) -> Logger)? // TODO: hope to remove this once a StdOutLogHandler lands that has formatting support; // logs are hard to follow with not consistent order of metadata etc (like system address etc). diff --git a/Sources/DistributedActors/Cluster/ClusterShell.swift b/Sources/DistributedActors/Cluster/ClusterShell.swift index 1273baba6..d5b58b2ce 100644 --- a/Sources/DistributedActors/Cluster/ClusterShell.swift +++ b/Sources/DistributedActors/Cluster/ClusterShell.swift @@ -410,26 +410,6 @@ extension ClusterShell { props: ._wellKnown ) -// let gossipControl = try GossipShell.start( -// context, name: "\(ActorAddress._clusterGossip.name)", -// of: Cluster.Gossip.self, ofMetadata: Cluster.Gossip.SeenTable.self, -// props: ._wellKnown, -// settings: .init( -// gossipInterval: .seconds(1), -// // TODO make a protocol to capture all those behaviors GossipLogic / GossipInstance -// onGossipReceived: { identifier, payload, _ in -// assert(identifier.gossipIdentifier == ClusterShell.gossipID.gossipIdentifier, "Received gossip with unexpected identifier [\(identifier)], expected: \(ClusterShell.gossipID)") -// // note that this is on the gossiper's thread, the only thing we can do here is to forward the message -// context.myself.tell(.gossipFromGossiper(payload)) -// }, -// onGossipRound: { identity, envelope in -// // OMG need futures so badly...! 
-// // we need to check with the actor what to do about this gossip round technically speaking -// envelope -// } -// ) -// ) - let state = ClusterShellState( settings: clusterSettings, channel: chan, diff --git a/Sources/DistributedActors/Cluster/ClusterShellState.swift b/Sources/DistributedActors/Cluster/ClusterShellState.swift index 0b929905f..186c6767e 100644 --- a/Sources/DistributedActors/Cluster/ClusterShellState.swift +++ b/Sources/DistributedActors/Cluster/ClusterShellState.swift @@ -19,12 +19,10 @@ import NIO // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Shell State -// TODO: we hopefully will rather than this, end up with specialized protocols depending on what we need to expose, -// and then types may require those specific capabilities from the shell; e.g. scheduling things or similar. internal protocol ReadOnlyClusterState { var log: Logger { get } var allocator: ByteBufferAllocator { get } - var eventLoopGroup: EventLoopGroup { get } // TODO: or expose the MultiThreaded one...? + var eventLoopGroup: EventLoopGroup { get } /// Base backoff strategy to use in handshake retries // TODO: move it around somewhere so only handshake cares? 
var handshakeBackoff: BackoffStrategy { get } @@ -83,7 +81,6 @@ internal struct ClusterShellState: ReadOnlyClusterState { self._latestGossip = next } self.gossipControl.update(payload: self._latestGossip) -// self.gossipControl.update() } } @@ -96,7 +93,6 @@ internal struct ClusterShellState: ReadOnlyClusterState { } } -// init(settings: ClusterSettings, channel: Channel, events: EventStream, gossipControl: GossipControl, log: Logger) { init(settings: ClusterSettings, channel: Channel, events: EventStream, gossipControl: ConvergentGossipControl, log: Logger) { self.log = log self.settings = settings diff --git a/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift b/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift index 11417b3ed..71e3d970a 100644 --- a/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift +++ b/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift @@ -21,7 +21,7 @@ extension SWIM.Instance { /// While the SWIM.Instance is not meant to be logging by itself, it does offer metadata for loggers to use. 
var metadata: Logger.Metadata { [ - "swim/membersToPing": "\(self.membersToPing)", + "swim/membersToPing": Logger.Metadata.Value.array(self.membersToPing.map { "\($0)" }), "swim/protocolPeriod": "\(self.protocolPeriod)", "swim/timeoutSuspectsBeforePeriodMax": "\(self.timeoutSuspectsBeforePeriodMax)", "swim/timeoutSuspectsBeforePeriodMin": "\(self.timeoutSuspectsBeforePeriodMin)", diff --git a/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift b/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift index 54b08ec0f..0ea91f69c 100644 --- a/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift +++ b/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift @@ -35,11 +35,11 @@ private final class InitiatingHandshakeHandler: ChannelInboundHandler, Removable typealias InboundOut = ByteBuffer typealias OutboundOut = ByteBuffer - private let log: Logger + private let log: LoggerWithSource private let handshakeOffer: Wire.HandshakeOffer private let cluster: ClusterShell.Ref - init(log: Logger, handshakeOffer: Wire.HandshakeOffer, cluster: ClusterShell.Ref) { + init(log: LoggerWithSource, handshakeOffer: Wire.HandshakeOffer, cluster: ClusterShell.Ref) { self.log = log self.handshakeOffer = handshakeOffer self.cluster = cluster @@ -112,11 +112,11 @@ final class ReceivingHandshakeHandler: ChannelInboundHandler, RemovableChannelHa typealias InboundIn = ByteBuffer typealias InboundOut = Never - private let log: Logger + private let log: LoggerWithSource private let cluster: ClusterShell.Ref private let localNode: UniqueNode - init(log: Logger, cluster: ClusterShell.Ref, localNode: UniqueNode) { + init(log: LoggerWithSource, cluster: ClusterShell.Ref, localNode: UniqueNode) { self.log = log self.cluster = cluster self.localNode = localNode @@ -251,9 +251,9 @@ private final class WireEnvelopeHandler: ChannelDuplexHandler { typealias InboundIn = ByteBuffer typealias InboundOut = Wire.Envelope - let log: Logger + let log: 
LoggerWithSource - init(log: Logger) { + init(log: LoggerWithSource) { self.log = log } @@ -290,10 +290,10 @@ final class OutboundSerializationHandler: ChannelOutboundHandler { typealias OutboundIn = TransportEnvelope typealias OutboundOut = Wire.Envelope - let log: Logger + let log: LoggerWithSource let serializationPool: SerializationPool - init(log: Logger, serializationPool: SerializationPool) { + init(log: LoggerWithSource, serializationPool: SerializationPool) { self.log = log self.serializationPool = serializationPool } @@ -353,7 +353,7 @@ internal final class SystemMessageRedeliveryHandler: ChannelDuplexHandler { typealias InboundIn = Wire.Envelope // we handle deserialization in case it is about a system message (since message == system message envelope) typealias InboundOut = Wire.Envelope // we pass along user messages; all system messages are handled in-place here (ack, nack), or delivered directly to the recipient from this handler - private let log: Logger + private let log: LoggerWithSource private let system: ActorSystem private let clusterShell: ClusterShell.Ref @@ -366,7 +366,7 @@ internal final class SystemMessageRedeliveryHandler: ChannelDuplexHandler { private var redeliveryScheduled: Scheduled? 
init( - log: Logger, + log: LoggerWithSource, system: ActorSystem, cluster: ClusterShell.Ref, serializationPool: SerializationPool, @@ -602,12 +602,12 @@ private final class UserMessageHandler: ChannelInboundHandler { typealias InboundIn = Wire.Envelope typealias InboundOut = Never // we terminate here, by sending messages off to local actors - let log: Logger + let log: LoggerWithSource let system: ActorSystem let serializationPool: SerializationPool - init(log: Logger, system: ActorSystem, serializationPool: SerializationPool) { + init(log: LoggerWithSource, system: ActorSystem, serializationPool: SerializationPool) { self.log = log self.system = system self.serializationPool = serializationPool @@ -649,9 +649,9 @@ private final class DumpRawBytesDebugHandler: ChannelInboundHandler { typealias InboundIn = ByteBuffer let role: HandlerRole - var log: Logger + var log: LoggerWithSource - init(role: HandlerRole, log: Logger) { + init(role: HandlerRole, log: LoggerWithSource) { self.role = role self.log = log } @@ -708,7 +708,7 @@ extension ClusterShell { } } - let log = ActorLogger.make(system: system, identifier: "/system/transport.server") + let log = LoggerWithSource(system.log.logger, source: "/system/transport.server") // FIXME: PASS IN FROM ASSOCIATION SINCE MUST SURVIVE CONNECTIONS! // TODO: tests about killing connections the hard way let outboundSysMsgs = OutboundSystemMessageRedelivery(settings: .default) @@ -771,7 +771,7 @@ extension ClusterShell { } } - let log = ActorLogger.make(system: system, identifier: "/system/transport.client") + let log = LoggerWithSource(system.log.logger, source: "/system/transport.client") // FIXME: PASS IN FROM ASSOCIATION SINCE MUST SURVIVE CONNECTIONS !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
let outboundSysMsgs = OutboundSystemMessageRedelivery(settings: .default) diff --git a/Sources/DistributedActors/DeadLetters.swift b/Sources/DistributedActors/DeadLetters.swift index c28446a72..2f23ab115 100644 --- a/Sources/DistributedActors/DeadLetters.swift +++ b/Sources/DistributedActors/DeadLetters.swift @@ -71,7 +71,7 @@ extension ActorSystem { /// /// We don't apply the special /dead path, as to not complicate diagnosing who actually terminated or if we were accidentally sent /// a remote actor ref that was dead(!) - return ActorRef(.deadLetters(.init(self.log, address: recipient, system: self))).adapt(from: Message.self) + return ActorRef(.deadLetters(.init(self.log.logger, address: recipient, system: self))).adapt(from: Message.self) } let localRecipient: ActorAddress @@ -82,7 +82,7 @@ extension ActorSystem { // drop the node from the address; and prepend it as known-to-be-dead localRecipient = ActorAddress(path: ActorPath._dead.appending(segments: recipient.segments), incarnation: recipient.incarnation) } - return ActorRef(.deadLetters(.init(self.log, address: localRecipient, system: self))).adapt(from: Message.self) + return ActorRef(.deadLetters(.init(self.log.logger, address: localRecipient, system: self))).adapt(from: Message.self) } /// Anonymous `/dead/letters` reference, which may be used for messages which have no logical recipient. @@ -140,11 +140,11 @@ extension ActorSystem { /// but not for anything more -- users shall assume that their communication is correct and only debug why a dead reference appeared if it indeed does happen. public final class DeadLetterOffice { let _address: ActorAddress - let log: Logger + let log: LoggerWithSource weak var system: ActorSystem? init(_ log: Logger, address: ActorAddress, system: ActorSystem?) 
{ - self.log = log + self.log = log.withSource("DeadLetters") self._address = address self.system = system } @@ -178,10 +178,7 @@ public final class DeadLetterOffice { // as that may then change ordering; if multiple onDeadLetter executions are ongoing, we want // all of them to be piped to the exact same logging handler, do not create a new Logging.Logger() here (!) - var metadata: Logger.Metadata = [ - "deadLetter": "1", // marker, can be used by logging tools to easily capture all dead letter logging - // TODO: could coalesce and bump a counter if many times the same dead letter is logged - ] + var metadata: Logger.Metadata = [:] let recipientString: String if let recipient = deadLetter.recipient { @@ -199,7 +196,7 @@ public final class DeadLetterOffice { } } - metadata["actorPath"] = Logger.MetadataValue.stringConvertible(deadAddress) + metadata["actor/path"] = Logger.MetadataValue.stringConvertible(deadAddress) recipientString = "to [\(String(reflecting: recipient.debugDescription))]" } else { recipientString = "" diff --git a/Sources/DistributedActors/Gossip/Gossip+Shell.swift b/Sources/DistributedActors/Gossip/Gossip+Shell.swift index c468437dc..878f6f016 100644 --- a/Sources/DistributedActors/Gossip/Gossip+Shell.swift +++ b/Sources/DistributedActors/Gossip/Gossip+Shell.swift @@ -12,6 +12,8 @@ // //===----------------------------------------------------------------------===// +import Logging + /// Convergent gossip is a gossip mechanism which aims to equalize some state across all peers participating. 
internal final class GossipShell { let settings: Settings @@ -211,7 +213,7 @@ internal final class GossipShell { context.log.trace("Sending gossip to \(target.address)", metadata: [ "gossip/target": "\(target.address)", "gossip/peers/count": "\(self.peers.count)", - "actor/message": "\(payload)", + "actor/message": Logger.Metadata.pretty(payload), ]) let ack = target.ask(for: GossipACK.self, timeout: .seconds(3)) { replyTo in diff --git a/Sources/DistributedActors/Pattern/ConvergentGossip.swift b/Sources/DistributedActors/Pattern/ConvergentGossip.swift index bf23b6af3..ff8a7714d 100644 --- a/Sources/DistributedActors/Pattern/ConvergentGossip.swift +++ b/Sources/DistributedActors/Pattern/ConvergentGossip.swift @@ -12,6 +12,8 @@ // //===----------------------------------------------------------------------===// +import Logging + /// Convergent gossip is a gossip mechanism which aims to equalize some state across all peers participating. internal final class ConvergentGossip { typealias GossipPeerRef = ActorRef @@ -104,7 +106,7 @@ internal final class ConvergentGossip { context.log.trace( "Gossip payload updated: \(payload)", metadata: [ - "actor/message": "\(payload)", + "actor/message": Logger.Metadata.pretty(payload), "gossip/previousPayload": "\(self.payload, orElse: "nil")", ] ) @@ -141,7 +143,7 @@ internal final class ConvergentGossip { "gossip/target": "\(target.address)", "gossip/peerCount": "\(self.peers.count)", "gossip/peers": "\(self.peers.map { $0.address })", - "actor/message": "\(envelope)", + "actor/message": Logger.Metadata.pretty(envelope), ] ) diff --git a/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift b/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift index 36b682a89..3a17d40ab 100644 --- a/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift +++ b/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift @@ -205,7 +205,7 @@ extension ProtoActorAddress: SwiftProtobuf.Message, SwiftProtobuf._MessageImplem } extension 
ProtoActorPath: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "ActorPath" + public static let protoMessageName: String = "actor/path" public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 1: .same(proto: "segments"), ] diff --git a/Sources/DistributedActors/Refs.swift b/Sources/DistributedActors/Refs.swift index 6da0a876d..23a4f2578 100644 --- a/Sources/DistributedActors/Refs.swift +++ b/Sources/DistributedActors/Refs.swift @@ -577,7 +577,7 @@ public class Guardian { let message = "Escalated failure from [\(ref.address)] reached top-level guardian [\(self.address.path)], SHUTTING DOWN ActorSystem! " + "(This can be configured in `system.settings.failure.onGuardianFailure`). " + "Failure was: \(failure)" - system.log.error("\(message)", metadata: ["actorPath": "\(self.address.path)"]) + system.log.error("\(message)", metadata: ["actor/path": "\(self.address.path)"]) _ = try! Thread { system.shutdown().wait() // so we don't block anyone who sent us this signal (as we execute synchronously in the guardian) @@ -588,7 +588,7 @@ public class Guardian { #else case .systemExit(let code): let message = "Escalated failure from [\(ref.address)] reached top-level guardian [\(self.address.path)], exiting process (\(code))! 
Failure was: \(failure)" - system.log.error("\(message)", metadata: ["actorPath": "\(self.address.path)"]) + system.log.error("\(message)", metadata: ["actor/path": "\(self.address.path)"]) print(message) // TODO: to stderr POSIXProcessUtils._exit(Int32(code)) diff --git a/Sources/DistributedActors/Serialization/Serialization+Context.swift b/Sources/DistributedActors/Serialization/Serialization+Context.swift index bb333de79..f50b6d2f6 100644 --- a/Sources/DistributedActors/Serialization/Serialization+Context.swift +++ b/Sources/DistributedActors/Serialization/Serialization+Context.swift @@ -25,7 +25,7 @@ extension Serialization { /// /// Context MAY be accessed concurrently be encoders/decoders. public struct Context { - public let log: Logger + public let log: LoggerWithSource public let system: ActorSystem public var serialization: Serialization { @@ -40,7 +40,7 @@ extension Serialization { self.system.cluster.node } - internal init(log: Logger, system: ActorSystem, allocator: NIO.ByteBufferAllocator) { + internal init(log: LoggerWithSource, system: ActorSystem, allocator: NIO.ByteBufferAllocator) { self.log = log self.system = system self.allocator = allocator diff --git a/Sources/DistributedActors/Serialization/Serialization.swift b/Sources/DistributedActors/Serialization/Serialization.swift index 9b33fcfd0..6b74b6f3d 100644 --- a/Sources/DistributedActors/Serialization/Serialization.swift +++ b/Sources/DistributedActors/Serialization/Serialization.swift @@ -35,7 +35,7 @@ import Foundation // for Codable /// for any kind of Message type that is possible to be received by any spawned `Actor`, sub-receive, `Gossip` instance etc. 
/// public class Serialization { - private let log: Logger + private let log: LoggerWithSource internal let settings: Serialization.Settings @usableFromInline internal let metrics: ActorSystemMetrics // TODO: rather, do this via instrumentation @@ -180,16 +180,10 @@ public class Serialization { self.allocator = self.settings.allocator - var log = Logger( - label: "serialization", - factory: { id in - let context = LoggingContext(identifier: id, useBuiltInFormatter: system.settings.logging.useBuiltInFormatter, dispatcher: nil) - return ActorOriginLogHandler(context) - } - ) + var log = system.log.logger.withSource("serialization") // TODO: Dry up setting this metadata log[metadataKey: "node"] = .stringConvertible(systemSettings.cluster.uniqueBindNode) - log.logLevel = systemSettings.logging.defaultLevel + log.logLevel = systemSettings.logging.logLevel self.log = log self.context = Serialization.Context( diff --git a/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift b/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift index 737425be4..2de916b5a 100644 --- a/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift @@ -87,7 +87,7 @@ public let ActorPingPongBenchmarks: [BenchmarkInfo] = [ private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("ActorPingPongBenchmarks") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error } postSetUp() } diff --git a/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift b/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift index b837fcedf..40ad95e69 100644 --- a/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift @@ -66,12 +66,12 @@ var _pongNode: ActorSystem? 
private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("PingNode") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error settings.cluster.enabled = true settings.cluster.node.port = 7111 } _pongNode = ActorSystem("PongNode") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error settings.cluster.enabled = true settings.cluster.node.port = 7222 } diff --git a/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift b/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift index 5ded7ecaf..a64b1ea94 100644 --- a/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift @@ -41,7 +41,7 @@ public let SerializationCodableBenchmarks: [BenchmarkInfo] = [ private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("SerializationCodableBenchmarks") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error } postSetUp() } diff --git a/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift b/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift index 78a72860d..6d008bb2b 100644 --- a/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift @@ -38,7 +38,7 @@ private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("SerializationProtobufBenchmarks") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error } protoSmallMessage.number = 1337 diff --git a/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift b/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift index fc38cdb7a..b2f2562d1 100644 --- a/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift 
+++ b/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift @@ -65,7 +65,7 @@ open class ClusteredNodesTestBase: XCTestCase { settings.cluster.node.port = self.nextPort() if self.captureLogs { - settings.logging.overrideLoggerFactory = capture.loggerFactory(captureLabel: name) + settings.logging.logger = capture.logger(label: name) } settings.cluster.autoLeaderElection = .lowestReachable(minNumberOfMembers: 2) diff --git a/Sources/DistributedActorsTestKit/LogCapture.swift b/Sources/DistributedActorsTestKit/LogCapture.swift index 804e47768..79c87e114 100644 --- a/Sources/DistributedActorsTestKit/LogCapture.swift +++ b/Sources/DistributedActorsTestKit/LogCapture.swift @@ -34,11 +34,9 @@ public final class LogCapture { self.settings = settings } - public func loggerFactory(captureLabel: String) -> ((String) -> Logger) { - self.captureLabel = captureLabel - return { (label: String) in - Logger(label: "LogCapture(\(captureLabel) \(label))", LogCaptureLogHandler(label: label, self)) - } + public func logger(label: String) -> Logger { + self.captureLabel = label + return Logger(label: "LogCapture(\(label))", LogCaptureLogHandler(label: label, self)) } func append(_ log: CapturedLogMessage) { @@ -105,23 +103,18 @@ extension LogCapture { public func printLogs() { for log in self.logs { var metadataString: String = "" - var label = "[/?]" + var actorPath: String = "" if var metadata = log.metadata { - if let labelMeta = metadata.removeValue(forKey: "label") { - switch labelMeta { - case .string(let l): - label = "[\(l)]" - case .stringConvertible(let c): - label = "[\(c)]" - default: - label = "[/?]" - } + if let path = metadata.removeValue(forKey: "actor/path") { + actorPath = "[\(path)]" } if !metadata.isEmpty { metadataString = "\n// metadata:\n" for key in metadata.keys.sorted() where key != "label" { - var allString = "\n// \"\(key)\": \(metadata[key]!)" + let valueDescription = self.prettyPrint(metadata: metadata[key]!) 
+ + var allString = "\n// \"\(key)\": \(valueDescription)" if allString.contains("\n") { allString = String( allString.split(separator: "\n").map { valueLine in @@ -141,8 +134,31 @@ extension LogCapture { let date = ActorOriginLogHandler._createFormatter().string(from: log.date) let file = log.file.split(separator: "/").last ?? "" let line = log.line - print("Captured log [\(self.captureLabel)][\(date)] [\(file):\(line)]\(label) [\(log.level)] \(log.message)\(metadataString)") + print("Captured log [\(self.captureLabel)][\(date)] [\(file):\(line)]\(actorPath) [\(log.level)] \(log.message)\(metadataString)") + } + } + + internal func prettyPrint(metadata: Logger.MetadataValue) -> String { + let CONSOLE_RESET = "\u{001B}[0;0m" + let CONSOLE_BOLD = "\u{001B}[1m" + + var valueDescription = "" + switch metadata { + case .string(let string): + valueDescription = string + case .stringConvertible(let convertible as CustomPrettyStringConvertible): + valueDescription = convertible.prettyDescription + case .stringConvertible(let convertible): + valueDescription = convertible.description + case .array(let array): + valueDescription = "\n \(array.map { "\($0)" }.joined(separator: "\n "))" + case .dictionary(let metadata): + for k in metadata.keys { + valueDescription += "\(CONSOLE_BOLD)\(k)\(CONSOLE_RESET): \(self.prettyPrint(metadata: metadata[k]!))" + } } + + return valueDescription } } @@ -151,6 +167,7 @@ public struct CapturedLogMessage { let level: Logger.Level var message: Logger.Message var metadata: Logger.Metadata? 
+ let source: String let file: String let function: String let line: UInt @@ -168,7 +185,7 @@ struct LogCaptureLogHandler: LogHandler { self.capture = capture } - public func log(level: Logger.Level, message: Logger.Message, metadata: Logger.Metadata?, file: String, function: String, line: UInt) { + public func log(level: Logger.Level, message: Logger.Message, metadata: Logger.Metadata?, source: String, file: String, function: String, line: UInt) { guard self.capture.settings.filterActorPaths.contains(where: { path in self.label.starts(with: path) }) else { return // ignore this actor's logs, it was filtered out } @@ -186,7 +203,7 @@ struct LogCaptureLogHandler: LogHandler { var _metadata: Logger.Metadata = metadata ?? [:] _metadata["label"] = "\(self.label)" - self.capture.append(CapturedLogMessage(date: date, level: level, message: message, metadata: _metadata, file: file, function: function, line: line)) + self.capture.append(CapturedLogMessage(date: date, level: level, message: message, metadata: _metadata, source: source, file: file, function: function, line: line)) } public subscript(metadataKey _: String) -> Logger.Metadata.Value? { diff --git a/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift b/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift index a5bd058ef..eabf226e0 100644 --- a/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift +++ b/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift @@ -17,6 +17,7 @@ import Dispatch import DistributedActors import Files +import Logging import XPC fileprivate let _file = try! Folder(path: "/tmp").file(named: "xpc.txt") @@ -76,8 +77,8 @@ internal final class XPCServiceCellDelegate: CellDelegate master.tell(.xpcRegisterService(self.peer, myself.asAddressable())) // TODO: do we really need it? 
xpc_connection_set_event_handler(self.peer) { (xdict: xpc_object_t) in - var log = ActorLogger.make(system: system, identifier: "\(myself.address.name)") - log[metadataKey: "actorPath"] = "\(address)" + var log = system.log.logger.withSource("XPCService:\(myself.address.name)") + log[metadataKey: "actor/path"] = "\(address)" // TODO: connection id? switch xpc_get_type(xdict) { diff --git a/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift b/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift index 88e1a89c8..90fe46205 100644 --- a/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift +++ b/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift @@ -31,7 +31,7 @@ class ProcessIsolatedDocExamples { let isolated = ProcessIsolated { boot in // <1> // optionally configure nodes by changing the provided settings - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info // always create the actor system based on the provided boot settings, customized if needed return ActorSystem(settings: boot.settings) diff --git a/Tests/DistributedActorsTests/ActorRefAdapterTests.swift b/Tests/DistributedActorsTests/ActorRefAdapterTests.swift index 5c29c3cc8..9177be59d 100644 --- a/Tests/DistributedActorsTests/ActorRefAdapterTests.swift +++ b/Tests/DistributedActorsTests/ActorRefAdapterTests.swift @@ -233,9 +233,9 @@ class ActorRefAdapterTests: ActorSystemTestBase { } func test_adaptedRef_shouldDeadLetter_whenOwnerTerminated() throws { - let logCaptureHandler = LogCapture() + let logCapture = LogCapture() let system = ActorSystem("\(type(of: self))-2") { settings in - settings.logging.overrideLoggerFactory = logCaptureHandler.loggerFactory(captureLabel: settings.cluster.node.systemName) + settings.logging.logger = logCapture.logger(label: settings.cluster.node.systemName) } defer { system.shutdown().wait() } @@ -268,7 +268,7 @@ class ActorRefAdapterTests: 
ActorSystemTestBase { let expectedLine = #line - 1 let expectedFile = #file - try logCaptureHandler.shouldContain( + try logCapture.shouldContain( message: "*was not delivered to [*", at: .info, expectedFile: expectedFile, expectedLine: expectedLine ) diff --git a/Tests/DistributedActorsTests/BehaviorTests.swift b/Tests/DistributedActorsTests/BehaviorTests.swift index 25af42cfa..66dfc0f47 100644 --- a/Tests/DistributedActorsTests/BehaviorTests.swift +++ b/Tests/DistributedActorsTests/BehaviorTests.swift @@ -347,7 +347,7 @@ final class BehaviorTests: ActorSystemTestBase { func test_makeAsynchronousCallback_shouldPrintNicelyIfThrewInsideClosure() throws { let capture = LogCapture(settings: .init()) let system = ActorSystem("CallbackCrash") { settings in - settings.logging.overrideLoggerFactory = .some(capture.loggerFactory(captureLabel: "mock")) + settings.logging.logger = capture.logger(label: "mock") } defer { system.shutdown().wait() diff --git a/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift b/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift index d2af30ecd..42d08bd7d 100644 --- a/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift +++ b/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift @@ -312,6 +312,10 @@ final class OpLogClusterReceptionistClusteredTests: ClusteredNodesTestBase { // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Multi node / streaming + override var alwaysPrintCaptureLogs: Bool { + true + } + func test_clusterReceptionist_shouldStreamAllRegisteredActorsInChunks() throws { try shouldNotThrow { let (first, second) = setUpPair { diff --git a/Tests/DistributedActorsTests/DeadLetterTests.swift b/Tests/DistributedActorsTests/DeadLetterTests.swift index 26a40d951..bb142040f 100644 --- 
a/Tests/DistributedActorsTests/DeadLetterTests.swift +++ b/Tests/DistributedActorsTests/DeadLetterTests.swift @@ -22,7 +22,7 @@ final class DeadLetterTests: ActorSystemTestBase { // MARK: DeadLetterOffice tests func test_deadLetters_logWithSourcePosition() throws { - let log = self.logCapture.loggerFactory(captureLabel: "/dead/letters")("/dead/letters") + let log = self.logCapture.logger(label: "/dead/letters") let address = try ActorAddress(path: ActorPath._user.appending("someone"), incarnation: .random()) let office = DeadLetterOffice(log, address: address, system: system) From 4b87412d2bc860a6bc15d9a7c8b64dd25b332b70 Mon Sep 17 00:00:00 2001 From: Konrad `ktoso` Malawski Date: Wed, 10 Jun 2020 15:30:07 +0900 Subject: [PATCH 2/2] =logging remove LoggerWithSource, keep allowing config via passing in logger --- Package.swift | 3 +- Sources/DistributedActors/ActorLogging.swift | 4 +-- Sources/DistributedActors/ActorSystem.swift | 4 +-- .../ActorSystemSettings.swift | 14 ++++---- .../Transport/TransportPipelines.swift | 34 ++++++++++--------- Sources/DistributedActors/DeadLetters.swift | 8 ++--- .../Serialization/Serialization+Context.swift | 4 +-- .../Serialization/Serialization.swift | 5 +-- .../DistributedActorsTestKit/LogCapture.swift | 11 +++--- .../ActorCell/XPCServiceCellDelegate.swift | 2 +- .../ActorSingletonPluginClusteredTests.swift | 4 --- .../ActorLeakingTests.swift | 4 +-- ...LogClusterReceptionistClusteredTests.swift | 4 --- 13 files changed, 48 insertions(+), 53 deletions(-) diff --git a/Package.swift b/Package.swift index 926254870..2957a053e 100644 --- a/Package.swift +++ b/Package.swift @@ -258,8 +258,7 @@ var dependencies: [Package.Dependency] = [ // ~~~ SSWG APIs ~~~ - // .package(url: "https://github.com/apple/swift-log.git", from: "1.0.0"), - .package(url: "https://github.com/weissi/swift-log.git", .branch("jw-source")), + .package(url: "https://github.com/apple/swift-log.git", from: "1.0.0"), // swift-metrics 1.x and 2.x are almost API 
compatible, so most clients should use .package(url: "https://github.com/apple/swift-metrics.git", "1.0.0" ..< "3.0.0"), diff --git a/Sources/DistributedActors/ActorLogging.swift b/Sources/DistributedActors/ActorLogging.swift index 5931203e6..f2d5c30e9 100644 --- a/Sources/DistributedActors/ActorLogging.swift +++ b/Sources/DistributedActors/ActorLogging.swift @@ -74,7 +74,7 @@ internal final class LoggingContext { /// The preferred way of obtaining a logger for an actor or system is `context.log` or `system.log`, rather than creating new ones. public struct ActorLogger { public static func make(context: ActorContext) -> Logger { - var log = context.system.log.logger + var log = context.system.log log[metadataKey: "actor/path"] = Logger.MetadataValue.stringConvertible(context.path) return log } @@ -116,7 +116,7 @@ public struct ActorOriginLogHandler: LogHandler { public init(_ system: ActorSystem, identifier: String? = nil) { self.init( LoggingContext( - logger: system.log.logger, + logger: system.log, identifier: identifier ?? system.name, useBuiltInFormatter: system.settings.logging.useBuiltInFormatter, dispatcher: { () in _hackyPThreadThreadId() } diff --git a/Sources/DistributedActors/ActorSystem.swift b/Sources/DistributedActors/ActorSystem.swift index 56bef7022..3cebae3de 100644 --- a/Sources/DistributedActors/ActorSystem.swift +++ b/Sources/DistributedActors/ActorSystem.swift @@ -109,7 +109,7 @@ public final class ActorSystem { // MARK: Logging /// Root logger of this actor system, as configured in `LoggingSettings`. 
- public let log: LoggerWithSource + public let log: Logger // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Shutdown @@ -190,7 +190,7 @@ public final class ActorSystem { rootLogger[metadataKey: "actor/node"] = "\(settings.cluster.uniqueBindNode)" } rootLogger[metadataKey: "actor/nodeName"] = "\(self.name)" - self.log = rootLogger.withSource("ActorSystem(\(self.name))") + self.log = rootLogger // vvv~~~~~~~~~~~~~~~~~~~ all properties initialized, self can be shared ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~vvv // diff --git a/Sources/DistributedActors/ActorSystemSettings.swift b/Sources/DistributedActors/ActorSystemSettings.swift index cd20e6fdf..eca4ddd7e 100644 --- a/Sources/DistributedActors/ActorSystemSettings.swift +++ b/Sources/DistributedActors/ActorSystemSettings.swift @@ -64,8 +64,12 @@ extension Array where Element == ActorTransport { public struct LoggingSettings { public static let `default` = LoggingSettings() - /// At what level should the library actually log and print logs // TODO: We'd want to replace this by proper log handlers which allow config by labels - public var logLevel: Logger.Level { // TODO: deprecate + /// Customize the default log level of the `system.log` (and `context.log`) loggers. + /// + /// This modifies the current "base" logger which is `LoggingSettings.logger`, + /// it is also possible to change the logger itself, e.g. if you care about reusing a specific logger + /// or need to pass metadata through all loggers in the actor system. + public var logLevel: Logger.Level { get { self.logger.logLevel } @@ -77,13 +81,9 @@ public struct LoggingSettings { public var logger: Logger = LoggingSettings.makeDefaultLogger() static func makeDefaultLogger() -> Logger { - Logger(label: "<>") + Logger(label: "<>") // replaced by specific system name during startup } -// /// Optionally override Logger that shall be offered to actors and the system. 
-// /// This is used instead of globally configured `Logging.Logger()` factories by the actor system. -// public var overrideLoggerFactory: ((String) -> Logger)? - // TODO: hope to remove this once a StdOutLogHandler lands that has formatting support; // logs are hard to follow with not consistent order of metadata etc (like system address etc). public var useBuiltInFormatter: Bool = true diff --git a/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift b/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift index 0ea91f69c..f51a5bd49 100644 --- a/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift +++ b/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift @@ -35,11 +35,11 @@ private final class InitiatingHandshakeHandler: ChannelInboundHandler, Removable typealias InboundOut = ByteBuffer typealias OutboundOut = ByteBuffer - private let log: LoggerWithSource + private let log: Logger private let handshakeOffer: Wire.HandshakeOffer private let cluster: ClusterShell.Ref - init(log: LoggerWithSource, handshakeOffer: Wire.HandshakeOffer, cluster: ClusterShell.Ref) { + init(log: Logger, handshakeOffer: Wire.HandshakeOffer, cluster: ClusterShell.Ref) { self.log = log self.handshakeOffer = handshakeOffer self.cluster = cluster @@ -112,11 +112,11 @@ final class ReceivingHandshakeHandler: ChannelInboundHandler, RemovableChannelHa typealias InboundIn = ByteBuffer typealias InboundOut = Never - private let log: LoggerWithSource + private let log: Logger private let cluster: ClusterShell.Ref private let localNode: UniqueNode - init(log: LoggerWithSource, cluster: ClusterShell.Ref, localNode: UniqueNode) { + init(log: Logger, cluster: ClusterShell.Ref, localNode: UniqueNode) { self.log = log self.cluster = cluster self.localNode = localNode @@ -251,9 +251,9 @@ private final class WireEnvelopeHandler: ChannelDuplexHandler { typealias InboundIn = ByteBuffer typealias InboundOut = Wire.Envelope - let log: 
LoggerWithSource + let log: Logger - init(log: LoggerWithSource) { + init(log: Logger) { self.log = log } @@ -290,10 +290,10 @@ final class OutboundSerializationHandler: ChannelOutboundHandler { typealias OutboundIn = TransportEnvelope typealias OutboundOut = Wire.Envelope - let log: LoggerWithSource + let log: Logger let serializationPool: SerializationPool - init(log: LoggerWithSource, serializationPool: SerializationPool) { + init(log: Logger, serializationPool: SerializationPool) { self.log = log self.serializationPool = serializationPool } @@ -353,7 +353,7 @@ internal final class SystemMessageRedeliveryHandler: ChannelDuplexHandler { typealias InboundIn = Wire.Envelope // we handle deserialization in case it is about a system message (since message == system message envelope) typealias InboundOut = Wire.Envelope // we pass along user messages; all system messages are handled in-place here (ack, nack), or delivered directly to the recipient from this handler - private let log: LoggerWithSource + private let log: Logger private let system: ActorSystem private let clusterShell: ClusterShell.Ref @@ -366,7 +366,7 @@ internal final class SystemMessageRedeliveryHandler: ChannelDuplexHandler { private var redeliveryScheduled: Scheduled? 
init( - log: LoggerWithSource, + log: Logger, system: ActorSystem, cluster: ClusterShell.Ref, serializationPool: SerializationPool, @@ -602,12 +602,12 @@ private final class UserMessageHandler: ChannelInboundHandler { typealias InboundIn = Wire.Envelope typealias InboundOut = Never // we terminate here, by sending messages off to local actors - let log: LoggerWithSource + let log: Logger let system: ActorSystem let serializationPool: SerializationPool - init(log: LoggerWithSource, system: ActorSystem, serializationPool: SerializationPool) { + init(log: Logger, system: ActorSystem, serializationPool: SerializationPool) { self.log = log self.system = system self.serializationPool = serializationPool @@ -649,9 +649,9 @@ private final class DumpRawBytesDebugHandler: ChannelInboundHandler { typealias InboundIn = ByteBuffer let role: HandlerRole - var log: LoggerWithSource + var log: Logger - init(role: HandlerRole, log: LoggerWithSource) { + init(role: HandlerRole, log: Logger) { self.role = role self.log = log } @@ -708,7 +708,8 @@ extension ClusterShell { } } - let log = LoggerWithSource(system.log.logger, source: "/system/transport.server") + var log = system.log + log[metadataKey: "actor/path"] = "/system/transport.server" // TODO: this is a fake path, we could use log source: here if it gets merged // FIXME: PASS IN FROM ASSOCIATION SINCE MUST SURVIVE CONNECTIONS! // TODO: tests about killing connections the hard way let outboundSysMsgs = OutboundSystemMessageRedelivery(settings: .default) @@ -771,7 +772,8 @@ extension ClusterShell { } } - let log = LoggerWithSource(system.log.logger, source: "/system/transport.client") + var log = system.log + log[metadataKey: "actor/path"] = "/system/transport.client" // TODO: this is a fake path, we could use log source: here if it gets merged // FIXME: PASS IN FROM ASSOCIATION SINCE MUST SURVIVE CONNECTIONS !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
let outboundSysMsgs = OutboundSystemMessageRedelivery(settings: .default) diff --git a/Sources/DistributedActors/DeadLetters.swift b/Sources/DistributedActors/DeadLetters.swift index 2f23ab115..4c6a64f88 100644 --- a/Sources/DistributedActors/DeadLetters.swift +++ b/Sources/DistributedActors/DeadLetters.swift @@ -71,7 +71,7 @@ extension ActorSystem { /// /// We don't apply the special /dead path, as to not complicate diagnosing who actually terminated or if we were accidentally sent /// a remote actor ref that was dead(!) - return ActorRef(.deadLetters(.init(self.log.logger, address: recipient, system: self))).adapt(from: Message.self) + return ActorRef(.deadLetters(.init(self.log, address: recipient, system: self))).adapt(from: Message.self) } let localRecipient: ActorAddress @@ -82,7 +82,7 @@ extension ActorSystem { // drop the node from the address; and prepend it as known-to-be-dead localRecipient = ActorAddress(path: ActorPath._dead.appending(segments: recipient.segments), incarnation: recipient.incarnation) } - return ActorRef(.deadLetters(.init(self.log.logger, address: localRecipient, system: self))).adapt(from: Message.self) + return ActorRef(.deadLetters(.init(self.log, address: localRecipient, system: self))).adapt(from: Message.self) } /// Anonymous `/dead/letters` reference, which may be used for messages which have no logical recipient. @@ -140,11 +140,11 @@ extension ActorSystem { /// but not for anything more -- users shall assume that their communication is correct and only debug why a dead reference appeared if it indeed does happen. public final class DeadLetterOffice { let _address: ActorAddress - let log: LoggerWithSource + let log: Logger weak var system: ActorSystem? init(_ log: Logger, address: ActorAddress, system: ActorSystem?) 
{ - self.log = log.withSource("DeadLetters") + self.log = log self._address = address self.system = system } diff --git a/Sources/DistributedActors/Serialization/Serialization+Context.swift b/Sources/DistributedActors/Serialization/Serialization+Context.swift index f50b6d2f6..bb333de79 100644 --- a/Sources/DistributedActors/Serialization/Serialization+Context.swift +++ b/Sources/DistributedActors/Serialization/Serialization+Context.swift @@ -25,7 +25,7 @@ extension Serialization { /// /// Context MAY be accessed concurrently be encoders/decoders. public struct Context { - public let log: LoggerWithSource + public let log: Logger public let system: ActorSystem public var serialization: Serialization { @@ -40,7 +40,7 @@ extension Serialization { self.system.cluster.node } - internal init(log: LoggerWithSource, system: ActorSystem, allocator: NIO.ByteBufferAllocator) { + internal init(log: Logger, system: ActorSystem, allocator: NIO.ByteBufferAllocator) { self.log = log self.system = system self.allocator = allocator diff --git a/Sources/DistributedActors/Serialization/Serialization.swift b/Sources/DistributedActors/Serialization/Serialization.swift index 6b74b6f3d..89a0a54f0 100644 --- a/Sources/DistributedActors/Serialization/Serialization.swift +++ b/Sources/DistributedActors/Serialization/Serialization.swift @@ -35,7 +35,7 @@ import Foundation // for Codable /// for any kind of Message type that is possible to be received by any spawned `Actor`, sub-receive, `Gossip` instance etc. 
/// public class Serialization { - private let log: LoggerWithSource + private let log: Logger internal let settings: Serialization.Settings @usableFromInline internal let metrics: ActorSystemMetrics // TODO: rather, do this via instrumentation @@ -180,9 +180,10 @@ public class Serialization { self.allocator = self.settings.allocator - var log = system.log.logger.withSource("serialization") + var log = system.log // TODO: Dry up setting this metadata log[metadataKey: "node"] = .stringConvertible(systemSettings.cluster.uniqueBindNode) + log[metadataKey: "actor/path"] = "/system/serialization" // TODO: this is a fake path, we could use log source: here if it gets merged log.logLevel = systemSettings.logging.logLevel self.log = log diff --git a/Sources/DistributedActorsTestKit/LogCapture.swift b/Sources/DistributedActorsTestKit/LogCapture.swift index 79c87e114..61fe9a8d8 100644 --- a/Sources/DistributedActorsTestKit/LogCapture.swift +++ b/Sources/DistributedActorsTestKit/LogCapture.swift @@ -109,10 +109,12 @@ extension LogCapture { actorPath = "[\(path)]" } + metadata.removeValue(forKey: "label") if !metadata.isEmpty { metadataString = "\n// metadata:\n" - for key in metadata.keys.sorted() where key != "label" { - let valueDescription = self.prettyPrint(metadata: metadata[key]!) + for key in metadata.keys.sorted() { + let value: Logger.MetadataValue = metadata[key]! + let valueDescription = self.prettyPrint(metadata: value) var allString = "\n// \"\(key)\": \(valueDescription)" if allString.contains("\n") { @@ -167,7 +169,6 @@ public struct CapturedLogMessage { let level: Logger.Level var message: Logger.Message var metadata: Logger.Metadata? 
- let source: String let file: String let function: String let line: UInt @@ -185,7 +186,7 @@ struct LogCaptureLogHandler: LogHandler { self.capture = capture } - public func log(level: Logger.Level, message: Logger.Message, metadata: Logger.Metadata?, source: String, file: String, function: String, line: UInt) { + public func log(level: Logger.Level, message: Logger.Message, metadata: Logger.Metadata?, file: String, function: String, line: UInt) { guard self.capture.settings.filterActorPaths.contains(where: { path in self.label.starts(with: path) }) else { return // ignore this actor's logs, it was filtered out } @@ -203,7 +204,7 @@ struct LogCaptureLogHandler: LogHandler { var _metadata: Logger.Metadata = metadata ?? [:] _metadata["label"] = "\(self.label)" - self.capture.append(CapturedLogMessage(date: date, level: level, message: message, metadata: _metadata, source: source, file: file, function: function, line: line)) + self.capture.append(CapturedLogMessage(date: date, level: level, message: message, metadata: _metadata, file: file, function: function, line: line)) } public subscript(metadataKey _: String) -> Logger.Metadata.Value? { diff --git a/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift b/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift index eabf226e0..450f8fd34 100644 --- a/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift +++ b/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift @@ -77,7 +77,7 @@ internal final class XPCServiceCellDelegate: CellDelegate master.tell(.xpcRegisterService(self.peer, myself.asAddressable())) // TODO: do we really need it? xpc_connection_set_event_handler(self.peer) { (xdict: xpc_object_t) in - var log = system.log.logger.withSource("XPCService:\(myself.address.name)") + var log = system.log log[metadataKey: "actor/path"] = "\(address)" // TODO: connection id? 
diff --git a/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift b/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift index 464028bb0..b512a40aa 100644 --- a/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift +++ b/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift @@ -26,10 +26,6 @@ final class ActorSingletonPluginClusteredTests: ClusteredNodesTestBase { ] } -// override var alwaysPrintCaptureLogs: Bool { -// true -// } - func test_singletonByClusterLeadership_happyPath() throws { try shouldNotThrow { var singletonSettings = ActorSingletonSettings(name: GreeterSingleton.name) diff --git a/Tests/DistributedActorsTests/ActorLeakingTests.swift b/Tests/DistributedActorsTests/ActorLeakingTests.swift index d11bf71c0..6955412c5 100644 --- a/Tests/DistributedActorsTests/ActorLeakingTests.swift +++ b/Tests/DistributedActorsTests/ActorLeakingTests.swift @@ -278,7 +278,7 @@ final class ActorLeakingTests: ActorSystemTestBase { let initialSystemCount = ActorSystem.actorSystemInitCounter.load() var system: ActorSystem? = ActorSystem("Test") { settings in - settings.logging.defaultLevel = .info + settings.logging.logLevel = .info } _ = try system?.spawn("logging", of: String.self, .setup { context in context.log.trace("Not going to be logged") @@ -298,7 +298,7 @@ final class ActorLeakingTests: ActorSystemTestBase { let initialSystemCount = ActorSystem.actorSystemInitCounter.load() var system: ActorSystem? 
= ActorSystem("Test") { settings in - settings.logging.defaultLevel = .info + settings.logging.logLevel = .info } _ = try system?.spawn("logging", of: String.self, .setup { context in context.log.warning("Not going to be logged") diff --git a/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift b/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift index 42d08bd7d..d2af30ecd 100644 --- a/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift +++ b/Tests/DistributedActorsTests/Cluster/Reception/OpLogClusterReceptionistClusteredTests.swift @@ -312,10 +312,6 @@ final class OpLogClusterReceptionistClusteredTests: ClusteredNodesTestBase { // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Multi node / streaming - override var alwaysPrintCaptureLogs: Bool { - true - } - func test_clusterReceptionist_shouldStreamAllRegisteredActorsInChunks() throws { try shouldNotThrow { let (first, second) = setUpPair {