diff --git a/.swiftformat b/.swiftformat index 8d89883f7..03dba801a 100644 --- a/.swiftformat +++ b/.swiftformat @@ -1,11 +1,12 @@ # file options ---swiftversion 5.1 +--swiftversion 5.2 --exclude .build --exclude "**/*.pb.swift" --exclude "**/*+GenActor.swift" --exclude "**/*+GenCodable.swift" --exclude "**/*+XPCProtocolStub.swift" +--disable redundantParens # https://github.com/nicklockwood/SwiftFormat/issues/638 # format options diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift index fd4f6c694..41f73354b 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_backoffRespawn/main.swift @@ -21,7 +21,7 @@ import Glibc import DistributedActors let isolated = ProcessIsolated { boot in - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info boot.runOn(role: .servant) { boot.settings.failure.onGuardianFailure = .systemExit(-1) } diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift index 70802948c..2e1a19c49 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_escalatingWorkers/main.swift @@ -21,7 +21,7 @@ import Glibc import DistributedActors let isolated = ProcessIsolated { boot in - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info boot.runOn(role: .servant) { boot.settings.failure.onGuardianFailure = .systemExit(-1) } diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift 
index c52e46c17..61c2de3ec 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_noLeaking/main.swift @@ -22,7 +22,7 @@ import DistributedActors let isolated = ProcessIsolated { boot in // create actor system (for each process this will run a new since this is the beginning of the program) - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info return ActorSystem(settings: boot.settings) } diff --git a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift index f65b06382..9ff80af19 100644 --- a/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift +++ b/IntegrationTests/tests_02_process_isolated/it_ProcessIsolated_respawnsServants/main.swift @@ -22,7 +22,7 @@ import DistributedActors let isolated = ProcessIsolated { boot in // create a new actor system (for each process this will run a new since this is the beginning of the program) - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info return ActorSystem(settings: boot.settings) } diff --git a/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift b/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift index 22ebfea98..b158d6538 100644 --- a/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift +++ b/IntegrationTests/tests_04_cluster/it_Clustered_swim_suspension_reachability/main.swift @@ -26,7 +26,7 @@ guard args.count >= 1 else { } let system = ActorSystem("System") { settings in - settings.logging.defaultLevel = .info + settings.logging.logLevel = .info settings.cluster.enabled = true settings.cluster.bindPort = Int(args[0])! 
diff --git a/Sources/DistributedActors/ActorLogging.swift b/Sources/DistributedActors/ActorLogging.swift index 9670dcee2..f2d5c30e9 100644 --- a/Sources/DistributedActors/ActorLogging.swift +++ b/Sources/DistributedActors/ActorLogging.swift @@ -16,14 +16,18 @@ import DistributedActorsConcurrencyHelpers import Foundation import Logging +/// :nodoc: /// - Warning: NOT thread safe! Only use from Actors, properly synchronize access, or create multiple instances for each execution context. -public class LoggingContext { +// TODO: deprecate, we should not need this explicit type +internal final class LoggingContext { let identifier: String // TODO: want to eventually not have this; also move to more structured logging perhaps... /// If `true` the built-in "pretty" formatter should be used, rather than passing verbatim to underlying `LogHandler` let useBuiltInFormatter: Bool + let logger: Logger + @usableFromInline internal var _storage: Logger.Metadata = [:] @@ -36,7 +40,8 @@ public class LoggingContext { } } - public init(identifier: String, useBuiltInFormatter: Bool, dispatcher: (() -> String)?) { + public init(logger: Logger, identifier: String, useBuiltInFormatter: Bool, dispatcher: (() -> String)?) { + self.logger = logger self.identifier = identifier self.useBuiltInFormatter = useBuiltInFormatter if let makeDispatcherName = dispatcher { @@ -69,40 +74,8 @@ public class LoggingContext { /// The preferred way of obtaining a logger for an actor or system is `context.log` or `system.log`, rather than creating new ones. 
public struct ActorLogger { public static func make(context: ActorContext) -> Logger { - if let overriddenLoggerFactory = context.system.settings.logging.overrideLoggerFactory { return overriddenLoggerFactory("\(context.path)") - } - - var proxyHandler = ActorOriginLogHandler(context) - proxyHandler.metadata["actorPath"] = "\(context.path)" - if context.system.settings.cluster.enabled { - proxyHandler.metadata["node"] = "\(context.system.settings.cluster.node)" - } else { - proxyHandler.metadata["nodeName"] = "\(context.system.name)" - } - - var log = Logger(label: "\(context.path)", factory: { _ in proxyHandler }) - log.logLevel = context.system.settings.logging.defaultLevel - return log - } - - public static func make(system: ActorSystem, identifier: String? = nil) -> Logger { - if let overriddenLoggerFactory = system.settings.logging.overrideLoggerFactory { - return overriddenLoggerFactory(identifier ?? system.name) - } - - // we need to add our own storage, and can't do so to Logger since it is a struct... - // so we need to make such "proxy log handler", that does out actor specific things. - var proxyHandler = ActorOriginLogHandler(system) - if system.settings.cluster.enabled { - proxyHandler.metadata["node"] = .lazyStringConvertible { () in - system.settings.cluster.node - } - } else { - proxyHandler.metadata["nodeName"] = .string(system.name) - } - - var log = Logger(label: identifier ?? 
system.name, factory: { _ in proxyHandler }) - log.logLevel = system.settings.logging.defaultLevel + var log = context.system.log + log[metadataKey: "actor/path"] = Logger.MetadataValue.stringConvertible(context.path) return log } } @@ -119,19 +92,20 @@ public struct ActorOriginLogHandler: LogHandler { private let context: LoggingContext - private var loggingSystemSelectedLogger: Logger + private var targetLogger: Logger - public init(_ context: LoggingContext) { + internal init(_ context: LoggingContext) { self.context = context - self.loggingSystemSelectedLogger = Logger(label: context.identifier) - self.loggingSystemSelectedLogger.logLevel = self.logLevel + self.targetLogger = context.logger + self.targetLogger.logLevel = self.logLevel } - public init(_ context: ActorContext) { + internal init(_ context: ActorContext) { let dispatcherName = context.props.dispatcher.name self.init( LoggingContext( + logger: context.log, identifier: context.path.description, useBuiltInFormatter: context.system.settings.logging.useBuiltInFormatter, dispatcher: { () in dispatcherName } // beware of closing over the context here (!) @@ -142,6 +116,7 @@ public struct ActorOriginLogHandler: LogHandler { public init(_ system: ActorSystem, identifier: String? = nil) { self.init( LoggingContext( + logger: system.log, identifier: identifier ?? system.name, useBuiltInFormatter: system.settings.logging.useBuiltInFormatter, dispatcher: { () in _hackyPThreadThreadId() } @@ -169,70 +144,74 @@ public struct ActorOriginLogHandler: LogHandler { internal func invokeConfiguredLoggingInfra(_ logMessage: LogMessage) { // TODO: here we can either log... or dispatch to actor... or invoke Logging. 
etc - if self.context.useBuiltInFormatter { - var l = logMessage + guard self.context.useBuiltInFormatter else { + self.targetLogger.log(level: logMessage.level, logMessage.message, metadata: self.metadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + return + } - let dispatcherPart: String - if let d = l.effectiveMetadata?.removeValue(forKey: "dispatcher") { - dispatcherPart = "[\(d)]" - } else { - dispatcherPart = "" - } - let actorPathPart: String - if let d = l.effectiveMetadata?.removeValue(forKey: "actorPath") { - actorPathPart = "[\(d)]" - } else { - actorPathPart = "" - } + var l = logMessage - let actorSystemIdentity: String - if let d = l.effectiveMetadata?.removeValue(forKey: "node") { - actorSystemIdentity = "[\(d)]" + let dispatcherPart: String + if let d = l.effectiveMetadata?.removeValue(forKey: "dispatcher") { + dispatcherPart = "[\(d)]" + } else { + dispatcherPart = "" + } + + let actorPathPart: String + if let d = l.effectiveMetadata?.removeValue(forKey: "actor/path") { + actorPathPart = "[\(d)]" + } else { + actorPathPart = "" + } + + let actorSystemIdentity: String + if let d = l.effectiveMetadata?.removeValue(forKey: "actor/node") { + actorSystemIdentity = "[\(d)]" + } else { + if let name = l.effectiveMetadata?.removeValue(forKey: "actor/nodeName") { + actorSystemIdentity = "[\(name)]" } else { - if let name = l.effectiveMetadata?.removeValue(forKey: "nodeName") { - actorSystemIdentity = "[\(name)]" - } else { - actorSystemIdentity = "" - } + actorSystemIdentity = "" } + } + + var msg = "" + msg += "\(actorSystemIdentity)" + msg += "[\(l.file.description.split(separator: "/").last ?? 
""):\(l.line)]" // we only print "file" rather than full path + msg += "\(dispatcherPart)" + msg += "\(actorPathPart)" + msg += " \(l.message)" + + guard ProcessInfo.processInfo.environment["SACT_PRETTY_LOG"] != nil else { + // no "pretty" logging + self.targetLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: l.effectiveMetadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + return + } - var msg = "" - msg += "\(actorSystemIdentity)" - msg += "[\(l.file.description.split(separator: "/").last ?? ""):\(l.line)]" - msg += "\(dispatcherPart)" - msg += "\(actorPathPart)" - msg += " \(l.message)" - - if ProcessInfo.processInfo.environment["SACT_PRETTY_LOG"] != nil { - if let metadata = l.effectiveMetadata, !metadata.isEmpty { - var metadataString = "\n// metadata:\n" - for key in metadata.keys.sorted() where key != "label" { - var allString = "\n// \"\(key)\": \(metadata[key]!)" - if allString.contains("\n") { - allString = String( - allString.split(separator: "\n").map { valueLine in - if valueLine.starts(with: "// ") { - return "\(valueLine)\n" - } else { - return "// \(valueLine)\n" - } - }.joined(separator: "") - ) - } - metadataString.append(allString) - } - metadataString = String(metadataString.dropLast(1)) - - msg += metadataString + // "pretty" logging + if let metadata = l.effectiveMetadata, !metadata.isEmpty { + var metadataString = "\n// metadata:\n" + for key in metadata.keys.sorted() where key != "label" { + var allString = "\n// \"\(key)\": \(metadata[key]!)" + if allString.contains("\n") { + allString = String( + allString.split(separator: "\n").map { valueLine in + if valueLine.starts(with: "// ") { + return "\(valueLine)\n" + } else { + return "// \(valueLine)\n" + } + }.joined(separator: "") + ) } - self.loggingSystemSelectedLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: [:], file: logMessage.file, function: logMessage.function, line: logMessage.line) - 
} else { - self.loggingSystemSelectedLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: l.effectiveMetadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + metadataString.append(allString) } + metadataString = String(metadataString.dropLast(1)) - } else { - self.loggingSystemSelectedLogger.log(level: logMessage.level, logMessage.message, metadata: self.metadata, file: logMessage.file, function: logMessage.function, line: logMessage.line) + msg += metadataString } + self.targetLogger.log(level: logMessage.level, Logger.Message(stringLiteral: msg), metadata: [:], file: logMessage.file, function: logMessage.function, line: logMessage.line) } // TODO: hope to remove this one @@ -245,15 +224,12 @@ public struct ActorOriginLogHandler: LogHandler { } } - private var _logLevel: Logger.Level = .info - public var logLevel: Logger.Level { get { - self._logLevel + self.targetLogger.logLevel } set { - self._logLevel = newValue - self.loggingSystemSelectedLogger.logLevel = newValue + self.targetLogger.logLevel = newValue } } @@ -300,8 +276,21 @@ public struct LogMessage { // MARK: Extend logging metadata storage capabilities +extension Logger.Metadata { + public static func pretty(_ value: T) -> Logger.Metadata.Value where T: CustomPrettyStringConvertible { + .string(value.prettyDescription) + } + + public static func pretty(_ value: T) -> Logger.Metadata.Value { + if let pretty = value as? 
CustomPrettyStringConvertible { + return .string(pretty.prettyDescription) + } else { + return .string("\(value)") + } + } +} + extension Optional where Wrapped == Logger.MetadataValue { - /// Delays rendering of value by boxing it in a `LazyMetadataBox` public static func lazyStringConvertible(_ makeValue: @escaping () -> CustomStringConvertible) -> Logger.Metadata.Value { .stringConvertible(LazyMetadataBox { makeValue() }) } diff --git a/Sources/DistributedActors/ActorSystem.swift b/Sources/DistributedActors/ActorSystem.swift index 1ba6642c1..3cebae3de 100644 --- a/Sources/DistributedActors/ActorSystem.swift +++ b/Sources/DistributedActors/ActorSystem.swift @@ -108,11 +108,8 @@ public final class ActorSystem { // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Logging - public var log: Logger { - var l = ActorLogger.make(system: self) - l.logLevel = self.settings.logging.defaultLevel - return l - } + /// Root logger of this actor system, as configured in `LoggingSettings`. 
+ public let log: Logger // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Shutdown @@ -188,29 +185,22 @@ public final class ActorSystem { let initializationLock = ReadWriteLock() self.lazyInitializationLock = initializationLock + var rootLogger = settings.logging.logger + if settings.cluster.enabled { + rootLogger[metadataKey: "actor/node"] = "\(settings.cluster.uniqueBindNode)" + } + rootLogger[metadataKey: "actor/nodeName"] = "\(self.name)" + self.log = rootLogger + + // vvv~~~~~~~~~~~~~~~~~~~ all properties initialized, self can be shared ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~vvv // + // serialization initializationLock.withWriterLockVoid { self._serialization = Serialization(settings: settings, system: self) } - // vvv~~~~~~~~~~~~~~~~~~~ all properties initialized, self can be shared ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~vvv - // dead letters init - let overrideLogger: Logger? = settings.logging.overrideLoggerFactory.map { f in f("\(ActorPath._deadLetters)") } - var deadLogger = overrideLogger ?? 
Logger( - label: "\(ActorPath._deadLetters)", - factory: { - let context = LoggingContext(identifier: $0, useBuiltInFormatter: settings.logging.useBuiltInFormatter, dispatcher: nil) - if settings.cluster.enabled { - context[metadataKey: "node"] = .stringConvertible(settings.cluster.uniqueBindNode) - } - context[metadataKey: "nodeName"] = .stringConvertible(name) - return ActorOriginLogHandler(context) - } - ) - deadLogger.logLevel = settings.logging.defaultLevel - - self._deadLetters = ActorRef(.deadLetters(.init(deadLogger, address: ActorAddress._deadLetters, system: self))) + self._deadLetters = ActorRef(.deadLetters(.init(rootLogger, address: ActorAddress._deadLetters, system: self))) // actor providers let localUserProvider = LocalActorRefProvider(root: Guardian(parent: theOne, name: "user", system: self)) diff --git a/Sources/DistributedActors/ActorSystemSettings.swift b/Sources/DistributedActors/ActorSystemSettings.swift index 142418c77..eca4ddd7e 100644 --- a/Sources/DistributedActors/ActorSystemSettings.swift +++ b/Sources/DistributedActors/ActorSystemSettings.swift @@ -64,12 +64,25 @@ extension Array where Element == ActorTransport { public struct LoggingSettings { public static let `default` = LoggingSettings() - /// At what level should the library actually log and print logs // TODO: We'd want to replace this by proper log handlers which allow config by labels - public var defaultLevel: Logger.Level = .info // TODO: maybe remove this? should be up to logging library to configure for us as well + /// Customize the default log level of the `system.log` (and `context.log`) loggers. + /// + /// This this modifies the current "base" logger which is `LoggingSettings.logger`, + /// it is also possible to change the logger itself, e.g. if you care about reusing a specific logger + /// or need to pass metadata through all loggers in the actor system. 
+ public var logLevel: Logger.Level { + get { + self.logger.logLevel + } + set { + self.logger.logLevel = newValue + } + } - /// Optionally override Logger that shall be offered to actors and the system. - /// This is used instead of globally configured `Logging.Logger()` factories by the actor system. - public var overrideLoggerFactory: ((String) -> Logger)? + public var logger: Logger = LoggingSettings.makeDefaultLogger() + + static func makeDefaultLogger() -> Logger { + Logger(label: "<>") // replaced by specific system name during startup + } // TODO: hope to remove this once a StdOutLogHandler lands that has formatting support; // logs are hard to follow with not consistent order of metadata etc (like system address etc). diff --git a/Sources/DistributedActors/Cluster/ClusterShell.swift b/Sources/DistributedActors/Cluster/ClusterShell.swift index 1273baba6..d5b58b2ce 100644 --- a/Sources/DistributedActors/Cluster/ClusterShell.swift +++ b/Sources/DistributedActors/Cluster/ClusterShell.swift @@ -410,26 +410,6 @@ extension ClusterShell { props: ._wellKnown ) -// let gossipControl = try GossipShell.start( -// context, name: "\(ActorAddress._clusterGossip.name)", -// of: Cluster.Gossip.self, ofMetadata: Cluster.Gossip.SeenTable.self, -// props: ._wellKnown, -// settings: .init( -// gossipInterval: .seconds(1), -// // TODO make a protocol to capture all those behaviors GossipLogic / GossipInstance -// onGossipReceived: { identifier, payload, _ in -// assert(identifier.gossipIdentifier == ClusterShell.gossipID.gossipIdentifier, "Received gossip with unexpected identifier [\(identifier)], expected: \(ClusterShell.gossipID)") -// // note that this is on the gossiper's thread, the only thing we can do here is to forward the message -// context.myself.tell(.gossipFromGossiper(payload)) -// }, -// onGossipRound: { identity, envelope in -// // OMG need futures so badly...! 
-// // we need to check with the actor what to do about this gossip round technically speaking -// envelope -// } -// ) -// ) - let state = ClusterShellState( settings: clusterSettings, channel: chan, diff --git a/Sources/DistributedActors/Cluster/ClusterShellState.swift b/Sources/DistributedActors/Cluster/ClusterShellState.swift index 0b929905f..186c6767e 100644 --- a/Sources/DistributedActors/Cluster/ClusterShellState.swift +++ b/Sources/DistributedActors/Cluster/ClusterShellState.swift @@ -19,12 +19,10 @@ import NIO // ==== ---------------------------------------------------------------------------------------------------------------- // MARK: Shell State -// TODO: we hopefully will rather than this, end up with specialized protocols depending on what we need to expose, -// and then types may require those specific capabilities from the shell; e.g. scheduling things or similar. internal protocol ReadOnlyClusterState { var log: Logger { get } var allocator: ByteBufferAllocator { get } - var eventLoopGroup: EventLoopGroup { get } // TODO: or expose the MultiThreaded one...? + var eventLoopGroup: EventLoopGroup { get } /// Base backoff strategy to use in handshake retries // TODO: move it around somewhere so only handshake cares? 
var handshakeBackoff: BackoffStrategy { get } @@ -83,7 +81,6 @@ internal struct ClusterShellState: ReadOnlyClusterState { self._latestGossip = next } self.gossipControl.update(payload: self._latestGossip) -// self.gossipControl.update() } } @@ -96,7 +93,6 @@ internal struct ClusterShellState: ReadOnlyClusterState { } } -// init(settings: ClusterSettings, channel: Channel, events: EventStream, gossipControl: GossipControl, log: Logger) { init(settings: ClusterSettings, channel: Channel, events: EventStream, gossipControl: ConvergentGossipControl, log: Logger) { self.log = log self.settings = settings diff --git a/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift b/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift index 11417b3ed..71e3d970a 100644 --- a/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift +++ b/Sources/DistributedActors/Cluster/SWIM/SWIMInstance+Logging.swift @@ -21,7 +21,7 @@ extension SWIM.Instance { /// While the SWIM.Instance is not meant to be logging by itself, it does offer metadata for loggers to use. 
var metadata: Logger.Metadata { [ - "swim/membersToPing": "\(self.membersToPing)", + "swim/membersToPing": Logger.Metadata.Value.array(self.membersToPing.map { "\($0)" }), "swim/protocolPeriod": "\(self.protocolPeriod)", "swim/timeoutSuspectsBeforePeriodMax": "\(self.timeoutSuspectsBeforePeriodMax)", "swim/timeoutSuspectsBeforePeriodMin": "\(self.timeoutSuspectsBeforePeriodMin)", diff --git a/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift b/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift index 54b08ec0f..f51a5bd49 100644 --- a/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift +++ b/Sources/DistributedActors/Cluster/Transport/TransportPipelines.swift @@ -708,7 +708,8 @@ extension ClusterShell { } } - let log = ActorLogger.make(system: system, identifier: "/system/transport.server") + var log = system.log + log[metadataKey: "actor/path"] = "/system/transport.server" // TODO: this is a fake path, we could use log source: here if it gets merged // FIXME: PASS IN FROM ASSOCIATION SINCE MUST SURVIVE CONNECTIONS! // TODO: tests about killing connections the hard way let outboundSysMsgs = OutboundSystemMessageRedelivery(settings: .default) @@ -771,7 +772,8 @@ extension ClusterShell { } } - let log = ActorLogger.make(system: system, identifier: "/system/transport.client") + var log = system.log + log[metadataKey: "actor/path"] = "/system/transport.client" // TODO: this is a fake path, we could use log source: here if it gets merged // FIXME: PASS IN FROM ASSOCIATION SINCE MUST SURVIVE CONNECTIONS !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
let outboundSysMsgs = OutboundSystemMessageRedelivery(settings: .default) diff --git a/Sources/DistributedActors/DeadLetters.swift b/Sources/DistributedActors/DeadLetters.swift index c28446a72..4c6a64f88 100644 --- a/Sources/DistributedActors/DeadLetters.swift +++ b/Sources/DistributedActors/DeadLetters.swift @@ -178,10 +178,7 @@ public final class DeadLetterOffice { // as that may then change ordering; if multiple onDeadLetter executions are ongoing, we want // all of them to be piped to the exact same logging handler, do not create a new Logging.Logger() here (!) - var metadata: Logger.Metadata = [ - "deadLetter": "1", // marker, can be used by logging tools to easily capture all dead letter logging - // TODO: could coalesce and bump a counter if many times the same dead letter is logged - ] + var metadata: Logger.Metadata = [:] let recipientString: String if let recipient = deadLetter.recipient { @@ -199,7 +196,7 @@ public final class DeadLetterOffice { } } - metadata["actorPath"] = Logger.MetadataValue.stringConvertible(deadAddress) + metadata["actor/path"] = Logger.MetadataValue.stringConvertible(deadAddress) recipientString = "to [\(String(reflecting: recipient.debugDescription))]" } else { recipientString = "" diff --git a/Sources/DistributedActors/Gossip/Gossip+Shell.swift b/Sources/DistributedActors/Gossip/Gossip+Shell.swift index c468437dc..878f6f016 100644 --- a/Sources/DistributedActors/Gossip/Gossip+Shell.swift +++ b/Sources/DistributedActors/Gossip/Gossip+Shell.swift @@ -12,6 +12,8 @@ // //===----------------------------------------------------------------------===// +import Logging + /// Convergent gossip is a gossip mechanism which aims to equalize some state across all peers participating. 
internal final class GossipShell { let settings: Settings @@ -211,7 +213,7 @@ internal final class GossipShell { context.log.trace("Sending gossip to \(target.address)", metadata: [ "gossip/target": "\(target.address)", "gossip/peers/count": "\(self.peers.count)", - "actor/message": "\(payload)", + "actor/message": Logger.Metadata.pretty(payload), ]) let ack = target.ask(for: GossipACK.self, timeout: .seconds(3)) { replyTo in diff --git a/Sources/DistributedActors/Pattern/ConvergentGossip.swift b/Sources/DistributedActors/Pattern/ConvergentGossip.swift index bf23b6af3..ff8a7714d 100644 --- a/Sources/DistributedActors/Pattern/ConvergentGossip.swift +++ b/Sources/DistributedActors/Pattern/ConvergentGossip.swift @@ -12,6 +12,8 @@ // //===----------------------------------------------------------------------===// +import Logging + /// Convergent gossip is a gossip mechanism which aims to equalize some state across all peers participating. internal final class ConvergentGossip { typealias GossipPeerRef = ActorRef @@ -104,7 +106,7 @@ internal final class ConvergentGossip { context.log.trace( "Gossip payload updated: \(payload)", metadata: [ - "actor/message": "\(payload)", + "actor/message": Logger.Metadata.pretty(payload), "gossip/previousPayload": "\(self.payload, orElse: "nil")", ] ) @@ -141,7 +143,7 @@ internal final class ConvergentGossip { "gossip/target": "\(target.address)", "gossip/peerCount": "\(self.peers.count)", "gossip/peers": "\(self.peers.map { $0.address })", - "actor/message": "\(envelope)", + "actor/message": Logger.Metadata.pretty(envelope), ] ) diff --git a/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift b/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift index 36b682a89..3a17d40ab 100644 --- a/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift +++ b/Sources/DistributedActors/Protobuf/ActorAddress.pb.swift @@ -205,7 +205,7 @@ extension ProtoActorAddress: SwiftProtobuf.Message, SwiftProtobuf._MessageImplem } extension 
ProtoActorPath: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "ActorPath" + public static let protoMessageName: String = "ActorPath" public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 1: .same(proto: "segments"), ] diff --git a/Sources/DistributedActors/Refs.swift b/Sources/DistributedActors/Refs.swift index 6da0a876d..23a4f2578 100644 --- a/Sources/DistributedActors/Refs.swift +++ b/Sources/DistributedActors/Refs.swift @@ -577,7 +577,7 @@ public class Guardian { let message = "Escalated failure from [\(ref.address)] reached top-level guardian [\(self.address.path)], SHUTTING DOWN ActorSystem! " + "(This can be configured in `system.settings.failure.onGuardianFailure`). " + "Failure was: \(failure)" - system.log.error("\(message)", metadata: ["actorPath": "\(self.address.path)"]) + system.log.error("\(message)", metadata: ["actor/path": "\(self.address.path)"]) _ = try! Thread { system.shutdown().wait() // so we don't block anyone who sent us this signal (as we execute synchronously in the guardian) @@ -588,7 +588,7 @@ public class Guardian { #else case .systemExit(let code): let message = "Escalated failure from [\(ref.address)] reached top-level guardian [\(self.address.path)], exiting process (\(code))!
Failure was: \(failure)" - system.log.error("\(message)", metadata: ["actorPath": "\(self.address.path)"]) + system.log.error("\(message)", metadata: ["actor/path": "\(self.address.path)"]) print(message) // TODO: to stderr POSIXProcessUtils._exit(Int32(code)) diff --git a/Sources/DistributedActors/Serialization/Serialization.swift b/Sources/DistributedActors/Serialization/Serialization.swift index 9b33fcfd0..89a0a54f0 100644 --- a/Sources/DistributedActors/Serialization/Serialization.swift +++ b/Sources/DistributedActors/Serialization/Serialization.swift @@ -180,16 +180,11 @@ public class Serialization { self.allocator = self.settings.allocator - var log = Logger( - label: "serialization", - factory: { id in - let context = LoggingContext(identifier: id, useBuiltInFormatter: system.settings.logging.useBuiltInFormatter, dispatcher: nil) - return ActorOriginLogHandler(context) - } - ) + var log = system.log // TODO: Dry up setting this metadata - log[metadataKey: "node"] = .stringConvertible(systemSettings.cluster.uniqueBindNode) + log[metadataKey: "actor/node"] = .stringConvertible(systemSettings.cluster.uniqueBindNode) - log.logLevel = systemSettings.logging.defaultLevel + log[metadataKey: "actor/path"] = "/system/serialization" // TODO: this is a fake path, we could use log source: here if it gets merged + log.logLevel = systemSettings.logging.logLevel self.log = log self.context = Serialization.Context( diff --git a/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift b/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift index 737425be4..2de916b5a 100644 --- a/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/ActorPingPongBenchmarks.swift @@ -87,7 +87,7 @@ public let ActorPingPongBenchmarks: [BenchmarkInfo] = [ private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("ActorPingPongBenchmarks") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error } postSetUp() } diff --git 
a/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift b/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift index b837fcedf..40ad95e69 100644 --- a/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/ActorRemotePingPongBenchmarks.swift @@ -66,12 +66,12 @@ var _pongNode: ActorSystem? private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("PingNode") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error settings.cluster.enabled = true settings.cluster.node.port = 7111 } _pongNode = ActorSystem("PongNode") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error settings.cluster.enabled = true settings.cluster.node.port = 7222 } diff --git a/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift b/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift index 5ded7ecaf..a64b1ea94 100644 --- a/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/SerializationCodableBenchmarks.swift @@ -41,7 +41,7 @@ public let SerializationCodableBenchmarks: [BenchmarkInfo] = [ private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("SerializationCodableBenchmarks") { settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error } postSetUp() } diff --git a/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift b/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift index 78a72860d..6d008bb2b 100644 --- a/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift +++ b/Sources/DistributedActorsBenchmarks/SerializationProtobufBenchmarks.swift @@ -38,7 +38,7 @@ private func setUp(and postSetUp: () -> Void = { () in () }) { _system = ActorSystem("SerializationProtobufBenchmarks") { 
settings in - settings.logging.defaultLevel = .error + settings.logging.logLevel = .error } protoSmallMessage.number = 1337 diff --git a/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift b/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift index fc38cdb7a..b2f2562d1 100644 --- a/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift +++ b/Sources/DistributedActorsTestKit/Cluster/ClusteredNodesTestBase.swift @@ -65,7 +65,7 @@ open class ClusteredNodesTestBase: XCTestCase { settings.cluster.node.port = self.nextPort() if self.captureLogs { - settings.logging.overrideLoggerFactory = capture.loggerFactory(captureLabel: name) + settings.logging.logger = capture.logger(label: name) } settings.cluster.autoLeaderElection = .lowestReachable(minNumberOfMembers: 2) diff --git a/Sources/DistributedActorsTestKit/LogCapture.swift b/Sources/DistributedActorsTestKit/LogCapture.swift index 804e47768..61fe9a8d8 100644 --- a/Sources/DistributedActorsTestKit/LogCapture.swift +++ b/Sources/DistributedActorsTestKit/LogCapture.swift @@ -34,11 +34,9 @@ public final class LogCapture { self.settings = settings } - public func loggerFactory(captureLabel: String) -> ((String) -> Logger) { - self.captureLabel = captureLabel - return { (label: String) in - Logger(label: "LogCapture(\(captureLabel) \(label))", LogCaptureLogHandler(label: label, self)) - } + public func logger(label: String) -> Logger { + self.captureLabel = label + return Logger(label: "LogCapture(\(label))", LogCaptureLogHandler(label: label, self)) } func append(_ log: CapturedLogMessage) { @@ -105,23 +103,20 @@ extension LogCapture { public func printLogs() { for log in self.logs { var metadataString: String = "" - var label = "[/?]" + var actorPath: String = "" if var metadata = log.metadata { - if let labelMeta = metadata.removeValue(forKey: "label") { - switch labelMeta { - case .string(let l): - label = "[\(l)]" - case .stringConvertible(let c): - label = "[\(c)]" - 
default: - label = "[/?]" - } + if let path = metadata.removeValue(forKey: "actor/path") { + actorPath = "[\(path)]" } + metadata.removeValue(forKey: "label") if !metadata.isEmpty { metadataString = "\n// metadata:\n" - for key in metadata.keys.sorted() where key != "label" { - var allString = "\n// \"\(key)\": \(metadata[key]!)" + for key in metadata.keys.sorted() { + let value: Logger.MetadataValue = metadata[key]! + let valueDescription = self.prettyPrint(metadata: value) + + var allString = "\n// \"\(key)\": \(valueDescription)" if allString.contains("\n") { allString = String( allString.split(separator: "\n").map { valueLine in @@ -141,8 +136,31 @@ extension LogCapture { let date = ActorOriginLogHandler._createFormatter().string(from: log.date) let file = log.file.split(separator: "/").last ?? "" let line = log.line - print("Captured log [\(self.captureLabel)][\(date)] [\(file):\(line)]\(label) [\(log.level)] \(log.message)\(metadataString)") + print("Captured log [\(self.captureLabel)][\(date)] [\(file):\(line)]\(actorPath) [\(log.level)] \(log.message)\(metadataString)") + } + } + + internal func prettyPrint(metadata: Logger.MetadataValue) -> String { + let CONSOLE_RESET = "\u{001B}[0;0m" + let CONSOLE_BOLD = "\u{001B}[1m" + + var valueDescription = "" + switch metadata { + case .string(let string): + valueDescription = string + case .stringConvertible(let convertible as CustomPrettyStringConvertible): + valueDescription = convertible.prettyDescription + case .stringConvertible(let convertible): + valueDescription = convertible.description + case .array(let array): + valueDescription = "\n \(array.map { "\($0)" }.joined(separator: "\n "))" + case .dictionary(let metadata): + for k in metadata.keys { + valueDescription += "\(CONSOLE_BOLD)\(k)\(CONSOLE_RESET): \(self.prettyPrint(metadata: metadata[k]!))" + } } + + return valueDescription } } diff --git a/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift 
b/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift index a5bd058ef..450f8fd34 100644 --- a/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift +++ b/Sources/DistributedActorsXPC/ActorCell/XPCServiceCellDelegate.swift @@ -17,6 +17,7 @@ import Dispatch import DistributedActors import Files +import Logging import XPC fileprivate let _file = try! Folder(path: "/tmp").file(named: "xpc.txt") @@ -76,8 +77,8 @@ internal final class XPCServiceCellDelegate: CellDelegate master.tell(.xpcRegisterService(self.peer, myself.asAddressable())) // TODO: do we really need it? xpc_connection_set_event_handler(self.peer) { (xdict: xpc_object_t) in - var log = ActorLogger.make(system: system, identifier: "\(myself.address.name)") - log[metadataKey: "actorPath"] = "\(address)" + var log = system.log + log[metadataKey: "actor/path"] = "\(address)" // TODO: connection id? switch xpc_get_type(xdict) { diff --git a/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift b/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift index 464028bb0..b512a40aa 100644 --- a/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift +++ b/Tests/ActorSingletonPluginTests/ActorSingletonPluginClusteredTests.swift @@ -26,10 +26,6 @@ final class ActorSingletonPluginClusteredTests: ClusteredNodesTestBase { ] } -// override var alwaysPrintCaptureLogs: Bool { -// true -// } - func test_singletonByClusterLeadership_happyPath() throws { try shouldNotThrow { var singletonSettings = ActorSingletonSettings(name: GreeterSingleton.name) diff --git a/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift b/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift index 88e1a89c8..90fe46205 100644 --- a/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift +++ b/Tests/DistributedActorsDocumentationTests/ProcessIsolatedDocExamples.swift @@ -31,7 +31,7 @@ class 
ProcessIsolatedDocExamples { let isolated = ProcessIsolated { boot in // <1> // optionally configure nodes by changing the provided settings - boot.settings.logging.defaultLevel = .info + boot.settings.logging.logLevel = .info // always create the actor system based on the provided boot settings, customized if needed return ActorSystem(settings: boot.settings) diff --git a/Tests/DistributedActorsTests/ActorLeakingTests.swift b/Tests/DistributedActorsTests/ActorLeakingTests.swift index d11bf71c0..6955412c5 100644 --- a/Tests/DistributedActorsTests/ActorLeakingTests.swift +++ b/Tests/DistributedActorsTests/ActorLeakingTests.swift @@ -278,7 +278,7 @@ final class ActorLeakingTests: ActorSystemTestBase { let initialSystemCount = ActorSystem.actorSystemInitCounter.load() var system: ActorSystem? = ActorSystem("Test") { settings in - settings.logging.defaultLevel = .info + settings.logging.logLevel = .info } _ = try system?.spawn("logging", of: String.self, .setup { context in context.log.trace("Not going to be logged") @@ -298,7 +298,7 @@ final class ActorLeakingTests: ActorSystemTestBase { let initialSystemCount = ActorSystem.actorSystemInitCounter.load() var system: ActorSystem? 
= ActorSystem("Test") { settings in - settings.logging.defaultLevel = .info + settings.logging.logLevel = .info } _ = try system?.spawn("logging", of: String.self, .setup { context in context.log.warning("Not going to be logged") diff --git a/Tests/DistributedActorsTests/ActorRefAdapterTests.swift b/Tests/DistributedActorsTests/ActorRefAdapterTests.swift index 5c29c3cc8..9177be59d 100644 --- a/Tests/DistributedActorsTests/ActorRefAdapterTests.swift +++ b/Tests/DistributedActorsTests/ActorRefAdapterTests.swift @@ -233,9 +233,9 @@ class ActorRefAdapterTests: ActorSystemTestBase { } func test_adaptedRef_shouldDeadLetter_whenOwnerTerminated() throws { - let logCaptureHandler = LogCapture() + let logCapture = LogCapture() let system = ActorSystem("\(type(of: self))-2") { settings in - settings.logging.overrideLoggerFactory = logCaptureHandler.loggerFactory(captureLabel: settings.cluster.node.systemName) + settings.logging.logger = logCapture.logger(label: settings.cluster.node.systemName) } defer { system.shutdown().wait() } @@ -268,7 +268,7 @@ class ActorRefAdapterTests: ActorSystemTestBase { let expectedLine = #line - 1 let expectedFile = #file - try logCaptureHandler.shouldContain( + try logCapture.shouldContain( message: "*was not delivered to [*", at: .info, expectedFile: expectedFile, expectedLine: expectedLine ) diff --git a/Tests/DistributedActorsTests/BehaviorTests.swift b/Tests/DistributedActorsTests/BehaviorTests.swift index 25af42cfa..66dfc0f47 100644 --- a/Tests/DistributedActorsTests/BehaviorTests.swift +++ b/Tests/DistributedActorsTests/BehaviorTests.swift @@ -347,7 +347,7 @@ final class BehaviorTests: ActorSystemTestBase { func test_makeAsynchronousCallback_shouldPrintNicelyIfThrewInsideClosure() throws { let capture = LogCapture(settings: .init()) let system = ActorSystem("CallbackCrash") { settings in - settings.logging.overrideLoggerFactory = .some(capture.loggerFactory(captureLabel: "mock")) + settings.logging.logger = capture.logger(label: "mock") } 
defer { system.shutdown().wait() diff --git a/Tests/DistributedActorsTests/DeadLetterTests.swift b/Tests/DistributedActorsTests/DeadLetterTests.swift index 26a40d951..bb142040f 100644 --- a/Tests/DistributedActorsTests/DeadLetterTests.swift +++ b/Tests/DistributedActorsTests/DeadLetterTests.swift @@ -22,7 +22,7 @@ final class DeadLetterTests: ActorSystemTestBase { // MARK: DeadLetterOffice tests func test_deadLetters_logWithSourcePosition() throws { - let log = self.logCapture.loggerFactory(captureLabel: "/dead/letters")("/dead/letters") + let log = self.logCapture.logger(label: "/dead/letters") let address = try ActorAddress(path: ActorPath._user.appending("someone"), incarnation: .random()) let office = DeadLetterOffice(log, address: address, system: system)