Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 17 additions & 1 deletion frontend-laminar/src/main/scala/ru/d10xa/jsonlogviewer/App.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import ru.d10xa.jsonlogviewer.Router0.ViewPage
import ru.d10xa.jsonlogviewer.Router0.navigateTo
import ru.d10xa.jsonlogviewer.decline.Config
import ru.d10xa.jsonlogviewer.decline.Config.FormatIn
import ru.d10xa.jsonlogviewer.decline.Config.FormatIn.Csv
import ru.d10xa.jsonlogviewer.decline.Config.FormatIn.Json
import ru.d10xa.jsonlogviewer.decline.Config.FormatIn.Logfmt
import ru.d10xa.jsonlogviewer.decline.Config.FormatOut
Expand Down Expand Up @@ -44,6 +45,16 @@ object App {
|@timestamp=2023-09-18T19:10:10.123456Z second line {"level":"INFO"}
|""".stripMargin

// Sample CSV log input offered by the "Generate csv logs" button: a header
// row naming the columns, followed by data rows. Two rows use quoted fields
// to demonstrate commas embedded inside a single CSV value.
val csvSample: String =
"""@timestamp,level,logger_name,thread_name,message
|2023-09-18T19:10:10.123456Z,INFO,MakeLogs,main,"first line, with comma"
|2023-09-18T19:11:20.132318Z,INFO,MakeLogs,main,test
|2023-09-18T19:12:30.132319Z,DEBUG,MakeLogs,main,debug msg
|2023-09-18T19:13:42.132321Z,WARN,MakeLogs,main,warn msg
|2023-09-18T19:14:42.137207Z,ERROR,MakeLogs,main,"error message,error details"
|2023-09-18T19:15:42.137207Z,INFO,MakeLogs,main,last line
|""".stripMargin

val textVar: Var[String] = Var("")

val cliVar: Var[String] = Var(
Expand Down Expand Up @@ -132,13 +143,16 @@ object App {
value <-- formatInVar.signal.map {
case FormatIn.Json => "json"
case FormatIn.Logfmt => "logfmt"
case FormatIn.Csv => "csv"
},
onChange.mapToValue.map {
case "json" => FormatIn.Json
case "logfmt" => FormatIn.Logfmt
case "csv" => FormatIn.Csv
} --> formatInVar,
option(value := "json", "json"),
option(value := "logfmt", "logfmt")
option(value := "logfmt", "logfmt"),
option(value := "csv", "csv")
)
)
def formatOutDiv: ReactiveHtmlElement[HTMLDivElement] = div(
Expand Down Expand Up @@ -196,11 +210,13 @@ object App {
child.text <-- formatInVar.signal.map {
case Logfmt => "Generate logfmt logs"
case Json => "Generate json logs"
case Csv => "Generate csv logs"
},
onClick --> { _ =>
formatInVar.now() match
case Config.FormatIn.Json => textVar.set(jsonLogSample)
case Config.FormatIn.Logfmt => textVar.set(logfmtSample)
case Config.FormatIn.Csv => textVar.set(csvSample)
}
)
private def renderLivePage(): HtmlElement = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,8 @@ object ViewElement {
filter = config.filter,
formatIn = config.formatIn,
rawInclude = None,
rawExclude = None
rawExclude = None,
excludeFields = None
)
)
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import cats.data.Validated
import cats.data.ValidatedNel
import cats.syntax.all.*
import io.circe.*
import io.circe.generic.auto.*
import io.circe.yaml.scalayaml.parser
import ru.d10xa.jsonlogviewer.decline.Config.FormatIn
import ru.d10xa.jsonlogviewer.decline.FormatInValidator
Expand Down Expand Up @@ -148,14 +147,20 @@ class ConfigYamlLoaderImpl extends ConfigYamlLoader {
parseOptionalListString(feedFields, "rawInclude")
val rawExcludeValidated =
parseOptionalListString(feedFields, "rawExclude")
val excludeFieldsValidated =
parseOptionalListString(
feedFields,
"excludeFields"
)
(
nameValidated,
commandsValidated,
inlineInputValidated,
filterValidated,
formatInValidated,
rawIncludeValidated,
rawExcludeValidated
rawExcludeValidated,
excludeFieldsValidated
)
.mapN(Feed.apply)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,4 +70,41 @@ class ConfigYamlLoaderTest extends FunSuite {
val errors = result.swap.toOption.get
assert(errors.exists(_.contains("Invalid 'feeds' field format, should be a list")))
}

test("parse valid yaml with excludeFields") {
// Two feeds, each carrying its own excludeFields list; the second feed uses
// the "@timestamp" key to cover field names that are not plain identifiers.
val yaml =
"""|feeds:
| - name: "pod-logs"
| commands:
| - "./mock-logs.sh pod1"
| excludeFields:
| - "level"
| - "logger_name"
| - "thread_name"
| - name: "service-logs"
| commands:
| - "./mock-logs.sh service1"
| excludeFields:
| - "@timestamp"
|""".stripMargin

val result = configYamlLoader.parseYamlFile(yaml)
assert(result.isValid, s"Result should be valid: $result")

// Both feeds must survive parsing with their excludeFields intact.
val feeds = result.toOption.get.feeds.get
assertEquals(feeds.size, 2)

val firstFeed = feeds.head
val secondFeed = feeds(1)

assertEquals(firstFeed.name, Some("pod-logs"))
assertEquals(
firstFeed.excludeFields,
Some(List("level", "logger_name", "thread_name"))
)
assertEquals(secondFeed.name, Some("service-logs"))
assertEquals(secondFeed.excludeFields, Some(List("@timestamp")))
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ package ru.d10xa.jsonlogviewer
import cats.effect.IO
import cats.effect.Ref
import fs2.*
import fs2.Pull
import ru.d10xa.jsonlogviewer.csv.CsvLogLineParser
import ru.d10xa.jsonlogviewer.decline.yaml.ConfigYaml
import ru.d10xa.jsonlogviewer.decline.yaml.Feed
import ru.d10xa.jsonlogviewer.decline.Config
Expand Down Expand Up @@ -35,81 +37,97 @@ object LogViewerStream {
val feedStreams = feeds.zipWithIndex.map { (feed, index) =>
val feedStream: Stream[IO, String] =
commandsAndInlineInputToStream(feed.commands, feed.inlineInput)
processStream(
config,
feedStream,
configYamlRef,
index

createProcessStream(
config = config,
lines = feedStream,
configYamlRef = configYamlRef,
index = index,
initialFormatIn = feed.formatIn.orElse(config.formatIn)
)
}
Stream.emits(feedStreams).parJoin(feedStreams.size)
case None =>
processStream(config, stdinLinesStream, configYamlRef, -1)
createProcessStream(
config = config,
lines = stdinLinesStream,
configYamlRef = configYamlRef,
index = -1,
initialFormatIn = config.formatIn
)
}

finalStream
.intersperse("\n")
.append(Stream.emit("\n"))
}

private def commandsAndInlineInputToStream(
commands: List[String],
inlineInput: Option[String]
): Stream[IO, String] =
new ShellImpl().mergeCommandsAndInlineInput(commands, inlineInput)

/** Builds the per-feed processing stream.
  *
  * CSV input is special-cased: the first line of the stream is the CSV
  * header, which must be consumed once to construct the parser; the
  * remaining lines are then processed with that parser. All other formats
  * defer parser selection to processStreamWithEffectiveConfig.
  *
  * Scrape note: the diff view had interleaved the removed makeLogLineParser
  * lines into this method; the dead fragments are dropped here.
  */
private def createProcessStream(
  config: Config,
  lines: Stream[IO, String],
  configYamlRef: Ref[IO, Option[ConfigYaml]],
  index: Int,
  initialFormatIn: Option[FormatIn]
): Stream[IO, String] =
  if (initialFormatIn.contains(FormatIn.Csv)) {
    // Pull exactly one line (the header) before streaming the rest.
    lines.pull.uncons1.flatMap {
      case Some((headerLine, rest)) =>
        val csvHeaderParser = CsvLogLineParser(config, headerLine)
        processStreamWithEffectiveConfig(
          config = config,
          lines = rest,
          configYamlRef = configYamlRef,
          index = index,
          parser = Some(csvHeaderParser)
        ).pull.echo
      case None =>
        // Empty input: nothing to parse, emit nothing.
        Pull.done
    }.stream
  } else {
    processStreamWithEffectiveConfig(
      config = config,
      lines = lines,
      configYamlRef = configYamlRef,
      index = index,
      parser = None
    )
  }

private def processStream(
baseConfig: Config,
private def processStreamWithEffectiveConfig(
config: Config,
lines: Stream[IO, String],
configYamlRef: Ref[IO, Option[ConfigYaml]],
index: Int
index: Int,
parser: Option[LogLineParser]
): Stream[IO, String] =
for {
line <- lines
optConfigYaml <- Stream.eval(configYamlRef.get)
formatIn = optConfigYaml
.flatMap(_.feeds)
.flatMap(_.lift(index).flatMap(_.formatIn))
.orElse(baseConfig.formatIn)
filter = optConfigYaml
.flatMap(_.feeds)
.flatMap(_.lift(index).flatMap(_.filter))
.orElse(baseConfig.filter)
rawInclude = optConfigYaml
.flatMap(_.feeds)
.flatMap(_.lift(index).flatMap(_.rawInclude))
rawExclude = optConfigYaml
.flatMap(_.feeds)
.flatMap(_.lift(index).flatMap(_.rawExclude))
feedName = optConfigYaml
.flatMap(_.feeds)
.flatMap(_.lift(index).flatMap(_.name))
effectiveConfig = baseConfig.copy(
filter = filter,
formatIn = formatIn

feedConfig = extractFeedConfig(optConfigYaml, index)

effectiveConfig = config.copy(
filter = feedConfig.filter.orElse(config.filter),
formatIn = feedConfig.formatIn.orElse(config.formatIn)
)

timestampFilter = TimestampFilter()
parseResultKeys = ParseResultKeys(effectiveConfig)
logLineFilter = LogLineFilter(effectiveConfig, parseResultKeys)
logLineParser = makeLogLineParser(effectiveConfig, formatIn)
outputLineFormatter = effectiveConfig.formatOut match

logLineParser = parser.getOrElse(
makeNonCsvLogLineParser(effectiveConfig, feedConfig.formatIn)
)

outputLineFormatter = effectiveConfig.formatOut match {
case Some(Config.FormatOut.Raw) => RawFormatter()
case Some(Config.FormatOut.Pretty) | None =>
ColorLineFormatter(effectiveConfig, feedName)
ColorLineFormatter(effectiveConfig, feedConfig.feedName, feedConfig.excludeFields)
}

evaluatedLine <- Stream
.emit(line)
.filter(rawFilter(_, rawInclude, rawExclude))
.filter(rawFilter(_, feedConfig.rawInclude, feedConfig.rawExclude))
.map(logLineParser.parse)
.filter(logLineFilter.grep)
.filter(logLineFilter.logLineQueryPredicate)
Expand All @@ -121,27 +139,80 @@ object LogViewerStream {
effectiveConfig.timestamp.before
)
)
.map(pr =>
Try(outputLineFormatter.formatLine(pr)) match {
case Success(formatted) => formatted.toString
case Failure(_) => pr.raw
}
)
.map(_.toString)
.map(formatWithSafety(_, outputLineFormatter))
} yield evaluatedLine

/** Formats a parse result, falling back to the raw input line when the
  * formatter throws for any reason.
  */
private def formatWithSafety(
  parseResult: ParseResult,
  formatter: OutputLineFormatter
): String =
  Try(formatter.formatLine(parseResult))
    .fold(_ => parseResult.raw, _.toString)

// Per-feed settings resolved from the yaml config. Every field is optional;
// callers fall back to the CLI-level Config when a field is absent.
private case class FeedConfig(
feedName: Option[String],
filter: Option[ru.d10xa.jsonlogviewer.query.QueryAST],
formatIn: Option[FormatIn],
rawInclude: Option[List[String]],
rawExclude: Option[List[String]],
excludeFields: Option[List[String]]
)

/** Resolves the feed at `index` from the optional yaml config and projects
  * its optional fields into a FeedConfig. A missing config, missing feeds
  * list, or out-of-range index all yield an all-None FeedConfig.
  */
private def extractFeedConfig(
  optConfigYaml: Option[ConfigYaml],
  index: Int
): FeedConfig = {
  val maybeFeed =
    for {
      configYaml <- optConfigYaml
      feeds <- configYaml.feeds
      feed <- feeds.lift(index) // lift: safe out-of-bounds lookup
    } yield feed

  FeedConfig(
    feedName = maybeFeed.flatMap(_.name),
    filter = maybeFeed.flatMap(_.filter),
    formatIn = maybeFeed.flatMap(_.formatIn),
    rawInclude = maybeFeed.flatMap(_.rawInclude),
    rawExclude = maybeFeed.flatMap(_.rawExclude),
    excludeFields = maybeFeed.flatMap(_.excludeFields)
  )
}

/** Merges the output of shell `commands` with optional inline input into a
  * single stream of lines.
  */
private def commandsAndInlineInputToStream(
  commands: List[String],
  inlineInput: Option[String]
): Stream[IO, String] = {
  val shell = new ShellImpl()
  shell.mergeCommandsAndInlineInput(commands, inlineInput)
}

/** Selects a line parser for non-CSV formats. CSV is rejected here because
  * its parser needs the header line, which only createProcessStream has.
  *
  * @throws IllegalStateException when called with FormatIn.Csv
  */
def makeNonCsvLogLineParser(
  config: Config,
  optFormatIn: Option[FormatIn]
): LogLineParser =
  optFormatIn match {
    case Some(FormatIn.Csv) =>
      throw new IllegalStateException(
        "method makeNonCsvLogLineParser does not support csv"
      )
    case Some(FormatIn.Logfmt) =>
      LogfmtLogLineParser(config)
    case _ =>
      // Json and unspecified formats share the json parser.
      JsonLogLineParser(config, JsonPrefixPostfix(JsonDetector()))
  }

/** Decides whether a raw line passes the include/exclude regex filters.
  *
  * An absent or empty include list accepts every line; otherwise at least
  * one include pattern must match. Any matching exclude pattern rejects
  * the line. Patterns are matched anywhere in the string (findFirstIn).
  *
  * Fix: the scraped diff contained both the pre- and post-change
  * declarations of includeRegexes/excludeRegexes (duplicate vals, which do
  * not compile); only the single post-change pair is kept.
  */
def rawFilter(
  str: String,
  include: Option[List[String]],
  exclude: Option[List[String]]
): Boolean = {
  val includeRegexes: List[Regex] =
    include.getOrElse(Nil).map(_.r)
  val excludeRegexes: List[Regex] =
    exclude.getOrElse(Nil).map(_.r)
  val includeMatches = includeRegexes.isEmpty || includeRegexes.exists(
    _.findFirstIn(str).isDefined
  )
  val excludeMatches = excludeRegexes.forall(_.findFirstIn(str).isEmpty)
  includeMatches && excludeMatches
}

}
Loading
Loading