fix(update-collection-v3): fix multiline property rendering
Fixes #433
andrzej-stencel committed Jan 20, 2023
1 parent 06c9992 commit 7975d8d
Showing 6 changed files with 358 additions and 2 deletions.
2 changes: 1 addition & 1 deletion src/go/cmd/update-collection-v3/README.md
@@ -48,7 +48,7 @@ make build
## Known issues

1. This package is using github.com/go-yaml/yaml which unfortunately doesn't allow
-   to maintain `yaml` comments and order when using user defined structutres.
+   to maintain `yaml` comments and order when using user defined structures.

This could be done when we'd use [`yaml.Node`][yaml_node] instead of customized structs
which reflect the schema of `values.yaml` used in `sumologic-kubernetes-collection`
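
As a rough illustration of the `yaml.Node` approach mentioned in the README excerpt above, the sketch below round-trips a document through a `yaml.Node`, which keeps comments and key order that struct-based decoding drops. It assumes `gopkg.in/yaml.v3` (the go-yaml repository the README refers to), and the input snippet is made up for the example.

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Hypothetical input; the comments and key order are what we want to preserve.
	input := []byte("# collector credentials\nsumologic:\n  accessId: xxx # placeholder\n  accessKey: yyy\n")

	// Decoding into a yaml.Node keeps comments and ordering in the node tree,
	// unlike decoding into user-defined structs that mirror the values.yaml schema.
	var doc yaml.Node
	if err := yaml.Unmarshal(input, &doc); err != nil {
		panic(err)
	}

	out, err := yaml.Marshal(&doc)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // comments and ordering survive the round trip
}
```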
2 changes: 1 addition & 1 deletion src/go/cmd/update-collection-v3/main.go
@@ -181,7 +181,7 @@ func reorderYaml(input string, original string) (string, error) {

sortByBlueprint(outputMapSlice, originalMapSlice)

-	outputBytes, err := yaml.MarshalWithOptions(outputMapSlice, yaml.Indent(2))
+	outputBytes, err := yaml.MarshalWithOptions(outputMapSlice, yaml.Indent(2), yaml.UseLiteralStyleIfMultiline(true))

return string(outputBytes), err
}
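
For context on the one-line change above, here is a minimal sketch of the effect of `UseLiteralStyleIfMultiline`, assuming the options come from `github.com/goccy/go-yaml` (the library that exposes `MarshalWithOptions`); the sample value is illustrative, not taken from the repository:

```go
package main

import (
	"fmt"

	"github.com/goccy/go-yaml"
)

func main() {
	// A value containing newlines, similar to the fluent-bit config blocks in values.yaml.
	values := map[string]string{
		"customParsers": "[PARSER]\n    Name containerd_multiline_pattern\n    Format regex\n",
	}

	// Without the option, the multiline string is emitted as a quoted scalar
	// with escaped "\n" sequences.
	quoted, err := yaml.MarshalWithOptions(values, yaml.Indent(2))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(quoted))

	// With UseLiteralStyleIfMultiline(true) it is rendered as a literal block ("|"),
	// matching how the multiline properties are written by hand.
	literal, err := yaml.MarshalWithOptions(values, yaml.Indent(2), yaml.UseLiteralStyleIfMultiline(true))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(literal))
}
```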
@@ -0,0 +1,170 @@
sumologic:
accessId: xxx
accessKey: yyy
clusterName: containerd-multiline

fluent-bit:
config:
customParsers: |
[PARSER]
Name containerd_multiline_pattern
Format regex
Regex (?<time>^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[.]\d+Z) (?<stream>stdout|stderr) (?<logtag>[P|F]) (?<log>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.* .*)
Time_Key time
Time_Format %Y-%m-%dT%H:%M:%S.%LZ
filters: |
[FILTER]
Name lua
Match containers.*
script /fluent-bit/scripts/parse_logs.lua
call parse_log
inputs: |
[INPUT]
Name tail
Path /var/log/containers/*.log
Read_from_head true
Multiline On
Parser_Firstline containerd_multiline_pattern
Multiline_Flush 60
Tag containers.*
Refresh_Interval 1
Rotate_Wait 60
Mem_Buf_Limit 30MB
Skip_Long_Lines On
DB /tail-db/tail-containers-state-sumo.db
DB.Sync Normal
[INPUT]
Name systemd
Tag host.*
DB /tail-db/systemd-state-sumo.db
Systemd_Filter _SYSTEMD_UNIT=addon-config.service
Systemd_Filter _SYSTEMD_UNIT=addon-run.service
Systemd_Filter _SYSTEMD_UNIT=cfn-etcd-environment.service
Systemd_Filter _SYSTEMD_UNIT=cfn-signal.service
Systemd_Filter _SYSTEMD_UNIT=clean-ca-certificates.service
Systemd_Filter _SYSTEMD_UNIT=containerd.service
Systemd_Filter _SYSTEMD_UNIT=coreos-metadata.service
Systemd_Filter _SYSTEMD_UNIT=coreos-setup-environment.service
Systemd_Filter _SYSTEMD_UNIT=coreos-tmpfiles.service
Systemd_Filter _SYSTEMD_UNIT=dbus.service
Systemd_Filter _SYSTEMD_UNIT=docker.service
Systemd_Filter _SYSTEMD_UNIT=efs.service
Systemd_Filter _SYSTEMD_UNIT=etcd-member.service
Systemd_Filter _SYSTEMD_UNIT=etcd.service
Systemd_Filter _SYSTEMD_UNIT=etcd2.service
Systemd_Filter _SYSTEMD_UNIT=etcd3.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-check.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-reconfigure.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-save.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-update-status.service
Systemd_Filter _SYSTEMD_UNIT=flanneld.service
Systemd_Filter _SYSTEMD_UNIT=format-etcd2-volume.service
Systemd_Filter _SYSTEMD_UNIT=kube-node-taint-and-uncordon.service
Systemd_Filter _SYSTEMD_UNIT=kubelet.service
Systemd_Filter _SYSTEMD_UNIT=ldconfig.service
Systemd_Filter _SYSTEMD_UNIT=locksmithd.service
Systemd_Filter _SYSTEMD_UNIT=logrotate.service
Systemd_Filter _SYSTEMD_UNIT=lvm2-monitor.service
Systemd_Filter _SYSTEMD_UNIT=mdmon.service
Systemd_Filter _SYSTEMD_UNIT=nfs-idmapd.service
Systemd_Filter _SYSTEMD_UNIT=nfs-mountd.service
Systemd_Filter _SYSTEMD_UNIT=nfs-server.service
Systemd_Filter _SYSTEMD_UNIT=nfs-utils.service
Systemd_Filter _SYSTEMD_UNIT=node-problem-detector.service
Systemd_Filter _SYSTEMD_UNIT=ntp.service
Systemd_Filter _SYSTEMD_UNIT=oem-cloudinit.service
Systemd_Filter _SYSTEMD_UNIT=rkt-gc.service
Systemd_Filter _SYSTEMD_UNIT=rkt-metadata.service
Systemd_Filter _SYSTEMD_UNIT=rpc-idmapd.service
Systemd_Filter _SYSTEMD_UNIT=rpc-mountd.service
Systemd_Filter _SYSTEMD_UNIT=rpc-statd.service
Systemd_Filter _SYSTEMD_UNIT=rpcbind.service
Systemd_Filter _SYSTEMD_UNIT=set-aws-environment.service
Systemd_Filter _SYSTEMD_UNIT=system-cloudinit.service
Systemd_Filter _SYSTEMD_UNIT=systemd-timesyncd.service
Systemd_Filter _SYSTEMD_UNIT=update-ca-certificates.service
Systemd_Filter _SYSTEMD_UNIT=user-cloudinit.service
Systemd_Filter _SYSTEMD_UNIT=var-lib-etcd2.service
Max_Entries 1000
Read_From_Tail true
outputs: |
[OUTPUT]
Name forward
Match *
Host ${FLUENTD_LOGS_SVC}.${NAMESPACE}.svc.cluster.local.
Port 24321
Retry_Limit False
tls off
tls.verify on
tls.debug 1
# Disable keepalive for better load balancing
net.keepalive off
service: |
[SERVICE]
Flush 1
Daemon Off
Log_Level info
Parsers_File parsers.conf
Parsers_File custom_parsers.conf
HTTP_Server On
HTTP_Listen 0.0.0.0
HTTP_Port 2020
luaScripts:
parse_logs.lua: |
local function split(s, delimiter)
result = {};
for match in (s..delimiter):gmatch('(.-)'..delimiter) do
table.insert(result, match);
end
return result;
end
function get_log_content(line)
-- remove elements specific containerd log format and get log as single string
table.remove(line, 1) -- remove date
table.remove(line, 1) -- remove stream
table.remove(line, 1) -- remove log tag
return table.concat(line, " ")
end
function adjust_first_line(record, first_line)
-- adjust the first line to containerd format, it comes without date, stream and logtag
-- 'fake-date' string at the beginning is used only to have proper log format
first_line = 'fake-date' .. ' ' .. record['stream'] .. ' ' .. record['logtag'] .. ' ' .. first_line
return first_line
end
function parse_log(tag, timestamp, record)
if record['log'] == nil or record['stream'] == nil or record['logtag'] == nil then
-- log does not contain required attributes ('log', 'stream', 'logtag') to be processed by parse_log function
-- the record will not be modified
return 0, timestamp, record
end
log_lines = split(record['log'], '\n')
log_lines[1] = adjust_first_line(record, log_lines[1])
new_lines = {}
buffer = ''
for k, v in pairs(log_lines) do
line = split(v, ' ')
log_tag = line[3]
buffer = buffer .. get_log_content(line)
if log_tag == 'F' then
table.insert(new_lines, buffer)
buffer = ""
end
end
new_log = table.concat(new_lines, "\n")
record['log'] = new_log
return 2, timestamp, record
end
fluentd:
logs:
containers:
multiline:
enabled: false
@@ -0,0 +1,168 @@
sumologic:
accessId: xxx
accessKey: yyy
clusterName: containerd-multiline
fluent-bit:
config:
customParsers: |
[PARSER]
Name containerd_multiline_pattern
Format regex
Regex (?<time>^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[.]\d+Z) (?<stream>stdout|stderr) (?<logtag>[P|F]) (?<log>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.* .*)
Time_Key time
Time_Format %Y-%m-%dT%H:%M:%S.%LZ
filters: |
[FILTER]
Name lua
Match containers.*
script /fluent-bit/scripts/parse_logs.lua
call parse_log
inputs: |
[INPUT]
Name tail
Path /var/log/containers/*.log
Read_from_head true
Multiline On
Parser_Firstline containerd_multiline_pattern
Multiline_Flush 60
Tag containers.*
Refresh_Interval 1
Rotate_Wait 60
Mem_Buf_Limit 30MB
Skip_Long_Lines On
DB /tail-db/tail-containers-state-sumo.db
DB.Sync Normal
[INPUT]
Name systemd
Tag host.*
DB /tail-db/systemd-state-sumo.db
Systemd_Filter _SYSTEMD_UNIT=addon-config.service
Systemd_Filter _SYSTEMD_UNIT=addon-run.service
Systemd_Filter _SYSTEMD_UNIT=cfn-etcd-environment.service
Systemd_Filter _SYSTEMD_UNIT=cfn-signal.service
Systemd_Filter _SYSTEMD_UNIT=clean-ca-certificates.service
Systemd_Filter _SYSTEMD_UNIT=containerd.service
Systemd_Filter _SYSTEMD_UNIT=coreos-metadata.service
Systemd_Filter _SYSTEMD_UNIT=coreos-setup-environment.service
Systemd_Filter _SYSTEMD_UNIT=coreos-tmpfiles.service
Systemd_Filter _SYSTEMD_UNIT=dbus.service
Systemd_Filter _SYSTEMD_UNIT=docker.service
Systemd_Filter _SYSTEMD_UNIT=efs.service
Systemd_Filter _SYSTEMD_UNIT=etcd-member.service
Systemd_Filter _SYSTEMD_UNIT=etcd.service
Systemd_Filter _SYSTEMD_UNIT=etcd2.service
Systemd_Filter _SYSTEMD_UNIT=etcd3.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-check.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-reconfigure.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-save.service
Systemd_Filter _SYSTEMD_UNIT=etcdadm-update-status.service
Systemd_Filter _SYSTEMD_UNIT=flanneld.service
Systemd_Filter _SYSTEMD_UNIT=format-etcd2-volume.service
Systemd_Filter _SYSTEMD_UNIT=kube-node-taint-and-uncordon.service
Systemd_Filter _SYSTEMD_UNIT=kubelet.service
Systemd_Filter _SYSTEMD_UNIT=ldconfig.service
Systemd_Filter _SYSTEMD_UNIT=locksmithd.service
Systemd_Filter _SYSTEMD_UNIT=logrotate.service
Systemd_Filter _SYSTEMD_UNIT=lvm2-monitor.service
Systemd_Filter _SYSTEMD_UNIT=mdmon.service
Systemd_Filter _SYSTEMD_UNIT=nfs-idmapd.service
Systemd_Filter _SYSTEMD_UNIT=nfs-mountd.service
Systemd_Filter _SYSTEMD_UNIT=nfs-server.service
Systemd_Filter _SYSTEMD_UNIT=nfs-utils.service
Systemd_Filter _SYSTEMD_UNIT=node-problem-detector.service
Systemd_Filter _SYSTEMD_UNIT=ntp.service
Systemd_Filter _SYSTEMD_UNIT=oem-cloudinit.service
Systemd_Filter _SYSTEMD_UNIT=rkt-gc.service
Systemd_Filter _SYSTEMD_UNIT=rkt-metadata.service
Systemd_Filter _SYSTEMD_UNIT=rpc-idmapd.service
Systemd_Filter _SYSTEMD_UNIT=rpc-mountd.service
Systemd_Filter _SYSTEMD_UNIT=rpc-statd.service
Systemd_Filter _SYSTEMD_UNIT=rpcbind.service
Systemd_Filter _SYSTEMD_UNIT=set-aws-environment.service
Systemd_Filter _SYSTEMD_UNIT=system-cloudinit.service
Systemd_Filter _SYSTEMD_UNIT=systemd-timesyncd.service
Systemd_Filter _SYSTEMD_UNIT=update-ca-certificates.service
Systemd_Filter _SYSTEMD_UNIT=user-cloudinit.service
Systemd_Filter _SYSTEMD_UNIT=var-lib-etcd2.service
Max_Entries 1000
Read_From_Tail true
outputs: |
[OUTPUT]
Name forward
Match *
Host ${FLUENTD_LOGS_SVC}.${NAMESPACE}.svc.cluster.local.
Port 24321
Retry_Limit False
tls off
tls.verify on
tls.debug 1
# Disable keepalive for better load balancing
net.keepalive off
service: |
[SERVICE]
Flush 1
Daemon Off
Log_Level info
Parsers_File parsers.conf
Parsers_File custom_parsers.conf
HTTP_Server On
HTTP_Listen 0.0.0.0
HTTP_Port 2020
luaScripts:
parse_logs.lua: |
local function split(s, delimiter)
result = {};
for match in (s..delimiter):gmatch('(.-)'..delimiter) do
table.insert(result, match);
end
return result;
end
function get_log_content(line)
-- remove elements specific containerd log format and get log as single string
table.remove(line, 1) -- remove date
table.remove(line, 1) -- remove stream
table.remove(line, 1) -- remove log tag
return table.concat(line, " ")
end
function adjust_first_line(record, first_line)
-- adjust the first line to containerd format, it comes without date, stream and logtag
-- 'fake-date' string at the beginning is used only to have proper log format
first_line = 'fake-date' .. ' ' .. record['stream'] .. ' ' .. record['logtag'] .. ' ' .. first_line
return first_line
end
function parse_log(tag, timestamp, record)
if record['log'] == nil or record['stream'] == nil or record['logtag'] == nil then
-- log does not contain required attributes ('log', 'stream', 'logtag') to be processed by parse_log function
-- the record will not be modified
return 0, timestamp, record
end
log_lines = split(record['log'], '\n')
log_lines[1] = adjust_first_line(record, log_lines[1])
new_lines = {}
buffer = ''
for k, v in pairs(log_lines) do
line = split(v, ' ')
log_tag = line[3]
buffer = buffer .. get_log_content(line)
if log_tag == 'F' then
table.insert(new_lines, buffer)
buffer = ""
end
end
new_log = table.concat(new_lines, "\n")
record['log'] = new_log
return 2, timestamp, record
end
fluentd:
logs:
containers:
multiline:
enabled: false
9 changes: 9 additions & 0 deletions src/go/cmd/update-collection-v3/testdata/multiline.input.yaml
@@ -0,0 +1,9 @@
sumologic:
accessId: xxx
accessKey: yyy
multiline_key: |-
line 1
line 2
line 3
another_key: yes
@@ -0,0 +1,9 @@
sumologic:
accessId: xxx
accessKey: yyy
multiline_key: |-
line 1
line 2
line 3
another_key: yes
