Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,161 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
  Changelog 20231229001: refreshes three Logstash filter definitions stored in
  the utm_logstash_filter table (column logstash_filter) and bumps their
  filter_version values:

    * id=1521  generic pipeline filter  -> 1.0.1  (wraps the dataType
      assignment in an "if ![dataType]" guard)
    * id=1520  syslog filter            -> 1.0.1  (same dataType guard; also
      moves the rename into the guarded block)
    * id=1515  json input filter        -> 1.0.2  (dataSource resolution now
      only runs when dataSource is absent)

  The filter bodies are SQL string literals: they are data, not SQL logic, and
  mirror the repository's filters/*.conf sources. Keep them byte-identical to
  those files when editing.

  NOTE(review): plain UPDATE statements — if a row with the given id does not
  exist the statement silently affects 0 rows; confirm ids 1515/1520/1521 are
  seeded by an earlier changelog. No explicit <rollback> is defined, so a
  rollback of this changeSet is a no-op for the data.
-->
<databaseChangeLog
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.5.xsd">

<changeSet id="20231229001" author="Freddy">

<!-- stripComments="true": the "double-dash" SQL comments below are removed before
     execution. Safe here because the filter strings themselves contain no
     "double-dash" sequences (Logstash comments use '#'). -->
<sql dbms="postgresql" splitStatements="true" stripComments="true">
<![CDATA[
-- Updating generic pipeline filter
-- (row id=1521, version 1.0.0 -> 1.0.1: dataType now only set when absent)
UPDATE utm_logstash_filter
SET logstash_filter='filter {
# Generic pipeline filter, version 1.0.1
# Supports plain logs and simple json logs

split {
field => "message"
terminator => "<utm-log-separator>"
}

#Looking for datasource generated by an agent and parse original message
if [message]=~/\[utm_stack_agent_ds=(.+)\]-(.+)/ {
grok {
match => {
"message" => [ "\[utm_stack_agent_ds=%{DATA:dataSource}\]-%{GREEDYDATA:original_log_message}" ]
}
}
}
if [original_log_message] {
mutate {
update => { "message" => "%{[original_log_message]}" }
}
}
#......................................................................#
#Generating dataSource and dataType fields required by CurrelationRulesEngine
if ![dataSource] {
mutate {
add_field => { "dataSource" => "%{host}" }
}
}
if ![dataType] {
mutate {
add_field => {"dataType" => "generic"}
}
}
#......................................................................#
#Adding json support
if [message] =~/^\{/ {
json {
source => "message"
}
}

#Remove unwanted fields if the message not match with conditions
mutate {
remove_field => ["@version","original_log_message","headers"]
}
}
',filter_version='1.0.1'
WHERE id=1521;

-- Updating Syslog filter
-- (row id=1520, version 1.0.0 -> 1.0.1: dataType/rename guarded by ![dataType])
UPDATE utm_logstash_filter
SET logstash_filter='filter {
# Syslog filter, version 1.0.1

split {
field => "message"
terminator => "<utm-log-separator>"
}

#Looking for datasource generated by an agent and parse original message
if [message]=~/\[utm_stack_agent_ds=(.+)\]-(.+)/ {
grok {
match => {
"message" => [ "\[utm_stack_agent_ds=%{DATA:dataSource}\]-%{GREEDYDATA:original_log_message}" ]
}
}
}
if [original_log_message] {
mutate {
update => { "message" => "%{[original_log_message]}" }
}
}
#......................................................................#
#Generating dataSource field required by CurrelationRulesEngine
if ![dataSource] {
mutate {
add_field => { "dataSource" => "%{host}" }
}
}
#......................................................................#
#Generating logx structure
if ![dataType] {
mutate {
add_field => {"dataType" => "syslog"}
rename => ["message", "[logx][syslog][message]"]
}
}
#Remove unwanted fields if the message not match with conditions
mutate {
remove_field => ["@version","original_log_message","headers"]
}
}
',filter_version='1.0.1'
WHERE id=1520;

-- Updating json input filter
-- (row id=1515, version 1.0.1 -> 1.0.2: dataSource logic wrapped in ![dataSource])
UPDATE utm_logstash_filter
SET logstash_filter='filter {
#Filter version 1.0.2
#Used to format generic json files

if [message] {
split {
field => "message"
terminator => "<utm-log-separator>"
}
json {
source => "message"
target => "json_input"
}

#Create logx structure
mutate {
rename => { "[message]" => "[logx][json_input][message]" }
rename => { "[json_input]" => "[logx][json_input]" }
}

#Generating dataType and dataSource fields
if ![dataType] {
mutate {
add_field => { "dataType" => "json-input" }
}
}
if ![dataSource] {
if [logx][json_input][dataSource]{
mutate {
rename => { "[logx][json_input][dataSource]" => "[dataSource]" }
}
} else {
mutate {
add_field => { "dataSource" => "%{host}" }
}
}
}
}

#Finally remove unused fields
mutate {
remove_field => ["path","@version","host","headers"]
}
}
',filter_version='1.0.2'
WHERE id=1515;

]]>
</sql>
</changeSet>
</databaseChangeLog>
2 changes: 2 additions & 0 deletions backend/src/main/resources/config/liquibase/master.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,6 @@

<include file="config/liquibase/changelog/20231215001_updating_azure_filter.xml" relativeToChangelogFile="false"/>

<include file="config/liquibase/changelog/20231229001_updating_syslog_json_generic_filters.xml" relativeToChangelogFile="false"/>

</databaseChangeLog>
5 changes: 3 additions & 2 deletions filters/generic/generic.conf
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
filter {
# Generic pipeline filter, version 1.0.0
# Generic pipeline filter, version 1.0.1
# Supports plain logs and simple json logs

split {
Expand Down Expand Up @@ -27,10 +27,11 @@ if ![dataSource] {
add_field => { "dataSource" => "%{host}" }
}
}
if ![dataType] {
mutate {
add_field => {"dataType" => "generic"}
}

}
#......................................................................#
#Adding json support
if [message] =~/^\{/ {
Expand Down
8 changes: 5 additions & 3 deletions filters/json/json-input.conf
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
filter {
#Filter version 1.0.1
#Filter version 1.0.2
#Used to format generic json files

if [message] {
Expand All @@ -24,14 +24,16 @@ filter {
add_field => { "dataType" => "json-input" }
}
}
if [logx][json_input][dataSource]{
if ![dataSource] {
if [logx][json_input][dataSource]{
mutate {
rename => { "[logx][json_input][dataSource]" => "[dataSource]" }
}
} else {
} else {
mutate {
add_field => { "dataSource" => "%{host}" }
}
}
}
}

Expand Down
37 changes: 26 additions & 11 deletions filters/privafy/privafy.conf
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
filter {

# Privafy filter version 1.1.0
# Privafy filter version 1.1.1
# Based on (User Doc) https://docs.progress.com/es-ES/bundle/loadmaster-technical-note-common-event-format-cef-logs-ga/page/Common-Event-Format-CEF-Logs.html (December, 2023)
# and (User Doc) https://help.deepsecurity.trendmicro.com/20_0/on-premise/event-syslog-message-formats.html (December, 2023)
# and example logs provided by user during POC
Expand Down Expand Up @@ -28,10 +28,20 @@ filter {
}
}

if ![dataType] {
#......................................................................#
# Creating privafy message field from syslog message
if [logx][syslog][message] {
mutate {
add_field => { "prvf_message" => "%{[logx][syslog][message]}" }
}
} else {
mutate {
add_field => { "prvf_message" => "%{message}" }
}
}
#......................................................................#
# Privafy Entry point
if [message] and (("CEF:" in [message] or "LEEF:" in [message]) and [message] =~/\|(\w+)?(\s)?Privafy(\s)?(\w+)?\|/ ) {
if [prvf_message] and (("CEF:" in [prvf_message] or "LEEF:" in [prvf_message]) and [prvf_message] =~/\|(\w+)?(\s)?Privafy(\s)?(\w+)?\|/ ) {
#......................................................................#
#Generating dataSource field required by CurrelationRulesEngine
#Checks if exists, if not evaluate to the host variable
Expand All @@ -42,23 +52,29 @@ filter {
}
#......................................................................#
#Generating dataType field required by CurrelationRulesEngine
if (![dataType]){
mutate {
add_field => { "dataType" => "privafy" }
}
} else {
mutate {
update => { "dataType" => "privafy" }
}
}
#......................................................................#
#If CEF or LEEF formatted log do the parsing of the message mark as undefined syslog format
if ("CEF:" in [message] or "LEEF:" in [message] ) {
#If CEF or LEEF formatted log do the parsing of the prvf_message mark as undefined syslog format
if ("CEF:" in [prvf_message] or "LEEF:" in [prvf_message] ) {
#......................................................................#
#Using grok to parse header of the message
#Using grok to parse header of the prvf_message
grok {
match => {
"message" => [
"prvf_message" => [
"(%{INT:not_defined})?(\s)?(<%{NUMBER:priority}>)?(%{INT:syslog_version})?((\s)%{GREEDYDATA:syslog_date_host}(\s))?(?<format_type>(CEF|LEEF)):(\s)?(?<format_version>(%{INT}\.%{INT}|%{INT}))%{GREEDYDATA:cef_or_leef_msg_all}"
]
}
}
}
if ("CEF:" in [message] ) {
if ("CEF:" in [prvf_message] ) {
#......................................................................#
#Using grok to parse components of the cef_or_leef_msg_all
if [cef_or_leef_msg_all] {
Expand All @@ -72,7 +88,7 @@ filter {
}
}
}
} else if ("LEEF:" in [message] ) {
} else if ("LEEF:" in [prvf_message] ) {
#......................................................................#
#Using grok to parse components of the leef_message
if [cef_or_leef_msg_all] {
Expand Down Expand Up @@ -277,12 +293,11 @@ if [kv_field][severity]{
#......................................................................#
#Finally, remove unnecessary fields
mutate {
remove_field => ["@version","path","tags","type","syslog_version","kv_field",
remove_field => ["@version","path","tags","type","syslog_version","kv_field","prvf_message","[logx][syslog][message]",
"not_defined","cef_or_leef_msg_all","cef_or_leef_msg","syslog_date_host","irrelevant","init_msg"]
}
}
# End CEF entrypoint
}

#Also, remove unwanted fields if the message not match with conditions
mutate {
Expand Down
5 changes: 3 additions & 2 deletions filters/syslog/syslog-generic.conf
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
filter {
# Syslog filter, version 1.0.0
# Syslog filter, version 1.0.1

split {
field => "message"
Expand Down Expand Up @@ -28,11 +28,12 @@ if ![dataSource] {
}
#......................................................................#
#Generating logx structure
if ![dataType] {
mutate {
add_field => {"dataType" => "syslog"}
rename => ["message", "[logx][syslog][message]"]
}

}
#Remove unwanted fields if the message not match with conditions
mutate {
remove_field => ["@version","original_log_message","headers"]
Expand Down