diff --git a/backend/src/main/resources/config/liquibase/changelog/20231229001_updating_syslog_json_generic_filters.xml b/backend/src/main/resources/config/liquibase/changelog/20231229001_updating_syslog_json_generic_filters.xml
new file mode 100644
index 000000000..bb4e4b61d
--- /dev/null
+++ b/backend/src/main/resources/config/liquibase/changelog/20231229001_updating_syslog_json_generic_filters.xml
@@ -0,0 +1,161 @@
+<?xml version="1.0" encoding="utf-8"?>
+<databaseChangeLog
+    xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
+
+    <changeSet id="20231229001" author="UTMStack">
+        <sql>
+            <![CDATA[
+-- Updating Generic filter
+ UPDATE utm_logstash_filter
+ SET logstash_filter='filter {
+# Generic pipeline filter, version 1.0.1
+# Supports plain logs and simple json logs
+
+split {
+  field => "message"
+  terminator => ""
+}
+
+#Looking for datasource generated by an agent and parse original message
+if [message]=~/\[utm_stack_agent_ds=(.+)\]-(.+)/ {
+ grok {
+ match => {
+ "message" => [ "\[utm_stack_agent_ds=%{DATA:dataSource}\]-%{GREEDYDATA:original_log_message}" ]
+ }
+ }
+}
+if [original_log_message] {
+ mutate {
+ update => { "message" => "%{[original_log_message]}" }
+ }
+}
+#......................................................................#
+#Generating dataSource and dataType fields required by CurrelationRulesEngine
+if ![dataSource] {
+ mutate {
+ add_field => { "dataSource" => "%{host}" }
+ }
+}
+if ![dataType] {
+ mutate {
+ add_field => {"dataType" => "generic"}
+ }
+}
+#......................................................................#
+#Adding json support
+if [message] =~/^\{/ {
+ json {
+ source => "message"
+ }
+}
+
+#Remove unwanted fields if the message not match with conditions
+ mutate {
+ remove_field => ["@version","original_log_message","headers"]
+ }
+}
+',filter_version='1.0.1'
+ WHERE id=1521;
+
+-- Updating Syslog filter
+ UPDATE utm_logstash_filter
+ SET logstash_filter='filter {
+# Syslog filter, version 1.0.1
+
+split {
+ field => "message"
+ terminator => ""
+}
+
+#Looking for datasource generated by an agent and parse original message
+if [message]=~/\[utm_stack_agent_ds=(.+)\]-(.+)/ {
+ grok {
+ match => {
+ "message" => [ "\[utm_stack_agent_ds=%{DATA:dataSource}\]-%{GREEDYDATA:original_log_message}" ]
+ }
+ }
+}
+if [original_log_message] {
+ mutate {
+ update => { "message" => "%{[original_log_message]}" }
+ }
+}
+#......................................................................#
+#Generating dataSource field required by CurrelationRulesEngine
+if ![dataSource] {
+ mutate {
+ add_field => { "dataSource" => "%{host}" }
+ }
+}
+#......................................................................#
+#Generating logx structure
+if ![dataType] {
+ mutate {
+ add_field => {"dataType" => "syslog"}
+ rename => ["message", "[logx][syslog][message]"]
+ }
+}
+#Remove unwanted fields if the message not match with conditions
+ mutate {
+ remove_field => ["@version","original_log_message","headers"]
+ }
+}
+',filter_version='1.0.1'
+ WHERE id=1520;
+
+-- Updating json input filter
+ UPDATE utm_logstash_filter
+ SET logstash_filter='filter {
+#Filter version 1.0.2
+#Used to format generic json files
+
+ if [message] {
+ split {
+ field => "message"
+ terminator => ""
+ }
+ json {
+ source => "message"
+ target => "json_input"
+ }
+
+ #Create logx structure
+ mutate {
+ rename => { "[message]" => "[logx][json_input][message]" }
+ rename => { "[json_input]" => "[logx][json_input]" }
+ }
+
+ #Generating dataType and dataSource fields
+ if ![dataType] {
+ mutate {
+ add_field => { "dataType" => "json-input" }
+ }
+ }
+ if ![dataSource] {
+ if [logx][json_input][dataSource]{
+ mutate {
+ rename => { "[logx][json_input][dataSource]" => "[dataSource]" }
+ }
+ } else {
+ mutate {
+ add_field => { "dataSource" => "%{host}" }
+ }
+ }
+ }
+ }
+
+ #Finally remove unused fields
+ mutate {
+ remove_field => ["path","@version","host","headers"]
+ }
+}
+',filter_version='1.0.2'
+ WHERE id=1515;
+
+ ]]>
+        </sql>
+    </changeSet>
+</databaseChangeLog>
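
Both stored filter bodies above (generic, id 1521, and syslog, id 1520) share the same agent unwrapping step: logs shipped through the UTMStack agent arrive prefixed with [utm_stack_agent_ds=...]-, grok lifts the prefix into dataSource, and the wrapped payload is put back into message. A minimal standalone sketch of just that step, using the field names from the filters above (the sample line in the comment is hypothetical):

filter {
  # e.g. (hypothetical sample): "[utm_stack_agent_ds=fw-edge-01]-Dec 29 10:15:02 fw-edge-01 kernel: DROP ..."
  if [message] =~ /\[utm_stack_agent_ds=(.+)\]-(.+)/ {
    grok {
      # dataSource           <- the value between "utm_stack_agent_ds=" and "]"
      # original_log_message <- everything after the "]-" separator
      match => {
        "message" => [ "\[utm_stack_agent_ds=%{DATA:dataSource}\]-%{GREEDYDATA:original_log_message}" ]
      }
    }
  }
  if [original_log_message] {
    # Restore the unwrapped payload so later stages see the original log line
    mutate {
      update => { "message" => "%{[original_log_message]}" }
    }
  }
}
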
diff --git a/backend/src/main/resources/config/liquibase/master.xml b/backend/src/main/resources/config/liquibase/master.xml
index 94f4689fe..c22d2aebb 100644
--- a/backend/src/main/resources/config/liquibase/master.xml
+++ b/backend/src/main/resources/config/liquibase/master.xml
@@ -26,4 +26,6 @@
+    <include file="config/liquibase/changelog/20231229001_updating_syslog_json_generic_filters.xml" relativeToChangelogFile="false"/>
+
diff --git a/filters/generic/generic.conf b/filters/generic/generic.conf
index 3a6d89aa1..6b563e024 100644
--- a/filters/generic/generic.conf
+++ b/filters/generic/generic.conf
@@ -1,5 +1,5 @@
filter {
-# Generic pipeline filter, version 1.0.0
+# Generic pipeline filter, version 1.0.1
# Supports plain logs and simple json logs
split {
@@ -27,10 +27,11 @@ if ![dataSource] {
add_field => { "dataSource" => "%{host}" }
}
}
+if ![dataType] {
mutate {
add_field => {"dataType" => "generic"}
}
-
+}
#......................................................................#
#Adding json support
if [message] =~/^\{/ {
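
The only functional change in generic.conf is the new ![dataType] guard. mutate add_field on a field that already exists appends a second value (the field becomes an array), so the unguarded version could turn an already-classified event into dataType => ["something", "generic"]. A minimal sketch of the guarded form, assuming an earlier stage may have set dataType:

filter {
  # Only tag the event as "generic" when no upstream stage classified it;
  # add_field on an existing field would append a second value instead of replacing it
  if ![dataType] {
    mutate {
      add_field => { "dataType" => "generic" }
    }
  }
}
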
diff --git a/filters/json/json-input.conf b/filters/json/json-input.conf
index 7dd4f83b5..a765843dd 100644
--- a/filters/json/json-input.conf
+++ b/filters/json/json-input.conf
@@ -1,5 +1,5 @@
filter {
-#Filter version 1.0.1
+#Filter version 1.0.2
#Used to format generic json files
if [message] {
@@ -24,14 +24,16 @@ filter {
add_field => { "dataType" => "json-input" }
}
}
- if [logx][json_input][dataSource]{
+ if ![dataSource] {
+ if [logx][json_input][dataSource]{
mutate {
rename => { "[logx][json_input][dataSource]" => "[dataSource]" }
}
- } else {
+ } else {
mutate {
add_field => { "dataSource" => "%{host}" }
}
+ }
}
}
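
json-input.conf 1.0.2 nests the dataSource lookup inside ![dataSource], which fixes the precedence: a dataSource already on the event (for instance from the agent prefix) is kept, a dataSource key inside the parsed JSON is the first fallback, and the sending host is the last resort. A condensed sketch of that order, with the same field names as above:

filter {
  if ![dataSource] {
    if [logx][json_input][dataSource] {
      # Fallback 1: promote a dataSource carried inside the JSON document itself
      mutate {
        rename => { "[logx][json_input][dataSource]" => "[dataSource]" }
      }
    } else {
      # Fallback 2: use the host the event came from
      mutate {
        add_field => { "dataSource" => "%{host}" }
      }
    }
  }
}
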
diff --git a/filters/privafy/privafy.conf b/filters/privafy/privafy.conf
index 9ab93255e..a00e70568 100644
--- a/filters/privafy/privafy.conf
+++ b/filters/privafy/privafy.conf
@@ -1,6 +1,6 @@
filter {
-# Privafy filter version 1.1.0
+# Privafy filter version 1.1.1
# Based on (User Doc) https://docs.progress.com/es-ES/bundle/loadmaster-technical-note-common-event-format-cef-logs-ga/page/Common-Event-Format-CEF-Logs.html (December, 2023)
# and (User Doc) https://help.deepsecurity.trendmicro.com/20_0/on-premise/event-syslog-message-formats.html (December, 2023)
# and example logs provided by user during POC
@@ -28,10 +28,20 @@ filter {
}
}
- if ![dataType] {
+#......................................................................#
+# Creating privafy message field from syslog message
+ if [logx][syslog][message] {
+ mutate {
+ add_field => { "prvf_message" => "%{[logx][syslog][message]}" }
+ }
+ } else {
+ mutate {
+ add_field => { "prvf_message" => "%{message}" }
+ }
+ }
#......................................................................#
# Privafy Entry point
- if [message] and (("CEF:" in [message] or "LEEF:" in [message]) and [message] =~/\|(\w+)?(\s)?Privafy(\s)?(\w+)?\|/ ) {
+ if [prvf_message] and (("CEF:" in [prvf_message] or "LEEF:" in [prvf_message]) and [prvf_message] =~/\|(\w+)?(\s)?Privafy(\s)?(\w+)?\|/ ) {
#......................................................................#
#Generating dataSource field required by CurrelationRulesEngine
#Checks if exists, if not evaluate to the host variable
@@ -42,23 +52,29 @@ filter {
}
#......................................................................#
#Generating dataType field required by CurrelationRulesEngine
+ if (![dataType]){
mutate {
add_field => { "dataType" => "privafy" }
}
+ } else {
+ mutate {
+ update => { "dataType" => "privafy" }
+ }
+ }
#......................................................................#
-#If CEF or LEEF formatted log do the parsing of the message mark as undefined syslog format
- if ("CEF:" in [message] or "LEEF:" in [message] ) {
+#If CEF or LEEF formatted log do the parsing of the prvf_message mark as undefined syslog format
+ if ("CEF:" in [prvf_message] or "LEEF:" in [prvf_message] ) {
#......................................................................#
-#Using grok to parse header of the message
+#Using grok to parse header of the prvf_message
grok {
match => {
- "message" => [
+ "prvf_message" => [
"(%{INT:not_defined})?(\s)?(<%{NUMBER:priority}>)?(%{INT:syslog_version})?((\s)%{GREEDYDATA:syslog_date_host}(\s))?(?(CEF|LEEF)):(\s)?(?(%{INT}\.%{INT}|%{INT}))%{GREEDYDATA:cef_or_leef_msg_all}"
]
}
}
}
- if ("CEF:" in [message] ) {
+ if ("CEF:" in [prvf_message] ) {
#......................................................................#
#Using grok to parse components of the cef_or_leef_msg_all
if [cef_or_leef_msg_all] {
@@ -72,7 +88,7 @@ filter {
}
}
}
- } else if ("LEEF:" in [message] ) {
+ } else if ("LEEF:" in [prvf_message] ) {
#......................................................................#
#Using grok to parse components of the leef_message
if [cef_or_leef_msg_all] {
@@ -277,12 +293,11 @@ if [kv_field][severity]{
#......................................................................#
#Finally, remove unnecessary fields
mutate {
- remove_field => ["@version","path","tags","type","syslog_version","kv_field",
+ remove_field => ["@version","path","tags","type","syslog_version","kv_field","prvf_message","[logx][syslog][message]",
"not_defined","cef_or_leef_msg_all","cef_or_leef_msg","syslog_date_host","irrelevant","init_msg"]
}
}
# End CEF entrypoint
- }
#Also, remove unwanted fields if the message not match with conditions
mutate {
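
privafy.conf 1.1.1 adapts to the syslog filter now moving the raw line into [logx][syslog][message]: whichever of the two locations is populated gets copied into a temporary prvf_message field, the CEF/LEEF detection and parsing run against that copy, dataType is forced to privafy (update rather than add_field when an earlier stage already set it), and the working field is dropped before output. A reduced sketch of that flow, with the header and key-value parsing elided:

filter {
  # Normalize the input location so the rest of the filter only deals with prvf_message
  if [logx][syslog][message] {
    mutate { add_field => { "prvf_message" => "%{[logx][syslog][message]}" } }
  } else {
    mutate { add_field => { "prvf_message" => "%{message}" } }
  }

  if [prvf_message] and (("CEF:" in [prvf_message] or "LEEF:" in [prvf_message]) and [prvf_message] =~ /\|(\w+)?(\s)?Privafy(\s)?(\w+)?\|/) {
    # ... CEF/LEEF header grok and key/value extraction as in the full filter above ...

    # An existing dataType (e.g. "syslog") has to be overwritten, not appended to
    if ![dataType] {
      mutate { add_field => { "dataType" => "privafy" } }
    } else {
      mutate { update => { "dataType" => "privafy" } }
    }
  }

  # The temporary copy is never indexed
  mutate { remove_field => ["prvf_message"] }
}
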
diff --git a/filters/syslog/syslog-generic.conf b/filters/syslog/syslog-generic.conf
index b33074ce1..6b25c72ec 100644
--- a/filters/syslog/syslog-generic.conf
+++ b/filters/syslog/syslog-generic.conf
@@ -1,5 +1,5 @@
filter {
-# Syslog filter, version 1.0.0
+# Syslog filter, version 1.0.1
split {
field => "message"
@@ -28,11 +28,12 @@ if ![dataSource] {
}
#......................................................................#
#Generating logx structure
+if ![dataType] {
mutate {
add_field => {"dataType" => "syslog"}
rename => ["message", "[logx][syslog][message]"]
}
-
+}
#Remove unwanted fields if the message not match with conditions
mutate {
remove_field => ["@version","original_log_message","headers"]
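
syslog-generic.conf 1.0.1 wraps both the dataType assignment and the move into the logx structure in the same ![dataType] guard, so an event that an earlier stage already typed keeps its message field and structure untouched. A minimal sketch of the guarded block, matching the field names above:

filter {
  if ![dataType] {
    mutate {
      add_field => { "dataType" => "syslog" }
      # Array-form rename: move the raw line under the logx tree read by downstream filters (e.g. privafy)
      rename => ["message", "[logx][syslog][message]"]
    }
  }
}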