Skip to content

Commit

Permalink
Add event.ingested to all Filebeat modules (#20386)
Browse files Browse the repository at this point in the history
The event.ingested field defines the time at which the event was ingested into Elasticsearch
and is added by the Ingest Node pipeline. This field is important when trying to build
alerts for activities that may have been reported long after they occurred (@timestamp is
much older than event.ingested). This might happen if an agent was offline for a period
of time or the processing was delayed.

This adds a test to ensure all modules create event.ingested.

Use Filebeat read time instead of ingest time as event.created in Zeek.

Closes #20073
  • Loading branch information
andrewkroh committed Aug 4, 2020
1 parent b1bd7b7 commit 829c3b7
Show file tree
Hide file tree
Showing 116 changed files with 422 additions and 49 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.next.asciidoc
Expand Up @@ -505,6 +505,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Add event.ingested for CrowdStrike module {pull}20138[20138]
- Add support for additional fields and FirewallMatchEvent type events in CrowdStrike module {pull}20138[20138]
- Add event.ingested for Suricata module {pull}20220[20220]
- Add event.ingested to all Filebeat modules. {pull}20386[20386]

*Heartbeat*

Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/apache/access/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: "Pipeline for parsing Apache HTTP Server access logs. Requires the geoip and user_agent plugins."

processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/apache/error/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing apache error logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/auditd/log/ingest/pipeline.yml
@@ -1,6 +1,9 @@
---
description: Pipeline for parsing Linux auditd logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
pattern_definitions:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/elasticsearch/audit/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing elasticsearch audit logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/elasticsearch/deprecation/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing elasticsearch deprecation logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/elasticsearch/gc/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing Elasticsearch JVM garbage collection logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/elasticsearch/server/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing elasticsearch server logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/elasticsearch/slowlog/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing elasticsearch slow logs.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/haproxy/log/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: Pipeline for parsing HAProxy http, tcp and default logs. Requires the
geoip plugin.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/icinga/debug/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing icinga debug logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/icinga/main/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing icinga main logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/icinga/startup/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing icinga startup logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/iis/access/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: Pipeline for parsing IIS access logs. Requires the geoip and user_agent
plugins.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/iis/error/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing IIS error logs. Requires the geoip plugin.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/kafka/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing Kafka log messages
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
trace_match: true
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/kibana/log/ingest/pipeline.yml
Expand Up @@ -4,6 +4,9 @@ on_failure:
field: error.message
value: '{{ _ingest.on_failure_message }}'
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/logstash/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing logstash node logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/logstash/slowlog/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing logstash slow logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: '@timestamp'
target_field: event.created
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/mongodb/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing MongoDB logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/mysql/error/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing MySQL error logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
5 changes: 5 additions & 0 deletions filebeat/module/mysql/slowlog/ingest/pipeline.json
@@ -1,6 +1,11 @@
{
"description": "Pipeline for parsing MySQL slow logs.",
"processors": [{
"set": {
"field": "event.ingested",
"value": "{{_ingest.timestamp}}"
}
}, {
"grok": {
"field": "message",
"patterns":[
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/nats/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing nats log logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
5 changes: 4 additions & 1 deletion filebeat/module/nginx/access/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: Pipeline for parsing Nginx access logs. Requires the geoip and user_agent
plugins.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down Expand Up @@ -145,7 +148,7 @@ processors:
- set:
field: event.outcome
value: failure
if: "ctx?.http?.response?.status_code != null && ctx.http.response.status_code >= 400"
if: "ctx?.http?.response?.status_code != null && ctx.http.response.status_code >= 400"
- append:
field: related.ip
value: "{{source.ip}}"
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/nginx/error/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing the Nginx error logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/nginx/ingress_controller/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: Pipeline for parsing Nginx ingress controller access logs. Requires the
geoip and user_agent plugins.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
5 changes: 5 additions & 0 deletions filebeat/module/osquery/result/ingest/pipeline.json
Expand Up @@ -2,6 +2,11 @@
"description": "Pipeline for parsing osquery result logs",
"processors": [
{
"set":{
"field": "event.ingested",
"value": "{{_ingest.timestamp}}"
}
}, {
"rename": {
"field": "@timestamp",
"target_field": "event.created"
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/postgresql/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing PostgreSQL logs.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
ignore_missing: true
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/redis/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing redis logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/santa/log/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing Google Santa logs.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/system/auth/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing system authorisation/secure logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
ignore_missing: true
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/system/syslog/ingest/pipeline.yml
@@ -1,5 +1,8 @@
description: Pipeline for parsing Syslog messages.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions filebeat/module/traefik/access/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: Pipeline for parsing Traefik access logs. Requires the geoip and user_agent
plugins.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- dissect:
field: message
pattern: '%{source.address} %{traefik.access.user_identifier} %{user.name} [%{traefik.access.time}]
Expand Down
4 changes: 4 additions & 0 deletions filebeat/tests/system/test_modules.py
Expand Up @@ -161,6 +161,10 @@ def run_on_file(self, module, fileset, test_file, cfgfile):
assert obj["event"]["module"] == module, "expected event.module={} but got {}".format(
module, obj["event"]["module"])

# All modules must include a set processor that adds the time that
# the event was ingested to Elasticsearch
assert "ingested" in obj["event"], "missing event.ingested timestamp"

assert "error" not in obj, "not error expected but got: {}".format(
obj)

Expand Down
3 changes: 3 additions & 0 deletions x-pack/filebeat/module/activemq/audit/ingest/pipeline.yml
@@ -1,6 +1,9 @@
---
description: Pipeline for parsing ActiveMQ audit logs.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
pattern_definitions:
Expand Down
3 changes: 3 additions & 0 deletions x-pack/filebeat/module/activemq/log/ingest/pipeline.yml
@@ -1,6 +1,9 @@
---
description: Pipeline for parsing ActiveMQ logs.
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
pattern_definitions:
Expand Down
5 changes: 4 additions & 1 deletion x-pack/filebeat/module/aws/cloudtrail/ingest/pipeline.yml
@@ -1,6 +1,9 @@
---
description: Pipeline for AWS CloudTrail Logs
processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- rename:
field: "message"
target_field: "event.original"
Expand Down Expand Up @@ -614,7 +617,7 @@ processors:
if (ctx.event.action == 'ConsoleLogin' && ctx?.aws?.cloudtrail?.flattened?.response_elements.ConsoleLogin != null) {
ctx.event.outcome = Processors.lowercase(ctx.aws.cloudtrail.flattened.response_elements.ConsoleLogin);
}
def hm = new HashMap(params.get(ctx.event.action));
hm.forEach((k, v) -> ctx.event[k] = v);
Expand Down
3 changes: 3 additions & 0 deletions x-pack/filebeat/module/aws/cloudwatch/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: "Pipeline for CloudWatch logs"

processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions x-pack/filebeat/module/aws/ec2/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: "Pipeline for EC2 logs in CloudWatch"

processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
patterns:
Expand Down
3 changes: 3 additions & 0 deletions x-pack/filebeat/module/aws/elb/ingest/pipeline.yml
@@ -1,6 +1,9 @@
description: "Pipeline for ELB logs"

processors:
- set:
field: event.ingested
value: '{{_ingest.timestamp}}'
- grok:
field: message
# Classic ELB patterns documented in https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/access-log-collection.html
Expand Down

0 comments on commit 829c3b7

Please sign in to comment.