From 0d4a06eeec0020642f8ceba41c9b166a59a40129 Mon Sep 17 00:00:00 2001 From: Brandon Morelli Date: Thu, 20 Mar 2025 11:33:21 -0700 Subject: [PATCH] Conflict resolution to keep both path change and version updates --- .gitignore | 1 + docs/plugins/codecs.asciidoc | 10 +- docs/plugins/codecs/avro.asciidoc | 18 +- docs/plugins/codecs/cef.asciidoc | 20 +- docs/plugins/codecs/cloudfront.asciidoc | 6 +- docs/plugins/codecs/cloudtrail.asciidoc | 8 +- docs/plugins/codecs/collectd.asciidoc | 18 +- docs/plugins/codecs/csv.asciidoc | 20 +- docs/plugins/codecs/dots.asciidoc | 2 +- docs/plugins/codecs/edn.asciidoc | 4 +- docs/plugins/codecs/edn_lines.asciidoc | 4 +- docs/plugins/codecs/es_bulk.asciidoc | 8 +- docs/plugins/codecs/fluent.asciidoc | 3 +- docs/plugins/codecs/graphite.asciidoc | 14 +- docs/plugins/codecs/gzip_lines.asciidoc | 6 +- docs/plugins/codecs/json.asciidoc | 4 +- docs/plugins/codecs/json_lines.asciidoc | 7 +- docs/plugins/codecs/line.asciidoc | 14 +- docs/plugins/codecs/msgpack.asciidoc | 7 +- docs/plugins/codecs/multiline.asciidoc | 24 +- docs/plugins/codecs/netflow.asciidoc | 24 +- docs/plugins/codecs/nmap.asciidoc | 14 +- docs/plugins/codecs/plain.asciidoc | 8 +- docs/plugins/codecs/pretty.asciidoc | 6 +- docs/plugins/codecs/protobuf.asciidoc | 3 +- docs/plugins/codecs/rubydebug.asciidoc | 6 +- docs/plugins/filters.asciidoc | 10 +- docs/plugins/filters/age.asciidoc | 6 +- docs/plugins/filters/aggregate.asciidoc | 14 +- docs/plugins/filters/alter.asciidoc | 16 +- docs/plugins/filters/bytes.asciidoc | 12 +- docs/plugins/filters/checksum.asciidoc | 6 +- docs/plugins/filters/cidr.asciidoc | 4 +- docs/plugins/filters/cipher.asciidoc | 28 +-- docs/plugins/filters/clone.asciidoc | 8 +- docs/plugins/filters/csv.asciidoc | 22 +- docs/plugins/filters/date.asciidoc | 12 +- docs/plugins/filters/de_dot.asciidoc | 8 +- docs/plugins/filters/dissect.asciidoc | 76 +++--- docs/plugins/filters/dns.asciidoc | 24 +- docs/plugins/filters/drop.asciidoc | 4 +- 
docs/plugins/filters/elapsed.asciidoc | 12 +- .../filters/elastic_integration.asciidoc | 18 +- docs/plugins/filters/elasticsearch.asciidoc | 28 +-- docs/plugins/filters/emoji.asciidoc | 26 +- docs/plugins/filters/environment.asciidoc | 4 +- docs/plugins/filters/extractnumbers.asciidoc | 4 +- docs/plugins/filters/fingerprint.asciidoc | 30 +-- docs/plugins/filters/geoip.asciidoc | 4 +- docs/plugins/filters/grok.asciidoc | 42 ++-- docs/plugins/filters/hashid.asciidoc | 20 +- docs/plugins/filters/http.asciidoc | 2 +- docs/plugins/filters/i18n.asciidoc | 4 +- docs/plugins/filters/jdbc_static.asciidoc | 4 +- docs/plugins/filters/jdbc_streaming.asciidoc | 2 +- docs/plugins/filters/json.asciidoc | 10 +- docs/plugins/filters/json_encode.asciidoc | 6 +- docs/plugins/filters/kv.asciidoc | 36 +-- docs/plugins/filters/memcached.asciidoc | 10 +- docs/plugins/filters/metricize.asciidoc | 10 +- docs/plugins/filters/metrics.asciidoc | 16 +- docs/plugins/filters/multiline.asciidoc | 18 +- docs/plugins/filters/mutate.asciidoc | 6 +- docs/plugins/filters/oui.asciidoc | 6 +- docs/plugins/filters/prune.asciidoc | 12 +- docs/plugins/filters/range.asciidoc | 6 +- docs/plugins/filters/ruby.asciidoc | 2 +- docs/plugins/filters/sleep.asciidoc | 8 +- docs/plugins/filters/split.asciidoc | 12 +- docs/plugins/filters/syslog_pri.asciidoc | 10 +- .../filters/threats_classifier.asciidoc | 9 +- docs/plugins/filters/throttle.asciidoc | 14 +- docs/plugins/filters/tld.asciidoc | 8 +- docs/plugins/filters/translate.asciidoc | 10 +- docs/plugins/filters/truncate.asciidoc | 6 +- docs/plugins/filters/urldecode.asciidoc | 10 +- docs/plugins/filters/useragent.asciidoc | 12 +- docs/plugins/filters/uuid.asciidoc | 6 +- .../filters/wurfl_device_detection.asciidoc | 5 +- docs/plugins/filters/xml.asciidoc | 28 +-- docs/plugins/include/attributes-ls.asciidoc | 10 + .../include/attributes-lsplugins.asciidoc | 13 + docs/plugins/include/filter.asciidoc | 234 ++++++++++++++++++ docs/plugins/include/input.asciidoc | 
172 +++++++++++++ docs/plugins/include/output.asciidoc | 94 +++++++ .../include/plugin_header-core.asciidoc | 14 ++ .../plugin_header-integration.asciidoc | 19 ++ docs/plugins/include/plugin_header.asciidoc | 25 ++ .../include/version-list-intro.asciidoc | 14 ++ docs/plugins/index.asciidoc | 114 +++++++++ docs/plugins/inputs.asciidoc | 8 +- docs/plugins/inputs/azure_event_hubs.asciidoc | 76 +++--- docs/plugins/inputs/beats.asciidoc | 16 +- docs/plugins/inputs/cloudwatch.asciidoc | 30 +-- docs/plugins/inputs/couchdb_changes.asciidoc | 34 +-- .../plugins/inputs/dead_letter_queue.asciidoc | 20 +- docs/plugins/inputs/elastic_agent.asciidoc | 16 +- .../elastic_serverless_forwarder.asciidoc | 2 +- docs/plugins/inputs/elasticsearch.asciidoc | 2 +- docs/plugins/inputs/exec.asciidoc | 8 +- docs/plugins/inputs/file.asciidoc | 4 +- docs/plugins/inputs/ganglia.asciidoc | 6 +- docs/plugins/inputs/gelf.asciidoc | 10 +- docs/plugins/inputs/generator.asciidoc | 10 +- docs/plugins/inputs/github.asciidoc | 10 +- .../inputs/google_cloud_storage.asciidoc | 4 +- docs/plugins/inputs/google_pubsub.asciidoc | 90 +++---- docs/plugins/inputs/graphite.asciidoc | 22 +- docs/plugins/inputs/heartbeat.asciidoc | 12 +- docs/plugins/inputs/http.asciidoc | 46 ++-- docs/plugins/inputs/http_poller.asciidoc | 46 ++-- docs/plugins/inputs/imap.asciidoc | 2 +- docs/plugins/inputs/irc.asciidoc | 24 +- docs/plugins/inputs/jdbc.asciidoc | 2 +- docs/plugins/inputs/jms.asciidoc | 2 +- docs/plugins/inputs/jmx.asciidoc | 12 +- docs/plugins/inputs/kafka.asciidoc | 140 +++++------ docs/plugins/inputs/kinesis.asciidoc | 16 +- docs/plugins/inputs/log4j.asciidoc | 14 +- docs/plugins/inputs/logstash.asciidoc | 4 +- docs/plugins/inputs/lumberjack.asciidoc | 18 +- docs/plugins/inputs/meetup.asciidoc | 18 +- docs/plugins/inputs/pipe.asciidoc | 6 +- docs/plugins/inputs/puppet_facter.asciidoc | 16 +- docs/plugins/inputs/rabbitmq.asciidoc | 2 +- docs/plugins/inputs/redis.asciidoc | 4 +- docs/plugins/inputs/relp.asciidoc | 
18 +- docs/plugins/inputs/rss.asciidoc | 6 +- docs/plugins/inputs/s3-sns-sqs.asciidoc | 5 +- docs/plugins/inputs/s3.asciidoc | 38 +-- docs/plugins/inputs/salesforce.asciidoc | 24 +- docs/plugins/inputs/snmp.asciidoc | 30 +-- docs/plugins/inputs/snmptrap.asciidoc | 30 +-- docs/plugins/inputs/sqlite.asciidoc | 10 +- docs/plugins/inputs/sqs.asciidoc | 28 +-- docs/plugins/inputs/stdin.asciidoc | 2 +- docs/plugins/inputs/stomp.asciidoc | 18 +- docs/plugins/inputs/syslog.asciidoc | 4 +- docs/plugins/inputs/tcp.asciidoc | 20 +- docs/plugins/inputs/twitter.asciidoc | 44 ++-- docs/plugins/inputs/udp.asciidoc | 14 +- docs/plugins/inputs/unix.asciidoc | 14 +- docs/plugins/inputs/varnishlog.asciidoc | 4 +- docs/plugins/inputs/websocket.asciidoc | 6 +- docs/plugins/inputs/wmi.asciidoc | 14 +- docs/plugins/inputs/xmpp.asciidoc | 10 +- docs/plugins/integrations/aws.asciidoc | 2 +- .../elastic_enterprise_search.asciidoc | 2 +- docs/plugins/integrations/jdbc.asciidoc | 4 +- docs/plugins/integrations/kafka.asciidoc | 4 +- docs/plugins/integrations/logstash.asciidoc | 2 +- docs/plugins/integrations/rabbitmq.asciidoc | 2 +- docs/plugins/integrations/sample.asciidoc | 2 +- docs/plugins/integrations/snmp.asciidoc | 34 +-- docs/plugins/outputs.asciidoc | 8 +- docs/plugins/outputs/appsearch.asciidoc | 2 +- docs/plugins/outputs/boundary.asciidoc | 18 +- docs/plugins/outputs/circonus.asciidoc | 8 +- docs/plugins/outputs/cloudwatch.asciidoc | 40 +-- docs/plugins/outputs/csv.asciidoc | 24 +- docs/plugins/outputs/datadog.asciidoc | 18 +- docs/plugins/outputs/datadog_metrics.asciidoc | 22 +- docs/plugins/outputs/dynatrace.asciidoc | 7 +- .../outputs/elastic_app_search.asciidoc | 2 +- .../outputs/elastic_workplace_search.asciidoc | 6 +- docs/plugins/outputs/elasticsearch.asciidoc | 28 +-- docs/plugins/outputs/email.asciidoc | 42 ++-- docs/plugins/outputs/exec.asciidoc | 6 +- docs/plugins/outputs/file.asciidoc | 2 +- docs/plugins/outputs/ganglia.asciidoc | 22 +- docs/plugins/outputs/gelf.asciidoc 
| 24 +- docs/plugins/outputs/google_bigquery.asciidoc | 2 +- .../outputs/google_cloud_storage.asciidoc | 28 +-- docs/plugins/outputs/google_pubsub.asciidoc | 2 +- docs/plugins/outputs/graphite.asciidoc | 24 +- docs/plugins/outputs/graphtastic.asciidoc | 18 +- docs/plugins/outputs/http.asciidoc | 76 +++--- docs/plugins/outputs/influxdb.asciidoc | 50 ++-- docs/plugins/outputs/irc.asciidoc | 26 +- docs/plugins/outputs/juggernaut.asciidoc | 16 +- docs/plugins/outputs/kafka.asciidoc | 94 +++---- docs/plugins/outputs/librato.asciidoc | 16 +- docs/plugins/outputs/loggly.asciidoc | 20 +- docs/plugins/outputs/logstash.asciidoc | 2 +- docs/plugins/outputs/lumberjack.asciidoc | 14 +- docs/plugins/outputs/metriccatcher.asciidoc | 18 +- docs/plugins/outputs/mongodb.asciidoc | 20 +- docs/plugins/outputs/nagios.asciidoc | 6 +- docs/plugins/outputs/nagios_nsca.asciidoc | 18 +- docs/plugins/outputs/null.asciidoc | 2 +- docs/plugins/outputs/opentsdb.asciidoc | 8 +- docs/plugins/outputs/pagerduty.asciidoc | 14 +- docs/plugins/outputs/pipe.asciidoc | 8 +- docs/plugins/outputs/rabbitmq.asciidoc | 44 ++-- docs/plugins/outputs/redis.asciidoc | 28 +-- docs/plugins/outputs/redmine.asciidoc | 28 +-- docs/plugins/outputs/riak.asciidoc | 22 +- docs/plugins/outputs/riemann.asciidoc | 16 +- docs/plugins/outputs/s3.asciidoc | 66 ++--- docs/plugins/outputs/sns.asciidoc | 18 +- docs/plugins/outputs/solr_http.asciidoc | 10 +- docs/plugins/outputs/sqs.asciidoc | 20 +- docs/plugins/outputs/statsd.asciidoc | 26 +- docs/plugins/outputs/stdout.asciidoc | 2 +- docs/plugins/outputs/stomp.asciidoc | 18 +- docs/plugins/outputs/syslog.asciidoc | 40 +-- docs/plugins/outputs/tcp.asciidoc | 16 +- docs/plugins/outputs/timber.asciidoc | 2 +- docs/plugins/outputs/udp.asciidoc | 6 +- docs/plugins/outputs/webhdfs.asciidoc | 48 ++-- docs/plugins/outputs/websocket.asciidoc | 6 +- docs/plugins/outputs/xmpp.asciidoc | 14 +- docs/plugins/outputs/zabbix.asciidoc | 16 +- .../core-plugins/codecs/java_dots.asciidoc | 24 ++ 
.../core-plugins/codecs/java_line.asciidoc | 63 +++++ .../core-plugins/codecs/java_plain.asciidoc | 51 ++++ .../core-plugins/filters/java_uuid.asciidoc | 91 +++++++ .../inputs/java_generator.asciidoc | 117 +++++++++ .../core-plugins/inputs/java_stdin.asciidoc | 35 +++ .../core-plugins/outputs/java_sink.asciidoc | 33 +++ .../core-plugins/outputs/java_stdout.asciidoc | 50 ++++ 221 files changed, 2819 insertions(+), 1689 deletions(-) create mode 100644 docs/plugins/include/attributes-ls.asciidoc create mode 100644 docs/plugins/include/attributes-lsplugins.asciidoc create mode 100644 docs/plugins/include/filter.asciidoc create mode 100644 docs/plugins/include/input.asciidoc create mode 100644 docs/plugins/include/output.asciidoc create mode 100644 docs/plugins/include/plugin_header-core.asciidoc create mode 100644 docs/plugins/include/plugin_header-integration.asciidoc create mode 100644 docs/plugins/include/plugin_header.asciidoc create mode 100644 docs/plugins/include/version-list-intro.asciidoc create mode 100644 docs/plugins/index.asciidoc create mode 100644 docs/plugins/static/core-plugins/codecs/java_dots.asciidoc create mode 100644 docs/plugins/static/core-plugins/codecs/java_line.asciidoc create mode 100644 docs/plugins/static/core-plugins/codecs/java_plain.asciidoc create mode 100644 docs/plugins/static/core-plugins/filters/java_uuid.asciidoc create mode 100644 docs/plugins/static/core-plugins/inputs/java_generator.asciidoc create mode 100644 docs/plugins/static/core-plugins/inputs/java_stdin.asciidoc create mode 100644 docs/plugins/static/core-plugins/outputs/java_sink.asciidoc create mode 100644 docs/plugins/static/core-plugins/outputs/java_stdout.asciidoc diff --git a/.gitignore b/.gitignore index c59c35869..cd1dddd2b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ docs/html_docs +html_docs .idea \ No newline at end of file diff --git a/docs/plugins/codecs.asciidoc b/docs/plugins/codecs.asciidoc index fcb57099b..ff51c4a70 100644 --- 
a/docs/plugins/codecs.asciidoc +++ b/docs/plugins/codecs.asciidoc @@ -1,7 +1,7 @@ [[codec-plugins]] == Codec plugins -A codec plugin changes the data representation of an event. Codecs are essentially stream filters that can operate as part +A codec plugin changes the data representation of an event. Codecs are essentially stream filters that can operate as part of an input or output. The following codec plugins are available below. For a list of Elastic supported plugins, please consult the https://www.elastic.co/support/matrix#show_logstash_plugins[Support Matrix]. @@ -76,13 +76,13 @@ include::codecs/graphite.asciidoc[] include::codecs/gzip_lines.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/codecs/java_dots.asciidoc -include::../../../logstash/docs/static/core-plugins/codecs/java_dots.asciidoc[] +include::./static/core-plugins/codecs/java_dots.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/codecs/java_line.asciidoc -include::../../../logstash/docs/static/core-plugins/codecs/java_line.asciidoc[] +include::./static/core-plugins/codecs/java_line.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/codecs/java_plain.asciidoc -include::../../../logstash/docs/static/core-plugins/codecs/java_plain.asciidoc[] +include::./static/core-plugins/codecs/java_plain.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-codec-json/edit/main/docs/index.asciidoc include::codecs/json.asciidoc[] @@ -115,4 +115,4 @@ include::codecs/protobuf.asciidoc[] include::codecs/rubydebug.asciidoc[] -:edit_url: +:edit_url: diff --git a/docs/plugins/codecs/avro.asciidoc b/docs/plugins/codecs/avro.asciidoc index 6bb40a239..c8ad699e2 100644 --- a/docs/plugins/codecs/avro.asciidoc +++ b/docs/plugins/codecs/avro.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.4.1 :release_date: 2023-10-16 :changelog_url: https://github.com/logstash-plugins/logstash-codec-avro/blob/v3.4.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -23,8 +23,8 @@ include::{include_path}/plugin_header.asciidoc[] Read serialized Avro records as Logstash events -This plugin is used to serialize Logstash events as -Avro datums, as well as deserializing Avro datums into +This plugin is used to serialize Logstash events as +Avro datums, as well as deserializing Avro datums into Logstash events. [id="plugins-{type}s-{plugin}-ecs_metadata"] @@ -34,8 +34,8 @@ The plugin behaves the same regardless of ECS compatibility, except adding the o ==== Encoding -This codec is for serializing individual Logstash events -as Avro datums that are Avro binary blobs. It does not encode +This codec is for serializing individual Logstash events +as Avro datums that are Avro binary blobs. It does not encode Logstash events into an Avro file. @@ -52,7 +52,7 @@ providing a schema containing a subset of the schema which was used to serialize the data. This codec *doesn't support partial deserialization of arbitrary fields*. Partial deserialization _might_ work only when providing a schema which contains -the first `N` fields of the schema used to serialize the data (and +the first `N` fields of the schema used to serialize the data (and in the same order). ================================================================================ @@ -107,14 +107,14 @@ Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (E * Value can be any of: `binary`, `base64` * Default value is `base64` -Set encoding for Avro's payload. +Set encoding for Avro's payload. Use `base64` (default) to indicate that this codec sends or expects to receive base64-encoded bytes. 
Set this option to `binary` to indicate that this codec sends or expects to receive binary Avro data. [id="plugins-{type}s-{plugin}-schema_uri"] -===== `schema_uri` +===== `schema_uri` * This is a required setting. * Value type is <> @@ -128,7 +128,7 @@ example: * file - `/path/to/schema.avsc` [id="plugins-{type}s-{plugin}-tag_on_failure"] -===== `tag_on_failure` +===== `tag_on_failure` * Value type is <> * Default value is `false` diff --git a/docs/plugins/codecs/cef.asciidoc b/docs/plugins/codecs/cef.asciidoc index f8d7f3ebc..8ad1db899 100644 --- a/docs/plugins/codecs/cef.asciidoc +++ b/docs/plugins/codecs/cef.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v6.2.8 :release_date: 2024-10-22 :changelog_url: https://github.com/logstash-plugins/logstash-codec-cef/blob/v6.2.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -434,7 +434,7 @@ from the CEF payload is used to interpret the given time. If the event does not include timezone information, this `default_timezone` is used instead. [id="plugins-{type}s-{plugin}-delimiter"] -===== `delimiter` +===== `delimiter` * Value type is <> * There is no default value for this setting. @@ -493,7 +493,7 @@ If the codec handles data from a variety of sources, the ECS recommendation is t Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. [id="plugins-{type}s-{plugin}-fields"] -===== `fields` +===== `fields` * Value type is <> * Default value is `[]` @@ -516,7 +516,7 @@ a localized format, this `locale` is used to interpret locale-specific strings such as month abbreviations. [id="plugins-{type}s-{plugin}-name"] -===== `name` +===== `name` * Value type is <> * Default value is `"Logstash"` @@ -527,7 +527,7 @@ value of the name field in the CEF header. 
The new value can include `%{foo}` st to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-product"] -===== `product` +===== `product` * Value type is <> * Default value is `"Logstash"` @@ -538,7 +538,7 @@ value of the device product field in CEF header. The new value can include `%{fo to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-raw_data_field"] -===== `raw_data_field` +===== `raw_data_field` * Value type is <> * There is no default value for this setting @@ -555,7 +555,7 @@ Store the raw data to the field, for example `[event][original]`. Existing targe Set to true to adhere to the specifications and encode using the CEF key name (short name) for the CEF field names. [id="plugins-{type}s-{plugin}-severity"] -===== `severity` +===== `severity` * Value type is <> * Default value is `"6"` @@ -570,7 +570,7 @@ to be an integer in the range from 0 to 10 (including). All invalid values will be mapped to the default of 6. [id="plugins-{type}s-{plugin}-signature"] -===== `signature` +===== `signature` * Value type is <> * Default value is `"Logstash"` @@ -581,7 +581,7 @@ value of the signature ID field in CEF header. The new value can include `%{foo} to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-vendor"] -===== `vendor` +===== `vendor` * Value type is <> * Default value is `"Elasticsearch"` @@ -592,7 +592,7 @@ value of the device vendor field in CEF header. The new value can include `%{foo to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-version"] -===== `version` +===== `version` * Value type is <> * Default value is `"1.0"` diff --git a/docs/plugins/codecs/cloudfront.asciidoc b/docs/plugins/codecs/cloudfront.asciidoc index ced63cd98..a1dfde270 100644 --- a/docs/plugins/codecs/cloudfront.asciidoc +++ b/docs/plugins/codecs/cloudfront.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -36,7 +36,7 @@ This codec will read cloudfront encoded content   [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, 
`euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -50,5 +50,3 @@ weird cases like this, you can set the charset setting to the actual encoding of the text and logstash will convert it for you. For nxlog users, you'll want to set this to "CP1252" - - diff --git a/docs/plugins/codecs/cloudtrail.asciidoc b/docs/plugins/codecs/cloudtrail.asciidoc index 8edf5bd2a..1d70514cd 100644 --- a/docs/plugins/codecs/cloudtrail.asciidoc +++ b/docs/plugins/codecs/cloudtrail.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -36,11 +36,7 @@ This is the base class for logstash codecs.   
[id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, 
`CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` - - - - diff --git a/docs/plugins/codecs/collectd.asciidoc b/docs/plugins/codecs/collectd.asciidoc index f301a581d..367148622 100644 --- a/docs/plugins/codecs/collectd.asciidoc +++ b/docs/plugins/codecs/collectd.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2021-08-04 :changelog_url: https://github.com/logstash-plugins/logstash-codec-collectd/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -71,7 +71,7 @@ Be sure to replace `10.0.0.1` with the IP of your Logstash instance.   [id="plugins-{type}s-{plugin}-authfile"] -===== `authfile` +===== `authfile` * Value type is <> * There is no default value for this setting. @@ -82,7 +82,7 @@ in collectd. 
You only need to set this option if the `security_level` is set to `Sign` or `Encrypt` [id="plugins-{type}s-{plugin}-nan_handling"] -===== `nan_handling` +===== `nan_handling` * Value can be any of: `change_value`, `warn`, `drop` * Default value is `"change_value"` @@ -94,7 +94,7 @@ What to do when a value in the event is `NaN` (Not a Number) - drop: Drop the event containing the `NaN` (this only drops the single event, not the whole packet) [id="plugins-{type}s-{plugin}-nan_tag"] -===== `nan_tag` +===== `nan_tag` * Value type is <> * Default value is `"_collectdNaN"` @@ -103,7 +103,7 @@ The tag to add to the event if a `NaN` value was found Set this to an empty string ('') if you don't want to tag [id="plugins-{type}s-{plugin}-nan_value"] -===== `nan_value` +===== `nan_value` * Value type is <> * Default value is `0` @@ -112,7 +112,7 @@ Only relevant when `nan_handeling` is set to `change_value` Change NaN to this configured value [id="plugins-{type}s-{plugin}-prune_intervals"] -===== `prune_intervals` +===== `prune_intervals` * Value type is <> * Default value is `true` @@ -120,7 +120,7 @@ Change NaN to this configured value Prune interval records. Defaults to `true`. [id="plugins-{type}s-{plugin}-security_level"] -===== `security_level` +===== `security_level` * Value can be any of: `None`, `Sign`, `Encrypt` * Default value is `"None"` @@ -149,7 +149,7 @@ For example, if you want data to be put under the `document` field: } [id="plugins-{type}s-{plugin}-typesdb"] -===== `typesdb` +===== `typesdb` * Value type is <> * There is no default value for this setting. @@ -157,5 +157,3 @@ For example, if you want data to be put under the `document` field: File path(s) to collectd `types.db` to use. The last matching pattern wins if you have identical pattern names in multiple files. If no types.db is provided the included `types.db` will be used (currently 5.4.0). 
- - diff --git a/docs/plugins/codecs/csv.asciidoc b/docs/plugins/codecs/csv.asciidoc index 97b1754c2..9f54ff014 100644 --- a/docs/plugins/codecs/csv.asciidoc +++ b/docs/plugins/codecs/csv.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.1.0 :release_date: 2021-07-28 :changelog_url: https://github.com/logstash-plugins/logstash-codec-csv/blob/v1.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -61,7 +61,7 @@ Define whether column names should be auto-detected from the header column or no Defaults to false. [id="plugins-{type}s-{plugin}-autogenerate_column_names"] -===== `autogenerate_column_names` +===== `autogenerate_column_names` * Value type is <> * Default value is `true` @@ -70,7 +70,7 @@ Define whether column names should be autogenerated or not. Defaults to true. If set to false, columns not having a header specified will not be parsed. 
[id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, 
`CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -80,7 +80,7 @@ The character encoding used in this codec. Examples include "UTF-8" and "CP1252". [id="plugins-{type}s-{plugin}-columns"] -===== `columns` +===== `columns` * Value type is <> * Default value is `[]` @@ -95,7 +95,7 @@ are not enough columns specified, the default column names are List of fields names to include in the encoded CSV, in the order listed. [id="plugins-{type}s-{plugin}-convert"] -===== `convert` +===== `convert` * Value type is <> * Default value is `{}` @@ -126,7 +126,7 @@ Possible conversions are: `integer`, `float`, `date`, `date_time`, `boolean` Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. [id="plugins-{type}s-{plugin}-include_headers"] -===== `include_headers` +===== `include_headers` * Value type is <> * Default value is `false` @@ -135,7 +135,7 @@ When **encoding** in an output plugin, include headers in the encoded CSV once per codec lifecyle (not for every event). Default => false [id="plugins-{type}s-{plugin}-quote_char"] -===== `quote_char` +===== `quote_char` * Value type is <> * Default value is `"\""` @@ -145,7 +145,7 @@ the default is a double quote `"`. Optional. [id="plugins-{type}s-{plugin}-separator"] -===== `separator` +===== `separator` * Value type is <> * Default value is `","` @@ -155,7 +155,7 @@ is a comma `,`. Optional. 
[id="plugins-{type}s-{plugin}-skip_empty_columns"] -===== `skip_empty_columns` +===== `skip_empty_columns` * Value type is <> * Default value is `false` @@ -182,5 +182,3 @@ For example, if you want data to be put under the `document` field: } } } - - diff --git a/docs/plugins/codecs/dots.asciidoc b/docs/plugins/codecs/dots.asciidoc index b983a8a41..4b631e115 100644 --- a/docs/plugins/codecs/dots.asciidoc +++ b/docs/plugins/codecs/dots.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-codec-dots/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/codecs/edn.asciidoc b/docs/plugins/codecs/edn.asciidoc index 7c7b5f88f..c7272bbc6 100644 --- a/docs/plugins/codecs/edn.asciidoc +++ b/docs/plugins/codecs/edn.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2021-08-04 :changelog_url: https://github.com/logstash-plugins/logstash-codec-edn/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,5 +54,3 @@ For example, if you want data to be put under the `document` field: } } } - - diff --git a/docs/plugins/codecs/edn_lines.asciidoc b/docs/plugins/codecs/edn_lines.asciidoc index 0fc2c316a..63a889259 100644 --- a/docs/plugins/codecs/edn_lines.asciidoc +++ b/docs/plugins/codecs/edn_lines.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.0 :release_date: 2021-08-04 :changelog_url: https://github.com/logstash-plugins/logstash-codec-edn_lines/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,5 +54,3 @@ For example, if you want data to be put under the `document` field: } } } - - diff --git a/docs/plugins/codecs/es_bulk.asciidoc b/docs/plugins/codecs/es_bulk.asciidoc index 1b94477c8..919cf4c6e 100644 --- a/docs/plugins/codecs/es_bulk.asciidoc +++ b/docs/plugins/codecs/es_bulk.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2021-08-19 :changelog_url: https://github.com/logstash-plugins/logstash-codec-es_bulk/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -27,11 +27,11 @@ into individual events, plus metadata into the `@metadata` field. Encoding is not supported at this time as the Elasticsearch output submits Logstash events in bulk format. -[id="plugins-{type}s-{plugin}-codec-settings"] -==== Codec settings in the `logstash-input-http` plugin +[id="plugins-{type}s-{plugin}-codec-settings"] +==== Codec settings in the `logstash-input-http` plugin The {logstash-ref}/plugins-inputs-http.html[input-http] plugin has two -configuration options for codecs: `codec` and `additional_codecs`. +configuration options for codecs: `codec` and `additional_codecs`. Values in `additional_codecs` are prioritized over those specified in the `codec` option. 
That is, the default `codec` is applied only if no codec diff --git a/docs/plugins/codecs/fluent.asciidoc b/docs/plugins/codecs/fluent.asciidoc index c4ec839cb..e706015a8 100644 --- a/docs/plugins/codecs/fluent.asciidoc +++ b/docs/plugins/codecs/fluent.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.4.3 :release_date: 2024-06-25 :changelog_url: https://github.com/logstash-plugins/logstash-codec-fluent/blob/v3.4.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -81,4 +81,3 @@ For example, if you want data to be put under the `logs` field: port => 4000 } } - diff --git a/docs/plugins/codecs/graphite.asciidoc b/docs/plugins/codecs/graphite.asciidoc index bd01c5bf0..d009c9982 100644 --- a/docs/plugins/codecs/graphite.asciidoc +++ b/docs/plugins/codecs/graphite.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2021-08-12 :changelog_url: https://github.com/logstash-plugins/logstash-codec-graphite/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -39,7 +39,7 @@ This codec will encode and decode Graphite formated lines.   [id="plugins-{type}s-{plugin}-exclude_metrics"] -===== `exclude_metrics` +===== `exclude_metrics` * Value type is <> * Default value is `["%{[^}]+}"]` @@ -47,7 +47,7 @@ This codec will encode and decode Graphite formated lines. 
Exclude regex matched metric names, by default exclude unresolved %{field} strings [id="plugins-{type}s-{plugin}-fields_are_metrics"] -===== `fields_are_metrics` +===== `fields_are_metrics` * Value type is <> * Default value is `false` @@ -55,7 +55,7 @@ Exclude regex matched metric names, by default exclude unresolved %{field} strin Indicate that the event @fields should be treated as metrics and will be sent as is to graphite [id="plugins-{type}s-{plugin}-include_metrics"] -===== `include_metrics` +===== `include_metrics` * Value type is <> * Default value is `[".*"]` @@ -63,7 +63,7 @@ Indicate that the event @fields should be treated as metrics and will be sent as Include only regex matched metric names [id="plugins-{type}s-{plugin}-metrics"] -===== `metrics` +===== `metrics` * Value type is <> * Default value is `{}` @@ -78,7 +78,7 @@ The value will be coerced to a floating point value. Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-metrics_format"] -===== `metrics_format` +===== `metrics_format` * Value type is <> * Default value is `"*"` @@ -90,5 +90,3 @@ strings like `%{host}`. metrics_format => "%{host}.foo.bar.*.sum" NOTE: If no metrics_format is defined the name of the metric will be used as fallback. - - diff --git a/docs/plugins/codecs/gzip_lines.asciidoc b/docs/plugins/codecs/gzip_lines.asciidoc index f44ac2dc4..60cb656e5 100644 --- a/docs/plugins/codecs/gzip_lines.asciidoc +++ b/docs/plugins/codecs/gzip_lines.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2019-07-23 :changelog_url: https://github.com/logstash-plugins/logstash-codec-gzip_lines/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -35,7 +35,7 @@ This codec will read gzip encoded content   [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, 
`CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -49,5 +49,3 @@ weird cases like this, you can set the charset setting to the actual encoding of the text and logstash will convert it for you. For nxlog users, you'll want to set this to "CP1252" - - diff --git a/docs/plugins/codecs/json.asciidoc b/docs/plugins/codecs/json.asciidoc index c48eb5aa0..4cbee17f1 100644 --- a/docs/plugins/codecs/json.asciidoc +++ b/docs/plugins/codecs/json.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.1 :release_date: 2022-10-03 :changelog_url: https://github.com/logstash-plugins/logstash-codec-json/blob/v3.1.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -47,7 +47,7 @@ failure, the payload will be stored in the `message` field.   
[id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, 
`CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` diff --git a/docs/plugins/codecs/json_lines.asciidoc b/docs/plugins/codecs/json_lines.asciidoc index 0cb8f8f97..a370c4bc4 100644 --- a/docs/plugins/codecs/json_lines.asciidoc +++ b/docs/plugins/codecs/json_lines.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.2 :release_date: 2024-09-06 :changelog_url: https://github.com/logstash-plugins/logstash-codec-json_lines/blob/v3.2.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -45,7 +45,7 @@ Therefore this codec cannot work with line oriented inputs.   [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, 
`CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -69,7 +69,7 @@ For nxlog users, you'll want to set this to `CP1252` Maximum number of bytes for a single line before stop processing. [id="plugins-{type}s-{plugin}-delimiter"] -===== `delimiter` +===== `delimiter` * Value type is <> * Default value is `"\n"` @@ -107,4 +107,3 @@ For example, if you want data to be put under the `document` field: } } } - diff --git a/docs/plugins/codecs/line.asciidoc b/docs/plugins/codecs/line.asciidoc index bae9ef41e..fd0df584e 100644 --- a/docs/plugins/codecs/line.asciidoc +++ b/docs/plugins/codecs/line.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.1 :release_date: 2021-07-15 :changelog_url: https://github.com/logstash-plugins/logstash-codec-line/blob/v3.1.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -30,8 +30,8 @@ Encoding behavior:: Each event is emitted with a trailing newline. [id="plugins-{type}s-{plugin}-ecs"] ==== Compatibility with the Elastic Common Schema (ECS) -This plugin is compatible with the {ecs-ref}[Elastic Common Schema (ECS)]. -No additional configuration is required. +This plugin is compatible with the {ecs-ref}[Elastic Common Schema (ECS)]. +No additional configuration is required. [id="plugins-{type}s-{plugin}-options"] ==== Line codec configuration options @@ -47,7 +47,7 @@ No additional configuration is required.   [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, 
`Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -61,7 +61,7 @@ or in another character set other than `UTF-8`. This only affects "plain" format logs since json is `UTF-8` already. [id="plugins-{type}s-{plugin}-delimiter"] -===== `delimiter` +===== `delimiter` * Value type is <> * Default value is `"\n"` @@ -69,11 +69,9 @@ This only affects "plain" format logs since json is `UTF-8` already. Change the delimiter that separates lines [id="plugins-{type}s-{plugin}-format"] -===== `format` +===== `format` * Value type is <> * There is no default value for this setting. Set the desired text format for encoding. - - diff --git a/docs/plugins/codecs/msgpack.asciidoc b/docs/plugins/codecs/msgpack.asciidoc index 45989c04b..db100e832 100644 --- a/docs/plugins/codecs/msgpack.asciidoc +++ b/docs/plugins/codecs/msgpack.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.0 :release_date: 2021-08-09 :changelog_url: https://github.com/logstash-plugins/logstash-codec-msgpack/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -36,7 +36,7 @@ This codec reads and produces MessagePack encoded content.   [id="plugins-{type}s-{plugin}-format"] -===== `format` +===== `format` * Value type is <> * There is no default value for this setting. @@ -60,6 +60,3 @@ For example, if you want data to be put under the `document` field: } } } - - - diff --git a/docs/plugins/codecs/multiline.asciidoc b/docs/plugins/codecs/multiline.asciidoc index a935424ed..4b7ed3ebd 100644 --- a/docs/plugins/codecs/multiline.asciidoc +++ b/docs/plugins/codecs/multiline.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.2 :release_date: 2024-04-25 :changelog_url: https://github.com/logstash-plugins/logstash-codec-multiline/blob/v3.1.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -28,7 +28,7 @@ IMPORTANT: If you are using a Logstash input plugin that supports multiple hosts, such as the {logstash-ref}/plugins-inputs-beats.html[beats input plugin], you should not use the multiline codec to handle multiline events. Doing so may result in the mixing of streams and corrupted event data. In this situation, you need to -handle multiline events before sending the event data to Logstash. +handle multiline events before sending the event data to Logstash. The original goal of this codec was to allow joining of multiline messages from files into a single event. For example, joining Java exception and @@ -123,7 +123,7 @@ following line.   
[id="plugins-{type}s-{plugin}-auto_flush_interval"] -===== `auto_flush_interval` +===== `auto_flush_interval` * Value type is <> * There is no default value for this setting. @@ -133,7 +133,7 @@ matching new line is seen or there has been no new data appended for this many seconds. No default. If unset, no auto_flush. Units: seconds [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, 
`ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -160,7 +160,7 @@ This only affects "plain" format logs since JSON is `UTF-8` already. Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. [id="plugins-{type}s-{plugin}-max_bytes"] -===== `max_bytes` +===== `max_bytes` * Value type is <> * Default value is `"10 MiB"` @@ -171,7 +171,7 @@ multiline events after reaching a number of bytes, it is used in combination max_lines. [id="plugins-{type}s-{plugin}-max_lines"] -===== `max_lines` +===== `max_lines` * Value type is <> * Default value is `500` @@ -182,7 +182,7 @@ multiline events after reaching a number of lines, it is used in combination max_bytes. [id="plugins-{type}s-{plugin}-multiline_tag"] -===== `multiline_tag` +===== `multiline_tag` * Value type is <> * Default value is `"multiline"` @@ -191,7 +191,7 @@ Tag multiline events with a given tag. This tag will only be added to events that actually have multiple lines in them. [id="plugins-{type}s-{plugin}-negate"] -===== `negate` +===== `negate` * Value type is <> * Default value is `false` @@ -199,7 +199,7 @@ to events that actually have multiple lines in them. Negate the regexp pattern ('if not matched'). [id="plugins-{type}s-{plugin}-pattern"] -===== `pattern` +===== `pattern` * This is a required setting. * Value type is <> @@ -208,7 +208,7 @@ Negate the regexp pattern ('if not matched'). The regular expression to match. 
[id="plugins-{type}s-{plugin}-patterns_dir"] -===== `patterns_dir` +===== `patterns_dir` * Value type is <> * Default value is `[]` @@ -226,12 +226,10 @@ For example: NUMBER \d+ [id="plugins-{type}s-{plugin}-what"] -===== `what` +===== `what` * This is a required setting. * Value can be any of: `previous`, `next` * There is no default value for this setting. If the pattern matched, does event belong to the next or previous event? - - diff --git a/docs/plugins/codecs/netflow.asciidoc b/docs/plugins/codecs/netflow.asciidoc index 48a408bae..cdc35efb6 100644 --- a/docs/plugins/codecs/netflow.asciidoc +++ b/docs/plugins/codecs/netflow.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.3.2 :release_date: 2023-12-22 :changelog_url: https://github.com/logstash-plugins/logstash-codec-netflow/blob/v4.3.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -38,10 +38,10 @@ The following Netflow/IPFIX exporters have been seen and tested with the most re |Netflow exporter | v5 | v9 | IPFIX | Remarks |Barracuda Firewall | | | y | With support for Extended Uniflow |Cisco ACI | | y | | -|Cisco ASA | | y | | +|Cisco ASA | | y | | |Cisco ASR 1k | | | N | Fails because of duplicate fields -|Cisco ASR 9k | | y | | -|Cisco IOS 12.x | | y | | +|Cisco ASR 9k | | y | | +|Cisco IOS 12.x | | y | | |Cisco ISR w/ HSL | | N | | Fails because of duplicate fields, see: https://github.com/logstash-plugins/logstash-codec-netflow/issues/93 |Cisco WLC | | y | | |Citrix Netscaler | | | y | Still some unknown fields, labeled netscalerUnknown @@ -118,7 +118,7 @@ To mitigate dropped packets, make sure to increase the Linux kernel receive buff   [id="plugins-{type}s-{plugin}-cache_save_path"] -===== `cache_save_path` +===== `cache_save_path` * Value type is <> * There is no default value for this setting. 
@@ -134,7 +134,7 @@ Template caches are saved as: * <>/ipfix_templates.cache for IPFIX templates. [id="plugins-{type}s-{plugin}-cache_ttl"] -===== `cache_ttl` +===== `cache_ttl` * Value type is <> * Default value is `4000` @@ -142,7 +142,7 @@ Template caches are saved as: Netflow v9/v10 template cache TTL (seconds) [id="plugins-{type}s-{plugin}-include_flowset_id"] -===== `include_flowset_id` +===== `include_flowset_id` * Value type is <> * Default value is `false` @@ -152,7 +152,7 @@ Setting to true will include the flowset_id in events Allows you to work with sequences, for instance with the aggregate filter [id="plugins-{type}s-{plugin}-ipfix_definitions"] -===== `ipfix_definitions` +===== `ipfix_definitions` * Value type is <> * There is no default value for this setting. @@ -177,7 +177,7 @@ There is an implicit PEN 0 for the standard fields. See for the base set. [id="plugins-{type}s-{plugin}-netflow_definitions"] -===== `netflow_definitions` +===== `netflow_definitions` * Value type is <> * There is no default value for this setting. @@ -201,7 +201,7 @@ id: See for the base set. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"netflow"` @@ -209,11 +209,9 @@ See > * Default value is `[5, 9, 10]` Specify which Netflow versions you will accept. - - diff --git a/docs/plugins/codecs/nmap.asciidoc b/docs/plugins/codecs/nmap.asciidoc index b4add72f9..5baece9f7 100644 --- a/docs/plugins/codecs/nmap.asciidoc +++ b/docs/plugins/codecs/nmap.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v0.0.22 :release_date: 2022-11-16 :changelog_url: https://github.com/logstash-plugins/logstash-codec-nmap/blob/v0.0.22/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -21,7 +21,7 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description -This codec is used to parse https://nmap.org/[nmap] output data which is serialized in XML format. Nmap ("Network Mapper") is a free and open source utility for network discovery and security auditing. +This codec is used to parse https://nmap.org/[nmap] output data which is serialized in XML format. Nmap ("Network Mapper") is a free and open source utility for network discovery and security auditing. For more information on nmap, see https://nmap.org/. This codec can only be used for decoding data. @@ -48,7 +48,7 @@ Event types are listed below   [id="plugins-{type}s-{plugin}-emit_hosts"] -===== `emit_hosts` +===== `emit_hosts` * Value type is <> * Default value is `true` @@ -56,7 +56,7 @@ Event types are listed below Emit all host data as a nested document (including ports + traceroutes) with the type 'nmap_fullscan' [id="plugins-{type}s-{plugin}-emit_ports"] -===== `emit_ports` +===== `emit_ports` * Value type is <> * Default value is `true` @@ -64,7 +64,7 @@ Emit all host data as a nested document (including ports + traceroutes) with the Emit each port as a separate document with type 'nmap_port' [id="plugins-{type}s-{plugin}-emit_scan_metadata"] -===== `emit_scan_metadata` +===== `emit_scan_metadata` * Value type is <> * Default value is `true` @@ -72,11 +72,9 @@ Emit each port as a separate document with type 'nmap_port' Emit scan metadata [id="plugins-{type}s-{plugin}-emit_traceroute_links"] -===== `emit_traceroute_links` +===== `emit_traceroute_links` * Value type is <> * Default value is `true` Emit each hop_tuple of the traceroute with type 'nmap_traceroute_link' - - diff --git a/docs/plugins/codecs/plain.asciidoc b/docs/plugins/codecs/plain.asciidoc index d7318f8f4..4b2754843 100644 --- a/docs/plugins/codecs/plain.asciidoc +++ b/docs/plugins/codecs/plain.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.0 :release_date: 2021-07-27 :changelog_url: https://github.com/logstash-plugins/logstash-codec-plain/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -40,7 +40,7 @@ framing in their transport protocol (such as zeromq, rabbitmq, redis, etc).   [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, 
`ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -67,7 +67,7 @@ This only affects "plain" format logs since json is `UTF-8` already. Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. [id="plugins-{type}s-{plugin}-format"] -===== `format` +===== `format` * Value type is <> * There is no default value for this setting. @@ -76,5 +76,3 @@ Set the message you which to emit for each event. This supports `sprintf` strings. This setting only affects outputs (encoding of events). - - diff --git a/docs/plugins/codecs/pretty.asciidoc b/docs/plugins/codecs/pretty.asciidoc index 852a7dbec..1188d30e3 100644 --- a/docs/plugins/codecs/pretty.asciidoc +++ b/docs/plugins/codecs/pretty.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.1 :release_date: 2017-08-15 :changelog_url: https://github.com/logstash-plugins/logstash-codec-pretty/blob/v1.0.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -37,11 +37,9 @@ the Ruby Awesome Print library.   [id="plugins-{type}s-{plugin}-metadata"] -===== `metadata` +===== `metadata` * Value type is <> * Default value is `false` Should the event's metadata be included in the output? 
- - diff --git a/docs/plugins/codecs/protobuf.asciidoc b/docs/plugins/codecs/protobuf.asciidoc index 60dd4507b..02c4d6596 100644 --- a/docs/plugins/codecs/protobuf.asciidoc +++ b/docs/plugins/codecs/protobuf.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.3.0 :release_date: 2023-09-20 :changelog_url: https://github.com/logstash-plugins/logstash-codec-protobuf/blob/v1.3.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -239,4 +239,3 @@ Example values: for the protobuf definition the field `[@metadata][pb_oneof][horse_type]` will be set to either `pegasus` or `unicorn`. Available only for protobuf version 3. - diff --git a/docs/plugins/codecs/rubydebug.asciidoc b/docs/plugins/codecs/rubydebug.asciidoc index ffc7d71f0..9eedc5ede 100644 --- a/docs/plugins/codecs/rubydebug.asciidoc +++ b/docs/plugins/codecs/rubydebug.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2020-07-08 :changelog_url: https://github.com/logstash-plugins/logstash-codec-rubydebug/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -37,11 +37,9 @@ the Ruby Amazing Print library.   [id="plugins-{type}s-{plugin}-metadata"] -===== `metadata` +===== `metadata` * Value type is <> * Default value is `false` Should the event's metadata be included? - - diff --git a/docs/plugins/filters.asciidoc b/docs/plugins/filters.asciidoc index 693dddb7e..fb14dd8d9 100644 --- a/docs/plugins/filters.asciidoc +++ b/docs/plugins/filters.asciidoc @@ -1,7 +1,7 @@ [[filter-plugins]] == Filter plugins -A filter plugin performs intermediary processing on an event. 
Filters are often applied conditionally depending on the +A filter plugin performs intermediary processing on an event. Filters are often applied conditionally depending on the characteristics of the event. The following filter plugins are available below. For a list of Elastic supported plugins, please consult the https://www.elastic.co/support/matrix#show_logstash_plugins[Support Matrix]. @@ -129,7 +129,7 @@ include::filters/http.asciidoc[] include::filters/i18n.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/filters/java_uuid.asciidoc -include::../../../logstash/docs/static/core-plugins/filters/java_uuid.asciidoc[] +include::./static/core-plugins/filters/java_uuid.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-integration-jdbc/edit/main/docs/filter-jdbc_static.asciidoc include::filters/jdbc_static.asciidoc[] @@ -176,7 +176,7 @@ include::filters/split.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-filter-syslog_pri/edit/main/docs/index.asciidoc include::filters/syslog_pri.asciidoc[] -:edit_url: +:edit_url: include::filters/threats_classifier.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-filter-throttle/edit/main/docs/index.asciidoc @@ -200,11 +200,11 @@ include::filters/useragent.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-filter-uuid/edit/main/docs/index.asciidoc include::filters/uuid.asciidoc[] -:edit_url: +:edit_url: include::filters/wurfl_device_detection.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-filter-xml/edit/main/docs/index.asciidoc include::filters/xml.asciidoc[] -:edit_url: +:edit_url: diff --git a/docs/plugins/filters/age.asciidoc b/docs/plugins/filters/age.asciidoc index 77fb35639..228fd4d29 100644 --- a/docs/plugins/filters/age.asciidoc +++ b/docs/plugins/filters/age.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v1.0.3 :release_date: 2021-10-29 :changelog_url: https://github.com/logstash-plugins/logstash-filter-age/blob/v1.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -24,7 +24,7 @@ include::{include_path}/plugin_header.asciidoc[] A simple filter for calculating the age of an event. This filter calculates the age of an event by subtracting the event timestamp -from the current timestamp. +from the current timestamp. You can use this plugin with the {logstash-ref}/plugins-filters-drop.html[`drop` filter plugin] to drop Logstash events that are older than some threshold. @@ -55,7 +55,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"[@metadata][age]"` diff --git a/docs/plugins/filters/aggregate.asciidoc b/docs/plugins/filters/aggregate.asciidoc index 44faae466..b04e24d73 100644 --- a/docs/plugins/filters/aggregate.asciidoc +++ b/docs/plugins/filters/aggregate.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v2.10.0 :release_date: 2021-10-11 :changelog_url: https://github.com/logstash-plugins/logstash-filter-aggregate/blob/v2.10.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -409,7 +409,7 @@ Available variables are: `map`: aggregated map associated to `task_id`, containing key/value pairs. Data structure is a ruby http://ruby-doc.org/core-1.9.1/Hash.html[Hash] -`map_meta`: meta informations associated to aggregate map. It allows to set a custom `timeout` or `inactivity_timeout`. +`map_meta`: meta informations associated to aggregate map. It allows to set a custom `timeout` or `inactivity_timeout`. 
It allows also to get `creation_timestamp`, `lastevent_timestamp` and `task_id`. `new_event_block`: block used to emit new Logstash events. See the second example on how to use it. @@ -440,8 +440,8 @@ To create additional events during the code execution, to be emitted immediately } } -The parameter of the function `new_event_block.call` must be of type `LogStash::Event`. -To create such an object, the constructor of the same class can be used: `LogStash::Event.new()`. +The parameter of the function `new_event_block.call` must be of type `LogStash::Event`. +To create such an object, the constructor of the same class can be used: `LogStash::Event.new()`. `LogStash::Event.new()` can receive a parameter of type ruby http://ruby-doc.org/core-1.9.1/Hash.html[Hash] to initialize the new event fields. @@ -598,11 +598,11 @@ Example: By default, timeout is computed using system time, where Logstash is running. -When this option is set, timeout is computed using event timestamp field indicated in this option. -It means that when a first event arrives on aggregate filter and induces a map creation, map creation time will be equal to this event timestamp. +When this option is set, timeout is computed using event timestamp field indicated in this option. +It means that when a first event arrives on aggregate filter and induces a map creation, map creation time will be equal to this event timestamp. Then, each time a new event arrives on aggregate filter, event timestamp is compared to map creation time to check if timeout happened. -This option is particularly useful when processing old logs with option `push_map_as_event_on_timeout => true`. +This option is particularly useful when processing old logs with option `push_map_as_event_on_timeout => true`. It lets to generate aggregated events based on timeout on old logs, where system time is inappropriate. Warning : so that this option works fine, it must be set on first aggregate filter. 
diff --git a/docs/plugins/filters/alter.asciidoc b/docs/plugins/filters/alter.asciidoc index f4b490b6e..3d0ef2700 100644 --- a/docs/plugins/filters/alter.asciidoc +++ b/docs/plugins/filters/alter.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-alter/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -21,8 +21,8 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description -The alter filter allows you to do general alterations to fields -that are not included in the normal mutate filter. +The alter filter allows you to do general alterations to fields +that are not included in the normal mutate filter. NOTE: The functionality provided by this plugin is likely to @@ -47,7 +47,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-coalesce"] -===== `coalesce` +===== `coalesce` * Value type is <> * There is no default value for this setting. @@ -65,7 +65,7 @@ Example: } [id="plugins-{type}s-{plugin}-condrewrite"] -===== `condrewrite` +===== `condrewrite` * Value type is <> * There is no default value for this setting. @@ -77,7 +77,7 @@ Example: [source,ruby] filter { alter { - condrewrite => [ + condrewrite => [ "field_name", "expected_value", "new_value", "field_name2", "expected_value2", "new_value2", .... @@ -86,7 +86,7 @@ Example: } [id="plugins-{type}s-{plugin}-condrewriteother"] -===== `condrewriteother` +===== `condrewriteother` * Value type is <> * There is no default value for this setting. @@ -98,7 +98,7 @@ Example: [source,ruby] filter { alter { - condrewriteother => [ + condrewriteother => [ "field_name", "expected_value", "field_name_to_change", "value", "field_name2", "expected_value2", "field_name_to_change2", "value2", .... 
diff --git a/docs/plugins/filters/bytes.asciidoc b/docs/plugins/filters/bytes.asciidoc index 2cd42c5c2..77840d9f9 100644 --- a/docs/plugins/filters/bytes.asciidoc +++ b/docs/plugins/filters/bytes.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.3 :release_date: 2020-08-18 :changelog_url: https://github.com/logstash-plugins/logstash-filter-bytes/blob/v1.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -24,7 +24,7 @@ include::{include_path}/plugin_header.asciidoc[] Parse string representations of computer storage sizes, such as "123 MB" or "5.6gb", into their numeric value in bytes. -This plugin understands: +This plugin understands: - bytes ("B") - kilobytes ("KB" or "kB") @@ -83,7 +83,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `message` @@ -91,14 +91,14 @@ filter plugins. Name of the source field that contains the storage size [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> Name of the target field that will contain the storage size in bytes [id="plugins-{type}s-{plugin}-conversion_method"] -===== `conversion_method` +===== `conversion_method` * Value type is <> * Value can be any of: `binary`, `metric` @@ -107,7 +107,7 @@ Name of the target field that will contain the storage size in bytes Which conversion method to use when converting to bytes. `binary` uses `1K = 1024B`. `metric` uses `1K = 1000B`. 
[id="plugins-{type}s-{plugin}-decimal_separator"] -===== `source` +===== `source` * Value type is <> * Default value is `.` diff --git a/docs/plugins/filters/checksum.asciidoc b/docs/plugins/filters/checksum.asciidoc index 038b3759f..2bd46c022 100644 --- a/docs/plugins/filters/checksum.asciidoc +++ b/docs/plugins/filters/checksum.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2017-08-15 :changelog_url: https://github.com/logstash-plugins/logstash-filter-checksum/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -46,7 +46,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-algorithm"] -===== `algorithm` +===== `algorithm` * Value can be any of: `md5`, `sha`, `sha1`, `sha256`, `sha384` * Default value is `"sha256"` @@ -54,7 +54,7 @@ filter plugins. [id="plugins-{type}s-{plugin}-keys"] -===== `keys` +===== `keys` * Value type is <> * Default value is `["message", "@timestamp", "type"]` diff --git a/docs/plugins/filters/cidr.asciidoc b/docs/plugins/filters/cidr.asciidoc index 4218172c3..0a22f4b61 100644 --- a/docs/plugins/filters/cidr.asciidoc +++ b/docs/plugins/filters/cidr.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.3 :release_date: 2019-09-18 :changelog_url: https://github.com/logstash-plugins/logstash-filter-cidr/blob/v3.1.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -89,7 +89,7 @@ The full path of the external file containing the networks the filter should che Networks are separated by a separator character defined in `separator`. 
[source,ruby] 192.168.1.0/24 - 192.167.0.0/16 + 192.167.0.0/16 NOTE: It is an error to specify both `network` and `network_path`. [id="plugins-{type}s-{plugin}-refresh_interval"] diff --git a/docs/plugins/filters/cipher.asciidoc b/docs/plugins/filters/cipher.asciidoc index a8e991cff..7809e018a 100644 --- a/docs/plugins/filters/cipher.asciidoc +++ b/docs/plugins/filters/cipher.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.3 :release_date: 2022-06-21 :changelog_url: https://github.com/logstash-plugins/logstash-filter-cipher/blob/v4.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -55,7 +55,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-algorithm"] -===== `algorithm` +===== `algorithm` * This is a required setting. * Value type is <> @@ -69,7 +69,7 @@ A list of supported algorithms depends on the versions of Logstash, JRuby, and J bin/ruby -ropenssl -e 'puts OpenSSL::Cipher.ciphers' [id="plugins-{type}s-{plugin}-base64"] -===== `base64` +===== `base64` * Value type is <> * Default value is `true` @@ -79,7 +79,7 @@ A list of supported algorithms depends on the versions of Logstash, JRuby, and J [id="plugins-{type}s-{plugin}-cipher_padding"] -===== `cipher_padding` +===== `cipher_padding` * Value type is <> ** `0`: means `false` @@ -99,7 +99,7 @@ fail to encrypt any input plaintext that doesn't strictly adhere to the filter { cipher { cipher_padding => 0 }} [id="plugins-{type}s-{plugin}-iv_random_length"] -===== `iv_random_length` +===== `iv_random_length` * Value type is <> * There is no default value for this setting. @@ -122,7 +122,7 @@ is used. 
AES Algorithms generally use a 16-byte IV: filter { cipher { iv_random_length => 16 }} [id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * Value type is <> * There is no default value for this setting. @@ -133,12 +133,12 @@ The key to use for encryption and decryption operations. ============ Please read the https://github.com/jruby/jruby/wiki/UnlimitedStrengthCrypto[UnlimitedStrengthCrypto topic] in the https://github.com/jruby/jruby[jruby] github repo if you see a runtime error that resembles: -`java.security.InvalidKeyException: Illegal key size: possibly you need to install +`java.security.InvalidKeyException: Illegal key size: possibly you need to install Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files for your JRE` ============ [id="plugins-{type}s-{plugin}-key_pad"] -===== `key_pad` +===== `key_pad` * Value type is <> * Default value is `"\u0000"` @@ -146,7 +146,7 @@ Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files f The character used to pad the key to the required <>. [id="plugins-{type}s-{plugin}-key_size"] -===== `key_size` +===== `key_size` * Value type is <> * Default value is `16` @@ -155,13 +155,13 @@ The cipher's required key size, which depends on which <> is specified with a shorter value, it will be padded with <>. -Example, for AES-128, we must have 16 char long key. AES-256 = 32 chars +Example, for AES-128, we must have 16 char long key. AES-256 = 32 chars [source,ruby] filter { cipher { key_size => 16 } [id="plugins-{type}s-{plugin}-max_cipher_reuse"] -===== `max_cipher_reuse` +===== `max_cipher_reuse` * Value type is <> * Default value is `1` @@ -180,7 +180,7 @@ instance and max_cipher_reuse = 1 by default filter { cipher { max_cipher_reuse => 1000 }} [id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * This is a required setting. 
* Value type is <> @@ -189,7 +189,7 @@ instance and max_cipher_reuse = 1 by default * There is no default value for this setting. [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -204,7 +204,7 @@ Example, to use the `message` field (default) : filter { cipher { source => "message" } } [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"message"` diff --git a/docs/plugins/filters/clone.asciidoc b/docs/plugins/filters/clone.asciidoc index d657e6eeb..5156e7cb0 100644 --- a/docs/plugins/filters/clone.asciidoc +++ b/docs/plugins/filters/clone.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.2.0 :release_date: 2021-11-10 :changelog_url: https://github.com/logstash-plugins/logstash-filter-clone/blob/v4.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -24,8 +24,8 @@ include::{include_path}/plugin_header.asciidoc[] The clone filter is for duplicating events. A clone will be created for each type in the clone list. The original event is left unchanged and a `type` field is added to the clone. -Created events are inserted into the pipeline -as normal events and will be processed by the remaining pipeline configuration +Created events are inserted into the pipeline +as normal events and will be processed by the remaining pipeline configuration starting from the filter that generated them (i.e. this plugin). ==== Event Metadata and the Elastic Common Schema (ECS) @@ -59,7 +59,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-clones"] -===== `clones` +===== `clones` * This is a required setting. 
* Value type is <> diff --git a/docs/plugins/filters/csv.asciidoc b/docs/plugins/filters/csv.asciidoc index 55e3dbf81..050ce3d46 100644 --- a/docs/plugins/filters/csv.asciidoc +++ b/docs/plugins/filters/csv.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.1 :release_date: 2021-06-08 :changelog_url: https://github.com/logstash-plugins/logstash-filter-csv/blob/v3.1.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -59,7 +59,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-autodetect_column_names"] -===== `autodetect_column_names` +===== `autodetect_column_names` * Value type is <> * Default value is `false` @@ -70,7 +70,7 @@ Defaults to false. Logstash pipeline workers must be set to `1` for this option to work. [id="plugins-{type}s-{plugin}-autogenerate_column_names"] -===== `autogenerate_column_names` +===== `autogenerate_column_names` * Value type is <> * Default value is `true` @@ -79,7 +79,7 @@ Define whether column names should autogenerated or not. Defaults to true. If set to false, columns not having a header specified will not be parsed. [id="plugins-{type}s-{plugin}-columns"] -===== `columns` +===== `columns` * Value type is <> * Default value is `[]` @@ -92,7 +92,7 @@ in the data than specified in this column list, extra columns will be auto-numbe (e.g. "user_defined_1", "user_defined_2", "column3", "column4", etc.) [id="plugins-{type}s-{plugin}-convert"] -===== `convert` +===== `convert` * Value type is <> * Default value is `{}` @@ -125,7 +125,7 @@ Controls this plugin's compatibility with the See <> for detailed information. [id="plugins-{type}s-{plugin}-quote_char"] -===== `quote_char` +===== `quote_char` * Value type is <> * Default value is `"\""` @@ -135,7 +135,7 @@ the default is a double quote `"`. Optional. 
[id="plugins-{type}s-{plugin}-separator"] -===== `separator` +===== `separator` * Value type is <> * Default value is `","` @@ -146,7 +146,7 @@ to set the value to the actual tab character and not `\t`. Optional. [id="plugins-{type}s-{plugin}-skip_empty_columns"] -===== `skip_empty_columns` +===== `skip_empty_columns` * Value type is <> * Default value is `false` @@ -155,7 +155,7 @@ Define whether empty columns should be skipped. Defaults to false. If set to true, columns containing no value will not get set. [id="plugins-{type}s-{plugin}-skip_empty_rows"] -===== `skip_empty_rows` +===== `skip_empty_rows` * Value type is <> * Default value is `false` @@ -182,7 +182,7 @@ subsequent values that match what was autodetected will be skipped. Logstash pipeline workers must be set to `1` for this option to work. [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -191,7 +191,7 @@ The CSV data in the value of the `source` field will be expanded into a data structure. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/filters/date.asciidoc b/docs/plugins/filters/date.asciidoc index 1a1038d7a..3bb4d5912 100644 --- a/docs/plugins/filters/date.asciidoc +++ b/docs/plugins/filters/date.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.15 :release_date: 2022-06-29 :changelog_url: https://github.com/logstash-plugins/logstash-filter-date/blob/v3.1.15/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -60,7 +60,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-locale"] -===== `locale` +===== `locale` * Value type is <> * There is no default value for this setting. 
@@ -75,7 +75,7 @@ If not specified, the platform default will be used but for non-english platform an english parser will also be used as a fallback mechanism. [id="plugins-{type}s-{plugin}-match"] -===== `match` +===== `match` * Value type is <> * Default value is `[]` @@ -178,7 +178,7 @@ Other less common date units, such as era (G), century \(C), am/pm (a), and # mo http://www.joda.org/joda-time/key_format.html[joda-time documentation]. [id="plugins-{type}s-{plugin}-tag_on_failure"] -===== `tag_on_failure` +===== `tag_on_failure` * Value type is <> * Default value is `["_dateparsefailure"]` @@ -187,7 +187,7 @@ Append values to the `tags` field when there has been no successful match [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"@timestamp"` @@ -196,7 +196,7 @@ Store the matching timestamp into the given target field. If not provided, default to updating the `@timestamp` field of the event. [id="plugins-{type}s-{plugin}-timezone"] -===== `timezone` +===== `timezone` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/filters/de_dot.asciidoc b/docs/plugins/filters/de_dot.asciidoc index 6b36f7d37..b056b6765 100644 --- a/docs/plugins/filters/de_dot.asciidoc +++ b/docs/plugins/filters/de_dot.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.1.0 :release_date: 2024-05-27 :changelog_url: https://github.com/logstash-plugins/logstash-filter-de_dot/blob/v1.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -48,7 +48,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-fields"] -===== `fields` +===== `fields` * Value type is <> * There is no default value for this setting. 
@@ -61,7 +61,7 @@ will result in "field_suffix" and nested or sub field ["foo"]["bar_suffix"] WARNING: This is an expensive operation. [id="plugins-{type}s-{plugin}-nested"] -===== `nested` +===== `nested` * Value type is <> * Default value is `false` @@ -79,7 +79,7 @@ If `recursive` is _true_, then recursively check sub-fields. It is recommended y only use this when setting specific fields, as this is an expensive operation. [id="plugins-{type}s-{plugin}-separator"] -===== `separator` +===== `separator` * Value type is <> * Default value is `"_"` diff --git a/docs/plugins/filters/dissect.asciidoc b/docs/plugins/filters/dissect.asciidoc index c3e105843..b364ea276 100644 --- a/docs/plugins/filters/dissect.asciidoc +++ b/docs/plugins/filters/dissect.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.2.5 :release_date: 2022-02-14 :changelog_url: https://github.com/logstash-plugins/logstash-filter-dissect/blob/v1.2.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -21,12 +21,12 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description -The Dissect filter plugin tokenizes incoming strings using defined patterns. +The Dissect filter plugin tokenizes incoming strings using defined patterns. It extracts unstructured event data into fields using delimiters. This process is called tokenization. Unlike a regular split operation where one delimiter is applied to the whole -string, the Dissect operation applies a set of delimiters to a string value. +string, the Dissect operation applies a set of delimiters to a string value. NOTE: All keys must be found and extracted for tokenization to be successful. If one or more keys cannot be found, an error occurs and the original event is @@ -34,7 +34,7 @@ not modified. ===== Dissect or Grok? Or both? 
-Dissect differs from Grok in that it does not use regular expressions and is faster. +Dissect differs from Grok in that it does not use regular expressions and is faster. Dissect works well when data is reliably repeated. Grok is a better choice when the structure of your text varies from line to line. @@ -45,8 +45,8 @@ the remaining field values with more regex predictability. ===== Terminology -*dissect pattern* - the set of fields and delimiters describing the textual -format. Also known as a dissection. +*dissect pattern* - the set of fields and delimiters describing the textual +format. Also known as a dissection. The dissection is described using a set of `%{}` sections: `%{a} - %{b} - %{c}` @@ -55,16 +55,16 @@ The dissection is described using a set of `%{}` sections: *delimiter* - the text between `}` and the next `%{` characters. Any set of characters other than `%{`, `'not }'`, or `}` is a delimiter. -*key* - the text between the `%{` and `}`, exclusive of the `?`, `+`, `&` prefixes -and the ordinal suffix. +*key* - the text between the `%{` and `}`, exclusive of the `?`, `+`, `&` prefixes +and the ordinal suffix. Examples: -`%{?aaa}` - the key is `aaa` +`%{?aaa}` - the key is `aaa` -`%{+bbb/3}` - the key is `bbb` +`%{+bbb/3}` - the key is `bbb` -`%{&ccc}` - the key is `ccc` +`%{&ccc}` - the key is `ccc` NOTE: Using the `.` (dot) as `key` will generate fields with `.` in the field name. If you want to get nested fields, use the brackets notation such as `%{[fieldname][subfieldname]}`. @@ -87,7 +87,7 @@ The config might look like this: When a string is dissected from left to right, text is captured up to the first delimiter. The captured text is stored in the first field. This is repeated for each field/# delimiter pair until the last delimiter is reached. -Then *the remaining text is stored in the last field*. +Then *the remaining text is stored in the last field*. ==== Notations @@ -95,9 +95,9 @@ Then *the remaining text is stored in the last field*. 
<> -<> +<> -<> +<> ===== Notes and usage guidelines @@ -105,21 +105,21 @@ Then *the remaining text is stored in the last field*. * Use a Skip field if you do not want the indirection key/value stored. + -Example: +Example: + `%{?a}: %{&a}` applied to text `google: 77.98` will build a key/value of `google => 77.98`. -* Append and indirect cannot be combined. +* Append and indirect cannot be combined. + Examples: + -`%{+&something}` will add a value to the `&something` key (probably not the intended outcome). +`%{+&something}` will add a value to the `&something` key (probably not the intended outcome). + -`%{&+something}` will add a value to the `+something` key (again probably unintended). +`%{&+something}` will add a value to the `+something` key (again probably unintended). [id="plugins-{type}s-{plugin}-normal"] ===== Normal field notation -The found value is added to the Event using the key. +The found value is added to the Event using the key. A normal field has no prefix or suffix. Example: @@ -129,7 +129,7 @@ Example: [id="plugins-{type}s-{plugin}-skip"] ===== Skip field notation -The found value is stored internally, but is not added to the Event. +The found value is stored internally, but is not added to the Event. The key, if supplied, is prefixed with a `?`. Examples: @@ -140,43 +140,43 @@ Examples: [id="plugins-{type}s-{plugin}-append"] ===== Append field notation -If the value is the first field seen, it is stored. -Subsequent fields are appended to another value. +If the value is the first field seen, it is stored. +Subsequent fields are appended to another value. -The key is prefixed with a `+`. -The final value is stored in the Event using the key. +The key is prefixed with a `+`. +The final value is stored in the Event using the key. -NOTE: The delimiter found before the field is appended with the value. +NOTE: The delimiter found before the field is appended with the value. 
If no delimiter is found before the field, a single space character is used. Examples: -`%{+some_field}` is an append field. +`%{+some_field}` is an append field. `%{+some_field/2}` is an append field with an order modifier. **Order modifiers** -An order modifier, `/digits`, allows one to reorder the append sequence. +An order modifier, `/digits`, allows one to reorder the append sequence. -Example: +Example: -For text `1 2 3 go`, this `%{+a/2} %{+a/1} %{+a/4} %{+a/3}` will build a key/value of `a => 2 1 go 3`. +For text `1 2 3 go`, this `%{+a/2} %{+a/1} %{+a/4} %{+a/3}` will build a key/value of `a => 2 1 go 3`. -*Append fields* without an order modifier will append in declared order. +*Append fields* without an order modifier will append in declared order. Example: -For text `1 2 3 go`, this `%{a} %{b} %{+a}` will build two key/values of `a => 1 3 go, b => 2` +For text `1 2 3 go`, this `%{a} %{b} %{+a}` will build two key/values of `a => 1 3 go, b => 2` [id="plugins-{type}s-{plugin}-indirect"] ===== Indirect field notation -The found value is added to the Event using the found value of another field as the key. -The key is prefixed with a `&`. +The found value is added to the Event using the found value of another field as the key. +The key is prefixed with a `&`. Examples: -`%{&some_field}` is an indirect field where the key is indirectly sourced from the value of `some_field`. +`%{&some_field}` is an indirect field where the key is indirectly sourced from the value of `some_field`. For text `error: some_error, some_description`, this notation `error: %{?err}, %{&err}` will build a key/value of `some_error => some_description`. @@ -285,7 +285,7 @@ Here the `->` suffix moves to the `id` field because Dissect sees the padding as ==== Conditional processing -You probably want to use this filter inside an `if` block. +You probably want to use this filter inside an `if` block. 
This ensures that the event contains a field value with a suitable structure for the dissection. Example: @@ -327,9 +327,9 @@ filter plugins. * Value type is <> * Default value is `{}` -With this setting `int` and `float` datatype conversions can be specified. -These will be done after all `mapping` dissections have taken place. -Feel free to use this setting on its own without a `mapping` section. +With this setting `int` and `float` datatype conversions can be specified. +These will be done after all `mapping` dissections have taken place. +Feel free to use this setting on its own without a `mapping` section. *Example* diff --git a/docs/plugins/filters/dns.asciidoc b/docs/plugins/filters/dns.asciidoc index 323eb4df1..ee7fe76ae 100644 --- a/docs/plugins/filters/dns.asciidoc +++ b/docs/plugins/filters/dns.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.0 :release_date: 2023-01-26 :changelog_url: https://github.com/logstash-plugins/logstash-filter-dns/blob/v3.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -69,7 +69,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-action"] -===== `action` +===== `action` * Value can be any of: `append`, `replace` * Default value is `"append"` @@ -78,7 +78,7 @@ Determine what action to do: append or replace the values in the fields specified under `reverse` and `resolve`. [id="plugins-{type}s-{plugin}-failed_cache_size"] -===== `failed_cache_size` +===== `failed_cache_size` * Value type is <> * Default value is `0` (cache disabled) @@ -86,7 +86,7 @@ specified under `reverse` and `resolve`. 
cache size for failed requests [id="plugins-{type}s-{plugin}-failed_cache_ttl"] -===== `failed_cache_ttl` +===== `failed_cache_ttl` * Value type is <> * Default value is `5` @@ -94,7 +94,7 @@ cache size for failed requests how long to cache failed requests (in seconds) [id="plugins-{type}s-{plugin}-hit_cache_size"] -===== `hit_cache_size` +===== `hit_cache_size` * Value type is <> * Default value is `0` (cache disabled) @@ -102,7 +102,7 @@ how long to cache failed requests (in seconds) set the size of cache for successful requests [id="plugins-{type}s-{plugin}-hit_cache_ttl"] -===== `hit_cache_ttl` +===== `hit_cache_ttl` * Value type is <> * Default value is `60` @@ -110,7 +110,7 @@ set the size of cache for successful requests how long to cache successful requests (in seconds) [id="plugins-{type}s-{plugin}-hostsfile"] -===== `hostsfile` +===== `hostsfile` * Value type is <> * There is no default value for this setting. @@ -118,7 +118,7 @@ how long to cache successful requests (in seconds) Use custom hosts file(s). For example: `["/var/db/my_custom_hosts"]` [id="plugins-{type}s-{plugin}-max_retries"] -===== `max_retries` +===== `max_retries` * Value type is <> * Default value is `2` @@ -126,7 +126,7 @@ Use custom hosts file(s). For example: `["/var/db/my_custom_hosts"]` number of times to retry a failed resolve/reverse [id="plugins-{type}s-{plugin}-nameserver"] -===== `nameserver` +===== `nameserver` * Value type is <>, and is composed of: ** a required `address` key, whose value is either a <> or an <>, representing one or more nameserver ip addresses @@ -152,7 +152,7 @@ configure the resolver using the `nameserver`, `domain`, `search` and `ndots` directives in `/etc/resolv.conf`. [id="plugins-{type}s-{plugin}-resolve"] -===== `resolve` +===== `resolve` * Value type is <> * There is no default value for this setting. @@ -160,7 +160,7 @@ configure the resolver using the `nameserver`, `domain`, Forward resolve one or more fields. 
[id="plugins-{type}s-{plugin}-reverse"] -===== `reverse` +===== `reverse` * Value type is <> * There is no default value for this setting. @@ -168,7 +168,7 @@ Forward resolve one or more fields. Reverse resolve one or more fields. [id="plugins-{type}s-{plugin}-timeout"] -===== `timeout` +===== `timeout` * Value type is <> * Default value is `0.5` diff --git a/docs/plugins/filters/drop.asciidoc b/docs/plugins/filters/drop.asciidoc index b30399e2c..235b4bf91 100644 --- a/docs/plugins/filters/drop.asciidoc +++ b/docs/plugins/filters/drop.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-drop/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -53,7 +53,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-percentage"] -===== `percentage` +===== `percentage` * Value type is <> * Default value is `100` diff --git a/docs/plugins/filters/elapsed.asciidoc b/docs/plugins/filters/elapsed.asciidoc index 13a7eaff9..5cf7a698c 100644 --- a/docs/plugins/filters/elapsed.asciidoc +++ b/docs/plugins/filters/elapsed.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.1.0 :release_date: 2018-07-31 :changelog_url: https://github.com/logstash-plugins/logstash-filter-elapsed/blob/v4.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -116,7 +116,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-end_tag"] -===== `end_tag` +===== `end_tag` * This is a required setting. * Value type is <> @@ -125,7 +125,7 @@ filter plugins. 
The name of the tag identifying the "end event" [id="plugins-{type}s-{plugin}-new_event_on_match"] -===== `new_event_on_match` +===== `new_event_on_match` * Value type is <> * Default value is `false` @@ -135,7 +135,7 @@ If it's set to `false` (default value), the elapsed information are added to the "end event"; if it's set to `true` a new "match event" is created. [id="plugins-{type}s-{plugin}-start_tag"] -===== `start_tag` +===== `start_tag` * This is a required setting. * Value type is <> @@ -144,7 +144,7 @@ to the "end event"; if it's set to `true` a new "match event" is created. The name of the tag identifying the "start event" [id="plugins-{type}s-{plugin}-timeout"] -===== `timeout` +===== `timeout` * Value type is <> * Default value is `1800` @@ -154,7 +154,7 @@ The corresponding "start event" is discarded and an "expired event" is generated. The default value is 30 minutes (1800 seconds). [id="plugins-{type}s-{plugin}-unique_id_field"] -===== `unique_id_field` +===== `unique_id_field` * This is a required setting. * Value type is <> diff --git a/docs/plugins/filters/elastic_integration.asciidoc b/docs/plugins/filters/elastic_integration.asciidoc index 96a4ae633..11589f98b 100644 --- a/docs/plugins/filters/elastic_integration.asciidoc +++ b/docs/plugins/filters/elastic_integration.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v9.0.0.prerelease01 :release_date: 2025-02-05 :changelog_url: https://github.com/elastic/logstash-filter-elastic_integration/blob/v9.0.0.prerelease01/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -53,7 +53,7 @@ Events that _fail_ ingest pipeline processing will be tagged with `_ingest_pipel - This plugin requires Java 17 minimum with {ls} `8.x` versions and Java 21 minimum with {ls} `9.x` versions. 
- When you upgrade the {stack}, upgrade {ls} (or this plugin specifically) _before_ you upgrade {kib}. - (Note that this requirement is a departure from the typical {stack} https://www.elastic.co/guide/en/elastic-stack/current/installing-elastic-stack.html#install-order-elastic-stack[installation order].) + (Note that this requirement is a departure from the typical {stack} https://www.elastic.co/guide/en/elastic-stack/current/installing-elastic-stack.html#install-order-elastic-stack[installation order].) + The {es}-{ls}-{kib} installation order recommended here ensures the best experience with {agent}-managed pipelines, and embeds functionality from a version of {es} Ingest Node that is compatible with the plugin version (`major`.`minor`). @@ -61,13 +61,13 @@ The {es}-{ls}-{kib} installation order recommended here ensures the best experie ===== Using `filter-elastic_integration` with `output-elasticsearch` Elastic {integrations} are designed to work with {logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-data-streams[data streams] and {logstash-ref}/plugins-outputs-elasticsearch.html#_compatibility_with_the_elastic_common_schema_ecs[ECS-compatible] output. -Be sure that these features are enabled in the {logstash-ref}/plugins-outputs-elasticsearch.html[`output-elasticsearch`] plugin. +Be sure that these features are enabled in the {logstash-ref}/plugins-outputs-elasticsearch.html[`output-elasticsearch`] plugin. -* Set {logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-data_stream[`data-stream`] to `true`. + - (Check out {logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-data-streams[Data streams] for additional data streams settings.) +* Set {logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-data_stream[`data-stream`] to `true`. 
+ + (Check out {logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-data-streams[Data streams] for additional data streams settings.) * Set {logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-ecs_compatibility[`ecs-compatibility`] to `v1` or `v8`. -Check out the {logstash-ref}/plugins-outputs-elasticsearch.html[`output-elasticsearch` plugin] docs for additional settings. +Check out the {logstash-ref}/plugins-outputs-elasticsearch.html[`output-elasticsearch` plugin] docs for additional settings. [id="plugins-{type}s-{plugin}-minimum_configuration"] ==== Minimum configuration @@ -145,14 +145,14 @@ At the startup phase, this plugin confirms that current user has sufficient priv |======================================================================= -[NOTE] +[NOTE] -- This plugin cannot determine if an anonymous user has the required privileges when it connects to an {es} cluster that has security features disabled or when the user does not provide credentials. The plugin starts in an unsafe mode with a runtime error indicating that API permissions are insufficient, and prevents events from being processed by the ingest pipeline. -To avoid these issues, set up user authentication and ensure that security in {es} is enabled (default). +To avoid these issues, set up user authentication and ensure that security in {es} is enabled (default). -- - + [id="plugins-{type}s-{plugin}-supported_ingest_processors"] ==== Supported Ingest Processors diff --git a/docs/plugins/filters/elasticsearch.asciidoc b/docs/plugins/filters/elasticsearch.asciidoc index c54eabd05..8e57ec383 100644 --- a/docs/plugins/filters/elasticsearch.asciidoc +++ b/docs/plugins/filters/elasticsearch.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v4.1.0 :release_date: 2025-01-23 :changelog_url: https://github.com/logstash-plugins/logstash-filter-elasticsearch/blob/v4.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -172,7 +172,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-aggregation_fields"] -===== `aggregation_fields` +===== `aggregation_fields` * Value type is <> * Default value is `{}` @@ -243,7 +243,7 @@ Pass a set of key value pairs as the headers sent in each request to Elasticsear These custom headers will override any headers previously set by the plugin such as the User Agent or Authorization headers. [id="plugins-{type}s-{plugin}-docinfo_fields"] -===== `docinfo_fields` +===== `docinfo_fields` * Value type is <> * Default value is `{}` @@ -262,7 +262,7 @@ Example: } [id="plugins-{type}s-{plugin}-enable_sort"] -===== `enable_sort` +===== `enable_sort` * Value type is <> * Default value is `true` @@ -270,7 +270,7 @@ Example: Whether results should be sorted or not [id="plugins-{type}s-{plugin}-fields"] -===== `fields` +===== `fields` * Value type is <> * Default value is `{}` @@ -291,7 +291,7 @@ fields => { -------------------------------------------------- [id="plugins-{type}s-{plugin}-hosts"] -===== `hosts` +===== `hosts` * Value type is <> * Default value is `["localhost:9200"]` @@ -299,7 +299,7 @@ fields => { List of elasticsearch hosts to use for querying. [id="plugins-{type}s-{plugin}-index"] -===== `index` +===== `index` * Value type is <> * Default value is `""` @@ -308,7 +308,7 @@ Comma-delimited list of index names to search; use `_all` or empty string to per Field substitution (e.g. `index-name-%{date_field}`) is available [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. 
@@ -326,7 +326,7 @@ An empty string is treated as if proxy was not set, and is useful when using environment variables e.g. `proxy => '${LS_PROXY:}'`. [id="plugins-{type}s-{plugin}-query"] -===== `query` +===== `query` * Value type is <> * There is no default value for this setting. @@ -338,7 +338,7 @@ Use either `query` or `query_template`. [id="plugins-{type}s-{plugin}-query_template"] -===== `query_template` +===== `query_template` * Value type is <> * There is no default value for this setting. @@ -348,7 +348,7 @@ the {ref}/query-dsl.html[Elasticsearch query documentation]. Use either `query` or `query_template`. [id="plugins-{type}s-{plugin}-result_size"] -===== `result_size` +===== `result_size` * Value type is <> * Default value is `1` @@ -375,7 +375,7 @@ Which HTTP Status codes to consider for retries (in addition to connection error [id="plugins-{type}s-{plugin}-sort"] -===== `sort` +===== `sort` * Value type is <> * Default value is `"@timestamp:desc"` @@ -517,7 +517,7 @@ has a hostname or IP address that matches the names within the certificate. WARNING: Setting certificate verification to `none` disables many security benefits of SSL/TLS, which is very dangerous. For more information on disabling certificate verification please read https://www.cs.utexas.edu/~shmat/shmat_ccs12.pdf [id="plugins-{type}s-{plugin}-tag_on_failure"] -===== `tag_on_failure` +===== `tag_on_failure` * Value type is <> * Default value is `["_elasticsearch_lookup_failure"]` @@ -525,7 +525,7 @@ WARNING: Setting certificate verification to `none` disables many security benef Tags the event on failure to look up previous log event information. This can be used in later analysis. [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * There is no default value for this setting. 
diff --git a/docs/plugins/filters/emoji.asciidoc b/docs/plugins/filters/emoji.asciidoc index 4ada32c7c..c71b55fb8 100644 --- a/docs/plugins/filters/emoji.asciidoc +++ b/docs/plugins/filters/emoji.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.2 :release_date: 2017-08-15 :changelog_url: https://github.com/logstash-plugins/logstash-filter-emoji/blob/v1.0.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,7 +54,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-fallback"] -===== `fallback` +===== `fallback` * Value type is <> * There is no default value for this setting. @@ -75,7 +75,7 @@ This configuration can be dynamic and include parts of the event using the `%{field}` syntax. [id="plugins-{type}s-{plugin}-field"] -===== `field` +===== `field` * This is a required setting. * Value type is <> @@ -87,7 +87,7 @@ a match by the emoji filter (e.g. `severity`). If this field is an array, only the first value will be used. [id="plugins-{type}s-{plugin}-override"] -===== `override` +===== `override` * Value type is <> * Default value is `false` @@ -97,7 +97,7 @@ whether the filter should skip being rewritten as an emoji (default) or overwrite the target field value with the emoji value. [id="plugins-{type}s-{plugin}-sev_alert"] -===== `sev_alert` +===== `sev_alert` * Value type is <> * Default value is `"🚨"` @@ -105,7 +105,7 @@ overwrite the target field value with the emoji value. `sev_alert` selects the emoji/unicode character for Alert severity [id="plugins-{type}s-{plugin}-sev_critical"] -===== `sev_critical` +===== `sev_critical` * Value type is <> * Default value is `"🔥"` @@ -113,7 +113,7 @@ overwrite the target field value with the emoji value. 
`sev_critical` selects the emoji/unicode character for Critical severity [id="plugins-{type}s-{plugin}-sev_debug"] -===== `sev_debug` +===== `sev_debug` * Value type is <> * Default value is `"🐛"` @@ -121,7 +121,7 @@ overwrite the target field value with the emoji value. `sev_debug` selects the emoji/unicode character for Debug severity [id="plugins-{type}s-{plugin}-sev_emergency"] -===== `sev_emergency` +===== `sev_emergency` * Value type is <> * Default value is `"💥"` @@ -129,7 +129,7 @@ overwrite the target field value with the emoji value. `sev_emergency` selects the emoji/unicode character for Emergency severity [id="plugins-{type}s-{plugin}-sev_error"] -===== `sev_error` +===== `sev_error` * Value type is <> * Default value is `"❌"` @@ -137,7 +137,7 @@ overwrite the target field value with the emoji value. `sev_error` selects the emoji/unicode character for Error severity [id="plugins-{type}s-{plugin}-sev_info"] -===== `sev_info` +===== `sev_info` * Value type is <> * Default value is `"ℹ️"` @@ -145,7 +145,7 @@ overwrite the target field value with the emoji value. `sev_info` selects the emoji/unicode character for Informational severity [id="plugins-{type}s-{plugin}-sev_notice"] -===== `sev_notice` +===== `sev_notice` * Value type is <> * Default value is `"👀"` @@ -153,7 +153,7 @@ overwrite the target field value with the emoji value. `sev_notice` selects the emoji/unicode character for Notice severity [id="plugins-{type}s-{plugin}-sev_warning"] -===== `sev_warning` +===== `sev_warning` * Value type is <> * Default value is `"⚠️"` @@ -161,7 +161,7 @@ overwrite the target field value with the emoji value. 
`sev_warning` selects the emoji/unicode character for Warning severity [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"emoji"` diff --git a/docs/plugins/filters/environment.asciidoc b/docs/plugins/filters/environment.asciidoc index ca36aa398..bdc5e26a9 100644 --- a/docs/plugins/filters/environment.asciidoc +++ b/docs/plugins/filters/environment.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-environment/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -60,7 +60,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-add_metadata_from_env"] -===== `add_metadata_from_env` +===== `add_metadata_from_env` * Value type is <> * Default value is `{}` diff --git a/docs/plugins/filters/extractnumbers.asciidoc b/docs/plugins/filters/extractnumbers.asciidoc index 2ff6726fb..bca3c80e4 100644 --- a/docs/plugins/filters/extractnumbers.asciidoc +++ b/docs/plugins/filters/extractnumbers.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-extractnumbers/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -49,7 +49,7 @@ filter plugins.   
[id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` diff --git a/docs/plugins/filters/fingerprint.asciidoc b/docs/plugins/filters/fingerprint.asciidoc index 15551a5d5..a74f2dd0e 100644 --- a/docs/plugins/filters/fingerprint.asciidoc +++ b/docs/plugins/filters/fingerprint.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.4.4 :release_date: 2024-03-19 :changelog_url: https://github.com/logstash-plugins/logstash-filter-fingerprint/blob/v3.4.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -72,7 +72,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-base64encode"] -===== `base64encode` +===== `base64encode` * Value type is <> * Default value is `false` @@ -81,7 +81,7 @@ When set to `true`, the `SHA1`, `SHA256`, `SHA384`, `SHA512`, `MD5` and `MURMUR3 base64 encoded rather than hex encoded strings. [id="plugins-{type}s-{plugin}-concatenate_sources"] -===== `concatenate_sources` +===== `concatenate_sources` * Value type is <> * Default value is `false` @@ -89,7 +89,7 @@ base64 encoded rather than hex encoded strings. When set to `true` and `method` isn't `UUID` or `PUNCTUATION`, the plugin concatenates the names and values of all fields given in the `source` option into one string (like the old checksum filter) before -doing the fingerprint computation. +doing the fingerprint computation. If `false` and multiple source fields are given, the target field will be single fingerprint of the last source field. @@ -110,12 +110,12 @@ The output is: [source,ruby] ----- -"fingerprint" => "6b6390a4416131f82b6ffb509f6e779e5dd9630f". +"fingerprint" => "6b6390a4416131f82b6ffb509f6e779e5dd9630f". 
----- **Example: `concatenate_sources`=false with array** -If the last source field is an array, you get an array of fingerprints. +If the last source field is an array, you get an array of fingerprints. In this example, "siblings" is an array ["big brother", "little sister", "little brother"]. @@ -138,16 +138,16 @@ The output is: ----- [id="plugins-{type}s-{plugin}-concatenate_all_fields"] -===== `concatenate_all_fields` +===== `concatenate_all_fields` * Value type is <> * Default value is `false` When set to `true` and `method` isn't `UUID` or `PUNCTUATION`, the -plugin concatenates the names and values of all fields of the event -into one string (like the old checksum filter) before doing the -fingerprint computation. If `false` and at least one source field is -given, the target field will be an array with fingerprints of the +plugin concatenates the names and values of all fields of the event +into one string (like the old checksum filter) before doing the +fingerprint computation. If `false` and at least one source field is +given, the target field will be an array with fingerprints of the source fields given. [id="plugins-{type}s-{plugin}-ecs_compatibility"] @@ -163,7 +163,7 @@ Controls this plugin's compatibility with the See <> for detailed information. [id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * Value type is <> * There is no default value for this setting. @@ -172,7 +172,7 @@ When used with the `IPV4_NETWORK` method fill in the subnet prefix length. With other methods, optionally fill in the HMAC key. [id="plugins-{type}s-{plugin}-method"] -===== `method` +===== `method` * This is a required setting. * Value can be any of: `SHA1`, `SHA256`, `SHA384`, `SHA512`, `MD5`, `MURMUR3`, `MURMUR3_128`, `IPV4_NETWORK`, `UUID`, `PUNCTUATION` @@ -199,7 +199,7 @@ https://en.wikipedia.org/wiki/Universally_unique_identifier[UUID] will be generated. The result will be random and thus not a consistent hash. 
[id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -209,7 +209,7 @@ to create the fingerprint. If an array is given, see the `concatenate_sources` option. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"fingerprint"` when ECS is disabled diff --git a/docs/plugins/filters/geoip.asciidoc b/docs/plugins/filters/geoip.asciidoc index ece35c61f..98cc4ecdd 100644 --- a/docs/plugins/filters/geoip.asciidoc +++ b/docs/plugins/filters/geoip.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.3.1 :release_date: 2024-10-11 :changelog_url: https://github.com/logstash-plugins/logstash-filter-geoip/blob/v7.3.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -343,7 +343,7 @@ For a complete list of available fields and how they map to an event's structure * Value type is <> * Supported values are: ** `disabled`: unstructured geo data added at root level -** `v1`, `v8`: use fields that are compatible with Elastic Common Schema. Example: `[client][geo][country_name]`. See <> for more info. +** `v1`, `v8`: use fields that are compatible with Elastic Common Schema. Example: `[client][geo][country_name]`. See <> for more info. * Default value depends on which version of Logstash is running: ** When Logstash provides a `pipeline.ecs_compatibility` setting, its value is used as the default ** Otherwise, the default value is `disabled`. diff --git a/docs/plugins/filters/grok.asciidoc b/docs/plugins/filters/grok.asciidoc index b4a9ba08e..e0d2ac31e 100644 --- a/docs/plugins/filters/grok.asciidoc +++ b/docs/plugins/filters/grok.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v4.4.3 :release_date: 2022-10-28 :changelog_url: https://github.com/logstash-plugins/logstash-filter-grok/blob/v4.4.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -41,7 +41,7 @@ If you need help building patterns to match your logs, you will find the The {logstash-ref}/plugins-filters-dissect.html[`dissect`] filter plugin is another way to extract unstructured event data into fields using delimiters. -Dissect differs from Grok in that it does not use regular expressions and is faster. +Dissect differs from Grok in that it does not use regular expressions and is faster. Dissect works well when data is reliably repeated. Grok is a better choice when the structure of your text varies from line to line. @@ -173,7 +173,7 @@ The `timestamp`, `logsource`, `program`, and `pid` fields come from the `SYSLOGBASE` pattern which itself is defined by other patterns. Another option is to define patterns _inline_ in the filter using `pattern_definitions`. -This is mostly for convenience and allows user to define a pattern which can be used just in that +This is mostly for convenience and allows user to define a pattern which can be used just in that filter. This newly defined patterns in `pattern_definitions` will not be available outside of that particular `grok` filter. [id="plugins-{type}s-{plugin}-ecs"] @@ -186,7 +186,7 @@ compliant with the schema. The ECS pattern set has all of the pattern definitions from the legacy set, and is a drop-in replacement. Use the <> -setting to switch modes. +setting to switch modes. New features and enhancements will be added to the ECS-compliant files. The legacy patterns may still receive bug fixes which are backwards compatible. @@ -221,7 +221,7 @@ filter plugins.   
[id="plugins-{type}s-{plugin}-break_on_match"] -===== `break_on_match` +===== `break_on_match` * Value type is <> * Default value is `true` @@ -245,7 +245,7 @@ Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (E The value of this setting affects extracted event field names when a composite pattern (such as `HTTPD_COMMONLOG`) is matched. [id="plugins-{type}s-{plugin}-keep_empty_captures"] -===== `keep_empty_captures` +===== `keep_empty_captures` * Value type is <> * Default value is `false` @@ -253,7 +253,7 @@ The value of this setting affects extracted event field names when a composite p If `true`, keep empty captures as event fields. [id="plugins-{type}s-{plugin}-match"] -===== `match` +===== `match` * Value type is <> * Default value is `{}` @@ -282,7 +282,7 @@ If you need to match multiple patterns against a single field, the value can be } } } - + To perform matches on multiple fields just use multiple entries in the `match` hash: [source,ruby] @@ -314,7 +314,7 @@ However, if one pattern depends on a field created by a previous pattern, separa [id="plugins-{type}s-{plugin}-named_captures_only"] -===== `named_captures_only` +===== `named_captures_only` * Value type is <> * Default value is `true` @@ -322,7 +322,7 @@ However, if one pattern depends on a field created by a previous pattern, separa If `true`, only store named captures from grok. [id="plugins-{type}s-{plugin}-overwrite"] -===== `overwrite` +===== `overwrite` * Value type is <> * Default value is `[]` @@ -344,7 +344,7 @@ overwrite the `message` field with part of the match like so: In this case, a line like `May 29 16:37:11 sadness logger: hello world` will be parsed and `hello world` will overwrite the original message. -If you are using a field reference in `overwrite`, you must use the field +If you are using a field reference in `overwrite`, you must use the field reference in the pattern. 
Example: [source,ruby] filter { @@ -356,18 +356,18 @@ reference in the pattern. Example: [id="plugins-{type}s-{plugin}-pattern_definitions"] -===== `pattern_definitions` +===== `pattern_definitions` * Value type is <> * Default value is `{}` -A hash of pattern-name and pattern tuples defining custom patterns to be used by -the current filter. Patterns matching existing names will override the pre-existing -definition. Think of this as inline patterns available just for this definition of +A hash of pattern-name and pattern tuples defining custom patterns to be used by +the current filter. Patterns matching existing names will override the pre-existing +definition. Think of this as inline patterns available just for this definition of grok [id="plugins-{type}s-{plugin}-patterns_dir"] -===== `patterns_dir` +===== `patterns_dir` * Value type is <> * Default value is `[]` @@ -377,7 +377,7 @@ Logstash ships by default with a bunch of patterns, so you don't necessarily need to define this yourself unless you are adding additional patterns. You can point to multiple pattern directories using this setting. Note that Grok will read all files in the directory matching the patterns_files_glob -and assume it's a pattern file (including any tilde backup files). +and assume it's a pattern file (including any tilde backup files). [source,ruby] patterns_dir => ["/opt/logstash/patterns", "/opt/logstash/extra_patterns"] @@ -392,7 +392,7 @@ For example: The patterns are loaded when the pipeline is created. 
[id="plugins-{type}s-{plugin}-patterns_files_glob"] -===== `patterns_files_glob` +===== `patterns_files_glob` * Value type is <> * Default value is `"*"` @@ -401,7 +401,7 @@ Glob pattern, used to select the pattern files in the directories specified by patterns_dir [id="plugins-{type}s-{plugin}-tag_on_failure"] -===== `tag_on_failure` +===== `tag_on_failure` * Value type is <> * Default value is `["_grokparsefailure"]` @@ -410,7 +410,7 @@ Append values to the `tags` field when there has been no successful match [id="plugins-{type}s-{plugin}-tag_on_timeout"] -===== `tag_on_timeout` +===== `tag_on_timeout` * Value type is <> * Default value is `"_groktimeout"` @@ -426,7 +426,7 @@ Tag to apply if a grok regexp times out. Define target namespace for placing matches. [id="plugins-{type}s-{plugin}-timeout_millis"] -===== `timeout_millis` +===== `timeout_millis` * Value type is <> * Default value is `30000` diff --git a/docs/plugins/filters/hashid.asciidoc b/docs/plugins/filters/hashid.asciidoc index 4cb657e53..fc40ed04f 100644 --- a/docs/plugins/filters/hashid.asciidoc +++ b/docs/plugins/filters/hashid.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v0.1.4 :release_date: 2023-05-30 :changelog_url: https://github.com/logstash-plugins/logstash-filter-hashid/blob/v0.1.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -21,8 +21,8 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description -This filter allow you to generate predictable, string encoded hashed keys -based om event contents and timestamp. This can be used to avoid getting +This filter allow you to generate predictable, string encoded hashed keys +based om event contents and timestamp. This can be used to avoid getting duplicate records indexed into Elasticsearch. 
Hashed keys to be generated based on full or partial hashes and @@ -31,7 +31,7 @@ to make then largely ordered by timestamp, which tend to lead to increased indexing performance for event based use cases where data is being indexed in near real time. -When used with the timestamp prefix enabled, it should ideally be run after +When used with the timestamp prefix enabled, it should ideally be run after the date filter has run and populated the @timestamp field. [id="plugins-{type}s-{plugin}-options"] @@ -56,7 +56,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-add_timestamp_prefix"] -===== `add_timestamp_prefix` +===== `add_timestamp_prefix` * Value type is <> * Default value is `true` @@ -64,7 +64,7 @@ filter plugins. Use the timestamp to generate an ID prefix [id="plugins-{type}s-{plugin}-hash_bytes_used"] -===== `hash_bytes_used` +===== `hash_bytes_used` * Value type is <> * There is no default value for this setting. @@ -73,7 +73,7 @@ If full hash generated is not to be used, this parameter specifies how many byte If not specified, the full hash will be used [id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * Value type is <> * Default value is `"hashid"` @@ -81,7 +81,7 @@ If not specified, the full hash will be used Encryption key to be used when generating cryptographic hashes [id="plugins-{type}s-{plugin}-method"] -===== `method` +===== `method` * Value can be any of: `SHA1`, `SHA256`, `SHA384`, `SHA512`, `MD5` * Default value is `"MD5"` @@ -89,7 +89,7 @@ Encryption key to be used when generating cryptographic hashes Hash function to use [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `["message"]` @@ -97,7 +97,7 @@ Hash function to use Source field(s) to base the hash calculation on [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"hashid"` diff --git a/docs/plugins/filters/http.asciidoc 
b/docs/plugins/filters/http.asciidoc index e12619423..81b49ba1e 100644 --- a/docs/plugins/filters/http.asciidoc +++ b/docs/plugins/filters/http.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v2.0.0 :release_date: 2024-12-18 :changelog_url: https://github.com/logstash-plugins/logstash-filter-http/blob/v2.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/filters/i18n.asciidoc b/docs/plugins/filters/i18n.asciidoc index ff27e1adb..35c45670b 100644 --- a/docs/plugins/filters/i18n.asciidoc +++ b/docs/plugins/filters/i18n.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-i18n/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -41,7 +41,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-transliterate"] -===== `transliterate` +===== `transliterate` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/filters/jdbc_static.asciidoc b/docs/plugins/filters/jdbc_static.asciidoc index c1ce739fb..a6fd9695e 100644 --- a/docs/plugins/filters/jdbc_static.asciidoc +++ b/docs/plugins/filters/jdbc_static.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v5.5.2 :release_date: 2024-12-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-jdbc/blob/v5.5.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -280,7 +280,7 @@ order is in place. [id="plugins-{type}s-{plugin}-ecs"] ==== Compatibility with the Elastic Common Schema (ECS) -This plugin is compatible with the {ecs-ref}[Elastic Common Schema (ECS)]. +This plugin is compatible with the {ecs-ref}[Elastic Common Schema (ECS)]. It behaves the same regardless of ECS compatibility, except giving a warning when ECS is enabled and `target` isn't set. TIP: Set the `target` option to avoid potential schema conflicts. diff --git a/docs/plugins/filters/jdbc_streaming.asciidoc b/docs/plugins/filters/jdbc_streaming.asciidoc index 1031a2adf..c31a60d44 100644 --- a/docs/plugins/filters/jdbc_streaming.asciidoc +++ b/docs/plugins/filters/jdbc_streaming.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v5.5.2 :release_date: 2024-12-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-jdbc/blob/v5.5.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/filters/json.asciidoc b/docs/plugins/filters/json.asciidoc index be9bd1e20..2ff701b4a 100644 --- a/docs/plugins/filters/json.asciidoc +++ b/docs/plugins/filters/json.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.1 :release_date: 2023-12-18 :changelog_url: https://github.com/logstash-plugins/logstash-filter-json/blob/v3.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -77,7 +77,7 @@ Controls this plugin's compatibility with the See <> for detailed information. 
[id="plugins-{type}s-{plugin}-skip_on_invalid_json"] -===== `skip_on_invalid_json` +===== `skip_on_invalid_json` * Value type is <> * Default value is `false` @@ -85,7 +85,7 @@ See <> for detailed information. Allows for skipping the filter on invalid JSON (this allows you to handle JSON and non-JSON data without warnings) [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * This is a required setting. * Value type is <> @@ -106,7 +106,7 @@ For example, if you have JSON data in the `message` field: The above would parse the JSON from the `message` field. [id="plugins-{type}s-{plugin}-tag_on_failure"] -===== `tag_on_failure` +===== `tag_on_failure` * Value type is <> * Default value is `["_jsonparsefailure"]` @@ -115,7 +115,7 @@ Append values to the `tags` field when there has been no successful match [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/filters/json_encode.asciidoc b/docs/plugins/filters/json_encode.asciidoc index 6be3321f7..ff6542750 100644 --- a/docs/plugins/filters/json_encode.asciidoc +++ b/docs/plugins/filters/json_encode.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-json_encode/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,7 +54,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * This is a required setting. * Value type is <> @@ -63,7 +63,7 @@ filter plugins. The field to convert to JSON. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. 
diff --git a/docs/plugins/filters/kv.asciidoc b/docs/plugins/filters/kv.asciidoc index b6c7dea5d..639b0ac93 100644 --- a/docs/plugins/filters/kv.asciidoc +++ b/docs/plugins/filters/kv.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.7.0 :release_date: 2022-03-04 :changelog_url: https://github.com/logstash-plugins/logstash-filter-kv/blob/v4.7.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -93,7 +93,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-allow_duplicate_values"] -===== `allow_duplicate_values` +===== `allow_duplicate_values` * Value type is <> * Default value is `true` @@ -123,7 +123,7 @@ When set to true, empty values will be added to the event. NOTE: Parsing empty values typically requires < strict`>>. [id="plugins-{type}s-{plugin}-default_keys"] -===== `default_keys` +===== `default_keys` * Value type is <> * Default value is `{}` @@ -150,7 +150,7 @@ Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (E See <> for detailed information. 
[id="plugins-{type}s-{plugin}-exclude_keys"] -===== `exclude_keys` +===== `exclude_keys` * Value type is <> * Default value is `[]` @@ -168,7 +168,7 @@ To exclude `from` and `to`, but retain the `foo` key, you could use this configu } [id="plugins-{type}s-{plugin}-field_split"] -===== `field_split` +===== `field_split` * Value type is <> * Default value is `" "` @@ -222,7 +222,7 @@ To split fields on a regex character that need escaping like the plus sign filter { kv { field_split_pattern => "\\+\\+" } } [id="plugins-{type}s-{plugin}-include_brackets"] -===== `include_brackets` +===== `include_brackets` * Value type is <> * Default value is `true` @@ -253,7 +253,7 @@ instead of: [id="plugins-{type}s-{plugin}-include_keys"] -===== `include_keys` +===== `include_keys` * Value type is <> * Default value is `[]` @@ -271,7 +271,7 @@ To include `from` and `to`, but exclude the `foo` key, you could use this config } [id="plugins-{type}s-{plugin}-prefix"] -===== `prefix` +===== `prefix` * Value type is <> * Default value is `""` @@ -283,7 +283,7 @@ For example, to prepend arg_ to all keys: filter { kv { prefix => "arg_" } } [id="plugins-{type}s-{plugin}-recursive"] -===== `recursive` +===== `recursive` * Value type is <> * Default value is `false` @@ -302,7 +302,7 @@ Default is not to recursive values. [id="plugins-{type}s-{plugin}-remove_char_key"] -===== `remove_char_key` +===== `remove_char_key` * Value type is <> * There is no default value for this setting. @@ -323,7 +323,7 @@ For example, to remove `<` `>` `[` `]` and `,` characters from keys: } [id="plugins-{type}s-{plugin}-remove_char_value"] -===== `remove_char_value` +===== `remove_char_value` * Value type is <> * There is no default value for this setting. 
@@ -344,7 +344,7 @@ For example, to remove `<`, `>`, `[`, `]` and `,` characters from values: } [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -356,7 +356,7 @@ For example, to process the `not_the_message` field: filter { kv { source => "not_the_message" } } [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. @@ -403,7 +403,7 @@ in order to prevent the operation from blocking the pipeline (see: <>). [id="plugins-{type}s-{plugin}-transform_key"] -===== `transform_key` +===== `transform_key` * Value can be any of: `lowercase`, `uppercase`, `capitalize` * There is no default value for this setting. @@ -419,7 +419,7 @@ For example, to lowercase all keys: } [id="plugins-{type}s-{plugin}-transform_value"] -===== `transform_value` +===== `transform_value` * Value can be any of: `lowercase`, `uppercase`, `capitalize` * There is no default value for this setting. @@ -435,7 +435,7 @@ For example, to capitalize all values: } [id="plugins-{type}s-{plugin}-trim_key"] -===== `trim_key` +===== `trim_key` * Value type is <> * There is no default value for this setting. @@ -457,7 +457,7 @@ For example, to trim `<` `>` `[` `]` and `,` characters from keys: } [id="plugins-{type}s-{plugin}-trim_value"] -===== `trim_value` +===== `trim_value` * Value type is <> * There is no default value for this setting. @@ -481,7 +481,7 @@ For example, to trim `<`, `>`, `[`, `]` and `,` characters from values: } [id="plugins-{type}s-{plugin}-value_split"] -===== `value_split` +===== `value_split` * Value type is <> * Default value is `"="` diff --git a/docs/plugins/filters/memcached.asciidoc b/docs/plugins/filters/memcached.asciidoc index 4d6b5b406..92c131f4a 100644 --- a/docs/plugins/filters/memcached.asciidoc +++ b/docs/plugins/filters/memcached.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v1.2.0 :release_date: 2023-01-18 :changelog_url: https://github.com/logstash-plugins/logstash-filter-memcached/blob/v1.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -27,11 +27,11 @@ It currently provides the following facilities: * `get`: get values for one or more memcached keys and inject them into the event at the provided paths - + * `set`: set values from the event to the corresponding memcached keys - + ==== Examples - + This plugin enables key/value lookup enrichment against a Memcached object caching system. You can use this plugin to query for a value, and set it if not found. @@ -64,7 +64,7 @@ memcached { id => "memcached-set" } ----- - + [id="plugins-{type}s-{plugin}-options"] ==== Memcached Filter Configuration Options diff --git a/docs/plugins/filters/metricize.asciidoc b/docs/plugins/filters/metricize.asciidoc index e4bdfc7d3..0642452c9 100644 --- a/docs/plugins/filters/metricize.asciidoc +++ b/docs/plugins/filters/metricize.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-metricize/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -71,7 +71,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-drop_original_event"] -===== `drop_original_event` +===== `drop_original_event` * Value type is <> * Default value is `false` @@ -79,7 +79,7 @@ filter plugins. Flag indicating whether the original event should be dropped or not. 
[id="plugins-{type}s-{plugin}-metric_field_name"] -===== `metric_field_name` +===== `metric_field_name` * Value type is <> * Default value is `"metric"` @@ -87,7 +87,7 @@ Flag indicating whether the original event should be dropped or not. Name of the field the metric name will be written to. [id="plugins-{type}s-{plugin}-metrics"] -===== `metrics` +===== `metrics` * This is a required setting. * Value type is <> @@ -97,7 +97,7 @@ A new matrics event will be created for each metric field in this list. All fields in this list will be removed from generated events. [id="plugins-{type}s-{plugin}-value_field_name"] -===== `value_field_name` +===== `value_field_name` * Value type is <> * Default value is `"value"` diff --git a/docs/plugins/filters/metrics.asciidoc b/docs/plugins/filters/metrics.asciidoc index f1b5cc115..c5fa597e5 100644 --- a/docs/plugins/filters/metrics.asciidoc +++ b/docs/plugins/filters/metrics.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.7 :release_date: 2021-01-20 :changelog_url: https://github.com/logstash-plugins/logstash-filter-metrics/blob/v4.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -151,7 +151,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-clear_interval"] -===== `clear_interval` +===== `clear_interval` * Value type is <> * Default value is `-1` @@ -162,7 +162,7 @@ If set to -1, the default value, the metrics will never be cleared. Otherwise, should be a multiple of 5s. [id="plugins-{type}s-{plugin}-flush_interval"] -===== `flush_interval` +===== `flush_interval` * Value type is <> * Default value is `5` @@ -170,7 +170,7 @@ Otherwise, should be a multiple of 5s. The flush interval, when the metrics event is created. Must be a multiple of 5s. 
[id="plugins-{type}s-{plugin}-ignore_older_than"] -===== `ignore_older_than` +===== `ignore_older_than` * Value type is <> * Default value is `0` @@ -191,7 +191,7 @@ would do this: } [id="plugins-{type}s-{plugin}-meter"] -===== `meter` +===== `meter` * Value type is <> * Default value is `[]` @@ -199,7 +199,7 @@ would do this: syntax: `meter => [ "name of metric", "name of metric" ]` [id="plugins-{type}s-{plugin}-percentiles"] -===== `percentiles` +===== `percentiles` * Value type is <> * Default value is `[1, 5, 10, 90, 95, 99, 100]` @@ -207,7 +207,7 @@ syntax: `meter => [ "name of metric", "name of metric" ]` The percentiles that should be measured and emitted for timer values. [id="plugins-{type}s-{plugin}-rates"] -===== `rates` +===== `rates` * Value type is <> * Default value is `[1, 5, 15]` @@ -216,7 +216,7 @@ The rates that should be measured, in minutes. Possible values are 1, 5, and 15. [id="plugins-{type}s-{plugin}-timer"] -===== `timer` +===== `timer` * Value type is <> * Default value is `{}` diff --git a/docs/plugins/filters/multiline.asciidoc b/docs/plugins/filters/multiline.asciidoc index 147774c31..ed92b8d7c 100644 --- a/docs/plugins/filters/multiline.asciidoc +++ b/docs/plugins/filters/multiline.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2017-08-15 :changelog_url: https://github.com/logstash-plugins/logstash-filter-multiline/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -100,7 +100,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-allow_duplicates"] -===== `allow_duplicates` +===== `allow_duplicates` * Value type is <> * Default value is `true` @@ -108,7 +108,7 @@ filter plugins. Allow duplcate values on the source field. 
[id="plugins-{type}s-{plugin}-max_age"] -===== `max_age` +===== `max_age` * Value type is <> * Default value is `5` @@ -117,7 +117,7 @@ The maximum age an event can be (in seconds) before it is automatically flushed. [id="plugins-{type}s-{plugin}-negate"] -===== `negate` +===== `negate` * Value type is <> * Default value is `false` @@ -125,7 +125,7 @@ flushed. Negate the regexp pattern ('if not matched') [id="plugins-{type}s-{plugin}-pattern"] -===== `pattern` +===== `pattern` * This is a required setting. * Value type is <> @@ -136,7 +136,7 @@ The expression to match. The same matching engine as the a plain regular expression or one that also contains grok patterns. [id="plugins-{type}s-{plugin}-patterns_dir"] -===== `patterns_dir` +===== `patterns_dir` * Value type is <> * Default value is `[]` @@ -154,7 +154,7 @@ For example: NUMBER \d+ [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -162,7 +162,7 @@ For example: The field name to execute the pattern match on. [id="plugins-{type}s-{plugin}-stream_identity"] -===== `stream_identity` +===== `stream_identity` * Value type is <> * Default value is `"%{host}.%{path}.%{type}"` @@ -181,7 +181,7 @@ may have occurred between the old and new connection. To solve this use case, you can use `%{@source_host}.%{@type}` instead. [id="plugins-{type}s-{plugin}-what"] -===== `what` +===== `what` * This is a required setting. * Value can be any of: `previous`, `next` diff --git a/docs/plugins/filters/mutate.asciidoc b/docs/plugins/filters/mutate.asciidoc index 2fbceb64c..61406c73e 100644 --- a/docs/plugins/filters/mutate.asciidoc +++ b/docs/plugins/filters/mutate.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.5.8 :release_date: 2023-11-22 :changelog_url: https://github.com/logstash-plugins/logstash-filter-mutate/blob/v3.5.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -55,7 +55,7 @@ filter { add_field => { "shortHostname" => "%{[hostname][0]}" } } - mutate { + mutate { rename => {"shortHostname" => "hostname"} } } @@ -142,7 +142,7 @@ Example: [source,ruby] filter { mutate { - convert => { + convert => { "fieldname" => "integer" "booleanfield" => "boolean" } diff --git a/docs/plugins/filters/oui.asciidoc b/docs/plugins/filters/oui.asciidoc index c8ece310c..01c1608b4 100644 --- a/docs/plugins/filters/oui.asciidoc +++ b/docs/plugins/filters/oui.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.2 :release_date: 2017-08-15 :changelog_url: https://github.com/logstash-plugins/logstash-filter-oui/blob/v3.0.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -41,7 +41,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -58,7 +58,7 @@ filter { The source field to parse [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"oui"` diff --git a/docs/plugins/filters/prune.asciidoc b/docs/plugins/filters/prune.asciidoc index 1f8bda7fa..fc9dd76ed 100644 --- a/docs/plugins/filters/prune.asciidoc +++ b/docs/plugins/filters/prune.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.4 :release_date: 2019-09-12 :changelog_url: https://github.com/logstash-plugins/logstash-filter-prune/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -78,7 +78,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-blacklist_names"] -===== `blacklist_names` +===== `blacklist_names` * Value type is <> * Default value is `["%{[^}]+}"]` @@ -92,7 +92,7 @@ Exclude fields whose names match specified regexps, by default exclude unresolve } [id="plugins-{type}s-{plugin}-blacklist_values"] -===== `blacklist_values` +===== `blacklist_values` * Value type is <> * Default value is `{}` @@ -109,7 +109,7 @@ In case field values are arrays, each array item is matched against the regular } [id="plugins-{type}s-{plugin}-interpolate"] -===== `interpolate` +===== `interpolate` * Value type is <> * Default value is `false` @@ -119,7 +119,7 @@ dynamic values (when resolving `%{some_field}`). Probably adds some performance overhead. Defaults to false. [id="plugins-{type}s-{plugin}-whitelist_names"] -===== `whitelist_names` +===== `whitelist_names` * Value type is <> * Default value is `[]` @@ -133,7 +133,7 @@ Include only fields only if their names match specified regexps, default to empt } [id="plugins-{type}s-{plugin}-whitelist_values"] -===== `whitelist_values` +===== `whitelist_values` * Value type is <> * Default value is `{}` diff --git a/docs/plugins/filters/range.asciidoc b/docs/plugins/filters/range.asciidoc index 2225e793e..fefb9378c 100644 --- a/docs/plugins/filters/range.asciidoc +++ b/docs/plugins/filters/range.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.3 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-range/blob/v3.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -50,7 +50,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-negate"] -===== `negate` +===== `negate` * Value type is <> * Default value is `false` @@ -58,7 +58,7 @@ filter plugins. Negate the range match logic, events should be outsize of the specified range to match. [id="plugins-{type}s-{plugin}-ranges"] -===== `ranges` +===== `ranges` * Value type is <> * Default value is `[]` diff --git a/docs/plugins/filters/ruby.asciidoc b/docs/plugins/filters/ruby.asciidoc index 19d35dee1..ea9a3a3e9 100644 --- a/docs/plugins/filters/ruby.asciidoc +++ b/docs/plugins/filters/ruby.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.8 :release_date: 2022-01-24 :changelog_url: https://github.com/logstash-plugins/logstash-filter-ruby/blob/v3.1.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/filters/sleep.asciidoc b/docs/plugins/filters/sleep.asciidoc index fd4d05b59..5636688e9 100644 --- a/docs/plugins/filters/sleep.asciidoc +++ b/docs/plugins/filters/sleep.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2020-09-04 :changelog_url: https://github.com/logstash-plugins/logstash-filter-sleep/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -45,7 +45,7 @@ filter plugins.   
[id="plugins-{type}s-{plugin}-every"] -===== `every` +===== `every` * Value type is <> * Default value is `1` @@ -62,7 +62,7 @@ Example: } [id="plugins-{type}s-{plugin}-replay"] -===== `replay` +===== `replay` * Value type is <> * Default value is `false` @@ -93,7 +93,7 @@ The above will sleep in such a way that it will perform replay 2-times faster than the original time speed. [id="plugins-{type}s-{plugin}-time"] -===== `time` +===== `time` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/filters/split.asciidoc b/docs/plugins/filters/split.asciidoc index f33adf515..26d6e53b8 100644 --- a/docs/plugins/filters/split.asciidoc +++ b/docs/plugins/filters/split.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.8 :release_date: 2020-01-21 :changelog_url: https://github.com/logstash-plugins/logstash-filter-split/blob/v3.1.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -79,17 +79,17 @@ filter plugins.   [id="plugins-{type}s-{plugin}-field"] -===== `field` +===== `field` * Value type is <> * Default value is `"message"` -The field which value is split by the terminator. -Can be a multiline message or the ID of an array. +The field which value is split by the terminator. +Can be a multiline message or the ID of an array. Nested arrays are referenced like: "[object_id][array_id]" [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. @@ -98,7 +98,7 @@ The field within the new event which the value is split into. If not set, the target field defaults to split field name. 
[id="plugins-{type}s-{plugin}-terminator"] -===== `terminator` +===== `terminator` * Value type is <> * Default value is `"\n"` diff --git a/docs/plugins/filters/syslog_pri.asciidoc b/docs/plugins/filters/syslog_pri.asciidoc index 9da1219ea..cb9015d5b 100644 --- a/docs/plugins/filters/syslog_pri.asciidoc +++ b/docs/plugins/filters/syslog_pri.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.1 :release_date: 2024-01-17 :changelog_url: https://github.com/logstash-plugins/logstash-filter-syslog_pri/blob/v3.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -63,7 +63,7 @@ Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (E The value of this setting affects the _default_ value of <>. [id="plugins-{type}s-{plugin}-facility_labels"] -===== `facility_labels` +===== `facility_labels` * Value type is <> * Default value is `["kernel", "user-level", "mail", "daemon", "security/authorization", "syslogd", "line printer", "network news", "uucp", "clock", "security/authorization", "ftp", "ntp", "log audit", "log alert", "clock", "local0", "local1", "local2", "local3", "local4", "local5", "local6", "local7"]` @@ -73,7 +73,7 @@ If an unrecognized facility code is provided and <> * Default value is `["emergency", "alert", "critical", "error", "warning", "notice", "informational", "debug"]` @@ -81,7 +81,7 @@ is tagged with `_syslogpriparsefailure`. Labels for severity levels. This comes from RFC3164. [id="plugins-{type}s-{plugin}-syslog_pri_field_name"] -===== `syslog_pri_field_name` +===== `syslog_pri_field_name` * Value type is <> * Default value depends on whether <> is enabled: @@ -91,7 +91,7 @@ Labels for severity levels. This comes from RFC3164. 
Name of field which passes in the extracted PRI part of the syslog message [id="plugins-{type}s-{plugin}-use_labels"] -===== `use_labels` +===== `use_labels` * Value type is <> * Default value is `true` diff --git a/docs/plugins/filters/threats_classifier.asciidoc b/docs/plugins/filters/threats_classifier.asciidoc index a3e4cb4bb..20c0a6766 100644 --- a/docs/plugins/filters/threats_classifier.asciidoc +++ b/docs/plugins/filters/threats_classifier.asciidoc @@ -6,7 +6,7 @@ REPLACES GENERATED VARIABLES /////////////////////////////////////////// :changelog_url: https://github.com/empow/logstash-filter-empow-classifier/blob/master/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include :gem: https://rubygems.org/gems/logstash-filter-empowclassifier /////////////////////////////////////////// END - REPLACES GENERATED VARIABLES @@ -33,16 +33,15 @@ For plugins not bundled by default, it is easy to install by running +bin/logsta This plugin uses the cyber-kill-chain and MITRE representation language to enrich security logs with information about the attacker’s intent--what the attacker is trying to achieve, who they are targeting, and how they plan to -carry out the attack. +carry out the attack. ==== Documentation -Documentation for the -https://github.com/empow/logstash-filter-empow-classifier/blob/master/README.md[filter-threats_classifier plugin] +Documentation for the +https://github.com/empow/logstash-filter-empow-classifier/blob/master/README.md[filter-threats_classifier plugin] is maintained by the creators. ==== Getting Help This is a third-party plugin. For bugs or feature requests, open an issue in the https://github.com/empow/logstash-filter-empow-classifier[plugins-{type}s-{plugin} Github repo]. 
- diff --git a/docs/plugins/filters/throttle.asciidoc b/docs/plugins/filters/throttle.asciidoc index 812d40a44..2ea483844 100644 --- a/docs/plugins/filters/throttle.asciidoc +++ b/docs/plugins/filters/throttle.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.4 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-throttle/blob/v4.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -184,7 +184,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-after_count"] -===== `after_count` +===== `after_count` * Value type is <> * Default value is `-1` @@ -193,7 +193,7 @@ Events greater than this count will be throttled. Setting this value to -1, the default, will cause no events to be throttled based on the upper bound. [id="plugins-{type}s-{plugin}-before_count"] -===== `before_count` +===== `before_count` * Value type is <> * Default value is `-1` @@ -202,7 +202,7 @@ Events less than this count will be throttled. Setting this value to -1, the default, will cause no events to be throttled based on the lower bound. [id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * This is a required setting. * Value type is <> @@ -212,7 +212,7 @@ The key used to identify events. Events with the same key are grouped together. Field substitutions are allowed, so you can combine multiple fields. [id="plugins-{type}s-{plugin}-max_age"] -===== `max_age` +===== `max_age` * Value type is <> * Default value is `3600` @@ -224,7 +224,7 @@ between unordered events with the same key. Values below the specified period g unexpected results if unordered events are processed simultaneously. 
[id="plugins-{type}s-{plugin}-max_counters"] -===== `max_counters` +===== `max_counters` * Value type is <> * Default value is `100000` @@ -237,7 +237,7 @@ It is recommended to leave the default value and ensure that your key is selecte such that it limits the number of counters required (i.e. don't use UUID as the key). [id="plugins-{type}s-{plugin}-period"] -===== `period` +===== `period` * Value type is <> * Default value is `"60"` diff --git a/docs/plugins/filters/tld.asciidoc b/docs/plugins/filters/tld.asciidoc index 639859762..11192e991 100644 --- a/docs/plugins/filters/tld.asciidoc +++ b/docs/plugins/filters/tld.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.3 :release_date: 2023-10-19 :changelog_url: https://github.com/logstash-plugins/logstash-filter-tld/blob/v3.1.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -21,7 +21,7 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description -This example filter will replace the contents of the default +This example filter will replace the contents of the default message field with whatever you specify in the configuration. It is only intended to be used as an example. @@ -44,7 +44,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * Value type is <> * Default value is `"message"` @@ -61,7 +61,7 @@ filter { The source field to parse [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value is `"tld"` diff --git a/docs/plugins/filters/translate.asciidoc b/docs/plugins/filters/translate.asciidoc index 8bc4eb951..59c7deacf 100644 --- a/docs/plugins/filters/translate.asciidoc +++ b/docs/plugins/filters/translate.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.4.2 :release_date: 2023-06-14 :changelog_url: https://github.com/logstash-plugins/logstash-filter-translate/blob/v3.4.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -25,12 +25,12 @@ A general search and replace tool that uses a configured hash and/or a file to determine replacement values. Currently supported are YAML, JSON, and CSV files. Each dictionary item is a key value pair. -You can specify dictionary entries in one of two ways: +You can specify dictionary entries in one of two ways: * The `dictionary` configuration item can contain a hash representing -the mapping. +the mapping. * An external file (readable by logstash) may be specified in the -`dictionary_path` configuration item. +`dictionary_path` configuration item. These two methods may not be used in conjunction; it will produce an error. @@ -158,7 +158,7 @@ NOTE: It is an error to specify both `dictionary` and `dictionary_path`. * There is no default value for this setting. The full path of the external dictionary file. The format of the table should be -a standard YAML, JSON, or CSV. +a standard YAML, JSON, or CSV. Specify any integer-based keys in quotes. The value taken from the event's `source` setting is converted to a string. The lookup dictionary keys must also diff --git a/docs/plugins/filters/truncate.asciidoc b/docs/plugins/filters/truncate.asciidoc index d6729403f..71bd1035d 100644 --- a/docs/plugins/filters/truncate.asciidoc +++ b/docs/plugins/filters/truncate.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v1.0.6 :release_date: 2023-05-10 :changelog_url: https://github.com/logstash-plugins/logstash-filter-truncate/blob/v1.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -45,7 +45,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-fields"] -===== `fields` +===== `fields` * Value type is <> * There is no default value for this setting. @@ -65,7 +65,7 @@ Special behaviors for non-string fields: contains other hashes). [id="plugins-{type}s-{plugin}-length_bytes"] -===== `length_bytes` +===== `length_bytes` * This is a required setting. * Value type is <> diff --git a/docs/plugins/filters/urldecode.asciidoc b/docs/plugins/filters/urldecode.asciidoc index 4d3538f92..8d21bed72 100644 --- a/docs/plugins/filters/urldecode.asciidoc +++ b/docs/plugins/filters/urldecode.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-urldecode/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -43,7 +43,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-all_fields"] -===== `all_fields` +===== `all_fields` * Value type is <> * Default value is `false` @@ -51,7 +51,7 @@ filter plugins. 
Urldecode all fields [id="plugins-{type}s-{plugin}-charset"] -===== `charset` +===== `charset` * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, 
`ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` * Default value is `"UTF-8"` @@ -63,7 +63,7 @@ This setting is useful if your url decoded string are in `Latin-1` (aka `cp1252` or in another character set other than `UTF-8`. [id="plugins-{type}s-{plugin}-field"] -===== `field` +===== `field` * Value type is <> * Default value is `"message"` @@ -71,7 +71,7 @@ or in another character set other than `UTF-8`. The field which value is urldecoded [id="plugins-{type}s-{plugin}-tag_on_failure"] -===== `tag_on_failure` +===== `tag_on_failure` * Value type is <> * Default value is `["_urldecodefailure"]` diff --git a/docs/plugins/filters/useragent.asciidoc b/docs/plugins/filters/useragent.asciidoc index df7fa5d70..fa3b910fd 100644 --- a/docs/plugins/filters/useragent.asciidoc +++ b/docs/plugins/filters/useragent.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.3.5 :release_date: 2023-09-19 :changelog_url: https://github.com/logstash-plugins/logstash-filter-useragent/blob/v3.3.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -137,7 +137,7 @@ Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (E The value of this setting affects the _default_ value of <>. [id="plugins-{type}s-{plugin}-lru_cache_size"] -===== `lru_cache_size` +===== `lru_cache_size` * Value type is <> * Default value is `100000` @@ -158,7 +158,7 @@ to having multiple caches for different instances at different points in the pip number of cache misses and waste memory. 
[id="plugins-{type}s-{plugin}-prefix"] -===== `prefix` +===== `prefix` * Value type is <> * Default value is `""` @@ -166,7 +166,7 @@ number of cache misses and waste memory. A string to prepend to all of the extracted keys [id="plugins-{type}s-{plugin}-regexes"] -===== `regexes` +===== `regexes` * Value type is <> * There is no default value for this setting. @@ -178,7 +178,7 @@ You can find the latest version of this here: [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * This is a required setting. * Value type is <> @@ -188,7 +188,7 @@ The field containing the user agent string. If this field is an array, only the first value will be used. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * Default value depends on whether <> is enabled: diff --git a/docs/plugins/filters/uuid.asciidoc b/docs/plugins/filters/uuid.asciidoc index 2b64082c9..d2b6682aa 100644 --- a/docs/plugins/filters/uuid.asciidoc +++ b/docs/plugins/filters/uuid.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2017-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-filter-uuid/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,7 +54,7 @@ filter plugins.   [id="plugins-{type}s-{plugin}-overwrite"] -===== `overwrite` +===== `overwrite` * Value type is <> * Default value is `false` @@ -73,7 +73,7 @@ Example: } [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * This is a required setting. 
* Value type is <> diff --git a/docs/plugins/filters/wurfl_device_detection.asciidoc b/docs/plugins/filters/wurfl_device_detection.asciidoc index 8c76a0277..d5c72ca8a 100644 --- a/docs/plugins/filters/wurfl_device_detection.asciidoc +++ b/docs/plugins/filters/wurfl_device_detection.asciidoc @@ -6,7 +6,7 @@ REPLACES GENERATED VARIABLES /////////////////////////////////////////// :changelog_url: https://github.com/WURFL/logstash-filter-wurfl_device_detection/blob/master/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include :gem: https://rubygems.org/gems/logstash-filter-wurfl_device_detection /////////////////////////////////////////// END - REPLACES GENERATED VARIABLES @@ -44,11 +44,10 @@ analysis, and device analytics. Documentation for the {type}-{plugin} plugin is maintained by the creators: -* https://github.com/WURFL/logstash-filter-wurfl_device_detection/blob/master/README.md[README.md] +* https://github.com/WURFL/logstash-filter-wurfl_device_detection/blob/master/README.md[README.md] * https://github.com/WURFL/logstash-filter-wurfl_device_detection/blob/master/docs/index.asciidoc[plugin documentation] ==== Getting Help This is a third-party plugin. For bugs or feature requests, open an issue in the https://github.com/WURFL/logstash-filter-wurfl_device_detection[plugins-{type}s-{plugin} Github repo]. - diff --git a/docs/plugins/filters/xml.asciidoc b/docs/plugins/filters/xml.asciidoc index a247ef6e4..2087d950d 100644 --- a/docs/plugins/filters/xml.asciidoc +++ b/docs/plugins/filters/xml.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.3.0 :release_date: 2025-02-19 :changelog_url: https://github.com/logstash-plugins/logstash-filter-xml/blob/v4.3.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -50,16 +50,16 @@ filter plugins.   [id="plugins-{type}s-{plugin}-force_array"] -===== `force_array` +===== `force_array` * Value type is <> * Default value is `true` -By default the filter will force single elements to be arrays. Setting this to +By default the filter will force single elements to be arrays. Setting this to false will prevent storing single elements in arrays. [id="plugins-{type}s-{plugin}-force_content"] -===== `force_content` +===== `force_content` * Value type is <> * Default value is `false` @@ -69,7 +69,7 @@ of tags. This option allows you to force text content and attributes to always parse to a hash value. [id="plugins-{type}s-{plugin}-namespaces"] -===== `namespaces` +===== `namespaces` * Value type is <> * Default value is `{}` @@ -100,7 +100,7 @@ By default the parser is not strict and thus accepts some invalid content. Currently supported options are: - `strict` - forces the parser to fail early instead of accumulating errors when content is not valid xml. - + Control characters such as ASCII 0x0 are not allowed and _always_ result in non-valid XML. When XML content is not valid, it will be tagged as `_xmlparsefailure`. @@ -111,10 +111,10 @@ XML specs: * XML 1.1 Spec: https://www.w3.org/TR/xml11/#charsets - + [id="plugins-{type}s-{plugin}-remove_namespaces"] -===== `remove_namespaces` +===== `remove_namespaces` * Value type is <> * Default value is `false` @@ -123,7 +123,7 @@ Remove all namespaces from all nodes in the document. Of course, if the document had nodes with the same names but different namespaces, they will now be ambiguous. [id="plugins-{type}s-{plugin}-source"] -===== `source` +===== `source` * This is a required setting. * Value type is <> @@ -144,7 +144,7 @@ For example, if you have the whole XML document in your `message` field: The above would parse the XML from the `message` field. 
[id="plugins-{type}s-{plugin}-store_xml"] -===== `store_xml` +===== `store_xml` * Value type is <> * Default value is `true` @@ -153,7 +153,7 @@ By default the filter will store the whole parsed XML in the destination field as described above. Setting this to false will prevent that. [id="plugins-{type}s-{plugin}-suppress_empty"] -===== `suppress_empty` +===== `suppress_empty` * Value type is <> * Default value is `true` @@ -162,7 +162,7 @@ By default, output nothing if the element is empty. If set to `false`, empty element will result in an empty hash object. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. @@ -183,7 +183,7 @@ Note: if the `target` field already exists, it will be overridden. Required if `store_xml` is true (which is the default). [id="plugins-{type}s-{plugin}-xpath"] -===== `xpath` +===== `xpath` * Value type is <> * Default value is `{}` @@ -205,7 +205,7 @@ multiple source fields will produce duplicate entries in the field. For more information on XPath, see https://www.w3schools.com/xml/xml_xpath.asp. -The https://www.w3schools.com/xml/xsl_functions.asp[XPath functions] are particularly powerful. +The https://www.w3schools.com/xml/xsl_functions.asp[XPath functions] are particularly powerful. diff --git a/docs/plugins/include/attributes-ls.asciidoc b/docs/plugins/include/attributes-ls.asciidoc new file mode 100644 index 000000000..714982cad --- /dev/null +++ b/docs/plugins/include/attributes-ls.asciidoc @@ -0,0 +1,10 @@ +///// +These settings control attributes for Logstash core content +in the Logstash Reference (LSR) only. 
+ +Shared attributes for the plugin docs (in the LSR and VPR) should +go in /docs/include/attributes-lsplugins.asciidoc instead +with a corresponding change to the VPR settings in +logstash-docs/docs/versioned-plugins/include/attributes-ls-vpr.asciidoc +///// + diff --git a/docs/plugins/include/attributes-lsplugins.asciidoc b/docs/plugins/include/attributes-lsplugins.asciidoc new file mode 100644 index 000000000..674bcc03c --- /dev/null +++ b/docs/plugins/include/attributes-lsplugins.asciidoc @@ -0,0 +1,13 @@ +///// +These settings control attributes in the LSR only. +They correspond to the VPR settings in logstash-docs/docs/versioned-plugins/include/attributes-ls-vpr.asciidoc +When we update one, we must update settings in the other location, + +Attribute text formatted without hard wrap is deliberate. +Otherwise, text breaks at return and content after the return is dropped. + +Text is written to accommodate multiple versions because plugins are not stack versioned. +///// + + +:ecs-default: When the `ecs_compatibility` option for this plugin is not explicitly set, its effective value depends on the `pipeline.ecs_compatibility` setting for the pipeline in `pipelines.yml`, or globally in {logstash-ref}/logstash-settings-file.html[`logstash.yml`], allowing you to specify your preferred behavior at the plugin, pipeline, or system level. If no preference is specified, the default value is `v8` for Logstash 8 or `disabled` for all earlier releases of Logstash. For more information about ECS compatibility settings in Logstash and plugins, see {logstash-ref}/ecs-ls.html[ECS in Logstash]. 
diff --git a/docs/plugins/include/filter.asciidoc b/docs/plugins/include/filter.asciidoc new file mode 100644 index 000000000..9cd984f29 --- /dev/null +++ b/docs/plugins/include/filter.asciidoc @@ -0,0 +1,234 @@ +==== Common options + +// Contributors: You must conditionally code all internal links and IDs in this +// file to make the common files work in both the LS Reference and the versioned +// plugin docs + +These configuration options are supported by all filter plugins: + +ifeval::["{versioned_docs}"!="true"] +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +| <> |{logstash-ref}/configuration-file-structure.html#hash[hash]|No +| <> |{logstash-ref}/configuration-file-structure.html#array[array]|No +| <> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <> |{logstash-ref}/configuration-file-structure.html#string[string]|No +| <> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <> |{logstash-ref}/configuration-file-structure.html#array[array]|No +| <> |{logstash-ref}/configuration-file-structure.html#array[array]|No +|======================================================================= +endif::[] +ifeval::["{versioned_docs}"=="true"] +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +| <<{version}-plugins-{type}s-{plugin}-add_field>> |{logstash-ref}/configuration-file-structure.html#hash[hash]|No +| <<{version}-plugins-{type}s-{plugin}-add_tag>> |{logstash-ref}/configuration-file-structure.html#array[array]|No +| <<{version}-plugins-{type}s-{plugin}-enable_metric>> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <<{version}-plugins-{type}s-{plugin}-id>> |{logstash-ref}/configuration-file-structure.html#string[string]|No +| <<{version}-plugins-{type}s-{plugin}-periodic_flush>> 
|{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <<{version}-plugins-{type}s-{plugin}-remove_field>> |{logstash-ref}/configuration-file-structure.html#array[array]|No +| <<{version}-plugins-{type}s-{plugin}-remove_tag>> |{logstash-ref}/configuration-file-structure.html#array[array]|No +|======================================================================= +endif::[] + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-add_field"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-add_field"] +endif::[] +===== `add_field` + + * Value type is {logstash-ref}/configuration-file-structure.html#hash[hash] + * Default value is `{}` + +If this filter is successful, add any arbitrary fields to this event. +Field names can be dynamic and include parts of the event using the `%{field}`. + +Example: + +["source","json",subs="attributes"] + filter { + {plugin} { + add_field => { "foo_%\{somefield\}" => "Hello world, from %\{host\}" } + } + } + +["source","json",subs="attributes"] + # You can also add multiple fields at once: + filter { + {plugin} { + add_field => { + "foo_%\{somefield\}" => "Hello world, from %\{host\}" + "new_field" => "new_static_value" + } + } + } + +If the event has field `"somefield" == "hello"` this filter, on success, +would add field `foo_hello` if it is present, with the +value above and the `%{host}` piece replaced with that value from the +event. The second example would also add a hardcoded field. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-add_tag"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-add_tag"] +endif::[] +===== `add_tag` + + * Value type is {logstash-ref}/configuration-file-structure.html#array[array] + * Default value is `[]` + +If this filter is successful, add arbitrary tags to the event. +Tags can be dynamic and include parts of the event using the `%{field}` +syntax. 
+ +Example: + +["source","json",subs="attributes"] + filter { + {plugin} { + add_tag => [ "foo_%\{somefield\}" ] + } + } + +["source","json",subs="attributes"] + # You can also add multiple tags at once: + filter { + {plugin} { + add_tag => [ "foo_%\{somefield\}", "taggedy_tag"] + } + } + +If the event has field `"somefield" == "hello"` this filter, on success, +would add a tag `foo_hello` (and the second example would of course add a `taggedy_tag` tag). + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-enable_metric"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-enable_metric"] +endif::[] +===== `enable_metric` + + * Value type is {logstash-ref}/configuration-file-structure.html#boolean[boolean] + * Default value is `true` + +Disable or enable metric logging for this specific plugin instance. +By default we record all the metrics we can, but you can disable metrics collection +for a specific plugin. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-id"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-id"] +endif::[] +===== `id` + + * Value type is {logstash-ref}/configuration-file-structure.html#string[string] + * There is no default value for this setting. + +Add a unique `ID` to the plugin configuration. If no ID is specified, Logstash will generate one. +It is strongly recommended to set this ID in your configuration. This is particularly useful +when you have two or more plugins of the same type, for example, if you have 2 {plugin} filters. +Adding a named ID in this case will help in monitoring Logstash when using the monitoring APIs. + + +["source","json",subs="attributes"] + filter { + {plugin} { + id => "ABC" + } + } + +NOTE: Variable substitution in the `id` field only supports environment variables + and does not support the use of values from the secret store. 
+ +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-periodic_flush"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-periodic_flush"] +endif::[] +===== `periodic_flush` + + * Value type is {logstash-ref}/configuration-file-structure.html#boolean[boolean] + * Default value is `false` + +Call the filter flush method at regular interval. +Optional. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-remove_field"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-remove_field"] +endif::[] +===== `remove_field` + + * Value type is {logstash-ref}/configuration-file-structure.html#array[array] + * Default value is `[]` + +If this filter is successful, remove arbitrary fields from this event. +Fields names can be dynamic and include parts of the event using the %{field} +Example: + +["source","json",subs="attributes"] + filter { + {plugin} { + remove_field => [ "foo_%\{somefield\}" ] + } + } + +["source","json",subs="attributes"] + # You can also remove multiple fields at once: + filter { + {plugin} { + remove_field => [ "foo_%\{somefield\}", "my_extraneous_field" ] + } + } + +If the event has field `"somefield" == "hello"` this filter, on success, +would remove the field with name `foo_hello` if it is present. The second +example would remove an additional, non-dynamic field. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-remove_tag"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-remove_tag"] +endif::[] +===== `remove_tag` + + * Value type is {logstash-ref}/configuration-file-structure.html#array[array] + * Default value is `[]` + +If this filter is successful, remove arbitrary tags from the event. +Tags can be dynamic and include parts of the event using the `%{field}` +syntax. 
+ +Example: + +["source","json",subs="attributes"] + filter { + {plugin} { + remove_tag => [ "foo_%\{somefield\}" ] + } + } + +["source","json",subs="attributes"] + # You can also remove multiple tags at once: + filter { + {plugin} { + remove_tag => [ "foo_%\{somefield\}", "sad_unwanted_tag"] + } + } + +If the event has field `"somefield" == "hello"` this filter, on success, +would remove the tag `foo_hello` if it is present. The second example +would remove a sad, unwanted tag as well. diff --git a/docs/plugins/include/input.asciidoc b/docs/plugins/include/input.asciidoc new file mode 100644 index 000000000..5ef643aac --- /dev/null +++ b/docs/plugins/include/input.asciidoc @@ -0,0 +1,172 @@ +==== Common options + +// Contributors: You must conditionally code all internal links and IDs in this +// file to make the common files work in both the LS Reference and the versioned +// plugin docs + +These configuration options are supported by all input plugins: + +[cols="<,<,<",options="header",] +ifeval::["{versioned_docs}"!="true"] +|======================================================================= +|Setting |Input type|Required +| <> |{logstash-ref}/configuration-file-structure.html#hash[hash]|No +ifndef::no_codec[] +| <> |{logstash-ref}/configuration-file-structure.html#codec[codec]|No +endif::no_codec[] +| <> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <> |{logstash-ref}/configuration-file-structure.html#string[string]|No +| <> |{logstash-ref}/configuration-file-structure.html#array[array]|No +| <> |{logstash-ref}/configuration-file-structure.html#string[string]|No +|======================================================================= +endif::[] +ifeval::["{versioned_docs}"=="true"] +|======================================================================= +|Setting |Input type|Required +| <<{version}-plugins-{type}s-{plugin}-add_field>> |{logstash-ref}/configuration-file-structure.html#hash[hash]|No +ifndef::no_codec[] +| 
<<{version}-plugins-{type}s-{plugin}-codec>> |{logstash-ref}/configuration-file-structure.html#codec[codec]|No +endif::no_codec[] +| <<{version}-plugins-{type}s-{plugin}-enable_metric>> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <<{version}-plugins-{type}s-{plugin}-id>> |{logstash-ref}/configuration-file-structure.html#string[string]|No +| <<{version}-plugins-{type}s-{plugin}-tags>> |{logstash-ref}/configuration-file-structure.html#array[array]|No +| <<{version}-plugins-{type}s-{plugin}-type>> |{logstash-ref}/configuration-file-structure.html#string[string]|No +|======================================================================= +endif::[] + + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-add_field"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-add_field"] +endif::[] +===== `add_field` + + * Value type is {logstash-ref}/configuration-file-structure.html#hash[hash] + * Default value is `{}` + +Add a field to an event + +ifndef::no_codec[] +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-codec"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-codec"] +endif::[] +===== `codec` + + * Value type is {logstash-ref}/configuration-file-structure.html#codec[codec] +ifdef::default_codec[] + * Default value is +"{default_codec}"+ +endif::[] +ifndef::default_codec[] + * Default value is `"plain"` +endif::[] + +The codec used for input data. Input codecs are a convenient method for decoding your data before it enters the input, without needing a separate filter in your Logstash pipeline. 
+endif::no_codec[] + + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-enable_metric"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-enable_metric"] +endif::[] +===== `enable_metric` + + * Value type is {logstash-ref}/configuration-file-structure.html#boolean[boolean] + * Default value is `true` + +Disable or enable metric logging for this specific plugin instance. +By default we record all the metrics we can, but you can disable metrics collection +for a specific plugin. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-id"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-id"] +endif::[] +===== `id` + + * Value type is {logstash-ref}/configuration-file-structure.html#string[string] + * There is no default value for this setting. + +Add a unique `ID` to the plugin configuration. If no ID is specified, Logstash will generate one. +It is strongly recommended to set this ID in your configuration. This is particularly useful +when you have two or more plugins of the same type, for example, if you have 2 {plugin} inputs. +Adding a named ID in this case will help in monitoring Logstash when using the monitoring APIs. + +["source","json",subs="attributes"] +--------------------------------------------------------------------------------------------------- +input { + {plugin} { + id => "my_plugin_id" + } +} +--------------------------------------------------------------------------------------------------- + +NOTE: Variable substitution in the `id` field only supports environment variables + and does not support the use of values from the secret store. 
+ +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-tags"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-tags"] +endif::[] +===== `tags` + + * Value type is {logstash-ref}/configuration-file-structure.html#array[array] + * There is no default value for this setting. + +Add any number of arbitrary tags to your event. + +This can help with processing later. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-type"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-type"] +endif::[] +===== `type` + + * Value type is {logstash-ref}/configuration-file-structure.html#string[string] + * There is no default value for this setting. + +Add a `type` field to all events handled by this input. + +Types are used mainly for filter activation. + +The type is stored as part of the event itself, so you can +also use the type to search for it in Kibana. + +If you try to set a type on an event that already has one (for +example when you send an event from a shipper to an indexer) then +a new input will not override the existing type. A type set at +the shipper stays with that event for its life even +when sent to another Logstash server. + +ifeval::["{type}"=="input"] +ifeval::["{plugin}"=="beats"] + +ifeval::["{versioned_docs}"!="true"] +NOTE: The Beats shipper automatically sets the `type` field on the event. +You cannot override this setting in the Logstash config. If you specify +a setting for the <> config option in +Logstash, it is ignored. +endif::[] +ifeval::["{versioned_docs}"=="true"] +NOTE: The Beats shipper automatically sets the `type` field on the event. +You cannot override this setting in the Logstash config. If you specify +a setting for the <<{version}-plugins-inputs-beats-type,`type`>> config option in +Logstash, it is ignored. 
+endif::[] + +endif::[] +endif::[] + diff --git a/docs/plugins/include/output.asciidoc b/docs/plugins/include/output.asciidoc new file mode 100644 index 000000000..8e9453c4e --- /dev/null +++ b/docs/plugins/include/output.asciidoc @@ -0,0 +1,94 @@ +==== Common options + +// Contributors: You must conditionally code all internal links and IDs in this +// file to make the common files work in both the LS Reference and the versioned +// plugin docs + +These configuration options are supported by all output plugins: + +ifeval::["{versioned_docs}"!="true"] +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +ifndef::no_codec[] +| <> |{logstash-ref}/configuration-file-structure.html#codec[codec]|No +endif::no_codec[] +| <> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <> |{logstash-ref}/configuration-file-structure.html#string[string]|No +|======================================================================= +endif::[] +ifeval::["{versioned_docs}"=="true"] +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +ifndef::no_codec[] +| <<{version}-plugins-{type}s-{plugin}-codec>> |{logstash-ref}/configuration-file-structure.html#codec[codec]|No +endif::no_codec[] +| <<{version}-plugins-{type}s-{plugin}-enable_metric>> |{logstash-ref}/configuration-file-structure.html#boolean[boolean]|No +| <<{version}-plugins-{type}s-{plugin}-id>> |{logstash-ref}/configuration-file-structure.html#string[string]|No +|======================================================================= +endif::[] + +ifndef::no_codec[] +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-codec"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-codec"] +endif::[] +===== `codec` + + * Value type is 
{logstash-ref}/configuration-file-structure.html#codec[codec] +ifdef::default_codec[] + * Default value is +"{default_codec}"+ +endif::[] +ifndef::default_codec[] + * Default value is `"plain"` +endif::[] + +The codec used for output data. Output codecs are a convenient method for encoding your data before it leaves the output without needing a separate filter in your Logstash pipeline. +endif::no_codec[] + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-enable_metric"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-enable_metric"] +endif::[] +===== `enable_metric` + + * Value type is {logstash-ref}/configuration-file-structure.html#boolean[boolean] + * Default value is `true` + +Disable or enable metric logging for this specific plugin instance. +By default we record all the metrics we can, but you can disable metrics collection +for a specific plugin. + +ifeval::["{versioned_docs}"!="true"] +[id="plugins-{type}s-{plugin}-id"] +endif::[] +ifeval::["{versioned_docs}"=="true"] +[id="{version}-plugins-{type}s-{plugin}-id"] +endif::[] +===== `id` + + * Value type is {logstash-ref}/configuration-file-structure.html#string[string] + * There is no default value for this setting. + +Add a unique `ID` to the plugin configuration. If no ID is specified, Logstash will generate one. +It is strongly recommended to set this ID in your configuration. This is particularly useful +when you have two or more plugins of the same type, for example, if you have 2 {plugin} outputs. +Adding a named ID in this case will help in monitoring Logstash when using the monitoring APIs. 
+ +["source","json",subs="attributes"] +--------------------------------------------------------------------------------------------------- +output { + {plugin} { + id => "my_plugin_id" + } +} +--------------------------------------------------------------------------------------------------- + +NOTE: Variable substitution in the `id` field only supports environment variables + and does not support the use of values from the secret store. + diff --git a/docs/plugins/include/plugin_header-core.asciidoc b/docs/plugins/include/plugin_header-core.asciidoc new file mode 100644 index 000000000..eec4d5901 --- /dev/null +++ b/docs/plugins/include/plugin_header-core.asciidoc @@ -0,0 +1,14 @@ +[subs="attributes"] +++++ +{plugin} +++++ + +*{ls} Core Plugin.* The {plugin} {type} plugin cannot be +installed or uninstalled independently of {ls}. + +==== Getting help + +For questions about the plugin, open a topic in the +http://discuss.elastic.co[Discuss] forums. For bugs or feature requests, open an +issue in https://github.com/logstash[Github]. + diff --git a/docs/plugins/include/plugin_header-integration.asciidoc b/docs/plugins/include/plugin_header-integration.asciidoc new file mode 100644 index 000000000..525f84181 --- /dev/null +++ b/docs/plugins/include/plugin_header-integration.asciidoc @@ -0,0 +1,19 @@ +[subs="attributes"] +++++ +{plugin} +++++ + +* A component of the <> +* Integration version: {version} +* Released on: {release_date} +* {changelog_url}[Changelog] + +For other versions, see the +{lsplugindocs}/{type}-{plugin}-index.html[Versioned plugin docs]. + +==== Getting help + +For questions about the plugin, open a topic in the http://discuss.elastic.co[Discuss] forums. +For bugs or feature requests, open an issue in https://github.com/logstash-plugins/logstash-integration-{integration}[Github]. +For the list of Elastic supported plugins, please consult the https://www.elastic.co/support/matrix#logstash_plugins[Elastic Support Matrix]. 
+ diff --git a/docs/plugins/include/plugin_header.asciidoc b/docs/plugins/include/plugin_header.asciidoc new file mode 100644 index 000000000..c8a169810 --- /dev/null +++ b/docs/plugins/include/plugin_header.asciidoc @@ -0,0 +1,25 @@ +[subs="attributes"] +++++ +{plugin} +++++ + +* Plugin version: {version} +* Released on: {release_date} +* {changelog_url}[Changelog] + +For other versions, see the +{lsplugindocs}/{type}-{plugin}-index.html[Versioned plugin docs]. + +ifeval::["{default_plugin}"=="0"] + +==== Installation + +For plugins not bundled by default, it is easy to install by running +bin/logstash-plugin install logstash-{type}-{plugin}+. See {logstash-ref}/working-with-plugins.html[Working with plugins] for more details. + +endif::[] + +==== Getting help + +For questions about the plugin, open a topic in the http://discuss.elastic.co[Discuss] forums. For bugs or feature requests, open an issue in https://github.com/logstash-plugins/logstash-{type}-{plugin}[Github]. +For the list of Elastic supported plugins, please consult the https://www.elastic.co/support/matrix#logstash_plugins[Elastic Support Matrix]. + diff --git a/docs/plugins/include/version-list-intro.asciidoc b/docs/plugins/include/version-list-intro.asciidoc new file mode 100644 index 000000000..c396d201c --- /dev/null +++ b/docs/plugins/include/version-list-intro.asciidoc @@ -0,0 +1,14 @@ +[id="{type}-{plugin}-index"] + +== Versioned {plugin} {type} plugin docs +[subs="attributes"] +++++ +{plugin} +++++ + +This page lists all available versions of the documentation for this plugin. +To see which version of the plugin you have installed, run `bin/logstash-plugin +list --verbose`. + +NOTE: Versioned plugin documentation is not available for plugins released prior +to Logstash 6.0. 
diff --git a/docs/plugins/index.asciidoc b/docs/plugins/index.asciidoc new file mode 100644 index 000000000..60e3b2f99 --- /dev/null +++ b/docs/plugins/index.asciidoc @@ -0,0 +1,114 @@ +[[logstash-reference]] += Logstash Plugins only + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] +include::./include/attributes-ls.asciidoc[] +include::./include/attributes-lsplugins.asciidoc[] + +:include-xpack: true +:lang: en +:xls-repo-dir: {docdir}/../x-pack/docs/{lang} +:log-repo-dir: {docdir} +:plugins-repo-dir: {docdir}/../../logstash-docs/docs +:docker-repo: docker.elastic.co/logstash/logstash +:docker-image: {docker-repo}:{logstash_version} + +:versioned_docs: false + +:jdk: 1.8.0 +:lsissue: https://github.com/elastic/logstash/issues +:lsplugindocs: https://www.elastic.co/guide/en/logstash-versioned-plugins/current +:tab-widget-dir: {docdir}/static/tab-widgets + + +[[introduction]] +== Logstash Plugins + +Some sort of intro goes here. + +[[string]] +[discrete] +==== String + +A string must be a single character sequence. Note that string values are +enclosed in quotes, either double or single. + +[[array]] +[discrete] +==== Array + +This type is now mostly deprecated in favor of using a standard type like `string` with the plugin defining the `:list => true` property for better type checking. It is still needed to handle lists of hashes or mixed types where type checking is not desired. + +[[number]] +[discrete] +==== Number + +Numbers must be valid numeric values (floating point or integer). + +[[boolean]] +[discrete] +==== Boolean + +A boolean must be either `true` or `false`. Note that the `true` and `false` keywords +are not enclosed in quotes. + +[[password]] +[discrete] +==== Password + +A password is a string with a single value that is not logged or printed. + +[[path]] +[discrete] +==== Path + +A path is a string that represents a valid operating system path. 
+ +[[hash]] +[discrete] +==== Hash + +A hash is a collection of key value pairs specified in the format `"field1" => "value1"`. +Note that multiple key value entries are separated by spaces rather than commas. + +[[uri]] +[discrete] +==== URI + +A URI can be anything from a full URL like 'http://elastic.co/' to a simple identifier +like 'foobar'. If the URI contains a password such as 'http://user:pass@example.net' the password +portion of the URI will not be logged or printed. + +[[bytes]] +[discrete] +==== Bytes + +A bytes field is a string field that represents a valid unit of bytes. It is a +convenient way to declare specific sizes in your plugin options. Both SI (k M G T P E Z Y) +and Binary (Ki Mi Gi Ti Pi Ei Zi Yi) units are supported. Binary units are in +base-1024 and SI units are in base-1000. This field is case-insensitive +and accepts space between the value and the unit. If no unit is specified, the integer string +represents the number of bytes. + +[[logstash-config-field-references]] +[discrete] +==== CFR + +words + +[[event-api]] +[discrete] +==== Event API + +words + +include::integrations.asciidoc[] + +include::inputs.asciidoc[] + +include::outputs.asciidoc[] + +include::filters.asciidoc[] + +include::codecs.asciidoc[] diff --git a/docs/plugins/inputs.asciidoc b/docs/plugins/inputs.asciidoc index 657064843..bdf9ca7e8 100644 --- a/docs/plugins/inputs.asciidoc +++ b/docs/plugins/inputs.asciidoc @@ -133,10 +133,10 @@ include::inputs/imap.asciidoc[] include::inputs/irc.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/inputs/java_generator.asciidoc -include::../../../logstash/docs/static/core-plugins/inputs/java_generator.asciidoc[] +include::./static/core-plugins/inputs/java_generator.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/inputs/java_stdin.asciidoc -include::../../../logstash/docs/static/core-plugins/inputs/java_stdin.asciidoc[] 
+include::./static/core-plugins/inputs/java_stdin.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-integration-jdbc/edit/main/docs/input-jdbc.asciidoc include::inputs/jdbc.asciidoc[] @@ -186,7 +186,7 @@ include::inputs/rss.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-input-s3/edit/main/docs/index.asciidoc include::inputs/s3.asciidoc[] -:edit_url: +:edit_url: include::inputs/s3-sns-sqs.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-input-salesforce/edit/main/docs/index.asciidoc @@ -238,4 +238,4 @@ include::inputs/wmi.asciidoc[] include::inputs/xmpp.asciidoc[] -:edit_url: +:edit_url: diff --git a/docs/plugins/inputs/azure_event_hubs.asciidoc b/docs/plugins/inputs/azure_event_hubs.asciidoc index 7bcb0afe5..7b1ac3d5d 100644 --- a/docs/plugins/inputs/azure_event_hubs.asciidoc +++ b/docs/plugins/inputs/azure_event_hubs.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.5.1 :release_date: 2025-01-03 :changelog_url: https://github.com/logstash-plugins/logstash-input-azure_event_hubs/blob/v1.5.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -28,11 +28,11 @@ highly scalable data streaming platform and event ingestion service. Event producers send events to the Azure Event Hub, and this plugin consumes those events for use with Logstash. -Many Azure services integrate with the Azure Event Hubs. +Many Azure services integrate with the Azure Event Hubs. https://docs.microsoft.com/en-us/azure/monitoring-and-diagnostics/monitoring-overview-azure-monitor[Azure -Monitor], for example, integrates with Azure Event Hubs to provide infrastructure metrics. +Monitor], for example, integrates with Azure Event Hubs to provide infrastructure metrics. 
-IMPORTANT: This plugin requires outbound connections to ports `tcp/443`, `tcp/9093`, `tcp/5671`, and `tcp/5672`, +IMPORTANT: This plugin requires outbound connections to ports `tcp/443`, `tcp/9093`, `tcp/5671`, and `tcp/5672`, as noted in the https://learn.microsoft.com/en-us/azure/event-hubs/event-hubs-faq#what-ports-do-i-need-to-open-on-the-firewall[Microsoft Event Hub documentation]. ===== Event Hub connection string @@ -52,7 +52,7 @@ Endpoint=sb://logstash.servicebus.windows.net/;SharedAccessKeyName=activity-log- ===== Blob Storage and connection string https://azure.microsoft.com/en-us/services/storage/blobs[Azure Blob Storage -account] is an essential part of Azure-to-Logstash configuration. +account] is an essential part of Azure-to-Logstash configuration. A Blob Storage account is a central location that enables multiple instances of Logstash to work together to process events. It records the offset (location) of processed events. On restart, Logstash resumes processing @@ -62,7 +62,7 @@ Configuration notes: * A Blob Storage account is highly recommended for use with this plugin, and is likely required for production servers. -* The `storage_connection` option passes the blob storage connection string. +* The `storage_connection` option passes the blob storage connection string. * Configure all Logstash instances to use the same `storage_connection` to get the benefits of shared processing. @@ -73,7 +73,7 @@ Sample Blob Storage connection string: DefaultEndpointsProtocol=https;AccountName=logstash;AccountKey=ETOPnkd/hDAWidkEpPZDiXffQPku/SZdXhPSLnfqdRTalssdEuPkZwIcouzXjCLb/xPZjzhmHfwRCGo0SBSw==;EndpointSuffix=core.windows.net ---- -Find the connection string to Blob Storage here: +Find the connection string to Blob Storage here: https://portal.azure.com[Azure Portal]`-> Blob Storage account -> Access keys`. [id="plugins-{type}s-{plugin}-best-practices"] @@ -83,8 +83,8 @@ Here are some guidelines to help you avoid data conflicts that can cause lost events. 
* <> -* <> -* <> +* <> +* <> [id="plugins-{type}s-{plugin}-bp-group"] ====== Create a Logstash consumer group @@ -99,7 +99,7 @@ work together for processing events. The offsets (position) of the Event Hubs are stored in the configured Azure Blob store. The Azure Blob store uses paths like a file system to store the offsets. If the paths between multiple Event Hubs overlap, then the offsets may be stored -incorrectly. +incorrectly. To avoid duplicate file paths, use the advanced configuration model and make sure that at least one of these options is different per Event Hub: @@ -127,7 +127,7 @@ independently to each. **Example: Single event hub** -If you’re collecting activity logs from one event hub instance, +If you’re collecting activity logs from one event hub instance, then only 2 threads are required. * Event hubs = 1 @@ -137,12 +137,12 @@ then only 2 threads are required. If you are collecting activity logs from more than event hub instance, then at least 1 thread per event hub is required. -* Event hubs = 4 -* Minimum threads = 5 (4 Event Hubs + 1) +* Event hubs = 4 +* Minimum threads = 5 (4 Event Hubs + 1) When you are using multiple partitions per event hub, you may want to assign more threads. A good base level is (1 + `number of event hubs * number of partitions`). -That is, one thread for each partition across all event hubs. +That is, one thread for each partition across all event hubs. [id="plugins-{type}s-{plugin}-eh_config_models"] ==== Configuration models @@ -181,9 +181,9 @@ The advanced configuration model accommodates deployments where different Event Hubs require different configurations. Options can be configured per Event Hub. You provide a list of Event Hub names through the `event_hubs` option. Under each name, specify the configuration for that Event Hub. Options can be defined -globally or expressed per Event Hub. +globally or expressed per Event Hub. 
-If the same configuration option appears in both the global and `event_hub` +If the same configuration option appears in both the global and `event_hub` sections, the more specific (event_hub) setting takes precedence. NOTE: Advanced configuration is not necessary or recommended for most use cases. @@ -214,7 +214,7 @@ input { In this example, `storage_connection` and `decorate_events` are applied globally. The two Event Hubs each have their own settings for `consumer_groups` -and `initial_position`. +and `initial_position`. [id="plugins-{type}s-{plugin}-options"] ==== Azure Event Hubs Configuration Options @@ -253,7 +253,7 @@ configuration uses `event_hubs` and `event_hub_connection` (singular). * Valid entries are `basic` or `advanced` * Default value is `basic` -Sets configuration to either <> or <>. +Sets configuration to either <> or <>. [source,ruby] ---- @@ -267,10 +267,10 @@ azure_event_hubs { * Value type is <> * No default value * Ignored for basic configuration -* Required for advanced configuration +* Required for advanced configuration Defines the Event Hubs to be read. An array of hashes where each entry is a -hash of the Event Hub name and its configuration options. +hash of the Event Hub name and its configuration options. [source,ruby] ---- @@ -297,7 +297,7 @@ azure_event_hubs { * Required for basic configuration List of connection strings that identifies the Event Hubs to be read. Connection -strings include the EntityPath for the Event Hub. +strings include the EntityPath for the Event Hub. The `event_hub_connections` option is defined per Event Hub. All other configuration options are shared among Event Hubs. @@ -337,12 +337,12 @@ azure_event_hubs { * Value type is <> * Default value is `5` seconds * Set to `0` to disable. - + Interval in seconds to write checkpoints during batch processing. Checkpoints tell Logstash where to resume processing after a restart. 
Checkpoints are -automatically written at the end of each batch, regardless of this setting. +automatically written at the end of each batch, regardless of this setting. -Writing checkpoints too frequently can slow down processing unnecessarily. +Writing checkpoints too frequently can slow down processing unnecessarily. [source,ruby] ---- @@ -355,7 +355,7 @@ azure_event_hubs { [id="plugins-{type}s-{plugin}-consumer_group"] ===== `consumer_group` * Value type is <> -* Default value is `$Default` +* Default value is `$Default` Consumer group used to read the Event Hub(s). Create a consumer group specifically for Logstash. Then ensure that all instances of Logstash use that @@ -376,7 +376,7 @@ azure_event_hubs { * Default value is `false` Adds metadata about the Event Hub, including Event Hub name, consumer_group, -processor_host, partition, offset, sequence, timestamp, and event_size. +processor_host, partition, offset, sequence, timestamp, and event_size. [source,ruby] ---- @@ -392,10 +392,10 @@ azure_event_hubs { * Valid arguments are `beginning`, `end`, `look_back` * Default value is `beginning` -When first reading from an Event Hub, start from this position: +When first reading from an Event Hub, start from this position: -* `beginning` reads all pre-existing events in the Event Hub -* `end` does not read any pre-existing events in the Event Hub +* `beginning` reads all pre-existing events in the Event Hub +* `end` does not read any pre-existing events in the Event Hub * `look_back` reads `end` minus a number of seconds worth of pre-existing events. You control the number of seconds using the `initial_position_look_back` option. @@ -419,7 +419,7 @@ azure_event_hubs { Number of seconds to look back to find the initial position for pre-existing events. This option is used only if `initial_position` is set to `look_back`. If `storage_connection` is set, this configuration applies only the first time Logstash -reads from the Event Hub. +reads from the Event Hub. 
[source,ruby] ---- @@ -438,7 +438,7 @@ azure_event_hubs { Maximum number of events retrieved and processed together. A checkpoint is created after each batch. Increasing this value may help with performance, but -requires more memory. +requires more memory. [source,ruby] ---- @@ -451,11 +451,11 @@ azure_event_hubs { [id="plugins-{type}s-{plugin}-storage_connection"] ===== `storage_connection` * Value type is <> -* No default value +* No default value Connection string for blob account storage. Blob account storage persists the offsets between restarts, and ensures that multiple instances of Logstash -process different partitions. +process different partitions. When this value is set, restarts resume where processing left off. When this value is not set, the `initial_position` value is used on every restart. @@ -476,8 +476,8 @@ azure_event_hubs { * Defaults to the Event Hub name if not defined Name of the storage container used to persist offsets and allow multiple instances of Logstash -to work together. - +to work together. + [source,ruby] ---- azure_event_hubs { @@ -490,7 +490,7 @@ azure_event_hubs { To avoid overwriting offsets, you can use different storage containers. This is particularly important if you are monitoring two Event Hubs with the same name. You can use the advanced configuration model to configure different storage -containers. +containers. [source,ruby] ---- @@ -519,7 +519,7 @@ azure_event_hubs { Total number of threads used to process events. The value you set here applies to all Event Hubs. Even with advanced configuration, this value is a global -setting, and can't be set per event hub. +setting, and can't be set per event hub. [source,ruby] ---- @@ -528,7 +528,7 @@ azure_event_hubs { } ---- -The number of threads should be the number of Event Hubs plus one or more. +The number of threads should be the number of Event Hubs plus one or more. See <> for more information. 
[id="plugins-{type}s-{plugin}-common-options"] diff --git a/docs/plugins/inputs/beats.asciidoc b/docs/plugins/inputs/beats.asciidoc index 82d500533..f69e2741a 100644 --- a/docs/plugins/inputs/beats.asciidoc +++ b/docs/plugins/inputs/beats.asciidoc @@ -11,7 +11,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.0.2 :release_date: 2025-02-12 :changelog_url: https://github.com/logstash-plugins/logstash-input-beats/blob/v7.0.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -21,7 +21,7 @@ END - GENERATED VARIABLES, DO NOT EDIT! === {plugin-uc} input plugin NOTE: The `input-elastic_agent` plugin is the next generation of the -`input-beats` plugin. +`input-beats` plugin. They currently share code and a https://github.com/logstash-plugins/logstash-input-beats[common codebase]. include::{include_path}/plugin_header.asciidoc[] @@ -94,9 +94,9 @@ endif::[] This plugin uses "off-heap" direct memory in addition to heap memory. By default, a JVM's off-heap direct memory limit is the same as the heap size. -For example, setting `-Xmx10G` without setting the direct memory limit will allocate `10GB` for heap and an additional `10GB` for direct memory, for a total of `20GB` allocated. -You can set the amount of direct memory with `-XX:MaxDirectMemorySize` in {logstash-ref}/jvm-settings.html[Logstash JVM Settings]. -Consider setting direct memory to half of the heap size. +For example, setting `-Xmx10G` without setting the direct memory limit will allocate `10GB` for heap and an additional `10GB` for direct memory, for a total of `20GB` allocated. +You can set the amount of direct memory with `-XX:MaxDirectMemorySize` in {logstash-ref}/jvm-settings.html[Logstash JVM Settings]. +Consider setting direct memory to half of the heap size. Setting direct memory too low decreases the performance of ingestion. 
NOTE: Be sure that heap and direct memory combined does not exceed the total memory available on the server to avoid an OutOfDirectMemoryError @@ -146,7 +146,7 @@ endif::[] [id="plugins-{type}s-{plugin}-ecs_metadata"] ==== Event enrichment and the Elastic Common Schema (ECS) -When decoding {plugin-uc} events, this plugin enriches each event with metadata about the event's source, making this information available during further processing. +When decoding {plugin-uc} events, this plugin enriches each event with metadata about the event's source, making this information available during further processing. You can use the <> option to activate or deactivate individual enrichment categories. The location of these enrichment fields depends on whether <> is enabled: @@ -352,7 +352,7 @@ See <> configuration if you need to s The number of threads to be used to process incoming {plugin-uc} requests. By default, the {plugin-uc} input creates a number of threads equal to the number of CPU cores. -These threads handle incoming connections, reading from established sockets, and executing most of the tasks related to network connection management. +These threads handle incoming connections, reading from established sockets, and executing most of the tasks related to network connection management. Parsing the Lumberjack protocol is offloaded to a dedicated thread pool. Generally you don't need to touch this setting. @@ -459,7 +459,7 @@ Time in milliseconds for an incomplete ssl handshake to timeout * There is no default value for this setting. SSL key to use. -This key must be in the PKCS8 format and PEM encoded. +This key must be in the PKCS8 format and PEM encoded. You can use the https://www.openssl.org/docs/man1.1.1/man1/openssl-pkcs8.html[openssl pkcs8] command to complete the conversion. 
For example, the command to convert a PEM encoded PKCS1 private key to a PEM encoded, non-encrypted PKCS8 key is: diff --git a/docs/plugins/inputs/cloudwatch.asciidoc b/docs/plugins/inputs/cloudwatch.asciidoc index 941ee2ce6..c2ef7a8ab 100644 --- a/docs/plugins/inputs/cloudwatch.asciidoc +++ b/docs/plugins/inputs/cloudwatch.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -137,7 +137,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -151,7 +151,7 @@ This plugin uses the AWS SDK and supports several ways to get credentials, which 5. IAM Instance Profile (available when running inside EC2) [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. @@ -169,7 +169,7 @@ file should look like this: [id="plugins-{type}s-{plugin}-combined"] -===== `combined` +===== `combined` * Value type is <> * Default value is `false` @@ -187,7 +187,7 @@ This is useful when connecting to S3 compatible services, but beware that these guaranteed to work correctly with the AWS SDK. [id="plugins-{type}s-{plugin}-filters"] -===== `filters` +===== `filters` * This setting can be required or optional. See note below. * Value type is <> @@ -205,7 +205,7 @@ Each namespace uniquely support certain dimensions. Please consult the documenta to ensure you're using valid filters. 
[id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * Value type is <> * Default value is `900` @@ -216,7 +216,7 @@ The default, `900`, means check every 15 minutes. Setting this value too low (generally less than 300) results in no metrics being returned from CloudWatch. [id="plugins-{type}s-{plugin}-metrics"] -===== `metrics` +===== `metrics` * Value type is <> * Default value is `["CPUUtilization", "DiskReadOps", "DiskWriteOps", "NetworkIn", "NetworkOut"]` @@ -225,7 +225,7 @@ Specify the metrics to fetch for the namespace. The defaults are AWS/EC2 specifi for the available metrics for other namespaces. [id="plugins-{type}s-{plugin}-namespace"] -===== `namespace` +===== `namespace` * Value type is <> * Default value is `"AWS/EC2"` @@ -237,7 +237,7 @@ The default is for the EC2 service. See http://docs.aws.amazon.com/AmazonCloudWa for valid values. [id="plugins-{type}s-{plugin}-period"] -===== `period` +===== `period` * Value type is <> * Default value is `300` @@ -247,7 +247,7 @@ Set the granularity of the returned datapoints. Must be at least 60 seconds and in multiples of 60. [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. @@ -255,7 +255,7 @@ Must be at least 60 seconds and in multiples of 60. URI to proxy server if required [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value type is <> * Default value is `"us-east-1"` @@ -281,7 +281,7 @@ See the https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html[ Session name to use when assuming an IAM role. [id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -289,7 +289,7 @@ Session name to use when assuming an IAM role. 
The AWS Secret Access Key [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. @@ -297,7 +297,7 @@ The AWS Secret Access Key The AWS Session token for temporary credential [id="plugins-{type}s-{plugin}-statistics"] -===== `statistics` +===== `statistics` * Value type is <> * Default value is `["SampleCount", "Average", "Minimum", "Maximum", "Sum"]` @@ -315,7 +315,7 @@ For cases where the default certificates are unavailable, e.g. Windows, you can set this to `true`. [id="plugins-{type}s-{plugin}-use_ssl"] -===== `use_ssl` +===== `use_ssl` * Value type is <> * Default value is `true` diff --git a/docs/plugins/inputs/couchdb_changes.asciidoc b/docs/plugins/inputs/couchdb_changes.asciidoc index 89a686edb..77ff1a36d 100644 --- a/docs/plugins/inputs/couchdb_changes.asciidoc +++ b/docs/plugins/inputs/couchdb_changes.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.6 :release_date: 2019-04-15 :changelog_url: https://github.com/logstash-plugins/logstash-input-couchdb_changes/blob/v3.1.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -67,7 +67,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-always_reconnect"] -===== `always_reconnect` +===== `always_reconnect` * Value type is <> * Default value is `true` @@ -75,7 +75,7 @@ input plugins. Reconnect flag. When true, always try to reconnect after a failure [id="plugins-{type}s-{plugin}-ca_file"] -===== `ca_file` +===== `ca_file` * Value type is <> * There is no default value for this setting. @@ -83,7 +83,7 @@ Reconnect flag. 
When true, always try to reconnect after a failure Path to a CA certificate file, used to validate certificates [id="plugins-{type}s-{plugin}-db"] -===== `db` +===== `db` * This is a required setting. * Value type is <> @@ -93,7 +93,7 @@ The CouchDB db to connect to. Required parameter. [id="plugins-{type}s-{plugin}-heartbeat"] -===== `heartbeat` +===== `heartbeat` * Value type is <> * Default value is `1000` @@ -104,7 +104,7 @@ CouchDB to ensure the connection is maintained. Changing this setting is not recommended unless you know what you are doing. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -112,7 +112,7 @@ setting is not recommended unless you know what you are doing. IP or hostname of your CouchDB instance [id="plugins-{type}s-{plugin}-ignore_attachments"] -===== `ignore_attachments` +===== `ignore_attachments` * Value type is <> * Default value is `true` @@ -123,7 +123,7 @@ will not do anything. Ignore attachments associated with CouchDB documents. [id="plugins-{type}s-{plugin}-initial_sequence"] -===== `initial_sequence` +===== `initial_sequence` * Value type is <> * There is no default value for this setting. @@ -137,7 +137,7 @@ only be doing so for an initial read under special circumstances and that you will unset this value afterwards. [id="plugins-{type}s-{plugin}-keep_id"] -===== `keep_id` +===== `keep_id` * Value type is <> * Default value is `false` @@ -146,7 +146,7 @@ Preserve the CouchDB document id "_id" value in the output. [id="plugins-{type}s-{plugin}-keep_revision"] -===== `keep_revision` +===== `keep_revision` * Value type is <> * Default value is `false` @@ -155,7 +155,7 @@ Preserve the CouchDB document revision "_rev" value in the output. 
[id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * Default value is `nil` @@ -164,7 +164,7 @@ Password, if authentication is needed to connect to CouchDB [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `5984` @@ -172,7 +172,7 @@ CouchDB Port of your CouchDB instance. [id="plugins-{type}s-{plugin}-reconnect_delay"] -===== `reconnect_delay` +===== `reconnect_delay` * Value type is <> * Default value is `10` @@ -180,7 +180,7 @@ Port of your CouchDB instance. Reconnect delay: time between reconnect attempts, in seconds. [id="plugins-{type}s-{plugin}-secure"] -===== `secure` +===== `secure` * Value type is <> * Default value is `false` @@ -189,7 +189,7 @@ Connect to CouchDB's _changes feed securely (via https) Default: false (via http) [id="plugins-{type}s-{plugin}-sequence_path"] -===== `sequence_path` +===== `sequence_path` * Value type is <> * There is no default value for this setting. @@ -198,7 +198,7 @@ File path where the last sequence number in the _changes stream is stored. If unset it will write to `$HOME/.couchdb_seq` [id="plugins-{type}s-{plugin}-timeout"] -===== `timeout` +===== `timeout` * Value type is <> * There is no default value for this setting. @@ -208,7 +208,7 @@ terminating the connection. If a timeout is set it will disable the heartbeat configuration option. [id="plugins-{type}s-{plugin}-username"] -===== `username` +===== `username` * Value type is <> * Default value is `nil` diff --git a/docs/plugins/inputs/dead_letter_queue.asciidoc b/docs/plugins/inputs/dead_letter_queue.asciidoc index 4cad5392c..1a32860e7 100644 --- a/docs/plugins/inputs/dead_letter_queue.asciidoc +++ b/docs/plugins/inputs/dead_letter_queue.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v2.0.1 :release_date: 2024-09-04 :changelog_url: https://github.com/logstash-plugins/logstash-input-dead_letter_queue/blob/v2.0.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -70,7 +70,7 @@ This feature requires that `commit_offsets` is set to `true`. If not, you'll get This feature is available in Logstash 8.4.0 and later. If this setting is `true` and and you are using a Logstash version older than 8.4.0, then you'll get a configuration error. [id="plugins-{type}s-{plugin}-commit_offsets"] -===== `commit_offsets` +===== `commit_offsets` * Value type is <> * Default value is `true` @@ -78,21 +78,21 @@ This feature is available in Logstash 8.4.0 and later. If this setting is `true` Specifies whether this input should commit offsets as it processes the events. Typically you specify `false` when you want to iterate multiple times over the events in the dead letter queue, but don't want to save state. This is when you -are exploring the events in the dead letter queue. +are exploring the events in the dead letter queue. [id="plugins-{type}s-{plugin}-path"] -===== `path` +===== `path` * This is a required setting. * Value type is <> * There is no default value for this setting. Path to the dead letter queue directory that was created by a Logstash instance. -This is the path from which "dead" events are read and is typically configured +This is the path from which "dead" events are read and is typically configured in the original Logstash instance with the setting `path.dead_letter_queue`. [id="plugins-{type}s-{plugin}-pipeline_id"] -===== `pipeline_id` +===== `pipeline_id` * Value type is <> * Default value is `"main"` @@ -100,23 +100,23 @@ in the original Logstash instance with the setting `path.dead_letter_queue`. ID of the pipeline whose events you want to read from. 
[id="plugins-{type}s-{plugin}-sincedb_path"] -===== `sincedb_path` +===== `sincedb_path` * Value type is <> * There is no default value for this setting. -Path of the sincedb database file (keeps track of the current position of dead letter queue) that +Path of the sincedb database file (keeps track of the current position of dead letter queue) that will be written to disk. The default will write sincedb files to `/plugins/inputs/dead_letter_queue`. NOTE: This value must be a file path and not a directory path. [id="plugins-{type}s-{plugin}-start_timestamp"] -===== `start_timestamp` +===== `start_timestamp` * Value type is <> * There is no default value for this setting. -Timestamp in ISO8601 format from when you want to start processing the events from. +Timestamp in ISO8601 format from when you want to start processing the events from. For example, `2017-04-04T23:40:37`. diff --git a/docs/plugins/inputs/elastic_agent.asciidoc b/docs/plugins/inputs/elastic_agent.asciidoc index 31ce5143e..e781f979f 100644 --- a/docs/plugins/inputs/elastic_agent.asciidoc +++ b/docs/plugins/inputs/elastic_agent.asciidoc @@ -11,7 +11,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.0.2 :release_date: 2025-02-12 :changelog_url: https://github.com/logstash-plugins/logstash-input-beats/blob/v7.0.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -21,7 +21,7 @@ END - GENERATED VARIABLES, DO NOT EDIT! === {plugin-uc} input plugin NOTE: The `input-elastic_agent` plugin is the next generation of the -`input-beats` plugin. +`input-beats` plugin. They currently share code and a https://github.com/logstash-plugins/logstash-input-beats[common codebase]. include::{include_path}/plugin_header.asciidoc[] @@ -94,9 +94,9 @@ endif::[] This plugin uses "off-heap" direct memory in addition to heap memory. 
By default, a JVM's off-heap direct memory limit is the same as the heap size. -For example, setting `-Xmx10G` without setting the direct memory limit will allocate `10GB` for heap and an additional `10GB` for direct memory, for a total of `20GB` allocated. -You can set the amount of direct memory with `-XX:MaxDirectMemorySize` in {logstash-ref}/jvm-settings.html[Logstash JVM Settings]. -Consider setting direct memory to half of the heap size. +For example, setting `-Xmx10G` without setting the direct memory limit will allocate `10GB` for heap and an additional `10GB` for direct memory, for a total of `20GB` allocated. +You can set the amount of direct memory with `-XX:MaxDirectMemorySize` in {logstash-ref}/jvm-settings.html[Logstash JVM Settings]. +Consider setting direct memory to half of the heap size. Setting direct memory too low decreases the performance of ingestion. NOTE: Be sure that heap and direct memory combined does not exceed the total memory available on the server to avoid an OutOfDirectMemoryError @@ -146,7 +146,7 @@ endif::[] [id="plugins-{type}s-{plugin}-ecs_metadata"] ==== Event enrichment and the Elastic Common Schema (ECS) -When decoding {plugin-uc} events, this plugin enriches each event with metadata about the event's source, making this information available during further processing. +When decoding {plugin-uc} events, this plugin enriches each event with metadata about the event's source, making this information available during further processing. You can use the <> option to activate or deactivate individual enrichment categories. The location of these enrichment fields depends on whether <> is enabled: @@ -352,7 +352,7 @@ See <> configuration if you need to s The number of threads to be used to process incoming {plugin-uc} requests. By default, the {plugin-uc} input creates a number of threads equal to the number of CPU cores. 
-These threads handle incoming connections, reading from established sockets, and executing most of the tasks related to network connection management. +These threads handle incoming connections, reading from established sockets, and executing most of the tasks related to network connection management. Parsing the Lumberjack protocol is offloaded to a dedicated thread pool. Generally you don't need to touch this setting. @@ -459,7 +459,7 @@ Time in milliseconds for an incomplete ssl handshake to timeout * There is no default value for this setting. SSL key to use. -This key must be in the PKCS8 format and PEM encoded. +This key must be in the PKCS8 format and PEM encoded. You can use the https://www.openssl.org/docs/man1.1.1/man1/openssl-pkcs8.html[openssl pkcs8] command to complete the conversion. For example, the command to convert a PEM encoded PKCS1 private key to a PEM encoded, non-encrypted PKCS8 key is: diff --git a/docs/plugins/inputs/elastic_serverless_forwarder.asciidoc b/docs/plugins/inputs/elastic_serverless_forwarder.asciidoc index 5361d4465..8142444bf 100644 --- a/docs/plugins/inputs/elastic_serverless_forwarder.asciidoc +++ b/docs/plugins/inputs/elastic_serverless_forwarder.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v2.0.0 :release_date: 2024-12-23 :changelog_url: https://github.com/logstash-plugins/logstash-input-elastic_serverless_forwarder/blob/v2.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/elasticsearch.asciidoc b/docs/plugins/inputs/elasticsearch.asciidoc index 88495366c..3ebf815b8 100644 --- a/docs/plugins/inputs/elasticsearch.asciidoc +++ b/docs/plugins/inputs/elasticsearch.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v5.0.0 :release_date: 2024-12-18 :changelog_url: https://github.com/logstash-plugins/logstash-input-elasticsearch/blob/v5.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/exec.asciidoc b/docs/plugins/inputs/exec.asciidoc index 586c76632..c07fa082f 100644 --- a/docs/plugins/inputs/exec.asciidoc +++ b/docs/plugins/inputs/exec.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.6.0 :release_date: 2022-06-15 :changelog_url: https://github.com/logstash-plugins/logstash-input-exec/blob/v3.6.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -89,7 +89,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-command"] -===== `command` +===== `command` * This is a required setting. * Value type is <> @@ -150,7 +150,7 @@ See <> for detailed information. ----- [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * Value type is <> * There is no default value for this setting. @@ -160,7 +160,7 @@ Interval to run the command. Value is in seconds. Either `interval` or `schedule` option must be defined. [id="plugins-{type}s-{plugin}-schedule"] -===== `schedule` +===== `schedule` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/inputs/file.asciidoc b/docs/plugins/inputs/file.asciidoc index 04e567d15..b3d3bd10e 100644 --- a/docs/plugins/inputs/file.asciidoc +++ b/docs/plugins/inputs/file.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v4.4.6 :release_date: 2023-12-13 :changelog_url: https://github.com/logstash-plugins/logstash-input-file/blob/v4.4.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -296,7 +296,7 @@ In Tail mode, you might want to exclude gzipped files: This option can be used in `read` mode to enforce closing all watchers when file gets read. Can be used in situation when content of the file is static and won't change during execution. -When set to `true` it also disables active discovery of the files - only files that were in +When set to `true` it also disables active discovery of the files - only files that were in the directories when process was started will be read. It supports `sincedb` entries. When file was processed once, then modified - next run will only read newly added entries. diff --git a/docs/plugins/inputs/ganglia.asciidoc b/docs/plugins/inputs/ganglia.asciidoc index 38a508b7a..457954322 100644 --- a/docs/plugins/inputs/ganglia.asciidoc +++ b/docs/plugins/inputs/ganglia.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-ganglia/blob/v3.1.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -43,7 +43,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -51,7 +51,7 @@ input plugins. 
The address to listen on [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `8649` diff --git a/docs/plugins/inputs/gelf.asciidoc b/docs/plugins/inputs/gelf.asciidoc index 40674500d..67fb4a46a 100644 --- a/docs/plugins/inputs/gelf.asciidoc +++ b/docs/plugins/inputs/gelf.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.3.2 :release_date: 2022-08-22 :changelog_url: https://github.com/logstash-plugins/logstash-input-gelf/blob/v3.3.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -53,7 +53,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -77,7 +77,7 @@ Whether to listen for gelf messages sent over udp Whether to listen for gelf messages sent over tcp [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `12201` @@ -102,7 +102,7 @@ Tcp port to listen on. Use port instead of this setting unless you need a differ Udp port to listen on. Use port instead of this setting unless you need a different port for udp than tcp [id="plugins-{type}s-{plugin}-remap"] -===== `remap` +===== `remap` * Value type is <> * Default value is `true` @@ -116,7 +116,7 @@ Remapping converts the following GELF fields to Logstash equivalents: * if there is no `full\_message`, `short\_message` becomes `event.get("message")`. 
[id="plugins-{type}s-{plugin}-strip_leading_underscore"] -===== `strip_leading_underscore` +===== `strip_leading_underscore` * Value type is <> * Default value is `true` diff --git a/docs/plugins/inputs/generator.asciidoc b/docs/plugins/inputs/generator.asciidoc index 98eb51d6e..5771d06eb 100644 --- a/docs/plugins/inputs/generator.asciidoc +++ b/docs/plugins/inputs/generator.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2021-11-04 :changelog_url: https://github.com/logstash-plugins/logstash-input-generator/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -62,7 +62,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-count"] -===== `count` +===== `count` * Value type is <> * Default value is `0` @@ -108,7 +108,7 @@ See <> for detailed information. ----- [id="plugins-{type}s-{plugin}-lines"] -===== `lines` +===== `lines` * Value type is <> * There is no default value for this setting. @@ -133,7 +133,7 @@ Example: The above will emit `line 1` then `line 2` then `line 3`, then `line 1`, etc... [id="plugins-{type}s-{plugin}-message"] -===== `message` +===== `message` * Value type is <> * Default value is `"Hello world!"` @@ -146,7 +146,7 @@ stdin and use that as the message string for every event. Otherwise, this value will be used verbatim as the event message. [id="plugins-{type}s-{plugin}-threads"] -===== `threads` +===== `threads` * Value type is <> * Default value is `1` diff --git a/docs/plugins/inputs/github.asciidoc b/docs/plugins/inputs/github.asciidoc index 2fb387cf0..cc0b288ff 100644 --- a/docs/plugins/inputs/github.asciidoc +++ b/docs/plugins/inputs/github.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.11 :release_date: 2023-05-30 :changelog_url: https://github.com/logstash-plugins/logstash-input-github/blob/v3.0.11/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -44,7 +44,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-drop_invalid"] -===== `drop_invalid` +===== `drop_invalid` * Value type is <> * Default value is `false` @@ -53,7 +53,7 @@ If Secret is defined, we drop the events that don't match. Otherwise, we'll just add an invalid tag [id="plugins-{type}s-{plugin}-ip"] -===== `ip` +===== `ip` * Value type is <> * Default value is `"0.0.0.0"` @@ -61,7 +61,7 @@ Otherwise, we'll just add an invalid tag The ip to listen on [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -70,7 +70,7 @@ The ip to listen on The port to listen on [id="plugins-{type}s-{plugin}-secret_token"] -===== `secret_token` +===== `secret_token` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/inputs/google_cloud_storage.asciidoc b/docs/plugins/inputs/google_cloud_storage.asciidoc index a4156889a..907cb7456 100644 --- a/docs/plugins/inputs/google_cloud_storage.asciidoc +++ b/docs/plugins/inputs/google_cloud_storage.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v0.15.0 :release_date: 2023-08-22 :changelog_url: https://github.com/logstash-plugins/logstash-input-google_cloud_storage/blob/v0.15.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -222,7 +222,7 @@ The bucket containing your log files. The path to the key to authenticate your user to the bucket. 
This service user _should_ have the `storage.objects.update` permission so it can create metadata on the object preventing it from being scanned multiple times. -If no key is provided the plugin will try to use the https://cloud.google.com/java/docs/reference/google-auth-library/latest/com.google.auth.oauth2.GoogleCredentials#com_google_auth_oauth2_GoogleCredentials_getApplicationDefault__[default application credentials], and if they don't exist, it falls back to unauthenticated mode. +If no key is provided the plugin will try to use the https://cloud.google.com/java/docs/reference/google-auth-library/latest/com.google.auth.oauth2.GoogleCredentials#com_google_auth_oauth2_GoogleCredentials_getApplicationDefault__[default application credentials], and if they don't exist, it falls back to unauthenticated mode. [id="plugins-{type}s-{plugin}-interval"] ===== `interval` diff --git a/docs/plugins/inputs/google_pubsub.asciidoc b/docs/plugins/inputs/google_pubsub.asciidoc index 0dc02689c..a38ad060b 100644 --- a/docs/plugins/inputs/google_pubsub.asciidoc +++ b/docs/plugins/inputs/google_pubsub.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.4.0 :release_date: 2024-10-15 :changelog_url: https://github.com/logstash-plugins/logstash-input-google_pubsub/blob/v1.4.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -39,22 +39,22 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Google deps -This is a https://github.com/elastic/logstash[Logstash] input plugin for -https://cloud.google.com/pubsub/[Google Pub/Sub]. 
The plugin can subscribe +This is a https://github.com/elastic/logstash[Logstash] input plugin for +https://cloud.google.com/pubsub/[Google Pub/Sub]. The plugin can subscribe to a topic and ingest messages. -The main motivation behind the development of this plugin was to ingest -https://cloud.google.com/logging/[Stackdriver Logging] messages via the -https://cloud.google.com/logging/docs/export/using_exported_logs[Exported Logs] +The main motivation behind the development of this plugin was to ingest +https://cloud.google.com/logging/[Stackdriver Logging] messages via the +https://cloud.google.com/logging/docs/export/using_exported_logs[Exported Logs] feature of Stackdriver Logging. ==== Prerequisites -You must first create a Google Cloud Platform project and enable the -Google Pub/Sub API. If you intend to use the plugin ingest Stackdriver Logging -messages, you must also enable the Stackdriver Logging API and configure log -exporting to Pub/Sub. There is plentiful information on -https://cloud.google.com/ to get started: +You must first create a Google Cloud Platform project and enable the +Google Pub/Sub API. If you intend to use the plugin ingest Stackdriver Logging +messages, you must also enable the Stackdriver Logging API and configure log +exporting to Pub/Sub. There is plentiful information on +https://cloud.google.com/ to get started: - Google Cloud Platform Projects and https://cloud.google.com/docs/overview/[Overview] - Google Cloud Pub/Sub https://cloud.google.com/pubsub/[documentation] @@ -62,55 +62,55 @@ https://cloud.google.com/ to get started: ==== Cloud Pub/Sub -Currently, this module requires you to create a `topic` manually and specify -it in the logstash config file. You must also specify a `subscription`, but -the plugin will attempt to create the pull-based `subscription` on its own. +Currently, this module requires you to create a `topic` manually and specify +it in the logstash config file. 
You must also specify a `subscription`, but +the plugin will attempt to create the pull-based `subscription` on its own. -All messages received from Pub/Sub will be converted to a logstash `event` -and added to the processing pipeline queue. All Pub/Sub messages will be -`acknowledged` and removed from the Pub/Sub `topic` (please see more about -https://cloud.google.com/pubsub/overview#concepts)[Pub/Sub concepts]. +All messages received from Pub/Sub will be converted to a logstash `event` +and added to the processing pipeline queue. All Pub/Sub messages will be +`acknowledged` and removed from the Pub/Sub `topic` (please see more about +https://cloud.google.com/pubsub/overview#concepts)[Pub/Sub concepts]. -It is generally assumed that incoming messages will be in JSON and added to -the logstash `event` as-is. However, if a plain text message is received, the -plugin will return the raw text in as `raw_message` in the logstash `event`. +It is generally assumed that incoming messages will be in JSON and added to +the logstash `event` as-is. However, if a plain text message is received, the +plugin will return the raw text in as `raw_message` in the logstash `event`. ==== Authentication -You have two options for authentication depending on where you run Logstash. +You have two options for authentication depending on where you run Logstash. -1. If you are running Logstash outside of Google Cloud Platform, then you will +1. If you are running Logstash outside of Google Cloud Platform, then you will need to provide the path to the JSON private key file in your config in `json_key_file` setting. -If you don't want to upload the file and use its content as a string, then add the content +If you don't want to upload the file and use its content as a string, then add the content of JSON private key in `json_key_file_content` setting. You must assign sufficient -roles to the Service Account to create a subscription and to pull messages -from the subscription. 
Learn more about GCP Service Accounts and IAM roles +roles to the Service Account to create a subscription and to pull messages +from the subscription. Learn more about GCP Service Accounts and IAM roles here: - Google Cloud Platform IAM https://cloud.google.com/iam/[overview] - Creating Service Accounts https://cloud.google.com/iam/docs/creating-managing-service-accounts[overview] - Granting Roles https://cloud.google.com/iam/docs/granting-roles-to-service-accounts[overview] -2. If you are running Logstash on a Google Compute Engine instance, you may opt -to use Application Default Credentials. In this case, you will not need to +2. If you are running Logstash on a Google Compute Engine instance, you may opt +to use Application Default Credentials. In this case, you will not need to specify a JSON private key file in your config. ==== Stackdriver Logging (optional) -If you intend to use the logstash plugin for Stackdriver Logging message -ingestion, you must first manually set up the Export option to Cloud Pub/Sub and -the manually create the `topic`. Please see the more detailed instructions at, -https://cloud.google.com/logging/docs/export/using_exported_logs [Exported Logs] -and ensure that the https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub[necessary permissions] +If you intend to use the logstash plugin for Stackdriver Logging message +ingestion, you must first manually set up the Export option to Cloud Pub/Sub and +the manually create the `topic`. Please see the more detailed instructions at, +https://cloud.google.com/logging/docs/export/using_exported_logs [Exported Logs] +and ensure that the https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub[necessary permissions] have also been manually configured. 
-Logging messages from Stackdriver Logging exported to Pub/Sub are received as -JSON and converted to a logstash `event` as-is in +Logging messages from Stackdriver Logging exported to Pub/Sub are received as +JSON and converted to a logstash `event` as-is in https://cloud.google.com/logging/docs/export/using_exported_logs#log_entries_in_google_pubsub_topics[this format]. ==== Sample Configuration -Below is a copy of the included `example.conf-tmpl` file that shows a basic +Below is a copy of the included `example.conf-tmpl` file that shows a basic configuration for this plugin. [source,ruby] @@ -147,7 +147,7 @@ output { stdout { codec => rubydebug } } ==== Metadata and Attributes -The original Pub/Sub message is preserved in the special Logstash +The original Pub/Sub message is preserved in the special Logstash `[@metadata][pubsub_message]` field so you can fetch: * Message attributes @@ -165,7 +165,7 @@ for a full description of the fields. Example to get the message ID: [source,ruby] ----------------------------------- +---------------------------------- input {google_pubsub {...}} filter { @@ -201,7 +201,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-json_key_file"] -===== `json_key_file` +===== `json_key_file` * Value type is <> * There is no default value for this setting. @@ -211,17 +211,17 @@ GCE's Application Default Credentials. Outside of GCE, you will need to specify a Service Account JSON key file. [id="plugins-{type}s-{plugin}-max_messages"] -===== `max_messages` +===== `max_messages` * This is a required setting. * Value type is <> * Default value is `5` The maximum number of messages returned per request. -The Pub/Sub system may return fewer than the number specified. +The Pub/Sub system may return fewer than the number specified. [id="plugins-{type}s-{plugin}-project_id"] -===== `project_id` +===== `project_id` * This is a required setting. * Value type is <> @@ -230,7 +230,7 @@ The Pub/Sub system may return fewer than the number specified. 
Google Cloud Project ID (name, not number). [id="plugins-{type}s-{plugin}-subscription"] -===== `subscription` +===== `subscription` * This is a required setting. * Value type is <> @@ -239,7 +239,7 @@ Google Cloud Project ID (name, not number). [id="plugins-{type}s-{plugin}-topic"] -===== `topic` +===== `topic` * This is a required setting. * Value type is <> @@ -251,7 +251,7 @@ pre-configured export to PubSub configured to use the defined topic. The subscription will be created automatically by the plugin. [id="plugins-{type}s-{plugin}-include_metadata"] -===== `include_metadata` +===== `include_metadata` * Value type is <> * Default value is `false`. diff --git a/docs/plugins/inputs/graphite.asciidoc b/docs/plugins/inputs/graphite.asciidoc index c24523572..0e34d4af5 100644 --- a/docs/plugins/inputs/graphite.asciidoc +++ b/docs/plugins/inputs/graphite.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-graphite/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -70,7 +70,7 @@ input plugins. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -85,7 +85,7 @@ When mode is `server`, the address to listen on. When mode is `client`, the address to connect to. [id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * Value can be any of: `server`, `client` * Default value is `"server"` @@ -94,7 +94,7 @@ Mode to operate in. `server` listens for client connections, `client` connects to a server. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -104,7 +104,7 @@ When mode is `server`, the port to listen on. 
When mode is `client`, the port to connect to. [id="plugins-{type}s-{plugin}-proxy_protocol"] -===== `proxy_protocol` +===== `proxy_protocol` * Value type is <> * Default value is `false` @@ -122,7 +122,7 @@ http://www.haproxy.org/download/1.5/doc/proxy-protocol.txt The SSL CA certificate, chainfile or CA path. The system CA path is automatically included. [id="plugins-{type}s-{plugin}-ssl_cert"] -===== `ssl_cert` +===== `ssl_cert` * Value type is <> * There is no default value for this setting. @@ -130,7 +130,7 @@ The SSL CA certificate, chainfile or CA path. The system CA path is automaticall SSL certificate path [id="plugins-{type}s-{plugin}-ssl_enable"] -===== `ssl_enable` +===== `ssl_enable` * Value type is <> * Default value is `false` @@ -138,7 +138,7 @@ SSL certificate path Enable SSL (must be set for other `ssl_` options to take effect). [id="plugins-{type}s-{plugin}-ssl_extra_chain_certs"] -===== `ssl_extra_chain_certs` +===== `ssl_extra_chain_certs` * Value type is <> * Default value is `[]` @@ -147,7 +147,7 @@ An Array of extra X509 certificates to be added to the certificate chain. Useful when the CA chain is not necessary in the system store. [id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * Value type is <> * There is no default value for this setting. @@ -155,7 +155,7 @@ Useful when the CA chain is not necessary in the system store. 
SSL key path [id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * Default value is `nil` @@ -163,7 +163,7 @@ SSL key path SSL key passphrase [id="plugins-{type}s-{plugin}-ssl_verify"] -===== `ssl_verify` +===== `ssl_verify` * Value type is <> * Default value is `true` diff --git a/docs/plugins/inputs/heartbeat.asciidoc b/docs/plugins/inputs/heartbeat.asciidoc index 5e71dfa97..c5992df19 100644 --- a/docs/plugins/inputs/heartbeat.asciidoc +++ b/docs/plugins/inputs/heartbeat.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.1 :release_date: 2021-08-04 :changelog_url: https://github.com/logstash-plugins/logstash-input-heartbeat/blob/v3.1.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -70,7 +70,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-count"] -===== `count` +===== `count` * Value type is <> * Default value is `-1` @@ -89,11 +89,11 @@ This is typically used only for testing purposes. ** When Logstash provides a `pipeline.ecs_compatibility` setting, its value is used as the default ** Otherwise, the default value is `disabled`. -Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. +Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. Refer to <> in this topic for detailed information. [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * Value type is <> * Default value is `60` @@ -103,7 +103,7 @@ Set how frequently messages should be sent. The default, `60`, means send a message every 60 seconds. [id="plugins-{type}s-{plugin}-message"] -===== `message` +===== `message` * Value type is <> * Default value is `"ok"` @@ -154,7 +154,7 @@ field. 
[id="plugins-{type}s-{plugin}-threads"] -===== `threads` +===== `threads` * Value type is <> * Default value is `1` diff --git a/docs/plugins/inputs/http.asciidoc b/docs/plugins/inputs/http.asciidoc index a073a7abd..b043153d2 100644 --- a/docs/plugins/inputs/http.asciidoc +++ b/docs/plugins/inputs/http.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.1.2 :release_date: 2025-02-12 :changelog_url: https://github.com/logstash-plugins/logstash-input-http/blob/v4.1.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -24,7 +24,7 @@ include::{include_path}/plugin_header.asciidoc[] Using this input you can receive single or multiline events over http(s). Applications can send an HTTP request to the endpoint started by this input and -Logstash will convert it into an event for subsequent processing. Users +Logstash will convert it into an event for subsequent processing. Users can pass plain text, JSON, or any formatted data and use a corresponding codec with this input. For Content-Type `application/json` the `json` codec is used, but for all other data formats, `plain` codec is used. @@ -57,16 +57,16 @@ Here’s how ECS compatibility mode affects output. ==== Blocking Behavior -The HTTP protocol doesn't deal well with long running requests. This plugin will either return -a 429 (busy) error when Logstash is backlogged, or it will time out the request. +The HTTP protocol doesn't deal well with long running requests. This plugin will either return +a 429 (busy) error when Logstash is backlogged, or it will time out the request. -If a 429 error is encountered clients should sleep, backing off exponentially with some random +If a 429 error is encountered clients should sleep, backing off exponentially with some random jitter, then retry their request. 
This plugin will block if the Logstash queue is blocked and there are available HTTP input threads. This will cause most HTTP clients to time out. Sent events will still be processed in this case. This -behavior is not optimal and will be changed in a future release. In the future, this plugin will always -return a 429 if the queue is busy, and will not time out in the event of a busy queue. +behavior is not optimal and will be changed in a future release. In the future, this plugin will always +return a 429 if the queue is busy, and will not time out in the event of a busy queue. ==== Security This plugin supports standard HTTP basic authentication headers to identify the requester. @@ -77,7 +77,7 @@ validating the client's certificate. [id="plugins-{type}s-{plugin}-codec-settings"] ==== Codec settings -This plugin has two configuration options for codecs: `codec` and `additional_codecs`. +This plugin has two configuration options for codecs: `codec` and `additional_codecs`. Values in `additional_codecs` are prioritized over those specified in the `codec` option. That is, the default `codec` is applied only if no codec @@ -128,7 +128,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-additional_codecs"] -===== `additional_codecs` +===== `additional_codecs` * Value type is <> * Default value is `{"application/json"=>"json"}` @@ -203,7 +203,7 @@ See <> for detailed information. ----- [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -212,7 +212,7 @@ The host or ip to bind [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -220,7 +220,7 @@ The host or ip to bind Password for basic authorization [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `8080` @@ -262,7 +262,7 @@ invalid credentials (401), internal errors (503) or backpressure (429). 
If 204 (No Content) is set, the response body will not be sent in the response. [id="plugins-{type}s-{plugin}-response_headers"] -===== `response_headers` +===== `response_headers` * Value type is <> * Default value is `{"Content-Type"=>"text/plain"}` @@ -270,7 +270,7 @@ If 204 (No Content) is set, the response body will not be sent in the response. specify a custom set of response headers [id="plugins-{type}s-{plugin}-remote_host_target_field"] -===== `remote_host_target_field` +===== `remote_host_target_field` * Value type is <> * Default value is `"host"` when ECS is disabled @@ -279,7 +279,7 @@ specify a custom set of response headers specify a target field for the client host of the http request [id="plugins-{type}s-{plugin}-request_headers_target_field"] -===== `request_headers_target_field` +===== `request_headers_target_field` * Value type is <> * Default value is `"headers"` when ECS is disabled @@ -289,7 +289,7 @@ specify target field for the client host of the http request [id="plugins-{type}s-{plugin}-ssl_certificate"] -===== `ssl_certificate` +===== `ssl_certificate` * Value type is <> * There is no default value for this setting. @@ -297,12 +297,12 @@ specify target field for the client host of the http request SSL certificate to use. [id="plugins-{type}s-{plugin}-ssl_certificate_authorities"] -===== `ssl_certificate_authorities` +===== `ssl_certificate_authorities` * Value type is <> * Default value is `[]` -Validate client certificates against these authorities. +Validate client certificates against these authorities. You can define multiple files or paths. All the certificates will be read and added to the trust store. You need to configure the <> to `optional` or `required` to enable the verification. @@ -341,7 +341,7 @@ Events are, by default, sent in plain text. You can enable encryption by setting the <> and <> options. 
[id="plugins-{type}s-{plugin}-ssl_handshake_timeout"] -===== `ssl_handshake_timeout` +===== `ssl_handshake_timeout` * Value type is <> * Default value is `10000` @@ -349,7 +349,7 @@ the <> and <> * There is no default value for this setting. @@ -359,7 +359,7 @@ NOTE: This key need to be in the PKCS8 format, you can convert it with https://w for more information. [id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * There is no default value for this setting. @@ -438,7 +438,7 @@ The format of the truststore file. It must be either `jks` or `pkcs12`. [id="plugins-{type}s-{plugin}-threads"] -===== `threads` +===== `threads` * Value type is <> * Default value is number of processors @@ -447,7 +447,7 @@ Number of threads to use for both accepting connections and handling requests [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/inputs/http_poller.asciidoc b/docs/plugins/inputs/http_poller.asciidoc index 7a0a2c2cc..0a31f35b3 100644 --- a/docs/plugins/inputs/http_poller.asciidoc +++ b/docs/plugins/inputs/http_poller.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v6.0.0 :release_date: 2024-12-18 :changelog_url: https://github.com/logstash-plugins/logstash-input-http_poller/blob/v6.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -168,7 +168,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-automatic_retries"] -===== `automatic_retries` +===== `automatic_retries` * Value type is <> * Default value is `1` @@ -178,7 +178,7 @@ to zero if keepalive is enabled. 
Some servers incorrectly end keepalives early r Note: if `retry_non_idempotent` is set only GET, HEAD, PUT, DELETE, OPTIONS, and TRACE requests will be retried. [id="plugins-{type}s-{plugin}-connect_timeout"] -===== `connect_timeout` +===== `connect_timeout` * Value type is <> * Default value is `10` @@ -186,7 +186,7 @@ Note: if `retry_non_idempotent` is set only GET, HEAD, PUT, DELETE, OPTIONS, and Timeout (in seconds) to wait for a connection to be established. Default is `10s` [id="plugins-{type}s-{plugin}-cookies"] -===== `cookies` +===== `cookies` * Value type is <> * Default value is `true` @@ -270,7 +270,7 @@ Example output: ---- [id="plugins-{type}s-{plugin}-follow_redirects"] -===== `follow_redirects` +===== `follow_redirects` * Value type is <> * Default value is `true` @@ -278,7 +278,7 @@ Example output: Should redirects be followed? Defaults to `true` [id="plugins-{type}s-{plugin}-keepalive"] -===== `keepalive` +===== `keepalive` * Value type is <> * Default value is `true` @@ -287,7 +287,7 @@ Turn this on to enable HTTP keepalive support. We highly recommend setting `auto one with this to fix interactions with broken keepalive implementations. [id="plugins-{type}s-{plugin}-metadata_target"] -===== `metadata_target` +===== `metadata_target` * Value type is <> * Default value is `"@metadata"` @@ -297,7 +297,7 @@ Set this value to the name of the field you'd like to store a nested hash of metadata. [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -305,7 +305,7 @@ hash of metadata. Password to be used in conjunction with <> for HTTP authentication. [id="plugins-{type}s-{plugin}-pool_max"] -===== `pool_max` +===== `pool_max` * Value type is <> * Default value is `50` @@ -313,7 +313,7 @@ Password to be used in conjunction with <> for HT Max number of concurrent connections. 
Defaults to `50` [id="plugins-{type}s-{plugin}-pool_max_per_route"] -===== `pool_max_per_route` +===== `pool_max_per_route` * Value type is <> * Default value is `25` @@ -321,7 +321,7 @@ Max number of concurrent connections. Defaults to `50` Max number of concurrent connections to a single host. Defaults to `25` [id="plugins-{type}s-{plugin}-proxy"] -===== `proxy` +===== `proxy` * Value type is <> * There is no default value for this setting. @@ -333,7 +333,7 @@ If you'd like to use an HTTP proxy . This supports multiple configuration syntax 3. Proxy host in form: `{url => 'http://proxy.org:1234', user => 'username@host', password => 'password'}` [id="plugins-{type}s-{plugin}-request_timeout"] -===== `request_timeout` +===== `request_timeout` * Value type is <> * Default value is `60` @@ -341,7 +341,7 @@ If you'd like to use an HTTP proxy . This supports multiple configuration syntax Timeout (in seconds) for the entire request. [id="plugins-{type}s-{plugin}-retry_non_idempotent"] -===== `retry_non_idempotent` +===== `retry_non_idempotent` * Value type is <> * Default value is `false` @@ -349,7 +349,7 @@ Timeout (in seconds) for the entire request. If `automatic_retries` is enabled this will cause non-idempotent HTTP verbs (such as POST) to be retried. [id="plugins-{type}s-{plugin}-schedule"] -===== `schedule` +===== `schedule` * Value type is <> * There is no default value for this setting. @@ -364,7 +364,7 @@ Examples: See: rufus/scheduler for details about different schedule options and value string format [id="plugins-{type}s-{plugin}-socket_timeout"] -===== `socket_timeout` +===== `socket_timeout` * Value type is <> * Default value is `10` @@ -500,7 +500,7 @@ It is primarily intended as a temporary diagnostic mechanism when attempting to Using `none` in production environments is strongly discouraged. [id="plugins-{type}s-{plugin}-target"] -===== `target` +===== `target` * Value type is <> * There is no default value for this setting. 
@@ -511,7 +511,7 @@ TIP: When ECS is enabled, set `target` in the codec (if the codec has a `target` Example: `codec => json { target => "TARGET_FIELD_NAME" }` [id="plugins-{type}s-{plugin}-urls"] -===== `urls` +===== `urls` * This is a required setting. * Value type is <> @@ -526,13 +526,13 @@ The values in urls can be either: * a sub-hash containing many useful keys provided by the Manticore backend: ** url: the String url ** method: (optional) the HTTP method to use (defaults to GET) -** user: (optional) the HTTP Basic Auth user. The user must be under +** user: (optional) the HTTP Basic Auth user. The user must be under an auth sub-hash for Manticore, but this plugin also accepts it either way. -** password: (optional) the HTTP Basic Auth password. The password +** password: (optional) the HTTP Basic Auth password. The password must be under an auth sub-hash for Manticore, but this plugin accepts it either way. ** headers: a hash containing key-value pairs of headers. ** body: a string (supported only on POST and PUT requests) -** possibly other options mentioned in the +** possibly other options mentioned in the https://www.rubydoc.info/github/cheald/manticore/Manticore/Client#http-instance_method[Manticore docs]. Note that Manticore options that are not explicitly documented above are not thoroughly tested and therefore liable to break in unexpected ways if we @@ -540,7 +540,7 @@ The values in urls can be either: *Notes:* -* Passwords specified as a part of `urls` are prone to exposure in plugin log output. +* Passwords specified as a part of `urls` are prone to exposure in plugin log output. The plugin does not declare them as passwords, and therefore doesn't wrap them in leak-reducing wrappers as we do elsewhere. * We don't guarantee that boolean-type options like Manticore's `follow_redirects` are supported @@ -550,7 +550,7 @@ string is "truthy." 
as anything other than true [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * There is no default value for this setting. @@ -559,7 +559,7 @@ Username to use with HTTP authentication for ALL requests. Note that you can als If you set this you must also set the <> option. [id="plugins-{type}s-{plugin}-validate_after_inactivity"] -===== `validate_after_inactivity` +===== `validate_after_inactivity` * Value type is <> * Default value is `200` diff --git a/docs/plugins/inputs/imap.asciidoc b/docs/plugins/inputs/imap.asciidoc index 6faac6fc9..02f01dc9e 100644 --- a/docs/plugins/inputs/imap.asciidoc +++ b/docs/plugins/inputs/imap.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.1 :release_date: 2023-10-03 :changelog_url: https://github.com/logstash-plugins/logstash-input-imap/blob/v3.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/irc.asciidoc b/docs/plugins/inputs/irc.asciidoc index 3d820de8e..dcd7f07a7 100644 --- a/docs/plugins/inputs/irc.asciidoc +++ b/docs/plugins/inputs/irc.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-irc/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -52,7 +52,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-catch_all"] -===== `catch_all` +===== `catch_all` * Value type is <> * Default value is `false` @@ -60,7 +60,7 @@ input plugins. 
Catch all IRC channel/user events not just channel messages [id="plugins-{type}s-{plugin}-channels"] -===== `channels` +===== `channels` * This is a required setting. * Value type is <> @@ -76,7 +76,7 @@ For passworded channels, add a space and the channel password, such as [id="plugins-{type}s-{plugin}-get_stats"] -===== `get_stats` +===== `get_stats` * Value type is <> * Default value is `false` @@ -84,7 +84,7 @@ For passworded channels, add a space and the channel password, such as Gather and send user counts for channels - this requires catch_all and will force it [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -93,7 +93,7 @@ Gather and send user counts for channels - this requires catch_all and will forc Host of the IRC Server to connect to. [id="plugins-{type}s-{plugin}-nick"] -===== `nick` +===== `nick` * Value type is <> * Default value is `"logstash"` @@ -101,7 +101,7 @@ Host of the IRC Server to connect to. IRC Nickname [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -109,7 +109,7 @@ IRC Nickname IRC Server password [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `6667` @@ -117,7 +117,7 @@ IRC Server password Port for the IRC Server [id="plugins-{type}s-{plugin}-real"] -===== `real` +===== `real` * Value type is <> * Default value is `"logstash"` @@ -125,7 +125,7 @@ Port for the IRC Server IRC Real name [id="plugins-{type}s-{plugin}-secure"] -===== `secure` +===== `secure` * Value type is <> * Default value is `false` @@ -133,7 +133,7 @@ IRC Real name Set this to true to enable SSL. [id="plugins-{type}s-{plugin}-stats_interval"] -===== `stats_interval` +===== `stats_interval` * Value type is <> * Default value is `5` @@ -141,7 +141,7 @@ Set this to true to enable SSL. 
How often in minutes to get the user count stats [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * Default value is `"logstash"` diff --git a/docs/plugins/inputs/jdbc.asciidoc b/docs/plugins/inputs/jdbc.asciidoc index 5dbd4ba31..7f6362675 100644 --- a/docs/plugins/inputs/jdbc.asciidoc +++ b/docs/plugins/inputs/jdbc.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v5.5.2 :release_date: 2024-12-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-jdbc/blob/v5.5.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/jms.asciidoc b/docs/plugins/inputs/jms.asciidoc index f2473ef8e..79ed58373 100644 --- a/docs/plugins/inputs/jms.asciidoc +++ b/docs/plugins/inputs/jms.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.2 :release_date: 2022-06-13 :changelog_url: https://github.com/logstash-plugins/logstash-input-jms/blob/v3.2.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/jmx.asciidoc b/docs/plugins/inputs/jmx.asciidoc index ca2b440ec..0bf7e6259 100644 --- a/docs/plugins/inputs/jmx.asciidoc +++ b/docs/plugins/inputs/jmx.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2018-08-13 :changelog_url: https://github.com/logstash-plugins/logstash-input-jmx/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -23,7 +23,7 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description This input plugin permits to retrieve metrics from remote Java applications using JMX. -Every `polling_frequency`, it scans a folder containing json configuration +Every `polling_frequency`, it scans a folder containing json configuration files describing JVMs to monitor with metrics to retrieve. Then a pool of threads will retrieve metrics and create events. @@ -83,7 +83,7 @@ Json JMX configuration example: } ] } -Here are examples of generated events. When returned metrics value type is +Here are examples of generated events. When returned metrics value type is number/boolean it is stored in `metric_value_number` event field otherwise it is stored in `metric_value_string` event field. [source,ruby] @@ -128,7 +128,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-nb_thread"] -===== `nb_thread` +===== `nb_thread` * Value type is <> * Default value is `4` @@ -136,7 +136,7 @@ input plugins. Indicate number of thread launched to retrieve metrics [id="plugins-{type}s-{plugin}-path"] -===== `path` +===== `path` * This is a required setting. * Value type is <> @@ -145,7 +145,7 @@ Indicate number of thread launched to retrieve metrics Path where json conf files are stored [id="plugins-{type}s-{plugin}-polling_frequency"] -===== `polling_frequency` +===== `polling_frequency` * Value type is <> * Default value is `60` diff --git a/docs/plugins/inputs/kafka.asciidoc b/docs/plugins/inputs/kafka.asciidoc index 719deb23b..b6746cd03 100644 --- a/docs/plugins/inputs/kafka.asciidoc +++ b/docs/plugins/inputs/kafka.asciidoc @@ -12,7 +12,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v11.6.0 :release_date: 2025-01-07 :changelog_url: https://github.com/logstash-plugins/logstash-integration-kafka/blob/v11.6.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -39,13 +39,13 @@ version upgrades), please file an issue with details about what you need. This input supports connecting to Kafka over: * SSL (requires plugin version 3.0.0 or later) -* Kerberos SASL (requires plugin version 5.1.0 or later) +* Kerberos SASL (requires plugin version 5.1.0 or later) By default security is disabled but can be turned on as needed. [NOTE] ======= -This plugin does not support using a proxy when communicating to the Kafka broker. +This plugin does not support using a proxy when communicating to the Kafka broker. This plugin does support using a proxy when communicating to the Schema Registry using the <> option. ======= @@ -54,7 +54,7 @@ The Logstash Kafka consumer handles group management and uses the default offset strategy using Kafka topics. Logstash instances by default form a single logical group to subscribe to Kafka topics -Each Logstash Kafka consumer can run multiple threads to increase read throughput. Alternatively, +Each Logstash Kafka consumer can run multiple threads to increase read throughput. Alternatively, you could run multiple Logstash instances with the same `group_id` to spread the load across physical machines. Messages in a topic will be distributed to all Logstash instances with the same `group_id`. @@ -75,7 +75,7 @@ The following metadata from Kafka broker are added under the `[@metadata]` field * `[@metadata][kafka][partition]`: Partition info for this message. * `[@metadata][kafka][offset]`: Original record offset for this message. * `[@metadata][kafka][key]`: Record key, if any. -* `[@metadata][kafka][timestamp]`: Timestamp in the Record. 
+* `[@metadata][kafka][timestamp]`: Timestamp in the Record. Depending on your broker configuration, this can be either when the record was created (default) or when it was received by the broker. See more about property log.message.timestamp.type at @@ -83,7 +83,7 @@ https://kafka.apache.org/{kafka_client_doc}/documentation.html#brokerconfigs Metadata is only added to the event if the `decorate_events` option is set to `basic` or `extended` (it defaults to `none`). -Please note that `@metadata` fields are not part of any of your events at output time. If you need these information to be +Please note that `@metadata` fields are not part of any of your events at output time. If you need these information to be inserted into your original event, you'll have to use the `mutate` filter to manually copy the required fields into your `event`. [id="plugins-{type}s-{plugin}-options"] @@ -175,7 +175,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-auto_commit_interval_ms"] -===== `auto_commit_interval_ms` +===== `auto_commit_interval_ms` * Value type is <> * Default value is `5000`. @@ -183,7 +183,7 @@ input plugins. The frequency in milliseconds that the consumer offsets are committed to Kafka. [id="plugins-{type}s-{plugin}-auto_offset_reset"] -===== `auto_offset_reset` +===== `auto_offset_reset` * Value type is <> * There is no default value for this setting. @@ -196,7 +196,7 @@ What to do when there is no initial offset in Kafka or if an offset is out of ra * anything else: throw exception to the consumer. [id="plugins-{type}s-{plugin}-bootstrap_servers"] -===== `bootstrap_servers` +===== `bootstrap_servers` * Value type is <> * Default value is `"localhost:9092"` @@ -208,7 +208,7 @@ so this list need not contain the full set of servers (you may want more than on case a server is down). 
[id="plugins-{type}s-{plugin}-check_crcs"] -===== `check_crcs` +===== `check_crcs` * Value type is <> * Default value is `true` @@ -218,14 +218,14 @@ This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. [id="plugins-{type}s-{plugin}-client_dns_lookup"] -===== `client_dns_lookup` +===== `client_dns_lookup` * Value type is <> * Default value is `"default"` -How DNS lookups should be done. If set to `use_all_dns_ips`, when the lookup returns multiple -IP addresses for a hostname, they will all be attempted to connect to before failing the -connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be +How DNS lookups should be done. If set to `use_all_dns_ips`, when the lookup returns multiple +IP addresses for a hostname, they will all be attempted to connect to before failing the +connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be resolved and expanded into a list of canonical names. [NOTE] @@ -235,7 +235,7 @@ If explicitly configured it fallbacks to `use_all_dns_ips`. ==== [id="plugins-{type}s-{plugin}-client_id"] -===== `client_id` +===== `client_id` * Value type is <> * Default value is `"logstash"` @@ -245,12 +245,12 @@ is to be able to track the source of requests beyond just ip/port by allowing a logical application name to be included. [id="plugins-{type}s-{plugin}-client_rack"] -===== `client_rack` +===== `client_rack` * Value type is <> * There is no default value for this setting. -A rack identifier for the Kafka consumer. +A rack identifier for the Kafka consumer. Used to select the physically closest rack for the consumer to read from. The setting corresponds with Kafka's `broker.rack` configuration. @@ -258,7 +258,7 @@ NOTE: Available only for Kafka 2.4.0 and higher. 
See https://cwiki.apache.org/confluence/display/KAFKA/KIP-392%3A+Allow+consumers+to+fetch+from+closest+replica[KIP-392]. [id="plugins-{type}s-{plugin}-connections_max_idle_ms"] -===== `connections_max_idle_ms` +===== `connections_max_idle_ms` * Value type is <> * Default value is `540000` milliseconds (9 minutes). @@ -266,7 +266,7 @@ https://cwiki.apache.org/confluence/display/KAFKA/KIP-392%3A+Allow+consumers+to+ Close idle connections after the number of milliseconds specified by this config. [id="plugins-{type}s-{plugin}-consumer_threads"] -===== `consumer_threads` +===== `consumer_threads` * Value type is <> * Default value is `1` @@ -275,7 +275,7 @@ Ideally you should have as many threads as the number of partitions for a perfec balance — more threads than partitions means that some threads will be idle [id="plugins-{type}s-{plugin}-decorate_events"] -===== `decorate_events` +===== `decorate_events` * Value type is <> * Accepted values are: @@ -307,7 +307,7 @@ A topic will be auto-created only if this configuration is set to `true` and a otherwise auto-topic creation is not permitted.  [id="plugins-{type}s-{plugin}-enable_auto_commit"] -===== `enable_auto_commit` +===== `enable_auto_commit` * Value type is <> * Default value is `true` @@ -320,7 +320,7 @@ the consumer. If value is `false` however, the offset is committed every time th consumer writes data fetched from the topic to the in-memory or persistent queue. [id="plugins-{type}s-{plugin}-exclude_internal_topics"] -===== `exclude_internal_topics` +===== `exclude_internal_topics` * Value type is <> * There is no default value for this setting. @@ -329,17 +329,17 @@ Whether records from internal topics (such as offsets) should be exposed to the If set to true the only way to receive records from an internal topic is subscribing to it. 
[id="plugins-{type}s-{plugin}-fetch_max_bytes"] -===== `fetch_max_bytes` +===== `fetch_max_bytes` * Value type is <> * Default value is `52428800` (50MB) -The maximum amount of data the server should return for a fetch request. This is not an -absolute maximum, if the first message in the first non-empty partition of the fetch is larger +The maximum amount of data the server should return for a fetch request. This is not an +absolute maximum, if the first message in the first non-empty partition of the fetch is larger than this value, the message will still be returned to ensure that the consumer can make progress. [id="plugins-{type}s-{plugin}-fetch_max_wait_ms"] -===== `fetch_max_wait_ms` +===== `fetch_max_wait_ms` * Value type is <> * Default value is `500` milliseconds. @@ -349,7 +349,7 @@ there isn't sufficient data to immediately satisfy `fetch_min_bytes`. This should be less than or equal to the timeout used in `poll_timeout_ms` [id="plugins-{type}s-{plugin}-fetch_min_bytes"] -===== `fetch_min_bytes` +===== `fetch_min_bytes` * Value type is <> * There is no default value for this setting. @@ -359,7 +359,7 @@ data is available the request will wait for that much data to accumulate before answering the request. [id="plugins-{type}s-{plugin}-group_id"] -===== `group_id` +===== `group_id` * Value type is <> * Default value is `"logstash"` @@ -395,19 +395,19 @@ NOTE: In cases when multiple threads are configured and `consumer_threads` is gr the `group_instance_id` to avoid collisions. [id="plugins-{type}s-{plugin}-heartbeat_interval_ms"] -===== `heartbeat_interval_ms` +===== `heartbeat_interval_ms` * Value type is <> * Default value is `3000` milliseconds (3 seconds). -The expected time between heartbeats to the consumer coordinator. Heartbeats are used to ensure +The expected time between heartbeats to the consumer coordinator. 
Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than `session.timeout.ms`, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. [id="plugins-{type}s-{plugin}-isolation_level"] -===== `isolation_level` +===== `isolation_level` * Value type is <> * Default value is `"read_uncommitted"` @@ -418,12 +418,12 @@ return all messages, even transactional messages which have been aborted. Non-tr unconditionally in either mode. [id="plugins-{type}s-{plugin}-jaas_path"] -===== `jaas_path` +===== `jaas_path` * Value type is <> * There is no default value for this setting. -The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization +The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization services for Kafka. This setting provides the path to the JAAS file. Sample JAAS file for Kafka client: [source,java] ---------------------------------- @@ -435,13 +435,13 @@ KafkaClient { }; ---------------------------------- -Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these -to the global JVM system properties. This means if you have multiple Kafka inputs, all of them would be sharing the same -`jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on +Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these +to the global JVM system properties. This means if you have multiple Kafka inputs, all of them would be sharing the same +`jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on different JVM instances. 
[id="plugins-{type}s-{plugin}-kerberos_config"] -===== `kerberos_config` +===== `kerberos_config` * Value type is <> * There is no default value for this setting. @@ -449,7 +449,7 @@ different JVM instances. Optional path to kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html [id="plugins-{type}s-{plugin}-key_deserializer_class"] -===== `key_deserializer_class` +===== `key_deserializer_class` * Value type is <> * Default value is `"org.apache.kafka.common.serialization.StringDeserializer"` @@ -457,7 +457,7 @@ Optional path to kerberos config file. This is krb5.conf style as detailed in ht Java Class used to deserialize the record's key [id="plugins-{type}s-{plugin}-max_partition_fetch_bytes"] -===== `max_partition_fetch_bytes` +===== `max_partition_fetch_bytes` * Value type is <> * Default value is `1048576` (1MB). @@ -469,18 +469,18 @@ send messages larger than the consumer can fetch. If that happens, the consumer to fetch a large message on a certain partition. [id="plugins-{type}s-{plugin}-max_poll_interval_ms"] -===== `max_poll_interval_ms` +===== `max_poll_interval_ms` * Value type is <> * Default value is `300000` milliseconds (5 minutes). -The maximum delay between invocations of poll() when using consumer group management. This places -an upper bound on the amount of time that the consumer can be idle before fetching more records. -If poll() is not called before expiration of this timeout, then the consumer is considered failed and +The maximum delay between invocations of poll() when using consumer group management. This places +an upper bound on the amount of time that the consumer can be idle before fetching more records. +If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. 
[id="plugins-{type}s-{plugin}-max_poll_records"] -===== `max_poll_records` +===== `max_poll_records` * Value type is <> * Default value is `500`. @@ -488,7 +488,7 @@ the group will rebalance in order to reassign the partitions to another member. The maximum number of records returned in a single call to poll(). [id="plugins-{type}s-{plugin}-metadata_max_age_ms"] -===== `metadata_max_age_ms` +===== `metadata_max_age_ms` * Value type is <> * Default value is `300000` milliseconds (5 minutes). @@ -497,7 +497,7 @@ The period of time in milliseconds after which we force a refresh of metadata ev we haven't seen any partition leadership changes to proactively discover any new brokers or partitions [id="plugins-{type}s-{plugin}-partition_assignment_strategy"] -===== `partition_assignment_strategy` +===== `partition_assignment_strategy` * Value type is <> * There is no default value for this setting. @@ -514,7 +514,7 @@ These map to Kafka's corresponding https://kafka.apache.org/{kafka_client_doc}/j implementations. [id="plugins-{type}s-{plugin}-poll_timeout_ms"] -===== `poll_timeout_ms` +===== `poll_timeout_ms` * Value type is <> * Default value is `100` milliseconds. @@ -527,7 +527,7 @@ Underneath the covers, Kafka client sends periodic heartbeats to the server. The timeout specified the time to block waiting for input on each poll. [id="plugins-{type}s-{plugin}-receive_buffer_bytes"] -===== `receive_buffer_bytes` +===== `receive_buffer_bytes` * Value type is <> * Default value is `32768` (32KB). @@ -535,7 +535,7 @@ The timeout specified the time to block waiting for input on each poll. The size of the TCP receive buffer (SO_RCVBUF) to use when reading data. [id="plugins-{type}s-{plugin}-reconnect_backoff_ms"] -===== `reconnect_backoff_ms` +===== `reconnect_backoff_ms` * Value type is <> * Default value is `50` milliseconds. @@ -545,7 +545,7 @@ This avoids repeatedly connecting to a host in a tight loop. 
This backoff applies to all requests sent by the consumer to the broker. [id="plugins-{type}s-{plugin}-request_timeout_ms"] -===== `request_timeout_ms` +===== `request_timeout_ms` * Value type is <> * Default value is `40000` milliseconds (40 seconds). @@ -556,7 +556,7 @@ elapses the client will resend the request if necessary or fail the request if retries are exhausted. [id="plugins-{type}s-{plugin}-retry_backoff_ms"] -===== `retry_backoff_ms` +===== `retry_backoff_ms` * Value type is <> * Default value is `100` milliseconds. @@ -621,12 +621,12 @@ The SASL login callback handler class the specified SASL mechanism should use. (optional) The maximum duration, in milliseconds, for HTTPS call attempts. [id="plugins-{type}s-{plugin}-sasl_jaas_config"] -===== `sasl_jaas_config` +===== `sasl_jaas_config` * Value type is <> * There is no default value for this setting. -JAAS configuration setting local to this plugin instance, as opposed to settings using config file configured using `jaas_path`, which are shared across the JVM. This allows each plugin instance to have its own configuration. +JAAS configuration setting local to this plugin instance, as opposed to settings using config file configured using `jaas_path`, which are shared across the JVM. This allows each plugin instance to have its own configuration. If both `sasl_jaas_config` and `jaas_path` configurations are set, the setting here takes precedence. @@ -639,21 +639,21 @@ Example (setting for Azure Event Hub): } [id="plugins-{type}s-{plugin}-sasl_kerberos_service_name"] -===== `sasl_kerberos_service_name` +===== `sasl_kerberos_service_name` * Value type is <> * There is no default value for this setting. -The Kerberos principal name that Kafka broker runs as. +The Kerberos principal name that Kafka broker runs as. This can be defined either in Kafka's JAAS config or in Kafka's config. 
[id="plugins-{type}s-{plugin}-sasl_mechanism"] -===== `sasl_mechanism` +===== `sasl_mechanism` * Value type is <> * Default value is `"GSSAPI"` -http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections. +http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections. This may be any mechanism for which a security provider is available. GSSAPI is the default mechanism. @@ -756,7 +756,7 @@ In some circumstances, this process may fail when it tries to validate an authen This setting allows the plugin to skip validation during registration, which allows the plugin to continue and events to be processed. Note that an incorrectly configured schema registry will still stop the plugin from processing events. [id="plugins-{type}s-{plugin}-security_protocol"] -===== `security_protocol` +===== `security_protocol` * Value can be any of: `PLAINTEXT`, `SSL`, `SASL_PLAINTEXT`, `SASL_SSL` * Default value is `"PLAINTEXT"` @@ -764,7 +764,7 @@ This setting allows the plugin to skip validation during registration, which all Security protocol to use, which can be either of PLAINTEXT,SSL,SASL_PLAINTEXT,SASL_SSL [id="plugins-{type}s-{plugin}-send_buffer_bytes"] -===== `send_buffer_bytes` +===== `send_buffer_bytes` * Value type is <> * Default value is `131072` (128KB). @@ -772,7 +772,7 @@ Security protocol to use, which can be either of PLAINTEXT,SSL,SASL_PLAINTEXT,SA The size of the TCP send buffer (SO_SNDBUF) to use when sending data [id="plugins-{type}s-{plugin}-session_timeout_ms"] -===== `session_timeout_ms` +===== `session_timeout_ms` * Value type is <> * Default value is `10000` milliseconds (10 seconds). @@ -790,7 +790,7 @@ The endpoint identification algorithm, defaults to `"https"`. Set to empty strin [id="plugins-{type}s-{plugin}-ssl_key_password"] -===== `ssl_key_password` +===== `ssl_key_password` * Value type is <> * There is no default value for this setting. 
@@ -798,7 +798,7 @@ The endpoint identification algorithm, defaults to `"https"`. Set to empty strin The password of the private key in the key store file. [id="plugins-{type}s-{plugin}-ssl_keystore_location"] -===== `ssl_keystore_location` +===== `ssl_keystore_location` * Value type is <> * There is no default value for this setting. @@ -806,7 +806,7 @@ The password of the private key in the key store file. If client authentication is required, this setting stores the keystore path. [id="plugins-{type}s-{plugin}-ssl_keystore_password"] -===== `ssl_keystore_password` +===== `ssl_keystore_password` * Value type is <> * There is no default value for this setting. @@ -814,7 +814,7 @@ If client authentication is required, this setting stores the keystore path. If client authentication is required, this setting stores the keystore password [id="plugins-{type}s-{plugin}-ssl_keystore_type"] -===== `ssl_keystore_type` +===== `ssl_keystore_type` * Value type is <> * There is no default value for this setting. @@ -822,7 +822,7 @@ If client authentication is required, this setting stores the keystore password The format of the keystore file. It must be either `jks` or `PKCS12`. [id="plugins-{type}s-{plugin}-ssl_truststore_location"] -===== `ssl_truststore_location` +===== `ssl_truststore_location` * Value type is <> * There is no default value for this setting. @@ -830,7 +830,7 @@ The format of the keystore file. It must be either `jks` or `PKCS12`. The JKS truststore path to validate the Kafka broker's certificate. [id="plugins-{type}s-{plugin}-ssl_truststore_password"] -===== `ssl_truststore_password` +===== `ssl_truststore_password` * Value type is <> * There is no default value for this setting. @@ -838,7 +838,7 @@ The JKS truststore path to validate the Kafka broker's certificate. The truststore password. 
[id="plugins-{type}s-{plugin}-ssl_truststore_type"] -===== `ssl_truststore_type` +===== `ssl_truststore_type` * Value type is <> * There is no default value for this setting. @@ -846,7 +846,7 @@ The truststore password. The format of the truststore file. It must be either `jks` or `PKCS12`. [id="plugins-{type}s-{plugin}-topics"] -===== `topics` +===== `topics` * Value type is <> * Default value is `["logstash"]` @@ -854,7 +854,7 @@ The format of the truststore file. It must be either `jks` or `PKCS12`. A list of topics to subscribe to, defaults to ["logstash"]. [id="plugins-{type}s-{plugin}-topics_pattern"] -===== `topics_pattern` +===== `topics_pattern` * Value type is <> * There is no default value for this setting. @@ -867,7 +867,7 @@ NOTE: When the broker has some topics configured with ACL rules and they miss t happens but on the broker side it is logged that the subscription of some topics was denied to the configured user. [id="plugins-{type}s-{plugin}-value_deserializer_class"] -===== `value_deserializer_class` +===== `value_deserializer_class` * Value type is <> * Default value is `"org.apache.kafka.common.serialization.StringDeserializer"` diff --git a/docs/plugins/inputs/kinesis.asciidoc b/docs/plugins/inputs/kinesis.asciidoc index bdeaf51d0..3d3310293 100644 --- a/docs/plugins/inputs/kinesis.asciidoc +++ b/docs/plugins/inputs/kinesis.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v2.3.0 :release_date: 2023-08-28 :changelog_url: https://github.com/logstash-plugins/logstash-input-kinesis/blob/v2.3.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -23,7 +23,7 @@ include::{include_path}/plugin_header.asciidoc[] ==== Description You can use this plugin to receive events through -http://docs.aws.amazon.com/kinesis/latest/dev/introduction.html[AWS Kinesis]. 
+http://docs.aws.amazon.com/kinesis/latest/dev/introduction.html[AWS Kinesis]. This plugin uses the http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-record-processor-implementation-app-java.html[Java Kinesis Client Library]. The documentation at https://github.com/awslabs/amazon-kinesis-client will be useful. @@ -207,21 +207,21 @@ Session name to use when assuming an IAM role. This is recorded in CloudTrail lo [id="plugins-{type}s-{plugin}-additional_settings"] ===== `additional_settings` - + * Value type is <> -* There is no default value for this setting +* There is no default value for this setting The KCL provides several configuration options which can be set in https://github.com/awslabs/amazon-kinesis-client/blob/master/amazon-kinesis-client-multilang/src/main/java/software/amazon/kinesis/coordinator/KinesisClientLibConfiguration.java[KinesisClientLibConfiguration]. These options are configured via various function calls that all begin with `with`. Some of these functions take complex types, which are not supported. However, you may invoke any one of the `withX()` functions that take a primitive -by providing key-value pairs in `snake_case`. +by providing key-value pairs in `snake_case`. -Example: +Example: -To set the dynamodb read and write capacity values, use these functions: -`withInitialLeaseTableReadCapacity` and `withInitialLeaseTableWriteCapacity`. +To set the dynamodb read and write capacity values, use these functions: +`withInitialLeaseTableReadCapacity` and `withInitialLeaseTableWriteCapacity`. [source,text] ---- diff --git a/docs/plugins/inputs/log4j.asciidoc b/docs/plugins/inputs/log4j.asciidoc index 84e1096b4..b8a2e5700 100644 --- a/docs/plugins/inputs/log4j.asciidoc +++ b/docs/plugins/inputs/log4j.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.3 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-log4j/blob/v3.1.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -34,7 +34,7 @@ To migrate away from log4j SocketAppender to using filebeat, you will need to ma ===== Configuring log4j for writing to local files -In your log4j.properties file, remove SocketAppender and replace it with RollingFileAppender. +In your log4j.properties file, remove SocketAppender and replace it with RollingFileAppender. For example, you can use the following log4j.properties configuration to write daily log files. @@ -66,7 +66,7 @@ configuration: logstash: hosts: ["your-logstash-host:5000"] -For more details on configuring filebeat, see +For more details on configuring filebeat, see https://www.elastic.co/guide/en/beats/filebeat/{branch}/configuring-howto-filebeat.html[Configure Filebeat]. ===== Configuring Logstash to receive from filebeat @@ -130,7 +130,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -139,7 +139,7 @@ When mode is `server`, the address to listen on. When mode is `client`, the address to connect to. [id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * Value can be any of: `server`, `client` * Default value is `"server"` @@ -148,7 +148,7 @@ Mode to operate in. `server` listens for client connections, `client` connects to a server. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `4560` @@ -157,7 +157,7 @@ When mode is `server`, the port to listen on. When mode is `client`, the port to connect to. 
[id="plugins-{type}s-{plugin}-proxy_protocol"] -===== `proxy_protocol` +===== `proxy_protocol` * Value type is <> * Default value is `false` diff --git a/docs/plugins/inputs/logstash.asciidoc b/docs/plugins/inputs/logstash.asciidoc index c26c5390d..f50c47cc9 100644 --- a/docs/plugins/inputs/logstash.asciidoc +++ b/docs/plugins/inputs/logstash.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.4 :release_date: 2024-12-10 :changelog_url: https://github.com/logstash-plugins/logstash-integration-logstash/blob/v1.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -92,7 +92,7 @@ NOTE: Client-certificate verification does _not_ verify identity claims on the p ===== Security: Credentials You can also configure this plugin to require a specific username/password be provided by configuring <> and <>. -Doing so requires connecting `logstash-output` plugin clients to provide matching `username` and `password`. +Doing so requires connecting `logstash-output` plugin clients to provide matching `username` and `password`. NOTE: when SSL is disabled, data and credentials will be received in clear-text. diff --git a/docs/plugins/inputs/lumberjack.asciidoc b/docs/plugins/inputs/lumberjack.asciidoc index 52a7de9b2..c3ca5963e 100644 --- a/docs/plugins/inputs/lumberjack.asciidoc +++ b/docs/plugins/inputs/lumberjack.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.6 :release_date: 2019-04-15 :changelog_url: https://github.com/logstash-plugins/logstash-input-lumberjack/blob/v3.1.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -34,8 +34,8 @@ https://github.com/elastic/logstash-forwarder[logstash-forwarder] tool that has been replaced by https://github.com/elastic/beats/tree/master/filebeat[Filebeat]. -NOTE: Consider using the {logstash-ref}/plugins-inputs-beats.html[Beats input plugin] instead. -The Beats input implements the Lumberjack protocol v1 and v2. +NOTE: Consider using the {logstash-ref}/plugins-inputs-beats.html[Beats input plugin] instead. +The Beats input implements the Lumberjack protocol v1 and v2. [id="plugins-{type}s-{plugin}-options"] ==== Lumberjack Input Configuration Options @@ -59,7 +59,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-congestion_threshold"] -===== `congestion_threshold` +===== `congestion_threshold` * Value type is <> * Default value is `5` @@ -68,7 +68,7 @@ The number of seconds before we raise a timeout, this option is useful to control how much time to wait if something is blocking the pipeline. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -76,7 +76,7 @@ this option is useful to control how much time to wait if something is blocking The IP address to listen on. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -85,7 +85,7 @@ The IP address to listen on. The port to listen on. [id="plugins-{type}s-{plugin}-ssl_certificate"] -===== `ssl_certificate` +===== `ssl_certificate` * This is a required setting. * Value type is <> @@ -94,7 +94,7 @@ The port to listen on. SSL certificate to use. [id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * This is a required setting. * Value type is <> @@ -103,7 +103,7 @@ SSL certificate to use. SSL key to use. [id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * There is no default value for this setting. 
diff --git a/docs/plugins/inputs/meetup.asciidoc b/docs/plugins/inputs/meetup.asciidoc index 72ba1c43d..001a21562 100644 --- a/docs/plugins/inputs/meetup.asciidoc +++ b/docs/plugins/inputs/meetup.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.1 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-meetup/blob/v3.1.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -47,7 +47,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-eventstatus"] -===== `eventstatus` +===== `eventstatus` * Value type is <>. * Default value is `"upcoming,past"`. @@ -55,16 +55,16 @@ input plugins. Event Status can be one of `"upcoming"`, `"past"`, or `"upcoming,past"`. Default is `"upcoming,past"`. [id="plugins-{type}s-{plugin}-groupid"] -===== `groupid` +===== `groupid` * Value type is <>. * There is no default value for this setting. -The Group ID, multiple may be specified seperated by commas. +The Group ID, multiple may be specified separated by commas. Must have one of `urlname`, `venueid`, `groupid`, `text`. [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * This is a required setting. * Value type is <>. @@ -73,7 +73,7 @@ Must have one of `urlname`, `venueid`, `groupid`, `text`. Interval to run the command. Value is in minutes. [id="plugins-{type}s-{plugin}-meetupkey"] -===== `meetupkey` +===== `meetupkey` * This is a required setting. * Value type is <>. @@ -82,7 +82,7 @@ Interval to run the command. Value is in minutes. Meetup Key, aka personal token. [id="plugins-{type}s-{plugin}-urlname"] -===== `urlname` +===== `urlname` * Value type is <>. * There is no default value for this setting. @@ -91,7 +91,7 @@ URLName - the URL name ie `ElasticSearch-Oklahoma-City`.
Must have one of urlname, venue_id, group_id, `text`. [id="plugins-{type}s-{plugin}-venueid"] -===== `venueid` +===== `venueid` * Value type is <>. * There is no default value for this setting. @@ -100,7 +100,7 @@ The venue ID Must have one of `urlname`, `venue_id`, `group_id`, `text`. [id="plugins-{type}s-{plugin}-text"] -===== `text` +===== `text` * Value type is <>. * There is no default value for this setting. diff --git a/docs/plugins/inputs/pipe.asciidoc b/docs/plugins/inputs/pipe.asciidoc index ea95b4ece..98534214d 100644 --- a/docs/plugins/inputs/pipe.asciidoc +++ b/docs/plugins/inputs/pipe.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2021-11-18 :changelog_url: https://github.com/logstash-plugins/logstash-input-pipe/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -31,7 +31,7 @@ want to join lines, you'll want to use the multiline codec. [id="plugins-{type}s-{plugin}-ecs"] ==== Compatibility with the Elastic Common Schema (ECS) -This plugin adds extra fields about the event's source. +This plugin adds extra fields about the event's source. Configure the <> option if you want to ensure that these fields are compatible with {ecs-ref}[ECS]. @@ -63,7 +63,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-command"] -===== `command` +===== `command` * This is a required setting. * Value type is <> diff --git a/docs/plugins/inputs/puppet_facter.asciidoc b/docs/plugins/inputs/puppet_facter.asciidoc index 39c1ee042..3c605b8c5 100644 --- a/docs/plugins/inputs/puppet_facter.asciidoc +++ b/docs/plugins/inputs/puppet_facter.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-puppet_facter/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -47,7 +47,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-environment"] -===== `environment` +===== `environment` * Value type is <> * Default value is `"production"` @@ -55,7 +55,7 @@ input plugins. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -63,7 +63,7 @@ input plugins. [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * Value type is <> * Default value is `600` @@ -71,7 +71,7 @@ input plugins. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `8140` @@ -79,7 +79,7 @@ input plugins. [id="plugins-{type}s-{plugin}-private_key"] -===== `private_key` +===== `private_key` * Value type is <> * There is no default value for this setting. @@ -87,7 +87,7 @@ input plugins. [id="plugins-{type}s-{plugin}-public_key"] -===== `public_key` +===== `public_key` * Value type is <> * There is no default value for this setting. @@ -95,7 +95,7 @@ input plugins. [id="plugins-{type}s-{plugin}-ssl"] -===== `ssl` +===== `ssl` * Value type is <> * Default value is `true` diff --git a/docs/plugins/inputs/rabbitmq.asciidoc b/docs/plugins/inputs/rabbitmq.asciidoc index 7bbe8c6ef..0b60957fd 100644 --- a/docs/plugins/inputs/rabbitmq.asciidoc +++ b/docs/plugins/inputs/rabbitmq.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v7.4.0 :release_date: 2024-09-16 :changelog_url: https://github.com/logstash-plugins/logstash-integration-rabbitmq/blob/v7.4.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/redis.asciidoc b/docs/plugins/inputs/redis.asciidoc index 003b24ed6..fd388756e 100644 --- a/docs/plugins/inputs/redis.asciidoc +++ b/docs/plugins/inputs/redis.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.7.1 :release_date: 2024-08-01 :changelog_url: https://github.com/logstash-plugins/logstash-input-redis/blob/v3.7.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -143,7 +143,7 @@ Password to authenticate with. There is no authentication by default. The port to connect on. [id="plugins-{type}s-{plugin}-ssl"] -===== `ssl` +===== `ssl` * Value type is <> * Default value is `false` diff --git a/docs/plugins/inputs/relp.asciidoc b/docs/plugins/inputs/relp.asciidoc index 72401cfa5..d69b81a38 100644 --- a/docs/plugins/inputs/relp.asciidoc +++ b/docs/plugins/inputs/relp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-relp/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -57,7 +57,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -65,7 +65,7 @@ input plugins. The address to listen on. 
[id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -74,7 +74,7 @@ The address to listen on. The port to listen on. [id="plugins-{type}s-{plugin}-ssl_cacert"] -===== `ssl_cacert` +===== `ssl_cacert` * Value type is <> * There is no default value for this setting. @@ -82,7 +82,7 @@ The port to listen on. The SSL CA certificate, chainfile or CA path. The system CA path is automatically included. [id="plugins-{type}s-{plugin}-ssl_cert"] -===== `ssl_cert` +===== `ssl_cert` * Value type is <> * There is no default value for this setting. @@ -90,7 +90,7 @@ The SSL CA certificate, chainfile or CA path. The system CA path is automaticall SSL certificate path [id="plugins-{type}s-{plugin}-ssl_enable"] -===== `ssl_enable` +===== `ssl_enable` * Value type is <> * Default value is `false` @@ -98,7 +98,7 @@ SSL certificate path Enable SSL (must be set for other `ssl_` options to take effect). [id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * Value type is <> * There is no default value for this setting. @@ -106,7 +106,7 @@ Enable SSL (must be set for other `ssl_` options to take effect). SSL key path [id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * Default value is `nil` @@ -114,7 +114,7 @@ SSL key path SSL key passphrase [id="plugins-{type}s-{plugin}-ssl_verify"] -===== `ssl_verify` +===== `ssl_verify` * Value type is <> * Default value is `true` diff --git a/docs/plugins/inputs/rss.asciidoc b/docs/plugins/inputs/rss.asciidoc index 3a0f51f97..2b33e875b 100644 --- a/docs/plugins/inputs/rss.asciidoc +++ b/docs/plugins/inputs/rss.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.6 :release_date: 2023-11-03 :changelog_url: https://github.com/logstash-plugins/logstash-input-rss/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -49,7 +49,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * This is a required setting. * Value type is <> @@ -58,7 +58,7 @@ input plugins. Interval to run the command. Value is in seconds. [id="plugins-{type}s-{plugin}-url"] -===== `url` +===== `url` * This is a required setting. * Value type is <> diff --git a/docs/plugins/inputs/s3-sns-sqs.asciidoc b/docs/plugins/inputs/s3-sns-sqs.asciidoc index e45c3bcdb..d61e54e99 100644 --- a/docs/plugins/inputs/s3-sns-sqs.asciidoc +++ b/docs/plugins/inputs/s3-sns-sqs.asciidoc @@ -6,7 +6,7 @@ REPLACES GENERATED VARIABLES /////////////////////////////////////////// :changelog_url: https://github.com/cherweg/logstash-input-s3-sns-sqs/blob/master/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include :gem: https://rubygems.org/gems/logstash-input-s3-sns-sqss /////////////////////////////////////////// END - REPLACES GENERATED VARIABLES @@ -36,7 +36,7 @@ This plugin uses sqs to read logs from AWS S3 buckets in high availability setups with multiple Logstash instances. ==== Documentation - + https://github.com/cherweg/logstash-input-s3-sns-sqs/blob/master/docs/index.asciidoc[ Documentation] for the logstash-{type}-{plugin} plugin is maintained by the creator. @@ -44,4 +44,3 @@ Documentation] for the logstash-{type}-{plugin} plugin is maintained by the crea This is a third-party plugin. For bugs or feature requests, open an issue in the https://github.com/cherweg/logstash-input-s3-sns-sqs[plugins-{type}s-{plugin} Github repo]. 
- diff --git a/docs/plugins/inputs/s3.asciidoc b/docs/plugins/inputs/s3.asciidoc index e9c53df02..ffaa66f0d 100644 --- a/docs/plugins/inputs/s3.asciidoc +++ b/docs/plugins/inputs/s3.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -25,7 +25,7 @@ include::{include_path}/plugin_header-integration.asciidoc[] Stream events from files from a S3 bucket. -IMPORTANT: The S3 input plugin only supports AWS S3. +IMPORTANT: The S3 input plugin only supports AWS S3. Other S3 compatible storage solutions are not supported. Each line from each file generates an event. @@ -90,7 +90,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -127,7 +127,7 @@ the connection to s3. See full list in https://docs.aws.amazon.com/sdk-for-ruby/ } [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. @@ -145,7 +145,7 @@ file should look like this: [id="plugins-{type}s-{plugin}-backup_add_prefix"] -===== `backup_add_prefix` +===== `backup_add_prefix` * Value type is <> * Default value is `nil` @@ -155,7 +155,7 @@ If backing up to another (or the same) bucket, this effectively lets you choose a new 'folder' to place the files in [id="plugins-{type}s-{plugin}-backup_to_bucket"] -===== `backup_to_bucket` +===== `backup_to_bucket` * Value type is <> * Default value is `nil` @@ -163,7 +163,7 @@ choose a new 'folder' to place the files in Name of a S3 bucket to backup processed files to. 
[id="plugins-{type}s-{plugin}-backup_to_dir"] -===== `backup_to_dir` +===== `backup_to_dir` * Value type is <> * Default value is `nil` @@ -171,7 +171,7 @@ Name of a S3 bucket to backup processed files to. Path of a local directory to backup processed files to. [id="plugins-{type}s-{plugin}-bucket"] -===== `bucket` +===== `bucket` * This is a required setting. * Value type is <> @@ -180,7 +180,7 @@ Path of a local directory to backup processed files to. The name of the S3 bucket. [id="plugins-{type}s-{plugin}-delete"] -===== `delete` +===== `delete` * Value type is <> * Default value is `false` @@ -210,14 +210,14 @@ This is useful when connecting to S3 compatible services, but beware that these guaranteed to work correctly with the AWS SDK. [id="plugins-{type}s-{plugin}-exclude_pattern"] -===== `exclude_pattern` +===== `exclude_pattern` * Value type is <> * Default value is `nil` Ruby style regexp of keys to exclude from the bucket. -Note that files matching the pattern are skipped _after_ they have been listed. +Note that files matching the pattern are skipped _after_ they have been listed. Consider using <> instead where possible. Example: @@ -248,7 +248,7 @@ Whether or not to include the S3 object's properties (last_modified, content_typ `[@metadata][s3]`. Regardless of this setting, `[@metadata][s3][key]` will always be present. [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * Value type is <> * Default value is `60` @@ -257,7 +257,7 @@ Interval to wait between to check the file list again after a run is finished. Value is in seconds. [id="plugins-{type}s-{plugin}-prefix"] -===== `prefix` +===== `prefix` * Value type is <> * Default value is `nil` @@ -265,7 +265,7 @@ Value is in seconds. If specified, the prefix of filenames in the bucket must match (not a regexp) [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. 
@@ -273,7 +273,7 @@ If specified, the prefix of filenames in the bucket must match (not a regexp) URI to proxy server if required [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value type is <> * Default value is `"us-east-1"` @@ -299,7 +299,7 @@ See the https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html[ Session name to use when assuming an IAM role. [id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -307,7 +307,7 @@ Session name to use when assuming an IAM role. The AWS Secret Access Key [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. @@ -315,7 +315,7 @@ The AWS Secret Access Key The AWS Session token for temporary credential [id="plugins-{type}s-{plugin}-sincedb_path"] -===== `sincedb_path` +===== `sincedb_path` * Value type is <> * Default value is `nil` @@ -327,7 +327,7 @@ sincedb files to in the directory '{path.data}/plugins/inputs/s3/' If specified, this setting must be a filename path and not just a directory. [id="plugins-{type}s-{plugin}-temporary_directory"] -===== `temporary_directory` +===== `temporary_directory` * Value type is <> * Default value is `"/tmp/logstash"` diff --git a/docs/plugins/inputs/salesforce.asciidoc b/docs/plugins/inputs/salesforce.asciidoc index 3959dd34e..305928781 100644 --- a/docs/plugins/inputs/salesforce.asciidoc +++ b/docs/plugins/inputs/salesforce.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.1 :release_date: 2023-05-30 :changelog_url: https://github.com/logstash-plugins/logstash-input-salesforce/blob/v3.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -96,7 +96,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-api_version"] -===== `api_version` +===== `api_version` * Value type is <> * There is no default value for this setting. @@ -105,7 +105,7 @@ By default, this uses the default Restforce API version. To override this, set this to something like "32.0" for example [id="plugins-{type}s-{plugin}-client_id"] -===== `client_id` +===== `client_id` * This is a required setting. * Value type is <> @@ -117,7 +117,7 @@ can be found here: https://help.salesforce.com/apex/HTViewHelpDoc?id=connected_app_create.htm [id="plugins-{type}s-{plugin}-client_secret"] -===== `client_secret` +===== `client_secret` * This is a required setting. * Value type is <> @@ -126,7 +126,7 @@ https://help.salesforce.com/apex/HTViewHelpDoc?id=connected_app_create.htm Consumer Secret from your oauth enabled connected app [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * This is a required setting. * Value type is <> @@ -135,7 +135,7 @@ Consumer Secret from your oauth enabled connected app The password used to login to sfdc [id="plugins-{type}s-{plugin}-security_token"] -===== `security_token` +===== `security_token` * This is a required setting. * Value type is <> @@ -146,7 +146,7 @@ generting a security token, see: https://help.salesforce.com/apex/HTViewHelpDoc?id=user_security_token.htm [id="plugins-{type}s-{plugin}-sfdc_fields"] -===== `sfdc_fields` +===== `sfdc_fields` * Value type is <> * Default value is `[]` @@ -155,7 +155,7 @@ These are the field names to return in the Salesforce query If this is empty, all fields are returned. [id="plugins-{type}s-{plugin}-sfdc_filters"] -===== `sfdc_filters` +===== `sfdc_filters` * Value type is <> * Default value is `""` @@ -178,7 +178,7 @@ Use either this or the `use_test_sandbox` configuration option but not both to configure the url to which the plugin connects to. 
[id="plugins-{type}s-{plugin}-sfdc_object_name"] -===== `sfdc_object_name` +===== `sfdc_object_name` * This is a required setting. * Value type is <> @@ -187,7 +187,7 @@ but not both to configure the url to which the plugin connects to. The name of the salesforce object you are creating or updating [id="plugins-{type}s-{plugin}-to_underscores"] -===== `to_underscores` +===== `to_underscores` * Value type is <> * Default value is `false` @@ -195,7 +195,7 @@ The name of the salesforce object you are creating or updating Setting this to true will convert SFDC's NamedFields__c to named_fields__c [id="plugins-{type}s-{plugin}-use_test_sandbox"] -===== `use_test_sandbox` +===== `use_test_sandbox` * Value type is <> * Default value is `false` @@ -220,7 +220,7 @@ include reading apex unit test results, flow coverage results (e.g. coverage of elements of sfdc flows) and security health check risks. [id="plugins-{type}s-{plugin}-username"] -===== `username` +===== `username` * This is a required setting. * Value type is <> diff --git a/docs/plugins/inputs/snmp.asciidoc b/docs/plugins/inputs/snmp.asciidoc index b8dd138c0..46fd5b0a7 100644 --- a/docs/plugins/inputs/snmp.asciidoc +++ b/docs/plugins/inputs/snmp.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.6 :release_date: 2025-01-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-snmp/blob/v4.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -22,15 +22,15 @@ END - GENERATED VARIABLES, DO NOT EDIT! include::{include_path}/plugin_header-integration.asciidoc[] .Migrating to `logstash-integration-snmp` from stand-alone `input-snmp` -**** -The `logstash-input-snmp` plugin is now a component of the `logstash-integration-snmp` plugin which is -bundled with {ls} 8.15.0 by default. 
-This integrated plugin package provides better alignment in snmp processing, better resource management, -easier package maintenance, and a smaller installation footprint. +**** +The `logstash-input-snmp` plugin is now a component of the `logstash-integration-snmp` plugin which is +bundled with {ls} 8.15.0 by default. +This integrated plugin package provides better alignment in snmp processing, better resource management, +easier package maintenance, and a smaller installation footprint. -Before you upgrade to {ls} 8.15.0, be aware of link:{logstash-ref}/plugins-integrations-snmp.html#plugins-integrations-snmp-migration[behavioral and mapping differences] -between current stand-alone plugins and the new versions included in `integration-snmp`. -**** +Before you upgrade to {ls} 8.15.0, be aware of link:{logstash-ref}/plugins-integrations-snmp.html#plugins-integrations-snmp-migration[behavioral and mapping differences] +between current stand-alone plugins and the new versions included in `integration-snmp`. +**** ==== Description @@ -58,10 +58,10 @@ Metadata fields follow a specific naming convention when < ["1.3.6.1.2.1.1.1.0"] hosts => [{host => "udp:127.0.0.1/161"}] - + mib_paths => ["path/to/converted/mibfile.dic"] oid_root_skip => 0 interval => 30 @@ -504,7 +504,7 @@ By default, a `host` field is added to the event with the `[@metadata][host_addr config :add_field, :validate => :hash, :default => { "host" => "%{[@metadata][host_address]}" } ----- -You can customize the format and content of the `host` field by specifying an alternate `add_field`. +You can customize the format and content of the `host` field by specifying an alternate `add_field`. 
Example [source,ruby] @@ -513,7 +513,7 @@ input { snmp { get => ["1.3.6.1.2.1.1.1.0"] hosts => [{host => "udp:127.0.0.1/161"}] - + add_field => {host => "%{[@metadata][host_protocol]}:%{[@metadata][host_address]}/%{[@metadata][host_port]},%{[@metadata][host_community]}"} } } diff --git a/docs/plugins/inputs/snmptrap.asciidoc b/docs/plugins/inputs/snmptrap.asciidoc index 4be577e39..e70325b38 100644 --- a/docs/plugins/inputs/snmptrap.asciidoc +++ b/docs/plugins/inputs/snmptrap.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.6 :release_date: 2025-01-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-snmp/blob/v4.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -22,16 +22,16 @@ END - GENERATED VARIABLES, DO NOT EDIT! include::{include_path}/plugin_header-integration.asciidoc[] .Migrating to `logstash-integration-snmp` from stand-alone `input-snmptrap` -**** -The `logstash-input-snmptrap` plugin is now a component of the `logstash-integration-snmp` plugin which is -bundled with {ls} 8.15.0 by default. -This integrated plugin package provides better alignment in snmp processing, better resource management, -easier package maintenance, and a smaller installation footprint. - -Before you upgrade to {ls} 8.15.0, be aware of link:{logstash-ref}/plugins-integrations-snmp.html#plugins-integrations-snmp-migration[behavioral and mapping differences] -between current stand-alone plugins and the new versions included in `integration-snmp`. +**** +The `logstash-input-snmptrap` plugin is now a component of the `logstash-integration-snmp` plugin which is +bundled with {ls} 8.15.0 by default. 
+This integrated plugin package provides better alignment in snmp processing, better resource management, +easier package maintenance, and a smaller installation footprint. + +Before you upgrade to {ls} 8.15.0, be aware of link:{logstash-ref}/plugins-integrations-snmp.html#plugins-integrations-snmp-migration[behavioral and mapping differences] +between current stand-alone plugins and the new versions included in `integration-snmp`. If you need to maintain current mappings for the `input-snmptrap` plugin, you have options to {logstash-ref}/plugins-integrations-snmp.html#plugins-integrations-snmp-input-snmptrap-compat[preserve existing behavior]. -**** +**** ==== Description @@ -79,7 +79,7 @@ which variable binding in the list caused the error ==== Importing MIBs This plugin already includes the IETF MIBs (management information bases), and you do not need to import them. -If you need additional MIBs, you need to import them. +If you need additional MIBs, you need to import them. Check out link:{logstash-ref}/plugins-integrations-snmp.html#plugins-integrations-snmp-import-mibs[Importing MIBs] for info. [id="plugins-{type}s-{plugin}-options"] @@ -128,7 +128,7 @@ Also see <> for a list of options suppo input plugins. [id="plugins-{type}s-{plugin}-community"] -===== `community` +===== `community` * Value type is <> * Default value is `["public"]` @@ -172,7 +172,7 @@ input { Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -243,7 +243,7 @@ which would result in a field name "1.0". Similarly when a MIB is used an OID su * Use this setting only if <> is set to `default`. 
[id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `1062` @@ -301,7 +301,7 @@ This plugin provides all IETF MIBs (management information bases), publicly avai When enabled, it automatically loads the bundled MIBs and provides mapping of the numeric OIDs to MIB field names in the resulting event. [id="plugins-{type}s-{plugin}-yamlmibdir"] -===== `yamlmibdir` +===== `yamlmibdir` deprecated[4.0.0, Replaced by <>] * Value type is <> diff --git a/docs/plugins/inputs/sqlite.asciidoc b/docs/plugins/inputs/sqlite.asciidoc index e7781dfd0..7c8d71d73 100644 --- a/docs/plugins/inputs/sqlite.asciidoc +++ b/docs/plugins/inputs/sqlite.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-sqlite/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -41,7 +41,7 @@ Example ip STRING, request STRING, response INTEGER); - sqlite> INSERT INTO weblogs (ip, request, response) + sqlite> INSERT INTO weblogs (ip, request, response) VALUES ("1.2.3.4", "/index.html", 200); Then with this logstash config: @@ -95,7 +95,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-batch"] -===== `batch` +===== `batch` * Value type is <> * Default value is `5` @@ -103,7 +103,7 @@ input plugins. How many rows to fetch at a time from each `SELECT` call. [id="plugins-{type}s-{plugin}-exclude_tables"] -===== `exclude_tables` +===== `exclude_tables` * Value type is <> * Default value is `[]` @@ -112,7 +112,7 @@ Any tables to exclude by name. By default all tables are followed. [id="plugins-{type}s-{plugin}-path"] -===== `path` +===== `path` * This is a required setting. 
* Value type is <> diff --git a/docs/plugins/inputs/sqs.asciidoc b/docs/plugins/inputs/sqs.asciidoc index 51cdbd3a3..4b0720abc 100644 --- a/docs/plugins/inputs/sqs.asciidoc +++ b/docs/plugins/inputs/sqs.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -113,7 +113,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -149,7 +149,7 @@ the connection to SQS. See full list in https://docs.aws.amazon.com/sdk-for-ruby } [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. @@ -176,7 +176,7 @@ This is useful when connecting to S3 compatible services, but beware that these guaranteed to work correctly with the AWS SDK. [id="plugins-{type}s-{plugin}-id_field"] -===== `id_field` +===== `id_field` * Value type is <> * There is no default value for this setting. @@ -184,7 +184,7 @@ guaranteed to work correctly with the AWS SDK. Name of the event field in which to store the SQS message ID [id="plugins-{type}s-{plugin}-md5_field"] -===== `md5_field` +===== `md5_field` * Value type is <> * There is no default value for this setting. 
@@ -192,7 +192,7 @@ Name of the event field in which to store the SQS message ID Name of the event field in which to store the SQS message MD5 checksum [id="plugins-{type}s-{plugin}-polling_frequency"] -===== `polling_frequency` +===== `polling_frequency` * Value type is <> * Default value is `20` @@ -200,7 +200,7 @@ Name of the event field in which to store the SQS message MD5 checksum Polling frequency, default is 20 seconds [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. @@ -208,7 +208,7 @@ Polling frequency, default is 20 seconds URI to proxy server if required [id="plugins-{type}s-{plugin}-queue"] -===== `queue` +===== `queue` * This is a required setting. * Value type is <> @@ -217,7 +217,7 @@ URI to proxy server if required Name of the SQS Queue name to pull messages from. Note that this is just the name of the queue, not the URL or ARN. [id="plugins-{type}s-{plugin}-queue_owner_aws_account_id"] -===== `queue_owner_aws_account_id` +===== `queue_owner_aws_account_id` * Value type is <> * There is no default value for this setting. @@ -225,7 +225,7 @@ Name of the SQS Queue name to pull messages from. Note that this is just the nam ID of the AWS account owning the queue if you want to use a https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-basic-examples-of-sqs-policies.html#grant-two-permissions-to-one-account[cross-account queue] with embedded policy. Note that AWS SDK only support numerical account ID and not account aliases. [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value type is <> * Default value is `"us-east-1"` @@ -251,7 +251,7 @@ See the https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html[ Session name to use when assuming an IAM role. 
[id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -259,7 +259,7 @@ Session name to use when assuming an IAM role. The AWS Secret Access Key [id="plugins-{type}s-{plugin}-sent_timestamp_field"] -===== `sent_timestamp_field` +===== `sent_timestamp_field` * Value type is <> * There is no default value for this setting. @@ -267,7 +267,7 @@ The AWS Secret Access Key Name of the event field in which to store the SQS message Sent Timestamp [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. @@ -275,7 +275,7 @@ Name of the event field in which to store the SQS message Sent Timestamp The AWS Session token for temporary credential [id="plugins-{type}s-{plugin}-threads"] -===== `threads` +===== `threads` * Value type is <> * Default value is `1` diff --git a/docs/plugins/inputs/stdin.asciidoc b/docs/plugins/inputs/stdin.asciidoc index f82fcbe40..e040e8103 100644 --- a/docs/plugins/inputs/stdin.asciidoc +++ b/docs/plugins/inputs/stdin.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.4.0 :release_date: 2021-08-04 :changelog_url: https://github.com/logstash-plugins/logstash-input-stdin/blob/v3.4.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/inputs/stomp.asciidoc b/docs/plugins/inputs/stomp.asciidoc index 4f7fff498..7ebba3381 100644 --- a/docs/plugins/inputs/stomp.asciidoc +++ b/docs/plugins/inputs/stomp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.8 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-stomp/blob/v3.0.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -48,7 +48,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-destination"] -===== `destination` +===== `destination` * This is a required setting. * Value type is <> @@ -59,7 +59,7 @@ The destination to read events from. Example: `/topic/logstash` [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -68,7 +68,7 @@ Example: `/topic/logstash` The address of the STOMP server. [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * Default value is `""` @@ -76,7 +76,7 @@ The address of the STOMP server. The password to authenticate with. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `61613` @@ -84,7 +84,7 @@ The password to authenticate with. The port to connet to on your STOMP server. [id="plugins-{type}s-{plugin}-reconnect"] -===== `reconnect` +===== `reconnect` * Value type is <> * Default value is `true` @@ -92,7 +92,7 @@ The port to connet to on your STOMP server. Auto reconnect [id="plugins-{type}s-{plugin}-reconnect_interval"] -===== `reconnect_interval` +===== `reconnect_interval` * Value type is <> * Default value is `30` @@ -100,7 +100,7 @@ Auto reconnect [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * Default value is `""` @@ -108,7 +108,7 @@ Auto reconnect The username to authenticate with. 
[id="plugins-{type}s-{plugin}-vhost"] -===== `vhost` +===== `vhost` * Value type is <> * Default value is `nil` diff --git a/docs/plugins/inputs/syslog.asciidoc b/docs/plugins/inputs/syslog.asciidoc index 005dd7462..7f6b4b8ee 100644 --- a/docs/plugins/inputs/syslog.asciidoc +++ b/docs/plugins/inputs/syslog.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.7.0 :release_date: 2023-10-17 :changelog_url: https://github.com/logstash-plugins/logstash-input-syslog/blob/v3.7.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -89,7 +89,7 @@ Controls this plugin's compatibility with the Labels for facility levels defined in RFC3164. You can use this option to override the integer->label mapping for syslog inputs -that behave differently than the RFCs. +that behave differently than the RFCs. Provide a zero-indexed array with all of your facility labels _in order_. If a log message contains a facility number with no corresponding entry, diff --git a/docs/plugins/inputs/tcp.asciidoc b/docs/plugins/inputs/tcp.asciidoc index 9c310f0c1..892cfb4eb 100644 --- a/docs/plugins/inputs/tcp.asciidoc +++ b/docs/plugins/inputs/tcp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.0.2 :release_date: 2025-02-12 :changelog_url: https://github.com/logstash-plugins/logstash-input-tcp/blob/v7.0.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -32,7 +32,7 @@ depending on `mode`. ===== Accepting log4j2 logs Log4j2 can send JSON over a socket, and we can use that combined with our tcp -input to accept the logs. +input to accept the logs. 
First, we need to configure your application to send logs in JSON over a socket. The following log4j2.xml accomplishes this task. @@ -178,7 +178,7 @@ Controls this plugin's compatibility with the https://www.elastic.co/guide/en/ec The value of this setting affects the <> on events. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -187,7 +187,7 @@ When mode is `server`, the address to listen on. When mode is `client`, the address to connect to. [id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * Value can be any of: `server`, `client` * Default value is `"server"` @@ -196,7 +196,7 @@ Mode to operate in. `server` listens for client connections, `client` connects to a server. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -206,7 +206,7 @@ When mode is `server`, the port to listen on. When mode is `client`, the port to connect to. [id="plugins-{type}s-{plugin}-proxy_protocol"] -===== `proxy_protocol` +===== `proxy_protocol` * Value type is <> * Default value is `false` @@ -270,7 +270,7 @@ NOTE: This setting can be used only if <> is `ser Enable SSL (must be set for other `ssl_` options to take effect). [id="plugins-{type}s-{plugin}-ssl_extra_chain_certs"] -===== `ssl_extra_chain_certs` +===== `ssl_extra_chain_certs` * Value type is <> * Default value is `[]` @@ -280,7 +280,7 @@ These are used together with the certificate to construct the certificate chain presented to the client. [id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * Value type is <> * There is no default value for this setting. @@ -288,7 +288,7 @@ presented to the client. The path to the private key corresponding to the specified certificate (PEM format). 
[id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * Default value is `nil` @@ -341,7 +341,7 @@ keep alive setting for the underlying socket. ==== TCP Input Obsolete Configuration Options WARNING: As of version `7.0.0` of this plugin, some configuration options have been replaced. -The plugin will fail to start if it contains any of these obsolete options. +The plugin will fail to start if it contains any of these obsolete options. [cols="<,<",options="header",] diff --git a/docs/plugins/inputs/twitter.asciidoc b/docs/plugins/inputs/twitter.asciidoc index c352f52c3..b6344cd25 100644 --- a/docs/plugins/inputs/twitter.asciidoc +++ b/docs/plugins/inputs/twitter.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.1.1 :release_date: 2023-11-16 :changelog_url: https://github.com/logstash-plugins/logstash-input-twitter/blob/v4.1.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -97,7 +97,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-consumer_key"] -===== `consumer_key` +===== `consumer_key` * This is a required setting. * Value type is <> @@ -109,7 +109,7 @@ Don't know what this is? You need to create an "application" on Twitter, see this url: [id="plugins-{type}s-{plugin}-consumer_secret"] -===== `consumer_secret` +===== `consumer_secret` * This is a required setting. * Value type is <> @@ -135,7 +135,7 @@ registering a new application with Twitter: Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)]. [id="plugins-{type}s-{plugin}-follows"] -===== `follows` +===== `follows` * Value type is <> * There is no default value for this setting. @@ -146,7 +146,7 @@ See https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/basic-st for more details. 
[id="plugins-{type}s-{plugin}-full_tweet"] -===== `full_tweet` +===== `full_tweet` * Value type is <> * Default value is `false` @@ -154,7 +154,7 @@ for more details. Record full tweet object as given to us by the Twitter Streaming API. [id="plugins-{type}s-{plugin}-ignore_retweets"] -===== `ignore_retweets` +===== `ignore_retweets` * Value type is <> * Default value is `false` @@ -162,33 +162,33 @@ Record full tweet object as given to us by the Twitter Streaming API. Lets you ignore the retweets coming out of the Twitter API. Default => false [id="plugins-{type}s-{plugin}-keywords"] -===== `keywords` +===== `keywords` * Value type is <> * There is no default value for this setting. Any keywords to track in the Twitter stream. For multiple keywords, use -the syntax ["foo", "bar"]. There's a logical OR between each keyword +the syntax ["foo", "bar"]. There's a logical OR between each keyword string listed and a logical AND between words separated by spaces per keyword string. -See https://dev.twitter.com/streaming/overview/request-parameters#track +See https://dev.twitter.com/streaming/overview/request-parameters#track for more details. -The wildcard "*" option is not supported. To ingest a sample stream of -all tweets, the use_samples option is recommended. +The wildcard "*" option is not supported. To ingest a sample stream of +all tweets, the use_samples option is recommended. [id="plugins-{type}s-{plugin}-languages"] -===== `languages` +===== `languages` * Value type is <> * There is no default value for this setting. A list of BCP 47 language identifiers corresponding to any of the languages listed -on Twitter’s advanced search page will only return tweets that have been detected +on Twitter’s advanced search page will only return tweets that have been detected as being written in the specified languages. [id="plugins-{type}s-{plugin}-locations"] -===== `locations` +===== `locations` * Value type is <> * There is no default value for this setting. 
@@ -199,7 +199,7 @@ See https://dev.twitter.com/streaming/overview/request-parameters#locations for more details. [id="plugins-{type}s-{plugin}-oauth_token"] -===== `oauth_token` +===== `oauth_token` * This is a required setting. * Value type is <> @@ -216,7 +216,7 @@ will create an oauth token and secret bound to your account and that application. [id="plugins-{type}s-{plugin}-oauth_token_secret"] -===== `oauth_token_secret` +===== `oauth_token_secret` * This is a required setting. * Value type is <> @@ -233,7 +233,7 @@ will create an oauth token and secret bound to your account and that application. [id="plugins-{type}s-{plugin}-proxy_address"] -===== `proxy_address` +===== `proxy_address` * Value type is <> * Default value is `"127.0.0.1"` @@ -241,7 +241,7 @@ application. Location of the proxy, by default the same machine as the one running this LS instance [id="plugins-{type}s-{plugin}-proxy_port"] -===== `proxy_port` +===== `proxy_port` * Value type is <> * Default value is `3128` @@ -249,7 +249,7 @@ Location of the proxy, by default the same machine as the one running this LS in Port where the proxy is listening, by default 3128 (squid) [id="plugins-{type}s-{plugin}-rate_limit_reset_in"] -===== `rate_limit_reset_in` +===== `rate_limit_reset_in` * Value type is <> * Default value is `300` @@ -259,7 +259,7 @@ In some cases the 'x-rate-limit-reset' header is not set in the response and > * Default value is `false` @@ -267,14 +267,14 @@ is nil. If this occurs then we use the integer specified here. The default is 5 When to use a proxy to handle the connections [id="plugins-{type}s-{plugin}-use_samples"] -===== `use_samples` +===== `use_samples` * Value type is <> * Default value is `false` Returns a small random sample of all public statuses. The tweets returned by the default access level are the same, so if two different clients connect -to this endpoint, they will see the same tweets. 
If set to true, the keywords, +to this endpoint, they will see the same tweets. If set to true, the keywords, follows, locations, and languages options will be ignored. Default => false [id="plugins-{type}s-{plugin}-target"] diff --git a/docs/plugins/inputs/udp.asciidoc b/docs/plugins/inputs/udp.asciidoc index 2461a1c09..e0405e8e7 100644 --- a/docs/plugins/inputs/udp.asciidoc +++ b/docs/plugins/inputs/udp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.5.0 :release_date: 2021-08-04 :changelog_url: https://github.com/logstash-plugins/logstash-input-udp/blob/v3.5.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -64,7 +64,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-buffer_size"] -===== `buffer_size` +===== `buffer_size` * Value type is <> * Default value is `65536` @@ -95,7 +95,7 @@ The value of this setting affects the placement of a TCP connection's metadata o |======================================================================= [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -103,7 +103,7 @@ The value of this setting affects the placement of a TCP connection's metadata o The address which logstash will listen on. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -113,7 +113,7 @@ The port which logstash will listen on. Remember that ports less than 1024 (privileged ports) may require root or elevated privileges to use. [id="plugins-{type}s-{plugin}-queue_size"] -===== `queue_size` +===== `queue_size` * Value type is <> * Default value is `2000` @@ -122,7 +122,7 @@ This is the number of unprocessed UDP packets you can hold in memory before packets will start dropping. 
[id="plugins-{type}s-{plugin}-receive_buffer_bytes"] -===== `receive_buffer_bytes` +===== `receive_buffer_bytes` * Value type is <> * There is no default value for this setting. @@ -150,7 +150,7 @@ Example: } [id="plugins-{type}s-{plugin}-workers"] -===== `workers` +===== `workers` * Value type is <> * Default value is `2` diff --git a/docs/plugins/inputs/unix.asciidoc b/docs/plugins/inputs/unix.asciidoc index e9ac44115..09d2d14ea 100644 --- a/docs/plugins/inputs/unix.asciidoc +++ b/docs/plugins/inputs/unix.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.2 :release_date: 2022-10-03 :changelog_url: https://github.com/logstash-plugins/logstash-input-unix/blob/v3.1.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -32,7 +32,7 @@ depending on `mode`. [id="plugins-{type}s-{plugin}-ecs"] ==== Compatibility with the Elastic Common Schema (ECS) -This plugin adds extra fields about the event's source. +This plugin adds extra fields about the event's source. Configure the <> option if you want to ensure that these fields are compatible with {ecs-ref}[ECS]. @@ -68,7 +68,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-data_timeout"] -===== `data_timeout` +===== `data_timeout` * Value type is <> * Default value is `-1` @@ -117,7 +117,7 @@ See <> for detailed information. ----- [id="plugins-{type}s-{plugin}-force_unlink"] -===== `force_unlink` +===== `force_unlink` * Value type is <> * Default value is `false` @@ -125,7 +125,7 @@ See <> for detailed information. Remove socket file in case of EADDRINUSE failure [id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * Value can be any of: `server`, `client` * Default value is `"server"` @@ -134,7 +134,7 @@ Mode to operate in. `server` listens for client connections, `client` connects to a server. 
[id="plugins-{type}s-{plugin}-path"] -===== `path` +===== `path` * This is a required setting. * Value type is <> @@ -144,7 +144,7 @@ When mode is `server`, the path to listen on. When mode is `client`, the path to connect to. [id="plugins-{type}s-{plugin}-socket_not_present_retry_interval_seconds"] -===== `socket_not_present_retry_interval_seconds` +===== `socket_not_present_retry_interval_seconds` * This is a required setting. * Value type is <> diff --git a/docs/plugins/inputs/varnishlog.asciidoc b/docs/plugins/inputs/varnishlog.asciidoc index 0bd7b61d1..d8aaddb1a 100644 --- a/docs/plugins/inputs/varnishlog.asciidoc +++ b/docs/plugins/inputs/varnishlog.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-varnishlog/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -41,7 +41,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-threads"] -===== `threads` +===== `threads` * Value type is <> * Default value is `1` diff --git a/docs/plugins/inputs/websocket.asciidoc b/docs/plugins/inputs/websocket.asciidoc index 3bbdd6369..2ed3cabaf 100644 --- a/docs/plugins/inputs/websocket.asciidoc +++ b/docs/plugins/inputs/websocket.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-websocket/blob/v4.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -42,7 +42,7 @@ input plugins.   
[id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * Value can be any of: `client` * Default value is `"client"` @@ -52,7 +52,7 @@ is supported, i.e. this plugin connects to a websocket server and receives events from the server as websocket messages. [id="plugins-{type}s-{plugin}-url"] -===== `url` +===== `url` * This is a required setting. * Value type is <> diff --git a/docs/plugins/inputs/wmi.asciidoc b/docs/plugins/inputs/wmi.asciidoc index 026ad8ffd..7e01fc0cc 100644 --- a/docs/plugins/inputs/wmi.asciidoc +++ b/docs/plugins/inputs/wmi.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-wmi/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -67,7 +67,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -75,7 +75,7 @@ input plugins. Host to connect to ( Defaults to localhost ) [id="plugins-{type}s-{plugin}-interval"] -===== `interval` +===== `interval` * Value type is <> * Default value is `10` @@ -83,7 +83,7 @@ Host to connect to ( Defaults to localhost ) Polling interval [id="plugins-{type}s-{plugin}-namespace"] -===== `namespace` +===== `namespace` * Value type is <> * Default value is `"root\\cimv2"` @@ -91,7 +91,7 @@ Polling interval Namespace when doing remote connections [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -99,7 +99,7 @@ Namespace when doing remote connections Password when doing remote connections [id="plugins-{type}s-{plugin}-query"] -===== `query` +===== `query` * This is a required setting. 
* Value type is <> @@ -108,7 +108,7 @@ Password when doing remote connections WMI query [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/inputs/xmpp.asciidoc b/docs/plugins/inputs/xmpp.asciidoc index fb7ec5f71..f04411945 100644 --- a/docs/plugins/inputs/xmpp.asciidoc +++ b/docs/plugins/inputs/xmpp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.7 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-input-xmpp/blob/v3.1.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -48,7 +48,7 @@ input plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * There is no default value for this setting. @@ -57,7 +57,7 @@ The xmpp server to connect to. This is optional. If you omit this setting, the host on the user/identity is used. (`foo.com` for `user@foo.com`) [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * This is a required setting. * Value type is <> @@ -66,7 +66,7 @@ the host on the user/identity is used. (`foo.com` for `user@foo.com`) The xmpp password for the user/identity. [id="plugins-{type}s-{plugin}-rooms"] -===== `rooms` +===== `rooms` * Value type is <> * There is no default value for this setting. @@ -75,7 +75,7 @@ if muc/multi-user-chat required, give the name of the room that you want to join: `room@conference.domain/nick` [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * This is a required setting. 
* Value type is <> diff --git a/docs/plugins/integrations/aws.asciidoc b/docs/plugins/integrations/aws.asciidoc index 6202229ca..11c9bad2c 100644 --- a/docs/plugins/integrations/aws.asciidoc +++ b/docs/plugins/integrations/aws.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/integrations/elastic_enterprise_search.asciidoc b/docs/plugins/integrations/elastic_enterprise_search.asciidoc index 231ca6f4b..5ba45b58e 100644 --- a/docs/plugins/integrations/elastic_enterprise_search.asciidoc +++ b/docs/plugins/integrations/elastic_enterprise_search.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.0 :release_date: 2023-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-integration-elastic_enterprise_search/blob/v3.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/integrations/jdbc.asciidoc b/docs/plugins/integrations/jdbc.asciidoc index 6e3c79a3a..cb0f08e86 100644 --- a/docs/plugins/integrations/jdbc.asciidoc +++ b/docs/plugins/integrations/jdbc.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v5.5.2 :release_date: 2024-12-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-jdbc/blob/v5.5.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -29,5 +29,3 @@ The JDBC Integration Plugin provides integrated plugins for working with databas - {logstash-ref}/plugins-filters-jdbc_streaming.html[JDBC Streaming Filter Plugin] :no_codec!: - - diff --git a/docs/plugins/integrations/kafka.asciidoc b/docs/plugins/integrations/kafka.asciidoc index 42f4bcf05..fd10d9399 100644 --- a/docs/plugins/integrations/kafka.asciidoc +++ b/docs/plugins/integrations/kafka.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v11.6.0 :release_date: 2025-01-07 :changelog_url: https://github.com/logstash-plugins/logstash-integration-kafka/blob/v11.6.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -28,7 +28,7 @@ https://kafka.apache.org/[Kafka] distributed streaming platform. - {logstash-ref}/plugins-inputs-kafka.html[Kafka Input Plugin] - {logstash-ref}/plugins-outputs-kafka.html[Kafka Output Plugin] - + This plugin uses Kafka Client {kafka_client}. For broker compatibility, see the official https://cwiki.apache.org/confluence/display/KAFKA/Compatibility+Matrix[Kafka compatibility reference]. If the linked compatibility wiki is not up-to-date, diff --git a/docs/plugins/integrations/logstash.asciidoc b/docs/plugins/integrations/logstash.asciidoc index 021ede31a..89b3afa7d 100644 --- a/docs/plugins/integrations/logstash.asciidoc +++ b/docs/plugins/integrations/logstash.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.4 :release_date: 2024-12-10 :changelog_url: https://github.com/logstash-plugins/logstash-integration-logstash/blob/v1.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// diff --git a/docs/plugins/integrations/rabbitmq.asciidoc b/docs/plugins/integrations/rabbitmq.asciidoc index be156657c..98a7704e6 100644 --- a/docs/plugins/integrations/rabbitmq.asciidoc +++ b/docs/plugins/integrations/rabbitmq.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.4.0 :release_date: 2024-09-16 :changelog_url: https://github.com/logstash-plugins/logstash-integration-rabbitmq/blob/v7.4.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/integrations/sample.asciidoc b/docs/plugins/integrations/sample.asciidoc index 0391b3247..87c3faf00 100644 --- a/docs/plugins/integrations/sample.asciidoc +++ b/docs/plugins/integrations/sample.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: tbd :release_date: tbd :changelog_url: tbd -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/integrations/snmp.asciidoc b/docs/plugins/integrations/snmp.asciidoc index a4eceab43..2930ada00 100644 --- a/docs/plugins/integrations/snmp.asciidoc +++ b/docs/plugins/integrations/snmp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.0.6 :release_date: 2025-01-23 :changelog_url: https://github.com/logstash-plugins/logstash-integration-snmp/blob/v4.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -20,7 +20,7 @@ END - GENERATED VARIABLES, DO NOT EDIT! 
include::{include_path}/plugin_header.asciidoc[] -.Announcing the new SNMP integration plugin +.Announcing the new SNMP integration plugin **** The new `logstash-integration-snmp` plugin is available and bundled with {ls} 8.15.0 (and later) by default. This plugin combines our classic `logstash-input-snmp` and `logstash-input-snmptrap` plugins into a single Ruby gem at v4.0.0 and later. @@ -37,11 +37,11 @@ The SNMP integration plugin includes: * link:{logstash-ref}/plugins-inputs-snmp.html[SNMP input plugin] * link:{logstash-ref}/plugins-inputs-snmptrap.html[Snmptrap input plugin] -The new `logstash-integration-snmp` plugin combines the +The new `logstash-integration-snmp` plugin combines the `logstash-input-snmp` and `logstash-input-snmptrap` plugins into one integrated plugin that encompasses -the capabilities of both. -This integrated plugin package provides better alignment in snmp processing, better resource management, -easier package maintenance, and a smaller installation footprint. +the capabilities of both. +This integrated plugin package provides better alignment in snmp processing, better resource management, +easier package maintenance, and a smaller installation footprint. In this section, we'll cover: @@ -51,14 +51,14 @@ In this section, we'll cover: [id="plugins-{type}s-{plugin}-migration"] ==== Migrating to `logstash-integration-snmp` from individual plugins -You'll retain and expand the functionality of existing stand-alone plugins, but in a more compact, integrated package. +You'll retain and expand the functionality of existing stand-alone plugins, but in a more compact, integrated package. In this section, we'll note mapping and behavioral changes, and explain how to preserve current behavior if needed. 
[id="plugins-{type}s-{plugin}-migration-input-snmp"] -===== Migration notes: `logstash-input-snmp` +===== Migration notes: `logstash-input-snmp` -As a component of the new `logstash-integration-snmp` plugin, the `logstash-input-snmp` plugin offers the same -capabilities as the stand-alone https://github.com/logstash-plugins/logstash-input-snmp[logstash-input-snmp]. +As a component of the new `logstash-integration-snmp` plugin, the `logstash-input-snmp` plugin offers the same +capabilities as the stand-alone https://github.com/logstash-plugins/logstash-input-snmp[logstash-input-snmp]. You might need to address some behavior changes depending on the use-case and how the ingested data is being handled through the pipeline. @@ -76,15 +76,15 @@ This change should not affect existing pipelines, unless they have custom error ===== Migration notes: `logstash-input-snmptrap` As a component of the new `logstash-integration-snmp` plugin, the `logstash-input-snmptrap` plugin offers _almost the same -capabilities_ as the stand-alone https://github.com/logstash-plugins/logstash-input-snmptrap[logstash-input-snmptrap] plugin. +capabilities_ as the stand-alone https://github.com/logstash-plugins/logstash-input-snmptrap[logstash-input-snmptrap] plugin. You might need to address some behavior changes depending on your use case and how the ingested data is being handled through the pipeline. [id="plugins-{type}s-{plugin}-input-snmptrap-mapping"] ====== Changes to mapping and error logging: `logstash-input-snmptrap` -* The *PDU variable bindings* are mapped into the {ls} event using the defined data type. - By default, the stand-alone `logstash-input-snmptrap` plugin converts all of the data to `string`, ignoring the original type. +* The *PDU variable bindings* are mapped into the {ls} event using the defined data type. + By default, the stand-alone `logstash-input-snmptrap` plugin converts all of the data to `string`, ignoring the original type. 
If this behavior is not what you want, you can use a filter to retain the original type. * *SNMP `TimeTicks` variables* are mapped as `Long` timestamps instead of formatted date string (`%d days, %02d:%02d:%02d.%02d`). * *`null` variables values* are mapped using the string `null` instead of `Null` (upper-case N). @@ -92,7 +92,7 @@ You might need to address some behavior changes depending on your use case and h * *No such object errors* are mapped as `error: no such object currently exists at this OID string` instead of `noSuchObject`. * *End of MIB view errors* are mapped as `error: end of MIB view` instead of `endOfMibView`. * The previous generation (stand-alone) input-snmptrap plugin formatted the *`message` field* as -a ruby-snmp `SNMP::SNMPv1_Trap` object representation. +a ruby-snmp `SNMP::SNMPv1_Trap` object representation. + [source,sh] ---- @@ -106,12 +106,12 @@ The new integrated `input-snmptrap` plugin uses JSON to format *`message` field* {"error_index":0, "variable_bindings":{"1.3.6.1.6.3.1.1.4.1.0":"SNMPv2-MIB::coldStart", "1.3.6.1.2.1.1.3.0":0}, "error_status":0, "type":"TRAP", "error_status_text":"Success", "community":"public", "version":"2c", "request_id":1436216872} ---- -// ToDo: Add more details wrt PDU variable binding. Which filter? Add sample config? +// ToDo: Add more details wrt PDU variable binding. Which filter? Add sample config? [id="plugins-{type}s-{plugin}-input-snmptrap-compat"] ====== Maintain maximum compatibility with previous implementation -If needed, you can configure the new `logstash-integration-snmp` plugin to maintain maximum compatibility with the previous (stand-alone) +If needed, you can configure the new `logstash-integration-snmp` plugin to maintain maximum compatibility with the previous (stand-alone) version of the https://github.com/logstash-plugins/logstash-input-snmp[input-snmp] plugin. [source,ruby] @@ -125,7 +125,7 @@ input { } ---- -// ToDo: Any considerations that we should point out? 
+// ToDo: Any considerations that we should point out? [id="plugins-{type}s-{plugin}-import-mibs"] ==== Importing MIBs diff --git a/docs/plugins/outputs.asciidoc b/docs/plugins/outputs.asciidoc index 917da5440..ecd065dde 100644 --- a/docs/plugins/outputs.asciidoc +++ b/docs/plugins/outputs.asciidoc @@ -87,7 +87,7 @@ include::outputs/datadog.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-output-datadog_metrics/edit/main/docs/index.asciidoc include::outputs/datadog_metrics.asciidoc[] -:edit_url: +:edit_url: include::outputs/dynatrace.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-integration-elastic_enterprise_search/edit/main/docs/output-elastic_app_search.asciidoc @@ -139,7 +139,7 @@ include::outputs/influxdb.asciidoc[] include::outputs/irc.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/outputs/java_stdout.asciidoc -include::../../../logstash/docs/static/core-plugins/outputs/java_stdout.asciidoc[] +include::./static/core-plugins/outputs/java_stdout.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-output-juggernaut/edit/main/docs/index.asciidoc include::outputs/juggernaut.asciidoc[] @@ -199,7 +199,7 @@ include::outputs/riemann.asciidoc[] include::outputs/s3.asciidoc[] :edit_url: https://github.com/elastic/logstash/edit/main/docs/static/core-plugins/outputs/java_sink.asciidoc -include::../../../logstash/docs/static/core-plugins/outputs/java_sink.asciidoc[] +include::./static/core-plugins/outputs/java_sink.asciidoc[] :edit_url: https://github.com/logstash-plugins/logstash-output-sns/edit/main/docs/index.asciidoc include::outputs/sns.asciidoc[] @@ -244,4 +244,4 @@ include::outputs/xmpp.asciidoc[] include::outputs/zabbix.asciidoc[] -:edit_url: +:edit_url: diff --git a/docs/plugins/outputs/appsearch.asciidoc b/docs/plugins/outputs/appsearch.asciidoc index 33a18819c..bf1b64263 100644 --- a/docs/plugins/outputs/appsearch.asciidoc +++ 
b/docs/plugins/outputs/appsearch.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.0.beta1 :release_date: 2018-10-23 :changelog_url: https://github.com/logstash-plugins/logstash-output-appsearch/blob/v1.0.0.beta1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/boundary.asciidoc b/docs/plugins/outputs/boundary.asciidoc index de5b53fb1..05df6cd63 100644 --- a/docs/plugins/outputs/boundary.asciidoc +++ b/docs/plugins/outputs/boundary.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2023-05-30 :changelog_url: https://github.com/logstash-plugins/logstash-output-boundary/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -56,7 +56,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-api_key"] -===== `api_key` +===== `api_key` * This is a required setting. * Value type is <> @@ -65,7 +65,7 @@ output plugins. Your Boundary API key [id="plugins-{type}s-{plugin}-auto"] -===== `auto` +===== `auto` * Value type is <> * Default value is `false` @@ -77,7 +77,7 @@ override these. `['type', 'subtype', 'creation_time', 'end_time', 'links', 'tags', 'loc']` [id="plugins-{type}s-{plugin}-bsubtype"] -===== `bsubtype` +===== `bsubtype` * Value type is <> * There is no default value for this setting. @@ -85,7 +85,7 @@ override these. Sub-Type [id="plugins-{type}s-{plugin}-btags"] -===== `btags` +===== `btags` * Value type is <> * There is no default value for this setting. 
@@ -95,7 +95,7 @@ Set any custom tags for this event Default are the Logstash tags if any [id="plugins-{type}s-{plugin}-btype"] -===== `btype` +===== `btype` * Value type is <> * There is no default value for this setting. @@ -103,7 +103,7 @@ Default are the Logstash tags if any Type [id="plugins-{type}s-{plugin}-end_time"] -===== `end_time` +===== `end_time` * Value type is <> * There is no default value for this setting. @@ -115,7 +115,7 @@ If overriding, it is your responsibility to type this correctly By default this is set to `event.get("@timestamp").to_i` [id="plugins-{type}s-{plugin}-org_id"] -===== `org_id` +===== `org_id` * This is a required setting. * Value type is <> @@ -124,7 +124,7 @@ By default this is set to `event.get("@timestamp").to_i` Your Boundary Org ID [id="plugins-{type}s-{plugin}-start_time"] -===== `start_time` +===== `start_time` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/outputs/circonus.asciidoc b/docs/plugins/outputs/circonus.asciidoc index 82840cdc9..ad1b0d39d 100644 --- a/docs/plugins/outputs/circonus.asciidoc +++ b/docs/plugins/outputs/circonus.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2023-05-30 :changelog_url: https://github.com/logstash-plugins/logstash-output-circonus/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -44,7 +44,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-annotation"] -===== `annotation` +===== `annotation` * This is a required setting. * Value type is <> @@ -71,7 +71,7 @@ or ----- [id="plugins-{type}s-{plugin}-api_token"] -===== `api_token` +===== `api_token` * This is a required setting. 
* Value type is <> @@ -80,7 +80,7 @@ or Your Circonus API Token [id="plugins-{type}s-{plugin}-app_name"] -===== `app_name` +===== `app_name` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/cloudwatch.asciidoc b/docs/plugins/outputs/cloudwatch.asciidoc index d0453147d..af5864116 100644 --- a/docs/plugins/outputs/cloudwatch.asciidoc +++ b/docs/plugins/outputs/cloudwatch.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -117,7 +117,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -131,7 +131,7 @@ This plugin uses the AWS SDK and supports several ways to get credentials, which 5. IAM Instance Profile (available when running inside EC2) [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. @@ -149,7 +149,7 @@ file should look like this: [id="plugins-{type}s-{plugin}-batch_size"] -===== `batch_size` +===== `batch_size` * Value type is <> * Default value is `20` @@ -157,7 +157,7 @@ file should look like this: How many data points can be given in one call to the CloudWatch API [id="plugins-{type}s-{plugin}-dimensions"] -===== `dimensions` +===== `dimensions` * Value type is <> * There is no default value for this setting. @@ -165,7 +165,7 @@ How many data points can be given in one call to the CloudWatch API The default dimensions [ name, value, ... 
] to use for events which do not have a `CW_dimensions` field [id="plugins-{type}s-{plugin}-field_dimensions"] -===== `field_dimensions` +===== `field_dimensions` * Value type is <> * Default value is `"CW_dimensions"` @@ -179,7 +179,7 @@ or, equivalently... `add_field => [ "CW_dimensions", "prod" ]` [id="plugins-{type}s-{plugin}-field_metricname"] -===== `field_metricname` +===== `field_metricname` * Value type is <> * Default value is `"CW_metricname"` @@ -191,7 +191,7 @@ plugin on your logstash indexer can serve all events (which of course had fields set on your logstash shippers.) [id="plugins-{type}s-{plugin}-field_namespace"] -===== `field_namespace` +===== `field_namespace` * Value type is <> * Default value is `"CW_namespace"` @@ -202,7 +202,7 @@ so setting different namespaces will increase the number of API calls and those cost money. [id="plugins-{type}s-{plugin}-field_unit"] -===== `field_unit` +===== `field_unit` * Value type is <> * Default value is `"CW_unit"` @@ -210,7 +210,7 @@ and those cost money. The name of the field used to set the unit on an event metric [id="plugins-{type}s-{plugin}-field_value"] -===== `field_value` +===== `field_value` * Value type is <> * Default value is `"CW_value"` @@ -218,7 +218,7 @@ The name of the field used to set the unit on an event metric The name of the field used to set the value (float) on an event metric [id="plugins-{type}s-{plugin}-metricname"] -===== `metricname` +===== `metricname` * Value type is <> * There is no default value for this setting. 
@@ -230,7 +230,7 @@ will probably want to also restrict events from passing through this output usin type, tag, and field matching [id="plugins-{type}s-{plugin}-namespace"] -===== `namespace` +===== `namespace` * Value type is <> * Default value is `"Logstash"` @@ -238,7 +238,7 @@ type, tag, and field matching The default namespace to use for events which do not have a `CW_namespace` field [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. @@ -246,7 +246,7 @@ The default namespace to use for events which do not have a `CW_namespace` field URI to proxy server if required [id="plugins-{type}s-{plugin}-queue_size"] -===== `queue_size` +===== `queue_size` * Value type is <> * Default value is `10000` @@ -255,7 +255,7 @@ How many events to queue before forcing a call to the CloudWatch API ahead of `t Set this to the number of events-per-timeframe you will be sending to CloudWatch to avoid extra API calls [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value can be any of: `us-east-1`, `us-east-2`, `us-west-1`, `us-west-2`, `eu-central-1`, `eu-west-1`, `eu-west-2`, `ap-southeast-1`, `ap-southeast-2`, `ap-northeast-1`, `ap-northeast-2`, `sa-east-1`, `us-gov-west-1`, `cn-north-1`, `ap-south-1`, `ca-central-1` * Default value is `"us-east-1"` @@ -263,7 +263,7 @@ Set this to the number of events-per-timeframe you will be sending to CloudWatch The AWS Region [id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -271,7 +271,7 @@ The AWS Region The AWS Secret Access Key [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. 
@@ -279,7 +279,7 @@ The AWS Secret Access Key The AWS Session token for temporary credential [id="plugins-{type}s-{plugin}-timeframe"] -===== `timeframe` +===== `timeframe` * Value type is <> * Default value is `"1m"` @@ -296,7 +296,7 @@ We only call the API if there is data to send. See the Rufus Scheduler docs for an https://github.com/jmettraux/rufus-scheduler#the-time-strings-understood-by-rufus-scheduler[explanation of allowed values] [id="plugins-{type}s-{plugin}-unit"] -===== `unit` +===== `unit` * Value can be any of: `Seconds`, `Microseconds`, `Milliseconds`, `Bytes`, `Kilobytes`, `Megabytes`, `Gigabytes`, `Terabytes`, `Bits`, `Kilobits`, `Megabits`, `Gigabits`, `Terabits`, `Percent`, `Count`, `Bytes/Second`, `Kilobytes/Second`, `Megabytes/Second`, `Gigabytes/Second`, `Terabytes/Second`, `Bits/Second`, `Kilobits/Second`, `Megabits/Second`, `Gigabits/Second`, `Terabits/Second`, `Count/Second`, `None` * Default value is `"Count"` @@ -315,7 +315,7 @@ For cases where the default certificates are unavailable, e.g. Windows, you can set this to `true`. [id="plugins-{type}s-{plugin}-value"] -===== `value` +===== `value` * Value type is <> * Default value is `"1"` diff --git a/docs/plugins/outputs/csv.asciidoc b/docs/plugins/outputs/csv.asciidoc index e90261302..ea78bcd89 100644 --- a/docs/plugins/outputs/csv.asciidoc +++ b/docs/plugins/outputs/csv.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.10 :release_date: 2023-12-19 :changelog_url: https://github.com/logstash-plugins/logstash-output-csv/blob/v3.0.10/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,16 +54,16 @@ output plugins.   
[id="plugins-{type}s-{plugin}-create_if_deleted"] -===== `create_if_deleted` +===== `create_if_deleted` * Value type is <> * Default value is `true` -If the configured file is deleted, but an event is handled by the plugin, +If the configured file is deleted, but an event is handled by the plugin, the plugin will recreate the file. Default => true [id="plugins-{type}s-{plugin}-csv_options"] -===== `csv_options` +===== `csv_options` * Value type is <> * Default value is `{}` @@ -73,7 +73,7 @@ Full documentation is available on the http://ruby-doc.org/stdlib-2.0.0/libdoc/c A typical use case would be to use alternative column or row separators eg: `csv_options => {"col_sep" => "\t" "row_sep" => "\r\n"}` gives tab separated data with windows line endings [id="plugins-{type}s-{plugin}-dir_mode"] -===== `dir_mode` +===== `dir_mode` * Value type is <> * Default value is `-1` @@ -84,7 +84,7 @@ Setting it to -1 uses default OS value. Example: `"dir_mode" => 0750` [id="plugins-{type}s-{plugin}-fields"] -===== `fields` +===== `fields` * This is a required setting. * Value type is <> @@ -96,7 +96,7 @@ If a field does not exist on the event, an empty string will be written. Supports field reference syntax eg: `fields => ["field1", "[nested][field]"]`. [id="plugins-{type}s-{plugin}-file_mode"] -===== `file_mode` +===== `file_mode` * Value type is <> * Default value is `-1` @@ -107,7 +107,7 @@ Setting it to -1 uses default OS value. Example: `"file_mode" => 0640` [id="plugins-{type}s-{plugin}-filename_failure"] -===== `filename_failure` +===== `filename_failure` * Value type is <> * Default value is `"_filepath_failures"` @@ -116,7 +116,7 @@ If the generated path is invalid, the events will be saved into this file and inside the defined path. [id="plugins-{type}s-{plugin}-flush_interval"] -===== `flush_interval` +===== `flush_interval` * Value type is <> * Default value is `2` @@ -125,7 +125,7 @@ Flush interval (in seconds) for flushing writes to log files. 
0 will flush on every message. [id="plugins-{type}s-{plugin}-gzip"] -===== `gzip` +===== `gzip` * Value type is <> * Default value is `false` @@ -133,7 +133,7 @@ Flush interval (in seconds) for flushing writes to log files. Gzip the output stream before writing to disk. [id="plugins-{type}s-{plugin}-path"] -===== `path` +===== `path` * This is a required setting. * Value type is <> @@ -167,7 +167,7 @@ If you use an absolute path you cannot start with a dynamic string. E.g: `/%{myfield}/`, `/test-%{myfield}/` are not valid paths [id="plugins-{type}s-{plugin}-spreadsheet_safe"] -===== `spreadsheet_safe` +===== `spreadsheet_safe` * Value type is <> * Default value is `true` diff --git a/docs/plugins/outputs/datadog.asciidoc b/docs/plugins/outputs/datadog.asciidoc index f0ca6572d..43b25451b 100644 --- a/docs/plugins/outputs/datadog.asciidoc +++ b/docs/plugins/outputs/datadog.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2023-05-31 :changelog_url: https://github.com/logstash-plugins/logstash-output-datadog/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -52,7 +52,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-alert_type"] -===== `alert_type` +===== `alert_type` * Value can be any of: `info`, `error`, `warning`, `success` * There is no default value for this setting. @@ -60,7 +60,7 @@ output plugins. Alert type [id="plugins-{type}s-{plugin}-api_key"] -===== `api_key` +===== `api_key` * This is a required setting. * Value type is <> @@ -69,7 +69,7 @@ Alert type Your DatadogHQ API key [id="plugins-{type}s-{plugin}-date_happened"] -===== `date_happened` +===== `date_happened` * Value type is <> * There is no default value for this setting. 
@@ -77,7 +77,7 @@ Your DatadogHQ API key Date Happened [id="plugins-{type}s-{plugin}-dd_tags"] -===== `dd_tags` +===== `dd_tags` * Value type is <> * There is no default value for this setting. @@ -87,7 +87,7 @@ Set any custom tags for this event Default are the Logstash tags if any [id="plugins-{type}s-{plugin}-priority"] -===== `priority` +===== `priority` * Value can be any of: `normal`, `low` * There is no default value for this setting. @@ -95,7 +95,7 @@ Default are the Logstash tags if any Priority [id="plugins-{type}s-{plugin}-source_type_name"] -===== `source_type_name` +===== `source_type_name` * Value can be any of: `nagios`, `hudson`, `jenkins`, `user`, `my apps`, `feed`, `chef`, `puppet`, `git`, `bitbucket`, `fabric`, `capistrano` * Default value is `"my apps"` @@ -103,7 +103,7 @@ Priority Source type name [id="plugins-{type}s-{plugin}-text"] -===== `text` +===== `text` * Value type is <> * Default value is `"%{message}"` @@ -111,7 +111,7 @@ Source type name Text [id="plugins-{type}s-{plugin}-title"] -===== `title` +===== `title` * Value type is <> * Default value is `"Logstash event for %{host}"` diff --git a/docs/plugins/outputs/datadog_metrics.asciidoc b/docs/plugins/outputs/datadog_metrics.asciidoc index fa6b42010..7f19fbcb1 100644 --- a/docs/plugins/outputs/datadog_metrics.asciidoc +++ b/docs/plugins/outputs/datadog_metrics.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2024-10-25 :changelog_url: https://github.com/logstash-plugins/logstash-output-datadog_metrics/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -53,7 +53,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-api_key"] -===== `api_key` +===== `api_key` * This is a required setting. * Value type is <> @@ -62,7 +62,7 @@ output plugins. 
Your DatadogHQ API key. https://app.datadoghq.com/account/settings#api [id="plugins-{type}s-{plugin}-api_url"] -===== `api_url` +===== `api_url` * Value type is <> * Default value is `"https://api.datadoghq.com/api/v1/series"` @@ -70,7 +70,7 @@ Your DatadogHQ API key. https://app.datadoghq.com/account/settings#api Set the api endpoint for Datadog EU Site users [id="plugins-{type}s-{plugin}-dd_tags"] -===== `dd_tags` +===== `dd_tags` * Value type is <> * There is no default value for this setting. @@ -79,7 +79,7 @@ Set any custom tags for this event, default are the Logstash tags if any. [id="plugins-{type}s-{plugin}-device"] -===== `device` +===== `device` * Value type is <> * Default value is `"%{metric_device}"` @@ -87,7 +87,7 @@ default are the Logstash tags if any. The name of the device that produced the metric. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"%{host}"` @@ -95,7 +95,7 @@ The name of the device that produced the metric. The name of the host that produced the metric. [id="plugins-{type}s-{plugin}-metric_name"] -===== `metric_name` +===== `metric_name` * Value type is <> * Default value is `"%{metric_name}"` @@ -103,7 +103,7 @@ The name of the host that produced the metric. The name of the time series. [id="plugins-{type}s-{plugin}-metric_type"] -===== `metric_type` +===== `metric_type` * Value can be any of: `gauge`, `counter`, `%{metric_type}` * Default value is `"%{metric_type}"` @@ -111,7 +111,7 @@ The name of the time series. The type of the metric. [id="plugins-{type}s-{plugin}-metric_value"] -===== `metric_value` +===== `metric_value` * Value type is <> * Default value is `"%{metric_value}"` @@ -119,7 +119,7 @@ The type of the metric. The value. 
[id="plugins-{type}s-{plugin}-queue_size"] -===== `queue_size` +===== `queue_size` * Value type is <> * Default value is `10` @@ -128,7 +128,7 @@ How many events to queue before flushing to Datadog prior to schedule set in `@timeframe` [id="plugins-{type}s-{plugin}-timeframe"] -===== `timeframe` +===== `timeframe` * Value type is <> * Default value is `10` diff --git a/docs/plugins/outputs/dynatrace.asciidoc b/docs/plugins/outputs/dynatrace.asciidoc index 0c01337b1..3c2159ec5 100644 --- a/docs/plugins/outputs/dynatrace.asciidoc +++ b/docs/plugins/outputs/dynatrace.asciidoc @@ -6,7 +6,7 @@ REPLACES GENERATED VARIABLES /////////////////////////////////////////// :changelog_url: https://github.com/dynatrace-oss/logstash-output-dynatrace/blob/master/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include :gem: https://rubygems.org/gems/logstash-output-dynatrace /////////////////////////////////////////// END - REPLACES GENERATED VARIABLES @@ -32,10 +32,10 @@ For plugins not bundled by default, it is easy to install by running ==== Description -This plugin sends Logstash events to the Dynatrace Generic log ingest API v2. +This plugin sends Logstash events to the Dynatrace Generic log ingest API v2. ==== Documentation - + https://github.com/dynatrace-oss/logstash-output-dynatrace/blob/main/docs/index.asciidoc[ Documentation] for the logstash-{type}-{plugin} plugin is maintained by the creator. @@ -43,4 +43,3 @@ Documentation] for the logstash-{type}-{plugin} plugin is maintained by the crea This is a third-party plugin. For bugs or feature requests, open an issue in the https://github.com/dynatrace-oss/logstash-output-dynatrace[plugins-{type}s-{plugin} Github repo]. 
- diff --git a/docs/plugins/outputs/elastic_app_search.asciidoc b/docs/plugins/outputs/elastic_app_search.asciidoc index 0c5e6842a..29d1afa9b 100644 --- a/docs/plugins/outputs/elastic_app_search.asciidoc +++ b/docs/plugins/outputs/elastic_app_search.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.0 :release_date: 2023-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-integration-elastic_enterprise_search/blob/v3.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/elastic_workplace_search.asciidoc b/docs/plugins/outputs/elastic_workplace_search.asciidoc index 008438848..7b8e60e96 100644 --- a/docs/plugins/outputs/elastic_workplace_search.asciidoc +++ b/docs/plugins/outputs/elastic_workplace_search.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.0 :release_date: 2023-11-07 :changelog_url: https://github.com/logstash-plugins/logstash-integration-elastic_enterprise_search/blob/v3.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -228,7 +228,7 @@ To keep the timestamp field, set this value to the name of the field where you w * Value type is <> * Default value is `http://localhost:3002` -The value of the API endpoint in the form of a URL. +The value of the API endpoint in the form of a URL. 
**Examples** @@ -236,7 +236,7 @@ On premise instance: `http://workplace.company.com:3002` -Elastic Cloud instance: +Elastic Cloud instance: `https://7c455f508468426cb53912be65548117.ent-search.eu-west-1.aws.cloud.es.io` diff --git a/docs/plugins/outputs/elasticsearch.asciidoc b/docs/plugins/outputs/elasticsearch.asciidoc index 83a2e05ec..e96a3ed41 100644 --- a/docs/plugins/outputs/elasticsearch.asciidoc +++ b/docs/plugins/outputs/elasticsearch.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v12.0.2 :release_date: 2025-01-23 :changelog_url: https://github.com/logstash-plugins/logstash-output-elasticsearch/blob/v12.0.2/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -51,15 +51,15 @@ connecting to Elasticsearch 7.x. You can use this plugin to send your {ls} data to {es-serverless}. Some differences to note between {es-serverless} and self-managed {es}: -* Use *API keys* to access {serverless-full} from {ls}. +* Use *API keys* to access {serverless-full} from {ls}. Any user-based security settings in your <> configuration are ignored and may cause errors. -* {es-serverless} uses *data streams* and {ref}/data-stream-lifecycle.html[{dlm} ({dlm-init})] instead of {ilm} ({ilm-init}). +* {es-serverless} uses *data streams* and {ref}/data-stream-lifecycle.html[{dlm} ({dlm-init})] instead of {ilm} ({ilm-init}). Any {ilm-init} settings in your <> configuration are ignored and may cause errors. * *{ls} monitoring* is available through the https://github.com/elastic/integrations/blob/main/packages/logstash/_dev/build/docs/README.md[{ls} Integration] in {serverless-docs}/observability/what-is-observability-serverless[Elastic Observability] on {serverless-full}. 
.Known issue for {ls} to {es-serverless} **** -The logstash-output-elasticsearch `hosts` setting on {serverless-short} defaults the port to 9200 when omitted. +The logstash-output-elasticsearch `hosts` setting on {serverless-short} defaults the port to 9200 when omitted. Set the value to port :443 instead. **** @@ -94,11 +94,11 @@ Use the data stream options for indexing time series datasets (such as logs, metrics, and events) into {es} and {es-serverless}: * <> -* <> -* <> +* <> +* <> * <> * <> -* <> +* <> IMPORTANT: <> must be enabled (set to `v1` or `v8`) for data streams to work properly. @@ -139,7 +139,7 @@ output { ==== Writing to different indices: best practices -NOTE: You cannot use dynamic variable substitution when `ilm_enabled` is `true` +NOTE: You cannot use dynamic variable substitution when `ilm_enabled` is `true` and when using `ilm_rollover_alias`. If you're sending events to the same Elasticsearch cluster, but you're targeting different indices you can: @@ -232,7 +232,7 @@ original events causing the mapping errors are stored in a file that can be processed at a later time. Often times, the offending field can be removed and re-indexed to Elasticsearch. If the DLQ is not enabled, and a mapping error happens, the problem is logged as a warning, and the event is dropped. See -<> for more information about processing events in the DLQ. +{logstash-ref}/dead-letter-queues.html[dead-letter-queue (DLQ)] for more information about processing events in the DLQ. The list of error codes accepted for DLQ could be customized with <> but should be used only in motivated cases. @@ -241,7 +241,7 @@ but should be used only in motivated cases. [NOTE] -- -* The {ilm-cap} ({ilm-init}) feature does not apply for {es-serverless}. +* The {ilm-cap} ({ilm-init}) feature does not apply for {es-serverless}. Any {ilm-init} settings in your plugin configuration are ignored and may cause errors. * The {ilm-init} feature requires plugin version `9.3.1` or higher. 
* This feature requires an {es} instance of 6.6.0 or higher with at least a Basic license @@ -435,7 +435,7 @@ output plugins. ===== `action` * Value type is <> - * Default value is `create` for data streams, and `index` for non-time series data. + * Default value is `create` for data streams, and `index` for non-time series data. The Elasticsearch action to perform. Valid actions are: @@ -446,7 +446,7 @@ The Elasticsearch action to perform. Valid actions are: document if not already present. See the `doc_as_upsert` option. NOTE: This does not work and is not supported in Elasticsearch 1.x. Please upgrade to ES 2.x or greater to use this feature with Logstash! - A sprintf style string to change the action based on the content of the event. The value `%{[foo]}` - would use the foo field for the action. + would use the foo field for the action. If resolved action is not in [`index`, `delete`, `create`, `update`], the event will not be sent to {es}. Instead the event will be sent to the pipeline's {logstash-ref}/dead-letter-queues.html[dead-letter-queue (DLQ)] (if enabled), or it will be logged and dropped. @@ -815,8 +815,8 @@ NOTE: `ilm_rollover_alias` does NOT support dynamic variable substitution as ** ECS Compatibility disabled: `"logstash-%{+yyyy.MM.dd}"` ** ECS Compatibility enabled: `"ecs-logstash-%{+yyyy.MM.dd}"` -The indexing target to write events to. -Can point to an {ref}/index-mgmt.html[index], {ref}/aliases.html[alias], or {ref}/data-streams.html[data stream]. +The indexing target to write events to. +Can point to an {ref}/index-mgmt.html[index], {ref}/aliases.html[alias], or {ref}/data-streams.html[data stream]. This can be dynamic using the `%{foo}` syntax. The default value will partition your indices by day so you can more easily delete old data or only search specific date ranges. 
diff --git a/docs/plugins/outputs/email.asciidoc b/docs/plugins/outputs/email.asciidoc index a7619b1c6..5f7ce5094 100644 --- a/docs/plugins/outputs/email.asciidoc +++ b/docs/plugins/outputs/email.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.1.3 :release_date: 2023-10-03 :changelog_url: https://github.com/logstash-plugins/logstash-output-email/blob/v4.1.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -79,7 +79,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-address"] -===== `address` +===== `address` * Value type is <> * Default value is `"localhost"` @@ -87,7 +87,7 @@ output plugins. The address used to connect to the mail server [id="plugins-{type}s-{plugin}-attachments"] -===== `attachments` +===== `attachments` * Value type is <> * Default value is `[]` @@ -95,7 +95,7 @@ The address used to connect to the mail server Attachments - specify the name(s) and location(s) of the files. [id="plugins-{type}s-{plugin}-authentication"] -===== `authentication` +===== `authentication` * Value type is <> * There is no default value for this setting. @@ -103,7 +103,7 @@ Attachments - specify the name(s) and location(s) of the files. Authentication method used when identifying with the server [id="plugins-{type}s-{plugin}-body"] -===== `body` +===== `body` * Value type is <> * Default value is `""` @@ -111,7 +111,7 @@ Authentication method used when identifying with the server Body for the email - plain text only. [id="plugins-{type}s-{plugin}-cc"] -===== `cc` +===== `cc` * Value type is <> * There is no default value for this setting. 
@@ -122,7 +122,7 @@ This field also accepts a comma-separated string of addresses, for example: `"me@example.com, you@example.com"` [id="plugins-{type}s-{plugin}-bcc"] -===== `bcc` +===== `bcc` * Value type is <> * There is no default value for this setting. @@ -132,7 +132,7 @@ The fully-qualified email address(es) to include as bcc: address(es). This field accepts several addresses like cc. [id="plugins-{type}s-{plugin}-contenttype"] -===== `contenttype` +===== `contenttype` * Value type is <> * Default value is `"text/html; charset=UTF-8"` @@ -141,7 +141,7 @@ contenttype : for multipart messages, set the content-type and/or charset of the NOTE: this may not be functional (KH) [id="plugins-{type}s-{plugin}-debug"] -===== `debug` +===== `debug` * Value type is <> * Default value is `false` @@ -149,7 +149,7 @@ NOTE: this may not be functional (KH) Run the mail relay in debug mode [id="plugins-{type}s-{plugin}-domain"] -===== `domain` +===== `domain` * Value type is <> * Default value is `"localhost"` @@ -159,7 +159,7 @@ to a remote SMTP server. Some servers require this name to match the actual hostname of the connecting client. [id="plugins-{type}s-{plugin}-from"] -===== `from` +===== `from` * Value type is <> * Default value is `"logstash.alert@example.com"` @@ -167,7 +167,7 @@ actual hostname of the connecting client. The fully-qualified email address for the From: field in the email. [id="plugins-{type}s-{plugin}-htmlbody"] -===== `htmlbody` +===== `htmlbody` * Value type is <> * Default value is `""` @@ -175,7 +175,7 @@ The fully-qualified email address for the From: field in the email. HTML Body for the email, which may contain HTML markup. [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -183,7 +183,7 @@ HTML Body for the email, which may contain HTML markup. 
Password to authenticate with the server [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `25` @@ -191,7 +191,7 @@ Password to authenticate with the server Port used to communicate with the mail server [id="plugins-{type}s-{plugin}-replyto"] -===== `replyto` +===== `replyto` * Value type is <> * There is no default value for this setting. @@ -199,7 +199,7 @@ Port used to communicate with the mail server The fully qualified email address for the Reply-To: field. [id="plugins-{type}s-{plugin}-subject"] -===== `subject` +===== `subject` * Value type is <> * Default value is `""` @@ -207,7 +207,7 @@ The fully qualified email address for the Reply-To: field. Subject: for the email. [id="plugins-{type}s-{plugin}-to"] -===== `to` +===== `to` * This is a required setting. * Value type is <> @@ -221,7 +221,7 @@ This field also accepts a comma-separated string of addresses, for example: You can also use dynamic fields from the event with the `%{fieldname}` syntax. [id="plugins-{type}s-{plugin}-use_tls"] -===== `use_tls` +===== `use_tls` * Value type is <> * Default value is `false` @@ -229,7 +229,7 @@ You can also use dynamic fields from the event with the `%{fieldname}` syntax. Enables TLS when communicating with the server [id="plugins-{type}s-{plugin}-username"] -===== `username` +===== `username` * Value type is <> * There is no default value for this setting. @@ -237,7 +237,7 @@ Enables TLS when communicating with the server Username to authenticate with the server [id="plugins-{type}s-{plugin}-via"] -===== `via` +===== `via` * Value type is <> * Default value is `"smtp"` @@ -245,7 +245,7 @@ Username to authenticate with the server How Logstash should send the email, either via SMTP or by invoking sendmail. [id="plugins-{type}s-{plugin}-template_file"] -===== `template_file` +===== `template_file` * Value type is <> * There is no default value for this setting. 
diff --git a/docs/plugins/outputs/exec.asciidoc b/docs/plugins/outputs/exec.asciidoc index 7a7e660e6..e8a5dbe10 100644 --- a/docs/plugins/outputs/exec.asciidoc +++ b/docs/plugins/outputs/exec.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-exec/blob/v3.1.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -64,7 +64,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-command"] -===== `command` +===== `command` * This is a required setting. * Value type is <> @@ -75,7 +75,7 @@ make it non blocking. This value can include `%{name}` and other dynamic strings. [id="plugins-{type}s-{plugin}-quiet"] -===== `quiet` +===== `quiet` * Value type is <> * Default value is `false` diff --git a/docs/plugins/outputs/file.asciidoc b/docs/plugins/outputs/file.asciidoc index d7980cf29..ad04cd4a8 100644 --- a/docs/plugins/outputs/file.asciidoc +++ b/docs/plugins/outputs/file.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.3.0 :release_date: 2020-04-27 :changelog_url: https://github.com/logstash-plugins/logstash-output-file/blob/v4.3.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/ganglia.asciidoc b/docs/plugins/outputs/ganglia.asciidoc index 237110bf1..aeabaec16 100644 --- a/docs/plugins/outputs/ganglia.asciidoc +++ b/docs/plugins/outputs/ganglia.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.6 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-ganglia/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -51,7 +51,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-group"] -===== `group` +===== `group` * Value type is <> * Default value is `""` @@ -59,7 +59,7 @@ output plugins. Metric group [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -67,7 +67,7 @@ Metric group The address of the ganglia server. [id="plugins-{type}s-{plugin}-lifetime"] -===== `lifetime` +===== `lifetime` * Value type is <> * Default value is `300` @@ -75,7 +75,7 @@ The address of the ganglia server. Lifetime in seconds of this metric [id="plugins-{type}s-{plugin}-max_interval"] -===== `max_interval` +===== `max_interval` * Value type is <> * Default value is `60` @@ -83,7 +83,7 @@ Lifetime in seconds of this metric Maximum time in seconds between gmetric calls for this metric. [id="plugins-{type}s-{plugin}-metric"] -===== `metric` +===== `metric` * This is a required setting. * Value type is <> @@ -92,7 +92,7 @@ Maximum time in seconds between gmetric calls for this metric. The metric to use. This supports dynamic strings like `%{host}` [id="plugins-{type}s-{plugin}-metric_type"] -===== `metric_type` +===== `metric_type` * Value can be any of: `string`, `int8`, `uint8`, `int16`, `uint16`, `int32`, `uint32`, `float`, `double` * Default value is `"uint8"` @@ -100,7 +100,7 @@ The metric to use. This supports dynamic strings like `%{host}` The type of value for this metric. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `8649` @@ -108,7 +108,7 @@ The type of value for this metric. The port to connect on your ganglia server. 
[id="plugins-{type}s-{plugin}-slope"] -===== `slope` +===== `slope` * Value can be any of: `zero`, `positive`, `negative`, `both`, `unspecified` * Default value is `"both"` @@ -116,7 +116,7 @@ The port to connect on your ganglia server. Metric slope, represents metric behavior [id="plugins-{type}s-{plugin}-units"] -===== `units` +===== `units` * Value type is <> * Default value is `""` @@ -125,7 +125,7 @@ Gmetric units for metric, such as "kb/sec" or "ms" or whatever unit this metric uses. [id="plugins-{type}s-{plugin}-value"] -===== `value` +===== `value` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/gelf.asciidoc b/docs/plugins/outputs/gelf.asciidoc index b3235fa0f..259d76c8d 100644 --- a/docs/plugins/outputs/gelf.asciidoc +++ b/docs/plugins/outputs/gelf.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.7 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-gelf/blob/v3.1.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -55,7 +55,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-chunksize"] -===== `chunksize` +===== `chunksize` * Value type is <> * Default value is `1420` @@ -63,7 +63,7 @@ output plugins. The chunksize. You usually don't need to change this. [id="plugins-{type}s-{plugin}-custom_fields"] -===== `custom_fields` +===== `custom_fields` * Value type is <> * Default value is `{}` @@ -74,7 +74,7 @@ e.g. `custom_fields => ['foo_field', 'some_value']` sets `_foo_field` = `some_value`. [id="plugins-{type}s-{plugin}-full_message"] -===== `full_message` +===== `full_message` * Value type is <> * Default value is `"%{message}"` @@ -82,7 +82,7 @@ sets `_foo_field` = `some_value`. The GELF full message. Dynamic values like `%{foo}` are permitted here. 
[id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -91,7 +91,7 @@ The GELF full message. Dynamic values like `%{foo}` are permitted here. Graylog2 server IP address or hostname. [id="plugins-{type}s-{plugin}-ignore_metadata"] -===== `ignore_metadata` +===== `ignore_metadata` * Value type is <> * Default value is `["@timestamp", "@version", "severity", "host", "source_host", "source_path", "short_message"]` @@ -100,7 +100,7 @@ Ignore these fields when `ship_metadata` is set. Typically this lists the fields used in dynamic values for GELF fields. [id="plugins-{type}s-{plugin}-level"] -===== `level` +===== `level` * Value type is <> * Default value is `["%{severity}", "INFO"]` @@ -118,7 +118,7 @@ are accepted: "emergency", "alert", "critical", "warning", "notice", and "informational". [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `12201` @@ -137,7 +137,7 @@ configured to use TCP instead. Values here can be either "TCP" or "UDP". [id="plugins-{type}s-{plugin}-sender"] -===== `sender` +===== `sender` * Value type is <> * Default value is `"%{host}"` @@ -148,7 +148,7 @@ want to use something other than the event's source host as the instead of the hostname. [id="plugins-{type}s-{plugin}-ship_metadata"] -===== `ship_metadata` +===== `ship_metadata` * Value type is <> * Default value is `true` @@ -158,7 +158,7 @@ to ship any fields in the event (such as those created by grok) in the GELF messages. These will be sent as underscored "additional fields". [id="plugins-{type}s-{plugin}-ship_tags"] -===== `ship_tags` +===== `ship_tags` * Value type is <> * Default value is `true` @@ -167,7 +167,7 @@ Ship tags within events. This will cause Logstash to ship the tags of an event as the field `\_tags`. 
[id="plugins-{type}s-{plugin}-short_message"] -===== `short_message` +===== `short_message` * Value type is <> * Default value is `"short_message"` diff --git a/docs/plugins/outputs/google_bigquery.asciidoc b/docs/plugins/outputs/google_bigquery.asciidoc index d7b2be89e..c6a340726 100644 --- a/docs/plugins/outputs/google_bigquery.asciidoc +++ b/docs/plugins/outputs/google_bigquery.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.6.0 :release_date: 2024-09-16 :changelog_url: https://github.com/logstash-plugins/logstash-output-google_bigquery/blob/v4.6.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/google_cloud_storage.asciidoc b/docs/plugins/outputs/google_cloud_storage.asciidoc index 5eae02fca..f5485f96a 100644 --- a/docs/plugins/outputs/google_cloud_storage.asciidoc +++ b/docs/plugins/outputs/google_cloud_storage.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v4.5.0 :release_date: 2024-09-16 :changelog_url: https://github.com/logstash-plugins/logstash-output-google_cloud_storage/blob/v4.5.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -113,7 +113,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-bucket"] -===== `bucket` +===== `bucket` * This is a required setting. * Value type is <> @@ -122,7 +122,7 @@ output plugins. GCS bucket name, without "gs://" or any other prefix. [id="plugins-{type}s-{plugin}-date_pattern"] -===== `date_pattern` +===== `date_pattern` * Value type is <> * Default value is `"%Y-%m-%dT%H:00"` @@ -131,7 +131,7 @@ Time pattern for log file, defaults to hourly files. 
Must Time.strftime patterns: www.ruby-doc.org/core-2.0/Time.html#method-i-strftime [id="plugins-{type}s-{plugin}-flush_interval_secs"] -===== `flush_interval_secs` +===== `flush_interval_secs` * Value type is <> * Default value is `2` @@ -140,7 +140,7 @@ Flush interval in seconds for flushing writes to log files. 0 will flush on every message. [id="plugins-{type}s-{plugin}-gzip"] -===== `gzip` +===== `gzip` * Value type is <> * Default value is `false` @@ -161,12 +161,12 @@ Gzip output stream when writing events to log files and set `Content-Encoding` t This will upload your files as `gzip` saving network and storage costs, but they will be transparently decompressed when you read them from the storage bucket. -See the Cloud Storage documentation on https://cloud.google.com/storage/docs/metadata#content-encoding[metadata] and +See the Cloud Storage documentation on https://cloud.google.com/storage/docs/metadata#content-encoding[metadata] and https://cloud.google.com/storage/docs/transcoding#content-type_vs_content-encoding[transcoding] for more information. **Note**: It is not recommended to use both `gzip_content_encoding` and `gzip`. -This compresses your file _twice_, will increase the work your machine does and makes +This compresses your file _twice_, will increase the work your machine does and makes the files larger than just compressing once. [id="plugins-{type}s-{plugin}-include_hostname"] @@ -210,7 +210,7 @@ web interface or with the following command: `gcloud iam service-accounts keys create key.json --iam-account my-sa-123@my-project-123.iam.gserviceaccount.com` [id="plugins-{type}s-{plugin}-key_password"] -===== `key_password` +===== `key_password` * Value type is <> * Default value is `"notasecret"` @@ -218,7 +218,7 @@ web interface or with the following command: **Deprecated** this feature is no longer used, the setting is now a part of <>. 
[id="plugins-{type}s-{plugin}-log_file_prefix"] -===== `log_file_prefix` +===== `log_file_prefix` * Value type is <> * Default value is `"logstash_gcs"` @@ -238,7 +238,7 @@ to the network bandwidth available and the latency between your server and Cloud Storage. [id="plugins-{type}s-{plugin}-max_file_size_kbytes"] -===== `max_file_size_kbytes` +===== `max_file_size_kbytes` * Value type is <> * Default value is `10000` @@ -246,7 +246,7 @@ Cloud Storage. Sets max file size in kbytes. 0 disable max file check. [id="plugins-{type}s-{plugin}-output_format"] -===== `output_format` +===== `output_format` * Value can be any of: `json`, `plain`, or no value * Default value is no value @@ -261,7 +261,7 @@ The event format you want to store in files. Defaults to plain text. Note: if you want to use a codec you MUST not set this value. [id="plugins-{type}s-{plugin}-service_account"] -===== `service_account` +===== `service_account` * This is a required setting. * Value type is <> @@ -270,7 +270,7 @@ Note: if you want to use a codec you MUST not set this value. **Deprecated** this feature is no longer used, the setting is now a part of <>. [id="plugins-{type}s-{plugin}-temp_directory"] -===== `temp_directory` +===== `temp_directory` * Value type is <> * Default value is `""` @@ -279,7 +279,7 @@ Directory where temporary files are stored. Defaults to /tmp/logstash-gcs- [id="plugins-{type}s-{plugin}-uploader_interval_secs"] -===== `uploader_interval_secs` +===== `uploader_interval_secs` * Value type is <> * Default value is `60` diff --git a/docs/plugins/outputs/google_pubsub.asciidoc b/docs/plugins/outputs/google_pubsub.asciidoc index 38a4ce8a0..a87f7ca7f 100644 --- a/docs/plugins/outputs/google_pubsub.asciidoc +++ b/docs/plugins/outputs/google_pubsub.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v1.2.0 :release_date: 2023-08-22 :changelog_url: https://github.com/logstash-plugins/logstash-output-google_pubsub/blob/v1.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/graphite.asciidoc b/docs/plugins/outputs/graphite.asciidoc index 662697a30..e616da969 100644 --- a/docs/plugins/outputs/graphite.asciidoc +++ b/docs/plugins/outputs/graphite.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.6 :release_date: 2018-07-11 :changelog_url: https://github.com/logstash-plugins/logstash-output-graphite/blob/v3.1.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -56,7 +56,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-exclude_metrics"] -===== `exclude_metrics` +===== `exclude_metrics` * Value type is <> * Default value is `["%{[^}]+}"]` @@ -64,7 +64,7 @@ output plugins. Exclude regex matched metric names, by default exclude unresolved %{field} strings. [id="plugins-{type}s-{plugin}-fields_are_metrics"] -===== `fields_are_metrics` +===== `fields_are_metrics` * Value type is <> * Default value is `false` @@ -74,7 +74,7 @@ and will be sent verbatim to Graphite. You may use either `fields_are_metrics` or `metrics`, but not both. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -82,7 +82,7 @@ or `metrics`, but not both. The hostname or IP address of the Graphite server. [id="plugins-{type}s-{plugin}-include_metrics"] -===== `include_metrics` +===== `include_metrics` * Value type is <> * Default value is `[".*"]` @@ -90,7 +90,7 @@ The hostname or IP address of the Graphite server. 
Include only regex matched metric names. [id="plugins-{type}s-{plugin}-metrics"] -===== `metrics` +===== `metrics` * Value type is <> * Default value is `{}` @@ -106,7 +106,7 @@ coerced will be set to zero (0). You may use either `metrics` or `fields_are_met but not both. [id="plugins-{type}s-{plugin}-metrics_format"] -===== `metrics_format` +===== `metrics_format` * Value type is <> * Default value is `"*"` @@ -119,7 +119,7 @@ replaced with the name of the actual metric. NOTE: If no metrics_format is defined, the name of the metric will be used as fallback. [id="plugins-{type}s-{plugin}-nested_object_separator"] -===== `nested_object_separator` +===== `nested_object_separator` * Value type is <> * Default value is `"."` @@ -136,7 +136,7 @@ but you still may want control over the separator within these nested key names. This config setting changes the separator from the '.' default. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `2003` @@ -144,7 +144,7 @@ This config setting changes the separator from the '.' default. The port to connect to on the Graphite server. [id="plugins-{type}s-{plugin}-reconnect_interval"] -===== `reconnect_interval` +===== `reconnect_interval` * Value type is <> * Default value is `2` @@ -152,7 +152,7 @@ The port to connect to on the Graphite server. Interval between reconnect attempts to Carbon. [id="plugins-{type}s-{plugin}-resend_on_failure"] -===== `resend_on_failure` +===== `resend_on_failure` * Value type is <> * Default value is `false` @@ -160,7 +160,7 @@ Interval between reconnect attempts to Carbon. Should metrics be resent on failure? 
[id="plugins-{type}s-{plugin}-timestamp_field"] -===== `timestamp_field` +===== `timestamp_field` * Value type is <> * Default value is `"@timestamp"` diff --git a/docs/plugins/outputs/graphtastic.asciidoc b/docs/plugins/outputs/graphtastic.asciidoc index 4f7ee8511..92eefcca5 100644 --- a/docs/plugins/outputs/graphtastic.asciidoc +++ b/docs/plugins/outputs/graphtastic.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-graphtastic/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -58,7 +58,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-batch_number"] -===== `batch_number` +===== `batch_number` * Value type is <> * Default value is `60` @@ -67,7 +67,7 @@ the number of metrics to send to GraphTastic at one time. 60 seems to be the per amount for UDP, with default packet size. [id="plugins-{type}s-{plugin}-context"] -===== `context` +===== `context` * Value type is <> * Default value is `"graphtastic"` @@ -80,7 +80,7 @@ Please consult your application server documentation for more on application contexts. [id="plugins-{type}s-{plugin}-error_file"] -===== `error_file` +===== `error_file` * Value type is <> * Default value is `""` @@ -91,7 +91,7 @@ on how we reintegrate these error metrics NOT IMPLEMENTED! [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"127.0.0.1"` @@ -99,7 +99,7 @@ NOT IMPLEMENTED! 
host for the graphtastic server - defaults to 127.0.0.1 [id="plugins-{type}s-{plugin}-integration"] -===== `integration` +===== `integration` * Value can be any of: `udp`, `tcp`, `rmi`, `rest` * Default value is `"udp"` @@ -107,7 +107,7 @@ host for the graphtastic server - defaults to 127.0.0.1 options are udp(fastest - default) - rmi(faster) - rest(fast) - tcp(don't use TCP yet - some problems - errors out on linux) [id="plugins-{type}s-{plugin}-metrics"] -===== `metrics` +===== `metrics` * Value type is <> * Default value is `{}` @@ -126,7 +126,7 @@ metrics => [ "Response", "%{response}" ] NOTE: you can also use the dynamic fields for the key value as well as the actual value [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * There is no default value for this setting. @@ -134,7 +134,7 @@ NOTE: you can also use the dynamic fields for the key value as well as the actua port for the graphtastic instance - defaults to 1199 for RMI, 1299 for TCP, 1399 for UDP, and 8080 for REST [id="plugins-{type}s-{plugin}-retries"] -===== `retries` +===== `retries` * Value type is <> * Default value is `1` diff --git a/docs/plugins/outputs/http.asciidoc b/docs/plugins/outputs/http.asciidoc index 1cf3fc735..b95368bba 100644 --- a/docs/plugins/outputs/http.asciidoc +++ b/docs/plugins/outputs/http.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v6.0.0 :release_date: 2024-11-21 :changelog_url: https://github.com/logstash-plugins/logstash-output-http/blob/v6.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -40,29 +40,29 @@ This output has two levels of retry: library and plugin. [id="plugins-{type}s-{plugin}-library_retry"] ===== Library retry -The library retry applies to IO related failures. +The library retry applies to IO related failures. 
Non retriable errors include SSL related problems, unresolvable hosts, connection issues, and OS/JVM level interruptions happening during a request. -The options for library retry are: +The options for library retry are: -* <>. +* <>. Controls the number of times the plugin should retry after failures at the library level. -* <>. +* <>. When set to `false`, GET, HEAD, PUT, DELETE, OPTIONS, and TRACE requests will be retried. [id="plugins-{type}s-{plugin}-plugin_retry"] ===== Plugin retry -The options for plugin level retry are: +The options for plugin level retry are: -* <>. +* <>. When set to `true`, the plugin retries indefinitely for HTTP error response codes defined in the <> option (429, 500, 502, 503, 504) and retryable exceptions (socket timeout/ error, DNS resolution failure and client protocol exception). -* <>. -Sets http response codes that trigger a retry. +* <>. +Sets http response codes that trigger a retry. NOTE: The `retry_failed` option does not control the library level retry. @@ -71,7 +71,7 @@ NOTE: The `retry_failed` option does not control the library level retry. This plugin supports the following configuration options plus the <> described later. -NOTE: As of version `6.0.0` of this plugin, a number of previously deprecated settings related to SSL have been removed. +NOTE: As of version `6.0.0` of this plugin, a number of previously deprecated settings related to SSL have been removed. Please check out <> for details. [cols="<,<,<",options="header",] @@ -120,19 +120,19 @@ output plugins.   [id="plugins-{type}s-{plugin}-automatic_retries"] -===== `automatic_retries` +===== `automatic_retries` * Value type is <> * Default value is `1` How many times should the client retry a failing URL. We recommend setting this option -to a value other than zero if the <> is enabled. +to a value other than zero if the <> is enabled. Some servers incorrectly end keepalives early, requiring a retry. See <> for more information. 
[id="plugins-{type}s-{plugin}-connect_timeout"] -===== `connect_timeout` +===== `connect_timeout` * Value type is <> * Default value is `10` @@ -140,7 +140,7 @@ See <> for more information. Timeout (in seconds) to wait for a connection to be established. Default is `10s` [id="plugins-{type}s-{plugin}-content_type"] -===== `content_type` +===== `content_type` * Value type is <> * There is no default value for this setting. @@ -154,7 +154,7 @@ If not specified, this defaults to the following: * if format is "form", "application/x-www-form-urlencoded" [id="plugins-{type}s-{plugin}-cookies"] -===== `cookies` +===== `cookies` * Value type is <> * Default value is `true` @@ -163,7 +163,7 @@ Enable cookie support. With this enabled the client will persist cookies across requests as a normal web browser would. Enabled by default [id="plugins-{type}s-{plugin}-follow_redirects"] -===== `follow_redirects` +===== `follow_redirects` * Value type is <> * Default value is `true` @@ -171,7 +171,7 @@ across requests as a normal web browser would. Enabled by default Should redirects be followed? Defaults to `true` [id="plugins-{type}s-{plugin}-format"] -===== `format` +===== `format` * Value can be any of: `json`, `json_batch`, `form`, `message` * Default value is `"json"` @@ -190,7 +190,7 @@ If message, then the body will be the result of formatting the event according t Otherwise, the event is sent as json. [id="plugins-{type}s-{plugin}-headers"] -===== `headers` +===== `headers` * Value type is <> * There is no default value for this setting. @@ -208,7 +208,7 @@ Enable request compression support. With this enabled the plugin will compress http requests using gzip. [id="plugins-{type}s-{plugin}-http_method"] -===== `http_method` +===== `http_method` * This is a required setting. * Value can be any of: `put`, `post`, `patch`, `delete`, `get`, `head` @@ -217,16 +217,16 @@ http requests using gzip. The HTTP Verb. 
One of "put", "post", "patch", "delete", "get", "head" [id="plugins-{type}s-{plugin}-ignorable_codes"] -===== `ignorable_codes` +===== `ignorable_codes` * Value type is <> * There is no default value for this setting. -If you would like to consider some non-2xx codes to be successes +If you would like to consider some non-2xx codes to be successes enumerate them here. Responses returning these codes will be considered successes [id="plugins-{type}s-{plugin}-keepalive"] -===== `keepalive` +===== `keepalive` * Value type is <> * Default value is `true` @@ -235,7 +235,7 @@ Turn this on to enable HTTP keepalive support. We highly recommend setting `auto one with this to fix interactions with broken keepalive implementations. [id="plugins-{type}s-{plugin}-mapping"] -===== `mapping` +===== `mapping` * Value type is <> * There is no default value for this setting. @@ -249,7 +249,7 @@ For example: "bar" => "%{type}"} [id="plugins-{type}s-{plugin}-message"] -===== `message` +===== `message` * Value type is <> * There is no default value for this setting. @@ -257,7 +257,7 @@ For example: [id="plugins-{type}s-{plugin}-pool_max"] -===== `pool_max` +===== `pool_max` * Value type is <> * Default value is `50` @@ -265,7 +265,7 @@ For example: Max number of concurrent connections. Defaults to `50` [id="plugins-{type}s-{plugin}-pool_max_per_route"] -===== `pool_max_per_route` +===== `pool_max_per_route` * Value type is <> * Default value is `25` @@ -273,7 +273,7 @@ Max number of concurrent connections. Defaults to `50` Max number of concurrent connections to a single host. Defaults to `25` [id="plugins-{type}s-{plugin}-proxy"] -===== `proxy` +===== `proxy` * Value type is <> * There is no default value for this setting. @@ -285,7 +285,7 @@ If you'd like to use an HTTP proxy . This supports multiple configuration syntax 3. 
Proxy host in form: `{url => 'http://proxy.org:1234', user => 'username@host', password => 'password'}` [id="plugins-{type}s-{plugin}-request_timeout"] -===== `request_timeout` +===== `request_timeout` * Value type is <> * Default value is `60` @@ -296,20 +296,20 @@ For an example of its usage see https://github.com/logstash-plugins/logstash-inp Timeout (in seconds) for the entire request [id="plugins-{type}s-{plugin}-retry_failed"] -===== `retry_failed` +===== `retry_failed` * Value type is <> * Default value is `true` -Note that this option controls plugin-level retries only. -It has no affect on library-level retries. +Note that this option controls plugin-level retries only. +It has no effect on library-level retries. -Set this option to `false` if you want to disable infinite retries for HTTP error response codes defined in the <> or +Set this option to `false` if you want to disable infinite retries for HTTP error response codes defined in the <> or retryable exceptions (Timeout, SocketException, ClientProtocolException, ResolutionFailure and SocketTimeout). See <> for more information. [id="plugins-{type}s-{plugin}-retry_non_idempotent"] -===== `retry_non_idempotent` +===== `retry_non_idempotent` * Value type is <> * Default value is `false` @@ -320,7 +320,7 @@ When set to `true` and `automatic_retries` is enabled, this will cause non-idemp See <> for more information. [id="plugins-{type}s-{plugin}-retryable_codes"] -===== `retryable_codes` +===== `retryable_codes` * Value type is <> * Default value is `[429, 500, 502, 503, 504]` @@ -329,7 +329,7 @@ If the plugin encounters these response codes, the plugin will retry indefinitel See <> for more information. [id="plugins-{type}s-{plugin}-socket_timeout"] -===== `socket_timeout` +===== `socket_timeout` * Value type is <> * Default value is `10` @@ -465,7 +465,7 @@ It is primarily intended as a temporary diagnostic mechanism when attempting to Using `none` in production environments is strongly discouraged. 
[id="plugins-{type}s-{plugin}-url"] -===== `url` +===== `url` * This is a required setting. * Value type is <> @@ -474,7 +474,7 @@ Using `none` in production environments is strongly discouraged. URL to use [id="plugins-{type}s-{plugin}-validate_after_inactivity"] -===== `validate_after_inactivity` +===== `validate_after_inactivity` * Value type is <> * Default value is `200` @@ -490,7 +490,7 @@ See https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache ==== HTTP Output Obsolete Configuration Options WARNING: As of version `6.0.0` of this plugin, some configuration options have been replaced. -The plugin will fail to start if it contains any of these obsolete options. +The plugin will fail to start if it contains any of these obsolete options. [cols="<,<",options="header",] diff --git a/docs/plugins/outputs/influxdb.asciidoc b/docs/plugins/outputs/influxdb.asciidoc index e56f7fa31..b5ac76513 100644 --- a/docs/plugins/outputs/influxdb.asciidoc +++ b/docs/plugins/outputs/influxdb.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v5.0.6 :release_date: 2021-06-07 :changelog_url: https://github.com/logstash-plugins/logstash-output-influxdb/blob/v5.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -68,7 +68,7 @@ output plugins.   
[id="plugins-{type}s-{plugin}-allow_time_override"] -===== `allow_time_override` +===== `allow_time_override` * Value type is <> * Default value is `false` @@ -83,7 +83,7 @@ Setting this to `true` allows you to explicitly set the `time` column yourself Note: **`time` must be an epoch value in either seconds, milliseconds or microseconds** [id="plugins-{type}s-{plugin}-coerce_values"] -===== `coerce_values` +===== `coerce_values` * Value type is <> * Default value is `{}` @@ -98,7 +98,7 @@ currently supported datatypes are `integer` and `float` [id="plugins-{type}s-{plugin}-data_points"] -===== `data_points` +===== `data_points` * This is a required setting. * Value type is <> @@ -111,7 +111,7 @@ Events for the same measurement will be batched together where possible Both keys and values support sprintf formatting [id="plugins-{type}s-{plugin}-db"] -===== `db` +===== `db` * Value type is <> * Default value is `"statistics"` @@ -119,22 +119,22 @@ Both keys and values support sprintf formatting The database to write - supports sprintf formatting [id="plugins-{type}s-{plugin}-exclude_fields"] -===== `exclude_fields` +===== `exclude_fields` * Value type is <> * Default value is `["@timestamp", "@version", "sequence", "message", "type"]` An array containing the names of fields from the event to exclude from the -data points +data points Events, in general, contain keys "@version" and "@timestamp". Other plugins -may add others that you'll want to exclude (such as "command" from the +may add others that you'll want to exclude (such as "command" from the exec plugin). This only applies when use_event_fields_for_data_points is true. [id="plugins-{type}s-{plugin}-flush_size"] -===== `flush_size` +===== `flush_size` * Value type is <> * Default value is `100` @@ -143,7 +143,7 @@ This setting controls how many events will be buffered before sending a batch of events. 
Note that these are only batched for the same measurement [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -152,7 +152,7 @@ of events. Note that these are only batched for the same measurement The hostname or IP address to reach your InfluxDB instance [id="plugins-{type}s-{plugin}-idle_flush_time"] -===== `idle_flush_time` +===== `idle_flush_time` * Value type is <> * Default value is `1` @@ -168,17 +168,17 @@ This helps keep both fast and slow log streams moving along in near-real-time. [id="plugins-{type}s-{plugin}-initial_delay"] -===== `initial_delay` +===== `initial_delay` * Value type is <> * Default value is `1` -The amount of time in seconds to delay the initial retry on connection failure. +The amount of time in seconds to delay the initial retry on connection failure. The delay will increase exponentially for each retry attempt (up to max_retries). [id="plugins-{type}s-{plugin}-max_retries"] -===== `max_retries` +===== `max_retries` * Value type is <> * Default value is `3` @@ -191,7 +191,7 @@ Otherwise it will retry up to the specified number of times. [id="plugins-{type}s-{plugin}-measurement"] -===== `measurement` +===== `measurement` * Value type is <> * Default value is `"logstash"` @@ -199,7 +199,7 @@ Otherwise it will retry up to the specified number of times. 
Measurement name - supports sprintf formatting [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * Default value is `nil` @@ -207,7 +207,7 @@ Measurement name - supports sprintf formatting The password for the user who access to the named database [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `8086` @@ -215,7 +215,7 @@ The password for the user who access to the named database The port for InfluxDB [id="plugins-{type}s-{plugin}-retention_policy"] -===== `retention_policy` +===== `retention_policy` * Value type is <> * Default value is `"autogen"` @@ -223,18 +223,18 @@ The port for InfluxDB The retention policy to use [id="plugins-{type}s-{plugin}-send_as_tags"] -===== `send_as_tags` +===== `send_as_tags` * Value type is <> * Default value is `["host"]` -An array containing the names of fields to send to Influxdb as tags instead +An array containing the names of fields to send to Influxdb as tags instead of fields. Influxdb 0.9 convention is that values that do not change every request should be considered metadata and given as tags. Tags are only sent when -present in `data_points` or if `use_event_fields_for_data_points` is `true`. +present in `data_points` or if `use_event_fields_for_data_points` is `true`. [id="plugins-{type}s-{plugin}-ssl"] -===== `ssl` +===== `ssl` * Value type is <> * Default value is `false` @@ -242,7 +242,7 @@ present in `data_points` or if `use_event_fields_for_data_points` is `true`. 
Enable SSL/TLS secured communication to InfluxDB [id="plugins-{type}s-{plugin}-time_precision"] -===== `time_precision` +===== `time_precision` * Value can be any of: `n`, `u`, `ms`, `s`, `m`, `h` * Default value is `"ms"` @@ -252,7 +252,7 @@ Set the level of precision of `time` only useful when overriding the time value [id="plugins-{type}s-{plugin}-use_event_fields_for_data_points"] -===== `use_event_fields_for_data_points` +===== `use_event_fields_for_data_points` * Value type is <> * Default value is `false` @@ -260,7 +260,7 @@ only useful when overriding the time value Automatically use fields from the event as the data points sent to Influxdb [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * Default value is `nil` diff --git a/docs/plugins/outputs/irc.asciidoc b/docs/plugins/outputs/irc.asciidoc index 7e5c4a3ba..57b2385cd 100644 --- a/docs/plugins/outputs/irc.asciidoc +++ b/docs/plugins/outputs/irc.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-irc/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -53,7 +53,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-channels"] -===== `channels` +===== `channels` * This is a required setting. * Value type is <> @@ -65,7 +65,7 @@ These should be full channel names including the '#' symbol, such as "#logstash". [id="plugins-{type}s-{plugin}-format"] -===== `format` +===== `format` * Value type is <> * Default value is `"%{message}"` @@ -73,7 +73,7 @@ These should be full channel names including the '#' symbol, such as Message format to send, event tokens are usable here [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. 
* Value type is <> @@ -82,7 +82,7 @@ Message format to send, event tokens are usable here Address of the host to connect to [id="plugins-{type}s-{plugin}-messages_per_second"] -===== `messages_per_second` +===== `messages_per_second` * Value type is <> * Default value is `0.5` @@ -90,7 +90,7 @@ Address of the host to connect to Limit the rate of messages sent to IRC in messages per second. [id="plugins-{type}s-{plugin}-nick"] -===== `nick` +===== `nick` * Value type is <> * Default value is `"logstash"` @@ -98,7 +98,7 @@ Limit the rate of messages sent to IRC in messages per second. IRC Nickname [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -106,7 +106,7 @@ IRC Nickname IRC server password [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `6667` @@ -114,7 +114,7 @@ IRC server password Port on host to connect to. [id="plugins-{type}s-{plugin}-post_string"] -===== `post_string` +===== `post_string` * Value type is <> * There is no default value for this setting. @@ -122,7 +122,7 @@ Port on host to connect to. Static string after event [id="plugins-{type}s-{plugin}-pre_string"] -===== `pre_string` +===== `pre_string` * Value type is <> * There is no default value for this setting. @@ -130,7 +130,7 @@ Static string after event Static string before event [id="plugins-{type}s-{plugin}-real"] -===== `real` +===== `real` * Value type is <> * Default value is `"logstash"` @@ -138,7 +138,7 @@ Static string before event IRC Real name [id="plugins-{type}s-{plugin}-secure"] -===== `secure` +===== `secure` * Value type is <> * Default value is `false` @@ -146,7 +146,7 @@ IRC Real name Set this to true to enable SSL. 
[id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * Default value is `"logstash"` diff --git a/docs/plugins/outputs/juggernaut.asciidoc b/docs/plugins/outputs/juggernaut.asciidoc index e6b710211..b2d6aa487 100644 --- a/docs/plugins/outputs/juggernaut.asciidoc +++ b/docs/plugins/outputs/juggernaut.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-juggernaut/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -54,7 +54,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-channels"] -===== `channels` +===== `channels` * This is a required setting. * Value type is <> @@ -64,7 +64,7 @@ List of channels to which to publish. Dynamic names are valid here, for example `logstash-%{type}`. [id="plugins-{type}s-{plugin}-db"] -===== `db` +===== `db` * Value type is <> * Default value is `0` @@ -72,7 +72,7 @@ valid here, for example `logstash-%{type}`. The redis database number. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"127.0.0.1"` @@ -80,7 +80,7 @@ The redis database number. The hostname of the redis server to which juggernaut is listening. [id="plugins-{type}s-{plugin}-message_format"] -===== `message_format` +===== `message_format` * Value type is <> * There is no default value for this setting. @@ -88,7 +88,7 @@ The hostname of the redis server to which juggernaut is listening. How should the message be formatted before pushing to the websocket. [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -96,7 +96,7 @@ How should the message be formatted before pushing to the websocket. 
Password to authenticate with. There is no authentication by default. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `6379` @@ -104,7 +104,7 @@ Password to authenticate with. There is no authentication by default. The port to connect on. [id="plugins-{type}s-{plugin}-timeout"] -===== `timeout` +===== `timeout` * Value type is <> * Default value is `5` diff --git a/docs/plugins/outputs/kafka.asciidoc b/docs/plugins/outputs/kafka.asciidoc index 3f668e451..6ec9a567a 100644 --- a/docs/plugins/outputs/kafka.asciidoc +++ b/docs/plugins/outputs/kafka.asciidoc @@ -12,7 +12,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v11.6.0 :release_date: 2025-01-07 :changelog_url: https://github.com/logstash-plugins/logstash-integration-kafka/blob/v11.6.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -25,7 +25,7 @@ include::{include_path}/plugin_header-integration.asciidoc[] ==== Description -Write events to a Kafka topic. +Write events to a Kafka topic. This plugin uses Kafka Client {kafka_client}. For broker compatibility, see the official @@ -43,7 +43,7 @@ This output supports connecting to Kafka over: By default security is disabled but can be turned on as needed. -The only required configuration is the topic_id. +The only required configuration is the topic_id. The default codec is plain. Logstash will encode your events with not only the message field but also with a timestamp and hostname. @@ -58,7 +58,7 @@ the codec in the output configuration like this: topic_id => "mytopic" } } - + For more information see https://kafka.apache.org/{kafka_client_doc}/documentation.html#theproducer @@ -133,7 +133,7 @@ output plugins.   
[id="plugins-{type}s-{plugin}-acks"] -===== `acks` +===== `acks` * Value can be any of: `0`, `1`, `all` * Default value is `"1"` @@ -145,12 +145,12 @@ before considering a request complete. `acks=1`. The leader will write the record to its local log, but will respond without waiting for full acknowledgement from all followers. - + `acks=all`. The leader will wait for the full set of in-sync replicas before acknowledging the record. [id="plugins-{type}s-{plugin}-batch_size"] -===== `batch_size` +===== `batch_size` * Value type is <> * Default value is `16384`. @@ -160,7 +160,7 @@ records are being sent to the same partition. This helps performance on both the and the server. This configuration controls the default batch size in bytes. [id="plugins-{type}s-{plugin}-bootstrap_servers"] -===== `bootstrap_servers` +===== `bootstrap_servers` * Value type is <> * Default value is `"localhost:9092"` @@ -172,7 +172,7 @@ established based on the broker information returned in the metadata. The format subset of brokers. [id="plugins-{type}s-{plugin}-buffer_memory"] -===== `buffer_memory` +===== `buffer_memory` * Value type is <> * Default value is `33554432` (32MB). @@ -180,7 +180,7 @@ subset of brokers. The total bytes of memory the producer can use to buffer records waiting to be sent to the server. [id="plugins-{type}s-{plugin}-client_dns_lookup"] -===== `client_dns_lookup` +===== `client_dns_lookup` * Value type is <> * Valid options are `use_all_dns_ips`, `resolve_canonical_bootstrap_servers_only`, `default` @@ -188,7 +188,7 @@ The total bytes of memory the producer can use to buffer records waiting to be s Controls how DNS lookups are done. If set to `use_all_dns_ips`, Logstash tries all IP addresses returned for a hostname before failing the connection. -If set to `resolve_canonical_bootstrap_servers_only`, each entry will be +If set to `resolve_canonical_bootstrap_servers_only`, each entry will be resolved and expanded into a list of canonical names. 
[NOTE] @@ -198,7 +198,7 @@ If not explicitly configured it defaults to `use_all_dns_ips`. ==== [id="plugins-{type}s-{plugin}-client_id"] -===== `client_id` +===== `client_id` * Value type is <> * Default value is `"logstash"` @@ -208,7 +208,7 @@ The purpose of this is to be able to track the source of requests beyond just ip/port by allowing a logical application name to be included with the request [id="plugins-{type}s-{plugin}-compression_type"] -===== `compression_type` +===== `compression_type` * Value can be any of: `none`, `gzip`, `snappy`, `lz4`, `zstd` * Default value is `"none"` @@ -225,12 +225,12 @@ The default is none (meaning no compression). Valid values are none, gzip, snapp Close idle connections after the number of milliseconds specified by this config. [id="plugins-{type}s-{plugin}-jaas_path"] -===== `jaas_path` +===== `jaas_path` * Value type is <> * There is no default value for this setting. -The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization +The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization services for Kafka. This setting provides the path to the JAAS file. Sample JAAS file for Kafka client: [source,java] ---------------------------------- @@ -242,13 +242,13 @@ KafkaClient { }; ---------------------------------- -Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these -to the global JVM system properties. This means if you have multiple Kafka inputs, all of them would be sharing the same -`jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on +Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these +to the global JVM system properties. This means if you have multiple Kafka inputs, all of them would be sharing the same +`jaas_path` and `kerberos_config`. 
If this is not desirable, you would have to run separate instances of Logstash on different JVM instances. [id="plugins-{type}s-{plugin}-kerberos_config"] -===== `kerberos_config` +===== `kerberos_config` * Value type is <> * There is no default value for this setting. @@ -256,7 +256,7 @@ different JVM instances. Optional path to kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html [id="plugins-{type}s-{plugin}-key_serializer"] -===== `key_serializer` +===== `key_serializer` * Value type is <> * Default value is `"org.apache.kafka.common.serialization.StringSerializer"` @@ -264,7 +264,7 @@ Optional path to kerberos config file. This is krb5.conf style as detailed in ht Serializer class for the key of the message [id="plugins-{type}s-{plugin}-linger_ms"] -===== `linger_ms` +===== `linger_ms` * Value type is <> * Default value is `0` @@ -278,7 +278,7 @@ rather than immediately sending out a record the producer will wait for up to th to allow other records to be sent so that the sends can be batched together. [id="plugins-{type}s-{plugin}-max_request_size"] -===== `max_request_size` +===== `max_request_size` * Value type is <> * Default value is `1048576` (1MB). @@ -302,7 +302,7 @@ Example: ---------------------------------- [id="plugins-{type}s-{plugin}-message_key"] -===== `message_key` +===== `message_key` * Value type is <> * There is no default value for this setting. @@ -310,7 +310,7 @@ Example: The key for the message. [id="plugins-{type}s-{plugin}-metadata_fetch_timeout_ms"] -===== `metadata_fetch_timeout_ms` +===== `metadata_fetch_timeout_ms` * Value type is <> * Default value is `60000` milliseconds (60 seconds). @@ -318,7 +318,7 @@ The key for the message. The timeout setting for initial metadata request to fetch topic metadata. 
[id="plugins-{type}s-{plugin}-metadata_max_age_ms"] -===== `metadata_max_age_ms` +===== `metadata_max_age_ms` * Value type is <> * Default value is `300000` milliseconds (5 minutes). @@ -341,7 +341,7 @@ Available options for choosing a partitioning strategy are as follows: * `uniform_sticky` sticks to a partition for the duration of a batch than randomly picks a new one [id="plugins-{type}s-{plugin}-receive_buffer_bytes"] -===== `receive_buffer_bytes` +===== `receive_buffer_bytes` * Value type is <> * Default value is `32768` (32KB). @@ -349,7 +349,7 @@ Available options for choosing a partitioning strategy are as follows: The size of the TCP receive buffer to use when reading data [id="plugins-{type}s-{plugin}-reconnect_backoff_ms"] -===== `reconnect_backoff_ms` +===== `reconnect_backoff_ms` * Value type is <> * Default value is `50`. @@ -357,7 +357,7 @@ The size of the TCP receive buffer to use when reading data The amount of time to wait before attempting to reconnect to a given host when a connection fails. [id="plugins-{type}s-{plugin}-request_timeout_ms"] -===== `request_timeout_ms` +===== `request_timeout_ms` * Value type is <> * Default value is `40000` milliseconds (40 seconds). @@ -368,7 +368,7 @@ elapses the client will resend the request if necessary or fail the request if retries are exhausted. [id="plugins-{type}s-{plugin}-retries"] -===== `retries` +===== `retries` * Value type is <> * There is no default value for this setting. @@ -393,7 +393,7 @@ This prevents the Logstash pipeline from hanging indefinitely. In versions prior to 10.5.0, any exception is retried indefinitely unless the `retries` option is configured. [id="plugins-{type}s-{plugin}-retry_backoff_ms"] -===== `retry_backoff_ms` +===== `retry_backoff_ms` * Value type is <> * Default value is `100` milliseconds. @@ -457,12 +457,12 @@ The SASL login callback handler class the specified SASL mechanism should use. (optional) The maximum duration, in milliseconds, for HTTPS call attempts. 
[id="plugins-{type}s-{plugin}-sasl_jaas_config"] -===== `sasl_jaas_config` +===== `sasl_jaas_config` * Value type is <> * There is no default value for this setting. -JAAS configuration setting local to this plugin instance, as opposed to settings using config file configured using `jaas_path`, which are shared across the JVM. This allows each plugin instance to have its own configuration. +JAAS configuration setting local to this plugin instance, as opposed to settings using config file configured using `jaas_path`, which are shared across the JVM. This allows each plugin instance to have its own configuration. If both `sasl_jaas_config` and `jaas_path` configurations are set, the setting here takes precedence. @@ -475,26 +475,26 @@ Example (setting for Azure Event Hub): } [id="plugins-{type}s-{plugin}-sasl_kerberos_service_name"] -===== `sasl_kerberos_service_name` +===== `sasl_kerberos_service_name` * Value type is <> * There is no default value for this setting. -The Kerberos principal name that Kafka broker runs as. +The Kerberos principal name that Kafka broker runs as. This can be defined either in Kafka's JAAS config or in Kafka's config. [id="plugins-{type}s-{plugin}-sasl_mechanism"] -===== `sasl_mechanism` +===== `sasl_mechanism` * Value type is <> * Default value is `"GSSAPI"` -http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections. +http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections. This may be any mechanism for which a security provider is available. GSSAPI is the default mechanism. [id="plugins-{type}s-{plugin}-security_protocol"] -===== `security_protocol` +===== `security_protocol` * Value can be any of: `PLAINTEXT`, `SSL`, `SASL_PLAINTEXT`, `SASL_SSL` * Default value is `"PLAINTEXT"` @@ -502,7 +502,7 @@ GSSAPI is the default mechanism. 
Security protocol to use, which can be either of PLAINTEXT,SSL,SASL_PLAINTEXT,SASL_SSL [id="plugins-{type}s-{plugin}-send_buffer_bytes"] -===== `send_buffer_bytes` +===== `send_buffer_bytes` * Value type is <> * Default value is `131072` (128KB). @@ -518,7 +518,7 @@ The size of the TCP send buffer to use when sending data. The endpoint identification algorithm, defaults to `"https"`. Set to empty string `""` to disable [id="plugins-{type}s-{plugin}-ssl_key_password"] -===== `ssl_key_password` +===== `ssl_key_password` * Value type is <> * There is no default value for this setting. @@ -526,7 +526,7 @@ The endpoint identification algorithm, defaults to `"https"`. Set to empty strin The password of the private key in the key store file. [id="plugins-{type}s-{plugin}-ssl_keystore_location"] -===== `ssl_keystore_location` +===== `ssl_keystore_location` * Value type is <> * There is no default value for this setting. @@ -534,7 +534,7 @@ The password of the private key in the key store file. If client authentication is required, this setting stores the keystore path. [id="plugins-{type}s-{plugin}-ssl_keystore_password"] -===== `ssl_keystore_password` +===== `ssl_keystore_password` * Value type is <> * There is no default value for this setting. @@ -542,7 +542,7 @@ If client authentication is required, this setting stores the keystore path. If client authentication is required, this setting stores the keystore password [id="plugins-{type}s-{plugin}-ssl_keystore_type"] -===== `ssl_keystore_type` +===== `ssl_keystore_type` * Value type is <> * There is no default value for this setting. @@ -550,7 +550,7 @@ If client authentication is required, this setting stores the keystore password The keystore type. [id="plugins-{type}s-{plugin}-ssl_truststore_location"] -===== `ssl_truststore_location` +===== `ssl_truststore_location` * Value type is <> * There is no default value for this setting. @@ -558,7 +558,7 @@ The keystore type. 
The JKS truststore path to validate the Kafka broker's certificate. [id="plugins-{type}s-{plugin}-ssl_truststore_password"] -===== `ssl_truststore_password` +===== `ssl_truststore_password` * Value type is <> * There is no default value for this setting. @@ -566,7 +566,7 @@ The JKS truststore path to validate the Kafka broker's certificate. The truststore password [id="plugins-{type}s-{plugin}-ssl_truststore_type"] -===== `ssl_truststore_type` +===== `ssl_truststore_type` * Value type is <> * There is no default value for this setting. @@ -574,7 +574,7 @@ The truststore password The truststore type. [id="plugins-{type}s-{plugin}-topic_id"] -===== `topic_id` +===== `topic_id` * This is a required setting. * Value type is <> @@ -583,7 +583,7 @@ The truststore type. The topic to produce messages to [id="plugins-{type}s-{plugin}-value_serializer"] -===== `value_serializer` +===== `value_serializer` * Value type is <> * Default value is `"org.apache.kafka.common.serialization.StringSerializer"` diff --git a/docs/plugins/outputs/librato.asciidoc b/docs/plugins/outputs/librato.asciidoc index aca4f5125..e8df37741 100644 --- a/docs/plugins/outputs/librato.asciidoc +++ b/docs/plugins/outputs/librato.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2019-10-09 :changelog_url: https://github.com/logstash-plugins/logstash-output-librato/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -50,7 +50,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-account_id"] -===== `account_id` +===== `account_id` * This is a required setting. 
* Value type is <> @@ -60,7 +60,7 @@ Your Librato account usually an email address [id="plugins-{type}s-{plugin}-annotation"] -===== `annotation` +===== `annotation` * Value type is <> * Default value is `{}` @@ -93,7 +93,7 @@ or ----- [id="plugins-{type}s-{plugin}-api_token"] -===== `api_token` +===== `api_token` * This is a required setting. * Value type is <> @@ -102,7 +102,7 @@ or Your Librato API Token [id="plugins-{type}s-{plugin}-batch_size"] -===== `batch_size` +===== `batch_size` * Value type is <> * Default value is `"10"` @@ -112,7 +112,7 @@ Number of events to batch up before sending to Librato. [id="plugins-{type}s-{plugin}-counter"] -===== `counter` +===== `counter` * Value type is <> * Default value is `{}` @@ -129,7 +129,7 @@ Example: "name" => "messages_received" } ----- - + Additionally, you can override the `measure_time` for the event. Must be a unix timestamp: [source,ruby] @@ -145,7 +145,7 @@ Additionally, you can override the `measure_time` for the event. Must be a unix Default is to use the event's timestamp [id="plugins-{type}s-{plugin}-gauge"] -===== `gauge` +===== `gauge` * Value type is <> * Default value is `{}` diff --git a/docs/plugins/outputs/loggly.asciidoc b/docs/plugins/outputs/loggly.asciidoc index 18553c1ff..85e7bed66 100644 --- a/docs/plugins/outputs/loggly.asciidoc +++ b/docs/plugins/outputs/loggly.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v6.0.0 :release_date: 2018-07-03 :changelog_url: https://github.com/logstash-plugins/logstash-output-loggly/blob/v6.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -94,7 +94,7 @@ See the https://www.loggly.com/docs/http-endpoint/[Loggly HTTP endpoint document [id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * This is a required setting. 
* Value type is <> @@ -136,7 +136,7 @@ API limits have changed and you need to override the plugin's behaviour. See the https://www.loggly.com/docs/http-bulk-endpoint/[Loggly bulk API documentation] [id="plugins-{type}s-{plugin}-proto"] -===== `proto` +===== `proto` * Value type is <> * Default value is `"http"` @@ -144,7 +144,7 @@ See the https://www.loggly.com/docs/http-bulk-endpoint/[Loggly bulk API document Should the log action be sent over https instead of plain http [id="plugins-{type}s-{plugin}-proxy_host"] -===== `proxy_host` +===== `proxy_host` * Value type is <> * There is no default value for this setting. @@ -152,7 +152,7 @@ Should the log action be sent over https instead of plain http Proxy Host [id="plugins-{type}s-{plugin}-proxy_password"] -===== `proxy_password` +===== `proxy_password` * Value type is <> * Default value is `""` @@ -160,7 +160,7 @@ Proxy Host Proxy Password [id="plugins-{type}s-{plugin}-proxy_port"] -===== `proxy_port` +===== `proxy_port` * Value type is <> * There is no default value for this setting. @@ -168,7 +168,7 @@ Proxy Password Proxy Port [id="plugins-{type}s-{plugin}-proxy_user"] -===== `proxy_user` +===== `proxy_user` * Value type is <> * There is no default value for this setting. @@ -176,18 +176,18 @@ Proxy Port Proxy Username [id="plugins-{type}s-{plugin}-retry_count"] -===== `retry_count` +===== `retry_count` * Value type is <> * Default value is `5` -Retry count. +Retry count. 
It may be possible that the request may timeout due to slow Internet connection if such condition appears, retry_count helps in retrying request for multiple times It will try to submit request until retry_count and then halt [id="plugins-{type}s-{plugin}-tag"] -===== `tag` +===== `tag` * Value type is <> diff --git a/docs/plugins/outputs/logstash.asciidoc b/docs/plugins/outputs/logstash.asciidoc index 97ce25b2a..53126afbb 100644 --- a/docs/plugins/outputs/logstash.asciidoc +++ b/docs/plugins/outputs/logstash.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.4 :release_date: 2024-12-10 :changelog_url: https://github.com/logstash-plugins/logstash-integration-logstash/blob/v1.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/lumberjack.asciidoc b/docs/plugins/outputs/lumberjack.asciidoc index 4fe7f9ba8..8da0a6624 100644 --- a/docs/plugins/outputs/lumberjack.asciidoc +++ b/docs/plugins/outputs/lumberjack.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.9 :release_date: 2021-08-30 :changelog_url: https://github.com/logstash-plugins/logstash-output-lumberjack/blob/v3.1.9/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -46,7 +46,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-flush_size"] -===== `flush_size` +===== `flush_size` * Value type is <> * Default value is `1024` @@ -56,20 +56,20 @@ if the number of events exceed the number the declared `flush_size` we will send them to the logstash server. [id="plugins-{type}s-{plugin}-hosts"] -===== `hosts` +===== `hosts` * This is a required setting. 
* Value type is <> * There is no default value for this setting. List of addresses lumberjack can send to. When the plugin needs to connect to the remote -peer, it randomly selects one of the hosts. +peer, it randomly selects one of the hosts. When the plugin is registered, it opens a connection to one of the hosts. If the plugin detects a connection error, it selects a different host from the list and opens a new connection. [id="plugins-{type}s-{plugin}-idle_flush_time"] -===== `idle_flush_time` +===== `idle_flush_time` * Value type is <> * Default value is `1` @@ -85,7 +85,7 @@ This helps keep both fast and slow log streams moving along in near-real-time. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -94,7 +94,7 @@ near-real-time. the port to connect to [id="plugins-{type}s-{plugin}-ssl_certificate"] -===== `ssl_certificate` +===== `ssl_certificate` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/metriccatcher.asciidoc b/docs/plugins/outputs/metriccatcher.asciidoc index 750600575..c6cc91146 100644 --- a/docs/plugins/outputs/metriccatcher.asciidoc +++ b/docs/plugins/outputs/metriccatcher.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-metriccatcher/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -63,7 +63,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-biased"] -===== `biased` +===== `biased` * Value type is <> * There is no default value for this setting. @@ -76,7 +76,7 @@ The value will be coerced to a floating point value. 
Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-counter"] -===== `counter` +===== `counter` * Value type is <> * There is no default value for this setting. @@ -91,7 +91,7 @@ The value will be coerced to a floating point value. Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-gauge"] -===== `gauge` +===== `gauge` * Value type is <> * There is no default value for this setting. @@ -104,7 +104,7 @@ The value will be coerced to a floating point value. Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -112,7 +112,7 @@ coerced will zero (0) The address of the MetricCatcher [id="plugins-{type}s-{plugin}-meter"] -===== `meter` +===== `meter` * Value type is <> * There is no default value for this setting. @@ -125,7 +125,7 @@ The value will be coerced to a floating point value. Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `1420` @@ -133,7 +133,7 @@ coerced will zero (0) The port to connect on your MetricCatcher [id="plugins-{type}s-{plugin}-timer"] -===== `timer` +===== `timer` * Value type is <> * There is no default value for this setting. @@ -148,7 +148,7 @@ The value will be coerced to a floating point value. Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-uniform"] -===== `uniform` +===== `uniform` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/outputs/mongodb.asciidoc b/docs/plugins/outputs/mongodb.asciidoc index 5beb7958c..6a47d40fc 100644 --- a/docs/plugins/outputs/mongodb.asciidoc +++ b/docs/plugins/outputs/mongodb.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.8 :release_date: 2025-01-02 :changelog_url: https://github.com/logstash-plugins/logstash-output-mongodb/blob/v3.1.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -49,7 +49,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-bulk"] -===== `bulk` +===== `bulk` * Value type is <> * Default value is `false` @@ -57,7 +57,7 @@ output plugins. Bulk insert flag, set to true to allow bulk insertion, else it will insert events one by one. [id="plugins-{type}s-{plugin}-bulk_interval"] -===== `bulk_interval` +===== `bulk_interval` * Value type is <> * Default value is `2` @@ -65,7 +65,7 @@ Bulk insert flag, set to true to allow bulk insertion, else it will insert event Bulk interval, Used to insert events periodically if the "bulk" flag is activated. [id="plugins-{type}s-{plugin}-bulk_size"] -===== `bulk_size` +===== `bulk_size` * Value type is <> * Default value is `900` @@ -74,7 +74,7 @@ Bulk events number, if the number of events to insert into a collection raise th whatever the bulk interval value (mongodb hard limit is 1000). [id="plugins-{type}s-{plugin}-collection"] -===== `collection` +===== `collection` * This is a required setting. * Value type is <> @@ -84,7 +84,7 @@ The collection to use. This value can use `%{foo}` values to dynamically select a collection based on data in the event. [id="plugins-{type}s-{plugin}-database"] -===== `database` +===== `database` * This is a required setting. * Value type is <> @@ -93,7 +93,7 @@ select a collection based on data in the event. The database to use. [id="plugins-{type}s-{plugin}-generateId"] -===== `generateId` +===== `generateId` * Value type is <> * Default value is `false` @@ -103,7 +103,7 @@ The "_id" field will use the timestamp of the event and overwrite an existing "_id" field in the event. 
[id="plugins-{type}s-{plugin}-isodate"] -===== `isodate` +===== `isodate` * Value type is <> * Default value is `false` @@ -113,7 +113,7 @@ of an ISO8601 string. For more information about this, see http://www.mongodb.org/display/DOCS/Dates. [id="plugins-{type}s-{plugin}-retry_delay"] -===== `retry_delay` +===== `retry_delay` * Value type is <> * Default value is `3` @@ -121,7 +121,7 @@ http://www.mongodb.org/display/DOCS/Dates. The number of seconds to wait after failure before retrying. [id="plugins-{type}s-{plugin}-uri"] -===== `uri` +===== `uri` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/nagios.asciidoc b/docs/plugins/outputs/nagios.asciidoc index 938e89b7b..b6bf2d236 100644 --- a/docs/plugins/outputs/nagios.asciidoc +++ b/docs/plugins/outputs/nagios.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-nagios/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -70,7 +70,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-commandfile"] -===== `commandfile` +===== `commandfile` * Value type is <> * Default value is `"/var/lib/nagios3/rw/nagios.cmd"` @@ -78,7 +78,7 @@ output plugins. The full path to your Nagios command file. [id="plugins-{type}s-{plugin}-nagios_level"] -===== `nagios_level` +===== `nagios_level` * Value can be any of: `0`, `1`, `2`, `3` * Default value is `"2"` diff --git a/docs/plugins/outputs/nagios_nsca.asciidoc b/docs/plugins/outputs/nagios_nsca.asciidoc index 07921cd53..4ca383e9a 100644 --- a/docs/plugins/outputs/nagios_nsca.asciidoc +++ b/docs/plugins/outputs/nagios_nsca.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.0.7 :release_date: 2021-09-20 :changelog_url: https://github.com/logstash-plugins/logstash-output-nagios_nsca/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -66,7 +66,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -74,7 +74,7 @@ output plugins. The nagios host or IP to send logs to. It should have a NSCA daemon running. [id="plugins-{type}s-{plugin}-message_format"] -===== `message_format` +===== `message_format` * Value type is <> * Default value is `"%{@timestamp} %{host}: %{message}"` @@ -84,7 +84,7 @@ supports any string and can include `%{name}` and other dynamic strings. [id="plugins-{type}s-{plugin}-nagios_host"] -===== `nagios_host` +===== `nagios_host` * Value type is <> * Default value is `"%{host}"` @@ -94,7 +94,7 @@ parameter accepts interpolation, e.g. you can use `@source_host` or other logstash internal variables. [id="plugins-{type}s-{plugin}-nagios_service"] -===== `nagios_service` +===== `nagios_service` * Value type is <> * Default value is `"LOGSTASH"` @@ -104,7 +104,7 @@ parameter accepts interpolation, e.g. you can use `@source_host` or other logstash internal variables. [id="plugins-{type}s-{plugin}-nagios_status"] -===== `nagios_status` +===== `nagios_status` * This is a required setting. * Value type is <> @@ -113,7 +113,7 @@ logstash internal variables. The status to send to nagios. Should be 0 = OK, 1 = WARNING, 2 = CRITICAL, 3 = UNKNOWN [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `5667` @@ -121,7 +121,7 @@ The status to send to nagios. Should be 0 = OK, 1 = WARNING, 2 = CRITICAL, 3 = U The port where the NSCA daemon on the nagios host listens. 
[id="plugins-{type}s-{plugin}-send_nsca_bin"] -===== `send_nsca_bin` +===== `send_nsca_bin` * Value type is <> * Default value is `"/usr/sbin/send_nsca"` @@ -129,7 +129,7 @@ The port where the NSCA daemon on the nagios host listens. The path to the 'send_nsca' binary on the local host. [id="plugins-{type}s-{plugin}-send_nsca_config"] -===== `send_nsca_config` +===== `send_nsca_config` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/outputs/null.asciidoc b/docs/plugins/outputs/null.asciidoc index 4a24a0e2b..bbeea594e 100644 --- a/docs/plugins/outputs/null.asciidoc +++ b/docs/plugins/outputs/null.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-null/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/opentsdb.asciidoc b/docs/plugins/outputs/opentsdb.asciidoc index b91502789..0539916b2 100644 --- a/docs/plugins/outputs/opentsdb.asciidoc +++ b/docs/plugins/outputs/opentsdb.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.5 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-opentsdb/blob/v3.1.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -45,7 +45,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -53,7 +53,7 @@ output plugins. The address of the opentsdb server. 
[id="plugins-{type}s-{plugin}-metrics"] -===== `metrics` +===== `metrics` * This is a required setting. * Value type is <> @@ -76,7 +76,7 @@ The value will be coerced to a floating point value. Values which cannot be coerced will zero (0) [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `4242` diff --git a/docs/plugins/outputs/pagerduty.asciidoc b/docs/plugins/outputs/pagerduty.asciidoc index c1aa47dab..aca477a94 100644 --- a/docs/plugins/outputs/pagerduty.asciidoc +++ b/docs/plugins/outputs/pagerduty.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.9 :release_date: 2020-01-27 :changelog_url: https://github.com/logstash-plugins/logstash-output-pagerduty/blob/v3.0.9/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -51,7 +51,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-description"] -===== `description` +===== `description` * Value type is <> * Default value is `"Logstash event for %{host}"` @@ -59,7 +59,7 @@ output plugins. Custom description [id="plugins-{type}s-{plugin}-details"] -===== `details` +===== `details` * Value type is <> * Default value is `{"timestamp"=>"%{@timestamp}", "message"=>"%{message}"}` @@ -68,7 +68,7 @@ The event details. These might be data from the Logstash event fields you wish t Tags are automatically included if detected so there is no need to explicitly add them here. 
[id="plugins-{type}s-{plugin}-event_type"] -===== `event_type` +===== `event_type` * Value can be any of: `trigger`, `acknowledge`, `resolve` * Default value is `"trigger"` @@ -76,7 +76,7 @@ Tags are automatically included if detected so there is no need to explicitly ad Event type [id="plugins-{type}s-{plugin}-incident_key"] -===== `incident_key` +===== `incident_key` * Value type is <> * Default value is `"logstash/%{host}/%{type}"` @@ -84,7 +84,7 @@ Event type The service key to use. You'll need to set this up in PagerDuty beforehand. [id="plugins-{type}s-{plugin}-pdurl"] -===== `pdurl` +===== `pdurl` * Value type is <> * Default value is `"https://events.pagerduty.com/generic/2010-04-15/create_event.json"` @@ -93,7 +93,7 @@ PagerDuty API URL. You shouldn't need to change this, but is included to allow f should PagerDuty iterate the API and Logstash hasn't been updated yet. [id="plugins-{type}s-{plugin}-service_key"] -===== `service_key` +===== `service_key` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/pipe.asciidoc b/docs/plugins/outputs/pipe.asciidoc index 52e66b3e4..0ccf6ec4a 100644 --- a/docs/plugins/outputs/pipe.asciidoc +++ b/docs/plugins/outputs/pipe.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.6 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-pipe/blob/v3.0.6/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -47,7 +47,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-command"] -===== `command` +===== `command` * This is a required setting. * Value type is <> @@ -56,7 +56,7 @@ output plugins. 
Command line to launch and pipe to [id="plugins-{type}s-{plugin}-message_format"] -===== `message_format` +===== `message_format` * Value type is <> * There is no default value for this setting. @@ -69,7 +69,7 @@ If this setting is omitted, the full json representation of the event will be written as a single line. [id="plugins-{type}s-{plugin}-ttl"] -===== `ttl` +===== `ttl` * Value type is <> * Default value is `10` diff --git a/docs/plugins/outputs/rabbitmq.asciidoc b/docs/plugins/outputs/rabbitmq.asciidoc index 4bb37786c..285e06530 100644 --- a/docs/plugins/outputs/rabbitmq.asciidoc +++ b/docs/plugins/outputs/rabbitmq.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.4.0 :release_date: 2024-09-16 :changelog_url: https://github.com/logstash-plugins/logstash-integration-rabbitmq/blob/v7.4.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -68,7 +68,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-arguments"] -===== `arguments` +===== `arguments` * Value type is <> * Default value is `{}` @@ -77,7 +77,7 @@ Extra queue arguments as an array. To make a RabbitMQ queue mirrored, use: `{"x-ha-policy" => "all"}` [id="plugins-{type}s-{plugin}-automatic_recovery"] -===== `automatic_recovery` +===== `automatic_recovery` * Value type is <> * Default value is `true` @@ -85,7 +85,7 @@ To make a RabbitMQ queue mirrored, use: `{"x-ha-policy" => "all"}` Set this to automatically recover from a broken connection. You almost certainly don't want to override this!!! [id="plugins-{type}s-{plugin}-connect_retry_interval"] -===== `connect_retry_interval` +===== `connect_retry_interval` * Value type is <> * Default value is `1` @@ -93,7 +93,7 @@ Set this to automatically recover from a broken connection. 
You almost certainly Time in seconds to wait before retrying a connection [id="plugins-{type}s-{plugin}-connection_timeout"] -===== `connection_timeout` +===== `connection_timeout` * Value type is <> * There is no default value for this setting. @@ -101,7 +101,7 @@ Time in seconds to wait before retrying a connection The default connection timeout in milliseconds. If not specified the timeout is infinite. [id="plugins-{type}s-{plugin}-durable"] -===== `durable` +===== `durable` * Value type is <> * Default value is `true` @@ -109,7 +109,7 @@ The default connection timeout in milliseconds. If not specified the timeout is Is this exchange durable? (aka; Should it survive a broker restart?) [id="plugins-{type}s-{plugin}-exchange"] -===== `exchange` +===== `exchange` * This is a required setting. * Value type is <> @@ -118,7 +118,7 @@ Is this exchange durable? (aka; Should it survive a broker restart?) The name of the exchange [id="plugins-{type}s-{plugin}-exchange_type"] -===== `exchange_type` +===== `exchange_type` * This is a required setting. * Value can be any of: `fanout`, `direct`, `topic`, `x-consistent-hash`, `x-modulus-hash` @@ -127,7 +127,7 @@ The name of the exchange The exchange type (fanout, topic, direct) [id="plugins-{type}s-{plugin}-heartbeat"] -===== `heartbeat` +===== `heartbeat` * Value type is <> * There is no default value for this setting. @@ -135,7 +135,7 @@ The exchange type (fanout, topic, direct) Heartbeat delay in seconds. If unspecified no heartbeats will be sent [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -154,7 +154,7 @@ recovery attempts of the hosts is chosen at random and connected to. Note that only one host connection is active at a time. 
[id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * Value type is <> * Default value is `"logstash"` @@ -176,13 +176,13 @@ Values can be {logstash-ref}/event-dependent-configuration.html#sprintf[`sprintf Example: [source,ruby] message_properties => { - "content_type" => "application/json" + "content_type" => "application/json" "priority" => 1 } [id="plugins-{type}s-{plugin}-passive"] -===== `passive` +===== `passive` * Value type is <> * Default value is `false` @@ -190,7 +190,7 @@ Example: Passive queue creation? Useful for checking queue existance without modifying server state [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * Default value is `"guest"` @@ -198,7 +198,7 @@ Passive queue creation? Useful for checking queue existance without modifying se RabbitMQ password [id="plugins-{type}s-{plugin}-persistent"] -===== `persistent` +===== `persistent` * Value type is <> * Default value is `true` @@ -206,7 +206,7 @@ RabbitMQ password Should RabbitMQ persist messages to disk? [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `5672` @@ -214,7 +214,7 @@ Should RabbitMQ persist messages to disk? RabbitMQ port to connect on [id="plugins-{type}s-{plugin}-ssl"] -===== `ssl` +===== `ssl` * Value type is <> * There is no default value for this setting. @@ -225,7 +225,7 @@ Specify ssl_certificate_path and ssl_certificate_password if you need certificate verification [id="plugins-{type}s-{plugin}-ssl_certificate_password"] -===== `ssl_certificate_password` +===== `ssl_certificate_password` * Value type is <> * There is no default value for this setting. @@ -233,7 +233,7 @@ certificate verification Password for the encrypted PKCS12 (.p12) certificate file specified in ssl_certificate_path [id="plugins-{type}s-{plugin}-ssl_certificate_path"] -===== `ssl_certificate_path` +===== `ssl_certificate_path` * Value type is <> * There is no default value for this setting. 
@@ -241,7 +241,7 @@ Password for the encrypted PKCS12 (.p12) certificate file specified in ssl_certi Path to an SSL certificate in PKCS12 (.p12) format used for verifying the remote host [id="plugins-{type}s-{plugin}-ssl_version"] -===== `ssl_version` +===== `ssl_version` * Value type is <> * Default value is `"TLSv1.2"` @@ -249,7 +249,7 @@ Path to an SSL certificate in PKCS12 (.p12) format used for verifying the remote Version of the SSL protocol to use. [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * Default value is `"guest"` @@ -257,7 +257,7 @@ Version of the SSL protocol to use. RabbitMQ username [id="plugins-{type}s-{plugin}-vhost"] -===== `vhost` +===== `vhost` * Value type is <> * Default value is `"/"` diff --git a/docs/plugins/outputs/redis.asciidoc b/docs/plugins/outputs/redis.asciidoc index 65dd2bbbd..d3e325383 100644 --- a/docs/plugins/outputs/redis.asciidoc +++ b/docs/plugins/outputs/redis.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v5.2.0 :release_date: 2024-06-04 :changelog_url: https://github.com/logstash-plugins/logstash-output-redis/blob/v5.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -69,7 +69,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-batch"] -===== `batch` +===== `batch` * Value type is <> * Default value is `false` @@ -83,7 +83,7 @@ If true, we send an RPUSH every "batch_events" events or Only supported for `data_type` is "list". [id="plugins-{type}s-{plugin}-batch_events"] -===== `batch_events` +===== `batch_events` * Value type is <> * Default value is `50` @@ -91,7 +91,7 @@ Only supported for `data_type` is "list". If batch is set to true, the number of events we queue up for an RPUSH. 
[id="plugins-{type}s-{plugin}-batch_timeout"] -===== `batch_timeout` +===== `batch_timeout` * Value type is <> * Default value is `5` @@ -100,7 +100,7 @@ If batch is set to true, the maximum amount of time between RPUSH commands when there are pending events to flush. [id="plugins-{type}s-{plugin}-congestion_interval"] -===== `congestion_interval` +===== `congestion_interval` * Value type is <> * Default value is `1` @@ -109,7 +109,7 @@ How often to check for congestion. Default is one second. Zero means to check on every event. [id="plugins-{type}s-{plugin}-congestion_threshold"] -===== `congestion_threshold` +===== `congestion_threshold` * Value type is <> * Default value is `0` @@ -123,7 +123,7 @@ A default value of 0 means that this limit is disabled. Only supported for `list` Redis `data_type`. [id="plugins-{type}s-{plugin}-data_type"] -===== `data_type` +===== `data_type` * Value can be any of: `list`, `channel` * There is no default value for this setting. @@ -132,7 +132,7 @@ Either list or channel. If `data_type` is list, then we will set RPUSH to key. If `data_type` is channel, then we will PUBLISH to `key`. [id="plugins-{type}s-{plugin}-db"] -===== `db` +===== `db` * Value type is <> * Default value is `0` @@ -140,7 +140,7 @@ RPUSH to key. If `data_type` is channel, then we will PUBLISH to `key`. The Redis database number. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `["127.0.0.1"]` @@ -157,7 +157,7 @@ For example: ["127.0.0.1:6380", "127.0.0.1"] [id="plugins-{type}s-{plugin}-key"] -===== `key` +===== `key` * Value type is <> * There is no default value for this setting. @@ -166,7 +166,7 @@ The name of a Redis list or channel. Dynamic names are valid here, for example `logstash-%{type}`. [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * There is no default value for this setting. @@ -174,7 +174,7 @@ valid here, for example `logstash-%{type}`. 
Password to authenticate with. There is no authentication by default. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `6379` @@ -198,7 +198,7 @@ Enable SSL support. Interval for reconnecting to failed Redis connections [id="plugins-{type}s-{plugin}-shuffle_hosts"] -===== `shuffle_hosts` +===== `shuffle_hosts` * Value type is <> * Default value is `true` @@ -286,7 +286,7 @@ has a hostname or IP address that matches the names within the certificate. `none` performs no certificate validation. [id="plugins-{type}s-{plugin}-timeout"] -===== `timeout` +===== `timeout` * Value type is <> * Default value is `5` diff --git a/docs/plugins/outputs/redmine.asciidoc b/docs/plugins/outputs/redmine.asciidoc index 10abb36f9..91b97544a 100644 --- a/docs/plugins/outputs/redmine.asciidoc +++ b/docs/plugins/outputs/redmine.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-redmine/blob/v3.0.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -70,7 +70,7 @@ output plugins.   
[id="plugins-{type}s-{plugin}-assigned_to_id"] -===== `assigned_to_id` +===== `assigned_to_id` * Value type is <> * Default value is `nil` @@ -79,7 +79,7 @@ redmine issue assigned_to not required for post_issue [id="plugins-{type}s-{plugin}-categorie_id"] -===== `categorie_id` +===== `categorie_id` * Value type is <> * Default value is `nil` @@ -87,7 +87,7 @@ not required for post_issue not required for post_issue [id="plugins-{type}s-{plugin}-description"] -===== `description` +===== `description` * Value type is <> * Default value is `"%{message}"` @@ -96,7 +96,7 @@ redmine issue description required [id="plugins-{type}s-{plugin}-fixed_version_id"] -===== `fixed_version_id` +===== `fixed_version_id` * Value type is <> * Default value is `nil` @@ -104,7 +104,7 @@ required redmine issue fixed_version_id [id="plugins-{type}s-{plugin}-parent_issue_id"] -===== `parent_issue_id` +===== `parent_issue_id` * Value type is <> * Default value is `nil` @@ -113,7 +113,7 @@ redmine issue parent_issue_id not required for post_issue [id="plugins-{type}s-{plugin}-priority_id"] -===== `priority_id` +===== `priority_id` * This is a required setting. * Value type is <> @@ -123,7 +123,7 @@ redmine issue priority_id required [id="plugins-{type}s-{plugin}-project_id"] -===== `project_id` +===== `project_id` * This is a required setting. * Value type is <> @@ -133,7 +133,7 @@ redmine issue projet_id required [id="plugins-{type}s-{plugin}-ssl"] -===== `ssl` +===== `ssl` * Value type is <> * Default value is `false` @@ -141,7 +141,7 @@ required [id="plugins-{type}s-{plugin}-status_id"] -===== `status_id` +===== `status_id` * This is a required setting. 
* Value type is <> @@ -151,7 +151,7 @@ redmine issue status_id required [id="plugins-{type}s-{plugin}-subject"] -===== `subject` +===== `subject` * Value type is <> * Default value is `"%{host}"` @@ -160,7 +160,7 @@ redmine issue subject required [id="plugins-{type}s-{plugin}-token"] -===== `token` +===== `token` * This is a required setting. * Value type is <> @@ -169,7 +169,7 @@ required redmine token user used for authentication [id="plugins-{type}s-{plugin}-tracker_id"] -===== `tracker_id` +===== `tracker_id` * This is a required setting. * Value type is <> @@ -179,7 +179,7 @@ redmine issue tracker_id required [id="plugins-{type}s-{plugin}-url"] -===== `url` +===== `url` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/riak.asciidoc b/docs/plugins/outputs/riak.asciidoc index 1eb77cc37..0503ea474 100644 --- a/docs/plugins/outputs/riak.asciidoc +++ b/docs/plugins/outputs/riak.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2019-10-09 :changelog_url: https://github.com/logstash-plugins/logstash-output-riak/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -50,20 +50,20 @@ output plugins.   [id="plugins-{type}s-{plugin}-bucket"] -===== `bucket` +===== `bucket` * Value type is <> * Default value is `["logstash-%{+YYYY.MM.dd}"]` The bucket name to write events to -Expansion is supported here as values are +Expansion is supported here as values are passed through event.sprintf Multiple buckets can be specified here but any bucket-specific settings defined apply to ALL the buckets. [id="plugins-{type}s-{plugin}-bucket_props"] -===== `bucket_props` +===== `bucket_props` * Value type is <> * There is no default value for this setting. 
@@ -91,7 +91,7 @@ or Properties will be passed as-is [id="plugins-{type}s-{plugin}-enable_search"] -===== `enable_search` +===== `enable_search` * Value type is <> * Default value is `false` @@ -100,7 +100,7 @@ Search Enable search on the bucket defined above [id="plugins-{type}s-{plugin}-enable_ssl"] -===== `enable_ssl` +===== `enable_ssl` * Value type is <> * Default value is `false` @@ -109,7 +109,7 @@ SSL Enable SSL [id="plugins-{type}s-{plugin}-indices"] -===== `indices` +===== `indices` * Value type is <> * There is no default value for this setting. @@ -126,7 +126,7 @@ e.g. Off by default as not everyone runs eleveldb [id="plugins-{type}s-{plugin}-key_name"] -===== `key_name` +===== `key_name` * Value type is <> * There is no default value for this setting. @@ -137,7 +137,7 @@ variables are valid here. Choose this carefully. Best to let riak decide. [id="plugins-{type}s-{plugin}-nodes"] -===== `nodes` +===== `nodes` * Value type is <> * Default value is `{"localhost"=>"8098"}` @@ -156,7 +156,7 @@ e.g ----- [id="plugins-{type}s-{plugin}-proto"] -===== `proto` +===== `proto` * Value can be any of: `http`, `pb` * Default value is `"http"` @@ -167,7 +167,7 @@ Applies to ALL backends listed above No mix and match [id="plugins-{type}s-{plugin}-ssl_opts"] -===== `ssl_opts` +===== `ssl_opts` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/outputs/riemann.asciidoc b/docs/plugins/outputs/riemann.asciidoc index 937a5d8f0..4f245407e 100644 --- a/docs/plugins/outputs/riemann.asciidoc +++ b/docs/plugins/outputs/riemann.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.7 :release_date: 2020-07-15 :changelog_url: https://github.com/logstash-plugins/logstash-output-riemann/blob/v3.0.7/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! 
/////////////////////////////////////////// @@ -62,7 +62,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-debug"] -===== `debug` +===== `debug` * Value type is <> * Default value is `false` @@ -71,7 +71,7 @@ output plugins. Enable debugging output? [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -79,7 +79,7 @@ Enable debugging output? The address of the Riemann server. [id="plugins-{type}s-{plugin}-map_fields"] -===== `map_fields` +===== `map_fields` * Value type is <> * Default value is `false` @@ -120,7 +120,7 @@ When used with the riemann_event any duplicate keys receive their value from riemann_event instead of the logstash event itself. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `5555` @@ -128,7 +128,7 @@ riemann_event instead of the logstash event itself. The port to connect to on your Riemann server. [id="plugins-{type}s-{plugin}-protocol"] -===== `protocol` +===== `protocol` * Value can be any of: `tcp`, `udp` * Default value is `"tcp"` @@ -142,7 +142,7 @@ is to never lose events As such, we use tcp as default here [id="plugins-{type}s-{plugin}-riemann_event"] -===== `riemann_event` +===== `riemann_event` * Value type is <> * There is no default value for this setting. @@ -176,7 +176,7 @@ Values which cannot be coerced will zero (0.0). but can be overridden here. [id="plugins-{type}s-{plugin}-sender"] -===== `sender` +===== `sender` * Value type is <> * Default value is `"%{host}"` diff --git a/docs/plugins/outputs/s3.asciidoc b/docs/plugins/outputs/s3.asciidoc index f2406eaeb..1ced7c4fa 100644 --- a/docs/plugins/outputs/s3.asciidoc +++ b/docs/plugins/outputs/s3.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -26,10 +26,10 @@ include::{include_path}/plugin_header-integration.asciidoc[] This plugin batches and uploads logstash events into Amazon Simple Storage Service (Amazon S3). -IMPORTANT: The S3 output plugin only supports AWS S3. +IMPORTANT: The S3 output plugin only supports AWS S3. Other S3 compatible storage solutions are not supported. -S3 outputs create temporary files into the OS' temporary directory. +S3 outputs create temporary files into the OS' temporary directory. You can specify where to save them using the `temporary_directory` option. IMPORTANT: For configurations containing multiple s3 outputs with the restore @@ -41,7 +41,7 @@ option enabled, each output should define its own 'temporary_directory'. * S3 PutObject permission ===== S3 output file - + [source,txt] ----- `ls.s3.312bc026-2f5d-49bc-ae9f-5940cf4ad9a6.2013-04-18T10.00.tag_hello.part0.txt` @@ -52,8 +52,8 @@ option enabled, each output should define its own 'temporary_directory'. | 312bc026-2f5d-49bc-ae9f-5940cf4ad9a6 | a new, random uuid per file. | | 2013-04-18T10.00 | represents the time whenever you specify time_file. | | tag_hello | indicates the event's tag. | -| part0 | If you indicate size_file, it will generate more parts if your file.size > size_file. -When a file is full, it gets pushed to the bucket and then deleted from the temporary directory. +| part0 | If you indicate size_file, it will generate more parts if your file.size > size_file. +When a file is full, it gets pushed to the bucket and then deleted from the temporary directory. If a file is empty, it is simply deleted. Empty files will not be pushed. 
| |======= @@ -125,7 +125,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -162,7 +162,7 @@ the connection to S3. See full list in https://docs.aws.amazon.com/sdk-for-ruby/ } [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. @@ -180,7 +180,7 @@ file should look like this: [id="plugins-{type}s-{plugin}-bucket"] -===== `bucket` +===== `bucket` * This is a required setting. * Value type is <> @@ -189,7 +189,7 @@ file should look like this: S3 bucket [id="plugins-{type}s-{plugin}-canned_acl"] -===== `canned_acl` +===== `canned_acl` * Value can be any of: `private`, `public-read`, `public-read-write`, `authenticated-read`, `aws-exec-read`, `bucket-owner-read`, `bucket-owner-full-control`, `log-delivery-write` * Default value is `"private"` @@ -197,7 +197,7 @@ S3 bucket The S3 canned ACL to use when putting the file. Defaults to "private". [id="plugins-{type}s-{plugin}-encoding"] -===== `encoding` +===== `encoding` * Value can be any of: `none`, `gzip` * Default value is `"none"` @@ -216,7 +216,7 @@ guaranteed to work correctly with the AWS SDK. The endpoint should be an HTTP or HTTPS URL, e.g. https://example.com [id="plugins-{type}s-{plugin}-prefix"] -===== `prefix` +===== `prefix` * Value type is <> * Default value is `""` @@ -235,7 +235,7 @@ stability issues, which can be further exacerbated when you use a rotation_strategy that delays uploads. [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. @@ -243,7 +243,7 @@ rotation_strategy that delays uploads. 
URI to proxy server if required [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value type is <> * Default value is `"us-east-1"` @@ -251,7 +251,7 @@ URI to proxy server if required The AWS Region [id="plugins-{type}s-{plugin}-restore"] -===== `restore` +===== `restore` * Value type is <> * Default value is `true` @@ -294,12 +294,12 @@ See the https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html[ Session name to use when assuming an IAM role. [id="plugins-{type}s-{plugin}-rotation_strategy"] -===== `rotation_strategy` +===== `rotation_strategy` * Value can be any of: `size_and_time`, `size`, `time` * Default value is `"size_and_time"` -Controls when to close the file and push it to S3. +Controls when to close the file and push it to S3. If you set this value to `size`, it uses the value set in <>. @@ -308,13 +308,13 @@ If you set this value to `time`, it uses the value set in If you set this value to `size_and_time`, it uses the values from <> and <>, and splits the file when -either one matches. +either one matches. The default strategy checks both size and time. The first value to match triggers file rotation. [id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -322,7 +322,7 @@ match triggers file rotation. The AWS Secret Access Key [id="plugins-{type}s-{plugin}-server_side_encryption"] -===== `server_side_encryption` +===== `server_side_encryption` * Value type is <> * Default value is `false` @@ -330,7 +330,7 @@ The AWS Secret Access Key Specifies whether or not to use S3's server side encryption. Defaults to no encryption. 
[id="plugins-{type}s-{plugin}-server_side_encryption_algorithm"] -===== `server_side_encryption_algorithm` +===== `server_side_encryption_algorithm` * Value can be any of: `AES256`, `aws:kms` * Default value is `"AES256"` @@ -338,7 +338,7 @@ Specifies whether or not to use S3's server side encryption. Defaults to no encr Specifies what type of encryption to use when SSE is enabled. [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. @@ -346,7 +346,7 @@ Specifies what type of encryption to use when SSE is enabled. The AWS Session token for temporary credential [id="plugins-{type}s-{plugin}-signature_version"] -===== `signature_version` +===== `signature_version` * Value can be any of: `v2`, `v4` * There is no default value for this setting. @@ -355,7 +355,7 @@ The version of the S3 signature hash to use. Normally uses the internal client d specified here [id="plugins-{type}s-{plugin}-size_file"] -===== `size_file` +===== `size_file` * Value type is <> * Default value is `5242880` @@ -365,7 +365,7 @@ value, a new file is created. If you use tags, Logstash generates a specific siz file for every tag. [id="plugins-{type}s-{plugin}-ssekms_key_id"] -===== `ssekms_key_id` +===== `ssekms_key_id` * Value type is <> * There is no default value for this setting. @@ -375,7 +375,7 @@ If server_side_encryption => aws:kms is set but this is not default KMS key is u http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingKMSEncryption.html [id="plugins-{type}s-{plugin}-storage_class"] -===== `storage_class` +===== `storage_class` * Value can be any of: `STANDARD`, `REDUCED_REDUNDANCY`, `STANDARD_IA`, `ONEZONE_IA`, `INTELLIGENT_TIERING`, `GLACIER`, `DEEP_ARCHIVE`, `OUTPOSTS`, `GLACIER_IR`, `SNOW`, `EXPRESS_ONEZONE` * Default value is `"STANDARD"` @@ -386,7 +386,7 @@ http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html Defaults to STANDARD. 
[id="plugins-{type}s-{plugin}-temporary_directory"] -===== `temporary_directory` +===== `temporary_directory` * Value type is <> * Default value is `"/tmp/logstash"` @@ -395,13 +395,13 @@ Set the directory where logstash will store the tmp files before sending it to S default to the current OS temporary directory in linux /tmp/logstash [id="plugins-{type}s-{plugin}-time_file"] -===== `time_file` +===== `time_file` * Value type is <> * Default value is `15` Set the time, in MINUTES, to close the current sub_time_section of bucket. -If <> is set to `time` or `size_and_time`, then `time_file` cannot be set to 0. +If <> is set to `time` or `size_and_time`, then `time_file` cannot be set to 0. Otherwise, the plugin raises a configuration error. [id="plugins-{type}s-{plugin}-upload_multipart_threshold"] @@ -413,7 +413,7 @@ Otherwise, the plugin raises a configuration error. Files larger than this number are uploaded using the S3 multipart APIs [id="plugins-{type}s-{plugin}-upload_queue_size"] -===== `upload_queue_size` +===== `upload_queue_size` * Value type is <> * Default value is `4` @@ -421,7 +421,7 @@ Files larger than this number are uploaded using the S3 multipart APIs Number of items we can keep in the local queue before uploading them [id="plugins-{type}s-{plugin}-upload_workers_count"] -===== `upload_workers_count` +===== `upload_workers_count` * Value type is <> * Default value is `4` @@ -439,13 +439,13 @@ For cases where the default certificates are unavailable, e.g. Windows, you can set this to `true`. [id="plugins-{type}s-{plugin}-validate_credentials_on_root_bucket"] -===== `validate_credentials_on_root_bucket` +===== `validate_credentials_on_root_bucket` * Value type is <> * Default value is `true` The common use case is to define permissions on the root bucket and give Logstash -full access to write logs. +full access to write logs. In some circumstances, you need more granular permissions on the subfolder. 
This allows you to disable the check at startup. diff --git a/docs/plugins/outputs/sns.asciidoc b/docs/plugins/outputs/sns.asciidoc index 0c147ad86..ea3e23e61 100644 --- a/docs/plugins/outputs/sns.asciidoc +++ b/docs/plugins/outputs/sns.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -75,7 +75,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -89,7 +89,7 @@ This plugin uses the AWS SDK and supports several ways to get credentials, which 5. IAM Instance Profile (available when running inside EC2) [id="plugins-{type}s-{plugin}-arn"] -===== `arn` +===== `arn` * Value type is <> * There is no default value for this setting. @@ -98,7 +98,7 @@ Optional ARN to send messages to. If you do not set this you must include the `sns` field in your events to set the ARN on a per-message basis! [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. @@ -116,7 +116,7 @@ file should look like this: [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. @@ -124,7 +124,7 @@ file should look like this: URI to proxy server if required [id="plugins-{type}s-{plugin}-publish_boot_message_arn"] -===== `publish_boot_message_arn` +===== `publish_boot_message_arn` * Value type is <> * There is no default value for this setting. 
@@ -137,7 +137,7 @@ Example: arn:aws:sns:us-east-1:770975001275:logstash-testing [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value can be any of: `us-east-1`, `us-east-2`, `us-west-1`, `us-west-2`, `eu-central-1`, `eu-west-1`, `eu-west-2`, `ap-southeast-1`, `ap-southeast-2`, `ap-northeast-1`, `ap-northeast-2`, `sa-east-1`, `us-gov-west-1`, `cn-north-1`, `ap-south-1`, `ca-central-1` * Default value is `"us-east-1"` @@ -145,7 +145,7 @@ Example: arn:aws:sns:us-east-1:770975001275:logstash-testing The AWS Region [id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -153,7 +153,7 @@ The AWS Region The AWS Secret Access Key [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/outputs/solr_http.asciidoc b/docs/plugins/outputs/solr_http.asciidoc index 90c05a208..ab78b86f3 100644 --- a/docs/plugins/outputs/solr_http.asciidoc +++ b/docs/plugins/outputs/solr_http.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-solr_http/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -55,7 +55,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-document_id"] -===== `document_id` +===== `document_id` * Value type is <> * Default value is `nil` @@ -64,7 +64,7 @@ Solr document ID for events. 
You'd typically have a variable here, like '%{foo}' so you can assign your own IDs [id="plugins-{type}s-{plugin}-flush_size"] -===== `flush_size` +===== `flush_size` * Value type is <> * Default value is `100` @@ -72,7 +72,7 @@ Solr document ID for events. You'd typically have a variable here, like Number of events to queue up before writing to Solr [id="plugins-{type}s-{plugin}-idle_flush_time"] -===== `idle_flush_time` +===== `idle_flush_time` * Value type is <> * Default value is `1` @@ -81,7 +81,7 @@ Amount of time since the last flush before a flush is done even if the number of buffered events is smaller than flush_size [id="plugins-{type}s-{plugin}-solr_url"] -===== `solr_url` +===== `solr_url` * Value type is <> * Default value is `"http://localhost:8983/solr"` diff --git a/docs/plugins/outputs/sqs.asciidoc b/docs/plugins/outputs/sqs.asciidoc index e68915594..fe996de4f 100644 --- a/docs/plugins/outputs/sqs.asciidoc +++ b/docs/plugins/outputs/sqs.asciidoc @@ -10,7 +10,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.2.1 :release_date: 2025-02-13 :changelog_url: https://github.com/logstash-plugins/logstash-integration-aws/blob/v7.2.1/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -108,7 +108,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-access_key_id"] -===== `access_key_id` +===== `access_key_id` * Value type is <> * There is no default value for this setting. @@ -122,7 +122,7 @@ This plugin uses the AWS SDK and supports several ways to get credentials, which 5. IAM Instance Profile (available when running inside EC2) [id="plugins-{type}s-{plugin}-aws_credentials_file"] -===== `aws_credentials_file` +===== `aws_credentials_file` * Value type is <> * There is no default value for this setting. 
@@ -140,7 +140,7 @@ file should look like this: [id="plugins-{type}s-{plugin}-batch_events"] -===== `batch_events` +===== `batch_events` * Value type is <> * Default value is `10` @@ -159,7 +159,7 @@ This is useful when connecting to S3 compatible services, but beware that these guaranteed to work correctly with the AWS SDK. [id="plugins-{type}s-{plugin}-message_max_size"] -===== `message_max_size` +===== `message_max_size` * Value type is <> * Default value is `"256KiB"` @@ -169,7 +169,7 @@ this size will be dropped. See http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/limits-messages.html. [id="plugins-{type}s-{plugin}-proxy_uri"] -===== `proxy_uri` +===== `proxy_uri` * Value type is <> * There is no default value for this setting. @@ -177,7 +177,7 @@ http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/limits URI to proxy server if required [id="plugins-{type}s-{plugin}-queue"] -===== `queue` +===== `queue` * This is a required setting. * Value type is <> @@ -196,7 +196,7 @@ The owning account id of the target SQS queue. IAM permissions need to be configured on both accounts to function. [id="plugins-{type}s-{plugin}-region"] -===== `region` +===== `region` * Value type is <> * Default value is `"us-east-1"` @@ -222,7 +222,7 @@ See the https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html[ Session name to use when assuming an IAM role. [id="plugins-{type}s-{plugin}-secret_access_key"] -===== `secret_access_key` +===== `secret_access_key` * Value type is <> * There is no default value for this setting. @@ -230,7 +230,7 @@ Session name to use when assuming an IAM role. The AWS Secret Access Key [id="plugins-{type}s-{plugin}-session_token"] -===== `session_token` +===== `session_token` * Value type is <> * There is no default value for this setting. 
diff --git a/docs/plugins/outputs/statsd.asciidoc b/docs/plugins/outputs/statsd.asciidoc index 595e029de..bcced7fed 100644 --- a/docs/plugins/outputs/statsd.asciidoc +++ b/docs/plugins/outputs/statsd.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.2.0 :release_date: 2018-06-05 :changelog_url: https://github.com/logstash-plugins/logstash-output-statsd/blob/v3.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -94,7 +94,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-count"] -===== `count` +===== `count` * Value type is <> * Default value is `{}` @@ -103,7 +103,7 @@ A count metric. `metric_name => count` as hash. `%{fieldname}` substitutions are allowed in the metric names. [id="plugins-{type}s-{plugin}-decrement"] -===== `decrement` +===== `decrement` * Value type is <> * Default value is `[]` @@ -112,7 +112,7 @@ A decrement metric. Metric names as array. `%{fieldname}` substitutions are allowed in the metric names. [id="plugins-{type}s-{plugin}-gauge"] -===== `gauge` +===== `gauge` * Value type is <> * Default value is `{}` @@ -121,7 +121,7 @@ A gauge metric. `metric_name => gauge` as hash. `%{fieldname}` substitutions are allowed in the metric names. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"localhost"` @@ -129,7 +129,7 @@ allowed in the metric names. The hostname or IP address of the statsd server. [id="plugins-{type}s-{plugin}-increment"] -===== `increment` +===== `increment` * Value type is <> * Default value is `[]` @@ -138,7 +138,7 @@ An increment metric. Metric names as array. `%{fieldname}` substitutions are allowed in the metric names. 
[id="plugins-{type}s-{plugin}-namespace"] -===== `namespace` +===== `namespace` * Value type is <> * Default value is `"logstash"` @@ -147,7 +147,7 @@ The statsd namespace to use for this metric. `%{fieldname}` substitutions are allowed. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `8125` @@ -156,7 +156,7 @@ The port to connect to on your statsd server. [id="plugins-{type}s-{plugin}-protocol"] -===== `protocol` +===== `protocol` * Value type is <> * Default value is `"udp"` @@ -164,7 +164,7 @@ The port to connect to on your statsd server. The protocol to connect to on your statsd server. [id="plugins-{type}s-{plugin}-sample_rate"] -===== `sample_rate` +===== `sample_rate` * Value type is <> * Default value is `1` @@ -172,7 +172,7 @@ The protocol to connect to on your statsd server. The sample rate for the metric. [id="plugins-{type}s-{plugin}-sender"] -===== `sender` +===== `sender` * Value type is <> * Default value is `"%{host}"` @@ -181,7 +181,7 @@ The name of the sender. Dots will be replaced with underscores. `%{fieldname}` substitutions are allowed. [id="plugins-{type}s-{plugin}-set"] -===== `set` +===== `set` * Value type is <> * Default value is `{}` @@ -190,7 +190,7 @@ A set metric. `metric_name => "string"` to append as hash. `%{fieldname}` substitutions are allowed in the metric names. [id="plugins-{type}s-{plugin}-timing"] -===== `timing` +===== `timing` * Value type is <> * Default value is `{}` diff --git a/docs/plugins/outputs/stdout.asciidoc b/docs/plugins/outputs/stdout.asciidoc index 3e3885521..41580d44e 100644 --- a/docs/plugins/outputs/stdout.asciidoc +++ b/docs/plugins/outputs/stdout.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.4 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-stdout/blob/v3.1.4/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/stomp.asciidoc b/docs/plugins/outputs/stomp.asciidoc index cbeb62f08..1e467e699 100644 --- a/docs/plugins/outputs/stomp.asciidoc +++ b/docs/plugins/outputs/stomp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.9 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-stomp/blob/v3.0.9/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -48,7 +48,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-debug"] -===== `debug` +===== `debug` * Value type is <> * Default value is `false` @@ -56,7 +56,7 @@ output plugins. Enable debugging output? [id="plugins-{type}s-{plugin}-destination"] -===== `destination` +===== `destination` * This is a required setting. * Value type is <> @@ -68,7 +68,7 @@ The destination to read events from. Supports string expansion, meaning Example: "/topic/logstash" [id="plugins-{type}s-{plugin}-headers"] -===== `headers` +===== `headers` * Value type is <> * There is no default value for this setting. @@ -79,7 +79,7 @@ Custom headers to send with each message. Supports string expansion, meaning Example: headers => ["amq-msg-type", "text", "host", "%{host}"] [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -88,7 +88,7 @@ Example: headers => ["amq-msg-type", "text", "host", "%{host}"] The address of the STOMP server. 
[id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * Value type is <> * Default value is `""` @@ -96,7 +96,7 @@ The address of the STOMP server. The password to authenticate with. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `61613` @@ -104,7 +104,7 @@ The password to authenticate with. The port to connect to on your STOMP server. [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * Value type is <> * Default value is `""` @@ -112,7 +112,7 @@ The port to connect to on your STOMP server. The username to authenticate with. [id="plugins-{type}s-{plugin}-vhost"] -===== `vhost` +===== `vhost` * Value type is <> * Default value is `nil` diff --git a/docs/plugins/outputs/syslog.asciidoc b/docs/plugins/outputs/syslog.asciidoc index 54ab67d4a..80fa971dd 100644 --- a/docs/plugins/outputs/syslog.asciidoc +++ b/docs/plugins/outputs/syslog.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-syslog/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -68,7 +68,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-appname"] -===== `appname` +===== `appname` * Value type is <> * Default value is `"LOGSTASH"` @@ -77,7 +77,7 @@ application name for syslog message. The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-facility"] -===== `facility` +===== `facility` * Value type is <> * Default value is `"user-level"` @@ -88,7 +88,7 @@ The new value can include `%{foo}` strings to help you build a new value from other parts of the event. 
[id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -97,7 +97,7 @@ to help you build a new value from other parts of the event. syslog server address to connect to [id="plugins-{type}s-{plugin}-message"] -===== `message` +===== `message` * Value type is <> * Default value is `"%{message}"` @@ -106,7 +106,7 @@ message text to log. The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-msgid"] -===== `msgid` +===== `msgid` * Value type is <> * Default value is `"-"` @@ -115,7 +115,7 @@ message id for syslog message. The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -124,7 +124,7 @@ to help you build a new value from other parts of the event. syslog server port to connect to [id="plugins-{type}s-{plugin}-priority"] -===== `priority` +===== `priority` * Value type is <> * Default value is `"%{syslog_pri}"` @@ -134,7 +134,7 @@ The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-procid"] -===== `procid` +===== `procid` * Value type is <> * Default value is `"-"` @@ -143,7 +143,7 @@ process id for syslog message. The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-protocol"] -===== `protocol` +===== `protocol` * Value can be any of: `tcp`, `udp`, `ssl-tcp` * Default value is `"udp"` @@ -151,7 +151,7 @@ to help you build a new value from other parts of the event. syslog server protocol. 
you can choose between udp, tcp and ssl/tls over tcp [id="plugins-{type}s-{plugin}-reconnect_interval"] -===== `reconnect_interval` +===== `reconnect_interval` * Value type is <> * Default value is `1` @@ -159,7 +159,7 @@ syslog server protocol. you can choose between udp, tcp and ssl/tls over tcp when connection fails, retry interval in sec. [id="plugins-{type}s-{plugin}-rfc"] -===== `rfc` +===== `rfc` * Value can be any of: `rfc3164`, `rfc5424` * Default value is `"rfc3164"` @@ -167,7 +167,7 @@ when connection fails, retry interval in sec. syslog message format: you can choose between rfc3164 or rfc5424 [id="plugins-{type}s-{plugin}-severity"] -===== `severity` +===== `severity` * Value type is <> * Default value is `"notice"` @@ -178,7 +178,7 @@ The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-sourcehost"] -===== `sourcehost` +===== `sourcehost` * Value type is <> * Default value is `"%{host}"` @@ -187,7 +187,7 @@ source host for syslog message. The new value can include `%{foo}` strings to help you build a new value from other parts of the event. [id="plugins-{type}s-{plugin}-ssl_cacert"] -===== `ssl_cacert` +===== `ssl_cacert` * Value type is <> * There is no default value for this setting. @@ -195,7 +195,7 @@ to help you build a new value from other parts of the event. The SSL CA certificate, chainfile or CA path. The system CA path is automatically included. [id="plugins-{type}s-{plugin}-ssl_cert"] -===== `ssl_cert` +===== `ssl_cert` * Value type is <> * There is no default value for this setting. @@ -203,7 +203,7 @@ The SSL CA certificate, chainfile or CA path. The system CA path is automaticall SSL certificate path [id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * Value type is <> * There is no default value for this setting. 
@@ -211,7 +211,7 @@ SSL certificate path SSL key path [id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * Default value is `nil` @@ -219,7 +219,7 @@ SSL key path SSL key passphrase [id="plugins-{type}s-{plugin}-ssl_verify"] -===== `ssl_verify` +===== `ssl_verify` * Value type is <> * Default value is `false` @@ -227,7 +227,7 @@ SSL key passphrase Verify the identity of the other end of the SSL connection against the CA. [id="plugins-{type}s-{plugin}-use_labels"] -===== `use_labels` +===== `use_labels` * Value type is <> * Default value is `true` diff --git a/docs/plugins/outputs/tcp.asciidoc b/docs/plugins/outputs/tcp.asciidoc index fffa7ebe7..9ef6a9987 100644 --- a/docs/plugins/outputs/tcp.asciidoc +++ b/docs/plugins/outputs/tcp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v7.0.0 :release_date: 2025-01-10 :changelog_url: https://github.com/logstash-plugins/logstash-output-tcp/blob/v7.0.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -62,7 +62,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -72,7 +72,7 @@ When mode is `server`, the address to listen on. When mode is `client`, the address to connect to. [id="plugins-{type}s-{plugin}-mode"] -===== `mode` +===== `mode` * Value can be any of: `server`, `client` * Default value is `"client"` @@ -81,7 +81,7 @@ Mode to operate in. `server` listens for client connections, `client` connects to a server. [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> @@ -91,7 +91,7 @@ When mode is `server`, the port to listen on. When mode is `client`, the port to connect to. 
[id="plugins-{type}s-{plugin}-reconnect_interval"] -===== `reconnect_interval` +===== `reconnect_interval` * Value type is <> * Default value is `10` @@ -152,7 +152,7 @@ NOTE: This setting can be used only if <> is `ser Enable SSL (must be set for other `ssl_` options to take effect). [id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * Value type is <> * There is no default value for this setting. @@ -160,7 +160,7 @@ Enable SSL (must be set for other `ssl_` options to take effect). SSL key path [id="plugins-{type}s-{plugin}-ssl_key_passphrase"] -===== `ssl_key_passphrase` +===== `ssl_key_passphrase` * Value type is <> * Default value is `nil` @@ -201,7 +201,7 @@ NOTE: This setting can be used only if <> is `cli ==== TCP Output Obsolete Configuration Options WARNING: As of version `6.0.0` of this plugin, some configuration options have been replaced. -The plugin will fail to start if it contains any of these obsolete options. +The plugin will fail to start if it contains any of these obsolete options. [cols="<,<",options="header",] diff --git a/docs/plugins/outputs/timber.asciidoc b/docs/plugins/outputs/timber.asciidoc index 4308332e9..71268b0e5 100644 --- a/docs/plugins/outputs/timber.asciidoc +++ b/docs/plugins/outputs/timber.asciidoc @@ -8,7 +8,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v1.0.3 :release_date: 2017-09-02 :changelog_url: https://github.com/logstash-plugins/logstash-output-timber/blob/v1.0.3/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// diff --git a/docs/plugins/outputs/udp.asciidoc b/docs/plugins/outputs/udp.asciidoc index 7e7ab5c9f..7b009fad9 100644 --- a/docs/plugins/outputs/udp.asciidoc +++ b/docs/plugins/outputs/udp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.2.0 :release_date: 2021-07-14 :changelog_url: https://github.com/logstash-plugins/logstash-output-udp/blob/v3.2.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -53,7 +53,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. * Value type is <> @@ -62,7 +62,7 @@ output plugins. The address to send messages to [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/webhdfs.asciidoc b/docs/plugins/outputs/webhdfs.asciidoc index 66bc77c6d..2da8bff39 100644 --- a/docs/plugins/outputs/webhdfs.asciidoc +++ b/docs/plugins/outputs/webhdfs.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.1.0 :release_date: 2023-10-03 :changelog_url: https://github.com/logstash-plugins/logstash-output-webhdfs/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -98,7 +98,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-compression"] -===== `compression` +===== `compression` * Value can be any of: `none`, `snappy`, `gzip` * Default value is `"none"` @@ -106,7 +106,7 @@ output plugins. Compress output. One of ['none', 'snappy', 'gzip'] [id="plugins-{type}s-{plugin}-flush_size"] -===== `flush_size` +===== `flush_size` * Value type is <> * Default value is `500` @@ -114,7 +114,7 @@ Compress output. One of ['none', 'snappy', 'gzip'] Sending data to webhdfs if event count is above, even if `store_interval_in_secs` is not reached. [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * This is a required setting. 
* Value type is <> @@ -123,7 +123,7 @@ Sending data to webhdfs if event count is above, even if `store_interval_in_secs The server name for webhdfs/httpfs connections. [id="plugins-{type}s-{plugin}-idle_flush_time"] -===== `idle_flush_time` +===== `idle_flush_time` * Value type is <> * Default value is `1` @@ -131,7 +131,7 @@ The server name for webhdfs/httpfs connections. Sending data to webhdfs in x seconds intervals. [id="plugins-{type}s-{plugin}-kerberos_keytab"] -===== `kerberos_keytab` +===== `kerberos_keytab` * Value type is <> * There is no default value for this setting. @@ -139,7 +139,7 @@ Sending data to webhdfs in x seconds intervals. Set kerberos keytab file. Note that the gssapi library needs to be available to use this. [id="plugins-{type}s-{plugin}-open_timeout"] -===== `open_timeout` +===== `open_timeout` * Value type is <> * Default value is `30` @@ -147,7 +147,7 @@ Set kerberos keytab file. Note that the gssapi library needs to be available to WebHdfs open timeout, default 30s. [id="plugins-{type}s-{plugin}-path"] -===== `path` +===== `path` * This is a required setting. * Value type is <> @@ -158,7 +158,7 @@ as well as date fields in the joda time format, e.g.: `/user/logstash/dt=%{+YYYY-MM-dd}/%{@source_host}-%{+HH}.log` [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `50070` @@ -166,7 +166,7 @@ as well as date fields in the joda time format, e.g.: The server port for webhdfs/httpfs connections. [id="plugins-{type}s-{plugin}-read_timeout"] -===== `read_timeout` +===== `read_timeout` * Value type is <> * Default value is `30` @@ -174,7 +174,7 @@ The server port for webhdfs/httpfs connections. The WebHdfs read timeout, default 30s. [id="plugins-{type}s-{plugin}-retry_interval"] -===== `retry_interval` +===== `retry_interval` * Value type is <> * Default value is `0.5` @@ -182,7 +182,7 @@ The WebHdfs read timeout, default 30s. How long should we wait between retries. 
[id="plugins-{type}s-{plugin}-retry_known_errors"] -===== `retry_known_errors` +===== `retry_known_errors` * Value type is <> * Default value is `true` @@ -190,7 +190,7 @@ How long should we wait between retries. Retry some known webhdfs errors. These may be caused by race conditions when appending to same file, etc. [id="plugins-{type}s-{plugin}-retry_times"] -===== `retry_times` +===== `retry_times` * Value type is <> * Default value is `5` @@ -198,7 +198,7 @@ Retry some known webhdfs errors. These may be caused by race conditions when app How many times should we retry. If retry_times is exceeded, an error will be logged and the event will be discarded. [id="plugins-{type}s-{plugin}-single_file_per_thread"] -===== `single_file_per_thread` +===== `single_file_per_thread` * Value type is <> * Default value is `false` @@ -208,7 +208,7 @@ This solves some problems with multiple logstash output threads and locked file If this option is set to true, %{[@metadata][thread_id]} needs to be used in path config settting. [id="plugins-{type}s-{plugin}-snappy_bufsize"] -===== `snappy_bufsize` +===== `snappy_bufsize` * Value type is <> * Default value is `32768` @@ -217,7 +217,7 @@ Set snappy chunksize. Only neccessary for stream format. Defaults to 32k. Max is @see http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt [id="plugins-{type}s-{plugin}-snappy_format"] -===== `snappy_format` +===== `snappy_format` * Value can be any of: `stream`, `file` * Default value is `"stream"` @@ -225,7 +225,7 @@ Set snappy chunksize. Only neccessary for stream format. Defaults to 32k. Max is Set snappy format. One of "stream", "file". Set to stream to be hive compatible. [id="plugins-{type}s-{plugin}-ssl_cert"] -===== `ssl_cert` +===== `ssl_cert` * Value type is <> * There is no default value for this setting. @@ -233,7 +233,7 @@ Set snappy format. One of "stream", "file". Set to stream to be hive compatible. Set ssl cert file. 
[id="plugins-{type}s-{plugin}-ssl_key"] -===== `ssl_key` +===== `ssl_key` * Value type is <> * There is no default value for this setting. @@ -241,7 +241,7 @@ Set ssl cert file. Set ssl key file. [id="plugins-{type}s-{plugin}-standby_host"] -===== `standby_host` +===== `standby_host` * Value type is <> * Default value is `false` @@ -249,7 +249,7 @@ Set ssl key file. Standby namenode for ha hdfs. [id="plugins-{type}s-{plugin}-standby_port"] -===== `standby_port` +===== `standby_port` * Value type is <> * Default value is `50070` @@ -257,7 +257,7 @@ Standby namenode for ha hdfs. Standby namenode port for ha hdfs. [id="plugins-{type}s-{plugin}-use_httpfs"] -===== `use_httpfs` +===== `use_httpfs` * Value type is <> * Default value is `false` @@ -265,7 +265,7 @@ Standby namenode port for ha hdfs. Use httpfs mode if set to true, else webhdfs. [id="plugins-{type}s-{plugin}-use_kerberos_auth"] -===== `use_kerberos_auth` +===== `use_kerberos_auth` * Value type is <> * Default value is `false` @@ -273,7 +273,7 @@ Use httpfs mode if set to true, else webhdfs. Set kerberos authentication. [id="plugins-{type}s-{plugin}-use_ssl_auth"] -===== `use_ssl_auth` +===== `use_ssl_auth` * Value type is <> * Default value is `false` @@ -281,7 +281,7 @@ Set kerberos authentication. Set ssl authentication. Note that the openssl library needs to be available to use this. [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * This is a required setting. * Value type is <> diff --git a/docs/plugins/outputs/websocket.asciidoc b/docs/plugins/outputs/websocket.asciidoc index 3b69c2484..7954332fa 100644 --- a/docs/plugins/outputs/websocket.asciidoc +++ b/docs/plugins/outputs/websocket.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! 
:version: v3.1.0 :release_date: 2024-01-11 :changelog_url: https://github.com/logstash-plugins/logstash-output-websocket/blob/v3.1.0/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -47,7 +47,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * Default value is `"0.0.0.0"` @@ -55,7 +55,7 @@ output plugins. The address to serve websocket data from [id="plugins-{type}s-{plugin}-port"] -===== `port` +===== `port` * Value type is <> * Default value is `3232` diff --git a/docs/plugins/outputs/xmpp.asciidoc b/docs/plugins/outputs/xmpp.asciidoc index 4523af8ba..69f12c185 100644 --- a/docs/plugins/outputs/xmpp.asciidoc +++ b/docs/plugins/outputs/xmpp.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.8 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-xmpp/blob/v3.0.8/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -49,7 +49,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-host"] -===== `host` +===== `host` * Value type is <> * There is no default value for this setting. @@ -58,7 +58,7 @@ The xmpp server to connect to. This is optional. If you omit this setting, the host on the user/identity is used. (foo.com for user@foo.com) [id="plugins-{type}s-{plugin}-message"] -===== `message` +===== `message` * This is a required setting. * Value type is <> @@ -67,7 +67,7 @@ the host on the user/identity is used. (foo.com for user@foo.com) The message to send. This supports dynamic strings like `%{host}` [id="plugins-{type}s-{plugin}-password"] -===== `password` +===== `password` * This is a required setting. 
* Value type is <> @@ -76,7 +76,7 @@ The message to send. This supports dynamic strings like `%{host}` The xmpp password for the user/identity. [id="plugins-{type}s-{plugin}-rooms"] -===== `rooms` +===== `rooms` * Value type is <> * There is no default value for this setting. @@ -85,7 +85,7 @@ if muc/multi-user-chat required, give the name of the room that you want to join: room@conference.domain/nick [id="plugins-{type}s-{plugin}-user"] -===== `user` +===== `user` * This is a required setting. * Value type is <> @@ -94,7 +94,7 @@ you want to join: room@conference.domain/nick The user or resource ID, like foo@example.com. [id="plugins-{type}s-{plugin}-users"] -===== `users` +===== `users` * Value type is <> * There is no default value for this setting. diff --git a/docs/plugins/outputs/zabbix.asciidoc b/docs/plugins/outputs/zabbix.asciidoc index 496b43b41..f4480a022 100644 --- a/docs/plugins/outputs/zabbix.asciidoc +++ b/docs/plugins/outputs/zabbix.asciidoc @@ -9,7 +9,7 @@ START - GENERATED VARIABLES, DO NOT EDIT! :version: v3.0.5 :release_date: 2018-04-06 :changelog_url: https://github.com/logstash-plugins/logstash-output-zabbix/blob/v3.0.5/CHANGELOG.md -:include_path: ../../../../logstash/docs/include +:include_path: ../include /////////////////////////////////////////// END - GENERATED VARIABLES, DO NOT EDIT! /////////////////////////////////////////// @@ -81,7 +81,7 @@ output plugins.   [id="plugins-{type}s-{plugin}-multi_value"] -===== `multi_value` +===== `multi_value` * Value type is <> * There is no default value for this setting. @@ -99,7 +99,7 @@ This directive cannot be used in conjunction with the single-value directives `zabbix_key` and `zabbix_value`. [id="plugins-{type}s-{plugin}-timeout"] -===== `timeout` +===== `timeout` * Value type is <> * Default value is `1` @@ -109,7 +109,7 @@ server. This number should be very small, otherwise delays in delivery of other outputs could result. 
[id="plugins-{type}s-{plugin}-zabbix_host"] -===== `zabbix_host` +===== `zabbix_host` * This is a required setting. * Value type is <> @@ -119,7 +119,7 @@ The field name which holds the Zabbix host name. This can be a sub-field of the @metadata field. [id="plugins-{type}s-{plugin}-zabbix_key"] -===== `zabbix_key` +===== `zabbix_key` * Value type is <> * There is no default value for this setting. @@ -132,7 +132,7 @@ IMPORTANT: `zabbix_key` is required if not using `multi_value`. [id="plugins-{type}s-{plugin}-zabbix_server_host"] -===== `zabbix_server_host` +===== `zabbix_server_host` * Value type is <> * Default value is `"localhost"` @@ -140,7 +140,7 @@ IMPORTANT: `zabbix_key` is required if not using `multi_value`. The IP or resolvable hostname where the Zabbix server is running [id="plugins-{type}s-{plugin}-zabbix_server_port"] -===== `zabbix_server_port` +===== `zabbix_server_port` * Value type is <> * Default value is `10051` @@ -148,7 +148,7 @@ The IP or resolvable hostname where the Zabbix server is running The port on which the Zabbix server is running [id="plugins-{type}s-{plugin}-zabbix_value"] -===== `zabbix_value` +===== `zabbix_value` * Value type is <> * Default value is `"message"` diff --git a/docs/plugins/static/core-plugins/codecs/java_dots.asciidoc b/docs/plugins/static/core-plugins/codecs/java_dots.asciidoc new file mode 100644 index 000000000..f9a4a67ac --- /dev/null +++ b/docs/plugins/static/core-plugins/codecs/java_dots.asciidoc @@ -0,0 +1,24 @@ +:plugin: jdots +:type: codec + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Jdots codec plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +This codec renders each processed event as a dot 
(`.`). It is typically used with the `java_stdout` output to provide +approximate event throughput. It is especially useful when combined with `pv` and `wc -c` as follows: + +[source,bash] + bin/logstash -f /path/to/config/with/jdots/codec | pv | wc -c diff --git a/docs/plugins/static/core-plugins/codecs/java_line.asciidoc b/docs/plugins/static/core-plugins/codecs/java_line.asciidoc new file mode 100644 index 000000000..648cbe373 --- /dev/null +++ b/docs/plugins/static/core-plugins/codecs/java_line.asciidoc @@ -0,0 +1,63 @@ +:plugin: java_line +:type: codec + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Java_line codec plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +Encodes and decodes line-oriented text data. + +Decoding behavior: All text data between specified delimiters will be decoded as distinct events. + +Encoding behavior: Each event will be emitted with the specified trailing delimiter. 
+ +[id="plugins-{type}s-{plugin}-options"] +==== Java_line Codec Configuration Options + +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +| <> |<>, one of `["ASCII-8BIT", "UTF-8", "US-ASCII", "Big5", "Big5-HKSCS", "Big5-UAO", "CP949", "Emacs-Mule", "EUC-JP", "EUC-KR", "EUC-TW", "GB2312", "GB18030", "GBK", "ISO-8859-1", "ISO-8859-2", "ISO-8859-3", "ISO-8859-4", "ISO-8859-5", "ISO-8859-6", "ISO-8859-7", "ISO-8859-8", "ISO-8859-9", "ISO-8859-10", "ISO-8859-11", "ISO-8859-13", "ISO-8859-14", "ISO-8859-15", "ISO-8859-16", "KOI8-R", "KOI8-U", "Shift_JIS", "UTF-16BE", "UTF-16LE", "UTF-32BE", "UTF-32LE", "Windows-31J", "Windows-1250", "Windows-1251", "Windows-1252", "IBM437", "IBM737", "IBM775", "CP850", "IBM852", "CP852", "IBM855", "CP855", "IBM857", "IBM860", "IBM861", "IBM862", "IBM863", "IBM864", "IBM865", "IBM866", "IBM869", "Windows-1258", "GB1988", "macCentEuro", "macCroatian", "macCyrillic", "macGreek", "macIceland", "macRoman", "macRomania", "macThai", "macTurkish", "macUkraine", "CP950", "CP951", "IBM037", "stateless-ISO-2022-JP", "eucJP-ms", "CP51932", "EUC-JIS-2004", "GB12345", "ISO-2022-JP", "ISO-2022-JP-2", "CP50220", "CP50221", "Windows-1256", "Windows-1253", "Windows-1255", "Windows-1254", "TIS-620", "Windows-874", "Windows-1257", "MacJapanese", "UTF-7", "UTF8-MAC", "UTF-16", "UTF-32", "UTF8-DoCoMo", "SJIS-DoCoMo", "UTF8-KDDI", "SJIS-KDDI", "ISO-2022-JP-KDDI", "stateless-ISO-2022-JP-KDDI", "UTF8-SoftBank", "SJIS-SoftBank", "BINARY", "CP437", "CP737", "CP775", "IBM850", "CP857", "CP860", "CP861", "CP862", "CP863", "CP864", "CP865", "CP866", "CP869", "CP1258", "Big5-HKSCS:2008", "ebcdic-cp-us", "eucJP", "euc-jp-ms", "EUC-JISX0213", "eucKR", "eucTW", "EUC-CN", "eucCN", "CP936", "ISO2022-JP", "ISO2022-JP2", "ISO8859-1", "ISO8859-2", "ISO8859-3", "ISO8859-4", "ISO8859-5", "ISO8859-6", "CP1256", "ISO8859-7", "CP1253", "ISO8859-8", "CP1255", "ISO8859-9", "CP1254", 
"ISO8859-10", "ISO8859-11", "CP874", "ISO8859-13", "CP1257", "ISO8859-14", "ISO8859-15", "ISO8859-16", "CP878", "MacJapan", "ASCII", "ANSI_X3.4-1968", "646", "CP65000", "CP65001", "UTF-8-MAC", "UTF-8-HFS", "UCS-2BE", "UCS-4BE", "UCS-4LE", "CP932", "csWindows31J", "SJIS", "PCK", "CP1250", "CP1251", "CP1252", "external", "locale"]`|No +| <> |<>|No +| <> |<>|No +|======================================================================= + +  + +[id="plugins-{type}s-{plugin}-charset"] +===== `charset` + + * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, `IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, 
`eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` + * Default value is `"UTF-8"` + +The character encoding used by this input. Examples include `UTF-8` and `cp1252`. This setting is useful if your +inputs are in `Latin-1` (aka `cp1252`) or other character sets than `UTF-8`. + +[id="plugins-{type}s-{plugin}-delimiter"] +===== `delimiter` + + * Value type is <> + * Default value is the system-dependent line separator ("\n" for UNIX systems; "\r\n" for Microsoft Windows) + +Specifies the delimiter that indicates end-of-line. + +[id="plugins-{type}s-{plugin}-format"] +===== `format` + + * Value type is <> + * There is no default value for this setting. + +Set the desired text format for encoding in +https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html#sprintf[`sprintf`] format. 
diff --git a/docs/plugins/static/core-plugins/codecs/java_plain.asciidoc b/docs/plugins/static/core-plugins/codecs/java_plain.asciidoc new file mode 100644 index 000000000..806f825cc --- /dev/null +++ b/docs/plugins/static/core-plugins/codecs/java_plain.asciidoc @@ -0,0 +1,51 @@ +:plugin: java_plain +:type: codec + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Java_plain codec plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +The `java_plain` codec is for text data with no delimiters between events. It is useful mainly for inputs and outputs that +already have a defined framing in their transport protocol such as ZeroMQ, RabbitMQ, Redis, etc. + +[id="plugins-{type}s-{plugin}-options"] +==== Java_plain Codec Configuration Options + +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +| <> |<>, one of `["ASCII-8BIT", "UTF-8", "US-ASCII", "Big5", "Big5-HKSCS", "Big5-UAO", "CP949", "Emacs-Mule", "EUC-JP", "EUC-KR", "EUC-TW", "GB2312", "GB18030", "GBK", "ISO-8859-1", "ISO-8859-2", "ISO-8859-3", "ISO-8859-4", "ISO-8859-5", "ISO-8859-6", "ISO-8859-7", "ISO-8859-8", "ISO-8859-9", "ISO-8859-10", "ISO-8859-11", "ISO-8859-13", "ISO-8859-14", "ISO-8859-15", "ISO-8859-16", "KOI8-R", "KOI8-U", "Shift_JIS", "UTF-16BE", "UTF-16LE", "UTF-32BE", "UTF-32LE", "Windows-31J", "Windows-1250", "Windows-1251", "Windows-1252", "IBM437", "IBM737", "IBM775", "CP850", "IBM852", "CP852", "IBM855", "CP855", "IBM857", "IBM860", "IBM861", "IBM862", "IBM863", "IBM864", "IBM865", "IBM866", "IBM869", "Windows-1258", "GB1988", "macCentEuro", "macCroatian", "macCyrillic", "macGreek", "macIceland", "macRoman", 
"macRomania", "macThai", "macTurkish", "macUkraine", "CP950", "CP951", "IBM037", "stateless-ISO-2022-JP", "eucJP-ms", "CP51932", "EUC-JIS-2004", "GB12345", "ISO-2022-JP", "ISO-2022-JP-2", "CP50220", "CP50221", "Windows-1256", "Windows-1253", "Windows-1255", "Windows-1254", "TIS-620", "Windows-874", "Windows-1257", "MacJapanese", "UTF-7", "UTF8-MAC", "UTF-16", "UTF-32", "UTF8-DoCoMo", "SJIS-DoCoMo", "UTF8-KDDI", "SJIS-KDDI", "ISO-2022-JP-KDDI", "stateless-ISO-2022-JP-KDDI", "UTF8-SoftBank", "SJIS-SoftBank", "BINARY", "CP437", "CP737", "CP775", "IBM850", "CP857", "CP860", "CP861", "CP862", "CP863", "CP864", "CP865", "CP866", "CP869", "CP1258", "Big5-HKSCS:2008", "ebcdic-cp-us", "eucJP", "euc-jp-ms", "EUC-JISX0213", "eucKR", "eucTW", "EUC-CN", "eucCN", "CP936", "ISO2022-JP", "ISO2022-JP2", "ISO8859-1", "ISO8859-2", "ISO8859-3", "ISO8859-4", "ISO8859-5", "ISO8859-6", "CP1256", "ISO8859-7", "CP1253", "ISO8859-8", "CP1255", "ISO8859-9", "CP1254", "ISO8859-10", "ISO8859-11", "CP874", "ISO8859-13", "CP1257", "ISO8859-14", "ISO8859-15", "ISO8859-16", "CP878", "MacJapan", "ASCII", "ANSI_X3.4-1968", "646", "CP65000", "CP65001", "UTF-8-MAC", "UTF-8-HFS", "UCS-2BE", "UCS-4BE", "UCS-4LE", "CP932", "csWindows31J", "SJIS", "PCK", "CP1250", "CP1251", "CP1252", "external", "locale"]`|No +| <> |<>|No +|======================================================================= + +  + +[id="plugins-{type}s-{plugin}-charset"] +===== `charset` + + * Value can be any of: `ASCII-8BIT`, `UTF-8`, `US-ASCII`, `Big5`, `Big5-HKSCS`, `Big5-UAO`, `CP949`, `Emacs-Mule`, `EUC-JP`, `EUC-KR`, `EUC-TW`, `GB2312`, `GB18030`, `GBK`, `ISO-8859-1`, `ISO-8859-2`, `ISO-8859-3`, `ISO-8859-4`, `ISO-8859-5`, `ISO-8859-6`, `ISO-8859-7`, `ISO-8859-8`, `ISO-8859-9`, `ISO-8859-10`, `ISO-8859-11`, `ISO-8859-13`, `ISO-8859-14`, `ISO-8859-15`, `ISO-8859-16`, `KOI8-R`, `KOI8-U`, `Shift_JIS`, `UTF-16BE`, `UTF-16LE`, `UTF-32BE`, `UTF-32LE`, `Windows-31J`, `Windows-1250`, `Windows-1251`, `Windows-1252`, `IBM437`, `IBM737`, 
`IBM775`, `CP850`, `IBM852`, `CP852`, `IBM855`, `CP855`, `IBM857`, `IBM860`, `IBM861`, `IBM862`, `IBM863`, `IBM864`, `IBM865`, `IBM866`, `IBM869`, `Windows-1258`, `GB1988`, `macCentEuro`, `macCroatian`, `macCyrillic`, `macGreek`, `macIceland`, `macRoman`, `macRomania`, `macThai`, `macTurkish`, `macUkraine`, `CP950`, `CP951`, `IBM037`, `stateless-ISO-2022-JP`, `eucJP-ms`, `CP51932`, `EUC-JIS-2004`, `GB12345`, `ISO-2022-JP`, `ISO-2022-JP-2`, `CP50220`, `CP50221`, `Windows-1256`, `Windows-1253`, `Windows-1255`, `Windows-1254`, `TIS-620`, `Windows-874`, `Windows-1257`, `MacJapanese`, `UTF-7`, `UTF8-MAC`, `UTF-16`, `UTF-32`, `UTF8-DoCoMo`, `SJIS-DoCoMo`, `UTF8-KDDI`, `SJIS-KDDI`, `ISO-2022-JP-KDDI`, `stateless-ISO-2022-JP-KDDI`, `UTF8-SoftBank`, `SJIS-SoftBank`, `BINARY`, `CP437`, `CP737`, `CP775`, `IBM850`, `CP857`, `CP860`, `CP861`, `CP862`, `CP863`, `CP864`, `CP865`, `CP866`, `CP869`, `CP1258`, `Big5-HKSCS:2008`, `ebcdic-cp-us`, `eucJP`, `euc-jp-ms`, `EUC-JISX0213`, `eucKR`, `eucTW`, `EUC-CN`, `eucCN`, `CP936`, `ISO2022-JP`, `ISO2022-JP2`, `ISO8859-1`, `ISO8859-2`, `ISO8859-3`, `ISO8859-4`, `ISO8859-5`, `ISO8859-6`, `CP1256`, `ISO8859-7`, `CP1253`, `ISO8859-8`, `CP1255`, `ISO8859-9`, `CP1254`, `ISO8859-10`, `ISO8859-11`, `CP874`, `ISO8859-13`, `CP1257`, `ISO8859-14`, `ISO8859-15`, `ISO8859-16`, `CP878`, `MacJapan`, `ASCII`, `ANSI_X3.4-1968`, `646`, `CP65000`, `CP65001`, `UTF-8-MAC`, `UTF-8-HFS`, `UCS-2BE`, `UCS-4BE`, `UCS-4LE`, `CP932`, `csWindows31J`, `SJIS`, `PCK`, `CP1250`, `CP1251`, `CP1252`, `external`, `locale` + * Default value is `"UTF-8"` + +The character encoding used in this input. Examples include `UTF-8` and `cp1252`. This setting is useful if your data +is in a character set other than `UTF-8`. + +[id="plugins-{type}s-{plugin}-format"] +===== `format` + + * Value type is <> + * There is no default value for this setting. 
+ +Set the desired text format for encoding in +https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html#sprintf[`sprintf`] format. diff --git a/docs/plugins/static/core-plugins/filters/java_uuid.asciidoc b/docs/plugins/static/core-plugins/filters/java_uuid.asciidoc new file mode 100644 index 000000000..fa96faf1d --- /dev/null +++ b/docs/plugins/static/core-plugins/filters/java_uuid.asciidoc @@ -0,0 +1,91 @@ +:plugin: java_uuid +:type: filter + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + + +[id="plugins-{type}s-{plugin}"] + +=== Java_uuid filter plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +The uuid filter allows you to generate a +https://en.wikipedia.org/wiki/Universally_unique_identifier[UUID] +and add it as a field to each processed event. + +This is useful if you need to generate a string that's unique for every +event even if the same input is processed multiple times. If you want +to generate strings that are identical each time an event with the same +content is processed (i.e., a hash), you should use the +{logstash-ref}/plugins-filters-fingerprint.html[fingerprint filter] instead. + +The generated UUIDs follow the version 4 definition in +https://tools.ietf.org/html/rfc4122[RFC 4122] and will be +represented in standard hexadecimal string format, e.g. +"e08806fe-02af-406c-bbde-8a5ae4475e57". + +[id="plugins-{type}s-{plugin}-options"] +==== Java_uuid Filter Configuration Options + +This plugin supports the following configuration options plus the <> described later. 
+ +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +| <> |<>|No +| <> |<>|Yes +|======================================================================= + +Also see <> for a list of options supported by all +filter plugins. + +  + +[id="plugins-{type}s-{plugin}-overwrite"] +===== `overwrite` + + * Value type is <> + * Default value is `false` + +Determines if an existing value in the field specified by the `target` option should +be overwritten by the filter. + +Example: +[source,ruby] + filter { + java_uuid { + target => "uuid" + overwrite => true + } + } + +[id="plugins-{type}s-{plugin}-target"] +===== `target` + + * This is a required setting. + * Value type is <> + * There is no default value for this setting. + +Specifies the name of the field in which the generated UUID should be stored. + +Example: +[source,ruby] + filter { + java_uuid { + target => "uuid" + } + } + + + +[id="plugins-{type}s-{plugin}-common-options"] +include::{include_path}/{type}.asciidoc[] \ No newline at end of file diff --git a/docs/plugins/static/core-plugins/inputs/java_generator.asciidoc b/docs/plugins/static/core-plugins/inputs/java_generator.asciidoc new file mode 100644 index 000000000..cc5590e2a --- /dev/null +++ b/docs/plugins/static/core-plugins/inputs/java_generator.asciidoc @@ -0,0 +1,117 @@ +:plugin: java_generator +:type: input +:default_codec: plain + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Java_generator input plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +Generate synthetic log events. 
+ +This plugin generates a stream of synthetic events that can be used to test the correctness or performance of a +Logstash pipeline. + + +[id="plugins-{type}s-{plugin}-options"] +==== Java_generator Input Configuration Options + +This plugin supports the following configuration options plus the <> described later. + +[cols="<,<,<",options="header",] +|======================================================================= +|Setting |Input type|Required +| <> |<>|No +| <> |<>|No +| <> |<>|No +| <> |<>|No +| <> |<>|No +|======================================================================= + +Also see <> for a list of options supported by all +input plugins. + +  + +[id="plugins-{type}s-{plugin}-count"] +===== `count` + + * Value type is <> + * Default value is `0` + +Sets the number of events that should be generated. + +The default, `0`, means generate an unlimited number of events. + +[id="plugins-{type}s-{plugin}-eps"] +===== `eps` + + * Value type is <> + * Default value is `0` + +Sets the rate at which events should be generated. Fractional values may be specified. For +example, a rate of `0.25` means that one event will be generated every four seconds. + +The default, `0`, means generate events as fast as possible. + +[id="plugins-{type}s-{plugin}-lines"] +===== `lines` + + * Value type is <> + * There is no default value for this setting. + +The lines to emit, in order. This option overrides the 'message' setting if it has also been specified. + +Example: +[source,ruby] + input { + java_generator { + lines => [ + "line 1", + "line 2", + "line 3" + ] + # Emit all lines 2 times. + count => 2 + } + } + +The above will emit a series of three events `line 1` then `line 2` then `line 3` two times for a total of 6 events. + +[id="plugins-{type}s-{plugin}-message"] +===== `message` + + * Value type is <> + * Default value is `"Hello world!"` + +The message string to use in the event. 
+ +[id="plugins-{type}s-{plugin}-threads"] +===== `threads` + + * Value type is <> + * Default value is `1` + +Increasing the number of generator threads up to about the number of CPU cores generally increases overall event +throughput. The `count`, `eps`, and `lines` settings all apply on a per-thread basis. In other words, each thread +will emit the number of events specified in the `count` setting for a total of `threads * count` events. Each thread +will emit events at the `eps` rate for a total rate of `threads * eps`, and each thread will emit each line specified +in the `lines` option. + + + +[id="plugins-{type}s-{plugin}-common-options"] +include::{include_path}/{type}.asciidoc[] + +:default_codec!: \ No newline at end of file diff --git a/docs/plugins/static/core-plugins/inputs/java_stdin.asciidoc b/docs/plugins/static/core-plugins/inputs/java_stdin.asciidoc new file mode 100644 index 000000000..fdddc342d --- /dev/null +++ b/docs/plugins/static/core-plugins/inputs/java_stdin.asciidoc @@ -0,0 +1,35 @@ +:plugin: java_stdin +:type: input +:default_codec: java_line + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Java_stdin input plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +Read events from standard input. + +By default, each event is assumed to be terminated by end-of-line. If you want events delimited in a different +method, you'll need to use a codec with support for that encoding. + +[id="plugins-{type}s-{plugin}-options"] +==== Java_stdin Input Configuration Options + +There are no special configuration options for this plugin, +but it does support the <>. 
+ +[id="plugins-{type}s-{plugin}-common-options"] +include::{include_path}/{type}.asciidoc[] + +:default_codec!: \ No newline at end of file diff --git a/docs/plugins/static/core-plugins/outputs/java_sink.asciidoc b/docs/plugins/static/core-plugins/outputs/java_sink.asciidoc new file mode 100644 index 000000000..27f31006d --- /dev/null +++ b/docs/plugins/static/core-plugins/outputs/java_sink.asciidoc @@ -0,0 +1,33 @@ +:plugin: sink +:type: output +:default_codec!: + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Sink output plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +An event sink that discards any events received. Generally useful for testing the performance of inputs +and filters. + +[id="plugins-{type}s-{plugin}-options"] +==== Sink Output Configuration Options + +There are no special configuration options for this plugin, +but it does support the <>. 
+ +[id="plugins-{type}s-{plugin}-common-options"] +include::{include_path}/{type}.asciidoc[] + +:default_codec!: diff --git a/docs/plugins/static/core-plugins/outputs/java_stdout.asciidoc b/docs/plugins/static/core-plugins/outputs/java_stdout.asciidoc new file mode 100644 index 000000000..c07cb4d53 --- /dev/null +++ b/docs/plugins/static/core-plugins/outputs/java_stdout.asciidoc @@ -0,0 +1,50 @@ +:plugin: java_stdout +:type: output +:default_codec: java_line + +/////////////////////////////////////////// +REPLACES GENERATED VARIABLES +/////////////////////////////////////////// +:include_path: ../../../include +/////////////////////////////////////////// +END - REPLACES GENERATED VARIABLES +/////////////////////////////////////////// + +[id="plugins-{type}s-{plugin}"] + +=== Java_stdout output plugin + +include::{include_path}/plugin_header-core.asciidoc[] + +==== Description + +Prints events to the STDOUT of the shell running Logstash. This output is convenient for debugging +plugin configurations by providing instant access to event data after it has passed through the inputs and filters. + +For example, the following output configuration in conjunction with the Logstash `-e` command-line flag, will +allow you to see the results of your event pipeline for quick iteration. +[source,ruby] + output { + java_stdout {} + } + +Useful codecs include: + +`java_line`: outputs event data in JSON format followed by an end-of-line character. This is the default codec for +java_stdout. + +[source,ruby] + output { + stdout { } + } + +[id="plugins-{type}s-{plugin}-options"] +==== Java_stdout Output Configuration Options + +There are no special configuration options for this plugin, +but it does support the <>. + +[id="plugins-{type}s-{plugin}-common-options"] +include::{include_path}/{type}.asciidoc[] + +:default_codec!: \ No newline at end of file