# List of pipelines to be loaded by Logstash
#
# This document must be a list of dictionaries/hashes, where the keys/values are pipeline settings.
# Default values for omitted settings are read from the `logstash.yml` file.
# When declaring multiple pipelines, each MUST have its own `pipeline.id`.
#
# Example of two pipelines:
#
# - pipeline.id: test
#   pipeline.workers: 1
#   pipeline.batch.size: 1
#   config.string: "input { generator {} } filter { sleep { time => 1 } } output { stdout { codec => dots } }"
# - pipeline.id: another_test
#   queue.type: persisted
#   path.config: "/tmp/logstash/*.config"
#
# Available options:
#
# # name of the pipeline
# pipeline.id: mylogs
#
# # The configuration string to be used by this pipeline
# config.string: "input { generator {} } filter { sleep { time => 1 } } output { stdout { codec => dots } }"
#
# # The path from where to read the configuration text
# path.config: "/etc/conf.d/logstash/myconfig.cfg"
#
# # How many worker threads execute the Filters+Outputs stage of the pipeline
# # (defaults to the number of CPU cores)
# pipeline.workers: 1
#
# # How many events to retrieve from inputs before sending to filters+workers
# pipeline.batch.size: 125
#
# # How long to wait in milliseconds while polling for the next event
# # before dispatching an undersized batch to filters+outputs
# pipeline.batch.delay: 50
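# # (for example, with the defaults above a worker dispatches a full batch
# # once 125 events have been collected, or an undersized batch after 50ms
# # pass without a new event, whichever comes first)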
#
# # How many workers should be used per output plugin instance
# pipeline.output.workers: 1
#
# # Internal queueing model: "memory" for legacy in-memory queueing and
# # "persisted" for disk-based acknowledged queueing. Default is memory
# queue.type: memory
#
# # If using queue.type: persisted, the size of the page data files. The queue data consists
# # of append-only data files separated into pages. Default is 64mb
# queue.page_capacity: 64mb
#
# # If using queue.type: persisted, the maximum number of unread events in the queue.
# # Default is 0 (unlimited)
# queue.max_events: 0
#
# # If using queue.type: persisted, the total capacity of the queue in number of bytes.
# # Default is 1024mb or 1gb
# queue.max_bytes: 1024mb
#
# # If using queue.type: persisted, the maximum number of acked events before forcing a checkpoint
# # Default is 1024, 0 for unlimited
# queue.checkpoint.acks: 1024
#
# # If using queue.type: persisted, the maximum number of written events before forcing a checkpoint
# # Default is 1024, 0 for unlimited
# queue.checkpoint.writes: 1024
#
# # If using queue.type: persisted, the interval in milliseconds at which a checkpoint is forced on the head page
# # Default is 1000, 0 for no periodic checkpoint.
# queue.checkpoint.interval: 1000
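#
# # A minimal sketch of one pipeline entry combining the persisted-queue
# # settings above; the id, path, and values are illustrative, not defaults:
# - pipeline.id: durable-example
#   queue.type: persisted
#   queue.page_capacity: 64mb
#   queue.max_bytes: 2gb
#   queue.checkpoint.writes: 512
#   path.config: "/etc/logstash/conf.d/durable-example.conf"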
#
# # Enable Dead Letter Queueing for this pipeline.
# dead_letter_queue.enable: false
#
# # If using dead_letter_queue.enable: true, the maximum size of the dead letter queue for this pipeline.
# # Entries will be dropped if they would increase the size of the dead letter queue beyond this setting.
# # Default is 1024mb
# dead_letter_queue.max_bytes: 1024mb
#
# # If using dead_letter_queue.enable: true, the directory path where the data files will be stored.
# # Default is path.data/dead_letter_queue
# path.dead_letter_queue:
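#
# # A minimal sketch of one pipeline entry with the dead letter queue
# # enabled, using the settings above; the id and paths are illustrative:
# - pipeline.id: dlq-example
#   dead_letter_queue.enable: true
#   dead_letter_queue.max_bytes: 512mb
#   path.config: "/etc/logstash/conf.d/dlq-example.conf"
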
- pipeline.id: spring-cloud-poc-tcp-json
pipeline.batch.size: 1
  path.config: /etc/logstash/conf.d/tcp-json.conf
queue.type: persisted
- pipeline.id: spring-cloud-poc-filebeat-json
pipeline.batch.size: 1
path.config: /etc/logstash/conf.d/filebeat-json.conf
queue.type: persisted
- pipeline.id: spring-cloud-poc-filebeat-plain
pipeline.batch.size: 1
path.config: /etc/logstash/conf.d/{input-filebeat-plain.conf,filter-output-plain.conf}
queue.type: persisted
- pipeline.id: spring-cloud-poc-file-plain
pipeline.batch.size: 1
path.config: /etc/logstash/conf.d/{input-file-plain.conf,filter-output-plain.conf}
queue.type: persisted
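
# Note: each path.config above points at a standard Logstash pipeline
# configuration file (input/filter/output blocks), not at this YAML format.
# A minimal sketch of what the tcp-json.conf referenced above might contain;
# the port and stdout output are assumptions for illustration, not taken
# from this repository:
#
# input {
#   tcp {
#     port  => 5000
#     codec => json
#   }
# }
# output {
#   stdout { codec => rubydebug }
# }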