From 15946bc9d6a1b6742132328fb6860975ced04a67 Mon Sep 17 00:00:00 2001 From: libander Date: Mon, 20 Dec 2021 18:13:46 -0600 Subject: [PATCH] RHDEVDOCS-3548 --- ...logging-collector-log-forward-fluentd.adoc | 32 +++++++++---------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/modules/cluster-logging-collector-log-forward-fluentd.adoc b/modules/cluster-logging-collector-log-forward-fluentd.adoc index a51f4f03ca53..82203b3506b7 100644 --- a/modules/cluster-logging-collector-log-forward-fluentd.adoc +++ b/modules/cluster-logging-collector-log-forward-fluentd.adoc @@ -32,22 +32,21 @@ spec: url: 'tls://fluentdserver.security.example.com:24224' <5> secret: <6> name: fluentd-secret - passphrase: phrase <7> - name: fluentd-server-insecure type: fluentdForward url: 'tcp://fluentdserver.home.example.com:24224' pipelines: - - name: forward-to-fluentd-secure <8> - inputRefs: <9> + - name: forward-to-fluentd-secure <7> + inputRefs: <8> - application - audit outputRefs: - - fluentd-server-secure <10> - - default <11> - parse: json <12> + - fluentd-server-secure <9> + - default <10> + parse: json <11> labels: - clusterId: "C1234" <13> - - name: forward-to-fluentd-insecure <14> + clusterId: "C1234" <12> + - name: forward-to-fluentd-insecure <13> inputRefs: - infrastructure outputRefs: @@ -60,15 +59,14 @@ spec: <3> Specify a name for the output. <4> Specify the `fluentdForward` type. <5> Specify the URL and port of the external Fluentd instance as a valid absolute URL. You can use the `tcp` (insecure) or `tls` (secure TCP) protocol. If the cluster-wide proxy using the CIDR annotation is enabled, the output must be a server name or FQDN, not an IP address. -<6> If using a `tls` prefix, you must specify the name of the secret required by the endpoint for TLS communication. The secret must exist in the `openshift-logging` project, and must have keys of: *tls.crt*, *tls.key*, and *ca-bundle.crt* that point to the respective certificates that they represent. 
-<7> Optional: Specify the password or passphrase that protects the private key file. -<8> Optional: Specify a name for the pipeline. -<9> Specify which log types to forward by using the pipeline: `application,` `infrastructure`, or `audit`. -<10> Specify the name of the output to use when forwarding logs with this pipeline. -<11> Optional: Specify the `default` output to forward logs to the internal Elasticsearch instance. -<12> Optional: Specify whether to forward structured JSON log entries as JSON objects in the `structured` field. The log entry must contain valid structured JSON; otherwise, OpenShift Logging removes the `structured` field and instead sends the log entry to the default index, `app-00000x`. -<13> Optional: String. One or more labels to add to the logs. -<14> Optional: Configure multiple outputs to forward logs to other external log aggregtors of any supported type: +<6> If using a `tls` prefix, you must specify the name of the secret required by the endpoint for TLS communication. The secret must exist in the `openshift-logging` project, and must have keys of: *tls.crt*, *tls.key*, and *ca-bundle.crt* that point to the respective certificates that they represent. Otherwise, for `http` and `https` prefixes, you can specify a secret that contains a username and password. For more information, see the following "Example: Setting a secret that contains a username and password." +<7> Optional: Specify a name for the pipeline. +<8> Specify which log types to forward by using the pipeline: `application`, `infrastructure`, or `audit`. +<9> Specify the name of the output to use when forwarding logs with this pipeline. +<10> Optional: Specify the `default` output to forward logs to the internal Elasticsearch instance. +<11> Optional: Specify whether to forward structured JSON log entries as JSON objects in the `structured` field. 
The log entry must contain valid structured JSON; otherwise, OpenShift Logging removes the `structured` field and instead sends the log entry to the default index, `app-00000x`. +<12> Optional: String. One or more labels to add to the logs. +<13> Optional: Configure multiple outputs to forward logs to other external log aggregators of any supported type: ** A name to describe the pipeline. ** The `inputRefs` is the log type to forward by using the pipeline: `application,` `infrastructure`, or `audit`. ** The `outputRefs` is the name of the output to use.