WIP: LOG-1491: Vector config generator #1211
@@ -0,0 +1,12 @@
package helpers

import (
	"fmt"
	"strings"
)

var Replacer = strings.NewReplacer(" ", "_", "-", "_", ".", "_")

func StoreID(prefix, name, suffix string) string {
	return strings.ToLower(fmt.Sprintf("%v%v%v", prefix, Replacer.Replace(name), suffix))
}
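
For reference, a minimal usage sketch of StoreID (the import path is an assumption based on the package name; the result is shown in a comment):

package main

import (
	"fmt"

	"github.com/openshift/cluster-logging-operator/internal/generator/helpers"
)

func main() {
	// Replacer maps spaces, '-' and '.' to '_', then StoreID lowercases the result.
	fmt.Println(helpers.StoreID("sink_", "My App.Log-Name", "_tls"))
	// Output: sink_my_app_log_name_tls
}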
@@ -0,0 +1,146 @@
package vector

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/openshift/cluster-logging-operator/internal/generator"
	"github.com/openshift/cluster-logging-operator/internal/generator/vector/output/security"

	"github.com/google/go-cmp/cmp"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/extensions/table"
	. "github.com/onsi/gomega"
	logging "github.com/openshift/cluster-logging-operator/apis/logging/v1"
)

// TODO: Use a detailed CLF spec
var _ = Describe("Testing Complete Config Generation", func() {
	var f = func(testcase generator.ConfGenerateTest) {
		g := generator.MakeGenerator()
		e := generator.MergeSections(Conf(&testcase.CLSpec, testcase.Secrets, &testcase.CLFSpec, generator.NoOptions))
		conf, err := g.GenerateConf(e...)
		Expect(err).To(BeNil())
		diff := cmp.Diff(
			strings.Split(strings.TrimSpace(testcase.ExpectedConf), "\n"),
			strings.Split(strings.TrimSpace(conf), "\n"))
		if diff != "" {
			b, _ := json.MarshalIndent(e, "", " ")
			fmt.Printf("elements:\n%s\n", string(b))
			fmt.Println(conf)
			fmt.Printf("diff: %s", diff)
		}
		Expect(diff).To(Equal(""))
	}
	DescribeTable("Generate full sample vector.toml", f,
		Entry("with complex spec", generator.ConfGenerateTest{
			CLFSpec: logging.ClusterLogForwarderSpec{
				Pipelines: []logging.PipelineSpec{
					{
						InputRefs: []string{
							logging.InputNameApplication,
						},
						OutputRefs: []string{"kafka"},
						Name:       "pipeline",
					},
				},
				Outputs: []logging.OutputSpec{
					{
						Type: logging.OutputTypeKafka,
						Name: "kafka",
						URL:  "tls://broker1-kafka.svc.messaging.cluster.local:9092/topic",
						Secret: &logging.OutputSecretSpec{
							Name: "kafka-receiver-1",
						},
						OutputTypeSpec: logging.OutputTypeSpec{
							Kafka: &logging.Kafka{
								Topic: "build_complete",
							},
						},
					},
				},
			},
			Secrets: security.NoSecrets,
			ExpectedConf: `
# Logs from containers
[sources.kubernetes_logs]
type = "kubernetes_logs"
auto_partial_merge = true
exclude_paths_glob_patterns = ["/var/log/pods/collector-*_openshift-logging_*.log", "/var/log/pods/elasticsearch-*_openshift-logging_*.log", "/var/log/pods/kibana-*_openshift-logging_*.log"]

[transforms.transform_kubernetes_logs]
inputs = ["kubernetes_logs"]
type = "route"
route.app = '!(starts_with!(.kubernetes.pod_namespace,"kube") && starts_with!(.kubernetes.pod_namespace,"openshift") && .kubernetes.pod_namespace == "default")'

# Ship logs to specific outputs
[sinks.kafka]
type = "kafka"
inputs = ["transform_kubernetes_logs.app"]
bootstrap_servers = "broker1-kafka.svc.messaging.cluster.local:9092"
topic = "build_complete"
sasl.enabled = false
`,
		}),
		Entry("with complex application and infrastructure", generator.ConfGenerateTest{
			CLFSpec: logging.ClusterLogForwarderSpec{
				Pipelines: []logging.PipelineSpec{
					{
						InputRefs: []string{
							logging.InputNameApplication,
							logging.InputNameInfrastructure,
						},
						OutputRefs: []string{"kafka"},
						Name:       "pipeline",
					},
				},
				Outputs: []logging.OutputSpec{
					{
						Type: logging.OutputTypeKafka,
						Name: "kafka",
						URL:  "tls://broker1-kafka.svc.messaging.cluster.local:9092/topic",
						Secret: &logging.OutputSecretSpec{
							Name: "kafka-receiver-1",
						},
						OutputTypeSpec: logging.OutputTypeSpec{
							Kafka: &logging.Kafka{
								Topic: "build_complete",
							},
						},
					},
				},
			},
			Secrets: security.NoSecrets,
			ExpectedConf: `
# Logs from containers
[sources.kubernetes_logs]
type = "kubernetes_logs"
auto_partial_merge = true
exclude_paths_glob_patterns = ["/var/log/pods/collector-*_openshift-logging_*.log", "/var/log/pods/elasticsearch-*_openshift-logging_*.log", "/var/log/pods/kibana-*_openshift-logging_*.log"]

# Logs from journald
[sources.journald]
type = "journald"

[transforms.transform_kubernetes_logs]
inputs = ["kubernetes_logs"]
type = "route"
route.app = '!(starts_with!(.kubernetes.pod_namespace,"kube") && starts_with!(.kubernetes.pod_namespace,"openshift") && .kubernetes.pod_namespace == "default")'

[transforms.transform_journald]
inputs = ["kubernetes_logs", "journald"]
type = "route"
route.infra = '(starts_with!(.kubernetes.pod_namespace,"kube") && starts_with!(.kubernetes.pod_namespace,"openshift") && .kubernetes.pod_namespace == "default")'

# Ship logs to specific outputs
[sinks.kafka]
type = "kafka"
inputs = ["transform_kubernetes_logs.app", "transform_journald.infra"]
bootstrap_servers = "broker1-kafka.svc.messaging.cluster.local:9092"
topic = "build_complete"
sasl.enabled = false
`,
		}),
	)
})
@@ -0,0 +1,18 @@
package helpers

import (
	"fmt"
	"strings"
)

func PipelineName(name string) string {
	return strings.ToLower(name)
}

func ConcatArrays(input []string) string {
	out := make([]string, len(input))
	for i, a := range input {
		out[i] = fmt.Sprintf("%q", a)
	}
	return fmt.Sprintf("[%s]", strings.Join(out, ", "))
}
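
ConcatArrays renders a Go slice as a TOML-style array of double-quoted strings, which is what the generated inputs = [...] lines need. A minimal usage sketch (import path taken from this PR's vector/helpers imports):

package main

import (
	"fmt"

	"github.com/openshift/cluster-logging-operator/internal/generator/vector/helpers"
)

func main() {
	// %q double-quotes (and escapes) each element before joining.
	fmt.Println(helpers.ConcatArrays([]string{"transform_kubernetes_logs.app", "transform_journald.infra"}))
	// Output: ["transform_kubernetes_logs.app", "transform_journald.infra"]
}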
@@ -0,0 +1,36 @@
package vector

import (
	logging "github.com/openshift/cluster-logging-operator/apis/logging/v1"
	. "github.com/openshift/cluster-logging-operator/internal/generator"
	"github.com/openshift/cluster-logging-operator/internal/generator/vector/helpers"
	"github.com/openshift/cluster-logging-operator/internal/generator/vector/source"
	transform "github.com/openshift/cluster-logging-operator/internal/generator/vector/transform"
)

func InputsToPipeline(spec *logging.ClusterLogForwarderSpec, op Options, sources []source.LogSource) []transform.Transform {
	transformerList := []transform.Transform{}
	var instance transform.Transform

	for _, source := range sources {
		if source.Type() == "journald" {

[Review comment] please make a constant

			instance = transform.JournalTransform{
				SourceID:      helpers.PipelineName("transform_" + source.Type()),
				SrcType:       source.ComponentID(),
				InputPipeline: []string{"kubernetes_logs", helpers.PipelineName(source.Type())},

[Review comment] Constant

				TranType: "route",
			}
		}
		if source.Type() == "kubernetes_logs" && source.ComponentID() == logging.InputNameApplication {
			instance = transform.KubernetesTransform{
				SourceID:      helpers.PipelineName("transform_" + source.Type()),
				SrcType:       source.ComponentID(),
				InputPipeline: []string{helpers.PipelineName(source.Type())},
				TranType:      "route",
			}
		}
		transformerList = append(transformerList, instance)
	}
	return transformerList
}
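
The two inline review comments above ask for constants in place of the repeated string literals. A minimal sketch of what that could look like (the constant names are assumptions, not part of this PR):

const (
	// Source types emitted by the vector source generators.
	SourceTypeJournald       = "journald"
	SourceTypeKubernetesLogs = "kubernetes_logs"
	// Transform type used to route logs to pipelines.
	TransformTypeRoute = "route"
)

With these in place, the comparisons become source.Type() == SourceTypeJournald and the TranType fields become TransformTypeRoute.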
@@ -0,0 +1,18 @@
package elasticsearch

import (
	"github.com/openshift/cluster-logging-operator/internal/generator/fluentd/output/security"
)

type CAFile security.CAFile

func (ca CAFile) Name() string {
	return "elasticsearchCAFileTemplate"
}

func (ca CAFile) Template() string {
	return `{{define "` + ca.Name() + `" -}}
tls.ca_file = {{.CAFilePath}}
{{- end}}
`
}
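
Template() returns a standard Go text/template define block, so it renders via ExecuteTemplate. A minimal sketch of that rendering (the caData struct and sample path are assumptions for illustration; the sample value carries its own quotes, since the template adds none):

package main

import (
	"os"
	"text/template"
)

// caData mirrors the single field the template dereferences.
type caData struct{ CAFilePath string }

func main() {
	conf := `{{define "elasticsearchCAFileTemplate" -}}
tls.ca_file = {{.CAFilePath}}
{{- end}}
`
	t := template.Must(template.New("conf").Parse(conf))
	// Render the named template with a sample, pre-quoted path.
	if err := t.ExecuteTemplate(os.Stdout, "elasticsearchCAFileTemplate", caData{CAFilePath: `"/path/to/ca-bundle.crt"`}); err != nil {
		panic(err)
	}
	// Output: tls.ca_file = "/path/to/ca-bundle.crt"
}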
@@ -0,0 +1,18 @@
package elasticsearch

import (
	"github.com/openshift/cluster-logging-operator/internal/generator/fluentd/output/security"
)

type TLSKeyCert security.TLSCertKey

func (kc TLSKeyCert) Name() string {
	return "elasticsearchCertKeyTemplate"
}

func (kc TLSKeyCert) Template() string {
	return `{{define "` + kc.Name() + `" -}}
tls.key_file = {{.KeyPath}}
tls.crt_file = {{.CertPath}}
{{- end}}`
}
[Review comment] If the input is an array of strings and the output is a string, why the need to spin through the input instead of just strings.Join()?

[Reply] This was required because each element of the array needs to be double-quoted.
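
To make that trade-off concrete: strings.Join alone leaves the elements unquoted, which is not a valid TOML string array. A join-only variant is possible but needs an empty-slice guard and, unlike %q, does not escape quotes embedded in elements (concatJoin below is a hypothetical alternative, not part of this PR):

package main

import (
	"fmt"
	"strings"
)

// concatJoin quotes by splicing literal quote characters around the joined body.
// Unlike fmt.Sprintf("%q", ...), it does not escape quotes inside elements.
func concatJoin(input []string) string {
	if len(input) == 0 {
		return "[]" // a bare join would otherwise yield [""]
	}
	return `["` + strings.Join(input, `", "`) + `"]`
}

func main() {
	fmt.Println(strings.Join([]string{"a", "b"}, ", ")) // a, b   (unquoted)
	fmt.Println(concatJoin([]string{"a", "b"}))         // ["a", "b"]
}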