diff --git a/converter/internal/common/weaveworks_server.go b/converter/internal/common/weaveworks_server.go
new file mode 100644
index 000000000000..5bba9af4979c
--- /dev/null
+++ b/converter/internal/common/weaveworks_server.go
@@ -0,0 +1,81 @@
+package common
+
+import (
+    "github.com/grafana/agent/converter/diag"
+    "github.com/weaveworks/common/server"
+)
+
+func DefaultWeaveWorksServerCfg() server.Config {
+    cfg := server.Config{}
+    // NOTE: due to a bug in promtail, the default server values for loki_push_api are not applied currently, so
+    // we need to comment out the following line.
+    //cfg.RegisterFlags(flag.NewFlagSet("", flag.PanicOnError))
+    return cfg
+}
+
+func ValidateWeaveWorksServerCfg(cfg server.Config) diag.Diagnostics {
+    var (
+        diags      diag.Diagnostics
+        defaultCfg = DefaultWeaveWorksServerCfg()
+    )
+
+    if cfg.HTTPListenNetwork != defaultCfg.HTTPListenNetwork {
+        diags.Add(diag.SeverityLevelError, "http_listen_network is not supported in server config")
+    }
+    if cfg.GRPCListenNetwork != defaultCfg.GRPCListenNetwork {
+        diags.Add(diag.SeverityLevelError, "grpc_listen_network is not supported in server config")
+    }
+    if cfg.CipherSuites != defaultCfg.CipherSuites {
+        diags.Add(diag.SeverityLevelError, "tls_cipher_suites is not supported in server config")
+    }
+    if cfg.MinVersion != defaultCfg.MinVersion {
+        diags.Add(diag.SeverityLevelError, "tls_min_version is not supported in server config")
+    }
+    if cfg.HTTPTLSConfig != defaultCfg.HTTPTLSConfig {
+        diags.Add(diag.SeverityLevelError, "http_tls_config is not supported in server config")
+    }
+    if cfg.GRPCTLSConfig != defaultCfg.GRPCTLSConfig {
+        diags.Add(diag.SeverityLevelError, "grpc_tls_config is not supported in server config")
+    }
+    if cfg.RegisterInstrumentation {
+        diags.Add(diag.SeverityLevelError, "register_instrumentation is not supported in server config")
+    }
+    if cfg.ServerGracefulShutdownTimeout != defaultCfg.ServerGracefulShutdownTimeout {
+        diags.Add(diag.SeverityLevelError, "graceful_shutdown_timeout is not supported in server config")
+    }
+    if cfg.GRPCServerTime != defaultCfg.GRPCServerTime {
+        diags.Add(diag.SeverityLevelError, "grpc_server_keepalive_time is not supported in server config")
+    }
+    if cfg.GRPCServerTimeout != defaultCfg.GRPCServerTimeout {
+        diags.Add(diag.SeverityLevelError, "grpc_server_keepalive_timeout is not supported in server config")
+    }
+    if cfg.GRPCServerMinTimeBetweenPings != defaultCfg.GRPCServerMinTimeBetweenPings {
+        diags.Add(diag.SeverityLevelError, "grpc_server_min_time_between_pings is not supported in server config")
+    }
+    if cfg.GRPCServerPingWithoutStreamAllowed != defaultCfg.GRPCServerPingWithoutStreamAllowed {
+        diags.Add(diag.SeverityLevelError, "grpc_server_ping_without_stream_allowed is not supported in server config")
+    }
+    if cfg.LogFormat != defaultCfg.LogFormat {
+        diags.Add(diag.SeverityLevelError, "log_format is not supported in server config")
+    }
+    if cfg.LogLevel.String() != defaultCfg.LogLevel.String() {
+        diags.Add(diag.SeverityLevelError, "log_level is not supported in server config")
+    }
+    if cfg.LogSourceIPs != defaultCfg.LogSourceIPs {
+        diags.Add(diag.SeverityLevelError, "log_source_ips_enabled is not supported in server config")
+    }
+    if cfg.LogSourceIPsHeader != defaultCfg.LogSourceIPsHeader {
+        diags.Add(diag.SeverityLevelError, "log_source_ips_header is not supported in server config")
+    }
+    if cfg.LogSourceIPsRegex != defaultCfg.LogSourceIPsRegex {
+        diags.Add(diag.SeverityLevelError, "log_source_ips_regex is not supported in server config")
+    }
+    if cfg.LogRequestAtInfoLevel != defaultCfg.LogRequestAtInfoLevel {
+        diags.Add(diag.SeverityLevelError, "log_request_at_info_level_enabled is not supported in server config")
+    }
+    if cfg.PathPrefix != defaultCfg.PathPrefix {
+        diags.Add(diag.SeverityLevelError, "http_path_prefix is not supported in server config")
+    }
+
+    return diags
+}
diff --git a/converter/internal/prometheusconvert/azure.go b/converter/internal/prometheusconvert/azure.go
index 72a008603027..0a28ddbfc351 100644
--- a/converter/internal/prometheusconvert/azure.go
+++ b/converter/internal/prometheusconvert/azure.go
@@ -13,14 +13,14 @@ import (
 )
 
 func appendDiscoveryAzure(pb *prometheusBlocks, label string, sdConfig *prom_azure.SDConfig) discovery.Exports {
-    discoveryAzureArgs := toDiscoveryAzure(sdConfig)
+    discoveryAzureArgs := ToDiscoveryAzure(sdConfig)
     name := []string{"discovery", "azure"}
     block := common.NewBlockWithOverride(name, label, discoveryAzureArgs)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
     return newDiscoverExports("discovery.azure." + label + ".targets")
 }
 
-func toDiscoveryAzure(sdConfig *prom_azure.SDConfig) *azure.Arguments {
+func ToDiscoveryAzure(sdConfig *prom_azure.SDConfig) *azure.Arguments {
     if sdConfig == nil {
         return nil
     }
@@ -40,7 +40,7 @@ func toDiscoveryAzure(sdConfig *prom_azure.SDConfig) *azure.Arguments {
     }
 }
 
-func validateDiscoveryAzure(sdConfig *prom_azure.SDConfig) diag.Diagnostics {
+func ValidateDiscoveryAzure(sdConfig *prom_azure.SDConfig) diag.Diagnostics {
     return ValidateHttpClientConfig(&sdConfig.HTTPClientConfig)
 }
diff --git a/converter/internal/prometheusconvert/consul.go b/converter/internal/prometheusconvert/consul.go
index 72a9990f9342..bc2942f9d9be 100644
--- a/converter/internal/prometheusconvert/consul.go
+++ b/converter/internal/prometheusconvert/consul.go
@@ -12,7 +12,7 @@ import (
 )
 
 func appendDiscoveryConsul(pb *prometheusBlocks, label string, sdConfig *prom_consul.SDConfig) discovery.Exports {
-    discoveryConsulArgs := toDiscoveryConsul(sdConfig)
+    discoveryConsulArgs := ToDiscoveryConsul(sdConfig)
     name := []string{"discovery", "consul"}
     block := common.NewBlockWithOverride(name, label, discoveryConsulArgs)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
@@ -23,7 +23,7 @@ func validateDiscoveryConsul(sdConfig *prom_consul.SDConfig) diag.Diagnostics {
     return ValidateHttpClientConfig(&sdConfig.HTTPClientConfig)
 }
 
-func toDiscoveryConsul(sdConfig *prom_consul.SDConfig) *consul.Arguments {
+func ToDiscoveryConsul(sdConfig *prom_consul.SDConfig) *consul.Arguments {
     if sdConfig == nil {
         return nil
     }
diff --git a/converter/internal/prometheusconvert/digitalocean.go b/converter/internal/prometheusconvert/digitalocean.go
index d36199168cbf..80bd965023dd 100644
--- a/converter/internal/prometheusconvert/digitalocean.go
+++ b/converter/internal/prometheusconvert/digitalocean.go
@@ -15,14 +15,14 @@ import (
 )
 
 func appendDiscoveryDigitalOcean(pb *prometheusBlocks, label string, sdConfig *prom_digitalocean.SDConfig) discovery.Exports {
-    discoveryDigitalOceanArgs := toDiscoveryDigitalOcean(sdConfig)
+    discoveryDigitalOceanArgs := ToDiscoveryDigitalOcean(sdConfig)
     name := []string{"discovery", "digitalocean"}
     block := common.NewBlockWithOverride(name, label, discoveryDigitalOceanArgs)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
     return newDiscoverExports("discovery.digitalocean." + label + ".targets")
 }
 
-func validateDiscoveryDigitalOcean(sdConfig *prom_digitalocean.SDConfig) diag.Diagnostics {
+func ValidateDiscoveryDigitalOcean(sdConfig *prom_digitalocean.SDConfig) diag.Diagnostics {
     var diags diag.Diagnostics
 
     if sdConfig.HTTPClientConfig.BasicAuth != nil {
@@ -47,7 +47,7 @@ func validateDiscoveryDigitalOcean(sdConfig *prom_digitalocean.SDConfig) diag.Di
     return diags
 }
 
-func toDiscoveryDigitalOcean(sdConfig *prom_digitalocean.SDConfig) *digitalocean.Arguments {
+func ToDiscoveryDigitalOcean(sdConfig *prom_digitalocean.SDConfig) *digitalocean.Arguments {
     if sdConfig == nil {
         return nil
     }
diff --git a/converter/internal/prometheusconvert/docker.go b/converter/internal/prometheusconvert/docker.go
index 8bbb3213b55a..4520d86ef3ac 100644
--- a/converter/internal/prometheusconvert/docker.go
+++ b/converter/internal/prometheusconvert/docker.go
@@ -11,7 +11,7 @@ import (
 )
 
 func appendDiscoveryDocker(pb *prometheusBlocks, label string, sdConfig *prom_docker.DockerSDConfig) discovery.Exports {
-    discoveryDockerArgs := toDiscoveryDocker(sdConfig)
+    discoveryDockerArgs := ToDiscoveryDocker(sdConfig)
     name := []string{"discovery", "docker"}
     block := common.NewBlockWithOverride(name, label, discoveryDockerArgs)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
@@ -22,7 +22,7 @@ func validateDiscoveryDocker(sdConfig *prom_docker.DockerSDConfig) diag.Diagnost
     return ValidateHttpClientConfig(&sdConfig.HTTPClientConfig)
 }
 
-func toDiscoveryDocker(sdConfig *prom_docker.DockerSDConfig) *docker.Arguments {
+func ToDiscoveryDocker(sdConfig *prom_docker.DockerSDConfig) *docker.Arguments {
     if sdConfig == nil {
         return nil
     }
diff --git a/converter/internal/prometheusconvert/ec2.go b/converter/internal/prometheusconvert/ec2.go
index bbc699acaac6..ebd7cd9b336e 100644
--- a/converter/internal/prometheusconvert/ec2.go
+++ b/converter/internal/prometheusconvert/ec2.go
@@ -14,14 +14,14 @@ import (
 )
 
 func appendDiscoveryEC2(pb *prometheusBlocks, label string, sdConfig *prom_aws.EC2SDConfig) discovery.Exports {
-    discoveryec2Args := toDiscoveryEC2(sdConfig)
+    discoveryec2Args := ToDiscoveryEC2(sdConfig)
     name := []string{"discovery", "ec2"}
     block := common.NewBlockWithOverride(name, label, discoveryec2Args)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
     return newDiscoverExports("discovery.ec2." + label + ".targets")
 }
 
-func validateDiscoveryEC2(sdConfig *prom_aws.EC2SDConfig) diag.Diagnostics {
+func ValidateDiscoveryEC2(sdConfig *prom_aws.EC2SDConfig) diag.Diagnostics {
     var diags diag.Diagnostics
 
     if sdConfig.HTTPClientConfig.BasicAuth != nil {
@@ -64,7 +64,7 @@ func validateDiscoveryEC2(sdConfig *prom_aws.EC2SDConfig) diag.Diagnostics {
     return diags
 }
 
-func toDiscoveryEC2(sdConfig *prom_aws.EC2SDConfig) *aws.EC2Arguments {
+func ToDiscoveryEC2(sdConfig *prom_aws.EC2SDConfig) *aws.EC2Arguments {
     if sdConfig == nil {
         return nil
     }
diff --git a/converter/internal/prometheusconvert/file.go b/converter/internal/prometheusconvert/file.go
index 9c407a6db0e1..0ba22a715f62 100644
--- a/converter/internal/prometheusconvert/file.go
+++ b/converter/internal/prometheusconvert/file.go
@@ -11,7 +11,7 @@ import (
 )
 
 func appendDiscoveryFile(pb *prometheusBlocks, label string, sdConfig *prom_file.SDConfig) discovery.Exports {
-    discoveryFileArgs := toDiscoveryFile(sdConfig)
+    discoveryFileArgs := ToDiscoveryFile(sdConfig)
     name := []string{"discovery", "file"}
     block := common.NewBlockWithOverride(name, label, discoveryFileArgs)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
@@ -22,7 +22,7 @@ func validateDiscoveryFile(sdConfig *prom_file.SDConfig) diag.Diagnostics {
     return make(diag.Diagnostics, 0)
 }
 
-func toDiscoveryFile(sdConfig *prom_file.SDConfig) *file.Arguments {
+func ToDiscoveryFile(sdConfig *prom_file.SDConfig) *file.Arguments {
     if sdConfig == nil {
         return nil
     }
diff --git a/converter/internal/prometheusconvert/gce.go b/converter/internal/prometheusconvert/gce.go
index 888946fcf637..8f14cd64038a 100644
--- a/converter/internal/prometheusconvert/gce.go
+++ b/converter/internal/prometheusconvert/gce.go
@@ -11,18 +11,18 @@ import (
 )
 
 func appendDiscoveryGCE(pb *prometheusBlocks, label string, sdConfig *prom_gce.SDConfig) discovery.Exports {
-    discoveryGCEArgs := toDiscoveryGCE(sdConfig)
+    discoveryGCEArgs := ToDiscoveryGCE(sdConfig)
     name := []string{"discovery", "gce"}
     block := common.NewBlockWithOverride(name, label, discoveryGCEArgs)
     pb.discoveryBlocks = append(pb.discoveryBlocks, newPrometheusBlock(block, name, label, "", ""))
     return newDiscoverExports("discovery.gce." + label + ".targets")
 }
 
-func validateDiscoveryGce(sdConfig *prom_gce.SDConfig) diag.Diagnostics {
+func ValidateDiscoveryGCE(sdConfig *prom_gce.SDConfig) diag.Diagnostics {
     return make(diag.Diagnostics, 0)
 }
 
-func toDiscoveryGCE(sdConfig *prom_gce.SDConfig) *gce.Arguments {
+func ToDiscoveryGCE(sdConfig *prom_gce.SDConfig) *gce.Arguments {
     if sdConfig == nil {
         return nil
     }
diff --git a/converter/internal/prometheusconvert/relabel.go b/converter/internal/prometheusconvert/relabel.go
index 202a42470473..3adf79734179 100644
--- a/converter/internal/prometheusconvert/relabel.go
+++ b/converter/internal/prometheusconvert/relabel.go
@@ -49,7 +49,7 @@ func appendDiscoveryRelabel(pb *prometheusBlocks, relabelConfigs []*prom_relabel
     pb.discoveryRelabelBlocks = append(pb.discoveryRelabelBlocks, newPrometheusBlock(block, name, label, "", ""))
 
     return &disc_relabel.Exports{
-        Output: newDiscoveryTargets(fmt.Sprintf("discovery.relabel.%s.targets", label)),
+        Output: newDiscoveryTargets(fmt.Sprintf("discovery.relabel.%s.output", label)),
     }
 }
diff --git a/converter/internal/prometheusconvert/testdata/discovery.river b/converter/internal/prometheusconvert/testdata/discovery.river
index 4341a4c0dabc..73ffc9000302 100644
--- a/converter/internal/prometheusconvert/testdata/discovery.river
+++ b/converter/internal/prometheusconvert/testdata/discovery.river
@@ -51,7 +51,7 @@ discovery.relabel "prometheus1" {
 }
 
 prometheus.scrape "prometheus1" {
-    targets    = discovery.relabel.prometheus1.targets
+    targets    = discovery.relabel.prometheus1.output
     forward_to = [prometheus.relabel.prometheus1.receiver]
     job_name   = "prometheus1"
 }
diff --git a/converter/internal/prometheusconvert/testdata/discovery_relabel.river b/converter/internal/prometheusconvert/testdata/discovery_relabel.river
index 63d3c9b66f0f..c466e63cb750 100644
--- a/converter/internal/prometheusconvert/testdata/discovery_relabel.river
+++ b/converter/internal/prometheusconvert/testdata/discovery_relabel.river
@@ -59,13 +59,13 @@ discovery.relabel "prometheus2" {
 }
 
 prometheus.scrape "prometheus1" {
-    targets    = discovery.relabel.prometheus1.targets
+    targets    = discovery.relabel.prometheus1.output
     forward_to = [prometheus.remote_write.default.receiver]
     job_name   = "prometheus1"
 }
 
 prometheus.scrape "prometheus2" {
-    targets    = discovery.relabel.prometheus2.targets
+    targets    = discovery.relabel.prometheus2.output
     forward_to = [prometheus.remote_write.default.receiver]
     job_name   = "prometheus2"
 }
diff --git a/converter/internal/prometheusconvert/validate.go b/converter/internal/prometheusconvert/validate.go
index 491257fe1c90..1ca323e84a5a 100644
--- a/converter/internal/prometheusconvert/validate.go
+++ b/converter/internal/prometheusconvert/validate.go
@@ -91,21 +91,21 @@ func validateScrapeConfigs(scrapeConfigs []*prom_config.ScrapeConfig) diag.Diagn
         case prom_discover.StaticConfig:
             newDiags = validateScrapeTargets(sdc)
         case *prom_azure.SDConfig:
-            newDiags = validateDiscoveryAzure(sdc)
+            newDiags = ValidateDiscoveryAzure(sdc)
         case *prom_consul.SDConfig:
             newDiags = validateDiscoveryConsul(sdc)
         case *prom_digitalocean.SDConfig:
-            newDiags = validateDiscoveryDigitalOcean(sdc)
+            newDiags = ValidateDiscoveryDigitalOcean(sdc)
         case *prom_dns.SDConfig:
             newDiags = validateDiscoveryDns(sdc)
         case *prom_docker.DockerSDConfig:
             newDiags = validateDiscoveryDocker(sdc)
         case *prom_aws.EC2SDConfig:
-            newDiags = validateDiscoveryEC2(sdc)
+            newDiags = ValidateDiscoveryEC2(sdc)
         case *prom_file.SDConfig:
             newDiags = validateDiscoveryFile(sdc)
         case *prom_gce.SDConfig:
-            newDiags = validateDiscoveryGce(sdc)
+            newDiags = ValidateDiscoveryGCE(sdc)
         case *prom_kubernetes.SDConfig:
             newDiags = validateDiscoveryKubernetes(sdc)
         case *prom_aws.LightsailSDConfig:
diff --git a/converter/internal/promtailconvert/internal/build/azure_sd.go b/converter/internal/promtailconvert/internal/build/azure_sd.go
new file mode 100644
index 000000000000..57f154bb18e7
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/azure_sd.go
@@ -0,0 +1,26 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendAzureSDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.AzureSDConfigs) == 0 {
+        return
+    }
+    for i, sd := range s.cfg.ServiceDiscoveryConfig.AzureSDConfigs {
+        s.diags.AddAll(prometheusconvert.ValidateDiscoveryAzure(sd))
+        compName := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
+
+        args := prometheusconvert.ToDiscoveryAzure(sd)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride(
+            []string{"discovery", "azure"},
+            compName,
+            args,
+        ))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.azure."+compName+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/cloudflare.go b/converter/internal/promtailconvert/internal/build/cloudflare.go
index dd73dcfd9283..2ac36ef6ce2e 100644
--- a/converter/internal/promtailconvert/internal/build/cloudflare.go
+++ b/converter/internal/promtailconvert/internal/build/cloudflare.go
@@ -34,7 +34,7 @@ func (s *ScrapeConfigBuilder) AppendCloudFlareConfig() {
         }
     }
     s.f.Body().AppendBlock(common.NewBlockWithOverrideFn(
-        []string{"loki", "source", "cloudfare"},
+        []string{"loki", "source", "cloudflare"},
         s.cfg.JobName,
         args,
         override,
diff --git a/converter/internal/promtailconvert/internal/build/consul_agent.go b/converter/internal/promtailconvert/internal/build/consul_agent.go
new file mode 100644
index 000000000000..e4210062f211
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/consul_agent.go
@@ -0,0 +1,13 @@
+package build
+
+import "github.com/grafana/agent/converter/diag"
+
+func (s *ScrapeConfigBuilder) AppendConsulAgentSDs() {
+    // TODO: implement this
+    if s.cfg.ServiceDiscoveryConfig.ConsulAgentSDConfigs != nil {
+        s.diags.Add(
+            diag.SeverityLevelError,
+            "consul_agent SDs are not currently supported in Grafana Agent Flow - see https://github.com/grafana/agent/issues/2261",
+        )
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/consul_sd.go b/converter/internal/promtailconvert/internal/build/consul_sd.go
new file mode 100644
index 000000000000..994590bc8792
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/consul_sd.go
@@ -0,0 +1,25 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendConsulSDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.ConsulSDConfigs) == 0 {
+        return
+    }
+
+    for i, sd := range s.cfg.ServiceDiscoveryConfig.ConsulSDConfigs {
+        args := prometheusconvert.ToDiscoveryConsul(sd)
+        compLabel := fmt.Sprintf("consul_sd_%d", i)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride(
+            []string{"discovery", "consul"},
+            compLabel,
+            args,
+        ))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.consul."+compLabel+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/digitalocean_sd.go b/converter/internal/promtailconvert/internal/build/digitalocean_sd.go
new file mode 100644
index 000000000000..bb26483c3ca2
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/digitalocean_sd.go
@@ -0,0 +1,26 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendDigitalOceanSDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.DigitalOceanSDConfigs) == 0 {
+        return
+    }
+    for i, sd := range s.cfg.ServiceDiscoveryConfig.DigitalOceanSDConfigs {
+        s.diags.AddAll(prometheusconvert.ValidateDiscoveryDigitalOcean(sd))
+        compName := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
+
+        args := prometheusconvert.ToDiscoveryDigitalOcean(sd)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride(
+            []string{"discovery", "digitalocean"},
+            compName,
+            args,
+        ))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.digitalocean."+compName+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/docker_sd.go b/converter/internal/promtailconvert/internal/build/docker_sd.go
new file mode 100644
index 000000000000..d80b31bba8f4
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/docker_sd.go
@@ -0,0 +1,26 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendDockerSDs() {
+    if len(s.cfg.DockerSDConfigs) == 0 {
+        return
+    }
+    for i, sd := range s.cfg.DockerSDConfigs {
+        s.diags.AddAll(prometheusconvert.ValidateHttpClientConfig(&sd.HTTPClientConfig))
+        compName := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
+
+        args := prometheusconvert.ToDiscoveryDocker(sd)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride(
+            []string{"discovery", "docker"},
+            compName,
+            args,
+        ))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.docker."+compName+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/ec2_sd.go b/converter/internal/promtailconvert/internal/build/ec2_sd.go
new file mode 100644
index 000000000000..a78d8dbe82ca
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/ec2_sd.go
@@ -0,0 +1,25 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendEC2SDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.EC2SDConfigs) == 0 {
+        return
+    }
+    for i, sd := range s.cfg.ServiceDiscoveryConfig.EC2SDConfigs {
+        s.diags.AddAll(prometheusconvert.ValidateDiscoveryEC2(sd))
+        args := prometheusconvert.ToDiscoveryEC2(sd)
+        compLabel := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride(
+            []string{"discovery", "ec2"},
+            compLabel,
+            args,
+        ))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.ec2."+compLabel+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/file_sd.go b/converter/internal/promtailconvert/internal/build/file_sd.go
new file mode 100644
index 000000000000..dfce5ea8fd48
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/file_sd.go
@@ -0,0 +1,20 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendFileSDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.FileSDConfigs) == 0 {
+        return
+    }
+    for i, sd := range s.cfg.ServiceDiscoveryConfig.FileSDConfigs {
+        args := prometheusconvert.ToDiscoveryFile(sd)
+        compLabel := fmt.Sprintf("file_sd_%d", i)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride([]string{"discovery", "file"}, compLabel, args))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.file."+compLabel+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/gce_sd.go b/converter/internal/promtailconvert/internal/build/gce_sd.go
new file mode 100644
index 000000000000..aa4a7621fb14
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/gce_sd.go
@@ -0,0 +1,25 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+)
+
+func (s *ScrapeConfigBuilder) AppendGCESDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.GCESDConfigs) == 0 {
+        return
+    }
+    for i, sd := range s.cfg.ServiceDiscoveryConfig.GCESDConfigs {
+        s.diags.AddAll(prometheusconvert.ValidateDiscoveryGCE(sd))
+        args := prometheusconvert.ToDiscoveryGCE(sd)
+        compLabel := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
+        s.f.Body().AppendBlock(common.NewBlockWithOverride(
+            []string{"discovery", "gce"},
+            compLabel,
+            args,
+        ))
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.gce."+compLabel+".targets")
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/kubernetes_sd.go b/converter/internal/promtailconvert/internal/build/kubernetes_sd.go
index 8b92e1d3ab01..8f7711e6de48 100644
--- a/converter/internal/promtailconvert/internal/build/kubernetes_sd.go
+++ b/converter/internal/promtailconvert/internal/build/kubernetes_sd.go
@@ -15,12 +15,12 @@ func (s *ScrapeConfigBuilder) AppendKubernetesSDs() {
     for i, sd := range s.cfg.ServiceDiscoveryConfig.KubernetesSDConfigs {
         s.diags.AddAll(prometheusconvert.ValidateHttpClientConfig(&sd.HTTPClientConfig))
         args := prometheusconvert.ToDiscoveryKubernetes(sd)
-        compName := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
+        compLabel := fmt.Sprintf("%s_%d", s.cfg.JobName, i)
         s.f.Body().AppendBlock(common.NewBlockWithOverride(
             []string{"discovery", "kubernetes"},
-            compName,
+            compLabel,
             args,
         ))
-        s.allTargetsExps = append(s.allTargetsExps, "discovery.kubernetes."+compName+".targets")
+        s.allTargetsExps = append(s.allTargetsExps, "discovery.kubernetes."+compLabel+".targets")
     }
 }
diff --git a/converter/internal/promtailconvert/internal/build/loki_write.go b/converter/internal/promtailconvert/internal/build/loki_write.go
new file mode 100644
index 000000000000..8b2578854ece
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/loki_write.go
@@ -0,0 +1,77 @@
+package build
+
+import (
+    "fmt"
+
+    "github.com/alecthomas/units"
+    "github.com/grafana/agent/component/common/loki"
+    lokiwrite "github.com/grafana/agent/component/loki/write"
+    "github.com/grafana/agent/converter/diag"
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/agent/converter/internal/prometheusconvert"
+    "github.com/grafana/agent/pkg/river/token/builder"
+    "github.com/grafana/loki/clients/pkg/promtail/client"
+    lokiflag "github.com/grafana/loki/pkg/util/flagext"
+)
+
+func NewLokiWrite(client *client.Config, diags *diag.Diagnostics, index int) (*builder.Block, loki.LogsReceiver) {
+    label := fmt.Sprintf("default_%d", index)
+    lokiWriteArgs := toLokiWriteArguments(client, diags)
+    block := common.NewBlockWithOverride([]string{"loki", "write"}, label, lokiWriteArgs)
+    return block, common.ConvertLogsReceiver{
+        Expr: fmt.Sprintf("loki.write.%s.receiver", label),
+    }
+}
+
+func toLokiWriteArguments(config *client.Config, diags *diag.Diagnostics) *lokiwrite.Arguments {
+    batchSize, err := units.ParseBase2Bytes(fmt.Sprintf("%dB", config.BatchSize))
+    if err != nil {
+        diags.Add(
+            diag.SeverityLevelError,
+            fmt.Sprintf("failed to parse BatchSize for client config %s: %s", config.Name, err.Error()),
+        )
+    }
+
+    // This is not supported yet - see https://github.com/grafana/agent/issues/4335.
+    if config.DropRateLimitedBatches {
+        diags.Add(
+            diag.SeverityLevelError,
+            "DropRateLimitedBatches is currently not supported in Grafana Agent Flow.",
+        )
+    }
+
+    // Also deprecated in promtail.
+    if len(config.StreamLagLabels) != 0 {
+        diags.Add(
+            diag.SeverityLevelWarn,
+            "stream_lag_labels is deprecated and the associated metric has been removed",
+        )
+    }
+
+    return &lokiwrite.Arguments{
+        Endpoints: []lokiwrite.EndpointOptions{
+            {
+                Name:              config.Name,
+                URL:               config.URL.String(),
+                BatchWait:         config.BatchWait,
+                BatchSize:         batchSize,
+                HTTPClientConfig:  prometheusconvert.ToHttpClientConfig(&config.Client),
+                Headers:           config.Headers,
+                MinBackoff:        config.BackoffConfig.MinBackoff,
+                MaxBackoff:        config.BackoffConfig.MaxBackoff,
+                MaxBackoffRetries: config.BackoffConfig.MaxRetries,
+                RemoteTimeout:     config.Timeout,
+                TenantID:          config.TenantID,
+            },
+        },
+        ExternalLabels: convertFlagLabels(config.ExternalLabels),
+    }
+}
+
+func convertFlagLabels(labels lokiflag.LabelSet) map[string]string {
+    result := map[string]string{}
+    for k, v := range labels.LabelSet {
+        result[string(k)] = string(v)
+    }
+    return result
+}
diff --git a/converter/internal/promtailconvert/internal/build/push_api.go b/converter/internal/promtailconvert/internal/build/push_api.go
new file mode 100644
index 000000000000..955002b31beb
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/push_api.go
@@ -0,0 +1,63 @@
+package build
+
+import (
+    "github.com/grafana/agent/component/common/loki"
+    fnet "github.com/grafana/agent/component/common/net"
+    "github.com/grafana/agent/component/common/relabel"
+    "github.com/grafana/agent/component/loki/source/api"
+    "github.com/grafana/agent/converter/internal/common"
+    "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig"
+)
+
+func (s *ScrapeConfigBuilder) AppendPushAPI() {
+    if s.cfg.PushConfig == nil {
+        return
+    }
+    s.diags.AddAll(common.ValidateWeaveWorksServerCfg(s.cfg.PushConfig.Server))
+    args := toLokiApiArguments(s.cfg.PushConfig, s.getOrNewProcessStageReceivers())
+    override := func(val interface{}) interface{} {
+        switch val.(type) {
+        case relabel.Rules:
+            return common.CustomTokenizer{Expr: s.getOrNewDiscoveryRelabelRules()}
+        default:
+            return val
+        }
+    }
+    s.f.Body().AppendBlock(common.NewBlockWithOverrideFn(
+        []string{"loki", "source", "api"},
+        s.cfg.JobName,
+        args,
+        override,
+    ))
+}
+
+func toLokiApiArguments(config *scrapeconfig.PushTargetConfig, forwardTo []loki.LogsReceiver) api.Arguments {
+    return api.Arguments{
+        ForwardTo:            forwardTo,
+        RelabelRules:         make(relabel.Rules, 0),
+        Labels:               convertPromLabels(config.Labels),
+        UseIncomingTimestamp: config.KeepTimestamp,
+        Server: &fnet.ServerConfig{
+            HTTP: &fnet.HTTPConfig{
+                ListenAddress:      config.Server.HTTPListenAddress,
+                ListenPort:         config.Server.HTTPListenPort,
+                ConnLimit:          config.Server.HTTPConnLimit,
+                ServerReadTimeout:  config.Server.HTTPServerReadTimeout,
+                ServerWriteTimeout: config.Server.HTTPServerWriteTimeout,
+                ServerIdleTimeout:  config.Server.HTTPServerIdleTimeout,
+            },
+            GRPC: &fnet.GRPCConfig{
+                ListenAddress:              config.Server.GRPCListenAddress,
+                ListenPort:                 config.Server.GRPCListenPort,
+                ConnLimit:                  config.Server.GRPCConnLimit,
+                MaxConnectionAge:           config.Server.GRPCServerMaxConnectionAge,
+                MaxConnectionAgeGrace:      config.Server.GRPCServerMaxConnectionAgeGrace,
+                MaxConnectionIdle:          config.Server.GRPCServerMaxConnectionIdle,
+                ServerMaxRecvMsg:           config.Server.GPRCServerMaxRecvMsgSize,
+                ServerMaxSendMsg:           config.Server.GRPCServerMaxSendMsgSize,
+                ServerMaxConcurrentStreams: config.Server.GPRCServerMaxConcurrentStreams,
+            },
+            GracefulShutdownTimeout: config.Server.ServerGracefulShutdownTimeout,
+        },
+    }
+}
diff --git a/converter/internal/promtailconvert/internal/build/scrape_builder.go b/converter/internal/promtailconvert/internal/build/scrape_builder.go
index e7dd05bdec86..fda69d14d516 100644
--- a/converter/internal/promtailconvert/internal/build/scrape_builder.go
+++ b/converter/internal/promtailconvert/internal/build/scrape_builder.go
@@ -1,6 +1,7 @@
 package build
 
 import (
+    "bytes"
     "fmt"
     "strings"
 
@@ -50,6 +51,27 @@ func NewScrapeConfigBuilder(
     }
 }
 
+func (s *ScrapeConfigBuilder) Validate() {
+    if len(s.cfg.ServiceDiscoveryConfig.DockerSwarmSDConfigs) != 0 {
+        s.diags.Add(diag.SeverityLevelError, "dockerswarm_sd_configs is not supported")
+    }
+    if len(s.cfg.ServiceDiscoveryConfig.ServersetSDConfigs) != 0 {
+        s.diags.Add(diag.SeverityLevelError, "serverset_sd_configs is not supported")
+    }
+    if len(s.cfg.ServiceDiscoveryConfig.NerveSDConfigs) != 0 {
+        s.diags.Add(diag.SeverityLevelError, "nerve_sd_configs is not supported")
+    }
+    if len(s.cfg.ServiceDiscoveryConfig.MarathonSDConfigs) != 0 {
+        s.diags.Add(diag.SeverityLevelError, "marathon_sd_configs is not supported")
+    }
+    if len(s.cfg.ServiceDiscoveryConfig.OpenstackSDConfigs) != 0 {
+        s.diags.Add(diag.SeverityLevelError, "openstack_sd_configs is not supported")
+    }
+    if len(s.cfg.ServiceDiscoveryConfig.TritonSDConfigs) != 0 {
+        s.diags.Add(diag.SeverityLevelError, "triton_sd_configs is not supported")
+    }
+}
+
 func (s *ScrapeConfigBuilder) AppendLokiSourceFile() {
     // If there were no targets expressions collected, that means
     // we didn't have any components that produced SD targets, so
@@ -178,12 +200,12 @@ func (s *ScrapeConfigBuilder) getExpandedFileTargetsExpr() string {
     }
 
     s.f.Body().AppendBlock(common.NewBlockWithOverrideFn(
-        []string{"discovery", "file"},
+        []string{"local", "file_match"},
         s.cfg.JobName,
         args,
         overrideHook,
     ))
-    s.allExpandedFileTargetsExpr = "discovery.file." + s.cfg.JobName + ".targets"
+    s.allExpandedFileTargetsExpr = "local.file_match." + s.cfg.JobName + ".targets"
 
     return s.allExpandedFileTargetsExpr
 }
@@ -217,3 +239,14 @@ func logsReceiversToExpr(r []loki.LogsReceiver) string {
     }
     return "[" + strings.Join(exprs, ", ") + "]"
 }
+
+func toRiverExpression(goValue interface{}) (string, error) {
+    e := builder.NewExpr()
+    e.SetValue(goValue)
+    var buff bytes.Buffer
+    _, err := e.WriteTo(&buff)
+    if err != nil {
+        return "", err
+    }
+    return buff.String(), nil
+}
diff --git a/converter/internal/promtailconvert/internal/build/static_sd.go b/converter/internal/promtailconvert/internal/build/static_sd.go
new file mode 100644
index 000000000000..9f9427d4931a
--- /dev/null
+++ b/converter/internal/promtailconvert/internal/build/static_sd.go
@@ -0,0 +1,26 @@
+package build
+
+import (
+    "github.com/grafana/agent/converter/diag"
+)
+
+func (s *ScrapeConfigBuilder) AppendStaticSDs() {
+    if len(s.cfg.ServiceDiscoveryConfig.StaticConfigs) == 0 {
+        return
+    }
+
+    var allStaticTargets []map[string]string
+    for _, sd := range s.cfg.ServiceDiscoveryConfig.StaticConfigs {
+        allStaticTargets = append(allStaticTargets, convertPromLabels(sd.Labels))
+    }
+
+    targetsExpr, err := toRiverExpression(allStaticTargets)
+    if err != nil {
+        s.diags.Add(
+            diag.SeverityLevelCritical,
+            "failed to write static SD targets as valid River expression: "+err.Error(),
+        )
+    }
+
+    s.allTargetsExps = append(s.allTargetsExps, targetsExpr)
+}
diff --git a/converter/internal/promtailconvert/promtailconvert.go b/converter/internal/promtailconvert/promtailconvert.go
index 4fd72f1c3994..bdaa9d8b0ee9 100644
--- a/converter/internal/promtailconvert/promtailconvert.go
+++ b/converter/internal/promtailconvert/promtailconvert.go
@@ -5,22 +5,17 @@ import (
     "flag"
     "fmt"
 
-    "github.com/alecthomas/units"
     "github.com/grafana/agent/component/common/loki"
-    lokiwrite "github.com/grafana/agent/component/loki/write"
     "github.com/grafana/agent/converter/diag"
     "github.com/grafana/agent/converter/internal/common"
-    "github.com/grafana/agent/converter/internal/prometheusconvert"
     "github.com/grafana/agent/converter/internal/promtailconvert/internal/build"
     "github.com/grafana/agent/pkg/river/token/builder"
     "github.com/grafana/dskit/flagext"
-    "github.com/grafana/loki/clients/pkg/promtail/client"
     promtailcfg "github.com/grafana/loki/clients/pkg/promtail/config"
     "github.com/grafana/loki/clients/pkg/promtail/limit"
     "github.com/grafana/loki/clients/pkg/promtail/positions"
     "github.com/grafana/loki/clients/pkg/promtail/scrapeconfig"
     lokicfgutil "github.com/grafana/loki/pkg/util/cfg"
-    lokiflag "github.com/grafana/loki/pkg/util/flagext"
     "gopkg.in/yaml.v2"
 )
 
@@ -93,7 +88,7 @@ func AppendAll(f *builder.File, cfg *promtailcfg.Config, diags diag.Diagnostics)
     // Each client config needs to be a separate remote_write,
     // because they may have different ExternalLabels fields.
     for i, cc := range cfg.ClientConfigs {
-        writeBlocks[i], writeReceivers[i] = newLokiWrite(&cc, &diags, i)
+        writeBlocks[i], writeReceivers[i] = build.NewLokiWrite(&cc, &diags, i)
     }
 
     gc := &build.GlobalContext{
@@ -131,12 +126,33 @@ func appendScrapeConfig(
     diags *diag.Diagnostics,
     gctx *build.GlobalContext,
 ) {
+    //TODO(thampiotr): need to support/warn about the following fields:
+    //Encoding string `mapstructure:"encoding,omitempty" yaml:"encoding,omitempty"`
+    //DecompressionCfg *DecompressionConfig `yaml:"decompression,omitempty"`
+
+    //TODO(thampiotr): support/warn about the following log producing promtail configs:
+    //SyslogConfig *SyslogTargetConfig `mapstructure:"syslog,omitempty" yaml:"syslog,omitempty"`
+    //GcplogConfig *GcplogTargetConfig `mapstructure:"gcplog,omitempty" yaml:"gcplog,omitempty"`
+    //WindowsConfig *WindowsEventsTargetConfig `mapstructure:"windows_events,omitempty" yaml:"windows_events,omitempty"`
+    //KafkaConfig *KafkaTargetConfig `mapstructure:"kafka,omitempty" yaml:"kafka,omitempty"`
+    //AzureEventHubsConfig *AzureEventHubsTargetConfig `mapstructure:"azure_event_hubs,omitempty" yaml:"azure_event_hubs,omitempty"`
+    //GelfConfig *GelfTargetConfig `mapstructure:"gelf,omitempty" yaml:"gelf,omitempty"`
+    //HerokuDrainConfig *HerokuDrainTargetConfig `mapstructure:"heroku_drain,omitempty" yaml:"heroku_drain,omitempty"`
     b := build.NewScrapeConfigBuilder(f, diags, cfg, gctx)
+    b.Validate()
 
     // Append all the SD components
     b.AppendKubernetesSDs()
-    //TODO(thampiotr): add support for other SDs
+    b.AppendDockerSDs()
+    b.AppendStaticSDs()
+    b.AppendFileSDs()
+    b.AppendConsulSDs()
+    b.AppendConsulAgentSDs()
+    b.AppendDigitalOceanSDs()
+    b.AppendGCESDs()
+    b.AppendEC2SDs()
+    b.AppendAzureSDs()
 
     // Append loki.source.file to process all SD components' targets.
     // If any relabelling is required, it will be done via a discovery.relabel component.
@@ -147,69 +163,7 @@ func appendScrapeConfig(
     // Append all the components that produce logs directly.
     // If any relabelling is required, it will be done via a loki.relabel component.
     // The logs are sent to loki.process if processing is needed, or directly to loki.write components.
-    //TODO(thampiotr): add support for other integrations
     b.AppendCloudFlareConfig()
     b.AppendJournalConfig()
-}
-
-func newLokiWrite(client *client.Config, diags *diag.Diagnostics, index int) (*builder.Block, loki.LogsReceiver) {
-    label := fmt.Sprintf("default_%d", index)
-    lokiWriteArgs := toLokiWriteArguments(client, diags)
-    block := common.NewBlockWithOverride([]string{"loki", "write"}, label, lokiWriteArgs)
-    return block, common.ConvertLogsReceiver{
-        Expr: fmt.Sprintf("loki.write.%s.receiver", label),
-    }
-}
-
-func toLokiWriteArguments(config *client.Config, diags *diag.Diagnostics) *lokiwrite.Arguments {
-    batchSize, err := units.ParseBase2Bytes(fmt.Sprintf("%dB", config.BatchSize))
-    if err != nil {
-        diags.Add(
-            diag.SeverityLevelError,
-            fmt.Sprintf("failed to parse BatchSize for client config %s: %s", config.Name, err.Error()),
-        )
-    }
-
-    // This is not supported yet - see https://github.com/grafana/agent/issues/4335.
-    if config.DropRateLimitedBatches {
-        diags.Add(
-            diag.SeverityLevelError,
-            "DropRateLimitedBatches is currently not supported in Grafana Agent Flow.",
-        )
-    }
-
-    // Also deprecated in promtail.
-    if len(config.StreamLagLabels) != 0 {
-        diags.Add(
-            diag.SeverityLevelWarn,
-            "stream_lag_labels is deprecated and the associated metric has been removed",
-        )
-    }
-
-    return &lokiwrite.Arguments{
-        Endpoints: []lokiwrite.EndpointOptions{
-            {
-                Name:              config.Name,
-                URL:               config.URL.String(),
-                BatchWait:         config.BatchWait,
-                BatchSize:         batchSize,
-                HTTPClientConfig:  prometheusconvert.ToHttpClientConfig(&config.Client),
-                Headers:           config.Headers,
-                MinBackoff:        config.BackoffConfig.MinBackoff,
-                MaxBackoff:        config.BackoffConfig.MaxBackoff,
-                MaxBackoffRetries: config.BackoffConfig.MaxRetries,
-                RemoteTimeout:     config.Timeout,
-                TenantID:          config.TenantID,
-            },
-        },
-        ExternalLabels: convertFlagLabels(config.ExternalLabels),
-    }
-}
-
-func convertFlagLabels(labels lokiflag.LabelSet) map[string]string {
-    result := map[string]string{}
-    for k, v := range labels.LabelSet {
-        result[string(k)] = string(v)
-    }
-    return result
+    b.AppendPushAPI()
 }
diff --git a/converter/internal/promtailconvert/testdata/azure.river b/converter/internal/promtailconvert/testdata/azure.river
new file mode 100644
index 000000000000..19cc4d80d1b8
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/azure.river
@@ -0,0 +1,24 @@
+discovery.azure "fun_0" {
+  subscription_id = "subscription"
+
+  oauth {
+    client_id     = "client"
+    tenant_id     = "tenant"
+    client_secret = "secret"
+  }
+
+  managed_identity {
+    client_id = "client"
+  }
+  follow_redirects = true
+  enable_http2     = true
+}
+
+local.file_match "fun" {
+  path_targets = discovery.azure.fun_0.targets
+}
+
+loki.source.file "fun" {
+  targets    = local.file_match.fun.targets
+  forward_to = []
+}
diff --git a/converter/internal/promtailconvert/testdata/azure.yaml b/converter/internal/promtailconvert/testdata/azure.yaml
new file mode 100644
index 000000000000..fae4081a66c5
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/azure.yaml
@@ -0,0 +1,9 @@
+scrape_configs:
+  - job_name: fun
+    azure_sd_configs:
+      - subscription_id: "subscription"
+        tenant_id: "tenant"
+        client_id: "client"
+        client_secret: "secret"
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/bad_config.yaml b/converter/internal/promtailconvert/testdata/bad_config.yaml
index 0603a1ab73d8..3e4ebe56ebf5 100644
--- a/converter/internal/promtailconvert/testdata/bad_config.yaml
+++ b/converter/internal/promtailconvert/testdata/bad_config.yaml
@@ -12,4 +12,4 @@ not_a_thing: true
 server:
   profiling_enabled: true
 tracing:
-  enabled: false
\ No newline at end of file
+  enabled: false
diff --git a/converter/internal/promtailconvert/testdata/cloudflare_relabel.river b/converter/internal/promtailconvert/testdata/cloudflare_relabel.river
index 7e27329fab46..9ea65acffd44 100644
--- a/converter/internal/promtailconvert/testdata/cloudflare_relabel.river
+++ b/converter/internal/promtailconvert/testdata/cloudflare_relabel.river
@@ -8,7 +8,7 @@ loki.relabel "fun" {
   max_cache_size = 0
 }
 
-loki.source.cloudfare "fun" {
+loki.source.cloudflare "fun" {
   api_token = "dont_look_at_me_please"
   zone_id   = "area51"
   labels    = {
diff --git a/converter/internal/promtailconvert/testdata/cloudflare_relabel.yaml b/converter/internal/promtailconvert/testdata/cloudflare_relabel.yaml
index 17058d8f802e..5de1cd9998c5 100644
--- a/converter/internal/promtailconvert/testdata/cloudflare_relabel.yaml
+++ b/converter/internal/promtailconvert/testdata/cloudflare_relabel.yaml
@@ -15,4 +15,5 @@ scrape_configs:
     - source_labels:
         - __trail__
       target_label: __path__
-tracing: {enabled: false}
\ No newline at end of file
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/consul.river b/converter/internal/promtailconvert/testdata/consul.river
new file mode 100644
index 000000000000..d1a7724a645f
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/consul.river
@@ -0,0 +1,40 @@
+discovery.consul "consul_sd_0" {
+  datacenter    = "bigdata"
+  tag_separator = ";"
+  scheme        = "sketchy"
+  username      = "toby"
+  password      = "this_password_is_safe_innit?"
+  allow_stale   = false
+  services      = ["myapp"]
+  tags          = ["better", "faster", "stronger"]
+  node_meta     = {
+    what  = "this",
+    where = "here",
+  }
+  refresh_interval = "10m0s"
+
+  basic_auth {
+    username = "toby"
+    password = "this_password_is_safe_innit?"
+  }
+  follow_redirects = true
+  enable_http2     = true
+}
+
+discovery.relabel "fun" {
+  targets = discovery.consul.consul_sd_0.targets
+
+  rule {
+    source_labels = ["host"]
+    target_label  = "hostname"
+  }
+}
+
+local.file_match "fun" {
+  path_targets = discovery.relabel.fun.output
+}
+
+loki.source.file "fun" {
+  targets    = local.file_match.fun.targets
+  forward_to = []
+}
diff --git a/converter/internal/promtailconvert/testdata/consul.yaml b/converter/internal/promtailconvert/testdata/consul.yaml
new file mode 100644
index 000000000000..3a31fc602d76
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/consul.yaml
@@ -0,0 +1,27 @@
+scrape_configs:
+  - job_name: fun
+    consul_sd_configs:
+      - server: 'localhost:8500'
+        datacenter: bigdata
+        scheme: sketchy
+        username: toby
+        password: this_password_is_safe_innit?
+        tags:
+          - better
+          - faster
+          - stronger
+        services: [ 'myapp' ]
+        node_meta:
+          where: here
+          what: this
+        tag_separator: ";"
+        allow_stale: false
+        refresh_interval: 10m
+
+    relabel_configs:
+      - source_labels:
+          - host
+        target_label: hostname
+
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/digitalocean.river b/converter/internal/promtailconvert/testdata/digitalocean.river
new file mode 100644
index 000000000000..d3976478b18b
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/digitalocean.river
@@ -0,0 +1,15 @@
+discovery.digitalocean "fun_0" {
+  refresh_interval = "10m0s"
+  port             = 1234
+  follow_redirects = true
+  enable_http2     = true
+}
+
+local.file_match "fun" {
+  path_targets = discovery.digitalocean.fun_0.targets
+}
+
+loki.source.file "fun" {
+  targets    = local.file_match.fun.targets
+  forward_to = []
+}
diff --git a/converter/internal/promtailconvert/testdata/digitalocean.yaml b/converter/internal/promtailconvert/testdata/digitalocean.yaml
new file mode 100644
index 000000000000..592b24e42305
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/digitalocean.yaml
@@ -0,0 +1,8 @@
+scrape_configs:
+  - job_name: fun
+    digitalocean_sd_configs:
+      - refresh_interval: 10m
+        port: 1234
+
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/docker.river b/converter/internal/promtailconvert/testdata/docker.river
new file mode 100644
index 000000000000..c1f7837e3703
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/docker.river
@@ -0,0 +1,65 @@
+discovery.docker "fun_0" {
+  host             = "unix:///var/run/docker.sock"
+  port             = 12345
+  refresh_interval = "10s"
+
+  filter {
+    name   = "v60"
+    values = ["small", "large"]
+  }
+
+  filter {
+    name   = "aeropress"
+    values = ["regular", "inverted"]
+  }
+
+  basic_auth {
+    username      = "robin"
+    password_file = "/home/robin/.password"
+  }
+  proxy_url = "http://proxy.example.com"
+
+  tls_config {
+    ca_file              = "/home/robin/.ca"
+    cert_file            = "/home/robin/.cert"
+    key_file             = "/home/robin/.key"
+    server_name          = "example.local"
+    insecure_skip_verify = true
+  }
+}
+
+discovery.docker "fun_1" {
+  host             = "unix:///var/run/docker.sock"
+  port             = 54321
+  refresh_interval = "10s"
+
+  filter {
+    name   = "sunscreen"
+    values = ["spf20", "spf50", "spf100"]
+  }
+
+  oauth2 {
+    client_id          = "client_id"
+    client_secret_file = "foo/bar"
+    scopes             = ["scope1", "scope2"]
+    token_url          = "https://example/oauth2/token"
+    endpoint_params    = {
+      host = "example",
+      path = "/oauth2/token",
+    }
+
+    tls_config { }
+  }
+}
+
+local.file_match "fun" {
+  path_targets = concat(
+    discovery.docker.fun_0.targets,
+    discovery.docker.fun_1.targets,
+  )
+}
+
+loki.source.file "fun" {
+  targets    = local.file_match.fun.targets
+  forward_to = []
+}
diff --git a/converter/internal/promtailconvert/testdata/docker.yaml b/converter/internal/promtailconvert/testdata/docker.yaml
new file mode 100644
index 000000000000..53e786ebd8e4
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/docker.yaml
@@ -0,0 +1,45 @@
+scrape_configs:
+  - job_name: fun
+    docker_sd_configs:
+      - host: unix:///var/run/docker.sock
+        port: 12345
+        filters:
+          - name: v60
+            values: [small, large]
+          - name: aeropress
+            values: [regular, inverted]
+        refresh_interval: 10s
+        host_networking_host: localhost
+
+        proxy_url: http://proxy.example.com
+        basic_auth:
+          username: robin
+          password_file: /home/robin/.password
+        tls_config:
+          ca_file: /home/robin/.ca
+          cert_file: /home/robin/.cert
+          key_file: /home/robin/.key
+          server_name: example.local
+          insecure_skip_verify: true
+
+      - host: unix:///var/run/docker.sock
+        port: 54321
+        filters:
+          - name: sunscreen
+            values: [ spf20, spf50, spf100 ]
+        refresh_interval: 10s
+        host_networking_host: localhost
+
+        oauth2:
+          client_id: client_id
+          client_secret_file: foo/bar
+          scopes:
+            - scope1
+            - scope2
+          token_url: https://example/oauth2/token
+          endpoint_params:
+            host: example
+            path: /oauth2/token
+
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/ec2.river b/converter/internal/promtailconvert/testdata/ec2.river
new file mode 100644
index 000000000000..c64822b297db
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/ec2.river
@@ -0,0 +1,15 @@
+discovery.ec2 "fun_0" {
+  region     = "us-east-1"
+  access_key = "YOUR_ACCESS_KEY"
+  secret_key = "YOUR_SECRET_KEY"
+  port       = 8080
+}
+
+local.file_match "fun" {
+  path_targets = discovery.ec2.fun_0.targets
+}
+
+loki.source.file "fun" {
+  targets    = local.file_match.fun.targets
+  forward_to = []
+}
diff --git a/converter/internal/promtailconvert/testdata/ec2.yaml b/converter/internal/promtailconvert/testdata/ec2.yaml
new file mode 100644
index 000000000000..efae1f456a5d
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/ec2.yaml
@@ -0,0 +1,10 @@
+scrape_configs:
+  - job_name: fun
+    ec2_sd_configs:
+      - region: 'us-east-1'
+        access_key: 'YOUR_ACCESS_KEY'
+        secret_key: 'YOUR_SECRET_KEY'
+        port: 8080
+
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/file.river b/converter/internal/promtailconvert/testdata/file.river
new file mode 100644
index 000000000000..1ee44ece2dae
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/file.river
@@ -0,0 +1,20 @@
+discovery.file "file_sd_0" { + files = ["/etc/prometheus/targets/*.json", "/etc/prometheus/targets/*.yaml", "/etc/prometheus/targets/*.json"] +} + +discovery.file "file_sd_1" { + files = ["/etc/agent/targets/*.json", "/etc/agent/targets/*.yaml", "/etc/agent/targets/*.yml"] + refresh_interval = "30m0s" +} + +local.file_match "fun" { + path_targets = concat( + discovery.file.file_sd_0.targets, + discovery.file.file_sd_1.targets, + ) +} + +loki.source.file "fun" { + targets = local.file_match.fun.targets + forward_to = [] +} diff --git a/converter/internal/promtailconvert/testdata/file.yaml b/converter/internal/promtailconvert/testdata/file.yaml new file mode 100644 index 000000000000..5533051b1afd --- /dev/null +++ b/converter/internal/promtailconvert/testdata/file.yaml @@ -0,0 +1,15 @@ +scrape_configs: + - job_name: fun + file_sd_configs: + - files: + - /etc/prometheus/targets/*.json + - /etc/prometheus/targets/*.yaml + - /etc/prometheus/targets/*.json + refresh_interval: 5m + - files: + - /etc/agent/targets/*.json + - /etc/agent/targets/*.yaml + - /etc/agent/targets/*.yml + refresh_interval: 30m +tracing: {enabled: false} +server: {register_instrumentation: false} diff --git a/converter/internal/promtailconvert/testdata/gce.river b/converter/internal/promtailconvert/testdata/gce.river new file mode 100644 index 000000000000..c2bdff6a5582 --- /dev/null +++ b/converter/internal/promtailconvert/testdata/gce.river @@ -0,0 +1,14 @@ +discovery.gce "fun_0" { + project = "your-project-id" + zone = "us-central1-a" + port = 8080 +} + +local.file_match "fun" { + path_targets = discovery.gce.fun_0.targets +} + +loki.source.file "fun" { + targets = local.file_match.fun.targets + forward_to = [] +} diff --git a/converter/internal/promtailconvert/testdata/gce.yaml b/converter/internal/promtailconvert/testdata/gce.yaml new file mode 100644 index 000000000000..8c991fbae7a9 --- /dev/null +++ b/converter/internal/promtailconvert/testdata/gce.yaml @@ -0,0 +1,9 @@ +scrape_configs: + - job_name: fun + gce_sd_configs: + - project: 'your-project-id' + zone: 'us-central1-a' + port: 8080 + +tracing: {enabled: false} +server: {register_instrumentation: false} diff --git a/converter/internal/promtailconvert/testdata/journal.yaml b/converter/internal/promtailconvert/testdata/journal.yaml index 95112a37ab03..ea690fd2055f 100644 --- a/converter/internal/promtailconvert/testdata/journal.yaml +++ b/converter/internal/promtailconvert/testdata/journal.yaml @@ -8,4 +8,5 @@ scrape_configs: labels: variety: chardonnay region: chablis -tracing: {enabled: false} \ No newline at end of file +tracing: {enabled: false} +server: {register_instrumentation: false} diff --git a/converter/internal/promtailconvert/testdata/journal_relabel.yaml b/converter/internal/promtailconvert/testdata/journal_relabel.yaml index f25896ec4e25..21949ceeaaed 100644 --- a/converter/internal/promtailconvert/testdata/journal_relabel.yaml +++ b/converter/internal/promtailconvert/testdata/journal_relabel.yaml @@ -12,4 +12,5 @@ scrape_configs: - source_labels: - __trail__ target_label: __path__ -tracing: {enabled: false} \ No newline at end of file +tracing: {enabled: false} +server: {register_instrumentation: false} diff --git a/converter/internal/promtailconvert/testdata/kubernetes.river b/converter/internal/promtailconvert/testdata/kubernetes.river index cc018c7ee95d..bd10a9a7ce23 100644 --- a/converter/internal/promtailconvert/testdata/kubernetes.river +++ b/converter/internal/promtailconvert/testdata/kubernetes.river @@ -60,7 +60,7 @@ 
discovery.kubernetes "fun_4" { } } -discovery.file "fun" { +local.file_match "fun" { path_targets = concat( discovery.kubernetes.fun_0.targets, discovery.kubernetes.fun_1.targets, @@ -71,6 +71,6 @@ discovery.file "fun" { } loki.source.file "fun" { - targets = discovery.file.fun.targets + targets = local.file_match.fun.targets forward_to = [] } diff --git a/converter/internal/promtailconvert/testdata/kubernetes.yaml b/converter/internal/promtailconvert/testdata/kubernetes.yaml index 78b3bbc8ce84..03bda07efe17 100644 --- a/converter/internal/promtailconvert/testdata/kubernetes.yaml +++ b/converter/internal/promtailconvert/testdata/kubernetes.yaml @@ -41,4 +41,5 @@ scrape_configs: path: /oauth2/token proxy_from_environment: true -tracing: {enabled: false} \ No newline at end of file +tracing: {enabled: false} +server: {register_instrumentation: false} diff --git a/converter/internal/promtailconvert/testdata/push_api.river b/converter/internal/promtailconvert/testdata/push_api.river new file mode 100644 index 000000000000..dbbb58cb59b6 --- /dev/null +++ b/converter/internal/promtailconvert/testdata/push_api.river @@ -0,0 +1,48 @@ +discovery.relabel "fun" { + targets = [] + + rule { + source_labels = ["__trail__"] + target_label = "__path__" + } +} + +loki.source.api "fun" { + http { + listen_address = "localhost" + listen_port = 9094 + conn_limit = 100 + server_read_timeout = "10s" + server_write_timeout = "10s" + server_idle_timeout = "10s" + } + + grpc { + listen_address = "127.0.0.1" + listen_port = 9095 + conn_limit = 100 + max_connection_age = "10s" + max_connection_age_grace = "10s" + max_connection_idle = "1m0s" + server_max_recv_msg_size = 1000 + server_max_send_msg_size = 1000 + server_max_concurrent_streams = 100 + } + graceful_shutdown_timeout = "0s" + forward_to = [loki.write.default_0.receiver] + labels = { + identity = "unidentified", + object_type = "flying", + } + relabel_rules = discovery.relabel.fun.rules + use_incoming_timestamp = true +} + +loki.write "default_0" { + endpoint { + url = "http://localhost/loki/api/v1/push" + follow_redirects = false + enable_http2 = false + } + external_labels = {} +} diff --git a/converter/internal/promtailconvert/testdata/push_api.yaml b/converter/internal/promtailconvert/testdata/push_api.yaml new file mode 100644 index 000000000000..44514dae8bfb --- /dev/null +++ b/converter/internal/promtailconvert/testdata/push_api.yaml @@ -0,0 +1,32 @@ +clients: + - url: http://localhost/loki/api/v1/push +scrape_configs: + - job_name: fun + loki_push_api: + use_incoming_timestamp: true + labels: + identity: unidentified + object_type: flying + server: + http_listen_address: localhost + http_listen_port: 9094 + http_listen_conn_limit: 100 + http_server_read_timeout: 10s + http_server_write_timeout: 10s + http_server_idle_timeout: 10s + + grpc_listen_address: 127.0.0.1 + grpc_listen_port: 9095 + grpc_listen_conn_limit: 100 + grpc_server_max_connection_age: 10s + grpc_server_max_connection_age_grace: 10s + grpc_server_max_connection_idle: 1m + grpc_server_max_recv_msg_size: 1000 + grpc_server_max_send_msg_size: 1000 + grpc_server_max_concurrent_streams: 100 + relabel_configs: + - source_labels: + - __trail__ + target_label: __path__ +tracing: { enabled: false } +server: { register_instrumentation: false } diff --git a/converter/internal/promtailconvert/testdata/push_api_unsupported.diags b/converter/internal/promtailconvert/testdata/push_api_unsupported.diags new file mode 100644 index 000000000000..dfb3ecba7123 --- /dev/null +++ 
b/converter/internal/promtailconvert/testdata/push_api_unsupported.diags @@ -0,0 +1,19 @@ +(Error) http_listen_network is not supported in server config +(Error) grpc_listen_network is not supported in server config +(Error) tls_cipher_suites is not supported in server config +(Error) tls_min_version is not supported in server config +(Error) http_tls_config is not supported in server config +(Error) grpc_tls_config is not supported in server config +(Error) register_instrumentation is not supported in server config +(Error) graceful_shutdown_timeout is not supported in server config +(Error) grpc_server_keepalive_time is not supported in server config +(Error) grpc_server_keepalive_timeout is not supported in server config +(Error) grpc_server_min_time_between_pings is not supported in server config +(Error) grpc_server_ping_without_stream_allowed is not supported in server config +(Error) log_format is not supported in server config +(Error) log_level is not supported in server config +(Error) log_source_ips_enabled is not supported in server config +(Error) log_source_ips_header is not supported in server config +(Error) log_source_ips_regex is not supported in server config +(Error) log_request_at_info_level_enabled is not supported in server config +(Error) http_path_prefix is not supported in server config \ No newline at end of file diff --git a/converter/internal/promtailconvert/testdata/push_api_unsupported.yaml b/converter/internal/promtailconvert/testdata/push_api_unsupported.yaml new file mode 100644 index 000000000000..6e925d6add14 --- /dev/null +++ b/converter/internal/promtailconvert/testdata/push_api_unsupported.yaml @@ -0,0 +1,45 @@ +scrape_configs: + - job_name: fun + loki_push_api: + server: + http_listen_address: localhost + http_listen_port: 9094 + http_listen_conn_limit: 100 + http_server_read_timeout: 10s + http_server_write_timeout: 10s + http_server_idle_timeout: 10s + + grpc_listen_address: 127.0.0.1 + grpc_listen_port: 9095 + grpc_listen_conn_limit: 100 + grpc_server_max_connection_age: 10s + grpc_server_max_connection_age_grace: 10s + grpc_server_max_connection_idle: 1m + grpc_server_max_recv_msg_size: 1000 + grpc_server_max_send_msg_size: 1000 + grpc_server_max_concurrent_streams: 100 + + # unsupported fields + http_listen_network: tcp4 + grpc_listen_network: tcp4 + tls_cipher_suites: TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 + tls_min_version: tls12 + http_tls_config: + cert_file: /var/cert + grpc_tls_config: + cert_file: /var/cert + register_instrumentation: true + graceful_shutdown_timeout: 10s + grpc_server_keepalive_time: 10s + grpc_server_keepalive_timeout: 10s + grpc_server_min_time_between_pings: 10s + grpc_server_ping_without_stream_allowed: true + log_format: json + log_level: debug + log_source_ips_enabled: true + log_source_ips_header: IP_ADDR + log_source_ips_regex: .* + log_request_at_info_level_enabled: true + http_path_prefix: /my/path +tracing: { enabled: false } +server: { register_instrumentation: false } diff --git a/converter/internal/promtailconvert/testdata/remote_write.yaml b/converter/internal/promtailconvert/testdata/remote_write.yaml index cc6402a25c21..910f100f1d09 100644 --- a/converter/internal/promtailconvert/testdata/remote_write.yaml +++ b/converter/internal/promtailconvert/testdata/remote_write.yaml @@ -41,4 +41,5 @@ clients: tracing: enabled: false server: - disable: true \ No newline at end of file + disable: true + register_instrumentation: false diff --git 
diff --git a/converter/internal/promtailconvert/testdata/sd_pipeline_example.river b/converter/internal/promtailconvert/testdata/sd_pipeline_example.river
index e6d1e610c9bc..99a11a2ef0f3 100644
--- a/converter/internal/promtailconvert/testdata/sd_pipeline_example.river
+++ b/converter/internal/promtailconvert/testdata/sd_pipeline_example.river
@@ -20,7 +20,7 @@ discovery.relabel "fun" {
 	}
 }
 
-discovery.file "fun" {
+local.file_match "fun" {
 	path_targets = discovery.relabel.fun.output
 }
 
@@ -38,7 +38,7 @@ loki.process "fun" {
 }
 
 loki.source.file "fun" {
-	targets    = discovery.file.fun.targets
+	targets    = local.file_match.fun.targets
 	forward_to = [loki.process.fun.receiver]
 }
diff --git a/converter/internal/promtailconvert/testdata/sd_pipeline_example.yaml b/converter/internal/promtailconvert/testdata/sd_pipeline_example.yaml
index dac6d7cf8d51..facdf362eabd 100644
--- a/converter/internal/promtailconvert/testdata/sd_pipeline_example.yaml
+++ b/converter/internal/promtailconvert/testdata/sd_pipeline_example.yaml
@@ -19,4 +19,5 @@ scrape_configs:
       - role: node
         kubeconfig_file: /home/toby/.kube/config
 
-tracing: {enabled: false}
\ No newline at end of file
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/source_pipeline_example.yaml b/converter/internal/promtailconvert/testdata/source_pipeline_example.yaml
index 26127ef6ff3a..e9c2bff31ff6 100644
--- a/converter/internal/promtailconvert/testdata/source_pipeline_example.yaml
+++ b/converter/internal/promtailconvert/testdata/source_pipeline_example.yaml
@@ -19,4 +19,5 @@ scrape_configs:
       labels:
         variety: chardonnay
         region: chablis
-tracing: {enabled: false}
\ No newline at end of file
+tracing: {enabled: false}
+server: {register_instrumentation: false}
diff --git a/converter/internal/promtailconvert/testdata/static_pipeline_example.river b/converter/internal/promtailconvert/testdata/static_pipeline_example.river
new file mode 100644
index 000000000000..e016be899c49
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/static_pipeline_example.river
@@ -0,0 +1,61 @@
+discovery.kubernetes "example_0" {
+	role            = "pod"
+	kubeconfig_file = "/home/toby/.kube/config"
+}
+
+discovery.relabel "example" {
+	targets = concat(
+		discovery.kubernetes.example_0.targets,
+		[{
+			__path__          = "/var/log/captain_scott_last_expedition.log",
+			__path__exclude__ = "/var/log/sensational_news.log",
+			category          = "fun",
+		}, {
+			__path__          = "/shelf/books/star_trek*",
+			__path__exclude__ = "/shelf/books/star_wars*",
+			category          = "sf",
+			quality           = "high",
+		}, {}],
+	)
+
+	rule {
+		source_labels = ["__trail__"]
+		target_label  = "__path__"
+	}
+
+	rule {
+		source_labels = ["__name__"]
+		action        = "drop"
+	}
+}
+
+local.file_match "example" {
+	path_targets = discovery.relabel.example.output
+}
+
+loki.process "example" {
+	forward_to = [loki.write.default_0.receiver]
+
+	stage.json {
+		expressions = {
+			face = "smiley",
+			hand = "thumbs-up",
+		}
+		source         = "video"
+		drop_malformed = true
+	}
+}
+
+loki.source.file "example" {
+	targets    = local.file_match.example.targets
+	forward_to = [loki.process.example.receiver]
+}
+
+loki.write "default_0" {
+	endpoint {
+		url              = "http://localhost/loki/api/v1/push"
+		follow_redirects = false
+		enable_http2     = false
+	}
+	external_labels = {}
+}
diff --git a/converter/internal/promtailconvert/testdata/static_pipeline_example.yaml b/converter/internal/promtailconvert/testdata/static_pipeline_example.yaml
new file mode 100644
index 000000000000..a7fce3a015de
--- /dev/null
+++ b/converter/internal/promtailconvert/testdata/static_pipeline_example.yaml
@@ -0,0 +1,41 @@
+clients:
+  - url: http://localhost/loki/api/v1/push
+scrape_configs:
+  - job_name: example
+    pipeline_stages:
+      - json:
+          expressions:
+            face: smiley
+            hand: thumbs-up
+          source: video
+          drop_malformed: true
+    relabel_configs:
+      - source_labels:
+          - __trail__
+        target_label: __path__
+      - source_labels:
+          - __name__
+        action: drop
+    static_configs:
+      - targets:
+          - this
+          - is
+          - not
+          - used
+        labels:
+          __path__: /var/log/captain_scott_last_expedition.log
+          __path__exclude__: /var/log/sensational_news.log
+          category: fun
+      - labels:
+          __path__: /shelf/books/star_trek*
+          __path__exclude__: /shelf/books/star_wars*
+          category: sf
+          quality: high
+      - labels: {}
+    # Include k8s config to demonstrate how other discovery features differ from static_configs
+    kubernetes_sd_configs:
+      - role: pod
+        kubeconfig_file: /home/toby/.kube/config
+
+tracing: {enabled: false}
+server: {register_instrumentation: false}
\ No newline at end of file
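The static_pipeline_example pair shows how static_configs are translated: the targets list (this/is/not/used) is intentionally discarded, and each labels map — __path__ and __path__exclude__ included — becomes one element of the concat(...) target list handed to discovery.relabel. A minimal sketch of that shape, using a hypothetical helper rather than the converter's real code:

package main

import "fmt"

// staticConfigToTargets is a hypothetical stand-in for the conversion step:
// promtail's static_config targets are ignored, and each labels map is passed
// through unchanged as one Flow target, so __path__ and friends survive intact.
func staticConfigToTargets(labelSets []map[string]string) []map[string]string {
	targets := make([]map[string]string, 0, len(labelSets))
	for _, labels := range labelSets {
		targets = append(targets, labels) // the labels map *is* the target
	}
	return targets
}

func main() {
	targets := staticConfigToTargets([]map[string]string{
		{"__path__": "/var/log/captain_scott_last_expedition.log", "category": "fun"},
		{}, // an empty labels map still yields an (empty) target, as in the test
	})
	fmt.Println(targets) // [map[__path__:... category:fun] map[]]
}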
diff --git a/converter/internal/promtailconvert/testdata/unsupported.diags b/converter/internal/promtailconvert/testdata/unsupported.diags
index b0e6f6ca66aa..48a51a92aaf4 100644
--- a/converter/internal/promtailconvert/testdata/unsupported.diags
+++ b/converter/internal/promtailconvert/testdata/unsupported.diags
@@ -5,5 +5,16 @@
 (Error) limits_config is not yet supported in Flow Mode
 (Error) tracing configuration cannot be migrated to Flow Mode automatically - please refer to documentation on how to configure tracing in Flow Mode
 (Error) reading targets from stdin is not supported in Flow Mode configuration file
+(Warning) server.profiling_enabled is not supported - use Agent's main HTTP server's profiling endpoints instead.
+(Warning) server.register_instrumentation is not supported - Flow mode components expose their metrics automatically in their own metrics namespace
+(Warning) server.log_level is not supported - Flow mode components may produce different logs
+(Error) server.http_path_prefix is not supported
+(Warning) server.health_check_target disabling is not supported in Flow mode
 (Error) DropRateLimitedBatches is currently not supported in Grafana Agent Flow.
-(Warning) stream_lag_labels is deprecated and the associated metric has been removed
\ No newline at end of file
+(Warning) stream_lag_labels is deprecated and the associated metric has been removed
+(Error) dockerswarm_sd_configs is not supported
+(Error) serverset_sd_configs is not supported
+(Error) nerve_sd_configs is not supported
+(Error) marathon_sd_configs is not supported
+(Error) openstack_sd_configs is not supported
+(Error) triton_sd_configs is not supported
\ No newline at end of file
diff --git a/converter/internal/promtailconvert/testdata/unsupported.yaml b/converter/internal/promtailconvert/testdata/unsupported.yaml
index 51eb6c7e1828..c61235c8dea2 100644
--- a/converter/internal/promtailconvert/testdata/unsupported.yaml
+++ b/converter/internal/promtailconvert/testdata/unsupported.yaml
@@ -19,4 +19,34 @@ target_config:
 server:
   profiling_enabled: true
   register_instrumentation: true
-  log_level: error
\ No newline at end of file
+  log_level: error
+  http_path_prefix: /cgi/bin
+  health_check_target: false
+
+scrape_configs:
+  - job_name: unsupported
+    dockerswarm_sd_configs:
+      - host: remote
+        role: tasks
+        port: 1234
+    serverset_sd_configs:
+      - servers:
+          - localhost
+        paths:
+          - /zk/sd/some/path
+    nerve_sd_configs:
+      - servers:
+          - localhost
+        paths:
+          - /zk/sd/some/path
+    marathon_sd_configs:
+      - servers: [localhost]
+    openstack_sd_configs:
+      - role: instance
+        domain_name: domain
+        region: region
+        port: 1234
+    triton_sd_configs:
+      - account: account
+        dns_suffix: .example.com
+        endpoint: triton.example.com
diff --git a/converter/internal/promtailconvert/validate.go b/converter/internal/promtailconvert/validate.go
index fcf67f34e139..595563ba8362 100644
--- a/converter/internal/promtailconvert/validate.go
+++ b/converter/internal/promtailconvert/validate.go
@@ -68,4 +68,26 @@ func validateTopLevelConfig(cfg *promtailcfg.Config, diags *diag.Diagnostics) {
 			"reading targets from stdin is not supported in Flow Mode configuration file",
 		)
 	}
+	if cfg.ServerConfig.ProfilingEnabled {
+		diags.Add(diag.SeverityLevelWarn, "server.profiling_enabled is not supported - use Agent's "+
+			"main HTTP server's profiling endpoints instead.")
+	}
+
+	if cfg.ServerConfig.RegisterInstrumentation {
+		diags.Add(diag.SeverityLevelWarn, "server.register_instrumentation is not supported - Flow mode "+
+			"components expose their metrics automatically in their own metrics namespace")
+	}
+
+	if cfg.ServerConfig.LogLevel.String() != "info" {
+		diags.Add(diag.SeverityLevelWarn, "server.log_level is not supported - Flow mode "+
+			"components may produce different logs")
+	}
+
+	if cfg.ServerConfig.PathPrefix != "" {
+		diags.Add(diag.SeverityLevelError, "server.http_path_prefix is not supported")
+	}
+
+	if cfg.ServerConfig.HealthCheckTarget != nil && !*cfg.ServerConfig.HealthCheckTarget {
+		diags.Add(diag.SeverityLevelWarn, "server.health_check_target disabling is not supported in Flow mode")
+	}
 }
diff --git a/converter/internal/staticconvert/testdata/prom_scrape.river b/converter/internal/staticconvert/testdata/prom_scrape.river
index a1442f57dbb7..da7cdd760034 100644
--- a/converter/internal/staticconvert/testdata/prom_scrape.river
+++ b/converter/internal/staticconvert/testdata/prom_scrape.river
@@ -73,7 +73,7 @@ prometheus.scrape "agent_prometheus" {
 }
 
 prometheus.scrape "agent_promobee" {
-	targets        = discovery.relabel.agent_promobee.targets
+	targets        = discovery.relabel.agent_promobee.output
 	forward_to     = [prometheus.relabel.agent_promobee.receiver]
 	job_name       = "promobee"
 	scrape_timeout = "45s"
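One pattern in the validate.go hunk deserves a note: HealthCheckTarget is a *bool, so the converter can distinguish "option never set" (nil) from an explicit false, and it warns only in the latter case. A self-contained sketch of that tri-state check:

package main

import "fmt"

// healthCheckDisabled mirrors the nil-guarded check in validate.go: a nil
// pointer means the user never set health_check_target, so only an explicit
// `false` should trigger the "disabling is not supported" warning.
func healthCheckDisabled(v *bool) bool {
	return v != nil && !*v
}

func main() {
	var unset *bool
	enabled, disabled := true, false

	fmt.Println(healthCheckDisabled(unset))     // false - omitted, no warning
	fmt.Println(healthCheckDisabled(&enabled))  // false - explicitly enabled
	fmt.Println(healthCheckDisabled(&disabled)) // true  - explicitly disabled, warn
}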
diff --git a/converter/internal/test_common/testing.go b/converter/internal/test_common/testing.go
index 82ce08ce8f95..39e7f4237136 100644
--- a/converter/internal/test_common/testing.go
+++ b/converter/internal/test_common/testing.go
@@ -11,6 +11,9 @@ import (
 	"testing"
 
 	"github.com/grafana/agent/converter/diag"
+	"github.com/grafana/agent/pkg/cluster"
+	"github.com/grafana/agent/pkg/flow"
+	"github.com/grafana/agent/pkg/flow/logging"
 	"github.com/stretchr/testify/require"
 )
 
@@ -41,16 +44,22 @@ func TestDirectory(t *testing.T, folderPath string, sourceSuffix string, convert
 		if strings.HasSuffix(path, sourceSuffix) {
 			tc := getTestCaseName(path, sourceSuffix)
 			t.Run(tc, func(t *testing.T) {
+				riverFile := strings.TrimSuffix(path, sourceSuffix) + flowSuffix
+				diagsFile := strings.TrimSuffix(path, sourceSuffix) + diagsSuffix
+				if !fileExists(riverFile) && !fileExists(diagsFile) {
+					t.Fatalf("no expected diags or river for %s - missing test expectations?", path)
+				}
+
 				actualRiver, actualDiags := convert(getSourceContents(t, path))
 
 				// Skip Info level diags for this testing. These would create
 				// a lot of unnecessary noise.
 				actualDiags.RemoveDiagsBySeverity(diag.SeverityLevelInfo)
 
-				expectedDiags := getExpectedDiags(t, strings.TrimSuffix(path, sourceSuffix)+diagsSuffix)
+				expectedDiags := getExpectedDiags(t, diagsFile)
 				validateDiags(t, expectedDiags, actualDiags)
 
-				expectedRiver := getExpectedRiver(t, path, sourceSuffix)
+				expectedRiver := getExpectedRiver(t, riverFile)
 				validateRiver(t, expectedRiver, actualRiver)
 			})
 		}
@@ -96,6 +105,7 @@ func validateDiags(t *testing.T, expectedDiags []string, actualDiags diag.Diagno
 		if len(expectedDiags) > ix {
 			require.Equal(t, expectedDiags[ix], diag.String())
 		} else {
+			fmt.Printf("=== EXTRA DIAGS FOUND ===\n%s\n===========================\n", actualDiags[ix:])
 			require.Fail(t, "unexpected diag count reach for diag: "+diag.String())
 		}
 	}
@@ -113,10 +123,9 @@ func normalizeLineEndings(data []byte) []byte {
 }
 
 // getExpectedRiver reads the expected river output file and retrieve its contents.
-func getExpectedRiver(t *testing.T, path string, sourceSuffix string) []byte {
-	outputFile := strings.TrimSuffix(path, sourceSuffix) + flowSuffix
-	if _, err := os.Stat(outputFile); err == nil {
-		outputBytes, err := os.ReadFile(outputFile)
+func getExpectedRiver(t *testing.T, filePath string) []byte {
+	if _, err := os.Stat(filePath); err == nil {
+		outputBytes, err := os.ReadFile(filePath)
 		require.NoError(t, err)
 		return normalizeLineEndings(outputBytes)
 	}
@@ -124,6 +133,11 @@ func getExpectedRiver(t *testing.T, path string, sourceSuffix string) []byte {
 	return nil
 }
 
+func fileExists(path string) bool {
+	_, err := os.Stat(path)
+	return err == nil
+}
+
 // validateRiver makes sure the expected river and actual river are a match
 func validateRiver(t *testing.T, expectedRiver []byte, actualRiver []byte) {
 	if len(expectedRiver) > 0 {
@@ -134,5 +148,31 @@ func validateRiver(t *testing.T, expectedRiver []byte, actualRiver []byte) {
 	}
 
 	require.Equal(t, string(expectedRiver), string(normalizeLineEndings(actualRiver)))
+
+	attemptLoadingFlowConfig(t, actualRiver)
 }
+
+// attemptLoadingFlowConfig will attempt to load the Flow config and report any errors.
+func attemptLoadingFlowConfig(t *testing.T, river []byte) {
+	cfg, err := flow.ReadFile(t.Name(), river)
+	require.NoError(t, err, "the output River config failed to parse: %s", string(normalizeLineEndings(river)))
+
+	logger, err := logging.New(os.Stderr, logging.DefaultOptions)
+	require.NoError(t, err)
+	f := flow.New(flow.Options{
+		Logger:         logger,
+		Clusterer:      &cluster.Clusterer{Node: cluster.NewLocalNode("")},
+		DataPath:       t.TempDir(),
+		HTTPListenAddr: ":0",
+	})
+	err = f.LoadFile(cfg, nil)
+
+	// Many components will fail to build as e.g. the cert files are missing, so we ignore these errors.
+	// This is not ideal, but we still validate for other potential issues.
+	if err != nil && strings.Contains(err.Error(), "Failed to build component") {
+		t.Log("ignoring error: " + err.Error())
+		return
+	}
+	require.NoError(t, err, "failed to load the River config: %s", string(normalizeLineEndings(river)))
+}
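For context on how the reworked harness is driven: a converter package would typically invoke TestDirectory from a one-line test, pairing every .yaml input under testdata/ with its sibling .river and .diags expectation files. A hypothetical sketch (the actual test wiring is not part of this diff, and the Convert identifier is assumed):

package promtailconvert_test

import (
	"testing"

	"github.com/grafana/agent/converter/internal/promtailconvert"
	"github.com/grafana/agent/converter/internal/test_common"
)

// TestConvert walks testdata/, converts each .yaml promtail config, and
// compares the result against the matching .river and .diags files.
func TestConvert(t *testing.T) {
	test_common.TestDirectory(t, "testdata", ".yaml", promtailconvert.Convert)
}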