docs: Rename host options to endpoint (#3590)
* Rename

Signed-off-by: Kruno Tomola Fabro <krunotf@gmail.com>

* Update usages of renamed

Signed-off-by: Kruno Tomola Fabro <krunotf@gmail.com>

* Revert endpoint

Signed-off-by: Kruno Tomola Fabro <krunotf@gmail.com>

* Update tests

Signed-off-by: Kruno Tomola Fabro <krunotf@gmail.com>

* Revert docs

Signed-off-by: ktf <krunotf@gmail.com>

* More test updates

Signed-off-by: ktf <krunotf@gmail.com>
ktff committed Aug 28, 2020
1 parent f2301e2 commit 53c8e00
Showing 24 changed files with 102 additions and 83 deletions.
4 changes: 2 additions & 2 deletions .meta/sinks/clickhouse.toml.erb
@@ -92,12 +92,12 @@ description = "The token to use for bearer authentication"
}
) %>

-[sinks.clickhouse.options.host]
+[sinks.clickhouse.options.endpoint]
type = "string"
common = true
examples = ["http://localhost:8123"]
required = true
description = "The host url of the [Clickhouse][urls.clickhouse] server."
description = "The endpoint of the [Clickhouse][urls.clickhouse] server."

[sinks.clickhouse.options.table]
type = "string"
2 changes: 1 addition & 1 deletion .meta/sinks/datadog_metrics.toml.erb
@@ -41,7 +41,7 @@ examples = ["${DATADOG_API_KEY}", "ef8d5de700e7989468166c40fc8a0ccd"]
required = true
description = "Datadog [API key](https://docs.datadoghq.com/api/?lang=bash#authentication)"

-[sinks.datadog_metrics.options.host]
+[sinks.datadog_metrics.options.endpoint]
type = "string"
examples = ["https://api.datadoghq.com", "https://api.datadoghq.eu"]
default = "https://api.datadoghq.com"
6 changes: 3 additions & 3 deletions .meta/sinks/elasticsearch.toml.erb
@@ -124,7 +124,7 @@ relevant_when = {strategy = "aws"}
required = false
description = """\
The [AWS region][urls.aws_regions] of the target service. \
-This defaults to the region named in the host parameter, \
+This defaults to the region named in the endpoint parameter, \
or the value of the `$AWS_REGION` or `$AWS_DEFAULT_REGION` environment \
variables if that cannot be determined, or "us-east-1".\
"""
@@ -160,12 +160,12 @@ description = """\
A custom header to be added to each outgoing Elasticsearch request.\
"""

-[sinks.elasticsearch.options.host]
+[sinks.elasticsearch.options.endpoint]
type = "string"
common = true
examples = ["http://10.24.32.122:9000"]
description = """\
-The host of your Elasticsearch cluster. This should be the full URL as shown \
+The endpoint of your Elasticsearch cluster. This should be the full URL as shown \
in the example.\
"""

4 changes: 2 additions & 2 deletions .meta/sinks/humio_logs.toml.erb
@@ -78,11 +78,11 @@ to use to ingest the data.
If unset, Humio will default it to none.
"""

-[sinks.humio_logs.options.host]
+[sinks.humio_logs.options.endpoint]
type = "string"
default = "https://cloud.humio.com"
examples = ["http://myhumiohost.com"]
description = "The optional host to send Humio logs to."
description = "The optional endpoint to send Humio logs to."

<%= render("_partials/fields/_compression_options.toml",
namespace: "sinks.humio_logs.options",
4 changes: 2 additions & 2 deletions .meta/sinks/logdna.toml.erb
@@ -51,11 +51,11 @@ required = true
examples = ["${LOGDNA_API_KEY}", "ef8d5de700e7989468166c40fc8a0ccd"]
description = "The Ingestion API key."

-[sinks.logdna.options.host]
+[sinks.logdna.options.endpoint]
type = "string"
required = false
examples = ["http://127.0.0.1", "http://example.com"]
description = "An optional host that will override the default one."
description = "An optional endpoint that will override the default one."

[sinks.logdna.options.hostname]
type = "string"
4 changes: 2 additions & 2 deletions .meta/sinks/pulsar.toml.erb
@@ -30,12 +30,12 @@ write_to_description = "[Apache Pulsar][urls.pulsar] via the [Pulsar protocol][u
default: "text"
) %>

-[sinks.pulsar.options.address]
+[sinks.pulsar.options.endpoint]
type = "string"
common = true
examples = ["pulsar://127.0.0.1:6650"]
required = true
description = "A host and port pair that the pulsar client should connect to."
description = "Endpoint to which the pulsar client should connect to."

[sinks.pulsar.options.topic]
type = "string"
6 changes: 3 additions & 3 deletions .meta/sinks/sematext_logs.toml.erb
@@ -48,13 +48,13 @@ write_to_description = "[Sematext][urls.sematext] via the [Elasticsearch API][ur
type = "string"
required = false
examples = ["na", "eu"]
description = "The region destination to send logs to. This option is required if `host` is not set."
description = "The region destination to send logs to. This option is required if `endpoint` is not set."

-[sinks.sematext_logs.options.host]
+[sinks.sematext_logs.options.endpoint]
type = "string"
required = false
examples = ["http://127.0.0.1", "http://example.com"]
description = "The host that will be used to send logs to. This option is required if `region` is not set."
description = "The endpoint that will be used to send logs to. This option is required if `region` is not set."

[sinks.sematext_logs.options.token]
type = "string"
4 changes: 2 additions & 2 deletions .meta/sinks/splunk_hec.toml.erb
@@ -48,12 +48,12 @@ write_to_description = "a [Splunk's HTTP Event Collector][urls.splunk_hec]"
default: "text"
) %>

-[sinks.splunk_hec.options.host]
+[sinks.splunk_hec.options.endpoint]
type = "string"
common = true
examples = ["http://my-splunk-host.com"]
required = true
description = "Your Splunk HEC host."
description = "Your Splunk HEC endpoint."

[sinks.splunk_hec.options.host_key]
type = "string"
4 changes: 2 additions & 2 deletions .meta/sources/prometheus.toml.erb
@@ -18,12 +18,12 @@ through_description = "the [Prometheus text exposition format][urls.prometheus_t

<%= render("_partials/fields/_component_options.toml", type: "source", name: "prometheus") %>

-[sources.prometheus.options.hosts]
+[sources.prometheus.options.endpoints]
type = "[string]"
common = true
required = true
examples = [["http://localhost:9090"]]
description = "Host addresses to scrape metrics from."
description = "Endpoints to scrape metrics from."

[sources.prometheus.options.scrape_interval_secs]
type = "uint"
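
The prometheus source scrapes a list of targets rather than a single address, so the plural `hosts` key becomes `endpoints`. The matching Rust change is not among the hunks shown on this page; the sketch below assumes the same serde-alias pattern the sinks use (see the Rust diffs further down), applied to a list-typed field. It is a hypothetical stand-in, not the real `PrometheusConfig`, and assumes the `serde` (with derive) and `toml` crates:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct SketchSourceConfig {
    // Hypothetical: the deprecated plural key `hosts` aliased to `endpoints`.
    #[serde(alias = "hosts")]
    endpoints: Vec<String>,
}

fn main() {
    // Old-style and new-style keys both deserialize into `endpoints`.
    let old: SketchSourceConfig =
        toml::from_str(r#"hosts = ["http://localhost:9090"]"#).unwrap();
    let new: SketchSourceConfig =
        toml::from_str(r#"endpoints = ["http://localhost:9090"]"#).unwrap();
    assert_eq!(old.endpoints, new.endpoints);
}
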
4 changes: 2 additions & 2 deletions .meta/transforms/aws_ec2_metadata.toml.erb
@@ -40,11 +40,11 @@ common = true
default = 10
description = "The interval in seconds at which the EC2 Metadata api will be called."

-[transforms.aws_ec2_metadata.options.host]
+[transforms.aws_ec2_metadata.options.endpoint]
type = "string"
common = true
default = "http://169.254.169.254"
description = "Override the default EC2 Metadata host."
description = "Override the default EC2 Metadata endpoint."

[transforms.aws_ec2_metadata.fields.log.fields.ami-id]
type = "string"
2 changes: 1 addition & 1 deletion src/sinks/aws_kinesis_firehose.rs
@@ -332,7 +332,7 @@ mod integration_tests {

let config = ElasticSearchConfig {
auth: Some(ElasticSearchAuth::Aws { assume_role: None }),
host: "http://localhost:4571".into(),
endpoint: "http://localhost:4571".into(),
index: Some(stream.clone()),
..Default::default()
};
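
The integration tests build configs with Rust's struct-update syntax, which is why a field rename only touches the tests that set that field explicitly; every other field flows in from `Default::default()`. A minimal sketch of the idiom, using a hypothetical reduced struct rather than the real `ElasticSearchConfig`:

#[derive(Debug, Default)]
struct SketchConfig {
    endpoint: String,
    index: Option<String>,
    compression: bool, // stand-in for the real Compression enum
}

fn main() {
    // Only the explicitly listed fields change; the rest come from Default.
    let config = SketchConfig {
        endpoint: "http://localhost:4571".into(),
        index: Some("my-stream".into()),
        ..Default::default()
    };
    assert!(!config.compression);
    println!("{:?}", config);
}
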
14 changes: 8 additions & 6 deletions src/sinks/clickhouse.rs
@@ -21,7 +21,9 @@ use snafu::ResultExt;
#[derive(Deserialize, Serialize, Debug, Clone, Default)]
#[serde(deny_unknown_fields)]
pub struct ClickhouseConfig {
-pub host: String,
+// Deprecated name
+#[serde(alias = "host")]
+pub endpoint: String,
pub table: String,
pub database: Option<String>,
#[serde(default = "Compression::default_gzip")]
@@ -114,7 +116,7 @@ impl HttpSink for ClickhouseConfig {
"default"
};

-let uri = encode_uri(&self.host, database, &self.table).expect("Unable to encode uri");
+let uri = encode_uri(&self.endpoint, database, &self.table).expect("Unable to encode uri");

let mut builder = Request::post(&uri).header("Content-Type", "application/x-ndjson");

@@ -134,7 +136,7 @@

async fn healthcheck(mut client: HttpClient, config: ClickhouseConfig) -> crate::Result<()> {
// TODO: check if table exists?
let uri = format!("{}/?query=SELECT%201", config.host);
let uri = format!("{}/?query=SELECT%201", config.endpoint);
let mut request = Request::get(uri).body(Body::empty()).unwrap();

if let Some(auth) = &config.auth {
@@ -251,7 +253,7 @@ mod integration_tests {
let host = String::from("http://localhost:8123");

let config = ClickhouseConfig {
-host: host.clone(),
+endpoint: host.clone(),
table: table.clone(),
compression: Compression::None,
batch: BatchConfig {
@@ -294,7 +296,7 @@
encoding.timestamp_format = Some(TimestampFormat::Unix);

let config = ClickhouseConfig {
-host: host.clone(),
+endpoint: host.clone(),
table: table.clone(),
compression: Compression::None,
encoding,
@@ -411,7 +413,7 @@ timestamp_format = "unix""#,
let host = String::from("http://localhost:8123");

let config = ClickhouseConfig {
-host: host.clone(),
+endpoint: host.clone(),
table: table.clone(),
compression: Compression::None,
batch: BatchConfig {
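
The struct change at the top of this file is the heart of the compatibility story: serde's `alias` attribute registers the deprecated key as a second known spelling of the new field, so existing configs keep deserializing even under `deny_unknown_fields`. A minimal, self-contained sketch of the pattern (an illustrative struct, not the full `ClickhouseConfig`; assumes the `serde` derive and `toml` crates):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(deny_unknown_fields)]
struct SketchConfig {
    // Old configs with `host = "..."` still deserialize into `endpoint`.
    #[serde(alias = "host")]
    endpoint: String,
}

fn main() {
    let new: SketchConfig = toml::from_str(r#"endpoint = "http://localhost:8123""#).unwrap();
    let old: SketchConfig = toml::from_str(r#"host = "http://localhost:8123""#).unwrap();
    assert_eq!(new.endpoint, old.endpoint);
}

Supplying both keys at once is rejected as a duplicate field, which is the behavior you want from a deprecated alias.
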
4 changes: 2 additions & 2 deletions src/sinks/datadog/logs.rs
@@ -33,10 +33,10 @@ impl SinkConfig for DatadogLogsConfig {
let (host, port, tls) = if let Some(uri) = &self.endpoint {
let host = uri
.host()
.ok_or_else(|| "A host is required for endpoints".to_string())?;
.ok_or_else(|| "A host is required for endpoint".to_string())?;
let port = uri
.port_u16()
.ok_or_else(|| "A port is required for endpoints".to_string())?;
.ok_or_else(|| "A port is required for endpoint".to_string())?;

(host.to_string(), port, self.tls.clone())
} else {
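
This sink splits the configured endpoint URI into host and port, failing fast when either component is missing. A standalone sketch of that extraction, assuming the `http` crate's `Uri` type as in the hunk above (the error type is simplified to `String`, and the example URI is illustrative):

use http::Uri;

// Split an endpoint URI into (host, port), mirroring the checks above.
fn host_and_port(uri: &Uri) -> Result<(String, u16), String> {
    let host = uri
        .host()
        .ok_or_else(|| "A host is required for endpoint".to_string())?;
    let port = uri
        .port_u16()
        .ok_or_else(|| "A port is required for endpoint".to_string())?;
    Ok((host.to_string(), port))
}

fn main() {
    let uri: Uri = "tcp://logs.example.com:10516".parse().unwrap();
    let (host, port) = host_and_port(&uri).unwrap();
    assert_eq!((host.as_str(), port), ("logs.example.com", 10516));
}
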
13 changes: 7 additions & 6 deletions src/sinks/datadog/metrics.rs
@@ -35,8 +35,9 @@ struct DatadogState {
#[serde(deny_unknown_fields)]
pub struct DatadogConfig {
pub namespace: String,
#[serde(default = "default_host")]
pub host: String,
// Deprecated name
#[serde(alias = "host", default = "default_endpoint")]
pub endpoint: String,
pub api_key: String,
#[serde(default)]
pub batch: BatchConfig,
@@ -63,7 +64,7 @@ struct DatadogRequest {
series: Vec<DatadogMetric>,
}

-pub fn default_host() -> String {
+pub fn default_endpoint() -> String {
String::from("https://api.datadoghq.com")
}

@@ -114,7 +115,7 @@ impl SinkConfig for DatadogConfig {
.parse_config(self.batch)?;
let request = self.request.unwrap_with(&REQUEST_DEFAULTS);

-let uri = build_uri(&self.host)?;
+let uri = build_uri(&self.endpoint)?;
let timestamp = Utc::now().timestamp();

let sink = DatadogSink {
@@ -179,7 +180,7 @@ fn build_uri(host: &str) -> crate::Result<Uri> {
}

async fn healthcheck(config: DatadogConfig, mut client: HttpClient) -> crate::Result<()> {
let uri = format!("{}/api/v1/validate", config.host)
let uri = format!("{}/api/v1/validate", config.endpoint)
.parse::<Uri>()
.context(super::UriParseError)?;

@@ -417,7 +418,7 @@ mod tests {
let timestamp = Utc::now().timestamp();
let sink = DatadogSink {
config: sink,
-uri: build_uri(&default_host()).unwrap(),
+uri: build_uri(&default_endpoint()).unwrap(),
last_sent_timestamp: AtomicI64::new(timestamp),
};

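
Here the deprecated alias is paired with a serde default, so the key may be spelled `endpoint`, spelled with the old `host`, or omitted entirely. A sketch of all three behaviors, with a hypothetical minimal struct (assumes the `serde` derive and `toml` crates):

use serde::Deserialize;

fn default_endpoint() -> String {
    String::from("https://api.datadoghq.com")
}

#[derive(Deserialize, Debug)]
struct SketchConfig {
    // Accepts `endpoint = ...`, the deprecated `host = ...`, or nothing at all.
    #[serde(alias = "host", default = "default_endpoint")]
    endpoint: String,
}

fn main() {
    let explicit: SketchConfig =
        toml::from_str(r#"endpoint = "https://api.datadoghq.eu""#).unwrap();
    let deprecated: SketchConfig =
        toml::from_str(r#"host = "https://api.datadoghq.eu""#).unwrap();
    let defaulted: SketchConfig = toml::from_str("").unwrap();

    assert_eq!(explicit.endpoint, deprecated.endpoint);
    assert_eq!(defaulted.endpoint, "https://api.datadoghq.com");
}
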
24 changes: 13 additions & 11 deletions src/sinks/elasticsearch.rs
@@ -35,7 +35,9 @@ use std::convert::TryFrom;
#[derive(Deserialize, Serialize, Debug, Clone, Default)]
#[serde(deny_unknown_fields)]
pub struct ElasticSearchConfig {
-pub host: String,
+// Deprecated name
+#[serde(alias = "host")]
+pub endpoint: String,
pub index: Option<String>,
pub doc_type: Option<String>,
pub id_key: Option<String>,
@@ -358,20 +360,20 @@ impl ElasticSearchCommon {
_ => None,
};

-let base_url = config.host.clone();
+let base_url = config.endpoint.clone();
let region = match &config.aws {
Some(region) => Region::try_from(region)?,
None => region_from_endpoint(&config.host)?,
None => region_from_endpoint(&config.endpoint)?,
};

// Test the configured host, but ignore the result
let uri = format!("{}/_test", &config.host);
let uri = format!("{}/_test", &config.endpoint);
let uri = uri
.parse::<Uri>()
.with_context(|| InvalidHost { host: &base_url })?;
if uri.host().is_none() {
return Err(ParseError::HostMustIncludeHostname {
-host: config.host.clone(),
+host: config.endpoint.clone(),
}
.into());
}
@@ -588,7 +590,7 @@ mod integration_tests {
let pipeline = String::from("test-pipeline");

let config = ElasticSearchConfig {
host: "http://localhost:9200".into(),
endpoint: "http://localhost:9200".into(),
index: Some(index.clone()),
pipeline: Some(pipeline.clone()),
..config()
@@ -602,7 +604,7 @@
async fn structures_events_correctly() {
let index = gen_index();
let config = ElasticSearchConfig {
host: "http://localhost:9200".into(),
endpoint: "http://localhost:9200".into(),
index: Some(index.clone()),
doc_type: Some("log_lines".into()),
id_key: Some("my_id".into()),
@@ -659,7 +661,7 @@

run_insert_tests(
ElasticSearchConfig {
host: "http://localhost:9200".into(),
endpoint: "http://localhost:9200".into(),
doc_type: Some("log_lines".into()),
compression: Compression::None,
..config()
@@ -675,7 +677,7 @@

run_insert_tests(
ElasticSearchConfig {
host: "https://localhost:9201".into(),
endpoint: "https://localhost:9201".into(),
doc_type: Some("log_lines".into()),
compression: Compression::None,
tls: Some(TlsOptions {
@@ -696,7 +698,7 @@
run_insert_tests(
ElasticSearchConfig {
auth: Some(ElasticSearchAuth::Aws { assume_role: None }),
host: "http://localhost:4571".into(),
endpoint: "http://localhost:4571".into(),
..config()
},
false,
@@ -710,7 +712,7 @@

run_insert_tests(
ElasticSearchConfig {
host: "http://localhost:9200".into(),
endpoint: "http://localhost:9200".into(),
doc_type: Some("log_lines".into()),
compression: Compression::None,
..config()
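
`ElasticSearchCommon` probes the configured endpoint by appending a throwaway path, parsing the result as a URI, and rejecting values that carry no hostname. A self-contained sketch of that validation, assuming the `http` crate (errors reduced to `String` in place of the snafu contexts above):

use http::Uri;

// Reject endpoints that parse but carry no hostname, e.g. a bare path.
fn validate_endpoint(endpoint: &str) -> Result<Uri, String> {
    let uri = format!("{}/_test", endpoint);
    let uri = uri
        .parse::<Uri>()
        .map_err(|e| format!("invalid endpoint {}: {}", endpoint, e))?;
    if uri.host().is_none() {
        return Err(format!("endpoint {} must include a hostname", endpoint));
    }
    Ok(uri)
}

fn main() {
    assert!(validate_endpoint("http://localhost:9200").is_ok());
    assert!(validate_endpoint("/just/a/path").is_err());
}
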
8 changes: 5 additions & 3 deletions src/sinks/humio_logs.rs
@@ -13,7 +13,9 @@ const HOST: &str = "https://cloud.humio.com";
#[derive(Clone, Debug, Deserialize, Serialize, Default)]
pub struct HumioLogsConfig {
token: String,
host: Option<String>,
// Deprecated name
#[serde(alias = "host")]
endpoint: Option<String>,
source: Option<Template>,
#[serde(
skip_serializing_if = "crate::serde::skip_serializing_if_default",
@@ -72,11 +74,11 @@ impl SinkConfig for HumioLogsConfig {

impl HumioLogsConfig {
fn build_hec_config(&self) -> HecSinkConfig {
-let host = self.host.clone().unwrap_or_else(|| HOST.to_string());
+let endpoint = self.endpoint.clone().unwrap_or_else(|| HOST.to_string());

HecSinkConfig {
token: self.token.clone(),
-host,
+endpoint,
source: self.source.clone(),
sourcetype: self.event_type.clone(),
encoding: self.encoding.clone().transmute(),
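
The Humio sink is a thin wrapper that forwards to the Splunk HEC sink, filling in the hosted-cloud default when no endpoint is configured. A sketch of that fallback, with the config types reduced to the one field relevant here:

const DEFAULT_ENDPOINT: &str = "https://cloud.humio.com";

// Reduced stand-ins for the real config structs.
struct HumioSketch {
    endpoint: Option<String>,
}

struct HecSketch {
    endpoint: String,
}

impl HumioSketch {
    // Fall back to the hosted-cloud endpoint when none is configured.
    fn build_hec_config(&self) -> HecSketch {
        let endpoint = self
            .endpoint
            .clone()
            .unwrap_or_else(|| DEFAULT_ENDPOINT.to_string());
        HecSketch { endpoint }
    }
}

fn main() {
    let defaulted = HumioSketch { endpoint: None }.build_hec_config();
    assert_eq!(defaulted.endpoint, DEFAULT_ENDPOINT);

    let custom = HumioSketch {
        endpoint: Some("http://myhumiohost.com".into()),
    }
    .build_hec_config();
    assert_eq!(custom.endpoint, "http://myhumiohost.com");
}
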
(Diff truncated; the remaining changed files are not shown.)
